From 14794731a1956e7a6683600182250dd28565cee5 Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:30:28 +0000 Subject: [PATCH 1/8] Sync typeshed. Source commit: https://github.com/python/typeshed/commit/d6f4a0f7102b1400a21742cf9b7ea93614e2b6ec --- .../vendor/typeshed/pyproject.toml | 254 ++ .../vendor/typeshed/source_commit.txt | 2 +- .../vendor/typeshed/stdlib/__future__.pyi | 63 +- .../vendor/typeshed/stdlib/_asyncio.pyi | 238 +- .../vendor/typeshed/stdlib/_bisect.pyi | 103 +- .../vendor/typeshed/stdlib/_blake2.pyi | 36 +- .../vendor/typeshed/stdlib/_bootlocale.pyi | 6 - .../vendor/typeshed/stdlib/_bz2.pyi | 60 +- .../vendor/typeshed/stdlib/_codecs.pyi | 56 +- .../typeshed/stdlib/_collections_abc.pyi | 29 +- .../vendor/typeshed/stdlib/_compression.pyi | 7 +- .../vendor/typeshed/stdlib/_contextvars.pyi | 67 +- .../vendor/typeshed/stdlib/_csv.pyi | 132 +- .../vendor/typeshed/stdlib/_ctypes.pyi | 137 +- .../vendor/typeshed/stdlib/_curses.pyi | 1199 +---- .../vendor/typeshed/stdlib/_curses_panel.pyi | 70 +- .../vendor/typeshed/stdlib/_dbm.pyi | 24 +- .../vendor/typeshed/stdlib/_decimal.pyi | 32 +- .../typeshed/stdlib/_frozen_importlib.pyi | 171 +- .../stdlib/_frozen_importlib_external.pyi | 343 +- .../vendor/typeshed/stdlib/_gdbm.pyi | 37 +- .../vendor/typeshed/stdlib/_hashlib.pyi | 261 +- .../vendor/typeshed/stdlib/_heapq.pyi | 85 +- .../vendor/typeshed/stdlib/_imp.pyi | 78 +- .../typeshed/stdlib/_interpchannels.pyi | 210 +- .../vendor/typeshed/stdlib/_interpqueues.pyi | 95 +- .../vendor/typeshed/stdlib/_interpreters.pyi | 170 +- .../vendor/typeshed/stdlib/_io.pyi | 687 +-- .../vendor/typeshed/stdlib/_json.pyi | 71 +- .../vendor/typeshed/stdlib/_locale.pyi | 52 +- .../vendor/typeshed/stdlib/_lsprof.pyi | 56 +- .../vendor/typeshed/stdlib/_lzma.pyi | 108 +- .../vendor/typeshed/stdlib/_markupbase.pyi | 15 +- .../vendor/typeshed/stdlib/_msi.pyi | 33 +- .../vendor/typeshed/stdlib/_operator.pyi | 243 +- .../vendor/typeshed/stdlib/_osx_support.pyi | 104 +- .../vendor/typeshed/stdlib/_pickle.pyi | 185 +- .../typeshed/stdlib/_posixsubprocess.pyi | 50 +- .../vendor/typeshed/stdlib/_py_abc.pyi | 27 +- .../vendor/typeshed/stdlib/_pydecimal.pyi | 2 - .../vendor/typeshed/stdlib/_queue.pyi | 57 +- .../vendor/typeshed/stdlib/_random.pyi | 27 +- .../vendor/typeshed/stdlib/_sitebuiltins.pyi | 17 - .../vendor/typeshed/stdlib/_socket.pyi | 574 +-- .../vendor/typeshed/stdlib/_sqlite3.pyi | 60 +- .../vendor/typeshed/stdlib/_ssl.pyi | 132 +- .../vendor/typeshed/stdlib/_stat.pyi | 152 +- .../vendor/typeshed/stdlib/_struct.pyi | 140 +- .../vendor/typeshed/stdlib/_thread.pyi | 331 +- .../typeshed/stdlib/_threading_local.pyi | 19 +- .../vendor/typeshed/stdlib/_tkinter.pyi | 46 +- .../vendor/typeshed/stdlib/_tracemalloc.pyi | 72 +- .../typeshed/stdlib/_typeshed/__init__.pyi | 3 + .../vendor/typeshed/stdlib/_warnings.pyi | 30 +- .../vendor/typeshed/stdlib/_weakref.pyi | 17 +- .../vendor/typeshed/stdlib/_weakrefset.pyi | 6 +- .../vendor/typeshed/stdlib/_winapi.pyi | 142 +- .../vendor/typeshed/stdlib/_zstd.pyi | 261 +- .../vendor/typeshed/stdlib/abc.pyi | 117 +- .../vendor/typeshed/stdlib/aifc.pyi | 138 - .../vendor/typeshed/stdlib/annotationlib.pyi | 129 +- .../vendor/typeshed/stdlib/argparse.pyi | 248 +- .../vendor/typeshed/stdlib/array.pyi | 221 +- .../vendor/typeshed/stdlib/ast.pyi | 963 +--- .../vendor/typeshed/stdlib/asynchat.pyi | 36 +- .../typeshed/stdlib/asyncio/__init__.pyi | 2 - .../typeshed/stdlib/asyncio/base_events.pyi | 384 +- .../typeshed/stdlib/asyncio/base_futures.pyi | 16 +- 
.../stdlib/asyncio/base_subprocess.pyi | 7 +- .../typeshed/stdlib/asyncio/constants.pyi | 2 - .../typeshed/stdlib/asyncio/coroutines.pyi | 14 +- .../vendor/typeshed/stdlib/asyncio/events.pyi | 531 +-- .../typeshed/stdlib/asyncio/exceptions.pyi | 35 +- .../stdlib/asyncio/format_helpers.pyi | 23 +- .../typeshed/stdlib/asyncio/futures.pyi | 5 +- .../vendor/typeshed/stdlib/asyncio/graph.pyi | 52 +- .../vendor/typeshed/stdlib/asyncio/locks.pyi | 263 +- .../vendor/typeshed/stdlib/asyncio/log.pyi | 2 - .../vendor/typeshed/stdlib/asyncio/mixins.pyi | 2 - .../stdlib/asyncio/proactor_events.pyi | 20 +- .../typeshed/stdlib/asyncio/protocols.pyi | 190 +- .../vendor/typeshed/stdlib/asyncio/queues.pyi | 136 +- .../typeshed/stdlib/asyncio/runners.pyi | 91 +- .../stdlib/asyncio/selector_events.pyi | 19 +- .../typeshed/stdlib/asyncio/sslproto.pyi | 182 +- .../typeshed/stdlib/asyncio/staggered.pyi | 54 +- .../typeshed/stdlib/asyncio/streams.pyi | 251 +- .../typeshed/stdlib/asyncio/subprocess.pyi | 6 +- .../typeshed/stdlib/asyncio/taskgroups.pyi | 23 +- .../vendor/typeshed/stdlib/asyncio/tasks.pyi | 357 +- .../typeshed/stdlib/asyncio/threads.pyi | 13 +- .../typeshed/stdlib/asyncio/timeouts.pyi | 64 +- .../vendor/typeshed/stdlib/asyncio/tools.pyi | 27 +- .../typeshed/stdlib/asyncio/transports.pyi | 229 +- .../vendor/typeshed/stdlib/asyncio/trsock.pyi | 7 - .../typeshed/stdlib/asyncio/unix_events.pyi | 272 +- .../stdlib/asyncio/windows_events.pyi | 36 +- .../typeshed/stdlib/asyncio/windows_utils.pyi | 15 +- .../vendor/typeshed/stdlib/asyncore.pyi | 21 - .../vendor/typeshed/stdlib/atexit.pyi | 39 +- .../vendor/typeshed/stdlib/audioop.pyi | 103 +- .../vendor/typeshed/stdlib/base64.pyi | 211 +- .../vendor/typeshed/stdlib/bdb.pyi | 387 +- .../vendor/typeshed/stdlib/binascii.pyi | 111 +- .../vendor/typeshed/stdlib/binhex.pyi | 13 +- .../vendor/typeshed/stdlib/bisect.pyi | 2 - .../vendor/typeshed/stdlib/builtins.pyi | 3860 +++------------- .../vendor/typeshed/stdlib/bz2.pyi | 158 +- .../vendor/typeshed/stdlib/cProfile.pyi | 33 +- .../vendor/typeshed/stdlib/calendar.pyi | 303 +- .../vendor/typeshed/stdlib/cgi.pyi | 229 +- .../vendor/typeshed/stdlib/cgitb.pyi | 53 +- .../vendor/typeshed/stdlib/chunk.pyi | 79 +- .../vendor/typeshed/stdlib/cmath.pyi | 117 +- .../vendor/typeshed/stdlib/cmd.pyi | 163 +- .../vendor/typeshed/stdlib/code.pyi | 232 +- .../vendor/typeshed/stdlib/codecs.pyi | 548 +-- .../vendor/typeshed/stdlib/codeop.pyi | 107 +- .../typeshed/stdlib/collections/__init__.pyi | 529 +-- .../typeshed/stdlib/collections/abc.pyi | 5 - .../vendor/typeshed/stdlib/colorsys.pyi | 18 - .../vendor/typeshed/stdlib/compileall.pyi | 129 +- .../stdlib/compression/_common/_streams.pyi | 7 +- .../typeshed/stdlib/compression/bz2.pyi | 6 - .../typeshed/stdlib/compression/gzip.pyi | 6 - .../typeshed/stdlib/compression/lzma.pyi | 10 - .../typeshed/stdlib/compression/zlib.pyi | 15 - .../stdlib/compression/zstd/__init__.pyi | 99 +- .../stdlib/compression/zstd/_zstdfile.pyi | 149 +- .../stdlib/concurrent/futures/__init__.pyi | 2 - .../stdlib/concurrent/futures/_base.pyi | 275 +- .../stdlib/concurrent/futures/interpreter.pyi | 20 +- .../stdlib/concurrent/futures/process.pyi | 172 +- .../stdlib/concurrent/futures/thread.pyi | 32 +- .../concurrent/interpreters/__init__.pyi | 94 +- .../concurrent/interpreters/_crossinterp.pyi | 19 +- .../concurrent/interpreters/_queues.pyi | 90 +- .../vendor/typeshed/stdlib/configparser.pyi | 399 +- .../vendor/typeshed/stdlib/contextlib.pyi | 273 +- .../vendor/typeshed/stdlib/copy.pyi | 71 +- 
.../vendor/typeshed/stdlib/copyreg.pyi | 14 +- .../vendor/typeshed/stdlib/crypt.pyi | 32 +- .../vendor/typeshed/stdlib/csv.pyi | 106 +- .../typeshed/stdlib/ctypes/__init__.pyi | 116 +- .../vendor/typeshed/stdlib/ctypes/_endian.pyi | 10 +- .../stdlib/ctypes/macholib/__init__.pyi | 8 - .../typeshed/stdlib/ctypes/macholib/dyld.pyi | 20 +- .../typeshed/stdlib/ctypes/macholib/dylib.pyi | 25 +- .../stdlib/ctypes/macholib/framework.pyi | 25 +- .../vendor/typeshed/stdlib/ctypes/util.pyi | 6 +- .../typeshed/stdlib/curses/__init__.pyi | 20 +- .../vendor/typeshed/stdlib/curses/ascii.pyi | 2 - .../vendor/typeshed/stdlib/curses/panel.pyi | 5 - .../vendor/typeshed/stdlib/curses/textpad.pyi | 42 +- .../vendor/typeshed/stdlib/dataclasses.pyi | 300 +- .../vendor/typeshed/stdlib/datetime.pyi | 352 +- .../vendor/typeshed/stdlib/dbm/__init__.pyi | 81 +- .../vendor/typeshed/stdlib/dbm/dumb.pyi | 53 +- .../vendor/typeshed/stdlib/dbm/gnu.pyi | 2 - .../vendor/typeshed/stdlib/dbm/ndbm.pyi | 2 - .../vendor/typeshed/stdlib/dbm/sqlite3.pyi | 15 +- .../vendor/typeshed/stdlib/decimal.pyi | 996 +--- .../vendor/typeshed/stdlib/difflib.pyi | 778 +--- .../vendor/typeshed/stdlib/dis.pyi | 255 +- .../typeshed/stdlib/distutils/__init__.pyi | 10 - .../stdlib/distutils/_msvccompiler.pyi | 13 - .../stdlib/distutils/archive_util.pyi | 53 +- .../stdlib/distutils/bcppcompiler.pyi | 11 +- .../typeshed/stdlib/distutils/ccompiler.pyi | 370 +- .../vendor/typeshed/stdlib/distutils/cmd.pyi | 167 +- .../stdlib/distutils/command/__init__.pyi | 6 - .../stdlib/distutils/command/bdist.pyi | 9 +- .../stdlib/distutils/command/bdist_dumb.pyi | 7 - .../stdlib/distutils/command/bdist_msi.pyi | 64 +- .../stdlib/distutils/command/bdist_rpm.pyi | 6 - .../distutils/command/bdist_wininst.pyi | 6 - .../stdlib/distutils/command/build.pyi | 5 - .../stdlib/distutils/command/build_clib.pyi | 19 +- .../stdlib/distutils/command/build_ext.pyi | 70 +- .../stdlib/distutils/command/build_py.pyi | 44 +- .../distutils/command/build_scripts.pyi | 12 +- .../stdlib/distutils/command/check.pyi | 35 +- .../stdlib/distutils/command/clean.pyi | 5 - .../stdlib/distutils/command/config.pyi | 89 +- .../stdlib/distutils/command/install.pyi | 92 +- .../stdlib/distutils/command/install_data.pyi | 6 - .../distutils/command/install_egg_info.pyi | 29 +- .../distutils/command/install_headers.pyi | 6 - .../stdlib/distutils/command/install_lib.pyi | 20 +- .../distutils/command/install_scripts.pyi | 6 - .../stdlib/distutils/command/register.pyi | 50 +- .../stdlib/distutils/command/sdist.pyi | 103 +- .../stdlib/distutils/command/upload.pyi | 7 - .../typeshed/stdlib/distutils/config.pyi | 27 +- .../vendor/typeshed/stdlib/distutils/core.pyi | 74 +- .../stdlib/distutils/cygwinccompiler.pyi | 48 +- .../typeshed/stdlib/distutils/dep_util.pyi | 37 +- .../typeshed/stdlib/distutils/dir_util.pyi | 58 +- .../vendor/typeshed/stdlib/distutils/dist.pyi | 195 +- .../typeshed/stdlib/distutils/errors.pyi | 116 +- .../typeshed/stdlib/distutils/extension.pyi | 70 - .../stdlib/distutils/fancy_getopt.pyi | 68 +- .../typeshed/stdlib/distutils/file_util.pyi | 47 +- .../typeshed/stdlib/distutils/filelist.pyi | 85 +- .../vendor/typeshed/stdlib/distutils/log.pyi | 2 - .../stdlib/distutils/msvccompiler.pyi | 11 +- .../typeshed/stdlib/distutils/spawn.pyi | 33 +- .../typeshed/stdlib/distutils/sysconfig.pyi | 91 +- .../typeshed/stdlib/distutils/text_file.pyi | 122 +- .../stdlib/distutils/unixccompiler.pyi | 15 - .../vendor/typeshed/stdlib/distutils/util.pyi | 159 +- .../typeshed/stdlib/distutils/version.pyi | 91 - 
.../vendor/typeshed/stdlib/doctest.pyi | 857 +--- .../vendor/typeshed/stdlib/email/__init__.pyi | 30 +- .../stdlib/email/_header_value_parser.pyi | 508 +-- .../typeshed/stdlib/email/_policybase.pyi | 318 +- .../typeshed/stdlib/email/base64mime.pyi | 53 +- .../vendor/typeshed/stdlib/email/charset.pyi | 151 +- .../vendor/typeshed/stdlib/email/encoders.pyi | 23 +- .../vendor/typeshed/stdlib/email/errors.pyi | 100 +- .../typeshed/stdlib/email/feedparser.pyi | 48 +- .../typeshed/stdlib/email/generator.pyi | 122 +- .../vendor/typeshed/stdlib/email/header.pyi | 125 +- .../typeshed/stdlib/email/headerregistry.pyi | 196 +- .../typeshed/stdlib/email/iterators.pyi | 28 +- .../vendor/typeshed/stdlib/email/message.pyi | 494 +- .../stdlib/email/mime/application.pyi | 20 +- .../typeshed/stdlib/email/mime/audio.pyi | 26 +- .../typeshed/stdlib/email/mime/base.pyi | 12 +- .../typeshed/stdlib/email/mime/image.pyi | 25 +- .../typeshed/stdlib/email/mime/message.pyi | 15 +- .../typeshed/stdlib/email/mime/multipart.pyi | 24 +- .../stdlib/email/mime/nonmultipart.pyi | 5 +- .../typeshed/stdlib/email/mime/text.pyi | 18 +- .../vendor/typeshed/stdlib/email/parser.pyi | 81 +- .../vendor/typeshed/stdlib/email/policy.pyi | 213 +- .../typeshed/stdlib/email/quoprimime.pyi | 105 +- .../vendor/typeshed/stdlib/email/utils.pyi | 158 +- .../typeshed/stdlib/encodings/__init__.pyi | 43 +- .../typeshed/stdlib/encodings/aliases.pyi | 18 - .../typeshed/stdlib/encodings/ascii.pyi | 9 - .../stdlib/encodings/base64_codec.pyi | 7 - .../typeshed/stdlib/encodings/bz2_codec.pyi | 9 - .../typeshed/stdlib/encodings/charmap.pyi | 12 - .../typeshed/stdlib/encodings/cp037.pyi | 2 - .../typeshed/stdlib/encodings/cp1006.pyi | 2 - .../typeshed/stdlib/encodings/cp1026.pyi | 2 - .../typeshed/stdlib/encodings/cp1125.pyi | 2 - .../typeshed/stdlib/encodings/cp1140.pyi | 2 - .../typeshed/stdlib/encodings/cp1250.pyi | 2 - .../typeshed/stdlib/encodings/cp1251.pyi | 2 - .../typeshed/stdlib/encodings/cp1252.pyi | 2 - .../typeshed/stdlib/encodings/cp1253.pyi | 2 - .../typeshed/stdlib/encodings/cp1254.pyi | 2 - .../typeshed/stdlib/encodings/cp1255.pyi | 2 - .../typeshed/stdlib/encodings/cp1256.pyi | 2 - .../typeshed/stdlib/encodings/cp1257.pyi | 2 - .../typeshed/stdlib/encodings/cp1258.pyi | 2 - .../typeshed/stdlib/encodings/cp273.pyi | 2 - .../typeshed/stdlib/encodings/cp424.pyi | 2 - .../typeshed/stdlib/encodings/cp437.pyi | 2 - .../typeshed/stdlib/encodings/cp500.pyi | 2 - .../typeshed/stdlib/encodings/cp720.pyi | 5 - .../typeshed/stdlib/encodings/cp737.pyi | 2 - .../typeshed/stdlib/encodings/cp775.pyi | 2 - .../typeshed/stdlib/encodings/cp850.pyi | 2 - .../typeshed/stdlib/encodings/cp852.pyi | 2 - .../typeshed/stdlib/encodings/cp855.pyi | 2 - .../typeshed/stdlib/encodings/cp856.pyi | 2 - .../typeshed/stdlib/encodings/cp857.pyi | 2 - .../typeshed/stdlib/encodings/cp858.pyi | 2 - .../typeshed/stdlib/encodings/cp860.pyi | 2 - .../typeshed/stdlib/encodings/cp861.pyi | 2 - .../typeshed/stdlib/encodings/cp862.pyi | 2 - .../typeshed/stdlib/encodings/cp863.pyi | 2 - .../typeshed/stdlib/encodings/cp864.pyi | 2 - .../typeshed/stdlib/encodings/cp865.pyi | 2 - .../typeshed/stdlib/encodings/cp866.pyi | 2 - .../typeshed/stdlib/encodings/cp869.pyi | 2 - .../typeshed/stdlib/encodings/cp874.pyi | 2 - .../typeshed/stdlib/encodings/cp875.pyi | 2 - .../typeshed/stdlib/encodings/hex_codec.pyi | 7 - .../typeshed/stdlib/encodings/hp_roman8.pyi | 11 - .../typeshed/stdlib/encodings/iso8859_1.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_10.pyi | 2 - 
.../typeshed/stdlib/encodings/iso8859_11.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_13.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_14.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_15.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_16.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_2.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_3.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_4.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_5.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_6.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_7.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_8.pyi | 2 - .../typeshed/stdlib/encodings/iso8859_9.pyi | 2 - .../typeshed/stdlib/encodings/koi8_r.pyi | 2 - .../typeshed/stdlib/encodings/koi8_t.pyi | 2 - .../typeshed/stdlib/encodings/koi8_u.pyi | 2 - .../typeshed/stdlib/encodings/kz1048.pyi | 2 - .../typeshed/stdlib/encodings/latin_1.pyi | 9 - .../typeshed/stdlib/encodings/mac_arabic.pyi | 2 - .../stdlib/encodings/mac_croatian.pyi | 2 - .../stdlib/encodings/mac_cyrillic.pyi | 2 - .../typeshed/stdlib/encodings/mac_farsi.pyi | 2 - .../typeshed/stdlib/encodings/mac_greek.pyi | 2 - .../typeshed/stdlib/encodings/mac_iceland.pyi | 2 - .../typeshed/stdlib/encodings/mac_latin2.pyi | 9 - .../typeshed/stdlib/encodings/mac_roman.pyi | 2 - .../stdlib/encodings/mac_romanian.pyi | 2 - .../typeshed/stdlib/encodings/mac_turkish.pyi | 2 - .../vendor/typeshed/stdlib/encodings/mbcs.pyi | 10 - .../vendor/typeshed/stdlib/encodings/oem.pyi | 2 - .../typeshed/stdlib/encodings/palmos.pyi | 6 - .../typeshed/stdlib/encodings/ptcp154.pyi | 9 - .../typeshed/stdlib/encodings/punycode.pyi | 42 +- .../stdlib/encodings/quopri_codec.pyi | 5 - .../stdlib/encodings/raw_unicode_escape.pyi | 9 - .../typeshed/stdlib/encodings/rot_13.pyi | 7 - .../typeshed/stdlib/encodings/tis_620.pyi | 2 - .../typeshed/stdlib/encodings/undefined.pyi | 12 - .../stdlib/encodings/unicode_escape.pyi | 9 - .../typeshed/stdlib/encodings/utf_16.pyi | 9 - .../typeshed/stdlib/encodings/utf_16_be.pyi | 9 - .../typeshed/stdlib/encodings/utf_16_le.pyi | 9 - .../typeshed/stdlib/encodings/utf_32.pyi | 4 - .../typeshed/stdlib/encodings/utf_32_be.pyi | 4 - .../typeshed/stdlib/encodings/utf_32_le.pyi | 4 - .../typeshed/stdlib/encodings/utf_7.pyi | 5 - .../typeshed/stdlib/encodings/utf_8.pyi | 9 - .../typeshed/stdlib/encodings/utf_8_sig.pyi | 10 - .../typeshed/stdlib/encodings/uu_codec.pyi | 9 - .../typeshed/stdlib/encodings/zlib_codec.pyi | 7 - .../typeshed/stdlib/ensurepip/__init__.pyi | 14 +- .../vendor/typeshed/stdlib/enum.pyi | 397 +- .../vendor/typeshed/stdlib/errno.pyi | 14 - .../vendor/typeshed/stdlib/faulthandler.pyi | 37 +- .../vendor/typeshed/stdlib/fcntl.pyi | 85 +- .../vendor/typeshed/stdlib/filecmp.pyi | 97 +- .../vendor/typeshed/stdlib/fileinput.pyi | 162 +- .../vendor/typeshed/stdlib/fnmatch.pyi | 50 +- .../vendor/typeshed/stdlib/formatter.pyi | 62 +- .../vendor/typeshed/stdlib/fractions.pyi | 285 +- .../vendor/typeshed/stdlib/ftplib.pyi | 375 +- .../vendor/typeshed/stdlib/functools.pyi | 259 +- .../ty_vendored/vendor/typeshed/stdlib/gc.pyi | 134 +- .../vendor/typeshed/stdlib/genericpath.pyi | 70 +- .../vendor/typeshed/stdlib/getopt.pyi | 61 +- .../vendor/typeshed/stdlib/getpass.pyi | 59 +- .../vendor/typeshed/stdlib/gettext.pyi | 17 +- .../vendor/typeshed/stdlib/glob.pyi | 100 +- .../vendor/typeshed/stdlib/graphlib.pyi | 97 +- .../vendor/typeshed/stdlib/grp.pyi | 59 +- .../vendor/typeshed/stdlib/gzip.pyi | 124 +- .../vendor/typeshed/stdlib/hashlib.pyi | 68 +- .../vendor/typeshed/stdlib/heapq.pyi | 68 +- 
.../vendor/typeshed/stdlib/hmac.pyi | 77 +- .../vendor/typeshed/stdlib/html/__init__.pyi | 22 +- .../vendor/typeshed/stdlib/html/entities.pyi | 2 - .../vendor/typeshed/stdlib/html/parser.pyi | 60 +- .../vendor/typeshed/stdlib/http/__init__.pyi | 27 - .../vendor/typeshed/stdlib/http/client.pyi | 259 +- .../vendor/typeshed/stdlib/http/cookiejar.pyi | 239 +- .../vendor/typeshed/stdlib/http/cookies.pyi | 158 +- .../vendor/typeshed/stdlib/http/server.pyi | 449 +- .../vendor/typeshed/stdlib/imaplib.pyi | 555 +-- .../vendor/typeshed/stdlib/imghdr.pyi | 6 +- .../vendor/typeshed/stdlib/imp.pyi | 124 +- .../typeshed/stdlib/importlib/__init__.pyi | 33 +- .../vendor/typeshed/stdlib/importlib/_abc.pyi | 36 +- .../typeshed/stdlib/importlib/_bootstrap.pyi | 9 - .../stdlib/importlib/_bootstrap_external.pyi | 9 - .../vendor/typeshed/stdlib/importlib/abc.pyi | 398 +- .../typeshed/stdlib/importlib/machinery.pyi | 5 +- .../stdlib/importlib/metadata/__init__.pyi | 560 +-- .../stdlib/importlib/metadata/_meta.pyi | 26 +- .../typeshed/stdlib/importlib/readers.pyi | 37 +- .../stdlib/importlib/resources/__init__.pyi | 72 +- .../stdlib/importlib/resources/_common.pyi | 50 +- .../importlib/resources/_functional.pyi | 39 +- .../stdlib/importlib/resources/abc.pyi | 105 +- .../stdlib/importlib/resources/simple.pyi | 48 +- .../typeshed/stdlib/importlib/simple.pyi | 7 - .../vendor/typeshed/stdlib/importlib/util.pyi | 71 +- .../vendor/typeshed/stdlib/inspect.pyi | 842 +--- .../ty_vendored/vendor/typeshed/stdlib/io.pyi | 118 +- .../vendor/typeshed/stdlib/ipaddress.pyi | 863 +--- .../vendor/typeshed/stdlib/itertools.pyi | 306 +- .../vendor/typeshed/stdlib/json/__init__.pyi | 240 +- .../vendor/typeshed/stdlib/json/decoder.pyi | 93 +- .../vendor/typeshed/stdlib/json/encoder.pyi | 136 +- .../vendor/typeshed/stdlib/json/scanner.pyi | 2 - .../vendor/typeshed/stdlib/json/tool.pyi | 6 - .../vendor/typeshed/stdlib/keyword.pyi | 18 +- .../typeshed/stdlib/lib2to3/btm_matcher.pyi | 47 +- .../typeshed/stdlib/lib2to3/fixer_base.pyi | 105 +- .../stdlib/lib2to3/fixes/fix_apply.pyi | 5 - .../stdlib/lib2to3/fixes/fix_asserts.pyi | 2 - .../stdlib/lib2to3/fixes/fix_basestring.pyi | 2 - .../stdlib/lib2to3/fixes/fix_buffer.pyi | 2 - .../stdlib/lib2to3/fixes/fix_dict.pyi | 26 - .../stdlib/lib2to3/fixes/fix_except.pyi | 22 - .../stdlib/lib2to3/fixes/fix_exec.pyi | 8 - .../stdlib/lib2to3/fixes/fix_execfile.pyi | 6 - .../stdlib/lib2to3/fixes/fix_exitfunc.pyi | 4 - .../stdlib/lib2to3/fixes/fix_filter.pyi | 12 - .../stdlib/lib2to3/fixes/fix_funcattrs.pyi | 2 - .../stdlib/lib2to3/fixes/fix_future.pyi | 5 - .../stdlib/lib2to3/fixes/fix_getcwdu.pyi | 4 - .../stdlib/lib2to3/fixes/fix_has_key.pyi | 28 - .../stdlib/lib2to3/fixes/fix_idioms.pyi | 28 - .../stdlib/lib2to3/fixes/fix_import.pyi | 17 +- .../stdlib/lib2to3/fixes/fix_imports.pyi | 2 - .../stdlib/lib2to3/fixes/fix_imports2.pyi | 4 - .../stdlib/lib2to3/fixes/fix_input.pyi | 2 - .../stdlib/lib2to3/fixes/fix_intern.pyi | 5 - .../stdlib/lib2to3/fixes/fix_isinstance.pyi | 8 - .../stdlib/lib2to3/fixes/fix_itertools.pyi | 9 - .../lib2to3/fixes/fix_itertools_imports.pyi | 2 - .../stdlib/lib2to3/fixes/fix_long.pyi | 2 - .../typeshed/stdlib/lib2to3/fixes/fix_map.pyi | 18 - .../stdlib/lib2to3/fixes/fix_metaclass.pyi | 44 +- .../stdlib/lib2to3/fixes/fix_methodattrs.pyi | 2 - .../typeshed/stdlib/lib2to3/fixes/fix_ne.pyi | 2 - .../stdlib/lib2to3/fixes/fix_next.pyi | 2 - .../stdlib/lib2to3/fixes/fix_nonzero.pyi | 2 - .../stdlib/lib2to3/fixes/fix_numliterals.pyi | 2 - .../stdlib/lib2to3/fixes/fix_operator.pyi | 11 
- .../stdlib/lib2to3/fixes/fix_paren.pyi | 5 - .../stdlib/lib2to3/fixes/fix_print.pyi | 12 - .../stdlib/lib2to3/fixes/fix_raise.pyi | 23 - .../stdlib/lib2to3/fixes/fix_raw_input.pyi | 2 - .../stdlib/lib2to3/fixes/fix_reduce.pyi | 6 - .../stdlib/lib2to3/fixes/fix_reload.pyi | 5 - .../stdlib/lib2to3/fixes/fix_renames.pyi | 6 - .../stdlib/lib2to3/fixes/fix_repr.pyi | 2 - .../stdlib/lib2to3/fixes/fix_set_literal.pyi | 4 - .../lib2to3/fixes/fix_standarderror.pyi | 2 - .../stdlib/lib2to3/fixes/fix_sys_exc.pyi | 7 - .../stdlib/lib2to3/fixes/fix_throw.pyi | 9 - .../stdlib/lib2to3/fixes/fix_tuple_params.pyi | 19 - .../stdlib/lib2to3/fixes/fix_types.pyi | 18 - .../stdlib/lib2to3/fixes/fix_unicode.pyi | 10 - .../stdlib/lib2to3/fixes/fix_urllib.pyi | 23 +- .../stdlib/lib2to3/fixes/fix_ws_comma.pyi | 7 - .../stdlib/lib2to3/fixes/fix_xrange.pyi | 2 - .../stdlib/lib2to3/fixes/fix_xreadlines.pyi | 5 - .../typeshed/stdlib/lib2to3/fixes/fix_zip.pyi | 9 - .../vendor/typeshed/stdlib/lib2to3/main.pyi | 47 +- .../stdlib/lib2to3/pgen2/__init__.pyi | 2 - .../typeshed/stdlib/lib2to3/pgen2/driver.pyi | 30 +- .../typeshed/stdlib/lib2to3/pgen2/grammar.pyi | 81 +- .../stdlib/lib2to3/pgen2/literals.pyi | 2 - .../typeshed/stdlib/lib2to3/pgen2/parse.pyi | 105 +- .../typeshed/stdlib/lib2to3/pgen2/token.pyi | 2 - .../stdlib/lib2to3/pgen2/tokenize.pyi | 76 +- .../vendor/typeshed/stdlib/lib2to3/pygram.pyi | 9 +- .../vendor/typeshed/stdlib/lib2to3/pytree.pyi | 335 +- .../typeshed/stdlib/lib2to3/refactor.pyi | 176 +- .../vendor/typeshed/stdlib/linecache.pyi | 50 +- .../vendor/typeshed/stdlib/locale.pyi | 141 +- .../typeshed/stdlib/logging/__init__.pyi | 1099 +---- .../vendor/typeshed/stdlib/logging/config.pyi | 145 +- .../typeshed/stdlib/logging/handlers.pyi | 559 +-- .../vendor/typeshed/stdlib/lzma.pyi | 171 +- .../vendor/typeshed/stdlib/mailbox.pyi | 597 +-- .../vendor/typeshed/stdlib/mailcap.pyi | 24 +- .../vendor/typeshed/stdlib/marshal.pyi | 170 +- .../vendor/typeshed/stdlib/math.pyi | 360 +- .../vendor/typeshed/stdlib/mimetypes.pyi | 200 +- .../vendor/typeshed/stdlib/mmap.pyi | 45 +- .../vendor/typeshed/stdlib/modulefinder.pyi | 26 +- .../typeshed/stdlib/msilib/__init__.pyi | 43 +- .../vendor/typeshed/stdlib/msvcrt.pyi | 100 +- .../stdlib/multiprocessing/connection.pyi | 106 +- .../stdlib/multiprocessing/context.pyi | 137 +- .../stdlib/multiprocessing/forkserver.pyi | 36 +- .../typeshed/stdlib/multiprocessing/heap.pyi | 4 - .../stdlib/multiprocessing/managers.pyi | 183 +- .../typeshed/stdlib/multiprocessing/pool.pyi | 62 +- .../multiprocessing/popen_spawn_win32.pyi | 4 - .../stdlib/multiprocessing/process.pyi | 92 +- .../stdlib/multiprocessing/queues.pyi | 12 +- .../stdlib/multiprocessing/reduction.pyi | 53 +- .../multiprocessing/resource_sharer.pyi | 13 +- .../multiprocessing/resource_tracker.pyi | 18 +- .../stdlib/multiprocessing/shared_memory.pyi | 85 +- .../stdlib/multiprocessing/sharedctypes.pyi | 24 +- .../typeshed/stdlib/multiprocessing/spawn.pyi | 39 +- .../typeshed/stdlib/multiprocessing/util.pyi | 40 +- .../vendor/typeshed/stdlib/netrc.pyi | 7 +- .../vendor/typeshed/stdlib/nis.pyi | 30 +- .../vendor/typeshed/stdlib/nntplib.pyi | 326 +- .../ty_vendored/vendor/typeshed/stdlib/nt.pyi | 6 - .../vendor/typeshed/stdlib/ntpath.pyi | 9 +- .../vendor/typeshed/stdlib/nturl2path.pyi | 28 +- .../vendor/typeshed/stdlib/numbers.pyi | 334 +- .../vendor/typeshed/stdlib/opcode.pyi | 8 +- .../vendor/typeshed/stdlib/operator.pyi | 42 +- .../vendor/typeshed/stdlib/optparse.pyi | 346 +- .../vendor/typeshed/stdlib/os/__init__.pyi | 
2158 ++------- .../vendor/typeshed/stdlib/os/path.pyi | 12 - .../vendor/typeshed/stdlib/parser.pyi | 58 +- .../typeshed/stdlib/pathlib/__init__.pyi | 671 +-- .../vendor/typeshed/stdlib/pathlib/types.pyi | 8 - .../vendor/typeshed/stdlib/pdb.pyi | 902 +--- .../vendor/typeshed/stdlib/pickle.pyi | 183 +- .../vendor/typeshed/stdlib/pickletools.pyi | 376 +- .../vendor/typeshed/stdlib/pipes.pyi | 91 +- .../vendor/typeshed/stdlib/pkgutil.pyi | 206 +- .../vendor/typeshed/stdlib/platform.pyi | 281 +- .../vendor/typeshed/stdlib/plistlib.pyi | 101 +- .../vendor/typeshed/stdlib/poplib.pyi | 196 +- .../vendor/typeshed/stdlib/posix.pyi | 6 - .../vendor/typeshed/stdlib/posixpath.pyi | 114 +- .../vendor/typeshed/stdlib/pprint.pyi | 115 +- .../vendor/typeshed/stdlib/profile.pyi | 60 +- .../vendor/typeshed/stdlib/pstats.pyi | 58 +- .../vendor/typeshed/stdlib/pty.pyi | 45 +- .../vendor/typeshed/stdlib/pwd.pyi | 64 +- .../vendor/typeshed/stdlib/py_compile.pyi | 91 +- .../vendor/typeshed/stdlib/pyclbr.pyi | 64 +- .../vendor/typeshed/stdlib/pydoc.pyi | 405 +- .../typeshed/stdlib/pyexpat/__init__.pyi | 68 +- .../vendor/typeshed/stdlib/pyexpat/errors.pyi | 2 - .../vendor/typeshed/stdlib/pyexpat/model.pyi | 2 - .../vendor/typeshed/stdlib/queue.pyi | 140 +- .../vendor/typeshed/stdlib/quopri.pyi | 20 +- .../vendor/typeshed/stdlib/random.pyi | 391 +- .../ty_vendored/vendor/typeshed/stdlib/re.pyi | 343 +- .../vendor/typeshed/stdlib/readline.pyi | 145 +- .../vendor/typeshed/stdlib/reprlib.pyi | 5 +- .../vendor/typeshed/stdlib/resource.pyi | 74 +- .../vendor/typeshed/stdlib/rlcompleter.pyi | 78 +- .../vendor/typeshed/stdlib/runpy.pyi | 49 +- .../vendor/typeshed/stdlib/sched.pyi | 97 +- .../vendor/typeshed/stdlib/secrets.pyi | 56 +- .../vendor/typeshed/stdlib/select.pyi | 164 +- .../vendor/typeshed/stdlib/selectors.pyi | 134 +- .../vendor/typeshed/stdlib/shelve.pyi | 110 +- .../vendor/typeshed/stdlib/shlex.pyi | 42 +- .../vendor/typeshed/stdlib/shutil.pyi | 329 +- .../vendor/typeshed/stdlib/signal.pyi | 198 +- .../vendor/typeshed/stdlib/site.pyi | 192 +- .../vendor/typeshed/stdlib/smtpd.pyi | 79 +- .../vendor/typeshed/stdlib/smtplib.pyi | 507 +-- .../vendor/typeshed/stdlib/sndhdr.pyi | 38 +- .../vendor/typeshed/stdlib/socket.pyi | 265 +- .../vendor/typeshed/stdlib/socketserver.pyi | 398 +- .../vendor/typeshed/stdlib/spwd.pyi | 73 +- .../typeshed/stdlib/sqlite3/__init__.pyi | 387 +- .../vendor/typeshed/stdlib/sre_compile.pyi | 2 - .../vendor/typeshed/stdlib/sre_constants.pyi | 2 - .../vendor/typeshed/stdlib/sre_parse.pyi | 2 - .../vendor/typeshed/stdlib/ssl.pyi | 495 +- .../vendor/typeshed/stdlib/stat.pyi | 5 - .../vendor/typeshed/stdlib/statistics.pyi | 884 +--- .../typeshed/stdlib/string/__init__.pyi | 30 +- .../typeshed/stdlib/string/templatelib.pyi | 28 +- .../vendor/typeshed/stdlib/stringprep.pyi | 6 - .../vendor/typeshed/stdlib/struct.pyi | 30 - .../vendor/typeshed/stdlib/subprocess.pyi | 540 +-- .../vendor/typeshed/stdlib/sunau.pyi | 107 - .../vendor/typeshed/stdlib/symbol.pyi | 2 - .../vendor/typeshed/stdlib/symtable.pyi | 193 +- .../vendor/typeshed/stdlib/sys/__init__.pyi | 666 +-- .../vendor/typeshed/stdlib/sysconfig.pyi | 83 +- .../vendor/typeshed/stdlib/syslog.pyi | 26 +- .../vendor/typeshed/stdlib/tabnanny.pyi | 28 +- .../vendor/typeshed/stdlib/tarfile.pyi | 480 +- .../vendor/typeshed/stdlib/telnetlib.pyi | 289 +- .../vendor/typeshed/stdlib/tempfile.pyi | 214 +- .../vendor/typeshed/stdlib/termios.pyi | 77 +- .../vendor/typeshed/stdlib/textwrap.pyi | 192 +- .../vendor/typeshed/stdlib/threading.pyi | 634 +-- 
.../vendor/typeshed/stdlib/time.pyi | 319 +- .../vendor/typeshed/stdlib/timeit.pyi | 159 +- .../typeshed/stdlib/tkinter/__init__.pyi | 4043 +++-------------- .../typeshed/stdlib/tkinter/colorchooser.pyi | 22 +- .../vendor/typeshed/stdlib/tkinter/dnd.pyi | 101 - .../typeshed/stdlib/tkinter/filedialog.pyi | 84 +- .../vendor/typeshed/stdlib/tkinter/font.pyi | 61 +- .../typeshed/stdlib/tkinter/messagebox.pyi | 33 +- .../typeshed/stdlib/tkinter/scrolledtext.pyi | 13 - .../typeshed/stdlib/tkinter/simpledialog.pyi | 100 +- .../vendor/typeshed/stdlib/tkinter/tix.pyi | 477 +- .../vendor/typeshed/stdlib/tkinter/ttk.pyi | 1250 +---- .../vendor/typeshed/stdlib/token.pyi | 2 - .../vendor/typeshed/stdlib/tokenize.pyi | 99 +- .../vendor/typeshed/stdlib/tomllib.pyi | 20 +- .../vendor/typeshed/stdlib/trace.pyi | 100 +- .../vendor/typeshed/stdlib/traceback.pyi | 382 +- .../vendor/typeshed/stdlib/tracemalloc.pyi | 119 +- .../vendor/typeshed/stdlib/tty.pyi | 15 +- .../vendor/typeshed/stdlib/turtle.pyi | 3671 +-------------- .../vendor/typeshed/stdlib/types.pyi | 440 +- .../vendor/typeshed/stdlib/typing.pyi | 1085 +---- .../typeshed/stdlib/typing_extensions.pyi | 708 +-- .../vendor/typeshed/stdlib/unicodedata.pyi | 197 +- .../typeshed/stdlib/unittest/__init__.pyi | 46 - .../vendor/typeshed/stdlib/unittest/_log.pyi | 4 - .../typeshed/stdlib/unittest/async_case.pyi | 7 +- .../vendor/typeshed/stdlib/unittest/case.pyi | 520 +-- .../typeshed/stdlib/unittest/loader.pyi | 71 +- .../vendor/typeshed/stdlib/unittest/main.pyi | 6 - .../vendor/typeshed/stdlib/unittest/mock.pyi | 533 +-- .../typeshed/stdlib/unittest/result.pyi | 88 +- .../typeshed/stdlib/unittest/runner.pyi | 37 +- .../vendor/typeshed/stdlib/unittest/suite.pyi | 17 +- .../vendor/typeshed/stdlib/unittest/util.pyi | 32 +- .../vendor/typeshed/stdlib/urllib/error.pyi | 17 - .../vendor/typeshed/stdlib/urllib/parse.pyi | 347 +- .../vendor/typeshed/stdlib/urllib/request.pyi | 418 +- .../typeshed/stdlib/urllib/response.pyi | 16 - .../typeshed/stdlib/urllib/robotparser.pyi | 55 +- .../ty_vendored/vendor/typeshed/stdlib/uu.pyi | 15 +- .../vendor/typeshed/stdlib/uuid.pyi | 215 +- .../vendor/typeshed/stdlib/venv/__init__.pyi | 140 +- .../vendor/typeshed/stdlib/warnings.pyi | 120 +- .../vendor/typeshed/stdlib/wave.pyi | 130 - .../vendor/typeshed/stdlib/weakref.pyi | 113 +- .../vendor/typeshed/stdlib/webbrowser.pyi | 92 +- .../vendor/typeshed/stdlib/winreg.pyi | 501 +- .../vendor/typeshed/stdlib/winsound.pyi | 45 +- .../typeshed/stdlib/wsgiref/__init__.pyi | 25 - .../typeshed/stdlib/wsgiref/handlers.pyi | 204 +- .../typeshed/stdlib/wsgiref/headers.pyi | 108 +- .../typeshed/stdlib/wsgiref/simple_server.pyi | 18 +- .../vendor/typeshed/stdlib/wsgiref/types.pyi | 10 - .../vendor/typeshed/stdlib/wsgiref/util.pyi | 48 +- .../typeshed/stdlib/wsgiref/validate.pyi | 123 +- .../vendor/typeshed/stdlib/xdrlib.pyi | 20 - .../vendor/typeshed/stdlib/xml/__init__.pyi | 18 - .../typeshed/stdlib/xml/dom/NodeFilter.pyi | 4 - .../typeshed/stdlib/xml/dom/__init__.pyi | 24 - .../vendor/typeshed/stdlib/xml/dom/domreg.pyi | 30 +- .../typeshed/stdlib/xml/dom/expatbuilder.pyi | 123 +- .../typeshed/stdlib/xml/dom/minicompat.pyi | 14 +- .../typeshed/stdlib/xml/dom/minidom.pyi | 269 +- .../typeshed/stdlib/xml/dom/pulldom.pyi | 6 +- .../typeshed/stdlib/xml/dom/xmlbuilder.pyi | 14 - .../typeshed/stdlib/xml/etree/ElementTree.pyi | 410 +- .../typeshed/stdlib/xml/parsers/__init__.pyi | 9 - .../stdlib/xml/parsers/expat/__init__.pyi | 2 - .../stdlib/xml/parsers/expat/errors.pyi | 2 - 
.../stdlib/xml/parsers/expat/model.pyi | 2 - .../typeshed/stdlib/xml/sax/__init__.pyi | 31 +- .../typeshed/stdlib/xml/sax/_exceptions.pyi | 95 +- .../typeshed/stdlib/xml/sax/expatreader.pyi | 17 +- .../typeshed/stdlib/xml/sax/handler.pyi | 288 +- .../typeshed/stdlib/xml/sax/saxutils.pyi | 51 +- .../typeshed/stdlib/xml/sax/xmlreader.pyi | 254 +- .../vendor/typeshed/stdlib/xmlrpc/client.pyi | 186 +- .../vendor/typeshed/stdlib/xmlrpc/server.pyi | 372 +- .../vendor/typeshed/stdlib/xxlimited.pyi | 20 +- .../vendor/typeshed/stdlib/zipapp.pyi | 21 +- .../typeshed/stdlib/zipfile/__init__.pyi | 336 +- .../stdlib/zipfile/_path/__init__.pyi | 178 +- .../typeshed/stdlib/zipfile/_path/glob.pyi | 122 +- .../vendor/typeshed/stdlib/zipimport.pyi | 163 +- .../vendor/typeshed/stdlib/zlib.pyi | 103 +- .../typeshed/stdlib/zoneinfo/__init__.pyi | 24 +- .../typeshed/stdlib/zoneinfo/_common.pyi | 3 +- .../typeshed/stdlib/zoneinfo/_tzpath.pyi | 21 +- 666 files changed, 9197 insertions(+), 75583 deletions(-) create mode 100644 crates/ty_vendored/vendor/typeshed/pyproject.toml diff --git a/crates/ty_vendored/vendor/typeshed/pyproject.toml b/crates/ty_vendored/vendor/typeshed/pyproject.toml new file mode 100644 index 0000000000000..4cf4f95fbcc5e --- /dev/null +++ b/crates/ty_vendored/vendor/typeshed/pyproject.toml @@ -0,0 +1,254 @@ +[project] +# This section is needed to avoid writing --no-project everytime when using "uv run" +# https://github.com/astral-sh/uv/issues/8666 +name = "typeshed" +version = "0" +requires-python = ">=3.9" # Minimum version to run tests, used by uv run + +[tool.black] +line-length = 130 +target-version = ["py310"] +skip-magic-trailing-comma = true + +[tool.ruff] +line-length = 130 +# Oldest supported Python version +target-version = "py39" +fix = true +exclude = [ + # virtual environment + ".env", + ".venv", + "env", + # cache directories, etc.: + ".git", + ".mypy_cache", +] + +[tool.ruff.lint] +# Disable all rules on test cases by default: +# test cases often deliberately contain code +# that might not be considered idiomatic or modern. +# +# Note: some rules that are specifically useful to the test cases +# are invoked via separate runs of ruff in pre-commit: +# see our .pre-commit-config.yaml file for details +exclude = ["**/test_cases/**/*.py"] +# We still use flake8-pyi to check these (see .flake8 config file); +# tell ruff not to flag these as e.g. "unused noqa comments" +external = ["F821", "Y"] +select = [ + "A", # flake8-builtins + "ARG", # flake8-unused-arguments + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "D", # pydocstyle + "DTZ", # flake8-datetimez + "EXE", # flake8-executable + "FA", # flake8-future-annotations + "FBT", # flake8-boolean-trap + "FLY", # flynt + "I", # isort + "N", # pep8-naming + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # Pylint + "PTH", # flake8-use-pathlib + "RSE", # flake8-raise + "RUF", # Ruff-specific and unused-noqa + "SLOT", # flake8-slots + "T10", # flake8-debugger + "TD", # flake8-todos + "TRY", # tryceratops + "UP", # pyupgrade + "YTT", # flake8-2020 + # Flake8 base rules + "E", # pycodestyle Error + "F", # Pyflakes + "W", # pycodestyle Warning + # Only include flake8-annotations rules that are autofixable. 
Otherwise leave this to mypy+pyright + "ANN2", + # Most refurb rules are in preview and can be opinionated, + # consider them individually as they come out of preview (last check: 0.8.4) + "FURB105", # Unnecessary empty string passed to `print` + "FURB129", # Instead of calling `readlines()`, iterate over file object directly + "FURB136", # Replace `if` expression with `{min_max}` call + "FURB167", # Use of regular expression alias `re.{}` + "FURB168", # Prefer `is` operator over `isinstance` to check if an object is `None` + "FURB169", # Compare the identities of `{object}` and None instead of their respective types + "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups + "FURB187", # Use of assignment of `reversed` on list `{name}` + # Used for lint.flake8-import-conventions.aliases + "ICN001", # `{name}` should be imported as `{asname}` + # PYI: only enable rules that have autofixes and that we always want to fix (even manually), + # avoids duplicate # noqa with flake8-pyi + "PYI009", # Empty body should contain `...`, not pass + "PYI010", # Function body must contain only `...` + "PYI012", # Class bodies must not contain `pass` + "PYI013", # Non-empty class bodies must not contain `...` + "PYI014", # Only simple default values allowed for arguments + "PYI015", # Only simple default values allowed for assignments + "PYI016", # Duplicate union member `{}` + "PYI019", # Methods like `{method_name}` should return `Self` instead of a custom `TypeVar` + "PYI020", # Quoted annotations should not be included in stubs + "PYI025", # Use `from collections.abc import Set as AbstractSet` to avoid confusion with the `set` builtin + # "PYI026", Waiting for this mypy bug to be fixed: https://github.com/python/mypy/issues/16581 + "PYI030", # Multiple literal members in a union. Use a single literal, e.g. `Literal[{}]` + "PYI032", # Prefer `object` to `Any` for the second parameter to `{method_name}` + "PYI036", # Star-args in `{method_name}` should be annotated with `object` + "PYI044", # `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics + "PYI055", # Multiple `type[T]` usages in a union. Combine them into one, e.g., `type[{union_str}]`. + "PYI058", # Use `{return_type}` as the return value for simple `{method}` methods + # "PYI061", # TODO: Enable when out of preview + "PYI062", # Duplicate literal member `{}` + "PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final + # flake8-simplify, excluding rules that can reduce performance or readability due to long line formatting + "SIM101", # Multiple `isinstance` calls for `{name}`, merge into a single call + "SIM103", # Return the condition `{condition}` directly + "SIM107", # Don't use return in `try-except` and `finally` + "SIM109", # Use `{replacement}` instead of multiple equality comparisons + "SIM112", # Use capitalized environment variable `{expected}` instead of `{actual}` + "SIM113", # Use `enumerate()` for index variable `{index}` in `for` loop + "SIM114", # Combine `if` branches using logical `or` operator + "SIM115", # Use a context manager for opening files + "SIM118", # Use key `{operator}` dict instead of key `{operator} dict.keys()` + "SIM201", # Use `{left} != {right}` instead of not `{left} == {right}` + "SIM202", # Use `{left} == {right}` instead of not `{left} != {right}` + "SIM208", # Use `{expr}` instead of `not (not {expr})` + "SIM210", # Remove unnecessary `True if ... 
else False` + "SIM211", # Use `not ...` instead of `False if ... else True` + "SIM212", # Use `{expr_else} if {expr_else} else {expr_body}` instead of `{expr_body} if not {expr_else} else {expr_else}` + "SIM220", # Use `False` instead of `{name} and not {name}` + "SIM221", # Use `True` instead of `{name} or not {name}` + "SIM222", # Use `{expr}` instead of `{replaced}` + "SIM223", # Use `{expr}` instead of `{replaced}` + "SIM300", # Yoda condition detected + "SIM401", # Use `{contents}` instead of an if block + "SIM910", # Use `{expected}` instead of `{actual}` (dict-get-with-none-default) + "SIM911", # Use `{expected}` instead of `{actual}` (zip-dict-keys-and-values) + # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations + "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. + "TC005", # Found empty type-checking block + # "TC008", # TODO: Enable when out of preview + "TC010", # Invalid string member in `X | Y`-style union type + # Used for lint.flake8-import-conventions.aliases + "TID251", # `{name}` is banned: {message} +] +extend-safe-fixes = [ + "UP036", # Remove unnecessary `sys.version_info` blocks +] +ignore = [ + ### + # Rules that can conflict with the formatter (Black) + # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + ### + "E111", # indentation-with-invalid-multiple + "E114", # indentation-with-invalid-multiple-comment + "E117", # over-indented + "W191", # tab-indentation + ### + # Rules we don't want or don't agree with + ### + # We're not a library, no need to document everything + "D1", # Missing docstring in ... + # Sometimes, an extra blank line is more readable + "D202", # No blank lines allowed after function docstring + # Doesn't support split "summary line" + "D205", # 1 blank line required between summary line and description + # Used for direct, non-subclass type comparison, for example: `type(val) is str` + # see https://github.com/astral-sh/ruff/issues/6465 + "E721", # Do not compare types, use `isinstance()` + # Highly opinionated, and it's often necessary to violate it + "PLC0415", # `import` should be at the top-level of a file + # Leave the size and complexity of tests to human interpretation + "PLR09", # Too many ... + # Too many magic number "2" that are preferable inline. https://github.com/astral-sh/ruff/issues/10009 + "PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable + # Keep codeflow path separation explicit + "PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation + # Often just leads to redundant more verbose code when needing an actual str + "PTH208", # Use `pathlib.Path.iterdir()` instead. 
+ # Allow FIXME + "TD001", # Invalid TODO tag: `{tag}` + # Git blame is sufficient + "TD002", # Missing author in TODO; + "TD003", # Missing issue link for this TODO + # Mostly from scripts and tests, it's ok to have messages passed directly to exceptions + "TRY003", # Avoid specifying long messages outside the exception class + "PLC0205", # Sometimes __slots__ really is a string at runtime + ### + # False-positives, but already checked by type-checkers + ### + # Ruff doesn't support multi-file analysis yet: https://github.com/astral-sh/ruff/issues/5295 + "RUF013", # PEP 484 prohibits implicit `Optional` +] + +[tool.ruff.lint.per-file-ignores] +"*.pyi" = [ + # A lot of stubs are incomplete on purpose, and that's configured through pyright + # Some ANN204 (special method) are autofixable in stubs, but not all. + "ANN2", # Missing return type annotation for ... + # Ruff 0.8.0 added sorting of __all__ and __slots_. + # There is no consensus on whether we want to apply this to stubs, so keeping the status quo. + # See https://github.com/python/typeshed/pull/13108 + "RUF022", # `__all__` is not sorted + "RUF023", # `{}.__slots__` is not sorted + ### + # Rules that are out of the control of stub authors: + ### + # Names in stubs should match the implementation, even if it's ambiguous. + # https://github.com/astral-sh/ruff/issues/15293 + "A", # flake8-builtins + # Stubs can sometimes re-export entire modules. + # Issues with using a star-imported name will be caught by type-checkers. + "F403", # `from . import *` used; unable to detect undefined names + "F405", # may be undefined, or defined from star imports + # Most pep8-naming rules don't apply for third-party stubs like typeshed. + # N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one + "N8", # pep8-naming + # Stubs are allowed to use private variables (pyright's reportPrivateUsage is also disabled) + "PLC2701", # Private name import from external module + # Names in stubs should match implementation + "PLW0211", # First argument of a static method should not be named `{argument_name}` +] +"lib/ts_utils/**" = [ + # Doesn't affect stubs. The only re-exports we have should be in our local lib ts_utils + "PLC0414", # Import alias does not rename original package +] +"*_pb2.pyi" = [ + # Special autogenerated typing --> typing_extensions aliases + "ICN001", # `{name}` should be imported as `{asname}` + # Leave the docstrings as-is, matching source + "D", # pydocstyle + # See comment on black's force-exclude config above + "E501", # Line too long +] + +[tool.ruff.lint.pydocstyle] +convention = "pep257" # https://docs.astral.sh/ruff/settings/#lint_pydocstyle_convention + +[tool.ruff.lint.flake8-import-conventions.aliases] +# Prevent aliasing these, as it causes false-negatives for certain rules +typing_extensions = "typing_extensions" +typing = "typing" + +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"tempfile.NamedTemporaryFile".msg = "Use `ts_util.util.NamedTemporaryFile` instead." 
+ +[tool.ruff.lint.isort] +split-on-trailing-comma = false +combine-as-imports = true +extra-standard-library = [ + # Group these with stdlib + "_typeshed", + "typing_extensions", + # Extra modules not recognized by Ruff + # Added in Python 3.9 + "zoneinfo", + # Added in Python 3.14 + "compression", +] +known-first-party = ["_utils", "ts_utils"] + +[tool.typeshed] +oldest_supported_python = "3.9" diff --git a/crates/ty_vendored/vendor/typeshed/source_commit.txt b/crates/ty_vendored/vendor/typeshed/source_commit.txt index 713417f2a49b4..54a8607d259fb 100644 --- a/crates/ty_vendored/vendor/typeshed/source_commit.txt +++ b/crates/ty_vendored/vendor/typeshed/source_commit.txt @@ -1 +1 @@ -91055c730ffcda6311654cf32d663858ece69bad +d6f4a0f7102b1400a21742cf9b7ea93614e2b6ec diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi index 9f0b95bc4a127..a90cf1eddab76 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi @@ -1,70 +1,11 @@ -"""Record of phased-in incompatible language changes. - -Each line is of the form: - - FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease "," - CompilerFlag ")" - -where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples -of the same form as sys.version_info: - - (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int - PY_MINOR_VERSION, # the 1; an int - PY_MICRO_VERSION, # the 0; an int - PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string - PY_RELEASE_SERIAL # the 3; an int - ) - -OptionalRelease records the first release in which - - from __future__ import FeatureName - -was accepted. - -In the case of MandatoryReleases that have not yet occurred, -MandatoryRelease predicts the release in which the feature will become part -of the language. - -Else MandatoryRelease records when the feature became part of the language; -in releases at or after that, modules no longer need - - from __future__ import FeatureName - -to use the feature in question, but may continue to use such imports. - -MandatoryRelease may also be None, meaning that a planned feature got -dropped or that the release version is undetermined. - -Instances of class _Feature have two corresponding methods, -.getOptionalRelease() and .getMandatoryRelease(). - -CompilerFlag is the (bitfield) flag that should be passed in the fourth -argument to the builtin function compile() to enable the feature in -dynamically compiled code. This flag is stored in the .compiler_flag -attribute on _Future instances. These values must match the appropriate -#defines of CO_xxx flags in Include/cpython/compile.h. - -No feature line is ever to be deleted from this file. -""" - from typing_extensions import TypeAlias _VersionInfo: TypeAlias = tuple[int, int, int, str, int] class _Feature: def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ... - def getOptionalRelease(self) -> _VersionInfo: - """Return first release in which this feature was recognized. - - This is a 5-tuple, of the same form as sys.version_info. - """ - - def getMandatoryRelease(self) -> _VersionInfo | None: - """Return release in which this feature will become mandatory. - - This is a 5-tuple, of the same form as sys.version_info, or, if - the feature was dropped, or the release date is undetermined, is None. - """ + def getOptionalRelease(self) -> _VersionInfo: ... 
+ def getMandatoryRelease(self) -> _VersionInfo | None: ... compiler_flag: int absolute_import: _Feature diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi index 5b11b3445aab1..d663f5d935554 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi @@ -1,5 +1,3 @@ -"""Accelerator module for asyncio""" - import sys from asyncio.events import AbstractEventLoop from collections.abc import Awaitable, Callable, Coroutine, Generator @@ -14,20 +12,6 @@ _TaskYieldType: TypeAlias = Future[object] | None @disjoint_base class Future(Awaitable[_T]): - """This class is *almost* compatible with concurrent.futures.Future. - - Differences: - - - result() and exception() do not take a timeout argument and - raise an exception when the future isn't done yet. - - - Callbacks registered with add_done_callback() are always called - via the event loop's call_soon_threadsafe(). - - - This class is not compatible with the wait() and as_completed() - methods in the concurrent.futures package. - """ - _state: str @property def _exception(self) -> BaseException | None: ... @@ -38,87 +22,24 @@ class Future(Awaitable[_T]): def _log_traceback(self, val: Literal[False]) -> None: ... _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def __del__(self) -> None: - """Called when the instance is about to be destroyed.""" - - def get_loop(self) -> AbstractEventLoop: - """Return the event loop the Future is bound to.""" - + def __del__(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... @property def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... - def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: - """Add a callback to be run when the future becomes done. - - The callback is called with a single argument - the future object. If - the future is already done when this is called, the callback is - scheduled with call_soon. - """ - - def cancel(self, msg: Any | None = None) -> bool: - """Cancel the future and schedule callbacks. - - If the future is already done or cancelled, return False. Otherwise, - change the future's state to cancelled, schedule the callbacks and - return True. - """ - - def cancelled(self) -> bool: - """Return True if the future was cancelled.""" - - def done(self) -> bool: - """Return True if the future is done. - - Done means either that a result / exception are available, or that the - future was cancelled. - """ - - def result(self) -> _T: - """Return the result this future represents. - - If the future has been cancelled, raises CancelledError. If the - future's result isn't yet available, raises InvalidStateError. If - the future is done and has an exception set, this exception is raised. - """ - - def exception(self) -> BaseException | None: - """Return the exception that was set on this future. - - The exception (or None if no exception was set) is returned only if - the future is done. If the future has been cancelled, raises - CancelledError. If the future isn't done yet, raises - InvalidStateError. - """ - - def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: - """Remove all instances of a callback from the "call when done" list. - - Returns the number of callbacks removed. 
- """ - - def set_result(self, result: _T, /) -> None: - """Mark the future done and set its result. - - If the future is already done when this method is called, raises - InvalidStateError. - """ - - def set_exception(self, exception: type | BaseException, /) -> None: - """Mark the future done and set an exception. - - If the future is already done when this method is called, raises - InvalidStateError. - """ - - def __iter__(self) -> Generator[Any, None, _T]: - """Implement iter(self).""" - - def __await__(self) -> Generator[Any, None, _T]: - """Return an iterator to be used in await expression.""" - + def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: ... + def cancel(self, msg: Any | None = None) -> bool: ... + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + def result(self) -> _T: ... + def exception(self) -> BaseException | None: ... + def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: ... + def set_result(self, result: _T, /) -> None: ... + def set_exception(self, exception: type | BaseException, /) -> None: ... + def __iter__(self) -> Generator[Any, None, _T]: ... + def __await__(self) -> Generator[Any, None, _T]: ... @property def _loop(self) -> AbstractEventLoop: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] @@ -131,8 +52,6 @@ else: # and `asyncio.Task.set_result()` always raises. @disjoint_base class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] - """A coroutine wrapped in a Future.""" - if sys.version_info >= (3, 12): def __init__( self, @@ -167,124 +86,27 @@ class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportIn if sys.version_info >= (3, 12): def get_context(self) -> Context: ... - def get_stack(self, *, limit: int | None = None) -> list[FrameType]: - """Return the list of stack frames for this task's coroutine. - - If the coroutine is not done, this returns the stack where it is - suspended. If the coroutine has completed successfully or was - cancelled, this returns an empty list. If the coroutine was - terminated by an exception, this returns the list of traceback - frames. - - The frames are always ordered from oldest to newest. - - The optional limit gives the maximum number of frames to - return; by default all available frames are returned. Its - meaning differs depending on whether a stack or a traceback is - returned: the newest frames of a stack are returned, but the - oldest frames of a traceback are returned. (This matches the - behavior of the traceback module.) - - For reasons beyond our control, only one stack frame is - returned for a suspended coroutine. - """ - - def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: - """Print the stack or traceback for this task's coroutine. - - This produces output similar to that of the traceback module, - for the frames retrieved by get_stack(). The limit argument - is passed to get_stack(). The file argument is an I/O stream - to which the output is written; by default output is written - to sys.stderr. - """ + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... 
if sys.version_info >= (3, 11): - def cancelling(self) -> int: - """Return the count of the task's cancellation requests. - - This count is incremented when .cancel() is called - and may be decremented using .uncancel(). - """ - - def uncancel(self) -> int: - """Decrement the task's count of cancellation requests. - - This should be used by tasks that catch CancelledError - and wish to continue indefinitely until they are cancelled again. - - Returns the remaining number of cancellation requests. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - -def get_event_loop() -> AbstractEventLoop: - """Return an asyncio event loop. - - When called from a coroutine or a callback (e.g. scheduled with - call_soon or similar API), this function will always return the - running event loop. - - If there is no running event loop set, the function will return - the result of `get_event_loop_policy().get_event_loop()` call. - """ - -def get_running_loop() -> AbstractEventLoop: - """Return the running event loop. Raise a RuntimeError if there is none. + def cancelling(self) -> int: ... + def uncancel(self) -> int: ... - This function is thread-specific. - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... -def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: - """Set the running event loop. - - This is a low-level function intended to be used by event loops. - This function is thread-specific. - """ - -def _get_running_loop() -> AbstractEventLoop: - """Return the running event loop or None. - - This is a low-level function intended to be used by event loops. - This function is thread-specific. - """ - -def _register_task(task: Task[Any]) -> None: - """Register a new task in asyncio as executed by loop. - - Returns None. - """ - -def _unregister_task(task: Task[Any]) -> None: - """Unregister a task. - - Returns None. - """ - -def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: - """Enter into task execution or resume suspended task. - - Task belongs to loop. - - Returns None. - """ - -def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: - """Leave task execution or suspend a task. - - Task belongs to loop. - - Returns None. - """ +def get_event_loop() -> AbstractEventLoop: ... +def get_running_loop() -> AbstractEventLoop: ... +def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: ... +def _get_running_loop() -> AbstractEventLoop: ... +def _register_task(task: Task[Any]) -> None: ... +def _unregister_task(task: Task[Any]) -> None: ... +def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... if sys.version_info >= (3, 12): - def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: - """Return a currently executed task.""" + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... if sys.version_info >= (3, 14): def future_discard_from_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... - def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: - """Record that `fut` is awaited on by `waiter`.""" - - def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: - """Return a set of all tasks for the loop.""" + def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... + def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi index 9d4c323ef5ca8..58488e3d15afe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi @@ -1,11 +1,3 @@ -"""Bisection algorithms. - -This module provides support for maintaining a list in sorted order without -having to sort the list after each insertion. For long lists of items with -expensive comparison operations, this can be an improvement over the more -common approach. -""" - import sys from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT from collections.abc import Callable, MutableSequence @@ -22,19 +14,7 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> int: - """Return the index where to insert item x in list a, assuming a is sorted. - - The return value i is such that all e in a[:i] have e < x, and all e in - a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will - insert just before the leftmost x already there. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - - A custom key function can be supplied to customize the sort order. - """ - + ) -> int: ... @overload def bisect_left( a: SupportsLenAndGetItem[_T], @@ -52,19 +32,7 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> int: - """Return the index where to insert item x in list a, assuming a is sorted. - - The return value i is such that all e in a[:i] have e <= x, and all e in - a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will - insert just after the rightmost x already there. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - - A custom key function can be supplied to customize the sort order. - """ - + ) -> int: ... @overload def bisect_right( a: SupportsLenAndGetItem[_T], @@ -82,17 +50,7 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> None: - """Insert item x in list a, and keep it sorted assuming a is sorted. - - If x is already in a, insert it to the left of the leftmost x. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - - A custom key function can be supplied to customize the sort order. - """ - + ) -> None: ... @overload def insort_left( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -105,17 +63,7 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> None: - """Insert item x in list a, and keep it sorted assuming a is sorted. - - If x is already in a, insert it to the right of the rightmost x. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - - A custom key function can be supplied to customize the sort order. - """ - + ) -> None: ... @overload def insort_right( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -124,48 +72,13 @@ if sys.version_info >= (3, 10): else: def bisect_left( a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> int: - """Return the index where to insert item x in list a, assuming a is sorted. - - The return value i is such that all e in a[:i] have e < x, and all e in - a[i:] have e >= x. 
So if x already appears in the list, i points just - before the leftmost x already there. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - """ - + ) -> int: ... def bisect_right( a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> int: - """Return the index where to insert item x in list a, assuming a is sorted. - - The return value i is such that all e in a[:i] have e <= x, and all e in - a[i:] have e > x. So if x already appears in the list, i points just - beyond the rightmost x already there - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - """ - + ) -> int: ... def insort_left( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> None: - """Insert item x in list a, and keep it sorted assuming a is sorted. - - If x is already in a, insert it to the left of the leftmost x. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - """ - + ) -> None: ... def insort_right( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> None: - """Insert item x in list a, and keep it sorted assuming a is sorted. - - If x is already in a, insert it to the right of the rightmost x. - - Optional args lo (default 0) and hi (default len(a)) bound the - slice of a to be searched. - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi index b806cd201c7fe..a6c3869fb8513 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi @@ -1,5 +1,3 @@ -"""_blake2b provides BLAKE2b for hashlib""" - import sys from _typeshed import ReadableBuffer from typing import ClassVar, Final, final @@ -16,8 +14,6 @@ BLAKE2S_SALT_SIZE: Final = 8 @final class blake2b: - """Return a new BLAKE2b hash object.""" - MAX_DIGEST_SIZE: ClassVar[int] = 64 MAX_KEY_SIZE: ClassVar[int] = 64 PERSON_SIZE: ClassVar[int] = 16 @@ -64,22 +60,13 @@ class blake2b: usedforsecurity: bool = True, ) -> Self: ... - def copy(self) -> Self: - """Return a copy of the hash object.""" - - def digest(self) -> bytes: - """Return the digest value as a bytes object.""" - - def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits.""" - - def update(self, data: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided bytes-like object.""" + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, data: ReadableBuffer, /) -> None: ... @final class blake2s: - """Return a new BLAKE2s hash object.""" - MAX_DIGEST_SIZE: ClassVar[int] = 32 MAX_KEY_SIZE: ClassVar[int] = 32 PERSON_SIZE: ClassVar[int] = 8 @@ -126,14 +113,7 @@ class blake2s: usedforsecurity: bool = True, ) -> Self: ... - def copy(self) -> Self: - """Return a copy of the hash object.""" - - def digest(self) -> bytes: - """Return the digest value as a bytes object.""" - - def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits.""" - - def update(self, data: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided bytes-like object.""" + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... 
+ def update(self, data: ReadableBuffer, /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi index b3f4e5d0c88de..233d4934f3c6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi @@ -1,7 +1 @@ -"""A minimal subset of the locale module used at interpreter startup -(imported by the _io module), in order to reduce startup time. - -Don't import directly from third-party code; use the `locale` module instead! -""" - def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi index 61a76f4f85811..fdad932ca22e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi @@ -5,68 +5,20 @@ from typing_extensions import Self @final class BZ2Compressor: - """Create a compressor object for compressing data incrementally. - - compresslevel - Compression level, as a number between 1 and 9. - - For one-shot compression, use the compress() function instead. - """ - if sys.version_info >= (3, 12): def __new__(cls, compresslevel: int = 9, /) -> Self: ... else: def __init__(self, compresslevel: int = 9, /) -> None: ... - def compress(self, data: ReadableBuffer, /) -> bytes: - """Provide data to the compressor object. - - Returns a chunk of compressed data if possible, or b'' otherwise. - - When you have finished providing data to the compressor, call the - flush() method to finish the compression process. - """ - - def flush(self) -> bytes: - """Finish the compression process. - - Returns the compressed data left in internal buffers. - - The compressor object may not be used after this method is called. - """ + def compress(self, data: ReadableBuffer, /) -> bytes: ... + def flush(self) -> bytes: ... @final class BZ2Decompressor: - """Create a decompressor object for decompressing data incrementally. - - For one-shot decompression, use the decompress() function instead. - """ - - def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: - """Decompress *data*, returning uncompressed data as bytes. - - If *max_length* is nonnegative, returns at most *max_length* bytes of - decompressed data. If this limit is reached and further output can be - produced, *self.needs_input* will be set to ``False``. In this case, the next - call to *decompress()* may provide *data* as b'' to obtain more of the output. - - If all of the input data was decompressed and returned (either because this - was less than *max_length* bytes, or because *max_length* was negative), - *self.needs_input* will be set to True. - - Attempting to decompress data after the end of stream is reached raises an - EOFError. Any data found after the end of the stream is ignored and saved in - the unused_data attribute. - """ - + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property - def eof(self) -> bool: - """True if the end-of-stream marker has been reached.""" - + def eof(self) -> bool: ... @property - def needs_input(self) -> bool: - """True if more input is needed before more decompressed data can be produced.""" - + def needs_input(self) -> bool: ... @property - def unused_data(self) -> bytes: - """Data found after the end of the compressed stream.""" + def unused_data(self) -> bytes: ... 
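Illustrative only, not part of this patch: a small sketch of the incremental (de)compression protocol described by the docstrings removed in the _bz2 hunk above (compress/flush, then decompress with max_length, needs_input, eof and unused_data), using the public bz2 re-exports of _bz2.BZ2Compressor / BZ2Decompressor.

import bz2

data = b"hello " * 1000
comp = bz2.BZ2Compressor(9)                        # compresslevel between 1 and 9
blob = comp.compress(data) + comp.flush()          # flush() ends the stream; the compressor is unusable afterwards

decomp = bz2.BZ2Decompressor()
out = decomp.decompress(blob, max_length=64)       # partial output; needs_input is now False
while not decomp.eof:
    out += decomp.decompress(b"", max_length=64)   # drain buffered output without feeding new input
assert out == data
assert decomp.unused_data == b""                   # nothing found after the end-of-stream marker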
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi index 7548f98b66a8b..89f97edb9ba81 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi @@ -16,35 +16,13 @@ _CharMap: TypeAlias = dict[int, int] | _EncodingMap _Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] _SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] -def register(search_function: _SearchFunction, /) -> None: - """Register a codec search function. - - Search functions are expected to take one argument, the encoding name in - all lower case letters, and either return None, or a tuple of functions - (encoder, decoder, stream_reader, stream_writer) (or a CodecInfo object). - """ +def register(search_function: _SearchFunction, /) -> None: ... if sys.version_info >= (3, 10): - def unregister(search_function: _SearchFunction, /) -> None: - """Unregister a codec search function and clear the registry's cache. - - If the search function is not registered, do nothing. - """ - -def register_error(errors: str, handler: _Handler, /) -> None: - """Register the specified error handler under the name errors. - - handler must be a callable object, that will be called with an exception - instance containing information about the location of the encoding/decoding - error and must return a (replacement, new position) tuple. - """ - -def lookup_error(name: str, /) -> _Handler: - """lookup_error(errors) -> handler + def unregister(search_function: _SearchFunction, /) -> None: ... - Return the error handler for the specified error handling name or raise a - LookupError, if no handler exists under this name. - """ +def register_error(errors: str, handler: _Handler, /) -> None: ... +def lookup_error(name: str, /) -> _Handler: ... # The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 # https://docs.python.org/3/library/codecs.html#binary-transforms @@ -70,31 +48,13 @@ _BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: - """Encodes obj using the codec registered for encoding. - - The default encoding is 'utf-8'. errors may be given to set a - different error handling scheme. Default is 'strict' meaning that encoding - errors raise a ValueError. Other possible values are 'ignore', 'replace' - and 'backslashreplace' as well as any other name registered with - codecs.register_error that can handle ValueErrors. - """ - +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... @overload def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap] @overload def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @overload -def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: # type: ignore[overload-overlap] - """Decodes obj using the codec registered for encoding. - - Default encoding is 'utf-8'. errors may be given to set a - different error handling scheme. Default is 'strict' meaning that encoding - errors raise a ValueError. 
Other possible values are 'ignore', 'replace' - and 'backslashreplace' as well as any other name registered with - codecs.register_error that can handle ValueErrors. - """ - +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap] @overload def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... @@ -111,9 +71,7 @@ def decode( def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... @overload def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... -def lookup(encoding: str, /) -> codecs.CodecInfo: - """Looks up a codec tuple in the Python codec registry and returns a CodecInfo object.""" - +def lookup(encoding: str, /) -> codecs.CodecInfo: ... def charmap_build(map: str, /) -> _CharMap: ... def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi index 0016acdc5549e..319577c9284bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi @@ -1,8 +1,3 @@ -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - import sys from abc import abstractmethod from types import MappingProxyType @@ -76,39 +71,31 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. @final class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[_KT_co]: - """Return a reverse iterator over the dict keys.""" + def __reversed__(self) -> Iterator[_KT_co]: ... __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): - def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: - """Return True if the view and the given iterable have a null intersection.""" + def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ... if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to""" + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... @final class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented - def __reversed__(self) -> Iterator[_VT_co]: - """Return a reverse iterator over the dict values.""" + def __reversed__(self) -> Iterator[_VT_co]: ... if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to""" + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... @final class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: - """Return a reverse iterator over the dict items.""" + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): - def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: - """Return True if the view and the given iterable have a null intersection.""" + def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ... 
if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to""" + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... if sys.version_info >= (3, 12): @runtime_checkable diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi index 12eafa626744d..aa67df2ab4787 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi @@ -1,5 +1,3 @@ -"""Internal classes used by the gzip, lzma and bz2 modules""" - # _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) from _typeshed import Incomplete, WriteableBuffer @@ -15,12 +13,9 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... -class BaseStream(BufferedIOBase): - """Mode-checking helper functions.""" +class BaseStream(BufferedIOBase): ... class DecompressReader(RawIOBase): - """Adapts the decompressor API to a RawIOBase reader API""" - def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi index a46b110a8c128..0ddeca7882cd1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi @@ -1,5 +1,3 @@ -"""Context Variables""" - import sys from collections.abc import Callable, Iterator, Mapping from types import GenericAlias, TracebackType @@ -20,38 +18,14 @@ class ContextVar(Generic[_T]): @property def name(self) -> str: ... @overload - def get(self) -> _T: - """Return a value for the context variable for the current context. - - If there is no value for the variable in the current context, the method will: - * return the value of the default argument of the method, if provided; or - * return the default value for the context variable, if it was created - with one; or - * raise a LookupError. - """ - + def get(self) -> _T: ... @overload def get(self, default: _T, /) -> _T: ... @overload def get(self, default: _D, /) -> _D | _T: ... - def set(self, value: _T, /) -> Token[_T]: - """Call to set a new value for the context variable in the current context. - - The required value argument is the new value for the context variable. - - Returns a Token object that can be used to restore the variable to its previous - value via the `ContextVar.reset()` method. - """ - - def reset(self, token: Token[_T], /) -> None: - """Reset the context variable. - - The variable is reset to the value it had before the `ContextVar.set()` that - created the token was used. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def set(self, value: _T, /) -> Token[_T]: ... + def reset(self, token: Token[_T], /) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class Token(Generic[_T]): @@ -61,16 +35,12 @@ class Token(Generic[_T]): def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express MISSING: ClassVar[object] __hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 14): - def __enter__(self) -> Self: - """Enter into Token context manager.""" - + def __enter__(self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> None: - """Exit from Token context manager, restore the linked ContextVar.""" + ) -> None: ... def copy_context() -> Context: ... @@ -80,28 +50,15 @@ def copy_context() -> Context: ... class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... @overload - def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: - """Return the value for `key` if `key` has the value in the context object. - - If `key` does not exist, return `default`. If `default` is not given, - return None. - """ - + def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: ... @overload def get(self, key: ContextVar[_T], default: _T, /) -> _T: ... @overload def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ... def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... - def copy(self) -> Context: - """Return a shallow copy of the context object.""" + def copy(self) -> Context: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __getitem__(self, key: ContextVar[_T], /) -> _T: - """Return self[key].""" - - def __iter__(self) -> Iterator[ContextVar[Any]]: - """Implement iter(self).""" - - def __len__(self) -> int: - """Return len(self).""" - + def __getitem__(self, key: ContextVar[_T], /) -> _T: ... + def __iter__(self) -> Iterator[ContextVar[Any]]: ... + def __len__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi index 93322e781d4f6..ea90766afee66 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi @@ -1,5 +1,3 @@ -"""CSV parsing and writing.""" - import csv import sys from _typeshed import SupportsWrite @@ -28,11 +26,6 @@ _DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Diale @disjoint_base class Dialect: - """CSV dialect - - The Dialect type records CSV parsing and generation options. - """ - delimiter: str quotechar: str | None escapechar: str | None @@ -58,60 +51,23 @@ if sys.version_info >= (3, 10): # This class calls itself _csv.reader. @disjoint_base class Reader: - """CSV reader - - Reader objects are responsible for reading and parsing tabular data - in CSV format. - """ - @property def dialect(self) -> Dialect: ... line_num: int - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> list[str]: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> list[str]: ... # This class calls itself _csv.writer. @disjoint_base class Writer: - """CSV writer - - Writer objects are responsible for generating tabular data - in CSV format from sequence input. - """ - @property def dialect(self) -> Dialect: ... if sys.version_info >= (3, 13): - def writerow(self, row: Iterable[Any], /) -> Any: - """writerow(iterable) - - Construct and write a CSV record from an iterable of fields. Non-string - elements will be converted to string. - """ - - def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: - """writerows(iterable of iterables) - - Construct and write a series of iterables to a csv file. Non-string - elements will be converted to string. - """ + def writerow(self, row: Iterable[Any], /) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: ... 
else: - def writerow(self, row: Iterable[Any]) -> Any: - """writerow(iterable) - - Construct and write a CSV record from an iterable of fields. Non-string - elements will be converted to string. - """ - - def writerows(self, rows: Iterable[Iterable[Any]]) -> None: - """writerows(iterable of iterables) - - Construct and write a series of iterables to a csv file. Non-string - elements will be converted to string. - """ + def writerow(self, row: Iterable[Any]) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... # For the return types below. # These aliases can be removed when typeshed drops support for 3.9. @@ -136,7 +92,7 @@ else: def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... def writer( - csvfile: SupportsWrite[str], + fileobj: SupportsWrite[str], /, dialect: _DialectLike = "excel", *, @@ -148,23 +104,9 @@ def writer( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> _writer: - """csv_writer = csv.writer(fileobj [, dialect='excel'] - [optional keyword args]) - for row in sequence: - csv_writer.writerow(row) - - [or] - - csv_writer = csv.writer(fileobj [, dialect='excel'] - [optional keyword args]) - csv_writer.writerows(rows) - - The "fileobj" argument can be any object that supports the file API. - """ - +) -> _writer: ... def reader( - csvfile: Iterable[str], + iterable: Iterable[str], /, dialect: _DialectLike = "excel", *, @@ -176,25 +118,11 @@ def reader( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> _reader: - """csv_reader = reader(iterable [, dialect='excel'] - [optional keyword args]) - for row in csv_reader: - process(row) - - The "iterable" argument can be any object that returns a line - of input for each iteration, such as a file object or a list. The - optional "dialect" parameter is discussed below. The function - also accepts optional keyword arguments which override settings - provided by the dialect. - - The returned object is an iterator. Each iteration returns a row - of the CSV file (which can span multiple input lines). - """ - +) -> _reader: ... def register_dialect( name: str, - dialect: type[Dialect | csv.Dialect] = ..., + /, + dialect: type[Dialect | csv.Dialect] | str = "excel", *, delimiter: str = ",", quotechar: str | None = '"', @@ -204,34 +132,8 @@ def register_dialect( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> None: - """Create a mapping from a string name to a dialect class. - dialect = csv.register_dialect(name[, dialect[, **fmtparams]]) - """ - -def unregister_dialect(name: str) -> None: - """Delete the name/dialect mapping associated with a string name. - - csv.unregister_dialect(name) - """ - -def get_dialect(name: str) -> Dialect: - """Return the dialect instance associated with name. - - dialect = csv.get_dialect(name) - """ - -def list_dialects() -> list[str]: - """Return a list of all known dialect names. - - names = csv.list_dialects() - """ - -def field_size_limit(new_limit: int = ...) -> int: - """Sets an upper limit on parsed fields. - - csv.field_size_limit([limit]) - - Returns old limit. If limit is not given, no new limit is set and - the old limit is returned - """ +) -> None: ... +def unregister_dialect(name: str) -> None: ... +def get_dialect(name: str) -> Dialect: ... +def list_dialects() -> list[str]: ... +def field_size_limit(new_limit: int = ...) -> int: ... 
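Illustrative only, not part of this patch: a brief sketch of the reader/writer/dialect functions whose signatures the _csv hunk above rewrites (the fileobj argument of writer(), the iterable argument of reader(), and register_dialect), through the public csv module.

import csv
import io

buf = io.StringIO(newline="")                         # csv docs recommend newline="" for file objects
w = csv.writer(buf, dialect="excel", delimiter=";")   # fileobj: anything with a write(str) method
w.writerow(["name", "qty"])
w.writerows([["bolt", 4], ["nut", 7]])                # non-string fields are converted to str

buf.seek(0)
rows = list(csv.reader(buf, delimiter=";"))           # iterable yielding one line of input per iteration
assert rows == [["name", "qty"], ["bolt", "4"], ["nut", "7"]]

csv.register_dialect("semi", delimiter=";")           # map a name to a set of format parameters
assert "semi" in csv.list_dialects()
csv.unregister_dialect("semi")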
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi index 2d7c2dc307f7a..c87cf5e326caa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi @@ -1,5 +1,3 @@ -"""Create and manipulate C compatible data types in Python.""" - import _typeshed import sys from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -31,52 +29,28 @@ if sys.platform == "win32": _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] class COMError(Exception): - """Raised when a COM method call failed.""" - hresult: int text: str | None details: _COMError_Details def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... - def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: - """CopyComPointer(src, dst) -> HRESULT value""" + def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ... + FUNCFLAG_HRESULT: Final = 0x2 FUNCFLAG_STDCALL: Final = 0x0 - def FormatError(code: int = ...) -> str: - """FormatError([integer]) -> string - - Convert a win32 error code into a string. If the error code is not - given, the return value of a call to GetLastError() is used. - """ - + def FormatError(code: int = ...) -> str: ... def get_last_error() -> int: ... def set_last_error(value: int) -> int: ... - def LoadLibrary(name: str, load_flags: int = 0, /) -> int: - """LoadLibrary(name, load_flags) -> handle - - Load an executable (usually a DLL), and return a handle to it. - The handle may be used to locate exported functions in this - module. load_flags are as defined for LoadLibraryEx in the - Windows API. - """ - - def FreeLibrary(handle: int, /) -> None: - """FreeLibrary(handle) -> void - - Free the handle of an executable previously loaded by LoadLibrary. - """ + def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ... + def FreeLibrary(handle: int, /) -> None: ... else: - def dlclose(handle: int, /) -> None: - """dlclose a library""" + def dlclose(handle: int, /) -> None: ... # The default for flag is RTLD_GLOBAL|RTLD_LOCAL, which is platform dependent. - def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: - """dlopen(name, flag={RTLD_GLOBAL|RTLD_LOCAL}) open a shared library""" - - def dlsym(handle: int, name: str, /) -> int: - """find symbol in shared library""" + def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: ... + def dlsym(handle: int, name: str, /) -> int: ... if sys.version_info >= (3, 13): # This class is not exposed. It calls itself _ctypes.CType_Type. @@ -123,8 +97,6 @@ class _PyCSimpleType(_CTypeBaseType): def __rmul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _SimpleCData(_CData, Generic[_T], metaclass=_PyCSimpleType): - """XXX to be provided""" - value: _T # The TypeVar can be unsolved here, # but we can't use overloads without creating many, many mypy false-positive errors @@ -152,8 +124,6 @@ class _PyCPointerType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... 
# type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): - """XXX to be provided""" - _type_: type[_CT] contents: _CT @overload @@ -161,35 +131,17 @@ class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): @overload def __init__(self, arg: _CT) -> None: ... @overload - def __getitem__(self, key: int, /) -> Any: - """Return self[key].""" - + def __getitem__(self, key: int, /) -> Any: ... @overload def __getitem__(self, key: slice, /) -> list[Any]: ... - def __setitem__(self, key: int, value: Any, /) -> None: - """Set self[key] to value.""" + def __setitem__(self, key: int, value: Any, /) -> None: ... if sys.version_info < (3, 14): @overload - def POINTER(type: None, /) -> type[c_void_p]: - """Create and return a new ctypes pointer type. - - type - A ctypes type. - - Pointer types are cached and reused internally, - so calling this function repeatedly is cheap. - """ - + def POINTER(type: None, /) -> type[c_void_p]: ... @overload def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... - def pointer(obj: _CT, /) -> _Pointer[_CT]: - """Create a new pointer instance, pointing to 'obj'. - - The returned object is of the type POINTER(type(obj)). Note that if you - just want to pass a pointer to an object to a foreign function call, you - should use byref(obj) which is much faster. - """ + def pointer(obj: _CT, /) -> _Pointer[_CT]: ... # This class is not exposed. It calls itself _ctypes.CArgObject. @final @@ -197,15 +149,10 @@ if sys.version_info < (3, 14): class _CArgObject: ... if sys.version_info >= (3, 14): - def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: - """Return a pointer lookalike to a C instance, only usable as function argument.""" + def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: ... else: - def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: - """byref(C instance[, offset=0]) -> byref-object - Return a pointer lookalike to a C instance, only usable - as function argument - """ + def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: ... _ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType] _PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any] @@ -224,8 +171,6 @@ class _PyCFuncPtrType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): - """Function Pointer""" - restype: type[_CDataType] | Callable[[int], Any] | None argtypes: Sequence[type[_CDataType]] errcheck: _ECT @@ -245,8 +190,7 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): cls, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | _CDataType | None = ..., / ) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... _GetT = TypeVar("_GetT") _SetT = TypeVar("_SetT") @@ -288,8 +232,6 @@ class _UnionType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... 
# type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Union(_CData, metaclass=_UnionType): - """Union base class""" - _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -318,8 +260,6 @@ class _PyCStructType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Structure(_CData, metaclass=_PyCStructType): - """Structure base class""" - _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -344,15 +284,6 @@ class _PyCArrayType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): - """Abstract base class for arrays. - - The recommended way to create concrete array types is by multiplying any - ctypes data type with a non-negative integer. Alternatively, you can subclass - this type and define _length_ and _type_ class variables. Array elements can - be read and written using standard subscript and slice accesses for slice - reads, the resulting object is not itself an Array. - """ - @property @abstractmethod def _length_(self) -> int: ... @@ -383,48 +314,28 @@ class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): # the array element type would belong are annotated with Any instead. def __init__(self, *args: Any) -> None: ... @overload - def __getitem__(self, key: int, /) -> Any: - """Return self[key].""" - + def __getitem__(self, key: int, /) -> Any: ... @overload def __getitem__(self, key: slice, /) -> list[Any]: ... @overload - def __setitem__(self, key: int, value: Any, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: int, value: Any, /) -> None: ... @overload def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ... def __iter__(self) -> Iterator[Any]: ... # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. - def __len__(self) -> int: - """Return len(self).""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - -def addressof(obj: _CData | _CDataType, /) -> int: - """Return the address of the C instance internal buffer""" - -def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: - """alignment(C type) -> integer - alignment(C instance) -> integer - Return the alignment requirements of a C instance - """ + def __len__(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... +def addressof(obj: _CData | _CDataType, /) -> int: ... +def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... def get_errno() -> int: ... -def resize(obj: _CData | _CDataType, size: int, /) -> None: - """Resize the memory buffer of a ctypes instance""" - +def resize(obj: _CData | _CDataType, size: int, /) -> None: ... def set_errno(value: int, /) -> int: ... -def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: - """Return the size in bytes of a C instance.""" - +def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... def PyObj_FromPtr(address: int, /) -> Any: ... def Py_DECREF(o: _T, /) -> _T: ... def Py_INCREF(o: _T, /) -> _T: ... 
-def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: - """Return buffer interface information""" - +def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: ... def call_cdeclfunction(address: int, arguments: tuple[Any, ...], /) -> Any: ... def call_function(address: int, arguments: tuple[Any, ...], /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi index b53a33e16e46f..d4e4d48f4e20f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi @@ -276,531 +276,82 @@ REPORT_MOUSE_POSITION: Final[int] _C_API: Any version: Final[bytes] -def baudrate() -> int: - """Return the output speed of the terminal in bits per second.""" - -def beep() -> None: - """Emit a short attention sound.""" - -def can_change_color() -> bool: - """Return True if the programmer can change the colors displayed by the terminal.""" - -def cbreak(flag: bool = True, /) -> None: - """Enter cbreak mode. - - flag - If false, the effect is the same as calling nocbreak(). - - In cbreak mode (sometimes called "rare" mode) normal tty line buffering is - turned off and characters are available to be read one by one. However, - unlike raw mode, special characters (interrupt, quit, suspend, and flow - control) retain their effects on the tty driver and calling program. - Calling first raw() then cbreak() leaves the terminal in cbreak mode. - """ - -def color_content(color_number: int, /) -> tuple[int, int, int]: - """Return the red, green, and blue (RGB) components of the specified color. - - color_number - The number of the color (0 - (COLORS-1)). - - A 3-tuple is returned, containing the R, G, B values for the given color, - which will be between 0 (no component) and 1000 (maximum amount of component). - """ - -def color_pair(pair_number: int, /) -> int: - """Return the attribute value for displaying text in the specified color. - - pair_number - The number of the color pair. - - This attribute value can be combined with A_STANDOUT, A_REVERSE, and the - other A_* attributes. pair_number() is the counterpart to this function. - """ - -def curs_set(visibility: int, /) -> int: - """Set the cursor state. - - visibility - 0 for invisible, 1 for normal visible, or 2 for very visible. - - If the terminal supports the visibility requested, the previous cursor - state is returned; otherwise, an exception is raised. On many terminals, - the "visible" mode is an underline cursor and the "very visible" mode is - a block cursor. - """ - -def def_prog_mode() -> None: - """Save the current terminal mode as the "program" mode. - - The "program" mode is the mode when the running program is using curses. - - Subsequent calls to reset_prog_mode() will restore this mode. - """ - -def def_shell_mode() -> None: - """Save the current terminal mode as the "shell" mode. - - The "shell" mode is the mode when the running program is not using curses. - - Subsequent calls to reset_shell_mode() will restore this mode. - """ - -def delay_output(ms: int, /) -> None: - """Insert a pause in output. - - ms - Duration in milliseconds. - """ - -def doupdate() -> None: - """Update the physical screen to match the virtual screen.""" - -def echo(flag: bool = True, /) -> None: - """Enter echo mode. - - flag - If false, the effect is the same as calling noecho(). 
- - In echo mode, each character input is echoed to the screen as it is entered. - """ - -def endwin() -> None: - """De-initialize the library, and return terminal to normal status.""" - -def erasechar() -> bytes: - """Return the user's current erase character.""" - +def baudrate() -> int: ... +def beep() -> None: ... +def can_change_color() -> bool: ... +def cbreak(flag: bool = True, /) -> None: ... +def color_content(color_number: int, /) -> tuple[int, int, int]: ... +def color_pair(pair_number: int, /) -> int: ... +def curs_set(visibility: int, /) -> int: ... +def def_prog_mode() -> None: ... +def def_shell_mode() -> None: ... +def delay_output(ms: int, /) -> None: ... +def doupdate() -> None: ... +def echo(flag: bool = True, /) -> None: ... +def endwin() -> None: ... +def erasechar() -> bytes: ... def filter() -> None: ... -def flash() -> None: - """Flash the screen. - - That is, change it to reverse-video and then change it back in a short interval. - """ - -def flushinp() -> None: - """Flush all input buffers. - - This throws away any typeahead that has been typed by the user and has not - yet been processed by the program. - """ - -def get_escdelay() -> int: - """Gets the curses ESCDELAY setting. - - Gets the number of milliseconds to wait after reading an escape character, - to distinguish between an individual escape character entered on the - keyboard from escape sequences sent by cursor and function keys. - """ - -def get_tabsize() -> int: - """Gets the curses TABSIZE setting. - - Gets the number of columns used by the curses library when converting a tab - character to spaces as it adds the tab to a window. - """ - -def getmouse() -> tuple[int, int, int, int, int]: - """Retrieve the queued mouse event. - - After getch() returns KEY_MOUSE to signal a mouse event, this function - returns a 5-tuple (id, x, y, z, bstate). - """ - -def getsyx() -> tuple[int, int]: - """Return the current coordinates of the virtual screen cursor. - - Return a (y, x) tuple. If leaveok is currently true, return (-1, -1). - """ - -def getwin(file: SupportsRead[bytes], /) -> window: - """Read window related data stored in the file by an earlier putwin() call. - - The routine then creates and initializes a new window using that data, - returning the new window object. - """ - -def halfdelay(tenths: int, /) -> None: - """Enter half-delay mode. - - tenths - Maximal blocking delay in tenths of seconds (1 - 255). - - Use nocbreak() to leave half-delay mode. - """ - -def has_colors() -> bool: - """Return True if the terminal can display colors; otherwise, return False.""" +def flash() -> None: ... +def flushinp() -> None: ... +def get_escdelay() -> int: ... +def get_tabsize() -> int: ... +def getmouse() -> tuple[int, int, int, int, int]: ... +def getsyx() -> tuple[int, int]: ... +def getwin(file: SupportsRead[bytes], /) -> window: ... +def halfdelay(tenths: int, /) -> None: ... +def has_colors() -> bool: ... if sys.version_info >= (3, 10): - def has_extended_color_support() -> bool: - """Return True if the module supports extended colors; otherwise, return False. - - Extended color support allows more than 256 color-pairs for terminals - that support more than 16 colors (e.g. xterm-256color). - """ + def has_extended_color_support() -> bool: ... if sys.version_info >= (3, 14): - def assume_default_colors(fg: int, bg: int, /) -> None: - """Allow use of default values for colors on terminals supporting this feature. - - Assign terminal default foreground/background colors to color number -1. 
- Change the definition of the color-pair 0 to (fg, bg). - - Use this to support transparency in your application. - """ - -def has_ic() -> bool: - """Return True if the terminal has insert- and delete-character capabilities.""" - -def has_il() -> bool: - """Return True if the terminal has insert- and delete-line capabilities.""" - -def has_key(key: int, /) -> bool: - """Return True if the current terminal type recognizes a key with that value. - - key - Key number. - """ - -def init_color(color_number: int, r: int, g: int, b: int, /) -> None: - """Change the definition of a color. - - color_number - The number of the color to be changed (0 - (COLORS-1)). - r - Red component (0 - 1000). - g - Green component (0 - 1000). - b - Blue component (0 - 1000). - - When init_color() is used, all occurrences of that color on the screen - immediately change to the new definition. This function is a no-op on - most terminals; it is active only if can_change_color() returns true. - """ - -def init_pair(pair_number: int, fg: int, bg: int, /) -> None: - """Change the definition of a color-pair. - - pair_number - The number of the color-pair to be changed (1 - (COLOR_PAIRS-1)). - fg - Foreground color number (-1 - (COLORS-1)). - bg - Background color number (-1 - (COLORS-1)). - - If the color-pair was previously initialized, the screen is refreshed and - all occurrences of that color-pair are changed to the new definition. - """ - -def initscr() -> window: - """Initialize the library. - - Return a WindowObject which represents the whole screen. - """ - + def assume_default_colors(fg: int, bg: int, /) -> None: ... + +def has_ic() -> bool: ... +def has_il() -> bool: ... +def has_key(key: int, /) -> bool: ... +def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ... +def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ... +def initscr() -> window: ... def intrflush(flag: bool, /) -> None: ... -def is_term_resized(nlines: int, ncols: int, /) -> bool: - """Return True if resize_term() would modify the window structure, False otherwise. - - nlines - Height. - ncols - Width. - """ - -def isendwin() -> bool: - """Return True if endwin() has been called.""" - -def keyname(key: int, /) -> bytes: - """Return the name of specified key. - - key - Key number. - """ - -def killchar() -> bytes: - """Return the user's current line kill character.""" - -def longname() -> bytes: - """Return the terminfo long name field describing the current terminal. - - The maximum length of a verbose description is 128 characters. It is defined - only after the call to initscr(). - """ - -def meta(yes: bool, /) -> None: - """Enable/disable meta keys. - - If yes is True, allow 8-bit characters to be input. If yes is False, - allow only 7-bit characters. - """ - -def mouseinterval(interval: int, /) -> None: - """Set and retrieve the maximum time between press and release in a click. - - interval - Time in milliseconds. - - Set the maximum time that can elapse between press and release events in - order for them to be recognized as a click, and return the previous interval - value. - """ - -def mousemask(newmask: int, /) -> tuple[int, int]: - """Set the mouse events to be reported, and return a tuple (availmask, oldmask). - - Return a tuple (availmask, oldmask). availmask indicates which of the - specified mouse events can be reported; on complete failure it returns 0. - oldmask is the previous value of the given window's mouse event mask. - If this function is never called, no mouse events are ever reported. 
- """ - -def napms(ms: int, /) -> int: - """Sleep for specified time. - - ms - Duration in milliseconds. - """ - -def newpad(nlines: int, ncols: int, /) -> window: - """Create and return a pointer to a new pad data structure. - - nlines - Height. - ncols - Width. - """ - -def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: - """newwin(nlines, ncols, [begin_y=0, begin_x=0]) - Return a new window. - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - - By default, the window will extend from the specified position to the lower - right corner of the screen. - """ - -def nl(flag: bool = True, /) -> None: - """Enter newline mode. - - flag - If false, the effect is the same as calling nonl(). - - This mode translates the return key into newline on input, and translates - newline into return and line-feed on output. Newline mode is initially on. - """ - -def nocbreak() -> None: - """Leave cbreak mode. - - Return to normal "cooked" mode with line buffering. - """ - -def noecho() -> None: - """Leave echo mode. - - Echoing of input characters is turned off. - """ - -def nonl() -> None: - """Leave newline mode. - - Disable translation of return into newline on input, and disable low-level - translation of newline into newline/return on output. - """ - -def noqiflush() -> None: - """Disable queue flushing. - - When queue flushing is disabled, normal flush of input and output queues - associated with the INTR, QUIT and SUSP characters will not be done. - """ - -def noraw() -> None: - """Leave raw mode. - - Return to normal "cooked" mode with line buffering. - """ - -def pair_content(pair_number: int, /) -> tuple[int, int]: - """Return a tuple (fg, bg) containing the colors for the requested color pair. - - pair_number - The number of the color pair (0 - (COLOR_PAIRS-1)). - """ - -def pair_number(attr: int, /) -> int: - """Return the number of the color-pair set by the specified attribute value. - - color_pair() is the counterpart to this function. - """ - -def putp(string: ReadOnlyBuffer, /) -> None: - """Emit the value of a specified terminfo capability for the current terminal. - - Note that the output of putp() always goes to standard output. - """ - -def qiflush(flag: bool = True, /) -> None: - """Enable queue flushing. - - flag - If false, the effect is the same as calling noqiflush(). - - If queue flushing is enabled, all output in the display driver queue - will be flushed when the INTR, QUIT and SUSP characters are read. - """ - -def raw(flag: bool = True, /) -> None: - """Enter raw mode. - - flag - If false, the effect is the same as calling noraw(). - - In raw mode, normal line buffering and processing of interrupt, quit, - suspend, and flow control keys are turned off; characters are presented to - curses input functions one by one. - """ - -def reset_prog_mode() -> None: - """Restore the terminal to "program" mode, as previously saved by def_prog_mode().""" - -def reset_shell_mode() -> None: - """Restore the terminal to "shell" mode, as previously saved by def_shell_mode().""" - -def resetty() -> None: - """Restore terminal mode.""" - -def resize_term(nlines: int, ncols: int, /) -> None: - """Backend function used by resizeterm(), performing most of the work. - - nlines - Height. - ncols - Width. - - When resizing the windows, resize_term() blank-fills the areas that are - extended. The calling application should fill in these areas with appropriate - data. 
The resize_term() function attempts to resize all windows. However, - due to the calling convention of pads, it is not possible to resize these - without additional interaction with the application. - """ - -def resizeterm(nlines: int, ncols: int, /) -> None: - """Resize the standard and current windows to the specified dimensions. - - nlines - Height. - ncols - Width. - - Adjusts other bookkeeping data used by the curses library that record the - window dimensions (in particular the SIGWINCH handler). - """ - -def savetty() -> None: - """Save terminal mode.""" - -def set_escdelay(ms: int, /) -> None: - """Sets the curses ESCDELAY setting. - - ms - length of the delay in milliseconds. - - Sets the number of milliseconds to wait after reading an escape character, - to distinguish between an individual escape character entered on the - keyboard from escape sequences sent by cursor and function keys. - """ - -def set_tabsize(size: int, /) -> None: - """Sets the curses TABSIZE setting. - - size - rendered cell width of a tab character. - - Sets the number of columns used by the curses library when converting a tab - character to spaces as it adds the tab to a window. - """ - -def setsyx(y: int, x: int, /) -> None: - """Set the virtual screen cursor. - - y - Y-coordinate. - x - X-coordinate. - - If y and x are both -1, then leaveok is set. - """ - -def setupterm(term: str | None = None, fd: int = -1) -> None: - """Initialize the terminal. - - term - Terminal name. - If omitted, the value of the TERM environment variable will be used. - fd - File descriptor to which any initialization sequences will be sent. - If not supplied, the file descriptor for sys.stdout will be used. - """ - -def start_color() -> None: - """Initializes eight basic colors and global variables COLORS and COLOR_PAIRS. - - Must be called if the programmer wants to use colors, and before any other - color manipulation routine is called. It is good practice to call this - routine right after initscr(). - - It also restores the colors on the terminal to the values they had when the - terminal was just turned on. - """ - -def termattrs() -> int: - """Return a logical OR of all video attributes supported by the terminal.""" - -def termname() -> bytes: - """Return the value of the environment variable TERM, truncated to 14 characters.""" - -def tigetflag(capname: str, /) -> int: - """Return the value of the Boolean capability. - - capname - The terminfo capability name. - - The value -1 is returned if capname is not a Boolean capability, or 0 if - it is canceled or absent from the terminal description. - """ - -def tigetnum(capname: str, /) -> int: - """Return the value of the numeric capability. - - capname - The terminfo capability name. - - The value -2 is returned if capname is not a numeric capability, or -1 if - it is canceled or absent from the terminal description. - """ - -def tigetstr(capname: str, /) -> bytes | None: - """Return the value of the string capability. - - capname - The terminfo capability name. - - None is returned if capname is not a string capability, or is canceled or - absent from the terminal description. - """ - +def is_term_resized(nlines: int, ncols: int, /) -> bool: ... +def isendwin() -> bool: ... +def keyname(key: int, /) -> bytes: ... +def killchar() -> bytes: ... +def longname() -> bytes: ... +def meta(yes: bool, /) -> None: ... +def mouseinterval(interval: int, /) -> None: ... +def mousemask(newmask: int, /) -> tuple[int, int]: ... +def napms(ms: int, /) -> int: ... 
+def newpad(nlines: int, ncols: int, /) -> window: ... +def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: ... +def nl(flag: bool = True, /) -> None: ... +def nocbreak() -> None: ... +def noecho() -> None: ... +def nonl() -> None: ... +def noqiflush() -> None: ... +def noraw() -> None: ... +def pair_content(pair_number: int, /) -> tuple[int, int]: ... +def pair_number(attr: int, /) -> int: ... +def putp(string: ReadOnlyBuffer, /) -> None: ... +def qiflush(flag: bool = True, /) -> None: ... +def raw(flag: bool = True, /) -> None: ... +def reset_prog_mode() -> None: ... +def reset_shell_mode() -> None: ... +def resetty() -> None: ... +def resize_term(nlines: int, ncols: int, /) -> None: ... +def resizeterm(nlines: int, ncols: int, /) -> None: ... +def savetty() -> None: ... +def set_escdelay(ms: int, /) -> None: ... +def set_tabsize(size: int, /) -> None: ... +def setsyx(y: int, x: int, /) -> None: ... +def setupterm(term: str | None = None, fd: int = -1) -> None: ... +def start_color() -> None: ... +def termattrs() -> int: ... +def termname() -> bytes: ... +def tigetflag(capname: str, /) -> int: ... +def tigetnum(capname: str, /) -> int: ... +def tigetstr(capname: str, /) -> bytes | None: ... def tparm( str: ReadOnlyBuffer, i1: int = 0, @@ -813,57 +364,15 @@ def tparm( i8: int = 0, i9: int = 0, /, -) -> bytes: - """Instantiate the specified byte string with the supplied parameters. - - str - Parameterized byte string obtained from the terminfo database. - """ - -def typeahead(fd: int, /) -> None: - """Specify that the file descriptor fd be used for typeahead checking. - - fd - File descriptor. - - If fd is -1, then no typeahead checking is done. - """ - -def unctrl(ch: _ChType, /) -> bytes: - """Return a string which is a printable representation of the character ch. - - Control characters are displayed as a caret followed by the character, - for example as ^C. Printing characters are left as they are. - """ - -def unget_wch(ch: int | str, /) -> None: - """Push ch so the next get_wch() will return it.""" - -def ungetch(ch: _ChType, /) -> None: - """Push ch so the next getch() will return it.""" - -def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: - """Push a KEY_MOUSE event onto the input queue. - - The following getmouse() will return the given state data. - """ - +) -> bytes: ... +def typeahead(fd: int, /) -> None: ... +def unctrl(ch: _ChType, /) -> bytes: ... +def unget_wch(ch: int | str, /) -> None: ... +def ungetch(ch: _ChType, /) -> None: ... +def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ... def update_lines_cols() -> None: ... -def use_default_colors() -> None: - """Equivalent to assume_default_colors(-1, -1).""" - -def use_env(flag: bool, /) -> None: - """Use environment variables LINES and COLUMNS. - - If used, this function should be called before initscr() or newterm() are - called. - - When flag is False, the values of lines and columns specified in the terminfo - database will be used, even if environment variables LINES and COLUMNS (used - by default) are set, or if curses is running in a window (in which case - default behavior would be to use the window size if LINES and COLUMNS are - not set). - """ +def use_default_colors() -> None: ... +def use_env(flag: bool, /) -> None: ... class error(Exception): ... @@ -871,100 +380,22 @@ class error(Exception): ... class window: # undocumented encoding: str @overload - def addch(self, ch: _ChType, attr: int = ...) 
-> None: - """addch([y, x,] ch, [attr=_curses.A_NORMAL]) - Paint the character. - - y - Y-coordinate. - x - X-coordinate. - ch - Character to add. - attr - Attributes for the character. - - Paint character ch at (y, x) with attributes attr, - overwriting any character previously painted at that location. - By default, the character position and attributes are the - current settings for the window object. - """ - + def addch(self, ch: _ChType, attr: int = ...) -> None: ... @overload def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... @overload - def addnstr(self, str: str, n: int, attr: int = ...) -> None: - """addnstr([y, x,] str, n, [attr]) - Paint at most n characters of the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to add. - n - Maximal number of characters. - attr - Attributes for characters. - - Paint at most n characters of the string str at (y, x) with - attributes attr, overwriting anything previously on the display. - By default, the character position and attributes are the - current settings for the window object. - """ - + def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... @overload def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload - def addstr(self, str: str, attr: int = ...) -> None: - """addstr([y, x,] str, [attr]) - Paint the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to add. - attr - Attributes for characters. - - Paint the string str at (y, x) with attributes attr, - overwriting anything previously on the display. - By default, the character position and attributes are the - current settings for the window object. - """ - + def addstr(self, str: str, attr: int = ...) -> None: ... @overload def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... - def attroff(self, attr: int, /) -> None: - """Remove attribute attr from the "background" set.""" - - def attron(self, attr: int, /) -> None: - """Add attribute attr from the "background" set.""" - - def attrset(self, attr: int, /) -> None: - """Set the "background" set of attributes.""" - - def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: - """Set the background property of the window. - - ch - Background character. - attr - Background attributes. - """ - - def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: - """Set the window's background. - - ch - Background character. - attr - Background attributes. - """ - + def attroff(self, attr: int, /) -> None: ... + def attron(self, attr: int, /) -> None: ... + def attrset(self, attr: int, /) -> None: ... + def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: ... + def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: ... def border( self, ls: _ChType = ..., @@ -975,70 +406,13 @@ class window: # undocumented tr: _ChType = ..., bl: _ChType = ..., br: _ChType = ..., - ) -> None: - """Draw a border around the edges of the window. - - ls - Left side. - rs - Right side. - ts - Top side. - bs - Bottom side. - tl - Upper-left corner. - tr - Upper-right corner. - bl - Bottom-left corner. - br - Bottom-right corner. - - Each parameter specifies the character to use for a specific part of the - border. The characters can be specified as integers or as one-character - strings. A 0 value for any parameter will cause the default character to be - used for that parameter. - """ - + ) -> None: ... @overload - def box(self) -> None: - """box([verch=0, horch=0]) - Draw a border around the edges of the window. - - verch - Left and right side. 
- horch - Top and bottom side. - - Similar to border(), but both ls and rs are verch and both ts and bs are - horch. The default corner characters are always used by this function. - """ - + def box(self) -> None: ... @overload def box(self, vertch: _ChType = 0, horch: _ChType = 0) -> None: ... @overload - def chgat(self, attr: int) -> None: - """chgat([y, x,] [n=-1,] attr) - Set the attributes of characters. - - y - Y-coordinate. - x - X-coordinate. - n - Number of characters. - attr - Attributes for characters. - - Set the attributes of num characters at the current cursor position, or at - position (y, x) if supplied. If no value of num is given or num = -1, the - attribute will be set on all the characters to the end of the line. This - function does not move the cursor. The changed line will be touched using - the touchline() method so that the contents will be redisplayed by the next - window refresh. - """ - + def chgat(self, attr: int) -> None: ... @overload def chgat(self, num: int, attr: int) -> None: ... @overload @@ -1051,128 +425,35 @@ class window: # undocumented def clrtoeol(self) -> None: ... def cursyncup(self) -> None: ... @overload - def delch(self) -> None: - """delch([y, x]) - Delete any character at (y, x). - - y - Y-coordinate. - x - X-coordinate. - """ - + def delch(self) -> None: ... @overload def delch(self, y: int, x: int) -> None: ... def deleteln(self) -> None: ... @overload - def derwin(self, begin_y: int, begin_x: int) -> window: - """derwin([nlines=0, ncols=0,] begin_y, begin_x) - Create a sub-window (window-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - - derwin() is the same as calling subwin(), except that begin_y and begin_x - are relative to the origin of the window, rather than relative to the entire - screen. - """ - + def derwin(self, begin_y: int, begin_x: int) -> window: ... @overload def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... - def echochar(self, ch: _ChType, attr: int = 0, /) -> None: - """Add character ch with attribute attr, and refresh. - - ch - Character to add. - attr - Attributes for the character. - """ - - def enclose(self, y: int, x: int, /) -> bool: - """Return True if the screen-relative coordinates are enclosed by the window. - - y - Y-coordinate. - x - X-coordinate. - """ - + def echochar(self, ch: _ChType, attr: int = 0, /) -> None: ... + def enclose(self, y: int, x: int, /) -> bool: ... def erase(self) -> None: ... def getbegyx(self) -> tuple[int, int]: ... - def getbkgd(self) -> tuple[int, int]: - """Return the window's current background character/attribute pair.""" - + def getbkgd(self) -> tuple[int, int]: ... @overload - def getch(self) -> int: - """getch([y, x]) - Get a character code from terminal keyboard. - - y - Y-coordinate. - x - X-coordinate. - - The integer returned does not have to be in ASCII range: function keys, - keypad keys and so on return numbers higher than 256. In no-delay mode, -1 - is returned if there is no input, else getch() waits until a key is pressed. - """ - + def getch(self) -> int: ... @overload def getch(self, y: int, x: int) -> int: ... @overload - def get_wch(self) -> int | str: - """get_wch([y, x]) - Get a wide character from terminal keyboard. - - y - Y-coordinate. - x - X-coordinate. - - Return a character for most keys, or an integer for function keys, - keypad keys, and other special keys. - """ - + def get_wch(self) -> int | str: ... 
@overload def get_wch(self, y: int, x: int) -> int | str: ... @overload - def getkey(self) -> str: - """getkey([y, x]) - Get a character (string) from terminal keyboard. - - y - Y-coordinate. - x - X-coordinate. - - Returning a string instead of an integer, as getch() does. Function keys, - keypad keys and other special keys return a multibyte string containing the - key name. In no-delay mode, an exception is raised if there is no input. - """ - + def getkey(self) -> str: ... @overload def getkey(self, y: int, x: int) -> str: ... def getmaxyx(self) -> tuple[int, int]: ... def getparyx(self) -> tuple[int, int]: ... @overload - def getstr(self) -> bytes: - """getstr([[y, x,] n=2047]) - Read a string from the user, with primitive line editing capacity. - - y - Y-coordinate. - x - X-coordinate. - n - Maximal number of characters. - """ - + def getstr(self) -> bytes: ... @overload def getstr(self, n: int) -> bytes: ... @overload @@ -1181,142 +462,35 @@ class window: # undocumented def getstr(self, y: int, x: int, n: int) -> bytes: ... def getyx(self) -> tuple[int, int]: ... @overload - def hline(self, ch: _ChType, n: int) -> None: - """hline([y, x,] ch, n, [attr=_curses.A_NORMAL]) - Display a horizontal line. - - y - Starting Y-coordinate. - x - Starting X-coordinate. - ch - Character to draw. - n - Line length. - attr - Attributes for the characters. - """ - + def hline(self, ch: _ChType, n: int) -> None: ... @overload def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... def idcok(self, flag: bool) -> None: ... def idlok(self, yes: bool) -> None: ... def immedok(self, flag: bool) -> None: ... @overload - def inch(self) -> int: - """inch([y, x]) - Return the character at the given position in the window. - - y - Y-coordinate. - x - X-coordinate. - - The bottom 8 bits are the character proper, and upper bits are the attributes. - """ - + def inch(self) -> int: ... @overload def inch(self, y: int, x: int) -> int: ... @overload - def insch(self, ch: _ChType, attr: int = ...) -> None: - """insch([y, x,] ch, [attr=_curses.A_NORMAL]) - Insert a character before the current or specified position. - - y - Y-coordinate. - x - X-coordinate. - ch - Character to insert. - attr - Attributes for the character. - - All characters to the right of the cursor are shifted one position right, with - the rightmost characters on the line being lost. - """ - + def insch(self, ch: _ChType, attr: int = ...) -> None: ... @overload def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... def insdelln(self, nlines: int) -> None: ... def insertln(self) -> None: ... @overload - def insnstr(self, str: str, n: int, attr: int = ...) -> None: - """insnstr([y, x,] str, n, [attr]) - Insert at most n characters of the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to insert. - n - Maximal number of characters. - attr - Attributes for characters. - - Insert a character string (as many characters as will fit on the line) - before the character under the cursor, up to n characters. If n is zero - or negative, the entire string is inserted. All characters to the right - of the cursor are shifted right, with the rightmost characters on the line - being lost. The cursor position does not change (after moving to y, x, if - specified). - """ - + def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... @overload def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload - def insstr(self, str: str, attr: int = ...) 
-> None: - """insstr([y, x,] str, [attr]) - Insert the string before the current or specified position. - - y - Y-coordinate. - x - X-coordinate. - str - String to insert. - attr - Attributes for characters. - - Insert a character string (as many characters as will fit on the line) - before the character under the cursor. All characters to the right of - the cursor are shifted right, with the rightmost characters on the line - being lost. The cursor position does not change (after moving to y, x, - if specified). - """ - + def insstr(self, str: str, attr: int = ...) -> None: ... @overload def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... @overload - def instr(self, n: int = 2047) -> bytes: - """instr([y, x,] n=2047) - Return a string of characters, extracted from the window. - - y - Y-coordinate. - x - X-coordinate. - n - Maximal number of characters. - - Return a string of characters, extracted from the window starting at the - current cursor position, or at y, x if specified. Attributes are stripped - from the characters. If n is specified, instr() returns a string at most - n characters long (exclusive of the trailing NUL). - """ - + def instr(self, n: int = 2047) -> bytes: ... @overload def instr(self, y: int, x: int, n: int = 2047) -> bytes: ... - def is_linetouched(self, line: int, /) -> bool: - """Return True if the specified line was modified, otherwise return False. - - line - Line number. - - Raise a curses.error exception if line is not valid for the given window. - """ - + def is_linetouched(self, line: int, /) -> bool: ... def is_wintouched(self) -> bool: ... def keypad(self, yes: bool, /) -> None: ... def leaveok(self, yes: bool) -> None: ... @@ -1326,190 +500,51 @@ class window: # undocumented def nodelay(self, yes: bool) -> None: ... def notimeout(self, yes: bool) -> None: ... @overload - def noutrefresh(self) -> None: - """noutrefresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) - Mark for refresh but wait. - - This function updates the data structure representing the desired state of the - window, but does not force an update of the physical screen. To accomplish - that, call doupdate(). - """ - + def noutrefresh(self) -> None: ... @overload def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... @overload - def overlay(self, destwin: window) -> None: - """overlay(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol]) - Overlay the window on top of destwin. - - The windows need not be the same size, only the overlapping region is copied. - This copy is non-destructive, which means that the current background - character does not overwrite the old contents of destwin. - - To get fine-grained control over the copied region, the second form of - overlay() can be used. sminrow and smincol are the upper-left coordinates - of the source window, and the other variables mark a rectangle in the - destination window. - """ - + def overlay(self, destwin: window) -> None: ... @overload def overlay( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int ) -> None: ... @overload - def overwrite(self, destwin: window) -> None: - """overwrite(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, - dmaxcol]) - Overwrite the window on top of destwin. - - The windows need not be the same size, in which case only the overlapping - region is copied. 
This copy is destructive, which means that the current - background character overwrites the old contents of destwin. - - To get fine-grained control over the copied region, the second form of - overwrite() can be used. sminrow and smincol are the upper-left coordinates - of the source window, the other variables mark a rectangle in the destination - window. - """ - + def overwrite(self, destwin: window) -> None: ... @overload def overwrite( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int ) -> None: ... - def putwin(self, file: SupportsWrite[bytes], /) -> None: - """Write all data associated with the window into the provided file object. - - This information can be later retrieved using the getwin() function. - """ - - def redrawln(self, beg: int, num: int, /) -> None: - """Mark the specified lines corrupted. - - beg - Starting line number. - num - The number of lines. - - They should be completely redrawn on the next refresh() call. - """ - + def putwin(self, file: SupportsWrite[bytes], /) -> None: ... + def redrawln(self, beg: int, num: int, /) -> None: ... def redrawwin(self) -> None: ... @overload - def refresh(self) -> None: - """refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) - Update the display immediately. - - Synchronize actual screen with previous drawing/deleting methods. - The 6 optional arguments can only be specified when the window is a pad - created with newpad(). The additional parameters are needed to indicate - what part of the pad and screen are involved. pminrow and pmincol specify - the upper left-hand corner of the rectangle to be displayed in the pad. - sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to - be displayed on the screen. The lower right-hand corner of the rectangle to - be displayed in the pad is calculated from the screen coordinates, since the - rectangles must be the same size. Both rectangles must be entirely contained - within their respective structures. Negative values of pminrow, pmincol, - sminrow, or smincol are treated as if they were zero. - """ - + def refresh(self) -> None: ... @overload def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... def resize(self, nlines: int, ncols: int) -> None: ... - def scroll(self, lines: int = 1) -> None: - """scroll([lines=1]) - Scroll the screen or scrolling region. - - lines - Number of lines to scroll. - - Scroll upward if the argument is positive and downward if it is negative. - """ - + def scroll(self, lines: int = 1) -> None: ... def scrollok(self, flag: bool) -> None: ... - def setscrreg(self, top: int, bottom: int, /) -> None: - """Define a software scrolling region. - - top - First line number. - bottom - Last line number. - - All scrolling actions will take place in this region. - """ - + def setscrreg(self, top: int, bottom: int, /) -> None: ... def standend(self) -> None: ... def standout(self) -> None: ... @overload - def subpad(self, begin_y: int, begin_x: int) -> window: - """subwin([nlines=0, ncols=0,] begin_y, begin_x) - Create a sub-window (screen-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - - By default, the sub-window will extend from the specified position to the - lower right corner of the window. - """ - + def subpad(self, begin_y: int, begin_x: int) -> window: ... 
@overload def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... @overload - def subwin(self, begin_y: int, begin_x: int) -> window: - """subwin([nlines=0, ncols=0,] begin_y, begin_x) - Create a sub-window (screen-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - - By default, the sub-window will extend from the specified position to the - lower right corner of the window. - """ - + def subwin(self, begin_y: int, begin_x: int) -> window: ... @overload def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... def syncdown(self) -> None: ... def syncok(self, flag: bool) -> None: ... def syncup(self) -> None: ... def timeout(self, delay: int) -> None: ... - def touchline(self, start: int, count: int, changed: bool = True) -> None: - """touchline(start, count, [changed=True]) - Pretend count lines have been changed, starting with line start. - - If changed is supplied, it specifies whether the affected lines are marked - as having been changed (changed=True) or unchanged (changed=False). - """ - + def touchline(self, start: int, count: int, changed: bool = True) -> None: ... def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload - def vline(self, ch: _ChType, n: int) -> None: - """vline([y, x,] ch, n, [attr=_curses.A_NORMAL]) - Display a vertical line. - - y - Starting Y-coordinate. - x - Starting X-coordinate. - ch - Character to draw. - n - Line length. - attr - Attributes for the character. - """ - + def vline(self, ch: _ChType, n: int) -> None: ... @overload def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi index 39c877ed5816b..a552a151ddf14 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi @@ -8,56 +8,20 @@ class error(Exception): ... @final class panel: - def above(self) -> panel: - """Return the panel above the current panel.""" - - def below(self) -> panel: - """Return the panel below the current panel.""" - - def bottom(self) -> None: - """Push the panel to the bottom of the stack.""" - - def hidden(self) -> bool: - """Return True if the panel is hidden (not visible), False otherwise.""" - - def hide(self) -> None: - """Hide the panel. - - This does not delete the object, it just makes the window on screen invisible. 
- """ - - def move(self, y: int, x: int, /) -> None: - """Move the panel to the screen coordinates (y, x).""" - - def replace(self, win: window, /) -> None: - """Change the window associated with the panel to the window win.""" - - def set_userptr(self, obj: object, /) -> None: - """Set the panel's user pointer to obj.""" - - def show(self) -> None: - """Display the panel (which might have been hidden).""" - - def top(self) -> None: - """Push panel to the top of the stack.""" - - def userptr(self) -> object: - """Return the user pointer for the panel.""" - - def window(self) -> window: - """Return the window object associated with the panel.""" - -def bottom_panel() -> panel: - """Return the bottom panel in the panel stack.""" - -def new_panel(win: window, /) -> panel: - """Return a panel object, associating it with the given window win.""" - -def top_panel() -> panel: - """Return the top panel in the panel stack.""" - -def update_panels() -> panel: - """Updates the virtual screen after changes in the panel stack. - - This does not call curses.doupdate(), so you'll have to do this yourself. - """ + def above(self) -> panel: ... + def below(self) -> panel: ... + def bottom(self) -> None: ... + def hidden(self) -> bool: ... + def hide(self) -> None: ... + def move(self, y: int, x: int, /) -> None: ... + def replace(self, win: window, /) -> None: ... + def set_userptr(self, obj: object, /) -> None: ... + def show(self) -> None: ... + def top(self) -> None: ... + def userptr(self) -> object: ... + def window(self) -> window: ... + +def bottom_panel() -> panel: ... +def new_panel(win: window, /) -> panel: ... +def top_panel() -> panel: ... +def update_panels() -> panel: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi index 7d6157c503011..222c3ffcb246b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi @@ -39,26 +39,6 @@ if sys.platform != "win32": __init__: None # type: ignore[assignment] if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: - """Return a database object. - - filename - The filename to open. - flags - How to open the file. "r" for reading, "w" for writing, etc. - mode - If creating a new file, the mode bits for the new file - (e.g. os.O_RDWR). - """ + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... else: - def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: - """Return a database object. - - filename - The filename to open. - flags - How to open the file. "r" for reading, "w" for writing, etc. - mode - If creating a new file, the mode bits for the new file - (e.g. os.O_RDWR). - """ + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi index bda849f6988e1..3cfe8944dfaf4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi @@ -1,5 +1,3 @@ -"""C decimal arithmetic module""" - import sys from decimal import ( Clamped as Clamped, @@ -46,11 +44,8 @@ MIN_ETINY: Final[int] if sys.version_info >= (3, 14): IEEE_CONTEXT_MAX_BITS: Final[int] -def setcontext(context: Context, /) -> None: - """Set a new default context.""" - -def getcontext() -> Context: - """Get the current default context.""" +def setcontext(context: Context, /) -> None: ... +def getcontext() -> Context: ... if sys.version_info >= (3, 11): def localcontext( @@ -64,30 +59,13 @@ if sys.version_info >= (3, 11): clamp: int | None = None, traps: dict[_TrapType, bool] | None = None, flags: dict[_TrapType, bool] | None = None, - ) -> _ContextManager: - """Return a context manager that will set the default context to a copy of ctx - on entry to the with-statement and restore the previous default context when - exiting the with-statement. If no context is specified, a copy of the current - default context is used. - - """ + ) -> _ContextManager: ... else: - def localcontext(ctx: Context | None = None) -> _ContextManager: - """Return a context manager that will set the default context to a copy of ctx - on entry to the with-statement and restore the previous default context when - exiting the with-statement. If no context is specified, a copy of the current - default context is used. - - """ + def localcontext(ctx: Context | None = None) -> _ContextManager: ... if sys.version_info >= (3, 14): - def IEEEContext(bits: int, /) -> Context: - """Return a context object initialized to the proper values for one of the - IEEE interchange formats. The argument must be a multiple of 32 and less - than IEEE_CONTEXT_MAX_BITS. - - """ + def IEEEContext(bits: int, /) -> Context: ... DefaultContext: Context BasicContext: Context diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi index 7ca4f5b4774c6..58db64a016f34 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi @@ -1,12 +1,3 @@ -"""Core implementation of import. - -This module is NOT meant to be directly imported! It has been designed such -that it can be bootstrapped into Python as the implementation of import. As -such it requires the injection of specific modules and attributes in order to -work. One should use importlib as the public-facing version of this module. - -""" - import importlib.abc import importlib.machinery import sys @@ -24,67 +15,16 @@ def __import__( locals: Mapping[str, object] | None = None, fromlist: Sequence[str] | None = (), level: int = 0, -) -> ModuleType: - """Import a module. - - The 'globals' argument is used to infer where the import is occurring from - to handle relative imports. The 'locals' argument is ignored. The - 'fromlist' argument specifies what should exist as attributes on the module - being imported (e.g. ``from module import ``). The 'level' - argument represents the package location to import from in a relative - import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). - - """ - +) -> ModuleType: ... 
def spec_from_loader( name: str, loader: LoaderProtocol | None, *, origin: str | None = None, is_package: bool | None = None -) -> importlib.machinery.ModuleSpec | None: - """Return a module spec based on various loader methods.""" - -def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: - """Create a module based on the provided spec.""" - +) -> importlib.machinery.ModuleSpec | None: ... +def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... def _init_module_attrs( spec: importlib.machinery.ModuleSpec, module: types.ModuleType, *, override: bool = False ) -> types.ModuleType: ... class ModuleSpec: - """The specification for a module, used for loading. - - A module's spec is the source for information about the module. For - data associated with the module, including source, use the spec's - loader. - - `name` is the absolute name of the module. `loader` is the loader - to use when loading the module. `parent` is the name of the - package the module is in. The parent is derived from the name. - - `is_package` determines if the module is considered a package or - not. On modules this is reflected by the `__path__` attribute. - - `origin` is the specific location used by the loader from which to - load the module, if that information is available. When filename is - set, origin will match. - - `has_location` indicates that a spec's "origin" reflects a location. - When this is True, `__file__` attribute of the module is set. - - `cached` is the location of the cached bytecode file, if any. It - corresponds to the `__cached__` attribute. - - `submodule_search_locations` is the sequence of path entries to - search when importing submodules. If set, is_package should be - True--and False otherwise. - - Packages are simply modules that (may) have submodules. If a spec - has a non-None value in `submodule_search_locations`, the import - system will consider modules loaded from the spec as packages. - - Only finders (see importlib.abc.MetaPathFinder and - importlib.abc.PathEntryFinder) should modify ModuleSpec instances. - - """ - def __init__( self, name: str, @@ -101,32 +41,17 @@ class ModuleSpec: loader_state: Any cached: str | None @property - def parent(self) -> str | None: - """The name of the module's parent.""" + def parent(self) -> str | None: ... has_location: bool def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): - """Meta path import for built-in modules. - - All methods are either class or static methods to avoid the need to - instantiate the class. - - """ - # MetaPathFinder if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """Find the built-in module. - - If 'path' is ever specified then the search is considered a failure. - - This method is deprecated. Use find_spec() instead. - - """ + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -134,24 +59,13 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) ) -> ModuleSpec | None: ... 
# InspectLoader @classmethod - def is_package(cls, fullname: str) -> bool: - """Return False as built-in modules are never packages.""" - + def is_package(cls, fullname: str) -> bool: ... @classmethod - def load_module(cls, fullname: str) -> types.ModuleType: - """Load the specified module into sys.modules and return it. - - This method is deprecated. Use loader.exec_module() instead. - - """ - + def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod - def get_code(cls, fullname: str) -> None: - """Return None as built-in modules do not have code objects.""" - + def get_code(cls, fullname: str) -> None: ... @classmethod - def get_source(cls, fullname: str) -> None: - """Return None as built-in modules do not have source code.""" + def get_source(cls, fullname: str) -> None: ... # Loader if sys.version_info < (3, 12): @staticmethod @@ -159,47 +73,24 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ + def module_repr(module: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod - def create_module(spec: ModuleSpec) -> types.ModuleType | None: - """Create a built-in module""" - + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... @staticmethod - def exec_module(module: types.ModuleType) -> None: - """Exec a built-in module""" + def exec_module(module: types.ModuleType) -> None: ... else: @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: - """Create a built-in module""" - + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... @classmethod - def exec_module(cls, module: types.ModuleType) -> None: - """Exec a built-in module""" + def exec_module(cls, module: types.ModuleType) -> None: ... class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): - """Meta path import for frozen modules. - - All methods are either class or static methods to avoid the need to - instantiate the class. - - """ - # MetaPathFinder if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """Find a frozen module. - - This method is deprecated. Use find_spec() instead. - - """ + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -207,24 +98,13 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): ) -> ModuleSpec | None: ... # InspectLoader @classmethod - def is_package(cls, fullname: str) -> bool: - """Return True if the frozen module is a package.""" - + def is_package(cls, fullname: str) -> bool: ... @classmethod - def load_module(cls, fullname: str) -> types.ModuleType: - """Load a frozen module. - - This method is deprecated. Use exec_module() instead. - - """ - + def load_module(cls, fullname: str) -> types.ModuleType: ... @classmethod - def get_code(cls, fullname: str) -> None: - """Return the code object for the frozen module.""" - + def get_code(cls, fullname: str) -> None: ... 
@classmethod - def get_source(cls, fullname: str) -> None: - """Return None as frozen modules do not have source code.""" + def get_source(cls, fullname: str) -> None: ... # Loader if sys.version_info < (3, 12): @staticmethod @@ -232,20 +112,13 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(m: types.ModuleType) -> str: - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ + def module_repr(m: types.ModuleType) -> str: ... if sys.version_info >= (3, 10): @staticmethod - def create_module(spec: ModuleSpec) -> types.ModuleType | None: - """Set __file__, if able.""" + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... else: @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: - """Use default semantics for module creation.""" + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... @staticmethod def exec_module(module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi index bd0291a7fe512..4778be3af1f39 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi @@ -1,12 +1,3 @@ -"""Core implementation of path-based import. - -This module is NOT meant to be directly imported! It has been designed such -that it can be bootstrapped into Python as the implementation of import. As -such it requires the injection of specific modules and attributes in order to -work. One should use importlib as the public-facing version of this module. - -""" - import _ast import _io import importlib.abc @@ -35,75 +26,25 @@ else: MAGIC_NUMBER: Final[bytes] -def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: - """Given the path to a .py file, return the path to its .pyc file. - - The .py file does not need to exist; this simply returns the path to the - .pyc file calculated as if the .py file were imported. - - The 'optimization' parameter controls the presumed optimization level of - the bytecode file. If 'optimization' is not None, the string representation - of the argument is taken and verified to be alphanumeric (else ValueError - is raised). - - The debug_override parameter is deprecated. If debug_override is not None, - a True value is the same as setting 'optimization' to the empty string - while a False value is equivalent to setting 'optimization' to '1'. - - If sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - -def source_from_cache(path: StrPath) -> str: - """Given the path to a .pyc. file, return the path to its .py file. - - The .pyc file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc file. If path does - not conform to PEP 3147/488 format, ValueError will be raised. If - sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - -def decode_source(source_bytes: ReadableBuffer) -> str: - """Decode bytes representing source code and return the string. - - Universal newline support is used in the decoding. 
- """ - +def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... def spec_from_file_location( name: str, location: StrOrBytesPath | None = None, *, loader: LoaderProtocol | None = None, submodule_search_locations: list[str] | None = ..., -) -> importlib.machinery.ModuleSpec | None: - """Return a module spec based on a file location. - - To indicate that the module is a package, set - submodule_search_locations to a list of directory paths. An - empty list is sufficient, though its not otherwise useful to the - import system. - - The loader must take a spec as its only __init__() arg. - - """ - +) -> importlib.machinery.ModuleSpec | None: ... @deprecated( "Deprecated since Python 3.6. Use site configuration instead. " "Future versions of Python may not enable this finder by default." ) class WindowsRegistryFinder(importlib.abc.MetaPathFinder): - """Meta path finder for modules declared in the Windows registry.""" - if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """Find module named in the registry. - - This method is deprecated. Use find_spec() instead. - - """ + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( @@ -111,61 +52,27 @@ class WindowsRegistryFinder(importlib.abc.MetaPathFinder): ) -> ModuleSpec | None: ... class PathFinder(importlib.abc.MetaPathFinder): - """Meta path finder for sys.path and package __path__ attributes.""" - if sys.version_info >= (3, 10): @staticmethod - def invalidate_caches() -> None: - """Call the invalidate_caches() method on all path entry finders - stored in sys.path_importer_cache (where implemented). - """ + def invalidate_caches() -> None: ... else: @classmethod - def invalidate_caches(cls) -> None: - """Call the invalidate_caches() method on all path entry finders - stored in sys.path_importer_caches (where implemented). - """ + def invalidate_caches(cls) -> None: ... if sys.version_info >= (3, 10): @staticmethod - def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. - """ + def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... else: @classmethod - def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. - """ + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... @classmethod def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None - ) -> ModuleSpec | None: - """Try to find a spec for 'fullname' on sys.path or 'path'. 
- - The search is based on sys.path_hooks and sys.path_importer_cache. - """ + ) -> ModuleSpec | None: ... if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: - """find the module on sys.path or 'path' based on sys.path_hooks and - sys.path_importer_cache. - - This method is deprecated. Use find_spec() instead. - - """ + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... SOURCE_SUFFIXES: Final[list[str]] DEBUG_BYTECODE_SUFFIXES: Final = [".pyc"] @@ -174,122 +81,36 @@ BYTECODE_SUFFIXES: Final = [".pyc"] EXTENSION_SUFFIXES: Final[list[str]] class FileFinder(importlib.abc.PathEntryFinder): - """File-based finder. - - Interactions with the file system are cached for performance, being - refreshed when the directory the finder is handling has been modified. - - """ - path: str - def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: - """Initialize with the path to search on and a variable number of - 2-tuples containing the loader and the file suffixes the loader - recognizes. - """ - + def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: ... @classmethod def path_hook( cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] - ) -> Callable[[str], importlib.abc.PathEntryFinder]: - """A class method which returns a closure to use on sys.path_hook - which will return an instance using the specified loaders and the path - called on the closure. - - If the path called on the closure is not a directory, ImportError is - raised. - - """ + ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class _LoaderBasics: - """Base class of common code needed by both SourceLoader and - SourcelessFileLoader. - """ - - def is_package(self, fullname: str) -> bool: - """Concrete implementation of InspectLoader.is_package by checking if - the path returned by get_filename has a filename of '__init__.py'. - """ - - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: - """Use default semantics for module creation.""" - - def exec_module(self, module: types.ModuleType) -> None: - """Execute the module.""" - - def load_module(self, fullname: str) -> types.ModuleType: - """This method is deprecated.""" + def is_package(self, fullname: str) -> bool: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + def load_module(self, fullname: str) -> types.ModuleType: ... class SourceLoader(_LoaderBasics): - def path_mtime(self, path: str) -> float: - """Optional method that returns the modification time (an int) for the - specified path (a str). - - Raises OSError when the path cannot be handled. - """ - - def set_data(self, path: str, data: bytes) -> None: - """Optional method which writes data (bytes) to a file path (a str). - - Implementing this method allows for the writing of bytecode files. - """ - - def get_source(self, fullname: str) -> str | None: - """Concrete implementation of InspectLoader.get_source.""" - - def path_stats(self, path: str) -> Mapping[str, Any]: - """Optional method returning a metadata dict for the specified - path (a str). 
- - Possible keys: - - 'mtime' (mandatory) is the numeric timestamp of last source - code modification; - - 'size' (optional) is the size in bytes of the source code. - - Implementing this method allows the loader to read bytecode files. - Raises OSError when the path cannot be handled. - """ - + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... + def get_source(self, fullname: str) -> str | None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... def source_to_code( - self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: ReadableBuffer | StrPath - ) -> types.CodeType: - """Return the code object compiled from source. - - The 'data' argument can be any object type that compile() supports. - """ - - def get_code(self, fullname: str) -> types.CodeType | None: - """Concrete implementation of InspectLoader.get_code. - - Reading of bytecode requires path_stats to be implemented. To write - bytecode, set_data must also be implemented. - - """ + self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath + ) -> types.CodeType: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... class FileLoader: - """Base file loader class which implements the loader protocol methods that - require file system usage. - """ - name: str path: str - def __init__(self, fullname: str, path: str) -> None: - """Cache the module name and the path to the file found by the - finder. - """ - - def get_data(self, path: str) -> bytes: - """Return the data from path as raw bytes.""" - - def get_filename(self, name: str | None = None) -> str: - """Return the path to the source file as found by the finder.""" - - def load_module(self, name: str | None = None) -> types.ModuleType: - """Load a module from a file. - - This method is deprecated. Use exec_module() instead. - - """ + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, fullname: str | None = None) -> str: ... + def load_module(self, fullname: str | None = None) -> types.ModuleType: ... if sys.version_info >= (3, 10): def get_resource_reader(self, name: str | None = None) -> importlib.readers.FileReader: ... else: @@ -300,56 +121,27 @@ class FileLoader: def contents(self) -> Iterator[str]: ... class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.SourceLoader, SourceLoader): # type: ignore[misc] # incompatible method arguments in base classes - """Concrete implementation of SourceLoader using the file system.""" - - def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: - """Write bytes data to a file.""" - - def path_stats(self, path: str) -> Mapping[str, Any]: - """Return the metadata for the path.""" - + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... def source_to_code( # type: ignore[override] # incompatible with InspectLoader.source_to_code self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, - path: ReadableBuffer | StrPath, + path: bytes | StrPath, *, _optimize: int = -1, - ) -> types.CodeType: - """Return the code object compiled from source. - - The 'data' argument can be any object type that compile() supports. - """ + ) -> types.CodeType: ... 
class SourcelessFileLoader(importlib.abc.FileLoader, FileLoader, _LoaderBasics): - """Loader which handles sourceless file imports.""" - def get_code(self, fullname: str) -> types.CodeType | None: ... - def get_source(self, fullname: str) -> None: - """Return None as there is no source code.""" + def get_source(self, fullname: str) -> None: ... class ExtensionFileLoader(FileLoader, _LoaderBasics, importlib.abc.ExecutionLoader): - """Loader for extension modules. - - The constructor is designed to work with FileFinder. - - """ - def __init__(self, name: str, path: str) -> None: ... - def get_filename(self, name: str | None = None) -> str: - """Return the path to the source file as found by the finder.""" - - def get_source(self, fullname: str) -> None: - """Return None as extension modules have no source code.""" - - def create_module(self, spec: ModuleSpec) -> types.ModuleType: - """Create an uninitialized extension module""" - - def exec_module(self, module: types.ModuleType) -> None: - """Initialize an extension module""" - - def get_code(self, fullname: str) -> None: - """Return None as an extension module cannot create a code object.""" - + def get_filename(self, fullname: str | None = None) -> str: ... + def get_source(self, fullname: str) -> None: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... + def exec_module(self, module: types.ModuleType) -> None: ... + def get_code(self, fullname: str) -> None: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -361,18 +153,10 @@ if sys.version_info >= (3, 11): def is_package(self, fullname: str) -> Literal[True]: ... def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... - def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation.""" - + def create_module(self, spec: ModuleSpec) -> None: ... def exec_module(self, module: types.ModuleType) -> None: ... @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") - def load_module(self, fullname: str) -> types.ModuleType: - """Load a namespace module. - - This method is deprecated. Use exec_module() instead. - - """ - + def load_module(self, fullname: str) -> types.ModuleType: ... def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... if sys.version_info < (3, 12): @staticmethod @@ -380,12 +164,7 @@ if sys.version_info >= (3, 11): "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ + def module_repr(module: types.ModuleType) -> str: ... _NamespaceLoader = NamespaceLoader else: @@ -396,54 +175,26 @@ else: def is_package(self, fullname: str) -> Literal[True]: ... def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... - def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation.""" - + def create_module(self, spec: ModuleSpec) -> None: ... def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. 
Use `exec_module()` instead.") - def load_module(self, fullname: str) -> types.ModuleType: - """Load a namespace module. - - This method is deprecated. Use exec_module() instead. - - """ - + def load_module(self, fullname: str) -> types.ModuleType: ... @staticmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ - + def module_repr(module: types.ModuleType) -> str: ... def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... else: - def load_module(self, fullname: str) -> types.ModuleType: - """Load a namespace module. - - This method is deprecated. Use exec_module() instead. - - """ - + def load_module(self, fullname: str) -> types.ModuleType: ... @classmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(cls, module: types.ModuleType) -> str: - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ + def module_repr(cls, module: types.ModuleType) -> str: ... if sys.version_info >= (3, 13): - class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): - """A loader for modules that have been packaged as frameworks for - compatibility with Apple's iOS App Store policies. - """ + class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi index 58bd1d41d3588..2cb5fba29dfa1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi @@ -1,15 +1,3 @@ -"""This module provides an interface to the GNU DBM (GDBM) library. - -This module is quite similar to the dbm module, but uses GDBM instead to -provide some additional functionality. Please note that the file formats -created by GDBM and dbm are incompatible. - -GDBM objects behave like mappings (dictionaries), except that keys and -values are always immutable bytes-like objects or strings. Printing -a GDBM object doesn't print the keys and values, and the items() and -values() methods are not supported. -""" - import sys from _typeshed import ReadOnlyBuffer, StrOrBytesPath from types import TracebackType @@ -55,29 +43,6 @@ if sys.platform != "win32": __init__: None # type: ignore[assignment] if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: - """Open a dbm database and return a dbm object. - - The filename argument is the name of the database file. - - The optional flags argument can be 'r' (to open an existing database - for reading only -- default), 'w' (to open an existing database for - reading and writing), 'c' (which creates the database if it doesn't - exist), or 'n' (which always creates a new empty database). - - Some versions of gdbm support additional flags which must be - appended to one of the flags described above. The module constant - 'open_flags' is a string of valid additional flags. The 'f' flag - opens the database in fast mode; altered data will not automatically - be written to the disk after every change. 
This results in faster - writes to the database, but may result in an inconsistent database - if the program crashes while the database is still open. Use the - sync() method to force any unwritten data to be written to the disk. - The 's' flag causes all database operations to be synchronized to - disk. The 'u' flag disables locking of the database file. - - The optional mode argument is the Unix mode of the file, used only - when the database has to be created. It defaults to octal 0o666. - """ + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... else: def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi index 1dda9af2cbf3e..03c1eef3be3ff 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi @@ -1,5 +1,3 @@ -"""OpenSSL interface for hashlib module""" - import sys from _typeshed import ReadableBuffer from collections.abc import Callable @@ -26,245 +24,104 @@ class _HashObject(Protocol): @disjoint_base class HASH: - """A hash is an object used to calculate a checksum of a string of information. - - Methods: - - update() -- updates the current digest with an additional string - digest() -- return the current digest value - hexdigest() -- return the current digest as a string of hexadecimal digits - copy() -- return a copy of the current hash object - - Attributes: - - name -- the hash algorithm being used by this object - digest_size -- number of bytes in this hashes output - """ - @property def digest_size(self) -> int: ... @property def block_size(self) -> int: ... @property def name(self) -> str: ... - def copy(self) -> Self: - """Return a copy of the hash object.""" - - def digest(self) -> bytes: - """Return the digest value as a bytes object.""" - - def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits.""" - - def update(self, obj: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided string.""" + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, obj: ReadableBuffer, /) -> None: ... if sys.version_info >= (3, 10): class UnsupportedDigestmodError(ValueError): ... class HASHXOF(HASH): - """A hash is an object used to calculate a checksum of a string of information. - - Methods: - - update() -- updates the current digest with an additional string - digest(length) -- return the current digest value - hexdigest(length) -- return the current digest as a string of hexadecimal digits - copy() -- return a copy of the current hash object - - Attributes: - - name -- the hash algorithm being used by this object - digest_size -- number of bytes in this hashes output - """ - - def digest(self, length: int) -> bytes: # type: ignore[override] - """Return the digest value as a bytes object.""" - - def hexdigest(self, length: int) -> str: # type: ignore[override] - """Return the digest value as a string of hexadecimal digits.""" + def digest(self, length: int) -> bytes: ... # type: ignore[override] + def hexdigest(self, length: int) -> str: ... # type: ignore[override] @final class HMAC: - """The object used to calculate HMAC of a message. 
- - Methods: - - update() -- updates the current digest with an additional string - digest() -- return the current digest value - hexdigest() -- return the current digest as a string of hexadecimal digits - copy() -- return a copy of the current hash object - - Attributes: - - name -- the name, including the hash algorithm used by this object - digest_size -- number of bytes in digest() output - """ - @property def digest_size(self) -> int: ... @property def block_size(self) -> int: ... @property def name(self) -> str: ... - def copy(self) -> Self: - """Return a copy ("clone") of the HMAC object.""" - - def digest(self) -> bytes: - """Return the digest of the bytes passed to the update() method so far.""" - - def hexdigest(self) -> str: - """Return hexadecimal digest of the bytes passed to the update() method so far. - - This may be used to exchange the value safely in email or other non-binary - environments. - """ - - def update(self, msg: ReadableBuffer) -> None: - """Update the HMAC object with msg.""" + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, msg: ReadableBuffer) -> None: ... @overload -def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: - """Return 'a == b'. - - This function uses an approach designed to prevent - timing analysis, making it appropriate for cryptography. - - a and b must both be of the same type: either str (ASCII only), - or any bytes-like object. - - Note: If a and b are of different lengths, or if an error occurs, - a timing attack could theoretically reveal information about the - types and lengths of a and b--but not their values. - """ - +def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... @overload def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... -def get_fips_mode() -> int: - """Determine the OpenSSL FIPS mode of operation. - - For OpenSSL 3.0.0 and newer it returns the state of the default provider - in the default OSSL context. It's not quite the same as FIPS_mode() but good - enough for unittests. - - Effectively any non-zero return value indicates FIPS mode; - values other than 1 may have additional significance. - """ - -def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: - """Return a new hmac object.""" +def get_fips_mode() -> int: ... +def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ... if sys.version_info >= (3, 13): - def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Return a new hash object using the named algorithm. - - An optional string argument may be provided and will be - automatically hashed. - - The MD5 and SHA1 algorithms are always supported. 
- """ - - def openssl_md5(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a md5 hash object; optionally initialized with a string""" - - def openssl_sha1(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a sha1 hash object; optionally initialized with a string""" - - def openssl_sha224(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a sha224 hash object; optionally initialized with a string""" - - def openssl_sha256(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a sha256 hash object; optionally initialized with a string""" - - def openssl_sha384(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a sha384 hash object; optionally initialized with a string""" - - def openssl_sha512(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: - """Returns a sha512 hash object; optionally initialized with a string""" - + def new( + name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_md5( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha1( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha224( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha256( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha384( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... + def openssl_sha512( + data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None + ) -> HASH: ... def openssl_sha3_224( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha3-224 hash object; optionally initialized with a string""" - + ) -> HASH: ... def openssl_sha3_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha3-256 hash object; optionally initialized with a string""" - + ) -> HASH: ... def openssl_sha3_384( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha3-384 hash object; optionally initialized with a string""" - + ) -> HASH: ... def openssl_sha3_512( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha3-512 hash object; optionally initialized with a string""" - + ) -> HASH: ... def openssl_shake_128( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASHXOF: - """Returns a shake-128 variable hash object; optionally initialized with a string""" - + ) -> HASHXOF: ... 
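For orientation, the _hashlib entry points stubbed above are normally reached through the public hashlib and hmac modules; the following is a minimal, hedged sketch of direct use, assuming an OpenSSL-enabled CPython build (the key and message values are invented for illustration).

# Illustrative sketch only; not part of the vendored stubs or of this patch.
import _hashlib

h = _hashlib.new("sha256", b"hello ")      # returns a HASH object, as typed above
h.update(b"world")                         # incremental update
print(h.hexdigest())                       # 64 hex digits for SHA-256

mac = _hashlib.hmac_new(b"secret-key", b"payload", digestmod="sha256")
one_shot = _hashlib.hmac_digest(b"secret-key", b"payload", "sha256")
# compare_digest() is the constant-time comparison intended for MAC checks.
assert _hashlib.compare_digest(mac.digest(), one_shot)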
def openssl_shake_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASHXOF: - """Returns a shake-256 variable hash object; optionally initialized with a string""" + ) -> HASHXOF: ... else: - def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Return a new hash object using the named algorithm. - - An optional string argument may be provided and will be - automatically hashed. - - The MD5 and SHA1 algorithms are always supported. - """ - - def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a md5 hash object; optionally initialized with a string""" - - def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha1 hash object; optionally initialized with a string""" - - def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha224 hash object; optionally initialized with a string""" - - def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha256 hash object; optionally initialized with a string""" - - def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha384 hash object; optionally initialized with a string""" - - def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha512 hash object; optionally initialized with a string""" - - def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-224 hash object; optionally initialized with a string""" - - def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-256 hash object; optionally initialized with a string""" - - def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-384 hash object; optionally initialized with a string""" - - def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-512 hash object; optionally initialized with a string""" - - def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: - """Returns a shake-128 variable hash object; optionally initialized with a string""" - - def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: - """Returns a shake-256 variable hash object; optionally initialized with a string""" - -def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: - """Single-shot HMAC.""" - + def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... 
+ def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... + def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... + def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... + +def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: ... def pbkdf2_hmac( hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None -) -> bytes: - """Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as pseudorandom function.""" - -def scrypt(password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64) -> bytes: - """scrypt password-based key derivation function.""" +) -> bytes: ... +def scrypt( + password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64 +) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi index 07a2e751c5d61..4d7d6aba32418 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi @@ -1,83 +1,18 @@ -"""Heap queue algorithm (a.k.a. priority queue). - -Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for -all k, counting elements from 0. For the sake of comparison, -non-existing elements are considered to be infinite. The interesting -property of a heap is that a[0] is always its smallest element. - -Usage: - -heap = [] # creates an empty heap -heappush(heap, item) # pushes a new item on the heap -item = heappop(heap) # pops the smallest item from the heap -item = heap[0] # smallest item on the heap without popping it -heapify(x) # transforms list into a heap, in-place, in linear time -item = heapreplace(heap, item) # pops and returns smallest item, and adds - # new item; the heap size is unchanged - -Our API differs from textbook heap algorithms as follows: - -- We use 0-based indexing. This makes the relationship between the - index for a node and the indexes for its children slightly less - obvious, but is more suitable since Python uses 0-based indexing. - -- Our heappop() method returns the smallest item, not the largest. - -These two make it possible to view the heap as a regular Python list -without surprises: heap[0] is the smallest item, and heap.sort() -maintains the heap invariant! -""" - import sys from _typeshed import SupportsRichComparisonT as _T # All type variable use in this module requires comparability. from typing import Final __about__: Final[str] -def heapify(heap: list[_T], /) -> None: - """Transform list into a heap, in-place, in O(len(heap)) time.""" - -def heappop(heap: list[_T], /) -> _T: - """Pop the smallest item off the heap, maintaining the heap invariant.""" - -def heappush(heap: list[_T], item: _T, /) -> None: - """Push item onto heap, maintaining the heap invariant.""" - -def heappushpop(heap: list[_T], item: _T, /) -> _T: - """Push item on the heap, then pop and return the smallest item from the heap. - - The combined action runs more efficiently than heappush() followed by - a separate call to heappop(). 
- """ - -def heapreplace(heap: list[_T], item: _T, /) -> _T: - """Pop and return the current smallest value, and add the new item. - - This is more efficient than heappop() followed by heappush(), and can be - more appropriate when using a fixed-size heap. Note that the value - returned may be larger than item! That constrains reasonable uses of - this routine unless written as part of a conditional replacement: - - if item > heap[0]: - item = heapreplace(heap, item) - """ +def heapify(heap: list[_T], /) -> None: ... +def heappop(heap: list[_T], /) -> _T: ... +def heappush(heap: list[_T], item: _T, /) -> None: ... +def heappushpop(heap: list[_T], item: _T, /) -> _T: ... +def heapreplace(heap: list[_T], item: _T, /) -> _T: ... if sys.version_info >= (3, 14): - def heapify_max(heap: list[_T], /) -> None: - """Maxheap variant of heapify.""" - - def heappop_max(heap: list[_T], /) -> _T: - """Maxheap variant of heappop.""" - - def heappush_max(heap: list[_T], item: _T, /) -> None: - """Push item onto max heap, maintaining the heap invariant.""" - - def heappushpop_max(heap: list[_T], item: _T, /) -> _T: - """Maxheap variant of heappushpop. - - The combined action runs more efficiently than heappush_max() followed by - a separate call to heappop_max(). - """ - - def heapreplace_max(heap: list[_T], item: _T, /) -> _T: - """Maxheap variant of heapreplace.""" + def heapify_max(heap: list[_T], /) -> None: ... + def heappop_max(heap: list[_T], /) -> _T: ... + def heappush_max(heap: list[_T], item: _T, /) -> None: ... + def heappushpop_max(heap: list[_T], item: _T, /) -> _T: ... + def heapreplace_max(heap: list[_T], item: _T, /) -> _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi index 1f47d2ec56aa4..c12c26d08ba2a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi @@ -1,5 +1,3 @@ -"""(Extremely) low-level import machinery bits as used by importlib.""" - import sys import types from _typeshed import ReadableBuffer @@ -11,68 +9,22 @@ if sys.version_info >= (3, 14): pyc_magic_number_token: int def source_hash(key: int, source: ReadableBuffer) -> bytes: ... -def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: - """Create an extension module.""" - -def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: - """Create an extension module.""" - -def acquire_lock() -> None: - """Acquires the interpreter's import lock for the current thread. - - This lock should be used by import hooks to ensure thread-safety when importing - modules. On platforms without threads, this function does nothing. - """ - -def exec_builtin(mod: types.ModuleType, /) -> int: - """Initialize a built-in module.""" - -def exec_dynamic(mod: types.ModuleType, /) -> int: - """Initialize an extension module.""" - -def extension_suffixes() -> list[str]: - """Returns the list of file suffixes used to identify extension modules.""" - -def init_frozen(name: str, /) -> types.ModuleType: - """Initializes a frozen module.""" - -def is_builtin(name: str, /) -> int: - """Returns True if the module name corresponds to a built-in module.""" - -def is_frozen(name: str, /) -> bool: - """Returns True if the module name corresponds to a frozen module.""" - -def is_frozen_package(name: str, /) -> bool: - """Returns True if the module name is of a frozen package.""" - -def lock_held() -> bool: - """Return True if the import lock is currently held, else False. 
- - On platforms without threads, return False. - """ - -def release_lock() -> None: - """Release the interpreter's import lock. - - On platforms without threads, this function does nothing. - """ +def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: ... +def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: ... +def acquire_lock() -> None: ... +def exec_builtin(mod: types.ModuleType, /) -> int: ... +def exec_dynamic(mod: types.ModuleType, /) -> int: ... +def extension_suffixes() -> list[str]: ... +def init_frozen(name: str, /) -> types.ModuleType: ... +def is_builtin(name: str, /) -> int: ... +def is_frozen(name: str, /) -> bool: ... +def is_frozen_package(name: str, /) -> bool: ... +def lock_held() -> bool: ... +def release_lock() -> None: ... if sys.version_info >= (3, 11): - def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: - """Return info about the corresponding frozen module (if there is one) or None. - - The returned info (a 2-tuple): - - * data the raw marshalled bytes - * is_package whether or not it is a package - * origname the originally frozen module's name, or None if not - a stdlib module (this will usually be the same as - the module's current name) - """ - - def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: - """Create a code object for a frozen module.""" + def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: ... else: - def get_frozen_object(name: str, /) -> types.CodeType: - """Create a code object for a frozen module.""" + def get_frozen_object(name: str, /) -> types.CodeType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi index 987ca4566c1a0..a631a6f16616b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi @@ -1,7 +1,3 @@ -"""This module provides primitive operations to manage Python interpreters. -The 'interpreters' module provides a more convenient interface. -""" - from _typeshed import structseq from typing import Any, Final, Literal, SupportsIndex, final from typing_extensions import Buffer, Self @@ -15,41 +11,24 @@ class ChannelNotFoundError(ChannelError): ... # Mark as final, since instantiating ChannelID is not supported. @final class ChannelID: - """A channel ID identifies a channel and may be used as an int.""" - @property - def end(self) -> Literal["send", "recv", "both"]: - """'send', 'recv', or 'both'""" - + def end(self) -> Literal["send", "recv", "both"]: ... @property - def send(self) -> Self: - """the 'send' end of the channel""" - + def send(self) -> Self: ... @property - def recv(self) -> Self: - """the 'recv' end of the channel""" - + def recv(self) -> Self: ... def __eq__(self, other: object, /) -> bool: ... def __ge__(self, other: ChannelID, /) -> bool: ... def __gt__(self, other: ChannelID, /) -> bool: ... def __hash__(self) -> int: ... - def __index__(self) -> int: - """Return self converted to an integer, if self is suitable for use as an index into a list.""" - - def __int__(self) -> int: - """int(self)""" - + def __index__(self) -> int: ... + def __int__(self) -> int: ... def __le__(self, other: ChannelID, /) -> bool: ... def __lt__(self, other: ChannelID, /) -> bool: ... 
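The _imp hunk earlier in this diff strips similar one-line docstrings from the low-level import hooks; as a hedged sketch of how those hooks behave (ordinary code should go through importlib rather than call them directly):

# Illustrative sketch only; not part of this patch.
import _imp

print(_imp.is_builtin("sys"))                 # non-zero for built-in modules
print(_imp.is_frozen("_frozen_importlib"))    # True for frozen modules
print(_imp.extension_suffixes())              # suffixes used to recognize extension modules

# The lock helpers bracket thread-safe work inside custom import hooks.
_imp.acquire_lock()
try:
    print(_imp.lock_held())                   # True while this thread holds the import lock
finally:
    _imp.release_lock()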
def __ne__(self, other: object, /) -> bool: ... @final class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]): - """ChannelInfo - - A named tuple of a channel's state. - """ - __match_args__: Final = ( "open", "closing", @@ -61,170 +40,47 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in "num_interp_recv_released", ) @property - def open(self) -> bool: - """both ends are open""" - + def open(self) -> bool: ... @property - def closing(self) -> bool: - """send is closed, recv is non-empty""" - + def closing(self) -> bool: ... @property - def closed(self) -> bool: - """both ends are closed""" - + def closed(self) -> bool: ... @property - def count(self) -> int: # type: ignore[override] - """queued objects""" - + def count(self) -> int: ... # type: ignore[override] @property - def num_interp_send(self) -> int: - """interpreters bound to the send end""" - + def num_interp_send(self) -> int: ... @property - def num_interp_send_released(self) -> int: - """interpreters bound to the send end and released""" - + def num_interp_send_released(self) -> int: ... @property - def num_interp_recv(self) -> int: - """interpreters bound to the send end""" - + def num_interp_recv(self) -> int: ... @property - def num_interp_recv_released(self) -> int: - """interpreters bound to the send end and released""" - + def num_interp_recv_released(self) -> int: ... @property - def num_interp_both(self) -> int: - """interpreters bound to both ends""" - + def num_interp_both(self) -> int: ... @property - def num_interp_both_recv_released(self) -> int: - """interpreters bound to both ends and released_from_the recv end""" - + def num_interp_both_recv_released(self) -> int: ... @property - def num_interp_both_send_released(self) -> int: - """interpreters bound to both ends and released_from_the send end""" - + def num_interp_both_send_released(self) -> int: ... @property - def num_interp_both_released(self) -> int: - """interpreters bound to both ends and released_from_both""" - + def num_interp_both_released(self) -> int: ... @property - def recv_associated(self) -> bool: - """current interpreter is bound to the recv end""" - + def recv_associated(self) -> bool: ... @property - def recv_released(self) -> bool: - """current interpreter *was* bound to the recv end""" - + def recv_released(self) -> bool: ... @property - def send_associated(self) -> bool: - """current interpreter is bound to the send end""" - + def send_associated(self) -> bool: ... @property - def send_released(self) -> bool: - """current interpreter *was* bound to the send end""" - -def create(unboundop: Literal[1, 2, 3]) -> ChannelID: - """channel_create(unboundop) -> cid - - Create a new cross-interpreter channel and return a unique generated ID. - """ - -def destroy(cid: SupportsIndex) -> None: - """channel_destroy(cid) - - Close and finalize the channel. Afterward attempts to use the channel - will behave as though it never existed. - """ - -def list_all() -> list[ChannelID]: - """channel_list_all() -> [cid] - - Return the list of all IDs for active channels. - """ - -def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: - """channel_list_interpreters(cid, *, send) -> [id] - - Return the list of all interpreter IDs associated with an end of the channel. - - The 'send' argument should be a boolean indicating whether to use the send or - receive end. 
- """ - -def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: - """channel_send(cid, obj, *, blocking=True, timeout=None) - - Add the object's data to the channel's queue. - By default this waits for the object to be received. - """ - -def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: - """channel_send_buffer(cid, obj, *, blocking=True, timeout=None) - - Add the object's buffer to the channel's queue. - By default this waits for the object to be received. - """ - -def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: - """channel_recv(cid, [default]) -> (obj, unboundop) - - Return a new object from the data at the front of the channel's queue. - - If there is nothing to receive then raise ChannelEmptyError, unless - a default value is provided. In that case return it. - """ - -def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: - """channel_close(cid, *, send=None, recv=None, force=False) - - Close the channel for all interpreters. - - If the channel is empty then the keyword args are ignored and both - ends are immediately closed. Otherwise, if 'force' is True then - all queued items are released and both ends are immediately - closed. - - If the channel is not empty *and* 'force' is False then following - happens: - - * recv is True (regardless of send): - - raise ChannelNotEmptyError - * recv is None and send is None: - - raise ChannelNotEmptyError - * send is True and recv is not True: - - fully close the 'send' end - - close the 'recv' end to interpreters not already receiving - - fully close it once empty - - Closing an already closed channel results in a ChannelClosedError. - - Once the channel's ID has no more ref counts in any interpreter - the channel will be destroyed. - """ - -def get_count(cid: SupportsIndex) -> int: - """get_count(cid) - - Return the number of items in the channel. - """ - -def get_info(cid: SupportsIndex) -> ChannelInfo: - """get_info(cid) - - Return details about the channel. - """ - -def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: - """get_channel_defaults(cid) - - Return the channel's default values, set when it was created. - """ - -def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: - """channel_release(cid, *, send=None, recv=None, force=True) + def send_released(self) -> bool: ... - Close the channel for the current interpreter. 'send' and 'recv' - (bool) may be used to indicate the ends to close. By default both - ends are closed. Closing an already closed end is a noop. - """ +def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ... +def destroy(cid: SupportsIndex) -> None: ... +def list_all() -> list[ChannelID]: ... +def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ... +def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ... +def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ... +def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ... +def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ... +def get_count(cid: SupportsIndex) -> int: ... +def get_info(cid: SupportsIndex) -> ChannelInfo: ... +def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ... 
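The removed channel docstrings above describe the create/send/recv lifecycle (the release() stub completes just below); a rough sketch of that flow inside a single interpreter, assuming a CPython build that ships the private _interpchannels module:

# Illustrative sketch only; the supported surface is the higher-level
# interpreters/channels API, and these private signatures shift between releases.
import _interpchannels as channels

cid = channels.create(2)                      # 2 is one of the Literal[1, 2, 3] unbound-op codes above
channels.send(cid, b"ping", blocking=False)   # non-blocking, so the same interpreter can receive it
obj, _unboundop = channels.recv(cid)
print(obj)                                    # b'ping'
print(channels.get_count(cid))                # 0 once the item has been received
channels.destroy(cid)                         # finalize the channel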
+def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi index 3402f6bbb1247..c9323b106f3dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi @@ -1,90 +1,19 @@ -"""This module provides primitive operations to manage Python interpreters. -The 'interpreters' module provides a more convenient interface. -""" - from typing import Any, Literal, SupportsIndex from typing_extensions import TypeAlias _UnboundOp: TypeAlias = Literal[1, 2, 3] -class QueueError(RuntimeError): - """Indicates that a queue-related error happened.""" - +class QueueError(RuntimeError): ... class QueueNotFoundError(QueueError): ... -def bind(qid: SupportsIndex) -> None: - """bind(qid) - - Take a reference to the identified queue. - The queue is not destroyed until there are no references left. - """ - -def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: - """create(maxsize, unboundop, fallback) -> qid - - Create a new cross-interpreter queue and return its unique generated ID. - It is a new reference as though bind() had been called on the queue. - - The caller is responsible for calling destroy() for the new queue - before the runtime is finalized. - """ - -def destroy(qid: SupportsIndex) -> None: - """destroy(qid) - - Clear and destroy the queue. Afterward attempts to use the queue - will behave as though it never existed. - """ - -def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: - """get(qid) -> (obj, unboundop) - - Return a new object from the data at the front of the queue. - The unbound op is also returned. - - If there is nothing to receive then raise QueueEmpty. - """ - -def get_count(qid: SupportsIndex) -> int: - """get_count(qid) - - Return the number of items in the queue. - """ - -def get_maxsize(qid: SupportsIndex) -> int: - """get_maxsize(qid) - - Return the maximum number of items in the queue. - """ - -def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: - """get_queue_defaults(qid) - - Return the queue's default values, set when it was created. - """ - -def is_full(qid: SupportsIndex) -> bool: - """is_full(qid) - - Return true if the queue has a maxsize and has reached it. - """ - -def list_all() -> list[tuple[int, int, _UnboundOp]]: - """list_all() -> [(qid, unboundop, fallback)] - - Return the list of IDs for all queues. - Each corresponding default unbound op and fallback is also included. - """ - -def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: - """put(qid, obj) - - Add the object's data to the queue. - """ - -def release(qid: SupportsIndex) -> None: - """release(qid) - - Release a reference to the queue. - The queue is destroyed once there are no references left. - """ +def bind(qid: SupportsIndex) -> None: ... +def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: ... +def destroy(qid: SupportsIndex) -> None: ... +def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: ... +def get_count(qid: SupportsIndex) -> int: ... +def get_maxsize(qid: SupportsIndex) -> int: ... +def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: ... +def is_full(qid: SupportsIndex) -> bool: ... +def list_all() -> list[tuple[int, int, _UnboundOp]]: ... 
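Likewise for _interpqueues (its put() and release() stubs continue directly below): a hedged sketch that follows the stub signatures shown in this hunk; the removed docstrings advertise a slightly different parameter list, so treat the exact private calling convention as version-dependent.

# Illustrative sketch only; prefer the public concurrent.interpreters queues.
import _interpqueues as queues

qid = queues.create(0, 0, 2)        # maxsize=0 (unbounded), fmt, unboundop, per the stub above
queues.put(qid, "task-1", 0, 2)     # put(qid, obj, fmt, unboundop)
print(queues.get_count(qid))        # 1
obj, fmt, unboundop = queues.get(qid)
print(obj)                          # 'task-1'
queues.destroy(qid)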
+def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: ... +def release(qid: SupportsIndex) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi index 958ae497f1ac8..8e097efad618a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi @@ -1,7 +1,3 @@ -"""This module provides primitive operations to manage Python interpreters. -The 'interpreters' module provides a more convenient interface. -""" - import types from collections.abc import Callable from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload @@ -12,111 +8,26 @@ _R = TypeVar("_R") _Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""] _SharedDict: TypeAlias = dict[str, Any] # many objects can be shared -class InterpreterError(Exception): - """A cross-interpreter operation failed""" - -class InterpreterNotFoundError(InterpreterError): - """An interpreter was not found""" - +class InterpreterError(Exception): ... +class InterpreterNotFoundError(InterpreterError): ... class NotShareableError(ValueError): ... @disjoint_base class CrossInterpreterBufferView: - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - -def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: - """new_config(name='isolated', /, **overrides) -> type.SimpleNamespace - - Return a representation of a new PyInterpreterConfig. - - The name determines the initial values of the config. Supported named - configs are: default, isolated, legacy, and empty. - - Any keyword arguments are set on the corresponding config fields, - overriding the initial values. - """ - -def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: - """create([config], *, reqrefs=False) -> ID - - Create a new interpreter and return a unique generated ID. - - The caller is responsible for destroying the interpreter before exiting, - typically by using _interpreters.destroy(). This can be managed - automatically by passing "reqrefs=True" and then using _incref() and - _decref() appropriately. - - "config" must be a valid interpreter config or the name of a - predefined config ("isolated" or "legacy"). The default - is "isolated". - """ - -def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: - """destroy(id, *, restrict=False) - - Destroy the identified interpreter. - - Attempting to destroy the current interpreter raises InterpreterError. - So does an unrecognized ID. - """ - -def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: - """list_all() -> [(ID, whence)] - - Return a list containing the ID of every existing interpreter. - """ - -def get_current() -> tuple[int, _Whence]: - """get_current() -> (ID, whence) - - Return the ID of current interpreter. - """ - -def get_main() -> tuple[int, _Whence]: - """get_main() -> (ID, whence) - - Return the ID of main interpreter. - """ - -def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: - """is_running(id, *, restrict=False) -> bool - - Return whether or not the identified interpreter is running. 
- """ - -def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: - """get_config(id, *, restrict=False) -> types.SimpleNamespace - - Return a representation of the config used to initialize the interpreter. - """ - -def whence(id: SupportsIndex) -> _Whence: - """whence(id) -> int - - Return an identifier for where the interpreter was created. - """ - + def __buffer__(self, flags: int, /) -> memoryview: ... + +def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: ... +def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: ... +def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: ... +def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: ... +def get_current() -> tuple[int, _Whence]: ... +def get_main() -> tuple[int, _Whence]: ... +def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: ... +def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: ... +def whence(id: SupportsIndex) -> _Whence: ... def exec( id: SupportsIndex, code: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None | types.SimpleNamespace: - """exec(id, code, shared=None, *, restrict=False) - - Execute the provided code in the identified interpreter. - This is equivalent to running the builtin exec() under the target - interpreter, using the __dict__ of its __main__ module as both - globals and locals. - - "code" may be a string containing the text of a Python script. - - Functions (and code objects) are also supported, with some restrictions. - The code/function must not take any arguments or be a closure - (i.e. have cell vars). Methods and other callables are not supported. - - If a function is provided, its code object is used and all its state - is ignored, including its __globals__ dict. - """ - +) -> None | types.SimpleNamespace: ... def call( id: SupportsIndex, callable: Callable[..., _R], @@ -125,60 +36,19 @@ def call( *, preserve_exc: bool = False, restrict: bool = False, -) -> tuple[_R, types.SimpleNamespace]: - """call(id, callable, args=None, kwargs=None, *, restrict=False) - - Call the provided object in the identified interpreter. - Pass the given args and kwargs, if possible. - """ - +) -> tuple[_R, types.SimpleNamespace]: ... def run_string( id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None: - """run_string(id, script, shared=None, *, restrict=False) - - Execute the provided string in the identified interpreter. - - (See _interpreters.exec(). - """ - +) -> None: ... def run_func( id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None: - """run_func(id, func, shared=None, *, restrict=False) - - Execute the body of the provided function in the identified interpreter. - Code objects are also supported. In both cases, closures and args - are not supported. Methods and other callables are not supported either. - - (See _interpreters.exec(). - """ - -def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: - """set___main___attrs(id, ns, *, restrict=False) - - Bind the given attributes in the interpreter's __main__ module. - """ - +) -> None: ... +def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: ... 
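The _interpreters stubs above cover the whole sub-interpreter lifecycle; a minimal, hedged sketch on a CPython that exposes this private module (the public wrapper is concurrent.interpreters):

# Illustrative sketch only; not part of this patch.
import _interpreters

interp_id = _interpreters.create()                     # default "isolated" config
excinfo = _interpreters.exec(interp_id, "x = 40 + 2")  # None on success, an exception snapshot otherwise
assert excinfo is None
_interpreters.set___main___attrs(interp_id, {"greeting": "hi"})
_interpreters.run_string(interp_id, "print(greeting, x)")
_interpreters.destroy(interp_id)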
def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ... def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ... -def is_shareable(obj: object) -> bool: - """is_shareable(obj) -> bool - - Return True if the object's data may be shared between interpreters and - False otherwise. - """ - +def is_shareable(obj: object) -> bool: ... @overload -def capture_exception(exc: BaseException) -> types.SimpleNamespace: - """capture_exception(exc=None) -> types.SimpleNamespace - - Return a snapshot of an exception. If "exc" is None - then the current exception, if any, is used (but not cleared). - - The returned snapshot is the same as what _interpreters.exec() returns. - """ - +def capture_exception(exc: BaseException) -> types.SimpleNamespace: ... @overload def capture_exception(exc: None = None) -> types.SimpleNamespace | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi index e63f1d485689e..2d2a60e4dddf1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi @@ -1,37 +1,3 @@ -"""The io module provides the Python interfaces to stream handling. The -builtin open function is defined in this module. - -At the top of the I/O hierarchy is the abstract base class IOBase. It -defines the basic interface to a stream. Note, however, that there is no -separation between reading and writing to streams; implementations are -allowed to raise an OSError if they do not support a given operation. - -Extending IOBase is RawIOBase which deals simply with the reading and -writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide -an interface to OS files. - -BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its -subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer -streams that are readable, writable, and both respectively. -BufferedRandom provides a buffered interface to random access -streams. BytesIO is a simple stream of in-memory bytes. - -Another IOBase subclass, TextIOBase, deals with the encoding and decoding -of streams into text. TextIOWrapper, which extends it, is a buffered text -interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO -is an in-memory stream for text. - -Argument names are not part of the specification, and only the arguments -of open() are intended to be used as keyword arguments. - -data: - -DEFAULT_BUFFER_SIZE - - An int containing the default buffer size used by the module's buffered - I/O classes. -""" - import builtins import codecs import sys @@ -52,311 +18,69 @@ else: open = builtins.open -def open_code(path: str) -> IO[bytes]: - """Opens the provided file with the intent to import the contents. - - This may perform extra validation beyond open(), but is otherwise interchangeable - with calling open(path, 'rb'). - """ +def open_code(path: str) -> IO[bytes]: ... BlockingIOError = builtins.BlockingIOError if sys.version_info >= (3, 12): @disjoint_base class _IOBase: - """The abstract base class for all I/O classes. - - This class provides dummy implementations for many methods that - derived classes can override selectively; the default implementations - represent a file that cannot be read, written or seeked. - - Even though IOBase does not declare read, readinto, or write because - their signatures will vary, implementations and clients should - consider those methods part of the interface. 
Also, implementations - may raise UnsupportedOperation when operations they do not support are - called. - - The basic type used for binary data read from or written to a file is - bytes. Other bytes-like objects are accepted as method arguments too. - In some cases (such as readinto), a writable object is required. Text - I/O classes work with str data. - - Note that calling any method (except additional calls to close(), - which are ignored) on a closed stream should raise a ValueError. - - IOBase (and its subclasses) support the iterator protocol, meaning - that an IOBase object can be iterated over yielding the lines in a - stream. - - IOBase also supports the :keyword:`with` statement. In this example, - fp is closed after the suite of the with statement is complete: - - with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') - """ - - def __iter__(self) -> Iterator[bytes]: - """Implement iter(self).""" - - def __next__(self) -> bytes: - """Implement next(self).""" - + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def close(self) -> None: - """Flush and close the IO object. - - This method has no effect if the file is already closed. - """ - - def fileno(self) -> int: - """Return underlying file descriptor if one exists. - - Raise OSError if the IO object does not use a file descriptor. - """ - - def flush(self) -> None: - """Flush write buffers, if applicable. - - This is not implemented for read-only and non-blocking streams. - """ - - def isatty(self) -> bool: - """Return whether this is an 'interactive' stream. - - Return False if it can't be determined. - """ - - def readable(self) -> bool: - """Return whether object was opened for reading. - - If False, read() will raise OSError. - """ + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... read: Callable[..., Any] - def readlines(self, hint: int = -1, /) -> list[bytes]: - """Return a list of lines from the stream. - - hint can be specified to control the number of lines read: no more - lines will be read if the total size (in bytes/characters) of all - lines so far exceeds hint. - """ - - def seek(self, offset: int, whence: int = 0, /) -> int: - """Change the stream position to the given byte offset. - - offset - The stream position, relative to 'whence'. - whence - The relative position to seek from. - - The offset is interpreted relative to the position indicated by whence. - Values for whence are: - - * os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive - * os.SEEK_CUR or 1 -- current stream position; offset may be negative - * os.SEEK_END or 2 -- end of stream; offset is usually negative - - Return the new absolute position. - """ - - def seekable(self) -> bool: - """Return whether object supports random access. - - If False, seek(), tell() and truncate() will raise OSError. - This method may need to do a test seek(). - """ - - def tell(self) -> int: - """Return current stream position.""" - - def truncate(self, size: int | None = None, /) -> int: - """Truncate file to size bytes. - - File pointer is left unchanged. Size defaults to the current IO position - as reported by tell(). Return the new size. 
- """ - - def writable(self) -> bool: - """Return whether object was opened for writing. - - If False, write() will raise OSError. - """ + def readlines(self, hint: int = -1, /) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = None, /) -> int: ... + def writable(self) -> bool: ... write: Callable[..., Any] - def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: - """Write a list of lines to stream. - - Line separators are not added, so it is usual for each of the - lines provided to have a line separator at the end. - """ - - def readline(self, size: int | None = -1, /) -> bytes: - """Read and return a line from the stream. - - If size is specified, at most size bytes will be read. - - The line terminator is always b'\\n' for binary files; for text - files, the newlines argument to open can be used to select the line - terminator(s) recognized. - """ - - def __del__(self) -> None: - """Called when the instance is about to be destroyed.""" - + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... + def readline(self, size: int | None = -1, /) -> bytes: ... + def __del__(self) -> None: ... @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented else: class _IOBase: - """The abstract base class for all I/O classes. - - This class provides dummy implementations for many methods that - derived classes can override selectively; the default implementations - represent a file that cannot be read, written or seeked. - - Even though IOBase does not declare read, readinto, or write because - their signatures will vary, implementations and clients should - consider those methods part of the interface. Also, implementations - may raise UnsupportedOperation when operations they do not support are - called. - - The basic type used for binary data read from or written to a file is - bytes. Other bytes-like objects are accepted as method arguments too. - In some cases (such as readinto), a writable object is required. Text - I/O classes work with str data. - - Note that calling any method (except additional calls to close(), - which are ignored) on a closed stream should raise a ValueError. - - IOBase (and its subclasses) support the iterator protocol, meaning - that an IOBase object can be iterated over yielding the lines in a - stream. - - IOBase also supports the :keyword:`with` statement. In this example, - fp is closed after the suite of the with statement is complete: - - with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') - """ - - def __iter__(self) -> Iterator[bytes]: - """Implement iter(self).""" - - def __next__(self) -> bytes: - """Implement next(self).""" - + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def close(self) -> None: - """Flush and close the IO object. - - This method has no effect if the file is already closed. - """ - - def fileno(self) -> int: - """Returns underlying file descriptor if one exists. - - OSError is raised if the IO object does not use a file descriptor. - """ - - def flush(self) -> None: - """Flush write buffers, if applicable. - - This is not implemented for read-only and non-blocking streams. 
- """ - - def isatty(self) -> bool: - """Return whether this is an 'interactive' stream. - - Return False if it can't be determined. - """ - - def readable(self) -> bool: - """Return whether object was opened for reading. - - If False, read() will raise OSError. - """ + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... read: Callable[..., Any] - def readlines(self, hint: int = -1, /) -> list[bytes]: - """Return a list of lines from the stream. - - hint can be specified to control the number of lines read: no more - lines will be read if the total size (in bytes/characters) of all - lines so far exceeds hint. - """ - - def seek(self, offset: int, whence: int = 0, /) -> int: - """Change the stream position to the given byte offset. - - offset - The stream position, relative to 'whence'. - whence - The relative position to seek from. - - The offset is interpreted relative to the position indicated by whence. - Values for whence are: - - * os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive - * os.SEEK_CUR or 1 -- current stream position; offset may be negative - * os.SEEK_END or 2 -- end of stream; offset is usually negative - - Return the new absolute position. - """ - - def seekable(self) -> bool: - """Return whether object supports random access. - - If False, seek(), tell() and truncate() will raise OSError. - This method may need to do a test seek(). - """ - - def tell(self) -> int: - """Return current stream position.""" - - def truncate(self, size: int | None = None, /) -> int: - """Truncate file to size bytes. - - File pointer is left unchanged. Size defaults to the current IO - position as reported by tell(). Returns the new size. - """ - - def writable(self) -> bool: - """Return whether object was opened for writing. - - If False, write() will raise OSError. - """ + def readlines(self, hint: int = -1, /) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0, /) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = None, /) -> int: ... + def writable(self) -> bool: ... write: Callable[..., Any] - def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: - """Write a list of lines to stream. - - Line separators are not added, so it is usual for each of the - lines provided to have a line separator at the end. - """ - - def readline(self, size: int | None = -1, /) -> bytes: - """Read and return a line from the stream. - - If size is specified, at most size bytes will be read. - - The line terminator is always b'\\n' for binary files; for text - files, the newlines argument to open can be used to select the line - terminator(s) recognized. - """ - + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... + def readline(self, size: int | None = -1, /) -> bytes: ... def __del__(self) -> None: ... @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented class _RawIOBase(_IOBase): - """Base class for raw binary I/O.""" - - def readall(self) -> bytes: - """Read until EOF, using multiple read() call.""" + def readall(self) -> bytes: ... # The following methods can return None if the file is in non-blocking mode # and no data is available. def readinto(self, buffer: WriteableBuffer, /) -> int | MaybeNone: ... @@ -364,83 +88,15 @@ class _RawIOBase(_IOBase): def read(self, size: int = -1, /) -> bytes | MaybeNone: ... 
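The _IOBase/_RawIOBase stubs above encode the raw-IO contract: readinto() is the primitive, and readall()/read() are layered on top of it. A small sketch of that contract against the public io counterparts of these classes:

# Illustrative sketch only, using the documented io module equivalents.
import io

class RepeatingRawIO(io.RawIOBase):
    """Read-only raw stream that serves a fixed byte pattern up to a limit."""

    def __init__(self, pattern: bytes, limit: int) -> None:
        self._pattern = pattern
        self._remaining = limit

    def readable(self) -> bool:
        return True

    def readinto(self, buffer) -> int:
        # Fill the caller-supplied buffer and report how many bytes were written.
        if self._remaining <= 0:
            return 0                                   # 0 means EOF for raw streams
        n = min(len(buffer), self._remaining)
        chunk = (self._pattern * n)[:n]
        buffer[:n] = chunk
        self._remaining -= n
        return n

reader = io.BufferedReader(RepeatingRawIO(b"ab", limit=7))
print(reader.read())    # b'abababa', BufferedReader drives readinto() until EOF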
class _BufferedIOBase(_IOBase): - """Base class for buffered IO objects. - - The main difference with RawIOBase is that the read() method - supports omitting the size argument, and does not have a default - implementation that defers to readinto(). - - In addition, read(), readinto() and write() may raise - BlockingIOError if the underlying raw stream is in non-blocking - mode and not ready; unlike their raw counterparts, they will never - return None. - - A typical implementation should not inherit from a RawIOBase - implementation, but wrap one. - """ - - def detach(self) -> RawIOBase: - """Disconnect this buffer from its underlying raw stream and return it. - - After the raw stream has been detached, the buffer is in an unusable - state. - """ - + def detach(self) -> RawIOBase: ... def readinto(self, buffer: WriteableBuffer, /) -> int: ... - def write(self, buffer: ReadableBuffer, /) -> int: - """Write buffer b to the IO stream. - - Return the number of bytes written, which is always - the length of b in bytes. - - Raise BlockingIOError if the buffer is full and the - underlying raw stream cannot accept more data at the moment. - """ - + def write(self, buffer: ReadableBuffer, /) -> int: ... def readinto1(self, buffer: WriteableBuffer, /) -> int: ... - def read(self, size: int | None = -1, /) -> bytes: - """Read and return up to n bytes. - - If the size argument is omitted, None, or negative, read and - return all data until EOF. - - If the size argument is positive, and the underlying raw stream is - not 'interactive', multiple raw reads may be issued to satisfy - the byte count (unless EOF is reached first). - However, for interactive raw streams (as well as sockets and pipes), - at most one raw read will be issued, and a short result does not - imply that EOF is imminent. - - Return an empty bytes object on EOF. - - Return None if the underlying raw stream was open in non-blocking - mode and no data is available at the moment. - """ - - def read1(self, size: int = -1, /) -> bytes: - """Read and return up to size bytes, with at most one read() call to the underlying raw stream. - - Return an empty bytes object on EOF. - A short result does not imply that EOF is imminent. - """ + def read(self, size: int | None = -1, /) -> bytes: ... + def read1(self, size: int = -1, /) -> bytes: ... @disjoint_base class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes - """Open a file. - - The mode can be 'r' (default), 'w', 'x' or 'a' for reading, - writing, exclusive creation or appending. The file will be created if it - doesn't exist when opened for writing or appending; it will be truncated - when opened for writing. A FileExistsError will be raised if it already - exists when opened for creating. Opening a file for creating implies - writing so this mode behaves in a similar way to 'w'.Add a '+' to the mode - to allow simultaneous reading and writing. A custom opener can be used by - passing a callable as *opener*. The underlying file descriptor for the file - object is then obtained by calling opener with (*name*, *flags*). - *opener* must return an open file descriptor (passing os.open as *opener* - results in functionality similar to passing None). 
- """ - mode: str # The type of "name" equals the argument passed in to the constructor, # but that can make FileIO incompatible with other I/O types that assume @@ -450,73 +106,22 @@ class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompat self, file: FileDescriptorOrPath, mode: str = "r", closefd: bool = True, opener: _Opener | None = None ) -> None: ... @property - def closefd(self) -> bool: - """True if the file descriptor will be closed by close().""" - - def seek(self, pos: int, whence: int = 0, /) -> int: - """Move to new file position and return the file position. - - Argument offset is a byte count. Optional argument whence defaults to - SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values - are SEEK_CUR or 1 (move relative to current position, positive or negative), - and SEEK_END or 2 (move relative to end of file, usually negative, although - many platforms allow seeking beyond the end of a file). - - Note that not all file objects are seekable. - """ - - def read(self, size: int | None = -1, /) -> bytes | MaybeNone: - """Read at most size bytes, returned as bytes. - - If size is less than 0, read all bytes in the file making multiple read calls. - See ``FileIO.readall``. - - Attempts to make only one system call, retrying only per PEP 475 (EINTR). This - means less data may be returned than requested. - - In non-blocking mode, returns None if no data is available. Return an empty - bytes object at EOF. - """ + def closefd(self) -> bool: ... + def seek(self, pos: int, whence: int = 0, /) -> int: ... + def read(self, size: int | None = -1, /) -> bytes | MaybeNone: ... @disjoint_base class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes - """Buffered I/O implementation using an in-memory bytes buffer.""" - def __init__(self, initial_bytes: ReadableBuffer = b"") -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def getvalue(self) -> bytes: - """Retrieve the entire contents of the BytesIO object.""" - - def getbuffer(self) -> memoryview: - """Get a read-write view over the contents of the BytesIO object.""" - - def read1(self, size: int | None = -1, /) -> bytes: - """Read at most size bytes, returned as a bytes object. - - If the size argument is negative or omitted, read until EOF is reached. - Return an empty bytes object at EOF. - """ - - def readlines(self, size: int | None = None, /) -> list[bytes]: - """List of bytes objects, each a line from the file. - - Call readline() repeatedly and return a list of the lines so read. - The optional size argument, if given, is an approximate bound on the - total number of bytes in the lines returned. - """ - - def seek(self, pos: int, whence: int = 0, /) -> int: - """Change stream position. - - Seek to byte offset pos relative to position indicated by whence: - 0 Start of stream (the default). pos should be >= 0; - 1 Current position - pos may be negative; - 2 End of stream - pos usually negative. - Returns the new absolute position. - """ + def getvalue(self) -> bytes: ... + def getbuffer(self) -> memoryview: ... + def read1(self, size: int | None = -1, /) -> bytes: ... + def readlines(self, size: int | None = None, /) -> list[bytes]: ... + def seek(self, pos: int, whence: int = 0, /) -> int: ... 
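As a small usage sketch for the BytesIO surface stubbed above (getvalue, getbuffer, seek and read1), via the public io module:

# Illustrative sketch only; the literal values are made up.
import io

buf = io.BytesIO(b"header,")
buf.seek(0, 2)                      # whence=2: seek relative to the end, per the codes above
buf.write(b"body")
print(buf.getvalue())               # b'header,body'

with buf.getbuffer() as view:       # zero-copy, read-write view into the buffer
    view[0:6] = b"HEADER"
print(buf.getvalue())               # b'HEADER,body'

buf.seek(0)
print(buf.read1(6))                 # b'HEADER', with at most one underlying read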
@type_check_only class _BufferedReaderStream(Protocol): @@ -548,8 +153,6 @@ _BufferedReaderStreamT = TypeVar("_BufferedReaderStreamT", bound=_BufferedReader @disjoint_base class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_BufferedReaderStreamT]): # type: ignore[misc] # incompatible definitions of methods in the base classes - """Create a new buffered reader using the given readable raw IO object.""" - raw: _BufferedReaderStreamT if sys.version_info >= (3, 14): def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 131072) -> None: ... @@ -562,13 +165,6 @@ class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_Buffere @disjoint_base class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes - """A buffer for a writeable sequential RawIO object. - - The constructor creates a BufferedWriter for the given writeable raw - stream. If the buffer_size is not given, it defaults to - DEFAULT_BUFFER_SIZE. - """ - raw: RawIOBase if sys.version_info >= (3, 14): def __init__(self, raw: RawIOBase, buffer_size: int = 131072) -> None: ... @@ -581,13 +177,6 @@ class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore @disjoint_base class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes - """A buffered interface to random access streams. - - The constructor creates a reader and writer for a seekable stream, - raw, given in the first argument. If the buffer_size is omitted it - defaults to DEFAULT_BUFFER_SIZE. - """ - mode: str name: Any raw: RawIOBase @@ -602,17 +191,6 @@ class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore @disjoint_base class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStreamT]): - """A buffered reader and writer object together. - - A buffered reader object and buffered writer object put together to - form a sequential IO object that can read and write. This is typically - used with a socket or two-way pipe. - - reader and writer are RawIOBase objects that are readable and - writeable respectively. If the buffer_size is omitted it defaults to - DEFAULT_BUFFER_SIZE. - """ - if sys.version_info >= (3, 14): def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 131072, /) -> None: ... else: @@ -621,63 +199,17 @@ class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStr def peek(self, size: int = 0, /) -> bytes: ... class _TextIOBase(_IOBase): - """Base class for text I/O. - - This class provides a character and line based interface to stream - I/O. There is no readinto method because Python's character strings - are immutable. - """ - encoding: str errors: str | None newlines: str | tuple[str, ...] | None - def __iter__(self) -> Iterator[str]: # type: ignore[override] - """Implement iter(self).""" - - def __next__(self) -> str: # type: ignore[override] - """Implement next(self).""" - - def detach(self) -> BinaryIO: - """Separate the underlying buffer from the TextIOBase and return it. - - After the underlying buffer has been detached, the TextIO is in an unusable state. - """ - - def write(self, s: str, /) -> int: - """Write string s to stream. - - Return the number of characters written - (which is always equal to the length of the string). 
- """ - - def writelines(self, lines: Iterable[str], /) -> None: # type: ignore[override] - """Write a list of lines to stream. - - Line separators are not added, so it is usual for each of the - lines provided to have a line separator at the end. - """ - - def readline(self, size: int = -1, /) -> str: # type: ignore[override] - """Read until newline or EOF. - - Return an empty string if EOF is hit immediately. - If size is specified, at most size characters will be read. - """ - - def readlines(self, hint: int = -1, /) -> list[str]: # type: ignore[override] - """Return a list of lines from the stream. - - hint can be specified to control the number of lines read: no more - lines will be read if the total size (in bytes/characters) of all - lines so far exceeds hint. - """ - - def read(self, size: int | None = -1, /) -> str: - """Read at most size characters from stream. - - Read from underlying buffer until we have size characters or we hit EOF. - If size is negative or omitted, read until EOF. - """ + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def detach(self) -> BinaryIO: ... + def write(self, s: str, /) -> int: ... + def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override] + def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] + def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override] + def read(self, size: int | None = -1, /) -> str: ... @type_check_only class _WrappedBuffer(Protocol): @@ -706,36 +238,6 @@ _BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffe @disjoint_base class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes - """Character and line based layer over a BufferedIOBase object, buffer. - - encoding gives the name of the encoding that the stream will be - decoded or encoded with. It defaults to locale.getencoding(). - - errors determines the strictness of encoding and decoding (see - help(codecs.Codec) or the documentation for codecs.register) and - defaults to "strict". - - newline controls how line endings are handled. It can be None, '', - '\\n', '\\r', and '\\r\\n'. It works as follows: - - * On input, if newline is None, universal newlines mode is - enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and - these are translated into '\\n' before being returned to the - caller. If it is '', universal newline mode is enabled, but line - endings are returned to the caller untranslated. If it has any of - the other legal values, input lines are only terminated by the given - string, and the line ending is returned to the caller untranslated. - - * On output, if newline is None, any '\\n' characters written are - translated to the system default line separator, os.linesep. If - newline is '' or '\\n', no translation takes place. If newline is any - of the other legal values, any '\\n' characters written are translated - to the given string. - - If line_buffering is True, a call to flush is implied when a call to - write contains a newline character. - """ - def __init__( self, buffer: _BufferT_co, @@ -760,88 +262,30 @@ class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # t newline: str | None = None, line_buffering: bool | None = None, write_through: bool | None = None, - ) -> None: - """Reconfigure the text stream with new parameters. 
- - This also does an implicit stream flush. - """ - + ) -> None: ... def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] # Equals the "buffer" argument passed in to the constructor. def detach(self) -> _BufferT_co: ... # type: ignore[override] # TextIOWrapper's version of seek only supports a limited subset of # operations. - def seek(self, cookie: int, whence: int = 0, /) -> int: - """Set the stream position, and return the new stream position. - - cookie - Zero or an opaque number returned by tell(). - whence - The relative position to seek from. - - Four operations are supported, given by the following argument - combinations: - - - seek(0, SEEK_SET): Rewind to the start of the stream. - - seek(cookie, SEEK_SET): Restore a previous position; - 'cookie' must be a number returned by tell(). - - seek(0, SEEK_END): Fast-forward to the end of the stream. - - seek(0, SEEK_CUR): Leave the current stream position unchanged. - - Any other argument combinations are invalid, - and may raise exceptions. - """ - + def seek(self, cookie: int, whence: int = 0, /) -> int: ... def truncate(self, pos: int | None = None, /) -> int: ... @disjoint_base class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes - """Text I/O implementation using an in-memory buffer. - - The initial_value argument sets the value of object. The newline - argument is like the one of TextIOWrapper's constructor. - """ - def __init__(self, initial_value: str | None = "", newline: str | None = "\n") -> None: ... # StringIO does not contain a "name" field. This workaround is necessary # to allow StringIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def getvalue(self) -> str: - """Retrieve the entire contents of the object.""" - + def getvalue(self) -> str: ... @property def line_buffering(self) -> bool: ... - def seek(self, pos: int, whence: int = 0, /) -> int: - """Change stream position. - - Seek to character offset pos relative to position indicated by whence: - 0 Start of stream (the default). pos should be >= 0; - 1 Current position - pos must be 0; - 2 End of stream - pos must be 0. - Returns the new absolute position. - """ - - def truncate(self, pos: int | None = None, /) -> int: - """Truncate size to pos. - - The pos argument defaults to the current file position, as - returned by tell(). The current file position is unchanged. - Returns the new absolute position. - """ + def seek(self, pos: int, whence: int = 0, /) -> int: ... + def truncate(self, pos: int | None = None, /) -> int: ... @disjoint_base class IncrementalNewlineDecoder: - """Codec used when reading a file in universal newlines mode. - - It wraps another incremental decoder, translating \\r\\n and \\r into \\n. - It also records the types of newlines encountered. When used with - translate=False, it ensures that the newline sequence is returned in - one piece. When used with decoder=None, it expects unicode strings as - decode input and translates newlines without first invoking an external - decoder. - """ - def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = "strict") -> None: ... def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... 
@property @@ -852,19 +296,6 @@ class IncrementalNewlineDecoder: if sys.version_info >= (3, 10): @overload - def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: - """A helper function to choose the text encoding. - - When encoding is not None, this function returns it. - Otherwise, this function returns the default text encoding - (i.e. "locale" or "utf-8" depends on UTF-8 mode). - - This function emits an EncodingWarning if encoding is None and - sys.flags.warn_default_encoding is true. - - This can be used in APIs with an encoding=None parameter. - However, please consider using encoding="utf-8" for new APIs. - """ - + def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: ... @overload def text_encoding(encoding: _T, stacklevel: int = 2, /) -> _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi index c0ee2275573af..4a77e5be594ab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi @@ -1,45 +1,25 @@ -"""json speedups""" - from collections.abc import Callable from typing import Any, final from typing_extensions import Self @final class make_encoder: - """Encoder(markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan)""" - @property - def sort_keys(self) -> bool: - """sort_keys""" - + def sort_keys(self) -> bool: ... @property - def skipkeys(self) -> bool: - """skipkeys""" - + def skipkeys(self) -> bool: ... @property - def key_separator(self) -> str: - """key_separator""" - + def key_separator(self) -> str: ... @property - def indent(self) -> str | None: - """indent""" - + def indent(self) -> str | None: ... @property - def markers(self) -> dict[int, Any] | None: - """markers""" - + def markers(self) -> dict[int, Any] | None: ... @property - def default(self) -> Callable[[Any], Any]: - """default""" - + def default(self) -> Callable[[Any], Any]: ... @property - def encoder(self) -> Callable[[str], str]: - """encoder""" - + def encoder(self) -> Callable[[str], str]: ... @property - def item_separator(self) -> str: - """item_separator""" - + def item_separator(self) -> str: ... def __new__( cls, markers: dict[int, Any] | None, @@ -52,13 +32,10 @@ class make_encoder: skipkeys: bool, allow_nan: bool, ) -> Self: ... - def __call__(self, obj: object, _current_indent_level: int) -> Any: - """Call self as a function.""" + def __call__(self, obj: object, _current_indent_level: int) -> Any: ... @final class make_scanner: - """JSON scanner object""" - object_hook: Any object_pairs_hook: Any parse_int: Any @@ -67,30 +44,8 @@ class make_scanner: strict: bool # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __new__(cls, context: make_scanner) -> Self: ... - def __call__(self, string: str, index: int) -> tuple[Any, int]: - """Call self as a function.""" - -def encode_basestring(s: str, /) -> str: - """encode_basestring(string) -> string - - Return a JSON representation of a Python string - """ - -def encode_basestring_ascii(s: str, /) -> str: - """encode_basestring_ascii(string) -> string - - Return an ASCII-only JSON representation of a Python string - """ - -def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: - """scanstring(string, end, strict=True) -> (string, end) - - Scan the string s for a JSON string. End is the index of the - character in s after the quote that started the JSON string. 
- Unescapes all valid JSON string escape sequences and raises ValueError - on attempt to decode an invalid string. If strict is False then literal - control characters are allowed in the string. + def __call__(self, string: str, index: int) -> tuple[Any, int]: ... - Returns a tuple of the decoded string and the index of the character in s - after the end quote. - """ +def encode_basestring(s: str, /) -> str: ... +def encode_basestring_ascii(s: str, /) -> str: ... +def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi index 1c8f6ce53f1aa..ccce7a0d9d70f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi @@ -1,5 +1,3 @@ -"""Support for POSIX locales.""" - import sys from _typeshed import StrPath from typing import Final, Literal, TypedDict, type_check_only @@ -33,21 +31,14 @@ LC_NUMERIC: Final[int] LC_ALL: Final[int] CHAR_MAX: Final = 127 -def setlocale(category: int, locale: str | None = None, /) -> str: - """Activates/queries locale processing.""" - -def localeconv() -> _LocaleConv: - """Returns numeric and monetary locale-specific parameters.""" +def setlocale(category: int, locale: str | None = None, /) -> str: ... +def localeconv() -> _LocaleConv: ... if sys.version_info >= (3, 11): - def getencoding() -> str: - """Get the current locale encoding.""" - -def strcoll(os1: str, os2: str, /) -> int: - """Compares two strings according to the locale.""" + def getencoding() -> str: ... -def strxfrm(string: str, /) -> str: - """Return a string that can be used as a key for locale-aware comparisons.""" +def strcoll(os1: str, os2: str, /) -> int: ... +def strxfrm(string: str, /) -> str: ... # native gettext functions # https://docs.python.org/3/library/locale.html#access-to-message-catalogs @@ -117,31 +108,14 @@ if sys.platform != "win32": CRNCYSTR: Final[int] ALT_DIGITS: Final[int] - def nl_langinfo(key: int, /) -> str: - """Return the value for the locale information associated with key.""" + def nl_langinfo(key: int, /) -> str: ... + # This is dependent on `libintl.h` which is a part of `gettext` # system dependency. These functions might be missing. # But, we always say that they are present. - def gettext(msg: str, /) -> str: - """gettext(msg) -> string - - Return translation of msg. - """ - - def dgettext(domain: str | None, msg: str, /) -> str: - """dgettext(domain, msg) -> string - - Return translation of msg in domain. - """ - - def dcgettext(domain: str | None, msg: str, category: int, /) -> str: - """Return translation of msg in domain and category.""" - - def textdomain(domain: str | None, /) -> str: - """Set the C library's textdmain to domain, returning the new domain.""" - - def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: - """Bind the C library's domain to dir.""" - - def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: - """Bind the C library's domain to codeset.""" + def gettext(msg: str, /) -> str: ... + def dgettext(domain: str | None, msg: str, /) -> str: ... + def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ... + def textdomain(domain: str | None, /) -> str: ... + def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ... 
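A minimal usage sketch of the collation functions whose docstrings the _locale.pyi hunks above drop, shown through the public locale wrapper rather than the private _locale module; the "de_DE.UTF-8" locale name is an assumption and may not be installed on every system.

# Sketch: locale-aware sorting with the collation helpers stubbed above.
# Assumes a German UTF-8 locale is available; the exact name varies by platform.
import locale

locale.setlocale(locale.LC_COLLATE, "de_DE.UTF-8")

words = ["Ärger", "Apfel", "Zebra", "Österreich"]

# strxfrm() turns each string into a sort key that honours the active locale,
# which is usually cheaper than calling strcoll() pairwise during a sort.
print(sorted(words, key=locale.strxfrm))

# strcoll() compares two strings directly and returns a negative, zero or
# positive integer, like a classic C comparison function.
print(locale.strcoll("Apfel", "Ärger"))
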
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi index ed6d52517bb11..4f6d98b8ffb61 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi @@ -1,5 +1,3 @@ -"""Fast profiler""" - import sys from _typeshed import structseq from collections.abc import Callable @@ -9,59 +7,13 @@ from typing_extensions import disjoint_base @disjoint_base class Profiler: - """Build a profiler object using the specified timer function. - - The default timer is a fast built-in one based on real time. - For custom timer functions returning integers, 'timeunit' can - be a float specifying a scale (that is, how long each integer unit - is, in seconds). - """ - def __init__( self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True ) -> None: ... - def getstats(self) -> list[profiler_entry]: - """list of profiler_entry objects. - - getstats() -> list of profiler_entry objects - - Return all information collected by the profiler. - Each profiler_entry is a tuple-like object with the - following attributes: - - code code object - callcount how many times this was called - reccallcount how many times called recursively - totaltime total time in this entry - inlinetime inline time in this entry (not in subcalls) - calls details of the calls - - The calls attribute is either None or a list of - profiler_subentry objects: - - code called code object - callcount how many times this is called - reccallcount how many times this is called recursively - totaltime total time spent in this call - inlinetime inline time (not in further subcalls) - """ - - def enable(self, subcalls: bool = True, builtins: bool = True) -> None: - """Start collecting profiling information. - - subcalls - If True, also records for each function - statistics separated according to its current caller. - builtins - If True, records the time spent in - built-in functions separately from their caller. - """ - - def disable(self) -> None: - """Stop collecting profiling information.""" - - def clear(self) -> None: - """Clear all profiling information collected so far.""" + def getstats(self) -> list[profiler_entry]: ... + def enable(self, subcalls: bool = True, builtins: bool = True) -> None: ... + def disable(self) -> None: ... + def clear(self) -> None: ... @final class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi index dc41fd420f147..b38dce9fadedf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi @@ -37,94 +37,23 @@ PRESET_EXTREME: Final[int] # v big number @final class LZMADecompressor: - """Create a decompressor object for decompressing data incrementally. - - format - Specifies the container format of the input stream. If this is - FORMAT_AUTO (the default), the decompressor will automatically detect - whether the input is FORMAT_XZ or FORMAT_ALONE. Streams created with - FORMAT_RAW cannot be autodetected. - memlimit - Limit the amount of memory used by the decompressor. This will cause - decompression to fail if the input cannot be decompressed within the - given limit. - filters - A custom filter chain. This argument is required for FORMAT_RAW, and - not accepted with any other format. 
When provided, this should be a - sequence of dicts, each indicating the ID and options for a single - filter. - - For one-shot decompression, use the decompress() function instead. - """ - if sys.version_info >= (3, 12): def __new__(cls, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> Self: ... else: def __init__(self, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> None: ... - def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: - """Decompress *data*, returning uncompressed data as bytes. - - If *max_length* is nonnegative, returns at most *max_length* bytes of - decompressed data. If this limit is reached and further output can be - produced, *self.needs_input* will be set to ``False``. In this case, the next - call to *decompress()* may provide *data* as b'' to obtain more of the output. - - If all of the input data was decompressed and returned (either because this - was less than *max_length* bytes, or because *max_length* was negative), - *self.needs_input* will be set to True. - - Attempting to decompress data after the end of stream is reached raises an - EOFError. Any data found after the end of the stream is ignored and saved in - the unused_data attribute. - """ - + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property - def check(self) -> int: - """ID of the integrity check used by the input stream.""" - + def check(self) -> int: ... @property - def eof(self) -> bool: - """True if the end-of-stream marker has been reached.""" - + def eof(self) -> bool: ... @property - def unused_data(self) -> bytes: - """Data found after the end of the compressed stream.""" - + def unused_data(self) -> bytes: ... @property - def needs_input(self) -> bool: - """True if more input is needed before more decompressed data can be produced.""" + def needs_input(self) -> bool: ... @final class LZMACompressor: - """LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None) - - Create a compressor object for compressing data incrementally. - - format specifies the container format to use for the output. This can - be FORMAT_XZ (default), FORMAT_ALONE, or FORMAT_RAW. - - check specifies the integrity check to use. For FORMAT_XZ, the default - is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not support integrity - checks; for these formats, check must be omitted, or be CHECK_NONE. - - The settings used by the compressor can be specified either as a - preset compression level (with the 'preset' argument), or in detail - as a custom filter chain (with the 'filters' argument). For FORMAT_XZ - and FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset - level. For FORMAT_RAW, the caller must always specify a filter chain; - the raw compressor does not support preset compression levels. - - preset (if provided) should be an integer in the range 0-9, optionally - OR-ed with the constant PRESET_EXTREME. - - filters (if provided) should be a sequence of dicts. Each dict should - have an entry for "id" indicating the ID of the filter, plus - additional entries for options to the filter. - - For one-shot compression, use the compress() function instead. - """ - if sys.version_info >= (3, 12): def __new__( cls, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None @@ -134,28 +63,9 @@ class LZMACompressor: self, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None ) -> None: ... 
- def compress(self, data: ReadableBuffer, /) -> bytes: - """Provide data to the compressor object. - - Returns a chunk of compressed data if possible, or b'' otherwise. - - When you have finished providing data to the compressor, call the - flush() method to finish the compression process. - """ - - def flush(self) -> bytes: - """Finish the compression process. - - Returns the compressed data left in internal buffers. - - The compressor object may not be used after this method is called. - """ - -class LZMAError(Exception): - """Call to liblzma failed.""" + def compress(self, data: ReadableBuffer, /) -> bytes: ... + def flush(self) -> bytes: ... -def is_check_supported(check_id: int, /) -> bool: - """Test whether the given integrity check is supported. +class LZMAError(Exception): ... - Always returns True for CHECK_NONE and CHECK_CRC32. - """ +def is_check_supported(check_id: int, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi index 89ca2bfe82788..597bd09b700b0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi @@ -1,22 +1,9 @@ -"""Shared support for scanning document type declarations in HTML and XHTML. - -This module is used as a foundation for the html.parser module. It has no -documented public API and should not be used directly. - -""" - import sys from typing import Any class ParserBase: - """Parser base class which provides some common support methods used - by the SGML/HTML and XHTML parsers. - """ - def reset(self) -> None: ... - def getpos(self) -> tuple[int, int]: - """Return current line number and offset.""" - + def getpos(self) -> tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... def parse_comment(self, i: int, report: bool = True) -> int: ... # undocumented def parse_declaration(self, i: int) -> int: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi index 353942a5296cf..edceed51bf9db 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi @@ -1,5 +1,3 @@ -"""Documentation""" - import sys from typing import Final, type_check_only @@ -53,34 +51,11 @@ if sys.platform == "win32": __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def UuidCreate() -> str: - """Return the string representation of a new unique identifier.""" - - def FCICreate(cabname: str, files: list[str], /) -> None: - """Create a new CAB file. - - cabname - the name of the CAB file - files - a list of tuples, each containing the name of the file on disk, - and the name of the file inside the CAB file - """ - - def OpenDatabase(path: str, persist: int, /) -> _Database: - """Return a new database object. - - path - the file name of the MSI file - persist - the persistence mode - """ - - def CreateRecord(count: int, /) -> _Record: - """Return a new record object. + def UuidCreate() -> str: ... + def FCICreate(cabname: str, files: list[str], /) -> None: ... + def OpenDatabase(path: str, persist: int, /) -> _Database: ... + def CreateRecord(count: int, /) -> _Record: ... 
- count - the number of fields of the record - """ MSICOLINFO_NAMES: Final[int] MSICOLINFO_TYPES: Final[int] MSIDBOPEN_CREATE: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi index 7e893a6e4211d..cb1c1bcfc4aab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi @@ -1,12 +1,3 @@ -"""Operator interface. - -This module exports a set of functions implemented in C corresponding -to the intrinsic operators of Python. For example, operator.add(x, y) -is equivalent to the expression x+y. The function names are those -used for special methods; variants without leading and trailing -'__' are also provided for convenience. -""" - import sys from _typeshed import SupportsGetItem from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence @@ -56,200 +47,76 @@ class _SupportsPos(Protocol[_T_co]): def __pos__(self) -> _T_co: ... # All four comparison functions must have the same signature, or we get false-positive errors -def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a < b.""" - -def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a <= b.""" - -def eq(a: object, b: object, /) -> Any: - """Same as a == b.""" - -def ne(a: object, b: object, /) -> Any: - """Same as a != b.""" - -def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a >= b.""" - -def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a > b.""" - -def not_(a: object, /) -> bool: - """Same as not a.""" - -def truth(a: object, /) -> bool: - """Return True if a is true, False otherwise.""" - -def is_(a: object, b: object, /) -> bool: - """Same as a is b.""" - -def is_not(a: object, b: object, /) -> bool: - """Same as a is not b.""" - -def abs(a: SupportsAbs[_T], /) -> _T: - """Same as abs(a).""" - -def add(a: Any, b: Any, /) -> Any: - """Same as a + b.""" - -def and_(a: Any, b: Any, /) -> Any: - """Same as a & b.""" - -def floordiv(a: Any, b: Any, /) -> Any: - """Same as a // b.""" - -def index(a: SupportsIndex, /) -> int: - """Same as a.__index__()""" - -def inv(a: _SupportsInversion[_T_co], /) -> _T_co: - """Same as ~a.""" - -def invert(a: _SupportsInversion[_T_co], /) -> _T_co: - """Same as ~a.""" - -def lshift(a: Any, b: Any, /) -> Any: - """Same as a << b.""" - -def mod(a: Any, b: Any, /) -> Any: - """Same as a % b.""" - -def mul(a: Any, b: Any, /) -> Any: - """Same as a * b.""" - -def matmul(a: Any, b: Any, /) -> Any: - """Same as a @ b.""" - -def neg(a: _SupportsNeg[_T_co], /) -> _T_co: - """Same as -a.""" - -def or_(a: Any, b: Any, /) -> Any: - """Same as a | b.""" - -def pos(a: _SupportsPos[_T_co], /) -> _T_co: - """Same as +a.""" - -def pow(a: Any, b: Any, /) -> Any: - """Same as a ** b.""" - -def rshift(a: Any, b: Any, /) -> Any: - """Same as a >> b.""" - -def sub(a: Any, b: Any, /) -> Any: - """Same as a - b.""" - -def truediv(a: Any, b: Any, /) -> Any: - """Same as a / b.""" - -def xor(a: Any, b: Any, /) -> Any: - """Same as a ^ b.""" - -def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: - """Same as a + b, for a and b sequences.""" - -def contains(a: Container[object], b: object, /) -> bool: - """Same as b in a (note reversed operands).""" - -def countOf(a: Iterable[object], b: object, /) -> int: - """Return the number of items in a which are, or which equal, b.""" - +def lt(a: _SupportsComparison, b: 
_SupportsComparison, /) -> Any: ... +def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def eq(a: object, b: object, /) -> Any: ... +def ne(a: object, b: object, /) -> Any: ... +def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... +def not_(a: object, /) -> bool: ... +def truth(a: object, /) -> bool: ... +def is_(a: object, b: object, /) -> bool: ... +def is_not(a: object, b: object, /) -> bool: ... +def abs(a: SupportsAbs[_T], /) -> _T: ... +def add(a: Any, b: Any, /) -> Any: ... +def and_(a: Any, b: Any, /) -> Any: ... +def floordiv(a: Any, b: Any, /) -> Any: ... +def index(a: SupportsIndex, /) -> int: ... +def inv(a: _SupportsInversion[_T_co], /) -> _T_co: ... +def invert(a: _SupportsInversion[_T_co], /) -> _T_co: ... +def lshift(a: Any, b: Any, /) -> Any: ... +def mod(a: Any, b: Any, /) -> Any: ... +def mul(a: Any, b: Any, /) -> Any: ... +def matmul(a: Any, b: Any, /) -> Any: ... +def neg(a: _SupportsNeg[_T_co], /) -> _T_co: ... +def or_(a: Any, b: Any, /) -> Any: ... +def pos(a: _SupportsPos[_T_co], /) -> _T_co: ... +def pow(a: Any, b: Any, /) -> Any: ... +def rshift(a: Any, b: Any, /) -> Any: ... +def sub(a: Any, b: Any, /) -> Any: ... +def truediv(a: Any, b: Any, /) -> Any: ... +def xor(a: Any, b: Any, /) -> Any: ... +def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: ... +def contains(a: Container[object], b: object, /) -> bool: ... +def countOf(a: Iterable[object], b: object, /) -> int: ... @overload -def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: - """Same as del a[b].""" - +def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: ... @overload def delitem(a: MutableSequence[Any], b: slice, /) -> None: ... @overload def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ... @overload -def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: - """Same as a[b].""" - +def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: ... @overload def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ... -def indexOf(a: Iterable[_T], b: _T, /) -> int: - """Return the first index of b in a.""" - +def indexOf(a: Iterable[_T], b: _T, /) -> int: ... @overload -def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: - """Same as a[b] = c.""" - +def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: ... @overload def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ... @overload def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ... -def length_hint(obj: object, default: int = 0, /) -> int: - """Return an estimate of the number of items in obj. - - This is useful for presizing containers when building from an iterable. - - If the object supports len(), the result will be exact. - Otherwise, it may over- or under-estimate by an arbitrary amount. - The result will be an integer >= 0. 
- """ - -def iadd(a: Any, b: Any, /) -> Any: - """Same as a += b.""" - -def iand(a: Any, b: Any, /) -> Any: - """Same as a &= b.""" - -def iconcat(a: Any, b: Any, /) -> Any: - """Same as a += b, for a and b sequences.""" - -def ifloordiv(a: Any, b: Any, /) -> Any: - """Same as a //= b.""" - -def ilshift(a: Any, b: Any, /) -> Any: - """Same as a <<= b.""" - -def imod(a: Any, b: Any, /) -> Any: - """Same as a %= b.""" - -def imul(a: Any, b: Any, /) -> Any: - """Same as a *= b.""" - -def imatmul(a: Any, b: Any, /) -> Any: - """Same as a @= b.""" - -def ior(a: Any, b: Any, /) -> Any: - """Same as a |= b.""" - -def ipow(a: Any, b: Any, /) -> Any: - """Same as a **= b.""" - -def irshift(a: Any, b: Any, /) -> Any: - """Same as a >>= b.""" - -def isub(a: Any, b: Any, /) -> Any: - """Same as a -= b.""" - -def itruediv(a: Any, b: Any, /) -> Any: - """Same as a /= b.""" - -def ixor(a: Any, b: Any, /) -> Any: - """Same as a ^= b.""" +def length_hint(obj: object, default: int = 0, /) -> int: ... +def iadd(a: Any, b: Any, /) -> Any: ... +def iand(a: Any, b: Any, /) -> Any: ... +def iconcat(a: Any, b: Any, /) -> Any: ... +def ifloordiv(a: Any, b: Any, /) -> Any: ... +def ilshift(a: Any, b: Any, /) -> Any: ... +def imod(a: Any, b: Any, /) -> Any: ... +def imul(a: Any, b: Any, /) -> Any: ... +def imatmul(a: Any, b: Any, /) -> Any: ... +def ior(a: Any, b: Any, /) -> Any: ... +def ipow(a: Any, b: Any, /) -> Any: ... +def irshift(a: Any, b: Any, /) -> Any: ... +def isub(a: Any, b: Any, /) -> Any: ... +def itruediv(a: Any, b: Any, /) -> Any: ... +def ixor(a: Any, b: Any, /) -> Any: ... if sys.version_info >= (3, 11): - def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: - """Same as obj(*args, **kwargs).""" - -def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: - """Return 'a == b'. + def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... - This function uses an approach designed to prevent - timing analysis, making it appropriate for cryptography. - - a and b must both be of the same type: either str (ASCII only), - or any bytes-like object. - - Note: If a and b are of different lengths, or if an error occurs, - a timing attack could theoretically reveal information about the - types and lengths of a and b--but not their values. - """ +def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... if sys.version_info >= (3, 14): - def is_none(a: object, /) -> TypeIs[None]: - """Same as a is None.""" - - def is_not_none(a: _T | None, /) -> TypeIs[_T]: - """Same as a is not None.""" + def is_none(a: object, /) -> TypeIs[None]: ... + def is_not_none(a: _T | None, /) -> TypeIs[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi index e026e5e01e5e6..fb00e6986dd06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi @@ -1,5 +1,3 @@ -"""Shared OS X support functions.""" - from collections.abc import Iterable, Sequence from typing import Final, TypeVar @@ -13,90 +11,24 @@ _UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented _COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented _INITPRE: Final[str] # undocumented -def _find_executable(executable: str, path: str | None = None) -> str | None: # undocumented - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. 
Returns the complete filename or None if not found. - """ - -def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: # undocumented - """Output from successful command execution or None""" - -def _find_build_tool(toolname: str) -> str: # undocumented - """Find a build tool on current path or using xcrun""" +def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented +def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented +def _find_build_tool(toolname: str) -> str: ... # undocumented _SYSTEM_VERSION: Final[str | None] # undocumented -def _get_system_version() -> str: # undocumented - """Return the OS X system version as a string""" - -def _remove_original_values(_config_vars: dict[str, str]) -> None: # undocumented - """Remove original unmodified values for testing""" - -def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: # undocumented - """Save modified and original unmodified value of configuration var""" - -def _supports_universal_builds() -> bool: # undocumented - """Returns True if universal builds are supported on this system""" - -def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Find appropriate C compiler for extension module builds""" - -def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove all universal build arguments from config vars""" - -def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove any unsupported archs from config vars""" - -def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Allow override of all archs with ARCHFLAGS env var""" - -def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove references to any SDKs not available""" - -def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: - """ - This function will strip '-isysroot PATH' and '-arch ARCH' from the - compile flags if the user has specified one them in extra_compile_flags. - - This is needed because '-arch ARCH' adds another architecture to the - build, without a way to remove an architecture. Furthermore GCC will - barf if multiple '-isysroot' arguments are present. - """ - -def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: - """Customize Python build configuration variables. - - Called internally from sysconfig with a mutable mapping - containing name/value pairs parsed from the configured - makefile used to build this interpreter. Returns - the mapping updated as needed to reflect the environment - in which the interpreter is running; in the case of - a Python from a binary installer, the installed - environment may be very different from the build - environment, i.e. different OS levels, different - built tools, different available CPU architectures. - - This customization is performed whenever - distutils.sysconfig.get_config_vars() is first - called. It may be used in environments where no - compilers are present, i.e. when installing pure - Python dists. Customization of compiler paths - and detection of unavailable archs is deferred - until the first extension module build is - requested (in distutils.sysconfig.customize_compiler). 
- - Currently called from distutils.sysconfig - """ - -def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: - """Customize compiler path and configuration variables. - - This customization is performed when the first - extension module build is requested - in distutils.sysconfig.customize_compiler. - """ - -def get_platform_osx(_config_vars: dict[str, str], osname: _T, release: _K, machine: _V) -> tuple[str | _T, str | _K, str | _V]: - """Filter values for get_platform()""" +def _get_system_version() -> str: ... # undocumented +def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented +def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ... # undocumented +def _supports_universal_builds() -> bool: ... # undocumented +def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ... +def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ... +def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... +def get_platform_osx( + _config_vars: dict[str, str], osname: _T, release: _K, machine: _V +) -> tuple[str | _T, str | _K, str | _V]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi index 9867a477a7f80..544f787172d6f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi @@ -1,5 +1,3 @@ -"""Optimized C implementation for the Python pickle module.""" - from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from pickle import PickleBuffer as PickleBuffer @@ -28,56 +26,10 @@ def dump( *, fix_imports: bool = True, buffer_callback: _BufferCallback = None, -) -> None: - """Write a pickled representation of obj to the open file object file. - - This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may - be more efficient. - - The optional *protocol* argument tells the pickler to use the given - protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default - protocol is 5. It was introduced in Python 3.8, and is incompatible - with previous versions. - - Specifying a negative protocol version selects the highest protocol - version supported. The higher the protocol used, the more recent the - version of Python needed to read the pickle produced. - - The *file* argument must have a write() method that accepts a single - bytes argument. It can thus be a file object opened for binary - writing, an io.BytesIO instance, or any other custom object that meets - this interface. - - If *fix_imports* is True and protocol is less than 3, pickle will try - to map the new Python 3 names to the old module names used in Python - 2, so that the pickle data stream is readable with Python 2. - - If *buffer_callback* is None (the default), buffer views are serialized - into *file* as part of the pickle stream. 
It is an error if - *buffer_callback* is not None and *protocol* is None or smaller than 5. - """ - -def dumps(obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None) -> bytes: - """Return the pickled representation of the object as a bytes object. - - The optional *protocol* argument tells the pickler to use the given - protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default - protocol is 5. It was introduced in Python 3.8, and is incompatible - with previous versions. - - Specifying a negative protocol version selects the highest protocol - version supported. The higher the protocol used, the more recent the - version of Python needed to read the pickle produced. - - If *fix_imports* is True and *protocol* is less than 3, pickle will - try to map the new Python 3 names to the old module names used in - Python 2, so that the pickle data stream is readable with Python 2. - - If *buffer_callback* is None (the default), buffer views are serialized - into *file* as part of the pickle stream. It is an error if - *buffer_callback* is not None and *protocol* is None or smaller than 5. - """ - +) -> None: ... +def dumps( + obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None +) -> bytes: ... def load( file: _ReadableFileobj, *, @@ -85,32 +37,7 @@ def load( encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = (), -) -> Any: - """Read and return an object from the pickle data stored in a file. - - This is equivalent to ``Unpickler(file).load()``, but may be more - efficient. - - The protocol version of the pickle is detected automatically, so no - protocol argument is needed. Bytes past the pickled object's - representation are ignored. - - The argument *file* must have two methods, a read() method that takes - an integer argument, and a readline() method that requires no - arguments. Both methods should return bytes. Thus *file* can be a - binary file object opened for reading, an io.BytesIO object, or any - other custom object that meets this interface. - - Optional keyword arguments are *fix_imports*, *encoding* and *errors*, - which are used to control compatibility support for pickle stream - generated by Python 2. If *fix_imports* is True, pickle will try to - map the old Python 2 names to the new names used in Python 3. The - *encoding* and *errors* tell pickle how to decode 8-bit string - instances pickled by Python 2; these default to 'ASCII' and 'strict', - respectively. The *encoding* can be 'bytes' to read these 8-bit - string instances as bytes objects. - """ - +) -> Any: ... def loads( data: ReadableBuffer, /, @@ -119,22 +46,7 @@ def loads( encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = (), -) -> Any: - """Read and return an object from the given pickle data. - - The protocol version of the pickle is detected automatically, so no - protocol argument is needed. Bytes past the pickled object's - representation are ignored. - - Optional keyword arguments are *fix_imports*, *encoding* and *errors*, - which are used to control compatibility support for pickle stream - generated by Python 2. If *fix_imports* is True, pickle will try to - map the old Python 2 names to the new names used in Python 3. The - *encoding* and *errors* tell pickle how to decode 8-bit string - instances pickled by Python 2; these default to 'ASCII' and 'strict', - respectively. 
The *encoding* can be 'bytes' to read these 8-bit - string instances as bytes objects. - """ +) -> Any: ... class PickleError(Exception): ... class PicklingError(PickleError): ... @@ -147,38 +59,6 @@ class PicklerMemoProxy: @disjoint_base class Pickler: - """This takes a binary file for writing a pickle data stream. - - The optional *protocol* argument tells the pickler to use the given - protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default - protocol is 5. It was introduced in Python 3.8, and is incompatible - with previous versions. - - Specifying a negative protocol version selects the highest protocol - version supported. The higher the protocol used, the more recent the - version of Python needed to read the pickle produced. - - The *file* argument must have a write() method that accepts a single - bytes argument. It can thus be a file object opened for binary - writing, an io.BytesIO instance, or any other custom object that meets - this interface. - - If *fix_imports* is True and protocol is less than 3, pickle will try - to map the new Python 3 names to the old module names used in Python - 2, so that the pickle data stream is readable with Python 2. - - If *buffer_callback* is None (the default), buffer views are - serialized into *file* as part of the pickle stream. - - If *buffer_callback* is not None, then it can be called any number - of times with a buffer view. If the callback returns a false value - (such as None), the given buffer is out-of-band; otherwise the - buffer is serialized in-band, i.e. inside the pickle stream. - - It is an error if *buffer_callback* is not None and *protocol* - is None or smaller than 5. - """ - fast: bool dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] reducer_override: Callable[[Any], Any] @@ -194,17 +74,9 @@ class Pickler: def memo(self) -> PicklerMemoProxy: ... @memo.setter def memo(self, value: PicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... - def dump(self, obj: Any, /) -> None: - """Write a pickled representation of the given object to the open file.""" - - def clear_memo(self) -> None: - """Clears the pickler's "memo". + def dump(self, obj: Any, /) -> None: ... + def clear_memo(self) -> None: ... - The memo is the data structure that remembers which objects the - pickler has already seen, so that shared or recursive objects are - pickled by reference and not by value. This method is useful when - re-using picklers. - """ # this method has no default implementation for Python < 3.13 def persistent_id(self, obj: Any, /) -> Any: ... @@ -215,28 +87,6 @@ class UnpicklerMemoProxy: @disjoint_base class Unpickler: - """This takes a binary file for reading a pickle data stream. - - The protocol version of the pickle is detected automatically, so no - protocol argument is needed. Bytes past the pickled object's - representation are ignored. - - The argument *file* must have two methods, a read() method that takes - an integer argument, and a readline() method that requires no - arguments. Both methods should return bytes. Thus *file* can be a - binary file object opened for reading, an io.BytesIO object, or any - other custom object that meets this interface. - - Optional keyword arguments are *fix_imports*, *encoding* and *errors*, - which are used to control compatibility support for pickle stream - generated by Python 2. If *fix_imports* is True, pickle will try to - map the old Python 2 names to the new names used in Python 3. 
The - *encoding* and *errors* tell pickle how to decode 8-bit string - instances pickled by Python 2; these default to 'ASCII' and 'strict', - respectively. The *encoding* can be 'bytes' to read these 8-bit - string instances as bytes objects. - """ - def __init__( self, file: _ReadableFileobj, @@ -250,23 +100,8 @@ class Unpickler: def memo(self) -> UnpicklerMemoProxy: ... @memo.setter def memo(self, value: UnpicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... - def load(self) -> Any: - """Load a pickle. - - Read a pickled object representation from the open file object given - in the constructor, and return the reconstituted object hierarchy - specified therein. - """ - - def find_class(self, module_name: str, global_name: str, /) -> Any: - """Return an object from a specified module. - - If necessary, the module will be imported. Subclasses may override - this method (e.g. to restrict unpickling of arbitrary classes and - functions). + def load(self) -> Any: ... + def find_class(self, module_name: str, global_name: str, /) -> Any: ... - This method is called whenever a class or a function object is - needed. Both arguments passed are str objects. - """ # this method has no default implementation for Python < 3.13 def persistent_load(self, pid: Any, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi index 0a1d475b8409c..dd74e316e8990 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi @@ -1,5 +1,3 @@ -"""A POSIX helper for the subprocess module.""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -31,29 +29,7 @@ if sys.platform != "win32": child_umask: int, preexec_fn: Callable[[], None], /, - ) -> int: - """Spawn a fresh new child process. - - Fork a child process, close parent file descriptors as appropriate in the - child and duplicate the few that are needed before calling exec() in the - child process. - - If close_fds is True, close file descriptors 3 and higher, except those listed - in the sorted tuple pass_fds. - - The preexec_fn, if supplied, will be called immediately before closing file - descriptors and exec. - - WARNING: preexec_fn is NOT SAFE if your application uses threads. - It may trigger infrequent, difficult to debug deadlocks. - - If an error occurs in the child process before the exec, it is - serialized and written to the errpipe_write fd per subprocess.py. - - Returns: the child process's PID. - - Raises: Only on an error in the parent process. - """ + ) -> int: ... else: def fork_exec( args: Sequence[StrOrBytesPath] | None, @@ -80,26 +56,4 @@ if sys.platform != "win32": preexec_fn: Callable[[], None], allow_vfork: bool, /, - ) -> int: - """Spawn a fresh new child process. - - Fork a child process, close parent file descriptors as appropriate in the - child and duplicate the few that are needed before calling exec() in the - child process. - - If close_fds is True, close file descriptors 3 and higher, except those listed - in the sorted tuple pass_fds. - - The preexec_fn, if supplied, will be called immediately before closing file - descriptors and exec. - - WARNING: preexec_fn is NOT SAFE if your application uses threads. - It may trigger infrequent, difficult to debug deadlocks. 
- - If an error occurs in the child process before the exec, it is - serialized and written to the errpipe_write fd per subprocess.py. - - Returns: the child process's PID. - - Raises: Only on an error in the parent process. - """ + ) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi index b273d362048a2..1260717489e41 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi @@ -5,33 +5,10 @@ _T = TypeVar("_T") _CacheToken = NewType("_CacheToken", int) -def get_cache_token() -> _CacheToken: - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to ``register()`` on any ABC. - """ +def get_cache_token() -> _CacheToken: ... class ABCMeta(type): - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). - """ - def __new__( mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], / ) -> _typeshed.Self: ... - def register(cls, subclass: type[_T]) -> type[_T]: - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ + def register(cls, subclass: type[_T]) -> type[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi index 95499412cc7d1..a6723f749da6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi @@ -1,5 +1,3 @@ -"""Python decimal arithmetic module""" - # This is a slight lie, the implementations aren't exactly identical # However, in all likelihood, the differences are inconsequential import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi index 2e78fd1d78391..edd484a9a71a4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi @@ -1,59 +1,18 @@ -"""C implementation of the Python queue module. -This module is an implementation detail, please do not use it directly. -""" - from types import GenericAlias from typing import Any, Generic, TypeVar from typing_extensions import disjoint_base _T = TypeVar("_T") -class Empty(Exception): - """Exception raised by Queue.get(block=0)/get_nowait().""" +class Empty(Exception): ... @disjoint_base class SimpleQueue(Generic[_T]): - """Simple, unbounded, reentrant FIFO queue.""" - def __init__(self) -> None: ... - def empty(self) -> bool: - """Return True if the queue is empty, False otherwise (not reliable!).""" - - def get(self, block: bool = True, timeout: float | None = None) -> _T: - """Remove and return an item from the queue. 
- - If optional args 'block' is true and 'timeout' is None (the default), - block if necessary until an item is available. If 'timeout' is - a non-negative number, it blocks at most 'timeout' seconds and raises - the Empty exception if no item was available within that time. - Otherwise ('block' is false), return an item if one is immediately - available, else raise the Empty exception ('timeout' is ignored - in that case). - """ - - def get_nowait(self) -> _T: - """Remove and return an item from the queue without blocking. - - Only get an item if one is immediately available. Otherwise - raise the Empty exception. - """ - - def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: - """Put the item on the queue. - - The optional 'block' and 'timeout' arguments are ignored, as this method - never blocks. They are provided for compatibility with the Queue class. - """ - - def put_nowait(self, item: _T) -> None: - """Put an item into the queue without blocking. - - This is exactly equivalent to `put(item)` and is only provided - for compatibility with the Queue class. - """ - - def qsize(self) -> int: - """Return the approximate size of the queue (not reliable!).""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def empty(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def qsize(self) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi index 243de87f5aa5f..ac00fdfb7272b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi @@ -1,5 +1,3 @@ -"""Module implements the Mersenne Twister random number generator.""" - import sys from typing_extensions import Self, TypeAlias, disjoint_base @@ -8,28 +6,13 @@ _State: TypeAlias = tuple[int, ...] @disjoint_base class Random: - """Random() -> create a random number generator with its own internal state.""" - if sys.version_info >= (3, 10): def __init__(self, seed: object = ..., /) -> None: ... else: def __new__(self, seed: object = ..., /) -> Self: ... - def seed(self, n: object = None, /) -> None: - """seed([n]) -> None. - - Defaults to use urandom and falls back to a combination - of the current time and the process identifier. - """ - - def getstate(self) -> _State: - """getstate() -> tuple containing the current state.""" - - def setstate(self, state: _State, /) -> None: - """setstate(state) -> None. Restores generator state.""" - - def random(self) -> float: - """random() -> x in the interval [0, 1).""" - - def getrandbits(self, k: int, /) -> int: - """getrandbits(k) -> x. Generates an int with k random bits.""" + def seed(self, n: object = None, /) -> None: ... + def getstate(self) -> _State: ... + def setstate(self, state: _State, /) -> None: ... + def random(self) -> float: ... + def getrandbits(self, k: int, /) -> int: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi index 98fa3d1ef8ffa..eb6c811294216 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi @@ -1,7 +1,3 @@ -""" -The objects used by the site module to add custom builtins. -""" - import sys from collections.abc import Iterable from typing import ClassVar, Literal, NoReturn @@ -13,22 +9,9 @@ class Quitter: def __call__(self, code: sys._ExitCode = None) -> NoReturn: ... class _Printer: - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice. - """ - MAXLINES: ClassVar[Literal[23]] def __init__(self, name: str, data: str, files: Iterable[str] = (), dirs: Iterable[str] = ()) -> None: ... def __call__(self) -> None: ... class _Helper: - """Define the builtin 'help'. - - This is a wrapper around pydoc.help that provides a helpful message - when 'help' is typed at the Python interactive prompt. - - Calling help() at the Python prompt starts an interactive help session. - Calling help(thing) prints help for the python object 'thing'. - """ - def __call__(self, request: object = ...) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi index fd99c122d0100..cdad886b3415e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi @@ -1,8 +1,3 @@ -"""Implementation module for socket operations. - -See the socket module for documentation. -""" - import sys from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterable @@ -738,67 +733,15 @@ if sys.platform != "win32" and sys.platform != "darwin": @disjoint_base class socket: - """socket(family=AF_INET, type=SOCK_STREAM, proto=0) -> socket object - socket(family=-1, type=-1, proto=-1, fileno=None) -> socket object - - Open a socket of the given type. The family argument specifies the - address family; it defaults to AF_INET. The type argument specifies - whether this is a stream (SOCK_STREAM, this is the default) - or datagram (SOCK_DGRAM) socket. The protocol argument defaults to 0, - specifying the default protocol. Keyword arguments are accepted. - The socket is created as non-inheritable. - - When a fileno is passed in, family, type and proto are auto-detected, - unless they are explicitly set. - - A socket object represents one endpoint of a network connection. 
- - Methods of socket objects (keyword arguments not allowed): - - _accept() -- accept connection, returning new socket fd and client address - bind(addr) -- bind the socket to a local address - close() -- close the socket - connect(addr) -- connect the socket to a remote address - connect_ex(addr) -- connect, return an error code instead of an exception - dup() -- return a new socket fd duplicated from fileno() - fileno() -- return underlying file descriptor - getpeername() -- return remote address [*] - getsockname() -- return local address - getsockopt(level, optname[, buflen]) -- get socket options - gettimeout() -- return timeout or None - listen([n]) -- start listening for incoming connections - recv(buflen[, flags]) -- receive data - recv_into(buffer[, nbytes[, flags]]) -- receive data (into a buffer) - recvfrom(buflen[, flags]) -- receive data and sender's address - recvfrom_into(buffer[, nbytes, [, flags]) - -- receive data and sender's address (into a buffer) - sendall(data[, flags]) -- send all data - send(data[, flags]) -- send data, may not send all of it - sendto(data[, flags], addr) -- send data to a given address - setblocking(bool) -- set or clear the blocking I/O flag - getblocking() -- return True if socket is blocking, False if non-blocking - setsockopt(level, optname, value[, optlen]) -- set socket options - settimeout(None | float) -- set or clear the timeout - shutdown(how) -- shut down traffic in one or both directions - - [*] not available on all platforms! - """ - @property - def family(self) -> int: - """the socket family""" - + def family(self) -> int: ... @property - def type(self) -> int: - """the socket type""" - + def type(self) -> int: ... @property - def proto(self) -> int: - """the socket protocol""" + def proto(self) -> int: ... # F811: "Redefinition of unused `timeout`" @property - def timeout(self) -> float | None: # noqa: F811 - """the socket timeout""" + def timeout(self) -> float | None: ... # noqa: F811 if sys.platform == "win32": def __init__( self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = None @@ -806,230 +749,38 @@ class socket: else: def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | None = None) -> None: ... - def bind(self, address: _Address, /) -> None: - """bind(address) - - Bind the socket to a local address. For IP sockets, the address is a - pair (host, port); the host must refer to the local host. For raw packet - sockets the address is a tuple (ifname, proto [,pkttype [,hatype [,addr]]]) - """ - - def close(self) -> None: - """close() - - Close the socket. It cannot be used after this call. - """ - - def connect(self, address: _Address, /) -> None: - """connect(address) - - Connect the socket to a remote address. For IP sockets, the address - is a pair (host, port). - """ - - def connect_ex(self, address: _Address, /) -> int: - """connect_ex(address) -> errno - - This is like connect(address), but returns an error code (the errno value) - instead of raising an exception when an error occurs. - """ - - def detach(self) -> int: - """detach() - - Close the socket object without closing the underlying file descriptor. - The object cannot be used after this call, but the file descriptor - can be reused for other purposes. The file descriptor is returned. - """ - - def fileno(self) -> int: - """fileno() -> integer - - Return the integer file descriptor of the socket. 
- """ - - def getpeername(self) -> _RetAddress: - """getpeername() -> address info - - Return the address of the remote endpoint. For IP sockets, the address - info is a pair (hostaddr, port). - """ - - def getsockname(self) -> _RetAddress: - """getsockname() -> address info - - Return the address of the local endpoint. The format depends on the - address family. For IPv4 sockets, the address info is a pair - (hostaddr, port). For IPv6 sockets, the address info is a 4-tuple - (hostaddr, port, flowinfo, scope_id). - """ - + def bind(self, address: _Address, /) -> None: ... + def close(self) -> None: ... + def connect(self, address: _Address, /) -> None: ... + def connect_ex(self, address: _Address, /) -> int: ... + def detach(self) -> int: ... + def fileno(self) -> int: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... @overload - def getsockopt(self, level: int, optname: int, /) -> int: - """getsockopt(level, option[, buffersize]) -> value - - Get a socket option. See the Unix manual for level and option. - If a nonzero buffersize argument is given, the return value is a - string of that length; otherwise it is an integer. - """ - + def getsockopt(self, level: int, optname: int, /) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes: ... - def getblocking(self) -> bool: - """getblocking() - - Returns True if socket is in blocking mode, or False if it - is in non-blocking mode. - """ - - def gettimeout(self) -> float | None: - """gettimeout() -> timeout - - Returns the timeout in seconds (float) associated with socket - operations. A timeout of None indicates that timeouts on socket - operations are disabled. - """ + def getblocking(self) -> bool: ... + def gettimeout(self) -> float | None: ... if sys.platform == "win32": - def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: - """ioctl(cmd, option) -> long - - Control the socket with WSAIoctl syscall. Currently supported 'cmd' values are - SIO_RCVALL: 'option' must be one of the socket.RCVALL_* constants. - SIO_KEEPALIVE_VALS: 'option' is a tuple of (onoff, timeout, interval). - SIO_LOOPBACK_FAST_PATH: 'option' is a boolean value, and is disabled by default - """ - - def listen(self, backlog: int = ..., /) -> None: - """listen([backlog]) - - Enable a server to accept connections. If backlog is specified, it must be - at least 0 (if it is lower, it is set to 0); it specifies the number of - unaccepted connections that the system will allow before refusing new - connections. If not specified, a default reasonable value is chosen. - """ - - def recv(self, bufsize: int, flags: int = 0, /) -> bytes: - """recv(buffersize[, flags]) -> data - - Receive up to buffersize bytes from the socket. For the optional flags - argument, see the Unix manual. When no data is available, block until - at least one byte is available or until the remote end is closed. When - the remote end is closed and all data is read, return the empty string. - """ - - def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: - """recvfrom(buffersize[, flags]) -> (data, address info) - - Like recv(buffersize, flags) but also return the sender's address info. 
- """ - if sys.platform != "win32": - def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: - """recvmsg(bufsize[, ancbufsize[, flags]]) -> (data, ancdata, msg_flags, address) - - Receive normal data (up to bufsize bytes) and ancillary data from the - socket. The ancbufsize argument sets the size in bytes of the - internal buffer used to receive the ancillary data; it defaults to 0, - meaning that no ancillary data will be received. Appropriate buffer - sizes for ancillary data can be calculated using CMSG_SPACE() or - CMSG_LEN(), and items which do not fit into the buffer might be - truncated or discarded. The flags argument defaults to 0 and has the - same meaning as for recv(). - - The return value is a 4-tuple: (data, ancdata, msg_flags, address). - The data item is a bytes object holding the non-ancillary data - received. The ancdata item is a list of zero or more tuples - (cmsg_level, cmsg_type, cmsg_data) representing the ancillary data - (control messages) received: cmsg_level and cmsg_type are integers - specifying the protocol level and protocol-specific type respectively, - and cmsg_data is a bytes object holding the associated data. The - msg_flags item is the bitwise OR of various flags indicating - conditions on the received message; see your system documentation for - details. If the receiving socket is unconnected, address is the - address of the sending socket, if available; otherwise, its value is - unspecified. - - If recvmsg() raises an exception after the system call returns, it - will first attempt to close any file descriptors received via the - SCM_RIGHTS mechanism. - """ + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: ... + def listen(self, backlog: int = ..., /) -> None: ... + def recv(self, bufsize: int, flags: int = 0, /) -> bytes: ... + def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: ... + if sys.platform != "win32": + def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: ... def recvmsg_into( self, buffers: Iterable[WriteableBuffer], ancbufsize: int = 0, flags: int = 0, / - ) -> tuple[int, list[_CMSG], int, Any]: - """recvmsg_into(buffers[, ancbufsize[, flags]]) -> (nbytes, ancdata, msg_flags, address) - - Receive normal data and ancillary data from the socket, scattering the - non-ancillary data into a series of buffers. The buffers argument - must be an iterable of objects that export writable buffers - (e.g. bytearray objects); these will be filled with successive chunks - of the non-ancillary data until it has all been written or there are - no more buffers. The ancbufsize argument sets the size in bytes of - the internal buffer used to receive the ancillary data; it defaults to - 0, meaning that no ancillary data will be received. Appropriate - buffer sizes for ancillary data can be calculated using CMSG_SPACE() - or CMSG_LEN(), and items which do not fit into the buffer might be - truncated or discarded. The flags argument defaults to 0 and has the - same meaning as for recv(). - - The return value is a 4-tuple: (nbytes, ancdata, msg_flags, address). - The nbytes item is the total number of bytes of non-ancillary data - written into the buffers. 
The ancdata item is a list of zero or more - tuples (cmsg_level, cmsg_type, cmsg_data) representing the ancillary - data (control messages) received: cmsg_level and cmsg_type are - integers specifying the protocol level and protocol-specific type - respectively, and cmsg_data is a bytes object holding the associated - data. The msg_flags item is the bitwise OR of various flags - indicating conditions on the received message; see your system - documentation for details. If the receiving socket is unconnected, - address is the address of the sending socket, if available; otherwise, - its value is unspecified. - - If recvmsg_into() raises an exception after the system call returns, - it will first attempt to close any file descriptors received via the - SCM_RIGHTS mechanism. - """ - - def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: - """recvfrom_into(buffer[, nbytes[, flags]]) -> (nbytes, address info) - - Like recv_into(buffer[, nbytes[, flags]]) but also return the sender's address info. - """ - - def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: - """recv_into(buffer, [nbytes[, flags]]) -> nbytes_read - - A version of recv() that stores its data into a buffer rather than creating - a new string. Receive up to buffersize bytes from the socket. If buffersize - is not specified (or 0), receive up to the size available in the given buffer. - - See recv() for documentation about the flags. - """ - - def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: - """send(data[, flags]) -> count - - Send a data string to the socket. For the optional flags - argument, see the Unix manual. Return the number of bytes - sent; this may be less than len(data) if the network is busy. - """ - - def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: - """sendall(data[, flags]) - - Send a data string to the socket. For the optional flags - argument, see the Unix manual. This calls send() repeatedly - until all data is sent. If an error occurs, it's impossible - to tell how much data has been sent. - """ + ) -> tuple[int, list[_CMSG], int, Any]: ... + def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: ... + def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: ... @overload - def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: - """sendto(data[, flags], address) -> count - - Like send(data, flags) but allows specifying the destination address. - For IP sockets, the address is a pair (hostaddr, port). - """ - + def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: ... @overload def sendto(self, data: ReadableBuffer, flags: int, address: _Address, /) -> int: ... if sys.platform != "win32": @@ -1040,267 +791,68 @@ class socket: flags: int = 0, address: _Address | None = None, /, - ) -> int: - """sendmsg(buffers[, ancdata[, flags[, address]]]) -> count - - Send normal and ancillary data to the socket, gathering the - non-ancillary data from a series of buffers and concatenating it into - a single message. The buffers argument specifies the non-ancillary - data as an iterable of bytes-like objects (e.g. bytes objects). 
- The ancdata argument specifies the ancillary data (control messages) - as an iterable of zero or more tuples (cmsg_level, cmsg_type, - cmsg_data), where cmsg_level and cmsg_type are integers specifying the - protocol level and protocol-specific type respectively, and cmsg_data - is a bytes-like object holding the associated data. The flags - argument defaults to 0 and has the same meaning as for send(). If - address is supplied and not None, it sets a destination address for - the message. The return value is the number of bytes of non-ancillary - data sent. - """ + ) -> int: ... if sys.platform == "linux": def sendmsg_afalg( self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0 - ) -> int: - """sendmsg_afalg([msg], *, op[, iv[, assoclen[, flags=MSG_MORE]]]) - - Set operation mode, IV and length of associated data for an AF_ALG - operation socket. - """ - - def setblocking(self, flag: bool, /) -> None: - """setblocking(flag) - - Set the socket to blocking (flag is true) or non-blocking (false). - setblocking(True) is equivalent to settimeout(None); - setblocking(False) is equivalent to settimeout(0.0). - """ - - def settimeout(self, value: float | None, /) -> None: - """settimeout(timeout) - - Set a timeout on socket operations. 'timeout' can be a float, - giving in seconds, or None. Setting a timeout of None disables - the timeout feature and is equivalent to setblocking(1). - Setting a timeout of zero is the same as setblocking(0). - """ + ) -> int: ... + def setblocking(self, flag: bool, /) -> None: ... + def settimeout(self, value: float | None, /) -> None: ... @overload - def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: - """setsockopt(level, option, value: int) - setsockopt(level, option, value: buffer) - setsockopt(level, option, None, optlen: int) - - Set a socket option. See the Unix manual for level and option. - The value argument can either be an integer, a string buffer, or - None, optlen. - """ - + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: ... @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None: ... if sys.platform == "win32": - def share(self, process_id: int, /) -> bytes: - """share(process_id) -> bytes - - Share the socket with another process. The target process id - must be provided and the resulting bytes object passed to the target - process. There the shared socket can be instantiated by calling - socket.fromshare(). - """ - - def shutdown(self, how: int, /) -> None: - """shutdown(flag) + def share(self, process_id: int, /) -> bytes: ... - Shut down the reading side of the socket (flag == SHUT_RD), the writing side - of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR). - """ + def shutdown(self, how: int, /) -> None: ... SocketType = socket # ===== Functions ===== -def close(fd: SupportsIndex, /) -> None: - """close(integer) -> None - - Close an integer socket file descriptor. This is like os.close(), but for - sockets; on some platforms os.close() won't work for socket file descriptors. - """ - -def dup(fd: SupportsIndex, /) -> int: - """dup(integer) -> integer - - Duplicate an integer socket file descriptor. This is like os.dup(), but for - sockets; on some platforms os.dup() won't work for socket file descriptors. - """ +def close(fd: SupportsIndex, /) -> None: ... +def dup(fd: SupportsIndex, /) -> int: ... 
# the 5th tuple item is an address def getaddrinfo( host: bytes | str | None, port: bytes | str | int | None, family: int = ..., type: int = 0, proto: int = 0, flags: int = 0 -) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: - """getaddrinfo(host, port [, family, type, proto, flags]) - -> list of (family, type, proto, canonname, sockaddr) - - Resolve host and port into addrinfo struct. - """ - -def gethostbyname(hostname: str, /) -> str: - """gethostbyname(host) -> address - - Return the IP address (a string of the form '255.255.255.255') for a host. - """ - -def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: - """gethostbyname_ex(host) -> (name, aliaslist, addresslist) - - Return the true host name, a list of aliases, and a list of IP addresses, - for a host. The host argument is a string giving a host name or IP number. - """ - -def gethostname() -> str: - """gethostname() -> string - - Return the current host name. - """ - -def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: - """gethostbyaddr(host) -> (name, aliaslist, addresslist) - - Return the true host name, a list of aliases, and a list of IP addresses, - for a host. The host argument is a string giving a host name or IP number. - """ - -def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: - """getnameinfo(sockaddr, flags) --> (host, port) - - Get host and port for a sockaddr. - """ - -def getprotobyname(protocolname: str, /) -> int: - """getprotobyname(name) -> integer - - Return the protocol number for the named protocol. (Rarely used.) - """ - -def getservbyname(servicename: str, protocolname: str = ..., /) -> int: - """getservbyname(servicename[, protocolname]) -> integer - - Return a port number from a service name and protocol name. - The optional protocol name, if given, should be 'tcp' or 'udp', - otherwise any protocol will match. - """ - -def getservbyport(port: int, protocolname: str = ..., /) -> str: - """getservbyport(port[, protocolname]) -> string - - Return the service name from a port number and protocol name. - The optional protocol name, if given, should be 'tcp' or 'udp', - otherwise any protocol will match. - """ - -def ntohl(x: int, /) -> int: # param & ret val are 32-bit ints - """Convert a 32-bit unsigned integer from network to host byte order.""" - -def ntohs(x: int, /) -> int: # param & ret val are 16-bit ints - """Convert a 16-bit unsigned integer from network to host byte order.""" - -def htonl(x: int, /) -> int: # param & ret val are 32-bit ints - """Convert a 32-bit unsigned integer from host to network byte order.""" - -def htons(x: int, /) -> int: # param & ret val are 16-bit ints - """Convert a 16-bit unsigned integer from host to network byte order.""" - -def inet_aton(ip_addr: str, /) -> bytes: # ret val 4 bytes in length - """Convert an IP address in string format (123.45.67.89) to the 32-bit packed binary format used in low-level network functions.""" - -def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: - """Convert an IP address from 32-bit packed binary format to string format.""" - -def inet_pton(address_family: int, ip_string: str, /) -> bytes: - """inet_pton(af, ip) -> packed IP address string - - Convert an IP address from string format to a packed string suitable - for use with low-level network functions. 
- """ - -def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: - """inet_ntop(af, packed_ip) -> string formatted IP address - - Convert a packed IP address of the given family to string format. - """ - -def getdefaulttimeout() -> float | None: - """getdefaulttimeout() -> timeout - - Returns the default timeout in seconds (float) for new socket objects. - A value of None indicates that new socket objects have no timeout. - When the socket module is first imported, the default is None. - """ +) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... +def gethostbyname(hostname: str, /) -> str: ... +def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: ... +def gethostname() -> str: ... +def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: ... +def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: ... +def getprotobyname(protocolname: str, /) -> int: ... +def getservbyname(servicename: str, protocolname: str = ..., /) -> int: ... +def getservbyport(port: int, protocolname: str = ..., /) -> str: ... +def ntohl(x: int, /) -> int: ... # param & ret val are 32-bit ints +def ntohs(x: int, /) -> int: ... # param & ret val are 16-bit ints +def htonl(x: int, /) -> int: ... # param & ret val are 32-bit ints +def htons(x: int, /) -> int: ... # param & ret val are 16-bit ints +def inet_aton(ip_addr: str, /) -> bytes: ... # ret val 4 bytes in length +def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ... +def inet_pton(address_family: int, ip_string: str, /) -> bytes: ... +def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ... +def getdefaulttimeout() -> float | None: ... # F811: "Redefinition of unused `timeout`" -def setdefaulttimeout(timeout: float | None, /) -> None: # noqa: F811 - """setdefaulttimeout(timeout) - - Set the default timeout in seconds (float) for new socket objects. - A value of None indicates that new socket objects have no timeout. - When the socket module is first imported, the default is None. - """ +def setdefaulttimeout(timeout: float | None, /) -> None: ... # noqa: F811 if sys.platform != "win32": - def sethostname(name: str, /) -> None: - """sethostname(name) + def sethostname(name: str, /) -> None: ... + def CMSG_LEN(length: int, /) -> int: ... + def CMSG_SPACE(length: int, /) -> int: ... + def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: ... - Sets the hostname to name. - """ - - def CMSG_LEN(length: int, /) -> int: - """CMSG_LEN(length) -> control message length - - Return the total length, without trailing padding, of an ancillary - data item with associated data of the given length. This value can - often be used as the buffer size for recvmsg() to receive a single - item of ancillary data, but RFC 3542 requires portable applications to - use CMSG_SPACE() and thus include space for padding, even when the - item will be the last in the buffer. Raises OverflowError if length - is outside the permissible range of values. - """ - - def CMSG_SPACE(length: int, /) -> int: - """CMSG_SPACE(length) -> buffer size - - Return the buffer size needed for recvmsg() to receive an ancillary - data item with associated data of the given length, along with any - trailing padding. The buffer space needed to receive multiple items - is the sum of the CMSG_SPACE() values for their associated data - lengths. 
Raises OverflowError if length is outside the permissible - range of values. - """ - - def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: - """socketpair([family[, type [, proto]]]) -> (socket object, socket object) - - Create a pair of socket objects from the sockets returned by the platform - socketpair() function. - The arguments are the same as for socket() except the default family is - AF_UNIX if defined on the platform; otherwise, the default is AF_INET. - """ - -def if_nameindex() -> list[tuple[int, str]]: - """if_nameindex() - - Returns a list of network interface information (index, name) tuples. - """ - -def if_nametoindex(oname: str, /) -> int: - """Returns the interface index corresponding to the interface name if_name.""" +def if_nameindex() -> list[tuple[int, str]]: ... +def if_nametoindex(oname: str, /) -> int: ... if sys.version_info >= (3, 14): - def if_indextoname(if_index: int, /) -> str: - """Returns the interface name corresponding to the interface index if_index.""" + def if_indextoname(if_index: int, /) -> str: ... else: - def if_indextoname(index: int, /) -> str: - """if_indextoname(if_index) - - Returns the interface name corresponding to the interface index if_index. - """ + def if_indextoname(index: int, /) -> str: ... CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi index bae33a446d2a3..50006dcf4032d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi @@ -215,13 +215,10 @@ if sys.version_info >= (3, 11): # Can take or return anything depending on what's in the registry. @overload -def adapt(obj: Any, proto: Any, /) -> Any: - """Adapt given object to given protocol.""" - +def adapt(obj: Any, proto: Any, /) -> Any: ... @overload def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: ... -def complete_statement(statement: str) -> bool: - """Checks if a string contains a complete SQL statement.""" +def complete_statement(statement: str) -> bool: ... if sys.version_info >= (3, 12): @overload @@ -235,18 +232,7 @@ if sys.version_info >= (3, 12): uri: bool = False, *, autocommit: bool = ..., - ) -> Connection: - """Open a connection to the SQLite database file 'database'. - - You can use ":memory:" to open a database connection to a database that - resides in RAM instead of on disk. - - Note: Passing more than 1 positional argument to _sqlite3.connect() is - deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', - 'check_same_thread', 'factory', 'cached_statements' and 'uri' will - become keyword-only parameters in Python 3.15. - """ - + ) -> Connection: ... @overload def connect( database: StrOrBytesPath, @@ -284,13 +270,7 @@ else: check_same_thread: bool = True, cached_statements: int = 128, uri: bool = False, - ) -> Connection: - """Opens a connection to the SQLite database file database. - - You can use ":memory:" to open a database connection to a database that resides - in RAM instead of on disk. - """ - + ) -> Connection: ... @overload def connect( database: StrOrBytesPath, @@ -315,39 +295,19 @@ else: uri: bool = False, ) -> _ConnectionT: ... -def enable_callback_tracebacks(enable: bool, /) -> None: - """Enable or disable callback functions throwing errors to stderr.""" +def enable_callback_tracebacks(enable: bool, /) -> None: ... 
if sys.version_info < (3, 12): # takes a pos-or-keyword argument because there is a C wrapper - def enable_shared_cache(do_enable: int) -> None: - """Enable or disable shared cache mode for the calling thread. - - This method is deprecated and will be removed in Python 3.12. - Shared cache is strongly discouraged by the SQLite 3 documentation. - If shared cache must be used, open the database in URI mode using - the cache=shared query parameter. - """ + def enable_shared_cache(do_enable: int) -> None: ... if sys.version_info >= (3, 10): - def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: - """Register a function to adapt Python objects to SQLite values.""" - - def register_converter(typename: str, converter: _Converter, /) -> None: - """Register a function to convert SQLite values to Python objects.""" + def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: ... + def register_converter(typename: str, converter: _Converter, /) -> None: ... else: - def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: - """register_adapter(type, callable) - - Registers an adapter with sqlite3's adapter registry. - """ - - def register_converter(name: str, converter: _Converter, /) -> None: - """register_converter(typename, callable) - - Registers a converter with sqlite3. - """ + def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: ... + def register_converter(name: str, converter: _Converter, /) -> None: ... if sys.version_info < (3, 10): OptimizedUnicode = str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi index ca8fe20333eae..73a43f29c8c5f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi @@ -1,7 +1,3 @@ -"""Implementation module for SSL socket operations. See the socket module -for documentation. -""" - import sys from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable @@ -51,72 +47,26 @@ class _CertInfo(TypedDict): caIssuers: NotRequired[tuple[str, ...] | None] crlDistributionPoints: NotRequired[tuple[str, ...] | None] -def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: - """Mix string into the OpenSSL PRNG state. - - entropy (a float) is a lower bound on the entropy contained in - string. See RFC 4086. - """ - -def RAND_bytes(n: int, /) -> bytes: - """Generate n cryptographically strong pseudo-random bytes.""" +def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: ... +def RAND_bytes(n: int, /) -> bytes: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.6; removed in Python 3.12. Use `ssl.RAND_bytes()` instead.") - def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: - """Generate n pseudo-random bytes. - - Return a pair (bytes, is_cryptographic). is_cryptographic is True - if the bytes generated are cryptographically strong. - """ + def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: ... if sys.version_info < (3, 10): def RAND_egd(path: str) -> None: ... -def RAND_status() -> bool: - """Returns True if the OpenSSL PRNG has been seeded with enough data and False if not. - - It is necessary to seed the PRNG with RAND_add() on some platforms before - using the ssl() function. - """ - -def get_default_verify_paths() -> tuple[str, str, str, str]: - """Return search paths and environment vars that are used by SSLContext's set_default_verify_paths() to load default CAs. 
- - The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. - """ +def RAND_status() -> bool: ... +def get_default_verify_paths() -> tuple[str, str, str, str]: ... if sys.platform == "win32": _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] - def enum_certificates(store_name: str) -> _EnumRetType: - """Retrieve certificates from Windows' cert store. - - store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide - more cert storages, too. The function returns a list of (bytes, - encoding_type, trust) tuples. The encoding_type flag can be interpreted - with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either - a set of OIDs or the boolean True. - """ - - def enum_crls(store_name: str) -> _EnumRetType: - """Retrieve CRLs from Windows' cert store. - - store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide - more cert storages, too. The function returns a list of (bytes, - encoding_type) tuples. The encoding_type flag can be interpreted with - X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. - """ - -def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: - """Lookup NID, short name, long name and OID of an ASN1_OBJECT. - - By default objects are looked up by OID. With name=True short and - long name are also matched. - """ - -def nid2obj(nid: int, /) -> tuple[int, str, str, str]: - """Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID.""" + def enum_certificates(store_name: str) -> _EnumRetType: ... + def enum_crls(store_name: str) -> _EnumRetType: ... +def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: ... +def nid2obj(nid: int, /) -> tuple[int, str, str, str]: ... @disjoint_base class _SSLContext: check_hostname: bool @@ -133,27 +83,9 @@ class _SSLContext: verify_flags: int verify_mode: int def __new__(cls, protocol: int, /) -> Self: ... - def cert_store_stats(self) -> dict[str, int]: - """Returns quantities of loaded X.509 certificates. - - X.509 certificates with a CA extension and certificate revocation lists - inside the context's cert store. - - NOTE: Certificates in a capath directory aren't loaded unless they have - been used at least once. - """ - + def cert_store_stats(self) -> dict[str, int]: ... @overload - def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: - """Returns a list of dicts with information of loaded CA certs. - - If the optional argument is True, returns a DER-encoded copy of the CA - certificate. - - NOTE: Certificates in a capath directory aren't loaded unless they have - been used at least once. - """ - + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -184,49 +116,23 @@ class MemoryBIO: eof: bool pending: int def __new__(self) -> Self: ... - def read(self, size: int = -1, /) -> bytes: - """Read up to size bytes from the memory BIO. - - If size is not specified, read the entire buffer. - If the return value is an empty bytes instance, this means either - EOF or that no data is available. Use the "eof" property to - distinguish between the two. - """ - - def write(self, b: ReadableBuffer, /) -> int: - """Writes the bytes b into the memory BIO. - - Returns the number of bytes written. - """ - - def write_eof(self) -> None: - """Write an EOF marker to the memory BIO. - - When all data has been read, the "eof" property will be True. 
- """ + def read(self, size: int = -1, /) -> bytes: ... + def write(self, b: ReadableBuffer, /) -> int: ... + def write_eof(self) -> None: ... @final class SSLSession: __hash__: ClassVar[None] # type: ignore[assignment] @property - def has_ticket(self) -> bool: - """Does the session contain a ticket?""" - + def has_ticket(self) -> bool: ... @property - def id(self) -> bytes: - """Session ID.""" - + def id(self) -> bytes: ... @property - def ticket_lifetime_hint(self) -> int: - """Ticket life time hint.""" - + def ticket_lifetime_hint(self) -> int: ... @property - def time(self) -> int: - """Session creation time (seconds since epoch).""" - + def time(self) -> int: ... @property - def timeout(self) -> int: - """Session timeout (delta in seconds).""" + def timeout(self) -> int: ... # _ssl.Certificate is weird: it can't be instantiated or subclassed. # Instances can only be created via methods of the private _ssl._SSLSocket class, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi index 44de15e745e92..7129a282b5747 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi @@ -1,74 +1,3 @@ -"""S_IFMT_: file type bits -S_IFDIR: directory -S_IFCHR: character device -S_IFBLK: block device -S_IFREG: regular file -S_IFIFO: fifo (named pipe) -S_IFLNK: symbolic link -S_IFSOCK: socket file -S_IFDOOR: door -S_IFPORT: event port -S_IFWHT: whiteout - -S_ISUID: set UID bit -S_ISGID: set GID bit -S_ENFMT: file locking enforcement -S_ISVTX: sticky bit -S_IREAD: Unix V7 synonym for S_IRUSR -S_IWRITE: Unix V7 synonym for S_IWUSR -S_IEXEC: Unix V7 synonym for S_IXUSR -S_IRWXU: mask for owner permissions -S_IRUSR: read by owner -S_IWUSR: write by owner -S_IXUSR: execute by owner -S_IRWXG: mask for group permissions -S_IRGRP: read by group -S_IWGRP: write by group -S_IXGRP: execute by group -S_IRWXO: mask for others (not in group) permissions -S_IROTH: read by others -S_IWOTH: write by others -S_IXOTH: execute by others - -UF_SETTABLE: mask of owner changeable flags -UF_NODUMP: do not dump file -UF_IMMUTABLE: file may not be changed -UF_APPEND: file may only be appended to -UF_OPAQUE: directory is opaque when viewed through a union stack -UF_NOUNLINK: file may not be renamed or deleted -UF_COMPRESSED: macOS: file is hfs-compressed -UF_TRACKED: used for dealing with document IDs -UF_DATAVAULT: entitlement required for reading and writing -UF_HIDDEN: macOS: file should not be displayed -SF_SETTABLE: mask of super user changeable flags -SF_ARCHIVED: file may be archived -SF_IMMUTABLE: file may not be changed -SF_APPEND: file may only be appended to -SF_RESTRICTED: entitlement required for writing -SF_NOUNLINK: file may not be renamed or deleted -SF_SNAPSHOT: file is a snapshot file -SF_FIRMLINK: file is a firmlink -SF_DATALESS: file is a dataless object - -On macOS: -SF_SUPPORTED: mask of super user supported flags -SF_SYNTHETIC: mask of read-only synthetic flags - -ST_MODE -ST_INO -ST_DEV -ST_NLINK -ST_UID -ST_GID -ST_SIZE -ST_ATIME -ST_MTIME -ST_CTIME - -FILE_ATTRIBUTE_*: Windows file attribute constants - (only present on Windows) -""" - import sys from typing import Final @@ -135,74 +64,19 @@ UF_NODUMP: Final = 0x00000001 UF_NOUNLINK: Final = 0x00000010 UF_OPAQUE: Final = 0x00000008 -def S_IMODE(mode: int, /) -> int: - """Return the portion of the file's mode that can be set by os.chmod().""" - -def S_IFMT(mode: int, /) -> int: - """Return the portion of the file's mode that 
describes the file type.""" - -def S_ISBLK(mode: int, /) -> bool: - """S_ISBLK(mode) -> bool - - Return True if mode is from a block special device file. - """ - -def S_ISCHR(mode: int, /) -> bool: - """S_ISCHR(mode) -> bool - - Return True if mode is from a character special device file. - """ - -def S_ISDIR(mode: int, /) -> bool: - """S_ISDIR(mode) -> bool - - Return True if mode is from a directory. - """ - -def S_ISDOOR(mode: int, /) -> bool: - """S_ISDOOR(mode) -> bool - - Return True if mode is from a door. - """ - -def S_ISFIFO(mode: int, /) -> bool: - """S_ISFIFO(mode) -> bool - - Return True if mode is from a FIFO (named pipe). - """ - -def S_ISLNK(mode: int, /) -> bool: - """S_ISLNK(mode) -> bool - - Return True if mode is from a symbolic link. - """ - -def S_ISPORT(mode: int, /) -> bool: - """S_ISPORT(mode) -> bool - - Return True if mode is from an event port. - """ - -def S_ISREG(mode: int, /) -> bool: - """S_ISREG(mode) -> bool - - Return True if mode is from a regular file. - """ - -def S_ISSOCK(mode: int, /) -> bool: - """S_ISSOCK(mode) -> bool - - Return True if mode is from a socket. - """ - -def S_ISWHT(mode: int, /) -> bool: - """S_ISWHT(mode) -> bool - - Return True if mode is from a whiteout. - """ - -def filemode(mode: int, /) -> str: - """Convert a file's mode to a string of the form '-rwxrwxrwx'""" +def S_IMODE(mode: int, /) -> int: ... +def S_IFMT(mode: int, /) -> int: ... +def S_ISBLK(mode: int, /) -> bool: ... +def S_ISCHR(mode: int, /) -> bool: ... +def S_ISDIR(mode: int, /) -> bool: ... +def S_ISDOOR(mode: int, /) -> bool: ... +def S_ISFIFO(mode: int, /) -> bool: ... +def S_ISLNK(mode: int, /) -> bool: ... +def S_ISPORT(mode: int, /) -> bool: ... +def S_ISREG(mode: int, /) -> bool: ... +def S_ISSOCK(mode: int, /) -> bool: ... +def S_ISWHT(mode: int, /) -> bool: ... +def filemode(mode: int, /) -> str: ... if sys.platform == "win32": IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi index 801e999a08089..a8fac2aea1b00 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi @@ -1,137 +1,23 @@ -"""Functions to convert between Python values and C structs. -Python bytes objects are used to hold the data representing the C struct -and also as format strings (explained below) to describe the layout of data -in the C struct. - -The optional first format char indicates byte order, size and alignment: - @: native order, size & alignment (default) - =: native order, std. size & alignment - <: little-endian, std. size & alignment - >: big-endian, std. size & alignment - !: same as > - -The remaining chars indicate types of args and must match exactly; -these can be preceded by a decimal repeat count: - x: pad byte (no data); c:char; b:signed byte; B:unsigned byte; - ?: _Bool (requires C99; if not available, char is used instead) - h:short; H:unsigned short; i:int; I:unsigned int; - l:long; L:unsigned long; f:float; d:double; e:half-float. -Special cases (preceding decimal count indicates length): - s:string (array of char); p: pascal string (with count byte). -Special cases (only available in native format): - n:ssize_t; N:size_t; - P:an integer type that is wide enough to hold a pointer. -Special case (not in native mode unless 'long long' in platform C): - q:long long; Q:unsigned long long -Whitespace between formats is ignored. 
- -The variable struct.error is an exception raised on errors. -""" - from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterator from typing import Any from typing_extensions import disjoint_base -def pack(fmt: str | bytes, /, *v: Any) -> bytes: - """pack(format, v1, v2, ...) -> bytes - - Return a bytes object containing the values v1, v2, ... packed according - to the format string. See help(struct) for more on format strings. - """ - -def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: - """pack_into(format, buffer, offset, v1, v2, ...) - - Pack the values v1, v2, ... according to the format string and write - the packed bytes into the writable buffer buf starting at offset. Note - that the offset is a required argument. See help(struct) for more - on format strings. - """ - -def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: - """Return a tuple containing values unpacked according to the format string. - - The buffer's size in bytes must be calcsize(format). - - See help(struct) for more on format strings. - """ - -def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: - """Return a tuple containing values unpacked according to the format string. - - The buffer's size, minus offset, must be at least calcsize(format). - - See help(struct) for more on format strings. - """ - -def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: - """Return an iterator yielding tuples unpacked from the given bytes. - - The bytes are unpacked according to the format string, like - a repeated invocation of unpack_from(). - - Requires that the bytes length be a multiple of the format struct size. - """ - -def calcsize(format: str | bytes, /) -> int: - """Return size in bytes of the struct described by the format string.""" - +def pack(fmt: str | bytes, /, *v: Any) -> bytes: ... +def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: ... +def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... +def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... +def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... +def calcsize(format: str | bytes, /) -> int: ... @disjoint_base class Struct: - """Struct(fmt) --> compiled struct object""" - @property - def format(self) -> str: - """struct format string""" - + def format(self) -> str: ... @property - def size(self) -> int: - """struct size in bytes""" - + def size(self) -> int: ... def __init__(self, format: str | bytes) -> None: ... - def pack(self, *v: Any) -> bytes: - """S.pack(v1, v2, ...) -> bytes - - Return a bytes object containing values v1, v2, ... packed according - to the format string S.format. See help(struct) for more on format - strings. - """ - - def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: - """S.pack_into(buffer, offset, v1, v2, ...) - - Pack the values v1, v2, ... according to the format string S.format - and write the packed bytes into the writable buffer buf starting at - offset. Note that the offset is a required argument. See - help(struct) for more on format strings. - """ - - def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: - """Return a tuple containing unpacked values. - - Unpack according to the format string Struct.format. 
The buffer's size - in bytes must be Struct.size. - - See help(struct) for more on format strings. - """ - - def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: - """Return a tuple containing unpacked values. - - Values are unpacked according to the format string Struct.format. - - The buffer's size in bytes, starting at position offset, must be - at least Struct.size. - - See help(struct) for more on format strings. - """ - - def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: - """Return an iterator yielding tuples. - - Tuples are unpacked from the given bytes source, like a repeated - invocation of unpack_from(). - - Requires that the bytes length be a multiple of the struct size. - """ + def pack(self, *v: Any) -> bytes: ... + def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... + def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... + def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi index 8b63cc7ca8bc1..6969ae48cae79 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi @@ -1,7 +1,3 @@ -"""This module provides primitive operations to write multi-threaded programs. -The 'threading' module provides a more convenient interface. -""" - import signal import sys from _typeshed import structseq @@ -15,52 +11,15 @@ _Ts = TypeVarTuple("_Ts") error = RuntimeError -def _count() -> int: - """Return the number of currently running Python threads, excluding - the main thread. The returned number comprises all threads created - through `start_new_thread()` as well as `threading.Thread`, and not - yet finished. - - This function is meant for internal and specialized purposes only. - In most applications `threading.enumerate()` should be used instead. - """ - +def _count() -> int: ... @final class RLock: - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - """Lock the lock. `blocking` indicates whether we should wait - for the lock to be available or not. If `blocking` is False - and another thread holds the lock, the method will return False - immediately. If `blocking` is True and another thread holds - the lock, the method will wait for the lock to be released, - take it and then return True. - (note: the blocking operation is interruptible.) - - In all other cases, the method will return True immediately. - Precisely, if the current thread already holds the lock, its - internal counter is simply incremented. If nobody holds the lock, - the lock is taken and its internal counter initialized to 1. - """ - - def release(self) -> None: - """Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. The lock must be in the locked state, - and must be locked by the same thread that unlocks it; otherwise a - `RuntimeError` is raised. - - Do note that if the lock was acquire()d several times in a row by the - current thread, release() needs to be called as many times for the lock - to be available for other threads. - """ + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... 
__enter__ = acquire - def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: - """Release the lock.""" + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... if sys.version_info >= (3, 14): - def locked(self) -> bool: - """locked() - - Return a boolean indicating whether this object is locked right now. - """ + def locked(self) -> bool: ... if sys.version_info >= (3, 13): @final @@ -73,298 +32,86 @@ if sys.version_info >= (3, 13): def start_joinable_thread( function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True - ) -> _ThreadHandle: - """*For internal use only*: start a new thread. - - Like start_new_thread(), this starts a new thread calling the given function. - Unlike start_new_thread(), this returns a handle object with methods to join - or detach the given thread. - This function is not for third-party code, please use the - `threading` module instead. During finalization the runtime will not wait for - the thread to exit if daemon is True. If handle is provided it must be a - newly created thread._ThreadHandle instance. - """ - + ) -> _ThreadHandle: ... @final class lock: - """A lock object is a synchronization primitive. To create a lock, - call threading.Lock(). Methods are: - - acquire() -- lock the lock, possibly blocking until it can be obtained - release() -- unlock of the lock - locked() -- test whether the lock is currently locked - - A lock is not owned by the thread that locked it; another thread may - unlock it. A thread attempting to lock a lock that it has already locked - will block until another thread unlocks it. Deadlocks may ensue. - """ - - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - """Lock the lock. Without argument, this blocks if the lock is already - locked (even by the same thread), waiting for another thread to release - the lock, and return True once the lock is acquired. - With an argument, this will only block if the argument is true, - and the return value reflects whether the lock is acquired. - The blocking operation is interruptible. - """ - - def release(self) -> None: - """Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. The lock must be in the locked state, - but it needn't be locked by the same thread that unlocks it. - """ - - def locked(self) -> bool: - """Return whether the lock is in the locked state.""" - - def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: - """An obsolete synonym of acquire().""" - - def release_lock(self) -> None: - """An obsolete synonym of release().""" - - def locked_lock(self) -> bool: - """An obsolete synonym of locked().""" - - def __enter__(self) -> bool: - """Lock the lock.""" - + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release_lock(self) -> None: ... + def locked_lock(self) -> bool: ... + def __enter__(self) -> bool: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> None: - """Release the lock.""" + ) -> None: ... LockType = lock else: @final class LockType: - """A lock object is a synchronization primitive. To create a lock, - call threading.Lock(). 
Methods are: - - acquire() -- lock the lock, possibly blocking until it can be obtained - release() -- unlock of the lock - locked() -- test whether the lock is currently locked - - A lock is not owned by the thread that locked it; another thread may - unlock it. A thread attempting to lock a lock that it has already locked - will block until another thread unlocks it. Deadlocks may ensue. - """ - - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - """acquire(blocking=True, timeout=-1) -> bool - (acquire_lock() is an obsolete synonym) - - Lock the lock. Without argument, this blocks if the lock is already - locked (even by the same thread), waiting for another thread to release - the lock, and return True once the lock is acquired. - With an argument, this will only block if the argument is true, - and the return value reflects whether the lock is acquired. - The blocking operation is interruptible. - """ - - def release(self) -> None: - """release() - (release_lock() is an obsolete synonym) - - Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. The lock must be in the locked state, - but it needn't be locked by the same thread that unlocks it. - """ - - def locked(self) -> bool: - """locked() -> bool - (locked_lock() is an obsolete synonym) - - Return whether the lock is in the locked state. - """ - - def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: - """acquire(blocking=True, timeout=-1) -> bool - (acquire_lock() is an obsolete synonym) - - Lock the lock. Without argument, this blocks if the lock is already - locked (even by the same thread), waiting for another thread to release - the lock, and return True once the lock is acquired. - With an argument, this will only block if the argument is true, - and the return value reflects whether the lock is acquired. - The blocking operation is interruptible. - """ - - def release_lock(self) -> None: - """release() - (release_lock() is an obsolete synonym) - - Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. The lock must be in the locked state, - but it needn't be locked by the same thread that unlocks it. - """ - - def locked_lock(self) -> bool: - """locked() -> bool - (locked_lock() is an obsolete synonym) - - Return whether the lock is in the locked state. - """ - - def __enter__(self) -> bool: - """acquire(blocking=True, timeout=-1) -> bool - (acquire_lock() is an obsolete synonym) - - Lock the lock. Without argument, this blocks if the lock is already - locked (even by the same thread), waiting for another thread to release - the lock, and return True once the lock is acquired. - With an argument, this will only block if the argument is true, - and the return value reflects whether the lock is acquired. - The blocking operation is interruptible. - """ - + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release_lock(self) -> None: ... + def locked_lock(self) -> bool: ... + def __enter__(self) -> bool: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> None: - """release() - (release_lock() is an obsolete synonym) - - Release the lock, allowing another thread that is blocked waiting for - the lock to acquire the lock. 
The lock must be in the locked state, - but it needn't be locked by the same thread that unlocks it. - """ + ) -> None: ... @overload -def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: - """Start a new thread and return its identifier. - - The thread will call the function with positional arguments from the - tuple args and keyword arguments taken from the optional dictionary - kwargs. The thread exits when the function returns; the return value - is ignored. The thread will also exit when the function raises an - unhandled exception; a stack trace will be printed unless the exception - is SystemExit. - """ - +def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... @overload def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... # Obsolete synonym for start_new_thread() @overload -def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: - """An obsolete synonym of start_new_thread().""" - +def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... @overload def start_new(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... if sys.version_info >= (3, 10): - def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: - """Simulate the arrival of the given signal in the main thread, - where the corresponding signal handler will be executed. - If *signum* is omitted, SIGINT is assumed. - A subthread can use this function to interrupt the main thread. - - Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. - """ + def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: ... else: - def interrupt_main() -> None: - """interrupt_main() - - Raise a KeyboardInterrupt in the main thread. - A subthread can use this function to interrupt the main thread. - """ + def interrupt_main() -> None: ... -def exit() -> NoReturn: - """This is synonymous to ``raise SystemExit''. It will cause the current - thread to exit silently unless the exception is caught. - """ - -def exit_thread() -> NoReturn: # Obsolete synonym for exit() - """An obsolete synonym of exit().""" - -def allocate_lock() -> LockType: - """Create a new lock object. See help(type(threading.Lock())) for - information about locks. - """ - -def allocate() -> LockType: # Obsolete synonym for allocate_lock() - """An obsolete synonym of allocate_lock().""" - -def get_ident() -> int: - """Return a non-zero integer that uniquely identifies the current thread - amongst other threads that exist simultaneously. - This may be used to identify per-thread resources. - Even though on some platforms threads identities may appear to be - allocated consecutive numbers starting at 1, this behavior should not - be relied upon, and the number should be seen purely as a magic cookie. - A thread's identity may be reused for another thread after it exits. - """ - -def stack_size(size: int = 0, /) -> int: - """Return the thread stack size used when creating new threads. The - optional size argument specifies the stack size (in bytes) to be used - for subsequently created threads, and must be 0 (use platform or - configured default) or a positive integer value of at least 32,768 (32k). - If changing the thread stack size is unsupported, a ThreadError - exception is raised. 
If the specified size is invalid, a ValueError - exception is raised, and the stack size is unmodified. 32k bytes - currently the minimum supported stack size value to guarantee - sufficient stack space for the interpreter itself. - - Note that some platforms may have particular restrictions on values for - the stack size, such as requiring a minimum stack size larger than 32 KiB or - requiring allocation in multiples of the system memory page size - - platform documentation should be referred to for more information - (4 KiB pages are common; using multiples of 4096 for the stack size is - the suggested approach in the absence of more specific information). - """ +def exit() -> NoReturn: ... +def exit_thread() -> NoReturn: ... # Obsolete synonym for exit() +def allocate_lock() -> LockType: ... +def allocate() -> LockType: ... # Obsolete synonym for allocate_lock() +def get_ident() -> int: ... +def stack_size(size: int = 0, /) -> int: ... TIMEOUT_MAX: Final[float] -def get_native_id() -> int: # only available on some platforms - """Return a non-negative integer identifying the thread as reported - by the OS (kernel). This may be used to uniquely identify a - particular thread within a system. - """ - +def get_native_id() -> int: ... # only available on some platforms @final class _ExceptHookArgs(structseq[Any], tuple[type[BaseException], BaseException | None, TracebackType | None, Thread | None]): - """ExceptHookArgs - - Type used to pass arguments to threading.excepthook. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("exc_type", "exc_value", "exc_traceback", "thread") @property - def exc_type(self) -> type[BaseException]: - """Exception type""" - + def exc_type(self) -> type[BaseException]: ... @property - def exc_value(self) -> BaseException | None: - """Exception value""" - + def exc_value(self) -> BaseException | None: ... @property - def exc_traceback(self) -> TracebackType | None: - """Exception traceback""" - + def exc_traceback(self) -> TracebackType | None: ... @property - def thread(self) -> Thread | None: - """Thread""" + def thread(self) -> Thread | None: ... _excepthook: Callable[[_ExceptHookArgs], Any] if sys.version_info >= (3, 12): - def daemon_threads_allowed() -> bool: - """Return True if daemon threads are allowed in the current interpreter, - and False otherwise. - """ + def daemon_threads_allowed() -> bool: ... if sys.version_info >= (3, 14): - def set_name(name: str) -> None: - """Set the name of the current thread.""" + def set_name(name: str) -> None: ... @disjoint_base class _local: - """Thread-local data""" - def __getattribute__(self, name: str, /) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... def __delattr__(self, name: str, /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi index fcf88de1bbc57..5f6acaf840aa1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi @@ -1,11 +1,3 @@ -"""Thread-local objects. - -(Note that this module provides a Python version of the threading.local - class. Depending on the version of Python you're using, there may be a - faster one available. You should always import the `local` class from - `threading`.) 
-""" - from threading import RLock from typing import Any from typing_extensions import Self, TypeAlias @@ -15,21 +7,14 @@ __all__ = ["local"] _LocalDict: TypeAlias = dict[Any, Any] class _localimpl: - """A class managing thread-local dicts""" - __slots__ = ("key", "dicts", "localargs", "locallock", "__weakref__") key: str dicts: dict[int, tuple[ReferenceType[Any], _LocalDict]] # Keep localargs in sync with the *args, **kwargs annotation on local.__new__ localargs: tuple[list[Any], dict[str, Any]] locallock: RLock - def get_dict(self) -> _LocalDict: - """Return the dict for the current thread. Raises KeyError if none - defined. - """ - - def create_dict(self) -> _LocalDict: - """Create a new dict for the current thread, and return it.""" + def get_dict(self) -> _LocalDict: ... + def create_dict(self) -> _LocalDict: ... class local: __slots__ = ("_local__impl", "__dict__") diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi index 37ccdc051eade..a3868f467c6ca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi @@ -19,12 +19,9 @@ from typing_extensions import TypeAlias, deprecated @final class Tcl_Obj: @property - def string(self) -> str: - """the string representation of this object, either as str or bytes""" - + def string(self) -> str: ... @property - def typename(self) -> str: - """name of the Tcl type""" + def typename(self) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, value, /): ... def __ge__(self, value, /): ... @@ -95,11 +92,8 @@ class TkappType: def wantobjects(self, *args, **kwargs): ... def willdispatch(self) -> None: ... if sys.version_info >= (3, 12): - def gettrace(self, /) -> _TkinterTraceFunc | None: - """Get the tracing function.""" - - def settrace(self, func: _TkinterTraceFunc | None, /) -> None: - """Set the tracing function.""" + def gettrace(self, /) -> _TkinterTraceFunc | None: ... + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ... # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS ALL_EVENTS: Final = -3 @@ -131,16 +125,7 @@ if sys.version_info >= (3, 13): sync: bool = False, use: str | None = None, /, - ): - """ - - wantTk - if false, then Tk_Init() doesn't get called - sync - if true, then pass -sync to wish - use - if not None, then pass -use to wish - """ + ): ... else: def create( @@ -153,22 +138,7 @@ else: sync: bool = False, use: str | None = None, /, - ): - """ - - wantTk - if false, then Tk_Init() doesn't get called - sync - if true, then pass -sync to wish - use - if not None, then pass -use to wish - """ - -def getbusywaitinterval() -> int: - """Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter.""" - -def setbusywaitinterval(new_val: int, /) -> None: - """Set the busy-wait interval in milliseconds between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. + ): ... - It should be set to a divisor of the maximum time between frames in an animation. - """ +def getbusywaitinterval() -> int: ... +def setbusywaitinterval(new_val: int, /) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi index 0ecc728570ce2..e9720f46692ce 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi @@ -1,65 +1,13 @@ -"""Debug module to trace memory blocks allocated by Python.""" - from collections.abc import Sequence from tracemalloc import _FrameTuple, _TraceTuple -def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: - """Get the traceback where the Python object obj was allocated. - - Return a tuple of (filename: str, lineno: int) tuples. - Return None if the tracemalloc module is disabled or did not - trace the allocation of the object. - """ - -def _get_traces() -> Sequence[_TraceTuple]: - """Get traces of all memory blocks allocated by Python. - - Return a list of (size: int, traceback: tuple) tuples. - traceback is a tuple of (filename: str, lineno: int) tuples. - - Return an empty list if the tracemalloc module is disabled. - """ - -def clear_traces() -> None: - """Clear traces of memory blocks allocated by Python.""" - -def get_traceback_limit() -> int: - """Get the maximum number of frames stored in the traceback of a trace. - - By default, a trace of an allocated memory block only stores - the most recent frame: the limit is 1. - """ - -def get_traced_memory() -> tuple[int, int]: - """Get the current size and peak size of memory blocks traced by tracemalloc. - - Returns a tuple: (current: int, peak: int). - """ - -def get_tracemalloc_memory() -> int: - """Get the memory usage in bytes of the tracemalloc module. - - This memory is used internally to trace memory allocations. - """ - -def is_tracing() -> bool: - """Return True if the tracemalloc module is tracing Python memory allocations.""" - -def reset_peak() -> None: - """Set the peak size of memory blocks traced by tracemalloc to the current size. - - Do nothing if the tracemalloc module is not tracing memory allocations. - """ - -def start(nframe: int = 1, /) -> None: - """Start tracing Python memory allocations. - - Also set the maximum number of frames stored in the traceback of a - trace to nframe. - """ - -def stop() -> None: - """Stop tracing Python memory allocations. - - Also clear traces of memory blocks allocated by Python. - """ +def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: ... +def _get_traces() -> Sequence[_TraceTuple]: ... +def clear_traces() -> None: ... +def get_traceback_limit() -> int: ... +def get_traced_memory() -> tuple[int, int]: ... +def get_tracemalloc_memory() -> int: ... +def is_tracing() -> bool: ... +def reset_peak() -> None: ... +def start(nframe: int = 1, /) -> None: ... +def stop() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_typeshed/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_typeshed/__init__.pyi index 25054b601a4f6..b786923880e13 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_typeshed/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_typeshed/__init__.pyi @@ -142,6 +142,9 @@ class SupportsIter(Protocol[_T_co]): class SupportsAiter(Protocol[_T_co]): def __aiter__(self) -> _T_co: ... +class SupportsLen(Protocol): + def __len__(self) -> int: ... + class SupportsLenAndGetItem(Protocol[_T_co]): def __len__(self) -> int: ... def __getitem__(self, k: int, /) -> _T_co: ... 
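The _typeshed hunk above adds a SupportsLen protocol rather than removing docstrings. A minimal usage sketch, with the protocol mirrored locally (the describe_size helper is hypothetical and not part of typeshed):

    from typing import Protocol

    class SupportsLen(Protocol):
        def __len__(self) -> int: ...

    def describe_size(obj: SupportsLen) -> str:
        # Any object implementing __len__ satisfies the protocol structurally:
        # list, dict, str, or a user-defined class.
        return f"{type(obj).__name__} with {len(obj)} item(s)"

    print(describe_size([1, 2, 3]))  # -> "list with 3 item(s)"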
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi index 10ab8833673ba..2dbc7b8552813 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi @@ -1,7 +1,3 @@ -"""_warnings provides basic warning filtering support. -It is a helper module to speed up interpreter start-up. -""" - import sys from typing import Any, overload @@ -18,23 +14,7 @@ if sys.version_info >= (3, 12): source: Any | None = None, *, skip_file_prefixes: tuple[str, ...] = (), - ) -> None: - """Issue a warning, or maybe ignore it or raise an exception. - - message - Text of the warning message. - category - The Warning category subclass. Defaults to UserWarning. - stacklevel - How far up the call stack to make this warning appear. A value of 2 for - example attributes the warning to the caller of the code calling warn(). - source - If supplied, the destroyed object which emitted a ResourceWarning - skip_file_prefixes - An optional tuple of module filename prefixes indicating frames to skip - during stacklevel computations for stack frame attribution. - """ - + ) -> None: ... @overload def warn( message: Warning, @@ -47,9 +27,7 @@ if sys.version_info >= (3, 12): else: @overload - def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: - """Issue a warning, or maybe ignore it or raise an exception.""" - + def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @@ -63,9 +41,7 @@ def warn_explicit( registry: dict[str | tuple[str, type[Warning], int], int] | None = None, module_globals: dict[str, Any] | None = None, source: Any | None = None, -) -> None: - """Issue a warning, or maybe ignore it or raise an exception.""" - +) -> None: ... @overload def warn_explicit( message: Warning, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi index 3d59e1c66c44b..a744340afaabd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi @@ -1,5 +1,3 @@ -"""Weak-reference support module.""" - from collections.abc import Callable from typing import Any, TypeVar, overload from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType, ref as ref @@ -7,20 +5,11 @@ from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyTy _C = TypeVar("_C", bound=Callable[..., Any]) _T = TypeVar("_T") -def getweakrefcount(object: Any, /) -> int: - """Return the number of weak references to 'object'.""" - -def getweakrefs(object: Any, /) -> list[Any]: - """Return a list of all weak reference objects pointing to 'object'.""" +def getweakrefcount(object: Any, /) -> int: ... +def getweakrefs(object: Any, /) -> list[Any]: ... # Return CallableProxyType if object is callable, ProxyType otherwise @overload -def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: - """Create a proxy object that weakly references 'object'. - - 'callback', if given, is called with a reference to the - proxy when 'object' is about to be finalized. 
- """ - +def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: ... @overload def proxy(object: _T, callback: Callable[[_T], Any] | None = None, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi index 7b5f0af1f73e9..dad1ed7a4fb5c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi @@ -45,8 +45,4 @@ class WeakSet(MutableSet[_T]): def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi index 0880f7404b635..d9e2c377b115a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi @@ -175,9 +175,7 @@ if sys.platform == "win32": if sys.version_info >= (3, 14): COPY_FILE_DIRECTORY: Final = 0x00000080 - def CloseHandle(handle: int, /) -> None: - """Close handle.""" - + def CloseHandle(handle: int, /) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload @@ -206,15 +204,7 @@ if sys.platform == "win32": security_attributes: int, /, ) -> int: ... - def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: - """Create an anonymous pipe. - - pipe_attrs - Ignored internally, can be None. - - Returns a 2-tuple of handles, to the read and write ends of the pipe. - """ - + def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: ... def CreateProcess( application_name: str | None, command_line: str | None, @@ -226,20 +216,7 @@ if sys.platform == "win32": current_directory: str | None, startup_info: Any, /, - ) -> tuple[int, int, int, int]: - """Create a new process and its primary thread. - - command_line - Can be str or None - proc_attrs - Ignored internally, can be None. - thread_attrs - Ignored internally, can be None. - - The return value is a tuple of the process handle, thread handle, - process ID, and thread ID. - """ - + ) -> tuple[int, int, int, int]: ... def DuplicateHandle( source_process_handle: int, source_handle: int, @@ -248,49 +225,16 @@ if sys.platform == "win32": inherit_handle: bool, options: int = 0, /, - ) -> int: - """Return a duplicate handle object. - - The duplicate handle refers to the same object as the original - handle. Therefore, any changes to the object are reflected - through both handles. - """ - + ) -> int: ... def ExitProcess(ExitCode: int, /) -> NoReturn: ... - def GetACP() -> int: - """Get the current Windows ANSI code page identifier.""" - + def GetACP() -> int: ... def GetFileType(handle: int) -> int: ... - def GetCurrentProcess() -> int: - """Return a handle object for the current process.""" - - def GetExitCodeProcess(process: int, /) -> int: - """Return the termination status of the specified process.""" - + def GetCurrentProcess() -> int: ... + def GetExitCodeProcess(process: int, /) -> int: ... def GetLastError() -> int: ... 
- def GetModuleFileName(module_handle: int, /) -> str: - """Return the fully-qualified path for the file that contains module. - - The module must have been loaded by the current process. - - The module parameter should be a handle to the loaded module - whose path is being requested. If this parameter is 0, - GetModuleFileName retrieves the path of the executable file - of the current process. - """ - - def GetStdHandle(std_handle: int, /) -> int: - """Return a handle to the specified standard device. - - std_handle - One of STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, or STD_ERROR_HANDLE. - - The integer associated with the handle object is returned. - """ - - def GetVersion() -> int: - """Return the version number of the current operating system.""" - + def GetModuleFileName(module_handle: int, /) -> str: ... + def GetStdHandle(std_handle: int, /) -> int: ... + def GetVersion() -> int: ... def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ... def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): @@ -306,18 +250,9 @@ if sys.platform == "win32": def SetNamedPipeHandleState( named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, / ) -> None: ... - def TerminateProcess(handle: int, exit_code: int, /) -> None: - """Terminate the specified process and all of its threads.""" - + def TerminateProcess(handle: int, exit_code: int, /) -> None: ... def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ... - def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: - """Wait for a single object. - - Wait until the specified object is in the signaled state or - the time-out interval elapses. The timeout value is specified - in milliseconds. - """ - + def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: ... def WaitNamedPipe(name: str, timeout: int, /) -> None: ... @overload def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @@ -327,51 +262,19 @@ if sys.platform == "win32": def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: - """OVERLAPPED structure wrapper""" - event: int def GetOverlappedResult(self, wait: bool, /) -> tuple[int, int]: ... def cancel(self) -> None: ... def getbuffer(self) -> bytes | None: ... if sys.version_info >= (3, 13): - def BatchedWaitForMultipleObjects(handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF) -> list[int]: - """Supports a larger number of handles than WaitForMultipleObjects - - Note that the handles may be waited on other threads, which could cause - issues for objects like mutexes that become associated with the thread - that was waiting for them. Objects may also be left signalled, even if - the wait fails. - - It is recommended to use WaitForMultipleObjects whenever possible, and - only switch to BatchedWaitForMultipleObjects for scenarios where you - control all the handles involved, such as your own thread pool or - files, and all wait objects are left unmodified by a wait (for example, - manual reset events, threads, and files/pipes). - - Overlapped handles returned from this module use manual reset events. - """ - + def BatchedWaitForMultipleObjects( + handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF + ) -> list[int]: ... 
def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ... def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ... - def GetLongPathName(path: str) -> str: - """Return the long version of the provided path. - - If the path is already in its long form, returns the same value. - - The path must already be a 'str'. If the type is not known, use - os.fsdecode before calling this function. - """ - - def GetShortPathName(path: str) -> str: - """Return the short version of the provided path. - - If the path is already in its short form, returns the same value. - - The path must already be a 'str'. If the type is not known, use - os.fsdecode before calling this function. - """ - + def GetLongPathName(path: str) -> str: ... + def GetShortPathName(path: str) -> str: ... def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def ReleaseMutex(mutex: int) -> None: ... @@ -379,14 +282,5 @@ if sys.platform == "win32": def SetEvent(event: int) -> None: ... if sys.version_info >= (3, 12): - def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: - """Copies a file from one name to a new name. - - This is implemented using the CopyFile2 API, which preserves all stat - and metadata information apart from security attributes. - - progress_routine is reserved for future use, but is currently not - implemented. Its value is ignored. - """ - + def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi index 3d4413ed11377..f5e98ef88bb9f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi @@ -1,5 +1,3 @@ -"""Implementation module for Zstandard compression.""" - from _typeshed import ReadableBuffer from collections.abc import Mapping from compression.zstd import CompressionParameter, DecompressionParameter @@ -44,19 +42,6 @@ _ZstdCompressorFlushFrame: TypeAlias = Literal[2] @final class ZstdCompressor: - """Create a compressor object for compressing data incrementally. - - level - The compression level to use. Defaults to COMPRESSION_LEVEL_DEFAULT. - options - A dict object that contains advanced compression parameters. - zstd_dict - A ZstdDict object, a pre-trained Zstandard dictionary. - - Thread-safe at method level. For one-shot compression, use the compress() - function instead. - """ - CONTINUE: Final = 0 FLUSH_BLOCK: Final = 1 FLUSH_FRAME: Final = 2 @@ -65,252 +50,48 @@ class ZstdCompressor: ) -> Self: ... def compress( self, /, data: ReadableBuffer, mode: _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 0 - ) -> bytes: - """Provide data to the compressor object. - - mode - Can be these 3 values ZstdCompressor.CONTINUE, - ZstdCompressor.FLUSH_BLOCK, ZstdCompressor.FLUSH_FRAME - - Return a chunk of compressed data if possible, or b'' otherwise. When you have - finished providing data to the compressor, call the flush() method to finish - the compression process. - """ - - def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: - """Finish the compression process. 
- - mode - Can be these 2 values ZstdCompressor.FLUSH_FRAME, - ZstdCompressor.FLUSH_BLOCK - - Flush any remaining data left in internal buffers. Since Zstandard data - consists of one or more independent frames, the compressor object can still - be used after this method is called. - """ - - def set_pledged_input_size(self, size: int | None, /) -> None: - """Set the uncompressed content size to be written into the frame header. - - size - The size of the uncompressed data to be provided to the compressor. - - This method can be used to ensure the header of the frame about to be written - includes the size of the data, unless the CompressionParameter.content_size_flag - is set to False. If last_mode != FLUSH_FRAME, then a RuntimeError is raised. - - It is important to ensure that the pledged data size matches the actual data - size. If they do not match the compressed output data may be corrupted and the - final chunk written may be lost. - """ - + ) -> bytes: ... + def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: ... + def set_pledged_input_size(self, size: int | None, /) -> None: ... @property - def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: - """The last mode used to this compressor object, its value can be .CONTINUE, - .FLUSH_BLOCK, .FLUSH_FRAME. Initialized to .FLUSH_FRAME. - - It can be used to get the current state of a compressor, such as, data - flushed, or a frame ended. - """ + def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: ... @final class ZstdDecompressor: - """Create a decompressor object for decompressing data incrementally. - - zstd_dict - A ZstdDict object, a pre-trained Zstandard dictionary. - options - A dict object that contains advanced decompression parameters. - - Thread-safe at method level. For one-shot decompression, use the decompress() - function instead. - """ - def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> Self: ... - def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: - """Decompress *data*, returning uncompressed bytes if possible, or b'' otherwise. - - data - A bytes-like object, Zstandard data to be decompressed. - max_length - Maximum size of returned data. When it is negative, the size of - output buffer is unlimited. When it is nonnegative, returns at - most max_length bytes of decompressed data. - - If *max_length* is nonnegative, returns at most *max_length* bytes of - decompressed data. If this limit is reached and further output can be - produced, *self.needs_input* will be set to ``False``. In this case, the next - call to *decompress()* may provide *data* as b'' to obtain more of the output. - - If all of the input data was decompressed and returned (either because this - was less than *max_length* bytes, or because *max_length* was negative), - *self.needs_input* will be set to True. - - Attempting to decompress data after the end of a frame is reached raises an - EOFError. Any data found after the end of the frame is ignored and saved in - the self.unused_data attribute. - """ - + def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property - def eof(self) -> bool: - """True means the end of the first frame has been reached. If decompress data - after that, an EOFError exception will be raised. - """ - + def eof(self) -> bool: ... 
@property - def needs_input(self) -> bool: - """If the max_length output limit in .decompress() method has been reached, - and the decompressor has (or may has) unconsumed input data, it will be set - to False. In this case, passing b'' to the .decompress() method may output - further data. - """ - + def needs_input(self) -> bool: ... @property - def unused_data(self) -> bytes: - """A bytes object of un-consumed input data. - - When ZstdDecompressor object stops after a frame is - decompressed, unused input data after the frame. Otherwise this will be b''. - """ + def unused_data(self) -> bytes: ... @final class ZstdDict: - """Represents a Zstandard dictionary. - - dict_content - The content of a Zstandard dictionary as a bytes-like object. - is_raw - If true, perform no checks on *dict_content*, useful for some - advanced cases. Otherwise, check that the content represents - a Zstandard dictionary created by the zstd library or CLI. - - The dictionary can be used for compression or decompression, and can be shared - by multiple ZstdCompressor or ZstdDecompressor objects. - """ - def __new__(cls, dict_content: bytes, /, *, is_raw: bool = False) -> Self: ... - def __len__(self, /) -> int: - """Return len(self).""" - + def __len__(self, /) -> int: ... @property - def as_digested_dict(self) -> tuple[Self, int]: - """Load as a digested dictionary to compressor. - - Pass this attribute as zstd_dict argument: - compress(dat, zstd_dict=zd.as_digested_dict) - - 1. Some advanced compression parameters of compressor may be overridden - by parameters of digested dictionary. - 2. ZstdDict has a digested dictionaries cache for each compression level. - It's faster when loading again a digested dictionary with the same - compression level. - 3. No need to use this for decompression. - """ - + def as_digested_dict(self) -> tuple[Self, int]: ... @property - def as_prefix(self) -> tuple[Self, int]: - """Load as a prefix to compressor/decompressor. - - Pass this attribute as zstd_dict argument: - compress(dat, zstd_dict=zd.as_prefix) - - 1. Prefix is compatible with long distance matching, while dictionary is not. - 2. It only works for the first frame, then the compressor/decompressor will - return to no prefix state. - 3. When decompressing, must use the same prefix as when compressing. - """ - + def as_prefix(self) -> tuple[Self, int]: ... @property - def as_undigested_dict(self) -> tuple[Self, int]: - """Load as an undigested dictionary to compressor. - - Pass this attribute as zstd_dict argument: - compress(dat, zstd_dict=zd.as_undigested_dict) - - 1. The advanced compression parameters of compressor will not be overridden. - 2. Loading an undigested dictionary is costly. If load an undigested dictionary - multiple times, consider reusing a compressor object. - 3. No need to use this for decompression. - """ - + def as_undigested_dict(self) -> tuple[Self, int]: ... @property - def dict_content(self) -> bytes: - """The content of a Zstandard dictionary, as a bytes object.""" - + def dict_content(self) -> bytes: ... @property - def dict_id(self) -> int: - """The Zstandard dictionary, an int between 0 and 2**32. + def dict_id(self) -> int: ... - A non-zero value represents an ordinary Zstandard dictionary, - conforming to the standardised format. - - A value of zero indicates a 'raw content' dictionary, - without any restrictions on format or content. - """ - -class ZstdError(Exception): - """An error occurred in the zstd library.""" +class ZstdError(Exception): ... 
def finalize_dict( custom_dict_bytes: bytes, samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, compression_level: int, / -) -> bytes: - """Finalize a Zstandard dictionary. - - custom_dict_bytes - Custom dictionary content. - samples_bytes - Concatenation of samples. - samples_sizes - Tuple of samples' sizes. - dict_size - The size of the dictionary. - compression_level - Optimize for a specific Zstandard compression level, 0 means default. - """ - -def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: - """Get Zstandard frame infomation from a frame header. - - frame_buffer - A bytes-like object, containing the header of a Zstandard frame. - """ - -def get_frame_size(frame_buffer: ReadableBuffer) -> int: - """Get the size of a Zstandard frame, including the header and optional checksum. - - frame_buffer - A bytes-like object, it should start from the beginning of a frame, - and contains at least one complete frame. - """ - -def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: - """Get CompressionParameter/DecompressionParameter bounds. - - parameter - The parameter to get bounds. - is_compress - True for CompressionParameter, False for DecompressionParameter. - """ - -def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: - """Set CompressionParameter and DecompressionParameter types for validity check. - - c_parameter_type - CompressionParameter IntEnum type object - d_parameter_type - DecompressionParameter IntEnum type object - """ - -def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: - """Train a Zstandard dictionary on sample data. - - samples_bytes - Concatenation of samples. - samples_sizes - Tuple of samples' sizes. - dict_size - The size of the dictionary. - """ +) -> bytes: ... +def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: ... +def get_frame_size(frame_buffer: ReadableBuffer) -> int: ... +def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: ... +def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: ... +def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: ... zstd_version: Final[str] zstd_version_number: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi index 04202fae9444c..c8cd549e30eca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi @@ -1,5 +1,3 @@ -"""Abstract Base Classes (ABCs) according to PEP 3119.""" - import _typeshed import sys from _typeshed import SupportsWrite @@ -14,19 +12,6 @@ _P = ParamSpec("_P") # These definitions have special processing in mypy class ABCMeta(type): - """Metaclass for defining Abstract Base Classes (ABCs). - - Use this metaclass to create an ABC. An ABC can be subclassed - directly, and then acts as a mix-in class. You can also register - unrelated concrete classes (even built-in classes) and unrelated - ABCs as 'virtual subclasses' -- these and their descendants will - be considered subclasses of the registering ABC by the built-in - issubclass() function, but the registering ABC won't show up in - their MRO (Method Resolution Order) nor will method - implementations defined by the registering ABC be callable (not - even via super()). 
- """ - __abstractmethods__: frozenset[str] if sys.version_info >= (3, 11): def __new__( @@ -37,118 +22,30 @@ class ABCMeta(type): mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any ) -> _typeshed.Self: ... - def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: - """Override for isinstance(instance, cls).""" - - def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: - """Override for issubclass(subclass, cls).""" - - def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: - """Debug helper to print the ABC registry.""" - - def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: - """Register a virtual subclass of an ABC. - - Returns the subclass, to allow usage as a class decorator. - """ - -def abstractmethod(funcobj: _FuncT) -> _FuncT: - """A decorator indicating abstract methods. - - Requires that the metaclass is ABCMeta or derived from it. A - class that has a metaclass derived from ABCMeta cannot be - instantiated unless all of its abstract methods are overridden. - The abstract methods can be called using any of the normal - 'super' call mechanisms. abstractmethod() may be used to declare - abstract methods for properties and descriptors. - - Usage: - - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, arg1, arg2, argN): - ... - """ + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... +def abstractmethod(funcobj: _FuncT) -> _FuncT: ... @deprecated("Deprecated since Python 3.3. Use `@classmethod` stacked on top of `@abstractmethod` instead.") class abstractclassmethod(classmethod[_T, _P, _R_co]): - """A decorator indicating abstract classmethods. - - Deprecated, use 'classmethod' with 'abstractmethod' instead: - - class C(ABC): - @classmethod - @abstractmethod - def my_abstract_classmethod(cls, ...): - ... - - """ - __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... @deprecated("Deprecated since Python 3.3. Use `@staticmethod` stacked on top of `@abstractmethod` instead.") class abstractstaticmethod(staticmethod[_P, _R_co]): - """A decorator indicating abstract staticmethods. - - Deprecated, use 'staticmethod' with 'abstractmethod' instead: - - class C(ABC): - @staticmethod - @abstractmethod - def my_abstract_staticmethod(...): - ... - - """ - __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[_P, _R_co]) -> None: ... @deprecated("Deprecated since Python 3.3. Use `@property` stacked on top of `@abstractmethod` instead.") class abstractproperty(property): - """A decorator indicating abstract properties. - - Deprecated, use 'property' with 'abstractmethod' instead: - - class C(ABC): - @property - @abstractmethod - def my_abstract_property(self): - ... - - """ - __isabstractmethod__: Literal[True] class ABC(metaclass=ABCMeta): - """Helper class that provides a standard way to create an ABC using - inheritance. - """ - __slots__ = () -def get_cache_token() -> object: - """Returns the current ABC cache token. - - The token is an opaque object (supporting equality testing) identifying the - current version of the ABC cache for virtual subclasses. The token changes - with every call to register() on any ABC. 
- """ +def get_cache_token() -> object: ... if sys.version_info >= (3, 10): - def update_abstractmethods(cls: type[_T]) -> type[_T]: - """Recalculate the set of abstract methods of an abstract class. - - If a class has had one of its abstract methods implemented after the - class was created, the method will not be considered implemented until - this function is called. Alternatively, if a new abstract method has been - added to the class, it will only be considered an abstract method of the - class after this function is called. - - This function should be called before any use is made of the class, - usually in class decorators that add methods to the subject class. - - Returns cls, to allow usage as a class decorator. - - If cls is not an instance of ABCMeta, does nothing. - """ + def update_abstractmethods(cls: type[_T]) -> type[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi index 4d624b8c6c187..bfe12c6af2b0b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi @@ -1,139 +1,3 @@ -"""Stuff to parse AIFF-C and AIFF files. - -Unless explicitly stated otherwise, the description below is true -both for AIFF-C files and AIFF files. - -An AIFF-C file has the following structure. - - +-----------------+ - | FORM | - +-----------------+ - | | - +----+------------+ - | | AIFC | - | +------------+ - | | | - | | . | - | | . | - | | . | - +----+------------+ - -An AIFF file has the string "AIFF" instead of "AIFC". - -A chunk consists of an identifier (4 bytes) followed by a size (4 bytes, -big endian order), followed by the data. The size field does not include -the size of the 8 byte header. - -The following chunk types are recognized. - - FVER - (AIFF-C only). - MARK - <# of markers> (2 bytes) - list of markers: - (2 bytes, must be > 0) - (4 bytes) - ("pstring") - COMM - <# of channels> (2 bytes) - <# of sound frames> (4 bytes) - (2 bytes) - (10 bytes, IEEE 80-bit extended - floating point) - in AIFF-C files only: - (4 bytes) - ("pstring") - SSND - (4 bytes, not used by this program) - (4 bytes, not used by this program) - - -A pstring consists of 1 byte length, a string of characters, and 0 or 1 -byte pad to make the total length even. - -Usage. - -Reading AIFF files: - f = aifc.open(file, 'r') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods read(), seek(), and close(). -In some types of audio files, if the setpos() method is not used, -the seek() method is not necessary. 
- -This returns an instance of a class with the following public methods: - getnchannels() -- returns number of audio channels (1 for - mono, 2 for stereo) - getsampwidth() -- returns sample width in bytes - getframerate() -- returns sampling frequency - getnframes() -- returns number of audio frames - getcomptype() -- returns compression type ('NONE' for AIFF files) - getcompname() -- returns human-readable version of - compression type ('not compressed' for AIFF files) - getparams() -- returns a namedtuple consisting of all of the - above in the above order - getmarkers() -- get the list of marks in the audio file or None - if there are no marks - getmark(id) -- get mark with the specified id (raises an error - if the mark does not exist) - readframes(n) -- returns at most n frames of audio - rewind() -- rewind to the beginning of the audio stream - setpos(pos) -- seek to the specified position - tell() -- return the current position - close() -- close the instance (make it unusable) -The position returned by tell(), the position given to setpos() and -the position of marks are all compatible and have nothing to do with -the actual position in the file. -The close() method is called automatically when the class instance -is destroyed. - -Writing AIFF files: - f = aifc.open(file, 'w') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods write(), tell(), seek(), and -close(). - -This returns an instance of a class with the following public methods: - aiff() -- create an AIFF file (AIFF-C default) - aifc() -- create an AIFF-C file - setnchannels(n) -- set the number of channels - setsampwidth(n) -- set the sample width - setframerate(n) -- set the frame rate - setnframes(n) -- set the number of frames - setcomptype(type, name) - -- set the compression type and the - human-readable compression type - setparams(tuple) - -- set all parameters at once - setmark(id, pos, name) - -- add specified mark to the list of marks - tell() -- return current position in output file (useful - in combination with setmark()) - writeframesraw(data) - -- write audio frames without pathing up the - file header - writeframes(data) - -- write audio frames and patch up the file header - close() -- patch up the file header and close the - output file -You should set the parameters before the first writeframesraw or -writeframes. The total number of frames does not need to be set, -but when it is set to the correct value, the header does not have to -be patched up. -It is best to first set all parameters, perhaps possibly the -compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes(b'') or -close() to patch up the sizes in the header. -Marks can be added anytime. If there are any marks, you must call -close() after all frames have been written. -The close() method is called automatically when the class instance -is destroyed. - -When a file is opened with the extension '.aiff', an AIFF file is -written, otherwise an AIFF-C file is written. This default can be -changed by calling aiff() or aifc() before the first writeframes or -writeframesraw. -""" - from types import TracebackType from typing import IO, Any, Literal, NamedTuple, overload from typing_extensions import Self, TypeAlias @@ -143,8 +7,6 @@ __all__ = ["Error", "open"] class Error(Exception): ... 
class _aifc_params(NamedTuple): - """_aifc_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" - nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi index 685bd2ea8687e..3679dc29daaa0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi @@ -1,5 +1,3 @@ -"""Helpers for introspecting and wrapping annotations.""" - import sys from typing import Literal @@ -30,18 +28,6 @@ if sys.version_info >= (3, 14): @final class ForwardRef: - """Wrapper that holds a forward reference. - - Constructor arguments: - * arg: a string representing the code to be evaluated. - * module: the module where the forward reference was created. - Must be a string, not a module object. - * owner: The owning object (module, class, or function). - * is_argument: Does nothing, retained for compatibility. - * is_class: True if the forward reference was created in class scope. - - """ - __slots__ = ( "__forward_is_argument__", "__forward_is_class__", @@ -71,12 +57,7 @@ if sys.version_info >= (3, 14): type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, owner: object = None, format: Literal[Format.STRING], - ) -> str: - """Evaluate the forward reference and return the value. - - If the forward reference cannot be evaluated, raise an exception. - """ - + ) -> str: ... @overload def evaluate( self, @@ -116,12 +97,7 @@ if sys.version_info >= (3, 14): def __ror__(self, other: Any) -> types.UnionType: ... @overload - def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: - """Call an evaluate function. Evaluate functions are normally generated for - the value of type aliases and the bounds, constraints, and defaults of - type parameter objects. - """ - + def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: ... @overload def call_evaluate_function( evaluate: EvaluateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None @@ -129,40 +105,16 @@ if sys.version_info >= (3, 14): @overload def call_evaluate_function(evaluate: EvaluateFunc, format: Format, *, owner: object = None) -> AnnotationForm: ... @overload - def call_annotate_function(annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None) -> dict[str, str]: - """Call an __annotate__ function. __annotate__ functions are normally - generated by the compiler to defer the evaluation of annotations. They - can be called with any of the format arguments in the Format enum, but - compiler-generated __annotate__ functions only support the VALUE format. - This function provides additional functionality to call __annotate__ - functions with the FORWARDREF and STRING formats. - - *annotate* must be an __annotate__ function, which takes a single argument - and returns a dict of annotations. - - *format* must be a member of the Format enum or one of the corresponding - integer values. - - *owner* can be the object that owns the annotations (i.e., the module, - class, or function that the __annotate__ function derives from). With the - FORWARDREF format, it is used to provide better evaluation capabilities - on the generated ForwardRef objects. 
- - """ - + def call_annotate_function( + annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None + ) -> dict[str, str]: ... @overload def call_annotate_function( annotate: AnnotateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None ) -> dict[str, AnnotationForm | ForwardRef]: ... @overload def call_annotate_function(annotate: AnnotateFunc, format: Format, *, owner: object = None) -> dict[str, AnnotationForm]: ... - def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: - """Retrieve the annotate function from a class namespace dictionary. - - Return None if the namespace does not contain an annotate function. - This is useful in metaclass ``__new__`` methods to retrieve the annotate function. - """ - + def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: ... @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -171,58 +123,7 @@ if sys.version_info >= (3, 14): locals: Mapping[str, object] | None = None, eval_str: bool = False, format: Literal[Format.STRING], - ) -> dict[str, str]: - """Compute the annotations dict for an object. - - obj may be a callable, class, module, or other object with - __annotate__ or __annotations__ attributes. - Passing any other object raises TypeError. - - The *format* parameter controls the format in which annotations are returned, - and must be a member of the Format enum or its integer equivalent. - For the VALUE format, the __annotations__ is tried first; if it - does not exist, the __annotate__ function is called. The - FORWARDREF format uses __annotations__ if it exists and can be - evaluated, and otherwise falls back to calling the __annotate__ function. - The SOURCE format tries __annotate__ first, and falls back to - using __annotations__, stringified using annotations_to_string(). - - This function handles several details for you: - - * If eval_str is true, values of type str will - be un-stringized using eval(). This is intended - for use with stringized annotations - ("from __future__ import annotations"). - * If obj doesn't have an annotations dict, returns an - empty dict. (Functions and methods always have an - annotations dict; classes, modules, and other types of - callables may not.) - * Ignores inherited annotations on classes. If a class - doesn't have its own annotations dict, returns an empty dict. - * All accesses to object members and dict values are done - using getattr() and dict.get() for safety. - * Always, always, always returns a freshly-created dict. - - eval_str controls whether or not values of type str are replaced - with the result of calling eval() on those values: - - * If eval_str is true, eval() is called on values of type str. - * If eval_str is false (the default), values of type str are unchanged. - - globals and locals are passed in to eval(); see the documentation - for eval() for more information. If either globals or locals is - None, this function may replace that value with a context-specific - default, contingent on type(obj): - - * If obj is a module, globals defaults to obj.__dict__. - * If obj is a class, globals defaults to - sys.modules[obj.__module__].__dict__ and locals - defaults to the obj class namespace. - * If obj is a callable, globals defaults to obj.__globals__, - although if obj is a wrapped function (using - functools.update_wrapper()) it is first unwrapped. - """ - + ) -> dict[str, str]: ... 
@overload def get_annotations( obj: Any, @@ -241,17 +142,5 @@ if sys.version_info >= (3, 14): eval_str: bool = False, format: Format = Format.VALUE, # noqa: Y011 ) -> dict[str, AnnotationForm]: ... - def type_repr(value: object) -> str: - """Convert a Python value to a format suitable for use with the STRING format. - - This is intended as a helper for tools that support the STRING format but do - not have access to the code that originally produced the annotations. It uses - repr() for most objects. - - """ - - def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: - """Convert an annotation dict containing values to approximately the STRING format. - - Always returns a fresh a dictionary. - """ + def type_repr(value: object) -> str: ... + def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi index 52ab6bbf013b6..bce20e09250c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi @@ -1,66 +1,3 @@ -"""Command-line parsing library - -This module is an optparse-inspired command-line parsing library that: - - - handles both optional and positional arguments - - produces highly informative usage messages - - supports parsers that dispatch to sub-parsers - -The following is a simple usage example that sums integers from the -command-line and writes the result to a file:: - - parser = argparse.ArgumentParser( - description='sum the integers at the command line') - parser.add_argument( - 'integers', metavar='int', nargs='+', type=int, - help='an integer to be summed') - parser.add_argument( - '--log', - help='the file where the sum should be written') - args = parser.parse_args() - with (open(args.log, 'w') if args.log is not None - else contextlib.nullcontext(sys.stdout)) as log: - log.write('%s' % sum(args.integers)) - -The module contains the following public classes: - - - ArgumentParser -- The main entry point for command-line parsing. As the - example above shows, the add_argument() method is used to populate - the parser with actions for optional and positional arguments. Then - the parse_args() method is invoked to convert the args at the - command-line into an object with attributes. - - - ArgumentError -- The exception raised by ArgumentParser objects when - there are errors with the parser's actions. Errors raised while - parsing the command-line are caught by ArgumentParser and emitted - as command-line messages. - - - FileType -- A factory for defining types of files to be created. As the - example above shows, instances of FileType are typically passed as - the type= argument of add_argument() calls. Deprecated since - Python 3.14. - - - Action -- The base class for parser actions. Typically actions are - selected by passing strings like 'store_true' or 'append_const' to - the action= argument of add_argument(). However, for greater - customization of ArgumentParser actions, subclasses of Action may - be defined and passed as the action= argument. - - - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, - ArgumentDefaultsHelpFormatter -- Formatter classes which - may be passed as the formatter_class= argument to the - ArgumentParser constructor. 
HelpFormatter is the default, - RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser - not to change the formatting for help text, and - ArgumentDefaultsHelpFormatter adds information about argument defaults - to the help. - -All other classes in this module are considered implementation details. -(Also note that HelpFormatter and RawDescriptionHelpFormatter are only -considered public as object names -- the API of the formatter objects is -still considered an implementation detail.) -""" - import sys from _typeshed import SupportsWrite, sentinel from collections.abc import Callable, Generator, Iterable, Sequence @@ -104,26 +41,12 @@ ZERO_OR_MORE: Final = "*" _UNRECOGNIZED_ARGS_ATTR: Final = "_unrecognized_args" # undocumented class ArgumentError(Exception): - """An error from creating or using an argument (optional or positional). - - The string value of this exception is the message, augmented with - information about the argument that caused it. - """ - argument_name: str | None message: str def __init__(self, argument: Action | None, message: str) -> None: ... # undocumented class _AttributeHolder: - """Abstract base class that provides __repr__. - - The __repr__ method returns a string in the format:: - ClassName(attr=name, attr=name, ...) - The attributes are determined either by a class-level attribute, - '_kwarg_names', or by inspecting the instance __dict__. - """ - def _get_kwargs(self) -> list[tuple[str, Any]]: ... def _get_args(self) -> list[Any]: ... @@ -167,12 +90,7 @@ class _ActionsContainer: dest: str | None = ..., version: str = ..., **kwargs: Any, - ) -> Action: - """ - add_argument(dest, ..., name=value, ...) - add_argument(option_string, option_string, ..., name=value, ...) - """ - + ) -> Action: ... def add_argument_group( self, title: str | None = None, @@ -199,30 +117,6 @@ class _FormatterClass(Protocol): def __call__(self, *, prog: str) -> HelpFormatter: ... class ArgumentParser(_AttributeHolder, _ActionsContainer): - """Object for parsing command line strings into Python objects. - - Keyword Arguments: - - prog -- The name of the program (default: - ``os.path.basename(sys.argv[0])``) - - usage -- A usage message (default: auto-generated from arguments) - - description -- A description of what the program does - - epilog -- Text following the argument descriptions - - parents -- Parsers whose arguments should be copied into this one - - formatter_class -- HelpFormatter class for printing help messages - - prefix_chars -- Characters that prefix optional arguments - - fromfile_prefix_chars -- Characters that prefix files containing - additional arguments - - argument_default -- The default value for all arguments - - conflict_handler -- String indicating how to handle conflicts - - add_help -- Add a -h/-help option - - allow_abbrev -- Allow long options to be abbreviated unambiguously - - exit_on_error -- Determines whether or not ArgumentParser exits with - error info when an error occurs - - suggest_on_error - Enables suggestions for mistyped argument choices - and subparser names (default: ``False``) - - color - Allow color output in help messages (default: ``False``) - """ - prog: str usage: str | None epilog: str | None @@ -327,16 +221,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def parse_known_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... 
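The ArgumentParser constructor keywords and the parse_args/parse_known_args overloads stubbed above correspond to the usual runtime pattern; a minimal sketch (the program and option names are invented for illustration):

    import argparse

    parser = argparse.ArgumentParser(prog="summer", description="sum the integers given on the command line")
    parser.add_argument("integers", metavar="int", nargs="+", type=int, help="an integer to be summed")
    parser.add_argument("--verbose", action="store_true", help="also print the operands")

    args = parser.parse_args(["1", "2", "3", "--verbose"])
    if args.verbose:
        print("operands:", args.integers)
    print(sum(args.integers))  # 6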
- def error(self, message: str) -> NoReturn: - """error(message: string) - - Prints a usage message incorporating the message to stderr and - exits. - - If you override this in a subclass, it should not return -- it - should either exit or raise an exception. - """ - + def error(self, message: str) -> NoReturn: ... @overload def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... @overload @@ -374,12 +259,6 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def _print_message(self, message: str, file: SupportsWrite[str] | None = None) -> None: ... class HelpFormatter: - """Formatter for generating usage messages and argument help strings. - - Only the name of this class is considered a public API. All the methods - provided by the class are considered an implementation detail. - """ - # undocumented _prog: str _indent_increment: int @@ -440,86 +319,12 @@ class HelpFormatter: def _get_default_metavar_for_optional(self, action: Action) -> str: ... def _get_default_metavar_for_positional(self, action: Action) -> str: ... -class RawDescriptionHelpFormatter(HelpFormatter): - """Help message formatter which retains any formatting in descriptions. - - Only the name of this class is considered a public API. All the methods - provided by the class are considered an implementation detail. - """ - -class RawTextHelpFormatter(RawDescriptionHelpFormatter): - """Help message formatter which retains formatting of all help text. - - Only the name of this class is considered a public API. All the methods - provided by the class are considered an implementation detail. - """ - -class ArgumentDefaultsHelpFormatter(HelpFormatter): - """Help message formatter which adds default values to argument help. - - Only the name of this class is considered a public API. All the methods - provided by the class are considered an implementation detail. - """ - -class MetavarTypeHelpFormatter(HelpFormatter): - """Help message formatter which uses the argument 'type' as the default - metavar value (instead of the argument 'dest') - - Only the name of this class is considered a public API. All the methods - provided by the class are considered an implementation detail. - """ +class RawDescriptionHelpFormatter(HelpFormatter): ... +class RawTextHelpFormatter(RawDescriptionHelpFormatter): ... +class ArgumentDefaultsHelpFormatter(HelpFormatter): ... +class MetavarTypeHelpFormatter(HelpFormatter): ... class Action(_AttributeHolder): - """Information about how to convert command line strings to Python objects. - - Action objects are used by an ArgumentParser to represent the information - needed to parse a single argument from one or more strings from the - command line. The keyword arguments to the Action constructor are also - all attributes of Action instances. - - Keyword Arguments: - - - option_strings -- A list of command-line option strings which - should be associated with this action. - - - dest -- The name of the attribute to hold the created object(s) - - - nargs -- The number of command-line arguments that should be - consumed. By default, one argument will be consumed and a single - value will be produced. Other values include: - - N (an integer) consumes N arguments (and produces a list) - - '?' 
consumes zero or one arguments - - '*' consumes zero or more arguments (and produces a list) - - '+' consumes one or more arguments (and produces a list) - Note that the difference between the default and nargs=1 is that - with the default, a single value will be produced, while with - nargs=1, a list containing a single value will be produced. - - - const -- The value to be produced if the option is specified and the - option uses an action that takes no values. - - - default -- The value to be produced if the option is not specified. - - - type -- A callable that accepts a single string argument, and - returns the converted value. The standard Python types str, int, - float, and complex are useful examples of such callables. If None, - str is used. - - - choices -- A container of values that should be allowed. If not None, - after a command-line argument has been converted to the appropriate - type, an exception will be raised if it is not a member of this - collection. - - - required -- True if the action must always be specified at the - command line. This is only meaningful for optional command-line - arguments. - - - help -- The help string describing the argument. - - - metavar -- The name to be used for the option's argument with the - help string. If None, the 'dest' value will be used as the name. - """ - option_strings: Sequence[str] dest: str nargs: int | str | None @@ -655,12 +460,6 @@ else: ) -> None: ... class Namespace(_AttributeHolder): - """Simple object for storing attributes. - - Implements equality by attribute names and values, and provides a simple - string representation. - """ - def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... @@ -671,22 +470,6 @@ class Namespace(_AttributeHolder): if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14. Open files after parsing arguments instead.") class FileType: - """Deprecated factory for creating file object types - - Instances of FileType are typically passed as type= arguments to the - ArgumentParser add_argument() method. - - Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. - """ - # undocumented _mode: str _bufsize: int @@ -699,22 +482,6 @@ if sys.version_info >= (3, 14): else: class FileType: - """Factory for creating file object types - - Instances of FileType are typically passed as type= arguments to the - ArgumentParser add_argument() method. - - Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. 
- """ - # undocumented _mode: str _bufsize: int @@ -1054,8 +821,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): def _get_subactions(self) -> list[Action]: ... # undocumented -class ArgumentTypeError(Exception): - """An error from trying to convert a command line string to a type.""" +class ArgumentTypeError(Exception): ... # undocumented def _get_action_name(argument: Action | None) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi index a7b41229c92d3..a6b0344a1e2ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi @@ -1,9 +1,3 @@ -"""This module defines an object type which can efficiently represent -an array of basic values: characters, integers, floating-point -numbers. Arrays are sequence types and behave very much like lists, -except that the type of objects stored in them is constrained. -""" - import sys from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Iterable, MutableSequence @@ -25,72 +19,10 @@ typecodes: str @disjoint_base class array(MutableSequence[_T]): - """array(typecode [, initializer]) -> array - - Return a new array whose items are restricted by typecode, and - initialized from the optional initializer value, which must be a list, - string or iterable over elements of the appropriate type. - - Arrays represent basic values and behave very much like lists, except - the type of objects stored in them is constrained. The type is specified - at object creation time by using a type code, which is a single character. - The following type codes are defined: - - Type code C Type Minimum size in bytes - 'b' signed integer 1 - 'B' unsigned integer 1 - 'u' Unicode character 2 (see note) - 'h' signed integer 2 - 'H' unsigned integer 2 - 'i' signed integer 2 - 'I' unsigned integer 2 - 'l' signed integer 4 - 'L' unsigned integer 4 - 'q' signed integer 8 (see note) - 'Q' unsigned integer 8 (see note) - 'f' floating-point 4 - 'd' floating-point 8 - - NOTE: The 'u' typecode corresponds to Python's unicode character. On - narrow builds this is 2-bytes on wide builds this is 4-bytes. - - NOTE: The 'q' and 'Q' type codes are only available if the platform - C compiler used to build Python supports 'long long', or, on Windows, - '__int64'. 
- - Methods: - - append() -- append a new item to the end of the array - buffer_info() -- return information giving the current memory info - byteswap() -- byteswap all the items of the array - count() -- return number of occurrences of an object - extend() -- extend array by appending multiple elements from an iterable - fromfile() -- read items from a file object - fromlist() -- append items from the list - frombytes() -- append items from the string - index() -- return index of first occurrence of an object - insert() -- insert a new item into the array at a provided position - pop() -- remove and return item (default last) - remove() -- remove first occurrence of an object - reverse() -- reverse the order of the items in the array - tofile() -- write all items to a file object - tolist() -- return the array converted to an ordinary list - tobytes() -- return the array converted to a string - - Attributes: - - typecode -- the typecode character used to create the array - itemsize -- the length in bytes of one array item - """ - @property - def typecode(self) -> _TypeCode: - """the typecode character used to create the array""" - + def typecode(self) -> _TypeCode: ... @property - def itemsize(self) -> int: - """the size, in bytes, of one array item""" - + def itemsize(self) -> int: ... @overload def __new__( cls: type[array[int]], typecode: _IntTypeCode, initializer: bytes | bytearray | Iterable[int] = ..., / @@ -120,138 +52,55 @@ class array(MutableSequence[_T]): def __new__(cls, typecode: str, initializer: Iterable[_T], /) -> Self: ... @overload def __new__(cls, typecode: str, initializer: bytes | bytearray = ..., /) -> Self: ... - def append(self, v: _T, /) -> None: - """Append new value v to the end of the array.""" - - def buffer_info(self) -> tuple[int, int]: - """Return a tuple (address, length) giving the current memory address and the length in items of the buffer used to hold array's contents. - - The length should be multiplied by the itemsize attribute to calculate - the buffer length in bytes. - """ - - def byteswap(self) -> None: - """Byteswap all items of the array. - - If the items in the array are not 1, 2, 4, or 8 bytes in size, RuntimeError is - raised. - """ - - def count(self, v: _T, /) -> int: - """Return number of occurrences of v in the array.""" - - def extend(self, bb: Iterable[_T], /) -> None: - """Append items to the end of the array.""" - - def frombytes(self, buffer: ReadableBuffer, /) -> None: - """Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method.""" - - def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: - """Read n objects from the file object f and append them to the end of the array.""" - - def fromlist(self, list: list[_T], /) -> None: - """Append items to array from list.""" - - def fromunicode(self, ustr: str, /) -> None: - """Extends this array with data from the unicode string ustr. - - The array must be a unicode type array; otherwise a ValueError is raised. - Use array.frombytes(ustr.encode(...)) to append Unicode data to an array of - some other type. - """ + def append(self, v: _T, /) -> None: ... + def buffer_info(self) -> tuple[int, int]: ... + def byteswap(self) -> None: ... + def count(self, v: _T, /) -> int: ... + def extend(self, bb: Iterable[_T], /) -> None: ... + def frombytes(self, buffer: ReadableBuffer, /) -> None: ... + def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: ... 
+ def fromlist(self, list: list[_T], /) -> None: ... + def fromunicode(self, ustr: str, /) -> None: ... if sys.version_info >= (3, 10): - def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: - """Return index of first occurrence of v in the array. - - Raise ValueError if the value is not present. - """ + def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: ... else: - def index(self, v: _T, /) -> int: # type: ignore[override] - """Return index of first occurrence of v in the array.""" - - def insert(self, i: int, v: _T, /) -> None: - """Insert a new item v into the array before position i.""" - - def pop(self, i: int = -1, /) -> _T: - """Return the i-th element and delete it from the array. - - i defaults to -1. - """ - - def remove(self, v: _T, /) -> None: - """Remove the first occurrence of v in the array.""" + def index(self, v: _T, /) -> int: ... # type: ignore[override] - def tobytes(self) -> bytes: - """Convert the array to an array of machine values and return the bytes representation.""" + def insert(self, i: int, v: _T, /) -> None: ... + def pop(self, i: int = -1, /) -> _T: ... + def remove(self, v: _T, /) -> None: ... + def tobytes(self) -> bytes: ... + def tofile(self, f: SupportsWrite[bytes], /) -> None: ... + def tolist(self) -> list[_T]: ... + def tounicode(self) -> str: ... - def tofile(self, f: SupportsWrite[bytes], /) -> None: - """Write all items (as machine values) to the file object f.""" - - def tolist(self) -> list[_T]: - """Convert array to an ordinary list with the same items.""" - - def tounicode(self) -> str: - """Extends this array with data from the unicode string ustr. - - Convert the array to a unicode string. The array must be a unicode type array; - otherwise a ValueError is raised. Use array.tobytes().decode() to obtain a - unicode string from an array of some other type. - """ __hash__: ClassVar[None] # type: ignore[assignment] - def __contains__(self, value: object, /) -> bool: - """Return bool(key in self).""" - - def __len__(self) -> int: - """Return len(self).""" - + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> _T: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> _T: ... @overload def __getitem__(self, key: slice, /) -> array[_T]: ... @overload # type: ignore[override] - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... @overload def __setitem__(self, key: slice, value: array[_T], /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key].""" - - def __add__(self, value: array[_T], /) -> array[_T]: - """Return self+value.""" - + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __add__(self, value: array[_T], /) -> array[_T]: ... def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: array[_T], /) -> bool: ... def __gt__(self, value: array[_T], /) -> bool: ... - def __iadd__(self, value: array[_T], /) -> Self: # type: ignore[override] - """Implement self+=value.""" - - def __imul__(self, value: int, /) -> Self: - """Implement self*=value.""" - + def __iadd__(self, value: array[_T], /) -> Self: ... # type: ignore[override] + def __imul__(self, value: int, /) -> Self: ... def __le__(self, value: array[_T], /) -> bool: ... def __lt__(self, value: array[_T], /) -> bool: ... 
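For readers skimming the now docstring-free array stubs, a short sketch of the methods involved; typecode "i" is the signed-integer code from the table removed above:

    from array import array

    a = array("i", [1, 2, 3])   # 'i' -> signed int
    a.append(4)
    a.extend([5, 6])

    raw = a.tobytes()           # len(raw) == len(a) * a.itemsize
    b = array("i")
    b.frombytes(raw)            # round-trip through the machine representation
    assert b.tolist() == [1, 2, 3, 4, 5, 6]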
- def __mul__(self, value: int, /) -> array[_T]: - """Return self*value.""" - - def __rmul__(self, value: int, /) -> array[_T]: - """Return value*self.""" - - def __copy__(self) -> array[_T]: - """Return a copy of the array.""" - - def __deepcopy__(self, unused: Any, /) -> array[_T]: - """Return a copy of the array.""" - - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object.""" + def __mul__(self, value: int, /) -> array[_T]: ... + def __rmul__(self, value: int, /) -> array[_T]: ... + def __copy__(self) -> array[_T]: ... + def __deepcopy__(self, unused: Any, /) -> array[_T]: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... ArrayType = array diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi index 772f165a98f00..e66e609ee6645 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi @@ -1,26 +1,3 @@ -""" -The `ast` module helps Python applications to process trees of the Python -abstract syntax grammar. The abstract syntax itself might change with -each Python release; this module helps to find out programmatically what -the current grammar looks like and allows modifications of it. - -An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as -a flag to the `compile()` builtin function or by using the `parse()` -function from this module. The result will be a tree of objects whose -classes all inherit from `ast.AST`. - -A modified abstract syntax tree can be compiled into a Python code object -using the built-in `compile()` function. - -Additionally various helper functions are provided that make working with -the trees simpler. The main intention of the helper functions and this -module in general is to provide an easy to use interface for libraries -that work tightly with the python syntax (template engines for example). - -:copyright: Copyright 2008 by Armin Ronacher. -:license: Python License. -""" - import ast import builtins import os @@ -63,8 +40,7 @@ if sys.version_info >= (3, 12): _field_types: ClassVar[dict[str, Any]] if sys.version_info >= (3, 14): - def __replace__(self) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self) -> Self: ... else: class AST: @@ -73,16 +49,9 @@ else: _attributes: ClassVar[tuple[str, ...]] _fields: ClassVar[tuple[str, ...]] -class mod(AST): - """mod = Module(stmt* body, type_ignore* type_ignores) - | Interactive(stmt* body) - | Expression(expr body) - | FunctionType(expr* argtypes, expr returns) - """ +class mod(AST): ... class Module(mod): - """Module(stmt* body, type_ignore* type_ignores)""" - if sys.version_info >= (3, 10): __match_args__ = ("body", "type_ignores") body: list[stmt] @@ -93,12 +62,9 @@ class Module(mod): def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) 
-> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ... class Interactive(mod): - """Interactive(stmt* body)""" - if sys.version_info >= (3, 10): __match_args__ = ("body",) body: list[stmt] @@ -108,24 +74,18 @@ class Interactive(mod): def __init__(self, body: list[stmt]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: list[stmt] = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, body: list[stmt] = ...) -> Self: ... class Expression(mod): - """Expression(expr body)""" - if sys.version_info >= (3, 10): __match_args__ = ("body",) body: expr def __init__(self, body: expr) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: expr = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, body: expr = ...) -> Self: ... class FunctionType(mod): - """FunctionType(expr* argtypes, expr returns)""" - if sys.version_info >= (3, 10): __match_args__ = ("argtypes", "returns") argtypes: list[expr] @@ -139,40 +99,9 @@ class FunctionType(mod): def __init__(self, argtypes: list[expr], returns: expr) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ... class stmt(AST): - """stmt = FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) - | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) - | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) - | Return(expr? value) - | Delete(expr* targets) - | Assign(expr* targets, expr value, string? type_comment) - | TypeAlias(expr name, type_param* type_params, expr value) - | AugAssign(expr target, operator op, expr value) - | AnnAssign(expr target, expr annotation, expr? value, int simple) - | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | While(expr test, stmt* body, stmt* orelse) - | If(expr test, stmt* body, stmt* orelse) - | With(withitem* items, stmt* body, string? type_comment) - | AsyncWith(withitem* items, stmt* body, string? type_comment) - | Match(expr subject, match_case* cases) - | Raise(expr? exc, expr? cause) - | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | Assert(expr test, expr? msg) - | Import(alias* names) - | ImportFrom(identifier? module, alias* names, int? level) - | Global(identifier* names) - | Nonlocal(identifier* names) - | Expr(expr value) - | Pass - | Break - | Continue - """ - lineno: int col_offset: int end_lineno: int | None @@ -180,12 +109,9 @@ class stmt(AST): def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... 
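The mod subclasses above (Module, Interactive, Expression, FunctionType) are what ast.parse produces for its different modes; a brief sketch:

    import ast

    print(type(ast.parse("x = 1")).__name__)                                  # Module (mode="exec" is the default)
    print(type(ast.parse("x = 1", mode="single")).__name__)                   # Interactive
    print(type(ast.parse("x + 1", mode="eval")).__name__)                     # Expression
    print(type(ast.parse("(int, str) -> bool", mode="func_type")).__name__)   # FunctionType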
if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... class FunctionDef(stmt): - """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" - if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -260,12 +186,9 @@ class FunctionDef(stmt): type_comment: str | None = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class AsyncFunctionDef(stmt): - """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" - if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -340,12 +263,9 @@ class AsyncFunctionDef(stmt): type_comment: str | None = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class ClassDef(stmt): - """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params)""" - if sys.version_info >= (3, 12): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") elif sys.version_info >= (3, 10): @@ -401,24 +321,18 @@ class ClassDef(stmt): decorator_list: list[expr] = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Return(stmt): - """Return(expr? value)""" - if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Delete(stmt): - """Delete(expr* targets)""" - if sys.version_info >= (3, 10): __match_args__ = ("targets",) targets: list[expr] @@ -428,12 +342,9 @@ class Delete(stmt): def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Assign(stmt): - """Assign(expr* targets, expr value, string? 
type_comment)""" - if sys.version_info >= (3, 10): __match_args__ = ("targets", "value", "type_comment") targets: list[expr] @@ -456,13 +367,10 @@ class Assign(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... if sys.version_info >= (3, 12): class TypeAlias(stmt): - """TypeAlias(expr name, type_param* type_params, expr value)""" - __match_args__ = ("name", "type_params", "value") name: Name type_params: list[type_param] @@ -489,12 +397,9 @@ if sys.version_info >= (3, 12): type_params: list[type_param] = ..., value: expr = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class AugAssign(stmt): - """AugAssign(expr target, operator op, expr value)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") target: Name | Attribute | Subscript @@ -512,12 +417,9 @@ class AugAssign(stmt): op: operator = ..., value: expr = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class AnnAssign(stmt): - """AnnAssign(expr target, expr annotation, expr? value, int simple)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") target: Name | Attribute | Subscript @@ -553,12 +455,9 @@ class AnnAssign(stmt): value: expr | None = ..., simple: int = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class For(stmt): - """For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -597,12 +496,9 @@ class For(stmt): orelse: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class AsyncFor(stmt): - """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -641,12 +537,9 @@ class AsyncFor(stmt): orelse: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class While(stmt): - """While(expr test, stmt* body, stmt* orelse)""" - if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -662,12 +555,9 @@ class While(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... 
class If(stmt): - """If(expr test, stmt* body, stmt* orelse)""" - if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -683,12 +573,9 @@ class If(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class With(stmt): - """With(withitem* items, stmt* body, string? type_comment)""" - if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -715,12 +602,9 @@ class With(stmt): body: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class AsyncWith(stmt): - """AsyncWith(withitem* items, stmt* body, string? type_comment)""" - if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -747,12 +631,9 @@ class AsyncWith(stmt): body: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Raise(stmt): - """Raise(expr? exc, expr? cause)""" - if sys.version_info >= (3, 10): __match_args__ = ("exc", "cause") exc: expr | None @@ -760,12 +641,9 @@ class Raise(stmt): def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Try(stmt): - """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" - if sys.version_info >= (3, 10): __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] @@ -800,13 +678,10 @@ class Try(stmt): orelse: list[stmt] = ..., finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... if sys.version_info >= (3, 11): class TryStar(stmt): - """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" - __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] handlers: list[ExceptHandler] @@ -840,12 +715,9 @@ if sys.version_info >= (3, 11): orelse: list[stmt] = ..., finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Assert(stmt): - """Assert(expr test, expr? msg)""" - if sys.version_info >= (3, 10): __match_args__ = ("test", "msg") test: expr @@ -853,12 +725,9 @@ class Assert(stmt): def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
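The many __replace__ overloads in this file share one runtime behaviour; a minimal sketch, assuming Python 3.14+ where AST nodes implement the replace protocol:

    import ast

    tree = ast.parse("x = 1")
    assign = tree.body[0]                        # an ast.Assign node

    # Fields not passed as keywords are carried over from the original node.
    updated = assign.__replace__(value=ast.Constant(2))
    print(ast.unparse(updated))                  # x = 2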
class Import(stmt): - """Import(alias* names)""" - if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[alias] @@ -868,12 +737,9 @@ class Import(stmt): def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class ImportFrom(stmt): - """ImportFrom(identifier? module, alias* names, int? level)""" - if sys.version_info >= (3, 10): __match_args__ = ("module", "names", "level") module: str | None @@ -897,12 +763,9 @@ class ImportFrom(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Global(stmt): - """Global(identifier* names)""" - if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -912,12 +775,9 @@ class Global(stmt): def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Nonlocal(stmt): - """Nonlocal(identifier* names)""" - if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -927,62 +787,22 @@ class Nonlocal(stmt): def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Expr(stmt): - """Expr(expr value)""" - if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" - -class Pass(stmt): - """Pass""" - -class Break(stmt): - """Break""" + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... -class Continue(stmt): - """Continue""" +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... class expr(AST): - """expr = BoolOp(boolop op, expr* values) - | NamedExpr(expr target, expr value) - | BinOp(expr left, operator op, expr right) - | UnaryOp(unaryop op, expr operand) - | Lambda(arguments args, expr body) - | IfExp(expr test, expr body, expr orelse) - | Dict(expr?* keys, expr* values) - | Set(expr* elts) - | ListComp(expr elt, comprehension* generators) - | SetComp(expr elt, comprehension* generators) - | DictComp(expr key, expr value, comprehension* generators) - | GeneratorExp(expr elt, comprehension* generators) - | Await(expr value) - | Yield(expr? 
value) - | YieldFrom(expr value) - | Compare(expr left, cmpop* ops, expr* comparators) - | Call(expr func, expr* args, keyword* keywords) - | FormattedValue(expr value, int conversion, expr? format_spec) - | Interpolation(expr value, constant str, int conversion, expr? format_spec) - | JoinedStr(expr* values) - | TemplateStr(expr* values) - | Constant(constant value, string? kind) - | Attribute(expr value, identifier attr, expr_context ctx) - | Subscript(expr value, expr slice, expr_context ctx) - | Starred(expr value, expr_context ctx) - | Name(identifier id, expr_context ctx) - | List(expr* elts, expr_context ctx) - | Tuple(expr* elts, expr_context ctx) - | Slice(expr? lower, expr? upper, expr? step) - """ - lineno: int col_offset: int end_lineno: int | None @@ -990,12 +810,9 @@ class expr(AST): def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... class BoolOp(expr): - """BoolOp(boolop op, expr* values)""" - if sys.version_info >= (3, 10): __match_args__ = ("op", "values") op: boolop @@ -1006,12 +823,9 @@ class BoolOp(expr): def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class NamedExpr(expr): - """NamedExpr(expr target, expr value)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "value") target: Name @@ -1019,12 +833,9 @@ class NamedExpr(expr): def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class BinOp(expr): - """BinOp(expr left, operator op, expr right)""" - if sys.version_info >= (3, 10): __match_args__ = ("left", "op", "right") left: expr @@ -1033,12 +844,11 @@ class BinOp(expr): def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class UnaryOp(expr): - """UnaryOp(unaryop op, expr operand)""" - if sys.version_info >= (3, 10): __match_args__ = ("op", "operand") op: unaryop @@ -1046,12 +856,9 @@ class UnaryOp(expr): def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ... 
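As an illustration of the expr subclasses stubbed here (BinOp, Constant and friends), a hand-built expression can be compiled once source locations are filled in; a minimal sketch:

    import ast

    # 1 + 2 as an expression tree.
    node = ast.Expression(body=ast.BinOp(left=ast.Constant(1), op=ast.Add(), right=ast.Constant(2)))
    ast.fix_missing_locations(node)               # compile() requires lineno/col_offset on every node
    print(eval(compile(node, "<ast>", "eval")))   # 3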
if sys.version_info >= (3, 14): - def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Lambda(expr): - """Lambda(arguments args, expr body)""" - if sys.version_info >= (3, 10): __match_args__ = ("args", "body") args: arguments @@ -1059,12 +866,9 @@ class Lambda(expr): def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class IfExp(expr): - """IfExp(expr test, expr body, expr orelse)""" - if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -1073,12 +877,11 @@ class IfExp(expr): def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class Dict(expr): - """Dict(expr?* keys, expr* values)""" - if sys.version_info >= (3, 10): __match_args__ = ("keys", "values") keys: list[expr | None] @@ -1089,12 +892,11 @@ class Dict(expr): def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class Set(expr): - """Set(expr* elts)""" - if sys.version_info >= (3, 10): __match_args__ = ("elts",) elts: list[expr] @@ -1104,12 +906,9 @@ class Set(expr): def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class ListComp(expr): - """ListComp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1120,12 +919,11 @@ class ListComp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... 
class SetComp(expr): - """SetComp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1136,12 +934,11 @@ class SetComp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class DictComp(expr): - """DictComp(expr key, expr value, comprehension* generators)""" - if sys.version_info >= (3, 10): __match_args__ = ("key", "value", "generators") key: expr @@ -1157,12 +954,9 @@ class DictComp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class GeneratorExp(expr): - """GeneratorExp(expr elt, comprehension* generators)""" - if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1173,48 +967,38 @@ class GeneratorExp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class Await(expr): - """Await(expr value)""" - if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Yield(expr): - """Yield(expr? value)""" - if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class YieldFrom(expr): - """YieldFrom(expr value)""" - if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
class Compare(expr): - """Compare(expr left, cmpop* ops, expr* comparators)""" - if sys.version_info >= (3, 10): __match_args__ = ("left", "ops", "comparators") left: expr @@ -1230,12 +1014,9 @@ class Compare(expr): if sys.version_info >= (3, 14): def __replace__( self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Call(expr): - """Call(expr func, expr* args, keyword* keywords)""" - if sys.version_info >= (3, 10): __match_args__ = ("func", "args", "keywords") func: expr @@ -1251,12 +1032,9 @@ class Call(expr): if sys.version_info >= (3, 14): def __replace__( self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class FormattedValue(expr): - """FormattedValue(expr value, int conversion, expr? format_spec)""" - if sys.version_info >= (3, 10): __match_args__ = ("value", "conversion", "format_spec") value: expr @@ -1267,12 +1045,9 @@ class FormattedValue(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class JoinedStr(expr): - """JoinedStr(expr* values)""" - if sys.version_info >= (3, 10): __match_args__ = ("values",) values: list[expr] @@ -1282,22 +1057,16 @@ class JoinedStr(expr): def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... if sys.version_info >= (3, 14): class TemplateStr(expr): - """TemplateStr(expr* values)""" - __match_args__ = ("values",) values: list[expr] def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Interpolation(expr): - """Interpolation(expr value, constant str, int conversion, expr? format_spec)""" - __match_args__ = ("value", "str", "conversion", "format_spec") value: expr str: builtins.str @@ -1319,8 +1088,7 @@ if sys.version_info >= (3, 14): conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... if sys.version_info >= (3, 10): from types import EllipsisType @@ -1331,8 +1099,6 @@ else: _ConstantValue: typing_extensions.TypeAlias = str | bytes | bool | int | float | complex | None | ellipsis # noqa: F821 class Constant(expr): - """Constant(constant value, string? kind)""" - if sys.version_info >= (3, 10): __match_args__ = ("value", "kind") value: _ConstantValue @@ -1341,17 +1107,13 @@ class Constant(expr): # Aliases for value, for backwards compatibility @property @deprecated("Removed in Python 3.14. 
Use `value` instead.") - def n(self) -> _ConstantValue: - """Deprecated. Use value instead.""" - + def n(self) -> _ConstantValue: ... @n.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def n(self, value: _ConstantValue) -> None: ... @property @deprecated("Removed in Python 3.14. Use `value` instead.") - def s(self) -> _ConstantValue: - """Deprecated. Use value instead.""" - + def s(self) -> _ConstantValue: ... @s.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def s(self, value: _ConstantValue) -> None: ... @@ -1359,12 +1121,9 @@ class Constant(expr): def __init__(self, value: _ConstantValue, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Attribute(expr): - """Attribute(expr value, identifier attr, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("value", "attr", "ctx") value: expr @@ -1375,12 +1134,9 @@ class Attribute(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., attr: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Subscript(expr): - """Subscript(expr value, expr slice, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("value", "slice", "ctx") value: expr @@ -1391,12 +1147,9 @@ class Subscript(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., slice: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class Starred(expr): - """Starred(expr value, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("value", "ctx") value: expr @@ -1404,12 +1157,9 @@ class Starred(expr): def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Name(expr): - """Name(identifier id, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("id", "ctx") id: str @@ -1417,12 +1167,9 @@ class Name(expr): def __init__(self, id: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class List(expr): - """List(expr* elts, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1433,12 +1180,9 @@ class List(expr): def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... 
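Name, List and Tuple above all carry an expr_context (the Load/Store/Del classes further down); a short sketch of how the contexts are used when building an assignment by hand:

    import ast

    target = ast.Name(id="pair", ctx=ast.Store())                        # assignment targets use Store
    value = ast.Tuple(elts=[ast.Constant(1), ast.Constant(2)], ctx=ast.Load())
    module = ast.Module(body=[ast.Assign(targets=[target], value=value)], type_ignores=[])
    ast.fix_missing_locations(module)
    print(ast.unparse(module))                                           # roughly: pair = (1, 2)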
if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class Tuple(expr): - """Tuple(expr* elts, expr_context ctx)""" - if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1450,16 +1194,12 @@ class Tuple(expr): def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... @deprecated("Deprecated since Python 3.9.") -class slice(AST): - """Deprecated AST node class.""" +class slice(AST): ... class Slice(expr): - """Slice(expr? lower, expr? upper, expr? step)""" - if sys.version_info >= (3, 10): __match_args__ = ("lower", "upper", "step") lower: expr | None @@ -1472,151 +1212,68 @@ class Slice(expr): if sys.version_info >= (3, 14): def __replace__( self, *, lower: expr | None = ..., upper: expr | None = ..., step: expr | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... @deprecated("Deprecated since Python 3.9. Use `ast.Tuple` instead.") class ExtSlice(slice): - """Deprecated AST node class. Use ast.Tuple instead.""" - def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_Attributes]) -> Tuple: ... # type: ignore[misc] @deprecated("Deprecated since Python 3.9. Use the index value directly instead.") class Index(slice): - """Deprecated AST node class. Use the index value directly instead.""" - def __new__(cls, value: expr, **kwargs: Unpack[_Attributes]) -> expr: ... # type: ignore[misc] -class expr_context(AST): - """expr_context = Load | Store | Del""" +class expr_context(AST): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugLoad(expr_context): - """Deprecated AST node class. Unused in Python 3.""" +class AugLoad(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugStore(expr_context): - """Deprecated AST node class. Unused in Python 3.""" +class AugStore(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Param(expr_context): - """Deprecated AST node class. Unused in Python 3.""" +class Param(expr_context): ... @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Suite(mod): - """Deprecated AST node class. 
Unused in Python 3.""" - -class Load(expr_context): - """Load""" - -class Store(expr_context): - """Store""" - -class Del(expr_context): - """Del""" - -class boolop(AST): - """boolop = And | Or""" - -class And(boolop): - """And""" - -class Or(boolop): - """Or""" - -class operator(AST): - """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv""" - -class Add(operator): - """Add""" - -class Sub(operator): - """Sub""" - -class Mult(operator): - """Mult""" - -class MatMult(operator): - """MatMult""" - -class Div(operator): - """Div""" - -class Mod(operator): - """Mod""" - -class Pow(operator): - """Pow""" - -class LShift(operator): - """LShift""" - -class RShift(operator): - """RShift""" - -class BitOr(operator): - """BitOr""" - -class BitXor(operator): - """BitXor""" - -class BitAnd(operator): - """BitAnd""" - -class FloorDiv(operator): - """FloorDiv""" - -class unaryop(AST): - """unaryop = Invert | Not | UAdd | USub""" - -class Invert(unaryop): - """Invert""" - -class Not(unaryop): - """Not""" - -class UAdd(unaryop): - """UAdd""" - -class USub(unaryop): - """USub""" - -class cmpop(AST): - """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn""" - -class Eq(cmpop): - """Eq""" - -class NotEq(cmpop): - """NotEq""" - -class Lt(cmpop): - """Lt""" - -class LtE(cmpop): - """LtE""" - -class Gt(cmpop): - """Gt""" - -class GtE(cmpop): - """GtE""" - -class Is(cmpop): - """Is""" - -class IsNot(cmpop): - """IsNot""" - -class In(cmpop): - """In""" - -class NotIn(cmpop): - """NotIn""" +class Suite(mod): ... + +class Load(expr_context): ... +class Store(expr_context): ... +class Del(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class Sub(operator): ... +class Mult(operator): ... +class MatMult(operator): ... +class Div(operator): ... +class Mod(operator): ... +class Pow(operator): ... +class LShift(operator): ... +class RShift(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class BitAnd(operator): ... +class FloorDiv(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class NotEq(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class In(cmpop): ... +class NotIn(cmpop): ... class comprehension(AST): - """comprehension(expr target, expr iter, expr* ifs, int is_async)""" - if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "ifs", "is_async") target: expr @@ -1632,12 +1289,9 @@ class comprehension(AST): def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ... class excepthandler(AST): - """excepthandler = ExceptHandler(expr? type, identifier? 
name, stmt* body)""" - lineno: int col_offset: int end_lineno: int | None @@ -1647,12 +1301,9 @@ class excepthandler(AST): if sys.version_info >= (3, 14): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ... - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class ExceptHandler(excepthandler): - """ExceptHandler(expr? type, identifier? name, stmt* body)""" - if sys.version_info >= (3, 10): __match_args__ = ("type", "name", "body") type: expr | None @@ -1673,12 +1324,9 @@ class ExceptHandler(excepthandler): if sys.version_info >= (3, 14): def __replace__( self, *, type: expr | None = ..., name: str | None = ..., body: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class arguments(AST): - """arguments(arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults)""" - if sys.version_info >= (3, 10): __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") posonlyargs: list[arg] @@ -1747,12 +1395,9 @@ class arguments(AST): kw_defaults: list[expr | None] = ..., kwarg: arg | None = ..., defaults: list[expr] = ..., - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class arg(AST): - """arg(identifier arg, expr? annotation, string? type_comment)""" - lineno: int col_offset: int end_lineno: int | None @@ -1769,12 +1414,9 @@ class arg(AST): if sys.version_info >= (3, 14): def __replace__( self, *, arg: str = ..., annotation: expr | None = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class keyword(AST): - """keyword(identifier? arg, expr value)""" - lineno: int col_offset: int end_lineno: int | None @@ -1789,12 +1431,9 @@ class keyword(AST): def __init__(self, arg: str | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class alias(AST): - """alias(identifier name, identifier? asname)""" - name: str asname: str | None if sys.version_info >= (3, 10): @@ -1810,12 +1449,9 @@ class alias(AST): def __init__(self, name: str, asname: str | None = None) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... class withitem(AST): - """withitem(expr context_expr, expr? optional_vars)""" - if sys.version_info >= (3, 10): __match_args__ = ("context_expr", "optional_vars") context_expr: expr @@ -1823,21 +1459,10 @@ class withitem(AST): def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) 
-> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ... if sys.version_info >= (3, 10): class pattern(AST): - """pattern = MatchValue(expr value) - | MatchSingleton(constant value) - | MatchSequence(pattern* patterns) - | MatchMapping(expr* keys, pattern* patterns, identifier? rest) - | MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) - | MatchStar(identifier? name) - | MatchAs(pattern? pattern, identifier? name) - | MatchOr(pattern* patterns) - """ - lineno: int col_offset: int end_lineno: int @@ -1847,12 +1472,9 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ... - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class match_case(AST): - """match_case(pattern pattern, expr? guard, stmt* body)""" - __match_args__ = ("pattern", "guard", "body") pattern: ast.pattern guard: expr | None @@ -1866,12 +1488,9 @@ if sys.version_info >= (3, 10): def __init__(self, pattern: ast.pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ... class Match(stmt): - """Match(expr subject, match_case* cases)""" - __match_args__ = ("subject", "cases") subject: expr cases: list[match_case] @@ -1881,34 +1500,27 @@ if sys.version_info >= (3, 10): def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__( + self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes] + ) -> Self: ... class MatchValue(pattern): - """MatchValue(expr value)""" - __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... class MatchSingleton(pattern): - """MatchSingleton(constant value)""" - __match_args__ = ("value",) value: bool | None def __init__(self, value: bool | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... 
class MatchSequence(pattern): - """MatchSequence(pattern* patterns)""" - __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -1917,12 +1529,9 @@ if sys.version_info >= (3, 10): def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... class MatchMapping(pattern): - """MatchMapping(expr* keys, pattern* patterns, identifier? rest)""" - __match_args__ = ("keys", "patterns", "rest") keys: list[expr] patterns: list[pattern] @@ -1948,12 +1557,9 @@ if sys.version_info >= (3, 10): patterns: list[pattern] = ..., rest: str | None = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class MatchClass(pattern): - """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns)""" - __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") cls: expr patterns: list[pattern] @@ -1987,23 +1593,17 @@ if sys.version_info >= (3, 10): kwd_attrs: list[str] = ..., kwd_patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class MatchStar(pattern): - """MatchStar(identifier? name)""" - __match_args__ = ("name",) name: str | None def __init__(self, name: str | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... class MatchAs(pattern): - """MatchAs(pattern? pattern, identifier? name)""" - __match_args__ = ("pattern", "name") pattern: ast.pattern | None name: str | None @@ -2014,12 +1614,9 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__( self, *, pattern: ast.pattern | None = ..., name: str | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class MatchOr(pattern): - """MatchOr(pattern* patterns)""" - __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -2028,15 +1625,11 @@ if sys.version_info >= (3, 10): def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... -class type_ignore(AST): - """type_ignore = TypeIgnore(int lineno, string tag)""" +class type_ignore(AST): ... class TypeIgnore(type_ignore): - """TypeIgnore(int lineno, string tag)""" - if sys.version_info >= (3, 10): __match_args__ = ("lineno", "tag") lineno: int @@ -2044,16 +1637,10 @@ class TypeIgnore(type_ignore): def __init__(self, lineno: int, tag: str) -> None: ... 
if sys.version_info >= (3, 14): - def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ... if sys.version_info >= (3, 12): class type_param(AST): - """type_param = TypeVar(identifier name, expr? bound, expr? default_value) - | ParamSpec(identifier name, expr? default_value) - | TypeVarTuple(identifier name, expr? default_value) - """ - lineno: int col_offset: int end_lineno: int @@ -2061,12 +1648,9 @@ if sys.version_info >= (3, 12): def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ... class TypeVar(type_param): - """TypeVar(identifier name, expr? bound, expr? default_value)""" - if sys.version_info >= (3, 13): __match_args__ = ("name", "bound", "default_value") else: @@ -2089,12 +1673,9 @@ if sys.version_info >= (3, 12): bound: expr | None = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class ParamSpec(type_param): - """ParamSpec(identifier name, expr? default_value)""" - if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -2109,12 +1690,9 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... class TypeVarTuple(type_param): - """TypeVarTuple(identifier name, expr? default_value)""" - if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -2129,8 +1707,7 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields.""" + ) -> Self: ... if sys.version_info >= (3, 14): @type_check_only @@ -2144,32 +1721,22 @@ else: if sys.version_info < (3, 14): @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Num(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - def __new__(cls, n: complex, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Str(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - def __new__(cls, s: str, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Bytes(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - def __new__(cls, s: bytes, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class NameConstant(Constant, metaclass=_ABC): - """Deprecated AST node class. 
Use ast.Constant instead""" - def __new__(cls, value: _ConstantValue, kind: str | None, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Ellipsis(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead""" - def __new__(cls, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] # everything below here is defined in ast.py @@ -2177,26 +1744,30 @@ if sys.version_info < (3, 14): _T = _TypeVar("_T", bound=AST) if sys.version_info >= (3, 13): + @overload + def parse( + source: _T, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec", "eval", "func_type", "single"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + optimize: Literal[-1, 0, 1, 2] = -1, + ) -> _T: ... @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = "", + filename: str | bytes | os.PathLike[Any] = "", mode: Literal["exec"] = "exec", *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> Module: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - + ) -> Module: ... @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["eval"], *, type_comments: bool = False, @@ -2206,7 +1777,7 @@ if sys.version_info >= (3, 13): @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["func_type"], *, type_comments: bool = False, @@ -2216,7 +1787,7 @@ if sys.version_info >= (3, 13): @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["single"], *, type_comments: bool = False, @@ -2253,7 +1824,7 @@ if sys.version_info >= (3, 13): @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = "", + filename: str | bytes | os.PathLike[Any] = "", mode: str = "exec", *, type_comments: bool = False, @@ -2262,25 +1833,28 @@ if sys.version_info >= (3, 13): ) -> mod: ... else: + @overload + def parse( + source: _T, + filename: str | bytes | os.PathLike[Any] = "", + mode: Literal["exec", "eval", "func_type", "single"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> _T: ... @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = "", + filename: str | bytes | os.PathLike[Any] = "", mode: Literal["exec"] = "exec", *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> Module: - """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ - + ) -> Module: ... 
@overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["eval"], *, type_comments: bool = False, @@ -2289,7 +1863,7 @@ else: @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["func_type"], *, type_comments: bool = False, @@ -2298,7 +1872,7 @@ else: @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any], + filename: str | bytes | os.PathLike[Any], mode: Literal["single"], *, type_comments: bool = False, @@ -2331,22 +1905,14 @@ else: @overload def parse( source: str | ReadableBuffer, - filename: str | ReadableBuffer | os.PathLike[Any] = "", + filename: str | bytes | os.PathLike[Any] = "", mode: str = "exec", *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, ) -> mod: ... -def literal_eval(node_or_string: str | AST) -> Any: - """ - Evaluate an expression node or a string containing only a Python - expression. The string or node provided may only consist of the following - Python literal structures: strings, bytes, numbers, tuples, lists, dicts, - sets, booleans, and None. - - Caution: A complex expression can overflow the C stack and cause a crash. - """ +def literal_eval(node_or_string: str | AST) -> Any: ... if sys.version_info >= (3, 13): def dump( @@ -2356,136 +1922,30 @@ if sys.version_info >= (3, 13): *, indent: int | str | None = None, show_empty: bool = False, - ) -> str: - """ - Return a formatted dump of the tree in node. This is mainly useful for - debugging purposes. If annotate_fields is true (by default), - the returned string will show the names and the values for fields. - If annotate_fields is false, the result string will be more compact by - omitting unambiguous field names. Attributes such as line - numbers and column offsets are not dumped by default. If this is wanted, - include_attributes can be set to true. If indent is a non-negative - integer or string, then the tree will be pretty-printed with that indent - level. None (the default) selects the single line representation. - If show_empty is False, then empty lists and fields that are None - will be omitted from the output for better readability. - """ + ) -> str: ... else: def dump( node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None - ) -> str: - """ - Return a formatted dump of the tree in node. This is mainly useful for - debugging purposes. If annotate_fields is true (by default), - the returned string will show the names and the values for fields. - If annotate_fields is false, the result string will be more compact by - omitting unambiguous field names. Attributes such as line - numbers and column offsets are not dumped by default. If this is wanted, - include_attributes can be set to true. If indent is a non-negative - integer or string, then the tree will be pretty-printed with that indent - level. None (the default) selects the single line representation. - """ - -def copy_location(new_node: _T, old_node: AST) -> _T: - """ - Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` - attributes) from *old_node* to *new_node* if possible, and return *new_node*. 
- """ - -def fix_missing_locations(node: _T) -> _T: - """ - When you compile a node tree with compile(), the compiler expects lineno and - col_offset attributes for every node that supports them. This is rather - tedious to fill in for generated nodes, so this helper adds these attributes - recursively where not already set, by setting them to the values of the - parent node. It works recursively starting at *node*. - """ - -def increment_lineno(node: _T, n: int = 1) -> _T: - """ - Increment the line number and end line number of each node in the tree - starting at *node* by *n*. This is useful to "move code" to a different - location in a file. - """ - -def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: - """ - Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` - that is present on *node*. - """ - -def iter_child_nodes(node: AST) -> Iterator[AST]: - """ - Yield all direct child nodes of *node*, that is, all fields that are nodes - and all items of fields that are lists of nodes. - """ - -def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: - """ - Return the docstring for the given node or None if no docstring can - be found. If the node provided does not have docstrings a TypeError - will be raised. - - If *clean* is `True`, all tabs are expanded to spaces and any whitespace - that can be uniformly removed from the second line onwards is removed. - """ - -def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: - """Get source code segment of the *source* that generated *node*. - - If some location information (`lineno`, `end_lineno`, `col_offset`, - or `end_col_offset`) is missing, return None. - - If *padded* is `True`, the first line of a multi-line statement will - be padded with spaces to match its original position. - """ - -def walk(node: AST) -> Iterator[AST]: - """ - Recursively yield all descendant nodes in the tree starting at *node* - (including *node* itself), in no specified order. This is useful if you - only want to modify nodes in place and don't care about the context. - """ + ) -> str: ... -if sys.version_info >= (3, 14): - def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: - """Recursively compares two ASTs. +def copy_location(new_node: _T, old_node: AST) -> _T: ... +def fix_missing_locations(node: _T) -> _T: ... +def increment_lineno(node: _T, n: int = 1) -> _T: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... +def iter_child_nodes(node: AST) -> Iterator[AST]: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... +def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... +def walk(node: AST) -> Iterator[AST]: ... - compare_attributes affects whether AST attributes are considered - in the comparison. If compare_attributes is False (default), then - attributes are ignored. Otherwise they must all be equal. This - option is useful to check whether the ASTs are structurally equal but - might differ in whitespace or similar details. - """ +if sys.version_info >= (3, 14): + def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... class NodeVisitor: - """ - A node visitor base class that walks the abstract syntax tree and calls a - visitor function for every node found. This function may return a value - which is forwarded by the `visit` method. 
- - This class is meant to be subclassed, with the subclass adding visitor - methods. - - Per default the visitor functions for the nodes are ``'visit_'`` + - class name of the node. So a `TryFinally` node visit function would - be `visit_TryFinally`. This behavior can be changed by overriding - the `visit` method. If no visitor function exists for a node - (return value `None`) the `generic_visit` visitor is used instead. - - Don't use the `NodeVisitor` if you want to apply changes to nodes during - traversing. For this a special visitor exists (`NodeTransformer`) that - allows modifications. - """ - # All visit methods below can be overwritten by subclasses and return an # arbitrary value, which is passed to the caller. - def visit(self, node: AST) -> Any: - """Visit a node.""" - - def generic_visit(self, node: AST) -> Any: - """Called if no explicit visitor function exists for a node.""" + def visit(self, node: AST) -> Any: ... + def generic_visit(self, node: AST) -> Any: ... # The following visit methods are not defined on NodeVisitor, but can # be implemented by subclasses and are called during a visit if defined. def visit_Module(self, node: Module) -> Any: ... @@ -2625,41 +2085,6 @@ class NodeVisitor: def visit_Ellipsis(self, node: Ellipsis) -> Any: ... # type: ignore[deprecated] class NodeTransformer(NodeVisitor): - """ - A :class:`NodeVisitor` subclass that walks the abstract syntax tree and - allows modification of nodes. - - The `NodeTransformer` will walk the AST and use the return value of the - visitor methods to replace or remove the old node. If the return value of - the visitor method is ``None``, the node will be removed from its location, - otherwise it is replaced with the return value. The return value may be the - original node in which case no replacement takes place. - - Here is an example transformer that rewrites all occurrences of name lookups - (``foo``) to ``data['foo']``:: - - class RewriteName(NodeTransformer): - - def visit_Name(self, node): - return Subscript( - value=Name(id='data', ctx=Load()), - slice=Constant(value=node.id), - ctx=node.ctx - ) - - Keep in mind that if the node you're operating on has child nodes you must - either transform the child nodes yourself or call the :meth:`generic_visit` - method for the node first. - - For nodes that were part of a collection of statements (that applies to all - statement nodes), the visitor may also return a list of nodes rather than - just a single node. - - Usually you use the transformer like this:: - - node = YourTransformer().visit(node) - """ - def generic_visit(self, node: AST) -> AST: ... # TODO: Override the visit_* methods with better return types. # The usual return type is AST | None, but Iterable[AST] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi index 45a778d76e172..79a70d1c1ec8d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi @@ -1,24 +1,3 @@ -"""A class supporting chat-style (command/response) protocols. - -This class adds support for 'chat' style protocols - where one side -sends a 'command', and the other sends a response (examples would be -the common internet protocols - smtp, nntp, ftp, etc..). - -The handle_read() method looks at the input stream for the current -'terminator' (usually '\\r\\n' for single-line responses, '\\r\\n.\\r\\n' -for multi-line output), calling self.found_terminator() on its -receipt. 
- -for example: -Say you build an async nntp client using this class. At the start -of the connection, you'll have self.terminator set to '\\r\\n', in -order to process the single-line greeting. Just before issuing a -'LIST' command you'll set it to '\\r\\n.\\r\\n'. The output of the LIST -command will be accumulated (using your own 'collect_incoming_data' -method) up to the terminator, and then control will be returned to -you - by calling your self.found_terminator() method. -""" - import asyncore from abc import abstractmethod @@ -27,27 +6,16 @@ class simple_producer: def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): - """This is an abstract class. You must derive from this class, and add - the two methods collect_incoming_data() and found_terminator() - """ - ac_in_buffer_size: int ac_out_buffer_size: int @abstractmethod def collect_incoming_data(self, data: bytes) -> None: ... @abstractmethod def found_terminator(self) -> None: ... - def set_terminator(self, term: bytes | int | None) -> None: - """Set the input delimiter. - - Can be a fixed string of any length, an integer, or None. - """ - + def set_terminator(self, term: bytes | int | None) -> None: ... def get_terminator(self) -> bytes | int | None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... - def close_when_done(self) -> None: - """automatically close this channel once the outgoing queue is empty""" - + def close_when_done(self) -> None: ... def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi index a8732a22deec2..23cf57aaac335 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi @@ -1,5 +1,3 @@ -"""The asyncio package, tracking PEP 3156.""" - # This condition is so big, it's clearer to keep to platform condition in two blocks # Can't NOQA on a specific line: https://github.com/plinss/flake8-noqa/issues/22 import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi index 82ab82d7c5940..1f493210d6655 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi @@ -1,18 +1,3 @@ -"""Base implementation of event loop. - -The event loop can be broken up into a multiplexer (the part -responsible for notifying us of I/O events) and the event loop proper, -which wraps a multiplexer with functionality for scheduling callbacks, -immediately or at a given time in the future. - -Whenever a public API takes a callback, subsequent positional -arguments will be passed to the callback if/when it is called. This -avoids the proliferation of trivial lambdas implementing closures. -Keyword arguments for the callback are not supported; this is a -conscious design decision, leaving the door open for keyword arguments -to modify the meaning of the API call itself. -""" - import ssl import sys from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer @@ -74,147 +59,41 @@ class Server(AbstractServer): @property def sockets(self) -> tuple[socket, ...]: ... def close(self) -> None: ... - async def wait_closed(self) -> None: - """Wait until server is closed and all connections are dropped. 
- - - If the server is not closed, wait. - - If it is closed, but there are still active connections, wait. - - Anyone waiting here will be unblocked once both conditions - (server is closed and all connections have been dropped) - have become true, in either order. - - Historical note: In 3.11 and before, this was broken, returning - immediately if the server was already closed, even if there - were still active connections. An attempted fix in 3.12.0 was - still broken, returning immediately if the server was still - open and there were no active connections. Hopefully in 3.12.1 - we have it right. - """ + async def wait_closed(self) -> None: ... class BaseEventLoop(AbstractEventLoop): - def run_forever(self) -> None: - """Run until stop() is called.""" - - def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: - """Run until the Future is done. - - If the argument is a coroutine, it is wrapped in a Task. - - WARNING: It would be disastrous to call run_until_complete() - with the same coroutine twice -- it would wrap it in two - different Tasks and that can't be good. - - Return the Future's result, or raise its exception. - """ - - def stop(self) -> None: - """Stop running the event loop. - - Every callback already scheduled will still run. This simply informs - run_forever to stop looping after a complete iteration. - """ - - def is_running(self) -> bool: - """Returns True if the event loop is running.""" - - def is_closed(self) -> bool: - """Returns True if the event loop was closed.""" - - def close(self) -> None: - """Close the event loop. - - This clears the queues and shuts down the executor, - but does not wait for the executor to finish. - - The event loop must not be running. - """ - - async def shutdown_asyncgens(self) -> None: - """Shutdown all active asynchronous generators.""" + def run_forever(self) -> None: ... + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + def stop(self) -> None: ... + def is_running(self) -> bool: ... + def is_closed(self) -> bool: ... + def close(self) -> None: ... + async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. - def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None) -> Handle: - """Arrange for a callback to be called as soon as possible. - - This operates as a FIFO queue: callbacks are called in the - order in which they are registered. Each callback will be - called exactly once. - - Any positional arguments after the callback will be passed to - the callback when it is called. - """ - + def call_soon( + self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None + ) -> Handle: ... def call_later( self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: - """Arrange for a callback to be called at a given time. - - Return a Handle: an opaque object with a cancel() method that - can be used to cancel the call. - - The delay can be an int or float, expressed in seconds. It is - always relative to the current time. - - Each callback will be called exactly once. If two callbacks - are scheduled for exactly the same time, it is undefined which - will be called first. - - Any positional arguments after the callback will be passed to - the callback when it is called. - """ - + ) -> TimerHandle: ... 
def call_at( self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: - """Like call_later(), but uses an absolute time. - - Absolute time corresponds to the event loop's time() method. - """ - - def time(self) -> float: - """Return the time according to the event loop's clock. - - This is a float expressed in seconds since an epoch, but the - epoch, precision, accuracy and drift are unspecified and may - differ per event loop. - """ + ) -> TimerHandle: ... + def time(self) -> float: ... # Future methods - def create_future(self) -> Future[Any]: - """Create a Future object attached to the loop.""" + def create_future(self) -> Future[Any]: ... # Tasks methods if sys.version_info >= (3, 11): - def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: - """Schedule or begin executing a coroutine object. - - Return a task object. - """ + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: ... else: - def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: - """Schedule a coroutine object. - - Return a task object. - """ - - def set_task_factory(self, factory: _TaskFactory | None) -> None: - """Set a task factory that will be used by loop.create_task(). - - If factory is None the default task factory will be set. - - If factory is a callable, it should have a signature matching - '(loop, coro, **kwargs)', where 'loop' will be a reference to the active - event loop, 'coro' will be a coroutine object, and **kwargs will be - arbitrary keyword arguments that should be passed on to Task. - The callable must return a Task. - """ + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ... - def get_task_factory(self) -> _TaskFactory | None: - """Return a task factory, or None if the default one is in use.""" + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + def get_task_factory(self) -> _TaskFactory | None: ... # Methods for interacting with threads def call_soon_threadsafe( self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: - """Like call_soon(), but thread-safe.""" - + ) -> Handle: ... def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... def set_default_executor(self, executor: ThreadPoolExecutor) -> None: ... # type: ignore[override] # Network I/O methods returning Futures. @@ -249,19 +128,7 @@ class BaseEventLoop(AbstractEventLoop): happy_eyeballs_delay: float | None = None, interleave: int | None = None, all_errors: bool = False, - ) -> tuple[Transport, _ProtocolT]: - """Connect to a TCP server. - - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. - - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ - + ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, @@ -301,19 +168,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: - """Connect to a TCP server. - - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. - - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ - + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -351,19 +206,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: - """Connect to a TCP server. - - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. - - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ - + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, @@ -403,23 +246,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """Create a TCP server. - - The host parameter can be a string, in that case the TCP server is - bound to host and port. - - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. - - Return a Server object which can be used to stop the service. - - This method is a coroutine. - """ - + ) -> Server: ... @overload async def create_server( self, @@ -457,23 +284,7 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """Create a TCP server. - - The host parameter can be a string, in that case the TCP server is - bound to host and port. - - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. - - Return a Server object which can be used to stop the service. - - This method is a coroutine. - """ - + ) -> Server: ... @overload async def create_server( self, @@ -509,23 +320,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """Create a TCP server. - - The host parameter can be a string, in that case the TCP server is - bound to host and port. - - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. 
If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. - - Return a Server object which can be used to stop the service. - - This method is a coroutine. - """ - + ) -> Server: ... @overload async def create_server( self, @@ -555,13 +350,7 @@ class BaseEventLoop(AbstractEventLoop): server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: - """Upgrade transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - + ) -> Transport | None: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -581,13 +370,7 @@ class BaseEventLoop(AbstractEventLoop): server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, - ) -> Transport | None: - """Upgrade transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - + ) -> Transport | None: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -595,44 +378,14 @@ class BaseEventLoop(AbstractEventLoop): *, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: - """Handle an accepted connection. - - This is used by servers that accept connections outside of - asyncio but that use asyncio to handle connections. - - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ + ) -> tuple[Transport, _ProtocolT]: ... async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True ) -> int: ... async def sendfile( self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True - ) -> int: - """Send a file to transport. - - Return the total number of bytes which were sent. - - The method uses high-performance os.sendfile if available. - - file must be a regular file object opened in binary mode. - - offset tells from where to start reading the file. If specified, - count is the total number of bytes to transmit as opposed to - sending the file until EOF is reached. File position is updated on - return or also in case of error in which case file.tell() - can be used to figure out the number of bytes - which were sent. - - fallback set to True makes asyncio to manually read and send - the file when the platform does not support the sendfile syscall - (e.g. Windows or SSL socket on Unix). - - Raise SendfileNotAvailableError if the system does not support - sendfile syscall and fallback is False. - """ + ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, @@ -646,8 +399,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: - """Create datagram connection.""" + ) -> tuple[DatagramTransport, _ProtocolT]: ... else: async def create_datagram_endpoint( self, @@ -662,8 +414,7 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: - """Create datagram connection.""" + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. 
async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -722,75 +473,16 @@ class BaseEventLoop(AbstractEventLoop): def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... # Error handlers. - def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: - """Set handler as the new event loop exception handler. - - If handler is None, the default exception handler will - be set. - - If handler is a callable object, it should have a - signature matching '(loop, context)', where 'loop' - will be a reference to the active event loop, 'context' - will be a dict object (see `call_exception_handler()` - documentation for details about context). - """ - - def get_exception_handler(self) -> _ExceptionHandler | None: - """Return an exception handler, or None if the default one is in use.""" - - def default_exception_handler(self, context: _Context) -> None: - """Default exception handler. - - This is called when an exception occurs and no exception - handler is set, and can be called by a custom exception - handler that wants to defer to the default behavior. - - This default handler logs the error message and other - context-dependent information. In debug mode, a truncated - stack trace is also appended showing where the given object - (e.g. a handle or future or task) was created, if any. - - The context parameter has the same meaning as in - `call_exception_handler()`. - """ - - def call_exception_handler(self, context: _Context) -> None: - """Call the current event loop's exception handler. - - The context argument is a dict containing the following keys: - - - 'message': Error message; - - 'exception' (optional): Exception object; - - 'future' (optional): Future instance; - - 'task' (optional): Task instance; - - 'handle' (optional): Handle instance; - - 'protocol' (optional): Protocol instance; - - 'transport' (optional): Transport instance; - - 'socket' (optional): Socket instance; - - 'source_traceback' (optional): Traceback of the source; - - 'handle_traceback' (optional): Traceback of the handle; - - 'asyncgen' (optional): Asynchronous generator that caused - the exception. - - New keys maybe introduced in the future. - - Note: do not overload this method in an event loop subclass. - For custom exception handling, use the - `set_exception_handler()` method. - """ + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + def get_exception_handler(self) -> _ExceptionHandler | None: ... + def default_exception_handler(self, context: _Context) -> None: ... + def call_exception_handler(self, context: _Context) -> None: ... # Debug flag management. def get_debug(self) -> bool: ... def set_debug(self, enabled: bool) -> None: ... if sys.version_info >= (3, 12): - async def shutdown_default_executor(self, timeout: float | None = None) -> None: - """Schedule the shutdown of the default executor. - - The timeout parameter specifies the amount of time the executor will - be given to finish joining. The default value is None, which means - that the executor will be given an unlimited amount of time. - """ + async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... else: - async def shutdown_default_executor(self) -> None: - """Schedule the shutdown of the default executor.""" + async def shutdown_default_executor(self) -> None: ... def __del__(self) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi index 8b1ed8ae122a2..2cd0f2e3a7e4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi @@ -12,16 +12,6 @@ _PENDING: Final = "PENDING" # undocumented _CANCELLED: Final = "CANCELLED" # undocumented _FINISHED: Final = "FINISHED" # undocumented -def isfuture(obj: object) -> TypeIs[Future[Any]]: - """Check for a Future. - - This returns True when obj is a Future instance or is advertising - itself as duck-type compatible by setting _asyncio_future_blocking. - See comment in Future for more details. - """ - -def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: # undocumented - """helper function for Future.__repr__""" - -def _future_repr_info(future: futures.Future[Any]) -> list[str]: # undocumented - """helper function for Future.__repr__""" +def isfuture(obj: object) -> TypeIs[Future[Any]]: ... +def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented +def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi index 989824b6101c6..a5fe24e8768b7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -52,12 +52,7 @@ class BaseSubprocessTransport(transports.SubprocessTransport): def _pipe_connection_lost(self, fd: int, exc: BaseException | None) -> None: ... # undocumented def _pipe_data_received(self, fd: int, data: bytes) -> None: ... # undocumented def _process_exited(self, returncode: int) -> None: ... # undocumented - async def _wait(self) -> int: # undocumented - """Wait until the process exit and return the process return code. - - This method is a coroutine. - """ - + async def _wait(self) -> int: ... # undocumented def _try_finish(self) -> None: ... # undocumented def _call_connection_lost(self, exc: BaseException | None) -> None: ... # undocumented def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi index 61aaf51e026fa..5c6456b0e9c04 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi @@ -15,8 +15,6 @@ if sys.version_info >= (3, 12): THREAD_JOIN_TIMEOUT: Final = 300 class _SendfileMode(enum.Enum): - """An enumeration.""" - UNSUPPORTED = 1 TRY_NATIVE = 2 FALLBACK = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi index d1db48fb9cfad..59212f4ec398b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi @@ -15,22 +15,14 @@ _P = ParamSpec("_P") if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `async def` instead.") - def coroutine(func: _FunctionT) -> _FunctionT: - """Decorator to mark coroutines. - - If the coroutine is not yielded from before it is destroyed, - an error message is logged. 
- """ + def coroutine(func: _FunctionT) -> _FunctionT: ... @overload -def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: - """Return True if func is a decorated coroutine function.""" - +def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... @overload def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... @overload def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... @overload def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... -def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: - """Return True if obj is a coroutine object.""" +def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi index 10d72b8fa88cb..5dc698bc5e15c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi @@ -1,5 +1,3 @@ -"""Event loop and event loop policy.""" - import ssl import sys from _asyncio import ( @@ -75,8 +73,6 @@ class _TaskFactory(Protocol): def __call__(self, loop: AbstractEventLoop, factory: _CoroutineLike[_T], /) -> Future[_T]: ... class Handle: - """Object returned by callback registration methods.""" - __slots__ = ("_callback", "_args", "_cancelled", "_loop", "_source_traceback", "_repr", "__weakref__", "_context") _cancelled: bool _args: Sequence[Any] @@ -90,8 +86,6 @@ class Handle: def get_context(self) -> Context: ... class TimerHandle(Handle): - """Object returned by timed callback registration methods.""" - __slots__ = ["_scheduled", "_when"] def __init__( self, @@ -102,13 +96,7 @@ class TimerHandle(Handle): context: Context | None = None, ) -> None: ... def __hash__(self) -> int: ... - def when(self) -> float: - """Return a scheduled callback time. - - The time is an absolute timestamp, using the same time - reference as loop.time(). - """ - + def when(self) -> float: ... def __lt__(self, other: TimerHandle) -> bool: ... def __le__(self, other: TimerHandle) -> bool: ... def __gt__(self, other: TimerHandle) -> bool: ... @@ -116,94 +104,43 @@ class TimerHandle(Handle): def __eq__(self, other: object) -> bool: ... class AbstractServer: - """Abstract server returned by create_server().""" - @abstractmethod - def close(self) -> None: - """Stop serving. This leaves existing connections open.""" + def close(self) -> None: ... if sys.version_info >= (3, 13): @abstractmethod - def close_clients(self) -> None: - """Close all active connections.""" - + def close_clients(self) -> None: ... @abstractmethod - def abort_clients(self) -> None: - """Close all active connections immediately.""" + def abort_clients(self) -> None: ... async def __aenter__(self) -> Self: ... async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod - def get_loop(self) -> AbstractEventLoop: - """Get the event loop the Server object is attached to.""" - + def get_loop(self) -> AbstractEventLoop: ... @abstractmethod - def is_serving(self) -> bool: - """Return True if the server is accepting connections.""" - + def is_serving(self) -> bool: ... @abstractmethod - async def start_serving(self) -> None: - """Start accepting connections. - - This method is idempotent, so it can be called when - the server is already being serving. 
- """ - + async def start_serving(self) -> None: ... @abstractmethod - async def serve_forever(self) -> None: - """Start accepting connections until the coroutine is cancelled. - - The server is closed when the coroutine is cancelled. - """ - + async def serve_forever(self) -> None: ... @abstractmethod - async def wait_closed(self) -> None: - """Coroutine to wait until service is closed.""" + async def wait_closed(self) -> None: ... class AbstractEventLoop: - """Abstract event loop.""" - slow_callback_duration: float @abstractmethod - def run_forever(self) -> None: - """Run the event loop until stop() is called.""" - + def run_forever(self) -> None: ... @abstractmethod - def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: - """Run the event loop until a Future is done. - - Return the Future's result, or raise its exception. - """ - + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... @abstractmethod - def stop(self) -> None: - """Stop the event loop as soon as reasonable. - - Exactly how soon that is may depend on the implementation, but - no more I/O callbacks should be scheduled. - """ - + def stop(self) -> None: ... @abstractmethod - def is_running(self) -> bool: - """Return whether the event loop is currently running.""" - + def is_running(self) -> bool: ... @abstractmethod - def is_closed(self) -> bool: - """Returns True if the event loop was closed.""" - + def is_closed(self) -> bool: ... @abstractmethod - def close(self) -> None: - """Close the loop. - - The loop should not be running. - - This is idempotent and irreversible. - - No other methods should be called after this one. - """ - + def close(self) -> None: ... @abstractmethod - async def shutdown_asyncgens(self) -> None: - """Shutdown all active asynchronous generators.""" + async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. # "context" added in 3.9.10/3.10.2 for call_* @abstractmethod @@ -363,59 +300,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """A coroutine which creates a TCP server bound to host and port. - - The return value is a Server object which can be used to stop - the service. - - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. - - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). - - flags is a bitmask for getaddrinfo(). - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. - - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. - - keep_alive set to True keeps connections active by enabling the - periodic transmission of messages. 
- - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. - - ssl_shutdown_timeout is the time in seconds that an SSL server - will wait for completion of the SSL shutdown procedure - before aborting the connection. Default is 30s. - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - + ) -> Server: ... @overload @abstractmethod async def create_server( @@ -455,56 +340,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """A coroutine which creates a TCP server bound to host and port. - - The return value is a Server object which can be used to stop - the service. - - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. - - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). - - flags is a bitmask for getaddrinfo(). - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. - - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. - - ssl_shutdown_timeout is the time in seconds that an SSL server - will wait for completion of the SSL shutdown procedure - before aborting the connection. Default is 30s. - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - + ) -> Server: ... @overload @abstractmethod async def create_server( @@ -542,52 +378,7 @@ class AbstractEventLoop: reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """A coroutine which creates a TCP server bound to host and port. - - The return value is a Server object which can be used to stop - the service. - - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. - - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). 
- - flags is a bitmask for getaddrinfo(). - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. - - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ - + ) -> Server: ... @overload @abstractmethod async def create_server( @@ -619,13 +410,7 @@ class AbstractEventLoop: server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: - """Upgrade a transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - + ) -> Transport | None: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -637,35 +422,7 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """A coroutine which creates a UNIX Domain Socket server. - - The return value is a Server object, which can be used to stop - the service. - - path is a str, representing a file system path to bind the - server socket to. - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for the SSL handshake to complete (defaults to 60s). - - ssl_shutdown_timeout is the time in seconds that an SSL server - will wait for the SSL shutdown to finish (defaults to 30s). - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ + ) -> Server: ... else: @abstractmethod async def start_tls( @@ -677,13 +434,7 @@ class AbstractEventLoop: server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, - ) -> Transport | None: - """Upgrade a transport to TLS. - - Return a new transport that *protocol* should start using - immediately. - """ - + ) -> Transport | None: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -694,32 +445,8 @@ class AbstractEventLoop: ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: - """A coroutine which creates a UNIX Domain Socket server. 
- - The return value is a Server object, which can be used to stop - the service. - - path is a str, representing a file system path to bind the - server socket to. - - sock can optionally be specified in order to use a preexisting - socket object. - - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). - - ssl can be set to an SSLContext to enable SSL over the - accepted connections. - - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for the SSL handshake to complete (defaults to 60s). + ) -> Server: ... - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ if sys.version_info >= (3, 11): async def connect_accepted_socket( self, @@ -729,15 +456,7 @@ class AbstractEventLoop: ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: - """Handle an accepted connection. - - This is used by servers that accept connections outside of - asyncio, but use asyncio to handle connections. - - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, @@ -746,15 +465,7 @@ class AbstractEventLoop: *, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: - """Handle an accepted connection. - - This is used by servers that accept connections outside of - asyncio, but use asyncio to handle connections. - - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -786,12 +497,7 @@ class AbstractEventLoop: @abstractmethod async def sendfile( self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True - ) -> int: - """Send a file through a transport. - - Return an amount of sent bytes. - """ - + ) -> int: ... @abstractmethod async def create_datagram_endpoint( self, @@ -806,58 +512,16 @@ class AbstractEventLoop: reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: - """A coroutine which creates a datagram endpoint. - - This method will try to establish the endpoint in the background. - When successful, the coroutine returns a (transport, protocol) pair. - - protocol_factory must be a callable returning a protocol instance. - - socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on - host (or family if specified), socket type SOCK_DGRAM. - - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified it will automatically be set to True on - UNIX. - - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows and some UNIX's. If the - :py:data:`~socket.SO_REUSEPORT` constant is not defined then this - capability is unsupported. 
- - allow_broadcast tells the kernel to allow this endpoint to send - messages to the broadcast address. - - sock can optionally be specified in order to use a preexisting - socket object. - """ + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. @abstractmethod - async def connect_read_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[ReadTransport, _ProtocolT]: - """Register read pipe in event loop. Set the pipe to non-blocking mode. - - protocol_factory should instantiate object with Protocol interface. - pipe is a file-like object. - Return pair (transport, protocol), where transport supports the - ReadTransport interface. - """ - + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... @abstractmethod async def connect_write_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any - ) -> tuple[WriteTransport, _ProtocolT]: - """Register write pipe in event loop. - - protocol_factory should instantiate object with BaseProtocol interface. - Pipe is file-like object already switched to nonblocking. - Return pair (transport, protocol), where transport support - WriteTransport interface. - """ - + ) -> tuple[WriteTransport, _ProtocolT]: ... @abstractmethod async def subprocess_shell( self, @@ -936,34 +600,16 @@ class AbstractEventLoop: @abstractmethod def set_debug(self, enabled: bool) -> None: ... @abstractmethod - async def shutdown_default_executor(self) -> None: - """Schedule the shutdown of the default executor.""" + async def shutdown_default_executor(self) -> None: ... if sys.version_info >= (3, 14): class _AbstractEventLoopPolicy: - """Abstract policy for accessing the event loop.""" - @abstractmethod - def get_event_loop(self) -> AbstractEventLoop: - """Get the event loop for the current context. - - Returns an event loop object implementing the AbstractEventLoop interface, - or raises an exception in case no event loop has been set for the - current context and the current policy does not specify to create one. - - It should never return None. - """ - + def get_event_loop(self) -> AbstractEventLoop: ... @abstractmethod - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop for the current context to loop.""" - + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... @abstractmethod - def new_event_loop(self) -> AbstractEventLoop: - """Create and return a new event loop object according to this - policy's rules. If there's need to set this loop as the event loop for - the current context, set_event_loop must be called explicitly. - """ + def new_event_loop(self) -> AbstractEventLoop: ... else: @type_check_only @@ -992,111 +638,38 @@ else: if sys.version_info >= (3, 14): class _BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): - """Default policy implementation for accessing the event loop. - - In this policy, each thread has its own event loop. However, we - only automatically create an event loop by default for the main - thread; other threads by default have no event loop. - - Other policies may have different rules (e.g. a single global - event loop, or automatically creating an event loop per thread, or - using some other notion of context to which an event loop is - associated). - """ - - def get_event_loop(self) -> AbstractEventLoop: - """Get the event loop for the current context. - - Returns an instance of EventLoop or raises an exception. 
- """ - - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop.""" - - def new_event_loop(self) -> AbstractEventLoop: - """Create a new event loop. - - You must call set_event_loop() to make this the current event - loop. - """ + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... else: class BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): - """Default policy implementation for accessing the event loop. - - In this policy, each thread has its own event loop. However, we - only automatically create an event loop by default for the main - thread; other threads by default have no event loop. - - Other policies may have different rules (e.g. a single global - event loop, or automatically creating an event loop per thread, or - using some other notion of context to which an event loop is - associated). - """ - - def get_event_loop(self) -> AbstractEventLoop: - """Get the event loop for the current context. - - Returns an instance of EventLoop or raises an exception. - """ - - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop.""" - - def new_event_loop(self) -> AbstractEventLoop: - """Create a new event loop. - - You must call set_event_loop() to make this the current event - loop. - """ + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... if sys.version_info >= (3, 14): - def _get_event_loop_policy() -> _AbstractEventLoopPolicy: - """Get the current event loop policy.""" - - def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: - """Set the current event loop policy. - - If policy is None, the default policy is restored. - """ - + def _get_event_loop_policy() -> _AbstractEventLoopPolicy: ... + def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... else: - def get_event_loop_policy() -> _AbstractEventLoopPolicy: - """Get the current event loop policy.""" - - def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: - """Set the current event loop policy. - - If policy is None, the default policy is restored. - """ - -def set_event_loop(loop: AbstractEventLoop | None) -> None: - """Equivalent to calling get_event_loop_policy().set_event_loop(loop).""" + def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... + def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... -def new_event_loop() -> AbstractEventLoop: - """Equivalent to calling get_event_loop_policy().new_event_loop().""" +def set_event_loop(loop: AbstractEventLoop | None) -> None: ... +def new_event_loop() -> AbstractEventLoop: ... if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher().""" - + def get_child_watcher() -> AbstractChildWatcher: ... 
@deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def set_child_watcher(watcher: AbstractChildWatcher) -> None: - """Equivalent to calling - get_event_loop_policy().set_child_watcher(watcher). - """ - else: - def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher().""" + def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... - def set_child_watcher(watcher: AbstractChildWatcher) -> None: - """Equivalent to calling - get_event_loop_policy().set_child_watcher(watcher). - """ + else: + def get_child_watcher() -> AbstractChildWatcher: ... + def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi index bf1330f7b1518..759838f45de47 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi @@ -1,5 +1,3 @@ -"""asyncio exceptions.""" - import sys # Keep asyncio.__all__ updated with any changes to __all__ here @@ -23,47 +21,24 @@ else: "SendfileNotAvailableError", ) -class CancelledError(BaseException): - """The Future or Task was cancelled.""" +class CancelledError(BaseException): ... if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Exception): - """The operation exceeded the given deadline.""" - -class InvalidStateError(Exception): - """The operation is not allowed in this state.""" + class TimeoutError(Exception): ... -class SendfileNotAvailableError(RuntimeError): - """Sendfile syscall is not available. - - Raised if OS does not support sendfile syscall for given socket or - file type. - """ +class InvalidStateError(Exception): ... +class SendfileNotAvailableError(RuntimeError): ... class IncompleteReadError(EOFError): - """ - Incomplete read error. Attributes: - - - partial: read bytes string before the end of stream was reached - - expected: total number of expected bytes (or None if unknown) - """ - expected: int | None partial: bytes def __init__(self, partial: bytes, expected: int | None) -> None: ... class LimitOverrunError(Exception): - """Reached the buffer limit while looking for a separator. - - Attributes: - - consumed: total number of to be consumed bytes. - """ - consumed: int def __init__(self, message: str, consumed: int) -> None: ... if sys.version_info >= (3, 11): - class BrokenBarrierError(RuntimeError): - """Barrier is broken by barrier.abort() call.""" + class BrokenBarrierError(RuntimeError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi index 9213dcbd43196..597eb9e56e1a1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi @@ -19,31 +19,14 @@ def _get_function_source(func: object) -> tuple[str, int] | None: ... if sys.version_info >= (3, 13): def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ... - def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: - """Format function arguments and keyword arguments. - - Special case for a single parameter: ('hello',) is formatted as ('hello'). 
- - Note that this function only returns argument details when - debug=True is specified, as arguments may contain sensitive - information. - """ - + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: ... def _format_callback( func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "" ) -> str: ... else: def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... - def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: - """Format function arguments and keyword arguments. - - Special case for a single parameter: ('hello',) is formatted as ('hello'). - """ - + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... -def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: - """Replacement for traceback.extract_stack() that only does the - necessary work for asyncio debug mode. - """ +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi index 8acaaea444bf9..c907c7036b040 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi @@ -1,5 +1,3 @@ -"""A Future class similar to the one in PEP 3148.""" - import sys from _asyncio import Future as Future from concurrent.futures._base import Future as _ConcurrentFuture @@ -18,5 +16,4 @@ else: _T = TypeVar("_T") -def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: - """Wrap concurrent.futures.Future object.""" +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi index 2b9fb15f4fc63..18a8a6457d757 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi @@ -1,5 +1,3 @@ -"""Introspection utils for tasks call graphs.""" - import sys from _typeshed import SupportsWrite from asyncio import Future @@ -12,63 +10,19 @@ if sys.version_info >= (3, 14): @dataclass(frozen=True, slots=True) class FrameCallGraphEntry: - """FrameCallGraphEntry(frame: frame)""" - frame: FrameType @dataclass(frozen=True, slots=True) class FutureCallGraph: - """FutureCallGraph(future: _asyncio.Future, call_stack: tuple['FrameCallGraphEntry', ...], awaited_by: tuple['FutureCallGraph', ...])""" - future: Future[Any] call_stack: tuple[FrameCallGraphEntry, ...] awaited_by: tuple[FutureCallGraph, ...] @overload - def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: - """Capture the async call graph for the current task or the provided Future. - - The graph is represented with three data structures: - - * FutureCallGraph(future, call_stack, awaited_by) - - Where 'future' is an instance of asyncio.Future or asyncio.Task. - - 'call_stack' is a tuple of FrameGraphEntry objects. - - 'awaited_by' is a tuple of FutureCallGraph objects. 
- - * FrameCallGraphEntry(frame) - - Where 'frame' is a frame object of a regular Python function - in the call stack. - - Receives an optional 'future' argument. If not passed, - the current task will be used. If there's no current task, the function - returns None. - - If "capture_call_graph()" is introspecting *the current task*, the - optional keyword-only 'depth' argument can be used to skip the specified - number of frames from top of the stack. - - If the optional keyword-only 'limit' argument is provided, each call stack - in the resulting graph is truncated to include at most ``abs(limit)`` - entries. If 'limit' is positive, the entries left are the closest to - the invocation point. If 'limit' is negative, the topmost entries are - left. If 'limit' is omitted or None, all entries are present. - If 'limit' is 0, the call stack is not captured at all, only - "awaited by" information is present. - """ - + def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... @overload def capture_call_graph(future: Future[Any], /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... - def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: - """Return the async call graph as a string for `future`. - - If `future` is not provided, format the call graph for the current task. - """ - + def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: ... def print_call_graph( future: Future[Any] | None = None, /, *, file: SupportsWrite[str] | None = None, depth: int = 1, limit: int | None = None - ) -> None: - """Print the async call graph for the current task or the provided Future.""" + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi index d38607b4106c3..17390b0c5a0ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi @@ -1,5 +1,3 @@ -"""Synchronization primitives.""" - import enum import sys from _typeshed import Unused @@ -32,133 +30,29 @@ class _ContextManagerMixin: ) -> None: ... class Lock(_ContextManagerMixin, _LoopBoundMixin): - """Primitive lock objects. - - A primitive lock is a synchronization primitive that is not owned - by a particular task when locked. A primitive lock is in one - of two states, 'locked' or 'unlocked'. - - It is created in the unlocked state. It has two basic methods, - acquire() and release(). When the state is unlocked, acquire() - changes the state to locked and returns immediately. When the - state is locked, acquire() blocks until a call to release() in - another task changes it to unlocked, then the acquire() call - resets it to locked and returns. The release() method should only - be called in the locked state; it changes the state to unlocked - and returns immediately. If an attempt is made to release an - unlocked lock, a RuntimeError will be raised. - - When more than one task is blocked in acquire() waiting for - the state to turn to unlocked, only one task proceeds when a - release() call resets the state to unlocked; successive release() - calls will unblock tasks in FIFO order. - - Locks also support the asynchronous context management protocol. - 'async with lock' statement should be used. - - Usage: - - lock = Lock() - ... - await lock.acquire() - try: - ... 
- finally: - lock.release() - - Context manager usage: - - lock = Lock() - ... - async with lock: - ... - - Lock objects can be tested for locking state: - - if not lock.locked(): - await lock.acquire() - else: - # lock is acquired - ... - - """ - _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def locked(self) -> bool: - """Return True if lock is acquired.""" - - async def acquire(self) -> Literal[True]: - """Acquire a lock. - - This method blocks until the lock is unlocked, then sets it to - locked and returns True. - """ - - def release(self) -> None: - """Release a lock. - - When the lock is locked, reset it to unlocked, and return. - If any other tasks are blocked waiting for the lock to become - unlocked, allow exactly one of them to proceed. - - When invoked on an unlocked lock, a RuntimeError is raised. - - There is no return value. - """ + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... class Event(_LoopBoundMixin): - """Asynchronous equivalent to threading.Event. - - Class implementing event objects. An event manages a flag that can be set - to true with the set() method and reset to false with the clear() method. - The wait() method blocks until the flag is true. The flag is initially - false. - """ - _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def is_set(self) -> bool: - """Return True if and only if the internal flag is true.""" - - def set(self) -> None: - """Set the internal flag to true. All tasks waiting for it to - become true are awakened. Tasks that call wait() once the flag is - true will not block at all. - """ - - def clear(self) -> None: - """Reset the internal flag to false. Subsequently, tasks calling - wait() will block until set() is called to set the internal flag - to true again. - """ - - async def wait(self) -> Literal[True]: - """Block until the internal flag is true. - - If the internal flag is true on entry, return True - immediately. Otherwise, block until another task calls - set() to set the flag to true, then return True. - """ + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Literal[True]: ... class Condition(_ContextManagerMixin, _LoopBoundMixin): - """Asynchronous equivalent to threading.Condition. - - This class implements condition variable objects. A condition variable - allows one or more tasks to wait until they are notified by another - task. - - A new Lock object is created and used as the underlying lock. - """ - _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self, lock: Lock | None = None) -> None: ... @@ -168,66 +62,12 @@ class Condition(_ContextManagerMixin, _LoopBoundMixin): def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... - async def wait(self) -> Literal[True]: - """Wait until notified. - - If the calling task has not acquired the lock when this - method is called, a RuntimeError is raised. - - This method releases the underlying lock, and then blocks - until it is awakened by a notify() or notify_all() call for - the same condition variable in another task. Once - awakened, it re-acquires the lock and returns True. 
- - This method may return spuriously, - which is why the caller should always - re-check the state and be prepared to wait() again. - """ - - async def wait_for(self, predicate: Callable[[], _T]) -> _T: - """Wait until a predicate becomes true. - - The predicate should be a callable whose result will be - interpreted as a boolean value. The method will repeatedly - wait() until it evaluates to true. The final predicate value is - the return value. - """ - - def notify(self, n: int = 1) -> None: - """By default, wake up one task waiting on this condition, if any. - If the calling task has not acquired the lock when this method - is called, a RuntimeError is raised. - - This method wakes up n of the tasks waiting for the condition - variable; if fewer than n are waiting, they are all awoken. - - Note: an awakened task does not actually return from its - wait() call until it can reacquire the lock. Since notify() does - not release the lock, its caller should. - """ - - def notify_all(self) -> None: - """Wake up all tasks waiting on this condition. This method acts - like notify(), but wakes up all waiting tasks instead of one. If the - calling task has not acquired the lock when this method is called, - a RuntimeError is raised. - """ + async def wait(self) -> Literal[True]: ... + async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin, _LoopBoundMixin): - """A Semaphore implementation. - - A semaphore manages an internal counter which is decremented by each - acquire() call and incremented by each release() call. The counter - can never go below zero; when acquire() finds that it is zero, it blocks, - waiting until some other thread calls release(). - - Semaphores also support the context management protocol. - - The optional argument gives the initial value for the internal - counter; it defaults to 1. If the value given is less than 0, - ValueError is raised. - """ - _value: int _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): @@ -235,35 +75,12 @@ class Semaphore(_ContextManagerMixin, _LoopBoundMixin): else: def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... - def locked(self) -> bool: - """Returns True if semaphore cannot be acquired immediately.""" - - async def acquire(self) -> Literal[True]: - """Acquire a semaphore. - - If the internal counter is larger than zero on entry, - decrement it by one and return True immediately. If it is - zero on entry, block, waiting until some other task has - called release() to make it larger than 0, and then return - True. - """ - - def release(self) -> None: - """Release a semaphore, incrementing the internal counter by one. - - When it was zero on entry and another task is waiting for it to - become larger than zero again, wake up that task. - """ - - def _wake_up_next(self) -> None: - """Wake up the first waiter that isn't done.""" - -class BoundedSemaphore(Semaphore): - """A bounded semaphore implementation. + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + def _wake_up_next(self) -> None: ... - This raises ValueError in release() if it would increase the value - above the initial value. - """ +class BoundedSemaphore(Semaphore): ... 
if sys.version_info >= (3, 11): class _BarrierState(enum.Enum): # undocumented @@ -273,49 +90,15 @@ if sys.version_info >= (3, 11): BROKEN = "broken" class Barrier(_LoopBoundMixin): - """Asyncio equivalent to threading.Barrier - - Implements a Barrier primitive. - Useful for synchronizing a fixed number of tasks at known synchronization - points. Tasks block on 'wait()' and are simultaneously awoken once they - have all made their call. - """ - - def __init__(self, parties: int) -> None: - """Create a barrier, initialised to 'parties' tasks.""" - + def __init__(self, parties: int) -> None: ... async def __aenter__(self) -> Self: ... async def __aexit__(self, *args: Unused) -> None: ... - async def wait(self) -> int: - """Wait for the barrier. - - When the specified number of tasks have started waiting, they are all - simultaneously awoken. - Returns an unique and individual index number from 0 to 'parties-1'. - """ - - async def abort(self) -> None: - """Place the barrier into a 'broken' state. - - Useful in case of error. Any currently waiting tasks and tasks - attempting to 'wait()' will have BrokenBarrierError raised. - """ - - async def reset(self) -> None: - """Reset the barrier to the initial state. - - Any tasks currently waiting will get the BrokenBarrier exception - raised. - """ - + async def wait(self) -> int: ... + async def abort(self) -> None: ... + async def reset(self) -> None: ... @property - def parties(self) -> int: - """Return the number of tasks required to trip the barrier.""" - + def parties(self) -> int: ... @property - def n_waiting(self) -> int: - """Return the number of tasks currently waiting at the barrier.""" - + def n_waiting(self) -> int: ... @property - def broken(self) -> bool: - """Return True if the barrier is in a broken state.""" + def broken(self) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi index a544534b38ea7..e1de0b3bb845e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi @@ -1,5 +1,3 @@ -"""Logging configuration.""" - import logging logger: logging.Logger diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi index 1950908ee08e3..6ebcf543e6b94 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi @@ -1,5 +1,3 @@ -"""Event loop mixins.""" - import sys import threading from typing_extensions import Never diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi index c67cd07286563..909d671df289d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi @@ -1,9 +1,3 @@ -"""Event loop using a proactor and related classes. - -A proactor is a "notify-on-completion" multiplexer. Currently a -proactor is only implemented on Windows with IOCP. -""" - import sys from collections.abc import Mapping from socket import socket @@ -14,8 +8,6 @@ from . 
import base_events, constants, events, futures, streams, transports __all__ = ("BaseProactorEventLoop",) class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): - """Base class for pipe and socket transports.""" - def __init__( self, loop: events.AbstractEventLoop, @@ -28,8 +20,6 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr def __del__(self) -> None: ... class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): - """Transport for read pipes.""" - if sys.version_info >= (3, 10): def __init__( self, @@ -52,17 +42,11 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran server: events.AbstractServer | None = None, ) -> None: ... -class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): - """Transport for write pipes.""" - +class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... - -class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - """Transport for duplex pipes.""" +class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - """Transport for connected sockets.""" - _sendfile_compatible: ClassVar[constants._SendfileMode] def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi index bd2e9c1118cf3..2c52ad4be4102 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi @@ -1,5 +1,3 @@ -"""Abstract Protocol base classes.""" - from _typeshed import ReadableBuffer from asyncio import transports from typing import Any @@ -8,194 +6,36 @@ from typing import Any __all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") class BaseProtocol: - """Common base class for protocol interfaces. - - Usually user implements protocols that derived from BaseProtocol - like Protocol or ProcessProtocol. - - The only case when BaseProtocol should be implemented directly is - write-only transport like write pipe - """ - __slots__ = () - def connection_made(self, transport: transports.BaseTransport) -> None: - """Called when a connection is made. - - The argument is the transport representing the pipe connection. - To receive data, wait for data_received() calls. - When the connection is closed, connection_lost() is called. - """ - - def connection_lost(self, exc: Exception | None) -> None: - """Called when the connection is lost or closed. - - The argument is an exception object or None (the latter - meaning a regular EOF is received or the connection was - aborted or closed). - """ - - def pause_writing(self) -> None: - """Called when the transport's buffer goes over the high-water mark. - - Pause and resume calls are paired -- pause_writing() is called - once when the buffer goes strictly over the high-water mark - (even if subsequent writes increases the buffer size even - more), and eventually resume_writing() is called once when the - buffer size reaches the low-water mark. 
- - Note that if the buffer size equals the high-water mark, - pause_writing() is not called -- it must go strictly over. - Conversely, resume_writing() is called when the buffer size is - equal or lower than the low-water mark. These end conditions - are important to ensure that things go as expected when either - mark is zero. - - NOTE: This is the only Protocol callback that is not called - through EventLoop.call_soon() -- if it were, it would have no - effect when it's most needed (when the app keeps writing - without yielding until pause_writing() is called). - """ - - def resume_writing(self) -> None: - """Called when the transport's buffer drains below the low-water mark. - - See pause_writing() for details. - """ + def connection_made(self, transport: transports.BaseTransport) -> None: ... + def connection_lost(self, exc: Exception | None) -> None: ... + def pause_writing(self) -> None: ... + def resume_writing(self) -> None: ... class Protocol(BaseProtocol): - """Interface for stream protocol. - - The user should implement this interface. They can inherit from - this class but don't need to. The implementations here do - nothing (they don't raise exceptions). - - When the user wants to requests a transport, they pass a protocol - factory to a utility function (e.g., EventLoop.create_connection()). - - When the connection is made successfully, connection_made() is - called with a suitable transport object. Then data_received() - will be called 0 or more times with data (bytes) received from the - transport; finally, connection_lost() will be called exactly once - with either an exception object or None as an argument. - - State machine of calls: - - start -> CM [-> DR*] [-> ER?] -> CL -> end - - * CM: connection_made() - * DR: data_received() - * ER: eof_received() - * CL: connection_lost() - """ - # Need annotation or mypy will complain about 'Cannot determine type of "__slots__" in base class' __slots__: tuple[()] = () - def data_received(self, data: bytes) -> None: - """Called when some data is received. - - The argument is a bytes object. - """ - - def eof_received(self) -> bool | None: - """Called when the other end calls write_eof() or equivalent. - - If this returns a false value (including None), the transport - will close itself. If it returns a true value, closing the - transport is up to the protocol. - """ + def data_received(self, data: bytes) -> None: ... + def eof_received(self) -> bool | None: ... class BufferedProtocol(BaseProtocol): - """Interface for stream protocol with manual buffer control. - - Event methods, such as `create_server` and `create_connection`, - accept factories that return protocols that implement this interface. - - The idea of BufferedProtocol is that it allows to manually allocate - and control the receive buffer. Event loops can then use the buffer - provided by the protocol to avoid unnecessary data copies. This - can result in noticeable performance improvement for protocols that - receive big amounts of data. Sophisticated protocols can allocate - the buffer only once at creation time. - - State machine of calls: - - start -> CM [-> GB [-> BU?]]* [-> ER?] -> CL -> end - - * CM: connection_made() - * GB: get_buffer() - * BU: buffer_updated() - * ER: eof_received() - * CL: connection_lost() - """ - __slots__ = () - def get_buffer(self, sizehint: int) -> ReadableBuffer: - """Called to allocate a new receive buffer. - - *sizehint* is a recommended minimal size for the returned - buffer. When set to -1, the buffer size can be arbitrary. 
- - Must return an object that implements the - :ref:`buffer protocol `. - It is an error to return a zero-sized buffer. - """ - - def buffer_updated(self, nbytes: int) -> None: - """Called when the buffer was updated with the received data. - - *nbytes* is the total number of bytes that were written to - the buffer. - """ - - def eof_received(self) -> bool | None: - """Called when the other end calls write_eof() or equivalent. - - If this returns a false value (including None), the transport - will close itself. If it returns a true value, closing the - transport is up to the protocol. - """ + def get_buffer(self, sizehint: int) -> ReadableBuffer: ... + def buffer_updated(self, nbytes: int) -> None: ... + def eof_received(self) -> bool | None: ... class DatagramProtocol(BaseProtocol): - """Interface for datagram protocol.""" - __slots__ = () - def connection_made(self, transport: transports.DatagramTransport) -> None: # type: ignore[override] - """Called when a connection is made. - - The argument is the transport representing the pipe connection. - To receive data, wait for data_received() calls. - When the connection is closed, connection_lost() is called. - """ + def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. # See https://github.com/python/typing/issues/566 - def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: - """Called when some datagram is received.""" - - def error_received(self, exc: Exception) -> None: - """Called when a send or receive operation raises an OSError. - - (Other than BlockingIOError or InterruptedError.) - """ + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... + def error_received(self, exc: Exception) -> None: ... class SubprocessProtocol(BaseProtocol): - """Interface for protocol for subprocess calls.""" - __slots__: tuple[()] = () - def pipe_data_received(self, fd: int, data: bytes) -> None: - """Called when the subprocess writes data into stdout/stderr pipe. - - fd is int file descriptor. - data is bytes object. - """ - - def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: - """Called when a file descriptor associated with the child process is - closed. - - fd is the int file descriptor that was closed. - """ - - def process_exited(self) -> None: - """Called when subprocess has exited.""" + def pipe_data_received(self, fd: int, data: bytes) -> None: ... + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... + def process_exited(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi index 691da360709b2..2fa2226d0e6ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi @@ -9,11 +9,8 @@ if sys.version_info >= (3, 10): else: _LoopBoundMixin = object -class QueueEmpty(Exception): - """Raised when Queue.get_nowait() is called on an empty Queue.""" - -class QueueFull(Exception): - """Raised when the Queue.put_nowait() method is called on a full Queue.""" +class QueueEmpty(Exception): ... +class QueueFull(Exception): ... 
# Keep asyncio.__all__ updated with any changes to __all__ here if sys.version_info >= (3, 13): @@ -25,23 +22,11 @@ else: _T = TypeVar("_T") if sys.version_info >= (3, 13): - class QueueShutDown(Exception): - """Raised when putting on to or getting from a shut-down Queue.""" + class QueueShutDown(Exception): ... # If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy. # We can remove the noqa pragma when dropping 3.9 support. class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 - """A queue, useful for coordinating producer and consumer coroutines. - - If maxsize is less than or equal to zero, the queue size is infinite. If it - is an integer greater than 0, then "await put()" will block when the - queue reaches maxsize, until an item is removed by get(). - - Unlike the standard library Queue, you can reliably know this Queue's size - with qsize(), since your single-threaded asyncio application won't be - interrupted between calling qsize() and doing an operation on the Queue. - """ - if sys.version_info >= (3, 10): def __init__(self, maxsize: int = 0) -> None: ... else: @@ -51,107 +36,20 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... def _format(self) -> str: ... - def qsize(self) -> int: - """Number of items in the queue.""" - + def qsize(self) -> int: ... @property - def maxsize(self) -> int: - """Number of items allowed in the queue.""" - - def empty(self) -> bool: - """Return True if the queue is empty, False otherwise.""" - - def full(self) -> bool: - """Return True if there are maxsize items in the queue. - - Note: if the Queue was initialized with maxsize=0 (the default), - then full() is never True. - """ - - async def put(self, item: _T) -> None: - """Put an item into the queue. - - Put an item into the queue. If the queue is full, wait until a free - slot is available before adding item. - - Raises QueueShutDown if the queue has been shut down. - """ - - def put_nowait(self, item: _T) -> None: - """Put an item into the queue without blocking. - - If no free slot is immediately available, raise QueueFull. - - Raises QueueShutDown if the queue has been shut down. - """ - - async def get(self) -> _T: - """Remove and return an item from the queue. - - If queue is empty, wait until an item is available. - - Raises QueueShutDown if the queue has been shut down and is empty, or - if the queue has been shut down immediately. - """ - - def get_nowait(self) -> _T: - """Remove and return an item from the queue. - - Return an item if one is immediately available, else raise QueueEmpty. - - Raises QueueShutDown if the queue has been shut down and is empty, or - if the queue has been shut down immediately. - """ - - async def join(self) -> None: - """Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer calls task_done() to - indicate that the item was retrieved and all work on it is complete. - When the count of unfinished tasks drops to zero, join() unblocks. - """ - - def task_done(self) -> None: - """Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each get() used to fetch a task, - a subsequent call to task_done() tells the queue that the processing - on the task is complete. 
- - If a join() is currently blocking, it will resume when all items have - been processed (meaning that a task_done() call was received for every - item that had been put() into the queue). - - shutdown(immediate=True) calls task_done() for each remaining item in - the queue. - - Raises ValueError if called more times than there were items placed in - the queue. - """ - - def __class_getitem__(cls, type: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + async def put(self, item: _T) -> None: ... + def put_nowait(self, item: _T) -> None: ... + async def get(self) -> _T: ... + def get_nowait(self) -> _T: ... + async def join(self) -> None: ... + def task_done(self) -> None: ... + def __class_getitem__(cls, type: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 13): - def shutdown(self, immediate: bool = False) -> None: - """Shut-down the queue, making queue gets and puts raise QueueShutDown. - - By default, gets will only raise once the queue is empty. Set - 'immediate' to True to make gets raise immediately instead. - - All blocked callers of put() and get() will be unblocked. If - 'immediate', a task is marked as done for each item remaining in - the queue, which may unblock callers of join(). - """ - -class PriorityQueue(Queue[SupportsRichComparisonT]): - """A subclass of Queue; retrieves entries in priority order (lowest first). - - Entries are typically tuples of the form: (priority number, data). - """ + def shutdown(self, immediate: bool = False) -> None: ... -class LifoQueue(Queue[_T]): - """A subclass of Queue that retrieves most recently added entries first.""" +class PriorityQueue(Queue[SupportsRichComparisonT]): ... +class LifoQueue(Queue[_T]): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi index 25698e14a64e1..919e6521f8a15 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi @@ -17,98 +17,17 @@ _T = TypeVar("_T") if sys.version_info >= (3, 11): @final class Runner: - """A context manager that controls event loop life cycle. - - The context manager always creates a new event loop, - allows to run async functions inside it, - and properly finalizes the loop at the context manager exit. - - If debug is True, the event loop will be run in debug mode. - If loop_factory is passed, it is used for new event loop creation. - - asyncio.run(main(), debug=True) - - is a shortcut for - - with asyncio.Runner(debug=True) as runner: - runner.run(main()) - - The run() method can be called multiple times within the runner's context. - - This can be useful for interactive console (e.g. IPython), - unittest runners, console tools, -- everywhere when async code - is called from existing sync framework and where the preferred single - asyncio.run() call doesn't work. - - """ - def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... 
- def close(self) -> None: - """Shutdown and close event loop.""" - - def get_loop(self) -> AbstractEventLoop: - """Return embedded event loop.""" - - def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: - """Run code in the embedded event loop.""" + def close(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... if sys.version_info >= (3, 12): def run( main: Coroutine[Any, Any, _T], *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None - ) -> _T: - """Execute the coroutine and return the result. - - This function runs the passed coroutine, taking care of - managing the asyncio event loop, finalizing asynchronous - generators and closing the default executor. - - This function cannot be called when another asyncio event loop is - running in the same thread. - - If debug is True, the event loop will be run in debug mode. - If loop_factory is passed, it is used for new event loop creation. - - This function always creates a new event loop and closes it at the end. - It should be used as a main entry point for asyncio programs, and should - ideally only be called once. - - The executor is given a timeout duration of 5 minutes to shutdown. - If the executor hasn't finished within that duration, a warning is - emitted and the executor is closed. - - Example: - - async def main(): - await asyncio.sleep(1) - print('hello') - - asyncio.run(main()) - """ + ) -> _T: ... else: - def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: - """Execute the coroutine and return the result. - - This function runs the passed coroutine, taking care of - managing the asyncio event loop and finalizing asynchronous - generators. - - This function cannot be called when another asyncio event loop is - running in the same thread. - - If debug is True, the event loop will be run in debug mode. - - This function always creates a new event loop and closes it at the end. - It should be used as a main entry point for asyncio programs, and should - ideally only be called once. - - Example: - - async def main(): - await asyncio.sleep(1) - print('hello') - - asyncio.run(main()) - """ + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi index 358dd1abbc3f6..18c5df033e2f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi @@ -1,9 +1,3 @@ -"""Event loop using a selector and related classes. - -A selector is a "notify-when-ready" multiplexer. For a subclass which -also includes support for signal handling, see the unix_events sub-module. -""" - import selectors from socket import socket @@ -12,16 +6,5 @@ from . import base_events __all__ = ("BaseSelectorEventLoop",) class BaseSelectorEventLoop(base_events.BaseEventLoop): - """Selector event loop. - - See events.EventLoop for API specification. - """ - def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... - async def sock_recv(self, sock: socket, n: int) -> bytes: - """Receive data from the socket. - - The return value is a bytes object representing the data received. - The maximum amount of data to be received at once is specified by - nbytes. 
- """ + async def sock_recv(self, sock: socket, n: int) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi index 1a73b0cec2130..ab102f124c2e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi @@ -36,23 +36,6 @@ else: if sys.version_info < (3, 11): class _SSLPipe: - """An SSL "Pipe". - - An SSL pipe allows you to communicate with an SSL/TLS protocol instance - through memory buffers. It can be used to implement a security layer for an - existing connection where you don't have access to the connection's file - descriptor, or for some reason you don't want to use it. - - An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, - data is passed through untransformed. In wrapped mode, application level - data is encrypted to SSL record level data and vice versa. The SSL record - level is the lowest level in the SSL protocol suite and is what travels - as-is over the wire. - - An SslPipe initially is in "unwrapped" mode. To start SSL, call - do_handshake(). To shutdown SSL again, call unwrap(). - """ - max_size: ClassVar[int] _context: ssl.SSLContext @@ -65,101 +48,20 @@ if sys.version_info < (3, 11): _need_ssldata: bool _handshake_cb: Callable[[BaseException | None], None] | None _shutdown_cb: Callable[[], None] | None - def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: - """ - The *context* argument specifies the ssl.SSLContext to use. - - The *server_side* argument indicates whether this is a server side or - client side transport. - - The optional *server_hostname* argument can be used to specify the - hostname you are connecting to. You may only specify this parameter if - the _ssl module supports Server Name Indication (SNI). - """ - + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... @property - def context(self) -> ssl.SSLContext: - """The SSL context passed to the constructor.""" - + def context(self) -> ssl.SSLContext: ... @property - def ssl_object(self) -> ssl.SSLObject | None: - """The internal ssl.SSLObject instance. - - Return None if the pipe is not wrapped. - """ - + def ssl_object(self) -> ssl.SSLObject | None: ... @property - def need_ssldata(self) -> bool: - """Whether more record level data is needed to complete a handshake - that is currently in progress. - """ - + def need_ssldata(self) -> bool: ... @property - def wrapped(self) -> bool: - """ - Whether a security layer is currently in effect. - - Return False during handshake. - """ - - def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: - """Start the SSL handshake. - - Return a list of ssldata. A ssldata element is a list of buffers - - The optional *callback* argument can be used to install a callback that - will be called when the handshake is complete. The callback will be - called with None if successful, else an exception instance. - """ - - def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: - """Start the SSL shutdown sequence. - - Return a list of ssldata. A ssldata element is a list of buffers - - The optional *callback* argument can be used to install a callback that - will be called when the shutdown is complete. The callback will be - called without arguments. 
- """ - - def feed_eof(self) -> None: - """Send a potentially "ragged" EOF. - - This method will raise an SSL_ERROR_EOF exception if the EOF is - unexpected. - """ - - def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: - """Feed SSL record level data into the pipe. - - The data must be a bytes instance. It is OK to send an empty bytes - instance. This can be used to get ssldata for a handshake initiated by - this endpoint. - - Return a (ssldata, appdata) tuple. The ssldata element is a list of - buffers containing SSL data that needs to be sent to the remote SSL. - - The appdata element is a list of buffers containing plaintext data that - needs to be forwarded to the application. The appdata list may contain - an empty buffer indicating an SSL "close_notify" alert. This alert must - be acknowledged by calling shutdown(). - """ - - def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: - """Feed plaintext data into the pipe. - - Return an (ssldata, offset) tuple. The ssldata element is a list of - buffers containing record level data that needs to be sent to the - remote SSL instance. The offset is the number of plaintext bytes that - were processed, which may be less than the length of data. - - NOTE: In case of short writes, this call MUST be retried with the SAME - buffer passed into the *data* argument (i.e. the id() must be the - same). This is an OpenSSL requirement. A further particularity is that - a short write will always have offset == 0, because the _ssl module - does not enable partial writes. And even though the offset is zero, - there will still be encrypted data in ssldata. - """ + def wrapped(self) -> bool: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... + def feed_eof(self) -> None: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _sendfile_compatible: ClassVar[constants._SendfileMode] @@ -171,45 +73,16 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _ssl_protocol: SSLProtocol _closed: bool def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... - def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: - """Get optional transport information.""" - + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape - """Write some data bytes to the transport. - - This does not block; it buffers the data and arranges for it - to be sent out asynchronously. - """ - - def can_write_eof(self) -> Literal[False]: - """Return True if this transport supports write_eof(), False if not.""" + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def can_write_eof(self) -> Literal[False]: ... if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... def get_read_buffer_limits(self) -> tuple[int, int]: ... 
- def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: - """Set the high- and low-water limits for read flow control. - - These two values control when to call the upstream transport's - pause_reading() and resume_reading() methods. If specified, - the low-water limit must be less than or equal to the - high-water limit. Neither value can be negative. - - The defaults are implementation-specific. If only the - high-water limit is given, the low-water limit defaults to an - implementation-specific value less than or equal to the - high-water limit. Setting high to zero forces low to zero as - well, and causes pause_reading() to be called whenever the - buffer becomes non-empty. Setting low to zero causes - resume_reading() to be called only once the buffer is empty. - Use of zero for either limit is generally sub-optimal as it - reduces opportunities for doing I/O and computation - concurrently. - """ - - def get_read_buffer_size(self) -> int: - """Return the current size of the read buffer.""" + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_read_buffer_size(self) -> int: ... def __del__(self) -> None: ... @@ -219,12 +92,6 @@ else: _SSLProtocolBase: TypeAlias = protocols.Protocol class SSLProtocol(_SSLProtocolBase): - """SSL protocol. - - Implementation of SSL on top of a socket using incoming and outgoing - buffers which are ssl.MemoryBIO objects. - """ - _server_side: bool _server_hostname: str | None _sslcontext: ssl.SSLContext @@ -276,23 +143,8 @@ class SSLProtocol(_SSLProtocolBase): def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... - def connection_lost(self, exc: BaseException | None) -> None: - """Called when the low-level connection is lost or closed. - - The argument is an exception object or None (the latter - meaning a regular EOF is received or the connection was - aborted or closed). - """ - - def eof_received(self) -> None: - """Called when the other end of the low-level stream - is half-closed. - - If this returns a false value (including None), the transport - will close itself. If it returns a true value, closing the - transport is up to the protocol. - """ - + def connection_lost(self, exc: BaseException | None) -> None: ... + def eof_received(self) -> None: ... def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... def _start_shutdown(self) -> None: ... if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi index 938020b3a0f89..3324777f41686 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi @@ -1,5 +1,3 @@ -"""Support for running coroutines in parallel with staggered start times.""" - from collections.abc import Awaitable, Callable, Iterable from typing import Any @@ -9,54 +7,4 @@ __all__ = ("staggered_race",) async def staggered_race( coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None -) -> tuple[Any, int | None, list[Exception | None]]: - """Run coroutines with staggered start times and take the first to finish. - - This method takes an iterable of coroutine functions. The first one is - started immediately. 
From then on, whenever the immediately preceding one - fails (raises an exception), or when *delay* seconds has passed, the next - coroutine is started. This continues until one of the coroutines complete - successfully, in which case all others are cancelled, or until all - coroutines fail. - - The coroutines provided should be well-behaved in the following way: - - * They should only ``return`` if completed successfully. - - * They should always raise an exception if they did not complete - successfully. In particular, if they handle cancellation, they should - probably reraise, like this:: - - try: - # do work - except asyncio.CancelledError: - # undo partially completed work - raise - - Args: - coro_fns: an iterable of coroutine functions, i.e. callables that - return a coroutine object when called. Use ``functools.partial`` or - lambdas to pass arguments. - - delay: amount of time, in seconds, between starting coroutines. If - ``None``, the coroutines will run sequentially. - - loop: the event loop to use. - - Returns: - tuple *(winner_result, winner_index, exceptions)* where - - - *winner_result*: the result of the winning coroutine, or ``None`` - if no coroutines won. - - - *winner_index*: the index of the winning coroutine in - ``coro_fns``, or ``None`` if no coroutines won. If the winning - coroutine may return None on success, *winner_index* can be used - to definitively determine whether any coroutine won. - - - *exceptions*: list of exceptions returned by the coroutines. - ``len(exceptions)`` is equal to the number of coroutines actually - started, and the order is the same as in ``coro_fns``. The winning - coroutine's entry is ``None``. - - """ +) -> tuple[Any, int | None, list[Exception | None]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi index 968a5d08ee14d..33cffb11ed780 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi @@ -36,25 +36,7 @@ if sys.version_info >= (3, 10): limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> tuple[StreamReader, StreamWriter]: - """A wrapper for create_connection() returning a (reader, writer) pair. - - The reader returned is a StreamReader instance; the writer is a - StreamWriter instance. - - The arguments are all the usual arguments to create_connection() - except protocol_factory; most common are positional host and port, - with various optional keyword arguments following. - - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). - - (If you want to customize the StreamReader and/or - StreamReaderProtocol classes, just copy the code -- there's - really nothing special here except some convenience.) - """ - + ) -> tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | Sequence[str] | None = None, @@ -63,27 +45,7 @@ if sys.version_info >= (3, 10): limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> Server: - """Start a socket server, call back for each client connected. - - The first parameter, `client_connected_cb`, takes two parameters: - client_reader, client_writer. client_reader is a StreamReader - object, while client_writer is a StreamWriter object. 
This - parameter can either be a plain callback function or a coroutine; - if it is a coroutine, it will be automatically converted into a - Task. - - The rest of the arguments are all the usual arguments to - loop.create_server() except protocol_factory; most common are - positional host and port, with various optional keyword arguments - following. The return value is the same as loop.create_server(). - - Additional optional keyword argument is limit (to set the buffer - limit passed to the StreamReader). - - The return value is the same as loop.create_server(), i.e. a - Server object which can be used to stop the service. - """ + ) -> Server: ... else: async def open_connection( @@ -94,25 +56,7 @@ else: limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> tuple[StreamReader, StreamWriter]: - """A wrapper for create_connection() returning a (reader, writer) pair. - - The reader returned is a StreamReader instance; the writer is a - StreamWriter instance. - - The arguments are all the usual arguments to create_connection() - except protocol_factory; most common are positional host and port, - with various optional keyword arguments following. - - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). - - (If you want to customize the StreamReader and/or - StreamReaderProtocol classes, just copy the code -- there's - really nothing special here except some convenience.) - """ - + ) -> tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | None = None, @@ -122,46 +66,20 @@ else: limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> Server: - """Start a socket server, call back for each client connected. - - The first parameter, `client_connected_cb`, takes two parameters: - client_reader, client_writer. client_reader is a StreamReader - object, while client_writer is a StreamWriter object. This - parameter can either be a plain callback function or a coroutine; - if it is a coroutine, it will be automatically converted into a - Task. - - The rest of the arguments are all the usual arguments to - loop.create_server() except protocol_factory; most common are - positional host and port, with various optional keyword arguments - following. The return value is the same as loop.create_server(). - - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). - - The return value is the same as loop.create_server(), i.e. a - Server object which can be used to stop the service. - """ + ) -> Server: ... if sys.platform != "win32": if sys.version_info >= (3, 10): async def open_unix_connection( path: StrPath | None = None, *, limit: int = 65536, **kwds: Any - ) -> tuple[StreamReader, StreamWriter]: - """Similar to `open_connection` but works with UNIX Domain Sockets.""" - + ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any - ) -> Server: - """Similar to `start_server` but works with UNIX Domain Sockets.""" + ) -> Server: ... 
else: async def open_unix_connection( path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any - ) -> tuple[StreamReader, StreamWriter]: - """Similar to `open_connection` but works with UNIX Domain Sockets.""" - + ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, @@ -169,30 +87,12 @@ if sys.platform != "win32": loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any, - ) -> Server: - """Similar to `start_server` but works with UNIX Domain Sockets.""" + ) -> Server: ... class FlowControlMixin(protocols.Protocol): - """Reusable flow control logic for StreamWriter.drain(). - - This implements the protocol methods pause_writing(), - resume_writing() and connection_lost(). If the subclass overrides - these it must call the super methods. - - StreamWriter.drain() must wait for _drain_helper() coroutine. - """ - def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): - """Helper class to adapt between Protocol and StreamReader. - - (This is a helper class instead of making StreamReader itself a - Protocol subclass, because the StreamReader has other potential - uses, and to prevent the user of the StreamReader to accidentally - call inappropriate methods of the protocol.) - """ - def __init__( self, stream_reader: StreamReader, @@ -202,15 +102,6 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __del__(self) -> None: ... class StreamWriter: - """Wraps a Transport. - - This exposes write(), writelines(), [can_]write_eof(), - get_extra_info() and close(). It adds drain() which returns an - optional Future on which you can wait for flow control. It also - adds a transport property which references the Transport - directly. - """ - def __init__( self, transport: transports.WriteTransport, @@ -228,14 +119,7 @@ class StreamWriter: def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... def get_extra_info(self, name: str, default: Any = None) -> Any: ... - async def drain(self) -> None: - """Flush the write buffer. - - The intended use is to write - - w.write(data) - await w.drain() - """ + async def drain(self) -> None: ... if sys.version_info >= (3, 12): async def start_tls( self, @@ -244,13 +128,12 @@ class StreamWriter: server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> None: - """Upgrade an existing stream-based connection to TLS.""" + ) -> None: ... elif sys.version_info >= (3, 11): async def start_tls( self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None - ) -> None: - """Upgrade an existing stream-based connection to TLS.""" + ) -> None: ... + if sys.version_info >= (3, 13): def __del__(self, warnings: ModuleType = ...) -> None: ... elif sys.version_info >= (3, 11): @@ -262,113 +145,15 @@ class StreamReader: def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... - def at_eof(self) -> bool: - """Return True if the buffer is empty and 'feed_eof' was called.""" - + def at_eof(self) -> bool: ... def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... 
- async def readline(self) -> bytes: - """Read chunk of data from the stream until newline (b' - ') is found. - - On success, return chunk that ends with newline. If only partial - line can be read due to EOF, return incomplete line without - terminating newline. When EOF was reached while no bytes read, empty - bytes object is returned. - - If limit is reached, ValueError will be raised. In that case, if - newline was found, complete line including newline will be removed - from internal buffer. Else, internal buffer will be cleared. Limit is - compared against part of the line without newline. - - If stream was paused, this function will automatically resume it if - needed. - """ + async def readline(self) -> bytes: ... if sys.version_info >= (3, 13): - async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: - """Read data from the stream until ``separator`` is found. - - On success, the data and separator will be removed from the - internal buffer (consumed). Returned data will include the - separator at the end. - - Configured stream limit is used to check result. Limit sets the - maximal length of data that can be returned, not counting the - separator. - - If an EOF occurs and the complete separator is still not found, - an IncompleteReadError exception will be raised, and the internal - buffer will be reset. The IncompleteReadError.partial attribute - may contain the separator partially. - - If the data cannot be read because of over limit, a - LimitOverrunError exception will be raised, and the data - will be left in the internal buffer, so it can be read again. - - The ``separator`` may also be a tuple of separators. In this - case the return value will be the shortest possible that has any - separator as the suffix. For the purposes of LimitOverrunError, - the shortest possible separator is considered to be the one that - matched. - """ + async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: ... else: - async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: - """Read data from the stream until ``separator`` is found. - - On success, the data and separator will be removed from the - internal buffer (consumed). Returned data will include the - separator at the end. - - Configured stream limit is used to check result. Limit sets the - maximal length of data that can be returned, not counting the - separator. - - If an EOF occurs and the complete separator is still not found, - an IncompleteReadError exception will be raised, and the internal - buffer will be reset. The IncompleteReadError.partial attribute - may contain the separator partially. - - If the data cannot be read because of over limit, a - LimitOverrunError exception will be raised, and the data - will be left in the internal buffer, so it can be read again. - """ - - async def read(self, n: int = -1) -> bytes: - """Read up to `n` bytes from the stream. - - If `n` is not provided or set to -1, - read until EOF, then return all read bytes. - If EOF was received and the internal buffer is empty, - return an empty bytes object. - - If `n` is 0, return an empty bytes object immediately. - - If `n` is positive, return at most `n` available bytes - as soon as at least 1 byte is available in the internal buffer. - If EOF is received before any byte is read, return an empty - bytes object. - - Returned value is not limited with limit, configured at stream - creation. 
- - If stream was paused, this function will automatically resume it if - needed. - """ - - async def readexactly(self, n: int) -> bytes: - """Read exactly `n` bytes. - - Raise an IncompleteReadError if EOF is reached before `n` bytes can be - read. The IncompleteReadError.partial attribute of the exception will - contain the partial read bytes. - - if n is zero, return empty bytes object. - - Returned value is not limited with limit, configured at stream - creation. - - If stream was paused, this function will automatically resume it if - needed. - """ + async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: ... + async def read(self, n: int = -1) -> bytes: ... + async def readexactly(self, n: int) -> bytes: ... def __aiter__(self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi index b82f0ba4ea3a0..ceee2b5b90a09 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi @@ -13,8 +13,6 @@ STDOUT: int DEVNULL: int class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): - """Like StreamReaderProtocol, but for a subprocess.""" - stdin: streams.StreamWriter | None stdout: streams.StreamReader | None stderr: streams.StreamReader | None @@ -31,9 +29,7 @@ class Process: ) -> None: ... @property def returncode(self) -> int | None: ... - async def wait(self) -> int: - """Wait until the process exit and return the process return code.""" - + async def wait(self) -> int: ... def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi index fe720da9ab77e..30b7c9129f6f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi @@ -17,31 +17,10 @@ else: _T = TypeVar("_T") class TaskGroup: - """Asynchronous context manager for managing groups of tasks. - - Example use: - - async with asyncio.TaskGroup() as group: - task1 = group.create_task(some_coroutine(...)) - task2 = group.create_task(other_coroutine(...)) - print("Both tasks have completed now.") - - All tasks are awaited when the context manager exits. - - Any exceptions other than `asyncio.CancelledError` raised within - a task will cancel all remaining tasks and wait for them to exit. - The exceptions are then combined and raised as an `ExceptionGroup`. - """ - _loop: AbstractEventLoop | None _tasks: set[Task[Any]] async def __aenter__(self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... - def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: - """Create a new task in this group and return it. - - Similar to `asyncio.create_task`. - """ - + def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... def _on_task_done(self, task: Task[object]) -> None: ... 
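The taskgroups.pyi hunk above drops the TaskGroup docstring together with its usage example. For reference while reviewing, a minimal sketch of the documented behaviour (Python 3.11+; fetch_a/fetch_b are hypothetical placeholder coroutines, not names from this diff or from typeshed):

    import asyncio

    async def fetch_a() -> str:
        await asyncio.sleep(0.1)  # stand-in for real work
        return "a"

    async def fetch_b() -> str:
        await asyncio.sleep(0.2)
        return "b"

    async def main() -> None:
        # All tasks created in the group are awaited when the async-with
        # block exits; any failure cancels the remaining tasks and the
        # errors are re-raised as an ExceptionGroup.
        async with asyncio.TaskGroup() as group:
            t1 = group.create_task(fetch_a())
            t2 = group.create_task(fetch_b())
        print(t1.result(), t2.result())

    asyncio.run(main())
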
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi index 61563b419a9cf..1442f7400a9c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi @@ -1,5 +1,3 @@ -"""Support for tasks, coroutines and the scheduler.""" - import concurrent.futures import sys from _asyncio import ( @@ -92,102 +90,18 @@ if sys.version_info >= (3, 13): @type_check_only class _SyncAndAsyncIterator(Iterator[_T_co], AsyncIterator[_T_co], Protocol[_T_co]): ... - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: - """Create an iterator of awaitables or their results in completion order. - - Run the supplied awaitables concurrently. The returned object can be - iterated to obtain the results of the awaitables as they finish. - - The object returned can be iterated as an asynchronous iterator or a plain - iterator. When asynchronous iteration is used, the originally-supplied - awaitables are yielded if they are tasks or futures. This makes it easy to - correlate previously-scheduled tasks with their results: - - ipv4_connect = create_task(open_connection("127.0.0.1", 80)) - ipv6_connect = create_task(open_connection("::1", 80)) - tasks = [ipv4_connect, ipv6_connect] - - async for earliest_connect in as_completed(tasks): - # earliest_connect is done. The result can be obtained by - # awaiting it or calling earliest_connect.result() - reader, writer = await earliest_connect - - if earliest_connect is ipv6_connect: - print("IPv6 connection established.") - else: - print("IPv4 connection established.") - - During asynchronous iteration, implicitly-created tasks will be yielded for - supplied awaitables that aren't tasks or futures. - - When used as a plain iterator, each iteration yields a new coroutine that - returns the result or raises the exception of the next completed awaitable. - This pattern is compatible with Python versions older than 3.13: - - ipv4_connect = create_task(open_connection("127.0.0.1", 80)) - ipv6_connect = create_task(open_connection("::1", 80)) - tasks = [ipv4_connect, ipv6_connect] - - for next_connect in as_completed(tasks): - # next_connect is not one of the original task objects. It must be - # awaited to obtain the result value or raise the exception of the - # awaitable that finishes next. - reader, writer = await next_connect - - A TimeoutError is raised if the timeout occurs before all awaitables are - done. This is raised by the async for loop during asynchronous iteration or - by the coroutines yielded during plain iteration. - """ + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: ... elif sys.version_info >= (3, 10): - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: - """Return an iterator whose values are coroutines. - - When waiting for the yielded coroutines you'll get the results (or - exceptions!) of the original Futures (or coroutines), in the order - in which and as soon as they complete. - - This differs from PEP 3148; the proper way to use this is: - - for f in as_completed(fs): - result = await f # The 'await' may raise. - # Use result. - - If a timeout is specified, the 'await' will raise - TimeoutError when the timeout occurs before all Futures are done. - - Note: The futures 'f' are not necessarily members of fs. 
- """ + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... else: def as_completed( fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None - ) -> Iterator[Future[_T]]: - """Return an iterator whose values are coroutines. - - When waiting for the yielded coroutines you'll get the results (or - exceptions!) of the original Futures (or coroutines), in the order - in which and as soon as they complete. - - This differs from PEP 3148; the proper way to use this is: - - for f in as_completed(fs): - result = await f # The 'await' may raise. - # Use result. - - If a timeout is specified, the 'await' will raise - TimeoutError when the timeout occurs before all Futures are done. - - Note: The futures 'f' are not necessarily members of fs. - """ + ) -> Iterator[Future[_T]]: ... @overload -def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: # type: ignore[overload-overlap] - """Wrap a coroutine or an awaitable in a future. - - If the argument is a Future, it is returned directly. - """ - +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[overload-overlap] @overload def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... @@ -199,37 +113,7 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No # N.B. Having overlapping overloads is the only way to get acceptable type inference in all edge cases. if sys.version_info >= (3, 10): @overload - def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: # type: ignore[overload-overlap] - """Return a future aggregating results from the given coroutines/futures. - - Coroutines will be wrapped in a future and scheduled in the event - loop. They will not necessarily be scheduled in the same order as - passed in. - - All futures must share the same event loop. If all the tasks are - done successfully, the returned future's result is the list of - results (in the order of the original sequence, not necessarily - the order of results arrival). If *return_exceptions* is True, - exceptions in the tasks are treated the same as successful - results, and gathered in the result list; otherwise, the first - raised exception will be immediately propagated to the returned - future. - - Cancellation: if the outer Future is cancelled, all children (that - have not completed yet) are also cancelled. If any child is - cancelled, this is treated as if it raised CancelledError -- - the outer Future is *not* cancelled in this case. (This is to - prevent the cancellation of one child to cause other children to - be cancelled.) - - If *return_exceptions* is False, cancelling gather() after it - has been marked done won't cancel any submitted awaitables. - For instance, gather can be marked done after propagating an - exception to the caller, therefore, calling ``gather.cancel()`` - after catching an exception (raised by one of the awaitables) from - gather won't cancel any other awaitables. - """ - + def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... 
# type: ignore[overload-overlap] @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: Literal[False] = False @@ -344,37 +228,7 @@ else: @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], /, *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False - ) -> Future[tuple[_T1]]: - """Return a future aggregating results from the given coroutines/futures. - - Coroutines will be wrapped in a future and scheduled in the event - loop. They will not necessarily be scheduled in the same order as - passed in. - - All futures must share the same event loop. If all the tasks are - done successfully, the returned future's result is the list of - results (in the order of the original sequence, not necessarily - the order of results arrival). If *return_exceptions* is True, - exceptions in the tasks are treated the same as successful - results, and gathered in the result list; otherwise, the first - raised exception will be immediately propagated to the returned - future. - - Cancellation: if the outer Future is cancelled, all children (that - have not completed yet) are also cancelled. If any child is - cancelled, this is treated as if it raised CancelledError -- - the outer Future is *not* cancelled in this case. (This is to - prevent the cancellation of one child to cause other children to - be cancelled.) - - If *return_exceptions* is False, cancelling gather() after it - has been marked done won't cancel any submitted awaitables. - For instance, gather can be marked done after propagating an - exception to the caller, therefore, calling ``gather.cancel()`` - after catching an exception (raised by one of the awaitables) from - gather won't cancel any other awaitables. - """ - + ) -> Future[tuple[_T1]]: ... @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], @@ -496,137 +350,29 @@ else: ) -> Future[list[_T | BaseException]]: ... # unlike some asyncio apis, This does strict runtime checking of actually being a coroutine, not of any future-like. -def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: - """Submit a coroutine object to a given event loop. - - Return a concurrent.futures.Future to access the result. - """ +def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... if sys.version_info >= (3, 10): - def shield(arg: _FutureLike[_T]) -> Future[_T]: - """Wait for a future, shielding it from cancellation. - - The statement - - task = asyncio.create_task(something()) - res = await shield(task) - - is exactly equivalent to the statement - - res = await something() - - *except* that if the coroutine containing it is cancelled, the - task running in something() is not cancelled. From the POV of - something(), the cancellation did not happen. But its caller is - still cancelled, so the yield-from expression still raises - CancelledError. Note: If something() is cancelled by other means - this will still cancel shield(). - - If you want to completely ignore cancellation (not recommended) - you can combine shield() with a try/except clause, as follows: - - task = asyncio.create_task(something()) - try: - res = await shield(task) - except CancelledError: - res = None - - Save a reference to tasks passed to this function, to avoid - a task disappearing mid-execution. 
The event loop only keeps - weak references to tasks. A task that isn't referenced elsewhere - may get garbage collected at any time, even before it's done. - """ - + def shield(arg: _FutureLike[_T]) -> Future[_T]: ... @overload - async def sleep(delay: float) -> None: - """Coroutine that completes after a given time (in seconds).""" - + async def sleep(delay: float) -> None: ... @overload async def sleep(delay: float, result: _T) -> _T: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: - """Wait for the single Future or coroutine to complete, with timeout. - - Coroutine will be wrapped in Task. - - Returns result of the Future or coroutine. When a timeout occurs, - it cancels the task and raises TimeoutError. To avoid the task - cancellation, wrap it in shield(). - - If the wait is cancelled, the task is also cancelled. - - If the task suppresses the cancellation and returns a value instead, - that value is returned. - - This function is a coroutine. - """ + async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... else: - def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: - """Wait for a future, shielding it from cancellation. - - The statement - - res = await shield(something()) - - is exactly equivalent to the statement - - res = await something() - - *except* that if the coroutine containing it is cancelled, the - task running in something() is not cancelled. From the POV of - something(), the cancellation did not happen. But its caller is - still cancelled, so the yield-from expression still raises - CancelledError. Note: If something() is cancelled by other means - this will still cancel shield(). - - If you want to completely ignore cancellation (not recommended) - you can combine shield() with a try/except clause, as follows: - - try: - res = await shield(something()) - except CancelledError: - res = None - """ - + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... @overload - async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: - """Coroutine that completes after a given time (in seconds).""" - + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... @overload async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: - """Wait for the single Future or coroutine to complete, with timeout. - - Coroutine will be wrapped in Task. - - Returns result of the Future or coroutine. When a timeout occurs, - it cancels the task and raises TimeoutError. To avoid the task - cancellation, wrap it in shield(). - - If the wait is cancelled, the task is also cancelled. - - This function is a coroutine. - """ + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... if sys.version_info >= (3, 11): @overload async def wait( fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[_FT], set[_FT]]: - """Wait for the Futures or Tasks given by fs to complete. - - The fs iterable must not be empty. - - Returns two sets of Future: (done, pending). - - Usage: - - done, pending = await asyncio.wait(fs) - - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ - + ) -> tuple[set[_FT], set[_FT]]: ... 
@overload async def wait( fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -636,23 +382,7 @@ elif sys.version_info >= (3, 10): @overload async def wait( # type: ignore[overload-overlap] fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[_FT], set[_FT]]: - """Wait for the Futures and coroutines given by fs to complete. - - The fs iterable must not be empty. - - Coroutines will be wrapped in Tasks. - - Returns two sets of Future: (done, pending). - - Usage: - - done, pending = await asyncio.wait(fs) - - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ - + ) -> tuple[set[_FT], set[_FT]]: ... @overload async def wait( fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -666,23 +396,7 @@ else: loop: AbstractEventLoop | None = None, timeout: float | None = None, return_when: str = "ALL_COMPLETED", - ) -> tuple[set[_FT], set[_FT]]: - """Wait for the Futures and coroutines given by fs to complete. - - The fs iterable must not be empty. - - Coroutines will be wrapped in Tasks. - - Returns two sets of Future: (done, pending). - - Usage: - - done, pending = await asyncio.wait(fs) - - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ - + ) -> tuple[set[_FT], set[_FT]]: ... @overload async def wait( fs: Iterable[Awaitable[_T]], @@ -697,28 +411,18 @@ if sys.version_info >= (3, 12): else: _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] -def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: - """Return a set of all tasks for the loop.""" +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... if sys.version_info >= (3, 11): - def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: - """Schedule the execution of a coroutine object in a spawn task. - - Return a Task object. - """ + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... else: - def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: - """Schedule the execution of a coroutine object in a spawn task. - - Return a Task object. - """ + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... if sys.version_info >= (3, 12): from _asyncio import current_task as current_task else: - def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: - """Return a currently executed task.""" + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... if sys.version_info >= (3, 14): def eager_task_factory( @@ -766,21 +470,6 @@ if sys.version_info >= (3, 12): context: Context | None = None, ) -> _TaskT_co: ... - def create_eager_task_factory(custom_task_constructor: _CustomTaskConstructor[_TaskT_co]) -> _EagerTaskFactoryType[_TaskT_co]: - """Create a function suitable for use as a task factory on an event-loop. - - Example usage: - - loop.set_task_factory( - asyncio.create_eager_task_factory(my_task_constructor)) - - Now, tasks created will be started immediately (rather than being first - scheduled to an event loop). 
The constructor argument can be any callable - that returns a Task-compatible object and has a signature compatible - with `Task.__init__`; it must have the `eager_start` keyword argument. - - Most applications will use `Task` for `custom_task_constructor` and in - this case there's no need to call `create_eager_task_factory()` - directly. Instead the global `eager_task_factory` instance can be - used. E.g. `loop.set_task_factory(asyncio.eager_task_factory)`. - """ + def create_eager_task_factory( + custom_task_constructor: _CustomTaskConstructor[_TaskT_co], + ) -> _EagerTaskFactoryType[_TaskT_co]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi index 6fd5104a57c29..00aae2ea814cb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi @@ -1,5 +1,3 @@ -"""High-level support for working with threads in asyncio""" - from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -9,13 +7,4 @@ __all__ = ("to_thread",) _P = ParamSpec("_P") _R = TypeVar("_R") -async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: - """Asynchronously run function *func* in a separate thread. - - Any *args and **kwargs supplied for this function are directly passed - to *func*. Also, the current :class:`contextvars.Context` is propagated, - allowing context variables from the main thread to be accessed in the - separate thread. - - Return a coroutine that can be awaited to get the eventual result of *func*. - """ +async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi index 06d2eac5b0e29..668cccbfe8b18 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi @@ -7,66 +7,14 @@ __all__ = ("Timeout", "timeout", "timeout_at") @final class Timeout: - """Asynchronous context manager for cancelling overdue coroutines. - - Use `timeout()` or `timeout_at()` rather than instantiating this class directly. - """ - - def __init__(self, when: float | None) -> None: - """Schedule a timeout that will trigger at a given loop time. - - - If `when` is `None`, the timeout will never trigger. - - If `when < loop.time()`, the timeout will trigger on the next - iteration of the event loop. - """ - - def when(self) -> float | None: - """Return the current deadline.""" - - def reschedule(self, when: float | None) -> None: - """Reschedule the timeout.""" - - def expired(self) -> bool: - """Is timeout expired during execution?""" - + def __init__(self, when: float | None) -> None: ... + def when(self) -> float | None: ... + def reschedule(self, when: float | None) -> None: ... + def expired(self) -> bool: ... async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... -def timeout(delay: float | None) -> Timeout: - """Timeout async context manager. - - Useful in cases when you want to apply timeout logic around block - of code or in cases when asyncio.wait_for is not suitable. For example: - - >>> async with asyncio.timeout(10): # 10 seconds timeout - ... 
await long_running_task() - - - delay - value in seconds or None to disable timeout logic - - long_running_task() is interrupted by raising asyncio.CancelledError, - the top-most affected timeout() context manager converts CancelledError - into TimeoutError. - """ - -def timeout_at(when: float | None) -> Timeout: - """Schedule the timeout at absolute time. - - Like timeout() but argument gives absolute time in the same clock system - as loop.time(). - - Please note: it is not POSIX time but a time with - undefined starting base, e.g. the time of the system power on. - - >>> async with asyncio.timeout_at(loop.time() + 10): - ... await long_running_task() - - - when - a deadline when timeout occurs or None to disable timeout logic - - long_running_task() is interrupted by raising asyncio.CancelledError, - the top-most affected timeout() context manager converts CancelledError - into TimeoutError. - """ +def timeout(delay: float | None) -> Timeout: ... +def timeout_at(when: float | None) -> Timeout: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi index e74fd30eb5038..bc8b809b9c055 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi @@ -1,5 +1,3 @@ -"""Tools to analyze tasks running in asyncio programs.""" - import sys from collections.abc import Iterable from enum import Enum @@ -33,33 +31,16 @@ class NodeType(Enum): TASK = 2 class CycleFoundException(Exception): - """Raised when there is a cycle when drawing the call tree.""" - cycles: list[list[int]] id2name: dict[int, str] def __init__(self, cycles: list[list[int]], id2name: dict[int, str]) -> None: ... def get_all_awaited_by(pid: SupportsIndex) -> list[_AwaitedInfo]: ... -def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: - """ - Build a list of strings for pretty-print an async call tree. - - The call tree is produced by `get_all_async_stacks()`, prefixing tasks - with `task_emoji` and coroutine frames with `cor_emoji`. - """ - +def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: ... def build_task_table(result: Iterable[_AwaitedInfo]) -> list[list[int | str]]: ... if sys.version_info >= (3, 14): - def exit_with_permission_help_text() -> None: - """ - Prints a message pointing to platform-specific permission help text and exits the program. - This function is called when a PermissionError is encountered while trying - to attach to a process. - """ - -def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: - """Build and print a table of all pending tasks under `pid`.""" + def exit_with_permission_help_text() -> None: ... -def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: - """Build and print a tree of all pending tasks under `pid`.""" +def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: ... +def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: ... 
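The timeouts.pyi hunk above removes the asyncio.timeout()/timeout_at() docstrings, which carried the only usage example for these context managers. A minimal sketch of that documented pattern (Python 3.11+; long_running_task is a hypothetical placeholder coroutine):

    import asyncio

    async def long_running_task() -> None:
        await asyncio.sleep(60)  # stand-in for real work

    async def main() -> None:
        try:
            # If the block does not finish within 10 seconds, the context
            # manager converts the internal CancelledError into TimeoutError,
            # as the removed docstring describes.
            async with asyncio.timeout(10):
                await long_running_task()
        except TimeoutError:
            print("timed out")

    asyncio.run(main())
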
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi index 5b2f7d12489c2..cc870d5e0b9ad 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi @@ -1,5 +1,3 @@ -"""Abstract Transport class.""" - from asyncio.events import AbstractEventLoop from asyncio.protocols import BaseProtocol from collections.abc import Iterable, Mapping @@ -10,225 +8,50 @@ from typing import Any __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") class BaseTransport: - """Base class for transports.""" - __slots__ = ("_extra",) def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... - def get_extra_info(self, name: str, default: Any = None) -> Any: - """Get optional transport information.""" - - def is_closing(self) -> bool: - """Return True if the transport is closing or closed.""" - - def close(self) -> None: - """Close the transport. - - Buffered data will be flushed asynchronously. No more data - will be received. After all buffered data is flushed, the - protocol's connection_lost() method will (eventually) be - called with None as its argument. - """ - - def set_protocol(self, protocol: BaseProtocol) -> None: - """Set a new protocol.""" - - def get_protocol(self) -> BaseProtocol: - """Return the current protocol.""" + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + def is_closing(self) -> bool: ... + def close(self) -> None: ... + def set_protocol(self, protocol: BaseProtocol) -> None: ... + def get_protocol(self) -> BaseProtocol: ... class ReadTransport(BaseTransport): - """Interface for read-only transports.""" - __slots__ = () - def is_reading(self) -> bool: - """Return True if the transport is receiving.""" - - def pause_reading(self) -> None: - """Pause the receiving end. - - No data will be passed to the protocol's data_received() - method until resume_reading() is called. - """ - - def resume_reading(self) -> None: - """Resume the receiving end. - - Data received will once again be passed to the protocol's - data_received() method. - """ + def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... + def resume_reading(self) -> None: ... class WriteTransport(BaseTransport): - """Interface for write-only transports.""" - __slots__ = () - def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: - """Set the high- and low-water limits for write flow control. - - These two values control when to call the protocol's - pause_writing() and resume_writing() methods. If specified, - the low-water limit must be less than or equal to the - high-water limit. Neither value can be negative. - - The defaults are implementation-specific. If only the - high-water limit is given, the low-water limit defaults to an - implementation-specific value less than or equal to the - high-water limit. Setting high to zero forces low to zero as - well, and causes pause_writing() to be called whenever the - buffer becomes non-empty. Setting low to zero causes - resume_writing() to be called only once the buffer is empty. - Use of zero for either limit is generally sub-optimal as it - reduces opportunities for doing I/O and computation - concurrently. 
- """ - - def get_write_buffer_size(self) -> int: - """Return the current size of the write buffer.""" - - def get_write_buffer_limits(self) -> tuple[int, int]: - """Get the high and low watermarks for write flow control. - Return a tuple (low, high) where low and high are - positive number of bytes. - """ - - def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape - """Write some data bytes to the transport. - - This does not block; it buffers the data and arranges for it - to be sent out asynchronously. - """ - - def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]]) -> None: # any memoryview format or shape - """Write a list (or any iterable) of data bytes to the transport. - - The default implementation concatenates the arguments and - calls write() on the result. - """ - - def write_eof(self) -> None: - """Close the write end after flushing buffered data. - - (This is like typing ^D into a UNIX program reading from stdin.) - - Data may still be received. - """ - - def can_write_eof(self) -> bool: - """Return True if this transport supports write_eof(), False if not.""" - - def abort(self) -> None: - """Close the transport immediately. - - Buffered data will be lost. No more data will be received. - The protocol's connection_lost() method will (eventually) be - called with None as its argument. - """ + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_write_buffer_size(self) -> int: ... + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def writelines( + self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]] + ) -> None: ... # any memoryview format or shape + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def abort(self) -> None: ... class Transport(ReadTransport, WriteTransport): - """Interface representing a bidirectional transport. - - There may be several implementations, but typically, the user does - not implement new transports; rather, the platform provides some - useful transports that are implemented using the platform's best - practices. - - The user never instantiates a transport directly; they call a - utility function, passing it a protocol factory and other - information necessary to create the transport and protocol. (E.g. - EventLoop.create_connection() or EventLoop.create_server().) - - The utility function will asynchronously create a transport and a - protocol and hook them up by calling the protocol's - connection_made() method, passing it the transport. - - The implementation here raises NotImplemented for every method - except writelines(), which calls write() in a loop. - """ - __slots__ = () class DatagramTransport(BaseTransport): - """Interface for datagram (UDP) transports.""" - __slots__ = () - def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: - """Send data to the transport. - - This does not block; it buffers the data and arranges for it - to be sent out asynchronously. - addr is target socket address. - If addr is None use target address pointed on transport creation. - If data is an empty bytes object a zero-length datagram will be - sent. - """ - - def abort(self) -> None: - """Close the transport immediately. - - Buffered data will be lost. No more data will be received. 
- The protocol's connection_lost() method will (eventually) be - called with None as its argument. - """ + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... + def abort(self) -> None: ... class SubprocessTransport(BaseTransport): __slots__ = () - def get_pid(self) -> int: - """Get subprocess id.""" - - def get_returncode(self) -> int | None: - """Get subprocess returncode. - - See also - http://docs.python.org/3/library/subprocess#subprocess.Popen.returncode - """ - - def get_pipe_transport(self, fd: int) -> BaseTransport | None: - """Get transport for pipe with number fd.""" - - def send_signal(self, signal: int) -> None: - """Send signal to subprocess. - - See also: - docs.python.org/3/library/subprocess#subprocess.Popen.send_signal - """ - - def terminate(self) -> None: - """Stop the subprocess. - - Alias for close() method. - - On Posix OSs the method sends SIGTERM to the subprocess. - On Windows the Win32 API function TerminateProcess() - is called to stop the subprocess. - - See also: - http://docs.python.org/3/library/subprocess#subprocess.Popen.terminate - """ - - def kill(self) -> None: - """Kill the subprocess. - - On Posix OSs the function sends SIGKILL to the subprocess. - On Windows kill() is an alias for terminate(). - - See also: - http://docs.python.org/3/library/subprocess#subprocess.Popen.kill - """ + def get_pid(self) -> int: ... + def get_returncode(self) -> int | None: ... + def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... class _FlowControlMixin(Transport): - """All the logic for (write) flow control in a mix-in base class. - - The subclass must implement get_write_buffer_size(). It must call - _maybe_pause_protocol() whenever the write buffer size increases, - and _maybe_resume_protocol() whenever it decreases. It may also - override set_write_buffer_limits() (e.g. to specify different - defaults). - - The subclass constructor must call super().__init__(extra). This - will call set_write_buffer_limits(). - - The user may call set_write_buffer_limits() and - get_write_buffer_size(), and their protocol's pause_writing() and - resume_writing() may be called. - """ - __slots__ = ("_loop", "_protocol_paused", "_high_water", "_low_water") def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi index 4d08d240165be..492f1e42adf20 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi @@ -14,13 +14,6 @@ _WriteBuffer: TypeAlias = bytearray | memoryview _CMSG: TypeAlias = tuple[int, int, bytes] class TransportSocket: - """A socket-like wrapper for exposing real transport sockets. - - These objects can be safely returned by APIs like - `transport.get_extra_info('socket')`. All potentially disruptive - operations (like "socket.close()") are banned. - """ - __slots__ = ("_sock",) def __init__(self, sock: socket.socket) -> None: ... 
@property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi index 679f2e6734780..9071ee9a2fa7e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi @@ -1,5 +1,3 @@ -"""Selector event loop for Unix with signal handling.""" - import sys import types from _typeshed import StrPath @@ -52,171 +50,45 @@ if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class AbstractChildWatcher: - """Abstract base class for monitoring child processes. - - Objects derived from this class monitor a collection of subprocesses and - report their termination or interruption by a signal. - - New callbacks are registered with .add_child_handler(). Starting a new - process must be done within a 'with' block to allow the watcher to suspend - its activity until the new process if fully registered (this is needed to - prevent a race condition in some implementations). - - Example: - with watcher: - proc = subprocess.Popen("sleep 1") - watcher.add_child_handler(proc.pid, callback) - - Notes: - Implementations of this class must be thread-safe. - - Since child watcher objects may catch the SIGCHLD signal and call - waitpid(-1), there should be only one active object per process. - """ - @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: - """Register a new child handler. - - Arrange for callback(pid, returncode, *args) to be called when - process 'pid' terminates. Specifying another callback for the same - process replaces the previous handler. - - Note: callback() must be thread-safe. - """ - + ) -> None: ... @abstractmethod - def remove_child_handler(self, pid: int) -> bool: - """Removes the handler for process 'pid'. - - The function returns True if the handler was successfully removed, - False if there was nothing to remove. - """ - + def remove_child_handler(self, pid: int) -> bool: ... @abstractmethod - def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: - """Attach the watcher to an event loop. - - If the watcher was previously attached to an event loop, then it is - first detached before attaching to the new loop. - - Note: loop may be None. - """ - + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... @abstractmethod - def close(self) -> None: - """Close the watcher. - - This must be called to make sure that any underlying resource is freed. - """ - + def close(self) -> None: ... @abstractmethod - def __enter__(self) -> Self: - """Enter the watcher's context and allow starting new processes - - This function must return self - """ - + def __enter__(self) -> Self: ... @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: - """Exit the watcher's context""" - + ) -> None: ... @abstractmethod - def is_active(self) -> bool: - """Return ``True`` if the watcher is active and is used by the event loop. - - Return True if the watcher is installed and ready to handle process exit - notifications. - - """ + def is_active(self) -> bool: ... else: class AbstractChildWatcher: - """Abstract base class for monitoring child processes. 
- - Objects derived from this class monitor a collection of subprocesses and - report their termination or interruption by a signal. - - New callbacks are registered with .add_child_handler(). Starting a new - process must be done within a 'with' block to allow the watcher to suspend - its activity until the new process if fully registered (this is needed to - prevent a race condition in some implementations). - - Example: - with watcher: - proc = subprocess.Popen("sleep 1") - watcher.add_child_handler(proc.pid, callback) - - Notes: - Implementations of this class must be thread-safe. - - Since child watcher objects may catch the SIGCHLD signal and call - waitpid(-1), there should be only one active object per process. - """ - @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: - """Register a new child handler. - - Arrange for callback(pid, returncode, *args) to be called when - process 'pid' terminates. Specifying another callback for the same - process replaces the previous handler. - - Note: callback() must be thread-safe. - """ - + ) -> None: ... @abstractmethod - def remove_child_handler(self, pid: int) -> bool: - """Removes the handler for process 'pid'. - - The function returns True if the handler was successfully removed, - False if there was nothing to remove. - """ - + def remove_child_handler(self, pid: int) -> bool: ... @abstractmethod - def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: - """Attach the watcher to an event loop. - - If the watcher was previously attached to an event loop, then it is - first detached before attaching to the new loop. - - Note: loop may be None. - """ - + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... @abstractmethod - def close(self) -> None: - """Close the watcher. - - This must be called to make sure that any underlying resource is freed. - """ - + def close(self) -> None: ... @abstractmethod - def __enter__(self) -> Self: - """Enter the watcher's context and allow starting new processes - - This function must return self - """ - + def __enter__(self) -> Self: ... @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: - """Exit the watcher's context""" - + ) -> None: ... @abstractmethod - def is_active(self) -> bool: - """Return ``True`` if the watcher is active and is used by the event loop. - - Return True if the watcher is installed and ready to handle process exit - notifications. - - """ + def is_active(self) -> bool: ... if sys.platform != "win32": if sys.version_info < (3, 14): @@ -230,16 +102,6 @@ if sys.platform != "win32": @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class SafeChildWatcher(BaseChildWatcher): - """'Safe' child watcher implementation. - - This implementation avoids disrupting other code spawning processes by - polling explicitly each process in the SIGCHLD handler instead of calling - os.waitpid(-1). - - This is a safe solution but it has a significant overhead when handling a - big number of children (O(n) each time SIGCHLD is raised) - """ - def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -251,16 +113,6 @@ if sys.platform != "win32": @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class FastChildWatcher(BaseChildWatcher): - """'Fast' child watcher implementation. 
- - This implementation reaps every terminated processes by calling - os.waitpid(-1) directly, possibly breaking other code spawning processes - and waiting for their termination. - - There is no noticeable overhead when handling a big number of children - (O(1) each time a child terminates). - """ - def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -279,16 +131,6 @@ if sys.platform != "win32": def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... class SafeChildWatcher(BaseChildWatcher): - """'Safe' child watcher implementation. - - This implementation avoids disrupting other code spawning processes by - polling explicitly each process in the SIGCHLD handler instead of calling - os.waitpid(-1). - - This is a safe solution but it has a significant overhead when handling a - big number of children (O(n) each time SIGCHLD is raised) - """ - def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -299,16 +141,6 @@ if sys.platform != "win32": def remove_child_handler(self, pid: int) -> bool: ... class FastChildWatcher(BaseChildWatcher): - """'Fast' child watcher implementation. - - This implementation reaps every terminated processes by calling - os.waitpid(-1) directly, possibly breaking other code spawning processes - and waiting for their termination. - - There is no noticeable overhead when handling a big number of children - (O(1) each time a child terminates). - """ - def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -319,11 +151,6 @@ if sys.platform != "win32": def remove_child_handler(self, pid: int) -> bool: ... class _UnixSelectorEventLoop(BaseSelectorEventLoop): - """Unix event loop. - - Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. - """ - if sys.version_info >= (3, 13): async def create_unix_server( self, @@ -340,33 +167,17 @@ if sys.platform != "win32": ) -> Server: ... if sys.version_info >= (3, 14): - class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): - """UNIX event loop policy""" - + class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): ... else: class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - """UNIX event loop policy with a watcher for child processes.""" - if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def get_child_watcher(self) -> AbstractChildWatcher: - """Get the watcher for child processes. - - If not yet set, a ThreadedChildWatcher object is automatically created. - """ - + def get_child_watcher(self) -> AbstractChildWatcher: ... @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: - """Set the watcher for child processes.""" + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... else: - def get_child_watcher(self) -> AbstractChildWatcher: - """Get the watcher for child processes. - - If not yet set, a ThreadedChildWatcher object is automatically created. - """ - - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: - """Set the watcher for child processes.""" + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... 
SelectorEventLoop = _UnixSelectorEventLoop @@ -382,17 +193,6 @@ if sys.platform != "win32": if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class MultiLoopChildWatcher(AbstractChildWatcher): - """A watcher that doesn't require running loop in the main thread. - - This implementation registers a SIGCHLD signal handler on - instantiation (which may conflict with other code that - install own handler for this signal). - - The solution is safe but it has a significant overhead when - handling a big number of processes (*O(n)* each time a - SIGCHLD is received). - """ - def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -407,17 +207,6 @@ if sys.platform != "win32": else: class MultiLoopChildWatcher(AbstractChildWatcher): - """A watcher that doesn't require running loop in the main thread. - - This implementation registers a SIGCHLD signal handler on - instantiation (which may conflict with other code that - install own handler for this signal). - - The solution is safe but it has a significant overhead when - handling a big number of processes (*O(n)* each time a - SIGCHLD is received). - """ - def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -432,18 +221,6 @@ if sys.platform != "win32": if sys.version_info < (3, 14): class ThreadedChildWatcher(AbstractChildWatcher): - """Threaded child watcher implementation. - - The watcher uses a thread per process - for waiting for the process finish. - - It doesn't require subscription on POSIX signal - but a thread creation is not free. - - The watcher has O(1) complexity, its performance doesn't depend - on amount of spawn processes. - """ - def is_active(self) -> Literal[True]: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -458,17 +235,6 @@ if sys.platform != "win32": def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... class PidfdChildWatcher(AbstractChildWatcher): - """Child watcher implementation using Linux's pid file descriptors. - - This child watcher polls process file descriptors (pidfds) to await child - process termination. In some respects, PidfdChildWatcher is a "Goldilocks" - child watcher implementation. It doesn't require signals or threads, doesn't - interfere with any processes launched outside the event loop, and scales - linearly with the number of subprocesses launched by the event loop. The - main disadvantage is that pidfds are specific to Linux, and only work on - recent (5.3+) kernels. - """ - def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi index 99b9ec9565aed..a32381bfb3e63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,5 +1,3 @@ -"""Selector and proactor event loops for Windows.""" - import socket import sys from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer @@ -49,22 +47,14 @@ if sys.platform == "win32": CONNECT_PIPE_MAX_DELAY: float class PipeServer: - """Class representing a pipe server. - - This is much like a bound, listening socket. - """ - def __init__(self, address: str) -> None: ... def __del__(self) -> None: ... def closed(self) -> bool: ... 
def close(self) -> None: ... - class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): - """Windows version of selector event loop.""" + class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... class ProactorEventLoop(proactor_events.BaseProactorEventLoop): - """Windows version of proactor event loop using IOCP.""" - def __init__(self, proactor: IocpProactor | None = None) -> None: ... async def create_pipe_connection( self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str @@ -74,8 +64,6 @@ if sys.platform == "win32": ) -> list[PipeServer]: ... class IocpProactor: - """Proactor implementation using IOCP.""" - def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... @@ -98,13 +86,7 @@ if sys.platform == "win32": def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... - def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: - """Wait for a handle. - - Return a Future object. The result of the future is True if the wait - completed, or False if the wait did not complete (on timeout). - """ - + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... def close(self) -> None: ... if sys.version_info >= (3, 11): def recvfrom_into( @@ -123,19 +105,13 @@ if sys.platform == "win32": else: class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[SelectorEventLoop]] - def get_child_watcher(self) -> NoReturn: - """Get the watcher for child processes.""" - - def set_child_watcher(self, watcher: Any) -> NoReturn: - """Set the watcher for child processes.""" + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[ProactorEventLoop]] - def get_child_watcher(self) -> NoReturn: - """Get the watcher for child processes.""" - - def set_child_watcher(self, watcher: Any) -> NoReturn: - """Set the watcher for child processes.""" + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... if sys.version_info >= (3, 14): _DefaultEventLoopPolicy = _WindowsProactorEventLoopPolicy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi index 1b1d0bcf62c27..5cedd61b5f4a3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi @@ -1,5 +1,3 @@ -"""Various Windows specific bits and pieces.""" - import subprocess import sys from collections.abc import Callable @@ -13,15 +11,9 @@ if sys.platform == "win32": BUFSIZE: Final = 8192 PIPE: Final = subprocess.PIPE STDOUT: Final = subprocess.STDOUT - def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: - """Like os.pipe() but with overlapped support and using handles not fds.""" + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: ... 
class PipeHandle: - """Wrapper for an overlapped pipe handle which is vaguely file-object like. - - The IOCP event loop can use these instead of socket objects. - """ - def __init__(self, handle: int) -> None: ... def __del__(self) -> None: ... def __enter__(self) -> Self: ... @@ -32,11 +24,6 @@ if sys.platform == "win32": def close(self, *, CloseHandle: Callable[[int], object] = ...) -> None: ... class Popen(subprocess.Popen[AnyStr]): - """Replacement for subprocess.Popen using overlapped pipe handles. - - The stdin, stdout, stderr are None or instances of PipeHandle. - """ - stdin: PipeHandle | None # type: ignore[assignment] stdout: PipeHandle | None # type: ignore[assignment] stderr: PipeHandle | None # type: ignore[assignment] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi index 96a81edcf2d3d..36d1862fdda78 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi @@ -1,24 +1,3 @@ -"""Basic infrastructure for asynchronous socket service clients and servers. - -There are only two ways to have a program on a single processor do "more -than one thing at a time". Multi-threaded programming is the simplest and -most popular way to do it, but there is another very different technique, -that lets you have nearly all the advantages of multi-threading, without -actually using multiple threads. it's really only practical if your program -is largely I/O bound. If your program is CPU bound, then pre-emptive -scheduled threads are probably what you really need. Network servers are -rarely CPU-bound, however. - -If your operating system supports the select() system call in its I/O -library (and nearly all do), then you can use it to juggle multiple -communication channels at once; doing other work while your I/O is taking -place in the "background." Although this strategy can seem strange and -complex, especially at first, it is in many ways easier to understand and -control than multi-threaded programming. The module documented here solves -many of the difficult problems for you, making the task of building -sophisticated high-performance network servers and clients a snap. -""" - import sys from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi index f00719b18777a..7f7b05ccc0a39 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi @@ -1,9 +1,3 @@ -"""allow programmer to define multiple exit functions to be executed -upon normal program termination. - -Two public functions, register and unregister, are defined. -""" - from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -11,31 +5,8 @@ from typing_extensions import ParamSpec _T = TypeVar("_T") _P = ParamSpec("_P") -def _clear() -> None: - """Clear the list of previously registered exit functions.""" - -def _ncallbacks() -> int: - """Return the number of registered exit functions.""" - -def _run_exitfuncs() -> None: - """Run all registered exit functions. - - If a callback raises an exception, it is logged with sys.unraisablehook. 
- """ - -def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: - """Register a function to be executed upon normal program termination - - func - function to be called at exit - args - optional arguments to pass to func - kwargs - optional keyword arguments to pass to func - - func is returned to facilitate usage as a decorator. - """ - -def unregister(func: Callable[..., object], /) -> None: - """Unregister an exit function which was previously registered using - atexit.register - - func - function to be unregistered - """ +def _clear() -> None: ... +def _ncallbacks() -> int: ... +def _run_exitfuncs() -> None: ... +def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... +def unregister(func: Callable[..., object], /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi index fdfbc7d2cd841..f3ce78ccb7fae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi @@ -5,66 +5,26 @@ _RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] class error(Exception): ... -def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: - """Return a fragment which is the addition of the two samples passed as parameters.""" - -def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Decode an Intel/DVI ADPCM coded fragment to a linear fragment.""" - -def alaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in a-LAW encoding to linearly encoded sound fragments.""" - -def avg(fragment: Buffer, width: int, /) -> int: - """Return the average over all samples in the fragment.""" - -def avgpp(fragment: Buffer, width: int, /) -> int: - """Return the average peak-peak value over all samples in the fragment.""" - -def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: - """Return a fragment that is the original fragment with a bias added to each sample.""" - -def byteswap(fragment: Buffer, width: int, /) -> bytes: - """Convert big-endian samples to little-endian and vice versa.""" - -def cross(fragment: Buffer, width: int, /) -> int: - """Return the number of zero crossings in the fragment passed as an argument.""" - -def findfactor(fragment: Buffer, reference: Buffer, /) -> float: - """Return a factor F such that rms(add(fragment, mul(reference, -F))) is minimal.""" - -def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: - """Try to match reference as well as possible to a portion of fragment.""" - -def findmax(fragment: Buffer, length: int, /) -> int: - """Search fragment for a slice of specified number of samples with maximum energy.""" - -def getsample(fragment: Buffer, width: int, index: int, /) -> int: - """Return the value of sample index from the fragment.""" - -def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Convert samples to 4 bit Intel/DVI ADPCM encoding.""" - -def lin2alaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to a-LAW encoding.""" - -def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: - """Convert samples between 1-, 2-, 3- and 4-byte formats.""" - -def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to u-LAW encoding.""" - -def max(fragment: Buffer, width: int, /) 
-> int: - """Return the maximum of the absolute value of all samples in a fragment.""" - -def maxpp(fragment: Buffer, width: int, /) -> int: - """Return the maximum peak-peak value in the sound fragment.""" - -def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: - """Return the minimum and maximum values of all samples in the sound fragment.""" - -def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: - """Return a fragment that has all samples in the original fragment multiplied by the floating-point value factor.""" - +def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: ... +def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def alaw2lin(fragment: Buffer, width: int, /) -> bytes: ... +def avg(fragment: Buffer, width: int, /) -> int: ... +def avgpp(fragment: Buffer, width: int, /) -> int: ... +def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: ... +def byteswap(fragment: Buffer, width: int, /) -> bytes: ... +def cross(fragment: Buffer, width: int, /) -> int: ... +def findfactor(fragment: Buffer, reference: Buffer, /) -> float: ... +def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: ... +def findmax(fragment: Buffer, length: int, /) -> int: ... +def getsample(fragment: Buffer, width: int, index: int, /) -> int: ... +def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... +def lin2alaw(fragment: Buffer, width: int, /) -> bytes: ... +def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: ... +def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: ... +def max(fragment: Buffer, width: int, /) -> int: ... +def maxpp(fragment: Buffer, width: int, /) -> int: ... +def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: ... +def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: ... def ratecv( fragment: Buffer, width: int, @@ -75,20 +35,9 @@ def ratecv( weightA: int = 1, weightB: int = 0, /, -) -> tuple[bytes, _RatecvState]: - """Convert the frame rate of the input fragment.""" - -def reverse(fragment: Buffer, width: int, /) -> bytes: - """Reverse the samples in a fragment and returns the modified fragment.""" - -def rms(fragment: Buffer, width: int, /) -> int: - """Return the root-mean-square of the fragment, i.e. sqrt(sum(S_i^2)/n).""" - -def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Convert a stereo fragment to a mono fragment.""" - -def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Generate a stereo fragment from a mono fragment.""" - -def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in u-LAW encoding to linearly encoded sound fragments.""" +) -> tuple[bytes, _RatecvState]: ... +def reverse(fragment: Buffer, width: int, /) -> bytes: ... +def rms(fragment: Buffer, width: int, /) -> int: ... +def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... +def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: ... 
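As a quick reference for the `atexit` signatures stripped of their docstrings above, a small hedged sketch: `register()` forwards positional and keyword arguments and returns the callable, so it also works as a decorator. The handler names below are hypothetical.

# Hedged example of the atexit API stubbed above; the callables are invented.
import atexit

@atexit.register
def say_goodbye() -> None:
    print("normal interpreter shutdown")

def close_resource(name: str, *, force: bool = False) -> None:
    print(f"closing {name} (force={force})")

atexit.register(close_resource, "database", force=True)   # args/kwargs forwarded at exit
atexit.unregister(close_resource)                          # a handler can be removed again
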
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi index 0fc7e13da0cec..279d74a94ebe2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi @@ -1,5 +1,3 @@ -"""Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings""" - import sys from _typeshed import ReadableBuffer from typing import IO @@ -30,195 +28,34 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 13): __all__ += ["z85decode", "z85encode"] -def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: - """Encode the bytes-like object s using Base64 and return a bytes object. - - Optional altchars should be a byte string of length 2 which specifies an - alternative alphabet for the '+' and '/' characters. This allows an - application to e.g. generate url or filesystem safe Base64 strings. - """ - -def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: - """Decode the Base64 encoded bytes-like object or ASCII string s. - - Optional altchars must be a bytes-like object or ASCII string of length 2 - which specifies the alternative alphabet used instead of the '+' and '/' - characters. - - The result is returned as a bytes object. A binascii.Error is raised if - s is incorrectly padded. - - If validate is False (the default), characters that are neither in the - normal base-64 alphabet nor the alternative alphabet are discarded prior - to the padding check. If validate is True, these non-alphabet characters - in the input result in a binascii.Error. - For more information about the strict base64 check, see: - - https://docs.python.org/3.11/library/binascii.html#binascii.a2b_base64 - """ - -def standard_b64encode(s: ReadableBuffer) -> bytes: - """Encode bytes-like object s using the standard Base64 alphabet. - - The result is returned as a bytes object. - """ - -def standard_b64decode(s: str | ReadableBuffer) -> bytes: - """Decode bytes encoded with the standard Base64 alphabet. - - Argument s is a bytes-like object or ASCII string to decode. The result - is returned as a bytes object. A binascii.Error is raised if the input - is incorrectly padded. Characters that are not in the standard alphabet - are discarded prior to the padding check. - """ - -def urlsafe_b64encode(s: ReadableBuffer) -> bytes: - """Encode bytes using the URL- and filesystem-safe Base64 alphabet. - - Argument s is a bytes-like object to encode. The result is returned as a - bytes object. The alphabet uses '-' instead of '+' and '_' instead of - '/'. - """ - -def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: - """Decode bytes using the URL- and filesystem-safe Base64 alphabet. - - Argument s is a bytes-like object or ASCII string to decode. The result - is returned as a bytes object. A binascii.Error is raised if the input - is incorrectly padded. Characters that are not in the URL-safe base-64 - alphabet, and are not a plus '+' or slash '/', are discarded prior to the - padding check. - - The alphabet uses '-' instead of '+' and '_' instead of '/'. - """ - -def b32encode(s: ReadableBuffer) -> bytes: - """ - Encode the bytes-like objects using base32 and return a bytes object. - """ - -def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: - """ - Decode the base32 encoded bytes-like object or ASCII string s. 
- - Optional casefold is a flag specifying whether a lowercase alphabet is - acceptable as input. For security purposes, the default is False. - - RFC 3548 allows for optional mapping of the digit 0 (zero) to the - letter O (oh), and for optional mapping of the digit 1 (one) to - either the letter I (eye) or letter L (el). The optional argument - map01 when not None, specifies which letter the digit 1 should be - mapped to (when map01 is not None, the digit 0 is always mapped to - the letter O). For security purposes the default is None, so that - 0 and 1 are not allowed in the input. - - The result is returned as a bytes object. A binascii.Error is raised if - the input is incorrectly padded or if there are non-alphabet - characters present in the input. - """ - -def b16encode(s: ReadableBuffer) -> bytes: - """Encode the bytes-like object s using Base16 and return a bytes object.""" - -def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: - """Decode the Base16 encoded bytes-like object or ASCII string s. - - Optional casefold is a flag specifying whether a lowercase alphabet is - acceptable as input. For security purposes, the default is False. - - The result is returned as a bytes object. A binascii.Error is raised if - s is incorrectly padded or if there are non-alphabet characters present - in the input. - """ +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... +def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: ... +def standard_b64encode(s: ReadableBuffer) -> bytes: ... +def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... +def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... +def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... +def b32encode(s: ReadableBuffer) -> bytes: ... +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: ... +def b16encode(s: ReadableBuffer) -> bytes: ... +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... if sys.version_info >= (3, 10): - def b32hexencode(s: ReadableBuffer) -> bytes: - """ - Encode the bytes-like objects using base32hex and return a bytes object. - """ - - def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: - """ - Decode the base32hex encoded bytes-like object or ASCII string s. - - Optional casefold is a flag specifying whether a lowercase alphabet is - acceptable as input. For security purposes, the default is False. - - The result is returned as a bytes object. A binascii.Error is raised if - the input is incorrectly padded or if there are non-alphabet - characters present in the input. - """ - -def a85encode(b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False) -> bytes: - """Encode bytes-like object b using Ascii85 and return a bytes object. - - foldspaces is an optional flag that uses the special short sequence 'y' - instead of 4 consecutive spaces (ASCII 0x20) as supported by 'btoa'. This - feature is not supported by the "standard" Adobe encoding. - - wrapcol controls whether the output should have newline (b'\\n') characters - added to it. If this is non-zero, each output line will be at most this - many characters long, excluding the trailing newline. - - pad controls whether the input is padded to a multiple of 4 before - encoding. Note that the btoa implementation always pads. 
- - adobe controls whether the encoded byte sequence is framed with <~ and ~>, - which is used by the Adobe implementation. - """ + def b32hexencode(s: ReadableBuffer) -> bytes: ... + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... +def a85encode( + b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False +) -> bytes: ... def a85decode( b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" -) -> bytes: - """Decode the Ascii85 encoded bytes-like object or ASCII string b. - - foldspaces is a flag that specifies whether the 'y' short sequence should be - accepted as shorthand for 4 consecutive spaces (ASCII 0x20). This feature is - not supported by the "standard" Adobe encoding. - - adobe controls whether the input sequence is in Adobe Ascii85 format (i.e. - is framed with <~ and ~>). - - ignorechars should be a byte string containing characters to ignore from the - input. This should only contain whitespace characters, and by default - contains all whitespace characters in ASCII. - - The result is returned as a bytes object. - """ - -def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: - """Encode bytes-like object b in base85 format and return a bytes object. - - If pad is true, the input is padded with b'\\0' so its length is a multiple of - 4 bytes before encoding. - """ - -def b85decode(b: str | ReadableBuffer) -> bytes: - """Decode the base85-encoded bytes-like object or ASCII string b - - The result is returned as a bytes object. - """ - -def decode(input: IO[bytes], output: IO[bytes]) -> None: - """Decode a file; input and output are binary files.""" - -def encode(input: IO[bytes], output: IO[bytes]) -> None: - """Encode a file; input and output are binary files.""" - -def encodebytes(s: ReadableBuffer) -> bytes: - """Encode a bytestring into a bytes object containing multiple lines - of base-64 data. - """ - -def decodebytes(s: ReadableBuffer) -> bytes: - """Decode a bytestring of base-64 data into a bytes object.""" +) -> bytes: ... +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... +def b85decode(b: str | ReadableBuffer) -> bytes: ... +def decode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encodebytes(s: ReadableBuffer) -> bytes: ... +def decodebytes(s: ReadableBuffer) -> bytes: ... if sys.version_info >= (3, 13): - def z85encode(s: ReadableBuffer) -> bytes: - """Encode bytes-like object b in z85 format and return a bytes object.""" - - def z85decode(s: str | ReadableBuffer) -> bytes: - """Decode the z85-encoded bytes-like object or ASCII string b - - The result is returned as a bytes object. - """ + def z85encode(s: ReadableBuffer) -> bytes: ... + def z85decode(s: str | ReadableBuffer) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi index e9ff8da0e462d..b6be2210ffe2e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi @@ -1,5 +1,3 @@ -"""Debugger basics""" - import sys from _typeshed import ExcInfo, TraceFunction, Unused from collections.abc import Callable, Iterable, Iterator, Mapping @@ -19,23 +17,9 @@ _Backend: TypeAlias = Literal["settrace", "monitoring"] # so we don't include the value of this constant in the stubs. 
GENERATOR_AND_COROUTINE_FLAGS: Final[int] -class BdbQuit(Exception): - """Exception to give up completely.""" +class BdbQuit(Exception): ... class Bdb: - """Generic Python debugger base class. - - This class takes care of details of the trace facility; - a derived class should implement user interaction. - The standard debugger class (pdb.Pdb) is an example. - - The optional skip argument must be an iterable of glob-style - module name patterns. The debugger will not step into frames - that originate in a module that matches one of these patterns. - Whether a frame is considered to originate in a certain module - is determined by the __name__ in the frame globals. - """ - skip: set[str] | None breaks: dict[str, list[int]] fncache: dict[str, str] @@ -51,283 +35,69 @@ class Bdb: else: def __init__(self, skip: Iterable[str] | None = None) -> None: ... - def canonic(self, filename: str) -> str: - """Return canonical form of filename. - - For real filenames, the canonical form is a case-normalized (on - case insensitive filesystems) absolute path. 'Filenames' with - angle brackets, such as "", generated in interactive - mode, are returned unchanged. - """ - - def reset(self) -> None: - """Set values of attributes as ready to start debugging.""" + def canonic(self, filename: str) -> str: ... + def reset(self) -> None: ... if sys.version_info >= (3, 12): @contextmanager def set_enterframe(self, frame: FrameType) -> Iterator[None]: ... - def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: - """Dispatch a trace function for debugged frames based on the event. - - This function is installed as the trace function for debugged - frames. Its return value is the new trace function, which is - usually itself. The default implementation decides how to - dispatch a frame, depending on the type of event (passed in as a - string) that is about to be executed. - - The event can be one of the following: - line: A new line of code is going to be executed. - call: A function is about to be called or another code block - is entered. - return: A function or other code block is about to return. - exception: An exception has occurred. - c_call: A C function is about to be called. - c_return: A C function has returned. - c_exception: A C function has raised an exception. - - For the Python events, specialized functions (see the dispatch_*() - methods) are called. For the C events, no action is taken. - - The arg parameter depends on the previous event. - """ - - def dispatch_line(self, frame: FrameType) -> TraceFunction: - """Invoke user function and return trace function for line event. - - If the debugger stops on the current line, invoke - self.user_line(). Raise BdbQuit if self.quitting is set. - Return self.trace_dispatch to continue tracing in this scope. - """ - - def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: - """Invoke user function and return trace function for call event. - - If the debugger stops on this function call, invoke - self.user_call(). Raise BdbQuit if self.quitting is set. - Return self.trace_dispatch to continue tracing in this scope. - """ - - def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: - """Invoke user function and return trace function for return event. - - If the debugger stops on this function return, invoke - self.user_return(). Raise BdbQuit if self.quitting is set. - Return self.trace_dispatch to continue tracing in this scope. 
- """ - - def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: - """Invoke user function and return trace function for exception event. - - If the debugger stops on this exception, invoke - self.user_exception(). Raise BdbQuit if self.quitting is set. - Return self.trace_dispatch to continue tracing in this scope. - """ + def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... + def dispatch_line(self, frame: FrameType) -> TraceFunction: ... + def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: ... + def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: ... + def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: ... if sys.version_info >= (3, 13): - def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: - """Invoke user function and return trace function for opcode event. - If the debugger stops on the current opcode, invoke - self.user_opcode(). Raise BdbQuit if self.quitting is set. - Return self.trace_dispatch to continue tracing in this scope. - - Opcode event will always trigger the user callback. For now the only - opcode event is from an inline set_trace() and we want to stop there - unconditionally. - """ - - def is_skipped_module(self, module_name: str) -> bool: - """Return True if module_name matches any skip pattern.""" - - def stop_here(self, frame: FrameType) -> bool: - """Return True if frame is below the starting frame in the stack.""" - - def break_here(self, frame: FrameType) -> bool: - """Return True if there is an effective breakpoint for this line. - - Check for line or function breakpoint and if in effect. - Delete temporary breakpoints if effective() says to. - """ - - def do_clear(self, arg: Any) -> bool | None: - """Remove temporary breakpoint. - - Must implement in derived classes or get NotImplementedError. - """ - - def break_anywhere(self, frame: FrameType) -> bool: - """Return True if there is any breakpoint in that frame""" - - def user_call(self, frame: FrameType, argument_list: None) -> None: - """Called if we might stop in a function.""" - - def user_line(self, frame: FrameType) -> None: - """Called when we stop or break at a line.""" - - def user_return(self, frame: FrameType, return_value: Any) -> None: - """Called when a return trap is set here.""" - - def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: - """Called when we stop on an exception.""" - - def set_until(self, frame: FrameType, lineno: int | None = None) -> None: - """Stop when the line with the lineno greater than the current one is - reached or when returning from current frame. - """ + def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: ... + + def is_skipped_module(self, module_name: str) -> bool: ... + def stop_here(self, frame: FrameType) -> bool: ... + def break_here(self, frame: FrameType) -> bool: ... + def do_clear(self, arg: Any) -> bool | None: ... + def break_anywhere(self, frame: FrameType) -> bool: ... + def user_call(self, frame: FrameType, argument_list: None) -> None: ... + def user_line(self, frame: FrameType) -> None: ... + def user_return(self, frame: FrameType, return_value: Any) -> None: ... + def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... 
if sys.version_info >= (3, 13): - def user_opcode(self, frame: FrameType) -> None: # undocumented - """Called when we are about to execute an opcode.""" + def user_opcode(self, frame: FrameType) -> None: ... # undocumented - def set_step(self) -> None: - """Stop after one line of code.""" + def set_step(self) -> None: ... if sys.version_info >= (3, 13): - def set_stepinstr(self) -> None: # undocumented - """Stop before the next instruction.""" - - def set_next(self, frame: FrameType) -> None: - """Stop on the next line in or below the given frame.""" - - def set_return(self, frame: FrameType) -> None: - """Stop when returning from the given frame.""" - - def set_trace(self, frame: FrameType | None = None) -> None: - """Start debugging from frame. - - If frame is not specified, debugging starts from caller's frame. - """ - - def set_continue(self) -> None: - """Stop only at breakpoints or when finished. - - If there are no breakpoints, set the system trace function to None. - """ - - def set_quit(self) -> None: - """Set quitting attribute to True. - - Raises BdbQuit exception in the next call to a dispatch_*() method. - """ + def set_stepinstr(self) -> None: ... # undocumented + def set_next(self, frame: FrameType) -> None: ... + def set_return(self, frame: FrameType) -> None: ... + def set_trace(self, frame: FrameType | None = None) -> None: ... + def set_continue(self) -> None: ... + def set_quit(self) -> None: ... def set_break( self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None - ) -> str | None: - """Set a new breakpoint for filename:lineno. - - If lineno doesn't exist for the filename, return an error message. - The filename should be in canonical form. - """ - - def clear_break(self, filename: str, lineno: int) -> str | None: - """Delete breakpoints for filename:lineno. - - If no breakpoints were set, return an error message. - """ - - def clear_bpbynumber(self, arg: SupportsInt) -> str | None: - """Delete a breakpoint by its index in Breakpoint.bpbynumber. - - If arg is invalid, return an error message. - """ - - def clear_all_file_breaks(self, filename: str) -> str | None: - """Delete all breakpoints in filename. - - If none were set, return an error message. - """ - - def clear_all_breaks(self) -> str | None: - """Delete all existing breakpoints. - - If none were set, return an error message. - """ - - def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: - """Return a breakpoint by its index in Breakpoint.bybpnumber. - - For invalid arg values or if the breakpoint doesn't exist, - raise a ValueError. - """ - - def get_break(self, filename: str, lineno: int) -> bool: - """Return True if there is a breakpoint for filename:lineno.""" - - def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: - """Return all breakpoints for filename:lineno. - - If no breakpoints are set, return an empty list. - """ - - def get_file_breaks(self, filename: str) -> list[int]: - """Return all lines with breakpoints for filename. - - If no breakpoints are set, return an empty list. - """ - - def get_all_breaks(self) -> dict[str, list[int]]: - """Return all breakpoints that are set.""" - - def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: - """Return a list of (frame, lineno) in a stack trace and a size. - - List starts with original calling frame, if there is one. - Size may be number of frames above or below f. 
- """ - - def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: - """Return a string with information about a stack entry. - - The stack entry frame_lineno is a (frame, lineno) tuple. The - return string contains the canonical filename, the function name - or '', the input arguments, the return value, and the - line of code (if it exists). - - """ - - def run(self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: - """Debug a statement executed via the exec() function. - - globals defaults to __main__.dict; locals defaults to globals. - """ - - def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: - """Debug an expression executed via the eval() function. - - globals defaults to __main__.dict; locals defaults to globals. - """ - - def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: - """For backwards-compatibility. Defers to run().""" - - def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: - """Debug a single function call. - - Return the result of the function call. - """ + ) -> str | None: ... + def clear_break(self, filename: str, lineno: int) -> str | None: ... + def clear_bpbynumber(self, arg: SupportsInt) -> str | None: ... + def clear_all_file_breaks(self, filename: str) -> str | None: ... + def clear_all_breaks(self) -> str | None: ... + def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: ... + def get_break(self, filename: str, lineno: int) -> bool: ... + def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: ... + def get_file_breaks(self, filename: str) -> list[int]: ... + def get_all_breaks(self) -> dict[str, list[int]]: ... + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... + def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: ... + def run( + self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... + def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... if sys.version_info >= (3, 14): def start_trace(self) -> None: ... def stop_trace(self) -> None: ... - def disable_current_event(self) -> None: - """Disable the current event.""" - - def restart_events(self) -> None: - """Restart all events.""" + def disable_current_event(self) -> None: ... + def restart_events(self) -> None: ... class Breakpoint: - """Breakpoint class. - - Implements temporary breakpoints, ignore counts, disabling and - (re)-enabling, and conditionals. - - Breakpoints are indexed by number through bpbynumber and by - the (file, line) tuple using bplist. The former points to a - single instance of class Breakpoint. The latter points to a - list of such instances since there may be more than one - breakpoint per line. - - When creating a breakpoint, its associated filename should be - in canonical form. If funcname is defined, a breakpoint hit will be - counted when the first line of that function is executed. A - conditional breakpoint always counts a hit. 
- """ - next: int bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] @@ -349,57 +119,12 @@ class Breakpoint: @staticmethod def clearBreakpoints() -> None: ... - def deleteMe(self) -> None: - """Delete the breakpoint from the list associated to a file:line. - - If it is the last breakpoint in that position, it also deletes - the entry for the file:line. - """ - - def enable(self) -> None: - """Mark the breakpoint as enabled.""" - - def disable(self) -> None: - """Mark the breakpoint as disabled.""" - - def bpprint(self, out: IO[str] | None = None) -> None: - """Print the output of bpformat(). - - The optional out argument directs where the output is sent - and defaults to standard output. - """ - - def bpformat(self) -> str: - """Return a string with information about the breakpoint. - - The information includes the breakpoint number, temporary - status, file:line position, break condition, number of times to - ignore, and number of times hit. - - """ - -def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: - """Return True if break should happen here. - - Whether a break should happen depends on the way that b (the breakpoint) - was set. If it was set via line number, check if b.line is the same as - the one in the frame. If it was set via function name, check if this is - the right function and if it is on the first executable line. - """ - -def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: - """Return (active breakpoint, delete temporary flag) or (None, None) as - breakpoint to act upon. - - The "active breakpoint" is the first entry in bplist[line, file] (which - must exist) that is enabled, for which checkfuncname is True, and that - has neither a False condition nor a positive ignore count. The flag, - meaning that a temporary breakpoint should be deleted, is False only - when the condiion cannot be evaluated (in which case, ignore count is - ignored). - - If no such entry exists, then (None, None) is returned. - """ + def deleteMe(self) -> None: ... + def enable(self) -> None: ... + def disable(self) -> None: ... + def bpprint(self, out: IO[str] | None = None) -> None: ... + def bpformat(self) -> str: ... -def set_trace() -> None: - """Start debugging with a Bdb instance from the caller's frame.""" +def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... +def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: ... +def set_trace() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi index 394c9ed001c0d..5606d5cdf74d9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi @@ -1,5 +1,3 @@ -"""Conversion between binary data and ASCII""" - import sys from _typeshed import ReadableBuffer from typing_extensions import TypeAlias, deprecated @@ -8,108 +6,35 @@ from typing_extensions import TypeAlias, deprecated # or ASCII-only strings. _AsciiBuffer: TypeAlias = str | ReadableBuffer -def a2b_uu(data: _AsciiBuffer, /) -> bytes: - """Decode a line of uuencoded data.""" - -def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: - """Uuencode line of data.""" +def a2b_uu(data: _AsciiBuffer, /) -> bytes: ... +def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: ... 
if sys.version_info >= (3, 11): - def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: - """Decode a line of base64 data. - - strict_mode - When set to True, bytes that are not part of the base64 standard are not allowed. - The same applies to excess data after padding (= / ==). - """ + def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: ... else: - def a2b_base64(data: _AsciiBuffer, /) -> bytes: - """Decode a line of base64 data.""" - -def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: - """Base64-code line of data.""" - -def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: - """Decode a string of qp-encoded data.""" - -def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: - """Encode a string using quoted-printable encoding. + def a2b_base64(data: _AsciiBuffer, /) -> bytes: ... - On encoding, when istext is set, newlines are not encoded, and white - space at end of lines is. When istext is not set, \\r and \\n (CR/LF) - are both encoded. When quotetabs is set, space and tabs are encoded. - """ +def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: ... +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def a2b_hqx(data: _AsciiBuffer, /) -> bytes: - """Decode .hqx coding.""" - + def a2b_hqx(data: _AsciiBuffer, /) -> bytes: ... @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def rledecode_hqx(data: ReadableBuffer, /) -> bytes: - """Decode hexbin RLE-coded string.""" - + def rledecode_hqx(data: ReadableBuffer, /) -> bytes: ... @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def rlecode_hqx(data: ReadableBuffer, /) -> bytes: - """Binhex RLE-code binary data.""" - + def rlecode_hqx(data: ReadableBuffer, /) -> bytes: ... @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def b2a_hqx(data: ReadableBuffer, /) -> bytes: - """Encode .hqx data.""" - -def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: - """Compute CRC-CCITT incrementally.""" - -def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: - """Compute CRC-32 incrementally.""" - -def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: - """Hexadecimal representation of binary data. - - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - - The return value is a bytes object. This function is also - available as "hexlify()". - - Example: - >>> binascii.b2a_hex(b'\\xb9\\x01\\xef') - b'b901ef' - >>> binascii.hexlify(b'\\xb9\\x01\\xef', ':') - b'b9:01:ef' - >>> binascii.b2a_hex(b'\\xb9\\x01\\xef', b'_', 2) - b'b9_01ef' - """ - -def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: - """Hexadecimal representation of binary data. - - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - - The return value is a bytes object. This function is also - available as "b2a_hex()". 
- """ - -def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: - """Binary data of hexadecimal representation. - - hexstr must contain an even number of hex digits (upper or lower case). - This function is also available as "unhexlify()". - """ - -def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: - """Binary data of hexadecimal representation. - - hexstr must contain an even number of hex digits (upper or lower case). - """ + def b2a_hqx(data: ReadableBuffer, /) -> bytes: ... + +def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: ... +def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: ... +def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... +def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... +def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: ... +def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: ... class Error(ValueError): ... class Incomplete(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi index 3129767a79537..bdead928468f4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi @@ -1,10 +1,3 @@ -"""Macintosh binhex compression/decompression. - -easy interface: -binhex(inputfilename, outputfilename) -hexbin(inputfilename, outputfilename) -""" - from _typeshed import SizedBuffer from typing import IO, Any, Final from typing_extensions import TypeAlias @@ -40,8 +33,7 @@ class BinHex: def write_rsrc(self, data: SizedBuffer) -> None: ... def close(self) -> None: ... -def binhex(inp: str, out: str) -> None: - """binhex(infilename, outfilename): create binhex-encoded copy of a file""" +def binhex(inp: str, out: str) -> None: ... class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... @@ -50,5 +42,4 @@ class HexBin: def read_rsrc(self, *n: int) -> bytes: ... def close(self) -> None: ... -def hexbin(inp: str, out: str) -> None: - """hexbin(infilename, outfilename) - Decode binhexed file""" +def hexbin(inp: str, out: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi index 69bf605572354..60dfc48d69bd7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi @@ -1,5 +1,3 @@ -"""Bisection algorithms.""" - from _bisect import * bisect = bisect_right diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi index 5f1d339a595ea..969d1687611c2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi @@ -1,15 +1,3 @@ -"""Built-in functions, types, exceptions, and other objects. - -This module provides direct access to all 'built-in' -identifiers of Python; for example, builtins.len is -the full name for the built-in function len(). - -This module is not normally accessed explicitly by most -applications, but can be useful in modules that provide -objects with the same name as a built-in value, but in -which the built-in of that name is also needed. -""" - import _ast import _sitebuiltins import _typeshed @@ -118,12 +106,6 @@ _StepT_co = TypeVar("_StepT_co", covariant=True, default=_StartT_co | _StopT_co) @disjoint_base class object: - """The base class of the class hierarchy. 
- - When called, it accepts no arguments and returns a new featureless - instance that has no instance attributes and cannot be given any. - """ - __doc__: str | None __dict__: dict[str, Any] __module__: str @@ -160,33 +142,13 @@ class object: @disjoint_base class staticmethod(Generic[_P, _R_co]): - """Convert a function to be a static method. - - A static method does not receive an implicit first argument. - To declare a static method, use this idiom: - - class C: - @staticmethod - def f(arg1, arg2, argN): - ... - - It can be called either on the class (e.g. C.f()) or on an instance - (e.g. C().f()). Both the class and the instance are ignored, and - neither is passed implicitly as the first argument to the method. - - Static methods in Python are similar to those found in Java or C++. - For a more advanced concept, see the classmethod builtin. - """ - @property def __func__(self) -> Callable[_P, _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... def __init__(self, f: Callable[_P, _R_co], /) -> None: ... @overload - def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: - """Return an attribute of instance, which is of type owner.""" - + def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: ... @overload def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): @@ -194,43 +156,20 @@ class staticmethod(Generic[_P, _R_co]): __qualname__: str @property def __wrapped__(self) -> Callable[_P, _R_co]: ... - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: - """Call self as a function.""" + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... __annotate__: AnnotateFunc | None @disjoint_base class classmethod(Generic[_T, _P, _R_co]): - """Convert a function to be a class method. - - A class method receives the class as implicit first argument, - just like an instance method receives the instance. - To declare a class method, use this idiom: - - class C: - @classmethod - def f(cls, arg1, arg2, argN): - ... - - It can be called either on the class (e.g. C.f()) or on an instance - (e.g. C().f()). The instance is ignored except for its class. - If a class method is called for a derived class, the derived class - object is passed as the implied first argument. - - Class methods are different than C++ or Java static methods. - If you want those, see the staticmethod builtin. - """ - @property def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... def __init__(self, f: Callable[Concatenate[type[_T], _P], _R_co], /) -> None: ... @overload - def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: - """Return an attribute of instance, which is of type owner.""" - + def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... @overload def __get__(self, instance: None, owner: type[_T], /) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): @@ -244,10 +183,6 @@ class classmethod(Generic[_T, _P, _R_co]): @disjoint_base class type: - """type(object) -> the object's type - type(name, bases, dict, **kwds) -> a new type - """ - # object.__base__ is None. Otherwise, it would be a type. @property def __base__(self) -> type | None: ... 
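Illustrative sketch of the two call forms listed in the removed type docstring (plain runtime behaviour, not part of the stub):

assert type(3) is int                                   # one argument: the object's class
Point = type("Point", (object,), {"x": 0, "y": 0})      # three arguments: a new class
p = Point()
assert isinstance(p, Point) and (p.x, p.y) == (0, 0)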
@@ -281,31 +216,20 @@ class type: def __new__( cls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any ) -> _typeshed.Self: ... - def __call__(self, *args: Any, **kwds: Any) -> Any: - """Call self as a function.""" - - def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: - """Return a list of immediate subclasses.""" + def __call__(self, *args: Any, **kwds: Any) -> Any: ... + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. - def mro(self) -> list[type]: - """Return a type's method resolution order.""" - - def __instancecheck__(self, instance: Any, /) -> bool: - """Check if an object is an instance.""" - - def __subclasscheck__(self, subclass: type, /) -> bool: - """Check if a class is a subclass.""" - + def mro(self) -> list[type]: ... + def __instancecheck__(self, instance: Any, /) -> bool: ... + def __subclasscheck__(self, subclass: type, /) -> bool: ... @classmethod - def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: - """Create the namespace for the class statement""" + def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: ... if sys.version_info >= (3, 10): - def __or__(self, value: Any, /) -> types.UnionType: - """Return self|value.""" - - def __ror__(self, value: Any, /) -> types.UnionType: - """Return value|self.""" + # `int | str` produces an instance of `UnionType`, but `int | int` produces an instance of `type`, + # and `abc.ABC | abc.ABC` produces an instance of `abc.ABCMeta`. + def __or__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... + def __ror__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... if sys.version_info >= (3, 12): __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __annotations__: dict[str, AnnotationForm] @@ -314,21 +238,6 @@ class type: @disjoint_base class super: - """super() -> same as super(__class__, ) - super(type) -> unbound super object - super(type, obj) -> bound super object; requires isinstance(obj, type) - super(type, type2) -> bound super object; requires issubclass(type2, type) - Typical use to call a cooperative superclass method: - class C(B): - def meth(self, arg): - super().meth(arg) - This works for class methods too: - class C(B): - @classmethod - def cmeth(cls, arg): - super().cmeth(arg) - """ - @overload def __init__(self, t: Any, obj: Any, /) -> None: ... @overload @@ -342,99 +251,28 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 @disjoint_base class int: - """int([x]) -> integer - int(x, base=10) -> integer - - Convert a number or string to an integer, or return 0 if no arguments - are given. If x is a number, return x.__int__(). For floating-point - numbers, this truncates towards zero. - - If x is not a number or if base is given, then x must be a string, - bytes, or bytearray instance representing an integer literal in the - given base. The literal can be preceded by '+' or '-' and be surrounded - by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. - Base 0 means to interpret the base from the string as an integer literal. - >>> int('0b100', base=0) - 4 - """ - @overload - def __new__(cls, x: ConvertibleToInt = ..., /) -> Self: ... + def __new__(cls, x: ConvertibleToInt = 0, /) -> Self: ... 
@overload def __new__(cls, x: str | bytes | bytearray, /, base: SupportsIndex) -> Self: ... - def as_integer_ratio(self) -> tuple[int, Literal[1]]: - """Return a pair of integers, whose ratio is equal to the original int. - - The ratio is in lowest terms and has a positive denominator. - - >>> (10).as_integer_ratio() - (10, 1) - >>> (-10).as_integer_ratio() - (-10, 1) - >>> (0).as_integer_ratio() - (0, 1) - """ - + def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... @property - def real(self) -> int: - """the real part of a complex number""" - + def real(self) -> int: ... @property - def imag(self) -> Literal[0]: - """the imaginary part of a complex number""" - + def imag(self) -> Literal[0]: ... @property - def numerator(self) -> int: - """the numerator of a rational number in lowest terms""" - + def numerator(self) -> int: ... @property - def denominator(self) -> Literal[1]: - """the denominator of a rational number in lowest terms""" - - def conjugate(self) -> int: - """Returns self, the complex conjugate of any int.""" - - def bit_length(self) -> int: - """Number of bits necessary to represent self in binary. - - >>> bin(37) - '0b100101' - >>> (37).bit_length() - 6 - """ + def denominator(self) -> Literal[1]: ... + def conjugate(self) -> int: ... + def bit_length(self) -> int: ... if sys.version_info >= (3, 10): - def bit_count(self) -> int: - """Number of ones in the binary representation of the absolute value of self. + def bit_count(self) -> int: ... - Also known as the population count. - - >>> bin(13) - '0b1101' - >>> (13).bit_count() - 3 - """ if sys.version_info >= (3, 11): def to_bytes( self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False - ) -> bytes: - """Return an array of bytes representing an integer. - - length - Length of bytes object to use. An OverflowError is raised if the - integer is not representable with the given number of bytes. Default - is length 1. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - sys.byteorder as the byte order value. Default is to use 'big'. - signed - Determines whether two's complement is used to represent the integer. - If signed is False and a negative integer is given, an OverflowError - is raised. - """ - + ) -> bytes: ... @classmethod def from_bytes( cls, @@ -442,42 +280,9 @@ class int: byteorder: Literal["little", "big"] = "big", *, signed: bool = False, - ) -> Self: - """Return the integer represented by the given array of bytes. - - bytes - Holds the array of bytes to convert. The argument must either - support the buffer protocol or be an iterable object producing bytes. - Bytes and bytearray are examples of built-in objects that support the - buffer protocol. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - sys.byteorder as the byte order value. Default is to use 'big'. - signed - Indicates whether two's complement is used to represent the integer. - """ + ) -> Self: ... 
else: - def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: - """Return an array of bytes representing an integer. - - length - Length of bytes object to use. An OverflowError is raised if the - integer is not representable with the given number of bytes. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Determines whether two's complement is used to represent the integer. - If signed is False and a negative integer is given, an OverflowError - is raised. - """ - + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... @classmethod def from_bytes( cls, @@ -485,73 +290,27 @@ class int: byteorder: Literal["little", "big"], *, signed: bool = False, - ) -> Self: - """Return the integer represented by the given array of bytes. - - bytes - Holds the array of bytes to convert. The argument must either - support the buffer protocol or be an iterable object producing bytes. - Bytes and bytearray are examples of built-in objects that support the - buffer protocol. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Indicates whether two's complement is used to represent the integer. - """ - if sys.version_info >= (3, 12): - def is_integer(self) -> Literal[True]: - """Returns True. Exists for duck type compatibility with float.is_integer.""" - - def __add__(self, value: int, /) -> int: - """Return self+value.""" - - def __sub__(self, value: int, /) -> int: - """Return self-value.""" - - def __mul__(self, value: int, /) -> int: - """Return self*value.""" - - def __floordiv__(self, value: int, /) -> int: - """Return self//value.""" - - def __truediv__(self, value: int, /) -> float: - """Return self/value.""" - - def __mod__(self, value: int, /) -> int: - """Return self%value.""" - - def __divmod__(self, value: int, /) -> tuple[int, int]: - """Return divmod(self, value).""" - - def __radd__(self, value: int, /) -> int: - """Return value+self.""" - - def __rsub__(self, value: int, /) -> int: - """Return value-self.""" - - def __rmul__(self, value: int, /) -> int: - """Return value*self.""" - - def __rfloordiv__(self, value: int, /) -> int: - """Return value//self.""" - - def __rtruediv__(self, value: int, /) -> float: - """Return value/self.""" - - def __rmod__(self, value: int, /) -> int: - """Return value%self.""" - - def __rdivmod__(self, value: int, /) -> tuple[int, int]: - """Return divmod(value, self).""" - - @overload - def __pow__(self, x: Literal[0], /) -> Literal[1]: - """Return pow(self, value, mod).""" + ) -> Self: ... + if sys.version_info >= (3, 12): + def is_integer(self) -> Literal[True]: ... + + def __add__(self, value: int, /) -> int: ... + def __sub__(self, value: int, /) -> int: ... + def __mul__(self, value: int, /) -> int: ... + def __floordiv__(self, value: int, /) -> int: ... + def __truediv__(self, value: int, /) -> float: ... 
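Illustrative only: the byte-conversion and bit-query methods typed just above behave as follows (bit_count() needs Python 3.10+).

n = 1023                                    # 0b1111111111
assert n.bit_length() == 10
assert n.bit_count() == 10                  # population count, 3.10+
blob = n.to_bytes(2, "big")
assert blob == b"\x03\xff"
assert int.from_bytes(blob, "big") == n
assert (10).as_integer_ratio() == (10, 1)   # ratio in lowest terms, denominator is always 1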
+ def __mod__(self, value: int, /) -> int: ... + def __divmod__(self, value: int, /) -> tuple[int, int]: ... + def __radd__(self, value: int, /) -> int: ... + def __rsub__(self, value: int, /) -> int: ... + def __rmul__(self, value: int, /) -> int: ... + def __rfloordiv__(self, value: int, /) -> int: ... + def __rtruediv__(self, value: int, /) -> float: ... + def __rmod__(self, value: int, /) -> int: ... + def __rdivmod__(self, value: int, /) -> tuple[int, int]: ... + @overload + def __pow__(self, x: Literal[0], /) -> Literal[1]: ... @overload def __pow__(self, value: Literal[0], mod: None, /) -> Literal[1]: ... @overload @@ -564,68 +323,27 @@ class int: def __pow__(self, value: int, mod: None = None, /) -> Any: ... @overload def __pow__(self, value: int, mod: int, /) -> int: ... - def __rpow__(self, value: int, mod: int | None = None, /) -> Any: - """Return pow(value, self, mod).""" - - def __and__(self, value: int, /) -> int: - """Return self&value.""" - - def __or__(self, value: int, /) -> int: - """Return self|value.""" - - def __xor__(self, value: int, /) -> int: - """Return self^value.""" - - def __lshift__(self, value: int, /) -> int: - """Return self< int: - """Return self>>value.""" - - def __rand__(self, value: int, /) -> int: - """Return value&self.""" - - def __ror__(self, value: int, /) -> int: - """Return value|self.""" - - def __rxor__(self, value: int, /) -> int: - """Return value^self.""" - - def __rlshift__(self, value: int, /) -> int: - """Return value< int: - """Return value>>self.""" - - def __neg__(self) -> int: - """-self""" - - def __pos__(self) -> int: - """+self""" - - def __invert__(self) -> int: - """~self""" - - def __trunc__(self) -> int: - """Truncating an Integral returns itself.""" - - def __ceil__(self) -> int: - """Ceiling of an Integral returns itself.""" - - def __floor__(self) -> int: - """Flooring an Integral returns itself.""" + def __rpow__(self, value: int, mod: int | None = None, /) -> Any: ... + def __and__(self, value: int, /) -> int: ... + def __or__(self, value: int, /) -> int: ... + def __xor__(self, value: int, /) -> int: ... + def __lshift__(self, value: int, /) -> int: ... + def __rshift__(self, value: int, /) -> int: ... + def __rand__(self, value: int, /) -> int: ... + def __ror__(self, value: int, /) -> int: ... + def __rxor__(self, value: int, /) -> int: ... + def __rlshift__(self, value: int, /) -> int: ... + def __rrshift__(self, value: int, /) -> int: ... + def __neg__(self) -> int: ... + def __pos__(self) -> int: ... + def __invert__(self) -> int: ... + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... if sys.version_info >= (3, 14): - def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: - """Rounding an Integral returns itself. - - Rounding with an ndigits argument also returns an integer. - """ + def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: ... else: - def __round__(self, ndigits: SupportsIndex = ..., /) -> int: - """Rounding an Integral returns itself. - - Rounding with an ndigits argument also returns an integer. - """ + def __round__(self, ndigits: SupportsIndex = ..., /) -> int: ... def __getnewargs__(self) -> tuple[int]: ... def __eq__(self, value: object, /) -> bool: ... @@ -634,152 +352,60 @@ class int: def __le__(self, value: int, /) -> bool: ... def __gt__(self, value: int, /) -> bool: ... def __ge__(self, value: int, /) -> bool: ... 
- def __float__(self) -> float: - """float(self)""" - - def __int__(self) -> int: - """int(self)""" - - def __abs__(self) -> int: - """abs(self)""" - + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __abs__(self) -> int: ... def __hash__(self) -> int: ... - def __bool__(self) -> bool: - """True if self else False""" - - def __index__(self) -> int: - """Return self converted to an integer, if self is suitable for use as an index into a list.""" - - def __format__(self, format_spec: str, /) -> str: - """Convert to a string according to format_spec.""" + def __bool__(self) -> bool: ... + def __index__(self) -> int: ... + def __format__(self, format_spec: str, /) -> str: ... @disjoint_base class float: - """Convert a string or number to a floating-point number, if possible.""" - - def __new__(cls, x: ConvertibleToFloat = ..., /) -> Self: ... - def as_integer_ratio(self) -> tuple[int, int]: - """Return a pair of integers, whose ratio is exactly equal to the original float. - - The ratio is in lowest terms and has a positive denominator. Raise - OverflowError on infinities and a ValueError on NaNs. - - >>> (10.0).as_integer_ratio() - (10, 1) - >>> (0.0).as_integer_ratio() - (0, 1) - >>> (-.25).as_integer_ratio() - (-1, 4) - """ - - def hex(self) -> str: - """Return a hexadecimal representation of a floating-point number. - - >>> (-0.1).hex() - '-0x1.999999999999ap-4' - >>> 3.14159.hex() - '0x1.921f9f01b866ep+1' - """ - - def is_integer(self) -> bool: - """Return True if the float is an integer.""" - + def __new__(cls, x: ConvertibleToFloat = 0, /) -> Self: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def hex(self) -> str: ... + def is_integer(self) -> bool: ... @classmethod - def fromhex(cls, string: str, /) -> Self: - """Create a floating-point number from a hexadecimal string. - - >>> float.fromhex('0x1.ffffp10') - 2047.984375 - >>> float.fromhex('-0x1p-1074') - -5e-324 - """ - + def fromhex(cls, string: str, /) -> Self: ... @property - def real(self) -> float: - """the real part of a complex number""" - + def real(self) -> float: ... @property - def imag(self) -> float: - """the imaginary part of a complex number""" - - def conjugate(self) -> float: - """Return self, the complex conjugate of any float.""" - - def __add__(self, value: float, /) -> float: - """Return self+value.""" - - def __sub__(self, value: float, /) -> float: - """Return self-value.""" - - def __mul__(self, value: float, /) -> float: - """Return self*value.""" - - def __floordiv__(self, value: float, /) -> float: - """Return self//value.""" - - def __truediv__(self, value: float, /) -> float: - """Return self/value.""" - - def __mod__(self, value: float, /) -> float: - """Return self%value.""" - - def __divmod__(self, value: float, /) -> tuple[float, float]: - """Return divmod(self, value).""" - - @overload - def __pow__(self, value: int, mod: None = None, /) -> float: - """Return pow(self, value, mod).""" + def imag(self) -> float: ... + def conjugate(self) -> float: ... + def __add__(self, value: float, /) -> float: ... + def __sub__(self, value: float, /) -> float: ... + def __mul__(self, value: float, /) -> float: ... + def __floordiv__(self, value: float, /) -> float: ... + def __truediv__(self, value: float, /) -> float: ... + def __mod__(self, value: float, /) -> float: ... + def __divmod__(self, value: float, /) -> tuple[float, float]: ... + @overload + def __pow__(self, value: int, mod: None = None, /) -> float: ... 
# positive __value -> float; negative __value -> complex # return type must be Any as `float | complex` causes too many false-positive errors @overload def __pow__(self, value: float, mod: None = None, /) -> Any: ... - def __radd__(self, value: float, /) -> float: - """Return value+self.""" - - def __rsub__(self, value: float, /) -> float: - """Return value-self.""" - - def __rmul__(self, value: float, /) -> float: - """Return value*self.""" - - def __rfloordiv__(self, value: float, /) -> float: - """Return value//self.""" - - def __rtruediv__(self, value: float, /) -> float: - """Return value/self.""" - - def __rmod__(self, value: float, /) -> float: - """Return value%self.""" - - def __rdivmod__(self, value: float, /) -> tuple[float, float]: - """Return divmod(value, self).""" - + def __radd__(self, value: float, /) -> float: ... + def __rsub__(self, value: float, /) -> float: ... + def __rmul__(self, value: float, /) -> float: ... + def __rfloordiv__(self, value: float, /) -> float: ... + def __rtruediv__(self, value: float, /) -> float: ... + def __rmod__(self, value: float, /) -> float: ... + def __rdivmod__(self, value: float, /) -> tuple[float, float]: ... @overload - def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: - """Return pow(value, self, mod).""" - + def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: ... @overload def __rpow__(self, value: _NegativeInteger, mod: None = None, /) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @overload def __rpow__(self, value: float, mod: None = None, /) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... - def __trunc__(self) -> int: - """Return the Integral closest to x between 0 and x.""" - - def __ceil__(self) -> int: - """Return the ceiling as an Integral.""" - - def __floor__(self) -> int: - """Return the floor as an Integral.""" - + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... @overload - def __round__(self, ndigits: None = None, /) -> int: - """Return the Integral closest to x, rounding half toward even. - - When an argument is passed, work like built-in round(x, ndigits). - """ - + def __round__(self, ndigits: None = None, /) -> int: ... @overload def __round__(self, ndigits: SupportsIndex, /) -> float: ... def __eq__(self, value: object, /) -> bool: ... @@ -788,116 +414,57 @@ class float: def __le__(self, value: float, /) -> bool: ... def __gt__(self, value: float, /) -> bool: ... def __ge__(self, value: float, /) -> bool: ... - def __neg__(self) -> float: - """-self""" - - def __pos__(self) -> float: - """+self""" - - def __int__(self) -> int: - """int(self)""" - - def __float__(self) -> float: - """float(self)""" - - def __abs__(self) -> float: - """abs(self)""" - + def __neg__(self) -> float: ... + def __pos__(self) -> float: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self) -> float: ... def __hash__(self) -> int: ... - def __bool__(self) -> bool: - """True if self else False""" - - def __format__(self, format_spec: str, /) -> str: - """Formats the float according to format_spec.""" + def __bool__(self) -> bool: ... + def __format__(self, format_spec: str, /) -> str: ... 
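Illustrative checks of the float helpers typed above (standard behaviour, not part of the stub):

x = -0.25
assert x.as_integer_ratio() == (-1, 4)
assert float.fromhex(x.hex()) == x                  # hex()/fromhex() round-trip exactly
assert (2.0).is_integer() and not (2.5).is_integer()
assert round(2.5) == 2                              # no ndigits: returns int, ties go to even
assert round(2.675, 2) == 2.67                      # with ndigits: returns float, limited by binary representation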
if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: - """Convert real number to a floating-point number.""" + def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: ... @disjoint_base class complex: - """Create a complex number from a string or numbers. - - If a string is given, parse it as a complex number. - If a single number is given, convert it to a complex number. - If the 'real' or 'imag' arguments are given, create a complex number - with the specified real and imaginary components. - """ - # Python doesn't currently accept SupportsComplex for the second argument @overload def __new__( cls, - real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ..., - imag: complex | SupportsFloat | SupportsIndex = ..., + real: complex | SupportsComplex | SupportsFloat | SupportsIndex = 0, + imag: complex | SupportsFloat | SupportsIndex = 0, ) -> Self: ... @overload def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... @property - def real(self) -> float: - """the real part of a complex number""" - + def real(self) -> float: ... @property - def imag(self) -> float: - """the imaginary part of a complex number""" - - def conjugate(self) -> complex: - """Return the complex conjugate of its argument. (3-4j).conjugate() == 3+4j.""" - - def __add__(self, value: complex, /) -> complex: - """Return self+value.""" - - def __sub__(self, value: complex, /) -> complex: - """Return self-value.""" - - def __mul__(self, value: complex, /) -> complex: - """Return self*value.""" - - def __pow__(self, value: complex, mod: None = None, /) -> complex: - """Return pow(self, value, mod).""" - - def __truediv__(self, value: complex, /) -> complex: - """Return self/value.""" - - def __radd__(self, value: complex, /) -> complex: - """Return value+self.""" - - def __rsub__(self, value: complex, /) -> complex: - """Return value-self.""" - - def __rmul__(self, value: complex, /) -> complex: - """Return value*self.""" - - def __rpow__(self, value: complex, mod: None = None, /) -> complex: - """Return pow(value, self, mod).""" - - def __rtruediv__(self, value: complex, /) -> complex: - """Return value/self.""" - + def imag(self) -> float: ... + def conjugate(self) -> complex: ... + def __add__(self, value: complex, /) -> complex: ... + def __sub__(self, value: complex, /) -> complex: ... + def __mul__(self, value: complex, /) -> complex: ... + def __pow__(self, value: complex, mod: None = None, /) -> complex: ... + def __truediv__(self, value: complex, /) -> complex: ... + def __radd__(self, value: complex, /) -> complex: ... + def __rsub__(self, value: complex, /) -> complex: ... + def __rmul__(self, value: complex, /) -> complex: ... + def __rpow__(self, value: complex, mod: None = None, /) -> complex: ... + def __rtruediv__(self, value: complex, /) -> complex: ... def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... - def __neg__(self) -> complex: - """-self""" - - def __pos__(self) -> complex: - """+self""" - - def __abs__(self) -> float: - """abs(self)""" - + def __neg__(self) -> complex: ... + def __pos__(self) -> complex: ... + def __abs__(self) -> float: ... def __hash__(self) -> int: ... - def __bool__(self) -> bool: - """True if self else False""" - - def __format__(self, format_spec: str, /) -> str: - """Convert to a string according to format_spec.""" + def __bool__(self) -> bool: ... 
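Illustrative only: the complex constructor and arithmetic typed above in action.

z = complex(3, 4)
assert z.conjugate() == 3 - 4j
assert abs(z) == 5.0                  # __abs__ is the magnitude, returned as a float
assert complex("1+2j") == 1 + 2j      # the single-string form accepted by __new__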
+ def __format__(self, format_spec: str, /) -> str: ... if sys.version_info >= (3, 11): - def __complex__(self) -> complex: - """Convert this value to exact type complex.""" + def __complex__(self) -> complex: ... if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: - """Convert number to a complex floating-point number.""" + def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: ... @type_check_only class _FormatMapMapping(Protocol): @@ -909,477 +476,146 @@ class _TranslateTable(Protocol): @disjoint_base class str(Sequence[str]): - """str(object='') -> str - str(bytes_or_buffer[, encoding[, errors]]) -> str - - Create a new string object from the given object. If encoding or - errors is specified, then the object must expose a data buffer - that will be decoded using the given encoding and error handler. - Otherwise, returns the result of object.__str__() (if defined) - or repr(object). - encoding defaults to 'utf-8'. - errors defaults to 'strict'. - """ - @overload - def __new__(cls, object: object = ...) -> Self: ... + def __new__(cls, object: object = "") -> Self: ... @overload - def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + def __new__(cls, object: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> Self: ... @overload - def capitalize(self: LiteralString) -> LiteralString: - """Return a capitalized version of the string. - - More specifically, make the first character have upper case and the rest lower - case. - """ - + def capitalize(self: LiteralString) -> LiteralString: ... @overload def capitalize(self) -> str: ... # type: ignore[misc] @overload - def casefold(self: LiteralString) -> LiteralString: - """Return a version of the string suitable for caseless comparisons.""" - + def casefold(self: LiteralString) -> LiteralString: ... @overload def casefold(self) -> str: ... # type: ignore[misc] @overload - def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: - """Return a centered string of length width. - - Padding is done using the specified fill character (default is a space). - """ - + def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... @overload def center(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] - def count(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: - """Return the number of non-overlapping occurrences of substring sub in string S[start:end]. - - Optional arguments start and end are interpreted as in slice notation. - """ - - def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: - """Encode the string using the codec registered for encoding. - - encoding - The encoding in which to encode the string. - errors - The error handling scheme to use for encoding errors. - The default is 'strict' meaning that encoding errors raise a - UnicodeEncodeError. Other possible values are 'ignore', 'replace' and - 'xmlcharrefreplace' as well as any other name registered with - codecs.register_error that can handle UnicodeEncodeErrors. - """ - + def count(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... 
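Illustrative only: the two str constructor forms typed above, plus encode() and count() with slice-style bounds.

assert str(b"caf\xc3\xa9", encoding="utf-8") == "café"
s = "hello world"
assert s.encode() == b"hello world"       # encoding defaults to "utf-8"
assert s.count("l", 0, 5) == 2            # start/end behave like slice bounds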
def endswith( - self, suffix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> bool: - """Return True if the string ends with the specified suffix, False otherwise. - - suffix - A string or a tuple of strings to try. - start - Optional start position. Default: start of the string. - end - Optional stop position. Default: end of the string. - """ - + self, suffix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> bool: ... @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: - """Return a copy where all tab characters are expanded using spaces. - - If tabsize is not given, a tab size of 8 characters is assumed. - """ - + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] - def find(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: - """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. - - Optional arguments start and end are interpreted as in slice notation. - Return -1 on failure. - """ - + def find(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: - """Return a formatted version of the string, using substitutions from args and kwargs. - The substitutions are identified by braces ('{' and '}'). - """ - + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... @overload def format(self, *args: object, **kwargs: object) -> str: ... - def format_map(self, mapping: _FormatMapMapping, /) -> str: - """Return a formatted version of the string, using substitutions from mapping. - The substitutions are identified by braces ('{' and '}'). - """ - - def index(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: - """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. - - Optional arguments start and end are interpreted as in slice notation. - Raises ValueError when the substring is not found. - """ - - def isalnum(self) -> bool: - """Return True if the string is an alpha-numeric string, False otherwise. - - A string is alpha-numeric if all characters in the string are alpha-numeric and - there is at least one character in the string. - """ - - def isalpha(self) -> bool: - """Return True if the string is an alphabetic string, False otherwise. - - A string is alphabetic if all characters in the string are alphabetic and there - is at least one character in the string. - """ - - def isascii(self) -> bool: - """Return True if all characters in the string are ASCII, False otherwise. - - ASCII characters have code points in the range U+0000-U+007F. - Empty string is ASCII too. - """ - - def isdecimal(self) -> bool: - """Return True if the string is a decimal string, False otherwise. - - A string is a decimal string if all characters in the string are decimal and - there is at least one character in the string. - """ - - def isdigit(self) -> bool: - """Return True if the string is a digit string, False otherwise. - - A string is a digit string if all characters in the string are digits and there - is at least one character in the string. 
- """ - - def isidentifier(self) -> bool: - """Return True if the string is a valid Python identifier, False otherwise. - - Call keyword.iskeyword(s) to test whether string s is a reserved identifier, - such as "def" or "class". - """ - - def islower(self) -> bool: - """Return True if the string is a lowercase string, False otherwise. - - A string is lowercase if all cased characters in the string are lowercase and - there is at least one cased character in the string. - """ - - def isnumeric(self) -> bool: - """Return True if the string is a numeric string, False otherwise. - - A string is numeric if all characters in the string are numeric and there is at - least one character in the string. - """ - - def isprintable(self) -> bool: - """Return True if all characters in the string are printable, False otherwise. - - A character is printable if repr() may use it in its output. - """ - - def isspace(self) -> bool: - """Return True if the string is a whitespace string, False otherwise. - - A string is whitespace if all characters in the string are whitespace and there - is at least one character in the string. - """ - - def istitle(self) -> bool: - """Return True if the string is a title-cased string, False otherwise. - - In a title-cased string, upper- and title-case characters may only - follow uncased characters and lowercase characters only cased ones. - """ - - def isupper(self) -> bool: - """Return True if the string is an uppercase string, False otherwise. - - A string is uppercase if all cased characters in the string are uppercase and - there is at least one cased character in the string. - """ - - @overload - def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString: - """Concatenate any number of strings. - - The string whose method is called is inserted in between each given string. - The result is returned as a new string. - - Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs' - """ - + def format_map(self, mapping: _FormatMapMapping, /) -> str: ... + def index(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString: ... @overload def join(self, iterable: Iterable[str], /) -> str: ... # type: ignore[misc] @overload - def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: - """Return a left-justified string of length width. - - Padding is done using the specified fill character (default is a space). - """ - + def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... @overload def ljust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload - def lower(self: LiteralString) -> LiteralString: - """Return a copy of the string converted to lowercase.""" - + def lower(self: LiteralString) -> LiteralString: ... @overload def lower(self) -> str: ... 
# type: ignore[misc] @overload - def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: - """Return a copy of the string with leading whitespace removed. - - If chars is given and not None, remove characters in chars instead. - """ - + def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... @overload def lstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload - def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: - """Partition the string into three parts using the given separator. - - This will search for the separator in the string. If the separator is found, - returns a 3-tuple containing the part before the separator, the separator - itself, and the part after it. - - If the separator is not found, returns a 3-tuple containing the original string - and two empty strings. - """ - + def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: ... @overload def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] if sys.version_info >= (3, 13): @overload - def replace(self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1) -> LiteralString: - """Return a copy with all occurrences of substring old replaced by new. - - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. - - If the optional argument count is given, only the first count occurrences are - replaced. - """ - + def replace( + self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1 + ) -> LiteralString: ... @overload def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] else: @overload - def replace(self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, /) -> LiteralString: - """Return a copy with all occurrences of substring old replaced by new. - - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. - - If the optional argument count is given, only the first count occurrences are - replaced. - """ - + def replace( + self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / + ) -> LiteralString: ... @overload def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] @overload - def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: - """Return a str with the given prefix string removed if present. - - If the string starts with the prefix string, return string[len(prefix):]. - Otherwise, return a copy of the original string. - """ - + def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: ... @overload def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] @overload - def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: - """Return a str with the given suffix string removed if present. - - If the string ends with the suffix string and that suffix is not empty, - return string[:-len(suffix)]. Otherwise, return a copy of the original - string. - """ - + def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: ... @overload def removesuffix(self, suffix: str, /) -> str: ... 
# type: ignore[misc] - def rfind(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: - """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. - - Optional arguments start and end are interpreted as in slice notation. - Return -1 on failure. - """ - - def rindex(self, sub: str, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., /) -> int: - """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. - - Optional arguments start and end are interpreted as in slice notation. - Raises ValueError when the substring is not found. - """ - + def rfind(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def rindex(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... @overload - def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: - """Return a right-justified string of length width. - - Padding is done using the specified fill character (default is a space). - """ - + def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... @overload def rjust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload - def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: - """Partition the string into three parts using the given separator. - - This will search for the separator in the string, starting at the end. If - the separator is found, returns a 3-tuple containing the part before the - separator, the separator itself, and the part after it. - - If the separator is not found, returns a 3-tuple containing two empty strings - and the original string. - """ - + def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: ... @overload def rpartition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: - """Return a list of the substrings in the string, using sep as the separator string. - - sep - The separator used to split the string. - - When set to None (the default value), will split on any whitespace - character (including \\n \\r \\t \\f and spaces) and will discard - empty strings from the result. - maxsplit - Maximum number of splits. - -1 (the default value) means no limit. - - Splitting starts at the end of the string and works to the front. - """ - + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload - def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: - """Return a copy of the string with trailing whitespace removed. - - If chars is given and not None, remove characters in chars instead. - """ - + def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... @overload def rstrip(self, chars: str | None = None, /) -> str: ... 
# type: ignore[misc] @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: - """Return a list of the substrings in the string, using sep as the separator string. - - sep - The separator used to split the string. - - When set to None (the default value), will split on any whitespace - character (including \\n \\r \\t \\f and spaces) and will discard - empty strings from the result. - maxsplit - Maximum number of splits. - -1 (the default value) means no limit. - - Splitting starts at the front of the string and works to the end. - - Note, str.split() is mainly useful for data that has been intentionally - delimited. With natural text that includes punctuation, consider using - the regular expression module. - """ - + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: - """Return a list of the lines in the string, breaking at line boundaries. - - Line breaks are not included in the resulting list unless keepends is given and - true. - """ - + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( - self, prefix: str | tuple[str, ...], start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> bool: - """Return True if the string starts with the specified prefix, False otherwise. - - prefix - A string or a tuple of strings to try. - start - Optional start position. Default: start of the string. - end - Optional stop position. Default: end of the string. - """ - + self, prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> bool: ... @overload - def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: - """Return a copy of the string with leading and trailing whitespace removed. - - If chars is given and not None, remove characters in chars instead. - """ - + def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... @overload def strip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload - def swapcase(self: LiteralString) -> LiteralString: - """Convert uppercase characters to lowercase and lowercase characters to uppercase.""" - + def swapcase(self: LiteralString) -> LiteralString: ... @overload def swapcase(self) -> str: ... # type: ignore[misc] @overload - def title(self: LiteralString) -> LiteralString: - """Return a version of the string where each word is titlecased. - - More specifically, words start with uppercased characters and all remaining - cased characters have lower case. - """ - + def title(self: LiteralString) -> LiteralString: ... @overload def title(self) -> str: ... # type: ignore[misc] - def translate(self, table: _TranslateTable, /) -> str: - """Replace each character in the string using the given translation table. - - table - Translation table, which must be a mapping of Unicode ordinals to - Unicode ordinals, strings, or None. - - The table must implement lookup/indexing via __getitem__, for instance a - dictionary or list. If this operation raises LookupError, the character is - left untouched. 
Characters mapped to None are deleted. - """ - + def translate(self, table: _TranslateTable, /) -> str: ... @overload - def upper(self: LiteralString) -> LiteralString: - """Return a copy of the string converted to uppercase.""" - + def upper(self: LiteralString) -> LiteralString: ... @overload def upper(self) -> str: ... # type: ignore[misc] @overload - def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString: - """Pad a numeric string with zeros on the left, to fill a field of the given width. - - The string is never truncated. - """ - + def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString: ... @overload def zfill(self, width: SupportsIndex, /) -> str: ... # type: ignore[misc] @staticmethod @overload - def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: - """Return a translation table usable for str.translate(). - - If there is only one argument, it must be a dictionary mapping Unicode - ordinals (integers) or characters to Unicode ordinals, strings or None. - Character keys will be then converted to ordinals. - If there are two arguments, they must be strings of equal length, and - in the resulting dictionary, each character in x will be mapped to the - character at the same position in y. If there is a third argument, it - must be a string, whose characters will be mapped to None in the result. - """ - + def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, /) -> dict[int, int]: ... @@ -1387,497 +623,129 @@ class str(Sequence[str]): @overload def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]: ... @overload - def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString: - """Return self+value.""" - + def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString: ... @overload def __add__(self, value: str, /) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ - def __contains__(self, key: str, /) -> bool: # type: ignore[override] - """Return bool(key in self).""" - + def __contains__(self, key: str, /) -> bool: ... # type: ignore[override] def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: str, /) -> bool: ... @overload - def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: - """Return self[key].""" - + def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: ... @overload def __getitem__(self, key: SupportsIndex | slice, /) -> str: ... # type: ignore[misc] def __gt__(self, value: str, /) -> bool: ... def __hash__(self) -> int: ... @overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: - """Implement iter(self).""" - + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, value: str, /) -> bool: ... - def __len__(self) -> int: - """Return len(self).""" - + def __len__(self) -> int: ... def __lt__(self, value: str, /) -> bool: ... @overload - def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString: - """Return self%value.""" - + def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString: ... @overload def __mod__(self, value: Any, /) -> str: ... 
@overload - def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: - """Return self*value.""" - + def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: ... @overload def __mul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __ne__(self, value: object, /) -> bool: ... @overload - def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: - """Return value*self.""" - + def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: ... @overload def __rmul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... - def __format__(self, format_spec: str, /) -> str: - """Return a formatted version of the string as described by format_spec.""" + def __format__(self, format_spec: str, /) -> str: ... @disjoint_base class bytes(Sequence[int]): - """bytes(iterable_of_ints) -> bytes - bytes(string, encoding[, errors]) -> bytes - bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer - bytes(int) -> bytes object of size given by the parameter initialized with null bytes - bytes() -> empty bytes object - - Construct an immutable array of bytes from: - - an iterable yielding integers in range(256) - - a text string encoded using the specified encoding - - any object implementing the buffer API. - - an integer - """ - @overload def __new__(cls, o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, /) -> Self: ... @overload - def __new__(cls, string: str, /, encoding: str, errors: str = ...) -> Self: ... + def __new__(cls, string: str, /, encoding: str, errors: str = "strict") -> Self: ... @overload def __new__(cls) -> Self: ... - def capitalize(self) -> bytes: - """B.capitalize() -> copy of B - - Return a copy of B with only its first character capitalized (ASCII) - and the rest lower-cased. - """ - - def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: - """Return a centered string of length width. - - Padding is done using the specified fill character. - """ - + def capitalize(self) -> bytes: ... + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: ... def count( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - """ - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """Decode the bytes using the codec registered for encoding. - - encoding - The encoding with which to decode the bytes. - errors - The error handling scheme to use for the handling of decoding errors. - The default is 'strict' meaning that decoding errors raise a - UnicodeDecodeError. Other possible values are 'ignore' and 'replace' - as well as any other name registered with codecs.register_error that - can handle UnicodeDecodeErrors. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... 
def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], - start: SupportsIndex | None = ..., - end: SupportsIndex | None = ..., + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, /, - ) -> bool: - """Return True if the bytes ends with the specified suffix, False otherwise. - - suffix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - """ - - def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: - """Return a copy where all tab characters are expanded using spaces. - - If tabsize is not given, a tab size of 8 characters is assumed. - """ - + ) -> bool: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... def find( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Return -1 on failure. - """ - - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: - """Create a string of hexadecimal numbers from a bytes object. - - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - - Example: - >>> value = b'\\xb9\\x01\\xef' - >>> value.hex() - 'b901ef' - >>> value.hex(':') - 'b9:01:ef' - >>> value.hex(':', 2) - 'b9:01ef' - >>> value.hex(':', -2) - 'b901:ef' - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... def index( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Raise ValueError if the subsection is not found. - """ - - def isalnum(self) -> bool: - """B.isalnum() -> bool - - Return True if all characters in B are alphanumeric - and there is at least one character in B, False otherwise. - """ - - def isalpha(self) -> bool: - """B.isalpha() -> bool - - Return True if all characters in B are alphabetic - and there is at least one character in B, False otherwise. - """ - - def isascii(self) -> bool: - """B.isascii() -> bool - - Return True if B is empty or all characters in B are ASCII, - False otherwise. - """ - - def isdigit(self) -> bool: - """B.isdigit() -> bool - - Return True if all characters in B are digits - and there is at least one character in B, False otherwise. - """ - - def islower(self) -> bool: - """B.islower() -> bool - - Return True if all cased characters in B are lowercase and there is - at least one cased character in B, False otherwise. - """ - - def isspace(self) -> bool: - """B.isspace() -> bool - - Return True if all characters in B are whitespace - and there is at least one character in B, False otherwise. 
- """ - - def istitle(self) -> bool: - """B.istitle() -> bool - - Return True if B is a titlecased string and there is at least one - character in B, i.e. uppercase characters may only follow uncased - characters and lowercase characters only cased ones. Return False - otherwise. - """ - - def isupper(self) -> bool: - """B.isupper() -> bool - - Return True if all cased characters in B are uppercase and there is - at least one cased character in B, False otherwise. - """ - - def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: - """Concatenate any number of bytes objects. - - The bytes whose method is called is inserted in between each pair. - - The result is returned as a new bytes object. - - Example: b'.'.join([b'ab', b'pq', b'rs']) -> b'ab.pq.rs'. - """ - - def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: - """Return a left-justified string of length width. - - Padding is done using the specified fill character. - """ - - def lower(self) -> bytes: - """B.lower() -> copy of B - - Return a copy of B with all ASCII characters converted to lowercase. - """ - - def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: - """Strip leading bytes contained in the argument. - - If the argument is omitted or None, strip leading ASCII whitespace. - """ - - def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: - """Partition the bytes into three parts using the given separator. - - This will search for the separator sep in the bytes. If the separator is found, - returns a 3-tuple containing the part before the separator, the separator - itself, and the part after it. - - If the separator is not found, returns a 3-tuple containing the original bytes - object and two empty bytes objects. - """ - - def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: - """Return a copy with all occurrences of substring old replaced by new. - - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. - - If the optional argument count is given, only the first count occurrences are - replaced. - """ - - def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: - """Return a bytes object with the given prefix string removed if present. - - If the bytes starts with the prefix string, return bytes[len(prefix):]. - Otherwise, return a copy of the original bytes. - """ - - def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: - """Return a bytes object with the given suffix string removed if present. - - If the bytes ends with the suffix string and that suffix is not empty, - return bytes[:-len(prefix)]. Otherwise, return a copy of the original - bytes. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: ... + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... + def lower(self) -> bytes: ... + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... + def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... 
+ def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: ... + def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: ... def rfind( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Return -1 on failure. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... def rindex( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Raise ValueError if the subsection is not found. - """ - - def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: - """Return a right-justified string of length width. - - Padding is done using the specified fill character. - """ - - def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: - """Partition the bytes into three parts using the given separator. - - This will search for the separator sep in the bytes, starting at the end. If - the separator is found, returns a 3-tuple containing the part before the - separator, the separator itself, and the part after it. - - If the separator is not found, returns a 3-tuple containing two empty bytes - objects and the original bytes object. - """ - - def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: - """Return a list of the sections in the bytes, using sep as the delimiter. - - sep - The delimiter according which to split the bytes. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. - - Splitting is done starting at the end of the bytes and working to the front. - """ - - def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: - """Strip trailing bytes contained in the argument. - - If the argument is omitted or None, strip trailing ASCII whitespace. - """ - - def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: - """Return a list of the sections in the bytes, using sep as the delimiter. - - sep - The delimiter according which to split the bytes. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. - """ - - def splitlines(self, keepends: bool = False) -> list[bytes]: - """Return a list of the lines in the bytes, breaking at line boundaries. - - Line breaks are not included in the resulting list unless keepends is given and - true. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... 
+ def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def splitlines(self, keepends: bool = False) -> list[bytes]: ... def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], - start: SupportsIndex | None = ..., - end: SupportsIndex | None = ..., + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, /, - ) -> bool: - """Return True if the bytes starts with the specified prefix, False otherwise. - - prefix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - """ - - def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: - """Strip leading and trailing bytes contained in the argument. - - If the argument is omitted or None, strip leading and trailing ASCII whitespace. - """ - - def swapcase(self) -> bytes: - """B.swapcase() -> copy of B - - Return a copy of B with uppercase ASCII characters converted - to lowercase ASCII and vice versa. - """ - - def title(self) -> bytes: - """B.title() -> copy of B - - Return a titlecased version of B, i.e. ASCII words start with uppercase - characters, all remaining cased characters have lowercase. - """ - - def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: - """Return a copy with each character mapped by the given translation table. - - table - Translation table, which must be a bytes object of length 256. - - All characters occurring in the optional argument delete are removed. - The remaining characters are mapped through the given translation table. - """ - - def upper(self) -> bytes: - """B.upper() -> copy of B - - Return a copy of B with all ASCII characters converted to uppercase. - """ - - def zfill(self, width: SupportsIndex, /) -> bytes: - """Pad a numeric string with zeros on the left, to fill a field of the given width. - - The original string is never truncated. - """ - + ) -> bool: ... + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... + def swapcase(self) -> bytes: ... + def title(self) -> bytes: ... + def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: ... + def upper(self) -> bytes: ... + def zfill(self, width: SupportsIndex, /) -> bytes: ... @classmethod - def fromhex(cls, string: str, /) -> Self: - """Create a bytes object from a string of hexadecimal numbers. - - Spaces between two numbers are accepted. - Example: bytes.fromhex('B9 01EF') -> b'\\\\xb9\\\\x01\\\\xef'. - """ - + def fromhex(cls, string: str, /) -> Self: ... @staticmethod - def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: - """Return a translation table usable for the bytes or bytearray translate method. - - The returned table will be one where each byte in frm is mapped to the byte at - the same position in to. - - The bytes objects frm and to must be of the same length. - """ - - def __len__(self) -> int: - """Return len(self).""" - - def __iter__(self) -> Iterator[int]: - """Implement iter(self).""" - + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... 
+ def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... def __hash__(self) -> int: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> int: ... @overload def __getitem__(self, key: slice, /) -> bytes: ... - def __add__(self, value: ReadableBuffer, /) -> bytes: - """Return self+value.""" - - def __mul__(self, value: SupportsIndex, /) -> bytes: - """Return self*value.""" - - def __rmul__(self, value: SupportsIndex, /) -> bytes: - """Return value*self.""" - - def __mod__(self, value: Any, /) -> bytes: - """Return self%value.""" + def __add__(self, value: ReadableBuffer, /) -> bytes: ... + def __mul__(self, value: SupportsIndex, /) -> bytes: ... + def __rmul__(self, value: SupportsIndex, /) -> bytes: ... + def __mod__(self, value: Any, /) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] - """Return bool(key in self).""" - + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: bytes, /) -> bool: ... @@ -1886,532 +754,122 @@ class bytes(Sequence[int]): def __ge__(self, value: bytes, /) -> bool: ... def __getnewargs__(self) -> tuple[bytes]: ... if sys.version_info >= (3, 11): - def __bytes__(self) -> bytes: - """Convert this value to exact type bytes.""" + def __bytes__(self) -> bytes: ... - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" + def __buffer__(self, flags: int, /) -> memoryview: ... @disjoint_base class bytearray(MutableSequence[int]): - """bytearray(iterable_of_ints) -> bytearray - bytearray(string, encoding[, errors]) -> bytearray - bytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer - bytearray(int) -> bytes array of size given by the parameter initialized with null bytes - bytearray() -> empty bytes array - - Construct a mutable bytearray object from: - - an iterable yielding integers in range(256) - - a text string encoded using the specified encoding - - a bytes or a buffer object - - any object implementing the buffer API. - - an integer - """ - @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer, /) -> None: ... @overload - def __init__(self, string: str, /, encoding: str, errors: str = ...) -> None: ... - def append(self, item: SupportsIndex, /) -> None: - """Append a single item to the end of the bytearray. - - item - The item to be appended. - """ - - def capitalize(self) -> bytearray: - """B.capitalize() -> copy of B - - Return a copy of B with only its first character capitalized (ASCII) - and the rest lower-cased. - """ - - def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: - """Return a centered string of length width. - - Padding is done using the specified fill character. - """ - + def __init__(self, string: str, /, encoding: str, errors: str = "strict") -> None: ... + def append(self, item: SupportsIndex, /) -> None: ... + def capitalize(self) -> bytearray: ... + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: ... 
def count( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - """ - - def copy(self) -> bytearray: - """Return a copy of B.""" - - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: - """Decode the bytearray using the codec registered for encoding. - - encoding - The encoding with which to decode the bytearray. - errors - The error handling scheme to use for the handling of decoding errors. - The default is 'strict' meaning that decoding errors raise a - UnicodeDecodeError. Other possible values are 'ignore' and 'replace' - as well as any other name registered with codecs.register_error that - can handle UnicodeDecodeErrors. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def copy(self) -> bytearray: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], - start: SupportsIndex | None = ..., - end: SupportsIndex | None = ..., + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, /, - ) -> bool: - """Return True if the bytearray ends with the specified suffix, False otherwise. - - suffix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytearray. - end - Optional stop position. Default: end of the bytearray. - """ - - def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: - """Return a copy where all tab characters are expanded using spaces. - - If tabsize is not given, a tab size of 8 characters is assumed. - """ - - def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: - """Append all the items from the iterator or sequence to the end of the bytearray. - - iterable_of_ints - The iterable of items to append. - """ - + ) -> bool: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... + def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: ... def find( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Return -1 on failure. - """ - - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: - """Create a string of hexadecimal numbers from a bytearray object. - - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - - Example: - >>> value = bytearray([0xb9, 0x01, 0xef]) - >>> value.hex() - 'b901ef' - >>> value.hex(':') - 'b9:01:ef' - >>> value.hex(':', 2) - 'b9:01ef' - >>> value.hex(':', -2) - 'b901:ef' - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... 
def index( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Raise ValueError if the subsection is not found. - """ - - def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: - """Insert a single item into the bytearray before the given index. - - index - The index where the value is to be inserted. - item - The item to be inserted. - """ - - def isalnum(self) -> bool: - """B.isalnum() -> bool - - Return True if all characters in B are alphanumeric - and there is at least one character in B, False otherwise. - """ - - def isalpha(self) -> bool: - """B.isalpha() -> bool - - Return True if all characters in B are alphabetic - and there is at least one character in B, False otherwise. - """ - - def isascii(self) -> bool: - """B.isascii() -> bool - - Return True if B is empty or all characters in B are ASCII, - False otherwise. - """ - - def isdigit(self) -> bool: - """B.isdigit() -> bool - - Return True if all characters in B are digits - and there is at least one character in B, False otherwise. - """ - - def islower(self) -> bool: - """B.islower() -> bool - - Return True if all cased characters in B are lowercase and there is - at least one cased character in B, False otherwise. - """ - - def isspace(self) -> bool: - """B.isspace() -> bool - - Return True if all characters in B are whitespace - and there is at least one character in B, False otherwise. - """ - - def istitle(self) -> bool: - """B.istitle() -> bool - - Return True if B is a titlecased string and there is at least one - character in B, i.e. uppercase characters may only follow uncased - characters and lowercase characters only cased ones. Return False - otherwise. - """ - - def isupper(self) -> bool: - """B.isupper() -> bool - - Return True if all cased characters in B are uppercase and there is - at least one cased character in B, False otherwise. - """ - - def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: - """Concatenate any number of bytes/bytearray objects. - - The bytearray whose method is called is inserted in between each pair. - - The result is returned as a new bytearray object. - """ - - def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: - """Return a left-justified string of length width. - - Padding is done using the specified fill character. - """ - - def lower(self) -> bytearray: - """B.lower() -> copy of B - - Return a copy of B with all ASCII characters converted to lowercase. - """ - - def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: - """Strip leading bytes contained in the argument. - - If the argument is omitted or None, strip leading ASCII whitespace. - """ - - def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: - """Partition the bytearray into three parts using the given separator. - - This will search for the separator sep in the bytearray. If the separator is - found, returns a 3-tuple containing the part before the separator, the - separator itself, and the part after it as new bytearray objects. - - If the separator is not found, returns a 3-tuple containing the copy of the - original bytearray object and two empty bytearray objects. 
- """ - - def pop(self, index: int = -1, /) -> int: - """Remove and return a single item from B. - - index - The index from where to remove the item. - -1 (the default value) means remove the last item. - - If no index argument is given, will pop the last item. - """ - - def remove(self, value: int, /) -> None: - """Remove the first occurrence of a value in the bytearray. - - value - The value to remove. - """ - - def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: - """Return a bytearray with the given prefix string removed if present. - - If the bytearray starts with the prefix string, return - bytearray[len(prefix):]. Otherwise, return a copy of the original - bytearray. - """ - - def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: - """Return a bytearray with the given suffix string removed if present. - - If the bytearray ends with the suffix string and that suffix is not - empty, return bytearray[:-len(suffix)]. Otherwise, return a copy of - the original bytearray. - """ - - def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: - """Return a copy with all occurrences of substring old replaced by new. - - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. - - If the optional argument count is given, only the first count occurrences are - replaced. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: ... + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... + def lower(self) -> bytearray: ... + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... + def pop(self, index: int = -1, /) -> int: ... + def remove(self, value: int, /) -> None: ... + def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: ... + def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: ... + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: ... def rfind( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. - - Return -1 on failure. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... def rindex( - self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = ..., end: SupportsIndex | None = ..., / - ) -> int: - """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. 
- - Raise ValueError if the subsection is not found. - """ - - def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: - """Return a right-justified string of length width. - - Padding is done using the specified fill character. - """ - - def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: - """Partition the bytearray into three parts using the given separator. - - This will search for the separator sep in the bytearray, starting at the end. - If the separator is found, returns a 3-tuple containing the part before the - separator, the separator itself, and the part after it as new bytearray - objects. - - If the separator is not found, returns a 3-tuple containing two empty bytearray - objects and the copy of the original bytearray object. - """ - - def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: - """Return a list of the sections in the bytearray, using sep as the delimiter. - - sep - The delimiter according which to split the bytearray. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. - - Splitting is done starting at the end of the bytearray and working to the front. - """ - - def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: - """Strip trailing bytes contained in the argument. - - If the argument is omitted or None, strip trailing ASCII whitespace. - """ - - def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: - """Return a list of the sections in the bytearray, using sep as the delimiter. - - sep - The delimiter according which to split the bytearray. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. - """ - - def splitlines(self, keepends: bool = False) -> list[bytearray]: - """Return a list of the lines in the bytearray, breaking at line boundaries. - - Line breaks are not included in the resulting list unless keepends is given and - true. - """ - + self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / + ) -> int: ... + def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def splitlines(self, keepends: bool = False) -> list[bytearray]: ... def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], - start: SupportsIndex | None = ..., - end: SupportsIndex | None = ..., + start: SupportsIndex | None = None, + end: SupportsIndex | None = None, /, - ) -> bool: - """Return True if the bytearray starts with the specified prefix, False otherwise. - - prefix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytearray. - end - Optional stop position. Default: end of the bytearray. 
- """ - - def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: - """Strip leading and trailing bytes contained in the argument. - - If the argument is omitted or None, strip leading and trailing ASCII whitespace. - """ - - def swapcase(self) -> bytearray: - """B.swapcase() -> copy of B - - Return a copy of B with uppercase ASCII characters converted - to lowercase ASCII and vice versa. - """ - - def title(self) -> bytearray: - """B.title() -> copy of B - - Return a titlecased version of B, i.e. ASCII words start with uppercase - characters, all remaining cased characters have lowercase. - """ - - def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: - """Return a copy with each character mapped by the given translation table. - - table - Translation table, which must be a bytes object of length 256. - - All characters occurring in the optional argument delete are removed. - The remaining characters are mapped through the given translation table. - """ - - def upper(self) -> bytearray: - """B.upper() -> copy of B - - Return a copy of B with all ASCII characters converted to uppercase. - """ - - def zfill(self, width: SupportsIndex, /) -> bytearray: - """Pad a numeric string with zeros on the left, to fill a field of the given width. - - The original string is never truncated. - """ - + ) -> bool: ... + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... + def swapcase(self) -> bytearray: ... + def title(self) -> bytearray: ... + def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: ... + def upper(self) -> bytearray: ... + def zfill(self, width: SupportsIndex, /) -> bytearray: ... @classmethod - def fromhex(cls, string: str, /) -> Self: - """Create a bytearray object from a string of hexadecimal numbers. - - Spaces between two numbers are accepted. - Example: bytearray.fromhex('B9 01EF') -> bytearray(b'\\\\xb9\\\\x01\\\\xef') - """ - + def fromhex(cls, string: str, /) -> Self: ... @staticmethod - def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: - """Return a translation table usable for the bytes or bytearray translate method. - - The returned table will be one where each byte in frm is mapped to the byte at - the same position in to. - - The bytes objects frm and to must be of the same length. - """ - - def __len__(self) -> int: - """Return len(self).""" - - def __iter__(self) -> Iterator[int]: - """Implement iter(self).""" + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... __hash__: ClassVar[None] # type: ignore[assignment] @overload - def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> int: ... @overload def __getitem__(self, key: slice, /) -> bytearray: ... @overload - def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: ... @overload def __setitem__(self, key: slice, value: Iterable[SupportsIndex] | bytes, /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key].""" - - def __add__(self, value: ReadableBuffer, /) -> bytearray: - """Return self+value.""" + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __add__(self, value: ReadableBuffer, /) -> bytearray: ... 
# The superclass wants us to accept Iterable[int], but that fails at runtime. - def __iadd__(self, value: ReadableBuffer, /) -> Self: # type: ignore[override] - """Implement self+=value.""" - - def __mul__(self, value: SupportsIndex, /) -> bytearray: - """Return self*value.""" - - def __rmul__(self, value: SupportsIndex, /) -> bytearray: - """Return value*self.""" - - def __imul__(self, value: SupportsIndex, /) -> Self: - """Implement self*=value.""" - - def __mod__(self, value: Any, /) -> bytes: - """Return self%value.""" + def __iadd__(self, value: ReadableBuffer, /) -> Self: ... # type: ignore[override] + def __mul__(self, value: SupportsIndex, /) -> bytearray: ... + def __rmul__(self, value: SupportsIndex, /) -> bytearray: ... + def __imul__(self, value: SupportsIndex, /) -> Self: ... + def __mod__(self, value: Any, /) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] - """Return bool(key in self).""" - + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: ReadableBuffer, /) -> bool: ... def __le__(self, value: ReadableBuffer, /) -> bool: ... def __gt__(self, value: ReadableBuffer, /) -> bool: ... def __ge__(self, value: ReadableBuffer, /) -> bool: ... - def __alloc__(self) -> int: - """B.__alloc__() -> int - - Return the number of bytes actually allocated. - """ - - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object.""" + def __alloc__(self) -> int: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... if sys.version_info >= (3, 14): - def resize(self, size: int, /) -> None: - """Resize the internal buffer of bytearray to len. - - size - New size to resize to.. - """ + def resize(self, size: int, /) -> None: ... _IntegerFormats: TypeAlias = Literal[ "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P" @@ -2419,66 +877,30 @@ _IntegerFormats: TypeAlias = Literal[ @final class memoryview(Sequence[_I]): - """Create a new memoryview object which references the given object.""" - @property - def format(self) -> str: - """A string containing the format (in struct module style) - for each element in the view. - """ - + def format(self) -> str: ... @property - def itemsize(self) -> int: - """The size in bytes of each element of the memoryview.""" - + def itemsize(self) -> int: ... @property - def shape(self) -> tuple[int, ...] | None: - """A tuple of ndim integers giving the shape of the memory - as an N-dimensional array. - """ - + def shape(self) -> tuple[int, ...] | None: ... @property - def strides(self) -> tuple[int, ...] | None: - """A tuple of ndim integers giving the size in bytes to access - each element for each dimension of the array. - """ - + def strides(self) -> tuple[int, ...] | None: ... @property - def suboffsets(self) -> tuple[int, ...] | None: - """A tuple of integers used internally for PIL-style arrays.""" - + def suboffsets(self) -> tuple[int, ...] | None: ... 
@property - def readonly(self) -> bool: - """A bool indicating whether the memory is read only.""" - + def readonly(self) -> bool: ... @property - def ndim(self) -> int: - """An integer indicating how many dimensions of a multi-dimensional - array the memory represents. - """ - + def ndim(self) -> int: ... @property - def obj(self) -> ReadableBuffer: - """The underlying object of the memoryview.""" - + def obj(self) -> ReadableBuffer: ... @property - def c_contiguous(self) -> bool: - """A bool indicating whether the memory is C contiguous.""" - + def c_contiguous(self) -> bool: ... @property - def f_contiguous(self) -> bool: - """A bool indicating whether the memory is Fortran contiguous.""" - + def f_contiguous(self) -> bool: ... @property - def contiguous(self) -> bool: - """A bool indicating whether the memory is contiguous.""" - + def contiguous(self) -> bool: ... @property - def nbytes(self) -> int: - """The amount of space in bytes that the array would use in - a contiguous representation. - """ - + def nbytes(self) -> int: ... def __new__(cls, obj: ReadableBuffer) -> Self: ... def __enter__(self) -> Self: ... def __exit__( @@ -2487,13 +909,9 @@ class memoryview(Sequence[_I]): exc_val: BaseException | None, exc_tb: TracebackType | None, /, - ) -> None: - """Release the underlying buffer exposed by the memoryview object.""" - + ) -> None: ... @overload - def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: - """Cast a memoryview to a new format or shape.""" - + def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: ... @overload def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: ... @overload @@ -2501,147 +919,72 @@ class memoryview(Sequence[_I]): @overload def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... @overload - def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: ... @overload def __getitem__(self, key: slice, /) -> memoryview[_I]: ... def __contains__(self, x: object, /) -> bool: ... - def __iter__(self) -> Iterator[_I]: - """Implement iter(self).""" - - def __len__(self) -> int: - """Return len(self).""" - + def __iter__(self) -> Iterator[_I]: ... + def __len__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload - def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... @overload def __setitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], value: _I, /) -> None: ... if sys.version_info >= (3, 10): - def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: - """Return the data in the buffer as a byte string. - - Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the - original array is converted to C or Fortran order. For contiguous views, - 'A' returns an exact copy of the physical memory. In particular, in-memory - Fortran order is preserved. For non-contiguous views, the data is converted - to C first. order=None is the same as order='C'. - """ + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... 
else: - def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: - """Return the data in the buffer as a byte string. Order can be {'C', 'F', 'A'}. - When order is 'C' or 'F', the data of the original array is converted to C or - Fortran order. For contiguous views, 'A' returns an exact copy of the physical - memory. In particular, in-memory Fortran order is preserved. For non-contiguous - views, the data is converted to C first. order=None is the same as order='C'. - """ - - def tolist(self) -> list[int]: - """Return the data in the buffer as a list of elements.""" - - def toreadonly(self) -> memoryview: - """Return a readonly version of the memoryview.""" - - def release(self) -> None: - """Release the underlying buffer exposed by the memoryview object.""" - - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: - """Return the data in the buffer as a str of hexadecimal numbers. - - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - - Example: - >>> value = memoryview(b'\\xb9\\x01\\xef') - >>> value.hex() - 'b901ef' - >>> value.hex(':') - 'b9:01:ef' - >>> value.hex(':', 2) - 'b9:01ef' - >>> value.hex(':', -2) - 'b901:ef' - """ - - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object.""" + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... + + def tolist(self) -> list[int]: ... + def toreadonly(self) -> memoryview: ... + def release(self) -> None: ... + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... + # These are inherited from the Sequence ABC, but don't actually exist on memoryview. # See https://github.com/python/cpython/issues/125420 index: ClassVar[None] # type: ignore[assignment] count: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class bool(int): - """Returns True when the argument is true, False otherwise. - The builtins True and False are the only two instances of the class bool. - The class bool is a subclass of the class int, and cannot be subclassed. - """ - - def __new__(cls, o: object = ..., /) -> Self: ... + def __new__(cls, o: object = False, /) -> Self: ... # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload - def __and__(self, value: bool, /) -> bool: - """Return self&value.""" - + def __and__(self, value: bool, /) -> bool: ... @overload def __and__(self, value: int, /) -> int: ... @overload - def __or__(self, value: bool, /) -> bool: - """Return self|value.""" - + def __or__(self, value: bool, /) -> bool: ... @overload def __or__(self, value: int, /) -> int: ... @overload - def __xor__(self, value: bool, /) -> bool: - """Return self^value.""" - + def __xor__(self, value: bool, /) -> bool: ... @overload def __xor__(self, value: int, /) -> int: ... 
@overload - def __rand__(self, value: bool, /) -> bool: - """Return value&self.""" - + def __rand__(self, value: bool, /) -> bool: ... @overload def __rand__(self, value: int, /) -> int: ... @overload - def __ror__(self, value: bool, /) -> bool: - """Return value|self.""" - + def __ror__(self, value: bool, /) -> bool: ... @overload def __ror__(self, value: int, /) -> int: ... @overload - def __rxor__(self, value: bool, /) -> bool: - """Return value^self.""" - + def __rxor__(self, value: bool, /) -> bool: ... @overload def __rxor__(self, value: int, /) -> int: ... def __getnewargs__(self) -> tuple[int]: ... @deprecated("Will throw an error in Python 3.16. Use `not` for logical negation of bools instead.") - def __invert__(self) -> int: - """~self""" + def __invert__(self) -> int: ... @final class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): - """slice(stop) - slice(start, stop[, step]) - - Create a slice object. This is used for extended slicing (e.g. a[0:10:2]). - """ - @property def start(self) -> _StartT_co: ... @property @@ -2678,41 +1021,18 @@ class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): else: __hash__: ClassVar[None] # type: ignore[assignment] - def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: - """S.indices(len) -> (start, stop, stride) - - Assuming a sequence of length len, calculate the start and stop - indices, and the stride length of the extended slice described by - S. Out of bounds indices are clipped in a manner consistent with the - handling of normal slices. - """ + def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: ... @disjoint_base class tuple(Sequence[_T_co]): - """Built-in immutable sequence. - - If no argument is given, the constructor returns an empty tuple. - If iterable is specified the tuple is initialized from iterable's items. - - If the argument is a tuple, the return value is the same object. - """ - - def __new__(cls, iterable: Iterable[_T_co] = ..., /) -> Self: ... - def __len__(self) -> int: - """Return len(self).""" - - def __contains__(self, key: object, /) -> bool: - """Return bool(key in self).""" - + def __new__(cls, iterable: Iterable[_T_co] = (), /) -> Self: ... + def __len__(self) -> int: ... + def __contains__(self, key: object, /) -> bool: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> _T_co: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> _T_co: ... @overload def __getitem__(self, key: slice, /) -> tuple[_T_co, ...]: ... - def __iter__(self) -> Iterator[_T_co]: - """Implement iter(self).""" - + def __iter__(self) -> Iterator[_T_co]: ... def __lt__(self, value: tuple[_T_co, ...], /) -> bool: ... def __le__(self, value: tuple[_T_co, ...], /) -> bool: ... def __gt__(self, value: tuple[_T_co, ...], /) -> bool: ... @@ -2720,28 +1040,14 @@ class tuple(Sequence[_T_co]): def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload - def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: - """Return self+value.""" - + def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: ... @overload def __add__(self, value: tuple[_T, ...], /) -> tuple[_T_co | _T, ...]: ... 
- def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: - """Return self*value.""" - - def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: - """Return value*self.""" - - def count(self, value: Any, /) -> int: - """Return number of occurrences of value.""" - - def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: - """Return first index of value. - - Raises ValueError if the value is not present. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... + def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... + def count(self, value: Any, /) -> int: ... + def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: # https://github.com/python/typeshed/issues/7580 @@ -2796,135 +1102,61 @@ class function: @disjoint_base class list(MutableSequence[_T]): - """Built-in mutable sequence. - - If no argument is given, the constructor creates a new empty list. - The argument must be an iterable if specified. - """ - @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def copy(self) -> list[_T]: - """Return a shallow copy of the list.""" - - def append(self, object: _T, /) -> None: - """Append object to the end of the list.""" - - def extend(self, iterable: Iterable[_T], /) -> None: - """Extend list by appending elements from the iterable.""" - - def pop(self, index: SupportsIndex = -1, /) -> _T: - """Remove and return item at index (default last). - - Raises IndexError if list is empty or index is out of range. - """ + def copy(self) -> list[_T]: ... + def append(self, object: _T, /) -> None: ... + def extend(self, iterable: Iterable[_T], /) -> None: ... + def pop(self, index: SupportsIndex = -1, /) -> _T: ... # Signature of `list.index` should be kept in line with `collections.UserList.index()` # and multiprocessing.managers.ListProxy.index() - def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: - """Return first index of value. - - Raises ValueError if the value is not present. - """ - - def count(self, value: _T, /) -> int: - """Return number of occurrences of value.""" - - def insert(self, index: SupportsIndex, object: _T, /) -> None: - """Insert object before index.""" - - def remove(self, value: _T, /) -> None: - """Remove first occurrence of value. - - Raises ValueError if the value is not present. - """ + def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... + def count(self, value: _T, /) -> int: ... + def insert(self, index: SupportsIndex, object: _T, /) -> None: ... + def remove(self, value: _T, /) -> None: ... # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` # and multiprocessing.managers.ListProxy.sort() # # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: - """Sort the list in ascending order and return None. - - The sort is in-place (i.e. the list itself is modified) and stable (i.e. the - order of two equal elements is maintained). 
- - If a key function is given, apply it once to each list item and sort them, - ascending or descending, according to their function values. - - The reverse flag can be set to sort in descending order. - """ - + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... @overload def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... - def __len__(self) -> int: - """Return len(self).""" - - def __iter__(self) -> Iterator[_T]: - """Implement iter(self).""" + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... __hash__: ClassVar[None] # type: ignore[assignment] @overload - def __getitem__(self, i: SupportsIndex, /) -> _T: - """Return self[index].""" - + def __getitem__(self, i: SupportsIndex, /) -> _T: ... @overload def __getitem__(self, s: slice, /) -> list[_T]: ... @overload - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: - """Set self[key] to value.""" - - @overload - def __setitem__(self, key: slice, value: Iterable[_T], /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key].""" - # Overloading looks unnecessary, but is needed to work around complex mypy problems - @overload - def __add__(self, value: list[_T], /) -> list[_T]: - """Return self+value.""" - + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... @overload - def __add__(self, value: list[_S], /) -> list[_S | _T]: ... - def __iadd__(self, value: Iterable[_T], /) -> Self: # type: ignore[misc] - """Implement self+=value.""" - - def __mul__(self, value: SupportsIndex, /) -> list[_T]: - """Return self*value.""" - - def __rmul__(self, value: SupportsIndex, /) -> list[_T]: - """Return value*self.""" - - def __imul__(self, value: SupportsIndex, /) -> Self: - """Implement self*=value.""" - - def __contains__(self, key: object, /) -> bool: - """Return bool(key in self).""" - - def __reversed__(self) -> Iterator[_T]: - """Return a reverse iterator over the list.""" - + def __setitem__(self, key: slice, value: Iterable[_T], /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + # Overloading looks unnecessary, but is needed to work around complex mypy problems + @overload + def __add__(self, value: list[_T], /) -> list[_T]: ... + @overload + def __add__(self, value: list[_S], /) -> list[_S | _T]: ... + def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[misc] + def __mul__(self, value: SupportsIndex, /) -> list[_T]: ... + def __rmul__(self, value: SupportsIndex, /) -> list[_T]: ... + def __imul__(self, value: SupportsIndex, /) -> Self: ... + def __contains__(self, key: object, /) -> bool: ... + def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, value: list[_T], /) -> bool: ... def __ge__(self, value: list[_T], /) -> bool: ... def __lt__(self, value: list[_T], /) -> bool: ... def __le__(self, value: list[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
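Not part of the diff itself: a minimal, runnable sketch of how the two `list.sort` overloads above are meant to behave under a type checker, illustrating the invariance workaround noted in the comment (`self: list[SupportsRichComparisonT]` for the key-less form). Variable names are illustrative only.

    nums: list[int] = [3, 1, 2]
    nums.sort()                 # first overload: int supports rich comparison
    nums.sort(reverse=True)     # still the first overload; reverse is keyword-only

    objs: list[object] = [object(), object()]
    # objs.sort()               # expected to be rejected: object defines no ordering
    objs.sort(key=id)           # second overload: an explicit key supplies the ordering
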
@disjoint_base class dict(MutableMapping[_KT, _VT]): - """dict() -> new empty dictionary - dict(mapping) -> new dictionary initialized from a mapping object's - (key, value) pairs - dict(iterable) -> new dictionary initialized as if via: - d = {} - for k, v in iterable: - d[k] = v - dict(**kwargs) -> new dictionary initialized with the name=value pairs - in the keyword argument list. For example: dict(one=1, two=2) - """ - # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics # Also multiprocessing.managers.SyncManager.dict() @overload @@ -2956,295 +1188,134 @@ class dict(MutableMapping[_KT, _VT]): @overload def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... - def copy(self) -> dict[_KT, _VT]: - """Return a shallow copy of the dict.""" - - def keys(self) -> dict_keys[_KT, _VT]: - """Return a set-like object providing a view on the dict's keys.""" - - def values(self) -> dict_values[_KT, _VT]: - """Return an object providing a view on the dict's values.""" - - def items(self) -> dict_items[_KT, _VT]: - """Return a set-like object providing a view on the dict's items.""" + def copy(self) -> dict[_KT, _VT]: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + def items(self) -> dict_items[_KT, _VT]: ... # Signature of `dict.fromkeys` should be kept identical to # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: - """Create a new dictionary with keys from iterable and values set to value.""" - + def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]: ... # Positional-only in dict, but not in MutableMapping @overload # type: ignore[override] - def get(self, key: _KT, default: None = None, /) -> _VT | None: - """Return the value for key if key is in the dictionary, else default.""" - + def get(self, key: _KT, default: None = None, /) -> _VT | None: ... @overload def get(self, key: _KT, default: _VT, /) -> _VT: ... @overload def get(self, key: _KT, default: _T, /) -> _VT | _T: ... @overload - def pop(self, key: _KT, /) -> _VT: - """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - - If the key is not found, return the default if given; otherwise, - raise a KeyError. - """ - + def pop(self, key: _KT, /) -> _VT: ... @overload def pop(self, key: _KT, default: _VT, /) -> _VT: ... @overload def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... - def __len__(self) -> int: - """Return len(self).""" - - def __getitem__(self, key: _KT, /) -> _VT: - """Return self[key].""" - - def __setitem__(self, key: _KT, value: _VT, /) -> None: - """Set self[key] to value.""" - - def __delitem__(self, key: _KT, /) -> None: - """Delete self[key].""" - - def __iter__(self) -> Iterator[_KT]: - """Implement iter(self).""" - + def __len__(self) -> int: ... + def __getitem__(self, key: _KT, /) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT, /) -> None: ... + def __delitem__(self, key: _KT, /) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... 
def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[_KT]: - """Return a reverse iterator over the dict keys.""" + def __reversed__(self) -> Iterator[_KT]: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @overload - def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: - """Return self|value.""" - + def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... @overload def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: - """Return value|self.""" - + def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... @overload def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: - """Return self|=value.""" - + def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: ... @overload def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... @disjoint_base class set(MutableSet[_T]): - """Build an unordered collection of unique elements.""" - @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def add(self, element: _T, /) -> None: - """Add an element to a set. - - This has no effect if the element is already present. - """ - - def copy(self) -> set[_T]: - """Return a shallow copy of a set.""" - - def difference(self, *s: Iterable[Any]) -> set[_T]: - """Return a new set with elements in the set that are not in the others.""" - - def difference_update(self, *s: Iterable[Any]) -> None: - """Update the set, removing elements found in others.""" - - def discard(self, element: _T, /) -> None: - """Remove an element from a set if it is a member. - - Unlike set.remove(), the discard() method does not raise - an exception when an element is missing from the set. - """ - - def intersection(self, *s: Iterable[Any]) -> set[_T]: - """Return a new set with elements common to the set and all others.""" - - def intersection_update(self, *s: Iterable[Any]) -> None: - """Update the set, keeping only elements found in it and all others.""" - - def isdisjoint(self, s: Iterable[Any], /) -> bool: - """Return True if two sets have a null intersection.""" - - def issubset(self, s: Iterable[Any], /) -> bool: - """Report whether another set contains this set.""" - - def issuperset(self, s: Iterable[Any], /) -> bool: - """Report whether this set contains another set.""" - - def remove(self, element: _T, /) -> None: - """Remove an element from a set; it must be a member. - - If the element is not a member, raise a KeyError. 
- """ - - def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: - """Return a new set with elements in either the set or other but not both.""" - - def symmetric_difference_update(self, s: Iterable[_T], /) -> None: - """Update the set, keeping only elements found in either set, but not in both.""" - - def union(self, *s: Iterable[_S]) -> set[_T | _S]: - """Return a new set with elements from the set and all others.""" - - def update(self, *s: Iterable[_T]) -> None: - """Update the set, adding elements from all others.""" - - def __len__(self) -> int: - """Return len(self).""" - - def __contains__(self, o: object, /) -> bool: - """x.__contains__(y) <==> y in x.""" - - def __iter__(self) -> Iterator[_T]: - """Implement iter(self).""" - - def __and__(self, value: AbstractSet[object], /) -> set[_T]: - """Return self&value.""" - - def __iand__(self, value: AbstractSet[object], /) -> Self: - """Return self&=value.""" - - def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: - """Return self|value.""" - - def __ior__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] - """Return self|=value.""" - - def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: - """Return self-value.""" - - def __isub__(self, value: AbstractSet[object], /) -> Self: - """Return self-=value.""" - - def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: - """Return self^value.""" - - def __ixor__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] - """Return self^=value.""" - + def add(self, element: _T, /) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, element: _T, /) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... + def isdisjoint(self, s: Iterable[Any], /) -> bool: ... + def issubset(self, s: Iterable[Any], /) -> bool: ... + def issuperset(self, s: Iterable[Any], /) -> bool: ... + def remove(self, element: _T, /) -> None: ... + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: ... + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, value: AbstractSet[object], /) -> set[_T]: ... + def __iand__(self, value: AbstractSet[object], /) -> Self: ... + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ior__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: ... + def __isub__(self, value: AbstractSet[object], /) -> Self: ... + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... + def __ixor__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... def __ge__(self, value: AbstractSet[object], /) -> bool: ... def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... 
__hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @disjoint_base class frozenset(AbstractSet[_T_co]): - """Build an immutable unordered collection of unique elements.""" - @overload def __new__(cls) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T_co], /) -> Self: ... - def copy(self) -> frozenset[_T_co]: - """Return a shallow copy of a set.""" - - def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: - """Return a new set with elements in the set that are not in the others.""" - - def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: - """Return a new set with elements common to the set and all others.""" - - def isdisjoint(self, s: Iterable[_T_co], /) -> bool: - """Return True if two sets have a null intersection.""" - - def issubset(self, s: Iterable[object], /) -> bool: - """Report whether another set contains this set.""" - - def issuperset(self, s: Iterable[object], /) -> bool: - """Report whether this set contains another set.""" - - def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: - """Return a new set with elements in either the set or other but not both.""" - - def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: - """Return a new set with elements from the set and all others.""" - - def __len__(self) -> int: - """Return len(self).""" - - def __contains__(self, o: object, /) -> bool: - """x.__contains__(y) <==> y in x.""" - - def __iter__(self) -> Iterator[_T_co]: - """Implement iter(self).""" - - def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: - """Return self&value.""" - - def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: - """Return self|value.""" - - def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: - """Return self-value.""" - - def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: - """Return self^value.""" - + def copy(self) -> frozenset[_T_co]: ... + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def isdisjoint(self, s: Iterable[_T_co], /) -> bool: ... + def issubset(self, s: Iterable[object], /) -> bool: ... + def issuperset(self, s: Iterable[object], /) -> bool: ... + def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: ... + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... + def __len__(self) -> int: ... + def __contains__(self, o: object, /) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... + def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... + def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... + def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... def __ge__(self, value: AbstractSet[object], /) -> bool: ... def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @disjoint_base class enumerate(Generic[_T]): - """Return an enumerate object. 
- - iterable - an object supporting iteration - - The enumerate object yields pairs containing a count (from start, which - defaults to zero) and a value yielded by the iterable argument. - - enumerate is useful for obtaining an indexed list: - (0, seq[0]), (1, seq[1]), (2, seq[2]), ... - """ - def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> tuple[int, _T]: - """Implement next(self).""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[int, _T]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class range(Sequence[int]): - """range(stop) -> range object - range(start, stop[, step]) -> range object - - Return an object that produces a sequence of integers from start (inclusive) - to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1. - start defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3. - These are exactly the valid indices for a list of 4 elements. - When step is given, it specifies the increment (or decrement). - """ - @property def start(self) -> int: ... @property @@ -3254,71 +1325,22 @@ class range(Sequence[int]): @overload def __new__(cls, stop: SupportsIndex, /) -> Self: ... @overload - def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = ..., /) -> Self: ... - def count(self, value: int, /) -> int: - """rangeobject.count(value) -> integer -- return number of occurrences of value""" - - def index(self, value: int, /) -> int: # type: ignore[override] - """rangeobject.index(value) -> integer -- return index of value. - Raise ValueError if the value is not present. - """ - - def __len__(self) -> int: - """Return len(self).""" - + def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = 1, /) -> Self: ... + def count(self, value: int, /) -> int: ... + def index(self, value: int, /) -> int: ... # type: ignore[override] + def __len__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __contains__(self, key: object, /) -> bool: - """Return bool(key in self).""" - - def __iter__(self) -> Iterator[int]: - """Implement iter(self).""" - + def __contains__(self, key: object, /) -> bool: ... + def __iter__(self) -> Iterator[int]: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> int: ... @overload def __getitem__(self, key: slice, /) -> range: ... - def __reversed__(self) -> Iterator[int]: - """Return a reverse iterator.""" + def __reversed__(self) -> Iterator[int]: ... @disjoint_base class property: - """Property attribute. - - fget - function to be used for getting an attribute value - fset - function to be used for setting an attribute value - fdel - function to be used for del'ing an attribute - doc - docstring - - Typical use is to define a managed attribute x: - - class C(object): - def getx(self): return self._x - def setx(self, value): self._x = value - def delx(self): del self._x - x = property(getx, setx, delx, "I'm the 'x' property.") - - Decorators make defining new properties or modifying existing ones easy: - - class C(object): - @property - def x(self): - "I am the 'x' property." 
- return self._x - @x.setter - def x(self, value): - self._x = value - @x.deleter - def x(self): - del self._x - """ - fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], None] | None fdel: Callable[[Any], None] | None @@ -3328,31 +1350,20 @@ class property: def __init__( self, - fget: Callable[[Any], Any] | None = ..., - fset: Callable[[Any, Any], None] | None = ..., - fdel: Callable[[Any], None] | None = ..., - doc: str | None = ..., + fget: Callable[[Any], Any] | None = None, + fset: Callable[[Any, Any], None] | None = None, + fdel: Callable[[Any], None] | None = None, + doc: str | None = None, ) -> None: ... - def getter(self, fget: Callable[[Any], Any], /) -> property: - """Descriptor to obtain a copy of the property with a different getter.""" - - def setter(self, fset: Callable[[Any, Any], None], /) -> property: - """Descriptor to obtain a copy of the property with a different setter.""" - - def deleter(self, fdel: Callable[[Any], None], /) -> property: - """Descriptor to obtain a copy of the property with a different deleter.""" - + def getter(self, fget: Callable[[Any], Any], /) -> property: ... + def setter(self, fset: Callable[[Any, Any], None], /) -> property: ... + def deleter(self, fdel: Callable[[Any], None], /) -> property: ... @overload - def __get__(self, instance: None, owner: type, /) -> Self: - """Return an attribute of instance, which is of type owner.""" - + def __get__(self, instance: None, owner: type, /) -> Self: ... @overload def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... - def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value.""" - - def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance.""" + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... @final @type_check_only @@ -3361,58 +1372,17 @@ class _NotImplementedType(Any): NotImplemented: _NotImplementedType -def abs(x: SupportsAbs[_T], /) -> _T: - """Return the absolute value of the argument.""" - -def all(iterable: Iterable[object], /) -> bool: - """Return True if bool(x) is True for all values x in the iterable. - - If the iterable is empty, return True. - """ - -def any(iterable: Iterable[object], /) -> bool: - """Return True if bool(x) is True for any x in the iterable. - - If the iterable is empty, return False. - """ - -def ascii(obj: object, /) -> str: - """Return an ASCII-only representation of an object. - - As repr(), return a string containing a printable representation of an - object, but escape the non-ASCII characters in the string returned by - repr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar - to that returned by repr() in Python 2. - """ - -def bin(number: int | SupportsIndex, /) -> str: - """Return the binary representation of an integer. - - >>> bin(2796202) - '0b1010101010101010101010' - """ - -def breakpoint(*args: Any, **kws: Any) -> None: - """Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept - whatever arguments are passed. - - By default, this drops you into the pdb debugger. - """ - -def callable(obj: object, /) -> TypeIs[Callable[..., object]]: - """Return whether the object is callable (i.e., some kind of function). - - Note that classes are callable, as are instances of classes with a - __call__() method. 
- """ - -def chr(i: int | SupportsIndex, /) -> str: - """Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.""" +def abs(x: SupportsAbs[_T], /) -> _T: ... +def all(iterable: Iterable[object], /) -> bool: ... +def any(iterable: Iterable[object], /) -> bool: ... +def ascii(obj: object, /) -> str: ... +def bin(number: int | SupportsIndex, /) -> str: ... +def breakpoint(*args: Any, **kws: Any) -> None: ... +def callable(obj: object, /) -> TypeIs[Callable[..., object]]: ... +def chr(i: int | SupportsIndex, /) -> str: ... if sys.version_info >= (3, 10): - def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: - """Return an AsyncIterator for an AsyncIterable object.""" - + def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: ... @type_check_only class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): def __anext__(self) -> _AwaitableT_co: ... @@ -3421,13 +1391,7 @@ if sys.version_info >= (3, 10): # `anext` is not, in fact, an async function. When default is not provided # `anext` is just a passthrough for `obj.__anext__` # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 - def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: - """Return the next item from the async iterator. - - If default is given and the async iterator is exhausted, - it is returned instead of raising StopAsyncIteration. - """ - + def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: ... @overload async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT: ... @@ -3437,32 +1401,18 @@ if sys.version_info >= (3, 10): @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, - filename: str | ReadableBuffer | PathLike[Any], + filename: str | bytes | PathLike[Any], mode: str, flags: Literal[0], dont_inherit: bool = False, optimize: int = -1, *, _feature_version: int = -1, -) -> CodeType: - """Compile source into a code object that can be executed by exec() or eval(). - - The source code may represent a Python module, statement or expression. - The filename will be used for run-time error messages. - The mode must be 'exec' to compile a module, 'single' to compile a - single (interactive) statement, or 'eval' to compile an expression. - The flags argument, if present, controls which future statements influence - the compilation of the code. - The dont_inherit argument, if true, stops the compilation inheriting - the effects of any future statements in effect in the code calling - compile; if absent or false these statements do influence the compilation, - in addition to any features explicitly specified. - """ - +) -> CodeType: ... 
@overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, - filename: str | ReadableBuffer | PathLike[Any], + filename: str | bytes | PathLike[Any], mode: str, *, dont_inherit: bool = False, @@ -3472,7 +1422,7 @@ def compile( @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, - filename: str | ReadableBuffer | PathLike[Any], + filename: str | bytes | PathLike[Any], mode: str, flags: Literal[1024], dont_inherit: bool = False, @@ -3483,7 +1433,7 @@ def compile( @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, - filename: str | ReadableBuffer | PathLike[Any], + filename: str | bytes | PathLike[Any], mode: str, flags: int, dont_inherit: bool = False, @@ -3495,31 +1445,10 @@ def compile( copyright: _sitebuiltins._Printer credits: _sitebuiltins._Printer -def delattr(obj: object, name: str, /) -> None: - """Deletes the named attribute from the given object. - - delattr(x, 'y') is equivalent to ``del x.y`` - """ - -def dir(o: object = ..., /) -> list[str]: - """dir([object]) -> list of strings - - If called without an argument, return the names in the current scope. - Else, return an alphabetized list of names comprising (some of) the attributes - of the given object, and of attributes reachable from it. - If the object supplies a method named __dir__, it will be used; otherwise - the default dir() logic is used and returns: - for a module object: the module's attributes. - for a class object: its attributes, and recursively the attributes - of its bases. - for any other object: its attributes, its class's attributes, and - recursively the attributes of its class's base classes. - """ - +def delattr(obj: object, name: str, /) -> None: ... +def dir(o: object = ..., /) -> list[str]: ... @overload -def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: - """Return the tuple (x//y, x%y). Invariant: div*y + mod == x.""" - +def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: ... @overload def divmod(x: _T_contra, y: SupportsRDivMod[_T_contra, _T_co], /) -> _T_co: ... @@ -3531,15 +1460,7 @@ if sys.version_info >= (3, 13): /, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, - ) -> Any: - """Evaluate the given source in the context of globals and locals. - - The source may be a string representing a Python expression - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - """ + ) -> Any: ... else: def eval( @@ -3547,15 +1468,7 @@ else: globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, /, - ) -> Any: - """Evaluate the given source in the context of globals and locals. - - The source may be a string representing a Python expression - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - """ + ) -> Any: ... # Comment above regarding `eval` applies to `exec` as well if sys.version_info >= (3, 13): @@ -3566,17 +1479,7 @@ if sys.version_info >= (3, 13): locals: Mapping[str, object] | None = None, *, closure: tuple[CellType, ...] | None = None, - ) -> None: - """Execute the given source in the context of globals and locals. 
- - The source may be a string representing one or more Python statements - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - The closure must be a tuple of cellvars, and can only be used - when source is a code object requiring exactly that many cellvars. - """ + ) -> None: ... elif sys.version_info >= (3, 11): def exec( @@ -3586,17 +1489,7 @@ elif sys.version_info >= (3, 11): /, *, closure: tuple[CellType, ...] | None = None, - ) -> None: - """Execute the given source in the context of globals and locals. - - The source may be a string representing one or more Python statements - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - The closure must be a tuple of cellvars, and can only be used - when source is a code object requiring exactly that many cellvars. - """ + ) -> None: ... else: def exec( @@ -3604,24 +1497,12 @@ else: globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, /, - ) -> None: - """Execute the given source in the context of globals and locals. - - The source may be a string representing one or more Python statements - or a code object as returned by compile(). - The globals must be a dictionary and locals can be any mapping, - defaulting to the current globals and locals. - If only globals is given, locals defaults to it. - """ + ) -> None: ... exit: _sitebuiltins.Quitter @disjoint_base class filter(Generic[_T]): - """Return an iterator yielding those items of iterable for which function(item) - is true. If function is None, return the items that are true. - """ - @overload def __new__(cls, function: None, iterable: Iterable[_T | None], /) -> Self: ... @overload @@ -3630,31 +1511,12 @@ class filter(Generic[_T]): def __new__(cls, function: Callable[[_S], TypeIs[_T]], iterable: Iterable[_S], /) -> Self: ... @overload def __new__(cls, function: Callable[[_T], Any], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" - -def format(value: object, format_spec: str = "", /) -> str: - """Return type(value).__format__(value, format_spec) - - Many built-in types implement format_spec according to the - Format Specification Mini-language. See help('FORMATTING'). - - If type(value) does not supply a method named __format__ - and format_spec is empty, then str(value) is returned. - See also help('SPECIALMETHODS'). - """ + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... +def format(value: object, format_spec: str = "", /) -> str: ... @overload -def getattr(o: object, name: str, /) -> Any: - """getattr(object, name[, default]) -> value - - Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. - When a default argument is given, it is returned when the attribute doesn't - exist; without it, an exception is raised in that case. - """ +def getattr(o: object, name: str, /) -> Any: ... # While technically covered by the last overload, spelling out the types for None, bool # and basic containers help mypy out in some tricky situations involving type context @@ -3669,66 +1531,21 @@ def getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]: ... 
def getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, Any]: ... @overload def getattr(o: object, name: str, default: _T, /) -> Any | _T: ... -def globals() -> dict[str, Any]: - """Return the dictionary containing the current scope's global variables. - - NOTE: Updates to this dictionary *will* affect name lookups in the current - global scope and vice-versa. - """ - -def hasattr(obj: object, name: str, /) -> bool: - """Return whether the object has an attribute with the given name. - - This is done by calling getattr(obj, name) and catching AttributeError. - """ - -def hash(obj: object, /) -> int: - """Return the hash value for the given object. - - Two objects that compare equal must also have the same hash value, but the - reverse is not necessarily true. - """ +def globals() -> dict[str, Any]: ... +def hasattr(obj: object, name: str, /) -> bool: ... +def hash(obj: object, /) -> int: ... help: _sitebuiltins._Helper -def hex(number: int | SupportsIndex, /) -> str: - """Return the hexadecimal representation of an integer. - - >>> hex(12648430) - '0xc0ffee' - """ - -def id(obj: object, /) -> int: - """Return the identity of an object. - - This is guaranteed to be unique among simultaneously existing objects. - (CPython uses the object's memory address.) - """ - -def input(prompt: object = "", /) -> str: - """Read a string from standard input. The trailing newline is stripped. - - The prompt string, if given, is printed to standard output without a - trailing newline before reading input. - - If the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError. - On *nix systems, readline is used if available. - """ - +def hex(number: int | SupportsIndex, /) -> str: ... +def id(obj: object, /) -> int: ... +def input(prompt: object = "", /) -> str: ... @type_check_only class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, i: int, /) -> _T_co: ... @overload -def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: - """iter(iterable) -> iterator - iter(callable, sentinel) -> iterator - - Get an iterator from an object. In the first form, the argument must - supply its own iterator, or be a sequence. - In the second form, the callable is called until it returns the sentinel. - """ - +def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: ... @overload def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ... @overload @@ -3741,44 +1558,15 @@ if sys.version_info >= (3, 10): else: _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] -def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: - """Return whether an object is an instance of a class or of a subclass thereof. - - A tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to - check against. This is equivalent to ``isinstance(x, A) or isinstance(x, B) - or ...`` etc. - """ - -def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: - """Return whether 'cls' is derived from another class or is the same class. - - A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to - check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B) - or ...``. - """ - -def len(obj: Sized, /) -> int: - """Return the number of items in a container.""" +def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: ... +def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: ... +def len(obj: Sized, /) -> int: ... 
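A hedged sketch of why the extra `getattr` overloads above spell out `None`, `bool`, `list`, and `dict` defaults: each call is expected to resolve to the overload whose default type matches, so the result is narrowed to e.g. `Any | bool` rather than plain `Any`. The `Config` class and attribute names are made up for illustration.

    class Config:
        debug = True

    cfg = Config()
    flag = getattr(cfg, "debug", False)    # Any | bool
    maybe = getattr(cfg, "missing", None)  # Any | None
    extras = getattr(cfg, "extras", [])    # Any | list[Any]
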
license: _sitebuiltins._Printer -def locals() -> dict[str, Any]: - """Return a dictionary containing the current scope's local variables. - - NOTE: Whether or not updates to this dictionary will affect name lookups in - the local scope and vice-versa is *implementation dependent* and not - covered by any backwards compatibility guarantees. - """ - +def locals() -> dict[str, Any]: ... @disjoint_base class map(Generic[_S]): - """Make an iterator that computes the function using arguments from - each of the iterables. Stops when the shortest iterable is exhausted. - - If strict is true and one of the arguments is exhausted before the others, - raise a ValueError. - """ - # 3.14 adds `strict` argument. if sys.version_info >= (3, 14): @overload @@ -3881,25 +1669,13 @@ class map(Generic[_S]): *iterables: Iterable[Any], ) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _S: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _S: ... @overload def max( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None -) -> SupportsRichComparisonT: - """max(iterable, *[, default=obj, key=func]) -> value - max(arg1, arg2, *args, *[, key=func]) -> value - - With a single iterable argument, return its biggest item. The - default keyword-only argument specifies an object to return if - the provided iterable is empty. - With two or more positional arguments, return the largest argument. - """ - +) -> SupportsRichComparisonT: ... @overload def max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -3913,16 +1689,7 @@ def max(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparis @overload def min( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None -) -> SupportsRichComparisonT: - """min(iterable, *[, default=obj, key=func]) -> value - min(arg1, arg2, *args, *[, key=func]) -> value - - With a single iterable argument, return its smallest item. The - default keyword-only argument specifies an object to return if - the provided iterable is empty. - With two or more positional arguments, return the smallest argument. - """ - +) -> SupportsRichComparisonT: ... @overload def min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -3934,21 +1701,10 @@ def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, def @overload def min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload -def next(i: SupportsNext[_T], /) -> _T: - """next(iterator[, default]) - - Return the next item from the iterator. If default is given and the iterator - is exhausted, it is returned instead of raising StopIteration. - """ - +def next(i: SupportsNext[_T], /) -> _T: ... @overload def next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT: ... -def oct(number: int | SupportsIndex, /) -> str: - """Return the octal representation of an integer. - - >>> oct(342391) - '0o1234567' - """ +def oct(number: int | SupportsIndex, /) -> str: ... _Opener: TypeAlias = Callable[[str, int], int] @@ -3963,121 +1719,7 @@ def open( newline: str | None = None, closefd: bool = True, opener: _Opener | None = None, -) -> TextIOWrapper: - """Open file and return a stream. Raise OSError upon failure. 
- - file is either a text or byte string giving the name (and the path - if the file isn't in the current working directory) of the file to - be opened or an integer file descriptor of the file to be - wrapped. (If a file descriptor is given, it is closed when the - returned I/O object is closed, unless closefd is set to False.) - - mode is an optional string that specifies the mode in which the file - is opened. It defaults to 'r' which means open for reading in text - mode. Other common values are 'w' for writing (truncating the file if - it already exists), 'x' for creating and writing to a new file, and - 'a' for appending (which on some Unix systems, means that all writes - append to the end of the file regardless of the current seek position). - In text mode, if encoding is not specified the encoding used is platform - dependent: locale.getencoding() is called to get the current locale encoding. - (For reading and writing raw bytes use binary mode and leave encoding - unspecified.) The available modes are: - - ========= =============================================================== - Character Meaning - --------- --------------------------------------------------------------- - 'r' open for reading (default) - 'w' open for writing, truncating the file first - 'x' create a new file and open it for writing - 'a' open for writing, appending to the end of the file if it exists - 'b' binary mode - 't' text mode (default) - '+' open a disk file for updating (reading and writing) - ========= =============================================================== - - The default mode is 'rt' (open for reading text). For binary random - access, the mode 'w+b' opens and truncates the file to 0 bytes, while - 'r+b' opens the file without truncation. The 'x' mode implies 'w' and - raises an `FileExistsError` if the file already exists. - - Python distinguishes between files opened in binary and text modes, - even when the underlying operating system doesn't. Files opened in - binary mode (appending 'b' to the mode argument) return contents as - bytes objects without any decoding. In text mode (the default, or when - 't' is appended to the mode argument), the contents of the file are - returned as strings, the bytes having been first decoded using a - platform-dependent encoding or using the specified encoding if given. - - buffering is an optional integer used to set the buffering policy. - Pass 0 to switch buffering off (only allowed in binary mode), 1 to select - line buffering (only usable in text mode), and an integer > 1 to indicate - the size of a fixed-size chunk buffer. When no buffering argument is - given, the default buffering policy works as follows: - - * Binary files are buffered in fixed-size chunks; the size of the buffer - is max(min(blocksize, 8 MiB), DEFAULT_BUFFER_SIZE) - when the device block size is available. - On most systems, the buffer will typically be 128 kilobytes long. - - * "Interactive" text files (files for which isatty() returns True) - use line buffering. Other text files use the policy described above - for binary files. - - encoding is the name of the encoding used to decode or encode the - file. This should only be used in text mode. The default encoding is - platform dependent, but any encoding supported by Python can be - passed. See the codecs module for the list of supported encodings. - - errors is an optional string that specifies how encoding errors are to - be handled---this argument should not be used in binary mode. 
Pass - 'strict' to raise a ValueError exception if there is an encoding error - (the default of None has the same effect), or pass 'ignore' to ignore - errors. (Note that ignoring encoding errors can lead to data loss.) - See the documentation for codecs.register or run 'help(codecs.Codec)' - for a list of the permitted encoding error strings. - - newline controls how universal newlines works (it only applies to text - mode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as - follows: - - * On input, if newline is None, universal newlines mode is - enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and - these are translated into '\\n' before being returned to the - caller. If it is '', universal newline mode is enabled, but line - endings are returned to the caller untranslated. If it has any of - the other legal values, input lines are only terminated by the given - string, and the line ending is returned to the caller untranslated. - - * On output, if newline is None, any '\\n' characters written are - translated to the system default line separator, os.linesep. If - newline is '' or '\\n', no translation takes place. If newline is any - of the other legal values, any '\\n' characters written are translated - to the given string. - - If closefd is False, the underlying file descriptor will be kept open - when the file is closed. This does not work when a file name is given - and must be True in that case. - - A custom opener can be used by passing a callable as *opener*. The - underlying file descriptor for the file object is then obtained by - calling *opener* with (*file*, *flags*). *opener* must return an open - file descriptor (passing os.open as *opener* results in functionality - similar to passing None). - - open() returns a file object whose type depends on the mode, and - through which the standard file operations such as reading and writing - are performed. When open() is used to open a file in a text mode ('w', - 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open - a file in a binary mode, the returned class varies: in read binary - mode, it returns a BufferedReader; in write binary and append binary - modes, it returns a BufferedWriter, and in read/write mode, it returns - a BufferedRandom. - - It is also possible to use a string or bytearray as a file for both - reading and writing. For strings StringIO can be used like a file - opened in a text mode, and for bytes a BytesIO can be used like a file - opened in a binary mode. - """ +) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @overload @@ -4152,16 +1794,7 @@ def open( closefd: bool = True, opener: _Opener | None = None, ) -> IO[Any]: ... -def ord(c: str | bytes | bytearray, /) -> int: - """Return the ordinal value of a character. - - If the argument is a one-character string, return the Unicode code - point of that character. - - If the argument is a bytes or bytearray object of length 1, return its - single byte value. - """ - +def ord(c: str | bytes | bytearray, /) -> int: ... @type_check_only class _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ... @@ -4172,19 +1805,7 @@ def print( end: str | None = "\n", file: SupportsWrite[str] | None = None, flush: Literal[False] = False, -) -> None: - """Prints the values to a stream, or to sys.stdout by default. - - sep - string inserted between values, default a space. - end - string appended after the last value, default a newline. 
- file - a file-like object (stream); defaults to the current sys.stdout. - flush - whether to forcibly flush the stream. - """ - +) -> None: ... @overload def print( *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool @@ -4212,13 +1833,7 @@ _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs a # TODO: `pow(int, int, Literal[0])` fails at runtime, # but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). @overload -def pow(base: int, exp: int, mod: int) -> int: - """Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments - - Some types, such as ints, are able to use a more efficient algorithm when - invoked using the three argument form. - """ - +def pow(base: int, exp: int, mod: int) -> int: ... @overload def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... @overload @@ -4259,26 +1874,15 @@ quit: _sitebuiltins.Quitter @disjoint_base class reversed(Generic[_T]): - """Return a reverse iterator over the values of the given sequence.""" - @overload def __new__(cls, sequence: Reversible[_T], /) -> Iterator[_T]: ... # type: ignore[misc] @overload def __new__(cls, sequence: SupportsLenAndGetItem[_T], /) -> Iterator[_T]: ... # type: ignore[misc] - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + def __length_hint__(self) -> int: ... - def __length_hint__(self) -> int: - """Private method returning an estimate of len(list(it)).""" - -def repr(obj: object, /) -> str: - """Return the canonical string representation of the object. - - For many object types, including most builtins, eval(repr(obj)) == obj. - """ +def repr(obj: object, /) -> str: ... # See https://github.com/python/typeshed/pull/9141 # and https://github.com/python/typeshed/pull/9151 @@ -4293,34 +1897,17 @@ class _SupportsRound2(Protocol[_T_co]): def __round__(self, ndigits: int, /) -> _T_co: ... @overload -def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: - """Round a number to a given precision in decimal digits. - - The return value is an integer if ndigits is omitted or None. Otherwise - the return value has the same type as the number. ndigits may be negative. - """ - +def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... @overload def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... # See https://github.com/python/typeshed/pull/6292#discussion_r748875189 # for why arg 3 of `setattr` should be annotated with `Any` and not `object` -def setattr(obj: object, name: str, value: Any, /) -> None: - """Sets the named attribute on the given object to the specified value. - - setattr(x, 'y', v) is equivalent to ``x.y = v`` - """ - +def setattr(obj: object, name: str, value: Any, /) -> None: ... @overload def sorted( iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, reverse: bool = False -) -> list[SupportsRichComparisonT]: - """Return a new list containing all items from the iterable in ascending order. - - A custom key function can be supplied to customize the sort order, and the - reverse flag can be set to request the result in descending order. - """ - +) -> list[SupportsRichComparisonT]: ... @overload def sorted(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... 
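A small usage sketch of the `sorted` overloads above (illustrative values only): the first overload applies when the elements are rich-comparable and no key is given, the second when an explicit key function supplies the ordering.

    words = ["pear", "fig", "banana"]
    plain = sorted(words)                   # first overload: str is rich-comparable
    by_len = sorted(words, key=len)         # second overload: explicit key
    desc = sorted([3, 1, 2], reverse=True)  # [3, 2, 1]
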
@@ -4337,14 +1924,7 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # without creating many false-positive errors (see #7578). # Instead, we special-case the most common examples of this: bool and literal integers. @overload -def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: - """Return the sum of a 'start' value (default: 0) plus an iterable of numbers - - When the iterable is empty, return the start value. - This function is intended specifically for use with numeric values and may - reject non-numeric types. - """ - +def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ... @overload def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ... @overload @@ -4353,43 +1933,32 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A # The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) @overload -def vars(object: type, /) -> types.MappingProxyType[str, Any]: - """vars([object]) -> dictionary - - Without arguments, equivalent to locals(). - With an argument, equivalent to object.__dict__. - """ - +def vars(object: type, /) -> types.MappingProxyType[str, Any]: ... @overload def vars(object: Any = ..., /) -> dict[str, Any]: ... @disjoint_base class zip(Generic[_T_co]): - """The zip object yields n-length tuples, where n is the number of iterables - passed as positional arguments to zip(). The i-th element in every tuple - comes from the i-th iterable argument to zip(). This continues until the - shortest argument is exhausted. - - If strict is true and one of the arguments is exhausted before the others, - raise a ValueError. - - >>> list(zip('abcdefg', range(3), range(4))) - [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] - """ - if sys.version_info >= (3, 10): @overload - def __new__(cls, *, strict: bool = ...) -> zip[Any]: ... + def __new__(cls, *, strict: bool = False) -> zip[Any]: ... @overload - def __new__(cls, iter1: Iterable[_T1], /, *, strict: bool = ...) -> zip[tuple[_T1]]: ... + def __new__(cls, iter1: Iterable[_T1], /, *, strict: bool = False) -> zip[tuple[_T1]]: ... @overload - def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = ...) -> zip[tuple[_T1, _T2]]: ... + def __new__(cls, iter1: Iterable[_T1], iter2: Iterable[_T2], /, *, strict: bool = False) -> zip[tuple[_T1, _T2]]: ... @overload def __new__( - cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /, *, strict: bool = ... + cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], /, *, strict: bool = False ) -> zip[tuple[_T1, _T2, _T3]]: ... @overload def __new__( - cls, iter1: Iterable[_T1], iter2: Iterable[_T2], iter3: Iterable[_T3], iter4: Iterable[_T4], /, *, strict: bool = ... + cls, + iter1: Iterable[_T1], + iter2: Iterable[_T2], + iter3: Iterable[_T3], + iter4: Iterable[_T4], + /, + *, + strict: bool = False, ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... @overload def __new__( @@ -4401,7 +1970,7 @@ class zip(Generic[_T_co]): iter5: Iterable[_T5], /, *, - strict: bool = ..., + strict: bool = False, ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def __new__( @@ -4414,7 +1983,7 @@ class zip(Generic[_T_co]): iter6: Iterable[Any], /, *iterables: Iterable[Any], - strict: bool = ..., + strict: bool = False, ) -> zip[tuple[Any, ...]]: ... 
else: @overload @@ -4446,11 +2015,8 @@ class zip(Generic[_T_co]): *iterables: Iterable[Any], ) -> zip[tuple[Any, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` # Return type of `__import__` should be kept the same as return type of `importlib.import_module` @@ -4460,29 +2026,8 @@ def __import__( locals: Mapping[str, object] | None = None, fromlist: Sequence[str] | None = (), level: int = 0, -) -> types.ModuleType: - """Import a module. - - Because this function is meant for use by the Python - interpreter and not for general use, it is better to use - importlib.import_module() to programmatically import a module. - - The globals argument is only used to determine the context; - they are not modified. The locals argument is unused. The fromlist - should be a list of names to emulate ``from name import ...``, or an - empty list to emulate ``import name``. - When importing a module from a package, note that __import__('A.B', ...) - returns package A when fromlist is empty, but its submodule B when - fromlist is not empty. The level argument is used to determine whether to - perform absolute or relative imports: 0 is absolute, while a positive number - is the number of parent directories to search relative to the current module. - """ - -def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: - """__build_class__(func, name, /, *bases, [metaclass], **kwds) -> class - - Internal helper function used by the class statement. - """ +) -> types.ModuleType: ... +def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... if sys.version_info >= (3, 10): from types import EllipsisType @@ -4504,8 +2049,6 @@ else: @disjoint_base class BaseException: - """Common base class for all exceptions""" - args: tuple[Any, ...] __cause__: BaseException | None __context__: BaseException | None @@ -4514,39 +2057,27 @@ class BaseException: def __init__(self, *args: object) -> None: ... def __new__(cls, *args: Any, **kwds: Any) -> Self: ... def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... - def with_traceback(self, tb: TracebackType | None, /) -> Self: - """Set self.__traceback__ to tb and return self.""" + def with_traceback(self, tb: TracebackType | None, /) -> Self: ... if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] - def add_note(self, note: str, /) -> None: - """Add a note to the exception""" - -class GeneratorExit(BaseException): - """Request that a generator exit.""" + def add_note(self, note: str, /) -> None: ... -class KeyboardInterrupt(BaseException): - """Program interrupted by user.""" +class GeneratorExit(BaseException): ... +class KeyboardInterrupt(BaseException): ... @disjoint_base class SystemExit(BaseException): - """Request to exit from the interpreter.""" - code: sys._ExitCode -class Exception(BaseException): - """Common base class for all non-exit exceptions.""" +class Exception(BaseException): ... 
@disjoint_base class StopIteration(Exception): - """Signal the end from iterator.__next__().""" - value: Any @disjoint_base class OSError(Exception): - """Base class for I/O related errors.""" - errno: int | None strerror: str | None # filename, filename2 are actually str | bytes | None @@ -4560,73 +2091,49 @@ IOError = OSError if sys.platform == "win32": WindowsError = OSError -class ArithmeticError(Exception): - """Base class for arithmetic errors.""" - -class AssertionError(Exception): - """Assertion failed.""" +class ArithmeticError(Exception): ... +class AssertionError(Exception): ... if sys.version_info >= (3, 10): @disjoint_base class AttributeError(Exception): - """Attribute not found.""" - - def __init__(self, *args: object, name: str | None = ..., obj: object = ...) -> None: ... - name: str + def __init__(self, *args: object, name: str | None = None, obj: object = None) -> None: ... + name: str | None obj: object else: - class AttributeError(Exception): - """Attribute not found.""" + class AttributeError(Exception): ... -class BufferError(Exception): - """Buffer error.""" - -class EOFError(Exception): - """Read beyond end of file.""" +class BufferError(Exception): ... +class EOFError(Exception): ... @disjoint_base class ImportError(Exception): - """Import can't find module, or can't find name in module.""" - - def __init__(self, *args: object, name: str | None = ..., path: str | None = ...) -> None: ... + def __init__(self, *args: object, name: str | None = None, path: str | None = None) -> None: ... name: str | None path: str | None msg: str # undocumented if sys.version_info >= (3, 12): name_from: str | None # undocumented -class LookupError(Exception): - """Base class for lookup errors.""" - -class MemoryError(Exception): - """Out of memory.""" +class LookupError(Exception): ... +class MemoryError(Exception): ... if sys.version_info >= (3, 10): @disjoint_base class NameError(Exception): - """Name not found globally.""" - - def __init__(self, *args: object, name: str | None = ...) -> None: ... - name: str + def __init__(self, *args: object, name: str | None = None) -> None: ... + name: str | None else: - class NameError(Exception): - """Name not found globally.""" - -class ReferenceError(Exception): - """Weak ref proxy used after referent went away.""" + class NameError(Exception): ... -class RuntimeError(Exception): - """Unspecified run-time error.""" - -class StopAsyncIteration(Exception): - """Signal the end from iterator.__anext__().""" +class ReferenceError(Exception): ... +class RuntimeError(Exception): ... +class StopAsyncIteration(Exception): ... @disjoint_base class SyntaxError(Exception): - """Invalid syntax.""" - msg: str filename: str | None lineno: int | None @@ -4655,106 +2162,42 @@ class SyntaxError(Exception): # If you provide more than two arguments, it still creates the SyntaxError, but # the arguments from the info tuple are not parsed. This form is omitted. -class SystemError(Exception): - """Internal error in the Python interpreter. - - Please report this to the Python maintainer, along with the traceback, - the Python version, and the hardware/OS platform and version. 
- """ - -class TypeError(Exception): - """Inappropriate argument type.""" - -class ValueError(Exception): - """Inappropriate argument value (of correct type).""" - -class FloatingPointError(ArithmeticError): - """Floating-point operation failed.""" - -class OverflowError(ArithmeticError): - """Result too large to be represented.""" - -class ZeroDivisionError(ArithmeticError): - """Second argument to a division or modulo operation was zero.""" - -class ModuleNotFoundError(ImportError): - """Module not found.""" - -class IndexError(LookupError): - """Sequence index out of range.""" - -class KeyError(LookupError): - """Mapping key not found.""" - -class UnboundLocalError(NameError): - """Local name referenced but not bound to a value.""" +class SystemError(Exception): ... +class TypeError(Exception): ... +class ValueError(Exception): ... +class FloatingPointError(ArithmeticError): ... +class OverflowError(ArithmeticError): ... +class ZeroDivisionError(ArithmeticError): ... +class ModuleNotFoundError(ImportError): ... +class IndexError(LookupError): ... +class KeyError(LookupError): ... +class UnboundLocalError(NameError): ... class BlockingIOError(OSError): - """I/O operation would block.""" - characters_written: int -class ChildProcessError(OSError): - """Child process error.""" - -class ConnectionError(OSError): - """Connection error.""" - -class BrokenPipeError(ConnectionError): - """Broken pipe.""" - -class ConnectionAbortedError(ConnectionError): - """Connection aborted.""" - -class ConnectionRefusedError(ConnectionError): - """Connection refused.""" - -class ConnectionResetError(ConnectionError): - """Connection reset.""" - -class FileExistsError(OSError): - """File already exists.""" - -class FileNotFoundError(OSError): - """File not found.""" - -class InterruptedError(OSError): - """Interrupted by signal.""" - -class IsADirectoryError(OSError): - """Operation doesn't work on directories.""" - -class NotADirectoryError(OSError): - """Operation only works on directories.""" - -class PermissionError(OSError): - """Not enough permissions.""" - -class ProcessLookupError(OSError): - """Process not found.""" - -class TimeoutError(OSError): - """Timeout expired.""" - -class NotImplementedError(RuntimeError): - """Method or function hasn't been implemented yet.""" - -class RecursionError(RuntimeError): - """Recursion limit exceeded.""" - -class IndentationError(SyntaxError): - """Improper indentation.""" - -class TabError(IndentationError): - """Improper mixture of spaces and tabs.""" - -class UnicodeError(ValueError): - """Unicode related error.""" +class ChildProcessError(OSError): ... +class ConnectionError(OSError): ... +class BrokenPipeError(ConnectionError): ... +class ConnectionAbortedError(ConnectionError): ... +class ConnectionRefusedError(ConnectionError): ... +class ConnectionResetError(ConnectionError): ... +class FileExistsError(OSError): ... +class FileNotFoundError(OSError): ... +class InterruptedError(OSError): ... +class IsADirectoryError(OSError): ... +class NotADirectoryError(OSError): ... +class PermissionError(OSError): ... +class ProcessLookupError(OSError): ... +class TimeoutError(OSError): ... +class NotImplementedError(RuntimeError): ... +class RecursionError(RuntimeError): ... +class IndentationError(SyntaxError): ... +class TabError(IndentationError): ... +class UnicodeError(ValueError): ... 
@disjoint_base class UnicodeDecodeError(UnicodeError): - """Unicode decoding error.""" - encoding: str object: bytes start: int @@ -4764,8 +2207,6 @@ class UnicodeDecodeError(UnicodeError): @disjoint_base class UnicodeEncodeError(UnicodeError): - """Unicode encoding error.""" - encoding: str object: str start: int @@ -4775,8 +2216,6 @@ class UnicodeEncodeError(UnicodeError): @disjoint_base class UnicodeTranslateError(UnicodeError): - """Unicode translation error.""" - encoding: None object: str start: int @@ -4784,50 +2223,20 @@ class UnicodeTranslateError(UnicodeError): reason: str def __init__(self, object: str, start: int, end: int, reason: str, /) -> None: ... -class Warning(Exception): - """Base class for warning categories.""" - -class UserWarning(Warning): - """Base class for warnings generated by user code.""" - -class DeprecationWarning(Warning): - """Base class for warnings about deprecated features.""" - -class SyntaxWarning(Warning): - """Base class for warnings about dubious syntax.""" - -class RuntimeWarning(Warning): - """Base class for warnings about dubious runtime behavior.""" - -class FutureWarning(Warning): - """Base class for warnings about constructs that will change semantically - in the future. - """ - -class PendingDeprecationWarning(Warning): - """Base class for warnings about features which will be deprecated - in the future. - """ - -class ImportWarning(Warning): - """Base class for warnings about probable mistakes in module imports""" - -class UnicodeWarning(Warning): - """Base class for warnings about Unicode related problems, mostly - related to conversion problems. - """ - -class BytesWarning(Warning): - """Base class for warnings about bytes and buffer related problems, mostly - related to conversion from str or comparing to str. - """ - -class ResourceWarning(Warning): - """Base class for warnings about resource usage.""" +class Warning(Exception): ... +class UserWarning(Warning): ... +class DeprecationWarning(Warning): ... +class SyntaxWarning(Warning): ... +class RuntimeWarning(Warning): ... +class FutureWarning(Warning): ... +class PendingDeprecationWarning(Warning): ... +class ImportWarning(Warning): ... +class UnicodeWarning(Warning): ... +class BytesWarning(Warning): ... +class ResourceWarning(Warning): ... if sys.version_info >= (3, 10): - class EncodingWarning(Warning): - """Base class for warnings about encodings.""" + class EncodingWarning(Warning): ... if sys.version_info >= (3, 11): _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException) @@ -4838,18 +2247,12 @@ if sys.version_info >= (3, 11): # See `check_exception_group.py` for use-cases and comments. @disjoint_base class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): - """A combination of multiple unrelated exceptions.""" - def __new__(cls, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> None: ... @property - def message(self) -> str: - """exception message""" - + def message(self) -> str: ... @property - def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: - """nested exceptions""" - + def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... 
@overload def subgroup( self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / @@ -4879,15 +2282,13 @@ if sys.version_info >= (3, 11): def derive(self, excs: Sequence[_ExceptionT], /) -> ExceptionGroup[_ExceptionT]: ... @overload def derive(self, excs: Sequence[_BaseExceptionT], /) -> BaseExceptionGroup[_BaseExceptionT]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): def __new__(cls, message: str, exceptions: Sequence[_ExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_ExceptionT_co], /) -> None: ... @property - def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: - """nested exceptions""" + def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... # We accept a narrower type, but that's OK. @overload # type: ignore[override] def subgroup( @@ -4907,5 +2308,4 @@ if sys.version_info >= (3, 11): ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... if sys.version_info >= (3, 13): - class PythonFinalizationError(RuntimeError): - """Operation blocked during Python finalization.""" + class PythonFinalizationError(RuntimeError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi index 84b3b4e30ec8b..7bd829d040cb8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi @@ -1,9 +1,3 @@ -"""Interface to the libbzip2 compression library. - -This module provides a file interface, classes for incremental -(de)compression, and functions for one-shot (de)compression. -""" - import sys from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -32,19 +26,8 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: - """Compress a block of data. - - compresslevel, if given, must be a number between 1 and 9. - - For incremental compression, use a BZ2Compressor object instead. - """ - -def decompress(data: ReadableBuffer) -> bytes: - """Decompress a block of data. - - For incremental decompression, use a BZ2Decompressor object instead. - """ +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -59,27 +42,7 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> BZ2File: - """Open a bzip2-compressed file in binary or text mode. - - The filename argument can be an actual filename (a str, bytes, or - PathLike object), or an existing file object to read from or write - to. - - The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or - "ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode. - The default mode is "rb", and the default compresslevel is 9. - - For binary mode, this function is equivalent to the BZ2File - constructor: BZ2File(filename, mode, compresslevel). In this case, - the encoding, errors and newline arguments must not be provided. 
- - For text mode, a BZ2File object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error - handling behavior, and line ending(s). - - """ - +) -> BZ2File: ... @overload def open( filename: _ReadableFileobj, @@ -136,116 +99,21 @@ def open( ) -> BZ2File | TextIOWrapper: ... class BZ2File(BaseStream, IO[bytes]): - """A file object providing transparent bzip2 (de)compression. - - A BZ2File can act as a wrapper for an existing file object, or refer - directly to a named file on disk. - - Note that BZ2File provides a *binary* file interface - data read is - returned as bytes, and data to be written should be given as bytes. - """ - def __enter__(self) -> Self: ... @overload - def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: - """Open a bzip2-compressed file. - - If filename is a str, bytes, or PathLike object, it gives the - name of the file to be opened. Otherwise, it should be a file - object, which will be used to read or write the compressed data. - - mode can be 'r' for reading (default), 'w' for (over)writing, - 'x' for creating exclusively, or 'a' for appending. These can - equivalently be given as 'rb', 'wb', 'xb', and 'ab'. - - If mode is 'w', 'x' or 'a', compresslevel can be a number between 1 - and 9 specifying the level of compression: 1 produces the least - compression, and 9 (default) produces the most compression. - - If mode is 'r', the input file may be the concatenation of - multiple compressed streams. - """ - + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... @overload def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... @overload def __init__( self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 ) -> None: ... - def read(self, size: int | None = -1) -> bytes: - """Read up to size uncompressed bytes from the file. - - If size is negative or omitted, read until EOF is reached. - Returns b'' if the file is already at EOF. - """ - - def read1(self, size: int = -1) -> bytes: - """Read up to size uncompressed bytes, while trying to avoid - making multiple reads from the underlying stream. Reads up to a - buffer's worth of data if size is negative. - - Returns b'' if the file is at EOF. - """ - - def readline(self, size: SupportsIndex = -1) -> bytes: # type: ignore[override] - """Read a line of uncompressed bytes from the file. - - The terminating newline (if present) is retained. If size is - non-negative, no more than size bytes will be read (in which - case the line may be incomplete). Returns b'' if already at EOF. - """ - - def readinto(self, b: WriteableBuffer) -> int: - """Read bytes into b. - - Returns the number of bytes read (0 for EOF). - """ - - def readlines(self, size: SupportsIndex = -1) -> list[bytes]: - """Read a list of lines of uncompressed bytes from the file. - - size can be specified to control the number of lines read: no - further lines will be read once the total size of the lines read - so far equals or exceeds size. - """ - - def peek(self, n: int = 0) -> bytes: - """Return buffered data without advancing the file position. - - Always returns at least one byte of data, unless at EOF. - The exact number of bytes returned is unspecified. - """ - - def seek(self, offset: int, whence: int = 0) -> int: - """Change the file position. 
- - The new position is specified by offset, relative to the - position indicated by whence. Values for whence are: - - 0: start of stream (default); offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive - - Returns the new file position. - - Note that seeking is emulated, so depending on the parameters, - this operation may be extremely slow. - """ - - def write(self, data: ReadableBuffer) -> int: - """Write a byte string to the file. - - Returns the number of uncompressed bytes written, which is - always the length of data in bytes. Note that due to buffering, - the file on disk may not reflect the data written until close() - is called. - """ - - def writelines(self, seq: Iterable[ReadableBuffer]) -> None: - """Write a sequence of byte strings to the file. - - Returns the number of uncompressed bytes written. - seq can be any iterable yielding byte strings. - - Line separators are not added between the written byte strings. - """ + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] + def readinto(self, b: WriteableBuffer) -> int: ... + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... + def peek(self, n: int = 0) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... + def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi index 008bb72e507c3..e921584d43905 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi @@ -1,7 +1,3 @@ -"""Python interface for the 'lsprof' profiler. -Compatible with the 'profile' module. -""" - import _lsprof from _typeshed import StrOrBytesPath, Unused from collections.abc import Callable, Mapping @@ -11,41 +7,16 @@ from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: - """Run statement under profiler optionally saving results in filename - - This function takes a single argument that can be passed to the - "exec" statement, and an optional file name. In all cases this - routine attempts to "exec" its first argument and gather profiling - statistics from the execution. If no file name is present, then this - function automatically prints a simple profiling report, sorted by the - standard name string (file/line/function-name) that is presented in - each line. - """ - +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 -) -> None: - """Run statement under profiler, supplying your own globals and locals, - optionally saving results in filename. - - statement and filename have the same semantics as profile.run - """ +) -> None: ... _T = TypeVar("_T") _P = ParamSpec("_P") _Label: TypeAlias = tuple[str, int, str] class Profile(_lsprof.Profiler): - """Profile(timer=None, timeunit=None, subcalls=True, builtins=True) - - Builds a profiler object using the specified timer function. - The default timer is a fast built-in one based on real time. 
- For custom timer functions returning integers, timeunit can - be a float specifying a scale (i.e. how long each integer unit - is, in seconds). - """ - stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi index 3b2aa61ceb873..d00f0d5d2bce3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi @@ -1,11 +1,3 @@ -"""Calendar printing functions - -Note when comparing these calendars to the ones printed by cal(1): By -default, these calendars have Monday as the first day of the week, and -Sunday as the last (the European convention). Use setfirstweekday() to -set the first day of the week (0=Monday, 6=Sunday). -""" - import datetime import enum import sys @@ -70,211 +62,52 @@ class IllegalMonthError(ValueError): class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... -def isleap(year: int) -> bool: - """Return True for leap years, False for non-leap years.""" - -def leapdays(y1: int, y2: int) -> int: - """Return number of leap years in range [y1, y2). - Assume y1 <= y2. - """ - -def weekday(year: int, month: int, day: int) -> int: - """Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31).""" - -def monthrange(year: int, month: int) -> tuple[int, int]: - """Return weekday of first day of month (0-6 ~ Mon-Sun) - and number of days (28-31) for year, month. - """ +def isleap(year: int) -> bool: ... +def leapdays(y1: int, y2: int) -> int: ... +def weekday(year: int, month: int, day: int) -> int: ... +def monthrange(year: int, month: int) -> tuple[int, int]: ... class Calendar: - """ - Base calendar class. This class doesn't do any formatting. It simply - provides data to subclasses. - """ - firstweekday: int def __init__(self, firstweekday: int = 0) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... - def iterweekdays(self) -> Iterable[int]: - """ - Return an iterator for one week of weekday numbers starting with the - configured first one. - """ - - def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: - """ - Return an iterator for one month. The iterator will yield datetime.date - values and will always iterate through complete weeks, so it will yield - dates outside the specified month. - """ - - def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: - """ - Like itermonthdates(), but will yield (day number, weekday number) - tuples. For days outside the specified month the day number is 0. - """ - - def itermonthdays(self, year: int, month: int) -> Iterable[int]: - """ - Like itermonthdates(), but will yield day numbers. For days outside - the specified month the day number is 0. - """ - - def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: - """ - Return a matrix (list of lists) representing a month's calendar. - Each row represents a week; week entries are datetime.date values. - """ - - def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: - """ - Return a matrix representing a month's calendar. - Each row represents a week; week entries are - (day number, weekday number) tuples. Day numbers outside this month - are zero. 
- """ - - def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: - """ - Return a matrix representing a month's calendar. - Each row represents a week; days outside this month are zero. - """ - - def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: - """ - Return the data for the specified year ready for formatting. The return - value is a list of month rows. Each month row contains up to width months. - Each month contains between 4 and 6 weeks and each week contains 1-7 - days. Days are datetime.date objects. - """ - - def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: - """ - Return the data for the specified year ready for formatting (similar to - yeardatescalendar()). Entries in the week lists are - (day number, weekday number) tuples. Day numbers outside this month are - zero. - """ - - def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: - """ - Return the data for the specified year ready for formatting (similar to - yeardatescalendar()). Entries in the week lists are day numbers. - Day numbers outside this month are zero. - """ - - def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: - """ - Like itermonthdates(), but will yield (year, month, day) tuples. Can be - used for dates outside of datetime.date range. - """ - - def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: - """ - Like itermonthdates(), but will yield (year, month, day, day_of_week) tuples. - Can be used for dates outside of datetime.date range. - """ + def iterweekdays(self) -> Iterable[int]: ... + def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: ... + def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... + def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... + def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: ... + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: ... + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: ... + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... class TextCalendar(Calendar): - """ - Subclass of Calendar that outputs a calendar as a simple plain text - similar to the UNIX program cal. - """ - - def prweek(self, theweek: int, width: int) -> None: - """ - Print a single week (no newline). - """ - - def formatday(self, day: int, weekday: int, width: int) -> str: - """ - Returns a formatted day. - """ - - def formatweek(self, theweek: int, width: int) -> str: - """ - Returns a single week in a string (no newline). - """ - - def formatweekday(self, day: int, width: int) -> str: - """ - Returns a formatted week day name. - """ - - def formatweekheader(self, width: int) -> str: - """ - Return a header for a week. - """ - - def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: - """ - Return a formatted month name. 
- """ - - def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: - """ - Print a month's calendar. - """ - - def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: - """ - Return a month's calendar string (multi-line). - """ - - def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: - """ - Returns a year's calendar as a multi-line string. - """ - - def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: - """Print a year's calendar.""" + def prweek(self, theweek: int, width: int) -> None: ... + def formatday(self, day: int, weekday: int, width: int) -> str: ... + def formatweek(self, theweek: int, width: int) -> str: ... + def formatweekday(self, day: int, width: int) -> str: ... + def formatweekheader(self, width: int) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... def firstweekday() -> int: ... -def monthcalendar(year: int, month: int) -> list[list[int]]: - """ - Return a matrix representing a month's calendar. - Each row represents a week; days outside this month are zero. - """ - -def prweek(theweek: int, width: int) -> None: - """ - Print a single week (no newline). - """ - -def week(theweek: int, width: int) -> str: - """ - Returns a single week in a string (no newline). - """ - -def weekheader(width: int) -> str: - """ - Return a header for a week. - """ - -def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: - """ - Print a month's calendar. - """ - -def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: - """ - Return a month's calendar string (multi-line). - """ - -def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: - """ - Returns a year's calendar as a multi-line string. - """ - -def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: - """Print a year's calendar.""" +def monthcalendar(year: int, month: int) -> list[list[int]]: ... +def prweek(theweek: int, width: int) -> None: ... +def week(theweek: int, width: int) -> str: ... +def weekheader(width: int) -> str: ... +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... class HTMLCalendar(Calendar): - """ - This calendar returns complete HTML pages. - """ - cssclasses: ClassVar[list[str]] cssclass_noday: ClassVar[str] cssclasses_weekday_head: ClassVar[list[str]] @@ -282,47 +115,16 @@ class HTMLCalendar(Calendar): cssclass_month: ClassVar[str] cssclass_year: ClassVar[str] cssclass_year_head: ClassVar[str] - def formatday(self, day: int, weekday: int) -> str: - """ - Return a day as a table cell. - """ - - def formatweek(self, theweek: int) -> str: - """ - Return a complete week as a table row. - """ - - def formatweekday(self, day: int) -> str: - """ - Return a weekday name as a table header. 
- """ - - def formatweekheader(self) -> str: - """ - Return a header for a week as a table row. - """ - - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: - """ - Return a month name as a table row. - """ - - def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: - """ - Return a formatted month as a table. - """ - - def formatyear(self, theyear: int, width: int = 3) -> str: - """ - Return a formatted year as a table of tables. - """ - + def formatday(self, day: int, weekday: int) -> str: ... + def formatweek(self, theweek: int) -> str: ... + def formatweekday(self, day: int) -> str: ... + def formatweekheader(self) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatyear(self, theyear: int, width: int = 3) -> str: ... def formatyearpage( self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None - ) -> bytes: - """ - Return a formatted year as a complete HTML page. - """ + ) -> bytes: ... class different_locale: def __init__(self, locale: _LocaleType) -> None: ... @@ -330,19 +132,9 @@ class different_locale: def __exit__(self, *args: Unused) -> None: ... class LocaleTextCalendar(TextCalendar): - """ - This class can be passed a locale name in the constructor and will return - month and weekday names in the specified locale. - """ - def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... class LocaleHTMLCalendar(HTMLCalendar): - """ - This class can be passed a locale name in the constructor and will return - month and weekday names in the specified locale. - """ - def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... def formatweekday(self, day: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... @@ -350,14 +142,9 @@ class LocaleHTMLCalendar(HTMLCalendar): c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... -def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: - """Prints multi-column formatting for year calendars""" - -def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: - """Returns a string formatted from n strings, centered within n columns.""" - -def timegm(tuple: tuple[int, ...] | struct_time) -> int: - """Unrelated but handy function to calculate Unix timestamp from GMT.""" +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... # Data attributes day_name: Sequence[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi index 1154503deb7b5..0f9d4343b6307 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi @@ -1,14 +1,3 @@ -"""Support module for CGI (Common Gateway Interface) scripts. - -This module defines a number of utilities for use by CGI scripts -written in Python. - -The global variable maxlen can be set to an integer indicating the maximum size -of a POST request. POST requests larger than this size will result in a -ValueError being raised during parsing. The default value of this variable is 0, -meaning the request size is unlimited. 
-""" - import os from _typeshed import SupportsContainsAndGetItem, SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type @@ -39,81 +28,23 @@ def parse( keep_blank_values: bool = ..., strict_parsing: bool = ..., separator: str = "&", -) -> dict[str, list[str]]: - """Parse a query in the environment or from a file (default stdin) - - Arguments, all optional: - - fp : file pointer; default: sys.stdin.buffer - - environ : environment dictionary; default: os.environ - - keep_blank_values: flag indicating whether blank values in - percent-encoded forms should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - separator: str. The symbol to use for separating the query arguments. - Defaults to &. - """ - +) -> dict[str, list[str]]: ... def parse_multipart( fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" -) -> dict[str, list[Any]]: - """Parse multipart input. - - Arguments: - fp : input file - pdict: dictionary containing other parameters of content-type header - encoding, errors: request encoding and error handler, passed to - FieldStorage - - Returns a dictionary just like parse_qs(): keys are the field names, each - value is a list of values for that field. For non-file fields, the value - is a list of strings. - """ - +) -> dict[str, list[Any]]: ... @type_check_only class _Environ(Protocol): def __getitem__(self, k: str, /) -> str: ... def keys(self) -> Iterable[str]: ... -def parse_header(line: str) -> tuple[str, dict[str, str]]: - """Parse a Content-type like header. - - Return the main content-type and a dictionary of options. - - """ - -def test(environ: _Environ = os.environ) -> None: - """Robust test CGI script, usable as main program. - - Write minimal HTTP headers and dump all information provided to - the script in HTML form. - - """ - -def print_environ(environ: _Environ = os.environ) -> None: - """Dump the shell environment as HTML.""" - -def print_form(form: dict[str, Any]) -> None: - """Dump the contents of a form as HTML.""" - -def print_directory() -> None: - """Dump the current directory as HTML.""" - -def print_environ_usage() -> None: - """Dump a list of environment variables used by CGI as HTML.""" +def parse_header(line: str) -> tuple[str, dict[str, str]]: ... +def test(environ: _Environ = os.environ) -> None: ... +def print_environ(environ: _Environ = os.environ) -> None: ... +def print_form(form: dict[str, Any]) -> None: ... +def print_directory() -> None: ... +def print_environ_usage() -> None: ... class MiniFieldStorage: - """Like FieldStorage, for use when no file uploads are possible.""" - # The first five "Any" attributes here are always None, but mypy doesn't support that filename: Any list: Any @@ -125,52 +56,9 @@ class MiniFieldStorage: headers: dict[Any, Any] name: Any value: Any - def __init__(self, name: Any, value: Any) -> None: - """Constructor from field name and value.""" + def __init__(self, name: Any, value: Any) -> None: ... class FieldStorage: - """Store a sequence of fields, reading multipart/form-data. - - This class provides naming, typing, files stored on disk, and - more. 
At the top level, it is accessible like a dictionary, whose - keys are the field names. (Note: None can occur as a field name.) - The items are either a Python list (if there's multiple values) or - another FieldStorage or MiniFieldStorage object. If it's a single - object, it has the following attributes: - - name: the field name, if specified; otherwise None - - filename: the filename, if specified; otherwise None; this is the - client side filename, *not* the file name on which it is - stored (that's a temporary file you don't deal with) - - value: the value as a *string*; for file uploads, this - transparently reads the file every time you request the value - and returns *bytes* - - file: the file(-like) object from which you can read the data *as - bytes* ; None if the data is stored a simple string - - type: the content-type, or None if not specified - - type_options: dictionary of options specified on the content-type - line - - disposition: content-disposition, or None if not specified - - disposition_options: dictionary of corresponding options - - headers: a dictionary(-like) object (sometimes email.message.Message or a - subclass thereof) containing *all* headers - - The class is subclassable, mostly for the purpose of overriding - the make_file() method, which is called internally to come up with - a file open for reading and writing. This makes it possible to - override the default choice of storing all files in a temporary - directory and unlinking them as soon as they have been opened. - - """ - FieldStorageClass: _type | None keep_blank_values: int strict_parsing: int @@ -206,102 +94,21 @@ class FieldStorage: errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", - ) -> None: - """Constructor. Read multipart/* until last part. - - Arguments, all optional: - - fp : file pointer; default: sys.stdin.buffer - (not used when the request method is GET) - Can be : - 1. a TextIOWrapper object - 2. an object whose read() and readline() methods return bytes - - headers : header dictionary-like object; default: - taken from environ as per CGI spec - - outerboundary : terminating multipart boundary - (for internal use only) - - environ : environment dictionary; default: os.environ - - keep_blank_values: flag indicating whether blank values in - percent-encoded forms should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - limit : used internally to read parts of multipart/form-data forms, - to exit from the reading loop when reached. It is the difference - between the form content-length and the number of bytes already - read - - encoding, errors : the encoding and error handler used to decode the - binary stream to strings. Must be the same as the charset defined - for the page sending the form (content-type : meta http-equiv or - header) - - max_num_fields: int. If set, then __init__ throws a ValueError - if there are more than n fields read by parse_qsl(). - - """ - + ) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... 
- def __getitem__(self, key: str) -> Any: - """Dictionary style indexing.""" - - def getvalue(self, key: str, default: Any = None) -> Any: - """Dictionary style get() method, including 'value' lookup.""" - - def getfirst(self, key: str, default: Any = None) -> Any: - """Return the first value received.""" - - def getlist(self, key: str) -> _list[Any]: - """Return list of received values.""" - - def keys(self) -> _list[str]: - """Dictionary style keys() method.""" - - def __contains__(self, key: str) -> bool: - """Dictionary style __contains__ method.""" - - def __len__(self) -> int: - """Dictionary style len(x) support.""" - + def __getitem__(self, key: str) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... + def getlist(self, key: str) -> _list[Any]: ... + def keys(self) -> _list[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... def __bool__(self) -> bool: ... def __del__(self) -> None: ... # Returns bytes or str IO depending on an internal flag - def make_file(self) -> IO[Any]: - """Overridable: return a readable & writable file. - - The file will be used as follows: - - data is written to it - - seek(0) - - data is read from it - - The file is opened in binary mode for files, in text mode - for other fields - - This version opens a temporary file for reading and writing, - and immediately deletes (unlinks) it. The trick (on Unix!) is - that the file can still be used, but it can't be opened by - another process, and it will automatically be deleted when it - is closed or when the current process terminates. - - If you want a more permanent file, you derive a class which - overrides this method. If you want a visible temporary file - that is nevertheless automatically deleted when the script - terminates, try defining a __del__ method in a derived class - which unlinks the temporary files you have created. - - """ + def make_file(self) -> IO[Any]: ... def print_exception( type: type[BaseException] | None = None, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi index 64119f136f339..5657258011598 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi @@ -1,27 +1,3 @@ -"""More comprehensive traceback formatting for Python scripts. - -To enable this module, do: - - import cgitb; cgitb.enable() - -at the top of your script. The optional arguments to enable() are: - - display - if true, tracebacks are displayed in the web browser - logdir - if set, tracebacks are written to files in this directory - context - number of lines of source code to show for each stack frame - format - 'text' or 'html' controls the output format - -By default, tracebacks are displayed but not saved, the context is 5 lines -and the output format is 'html' (for backwards compatibility with the -original use of this module) - -Alternatively, if you have caught an exception and want cgitb to display it -for you, call cgitb.handler(). The optional argument to handler() is a -3-item tuple (etype, evalue, etb) just like the value of sys.exc_info(). -The default handler displays output as HTML. 
- -""" - from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType @@ -29,29 +5,18 @@ from typing import IO, Any, Final __UNDEF__: Final[object] # undocumented sentinel -def reset() -> str: # undocumented - """Return a string that resets the CGI and browser to a known state.""" - +def reset() -> str: ... # undocumented def small(text: str) -> str: ... # undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... # undocumented -def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: # undocumented - """Find the value for a given name in the given environment.""" - +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] -) -> list[tuple[str, str | None, Any]]: # undocumented - """Scan one logical line of Python and look up values of variables used.""" - -def html(einfo: OptExcInfo, context: int = 5) -> str: - """Return a nice HTML document describing a given traceback.""" - -def text(einfo: OptExcInfo, context: int = 5) -> str: - """Return a plain text document describing a given traceback.""" +) -> list[tuple[str, str | None, Any]]: ... # undocumented +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... class Hook: # undocumented - """A hook to replace sys.excepthook that shows tracebacks in HTML.""" - def __init__( self, display: int = 1, @@ -64,10 +29,4 @@ class Hook: # undocumented def handle(self, info: OptExcInfo | None = None) -> None: ... def handler(info: OptExcInfo | None = None) -> None: ... -def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: - """Install an exception handler that formats tracebacks as HTML. - - The optional argument 'display' can be set to 0 to suppress sending the - traceback to the browser, and 'logdir' can be set to a directory to cause - tracebacks to be written to files there. - """ +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi index 3f2c3a5b8c3da..9788d35f680c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi @@ -1,53 +1,3 @@ -"""Simple class to read IFF chunks. - -An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File -Format)) has the following structure: - -+----------------+ -| ID (4 bytes) | -+----------------+ -| size (4 bytes) | -+----------------+ -| data | -| ... | -+----------------+ - -The ID is a 4-byte string which identifies the type of chunk. - -The size field (a 32-bit value, encoded using big-endian byte order) -gives the size of the whole chunk, including the 8-byte header. - -Usually an IFF-type file consists of one or more chunks. The proposed -usage of the Chunk class defined here is to instantiate an instance at -the start of each chunk and read from the instance until it reaches -the end, after which a new instance can be instantiated. At the end -of the file, creating a new instance will fail with an EOFError -exception. 
- -Usage: -while True: - try: - chunk = Chunk(file) - except EOFError: - break - chunktype = chunk.getname() - while True: - data = chunk.read(nbytes) - if not data: - pass - # do something with data - -The interface is file-like. The implemented methods are: -read, close, seek, tell, isatty. -Extra methods are: skip() (called by close, skips to the end of the chunk), -getname() (returns the name (ID) of the chunk) - -The __init__ method has one required argument, a file-like object -(including a chunk instance), and one optional argument, a flag which -specifies whether or not chunks are aligned on 2-byte boundaries. The -default is 1, i.e. aligned. -""" - from typing import IO class Chunk: @@ -60,30 +10,11 @@ class Chunk: offset: int seekable: bool def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... - def getname(self) -> bytes: - """Return the name (ID) of the current chunk.""" - - def getsize(self) -> int: - """Return the size of the current chunk.""" - + def getname(self) -> bytes: ... + def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... - def seek(self, pos: int, whence: int = 0) -> None: - """Seek to specified position into the chunk. - Default position is 0 (start of chunk). - If the file is not seekable, this will result in an error. - """ - + def seek(self, pos: int, whence: int = 0) -> None: ... def tell(self) -> int: ... - def read(self, size: int = -1) -> bytes: - """Read at most size bytes from the chunk. - If size is omitted or negative, read until the end - of the chunk. - """ - - def skip(self) -> None: - """Skip the rest of the chunk. - If you are not interested in the contents of the chunk, - this method should be called so that the file points to - the start of the next chunk. - """ + def read(self, size: int = -1) -> bytes: ... + def skip(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi index 575f2bf95dacb..a08addcf54389 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi @@ -1,7 +1,3 @@ -"""This module provides access to mathematical functions for complex -numbers. -""" - from typing import Final, SupportsComplex, SupportsFloat, SupportsIndex from typing_extensions import TypeAlias @@ -15,93 +11,26 @@ tau: Final[float] _C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex -def acos(z: _C, /) -> complex: - """Return the arc cosine of z.""" - -def acosh(z: _C, /) -> complex: - """Return the inverse hyperbolic cosine of z.""" - -def asin(z: _C, /) -> complex: - """Return the arc sine of z.""" - -def asinh(z: _C, /) -> complex: - """Return the inverse hyperbolic sine of z.""" - -def atan(z: _C, /) -> complex: - """Return the arc tangent of z.""" - -def atanh(z: _C, /) -> complex: - """Return the inverse hyperbolic tangent of z.""" - -def cos(z: _C, /) -> complex: - """Return the cosine of z.""" - -def cosh(z: _C, /) -> complex: - """Return the hyperbolic cosine of z.""" - -def exp(z: _C, /) -> complex: - """Return the exponential value e**z.""" - -def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: - """Determine whether two complex numbers are close in value. 
- - rel_tol - maximum difference for being considered "close", relative to the - magnitude of the input values - abs_tol - maximum difference for being considered "close", regardless of the - magnitude of the input values - - Return True if a is close in value to b, and False otherwise. - - For the values to be considered close, the difference between them must be - smaller than at least one of the tolerances. - - -inf, inf and NaN behave similarly to the IEEE 754 Standard. That is, NaN is - not close to anything, even itself. inf and -inf are only close to themselves. - """ - -def isinf(z: _C, /) -> bool: - """Checks if the real or imaginary part of z is infinite.""" - -def isnan(z: _C, /) -> bool: - """Checks if the real or imaginary part of z not a number (NaN).""" - -def log(x: _C, base: _C = ..., /) -> complex: - """log(z[, base]) -> the logarithm of z to the given base. - - If the base is not specified, returns the natural logarithm (base e) of z. - """ - -def log10(z: _C, /) -> complex: - """Return the base-10 logarithm of z.""" - -def phase(z: _C, /) -> float: - """Return argument, also known as the phase angle, of a complex.""" - -def polar(z: _C, /) -> tuple[float, float]: - """Convert a complex from rectangular coordinates to polar coordinates. - - r is the distance from 0 and phi the phase angle. - """ - -def rect(r: float, phi: float, /) -> complex: - """Convert from polar coordinates to rectangular coordinates.""" - -def sin(z: _C, /) -> complex: - """Return the sine of z.""" - -def sinh(z: _C, /) -> complex: - """Return the hyperbolic sine of z.""" - -def sqrt(z: _C, /) -> complex: - """Return the square root of z.""" - -def tan(z: _C, /) -> complex: - """Return the tangent of z.""" - -def tanh(z: _C, /) -> complex: - """Return the hyperbolic tangent of z.""" - -def isfinite(z: _C, /) -> bool: - """Return True if both the real and imaginary parts of z are finite, else False.""" +def acos(z: _C, /) -> complex: ... +def acosh(z: _C, /) -> complex: ... +def asin(z: _C, /) -> complex: ... +def asinh(z: _C, /) -> complex: ... +def atan(z: _C, /) -> complex: ... +def atanh(z: _C, /) -> complex: ... +def cos(z: _C, /) -> complex: ... +def cosh(z: _C, /) -> complex: ... +def exp(z: _C, /) -> complex: ... +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... +def isinf(z: _C, /) -> bool: ... +def isnan(z: _C, /) -> bool: ... +def log(x: _C, base: _C = ..., /) -> complex: ... +def log10(z: _C, /) -> complex: ... +def phase(z: _C, /) -> float: ... +def polar(z: _C, /) -> tuple[float, float]: ... +def rect(r: float, phi: float, /) -> complex: ... +def sin(z: _C, /) -> complex: ... +def sinh(z: _C, /) -> complex: ... +def sqrt(z: _C, /) -> complex: ... +def tan(z: _C, /) -> complex: ... +def tanh(z: _C, /) -> complex: ... +def isfinite(z: _C, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi index a2ffd98d5f5bc..6e84133572bf5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi @@ -1,47 +1,3 @@ -"""A generic class to build line-oriented command interpreters. - -Interpreters constructed with this class obey the following conventions: - -1. End of file on input is processed as the command 'EOF'. -2. A command is parsed out of each line by collecting the prefix composed - of characters in the identchars member. -3. 
A command 'foo' is dispatched to a method 'do_foo()'; the do_ method - is passed a single argument consisting of the remainder of the line. -4. Typing an empty line repeats the last command. (Actually, it calls the - method 'emptyline', which may be overridden in a subclass.) -5. There is a predefined 'help' method. Given an argument 'topic', it - calls the command 'help_topic'. With no arguments, it lists all topics - with defined help_ functions, broken into up to three topics; documented - commands, miscellaneous help topics, and undocumented commands. -6. The command '?' is a synonym for 'help'. The command '!' is a synonym - for 'shell', if a do_shell method exists. -7. If completion is enabled, completing commands will be done automatically, - and completing of commands args is done by calling complete_foo() with - arguments text, line, begidx, endidx. text is string we are matching - against, all returned matches must begin with it. line is the current - input line (lstripped), begidx and endidx are the beginning and end - indexes of the text being matched, which could be used to provide - different completion depending upon which position the argument is in. - -The 'default' method may be overridden to intercept commands for which there -is no do_ method. - -The 'completedefault' method may be overridden to intercept completions for -commands that have no complete_ method. - -The data member 'self.ruler' sets the character used to draw separator lines -in the help messages. If empty, no ruler line is drawn. It defaults to "=". - -If the value of 'self.intro' is nonempty when the cmdloop method is called, -it is printed out on interpreter startup. This value may be overridden -via an optional argument to the cmdloop() method. - -The data members 'self.doc_header', 'self.misc_header', and -'self.undoc_header' set the headers used for the help function's -listings of documented functions, miscellaneous topics, and undocumented -functions respectively. -""" - from collections.abc import Callable from typing import IO, Any, Final from typing_extensions import LiteralString @@ -52,18 +8,6 @@ PROMPT: Final = "(Cmd) " IDENTCHARS: Final[LiteralString] # Too big to be `Literal` class Cmd: - """A simple framework for writing line-oriented command interpreters. - - These are often useful for test harnesses, administrative tools, and - prototypes that will later be wrapped in a more sophisticated interface. - - A Cmd instance or subclass instance is a line-oriented interpreter - framework. There is no good reason to instantiate Cmd itself; rather, - it's useful as a superclass of an interpreter class you define yourself - in order to inherit Cmd's methods and encapsulate action methods. - - """ - prompt: str identchars: str ruler: str @@ -79,103 +23,24 @@ class Cmd: stdout: IO[str] cmdqueue: list[str] completekey: str - def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: - """Instantiate a line-oriented interpreter framework. - - The optional argument 'completekey' is the readline name of a - completion key; it defaults to the Tab key. If completekey is - not None and the readline module is available, command completion - is done automatically. The optional arguments stdin and stdout - specify alternate input and output file objects; if not specified, - sys.stdin and sys.stdout are used. - - """ + def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ... 
old_completer: Callable[[str, int], str | None] | None - def cmdloop(self, intro: Any | None = None) -> None: - """Repeatedly issue a prompt, accept input, parse an initial prefix - off the received input, and dispatch to action methods, passing them - the remainder of the line as argument. - - """ - - def precmd(self, line: str) -> str: - """Hook method executed just before the command line is - interpreted, but after the input prompt is generated and issued. - - """ - - def postcmd(self, stop: bool, line: str) -> bool: - """Hook method executed just after a command dispatch is finished.""" - - def preloop(self) -> None: - """Hook method executed once when the cmdloop() method is called.""" - - def postloop(self) -> None: - """Hook method executed once when the cmdloop() method is about to - return. - - """ - - def parseline(self, line: str) -> tuple[str | None, str | None, str]: - """Parse the line into a command name and a string containing - the arguments. Returns a tuple containing (command, args, line). - 'command' and 'args' may be None if the line couldn't be parsed. - """ - - def onecmd(self, line: str) -> bool: - """Interpret the argument as though it had been typed in response - to the prompt. - - This may be overridden, but should not normally need to be; - see the precmd() and postcmd() methods for useful execution hooks. - The return value is a flag indicating whether interpretation of - commands by the interpreter should stop. - - """ - - def emptyline(self) -> bool: - """Called when an empty line is entered in response to the prompt. - - If this method is not overridden, it repeats the last nonempty - command entered. - - """ - - def default(self, line: str) -> None: - """Called on an input line when the command prefix is not recognized. - - If this method is not overridden, it prints an error message and - returns. - - """ - - def completedefault(self, *ignored: Any) -> list[str]: - """Method called to complete an input line when no command-specific - complete_*() method is available. - - By default, it returns an empty list. - - """ - + def cmdloop(self, intro: Any | None = None) -> None: ... + def precmd(self, line: str) -> str: ... + def postcmd(self, stop: bool, line: str) -> bool: ... + def preloop(self) -> None: ... + def postloop(self) -> None: ... + def parseline(self, line: str) -> tuple[str | None, str | None, str]: ... + def onecmd(self, line: str) -> bool: ... + def emptyline(self) -> bool: ... + def default(self, line: str) -> None: ... + def completedefault(self, *ignored: Any) -> list[str]: ... def completenames(self, text: str, *ignored: Any) -> list[str]: ... completion_matches: list[str] | None - def complete(self, text: str, state: int) -> list[str] | None: - """Return the next possible completion for 'text'. - - If a command has not been entered, then complete against command list. - Otherwise try to call complete_ to get list of completions. - """ - + def complete(self, text: str, state: int) -> list[str] | None: ... def get_names(self) -> list[str]: ... # Only the first element of args matters. def complete_help(self, *args: Any) -> list[str]: ... - def do_help(self, arg: str) -> bool | None: - """List available commands with "help" or detailed help with "help cmd".""" - + def do_help(self, arg: str) -> bool | None: ... def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... 
- def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: - """Display a list of strings as a compact set of columns. - - Each column is only as wide as necessary. - Columns are separated by two spaces (one was not legible enough). - """ + def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi index 2a1098ac03a5d..0b13c8a5016d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi @@ -1,5 +1,3 @@ -"""Utilities needed to emulate Python's interactive interpreter.""" - import sys from codeop import CommandCompiler, compile_command as compile_command from collections.abc import Callable @@ -9,203 +7,34 @@ from typing import Any __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] class InteractiveInterpreter: - """Base class for InteractiveConsole. - - This class deals with parsing and interpreter state (the user's - namespace); it doesn't deal with input buffering or prompting or - input file naming (the filename is always passed in explicitly). - - """ - locals: dict[str, Any] # undocumented compile: CommandCompiler # undocumented - def __init__(self, locals: dict[str, Any] | None = None) -> None: - """Constructor. - - The optional 'locals' argument specifies a mapping to use as the - namespace in which code will be executed; it defaults to a newly - created dictionary with key "__name__" set to "__console__" and - key "__doc__" set to None. - - """ - - def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: - """Compile and run some source in the interpreter. - - Arguments are as for compile_command(). - - One of several things can happen: - - 1) The input is incorrect; compile_command() raised an - exception (SyntaxError or OverflowError). A syntax traceback - will be printed by calling the showsyntaxerror() method. - - 2) The input is incomplete, and more input is required; - compile_command() returned None. Nothing happens. - - 3) The input is complete; compile_command() returned a code - object. The code is executed by calling self.runcode() (which - also handles run-time exceptions, except for SystemExit). - - The return value is True in case 2, False in the other cases (unless - an exception is raised). The return value can be used to - decide whether to use sys.ps1 or sys.ps2 to prompt the next - line. - - """ - - def runcode(self, code: CodeType) -> None: - """Execute a code object. - - When an exception occurs, self.showtraceback() is called to - display a traceback. All exceptions are caught except - SystemExit, which is reraised. - - A note about KeyboardInterrupt: this exception may occur - elsewhere in this code, and may not always be caught. The - caller should be prepared to deal with it. - - """ + def __init__(self, locals: dict[str, Any] | None = None) -> None: ... + def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... + def runcode(self, code: CodeType) -> None: ... if sys.version_info >= (3, 13): - def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: - """Display the syntax error that just occurred. - - This doesn't display a stack trace because there isn't one. 
- - If a filename is given, it is stuffed in the exception instead - of what was there before (because Python's parser always uses - "" when reading from a string). - - The output is written by self.write(), below. - - """ + def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: ... else: - def showsyntaxerror(self, filename: str | None = None) -> None: - """Display the syntax error that just occurred. - - This doesn't display a stack trace because there isn't one. - - If a filename is given, it is stuffed in the exception instead - of what was there before (because Python's parser always uses - "" when reading from a string). - - The output is written by self.write(), below. - - """ - - def showtraceback(self) -> None: - """Display the exception that just occurred. - - We remove the first stack item because it is our own code. - - The output is written by self.write(), below. - - """ - - def write(self, data: str) -> None: - """Write a string. - - The base implementation writes to sys.stderr; a subclass may - replace this with a different implementation. + def showsyntaxerror(self, filename: str | None = None) -> None: ... - """ + def showtraceback(self) -> None: ... + def write(self, data: str) -> None: ... class InteractiveConsole(InteractiveInterpreter): - """Closely emulate the behavior of the interactive Python interpreter. - - This class builds on InteractiveInterpreter and adds prompting - using the familiar sys.ps1 and sys.ps2, and input buffering. - - """ - buffer: list[str] # undocumented filename: str # undocumented if sys.version_info >= (3, 13): def __init__( self, locals: dict[str, Any] | None = None, filename: str = "", *, local_exit: bool = False - ) -> None: - """Constructor. - - The optional locals argument will be passed to the - InteractiveInterpreter base class. - - The optional filename argument should specify the (file)name - of the input stream; it will show up in tracebacks. - - """ - - def push(self, line: str, filename: str | None = None) -> bool: - """Push a line to the interpreter. - - The line should not have a trailing newline; it may have - internal newlines. The line is appended to a buffer and the - interpreter's runsource() method is called with the - concatenated contents of the buffer as source. If this - indicates that the command was executed or invalid, the buffer - is reset; otherwise, the command is incomplete, and the buffer - is left as it was after the line was appended. The return - value is 1 if more input is required, 0 if the line was dealt - with in some way (this is the same as runsource()). - - """ + ) -> None: ... + def push(self, line: str, filename: str | None = None) -> bool: ... else: - def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: - """Constructor. - - The optional locals argument will be passed to the - InteractiveInterpreter base class. - - The optional filename argument should specify the (file)name - of the input stream; it will show up in tracebacks. - - """ - - def push(self, line: str) -> bool: - """Push a line to the interpreter. - - The line should not have a trailing newline; it may have - internal newlines. The line is appended to a buffer and the - interpreter's runsource() method is called with the - concatenated contents of the buffer as source. If this - indicates that the command was executed or invalid, the buffer - is reset; otherwise, the command is incomplete, and the buffer - is left as it was after the line was appended. 
The return - value is 1 if more input is required, 0 if the line was dealt - with in some way (this is the same as runsource()). - - """ - - def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: - """Closely emulate the interactive Python console. + def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: ... + def push(self, line: str) -> bool: ... - The optional banner argument specifies the banner to print - before the first interaction; by default it prints a banner - similar to the one printed by the real Python interpreter, - followed by the current class name in parentheses (so as not - to confuse this with the real interpreter -- since it's so - close!). - - The optional exitmsg argument specifies the exit message - printed when exiting. Pass the empty string to suppress - printing an exit message. If exitmsg is not given or None, - a default message is printed. - - """ - - def resetbuffer(self) -> None: - """Reset the input buffer.""" - - def raw_input(self, prompt: str = "") -> str: - """Write a prompt and read a line. - - The returned line does not include the trailing newline. - When the user enters the EOF key sequence, EOFError is raised. - - The base implementation uses the built-in function - input(); a subclass may replace this with a different - implementation. - - """ + def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... + def resetbuffer(self) -> None: ... + def raw_input(self, prompt: str = "") -> str: ... if sys.version_info >= (3, 13): def interact( @@ -214,22 +43,7 @@ if sys.version_info >= (3, 13): local: dict[str, Any] | None = None, exitmsg: str | None = None, local_exit: bool = False, - ) -> None: - """Closely emulate the interactive Python interpreter. - - This is a backwards compatible interface to the InteractiveConsole - class. When readfunc is not specified, it attempts to import the - readline module to enable GNU readline if it is available. - - Arguments (all optional, all default to None): - - banner -- passed to InteractiveConsole.interact() - readfunc -- if not None, replaces InteractiveConsole.raw_input() - local -- passed to InteractiveInterpreter.__init__() - exitmsg -- passed to InteractiveConsole.interact() - local_exit -- passed to InteractiveConsole.__init__() - - """ + ) -> None: ... else: def interact( @@ -237,18 +51,4 @@ else: readfunc: Callable[[str], str] | None = None, local: dict[str, Any] | None = None, exitmsg: str | None = None, - ) -> None: - """Closely emulate the interactive Python interpreter. - - This is a backwards compatible interface to the InteractiveConsole - class. When readfunc is not specified, it attempts to import the - readline module to enable GNU readline if it is available. - - Arguments (all optional, all default to None): - - banner -- passed to InteractiveConsole.interact() - readfunc -- if not None, replaces InteractiveConsole.raw_input() - local -- passed to InteractiveInterpreter.__init__() - exitmsg -- passed to InteractiveConsole.interact() - - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi index 5754989a3f37c..fa4d4fd4ba928 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi @@ -1,12 +1,3 @@ -"""codecs -- Python Codec Registry, API and helpers. - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. 
NO WARRANTY. - -""" - import sys import types from _codecs import * @@ -134,8 +125,6 @@ class _BufferedIncrementalDecoder(Protocol): if sys.version_info >= (3, 12): class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): - """Codec details when looking up the codec registry""" - _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -166,8 +155,6 @@ if sys.version_info >= (3, 12): else: @disjoint_base class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): - """Codec details when looking up the codec registry""" - _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -195,135 +182,21 @@ else: _is_text_encoding: bool | None = None, ) -> Self: ... -def getencoder(encoding: str) -> _Encoder: - """Lookup up the codec for the given encoding and return - its encoder function. - - Raises a LookupError in case the encoding cannot be found. - - """ - -def getdecoder(encoding: str) -> _Decoder: - """Lookup up the codec for the given encoding and return - its decoder function. - - Raises a LookupError in case the encoding cannot be found. - - """ - -def getincrementalencoder(encoding: str) -> _IncrementalEncoder: - """Lookup up the codec for the given encoding and return - its IncrementalEncoder class or factory function. - - Raises a LookupError in case the encoding cannot be found - or the codecs doesn't provide an incremental encoder. - - """ - +def getencoder(encoding: str) -> _Encoder: ... +def getdecoder(encoding: str) -> _Decoder: ... +def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... @overload -def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: - """Lookup up the codec for the given encoding and return - its IncrementalDecoder class or factory function. - - Raises a LookupError in case the encoding cannot be found - or the codecs doesn't provide an incremental decoder. - - """ - +def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: ... @overload def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... -def getreader(encoding: str) -> _StreamReader: - """Lookup up the codec for the given encoding and return - its StreamReader class or factory function. - - Raises a LookupError in case the encoding cannot be found. - - """ - -def getwriter(encoding: str) -> _StreamWriter: - """Lookup up the codec for the given encoding and return - its StreamWriter class or factory function. - - Raises a LookupError in case the encoding cannot be found. - - """ - +def getreader(encoding: str) -> _StreamReader: ... +def getwriter(encoding: str) -> _StreamWriter: ... def open( filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 -) -> StreamReaderWriter: - """Open an encoded file using the given mode and return - a wrapped version providing transparent encoding/decoding. - - Note: The wrapped version will only accept the object format - defined by the codecs, i.e. Unicode objects for most builtin - codecs. Output is also codec dependent and will usually be - Unicode as well. - - If encoding is not None, then the - underlying encoded files are always opened in binary mode. - The default file mode is 'r', meaning to open the file in read mode. - - encoding specifies the encoding which is to be used for the - file. - - errors may be given to define the error handling. It defaults - to 'strict' which causes ValueErrors to be raised in case an - encoding error occurs. 
- - buffering has the same meaning as for the builtin open() API. - It defaults to -1 which means that the default buffer size will - be used. - - The returned wrapped file object provides an extra attribute - .encoding which allows querying the used encoding. This - attribute is only available if an encoding was specified as - parameter. - """ - -def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: - """Return a wrapped version of file which provides transparent - encoding translation. - - Data written to the wrapped file is decoded according - to the given data_encoding and then encoded to the underlying - file using file_encoding. The intermediate data type - will usually be Unicode but depends on the specified codecs. - - Bytes read from the file are decoded using file_encoding and then - passed back to the caller encoded using data_encoding. - - If file_encoding is not given, it defaults to data_encoding. - - errors may be given to define the error handling. It defaults - to 'strict' which causes ValueErrors to be raised in case an - encoding error occurs. - - The returned wrapped file object provides two extra attributes - .data_encoding and .file_encoding which reflect the given - parameters of the same name. The attributes can be used for - introspection by Python programs. - - """ - -def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: - """ - Encoding iterator. - - Encodes the input strings from the iterator using an IncrementalEncoder. - - errors and kwargs are passed through to the IncrementalEncoder - constructor. - """ - -def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: - """ - Decoding iterator. - - Decodes the input strings from the iterator using an IncrementalDecoder. - - errors and kwargs are passed through to the IncrementalDecoder - constructor. - """ +) -> StreamReaderWriter: ... +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... 
BOM: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` BOM_BE: Final = b"\xfe\xff" @@ -336,184 +209,40 @@ BOM_UTF32: Final[Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"]] # depends o BOM_UTF32_BE: Final = b"\x00\x00\xfe\xff" BOM_UTF32_LE: Final = b"\xff\xfe\x00\x00" -def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'strict' error handling, which raises a UnicodeError on coding errors.""" - -def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'replace' error handling, which replaces malformed data with a replacement marker.""" - -def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'ignore' error handling, which ignores malformed data and continues.""" - -def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'xmlcharrefreplace' error handling, which replaces an unencodable character with the appropriate XML character reference.""" - -def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'backslashreplace' error handling, which replaces malformed data with a backslashed escape sequence.""" - -def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'namereplace' error handling, which replaces an unencodable character with a \\N{...} escape sequence.""" +def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... class Codec: - """Defines the interface for stateless encoders/decoders. - - The .encode()/.decode() methods may use different error - handling schemes by providing the errors argument. These - string values are predefined: - - 'strict' - raise a ValueError error (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace' - replace with a suitable replacement character; - Python will use the official U+FFFD REPLACEMENT - CHARACTER for the builtin Unicode codecs on - decoding and '?' on encoding. - 'surrogateescape' - replace with private code points U+DCnn. - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference (only for encoding). - 'backslashreplace' - Replace with backslashed escape sequences. - 'namereplace' - Replace with \\N{...} escape sequences - (only for encoding). - - The set of allowed values can be extended via register_error. - - """ - # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. - def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: - """Encodes the object input and returns a tuple (output - object, length consumed). - - errors defines the error handling to apply. It defaults to - 'strict' handling. - - The method may not store state in the Codec instance. Use - StreamWriter for codecs which have to keep state in order to - make encoding efficient. - - The encoder must be able to handle zero length input and - return an empty object of the output object type in this - situation. 
- - """ - - def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: - """Decodes the object input and returns a tuple (output - object, length consumed). - - input must be an object which provides the bf_getreadbuf - buffer slot. Python strings, buffer objects and memory - mapped files are examples of objects providing this slot. - - errors defines the error handling to apply. It defaults to - 'strict' handling. - - The method may not store state in the Codec instance. Use - StreamReader for codecs which have to keep state in order to - make decoding efficient. - - The decoder must be able to handle zero length input and - return an empty object of the output object type in this - situation. - - """ + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... class IncrementalEncoder: - """ - An IncrementalEncoder encodes an input in multiple steps. The input can - be passed piece by piece to the encode() method. The IncrementalEncoder - remembers the state of the encoding process between calls to encode(). - """ - errors: str - def __init__(self, errors: str = "strict") -> None: - """ - Creates an IncrementalEncoder instance. - - The IncrementalEncoder may use different error handling schemes by - providing the errors keyword argument. See the module docstring - for a list of possible values. - """ - + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def encode(self, input: str, final: bool = False) -> bytes: - """ - Encodes input and returns the resulting object. - """ - - def reset(self) -> None: - """ - Resets the encoder to the initial state. - """ + def encode(self, input: str, final: bool = False) -> bytes: ... + def reset(self) -> None: ... # documentation says int but str is needed for the subclass. - def getstate(self) -> int | str: - """ - Return the current state of the encoder. - """ - - def setstate(self, state: int | str) -> None: - """ - Set the current state of the encoder. state must have been - returned by getstate(). - """ + def getstate(self) -> int | str: ... + def setstate(self, state: int | str) -> None: ... class IncrementalDecoder: - """ - An IncrementalDecoder decodes an input in multiple steps. The input can - be passed piece by piece to the decode() method. The IncrementalDecoder - remembers the state of the decoding process between calls to decode(). - """ - errors: str - def __init__(self, errors: str = "strict") -> None: - """ - Create an IncrementalDecoder instance. - - The IncrementalDecoder may use different error handling schemes by - providing the errors keyword argument. See the module docstring - for a list of possible values. - """ - + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def decode(self, input: ReadableBuffer, final: bool = False) -> str: - """ - Decode input and returns the resulting object. - """ - - def reset(self) -> None: - """ - Reset the decoder to the initial state. - """ - - def getstate(self) -> tuple[bytes, int]: - """ - Return the current state of the decoder. - - This must be a (buffered_input, additional_state_info) tuple. - buffered_input must be a bytes object containing bytes that - were passed to decode() that have not yet been converted. - additional_state_info must be a non-negative integer - representing the state of the decoder WITHOUT yet having - processed the contents of buffered_input. 
In the initial state - and after reset(), getstate() must return (b"", 0). - """ - - def setstate(self, state: tuple[bytes, int]) -> None: - """ - Set the current state of the decoder. - - state must have been returned by getstate(). The effect of - setstate((b"", 0)) must be equivalent to reset(). - """ + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + def reset(self) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... + def setstate(self, state: tuple[bytes, int]) -> None: ... # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): - """ - This subclass of IncrementalEncoder can be used as the baseclass for an - incremental encoder if the encoder must keep some of the output in a - buffer between calls to encode(). - """ - buffer: str def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -521,12 +250,6 @@ class BufferedIncrementalEncoder(IncrementalEncoder): def encode(self, input: str, final: bool = False) -> bytes: ... class BufferedIncrementalDecoder(IncrementalDecoder): - """ - This subclass of IncrementalDecoder can be used as the baseclass for an - incremental decoder if the decoder must be able to handle incomplete - byte sequences. - """ - buffer: bytes def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -538,179 +261,41 @@ class BufferedIncrementalDecoder(IncrementalDecoder): class StreamWriter(Codec): stream: _WritableStream errors: str - def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: - """Creates a StreamWriter instance. - - stream must be a file-like object open for writing. - - The StreamWriter may use different error handling - schemes by providing the errors keyword argument. These - parameters are predefined: - - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference. - 'backslashreplace' - Replace with backslashed escape - sequences. - 'namereplace' - Replace with \\N{...} escape sequences. - - The set of allowed parameter values can be extended via - register_error. - """ - - def write(self, object: str) -> None: - """Writes the object's contents encoded to self.stream.""" - - def writelines(self, list: Iterable[str]) -> None: - """Writes the concatenated list of strings to the stream - using .write(). - """ - - def reset(self) -> None: - """Resets the codec buffers used for keeping internal state. - - Calling this method should ensure that the data on the - output is put into a clean state, that allows appending - of new fresh data without having to rescan the whole - stream to recover state. - - """ - + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... + def write(self, object: str) -> None: ... + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: - """Inherit all other methods from the underlying stream.""" + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... 
class StreamReader(Codec): stream: _ReadableStream errors: str # This is set to str, but some subclasses set to bytes instead. charbuffertype: ClassVar[type] = ... - def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: - """Creates a StreamReader instance. - - stream must be a file-like object open for reading. - - The StreamReader may use different error handling - schemes by providing the errors keyword argument. These - parameters are predefined: - - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'backslashreplace' - Replace with backslashed escape sequences; - - The set of allowed parameter values can be extended via - register_error. - """ - - def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: - """Decodes data from the stream self.stream and returns the - resulting object. - - chars indicates the number of decoded code points or bytes to - return. read() will never return more data than requested, - but it might return less, if there is not enough available. - - size indicates the approximate maximum number of decoded - bytes or code points to read for decoding. The decoder - can modify this setting as appropriate. The default value - -1 indicates to read and decode as much as possible. size - is intended to prevent having to decode huge files in one - step. - - If firstline is true, and a UnicodeDecodeError happens - after the first line terminator in the input only the first line - will be returned, the rest of the input will be kept until the - next call to read(). - - The method should use a greedy read strategy, meaning that - it should read as much data as is allowed within the - definition of the encoding and the given size, e.g. if - optional encoding endings or state markers are available - on the stream, these should be read too. - """ - - def readline(self, size: int | None = None, keepends: bool = True) -> str: - """Read one line from the input stream and return the - decoded data. - - size, if given, is passed as size argument to the - read() method. - - """ - - def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: - """Read all lines available on the input stream - and return them as a list. - - Line breaks are implemented using the codec's decoder - method and are included in the list entries. - - sizehint, if given, is ignored since there is no efficient - way of finding the true end-of-line. - - """ - - def reset(self) -> None: - """Resets the codec buffers used for keeping internal state. - - Note that no stream repositioning should take place. - This method is primarily intended to be able to recover - from decoding errors. - - """ - - def seek(self, offset: int, whence: int = 0) -> None: - """Set the input stream's current position. - - Resets the codec buffers used for keeping state. - """ - + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... + def readline(self, size: int | None = None, keepends: bool = True) -> str: ... + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... def __enter__(self) -> Self: ... 
def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __iter__(self) -> Self: ... - def __next__(self) -> str: - """Return the next decoded line from the input stream.""" - - def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: - """Inherit all other methods from the underlying stream.""" + def __next__(self) -> str: ... + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): - """StreamReaderWriter instances allow wrapping streams which - work in both read and write modes. - - The design is such that one can use the factory functions - returned by the codec.lookup() function to construct the - instance. - - """ - stream: _Stream - def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: - """Creates a StreamReaderWriter instance. - - stream must be a Stream-like object. - - Reader, Writer must be factory functions or classes - providing the StreamReader, StreamWriter interface resp. - - Error handling is done in the same way as defined for the - StreamWriter/Readers. - - """ - + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ... def read(self, size: int = -1) -> str: ... def readline(self, size: int | None = None) -> str: ... def readlines(self, sizehint: int | None = None) -> list[str]: ... - def __next__(self) -> str: - """Return the next decoded line from the input stream.""" - + def __next__(self) -> str: ... def __iter__(self) -> Self: ... def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... @@ -718,8 +303,7 @@ class StreamReaderWriter(TextIO): def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __getattr__(self, name: str) -> Any: - """Inherit all other methods from the underlying stream.""" + def __getattr__(self, name: str) -> Any: ... # These methods don't actually exist directly, but they are needed to satisfy the TextIO # interface. At runtime, they are delegated through __getattr__. def close(self) -> None: ... @@ -733,21 +317,6 @@ class StreamReaderWriter(TextIO): def writable(self) -> bool: ... class StreamRecoder(BinaryIO): - """StreamRecoder instances translate data from one encoding to another. - - They use the complete set of APIs returned by the - codecs.lookup() function to implement their task. - - Data written to the StreamRecoder is first decoded into an - intermediate format (depending on the "decode" codec) and then - written to the underlying stream using an instance of the provided - Writer class. - - In the other direction, data is read from the underlying stream using - a Reader instance and then encoded and returned to the caller. 
- - """ - data_encoding: str file_encoding: str def __init__( @@ -758,40 +327,17 @@ class StreamRecoder(BinaryIO): Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict", - ) -> None: - """Creates a StreamRecoder instance which implements a two-way - conversion: encode and decode work on the frontend (the - data visible to .read() and .write()) while Reader and Writer - work on the backend (the data in stream). - - You can use these objects to do transparent - transcodings from e.g. latin-1 to utf-8 and back. - - stream must be a file-like object. - - encode and decode must adhere to the Codec interface; Reader and - Writer must be factory functions or classes providing the - StreamReader and StreamWriter interfaces resp. - - Error handling is done in the same way as defined for the - StreamWriter/Readers. - - """ - + ) -> None: ... def read(self, size: int = -1) -> bytes: ... def readline(self, size: int | None = None) -> bytes: ... def readlines(self, sizehint: int | None = None) -> list[bytes]: ... - def __next__(self) -> bytes: - """Return the next decoded line from the input stream.""" - + def __next__(self) -> bytes: ... def __iter__(self) -> Self: ... # Base class accepts more types than just bytes def write(self, data: bytes) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[bytes]) -> None: ... # type: ignore[override] def reset(self) -> None: ... - def __getattr__(self, name: str) -> Any: - """Inherit all other methods from the underlying stream.""" - + def __getattr__(self, name: str) -> Any: ... def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi index 2ef06796dda4c..8e311343eb89d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi @@ -1,91 +1,15 @@ -"""Utilities to compile possibly incomplete Python source code. - -This module provides two interfaces, broadly similar to the builtin -function compile(), which take program text, a filename and a 'mode' -and: - -- Return code object if the command is complete and valid -- Return None if the command is incomplete -- Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - -The two interfaces are: - -compile_command(source, filename, symbol): - - Compiles a single command in the manner described above. - -CommandCompiler(): - - Instances of this class have __call__ methods identical in - signature to compile_command; the difference is that if the - instance compiles program text containing a __future__ statement, - the instance 'remembers' and compiles all subsequent program texts - with the statement in force. - -The module also provides another class: - -Compile(): - - Instances of this class act like the built-in function compile, - but with 'memory' in the sense described above. -""" - import sys from types import CodeType __all__ = ["compile_command", "Compile", "CommandCompiler"] if sys.version_info >= (3, 14): - def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: - """Compile a command and determine whether it is incomplete. 
- - Arguments: - - source -- the source string; may contain \\n characters - filename -- optional filename from which source was read; default - "" - symbol -- optional grammar start symbol; "single" (default), "exec" - or "eval" - - Return value / exceptions raised: - - - Return a code object if the command is complete and valid - - Return None if the command is incomplete - - Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - """ + def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: ... else: - def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: - """Compile a command and determine whether it is incomplete. - - Arguments: - - source -- the source string; may contain \\n characters - filename -- optional filename from which source was read; default - "" - symbol -- optional grammar start symbol; "single" (default), "exec" - or "eval" - - Return value / exceptions raised: - - - Return a code object if the command is complete and valid - - Return None if the command is incomplete - - Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - """ + def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... class Compile: - """Instances of this class behave much like the built-in compile - function, but if one is used to compile text containing a future - statement, it "remembers" and compiles all subsequent program texts - with the statement in force. - """ - flags: int if sys.version_info >= (3, 13): def __call__(self, source: str, filename: str, symbol: str, flags: int = 0) -> CodeType: ... @@ -93,30 +17,5 @@ class Compile: def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... class CommandCompiler: - """Instances of this class have __call__ methods identical in - signature to compile_command; the difference is that if the - instance compiles program text containing a __future__ statement, - the instance 'remembers' and compiles all subsequent program texts - with the statement in force. - """ - compiler: Compile - def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: - """Compile a command and determine whether it is incomplete. - - Arguments: - - source -- the source string; may contain \\n characters - filename -- optional filename from which source was read; - default "" - symbol -- optional grammar start symbol; "single" (default) or - "eval" - - Return value / exceptions raised: - - - Return a code object if the command is complete and valid - - Return None if the command is incomplete - - Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). - """ + def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi index adb79e5ff4104..8636e6cdbdc31 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi @@ -1,19 +1,3 @@ -"""This module implements specialized container datatypes providing -alternatives to Python's general purpose built-in containers, dict, -list, set, and tuple. - -* namedtuple factory function for creating tuple subclasses with named fields -* deque list-like container with fast appends and pops on either end -* ChainMap dict-like class for creating a single view of multiple mappings -* Counter dict subclass for counting hashable objects -* OrderedDict dict subclass that remembers the order entries were added -* defaultdict dict subclass that calls a factory function to supply missing values -* UserDict wrapper around dictionary objects for easier dict subclassing -* UserList wrapper around list objects for easier list subclassing -* UserString wrapper around string objects for easier string subclassing - -""" - import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT @@ -56,29 +40,7 @@ def namedtuple( rename: bool = False, module: str | None = None, defaults: Iterable[Any] | None = None, -) -> type[tuple[Any, ...]]: - """Returns a new subclass of tuple with named fields. - - >>> Point = namedtuple('Point', ['x', 'y']) - >>> Point.__doc__ # docstring for the new class - 'Point(x, y)' - >>> p = Point(11, y=22) # instantiate with positional args or keywords - >>> p[0] + p[1] # indexable like a plain tuple - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> d = p._asdict() # convert to a dictionary - >>> d['x'] - 11 - >>> Point(**d) # convert from a dictionary - Point(x=11, y=22) - >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields - Point(x=100, y=22) - - """ +) -> type[tuple[Any, ...]]: ... class UserDict(MutableMapping[_KT, _VT]): data: dict[_KT, _VT] @@ -151,8 +113,6 @@ class UserDict(MutableMapping[_KT, _VT]): def get(self, key: _KT, default: _T) -> _VT | _T: ... class UserList(MutableSequence[_T]): - """A more or less complete user-defined wrapper around list objects.""" - data: list[_T] @overload def __init__(self, initlist: None = None) -> None: ... @@ -273,212 +233,60 @@ class UserString(Sequence[UserString]): @disjoint_base class deque(MutableSequence[_T]): - """A list-like sequence optimized for data accesses near its endpoints.""" - @property - def maxlen(self) -> int | None: - """maximum size of a deque or None if unbounded""" - + def maxlen(self) -> int | None: ... @overload def __init__(self, *, maxlen: int | None = None) -> None: ... @overload def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... 
- def append(self, x: _T, /) -> None: - """Add an element to the right side of the deque.""" - - def appendleft(self, x: _T, /) -> None: - """Add an element to the left side of the deque.""" - - def copy(self) -> Self: - """Return a shallow copy of a deque.""" - - def count(self, x: _T, /) -> int: - """Return number of occurrences of value.""" - - def extend(self, iterable: Iterable[_T], /) -> None: - """Extend the right side of the deque with elements from the iterable.""" - - def extendleft(self, iterable: Iterable[_T], /) -> None: - """Extend the left side of the deque with elements from the iterable.""" - - def insert(self, i: int, x: _T, /) -> None: - """Insert value before index.""" - - def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: - """Return first index of value. - - Raises ValueError if the value is not present. - """ - - def pop(self) -> _T: # type: ignore[override] - """Remove and return the rightmost element.""" - - def popleft(self) -> _T: - """Remove and return the leftmost element.""" - - def remove(self, value: _T, /) -> None: - """Remove first occurrence of value.""" - - def rotate(self, n: int = 1, /) -> None: - """Rotate the deque n steps to the right. If n is negative, rotates left.""" - - def __copy__(self) -> Self: - """Return a shallow copy of a deque.""" - - def __len__(self) -> int: - """Return len(self).""" + def append(self, x: _T, /) -> None: ... + def appendleft(self, x: _T, /) -> None: ... + def copy(self) -> Self: ... + def count(self, x: _T, /) -> int: ... + def extend(self, iterable: Iterable[_T], /) -> None: ... + def extendleft(self, iterable: Iterable[_T], /) -> None: ... + def insert(self, i: int, x: _T, /) -> None: ... + def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ... + def pop(self) -> _T: ... # type: ignore[override] + def popleft(self) -> _T: ... + def remove(self, value: _T, /) -> None: ... + def rotate(self, n: int = 1, /) -> None: ... + def __copy__(self) -> Self: ... + def __len__(self) -> int: ... __hash__: ClassVar[None] # type: ignore[assignment] # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores - def __getitem__(self, key: SupportsIndex, /) -> _T: # type: ignore[override] - """Return self[key].""" - - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: # type: ignore[override] - """Set self[key] to value.""" - - def __delitem__(self, key: SupportsIndex, /) -> None: # type: ignore[override] - """Delete self[key].""" - - def __contains__(self, key: object, /) -> bool: - """Return bool(key in self).""" - - def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: - """Return state information for pickling.""" - - def __iadd__(self, value: Iterable[_T], /) -> Self: - """Implement self+=value.""" - - def __add__(self, value: Self, /) -> Self: - """Return self+value.""" - - def __mul__(self, value: int, /) -> Self: - """Return self*value.""" - - def __imul__(self, value: int, /) -> Self: - """Implement self*=value.""" - + def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override] + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override] + def __delitem__(self, key: SupportsIndex, /) -> None: ... # type: ignore[override] + def __contains__(self, key: object, /) -> bool: ... + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self, value: Iterable[_T], /) -> Self: ... + def __add__(self, value: Self, /) -> Self: ... 
+ def __mul__(self, value: int, /) -> Self: ... + def __imul__(self, value: int, /) -> Self: ... def __lt__(self, value: deque[_T], /) -> bool: ... def __le__(self, value: deque[_T], /) -> bool: ... def __gt__(self, value: deque[_T], /) -> bool: ... def __ge__(self, value: deque[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Counter(dict[_T, int], Generic[_T]): - """Dict subclass for counting hashable items. Sometimes called a bag - or multiset. Elements are stored as dictionary keys and their counts - are stored as dictionary values. - - >>> c = Counter('abcdeabcdabcaba') # count elements from a string - - >>> c.most_common(3) # three most common elements - [('a', 5), ('b', 4), ('c', 3)] - >>> sorted(c) # list all unique elements - ['a', 'b', 'c', 'd', 'e'] - >>> ''.join(sorted(c.elements())) # list elements with repetitions - 'aaaaabbbbcccdde' - >>> sum(c.values()) # total of all counts - 15 - - >>> c['a'] # count of letter 'a' - 5 - >>> for elem in 'shazam': # update counts from an iterable - ... c[elem] += 1 # by adding 1 to each element's count - >>> c['a'] # now there are seven 'a' - 7 - >>> del c['b'] # remove all 'b' - >>> c['b'] # now there are zero 'b' - 0 - - >>> d = Counter('simsalabim') # make another counter - >>> c.update(d) # add in the second counter - >>> c['a'] # now there are nine 'a' - 9 - - >>> c.clear() # empty the counter - >>> c - Counter() - - Note: If a count is set to zero or reduced to zero, it will remain - in the counter until the entry is deleted or the counter is cleared: - - >>> c = Counter('aaabbc') - >>> c['b'] -= 2 # reduce the count of 'b' by two - >>> c.most_common() # 'b' is still in, but its count is zero - [('a', 3), ('c', 1), ('b', 0)] - - """ - @overload - def __init__(self, iterable: None = None, /) -> None: - """Create a new, empty Counter object. And if given, count elements - from an input iterable. Or, initialize the count from another mapping - of elements to their counts. - - >>> c = Counter() # a new, empty counter - >>> c = Counter('gallahad') # a new counter from an iterable - >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping - >>> c = Counter(a=4, b=2) # a new counter from keyword args - - """ - + def __init__(self, iterable: None = None, /) -> None: ... @overload def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ... @overload def __init__(self, mapping: SupportsKeysAndGetItem[_T, int], /) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def copy(self) -> Self: - """Return a shallow copy.""" - - def elements(self) -> Iterator[_T]: - """Iterator over elements repeating each as many times as its count. - - >>> c = Counter('ABCABC') - >>> sorted(c.elements()) - ['A', 'A', 'B', 'B', 'C', 'C'] - - Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 - - >>> import math - >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) - >>> math.prod(prime_factors.elements()) - 1836 - - Note, if an element's count has been set to zero or is a negative - number, elements() will ignore it. - - """ - - def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: - """List the n most common elements and their counts from the most - common to the least. If n is None, then list all element counts. 
- - >>> Counter('abracadabra').most_common(3) - [('a', 5), ('b', 2), ('r', 2)] - - """ - + def copy(self) -> Self: ... + def elements(self) -> Iterator[_T]: ... + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... @classmethod def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] @overload - def subtract(self, iterable: None = None, /) -> None: - """Like dict.update() but subtracts counts instead of replacing them. - Counts can be reduced below zero. Both the inputs and outputs are - allowed to contain zero and negative counts. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.subtract('witch') # subtract elements from another iterable - >>> c.subtract(Counter('watch')) # subtract elements from another counter - >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch - 0 - >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch - -1 - - """ - + def subtract(self, iterable: None = None, /) -> None: ... @overload def subtract(self, mapping: Mapping[_T, int], /) -> None: ... @overload @@ -490,131 +298,34 @@ class Counter(dict[_T, int], Generic[_T]): # (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`, # the tuples would be added as keys, breaking type safety) @overload # type: ignore[override] - def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: - """Like dict.update() but add counts instead of replacing them. - - Source can be an iterable, a dictionary, or another Counter instance. - - >>> c = Counter('which') - >>> c.update('witch') # add elements from another iterable - >>> d = Counter('watch') - >>> c.update(d) # add elements from another counter - >>> c['h'] # four 'h' in which, witch, and watch - 4 - - """ - + def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: ... @overload def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ... @overload def update(self, iterable: None = None, /, **kwargs: int) -> None: ... - def __missing__(self, key: _T) -> int: - """The count of elements not in the Counter is zero.""" - - def __delitem__(self, elem: object) -> None: - """Like dict.__delitem__() but does not raise KeyError for missing values.""" + def __missing__(self, key: _T) -> int: ... + def __delitem__(self, elem: object) -> None: ... if sys.version_info >= (3, 10): - def __eq__(self, other: object) -> bool: - """True if all counts agree. Missing counts are treated as zero.""" - - def __ne__(self, other: object) -> bool: - """True if any counts disagree. Missing counts are treated as zero.""" - - def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: - """Add counts from two counters. - - >>> Counter('abbb') + Counter('bcc') - Counter({'b': 4, 'c': 2, 'a': 1}) - - """ - - def __sub__(self, other: Counter[_T]) -> Counter[_T]: - """Subtract count, but keep only results with positive counts. - - >>> Counter('abbbc') - Counter('bccd') - Counter({'b': 2, 'a': 1}) - - """ - - def __and__(self, other: Counter[_T]) -> Counter[_T]: - """Intersection is the minimum of corresponding counts. - - >>> Counter('abbb') & Counter('bcc') - Counter({'b': 1}) - - """ - - def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: # type: ignore[override] - """Union is the maximum of value in either of the input counters. 
- - >>> Counter('abbb') | Counter('bcc') - Counter({'b': 3, 'c': 2, 'a': 1}) - - """ - - def __pos__(self) -> Counter[_T]: - """Adds an empty counter, effectively stripping negative and zero counts""" - - def __neg__(self) -> Counter[_T]: - """Subtracts from an empty counter. Strips positive and zero counts, - and flips the sign on negative counts. - - """ + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + + def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ... + def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... + def __and__(self, other: Counter[_T]) -> Counter[_T]: ... + def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override] + def __pos__(self) -> Counter[_T]: ... + def __neg__(self) -> Counter[_T]: ... # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. - def __iadd__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[misc] - """Inplace add from another counter, keeping only positive counts. - - >>> c = Counter('abbb') - >>> c += Counter('bcc') - >>> c - Counter({'b': 4, 'c': 2, 'a': 1}) - - """ - - def __isub__(self, other: SupportsItems[_T, int]) -> Self: - """Inplace subtract counter, but keep only results with positive counts. - - >>> c = Counter('abbbc') - >>> c -= Counter('bccd') - >>> c - Counter({'b': 2, 'a': 1}) - - """ - - def __iand__(self, other: SupportsItems[_T, int]) -> Self: - """Inplace intersection is the minimum of corresponding counts. - - >>> c = Counter('abbb') - >>> c &= Counter('bcc') - >>> c - Counter({'b': 1}) - - """ - - def __ior__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[override,misc] - """Inplace union is the maximum of value from either counter. - - >>> c = Counter('abbb') - >>> c |= Counter('bcc') - >>> c - Counter({'b': 3, 'c': 2, 'a': 1}) - - """ + def __iadd__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[misc] + def __isub__(self, other: SupportsItems[_T, int]) -> Self: ... + def __iand__(self, other: SupportsItems[_T, int]) -> Self: ... + def __ior__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[override,misc] if sys.version_info >= (3, 10): - def total(self) -> int: - """Sum of the counts""" - - def __le__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a subset of those in other.""" - - def __lt__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a proper subset of those in other.""" - - def __ge__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a superset of those in other.""" - - def __gt__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a proper superset of those in other.""" + def total(self) -> int: ... + def __le__(self, other: Counter[Any]) -> bool: ... + def __lt__(self, other: Counter[Any]) -> bool: ... + def __ge__(self, other: Counter[Any]) -> bool: ... + def __gt__(self, other: Counter[Any]) -> bool: ... # The pure-Python implementations of the "views" classes # These are exposed at runtime in `collections/__init__.py` @@ -648,26 +359,10 @@ class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyrig @disjoint_base class OrderedDict(dict[_KT, _VT]): - """Dictionary that remembers insertion order""" - - def popitem(self, last: bool = True) -> tuple[_KT, _VT]: - """Remove and return a (key, value) pair from the dictionary. - - Pairs are returned in LIFO order if last is true or FIFO order if false. 
- """ - - def move_to_end(self, key: _KT, last: bool = True) -> None: - """Move an existing element to the end (or beginning if last is false). - - Raise KeyError if the element does not exist. - """ - - def copy(self) -> Self: - """od.copy() -> a shallow copy of od""" - - def __reversed__(self) -> Iterator[_KT]: - """od.__reversed__() <==> reversed(od)""" - + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = True) -> None: ... + def copy(self) -> Self: ... + def __reversed__(self) -> Iterator[_KT]: ... def keys(self) -> _odict_keys[_KT, _VT]: ... def items(self) -> _odict_items[_KT, _VT]: ... def values(self) -> _odict_values[_KT, _VT]: ... @@ -676,60 +371,34 @@ class OrderedDict(dict[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: - """Create a new ordered dictionary with keys from iterable and values set to value.""" - + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: - """Insert key with a value of default if key is not in the dictionary. - - Return the value for key if key is in the dictionary, else default. - """ - + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... # Same as dict.pop, but accepts keyword arguments @overload - def pop(self, key: _KT) -> _VT: - """od.pop(key[,default]) -> v, remove specified key and return the corresponding value. - - If the key is not found, return the default if given; otherwise, - raise a KeyError. - """ - + def pop(self, key: _KT) -> _VT: ... @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... def __eq__(self, value: object, /) -> bool: ... @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: - """Return self|value.""" - + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... @overload def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: - """Return value|self.""" - + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... @overload def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] @disjoint_base class defaultdict(dict[_KT, _VT]): - """defaultdict(default_factory=None, /, [...]) --> dict with default factory - - The default factory is called without arguments to produce - a new value when a key is not present, in __getitem__ only. - A defaultdict compares equal to a dict with the same items. - All remaining arguments are treated the same as if they were - passed to the dict constructor, including keyword arguments. - """ - default_factory: Callable[[], _VT] | None @overload def __init__(self) -> None: ... @@ -764,63 +433,24 @@ class defaultdict(dict[_KT, _VT]): /, **kwargs: _VT, ) -> None: ... 
- def __missing__(self, key: _KT, /) -> _VT: - """__missing__(key) # Called by __getitem__ for missing key; pseudo-code: - if self.default_factory is None: raise KeyError((key,)) - self[key] = value = self.default_factory() - return value - """ - - def __copy__(self) -> Self: - """D.copy() -> a shallow copy of D.""" - - def copy(self) -> Self: - """D.copy() -> a shallow copy of D.""" - + def __missing__(self, key: _KT, /) -> _VT: ... + def __copy__(self) -> Self: ... + def copy(self) -> Self: ... @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: - """Return self|value.""" - + def __or__(self, value: dict[_KT, _VT], /) -> Self: ... @overload def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: - """Return value|self.""" - + def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... @overload def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class ChainMap(MutableMapping[_KT, _VT]): - """A ChainMap groups multiple dicts (or other mappings) together - to create a single, updateable view. - - The underlying mappings are stored in a list. That list is public and can - be accessed or updated using the *maps* attribute. There is no other - state. - - Lookups search the underlying mappings successively until a key is found. - In contrast, writes, updates, and deletions only operate on the first - mapping. - - """ - maps: list[MutableMapping[_KT, _VT]] - def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: - """Initialize a ChainMap by setting *maps* to the given mappings. - If no mappings are provided, a single empty dictionary is used. - - """ - - def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: - """New ChainMap with a new map followed by all previous maps. - If no map is provided, an empty dict is used. - Keyword arguments update the map or new empty dict. - """ - + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... @property - def parents(self) -> Self: - """New ChainMap from maps[1:].""" - + def parents(self) -> Self: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @@ -837,41 +467,32 @@ class ChainMap(MutableMapping[_KT, _VT]): def __bool__(self) -> bool: ... # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: - """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D""" - + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload - def pop(self, key: _KT) -> _VT: - """Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].""" - + def pop(self, key: _KT) -> _VT: ... @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... - def copy(self) -> Self: - """New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]""" + def copy(self) -> Self: ... __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, # so the signature should be kept in line with `dict.fromkeys`. 
if sys.version_info >= (3, 13): @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: - """Create a new ChainMap with keys from iterable and values set to value.""" + def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: ... else: @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: - """Create a ChainMap with a single dict created from the iterable.""" + def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ... @classmethod @overload # Special-case None: the user probably wants to add non-None values later. - def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: - """Create a new ChainMap with keys from iterable and values set to value.""" - + def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi index 337264c60ca09..3df2a1d9eb9b3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi @@ -1,7 +1,2 @@ -"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. - -Unit tests are in test_collections. -""" - from _collections_abc import * from _collections_abc import __all__ as __all__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi index 9e674165b9551..4afcb5392b58e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi @@ -1,21 +1,3 @@ -"""Conversion functions between RGB and other color systems. - -This modules provides two functions for each color system ABC: - - rgb_to_abc(r, g, b) --> a, b, c - abc_to_rgb(a, b, c) --> r, g, b - -All inputs and outputs are triples of floats in the range [0.0...1.0] -(with the exception of I and Q, which covers a slightly larger range). -Inputs outside the valid range may cause exceptions or invalid outputs. - -Supported color systems: -RGB: Red, Green, Blue components -YIQ: Luminance, Chrominance (used by composite video signals) -HLS: Hue, Luminance, Saturation -HSV: Hue, Saturation, Value -""" - from typing import Final __all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi index e6c1e174f69d1..8972d50a4a634 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi @@ -1,16 +1,3 @@ -"""Module/script to byte-compile all .py files to .pyc files. - -When called as a script with arguments, this compiles the directories -given as arguments recursively; the -l option prevents it from -recursing into directories. - -Without arguments, it compiles all modules on sys.path, without -recursing into subdirectories. (Even though it should do so for -packages -- for now, you'll have to deal with packages separately.) - -See module py_compile for details of the actual byte-compilation. 
-""" - import sys from _typeshed import StrPath from py_compile import PycInvalidationMode @@ -39,32 +26,7 @@ if sys.version_info >= (3, 10): prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: - """Byte-compile all modules in the given directory tree. - - Arguments (only dir is required): - - dir: the directory to byte-compile - maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) - ddir: the directory that will be prepended to the path to the - file as it is compiled into each byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. - workers: maximum number of parallel workers - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path - hardlink_dupes: hardlink duplicated pyc files - """ - + ) -> bool: ... def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -79,29 +41,7 @@ if sys.version_info >= (3, 10): prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: - """Byte-compile one file. - - Arguments (only fullname is required): - - fullname: the file to byte-compile - ddir: if given, the directory name compiled in to the - byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path. - hardlink_dupes: hardlink duplicated pyc files - """ + ) -> bool: ... else: def compile_dir( @@ -120,32 +60,7 @@ else: prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: - """Byte-compile all modules in the given directory tree. - - Arguments (only dir is required): - - dir: the directory to byte-compile - maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) - ddir: the directory that will be prepended to the path to the - file as it is compiled into each byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. 
- workers: maximum number of parallel workers - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path - hardlink_dupes: hardlink duplicated pyc files - """ - + ) -> bool: ... def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -160,29 +75,7 @@ else: prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: - """Byte-compile one file. - - Arguments (only fullname is required): - - fullname: the file to byte-compile - ddir: if given, the directory name compiled in to the - byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path. - hardlink_dupes: hardlink duplicated pyc files - """ + ) -> bool: ... def compile_path( skip_curdir: bool = ..., @@ -192,16 +85,4 @@ def compile_path( legacy: bool = False, optimize: int = -1, invalidation_mode: PycInvalidationMode | None = None, -) -> bool: - """Byte-compile all module on sys.path. - - Arguments (all optional): - - skip_curdir: if true, skip current directory (default True) - maxlevels: max recursion level (default 0) - force: as for compile_dir() (default False) - quiet: as for compile_dir() (default 0) - legacy: as for compile_dir() (default False) - optimize: as for compile_dir() (default -1) - invalidation_mode: as for compiler_dir() - """ +) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi index 77866c1a1b6b2..b8463973ec671 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi @@ -1,5 +1,3 @@ -"""Internal classes used by compression modules""" - from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase @@ -13,12 +11,9 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... -class BaseStream(BufferedIOBase): - """Mode-checking helper functions.""" +class BaseStream(BufferedIOBase): ... class DecompressReader(RawIOBase): - """Adapts the decompressor API to a RawIOBase reader API""" - def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi index 074404e205abf..9ddc39f27c286 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi @@ -1,7 +1 @@ -"""Interface to the libbzip2 compression library. 
- -This module provides a file interface, classes for incremental -(de)compression, and functions for one-shot (de)compression. -""" - from bz2 import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi index 6cb4250d2fac2..9422a735c590e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi @@ -1,7 +1 @@ -"""Functions that read and write gzipped files. - -The user of the file doesn't have to worry about the compression, -but random access is not allowed. -""" - from gzip import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi index 07f407d24467d..936c3813db4f1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi @@ -1,11 +1 @@ -"""Interface to the liblzma compression library. - -This module provides a class for reading and writing compressed files, -classes for incremental (de)compression, and convenience functions for -one-shot (de)compression. - -These classes and functions support both the XZ and legacy LZMA -container formats, as well as raw compressed data streams. -""" - from lzma import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi index 87241f3133134..78d176c03ee83 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi @@ -1,16 +1 @@ -"""The functions in this module allow compression and decompression using the -zlib library, which is based on GNU zip. - -adler32(string[, start]) -- Compute an Adler-32 checksum. -compress(data[, level]) -- Compress data, with compression level 0-9 or -1. -compressobj([level[, ...]]) -- Return a compressor object. -crc32(string[, start]) -- Compute a CRC-32 checksum. -decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string. -decompressobj([wbits[, zdict]]) -- Return a decompressor object. - -'wbits' is window buffer size and container format. -Compressor objects support compress() and flush() methods; decompressor -objects support decompress() and flush(). -""" - from zlib import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi index d47f8eab3b051..d5da4be036129 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi @@ -1,5 +1,3 @@ -"""Python bindings to the Zstandard (zstd) compression library (RFC-8878).""" - import enum from _typeshed import ReadableBuffer from collections.abc import Iterable, Mapping @@ -37,86 +35,20 @@ zstd_version_info: Final[tuple[int, int, int]] COMPRESSION_LEVEL_DEFAULT: Final = _zstd.ZSTD_CLEVEL_DEFAULT class FrameInfo: - """Information about a Zstandard frame.""" - __slots__ = ("decompressed_size", "dictionary_id") decompressed_size: int dictionary_id: int def __init__(self, decompressed_size: int, dictionary_id: int) -> None: ... -def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: - """Get Zstandard frame information from a frame header. - - *frame_buffer* is a bytes-like object. 
It should start from the beginning - of a frame, and needs to include at least the frame header (6 to 18 bytes). - - The returned FrameInfo object has two attributes. - 'decompressed_size' is the size in bytes of the data in the frame when - decompressed, or None when the decompressed size is unknown. - 'dictionary_id' is an int in the range (0, 2**32). The special value 0 - means that the dictionary ID was not recorded in the frame header, - the frame may or may not need a dictionary to be decoded, - and the ID of such a dictionary is not specified. - """ - -def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: - """Return a ZstdDict representing a trained Zstandard dictionary. - - *samples* is an iterable of samples, where a sample is a bytes-like - object representing a file. - - *dict_size* is the dictionary's maximum size, in bytes. - """ - -def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: - """Return a ZstdDict representing a finalized Zstandard dictionary. - - Given a custom content as a basis for dictionary, and a set of samples, - finalize *zstd_dict* by adding headers and statistics according to the - Zstandard dictionary format. - - You may compose an effective dictionary content by hand, which is used as - basis dictionary, and use some samples to finalize a dictionary. The basis - dictionary may be a "raw content" dictionary. See *is_raw* in ZstdDict. - - *samples* is an iterable of samples, where a sample is a bytes-like object - representing a file. - *dict_size* is the dictionary's maximum size, in bytes. - *level* is the expected compression level. The statistics for each - compression level differ, so tuning the dictionary to the compression level - can provide improvements. - """ - +def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: ... +def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: ... +def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: ... def compress( data: ReadableBuffer, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None -) -> bytes: - """Return Zstandard compressed *data* as bytes. - - *level* is an int specifying the compression level to use, defaulting to - COMPRESSION_LEVEL_DEFAULT ('3'). - *options* is a dict object that contains advanced compression - parameters. See CompressionParameter for more on options. - *zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See - the function train_dict for how to train a ZstdDict on sample data. - - For incremental compression, use a ZstdCompressor instead. - """ - -def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: - """Decompress one or more frames of Zstandard compressed *data*. - - *zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See - the function train_dict for how to train a ZstdDict on sample data. - *options* is a dict object that contains advanced compression - parameters. See DecompressionParameter for more on options. - - For incremental decompression, use a ZstdDecompressor instead. - """ - +) -> bytes: ... +def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: ... 
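The one-shot helpers stubbed just above (compress, decompress, get_frame_info, COMPRESSION_LEVEL_DEFAULT) correspond to the runtime behaviour described in the docstrings this patch strips. A minimal sketch of how they fit together, assuming Python 3.14+ where the compression.zstd package exists; the payload and variable names are illustrative only:

```python
# Illustrative only: exercises the one-shot zstd API whose stubs are trimmed above.
# Assumes Python 3.14+, where the compression.zstd package is available.
from compression import zstd

payload = b"typeshed sync " * 512

# compress() falls back to COMPRESSION_LEVEL_DEFAULT when level is omitted.
frame = zstd.compress(payload, level=zstd.COMPRESSION_LEVEL_DEFAULT)

# get_frame_info() parses only the frame header.
info = zstd.get_frame_info(frame)
print(info.decompressed_size, info.dictionary_id)  # payload size if recorded, 0 = no dictionary ID

assert zstd.decompress(frame) == payload
```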
@final class CompressionParameter(enum.IntEnum): - """Compression parameters.""" - compression_level = _zstd.ZSTD_c_compressionLevel window_log = _zstd.ZSTD_c_windowLog hash_log = _zstd.ZSTD_c_hashLog @@ -136,32 +68,15 @@ class CompressionParameter(enum.IntEnum): nb_workers = _zstd.ZSTD_c_nbWorkers job_size = _zstd.ZSTD_c_jobSize overlap_log = _zstd.ZSTD_c_overlapLog - def bounds(self) -> tuple[int, int]: - """Return the (lower, upper) int bounds of a compression parameter. - - Both the lower and upper bounds are inclusive. - """ + def bounds(self) -> tuple[int, int]: ... @final class DecompressionParameter(enum.IntEnum): - """Decompression parameters.""" - window_log_max = _zstd.ZSTD_d_windowLogMax - def bounds(self) -> tuple[int, int]: - """Return the (lower, upper) int bounds of a decompression parameter. - - Both the lower and upper bounds are inclusive. - """ + def bounds(self) -> tuple[int, int]: ... @final class Strategy(enum.IntEnum): - """Compression strategies, listed from fastest to strongest. - - Note that new strategies might be added in the future. - Only the order (from fast to strong) is guaranteed, - the numeric value might change. - """ - fast = _zstd.ZSTD_fast dfast = _zstd.ZSTD_dfast greedy = _zstd.ZSTD_greedy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi index 8ba96fd7e3692..e67b3d992f2f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi @@ -24,15 +24,6 @@ class _FileBinaryWrite(SupportsWrite[bytes], Protocol): def close(self) -> None: ... class ZstdFile(_streams.BaseStream): - """A file-like object providing transparent Zstandard (de)compression. - - A ZstdFile can act as a wrapper for an existing file object, or refer - directly to a named file on disk. - - ZstdFile provides a *binary* file interface. Data is read and returned as - bytes, and may only be written to objects that support the Buffer Protocol. - """ - FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME @@ -46,26 +37,7 @@ class ZstdFile(_streams.BaseStream): level: None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> None: - """Open a Zstandard compressed file in binary mode. - - *file* can be either an file-like object, or a file name to open. - - *mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for - creating exclusively, or 'a' for appending. These can equivalently be - given as 'rb', 'wb', 'xb' and 'ab' respectively. - - *level* is an optional int specifying the compression level to use, - or COMPRESSION_LEVEL_DEFAULT if not given. - - *options* is an optional dict for advanced compression parameters. - See CompressionParameter and DecompressionParameter for the possible - options. - - *zstd_dict* is an optional ZstdDict object, a pre-trained Zstandard - dictionary. See train_dict() to train ZstdDict on sample data. - """ - + ) -> None: ... @overload def __init__( self, @@ -77,86 +49,15 @@ class ZstdFile(_streams.BaseStream): options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, ) -> None: ... - def write(self, data: ReadableBuffer, /) -> int: - """Write a bytes-like object *data* to the file. - - Returns the number of uncompressed bytes written, which is - always the length of data in bytes. 
Note that due to buffering, - the file on disk may not reflect the data written until .flush() - or .close() is called. - """ - - def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: # type: ignore[override] - """Flush remaining data to the underlying stream. - - The mode argument can be FLUSH_BLOCK or FLUSH_FRAME. Abuse of this - method will reduce compression ratio, use it only when necessary. - - If the program is interrupted afterwards, all data can be recovered. - To ensure saving to disk, also need to use os.fsync(fd). - - This method does nothing in reading mode. - """ - - def read(self, size: int | None = -1) -> bytes: - """Read up to size uncompressed bytes from the file. - - If size is negative or omitted, read until EOF is reached. - Returns b'' if the file is already at EOF. - """ - - def read1(self, size: int | None = -1) -> bytes: - """Read up to size uncompressed bytes, while trying to avoid - making multiple reads from the underlying stream. Reads up to a - buffer's worth of data if size is negative. - - Returns b'' if the file is at EOF. - """ - - def readinto(self, b: WriteableBuffer) -> int: - """Read bytes into b. - - Returns the number of bytes read (0 for EOF). - """ - - def readinto1(self, b: WriteableBuffer) -> int: - """Read bytes into b, while trying to avoid making multiple reads - from the underlying stream. - - Returns the number of bytes read (0 for EOF). - """ - - def readline(self, size: int | None = -1) -> bytes: - """Read a line of uncompressed bytes from the file. - - The terminating newline (if present) is retained. If size is - non-negative, no more than size bytes will be read (in which - case the line may be incomplete). Returns b'' if already at EOF. - """ - - def seek(self, offset: int, whence: int = 0) -> int: - """Change the file position. - - The new position is specified by offset, relative to the - position indicated by whence. Possible values for whence are: - - 0: start of stream (default): offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive - - Returns the new file position. - - Note that seeking is emulated, so depending on the arguments, - this operation may be extremely slow. - """ - - def peek(self, size: int = -1) -> bytes: - """Return buffered data without advancing the file position. - - Always returns at least one byte of data, unless at EOF. - The exact number of bytes returned is unspecified. - """ - + def write(self, data: ReadableBuffer, /) -> int: ... + def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: ... # type: ignore[override] + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int | None = -1) -> bytes: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def peek(self, size: int = -1) -> bytes: ... @property def name(self) -> str | bytes: ... @property @@ -174,35 +75,7 @@ def open( encoding: str | None = None, errors: str | None = None, newline: str | None = None, -) -> ZstdFile: - """Open a Zstandard compressed file in binary or text mode. - - file can be either a file name (given as a str, bytes, or PathLike object), - in which case the named file is opened, or it can be an existing file object - to read from or write to. 
- - The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a', - 'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode. - - The level, options, and zstd_dict parameters specify the settings the same - as ZstdFile. - - When using read mode (decompression), the options parameter is a dict - representing advanced decompression options. The level parameter is not - supported in this case. When using write mode (compression), only one of - level, an int representing the compression level, or options, a dict - representing advanced compression options, may be passed. In both modes, - zstd_dict is a ZstdDict instance containing a trained Zstandard dictionary. - - For binary mode, this function is equivalent to the ZstdFile constructor: - ZstdFile(filename, mode, ...). In this case, the encoding, errors and - newline parameters must not be provided. - - For text mode, an ZstdFile object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error handling - behavior, and line ending(s). - """ - +) -> ZstdFile: ... @overload def open( file: StrOrBytesPath | _FileBinaryWrite, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi index 251a453c6ee66..ad4d20ea54453 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi @@ -1,5 +1,3 @@ -"""Execute computations asynchronously using threads or processes.""" - import sys from ._base import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi index a1ab245dd8432..be48a6e4289c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi @@ -18,155 +18,41 @@ FINISHED: Final = "FINISHED" _STATE_TO_DESCRIPTION_MAP: Final[dict[str, str]] LOGGER: Logger -class Error(Exception): - """Base class for all future-related exceptions.""" - -class CancelledError(Error): - """The Future was cancelled.""" +class Error(Exception): ... +class CancelledError(Error): ... if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Error): - """The operation exceeded the given deadline.""" - -class InvalidStateError(Error): - """The operation is not allowed in this state.""" + class TimeoutError(Error): ... -class BrokenExecutor(RuntimeError): - """ - Raised when a executor has become non-functional after a severe failure. - """ +class InvalidStateError(Error): ... +class BrokenExecutor(RuntimeError): ... _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _P = ParamSpec("_P") class Future(Generic[_T]): - """Represents the result of an asynchronous computation.""" - _condition: threading.Condition _state: str _result: _T | None _exception: BaseException | None _waiters: list[_Waiter] - def cancel(self) -> bool: - """Cancel the future if possible. - - Returns True if the future was cancelled, False otherwise. A future - cannot be cancelled if it is running or has already completed. 
- """ - - def cancelled(self) -> bool: - """Return True if the future was cancelled.""" - - def running(self) -> bool: - """Return True if the future is currently executing.""" - - def done(self) -> bool: - """Return True if the future was cancelled or finished executing.""" - - def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: - """Attaches a callable that will be called when the future finishes. - - Args: - fn: A callable that will be called with this future as its only - argument when the future completes or is cancelled. The callable - will always be called by a thread in the same process in which - it was added. If the future has already completed or been - cancelled then the callable will be called immediately. These - callables are called in the order that they were added. - """ - - def result(self, timeout: float | None = None) -> _T: - """Return the result of the call that the future represents. - - Args: - timeout: The number of seconds to wait for the result if the future - isn't done. If None, then there is no limit on the wait time. - - Returns: - The result of the call that the future represents. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - Exception: If the call raised then that exception will be raised. - """ - - def set_running_or_notify_cancel(self) -> bool: - """Mark the future as running or process any cancel notifications. - - Should only be used by Executor implementations and unit tests. - - If the future has been cancelled (cancel() was called and returned - True) then any threads waiting on the future completing (though calls - to as_completed() or wait()) are notified and False is returned. - - If the future was not cancelled then it is put in the running state - (future calls to running() will return True) and True is returned. - - This method should be called by Executor implementations before - executing the work associated with this future. If this method returns - False then the work should not be executed. - - Returns: - False if the Future was cancelled, True otherwise. - - Raises: - RuntimeError: if this method was already called or if set_result() - or set_exception() was called. - """ - - def set_result(self, result: _T) -> None: - """Sets the return value of work associated with the future. - - Should only be used by Executor implementations and unit tests. - """ - - def exception(self, timeout: float | None = None) -> BaseException | None: - """Return the exception raised by the call that the future represents. - - Args: - timeout: The number of seconds to wait for the exception if the - future isn't done. If None, then there is no limit on the wait - time. - - Returns: - The exception raised by the call that the future represents or None - if the call completed without raising. - - Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - """ - - def set_exception(self, exception: BaseException | None) -> None: - """Sets the result of the future as being the given exception. - - Should only be used by Executor implementations and unit tests. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def cancel(self) -> bool: ... + def cancelled(self) -> bool: ... + def running(self) -> bool: ... + def done(self) -> bool: ... 
+ def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... + def result(self, timeout: float | None = None) -> _T: ... + def set_running_or_notify_cancel(self) -> bool: ... + def set_result(self, result: _T) -> None: ... + def exception(self, timeout: float | None = None) -> BaseException | None: ... + def set_exception(self, exception: BaseException | None) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Executor: - """This is an abstract base class for concrete asynchronous executors.""" - - def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: - """Submits a callable to be executed with the given arguments. - - Schedules the callable to be executed as fn(*args, **kwargs) and returns - a Future instance representing the execution of the callable. - - Returns: - A Future representing the given call. - """ + def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... if sys.version_info >= (3, 14): def map( self, @@ -175,74 +61,13 @@ class Executor: timeout: float | None = None, chunksize: int = 1, buffersize: int | None = None, - ) -> Iterator[_T]: - """Returns an iterator equivalent to map(fn, iter). - - Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: The size of the chunks the iterable will be broken into - before being passed to a child process. This argument is only - used by ProcessPoolExecutor; it is ignored by - ThreadPoolExecutor. - buffersize: The number of submitted tasks whose results have not - yet been yielded. If the buffer is full, iteration over the - iterables pauses until a result is yielded from the buffer. - If None, all input elements are eagerly collected, and a task is - submitted for each. - - Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. - """ + ) -> Iterator[_T]: ... else: def map( self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 - ) -> Iterator[_T]: - """Returns an iterator equivalent to map(fn, iter). - - Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: The size of the chunks the iterable will be broken into - before being passed to a child process. This argument is only - used by ProcessPoolExecutor; it is ignored by - ThreadPoolExecutor. - - Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. - """ - - def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: - """Clean-up the resources associated with the Executor. - - It is safe to call this method several times. Otherwise, no other - methods can be called after this one. - - Args: - wait: If True then shutdown will not return until all running - futures have finished executing and the resources used by the - executor have been reclaimed. 
- cancel_futures: If True then shutdown will cancel all pending - futures. Futures that are completed or running will not be - cancelled. - """ + ) -> Iterator[_T]: ... + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -259,60 +84,17 @@ class _AsCompletedFuture(Protocol[_T_co]): # Not used by as_completed, but needed to propagate the generic type def result(self, timeout: float | None = None) -> _T_co: ... -def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: - """An iterator over the given futures that yields each as it completes. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - iterate over. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - - Returns: - An iterator that yields the given Futures as they complete (finished or - cancelled). If any given Futures are duplicated, they will be returned - once. - - Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - """ +def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): - """DoneAndNotDoneFutures(done, not_done)""" - done: set[Future[_T]] not_done: set[Future[_T]] -def wait(fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> DoneAndNotDoneFutures[_T]: - """Wait for the futures in the given sequence to complete. - - Args: - fs: The sequence of Futures (possibly created by different Executors) to - wait upon. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - return_when: Indicates when this function should return. The options - are: - - FIRST_COMPLETED - Return when any future finishes or is - cancelled. - FIRST_EXCEPTION - Return when any future finishes by raising an - exception. If no future raises an exception - then it is equivalent to ALL_COMPLETED. - ALL_COMPLETED - Return when all futures finish or are cancelled. - - Returns: - A named 2-tuple of sets. The first set, named 'done', contains the - futures that completed (is finished or cancelled) before the wait - completed. The second set, named 'not_done', contains uncompleted - futures. Duplicate futures given to *fs* are removed and will be - returned only once. - """ +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... class _Waiter: - """Provides the event that wait() and as_completed() block on.""" - event: threading.Event finished_futures: list[Future[Any]] def add_result(self, future: Future[Any]) -> None: ... @@ -320,24 +102,17 @@ class _Waiter: def add_cancelled(self, future: Future[Any]) -> None: ... class _AsCompletedWaiter(_Waiter): - """Used by as_completed().""" - lock: threading.Lock -class _FirstCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_COMPLETED).""" +class _FirstCompletedWaiter(_Waiter): ... class _AllCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" - num_pending_calls: int stop_on_exception: bool lock: threading.Lock def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... 
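For orientation, a small sketch of how the Future/Executor surface annotated above is typically used at runtime. It relies only on the documented concurrent.futures API (submit, wait, as_completed, FIRST_COMPLETED) and uses ThreadPoolExecutor, whose stubs follow later in this patch, as the concrete executor:

```python
# Sketch of the Future/Executor API annotated above; ThreadPoolExecutor is used
# here only as a convenient concrete Executor.
from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, as_completed, wait

def square(n: int) -> int:
    return n * n

with ThreadPoolExecutor(max_workers=4) as pool:
    futures = [pool.submit(square, n) for n in range(8)]

    # wait() returns the DoneAndNotDoneFutures named tuple stubbed above.
    done, not_done = wait(futures, timeout=5, return_when=FIRST_COMPLETED)

    # as_completed() yields each future as it finishes.
    results = sorted(f.result() for f in as_completed(futures))

print(results)  # [0, 1, 4, 9, 16, 25, 36, 49]
```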
class _AcquireFutures: - """A context manager that does an ordered acquire of Future conditions.""" - futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi index 20ff4dd679c37..e101022babcb6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi @@ -1,5 +1,3 @@ -"""Implements InterpreterPoolExecutor.""" - import sys from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor @@ -39,10 +37,7 @@ if sys.version_info >= (3, 14): def __del__(self) -> None: ... def run(self, task: _Task) -> None: ... # type: ignore[override] - class BrokenInterpreterPool(BrokenThreadPool): - """ - Raised when a worker thread in an InterpreterPoolExecutor failed initializing. - """ + class BrokenInterpreterPool(BrokenThreadPool): ... class InterpreterPoolExecutor(ThreadPoolExecutor): BROKEN: type[BrokenInterpreterPool] @@ -64,18 +59,7 @@ if sys.version_info >= (3, 14): thread_name_prefix: str = "", initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: - """Initializes a new InterpreterPoolExecutor instance. - - Args: - max_workers: The maximum number of interpreters that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - initializer: A callable or script used to initialize - each worker interpreter. - initargs: A tuple of arguments to pass to the initializer. - """ - + ) -> None: ... @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi index 0264ceba46f4b..071b3aba5d330 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi @@ -1,45 +1,3 @@ -"""Implements ProcessPoolExecutor. - -The following diagram and text describe the data-flow through the system: - -|======================= In-process =====================|== Out-of-process ==| - -+----------+ +----------+ +--------+ +-----------+ +---------+ -| | => | Work Ids | | | | Call Q | | Process | -| | +----------+ | | +-----------+ | Pool | -| | | ... | | | | ... | +---------+ -| | | 6 | => | | => | 5, call() | => | | -| | | 7 | | | | ... | | | -| Process | | ... | | Local | +-----------+ | Process | -| Pool | +----------+ | Worker | | #1..n | -| Executor | | Thread | | | -| | +----------- + | | +-----------+ | | -| | <=> | Work Items | <=> | | <= | Result Q | <= | | -| | +------------+ | | +-----------+ | | -| | | 6: call() | | | | ... | | | -| | | future | | | | 4, result | | | -| | | ... | | | | 3, except | | | -+----------+ +------------+ +--------+ +-----------+ +---------+ - -Executor.submit() called: -- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict -- adds the id of the _WorkItem to the "Work Ids" queue - -Local worker thread: -- reads work ids from the "Work Ids" queue and looks up the corresponding - WorkItem from the "Work Items" dict: if the work item has been cancelled then - it is simply removed from the dict, otherwise it is repackaged as a - _CallItem and put in the "Call Q". 
New _CallItems are put in the "Call Q" - until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because - calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). -- reads _ResultItems from "Result Q", updates the future stored in the - "Work Items" dict and deletes the dict entry - -Process #1..n: -- reads _CallItems from "Call Q", executes the calls, and puts the resulting - _ResultItems in "Result Q" -""" - import sys from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, MutableSequence from multiprocessing.connection import Connection @@ -113,8 +71,6 @@ class _CallItem: def __init__(self, work_id: int, fn: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... class _SafeQueue(Queue[Future[Any]]): - """Safe Queue set exception to the future object linked to a job""" - pending_work_items: dict[int, _WorkItem[Any]] if sys.version_info < (3, 12): shutdown_lock: Lock @@ -141,18 +97,8 @@ class _SafeQueue(Queue[Future[Any]]): def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... -def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: - """Iterates over zip()ed iterables in chunks.""" - -def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: - """Processes a chunk of an iterable passed to map. - - Runs the function passed to map() on a chunk of the - iterable passed to map. - - This function is run in a separate process. - - """ +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... +def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: ... if sys.version_info >= (3, 11): def _sendback_result( @@ -161,14 +107,12 @@ if sys.version_info >= (3, 11): result: Any | None = None, exception: Exception | None = None, exit_pid: int | None = None, - ) -> None: - """Safely send back the given result or exception""" + ) -> None: ... else: def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None - ) -> None: - """Safely send back the given result or exception""" + ) -> None: ... if sys.version_info >= (3, 11): def _process_worker( @@ -177,19 +121,7 @@ if sys.version_info >= (3, 11): initializer: Callable[[Unpack[_Ts]], object] | None, initargs: tuple[Unpack[_Ts]], max_tasks: int | None = None, - ) -> None: - """Evaluates calls from call_queue and places the results in result_queue. - - This worker is run in a separate process. - - Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. - initializer: A callable initializer, or None - initargs: A tuple of args for the initializer - """ + ) -> None: ... else: def _process_worker( @@ -197,32 +129,9 @@ else: result_queue: SimpleQueue[_ResultItem], initializer: Callable[[Unpack[_Ts]], object] | None, initargs: tuple[Unpack[_Ts]], - ) -> None: - """Evaluates calls from call_queue and places the results in result_queue. - - This worker is run in a separate process. - - Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. - initializer: A callable initializer, or None - initargs: A tuple of args for the initializer - """ + ) -> None: ... 
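The chunking helpers stubbed above (_get_chunks, _process_chunk) service the chunksize parameter of ProcessPoolExecutor.map(). A hedged sketch of the public side of that machinery; cube and init_worker are invented names for illustration, and the __main__ guard matters because workers re-import the main module under the spawn start method:

```python
# Illustrative sketch of ProcessPoolExecutor.map(), whose chunksize argument is
# serviced by the _get_chunks/_process_chunk helpers stubbed above.
# cube/init_worker are hypothetical; they must live at module level so that
# worker processes can import them.
import multiprocessing
from concurrent.futures import ProcessPoolExecutor

def cube(n: int) -> int:
    return n**3

def init_worker(tag: str) -> None:
    # Runs once in each worker process before it picks up tasks.
    print(f"worker ready: {tag}")

if __name__ == "__main__":
    ctx = multiprocessing.get_context("spawn")
    with ProcessPoolExecutor(max_workers=2, mp_context=ctx, initializer=init_worker, initargs=("demo",)) as pool:
        # chunksize batches inputs per _get_chunks(); each batch runs via _process_chunk().
        print(list(pool.map(cube, range(6), chunksize=2)))  # [0, 1, 8, 27, 64, 125]
```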
class _ExecutorManagerThread(Thread): - """Manages the communication between this process and the worker processes. - - The manager is run in a local thread. - - Args: - executor: A reference to the ProcessPoolExecutor that owns - this thread. A weakref will be own by the manager as well as - references to internal objects used to introspect the state of - the executor. - """ - thread_wakeup: _ThreadWakeup shutdown_lock: Lock executor_reference: ref[Any] @@ -247,18 +156,9 @@ _system_limits_checked: bool _system_limited: bool | None def _check_system_limits() -> None: ... -def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: - """ - Specialized implementation of itertools.chain.from_iterable. - Each item in *iterable* should be a list. This function is - careful not to keep references to yielded objects. - """ +def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: ... -class BrokenProcessPool(BrokenExecutor): - """ - Raised when a process in a ProcessPoolExecutor terminated abruptly - while a future was in the running state. - """ +class BrokenProcessPool(BrokenExecutor): ... class ProcessPoolExecutor(Executor): _mp_context: BaseContext | None @@ -286,26 +186,7 @@ class ProcessPoolExecutor(Executor): initargs: tuple[()] = (), *, max_tasks_per_child: int | None = None, - ) -> None: - """Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers created - using the multiprocessing.get_context('start method') API. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - max_tasks_per_child: The maximum number of tasks a worker process - can complete before it will exit and be replaced with a fresh - worker process. The default of None means worker process will - live as long as the executor. Requires a non-'fork' mp_context - start method. When given, we default to using 'spawn' if no - mp_context is supplied. - """ - + ) -> None: ... @overload def __init__( self, @@ -334,19 +215,7 @@ class ProcessPoolExecutor(Executor): mp_context: BaseContext | None = None, initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: - """Initializes a new ProcessPoolExecutor instance. - - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - """ - + ) -> None: ... @overload def __init__( self, @@ -369,22 +238,5 @@ class ProcessPoolExecutor(Executor): def _adjust_process_count(self) -> None: ... if sys.version_info >= (3, 14): - def kill_workers(self) -> None: - """Attempts to kill the executor's workers. - Iterates through all of the current worker processes and kills - each one that is still alive. - - After killing workers, the pool will be in a broken state - and no longer usable (for instance, new tasks should not be - submitted). 
- """ - - def terminate_workers(self) -> None: - """Attempts to terminate the executor's workers. - Iterates through all of the current worker processes and terminates - each one that is still alive. - - After terminating workers, the pool will be in a broken state - and no longer usable (for instance, new tasks should not be - submitted). - """ + def kill_workers(self) -> None: ... + def terminate_workers(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi index 8bf83dc8b2304..50a6a9c6f43ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi @@ -1,5 +1,3 @@ -"""Implements ThreadPoolExecutor.""" - import queue import sys from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet @@ -59,11 +57,7 @@ if sys.version_info >= (3, 14): task: _Task def __init__(self, future: Future[Any], task: _Task) -> None: ... def run(self, ctx: WorkerContext) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def _worker(executor_reference: ref[Any], ctx: WorkerContext, work_queue: queue.SimpleQueue[Any]) -> None: ... @@ -75,11 +69,7 @@ else: kwargs: Mapping[str, Any] def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... def run(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... def _worker( executor_reference: ref[Any], @@ -88,10 +78,7 @@ else: initargs: tuple[Unpack[_Ts]], ) -> None: ... -class BrokenThreadPool(BrokenExecutor): - """ - Raised when a worker thread in a ThreadPoolExecutor failed initializing. - """ +class BrokenThreadPool(BrokenExecutor): ... class ThreadPoolExecutor(Executor): if sys.version_info >= (3, 14): @@ -131,18 +118,7 @@ class ThreadPoolExecutor(Executor): thread_name_prefix: str = "", initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: - """Initializes a new ThreadPoolExecutor instance. - - Args: - max_workers: The maximum number of threads that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - initializer: A callable used to initialize worker threads. - initargs: A tuple of arguments to pass to the initializer. - ctxkwargs: Additional arguments to cls.prepare_context(). - """ - + ) -> None: ... 
@overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi index 3485bb69cd50a..3839e6bef09b6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi @@ -1,5 +1,3 @@ -"""Subinterpreters High Level Module.""" - import sys import threading import types @@ -40,40 +38,16 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < _P = ParamSpec("_P") class ExecutionFailed(InterpreterError): - """An unhandled exception happened during execution. - - This is raised from Interpreter.exec() and Interpreter.call(). - """ - excinfo: types.SimpleNamespace def __init__(self, excinfo: types.SimpleNamespace) -> None: ... - def create() -> Interpreter: - """Return a new (idle) Python interpreter.""" - - def list_all() -> list[Interpreter]: - """Return all existing interpreters.""" - - def get_current() -> Interpreter: - """Return the currently running interpreter.""" - - def get_main() -> Interpreter: - """Return the main interpreter.""" + def create() -> Interpreter: ... + def list_all() -> list[Interpreter]: ... + def get_current() -> Interpreter: ... + def get_main() -> Interpreter: ... class Interpreter: - """A single Python interpreter. - - Attributes: - - "id" - the unique process-global ID number for the interpreter - "whence" - indicates where the interpreter was created - - If the interpreter wasn't created by this module - then any method that modifies the interpreter will fail, - i.e. .close(), .prepare_main(), .exec(), and .call() - """ - def __new__(cls, id: int, /, _whence: _Whence | None = None, _ownsref: bool | None = None) -> Self: ... def __reduce__(self) -> tuple[type[Self], int]: ... def __hash__(self) -> int: ... @@ -84,59 +58,11 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < def whence( self, ) -> Literal["unknown", "runtime init", "legacy C-API", "C-API", "cross-interpreter C-API", "_interpreters module"]: ... - def is_running(self) -> bool: - """Return whether or not the identified interpreter is running.""" - - def close(self) -> None: - """Finalize and destroy the interpreter. - - Attempting to destroy the current interpreter results - in an InterpreterError. - """ - + def is_running(self) -> bool: ... + def close(self) -> None: ... def prepare_main( self, ns: _SharedDict | None = None, /, **kwargs: Any - ) -> None: # kwargs has same value restrictions as _SharedDict - """Bind the given values into the interpreter's __main__. - - The values must be shareable. - """ - - def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: - """Run the given source code in the interpreter. - - This is essentially the same as calling the builtin "exec" - with this interpreter, using the __dict__ of its __main__ - module as both globals and locals. - - There is no return value. - - If the code raises an unhandled exception then an ExecutionFailed - exception is raised, which summarizes the unhandled exception. - The actual exception is discarded because objects cannot be - shared between interpreters. - - This blocks the current Python thread until done. During - that time, the previous interpreter is allowed to run - in other threads. 
- """ - - def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: - """Call the object in the interpreter with given args/kwargs. - - Nearly all callables, args, kwargs, and return values are - supported. All "shareable" objects are supported, as are - "stateless" functions (meaning non-closures that do not use - any globals). This method will fall back to pickle. - - If the callable raises an exception then the error display - (including full traceback) is sent back between the interpreters - and an ExecutionFailed exception is raised, much like what - happens with Interpreter.exec(). - """ - - def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: - """Return a new thread that calls the object in the interpreter. - - The return value and any raised exception are discarded. - """ + ) -> None: ... # kwargs has same value restrictions as _SharedDict + def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: ... + def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi index 372ac39270544..7cf1ea34786ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi @@ -1,5 +1,3 @@ -"""Common code between queues and channels.""" - import sys from collections.abc import Callable from typing import Final, NewType @@ -8,29 +6,16 @@ from typing_extensions import Never, Self, TypeAlias if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python <3.13 from _interpqueues import _UnboundOp - class ItemInterpreterDestroyed(Exception): - """Raised when trying to get an item whose interpreter was destroyed.""" - + class ItemInterpreterDestroyed(Exception): ... # Actually a descriptor that behaves similarly to classmethod but prevents # access from instances. classonly = classmethod class UnboundItem: - """Represents a cross-interpreter item no longer bound to an interpreter. - - An item is unbound when the interpreter that added it to the - cross-interpreter container is destroyed. - """ - __slots__ = () def __new__(cls) -> Never: ... @classonly - def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: - """A non-data descriptor that makes a value only visible on the class. - - This is like the "classmethod" builtin, but does not show up on - instances of the class. It may be used as a decorator. - """ + def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: ... # Sentinel types and alias that don't exist at runtime. 
_UnboundErrorType = NewType("_UnboundErrorType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi index e134d97e217fc..bdf08d93d1e00 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi @@ -1,5 +1,3 @@ -"""Cross-interpreter Queues High Level Module.""" - import queue import sys from typing import Final, SupportsIndex @@ -25,39 +23,15 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < "list_all", ] - class QueueEmpty(QueueError, queue.Empty): - """Raised from get_nowait() when the queue is empty. - - It is also raised from get() if it times out. - """ - - class QueueFull(QueueError, queue.Full): - """Raised from put_nowait() when the queue is full. - - It is also raised from put() if it times out. - """ - - class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): - """Raised from get() and get_nowait().""" - + class QueueEmpty(QueueError, queue.Empty): ... + class QueueFull(QueueError, queue.Full): ... + class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): ... UNBOUND: Final[UnboundItem] - def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: - """Return a new cross-interpreter queue. - - The queue may be used to pass data safely between interpreters. - - "unbounditems" sets the default for Queue.put(); see that method for - supported values. The default value is UNBOUND, which replaces - the unbound item. - """ - - def list_all() -> list[Queue]: - """Return a list of all open queues.""" + def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: ... + def list_all() -> list[Queue]: ... class Queue: - """A cross-interpreter queue.""" - def __new__(cls, id: int, /) -> Self: ... def __del__(self) -> None: ... def __hash__(self) -> int: ... @@ -80,42 +54,7 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < *, unbounditems: _AnyUnbound | None = None, _delay: float = 0.01, - ) -> None: - """Add the object to the queue. - - If "block" is true, this blocks while the queue is full. - - For most objects, the object received through Queue.get() will - be a new one, equivalent to the original and not sharing any - actual underlying data. The notable exceptions include - cross-interpreter types (like Queue) and memoryview, where the - underlying data is actually shared. Furthermore, some types - can be sent through a queue more efficiently than others. This - group includes various immutable types like int, str, bytes, and - tuple (if the items are likewise efficiently shareable). See interpreters.is_shareable(). - - "unbounditems" controls the behavior of Queue.get() for the given - object if the current interpreter (calling put()) is later - destroyed. - - If "unbounditems" is None (the default) then it uses the - queue's default, set with create_queue(), - which is usually UNBOUND. - - If "unbounditems" is UNBOUND_ERROR then get() will raise an - ItemInterpreterDestroyed exception if the original interpreter - has been destroyed. This does not otherwise affect the queue; - the next call to put() will work like normal, returning the next - item in the queue. - - If "unbounditems" is UNBOUND_REMOVE then the item will be removed - from the queue as soon as the original interpreter is destroyed. 
- Be aware that this will introduce an imbalance between put() - and get() calls. - - If "unbounditems" is UNBOUND then it is returned by get() in place - of the unbound item. - """ + ) -> None: ... else: def put( self, @@ -128,21 +67,8 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < def put_nowait(self, obj: object, *, unbounditems: _AnyUnbound | None = None) -> None: ... if sys.version_info >= (3, 14): - def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: - """Return the next object from the queue. - - If "block" is true, this blocks while the queue is empty. - - If the next item's original interpreter has been destroyed - then the "next object" is determined by the value of the - "unbounditems" argument to put(). - """ + def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... else: def get(self, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... - def get_nowait(self) -> object: - """Return the next object from the channel. - - If the queue is empty then raise QueueEmpty. Otherwise this - is the same as get(). - """ + def get_nowait(self) -> object: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi index 18c687b76368f..1909d80e3d189 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi @@ -1,148 +1,3 @@ -"""Configuration file parser. - -A configuration file consists of sections, lead by a "[section]" header, -and followed by "name: value" entries, with continuations and such in -the style of RFC 822. - -Intrinsic defaults can be specified by passing them into the -ConfigParser constructor as a dictionary. - -class: - -ConfigParser -- responsible for parsing a list of - configuration files, and managing the parsed database. - - methods: - - __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, - delimiters=('=', ':'), comment_prefixes=('#', ';'), - inline_comment_prefixes=None, strict=True, - empty_lines_in_values=True, default_section='DEFAULT', - interpolation=, converters=, - allow_unnamed_section=False): - Create the parser. When `defaults` is given, it is initialized into the - dictionary or intrinsic defaults. The keys must be strings, the values - must be appropriate for %()s string interpolation. - - When `dict_type` is given, it will be used to create the dictionary - objects for the list of sections, for the options within a section, and - for the default values. - - When `delimiters` is given, it will be used as the set of substrings - that divide keys from values. - - When `comment_prefixes` is given, it will be used as the set of - substrings that prefix comments in empty lines. Comments can be - indented. - - When `inline_comment_prefixes` is given, it will be used as the set of - substrings that prefix comments in non-empty lines. - - When `strict` is True, the parser won't allow for any section or option - duplicates while reading from a single source (file, string or - dictionary). Default is True. - - When `empty_lines_in_values` is False (default: True), each empty line - marks the end of an option. Otherwise, internal empty lines of - a multiline option are kept as part of the value. - - When `allow_no_value` is True (default: False), options without - values are accepted; the value presented for these is None. 
- - When `default_section` is given, the name of the special section is - named accordingly. By default it is called ``"DEFAULT"`` but this can - be customized to point to any other valid section name. Its current - value can be retrieved using the ``parser_instance.default_section`` - attribute and may be modified at runtime. - - When `interpolation` is given, it should be an Interpolation subclass - instance. It will be used as the handler for option value - pre-processing when using getters. RawConfigParser objects don't do - any sort of interpolation, whereas ConfigParser uses an instance of - BasicInterpolation. The library also provides a ``zc.buildout`` - inspired ExtendedInterpolation implementation. - - When `converters` is given, it should be a dictionary where each key - represents the name of a type converter and each value is a callable - implementing the conversion from string to the desired datatype. Every - converter gets its corresponding get*() method on the parser object and - section proxies. - - When `allow_unnamed_section` is True (default: False), options - without section are accepted: the section for these is - ``configparser.UNNAMED_SECTION``. - - sections() - Return all the configuration section names, sans DEFAULT. - - has_section(section) - Return whether the given section exists. - - has_option(section, option) - Return whether the given option exists in the given section. - - options(section) - Return list of configuration options for the named section. - - read(filenames, encoding=None) - Read and parse the iterable of named configuration files, given by - name. A single filename is also allowed. Non-existing files - are ignored. Return list of successfully read files. - - read_file(f, filename=None) - Read and parse one configuration file, given as a file object. - The filename defaults to f.name; it is only used in error - messages (if f has no `name` attribute, the string `` is used). - - read_string(string) - Read configuration from a given string. - - read_dict(dictionary) - Read configuration from a dictionary. Keys are section names, - values are dictionaries with keys and values that should be present - in the section. If the used dictionary type preserves order, sections - and their keys will be added in order. Values are automatically - converted to strings. - - get(section, option, raw=False, vars=None, fallback=_UNSET) - Return a string value for the named option. All % interpolations are - expanded in the return values, based on the defaults passed into the - constructor and the DEFAULT section. Additional substitutions may be - provided using the `vars` argument, which must be a dictionary whose - contents override any pre-existing defaults. If `option` is a key in - `vars`, the value from `vars` is used. - - getint(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to an integer. - - getfloat(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to a float. - - getboolean(section, options, raw=False, vars=None, fallback=_UNSET) - Like get(), but convert value to a boolean (currently case - insensitively defined as 0, false, no, off for False, and 1, true, - yes, on for True). Returns False or True. - - items(section=_UNSET, raw=False, vars=None) - If section is given, return a list of tuples with (name, value) for - each option in the section. Otherwise, return a list of tuples with - (section_name, section_proxy) for each section, including DEFAULTSECT. 
- - remove_section(section) - Remove the given file section and all its options. - - remove_option(section, option) - Remove the given option from the given section. - - set(section, option, value) - Set the given option. - - write(fp, space_around_delimiters=True) - Write the configuration state in .ini format. If - `space_around_delimiters` is True (the default), delimiters - between keys and values are surrounded by spaces. -""" - import sys from _typeshed import MaybeNone, StrOrBytesPath, SupportsWrite from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence @@ -273,48 +128,22 @@ DEFAULTSECT: Final = "DEFAULT" MAX_INTERPOLATION_DEPTH: Final = 10 class Interpolation: - """Dummy interpolation that passes the value through with no changes.""" - def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, defaults: _Section) -> str: ... def before_set(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... def before_read(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... def before_write(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... -class BasicInterpolation(Interpolation): - """Interpolation as implemented in the classic ConfigParser. - - The option values can contain format strings which refer to other values in - the same section, or values in the special default section. - - For example: - - something: %(dir)s/whatever - - would resolve the "%(dir)s" to the value of dir. All reference - expansions are done late, on demand. If a user needs to use a bare % in - a configuration file, she can escape it by writing %%. Other % usage - is considered a user error and raises `InterpolationSyntaxError`. - """ - -class ExtendedInterpolation(Interpolation): - """Advanced variant of interpolation, supports the syntax used by - `zc.buildout`. Enables interpolation between sections. - """ +class BasicInterpolation(Interpolation): ... +class ExtendedInterpolation(Interpolation): ... if sys.version_info < (3, 13): @deprecated( "Deprecated since Python 3.2; removed in Python 3.13. Use `BasicInterpolation` or `ExtendedInterpolation` instead." ) class LegacyInterpolation(Interpolation): - """Deprecated interpolation used in old versions of ConfigParser. - Use BasicInterpolation or ExtendedInterpolation instead. - """ - def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): - """ConfigParser that does not do interpolation.""" - _SECT_TMPL: ClassVar[str] # undocumented _OPT_TMPL: ClassVar[str] # undocumented _OPT_NV_TMPL: ClassVar[str] # undocumented @@ -435,73 +264,18 @@ class RawConfigParser(_Parser): def __iter__(self) -> Iterator[str]: ... def __contains__(self, key: object) -> bool: ... def defaults(self) -> _Section: ... - def sections(self) -> _SectionNameList: - """Return a list of section names, excluding [DEFAULT]""" - - def add_section(self, section: _SectionName) -> None: - """Create a new section in the configuration. - - Raise DuplicateSectionError if a section by the specified name - already exists. Raise ValueError if name is DEFAULT. - """ - - def has_section(self, section: _SectionName) -> bool: - """Indicate whether the named section is present in the configuration. - - The DEFAULT section is not acknowledged. 
- """ - - def options(self, section: _SectionName) -> list[str]: - """Return a list of option names for the given section name.""" - - def has_option(self, section: _SectionName, option: str) -> bool: - """Check for the existence of a given option in a given section. - If the specified `section` is None or an empty string, DEFAULT is - assumed. If the specified `section` does not exist, returns False. - """ - - def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: - """Read and parse a filename or an iterable of filenames. - - Files that cannot be opened are silently ignored; this is - designed so that you can specify an iterable of potential - configuration file locations (e.g. current directory, user's - home directory, systemwide directory), and all existing - configuration files in the iterable will be read. A single - filename may also be given. - - Return list of successfully read files. - """ - - def read_file(self, f: Iterable[str], source: str | None = None) -> None: - """Like read() but the argument must be a file-like object. - - The `f` argument must be iterable, returning one line at a time. - Optional second argument is the `source` specifying the name of the - file being read. If not given, it is taken from f.name. If `f` has no - `name` attribute, `` is used. - """ - - def read_string(self, string: str, source: str = "") -> None: - """Read configuration from a given string.""" - - def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: - """Read configuration from a dictionary. - - Keys are section names, values are dictionaries with keys and values - that should be present in the section. If the used dictionary type - preserves order, sections and their keys will be added in order. - - All types held in the dictionary are converted to strings during - reading, including section names, option names and keys. - - Optional second argument is the `source` specifying the name of the - dictionary being read. - """ + def sections(self) -> _SectionNameList: ... + def add_section(self, section: _SectionName) -> None: ... + def has_section(self, section: _SectionName) -> bool: ... + def options(self, section: _SectionName) -> list[str]: ... + def has_option(self, section: _SectionName, option: str) -> bool: ... + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... + def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... + def read_string(self, string: str, source: str = "") -> None: ... + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `parser.read_file()` instead.") - def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: - """Deprecated, use read_file instead.""" + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload @@ -534,85 +308,27 @@ class RawConfigParser(_Parser): ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: - """Get an option value for a given section. 
- - If `vars` is provided, it must be a dictionary. The option is looked up - in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. - If the key is not found and `fallback` is provided, it is used as - a fallback value. `None` can be provided as a `fallback` value. - - If interpolation is enabled and the optional argument `raw` is False, - all interpolations are expanded in the return values. - - Arguments `raw`, `vars`, and `fallback` are keyword only. - - The section DEFAULT is special. - """ - + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: ... @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T ) -> str | _T | MaybeNone: ... @overload - def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: - """Return a list of (name, value) tuples for each option in a section. - - All % interpolations are expanded in the return values, based on the - defaults passed into the constructor, unless the optional argument - `raw` is true. Additional substitutions may be provided using the - `vars` argument, which must be a dictionary whose contents overrides - any pre-existing defaults. - - The section DEFAULT is special. - """ - + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... @overload def items(self, section: _SectionName, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... - def set(self, section: _SectionName, option: str, value: str | None = None) -> None: - """Set an option.""" - - def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: - """Write an .ini-format representation of the configuration state. - - If `space_around_delimiters` is True (the default), delimiters - between keys and values are surrounded by spaces. - - Please note that comments in the original configuration file are not - preserved when writing the configuration back. - """ - - def remove_option(self, section: _SectionName, option: str) -> bool: - """Remove an option.""" - - def remove_section(self, section: _SectionName) -> bool: - """Remove a file section.""" - + def set(self, section: _SectionName, option: str, value: str | None = None) -> None: ... + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... + def remove_option(self, section: _SectionName, option: str) -> bool: ... + def remove_section(self, section: _SectionName) -> bool: ... def optionxform(self, optionstr: str) -> str: ... @property def converters(self) -> ConverterMapping: ... class ConfigParser(RawConfigParser): - """ConfigParser implementing interpolation.""" - # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: - """Get an option value for a given section. - - If `vars` is provided, it must be a dictionary. The option is looked up - in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. - If the key is not found and `fallback` is provided, it is used as - a fallback value. `None` can be provided as a `fallback` value. - - If interpolation is enabled and the optional argument `raw` is False, - all interpolations are expanded in the return values. - - Arguments `raw`, `vars`, and `fallback` are keyword only. 
- - The section DEFAULT is special. - """ - + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T @@ -620,15 +336,10 @@ class ConfigParser(RawConfigParser): if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `ConfigParser` instead.") - class SafeConfigParser(ConfigParser): - """ConfigParser alias for backwards compatibility purposes.""" + class SafeConfigParser(ConfigParser): ... class SectionProxy(MutableMapping[str, str]): - """A proxy for a single section from a parser.""" - - def __init__(self, parser: RawConfigParser, name: str) -> None: - """Creates a view on a section of the specified `name` in `parser`.""" - + def __init__(self, parser: RawConfigParser, name: str) -> None: ... def __getitem__(self, key: str) -> str: ... def __setitem__(self, key: str, value: str) -> None: ... def __delitem__(self, key: str) -> None: ... @@ -650,14 +361,7 @@ class SectionProxy(MutableMapping[str, str]): vars: _Section | None = None, _impl: Any | None = None, **kwargs: Any, # passed to the underlying parser's get() method - ) -> str | None: - """Get an option value. - - Unless `fallback` is provided, `None` will be returned if the option - is not found. - - """ - + ) -> str | None: ... @overload def get( self, @@ -687,13 +391,6 @@ class SectionProxy(MutableMapping[str, str]): def __getattr__(self, key: str) -> Callable[..., Any]: ... class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): - """Enables reuse of get*() methods between the parser and section proxies. - - If a parser class implements a getter directly, the value for the given - key will be ``None``. The presence of the converter name here enables - section proxies to find and use the implementation on the parser class. - """ - GETTERCRE: ClassVar[Pattern[Any]] def __init__(self, parser: RawConfigParser) -> None: ... def __getitem__(self, key: str) -> _ConverterCallback: ... @@ -703,37 +400,20 @@ class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): def __len__(self) -> int: ... class Error(Exception): - """Base class for ConfigParser exceptions.""" - message: str def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): - """Raised when no section matches a requested option.""" - section: _SectionName def __init__(self, section: _SectionName) -> None: ... class DuplicateSectionError(Error): - """Raised when a section is repeated in an input source. - - Possible repetitions that raise this exception are: multiple creation - using the API or in strict parsers when a section is found more than once - in a single input file, string or dictionary. - """ - section: _SectionName source: str | None lineno: int | None def __init__(self, section: _SectionName, source: str | None = None, lineno: int | None = None) -> None: ... class DuplicateOptionError(Error): - """Raised by strict parsers when an option is repeated in an input source. - - Current implementation raises this exception only when an option is found - more than once in a single file, string or dictionary. - """ - section: _SectionName option: str source: str | None @@ -741,40 +421,25 @@ class DuplicateOptionError(Error): def __init__(self, section: _SectionName, option: str, source: str | None = None, lineno: int | None = None) -> None: ... 
class NoOptionError(Error): - """A requested option was not found.""" - section: _SectionName option: str def __init__(self, option: str, section: _SectionName) -> None: ... class InterpolationError(Error): - """Base class for interpolation-related exceptions.""" - section: _SectionName option: str def __init__(self, option: str, section: _SectionName, msg: str) -> None: ... class InterpolationDepthError(InterpolationError): - """Raised when substitutions are nested too deeply.""" - def __init__(self, option: str, section: _SectionName, rawval: object) -> None: ... class InterpolationMissingOptionError(InterpolationError): - """A string substitution required a setting which was not available.""" - reference: str def __init__(self, option: str, section: _SectionName, rawval: object, reference: str) -> None: ... -class InterpolationSyntaxError(InterpolationError): - """Raised when the source text contains invalid syntax. - - Current implementation raises this exception when the source text into - which substitutions are made does not conform to the required syntax. - """ +class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): - """Raised when a configuration file does not follow legal syntax.""" - source: str errors: list[tuple[int, str]] if sys.version_info >= (3, 13): @@ -797,39 +462,25 @@ class ParsingError(Error): if sys.version_info < (3, 12): @property @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") - def filename(self) -> str: - """Deprecated, use `source'.""" - + def filename(self) -> str: ... @filename.setter @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") def filename(self, value: str) -> None: ... class MissingSectionHeaderError(ParsingError): - """Raised when a key-value pair is found before any section header.""" - lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... if sys.version_info >= (3, 13): class MultilineContinuationError(ParsingError): - """Raised when a key without value is followed by continuation line""" - lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... if sys.version_info >= (3, 14): class UnnamedSectionDisabledError(Error): - """Raised when an attempt to use UNNAMED_SECTION is made with the - feature disabled. - """ - msg: Final = "Support for UNNAMED_SECTION is disabled." def __init__(self) -> None: ... - class InvalidWriteError(Error): - """Raised when attempting to write data that the parser would read back differently. - ex: writing a key which begins with the section header pattern would read back as a - new section - """ + class InvalidWriteError(Error): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi index 2b05511c33c9f..383a1b7f334b4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi @@ -1,5 +1,3 @@ -"""Utilities for with-statement contexts. See PEP 343.""" - import abc import sys from _typeshed import FileDescriptorOrPath, Unused @@ -49,54 +47,30 @@ _CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any, Any] | _ExitFunc) # allowlist for use as a Protocol. 
@runtime_checkable class AbstractContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """An abstract base class for context managers.""" - __slots__ = () - def __enter__(self) -> _T_co: - """Return `self` upon entering the runtime context.""" - + def __enter__(self) -> _T_co: ... @abstractmethod def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> _ExitT_co: - """Raise any exception triggered within the runtime context.""" + ) -> _ExitT_co: ... # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @runtime_checkable class AbstractAsyncContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """An abstract base class for asynchronous context managers.""" - __slots__ = () - async def __aenter__(self) -> _T_co: - """Return `self` upon entering the runtime context.""" - + async def __aenter__(self) -> _T_co: ... @abstractmethod async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> _ExitT_co: - """Raise any exception triggered within the runtime context.""" + ) -> _ExitT_co: ... class ContextDecorator: - """A base class or mixin that enables context managers to work as decorators.""" - - def _recreate_cm(self) -> Self: - """Return a recreated instance of self. - - Allows an otherwise one-shot context manager like - _GeneratorContextManager to support use as - a decorator via implicit recreation. - - This is a private interface just for _GeneratorContextManager. - See issue #11647 for details. - """ - + def _recreate_cm(self) -> Self: ... def __call__(self, func: _F) -> _F: ... class _GeneratorContextManagerBase(Generic[_G_co]): - """Shared functionality for @contextmanager and @asynccontextmanager.""" - # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. see #6676 def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... gen: _G_co @@ -109,49 +83,17 @@ class _GeneratorContextManager( AbstractContextManager[_T_co, bool | None], ContextDecorator, ): - """Helper for @contextmanager decorator.""" - def __exit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... -def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: - """@contextmanager decorator. - - Typical usage: - - @contextmanager - def some_generator(): - - try: - yield - finally: - - - This makes this: - - with some_generator() as : - - - equivalent to this: - - - try: - = - - finally: - - """ +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... if sys.version_info >= (3, 10): _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) class AsyncContextDecorator: - """A base class or mixin that enables async context managers to work as decorators.""" - - def _recreate_cm(self) -> Self: - """Return a recreated instance of self.""" - + def _recreate_cm(self) -> Self: ... def __call__(self, func: _AF) -> _AF: ... 
class _AsyncGeneratorContextManager( @@ -159,8 +101,6 @@ if sys.version_info >= (3, 10): AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator, ): - """Helper for @asynccontextmanager decorator.""" - async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... @@ -169,40 +109,11 @@ else: class _AsyncGeneratorContextManager( _GeneratorContextManagerBase[AsyncGenerator[_T_co, _SendT_contra]], AbstractAsyncContextManager[_T_co, bool | None] ): - """Helper for @asynccontextmanager decorator.""" - async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... -def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: - """@asynccontextmanager decorator. - - Typical usage: - - @asynccontextmanager - async def some_async_generator(): - - try: - yield - finally: - - - This makes this: - - async with some_async_generator() as : - - - equivalent to this: - - - try: - = - - finally: - - """ - +def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... @type_check_only class _SupportsClose(Protocol): def close(self) -> object: ... @@ -210,23 +121,6 @@ class _SupportsClose(Protocol): _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) class closing(AbstractContextManager[_SupportsCloseT, None]): - """Context to automatically close something at the end of a block. - - Code like this: - - with closing(.open()) as f: - - - is equivalent to this: - - f = .open() - try: - - finally: - f.close() - - """ - def __init__(self, thing: _SupportsCloseT) -> None: ... def __exit__(self, *exc_info: Unused) -> None: ... @@ -238,38 +132,10 @@ if sys.version_info >= (3, 10): _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) class aclosing(AbstractAsyncContextManager[_SupportsAcloseT, None]): - """Async context manager for safely finalizing an asynchronously cleaned-up - resource such as an async generator, calling its ``aclose()`` method. - - Code like this: - - async with aclosing(.fetch()) as agen: - - - is equivalent to this: - - agen = .fetch() - try: - - finally: - await agen.aclose() - - """ - def __init__(self, thing: _SupportsAcloseT) -> None: ... async def __aexit__(self, *exc_info: Unused) -> None: ... class suppress(AbstractContextManager[None, bool]): - """Context manager to suppress specified exceptions - - After the exception is suppressed, execution proceeds with the next - statement following the with statement. - - with suppress(FileNotFoundError): - os.remove(somefile) - # Execution still resumes here if the file was already removed - """ - def __init__(self, *exceptions: type[BaseException]) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None @@ -281,65 +147,19 @@ class _RedirectStream(AbstractContextManager[_T_io, None]): self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> None: ... -class redirect_stdout(_RedirectStream[_T_io]): - """Context manager for temporarily redirecting stdout to another file. 
- - # How to send help() to stderr - with redirect_stdout(sys.stderr): - help(dir) - - # How to write help() to a file - with open('help.txt', 'w') as f: - with redirect_stdout(f): - help(pow) - """ - -class redirect_stderr(_RedirectStream[_T_io]): - """Context manager for temporarily redirecting stderr to another file.""" +class redirect_stdout(_RedirectStream[_T_io]): ... +class redirect_stderr(_RedirectStream[_T_io]): ... class _BaseExitStack(Generic[_ExitT_co]): - """A base class for ExitStack and AsyncExitStack.""" - - def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: - """Enters the supplied context manager. - - If successful, also pushes its __exit__ method as a callback and - returns the result of the __enter__ method. - """ - - def push(self, exit: _CM_EF) -> _CM_EF: - """Registers a callback with the standard __exit__ method signature. - - Can suppress exceptions the same way __exit__ method can. - Also accepts any object with an __exit__ method (registering a call - to the method instead of the object itself). - """ - - def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: - """Registers an arbitrary callback and arguments. - - Cannot suppress exceptions. - """ - - def pop_all(self) -> Self: - """Preserve the context stack by transferring it to a new instance.""" + def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def pop_all(self) -> Self: ... # In reality this is a subclass of `AbstractContextManager`; # see #7961 for why we don't do that in the stub class ExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): - """Context manager for dynamic management of a stack of exit callbacks. - - For example: - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception. - """ - - def close(self) -> None: - """Immediately unwind the context stack.""" - + def close(self) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -353,45 +173,12 @@ _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any, Any] | _Exit # In reality this is a subclass of `AbstractAsyncContextManager`; # see #7961 for why we don't do that in the stub class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): - """Async context manager for dynamic management of a stack of exit - callbacks. - - For example: - async with AsyncExitStack() as stack: - connections = [await stack.enter_async_context(get_connection()) - for i in range(5)] - # All opened connections will automatically be released at the - # end of the async with statement, even if attempts to open a - # connection later in the list raise an exception. - """ - - async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: - """Enters the supplied async context manager. - - If successful, also pushes its __aexit__ method as a callback and - returns the result of the __aenter__ method. - """ - - def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: - """Registers a coroutine function with the standard __aexit__ method - signature. 
- - Can suppress exceptions the same way __aexit__ method can. - Also accepts any object with an __aexit__ method (registering a call - to the method instead of the object itself). - """ - + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: ... + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... def push_async_callback( self, callback: Callable[_P, Awaitable[_T]], /, *args: _P.args, **kwds: _P.kwargs - ) -> Callable[_P, Awaitable[_T]]: - """Registers an arbitrary coroutine function and arguments. - - Cannot suppress exceptions. - """ - - async def aclose(self) -> None: - """Immediately unwind the context stack.""" - + ) -> Callable[_P, Awaitable[_T]]: ... + async def aclose(self) -> None: ... async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -399,16 +186,6 @@ class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T, None], AbstractAsyncContextManager[_T, None]): - """Context manager that does no additional processing. - - Used as a stand-in for a normal context manager, when a particular - block of code is only sometimes used with a normal context manager: - - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - """ - enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @@ -421,16 +198,6 @@ if sys.version_info >= (3, 10): else: class nullcontext(AbstractContextManager[_T, None]): - """Context manager that does no additional processing. - - Used as a stand-in for a normal context manager, when a particular - block of code is only sometimes used with a normal context manager: - - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - """ - enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @@ -443,8 +210,6 @@ if sys.version_info >= (3, 11): _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None, None], Generic[_T_fd_or_any_path]): - """Non thread-safe context manager to change the current working directory.""" - path: _T_fd_or_any_path def __init__(self, path: _T_fd_or_any_path) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi index 2f464f1e1cf33..373899ea2635f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi @@ -1,54 +1,3 @@ -"""Generic (shallow and deep) copying operations. - -Interface summary: - - import copy - - x = copy.copy(y) # make a shallow copy of y - x = copy.deepcopy(y) # make a deep copy of y - x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` - -For module specific errors, copy.Error is raised. - -The difference between shallow and deep copying is only relevant for -compound objects (objects that contain other objects, like lists or -class instances). - -- A shallow copy constructs a new compound object and then (to the - extent possible) inserts *the same objects* into it that the - original contains. 
- -- A deep copy constructs a new compound object and then, recursively, - inserts *copies* into it of the objects found in the original. - -Two problems often exist with deep copy operations that don't exist -with shallow copy operations: - - a) recursive objects (compound objects that, directly or indirectly, - contain a reference to themselves) may cause a recursive loop - - b) because deep copy copies *everything* it may copy too much, e.g. - administrative data structures that should be shared even between - copies - -Python's deep copy operation avoids these problems by: - - a) keeping a table of objects already copied during the current - copying pass - - b) letting user-defined classes override the copying operation or the - set of components copied - -This version does not copy types like module, class, function, method, -nor stack trace, stack frame, nor file, socket, window, nor any -similar types. - -Classes can use the same interfaces to control copying that they use -to control pickling: they can define methods called __getinitargs__(), -__getstate__() and __setstate__(). See the documentation for module -"pickle" for information on these methods. -""" - import sys from typing import Any, Protocol, TypeVar, type_check_only @@ -66,27 +15,13 @@ class _SupportsReplace(Protocol[_RT_co]): PyStringMap: Any # Note: memo and _nil are internal kwargs. -def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: - """Deep copy operation on arbitrary Python objects. - - See the module's __doc__ string for more info. - """ - -def copy(x: _T) -> _T: - """Shallow copy operation on arbitrary Python objects. - - See the module's __doc__ string for more info. - """ +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ... +def copy(x: _T) -> _T: ... if sys.version_info >= (3, 13): __all__ += ["replace"] # The types accepted by `**changes` match those of `obj.__replace__`. - def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: - """Return a new object replacing specified fields with new values. - - This is especially useful for immutable objects, like named tuples or - frozen dataclasses. - """ + def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: ... class Error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi index 003a7a67edbe6..8f7fd957fc526 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi @@ -1,9 +1,3 @@ -"""Helper to provide extensibility for pickle. - -This is only useful to add pickle support for extension types defined in -C, not for instances of user-defined classes. -""" - from collections.abc import Callable, Hashable from typing import Any, SupportsInt, TypeVar from typing_extensions import TypeAlias @@ -19,12 +13,8 @@ def pickle( constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, ) -> None: ... def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... -def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: - """Register an extension code.""" - -def remove_extension(module: Hashable, name: Hashable, code: int) -> None: - """Unregister an extension code. For testing only.""" - +def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... +def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... def clear_extension_cache() -> None: ... 
_DispatchTableType: TypeAlias = dict[type, Callable[[Any], str | _Reduce[Any]]] # imported by multiprocessing.reduction diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi index df7d315f06982..f926321969897 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi @@ -1,5 +1,3 @@ -"""Wrapper to the POSIX crypt library call and associated functionality.""" - import sys from typing import Final, NamedTuple, type_check_only from typing_extensions import disjoint_base @@ -13,17 +11,10 @@ if sys.platform != "win32": total_size: int if sys.version_info >= (3, 12): - class _Method(_MethodBase): - """Class representing a salt method per the Modular Crypt Format or the - legacy 2-character crypt method. - """ - + class _Method(_MethodBase): ... else: @disjoint_base - class _Method(_MethodBase): - """Class representing a salt method per the Modular Crypt Format or the - legacy 2-character crypt method. - """ + class _Method(_MethodBase): ... METHOD_CRYPT: Final[_Method] METHOD_MD5: Final[_Method] @@ -31,20 +22,5 @@ if sys.platform != "win32": METHOD_SHA512: Final[_Method] METHOD_BLOWFISH: Final[_Method] methods: list[_Method] - def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: - """Generate a salt for the specified method. - - If not specified, the strongest available method will be used. - - """ - - def crypt(word: str, salt: str | _Method | None = None) -> str: - """Return a string representing the one-way hash of a password, with a salt - prepended. - - If ``salt`` is not specified or is ``None``, the strongest - available method will be selected and a salt generated. Otherwise, - ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as - returned by ``crypt.mksalt()``. - - """ + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi index 2f4cd6b12417d..2c8e7109cdfc3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi @@ -1,67 +1,3 @@ -""" -CSV parsing and writing. - -This module provides classes that assist in the reading and writing -of Comma Separated Value (CSV) files, and implements the interface -described by PEP 305. Although many CSV files are simple to parse, -the format is not formally defined by a stable specification and -is subtle enough that parsing lines of a CSV file with something -like line.split(",") is bound to fail. The module supports three -basic APIs: reading, writing, and registration of dialects. - - -DIALECT REGISTRATION: - -Readers and writers support a dialect argument, which is a convenient -handle on a group of settings. When the dialect argument is a string, -it identifies one of the dialects previously registered with the module. -If it is a class or instance, the attributes of the argument are used as -the settings for the reader or writer: - - class excel: - delimiter = ',' - quotechar = '"' - escapechar = None - doublequote = True - skipinitialspace = False - lineterminator = '\\r\\n' - quoting = QUOTE_MINIMAL - -SETTINGS: - - * quotechar - specifies a one-character string to use as the - quoting character. It defaults to '"'. - * delimiter - specifies a one-character string to use as the - field separator. 
It defaults to ','. - * skipinitialspace - specifies how to interpret spaces which - immediately follow a delimiter. It defaults to False, which - means that spaces immediately following a delimiter is part - of the following field. - * lineterminator - specifies the character sequence which should - terminate rows. - * quoting - controls when quotes should be generated by the writer. - It can take on any of the following module constants: - - csv.QUOTE_MINIMAL means only when required, for example, when a - field contains either the quotechar or the delimiter - csv.QUOTE_ALL means that quotes are always placed around fields. - csv.QUOTE_NONNUMERIC means that quotes are always placed around - fields which do not parse as integers or floating-point - numbers. - csv.QUOTE_STRINGS means that quotes are always placed around - fields which are strings. Note that the Python value None - is not a string. - csv.QUOTE_NOTNULL means that quotes are only placed around fields - that are not the Python value None. - csv.QUOTE_NONE means that quotes are never placed around fields. - * escapechar - specifies a one-character string used to escape - the delimiter when quoting is set to QUOTE_NONE. - * doublequote - controls the handling of quotes inside fields. When - True, two consecutive quotes are interpreted as one during read, - and when writing, each quote character embedded in the data is - written as two quotes -""" - import sys from _csv import ( QUOTE_ALL as QUOTE_ALL, @@ -123,14 +59,6 @@ if sys.version_info < (3, 13): _T = TypeVar("_T") class Dialect: - """Describe a CSV dialect. - - This must be subclassed (see csv.excel). Valid attributes are: - delimiter, quotechar, escapechar, doublequote, skipinitialspace, - lineterminator, quoting. - - """ - delimiter: str quotechar: str | None escapechar: str | None @@ -141,14 +69,9 @@ class Dialect: strict: bool def __init__(self) -> None: ... -class excel(Dialect): - """Describe the usual properties of Excel-generated CSV files.""" - -class excel_tab(excel): - """Describe the usual properties of Excel-generated TAB-delimited files.""" - -class unix_dialect(Dialect): - """Describe the usual properties of Unix-generated CSV files.""" +class excel(Dialect): ... +class excel_tab(excel): ... +class unix_dialect(Dialect): ... class DictReader(Generic[_T]): fieldnames: Sequence[_T] | None @@ -196,11 +119,7 @@ class DictReader(Generic[_T]): def __iter__(self) -> Self: ... def __next__(self) -> dict[_T | Any, str | Any]: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class DictWriter(Generic[_T]): fieldnames: Collection[_T] @@ -228,22 +147,9 @@ class DictWriter(Generic[_T]): def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Sniffer: - """ - "Sniffs" the format of a CSV file (i.e. delimiter, quotechar) - Returns a Dialect object. 
- """ - preferred: list[str] - def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: - """ - Returns a dialect (or None) corresponding to the sample - """ - + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... def has_header(self, sample: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi index 9d488e29da7c7..19bd261c67e06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi @@ -1,5 +1,3 @@ -"""create and manipulate C data types in Python""" - import sys from _ctypes import ( RTLD_GLOBAL as RTLD_GLOBAL, @@ -25,7 +23,7 @@ from _ctypes import ( set_errno as set_errno, sizeof as sizeof, ) -from _typeshed import StrPath +from _typeshed import StrPath, SupportsBool, SupportsLen from ctypes._endian import BigEndianStructure as BigEndianStructure, LittleEndianStructure as LittleEndianStructure from types import GenericAlias from typing import Any, ClassVar, Final, Generic, Literal, TypeVar, overload, type_check_only @@ -47,24 +45,12 @@ _DLLT = TypeVar("_DLLT", bound=CDLL) if sys.version_info >= (3, 14): @overload @deprecated("ctypes.POINTER with string") - def POINTER(cls: str) -> type[Any]: - """Create and return a new ctypes pointer type. - - Pointer types are cached and reused internally, - so calling this function repeatedly is cheap. - """ - + def POINTER(cls: str) -> type[Any]: ... @overload def POINTER(cls: None) -> type[c_void_p]: ... @overload def POINTER(cls: type[_CT]) -> type[_Pointer[_CT]]: ... - def pointer(obj: _CT) -> _Pointer[_CT]: - """Create a new pointer instance, pointing to 'obj'. - - The returned object is of the type POINTER(type(obj)). Note that if you - just want to pass a pointer to an object to a foreign function call, you - should use byref(obj) which is much faster. - """ + def pointer(obj: _CT) -> _Pointer[_CT]: ... else: from _ctypes import POINTER as POINTER, pointer as pointer @@ -91,20 +77,6 @@ else: _NameTypes: TypeAlias = str | None class CDLL: - """An instance of this class represents a loaded dll/shared - library, exporting functions using the standard C calling - convention (named 'cdecl' on Windows). - - The exported functions can be accessed as attributes, or by - indexing with the function name. Examples: - - .qsort -> callable object - ['qsort'] -> callable object - - Calling the functions releases the Python GIL during the call and - reacquires it afterwards. - """ - _func_flags_: ClassVar[int] _func_restype_: ClassVar[type[_CDataType]] _name: str @@ -123,34 +95,17 @@ class CDLL: def __getitem__(self, name_or_ordinal: str) -> _NamedFuncPointer: ... if sys.platform == "win32": - class OleDLL(CDLL): - """This class represents a dll exporting functions using the - Windows stdcall calling convention, and returning HRESULT. - HRESULT error values are automatically raised as OSError - exceptions. - """ - - class WinDLL(CDLL): - """This class represents a dll exporting functions using the - Windows stdcall calling convention. - """ - -class PyDLL(CDLL): - """This class represents the Python library itself. It allows - accessing Python API functions. The GIL is not released, and - Python exceptions are handled correctly. - """ + class OleDLL(CDLL): ... + class WinDLL(CDLL): ... + +class PyDLL(CDLL): ... class LibraryLoader(Generic[_DLLT]): def __init__(self, dlltype: type[_DLLT]) -> None: ... 
def __getattr__(self, name: str) -> _DLLT: ... def __getitem__(self, name: str) -> _DLLT: ... def LoadLibrary(self, name: str) -> _DLLT: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... cdll: LibraryLoader[CDLL] if sys.platform == "win32": @@ -178,22 +133,7 @@ def CFUNCTYPE( *argtypes: type[_CData | _CDataType], use_errno: bool = False, use_last_error: bool = False, -) -> type[_CFunctionType]: - """CFUNCTYPE(restype, *argtypes, - use_errno=False, use_last_error=False) -> function prototype. - - restype: the result type - argtypes: a sequence specifying the argument types - - The function prototype can be called in different ways to create a - callable object: - - prototype(integer address) -> foreign function - prototype(callable) -> create and return a C callable function from callable - prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method - prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal - prototype((function name, dll object)[, paramflags]) -> foreign function exported by name - """ +) -> type[_CFunctionType]: ... if sys.platform == "win32": def WINFUNCTYPE( @@ -217,19 +157,11 @@ _CVoidConstPLike: TypeAlias = _CVoidPLike | bytes _CastT = TypeVar("_CastT", bound=_CanCastTo) def cast(obj: _CData | _CDataType | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... -def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: - """create_string_buffer(aBytes) -> character array - create_string_buffer(anInteger) -> character array - create_string_buffer(aBytes, anInteger) -> character array - """ +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... c_buffer = create_string_buffer -def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: - """create_unicode_buffer(aString) -> character array - create_unicode_buffer(anInteger) -> character array - create_unicode_buffer(aString, anInteger) -> character array - """ +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -268,40 +200,24 @@ class _MemsetFunctionType(_CFunctionType): memset: _MemsetFunctionType -def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: - """string_at(ptr[, size]) -> string - - Return the byte string at void *ptr. - """ +def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: ... if sys.platform == "win32": def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... -def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: - """wstring_at(ptr[, size]) -> string - - Return the wide-character string at void *ptr. - """ +def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: ... if sys.version_info >= (3, 14): - def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: - """memoryview_at(ptr, size[, readonly]) -> memoryview - - Return a memoryview representing the memory at void *ptr. - """ + def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: ... 
class py_object(_CanCastTo, _SimpleCData[_T]): _type_: ClassVar[Literal["O"]] if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class c_bool(_SimpleCData[bool]): _type_: ClassVar[Literal["?"]] - def __init__(self, value: bool = ...) -> None: ... + def __init__(self, value: SupportsBool | SupportsLen | None = ...) -> None: ... class c_byte(_SimpleCData[int]): _type_: ClassVar[Literal["b"]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi index 007fd7d3296c1..97852f67aa6eb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi @@ -4,19 +4,13 @@ from ctypes import Structure, Union # At runtime, the native endianness is an alias for Structure, # while the other is a subclass with a metaclass added in. class BigEndianStructure(Structure): - """Structure with big endian byte order""" - __slots__ = () -class LittleEndianStructure(Structure): - """Structure base class""" +class LittleEndianStructure(Structure): ... # Same thing for these: one is an alias of Union at runtime if sys.version_info >= (3, 11): class BigEndianUnion(Union): - """Union with big endian byte order""" - __slots__ = () - class LittleEndianUnion(Union): - """Union base class""" + class LittleEndianUnion(Union): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi index 0d240b1f70c6d..c5dd954660638 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi @@ -1,11 +1,3 @@ -""" -Enough Mach-O to make your head spin. - -See the relevant header files in /usr/include/mach-o - -And also Apple's documentation. -""" - from typing import Final __version__: Final[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi index 37be9bd2414bd..c7e94daa21497 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi @@ -1,24 +1,8 @@ -""" -dyld emulation -""" - from collections.abc import Mapping from ctypes.macholib.dylib import dylib_info as dylib_info from ctypes.macholib.framework import framework_info as framework_info __all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"] -def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: - """ - Find a library or framework using dyld semantics - """ - -def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: - """ - Find a framework using dyld semantics in a very loose manner. - - Will take input such as: - Python - Python.framework - Python.framework/Versions/Current - """ +def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... +def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi index 58ece6cc99f4a..95945edfd155c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi @@ -1,7 +1,3 @@ -""" -Generic dylib path manipulation -""" - from typing import TypedDict, type_check_only __all__ = ["dylib_info"] @@ -15,23 +11,4 @@ class _DylibInfo(TypedDict): version: str | None suffix: str | None -def dylib_info(filename: str) -> _DylibInfo | None: - """ - A dylib name can take one of the following four forms: - Location/Name.SomeVersion_Suffix.dylib - Location/Name.SomeVersion.dylib - Location/Name_Suffix.dylib - Location/Name.dylib - - returns None if not found or a mapping equivalent to: - dict( - location='Location', - name='Name.SomeVersion_Suffix.dylib', - shortname='Name', - version='SomeVersion', - suffix='Suffix', - ) - - Note that SomeVersion and Suffix are optional and may be None - if not present. - """ +def dylib_info(filename: str) -> _DylibInfo | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi index f12f2b3fd152f..e92bf3700e840 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi @@ -1,7 +1,3 @@ -""" -Generic framework path manipulation -""" - from typing import TypedDict, type_check_only __all__ = ["framework_info"] @@ -15,23 +11,4 @@ class _FrameworkInfo(TypedDict): version: str | None suffix: str | None -def framework_info(filename: str) -> _FrameworkInfo | None: - """ - A framework name can take one of the following four forms: - Location/Name.framework/Versions/SomeVersion/Name_Suffix - Location/Name.framework/Versions/SomeVersion/Name - Location/Name.framework/Name_Suffix - Location/Name.framework/Name - - returns None if not found, or a mapping equivalent to: - dict( - location='Location', - name='Name.framework/Versions/SomeVersion/Name_Suffix', - shortname='Name', - version='SomeVersion', - suffix='Suffix', - ) - - Note that SomeVersion and Suffix are optional and may be None - if not present - """ +def framework_info(filename: str) -> _FrameworkInfo | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi index dc1a251365079..4f18c1d8db345 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi @@ -3,11 +3,9 @@ import sys def find_library(name: str) -> str | None: ... if sys.platform == "win32": - def find_msvcrt() -> str | None: - """Return the name of the VC runtime dll""" + def find_msvcrt() -> str | None: ... if sys.version_info >= (3, 14): - def dllist() -> list[str]: - """Return a list of loaded shared libraries in the current process.""" + def dllist() -> list[str]: ... def test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi index a6fcd958e492e..3e32487ad99f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi @@ -1,15 +1,3 @@ -"""curses - -The main package for curses support for Python. 
Normally used by importing -the package, and perhaps a particular module inside it. - - import curses - from curses import textpad - curses.initscr() - ... - -""" - import sys from _curses import * from _curses import window as window @@ -34,13 +22,7 @@ COLS: int COLORS: Final[int] COLOR_PAIRS: Final[int] -def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: - """Wrapper function that initializes curses and calls another function, - restoring normal keyboard/screen behavior on error. - The callable object 'func' is then passed the main window 'stdscr' - as its first argument, followed by any other arguments passed to - wrapper(). - """ +def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: ... # At runtime this class is unexposed and calls itself curses.ncurses_version. # That name would conflict with the actual curses.ncurses_version, which is diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi index 823f98c139b62..0234434b8c3de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi @@ -1,5 +1,3 @@ -"""Constants and membership tests for ASCII characters""" - from typing import Final, TypeVar _CharT = TypeVar("_CharT", str, int) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi index 067afef730a5b..861559d38bc5a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi @@ -1,6 +1 @@ -"""curses.panel - -Module for using panels with curses. -""" - from _curses_panel import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi index 129178bdefcc2..48ef67c9d85f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi @@ -1,45 +1,11 @@ -"""Simple textbox editing widget with Emacs-like keybindings.""" - from _curses import window from collections.abc import Callable -def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: - """Draw a rectangle with corners at the provided upper-left - and lower-right coordinates. - """ +def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: ... class Textbox: - """Editing widget using the interior of a window object. - Supports the following Emacs-like key bindings: - - Ctrl-A Go to left edge of window. - Ctrl-B Cursor left, wrapping to previous line if appropriate. - Ctrl-D Delete character under cursor. - Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on). - Ctrl-F Cursor right, wrapping to next line when appropriate. - Ctrl-G Terminate, returning the window contents. - Ctrl-H Delete character backward. - Ctrl-J Terminate if the window is 1 line, otherwise insert newline. - Ctrl-K If line is blank, delete it, otherwise clear to end of line. - Ctrl-L Refresh screen. - Ctrl-N Cursor down; move down one line. - Ctrl-O Insert a blank line at cursor location. - Ctrl-P Cursor up; move up one line. - - Move operations do nothing if the cursor is at an edge where the movement - is not possible. 
The following synonyms are supported where possible: - - KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N - KEY_BACKSPACE = Ctrl-h - """ - stripspaces: bool def __init__(self, win: window, insert_mode: bool = False) -> None: ... - def edit(self, validate: Callable[[int], int] | None = None) -> str: - """Edit in the widget window and collect the results.""" - - def do_command(self, ch: str | int) -> None: - """Process a single editing command.""" - - def gather(self) -> str: - """Collect and return the contents of the window.""" + def edit(self, validate: Callable[[int], int] | None = None) -> str: ... + def do_command(self, ch: str | int) -> None: ... + def gather(self) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi index 6a134c3df68ae..3a1c8cb5d62dd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi @@ -64,48 +64,11 @@ if sys.version_info >= (3, 10): class KW_ONLY: ... @overload -def asdict(obj: DataclassInstance) -> dict[str, Any]: - """Return the fields of a dataclass instance as a new dictionary mapping - field names to field values. - - Example usage:: - - @dataclass - class C: - x: int - y: int - - c = C(1, 2) - assert asdict(c) == {'x': 1, 'y': 2} - - If given, 'dict_factory' will be used instead of built-in dict. - The function applies recursively to field values that are - dataclass instances. This will also look into built-in containers: - tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. - """ - +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... @overload def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: DataclassInstance) -> tuple[Any, ...]: - """Return the fields of a dataclass instance as a new tuple of field values. - - Example usage:: - - @dataclass - class C: - x: int - y: int - - c = C(1, 2) - assert astuple(c) == (1, 2) - - If given, 'tuple_factory' will be used instead of built-in tuple. - The function applies recursively to field values that are - dataclass instances. This will also look into built-in containers: - tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. - """ - +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... @overload def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... @@ -125,21 +88,7 @@ if sys.version_info >= (3, 11): kw_only: bool = False, slots: bool = False, weakref_slot: bool = False, - ) -> type[_T]: - """Add dunder methods based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. If repr - is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method is added. If frozen is true, fields may not be - assigned to after instance creation. If match_args is true, the - __match_args__ tuple is added. If kw_only is true, then by default - all fields are keyword-only. If slots is true, a new class with a - __slots__ attribute is returned. - """ - + ) -> type[_T]: ... 
@overload def dataclass( cls: None = None, @@ -172,22 +121,7 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> type[_T]: - """Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. If match_args is true, - the __match_args__ tuple is added. If kw_only is true, then by - default all fields are keyword-only. If slots is true, an - __slots__ attribute is added. - """ - + ) -> type[_T]: ... @overload def dataclass( cls: None = None, @@ -216,19 +150,7 @@ else: order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - ) -> type[_T]: - """Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. - """ - + ) -> type[_T]: ... @overload def dataclass( cls: None = None, @@ -333,11 +255,7 @@ class Field(Generic[_T]): ) -> None: ... def __set_name__(self, owner: Type[Any], name: str) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @@ -354,23 +272,7 @@ if sys.version_info >= (3, 14): metadata: Mapping[Any, Any] | None = None, kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., doc: str | None = None, - ) -> _T: - """Return an object to identify dataclass fields. - - default is the default value of the field. default_factory is a - 0-argument function called to initialize a field's value. If init - is true, the field will be a parameter to the class's __init__() - function. If repr is true, the field will be included in the - object's repr(). If hash is true, the field will be included in the - object's hash(). If compare is true, the field will be used in - comparison functions. metadata, if specified, must be a mapping - which is stored but not otherwise examined by dataclass. If kw_only - is true, the field will become a keyword-only parameter to - __init__(). doc is an optional docstring for this field. - - It is an error to specify both default and default_factory. - """ - + ) -> _T: ... @overload def field( *, @@ -410,23 +312,7 @@ elif sys.version_info >= (3, 10): compare: bool = True, metadata: Mapping[Any, Any] | None = None, kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., - ) -> _T: - """Return an object to identify dataclass fields. - - default is the default value of the field. default_factory is a - 0-argument function called to initialize a field's value. 
If init - is true, the field will be a parameter to the class's __init__() - function. If repr is true, the field will be included in the - object's repr(). If hash is true, the field will be included in the - object's hash(). If compare is true, the field will be used in - comparison functions. metadata, if specified, must be a mapping - which is stored but not otherwise examined by dataclass. If kw_only - is true, the field will become a keyword-only parameter to - __init__(). - - It is an error to specify both default and default_factory. - """ - + ) -> _T: ... @overload def field( *, @@ -463,21 +349,7 @@ else: hash: bool | None = None, compare: bool = True, metadata: Mapping[Any, Any] | None = None, - ) -> _T: - """Return an object to identify dataclass fields. - - default is the default value of the field. default_factory is a - 0-argument function called to initialize a field's value. If init - is True, the field will be a parameter to the class's __init__() - function. If repr is True, the field will be included in the - object's repr(). If hash is True, the field will be included in - the object's hash(). If compare is True, the field will be used - in comparison functions. metadata, if specified, must be a - mapping which is stored but not otherwise examined by dataclass. - - It is an error to specify both default and default_factory. - """ - + ) -> _T: ... @overload def field( *, @@ -501,20 +373,11 @@ else: metadata: Mapping[Any, Any] | None = None, ) -> Any: ... -def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: - """Return a tuple describing the fields of this dataclass. - - Accepts a dataclass or an instance of one. Tuple elements are of - type Field. - """ +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... # HACK: `obj: Never` typing matches if object argument is using `Any` type. @overload -def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] - """Returns True if obj is a dataclass or an instance of a - dataclass. - """ - +def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] @overload def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ... @overload @@ -550,32 +413,7 @@ if sys.version_info >= (3, 14): weakref_slot: bool = False, module: str | None = None, decorator: _DataclassFactory = ..., - ) -> type: - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'.:: - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to:: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, - slots, and weakref_slot are passed to dataclass(). - - If module parameter is defined, the '__module__' attribute of the dataclass is - set to that value. - """ + ) -> type: ... 
elif sys.version_info >= (3, 12): def make_dataclass( @@ -595,32 +433,7 @@ elif sys.version_info >= (3, 12): slots: bool = False, weakref_slot: bool = False, module: str | None = None, - ) -> type: - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'.:: - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to:: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, - slots, and weakref_slot are passed to dataclass(). - - If module parameter is defined, the '__module__' attribute of the dataclass is - set to that value. - """ + ) -> type: ... elif sys.version_info >= (3, 11): def make_dataclass( @@ -639,29 +452,7 @@ elif sys.version_info >= (3, 11): kw_only: bool = False, slots: bool = False, weakref_slot: bool = False, - ) -> type: - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'.:: - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to:: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ + ) -> type: ... elif sys.version_info >= (3, 10): def make_dataclass( @@ -679,29 +470,7 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> type: - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. - - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ + ) -> type: ... else: def make_dataclass( @@ -716,41 +485,6 @@ else: order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - ) -> type: - """Return a new dynamically created dataclass. - - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. 
- - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - - is equivalent to: - - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) - - For the bases and namespace parameters, see the builtin type() function. - - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ - -def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: - """Return a new object replacing specified fields with new values. - - This is especially useful for frozen classes. Example usage:: - - @dataclass(frozen=True) - class C: - x: int - y: int + ) -> type: ... - c = C(1, 2) - c1 = replace(c, x=3) - assert c1.x == 3 and c1.y == 2 - """ +def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi index 4571f2dcf6e65..8a0536c006d57 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi @@ -1,9 +1,3 @@ -"""Specific date/time and related types. - -See https://data.iana.org/time-zones/tz-link.html for -time zone and DST data sources. -""" - import sys from abc import abstractmethod from time import struct_time @@ -19,43 +13,26 @@ MINYEAR: Final = 1 MAXYEAR: Final = 9999 class tzinfo: - """Abstract base class for time zone info objects.""" - @abstractmethod - def tzname(self, dt: datetime | None, /) -> str | None: - """datetime -> string name of time zone.""" - + def tzname(self, dt: datetime | None, /) -> str | None: ... @abstractmethod - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: - """datetime -> timedelta showing offset from UTC, negative values indicating West of UTC""" - + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... @abstractmethod - def dst(self, dt: datetime | None, /) -> timedelta | None: - """datetime -> DST offset as timedelta positive east of UTC.""" - - def fromutc(self, dt: datetime, /) -> datetime: - """datetime in UTC -> datetime in local time.""" + def dst(self, dt: datetime | None, /) -> timedelta | None: ... + def fromutc(self, dt: datetime, /) -> datetime: ... # Alias required to avoid name conflicts with date(time).tzinfo. _TzInfo: TypeAlias = tzinfo @final class timezone(tzinfo): - """Fixed offset from UTC implementation of tzinfo.""" - utc: ClassVar[timezone] min: ClassVar[timezone] max: ClassVar[timezone] def __new__(cls, offset: timedelta, name: str = ...) -> Self: ... - def tzname(self, dt: datetime | None, /) -> str: - """If name is specified when timezone is created, returns the name. Otherwise returns offset as 'UTC(+|-)HH:MM'.""" - - def utcoffset(self, dt: datetime | None, /) -> timedelta: - """Return fixed offset.""" - - def dst(self, dt: datetime | None, /) -> None: - """Return None.""" - + def tzname(self, dt: datetime | None, /) -> str: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta: ... + def dst(self, dt: datetime | None, /) -> None: ... def __hash__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... @@ -76,120 +53,68 @@ class _IsoCalendarDate(tuple[int, int, int]): @disjoint_base class date: - """date(year, month, day) --> date object""" - min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] def __new__(cls, year: SupportsIndex, month: SupportsIndex, day: SupportsIndex) -> Self: ... 
@classmethod - def fromtimestamp(cls, timestamp: float, /) -> Self: - """Create a date from a POSIX timestamp. - - The timestamp is a number, e.g. created via time.time(), that is interpreted - as local time. - """ - + def fromtimestamp(cls, timestamp: float, /) -> Self: ... @classmethod - def today(cls) -> Self: - """Current date or datetime: same as self.__class__.fromtimestamp(time.time()).""" - + def today(cls) -> Self: ... @classmethod - def fromordinal(cls, n: int, /) -> Self: - """int -> date corresponding to a proleptic Gregorian ordinal.""" - + def fromordinal(cls, n: int, /) -> Self: ... @classmethod - def fromisoformat(cls, date_string: str, /) -> Self: - """str -> Construct a date from a string in ISO 8601 format.""" - + def fromisoformat(cls, date_string: str, /) -> Self: ... @classmethod - def fromisocalendar(cls, year: int, week: int, day: int) -> Self: - """int, int, int -> Construct a date from the ISO year, week number and weekday. - - This is the inverse of the date.isocalendar() function - """ - + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... - def ctime(self) -> str: - """Return ctime() style string.""" + def ctime(self) -> str: ... + if sys.version_info >= (3, 14): @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new date parsed from a string (like time.strptime()).""" + def strptime(cls, date_string: str, format: str, /) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): - def strftime(self, format: str) -> str: - """format -> strftime() style string.""" + def strftime(self, format: str) -> str: ... else: - def strftime(self, format: str, /) -> str: - """format -> strftime() style string.""" - - def __format__(self, fmt: str, /) -> str: - """Formats self with strftime.""" + def strftime(self, format: str, /) -> str: ... - def isoformat(self) -> str: - """Return string in ISO 8601 format, YYYY-MM-DD.""" - - def timetuple(self) -> struct_time: - """Return time tuple, compatible with time.localtime().""" - - def toordinal(self) -> int: - """Return proleptic Gregorian ordinal. January 1 of year 1 is day 1.""" + def __format__(self, fmt: str, /) -> str: ... + def isoformat(self) -> str: ... + def timetuple(self) -> struct_time: ... + def toordinal(self) -> int: ... if sys.version_info >= (3, 13): - def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: - """The same as replace().""" - - def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: - """Return date with new specified fields.""" + def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... def __le__(self, value: date, /) -> bool: ... def __lt__(self, value: date, /) -> bool: ... def __ge__(self, value: date, /) -> bool: ... def __gt__(self, value: date, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... 
- def __add__(self, value: timedelta, /) -> Self: - """Return self+value.""" - - def __radd__(self, value: timedelta, /) -> Self: - """Return value+self.""" - + def __add__(self, value: timedelta, /) -> Self: ... + def __radd__(self, value: timedelta, /) -> Self: ... @overload - def __sub__(self, value: datetime, /) -> NoReturn: - """Return self-value.""" - + def __sub__(self, value: datetime, /) -> NoReturn: ... @overload def __sub__(self, value: Self, /) -> timedelta: ... @overload def __sub__(self, value: timedelta, /) -> Self: ... def __hash__(self) -> int: ... - def weekday(self) -> int: - """Return the day of the week represented by the date. - Monday == 0 ... Sunday == 6 - """ - - def isoweekday(self) -> int: - """Return the day of the week represented by the date. - Monday == 1 ... Sunday == 7 - """ - - def isocalendar(self) -> _IsoCalendarDate: - """Return a named tuple containing ISO year, week number, and weekday.""" + def weekday(self) -> int: ... + def isoweekday(self) -> int: ... + def isocalendar(self) -> _IsoCalendarDate: ... @disjoint_base class time: - """time([hour[, minute[, second[, microsecond[, tzinfo]]]]]) --> a time object - - All arguments are optional. tzinfo may be None, or an instance of - a tzinfo subclass. The remaining arguments may be ints. - """ - min: ClassVar[time] max: ClassVar[time] resolution: ClassVar[timedelta] @@ -221,42 +146,26 @@ class time: def __gt__(self, value: time, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def isoformat(self, timespec: str = "auto") -> str: - """Return string in ISO 8601 format, [HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. - - The optional argument timespec specifies the number of additional terms - of the time to include. Valid options are 'auto', 'hours', 'minutes', - 'seconds', 'milliseconds' and 'microseconds'. - """ - + def isoformat(self, timespec: str = "auto") -> str: ... @classmethod - def fromisoformat(cls, time_string: str, /) -> Self: - """string -> time from a string in ISO 8601 format""" + def fromisoformat(cls, time_string: str, /) -> Self: ... + if sys.version_info >= (3, 14): @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new time parsed from a string (like time.strptime()).""" + def strptime(cls, date_string: str, format: str, /) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): - def strftime(self, format: str) -> str: - """format -> strftime() style string.""" + def strftime(self, format: str) -> str: ... else: - def strftime(self, format: str, /) -> str: - """format -> strftime() style string.""" - - def __format__(self, fmt: str, /) -> str: - """Formats self with strftime.""" + def strftime(self, format: str, /) -> str: ... - def utcoffset(self) -> timedelta | None: - """Return self.tzinfo.utcoffset(self).""" - - def tzname(self) -> str | None: - """Return self.tzinfo.tzname(self).""" - - def dst(self) -> timedelta | None: - """Return self.tzinfo.dst(self).""" + def __format__(self, fmt: str, /) -> str: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... 
if sys.version_info >= (3, 13): def __replace__( self, @@ -268,8 +177,7 @@ class time: microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: - """The same as replace().""" + ) -> Self: ... def replace( self, @@ -280,22 +188,13 @@ class time: tzinfo: _TzInfo | None = ..., *, fold: int = ..., - ) -> Self: - """Return time with new specified fields.""" + ) -> Self: ... _Date: TypeAlias = date _Time: TypeAlias = time @disjoint_base class timedelta: - """Difference between two datetime values. - - timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0) - - All arguments are optional and default to 0. - Arguments may be integers or floats, and may be positive or negative. - """ - min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] @@ -310,83 +209,41 @@ class timedelta: weeks: float = 0, ) -> Self: ... @property - def days(self) -> int: - """Number of days.""" - + def days(self) -> int: ... @property - def seconds(self) -> int: - """Number of seconds (>= 0 and less than 1 day).""" - + def seconds(self) -> int: ... @property - def microseconds(self) -> int: - """Number of microseconds (>= 0 and less than 1 second).""" - - def total_seconds(self) -> float: - """Total seconds in the duration.""" - - def __add__(self, value: timedelta, /) -> timedelta: - """Return self+value.""" - - def __radd__(self, value: timedelta, /) -> timedelta: - """Return value+self.""" - - def __sub__(self, value: timedelta, /) -> timedelta: - """Return self-value.""" - - def __rsub__(self, value: timedelta, /) -> timedelta: - """Return value-self.""" - - def __neg__(self) -> timedelta: - """-self""" - - def __pos__(self) -> timedelta: - """+self""" - - def __abs__(self) -> timedelta: - """abs(self)""" - - def __mul__(self, value: float, /) -> timedelta: - """Return self*value.""" - - def __rmul__(self, value: float, /) -> timedelta: - """Return value*self.""" - + def microseconds(self) -> int: ... + def total_seconds(self) -> float: ... + def __add__(self, value: timedelta, /) -> timedelta: ... + def __radd__(self, value: timedelta, /) -> timedelta: ... + def __sub__(self, value: timedelta, /) -> timedelta: ... + def __rsub__(self, value: timedelta, /) -> timedelta: ... + def __neg__(self) -> timedelta: ... + def __pos__(self) -> timedelta: ... + def __abs__(self) -> timedelta: ... + def __mul__(self, value: float, /) -> timedelta: ... + def __rmul__(self, value: float, /) -> timedelta: ... @overload - def __floordiv__(self, value: timedelta, /) -> int: - """Return self//value.""" - + def __floordiv__(self, value: timedelta, /) -> int: ... @overload def __floordiv__(self, value: int, /) -> timedelta: ... @overload - def __truediv__(self, value: timedelta, /) -> float: - """Return self/value.""" - + def __truediv__(self, value: timedelta, /) -> float: ... @overload def __truediv__(self, value: float, /) -> timedelta: ... - def __mod__(self, value: timedelta, /) -> timedelta: - """Return self%value.""" - - def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: - """Return divmod(self, value).""" - + def __mod__(self, value: timedelta, /) -> timedelta: ... + def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: ... def __le__(self, value: timedelta, /) -> bool: ... def __lt__(self, value: timedelta, /) -> bool: ... def __ge__(self, value: timedelta, /) -> bool: ... def __gt__(self, value: timedelta, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... 
- def __bool__(self) -> bool: - """True if self else False""" - + def __bool__(self) -> bool: ... def __hash__(self) -> int: ... @disjoint_base class datetime(date): - """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) - - The year, month and day arguments are required. tzinfo may be None, or an - instance of a tzinfo subclass. The remaining arguments may be ints. - """ - min: ClassVar[datetime] max: ClassVar[datetime] def __new__( @@ -419,51 +276,26 @@ class datetime(date): # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): @classmethod - def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: - """timestamp[, tz] -> tz's local time from POSIX timestamp.""" + def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: ... else: @classmethod - def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: - """timestamp[, tz] -> tz's local time from POSIX timestamp.""" + def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: ... @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)") - def utcfromtimestamp(cls, t: float, /) -> Self: - """Construct a naive UTC datetime from a POSIX timestamp.""" - + def utcfromtimestamp(cls, t: float, /) -> Self: ... @classmethod - def now(cls, tz: _TzInfo | None = None) -> Self: - """Returns new datetime object representing current time local to tz. - - tz - Timezone object. - - If no tz is specified, uses local timezone. - """ - + def now(cls, tz: _TzInfo | None = None) -> Self: ... @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)") - def utcnow(cls) -> Self: - """Return a new datetime representing UTC day and time.""" - + def utcnow(cls) -> Self: ... @classmethod - def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: - """date, time -> datetime with same date and time fields""" - - def timestamp(self) -> float: - """Return POSIX timestamp as float.""" - - def utctimetuple(self) -> struct_time: - """Return UTC time tuple, compatible with time.localtime().""" - - def date(self) -> _Date: - """Return date object with same year, month and day.""" - - def time(self) -> _Time: - """Return time object with same time but with tzinfo=None.""" - - def timetz(self) -> _Time: - """Return time object with same time and tzinfo.""" + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... + def timestamp(self) -> float: ... + def utctimetuple(self) -> struct_time: ... + def date(self) -> _Date: ... + def time(self) -> _Time: ... + def timetz(self) -> _Time: ... if sys.version_info >= (3, 13): def __replace__( self, @@ -478,8 +310,7 @@ class datetime(date): microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: - """The same as replace().""" + ) -> Self: ... def replace( self, @@ -493,33 +324,14 @@ class datetime(date): tzinfo: _TzInfo | None = ..., *, fold: int = ..., - ) -> Self: - """Return datetime with new specified fields.""" - - def astimezone(self, tz: _TzInfo | None = None) -> Self: - """tz -> convert to local time in new timezone tz""" - - def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: - """[sep] -> string in ISO 8601 format, YYYY-MM-DDT[HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. 
- sep is used to separate the year from the time, and defaults to 'T'. - The optional argument timespec specifies the number of additional terms - of the time to include. Valid options are 'auto', 'hours', 'minutes', - 'seconds', 'milliseconds' and 'microseconds'. - """ - + ) -> Self: ... + def astimezone(self, tz: _TzInfo | None = None) -> Self: ... + def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: ... @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new datetime parsed from a string (like time.strptime()).""" - - def utcoffset(self) -> timedelta | None: - """Return self.tzinfo.utcoffset(self).""" - - def tzname(self) -> str | None: - """Return self.tzinfo.tzname(self).""" - - def dst(self) -> timedelta | None: - """Return self.tzinfo.dst(self).""" - + def strptime(cls, date_string: str, format: str, /) -> Self: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... def __le__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __lt__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __ge__(self, value: datetime, /) -> bool: ... # type: ignore[override] @@ -527,9 +339,7 @@ class datetime(date): def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload # type: ignore[override] - def __sub__(self, value: Self, /) -> timedelta: - """Return self-value.""" - + def __sub__(self, value: Self, /) -> timedelta: ... @overload def __sub__(self, value: timedelta, /) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi index 3c41015fcd043..7cbb63cf2f06e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi @@ -1,32 +1,3 @@ -"""Generic interface to all dbm clones. - -Use - - import dbm - d = dbm.open(file, 'w', 0o666) - -The returned object is a dbm.sqlite3, dbm.gnu, dbm.ndbm or dbm.dumb database object, dependent on the -type of database being opened (determined by the whichdb function) in the case -of an existing dbm. If the dbm does not exist and the create or new flag ('c' -or 'n') was specified, the dbm type will be determined by the availability of -the modules (tested in the above order). - -It has the following interface (key and data are strings): - - d[key] = data # store data at key (may override data at - # existing key) - data = d[key] # retrieve data at key (raise KeyError if no - # such key) - del d[key] # delete data stored at key (raises KeyError - # if no such key) - flag = key in d # true if the key exists - list = d.keys() # return a list of all existing keys (slow!) - -Future versions may change the order in which implementations are -tested for existence, and add interfaces to other dbm-like -implementations. -""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -126,53 +97,9 @@ class _error(Exception): ... error: tuple[type[_error], type[OSError]] if sys.version_info >= (3, 11): - def whichdb(filename: StrOrBytesPath) -> str | None: - """Guess which db package to use to open a db file. - - Return values: - - - None if the database file can't be read; - - empty string if the file can be read but can't be recognized - - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. 
- - Importing the given module may still fail, and opening the - database using that module may still fail. - """ - - def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: - """Open or create database at path given by *file*. - - Optional argument *flag* can be 'r' (default) for read-only access, 'w' - for read-write access of an existing database, 'c' for read-write access - to a new or existing database, and 'n' for read-write access to a new - database. - - Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it - only if it doesn't exist; and 'n' always creates a new database. - """ + def whichdb(filename: StrOrBytesPath) -> str | None: ... + def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... else: - def whichdb(filename: str) -> str | None: - """Guess which db package to use to open a db file. - - Return values: - - - None if the database file can't be read; - - empty string if the file can be read but can't be recognized - - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. - - Importing the given module may still fail, and opening the - database using that module may still fail. - """ - - def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: - """Open or create database at path given by *file*. - - Optional argument *flag* can be 'r' (default) for read-only access, 'w' - for read-write access of an existing database, 'c' for read-write access - to a new or existing database, and 'n' for read-write access to a new - database. - - Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it - only if it doesn't exist; and 'n' always creates a new database. - """ + def whichdb(filename: str) -> str | None: ... + def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi index 22ef756acac03..1c0b7756f2925 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi @@ -1,26 +1,3 @@ -"""A dumb and slow but simple dbm clone. - -For database spam, spam.dir contains the index (a text file), -spam.bak *may* contain a backup of the index (also a text file), -while spam.dat contains the data (a binary file). - -XXX TO DO: - -- seems to contain a bug when updating... - -- reclaim free space (currently, space once occupied by deleted or expanded -items is never reused) - -- support concurrent access (currently, if two processes take turns making -updates, they can mess up the index) - -- support efficient access to large databases (currently, the whole index -is read when the database is opened, and some updates rewrite the whole index) - -- support opening for read-only (flag = 'm') - -""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -54,33 +31,7 @@ class _Database(MutableMapping[_KeyType, bytes]): ) -> None: ... if sys.version_info >= (3, 11): - def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: - """Open the database file, filename, and return corresponding object. - - The flag argument, used to control how the database is opened in the - other DBM implementations, supports only the semantics of 'c' and 'n' - values. Other values will default to the semantics of 'c' value: - the database will always opened for update and will be created if it - does not exist. 
- - The optional mode argument is the UNIX mode of the file, used only when - the database has to be created. It defaults to octal code 0o666 (and - will be modified by the prevailing umask). - - """ + def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: ... else: - def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: - """Open the database file, filename, and return corresponding object. - - The flag argument, used to control how the database is opened in the - other DBM implementations, supports only the semantics of 'c' and 'n' - values. Other values will default to the semantics of 'c' value: - the database will always opened for update and will be created if it - does not exist. - - The optional mode argument is the UNIX mode of the file, used only when - the database has to be created. It defaults to octal code 0o666 (and - will be modified by the prevailing umask). - - """ + def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi index b07a1defffdf2..2dac3d12b0ca4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi @@ -1,3 +1 @@ -"""Provide the _gdbm module as a dbm submodule.""" - from _gdbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi index 23056a29ef2b6..66c943ab640be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi @@ -1,3 +1 @@ -"""Provide the _dbm module as a dbm submodule.""" - from _dbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi index 4f30544592376..e2fba93b20017 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi @@ -26,17 +26,4 @@ class _Database(MutableMapping[bytes, bytes]): def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... -def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: - """Open a dbm.sqlite3 database and return the dbm object. - - The 'filename' parameter is the name of the database file. - - The optional 'flag' parameter can be one of ...: - 'r' (default): open an existing database for read only access - 'w': open an existing database for read/write access - 'c': create a database if it does not exist; open for read/write access - 'n': always create a new, empty database; open for read/write access - - The optional 'mode' parameter is the Unix file access mode of the database; - only used when creating a new database. Default: 0o666. - """ +def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi index 1f6ba755df7a2..2e06c2d1b724a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi @@ -1,103 +1,3 @@ -"""Decimal fixed-point and floating-point arithmetic. 
- -This is an implementation of decimal floating-point arithmetic based on -the General Decimal Arithmetic Specification: - - http://speleotrove.com/decimal/decarith.html - -and IEEE standard 854-1987: - - http://en.wikipedia.org/wiki/IEEE_854-1987 - -Decimal floating point has finite precision with arbitrarily large bounds. - -The purpose of this module is to support arithmetic using familiar -"schoolhouse" rules and to avoid some of the tricky representation -issues associated with binary floating point. The package is especially -useful for financial applications or for contexts where users have -expectations that are at odds with binary floating point (for instance, -in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead -of 0.0; Decimal('1.00') % Decimal('0.1') returns the expected -Decimal('0.00')). - -Here are some examples of using the decimal module: - ->>> from decimal import * ->>> setcontext(ExtendedContext) ->>> Decimal(0) -Decimal('0') ->>> Decimal('1') -Decimal('1') ->>> Decimal('-.0123') -Decimal('-0.0123') ->>> Decimal(123456) -Decimal('123456') ->>> Decimal('123.45e12345678') -Decimal('1.2345E+12345680') ->>> Decimal('1.33') + Decimal('1.27') -Decimal('2.60') ->>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41') -Decimal('-2.20') ->>> dig = Decimal(1) ->>> print(dig / Decimal(3)) -0.333333333 ->>> getcontext().prec = 18 ->>> print(dig / Decimal(3)) -0.333333333333333333 ->>> print(dig.sqrt()) -1 ->>> print(Decimal(3).sqrt()) -1.73205080756887729 ->>> print(Decimal(3) ** 123) -4.85192780976896427E+58 ->>> inf = Decimal(1) / Decimal(0) ->>> print(inf) -Infinity ->>> neginf = Decimal(-1) / Decimal(0) ->>> print(neginf) --Infinity ->>> print(neginf + inf) -NaN ->>> print(neginf * inf) --Infinity ->>> print(dig / 0) -Infinity ->>> getcontext().traps[DivisionByZero] = 1 ->>> print(dig / 0) -Traceback (most recent call last): - ... - ... - ... -decimal.DivisionByZero: x / 0 ->>> c = Context() ->>> c.traps[InvalidOperation] = 0 ->>> print(c.flags[InvalidOperation]) -0 ->>> c.divide(Decimal(0), Decimal(0)) -Decimal('NaN') ->>> c.traps[InvalidOperation] = 1 ->>> print(c.flags[InvalidOperation]) -1 ->>> c.flags[InvalidOperation] = 0 ->>> print(c.flags[InvalidOperation]) -0 ->>> print(c.divide(Decimal(0), Decimal(0))) -Traceback (most recent call last): - ... - ... - ... -decimal.InvalidOperation: 0 / 0 ->>> print(c.flags[InvalidOperation]) -1 ->>> c.flags[InvalidOperation] = 0 ->>> c.traps[InvalidOperation] = 0 ->>> print(c.divide(Decimal(0), Decimal(0))) -NaN ->>> print(c.flags[InvalidOperation]) -1 ->>> -""" - import numbers import sys from _decimal import ( @@ -149,8 +49,6 @@ class _ContextManager: def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... class DecimalTuple(NamedTuple): - """DecimalTuple(sign, digits, exponent)""" - sign: int digits: tuple[int, ...] exponent: int | Literal["n", "N", "F"] @@ -172,167 +70,51 @@ class FloatOperation(DecimalException, TypeError): ... @disjoint_base class Decimal: - """Construct a new Decimal object. 'value' can be an integer, string, tuple, - or another Decimal object. If no value is given, return Decimal('0'). The - context does not affect the conversion and is only passed to determine if - the InvalidOperation trap is active. - - """ - def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ... 
if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: Decimal | float, /) -> Self: - """Class method that converts a real number to a decimal number, exactly. - - >>> Decimal.from_number(314) # int - Decimal('314') - >>> Decimal.from_number(0.1) # float - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_number(Decimal('3.14')) # another decimal instance - Decimal('3.14') - - - """ + def from_number(cls, number: Decimal | float, /) -> Self: ... @classmethod - def from_float(cls, f: float, /) -> Self: - """Class method that converts a float to a decimal number, exactly. - Since 0.1 is not exactly representable in binary floating point, - Decimal.from_float(0.1) is not the same as Decimal('0.1'). - - >>> Decimal.from_float(0.1) - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_float(float('nan')) - Decimal('NaN') - >>> Decimal.from_float(float('inf')) - Decimal('Infinity') - >>> Decimal.from_float(float('-inf')) - Decimal('-Infinity') - - - """ - - def __bool__(self) -> bool: - """True if self else False""" - - def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Compare self to other. Return a decimal value: - - a or b is a NaN ==> Decimal('NaN') - a < b ==> Decimal('-1') - a == b ==> Decimal('0') - a > b ==> Decimal('1') - - """ - + def from_float(cls, f: float, /) -> Self: ... + def __bool__(self) -> bool: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def __hash__(self) -> int: ... - def as_tuple(self) -> DecimalTuple: - """Return a tuple representation of the number.""" - - def as_integer_ratio(self) -> tuple[int, int]: - """Decimal.as_integer_ratio() -> (int, int) - - Return a pair of integers, whose ratio is exactly equal to the original - Decimal and with a positive denominator. The ratio is in lowest terms. - Raise OverflowError on infinities and a ValueError on NaNs. - - """ - - def to_eng_string(self, context: Context | None = None) -> str: - """Convert to an engineering-type string. Engineering notation has an exponent - which is a multiple of 3, so there are up to 3 digits left of the decimal - place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). - - The value of context.capitals determines whether the exponent sign is lower - or upper case. Otherwise, the context does not affect the operation. - - """ - - def __abs__(self) -> Decimal: - """abs(self)""" - - def __add__(self, value: _Decimal, /) -> Decimal: - """Return self+value.""" - - def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(self, value).""" - + def as_tuple(self) -> DecimalTuple: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def to_eng_string(self, context: Context | None = None) -> str: ... + def __abs__(self) -> Decimal: ... + def __add__(self, value: _Decimal, /) -> Decimal: ... + def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... def __eq__(self, value: object, /) -> bool: ... - def __floordiv__(self, value: _Decimal, /) -> Decimal: - """Return self//value.""" - + def __floordiv__(self, value: _Decimal, /) -> Decimal: ... def __ge__(self, value: _ComparableNum, /) -> bool: ... def __gt__(self, value: _ComparableNum, /) -> bool: ... def __le__(self, value: _ComparableNum, /) -> bool: ... def __lt__(self, value: _ComparableNum, /) -> bool: ... 
- def __mod__(self, value: _Decimal, /) -> Decimal: - """Return self%value.""" - - def __mul__(self, value: _Decimal, /) -> Decimal: - """Return self*value.""" - - def __neg__(self) -> Decimal: - """-self""" - - def __pos__(self) -> Decimal: - """+self""" - - def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: - """Return pow(self, value, mod).""" - - def __radd__(self, value: _Decimal, /) -> Decimal: - """Return value+self.""" - - def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(value, self).""" - - def __rfloordiv__(self, value: _Decimal, /) -> Decimal: - """Return value//self.""" - - def __rmod__(self, value: _Decimal, /) -> Decimal: - """Return value%self.""" - - def __rmul__(self, value: _Decimal, /) -> Decimal: - """Return value*self.""" - - def __rsub__(self, value: _Decimal, /) -> Decimal: - """Return value-self.""" - - def __rtruediv__(self, value: _Decimal, /) -> Decimal: - """Return value/self.""" - - def __sub__(self, value: _Decimal, /) -> Decimal: - """Return self-value.""" - - def __truediv__(self, value: _Decimal, /) -> Decimal: - """Return self/value.""" - - def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the remainder from dividing self by other. This differs from - self % other in that the sign of the remainder is chosen so as to minimize - its absolute value. More precisely, the return value is self - n * other - where n is the integer nearest to the exact value of self / other, and - if two integers are equally near then the even one is chosen. - - If the result is zero then its sign will be the sign of self. - - """ - - def __float__(self) -> float: - """float(self)""" - - def __int__(self) -> int: - """int(self)""" - + def __mod__(self, value: _Decimal, /) -> Decimal: ... + def __mul__(self, value: _Decimal, /) -> Decimal: ... + def __neg__(self) -> Decimal: ... + def __pos__(self) -> Decimal: ... + def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ... + def __radd__(self, value: _Decimal, /) -> Decimal: ... + def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ... + def __rmod__(self, value: _Decimal, /) -> Decimal: ... + def __rmul__(self, value: _Decimal, /) -> Decimal: ... + def __rsub__(self, value: _Decimal, /) -> Decimal: ... + def __rtruediv__(self, value: _Decimal, /) -> Decimal: ... + def __sub__(self, value: _Decimal, /) -> Decimal: ... + def __truediv__(self, value: _Decimal, /) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... def __trunc__(self) -> int: ... @property def real(self) -> Decimal: ... @property def imag(self) -> Decimal: ... - def conjugate(self) -> Decimal: - """Return self.""" - + def conjugate(self) -> Decimal: ... def __complex__(self) -> complex: ... @overload def __round__(self) -> int: ... @@ -340,355 +122,53 @@ class Decimal: def __round__(self, ndigits: int, /) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... - def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: - """Fused multiply-add. Return self*other+third with no rounding of the - intermediate product self*other. 
- - >>> Decimal(2).fma(3, 5) - Decimal('11') - - - """ - - def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: - """Return pow(value, self, mod).""" - - def normalize(self, context: Context | None = None) -> Decimal: - """Normalize the number by stripping the rightmost trailing zeros and - converting any result equal to Decimal('0') to Decimal('0e0'). Used - for producing canonical values for members of an equivalence class. - For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize - to the equivalent value Decimal('32.1'). - - """ - - def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: - """Return a value equal to the first operand after rounding and having the - exponent of the second operand. - - >>> Decimal('1.41421356').quantize(Decimal('1.000')) - Decimal('1.414') - - Unlike other operations, if the length of the coefficient after the quantize - operation would be greater than precision, then an InvalidOperation is signaled. - This guarantees that, unless there is an error condition, the quantized exponent - is always equal to that of the right-hand operand. - - Also unlike other operations, quantize never signals Underflow, even if the - result is subnormal and inexact. - - If the exponent of the second operand is larger than that of the first, then - rounding may be necessary. In this case, the rounding mode is determined by the - rounding argument if given, else by the given context argument; if neither - argument is given, the rounding mode of the current thread's context is used. - - """ - - def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: - """Test whether self and other have the same exponent or whether both are NaN. - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - - """ - - def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """Round to the nearest integer, signaling Inexact or Rounded as appropriate if - rounding occurs. The rounding mode is determined by the rounding parameter - if given, else by the given context. If neither parameter is given, then the - rounding mode of the current default context is used. - - """ - - def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """Round to the nearest integer without signaling Inexact or Rounded. The - rounding mode is determined by the rounding parameter if given, else by - the given context. If neither parameter is given, then the rounding mode - of the current default context is used. - - """ - - def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: - """Identical to the to_integral_value() method. The to_integral() name has been - kept for compatibility with older versions. - - """ - - def sqrt(self, context: Context | None = None) -> Decimal: - """Return the square root of the argument to full precision. The result is - correctly rounded using the ROUND_HALF_EVEN rounding mode. - - """ - - def max(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Maximum of self and other. If one operand is a quiet NaN and the other is - numeric, the numeric operand is returned. 
- - """ - - def min(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Minimum of self and other. If one operand is a quiet NaN and the other is - numeric, the numeric operand is returned. - - """ - - def adjusted(self) -> int: - """Return the adjusted exponent of the number. Defined as exp + digits - 1.""" - - def canonical(self) -> Decimal: - """Return the canonical encoding of the argument. Currently, the encoding - of a Decimal instance is always canonical, so this operation returns its - argument unchanged. - - """ - - def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Identical to compare, except that all NaNs signal.""" - - def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Compare two operands using their abstract representation rather than - their numerical value. Similar to the compare() method, but the result - gives a total ordering on Decimal instances. Two Decimal instances with - the same numeric value but different representations compare unequal - in this ordering: - - >>> Decimal('12.0').compare_total(Decimal('12')) - Decimal('-1') - - Quiet and signaling NaNs are also included in the total ordering. The result - of this function is Decimal('0') if both operands have the same representation, - Decimal('-1') if the first operand is lower in the total order than the second, - and Decimal('1') if the first operand is higher in the total order than the - second operand. See the specification for details of the total order. - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - - """ - - def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Compare two operands using their abstract representation rather than their - value as in compare_total(), but ignoring the sign of each operand. - - x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - - """ - - def copy_abs(self) -> Decimal: - """Return the absolute value of the argument. This operation is unaffected by - context and is quiet: no flags are changed and no rounding is performed. - - """ - - def copy_negate(self) -> Decimal: - """Return the negation of the argument. This operation is unaffected by context - and is quiet: no flags are changed and no rounding is performed. - - """ - - def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return a copy of the first operand with the sign set to be the same as the - sign of the second operand. For example: - - >>> Decimal('2.3').copy_sign(Decimal('-1.5')) - Decimal('-2.3') - - This operation is unaffected by context and is quiet: no flags are changed - and no rounding is performed. As an exception, the C version may raise - InvalidOperation if the second operand cannot be converted exactly. - - """ - - def exp(self, context: Context | None = None) -> Decimal: - """Return the value of the (natural) exponential function e**x at the given - number. The function always uses the ROUND_HALF_EVEN mode and the result - is correctly rounded. 
- - """ - - def is_canonical(self) -> bool: - """Return True if the argument is canonical and False otherwise. Currently, - a Decimal instance is always canonical, so this operation always returns - True. - - """ - - def is_finite(self) -> bool: - """Return True if the argument is a finite number, and False if the argument - is infinite or a NaN. - - """ - - def is_infinite(self) -> bool: - """Return True if the argument is either positive or negative infinity and - False otherwise. - - """ - - def is_nan(self) -> bool: - """Return True if the argument is a (quiet or signaling) NaN and False - otherwise. - - """ - - def is_normal(self, context: Context | None = None) -> bool: - """Return True if the argument is a normal finite non-zero number with an - adjusted exponent greater than or equal to Emin. Return False if the - argument is zero, subnormal, infinite or a NaN. - - """ - - def is_qnan(self) -> bool: - """Return True if the argument is a quiet NaN, and False otherwise.""" - - def is_signed(self) -> bool: - """Return True if the argument has a negative sign and False otherwise. - Note that both zeros and NaNs can carry signs. - - """ - - def is_snan(self) -> bool: - """Return True if the argument is a signaling NaN and False otherwise.""" - - def is_subnormal(self, context: Context | None = None) -> bool: - """Return True if the argument is subnormal, and False otherwise. A number is - subnormal if it is non-zero, finite, and has an adjusted exponent less - than Emin. - - """ - - def is_zero(self) -> bool: - """Return True if the argument is a (positive or negative) zero and False - otherwise. - - """ - - def ln(self, context: Context | None = None) -> Decimal: - """Return the natural (base e) logarithm of the operand. The function always - uses the ROUND_HALF_EVEN mode and the result is correctly rounded. - - """ - - def log10(self, context: Context | None = None) -> Decimal: - """Return the base ten logarithm of the operand. The function always uses the - ROUND_HALF_EVEN mode and the result is correctly rounded. - - """ - - def logb(self, context: Context | None = None) -> Decimal: - """For a non-zero number, return the adjusted exponent of the operand as a - Decimal instance. If the operand is a zero, then Decimal('-Infinity') is - returned and the DivisionByZero condition is raised. If the operand is - an infinity then Decimal('Infinity') is returned. - - """ - - def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'and' of the two (logical) operands.""" - - def logical_invert(self, context: Context | None = None) -> Decimal: - """Return the digit-wise inversion of the (logical) operand.""" - - def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'or' of the two (logical) operands.""" - - def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'exclusive or' of the two (logical) operands.""" - - def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Similar to the max() method, but the comparison is done using the absolute - values of the operands. - - """ - - def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Similar to the min() method, but the comparison is done using the absolute - values of the operands. 
- - """ - - def next_minus(self, context: Context | None = None) -> Decimal: - """Return the largest number representable in the given context (or in the - current default context if no context is given) that is smaller than the - given operand. - - """ - - def next_plus(self, context: Context | None = None) -> Decimal: - """Return the smallest number representable in the given context (or in the - current default context if no context is given) that is larger than the - given operand. - - """ - - def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: - """If the two operands are unequal, return the number closest to the first - operand in the direction of the second operand. If both operands are - numerically equal, return a copy of the first operand with the sign set - to be the same as the sign of the second operand. - - """ - - def number_class(self, context: Context | None = None) -> str: - """Return a string describing the class of the operand. The returned value - is one of the following ten strings: - - * '-Infinity', indicating that the operand is negative infinity. - * '-Normal', indicating that the operand is a negative normal number. - * '-Subnormal', indicating that the operand is negative and subnormal. - * '-Zero', indicating that the operand is a negative zero. - * '+Zero', indicating that the operand is a positive zero. - * '+Subnormal', indicating that the operand is positive and subnormal. - * '+Normal', indicating that the operand is a positive normal number. - * '+Infinity', indicating that the operand is positive infinity. - * 'NaN', indicating that the operand is a quiet NaN (Not a Number). - * 'sNaN', indicating that the operand is a signaling NaN. - - - """ - - def radix(self) -> Decimal: - """Return Decimal(10), the radix (base) in which the Decimal class does - all its arithmetic. Included for compatibility with the specification. - - """ - - def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the result of rotating the digits of the first operand by an amount - specified by the second operand. The second operand must be an integer in - the range -precision through precision. The absolute value of the second - operand gives the number of places to rotate. If the second operand is - positive then rotation is to the left; otherwise rotation is to the right. - The coefficient of the first operand is padded on the left with zeros to - length precision if necessary. The sign and exponent of the first operand are - unchanged. - - """ - - def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the first operand with the exponent adjusted the second. Equivalently, - return the first operand multiplied by 10**other. The second operand must be - an integer. - - """ - - def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the result of shifting the digits of the first operand by an amount - specified by the second operand. The second operand must be an integer in - the range -precision through precision. The absolute value of the second - operand gives the number of places to shift. If the second operand is - positive, then the shift is to the left; otherwise the shift is to the - right. Digits shifted into the coefficient are zeros. The sign and exponent - of the first operand are unchanged. - - """ - + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... 
+ def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def adjusted(self) -> int: ... + def canonical(self) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def copy_abs(self) -> Decimal: ... + def copy_negate(self) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... + def is_canonical(self) -> bool: ... + def is_finite(self) -> bool: ... + def is_infinite(self) -> bool: ... + def is_nan(self) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... + def is_qnan(self) -> bool: ... + def is_signed(self) -> bool: ... + def is_snan(self) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... + def is_zero(self) -> bool: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... + def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... + def radix(self) -> Decimal: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any, /) -> Self: ... @@ -696,19 +176,6 @@ class Decimal: @disjoint_base class Context: - """The context affects almost all operations and controls rounding, - Over/Underflow, raising of exceptions and much more. 
A new context - can be constructed as follows: - - >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, - ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, - ... traps=[InvalidOperation, DivisionByZero, Overflow], - ... flags=[]) - >>> - - - """ - # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, # even settable attributes like `prec` and `rounding`, # but that's inexpressible in the stub. @@ -734,249 +201,74 @@ class Context: traps: dict[_TrapType, bool] | Container[_TrapType] | None = None, ) -> None: ... def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... - def clear_flags(self) -> None: - """Reset all flags to False.""" - - def clear_traps(self) -> None: - """Set all traps to False.""" - - def copy(self) -> Context: - """Return a duplicate of the context with all flags cleared.""" - + def clear_flags(self) -> None: ... + def clear_traps(self) -> None: ... + def copy(self) -> Context: ... def __copy__(self) -> Context: ... # see https://github.com/python/cpython/issues/94107 __hash__: ClassVar[None] # type: ignore[assignment] - def Etiny(self) -> int: - """Return a value equal to Emin - prec + 1, which is the minimum exponent value - for subnormal results. When underflow occurs, the exponent is set to Etiny. - - """ - - def Etop(self) -> int: - """Return a value equal to Emax - prec + 1. This is the maximum exponent - if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() - must not be negative. - - """ - - def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: - """Create a new Decimal instance from num, using self as the context. Unlike the - Decimal constructor, this function observes the context limits. - - """ - - def create_decimal_from_float(self, f: float, /) -> Decimal: - """Create a new Decimal instance from float f. Unlike the Decimal.from_float() - class method, this function observes the context limits. - - """ - - def abs(self, x: _Decimal, /) -> Decimal: - """Return the absolute value of x.""" - - def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the sum of x and y.""" - - def canonical(self, x: Decimal, /) -> Decimal: - """Return a new instance of x.""" - - def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically.""" - - def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically. 
All NaNs signal.""" - - def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation.""" - - def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation, ignoring sign.""" - - def copy_abs(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign set to 0.""" - - def copy_decimal(self, x: _Decimal, /) -> Decimal: - """Return a copy of Decimal x.""" - - def copy_negate(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign inverted.""" - - def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Copy the sign from y to x.""" - - def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y.""" - - def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y, truncated to an integer.""" - - def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return quotient and remainder of the division x / y.""" - - def exp(self, x: _Decimal, /) -> Decimal: - """Return e ** x.""" - - def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: - """Return x multiplied by y, plus z.""" - - def is_canonical(self, x: _Decimal, /) -> bool: - """Return True if x is canonical, False otherwise.""" - - def is_finite(self, x: _Decimal, /) -> bool: - """Return True if x is finite, False otherwise.""" - - def is_infinite(self, x: _Decimal, /) -> bool: - """Return True if x is infinite, False otherwise.""" - - def is_nan(self, x: _Decimal, /) -> bool: - """Return True if x is a qNaN or sNaN, False otherwise.""" - - def is_normal(self, x: _Decimal, /) -> bool: - """Return True if x is a normal number, False otherwise.""" - - def is_qnan(self, x: _Decimal, /) -> bool: - """Return True if x is a quiet NaN, False otherwise.""" - - def is_signed(self, x: _Decimal, /) -> bool: - """Return True if x is negative, False otherwise.""" - - def is_snan(self, x: _Decimal, /) -> bool: - """Return True if x is a signaling NaN, False otherwise.""" - - def is_subnormal(self, x: _Decimal, /) -> bool: - """Return True if x is subnormal, False otherwise.""" - - def is_zero(self, x: _Decimal, /) -> bool: - """Return True if x is a zero, False otherwise.""" - - def ln(self, x: _Decimal, /) -> Decimal: - """Return the natural (base e) logarithm of x.""" - - def log10(self, x: _Decimal, /) -> Decimal: - """Return the base 10 logarithm of x.""" - - def logb(self, x: _Decimal, /) -> Decimal: - """Return the exponent of the magnitude of the operand's MSD.""" - - def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise and of x and y.""" - - def logical_invert(self, x: _Decimal, /) -> Decimal: - """Invert all digits of x.""" - - def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise or of x and y.""" - - def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise xor of x and y.""" - - def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the maximum.""" - - def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored.""" - - def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the minimum.""" - - def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored.""" - - def minus(self, x: _Decimal, /) -> Decimal: - """Minus 
corresponds to the unary prefix minus operator in Python, but applies - the context to the result. - - """ - - def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the product of x and y.""" - - def next_minus(self, x: _Decimal, /) -> Decimal: - """Return the largest representable number smaller than x.""" - - def next_plus(self, x: _Decimal, /) -> Decimal: - """Return the smallest representable number larger than x.""" - - def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the number closest to x, in the direction towards y.""" - - def normalize(self, x: _Decimal, /) -> Decimal: - """Reduce x to its simplest form. Alias for reduce(x).""" - - def number_class(self, x: _Decimal, /) -> str: - """Return an indication of the class of x.""" - - def plus(self, x: _Decimal, /) -> Decimal: - """Plus corresponds to the unary prefix plus operator in Python, but applies - the context to the result. - - """ - - def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: - """Compute a**b. If 'a' is negative, then 'b' must be integral. The result - will be inexact unless 'a' is integral and the result is finite and can - be expressed exactly in 'precision' digits. In the Python version the - result is always correctly rounded, in the C version the result is almost - always correctly rounded. - - If modulo is given, compute (a**b) % modulo. The following restrictions - hold: - - * all three arguments must be integral - * 'b' must be nonnegative - * at least one of 'a' or 'b' must be nonzero - * modulo must be nonzero and less than 10**prec in absolute value - - - """ - - def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a value equal to x (rounded), having the exponent of y.""" - - def radix(self) -> Decimal: - """Return 10.""" - - def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the remainder from integer division. The sign of the result, - if non-zero, is the same as that of the original dividend. - - """ - - def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x - y * n, where n is the integer nearest the exact value of x / y - (if the result is 0 then its sign will be the sign of x). - - """ - - def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, rotated by y places.""" - - def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: - """Return True if the two operands have the same exponent.""" - - def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the first operand after adding the second value to its exp.""" - - def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, shifted by y places.""" - - def sqrt(self, x: _Decimal, /) -> Decimal: - """Square root of a non-negative number to context precision.""" - - def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the difference between x and y.""" - - def to_eng_string(self, x: _Decimal, /) -> str: - """Convert a number to a string, using engineering notation.""" - - def to_sci_string(self, x: _Decimal, /) -> str: - """Convert a number to a string using scientific notation.""" - - def to_integral_exact(self, x: _Decimal, /) -> Decimal: - """Round to an integer. Signal if the result is rounded or inexact.""" - - def to_integral_value(self, x: _Decimal, /) -> Decimal: - """Round to an integer.""" - - def to_integral(self, x: _Decimal, /) -> Decimal: - """Identical to to_integral_value(x).""" + def Etiny(self) -> int: ... 
+ def Etop(self) -> int: ... + def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ... + def create_decimal_from_float(self, f: float, /) -> Decimal: ... + def abs(self, x: _Decimal, /) -> Decimal: ... + def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def canonical(self, x: Decimal, /) -> Decimal: ... + def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def copy_abs(self, x: _Decimal, /) -> Decimal: ... + def copy_decimal(self, x: _Decimal, /) -> Decimal: ... + def copy_negate(self, x: _Decimal, /) -> Decimal: ... + def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def exp(self, x: _Decimal, /) -> Decimal: ... + def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ... + def is_canonical(self, x: _Decimal, /) -> bool: ... + def is_finite(self, x: _Decimal, /) -> bool: ... + def is_infinite(self, x: _Decimal, /) -> bool: ... + def is_nan(self, x: _Decimal, /) -> bool: ... + def is_normal(self, x: _Decimal, /) -> bool: ... + def is_qnan(self, x: _Decimal, /) -> bool: ... + def is_signed(self, x: _Decimal, /) -> bool: ... + def is_snan(self, x: _Decimal, /) -> bool: ... + def is_subnormal(self, x: _Decimal, /) -> bool: ... + def is_zero(self, x: _Decimal, /) -> bool: ... + def ln(self, x: _Decimal, /) -> Decimal: ... + def log10(self, x: _Decimal, /) -> Decimal: ... + def logb(self, x: _Decimal, /) -> Decimal: ... + def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def logical_invert(self, x: _Decimal, /) -> Decimal: ... + def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def minus(self, x: _Decimal, /) -> Decimal: ... + def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def next_minus(self, x: _Decimal, /) -> Decimal: ... + def next_plus(self, x: _Decimal, /) -> Decimal: ... + def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def normalize(self, x: _Decimal, /) -> Decimal: ... + def number_class(self, x: _Decimal, /) -> str: ... + def plus(self, x: _Decimal, /) -> Decimal: ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... + def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def radix(self) -> Decimal: ... + def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ... + def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... + def sqrt(self, x: _Decimal, /) -> Decimal: ... + def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... 
+ def to_eng_string(self, x: _Decimal, /) -> str: ... + def to_sci_string(self, x: _Decimal, /) -> str: ... + def to_integral_exact(self, x: _Decimal, /) -> Decimal: ... + def to_integral_value(self, x: _Decimal, /) -> Decimal: ... + def to_integral(self, x: _Decimal, /) -> Decimal: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi index 7f05a7996e58b..6efe68322bb65 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi @@ -1,31 +1,3 @@ -""" -Module difflib -- helpers for computing deltas between objects. - -Function get_close_matches(word, possibilities, n=3, cutoff=0.6): - Use SequenceMatcher to return list of the best "good enough" matches. - -Function context_diff(a, b): - For two lists of strings, return a delta in context diff format. - -Function ndiff(a, b): - Return a delta: the difference between `a` and `b` (lists of strings). - -Function restore(delta, which): - Return one of the two sequences that generated an ndiff delta. - -Function unified_diff(a, b): - For two lists of strings, return a delta in unified diff format. - -Class SequenceMatcher: - A flexible class for comparing pairs of sequences of any type. - -Class Differ: - For producing human-readable deltas from sequences of lines of text. - -Class HtmlDiff: - For producing HTML side by side comparison with change highlights. -""" - import re import sys from collections.abc import Callable, Iterable, Iterator, Sequence @@ -50,112 +22,13 @@ __all__ = [ _T = TypeVar("_T") class Match(NamedTuple): - """Match(a, b, size)""" - a: int b: int size: int class SequenceMatcher(Generic[_T]): - """ - SequenceMatcher is a flexible class for comparing pairs of sequences of - any type, so long as the sequence elements are hashable. The basic - algorithm predates, and is a little fancier than, an algorithm - published in the late 1980's by Ratcliff and Obershelp under the - hyperbolic name "gestalt pattern matching". The basic idea is to find - the longest contiguous matching subsequence that contains no "junk" - elements (R-O doesn't address junk). The same idea is then applied - recursively to the pieces of the sequences to the left and to the right - of the matching subsequence. This does not yield minimal edit - sequences, but does tend to yield matches that "look right" to people. - - SequenceMatcher tries to compute a "human-friendly diff" between two - sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the - longest *contiguous* & junk-free matching subsequence. That's what - catches peoples' eyes. The Windows(tm) windiff has another interesting - notion, pairing up elements that appear uniquely in each sequence. - That, and the method here, appear to yield more intuitive difference - reports than does diff. This method appears to be the least vulnerable - to syncing up on blocks of "junk lines", though (like blank lines in - ordinary text files, or maybe "
<P>
" lines in HTML files). That may be - because this is the only method of the 3 that has a *concept* of - "junk" . - - Example, comparing two strings, and considering blanks to be "junk": - - >>> s = SequenceMatcher(lambda x: x == " ", - ... "private Thread currentThread;", - ... "private volatile Thread currentThread;") - >>> - - .ratio() returns a float in [0, 1], measuring the "similarity" of the - sequences. As a rule of thumb, a .ratio() value over 0.6 means the - sequences are close matches: - - >>> print(round(s.ratio(), 2)) - 0.87 - >>> - - If you're only interested in where the sequences match, - .get_matching_blocks() is handy: - - >>> for block in s.get_matching_blocks(): - ... print("a[%d] and b[%d] match for %d elements" % block) - a[0] and b[0] match for 8 elements - a[8] and b[17] match for 21 elements - a[29] and b[38] match for 0 elements - - Note that the last tuple returned by .get_matching_blocks() is always a - dummy, (len(a), len(b), 0), and this is the only case in which the last - tuple element (number of elements matched) is 0. - - If you want to know how to change the first sequence into the second, - use .get_opcodes(): - - >>> for opcode in s.get_opcodes(): - ... print("%6s a[%d:%d] b[%d:%d]" % opcode) - equal a[0:8] b[0:8] - insert a[8:8] b[8:17] - equal a[8:29] b[17:38] - - See the Differ class for a fancy human-friendly file differencer, which - uses SequenceMatcher both to compare sequences of lines, and to compare - sequences of characters within similar (near-matching) lines. - - See also function get_close_matches() in this module, which shows how - simple code building on SequenceMatcher can be used to do useful work. - - Timing: Basic R-O is cubic time worst case and quadratic time expected - case. SequenceMatcher is quadratic time for the worst case and has - expected-case behavior dependent in a complicated way on how many - elements the sequences have in common; best case time is linear. - """ - @overload - def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: - """Construct a SequenceMatcher. - - Optional arg isjunk is None (the default), or a one-argument - function that takes a sequence element and returns true iff the - element is junk. None is equivalent to passing "lambda x: 0", i.e. - no elements are considered to be junk. For example, pass - lambda x: x in " \\t" - if you're comparing lines as sequences of characters, and don't - want to synch up on blanks or hard tabs. - - Optional arg a is the first of two sequences to be compared. By - default, an empty string. The elements of a must be hashable. See - also .set_seqs() and .set_seq1(). - - Optional arg b is the second of two sequences to be compared. By - default, an empty string. The elements of b must be hashable. See - also .set_seqs() and .set_seq2(). - - Optional arg autojunk should be set to False to disable the - "automatic junk heuristic" that treats popular elements as junk - (see module documentation for more information). - """ - + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload @@ -166,431 +39,36 @@ class SequenceMatcher(Generic[_T]): b: Sequence[str] = "", autojunk: bool = True, ) -> None: ... - def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: - """Set the two sequences to be compared. 
- - >>> s = SequenceMatcher() - >>> s.set_seqs("abcd", "bcde") - >>> s.ratio() - 0.75 - """ - - def set_seq1(self, a: Sequence[_T]) -> None: - """Set the first sequence to be compared. - - The second sequence to be compared is not changed. - - >>> s = SequenceMatcher(None, "abcd", "bcde") - >>> s.ratio() - 0.75 - >>> s.set_seq1("bcde") - >>> s.ratio() - 1.0 - >>> - - SequenceMatcher computes and caches detailed information about the - second sequence, so if you want to compare one sequence S against - many sequences, use .set_seq2(S) once and call .set_seq1(x) - repeatedly for each of the other sequences. - - See also set_seqs() and set_seq2(). - """ - - def set_seq2(self, b: Sequence[_T]) -> None: - """Set the second sequence to be compared. - - The first sequence to be compared is not changed. - - >>> s = SequenceMatcher(None, "abcd", "bcde") - >>> s.ratio() - 0.75 - >>> s.set_seq2("abcd") - >>> s.ratio() - 1.0 - >>> - - SequenceMatcher computes and caches detailed information about the - second sequence, so if you want to compare one sequence S against - many sequences, use .set_seq2(S) once and call .set_seq1(x) - repeatedly for each of the other sequences. - - See also set_seqs() and set_seq1(). - """ - - def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: - """Find longest matching block in a[alo:ahi] and b[blo:bhi]. - - By default it will find the longest match in the entirety of a and b. - - If isjunk is not defined: - - Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where - alo <= i <= i+k <= ahi - blo <= j <= j+k <= bhi - and for all (i',j',k') meeting those conditions, - k >= k' - i <= i' - and if i == i', j <= j' - - In other words, of all maximal matching blocks, return one that - starts earliest in a, and of all those maximal matching blocks that - start earliest in a, return the one that starts earliest in b. - - >>> s = SequenceMatcher(None, " abcd", "abcd abcd") - >>> s.find_longest_match(0, 5, 0, 9) - Match(a=0, b=4, size=5) - - If isjunk is defined, first the longest matching block is - determined as above, but with the additional restriction that no - junk element appears in the block. Then that block is extended as - far as possible by matching (only) junk elements on both sides. So - the resulting block never matches on junk except as identical junk - happens to be adjacent to an "interesting" match. - - Here's the same example as before, but considering blanks to be - junk. That prevents " abcd" from matching the " abcd" at the tail - end of the second sequence directly. Instead only the "abcd" can - match, and matches the leftmost "abcd" in the second sequence: - - >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd") - >>> s.find_longest_match(0, 5, 0, 9) - Match(a=1, b=0, size=4) - - If no blocks match, return (alo, blo, 0). - - >>> s = SequenceMatcher(None, "ab", "c") - >>> s.find_longest_match(0, 2, 0, 1) - Match(a=0, b=0, size=0) - """ - - def get_matching_blocks(self) -> list[Match]: - """Return list of triples describing matching subsequences. - - Each triple is of the form (i, j, n), and means that - a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in - i and in j. New in Python 2.5, it's also guaranteed that if - (i, j, n) and (i', j', n') are adjacent triples in the list, and - the second is not the last triple in the list, then i+n != i' or - j+n != j'. IOW, adjacent triples never describe adjacent equal - blocks. 
- - The last triple is a dummy, (len(a), len(b), 0), and is the only - triple with n==0. - - >>> s = SequenceMatcher(None, "abxcd", "abcd") - >>> list(s.get_matching_blocks()) - [Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)] - """ - - def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: - """Return list of 5-tuples describing how to turn a into b. - - Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple - has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the - tuple preceding it, and likewise for j1 == the previous j2. - - The tags are strings, with these meanings: - - 'replace': a[i1:i2] should be replaced by b[j1:j2] - 'delete': a[i1:i2] should be deleted. - Note that j1==j2 in this case. - 'insert': b[j1:j2] should be inserted at a[i1:i1]. - Note that i1==i2 in this case. - 'equal': a[i1:i2] == b[j1:j2] - - >>> a = "qabxcd" - >>> b = "abycdf" - >>> s = SequenceMatcher(None, a, b) - >>> for tag, i1, i2, j1, j2 in s.get_opcodes(): - ... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % - ... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) - delete a[0:1] (q) b[0:0] () - equal a[1:3] (ab) b[0:2] (ab) - replace a[3:4] (x) b[2:3] (y) - equal a[4:6] (cd) b[3:5] (cd) - insert a[6:6] () b[5:6] (f) - """ - - def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: - """Isolate change clusters by eliminating ranges with no changes. - - Return a generator of groups with up to n lines of context. - Each group is in the same format as returned by get_opcodes(). - - >>> from pprint import pprint - >>> a = list(map(str, range(1,40))) - >>> b = a[:] - >>> b[8:8] = ['i'] # Make an insertion - >>> b[20] += 'x' # Make a replacement - >>> b[23:28] = [] # Make a deletion - >>> b[30] += 'y' # Make another replacement - >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes())) - [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)], - [('equal', 16, 19, 17, 20), - ('replace', 19, 20, 20, 21), - ('equal', 20, 22, 21, 23), - ('delete', 22, 27, 23, 23), - ('equal', 27, 30, 23, 26)], - [('equal', 31, 34, 27, 30), - ('replace', 34, 35, 30, 31), - ('equal', 35, 38, 31, 34)]] - """ - - def ratio(self) -> float: - """Return a measure of the sequences' similarity (float in [0,1]). - - Where T is the total number of elements in both sequences, and - M is the number of matches, this is 2.0*M / T. - Note that this is 1 if the sequences are identical, and 0 if - they have nothing in common. - - .ratio() is expensive to compute if you haven't already computed - .get_matching_blocks() or .get_opcodes(), in which case you may - want to try .quick_ratio() or .real_quick_ratio() first to get an - upper bound. - - >>> s = SequenceMatcher(None, "abcd", "bcde") - >>> s.ratio() - 0.75 - >>> s.quick_ratio() - 0.75 - >>> s.real_quick_ratio() - 1.0 - """ - - def quick_ratio(self) -> float: - """Return an upper bound on ratio() relatively quickly. - - This isn't defined beyond that it is an upper bound on .ratio(), and - is faster to compute. - """ - - def real_quick_ratio(self) -> float: - """Return an upper bound on ratio() very quickly. - - This isn't defined beyond that it is an upper bound on .ratio(), and - is faster to compute than either .ratio() or .quick_ratio(). - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). 
- """ + def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... + def set_seq1(self, a: Sequence[_T]) -> None: ... + def set_seq2(self, b: Sequence[_T]) -> None: ... + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... + def get_matching_blocks(self) -> list[Match]: ... + def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: ... + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... + def ratio(self) -> float: ... + def quick_ratio(self) -> float: ... + def real_quick_ratio(self) -> float: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @overload -def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: - """Use SequenceMatcher to return list of the best "good enough" matches. - - word is a sequence for which close matches are desired (typically a - string). - - possibilities is a list of sequences against which to match word - (typically a list of strings). - - Optional arg n (default 3) is the maximum number of close matches to - return. n must be > 0. - - Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities - that don't score at least that similar to word are ignored. - - The best (no more than n) matches among the possibilities are returned - in a list, sorted by similarity score, most similar first. - - >>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"]) - ['apple', 'ape'] - >>> import keyword as _keyword - >>> get_close_matches("wheel", _keyword.kwlist) - ['while'] - >>> get_close_matches("Apple", _keyword.kwlist) - [] - >>> get_close_matches("accept", _keyword.kwlist) - ['except'] - """ - +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... @overload def get_close_matches( word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 ) -> list[Sequence[_T]]: ... class Differ: - """ - Differ is a class for comparing sequences of lines of text, and - producing human-readable differences or deltas. Differ uses - SequenceMatcher both to compare sequences of lines, and to compare - sequences of characters within similar (near-matching) lines. - - Each line of a Differ delta begins with a two-letter code: - - '- ' line unique to sequence 1 - '+ ' line unique to sequence 2 - ' ' line common to both sequences - '? ' line not present in either input sequence - - Lines beginning with '? ' attempt to guide the eye to intraline - differences, and were not present in either input sequence. These lines - can be confusing if the sequences contain tab characters. - - Note that Differ makes no claim to produce a *minimal* diff. To the - contrary, minimal diffs are often counter-intuitive, because they synch - up anywhere possible, sometimes accidental matches 100 pages apart. - Restricting synch points to contiguous matches preserves some notion of - locality, at the occasional cost of producing a longer diff. - - Example: Comparing two texts. - - First we set up the texts, sequences of individual single-line strings - ending with newlines (such sequences can also be obtained from the - `readlines()` method of file-like objects): - - >>> text1 = ''' 1. Beautiful is better than ugly. - ... 2. Explicit is better than implicit. - ... 3. Simple is better than complex. - ... 4. Complex is better than complicated. - ... 
'''.splitlines(keepends=True) - >>> len(text1) - 4 - >>> text1[0][-1] - '\\n' - >>> text2 = ''' 1. Beautiful is better than ugly. - ... 3. Simple is better than complex. - ... 4. Complicated is better than complex. - ... 5. Flat is better than nested. - ... '''.splitlines(keepends=True) - - Next we instantiate a Differ object: - - >>> d = Differ() - - Note that when instantiating a Differ object we may pass functions to - filter out line and character 'junk'. See Differ.__init__ for details. - - Finally, we compare the two: - - >>> result = list(d.compare(text1, text2)) - - 'result' is a list of strings, so let's pretty-print it: - - >>> from pprint import pprint as _pprint - >>> _pprint(result) - [' 1. Beautiful is better than ugly.\\n', - '- 2. Explicit is better than implicit.\\n', - '- 3. Simple is better than complex.\\n', - '+ 3. Simple is better than complex.\\n', - '? ++\\n', - '- 4. Complex is better than complicated.\\n', - '? ^ ---- ^\\n', - '+ 4. Complicated is better than complex.\\n', - '? ++++ ^ ^\\n', - '+ 5. Flat is better than nested.\\n'] - - As a single multi-line string it looks like this: - - >>> print(''.join(result), end="") - 1. Beautiful is better than ugly. - - 2. Explicit is better than implicit. - - 3. Simple is better than complex. - + 3. Simple is better than complex. - ? ++ - - 4. Complex is better than complicated. - ? ^ ---- ^ - + 4. Complicated is better than complex. - ? ++++ ^ ^ - + 5. Flat is better than nested. - """ - - def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: - """ - Construct a text differencer, with optional filters. - - The two optional keyword parameters are for filter functions: - - - `linejunk`: A function that should accept a single string argument, - and return true iff the string is junk. The module-level function - `IS_LINE_JUNK` may be used to filter out lines without visible - characters, except for at most one splat ('#'). It is recommended - to leave linejunk None; the underlying SequenceMatcher class has - an adaptive notion of "noise" lines that's better than any static - definition the author has ever been able to craft. - - - `charjunk`: A function that should accept a string of length 1. The - module-level function `IS_CHARACTER_JUNK` may be used to filter out - whitespace characters (a blank or tab; **note**: bad idea to include - newline in this!). Use of IS_CHARACTER_JUNK is recommended. - """ - - def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: - """ - Compare two sequences of lines; generate the resulting delta. - - Each sequence must contain individual single-line strings ending with - newlines. Such sequences can be obtained from the `readlines()` method - of file-like objects. The delta generated also consists of newline- - terminated strings, ready to be printed as-is via the writelines() - method of a file-like object. - - Example: - - >>> print(''.join(Differ().compare('one\\ntwo\\nthree\\n'.splitlines(True), - ... 'ore\\ntree\\nemu\\n'.splitlines(True))), - ... end="") - - one - ? ^ - + ore - ? ^ - - two - - three - ? - - + tree - + emu - """ + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... + def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... 
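The docstrings stripped above document difflib's SequenceMatcher, get_close_matches and Differ. As a compact reminder of how those signatures are used, here is a plain-stdlib sketch (reference only, not part of the stub changes; the expected values are the ones quoted in the removed docstrings):

from difflib import Differ, SequenceMatcher, get_close_matches

a = "private Thread currentThread;"
b = "private volatile Thread currentThread;"

# isjunk=None means no element is treated as junk; ratio() is 2*M/T in [0, 1].
sm = SequenceMatcher(None, a, b)
print(round(sm.ratio(), 2))                      # 0.87 for these two lines
for tag, i1, i2, j1, j2 in sm.get_opcodes():     # tags: replace/delete/insert/equal
    print(tag, repr(a[i1:i2]), "->", repr(b[j1:j2]))

print(get_close_matches("appel", ["ape", "apple", "peach", "puppy"]))  # ['apple', 'ape']

# Differ works on sequences of newline-terminated lines.
delta = Differ().compare(["one\n", "two\n"], ["one\n", "too\n"])
print("".join(delta), end="")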
if sys.version_info >= (3, 14): - def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: - """ - Return True for ignorable line: if `line` is blank or contains a single '#'. - - Examples: - - >>> IS_LINE_JUNK('\\n') - True - >>> IS_LINE_JUNK(' # \\n') - True - >>> IS_LINE_JUNK('hello\\n') - False - """ + def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: ... else: - def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: - """ - Return True for ignorable line: iff `line` is blank or contains a single '#'. - - Examples: - - >>> IS_LINE_JUNK('\\n') - True - >>> IS_LINE_JUNK(' # \\n') - True - >>> IS_LINE_JUNK('hello\\n') - False - """ - -def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: # ws is undocumented - """ - Return True for ignorable character: iff `ch` is a space or tab. - - Examples: - - >>> IS_CHARACTER_JUNK(' ') - True - >>> IS_CHARACTER_JUNK('\\t') - True - >>> IS_CHARACTER_JUNK('\\n') - False - >>> IS_CHARACTER_JUNK('x') - False - """ + def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: ... +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... # ws is undocumented def unified_diff( a: Sequence[str], b: Sequence[str], @@ -600,46 +78,7 @@ def unified_diff( tofiledate: str = "", n: int = 3, lineterm: str = "\n", -) -> Iterator[str]: - """ - Compare two sequences of lines; generate the delta as a unified diff. - - Unified diffs are a compact way of showing line changes and a few - lines of context. The number of context lines is set by 'n' which - defaults to three. - - By default, the diff control lines (those with ---, +++, or @@) are - created with a trailing newline. This is helpful so that inputs - created from file.readlines() result in diffs that are suitable for - file.writelines() since both the inputs and outputs have trailing - newlines. - - For inputs that do not have trailing newlines, set the lineterm - argument to "" so that the output will be uniformly newline free. - - The unidiff format normally has a header for filenames and modification - times. Any or all of these may be specified using strings for - 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. - The modification times are normally expressed in the ISO 8601 format. - - Example: - - >>> for line in unified_diff('one two three four'.split(), - ... 'zero one tree four'.split(), 'Original', 'Current', - ... '2005-01-26 23:30:50', '2010-04-02 10:20:52', - ... lineterm=''): - ... print(line) # doctest: +NORMALIZE_WHITESPACE - --- Original 2005-01-26 23:30:50 - +++ Current 2010-04-02 10:20:52 - @@ -1,4 +1,4 @@ - +zero - one - -two - -three - +tree - four - """ - +) -> Iterator[str]: ... def context_diff( a: Sequence[str], b: Sequence[str], @@ -649,124 +88,22 @@ def context_diff( tofiledate: str = "", n: int = 3, lineterm: str = "\n", -) -> Iterator[str]: - """ - Compare two sequences of lines; generate the delta as a context diff. - - Context diffs are a compact way of showing line changes and a few - lines of context. The number of context lines is set by 'n' which - defaults to three. - - By default, the diff control lines (those with *** or ---) are - created with a trailing newline. This is helpful so that inputs - created from file.readlines() result in diffs that are suitable for - file.writelines() since both the inputs and outputs have trailing - newlines. 
- - For inputs that do not have trailing newlines, set the lineterm - argument to "" so that the output will be uniformly newline free. - - The context diff format normally has a header for filenames and - modification times. Any or all of these may be specified using - strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. - The modification times are normally expressed in the ISO 8601 format. - If not specified, the strings default to blanks. - - Example: - - >>> print(''.join(context_diff('one\\ntwo\\nthree\\nfour\\n'.splitlines(True), - ... 'zero\\none\\ntree\\nfour\\n'.splitlines(True), 'Original', 'Current')), - ... end="") - *** Original - --- Current - *************** - *** 1,4 **** - one - ! two - ! three - four - --- 1,4 ---- - + zero - one - ! tree - four - """ - +) -> Iterator[str]: ... def ndiff( a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., -) -> Iterator[str]: - """ - Compare `a` and `b` (lists of strings); return a `Differ`-style delta. - - Optional keyword parameters `linejunk` and `charjunk` are for filter - functions, or can be None: - - - linejunk: A function that should accept a single string argument and - return true iff the string is junk. The default is None, and is - recommended; the underlying SequenceMatcher class has an adaptive - notion of "noise" lines. - - - charjunk: A function that accepts a character (string of length - 1), and returns true iff the character is junk. The default is - the module-level function IS_CHARACTER_JUNK, which filters out - whitespace characters (a blank or tab; note: it's a bad idea to - include newline in this!). - - Tools/scripts/ndiff.py is a command-line front-end to this function. - - Example: - - >>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), - ... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) - >>> print(''.join(diff), end="") - - one - ? ^ - + ore - ? ^ - - two - - three - ? - - + tree - + emu - """ +) -> Iterator[str]: ... class HtmlDiff: - """For producing HTML side by side comparison with change highlights. - - This class can be used to create an HTML table (or a complete HTML file - containing the table) showing a side by side, line by line comparison - of text with inter-line and intra-line change highlights. The table can - be generated in either full or contextual difference mode. - - The following methods are provided for HTML generation: - - make_table -- generates HTML for a single side by side table - make_file -- generates complete HTML file with a single side by side table - - See tools/scripts/diff.py for an example usage of this class. - """ - def __init__( self, tabsize: int = 8, wrapcolumn: int | None = None, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., - ) -> None: - """HtmlDiff instance initializer - - Arguments: - tabsize -- tab stop spacing, defaults to 8. - wrapcolumn -- column number where lines are broken and wrapped, - defaults to None where lines are not wrapped. - linejunk,charjunk -- keyword arguments passed into ndiff() (used by - HtmlDiff() to generate the side by side HTML differences). See - ndiff() documentation for argument default values and descriptions. - """ - + ) -> None: ... 
def make_file( self, fromlines: Sequence[str], @@ -777,24 +114,7 @@ class HtmlDiff: numlines: int = 5, *, charset: str = "utf-8", - ) -> str: - """Returns HTML file of side by side comparison with change highlights - - Arguments: - fromlines -- list of "from" lines - tolines -- list of "to" lines - fromdesc -- "from" file column header string - todesc -- "to" file column header string - context -- set to True for contextual differences (defaults to False - which shows full differences). - numlines -- number of context lines. When context is set True, - controls number of lines displayed before and after the change. - When context is False, controls the number of lines to place - the "next" link anchors before the next change (so click of - "next" link jumps to just before the change). - charset -- charset of the HTML document - """ - + ) -> str: ... def make_table( self, fromlines: Sequence[str], @@ -803,46 +123,9 @@ class HtmlDiff: todesc: str = "", context: bool = False, numlines: int = 5, - ) -> str: - """Returns HTML table of side by side comparison with change highlights - - Arguments: - fromlines -- list of "from" lines - tolines -- list of "to" lines - fromdesc -- "from" file column header string - todesc -- "to" file column header string - context -- set to True for contextual differences (defaults to False - which shows full differences). - numlines -- number of context lines. When context is set True, - controls number of lines displayed before and after the change. - When context is False, controls the number of lines to place - the "next" link anchors before the next change (so click of - "next" link jumps to just before the change). - """ - -def restore(delta: Iterable[str], which: int) -> Iterator[str]: - """ - Generate one of the two sequences that generated a delta. - - Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract - lines originating from file 1 or 2 (parameter `which`), stripping off line - prefixes. - - Examples: - - >>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), - ... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) - >>> diff = list(diff) - >>> print(''.join(restore(diff, 1)), end="") - one - two - three - >>> print(''.join(restore(diff, 2)), end="") - ore - tree - emu - """ + ) -> str: ... +def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Iterable[bytes | bytearray], @@ -853,13 +136,4 @@ def diff_bytes( tofiledate: bytes | bytearray = b"", n: int = 3, lineterm: bytes | bytearray = b"\n", -) -> Iterator[bytes]: - """ - Compare `a` and `b`, two sequences of lines represented as bytes rather - than str. This is a wrapper for `dfunc`, which is typically either - unified_diff() or context_diff(). Inputs are losslessly converted to - strings so that `dfunc` only has to worry about strings, and encoded - back to bytes on return. This is necessary to compare files with - unknown or inconsistent encoding. All other inputs (except `n`) must be - bytes rather than str. - """ +) -> Iterator[bytes]: ... 
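That closes the difflib.pyi hunk. The module-level helpers whose docstrings were removed (unified_diff, ndiff, restore, HtmlDiff) are ordinary stdlib generators; a minimal usage sketch, with illustrative file names:

import difflib

old = "one\ntwo\nthree\n".splitlines(keepends=True)
new = "one\ntoo\nthree\nfour\n".splitlines(keepends=True)

# unified_diff yields newline-terminated header, hunk and content lines.
for line in difflib.unified_diff(old, new, fromfile="a.txt", tofile="b.txt"):
    print(line, end="")

# restore() recovers either input sequence from an ndiff()-style delta.
delta = list(difflib.ndiff(old, new))
assert "".join(difflib.restore(delta, 1)) == "".join(old)
assert "".join(difflib.restore(delta, 2)) == "".join(new)

# HtmlDiff renders the same comparison as an HTML table.
table = difflib.HtmlDiff(wrapcolumn=72).make_table(old, new, "old", "new", context=True)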
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi index 6da3dc252cf51..896b50fa93847 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi @@ -1,5 +1,3 @@ -"""Disassembler of Python byte code into mnemonics.""" - import sys import types from collections.abc import Callable, Iterator @@ -47,8 +45,6 @@ _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeTyp if sys.version_info >= (3, 11): class Positions(NamedTuple): - """Positions(lineno, end_lineno, col_offset, end_col_offset)""" - lineno: int | None = None end_lineno: int | None = None col_offset: int | None = None @@ -56,8 +52,6 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 13): class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, start_offset, starts_line, line_number, label, positions, cache_info)""" - opname: str opcode: int arg: int | None @@ -73,8 +67,6 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 11): class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target, positions)""" - opname: str opcode: int arg: int | None @@ -87,8 +79,6 @@ elif sys.version_info >= (3, 11): else: class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target)""" - opname: str opcode: int arg: int | None @@ -100,71 +90,23 @@ else: if sys.version_info >= (3, 12): class Instruction(_Instruction): - """Details for a bytecode operation. - - Defined fields: - opname - human readable name for operation - opcode - numeric code for operation - arg - numeric argument to operation (if any), otherwise None - argval - resolved arg value (if known), otherwise same as arg - argrepr - human readable description of operation argument - offset - start index of operation within bytecode sequence - start_offset - start index of operation within bytecode sequence including extended args if present; - otherwise equal to Instruction.offset - starts_line - True if this opcode starts a source line, otherwise False - line_number - source line number associated with this opcode (if any), otherwise None - label - A label if this instruction is a jump target, otherwise None - positions - Optional dis.Positions object holding the span of source code - covered by this instruction - cache_info - information about the format and content of the instruction's cache - entries (if any) - """ - if sys.version_info < (3, 13): - def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: - """Format instruction details for inclusion in disassembly output - - *lineno_width* sets the width of the line number field (0 omits it) - *mark_as_current* inserts a '-->' marker arrow as part of the line - *offset_width* sets the width of the instruction offset field - """ + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... if sys.version_info >= (3, 13): @property - def oparg(self) -> int: - """Alias for Instruction.arg.""" - + def oparg(self) -> int: ... @property - def baseopcode(self) -> int: - """Numeric code for the base operation if operation is specialized. - - Otherwise equal to Instruction.opcode. - """ - + def baseopcode(self) -> int: ... 
@property - def baseopname(self) -> str: - """Human readable name for the base operation if operation is specialized. - - Otherwise equal to Instruction.opname. - """ - + def baseopname(self) -> str: ... @property - def cache_offset(self) -> int: - """Start index of the cache entries following the operation.""" - + def cache_offset(self) -> int: ... @property - def end_offset(self) -> int: - """End index of the cache entries following the operation.""" - + def end_offset(self) -> int: ... @property - def jump_target(self) -> int: - """Bytecode index of the jump target if this is a jump operation. - - Otherwise return None. - """ - + def jump_target(self) -> int: ... @property - def is_jump_target(self) -> bool: - """True if other code jumps to here, otherwise False""" + def is_jump_target(self) -> bool: ... if sys.version_info >= (3, 14): @staticmethod def make( @@ -184,38 +126,9 @@ if sys.version_info >= (3, 12): else: @disjoint_base class Instruction(_Instruction): - """Details for a bytecode operation - - Defined fields: - opname - human readable name for operation - opcode - numeric code for operation - arg - numeric argument to operation (if any), otherwise None - argval - resolved arg value (if known), otherwise same as arg - argrepr - human readable description of operation argument - offset - start index of operation within bytecode sequence - starts_line - line started by this opcode (if any), otherwise None - is_jump_target - True if other code jumps to here, otherwise False - positions - Optional dis.Positions object holding the span of source code - covered by this instruction - """ - - def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: - """Format instruction details for inclusion in disassembly output - - *lineno_width* sets the width of the line number field (0 omits it) - *mark_as_current* inserts a '-->' marker arrow as part of the line - *offset_width* sets the width of the instruction offset field - """ + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... class Bytecode: - """The bytecode operations of a piece of code - - Instantiate this with a function, method, other compiled object, string of - code, or a code object (as returned by compile()). - - Iterating over this yields the bytecode operations as Instruction instances. - """ - codeobj: types.CodeType first_line: int if sys.version_info >= (3, 14): @@ -262,41 +175,21 @@ class Bytecode: if sys.version_info >= (3, 11): @classmethod - def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: - """Construct a Bytecode from the given traceback""" + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... else: @classmethod - def from_traceback(cls, tb: types.TracebackType) -> Self: - """Construct a Bytecode from the given traceback""" + def from_traceback(cls, tb: types.TracebackType) -> Self: ... def __iter__(self) -> Iterator[Instruction]: ... - def info(self) -> str: - """Return formatted information about the code object.""" - - def dis(self) -> str: - """Return a formatted view of the bytecode operations.""" + def info(self) -> str: ... + def dis(self) -> str: ... COMPILER_FLAG_NAMES: Final[dict[int, str]] -def findlabels(code: _HaveCodeType) -> list[int]: - """Detect all offsets in a byte code which are jump targets. - - Return the list of offsets. 
- - """ - -def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: - """Find the offsets in a byte code which are start of lines in the source. - - Generate pairs (offset, lineno) - lineno will be an integer or None the offset does not have a source line. - """ - -def pretty_flags(flags: int) -> str: - """Return pretty representation of code flags.""" - -def code_info(x: _HaveCodeType | str) -> str: - """Formatted details of methods, functions, or code.""" +def findlabels(code: _HaveCodeType) -> list[int]: ... +def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... +def pretty_flags(flags: int) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... if sys.version_info >= (3, 14): # 3.14 added `show_positions` @@ -309,16 +202,7 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: - """Disassemble classes, methods, functions, and other compiled objects. - - With no argument, disassemble the last traceback. - - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ - + ) -> None: ... def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -328,9 +212,7 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: - """Disassemble a code object.""" - + ) -> None: ... def distb( tb: types.TracebackType | None = None, *, @@ -339,8 +221,7 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: - """Disassemble a traceback (default: last traceback).""" + ) -> None: ... elif sys.version_info >= (3, 13): # 3.13 added `show_offsets` @@ -352,16 +233,7 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: - """Disassemble classes, methods, functions, and other compiled objects. - - With no argument, disassemble the last traceback. - - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ - + ) -> None: ... def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -370,9 +242,7 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: - """Disassemble a code object.""" - + ) -> None: ... def distb( tb: types.TracebackType | None = None, *, @@ -380,8 +250,7 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: - """Disassemble a traceback (default: last traceback).""" + ) -> None: ... elif sys.version_info >= (3, 11): # 3.11 added `show_caches` and `adaptive` @@ -392,93 +261,35 @@ elif sys.version_info >= (3, 11): depth: int | None = None, show_caches: bool = False, adaptive: bool = False, - ) -> None: - """Disassemble classes, methods, functions, and other compiled objects. - - With no argument, disassemble the last traceback. - - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ - + ) -> None: ... 
def disassemble( co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False - ) -> None: - """Disassemble a code object.""" - + ) -> None: ... def distb( tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False - ) -> None: - """Disassemble a traceback (default: last traceback).""" + ) -> None: ... else: def dis( x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None - ) -> None: - """Disassemble classes, methods, functions, and other compiled objects. - - With no argument, disassemble the last traceback. - - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ - - def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: - """Disassemble a code object.""" - - def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: - """Disassemble a traceback (default: last traceback).""" + ) -> None: ... + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... if sys.version_info >= (3, 13): # 3.13 made `show_cache` `None` by default def get_instructions( x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False - ) -> Iterator[Instruction]: - """Iterator for the opcodes in methods, functions or code - - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. - - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ + ) -> Iterator[Instruction]: ... elif sys.version_info >= (3, 11): def get_instructions( x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False - ) -> Iterator[Instruction]: - """Iterator for the opcodes in methods, functions or code - - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. - - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ + ) -> Iterator[Instruction]: ... else: - def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: - """Iterator for the opcodes in methods, functions or code - - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. - - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ - -def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: - """Print details of methods, functions, or code to *file*. + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... - If *file* is not provided, the output is printed on stdout. 
- """ +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... disco = disassemble diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi index f2363986b9478..328a5b7834419 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi @@ -1,13 +1,3 @@ -"""distutils - -The main package for the Python Module Distribution Utilities. Normally -used from a setup script as - - from distutils.core import setup - - setup (...) -""" - # Attempts to improve these stubs are probably not the best use of time: # - distutils is deleted in Python 3.12 and newer # - Most users already do not use stdlib distutils, due to setuptools monkeypatching diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi index 970c5623cddf5..bba9373b72dbc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi @@ -1,12 +1,3 @@ -"""distutils._msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for Microsoft Visual Studio 2015. - -The module is compatible with VS 2015 and later. You can find legacy support -for older versions in distutils.msvc9compiler and distutils.msvccompiler. -""" - from _typeshed import Incomplete from distutils.ccompiler import CCompiler from typing import ClassVar, Final @@ -15,10 +6,6 @@ PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] PLAT_TO_VCVARS: Final[dict[str, str]] class MSVCCompiler(CCompiler): - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class. - """ - compiler_type: ClassVar[str] executables: ClassVar[dict[Incomplete, Incomplete]] res_extension: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi index da8de7406d2cb..16684ff069568 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi @@ -1,9 +1,3 @@ -"""distutils.archive_util - -Utility functions for creating archive files (tarballs, zip files, -that sort of thing). -""" - from _typeshed import StrOrBytesPath, StrPath from typing import Literal, overload @@ -17,24 +11,7 @@ def make_archive( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "ztar". - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. - """ - +) -> str: ... 
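The dis.pyi stubs above lose the same kind of docstrings; for orientation, the disassembler API they describe is typically driven like this (ordinary stdlib calls; the exact instruction fields vary slightly across Python versions, as the version-gated stubs show):

import dis

def sample(x):
    return x + 1

# get_instructions() yields Instruction named tuples for the function's code object.
for ins in dis.get_instructions(sample):
    print(ins.offset, ins.opname, ins.argrepr)

# Bytecode wraps the same data; info() and dis() return formatted strings
# (dis.dis(sample) prints roughly what bc.dis() returns).
bc = dis.Bytecode(sample)
print(bc.info())
print(bc.dis())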
@overload def make_archive( base_name: StrPath, @@ -54,29 +31,5 @@ def make_tarball( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: - """Create a (possibly compressed) tar file from all the files under - 'base_dir'. - - 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or - None. ("compress" will be deprecated in Python 3.2) - - 'owner' and 'group' can be used to define an owner and a group for the - archive that is being built. If not provided, the current owner and group - will be used. - - The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). - - Returns the output filename. - """ - -def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: - """Create a zip file from all the files under 'base_dir'. - - The output zip file will be named 'base_name' + ".zip". Uses either the - "zipfile" Python module (if available) or the InfoZIP "zip" utility - (if installed and found on the default search path). If neither tool is - available, raises DistutilsExecError. Returns the name of the output zip - file. - """ +) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi index 562f079f81223..3e432f94b525d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi @@ -1,12 +1,3 @@ -"""distutils.bcppcompiler - -Contains BorlandCCompiler, an implementation of the abstract CCompiler class -for the Borland C++ compiler. -""" - from distutils.ccompiler import CCompiler -class BCPPCompiler(CCompiler): - """Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ +class BCPPCompiler(CCompiler): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi index 3b6671e662f6c..5bff209807eef 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi @@ -1,9 +1,3 @@ -"""distutils.ccompiler - -Contains CCompiler, an abstract base class that defines the interface -for the Distutils compiler abstraction model. -""" - from _typeshed import BytesPath, StrPath, Unused from collections.abc import Callable, Iterable, Sequence from distutils.file_util import _BytesPathT, _StrPathT @@ -15,73 +9,19 @@ _Ts = TypeVarTuple("_Ts") def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] -) -> list[str]: - """Generate linker options for searching library directories and - linking with specific libraries. 'libraries' and 'library_dirs' are, - respectively, lists of library names (not filenames!) and search - directories. Returns a list of command-line options suitable for use - with some compiler (depending on the two format strings passed in). 
- """ - -def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: - """Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - -def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: - """Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. - - The default values are os.name and sys.platform in case the - parameters are not given. - """ - +) -> list[str]: ... +def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... def new_compiler( plat: str | None = None, compiler: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0, -) -> CCompiler: - """Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - -def show_compilers() -> None: - """Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). - """ +) -> CCompiler: ... +def show_compilers() -> None: ... class CCompiler: - """Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. - """ - dry_run: bool force: bool verbose: bool @@ -95,114 +35,20 @@ class CCompiler: def __init__( self, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0 ) -> None: ... - def add_include_dir(self, dir: str) -> None: - """Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. 
- """ - - def set_include_dirs(self, dirs: list[str]) -> None: - """Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - - def add_library(self, libname: str) -> None: - """Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). - - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - - def set_libraries(self, libnames: list[str]) -> None: - """Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. - """ - - def add_library_dir(self, dir: str) -> None: - """Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - - def set_library_dirs(self, dirs: list[str]) -> None: - """Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - - def add_runtime_library_dir(self, dir: str) -> None: - """Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - - def set_runtime_library_dirs(self, dirs: list[str]) -> None: - """Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - - def define_macro(self, name: str, value: str | None = None) -> None: - """Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - - def undefine_macro(self, name: str) -> None: - """Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - - def add_link_object(self, object: str) -> None: - """Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. 
- """ - - def set_link_objects(self, objects: list[str]) -> None: - """Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). - """ - - def detect_language(self, sources: str | list[str]) -> str | None: - """Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. - """ - - def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: - """Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... + def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: ... def has_function( self, funcname: str, @@ -210,45 +56,11 @@ class CCompiler: include_dirs: list[str] | None = None, libraries: list[str] | None = None, library_dirs: list[str] | None = None, - ) -> bool: - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - - def library_dir_option(self, dir: str) -> str: - """Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - - def library_option(self, lib: str) -> str: - """Return the compiler option to add 'lib' to the list of libraries - linked into the shared library or executable. - """ - - def runtime_library_dir_option(self, dir: str) -> str: - """Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - - def set_executables(self, **args: str) -> None: - """Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command-line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - + ) -> bool: ... 
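The CCompiler stubs above describe distutils' compiler abstraction, which (as noted earlier in this diff) is removed in Python 3.12+. A short legacy-usage sketch of the interface, with illustrative paths and source names:

from distutils.ccompiler import new_compiler

# new_compiler() returns the default CCompiler subclass for this platform.
cc = new_compiler(verbose=1)
cc.add_include_dir("/usr/local/include")   # illustrative path
cc.define_macro("NDEBUG")

# has_function() builds and links a small probe program behind the scenes,
# so it requires a working C toolchain.
if cc.has_function("printf"):
    print("printf is available")

# compile() returns object-file names; link_executable() consumes them.
# (hello.c is a placeholder source file.)
# objects = cc.compile(["hello.c"], output_dir="build")
# cc.link_executable(objects, "hello", output_dir="build")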
+ def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... + def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **args: str) -> None: ... def compile( self, sources: Sequence[StrPath], @@ -259,56 +71,7 @@ class CCompiler: extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, depends: list[str] | None = None, - ) -> list[str]: - """Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command-line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepend/append to the compiler command - line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - + ) -> list[str]: ... def create_static_lib( self, objects: list[str], @@ -316,30 +79,7 @@ class CCompiler: output_dir: str | None = None, debug: bool | Literal[0, 1] = 0, target_lang: str | None = None, - ) -> None: - """Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). - - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. 
- - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - + ) -> None: ... def link( self, target_desc: str, @@ -355,51 +95,7 @@ class CCompiler: extra_postargs: list[str] | None = None, build_temp: str | None = None, target_lang: str | None = None, - ) -> None: - """Link a bunch of stuff together to create an executable or - shared library file. - - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) - - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - + ) -> None: ... def link_executable( self, objects: list[str], @@ -451,17 +147,7 @@ class CCompiler: include_dirs: list[str] | None = None, extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, - ) -> None: - """Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. - """ - + ) -> None: ... @overload def executable_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... 
@overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi index 9adbe4377ddb6..7f97bc3a2c9e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi @@ -1,9 +1,3 @@ -"""distutils.cmd - -Provides the Command class, the base class for the command classes -in the distutils.command package. -""" - from _typeshed import BytesPath, StrOrBytesPath, StrPath, Unused from abc import abstractmethod from collections.abc import Callable, Iterable @@ -36,119 +30,28 @@ _CommandT = TypeVar("_CommandT", bound=Command) _Ts = TypeVarTuple("_Ts") class Command: - """Abstract base class for defining command classes, the "worker bees" - of the Distutils. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ - dry_run: bool | Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run distribution: Distribution # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] - def __init__(self, dist: Distribution) -> None: - """Create and initialize a new Command object. Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being - instantiated. - """ - + def __init__(self, dist: Distribution) -> None: ... @abstractmethod - def initialize_options(self) -> None: - """Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command-line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - + def initialize_options(self) -> None: ... @abstractmethod - def finalize_options(self) -> None: - """Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command-line or from other commands have been - done. Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - + def finalize_options(self) -> None: ... 
@abstractmethod - def run(self) -> None: - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - - def announce(self, msg: str, level: int = 1) -> None: - """If the current verbosity level is of greater than or equal to - 'level' print 'msg' to stdout. - """ - - def debug_print(self, msg: str) -> None: - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - - def ensure_string(self, option: str, default: str | None = None) -> None: - """Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - - def ensure_string_list(self, option: str) -> None: - """Ensure that 'option' is a list of strings. If 'option' is - currently a string, we split it either on /,\\s*/ or /\\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - - def ensure_filename(self, option: str) -> None: - """Ensure that 'option' is the name of an existing file.""" - + def run(self) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def debug_print(self, msg: str) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... + def ensure_string_list(self, option: str) -> None: ... + def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... - def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: - """Set the values of any "undefined" options from corresponding - option values in some other command object. "Undefined" here means - "is None", which is the convention used to indicate that an option - has not been changed between 'initialize_options()' and - 'finalize_options()'. Usually called from 'finalize_options()' for - options that depend on some other command rather than another - option of the same command. 'src_cmd' is the other command from - which option values will be taken (a command object will be created - for it if necessary); the remaining arguments are - '(src_option,dst_option)' tuples which mean "take the value of - 'src_option' in the 'src_cmd' command object, and copy it to - 'dst_option' in the current command object". - """ + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. @overload - def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: - """Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - + def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ... @overload def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ... @overload @@ -251,20 +154,8 @@ class Command: def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ... 
@overload def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ... - def run_command(self, command: str) -> None: - """Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. - """ - - def get_sub_commands(self) -> list[str]: - """Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - + def run_command(self, command: str) -> None: ... + def get_sub_commands(self) -> list[str]: ... def warn(self, msg: str) -> None: ... def execute( self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 @@ -279,12 +170,7 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, link: str | None = None, level: Unused = 1, - ) -> tuple[_StrPathT | str, bool]: - """Copy a file respecting verbose, dry-run and force flags. (The - former two default to whatever is in the Distribution object, and - the latter defaults to false for commands that don't define it.) - """ - + ) -> tuple[_StrPathT | str, bool]: ... @overload def copy_file( self, @@ -303,20 +189,12 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, preserve_symlinks: bool | Literal[0, 1] = 0, level: Unused = 1, - ) -> list[str]: - """Copy an entire directory tree respecting verbose, dry-run, - and force flags. - """ - + ) -> list[str]: ... @overload - def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: - """Move a file respecting dry-run flag.""" - + def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ... @overload def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... - def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: - """Spawn an external command respecting dry-run flag.""" - + def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ... @overload def make_archive( self, @@ -346,15 +224,6 @@ class Command: exec_msg: str | None = None, skip_msg: str | None = None, level: Unused = 1, - ) -> None: - """Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. - """ - + ) -> None: ... def ensure_finalized(self) -> None: ... def dump_options(self, header=None, indent: str = "") -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi index b22cd819f98ba..4d7372858af34 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi @@ -1,9 +1,3 @@ -"""distutils.command - -Package containing implementation of all the standard Distutils -commands. 
-""" - import sys from . import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi index 36568ce343bb2..6f996207077e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi @@ -1,17 +1,10 @@ -"""distutils.command.bdist - -Implements the Distutils 'bdist' command (create a built [binary] -distribution). -""" - from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar from ..cmd import Command -def show_formats() -> None: - """Print list of available formats (arguments to "--format" option).""" +def show_formats() -> None: ... class bdist(Command): description: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi index 3a44a95549fe7..297a0c39ed430 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi @@ -1,10 +1,3 @@ -"""distutils.command.bdist_dumb - -Implements the Distutils 'bdist_dumb' command (create a "dumb" built -distribution -- i.e., just an archive to be unpacked under $prefix or -$exec_prefix). -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi index f37d75b54528a..d677f81d14251 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -1,7 +1,3 @@ -""" -Implements the bdist_msi command. -""" - import sys from _typeshed import Incomplete from typing import ClassVar, Literal @@ -12,47 +8,12 @@ if sys.platform == "win32": from msilib import Control, Dialog class PyDialog(Dialog): - """Dialog class with a fixed layout: controls at the top, then a ruler, - then a list of buttons: back, next, cancel. Optionally a bitmap at the - left. - """ - - def __init__(self, *args, **kw) -> None: - """Dialog(database, name, x, y, w, h, attributes, title, first, - default, cancel, bitmap=true) - """ - - def title(self, title) -> None: - """Set the title text of the dialog at the top.""" - - def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: - """Add a back button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated - """ - - def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: - """Add a cancel button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated - """ - - def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: - """Add a Next button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. 
- - Return the button, so that events can be associated - """ - - def xbutton(self, name, title, next, xpos) -> Control: - """Add a button with a given title, the tab-next button, - its name in the Control table, giving its x position; the - y-position is aligned with the other buttons. - - Return the button, so that events can be associated - """ + def __init__(self, *args, **kw) -> None: ... + def title(self, title) -> None: ... + def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: ... + def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: ... + def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: ... + def xbutton(self, name, title, next, xpos) -> Control: ... class bdist_msi(Command): description: str @@ -78,18 +39,7 @@ if sys.platform == "win32": db: Incomplete def run(self) -> None: ... def add_files(self) -> None: ... - def add_find_python(self) -> None: - """Adds code to the installer to compute the location of Python. - - Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the - registry for each version of Python. - - Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, - else from PYTHON.MACHINE.X.Y. - - Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe - """ - + def add_find_python(self) -> None: ... def add_scripts(self) -> None: ... def add_ui(self) -> None: ... def get_installer_filename(self, fullname): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi index 83432461226d6..83b4161094c51 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi @@ -1,9 +1,3 @@ -"""distutils.command.bdist_rpm - -Implements the Distutils 'bdist_rpm' command (create RPM source and binary -distributions). -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi index 11be78a8f729a..cf333bc5400dd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -1,9 +1,3 @@ -"""distutils.command.bdist_wininst - -Implements the Distutils 'bdist_wininst' command: create a windows installer -exe-program. -""" - from _typeshed import StrOrBytesPath from distutils.cmd import Command from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi index be3edede3e55c..3ec0c9614d62a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi @@ -1,8 +1,3 @@ -"""distutils.command.build - -Implements the Distutils 'build' command. 
-""" - from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi index dd78603653d31..69cfbe7120d8e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi @@ -1,10 +1,3 @@ -"""distutils.command.build_clib - -Implements the Distutils 'build_clib' command, to build a C/C++ library -that is included in the module distribution and needed by an extension -module. -""" - from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -30,17 +23,7 @@ class build_clib(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_library_list(self, libraries) -> None: - """Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - + def check_library_list(self, libraries) -> None: ... def get_library_names(self): ... def get_source_files(self): ... def build_libraries(self, libraries) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi index 9b70452720373..c5a9b5d508f0d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi @@ -1,10 +1,3 @@ -"""distutils.command.build_ext - -Implements the Distutils 'build_ext' command, for building extension -modules (currently limited to C extensions, should accommodate C++ -extensions ASAP). -""" - from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -45,62 +38,15 @@ class build_ext(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_extensions_list(self, extensions) -> None: - """Ensure that the list of extensions (presumably provided as a - command option 'extensions') is valid, i.e. it is a list of - Extension objects. We also support the old-style list of 2-tuples, - where the tuples are (ext_name, build_info), which are converted to - Extension instances here. - - Raise DistutilsSetupError if the structure is invalid anywhere; - just returns otherwise. - """ - + def check_extensions_list(self, extensions) -> None: ... def get_source_files(self): ... def get_outputs(self): ... def build_extensions(self) -> None: ... def build_extension(self, ext) -> None: ... - def swig_sources(self, sources, extension): - """Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - - def find_swig(self): - """Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. 
- """ - - def get_ext_fullpath(self, ext_name: str) -> str: - """Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - - def get_ext_fullname(self, ext_name: str) -> str: - """Returns the fullname of a given extension name. - - Adds the `package.` prefix - """ - - def get_ext_filename(self, ext_name: str) -> str: - """Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\\bar.pyd"). - """ - - def get_export_symbols(self, ext): - """Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "PyInit_" + module_name. Only relevant on Windows, where - the .pyd file (DLL) must export the module "PyInit_" function. - """ - - def get_libraries(self, ext): - """Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows, we add the Python library (eg. python20.dll). - """ + def swig_sources(self, sources, extension): ... + def find_swig(self): ... + def get_ext_fullpath(self, ext_name: str) -> str: ... + def get_ext_fullname(self, ext_name: str) -> str: ... + def get_ext_filename(self, ext_name: str) -> str: ... + def get_export_symbols(self, ext): ... + def get_libraries(self, ext): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi index c35514d105256..23ed230bb2d8c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi @@ -1,8 +1,3 @@ -"""distutils.command.build_py - -Implements the Distutils 'build_py' command. -""" - from _typeshed import Incomplete from typing import ClassVar, Literal @@ -27,42 +22,15 @@ class build_py(Command): data_files: Incomplete def finalize_options(self) -> None: ... def run(self) -> None: ... - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'""" - - def build_package_data(self) -> None: - """Copy data files into build directory""" - - def get_package_dir(self, package): - """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any). - """ - + def get_data_files(self): ... + def find_data_files(self, package, src_dir): ... + def build_package_data(self) -> None: ... + def get_package_dir(self, package): ... def check_package(self, package, package_dir): ... def check_module(self, module, module_file): ... def find_package_modules(self, package, package_dir): ... - def find_modules(self): - """Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. 
- """ - - def find_all_modules(self): - """Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do. - """ - + def find_modules(self): ... + def find_all_modules(self): ... def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... def get_outputs(self, include_bytecode: bool | Literal[0, 1] = 1) -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi index 201ef5dd9e2b8..8372919bbd530 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi @@ -1,8 +1,3 @@ -"""distutils.command.build_scripts - -Implements the Distutils 'build_scripts' command. -""" - from _typeshed import Incomplete from typing import ClassVar @@ -24,12 +19,7 @@ class build_scripts(Command): def finalize_options(self) -> None: ... def get_source_files(self): ... def run(self) -> None: ... - def copy_scripts(self): - """Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. - """ + def copy_scripts(self): ... class build_scripts_2to3(build_scripts, Mixin2to3): def copy_scripts(self): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi index e3c560ccff01b..2c807fd2c4396 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi @@ -1,8 +1,3 @@ -"""distutils.command.check - -Implements the Distutils 'check' command. -""" - from _typeshed import Incomplete from typing import Any, ClassVar, Final, Literal from typing_extensions import TypeAlias @@ -31,35 +26,15 @@ class SilentReporter(_Reporter): HAS_DOCUTILS: Final[bool] class check(Command): - """This command checks the meta-data of the package.""" - description: str user_options: ClassVar[list[tuple[str, str, str]]] boolean_options: ClassVar[list[str]] restructuredtext: int metadata: int strict: int - def initialize_options(self) -> None: - """Sets default values for options.""" - + def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... - def warn(self, msg): - """Counts the number of warnings that occurs.""" - - def run(self) -> None: - """Runs the command.""" - - def check_metadata(self) -> None: - """Ensures that all required elements of meta-data are supplied. - - Required fields: - name, version, URL - - Recommended fields: - (author and author_email) or (maintainer and maintainer_email) - - Warns if any are missing. - """ - - def check_restructuredtext(self) -> None: - """Checks if the long string fields are reST-compliant.""" + def warn(self, msg): ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def check_restructuredtext(self) -> None: ... 
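The Command docstrings removed in the hunks above all describe the same three-hook protocol: option defaults go in initialize_options(), cross-option dependencies are resolved in finalize_options(), and the actual work happens in run(). A minimal sketch of that pattern, assuming a made-up command name "greet" and package "demo" that appear nowhere in this patch:

from distutils.cmd import Command
from distutils.core import setup

class greet(Command):
    # Hypothetical command, used only to illustrate the three required hooks.
    description = "print a greeting"
    user_options = [("name=", None, "who to greet")]

    def initialize_options(self):
        # per the removed docstring: "just a bunch of 'self.foo = None' assignments"
        self.name = None

    def finalize_options(self):
        # late defaults and dependencies between options belong here
        if self.name is None:
            self.name = "world"

    def run(self):
        # terminal output and filesystem work happen in run()
        self.announce("hello, " + self.name, level=2)

setup(name="demo", version="0.0.0", cmdclass={"greet": greet})

Invoked as `python setup.py greet --name=typeshed`. On Python 3.12 and later distutils is no longer in the standard library, so a sketch like this only runs where distutils (or setuptools' vendored copy) is importable.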
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi index 363e24eeaff61..0f3768d6dcf4d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi @@ -1,8 +1,3 @@ -"""distutils.command.clean - -Implements the Distutils 'clean' command. -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi index 79b5bac38a316..381e8e466bf16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi @@ -1,14 +1,3 @@ -"""distutils.command.config - -Implements the Distutils 'config' command, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". -""" - from _typeshed import StrOrBytesPath from collections.abc import Sequence from re import Pattern @@ -40,14 +29,7 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - + ) -> bool: ... def search_cpp( self, pattern: Pattern[str] | str, @@ -55,22 +37,10 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 'pattern' should either be a compiled regex object or a - string containing a regex. If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - + ) -> bool: ... def try_compile( self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: - """Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - + ) -> bool: ... def try_link( self, body: str, @@ -79,12 +49,7 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - + ) -> bool: ... def try_run( self, body: str, @@ -93,12 +58,7 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: - """Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. 
- """ - + ) -> bool: ... def check_func( self, func: str, @@ -108,21 +68,7 @@ class config(Command): library_dirs: Sequence[str] | None = None, decl: bool | Literal[0, 1] = 0, call: bool | Literal[0, 1] = 0, - ) -> bool: - """Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. - """ - + ) -> bool: ... def check_lib( self, library: str, @@ -130,26 +76,9 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, other_libraries: list[str] = [], - ) -> bool: - """Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - + ) -> bool: ... def check_header( self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: - """Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - -def dump_file(filename: StrOrBytesPath, head=None) -> None: - """Dumps a file content into log.info. + ) -> bool: ... - If head is not None, will be dumped before the file content. - """ +def dump_file(filename: StrOrBytesPath, head=None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi index 3d32c66f9b15d..1714e01a2c284 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi @@ -1,8 +1,3 @@ -"""distutils.command.install - -Implements the Distutils 'install' command. -""" - import sys from _typeshed import Incomplete from collections.abc import Callable @@ -48,76 +43,29 @@ class install(Command): build_base: Incomplete build_lib: Incomplete record: Incomplete - def initialize_options(self) -> None: - """Initializes options.""" + def initialize_options(self) -> None: ... config_vars: Incomplete install_libbase: Incomplete - def finalize_options(self) -> None: - """Finalizes options.""" - - def dump_dirs(self, msg) -> None: - """Dumps the list of user options.""" - - def finalize_unix(self) -> None: - """Finalizes options for posix platforms.""" - - def finalize_other(self) -> None: - """Finalizes options for non-posix platforms""" - - def select_scheme(self, name) -> None: - """Sets the install directories by applying the install schemes.""" - - def expand_basedirs(self) -> None: - """Calls `os.path.expanduser` on install_base, install_platbase and - root. 
- """ - - def expand_dirs(self) -> None: - """Calls `os.path.expanduser` on install dirs.""" - - def convert_paths(self, *names) -> None: - """Call `convert_path` over `names`.""" + def finalize_options(self) -> None: ... + def dump_dirs(self, msg) -> None: ... + def finalize_unix(self) -> None: ... + def finalize_other(self) -> None: ... + def select_scheme(self, name) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def convert_paths(self, *names) -> None: ... path_file: Incomplete extra_dirs: Incomplete - def handle_extra_path(self) -> None: - """Set `path_file` and `extra_dirs` using `extra_path`.""" - - def change_roots(self, *names) -> None: - """Change the install directories pointed by name using root.""" - - def create_home_path(self) -> None: - """Create directories under ~.""" - - def run(self) -> None: - """Runs the command.""" - - def create_path_file(self) -> None: - """Creates the .pth file""" - - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - - def has_lib(self): - """Returns true if the current distribution has any Python - modules to install. - """ - - def has_headers(self): - """Returns true if the current distribution has any headers to - install. - """ - - def has_scripts(self): - """Returns true if the current distribution has any scripts to. - install. - """ - - def has_data(self): - """Returns true if the current distribution has any data to. - install. - """ + def handle_extra_path(self) -> None: ... + def change_roots(self, *names) -> None: ... + def create_home_path(self) -> None: ... + def run(self) -> None: ... + def create_path_file(self) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... + def has_lib(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data(self): ... # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi index 67c031a601cda..609de62b04b52 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi @@ -1,9 +1,3 @@ -"""distutils.command.install_data - -Implements the Distutils 'install_data' command, for installing -platform-independent data files. -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi index 6746e210fff9a..75bb906ce5824 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi @@ -1,17 +1,9 @@ -"""distutils.command.install_egg_info - -Implements the Distutils 'install_egg_info' command, for installing -a package's PKG-INFO metadata. -""" - from _typeshed import Incomplete from typing import ClassVar from ..cmd import Command class install_egg_info(Command): - """Install an .egg-info file for the package""" - description: ClassVar[str] user_options: ClassVar[list[tuple[str, str, str]]] install_dir: Incomplete @@ -22,21 +14,6 @@ class install_egg_info(Command): def run(self) -> None: ... 
def get_outputs(self) -> list[str]: ... -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. - """ +def safe_name(name): ... +def safe_version(version): ... +def to_filename(name): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi index f0eae22e2653a..3caad8a07dca4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi @@ -1,9 +1,3 @@ -"""distutils.command.install_headers - -Implements the Distutils 'install_headers' command, to install C/C++ header -files to the Python include directory. -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi index 22ce9de26e229..a537e254904aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi @@ -1,9 +1,3 @@ -"""distutils.command.install_lib - -Implements the Distutils 'install_lib' command -(install all Python modules). -""" - from _typeshed import Incomplete from typing import ClassVar, Final @@ -28,15 +22,5 @@ class install_lib(Command): def build(self) -> None: ... def install(self): ... def byte_compile(self, files) -> None: ... - def get_outputs(self): - """Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - - def get_inputs(self): - """Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. - """ + def get_outputs(self): ... + def get_inputs(self): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi index 702c1c644b105..658594f32e43c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi @@ -1,9 +1,3 @@ -"""distutils.command.install_scripts - -Implements the Distutils 'install_scripts' command, for installing -Python scripts. 
-""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi index d7491aa079555..c3bd62aaa7aa0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi @@ -1,8 +1,3 @@ -"""distutils.command.register - -Implements the Distutils 'register' command (register with the repository). -""" - from collections.abc import Callable from typing import Any, ClassVar @@ -17,44 +12,9 @@ class register(PyPIRCCommand): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_metadata(self) -> None: - """Deprecated API.""" - - def classifiers(self) -> None: - """Fetch the list of classifiers from the server.""" - - def verify_metadata(self) -> None: - """Send the metadata to the package index server to be checked.""" - - def send_metadata(self) -> None: - """Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. - - """ - + def check_metadata(self) -> None: ... + def classifiers(self) -> None: ... + def verify_metadata(self) -> None: ... + def send_metadata(self) -> None: ... def build_post_data(self, action): ... - def post_to_server(self, data, auth=None): - """Post a query to the server, and return a string response.""" + def post_to_server(self, data, auth=None): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi index e82c0ead81d3a..48a140714dda7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi @@ -1,26 +1,14 @@ -"""distutils.command.sdist - -Implements the Distutils 'sdist' command (create a source distribution). -""" - from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar from ..cmd import Command -def show_formats() -> None: - """Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ +def show_formats() -> None: ... class sdist(Command): description: str - def checking_metadata(self): - """Callable used for the check sub-command. - - Placed here so user_options can view it - """ + def checking_metadata(self): ... user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] @@ -45,80 +33,13 @@ class sdist(Command): def finalize_options(self) -> None: ... filelist: Incomplete def run(self) -> None: ... 
- def check_metadata(self) -> None: - """Deprecated API.""" - - def get_file_list(self) -> None: - """Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. - """ - - def add_defaults(self) -> None: - """Add all the default files to self.filelist: - - README or README.txt - - setup.py - - test/test*.py - - all pure Python modules mentioned in setup script - - all files pointed by package_data (build_py) - - all files defined in data_files. - - all files defined as scripts. - - all C sources listed as part of extensions or C libraries - in the setup script (doesn't catch C headers!) - Warns if (README or README.txt) or setup.py are missing; everything - else is optional. - """ - - def read_template(self) -> None: - """Read and parse manifest template file named by self.template. - - (usually "MANIFEST.in") The parsing and processing is done by - 'self.filelist', which updates itself accordingly. - """ - - def prune_file_list(self) -> None: - """Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - - def write_manifest(self) -> None: - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - - def read_manifest(self) -> None: - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - - def make_release_tree(self, base_dir, files) -> None: - """Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - - def make_distribution(self) -> None: - """Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. - Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - - def get_archive_files(self): - """Return the list of archive files created when the command - was run, or None if the command hasn't run yet. - """ + def check_metadata(self) -> None: ... + def get_file_list(self) -> None: ... + def add_defaults(self) -> None: ... + def read_template(self) -> None: ... + def prune_file_list(self) -> None: ... + def write_manifest(self) -> None: ... + def read_manifest(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def make_distribution(self) -> None: ... + def get_archive_files(self): ... 
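The config-command docstrings removed a few hunks above reduce to compile/link probes: try_compile(), check_header(), and check_func() each build a throwaway C source file and report whether the toolchain accepts it. A sketch of driving those helpers from a subclass, assuming a hypothetical command name "probe" and probe targets (stdint.h, snprintf) chosen purely for illustration; it needs a working C compiler at run time:

from distutils.command.config import config
from distutils.core import setup

class probe(config):
    # Hypothetical subclass; check_header/check_func signatures match the
    # stubs shown earlier in this patch.
    def run(self):
        has_stdint = self.check_header("stdint.h")
        has_snprintf = self.check_func("snprintf", headers=["stdio.h"], call=1)
        self.announce(
            "stdint.h: %s, snprintf: %s" % (has_stdint, has_snprintf), level=2
        )

setup(name="demo", version="0.0.0", cmdclass={"probe": probe})

Both helpers return False rather than raising when a probe fails, so run() can simply branch on the result.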
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi index a34bcb86f82f8..afcfbaf48677e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi @@ -1,10 +1,3 @@ -""" -distutils.command.upload - -Implements the Distutils 'upload' subcommand (upload package to a package -index). -""" - from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi index 66f8b382f886b..5814a82841cc9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi @@ -1,9 +1,3 @@ -"""distutils.pypirc - -Provides the PyPIRCCommand class, the base class for the command classes -that uses .pypirc in the distutils.command package. -""" - from abc import abstractmethod from distutils.cmd import Command from typing import ClassVar @@ -11,28 +5,13 @@ from typing import ClassVar DEFAULT_PYPIRC: str class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file""" - DEFAULT_REPOSITORY: ClassVar[str] DEFAULT_REALM: ClassVar[str] repository: None realm: None user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] - def initialize_options(self) -> None: - """Initialize options.""" - - def finalize_options(self) -> None: - """Finalizes options.""" - + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... @abstractmethod - def run(self) -> None: - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command-line, and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ + def run(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi index dcf1dd00a8683..174f249913514 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi @@ -1,11 +1,3 @@ -"""distutils.core - -The only module that needs to be imported to use the Distutils; provides -the 'setup' function (which is to be called from the setup script). Also -indirectly provides the Distribution and Command classes, although they are -really defined in distutils.dist and distutils.cmd. -""" - from _typeshed import Incomplete, StrOrBytesPath from collections.abc import Mapping from distutils.cmd import Command as Command @@ -62,67 +54,5 @@ def setup( fullname: str = ..., # Custom Distributions could accept more params **attrs: Any, -) -> Distribution: - """The gateway to the Distutils: do everything your setup script needs - to do, in a highly flexible and user-driven way. Briefly: create a - Distribution instance; find and parse config files; parse the command - line; run each Distutils command found there, customized by the options - supplied to 'setup()' (as keyword arguments), in config files, and on - the command line. 
- - The Distribution instance might be an instance of a class supplied via - the 'distclass' keyword argument to 'setup'; if no such class is - supplied, then the Distribution class (in dist.py) is instantiated. - All other arguments to 'setup' (except for 'cmdclass') are used to set - attributes of the Distribution instance. - - The 'cmdclass' argument, if supplied, is a dictionary mapping command - names to command classes. Each command encountered on the command line - will be turned into a command class, which is in turn instantiated; any - class found in 'cmdclass' is used in place of the default, which is - (for command 'foo_bar') class 'foo_bar' in module - 'distutils.command.foo_bar'. The command class must provide a - 'user_options' attribute which is a list of option specifiers for - 'distutils.fancy_getopt'. Any command-line options between the current - and the next command are used to set attributes of the current command - object. - - When the entire command-line has been successfully parsed, calls the - 'run()' method on each command object in turn. This method will be - driven entirely by the Distribution object (which each command object - has a reference to, thanks to its constructor), and the - command-specific options that became attributes of each command - object. - """ - -def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: - """Run a setup script in a somewhat controlled environment, and - return the Distribution instance that drives things. This is useful - if you need to find out the distribution meta-data (passed as - keyword args from 'script' to 'setup()', or the contents of the - config files or command-line. - - 'script_name' is a file that will be read and run with 'exec()'; - 'sys.argv[0]' will be replaced with 'script' for the duration of the - call. 'script_args' is a list of strings; if supplied, - 'sys.argv[1:]' will be replaced by 'script_args' for the duration of - the call. - - 'stop_after' tells 'setup()' when to stop processing; possible - values: - init - stop after the Distribution instance has been created and - populated with the keyword arguments to 'setup()' - config - stop after config files have been parsed (and their data - stored in the Distribution instance) - commandline - stop after the command-line ('sys.argv[1:]' or 'script_args') - have been parsed (and the data stored in the Distribution) - run [default] - stop after all commands have been run (the same as if 'setup()' - had been called in the usual way - - Returns the Distribution instance, which provides all information - used to drive the Distutils. - """ +) -> Distribution: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi index 59765fecc29b8..80924d63e4714 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi @@ -1,56 +1,20 @@ -"""distutils.cygwinccompiler - -Provides the CygwinCCompiler class, a subclass of UnixCCompiler that -handles the Cygwin port of the GNU C compiler to Windows. It also contains -the Mingw32CCompiler class which handles the mingw32 port of GCC (same as -cygwin in no-cygwin mode). 
-""" - from distutils.unixccompiler import UnixCCompiler from distutils.version import LooseVersion from re import Pattern from typing import Final, Literal -def get_msvcr() -> list[str] | None: - """Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - -class CygwinCCompiler(UnixCCompiler): - """Handles the Cygwin port of the GNU C compiler to Windows.""" +def get_msvcr() -> list[str] | None: ... -class Mingw32CCompiler(CygwinCCompiler): - """Handles the Mingw32 port of the GNU C compiler to Windows.""" +class CygwinCCompiler(UnixCCompiler): ... +class Mingw32CCompiler(CygwinCCompiler): ... CONFIG_H_OK: Final = "ok" CONFIG_H_NOTOK: Final = "not ok" CONFIG_H_UNCERTAIN: Final = "uncertain" -def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: - """Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: - - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h - - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ +def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ... RE_VERSION: Final[Pattern[bytes]] -def get_versions() -> tuple[LooseVersion | None, ...]: - """Try to find out the versions of gcc, ld and dllwrap. - - If not possible it returns None for it. - """ - -def is_cygwingcc() -> bool: - """Try to determine if the gcc that would be used is from cygwin.""" +def get_versions() -> tuple[LooseVersion | None, ...]: ... +def is_cygwingcc() -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi index ec12c28282667..058377accabcc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi @@ -1,10 +1,3 @@ -"""distutils.dep_util - -Utility functions for simple, timestamp-based dependency of files -and groups of files; also, function based entirely on such -timestamp dependency analysis. -""" - from _typeshed import StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Iterable from typing import Literal, TypeVar @@ -12,34 +5,10 @@ from typing import Literal, TypeVar _SourcesT = TypeVar("_SourcesT", bound=StrOrBytesPath) _TargetsT = TypeVar("_TargetsT", bound=StrOrBytesPath) -def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: - """Return true if 'source' exists and is more recently modified than - 'target', or if 'source' exists and 'target' doesn't. Return false if - both exist and 'target' is the same age or younger than 'source'. - Raise DistutilsFileError if 'source' does not exist. - """ - +def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: ... def newer_pairwise( sources: SupportsLenAndGetItem[_SourcesT], targets: SupportsLenAndGetItem[_TargetsT] -) -> tuple[list[_SourcesT], list[_TargetsT]]: - """Walk two filename lists in parallel, testing if each source is newer - than its corresponding target. 
Return a pair of lists (sources, - targets) where source is newer than target, according to the semantics - of 'newer()'. - """ - +) -> tuple[list[_SourcesT], list[_TargetsT]]: ... def newer_group( sources: Iterable[StrOrBytesPath], target: StrOrBytesPath, missing: Literal["error", "ignore", "newer"] = "error" -) -> Literal[0, 1]: - """Return true if 'target' is out-of-date with respect to any file - listed in 'sources'. In other words, if 'target' exists and is newer - than every file in 'sources', return false; otherwise return true. - 'missing' controls what we do when a source file is missing; the - default ("error") is to blow up with an OSError from inside 'stat()'; - if it is "ignore", we silently drop any missing source files; if it is - "newer", any missing source files make us assume that 'target' is - out-of-date (this is handy in "dry-run" mode: it'll make you pretend to - carry out commands that wouldn't work because inputs are missing, but - that doesn't matter because you're not actually going to run the - commands). - """ +) -> Literal[0, 1]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi index 8f153f618009a..23e2c3bc28b98 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi @@ -1,40 +1,15 @@ -"""distutils.dir_util - -Utility functions for manipulating directories and directory trees. -""" - from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal -def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: - """Create a directory and any missing ancestor directories. - - If the directory already exists (or if 'name' is the empty string, which - means the current directory, which of course exists), then do nothing. - Raise DistutilsFileError if unable to create some directory along the way - (eg. some sub-path exists, but is a file rather than a directory). - If 'verbose' is true, print a one-line summary of each mkdir to stdout. - Return the list of directories actually created. - """ - +def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: ... def create_tree( base_dir: StrPath, files: Iterable[StrPath], mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> None: - """Create all the empty directories under 'base_dir' needed to put 'files' - there. - - 'base_dir' is just the name of a directory which doesn't necessarily - exist yet; 'files' is a list of filenames to be interpreted relative to - 'base_dir'. 'base_dir' + the directory portion of every file in 'files' - will be created if it doesn't already exist. 'mode', 'verbose' and - 'dry_run' flags are as for 'mkpath()'. - """ - +) -> None: ... def copy_tree( src: StrPath, dst: str, @@ -44,30 +19,5 @@ def copy_tree( update: bool | Literal[0, 1] = 0, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> list[str]: - """Copy an entire directory tree 'src' to a new location 'dst'. - - Both 'src' and 'dst' must be directory names. If 'src' is not a - directory, raise DistutilsFileError. If 'dst' does not exist, it is - created with 'mkpath()'. The end result of the copy is that every - file in 'src' is copied to 'dst', and directories under 'src' are - recursively copied to 'dst'. 
Return the list of files that were - copied or might have been copied, using their output name. The - return value is unaffected by 'update' or 'dry_run': it is simply - the list of all files under 'src', with the names changed to be - under 'dst'. - - 'preserve_mode' and 'preserve_times' are the same as for - 'copy_file'; note that they only apply to regular files, not to - directories. If 'preserve_symlinks' is true, symlinks will be - copied as symlinks (on platforms that support them!); otherwise - (the default), the destination of the symlink will be copied. - 'update' and 'verbose' are the same as for 'copy_file'. - """ - -def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: - """Recursively remove an entire directory tree. - - Any errors are ignored (apart from being reported to stdout if 'verbose' - is true). - """ +) -> list[str]: ... +def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi index 68dc636cc6e2f..412b94131b54e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi @@ -1,9 +1,3 @@ -"""distutils.dist - -Provides the Distribution class, which represents the module distribution -being built/installed/distributed. -""" - from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite from collections.abc import Iterable, MutableMapping from distutils.cmd import Command @@ -37,10 +31,6 @@ _OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str _CommandT = TypeVar("_CommandT", bound=Command) class DistributionMetadata: - """Dummy class to hold the distribution meta-data: name, version, - author, and so forth. - """ - def __init__(self, path: StrOrBytesPath | None = None) -> None: ... name: str | None version: str | None @@ -59,15 +49,9 @@ class DistributionMetadata: provides: list[str] | None requires: list[str] | None obsoletes: list[str] | None - def read_pkg_file(self, file: IO[str]) -> None: - """Reads the metadata values from a file object.""" - - def write_pkg_info(self, base_dir: StrPath) -> None: - """Write the PKG-INFO file into the release tree.""" - - def write_pkg_file(self, file: SupportsWrite[str]) -> None: - """Write the PKG-INFO format data to a file object.""" - + def read_pkg_file(self, file: IO[str]) -> None: ... + def write_pkg_info(self, base_dir: StrPath) -> None: ... + def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... def get_name(self) -> str: ... def get_version(self) -> str: ... def get_fullname(self) -> str: ... @@ -94,39 +78,10 @@ class DistributionMetadata: def set_obsoletes(self, value: Iterable[str]) -> None: ... class Distribution: - """The core of the Distutils. Most of the work hiding behind 'setup' - is really done within a Distribution instance, which farms the work out - to the Distutils commands specified on the command line. - - Setup scripts will almost never instantiate Distribution directly, - unless the 'setup()' function is totally inadequate to their needs. - However, it is conceivable that a setup script might wish to subclass - Distribution for some specialized purpose, and then pass the subclass - to 'setup()' as the 'distclass' keyword argument. 
If so, it is - necessary to respect the expectations that 'setup' has of Distribution. - See the code for 'setup()', in core.py, for details. - """ - cmdclass: dict[str, type[Command]] metadata: DistributionMetadata - def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: - """Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - - def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: - """Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - + def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... global_options: ClassVar[_OptionsList] common_usage: ClassVar[str] @@ -158,88 +113,18 @@ class Distribution: have_run: Incomplete want_user_cfg: bool def dump_option_dicts(self, header=None, commands=None, indent: str = "") -> None: ... - def find_config_files(self): - """Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: distutils.cfg in the - Distutils installation directory (ie. where the top-level - Distutils __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. - - The file in the user's home directory can be disabled with the - --no-user-cfg option. - """ + def find_config_files(self): ... commands: Incomplete - def parse_command_line(self): - """Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in core.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Distutils commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. Any error in that 'options' - attribute raises DistutilsGetoptError; any error on the - command-line raises DistutilsArgError. If no Distutils commands - were found on the command line, raises DistutilsArgError. Return - true if command-line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). 
- """ - - def finalize_options(self) -> None: - """Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - - def print_command_list(self, commands, header, max_length) -> None: - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - - def print_commands(self) -> None: - """Print out a help message listing all available commands with a - description of each. The list is divided into "standard commands" - (listed in distutils.command.__all__) and "extra commands" - (mentioned in self.cmdclass, but not a standard command). The - descriptions come from the command class attribute - 'description'. - """ - - def get_command_list(self): - """Get a list of (command, description) tuples. - The list is divided into "standard commands" (listed in - distutils.command.__all__) and "extra commands" (mentioned in - self.cmdclass, but not a standard command). The descriptions come - from the command class attribute 'description'. - """ - - def get_command_packages(self): - """Return a list of packages from which commands are loaded.""" + def parse_command_line(self): ... + def finalize_options(self) -> None: ... + def handle_display_options(self, option_order): ... + def print_command_list(self, commands, header, max_length) -> None: ... + def print_commands(self) -> None: ... + def get_command_list(self): ... + def get_command_packages(self): ... # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. @overload - def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: - """Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. - """ - + def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ... @overload def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ... @overload @@ -284,19 +169,7 @@ class Distribution: @overload def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ... @overload - def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: - """Return the class that implements the Distutils command named by - 'command'. First we check the 'cmdclass' dictionary; if the - command is mentioned there, we fetch the class object from the - dictionary and return it. Otherwise we load the command module - ("distutils.command." + command) and fetch the command class from - the module. The loaded class is also stored in 'cmdclass' - to speed future calls to 'get_command_class()'. - - Raises DistutilsModuleError if the expected module could not be - found, or if that module does not define the expected class. - """ - + def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ... @overload def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ... @overload @@ -338,26 +211,7 @@ class Distribution: @overload def get_command_class(self, command: str) -> type[Command]: ... 
@overload - def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: - """Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': ie., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install" command for an example. Only - reinitializes the sub-commands that actually matter, ie. those - whose test predicates return true. - - Returns the reinitialized command object. - """ - + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ... @overload def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ... @overload @@ -403,21 +257,8 @@ class Distribution: @overload def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ... def announce(self, msg, level: int = 2) -> None: ... - def run_commands(self) -> None: - """Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - - def run_command(self, command: str) -> None: - """Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - + def run_commands(self) -> None: ... + def run_command(self, command: str) -> None: ... def has_pure_modules(self) -> bool: ... def has_ext_modules(self) -> bool: ... def has_c_libraries(self) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi index c277b0a6074e7..e483362bfbf19 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi @@ -1,97 +1,19 @@ -"""distutils.errors - -Provides exceptions used by the Distutils modules. Note that Distutils -modules may raise standard exceptions; in particular, SystemExit is -usually raised for errors that are obviously the end-user's fault -(eg. bad command-line arguments). - -This module is safe to use in "from ... import *" mode; it only exports -symbols whose names start with "Distutils" and end with "Error". -""" - -class DistutilsError(Exception): - """The root of all Distutils evil.""" - -class DistutilsModuleError(DistutilsError): - """Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes). - """ - -class DistutilsClassError(DistutilsError): - """Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. 
implementing some part of the - "command "interface. - """ - -class DistutilsGetoptError(DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus.""" - -class DistutilsArgError(DistutilsError): - """Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage. - """ - -class DistutilsFileError(DistutilsError): - """Any problems in the filesystem: expected file not found, etc. - Typically this is for problems that we detect before OSError - could be raised. - """ - -class DistutilsOptionError(DistutilsError): - """Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise DistutilsSetupError instead. - """ - -class DistutilsSetupError(DistutilsError): - """For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'. - """ - -class DistutilsPlatformError(DistutilsError): - """We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. trying to compile - C files on a platform not supported by a CCompiler subclass. - """ - -class DistutilsExecError(DistutilsError): - """Any problems executing an external program (such as the C - compiler, when compiling C files). - """ - -class DistutilsInternalError(DistutilsError): - """Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!). - """ - -class DistutilsTemplateError(DistutilsError): - """Syntax error in a file list template.""" - -class DistutilsByteCompileError(DistutilsError): - """Byte compile error.""" - -class CCompilerError(Exception): - """Some compile/link operation failed.""" - -class PreprocessError(CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - -class CompileError(CCompilerError): - """Failure to compile one or more C/C++ source files.""" - -class LibError(CCompilerError): - """Failure to create a static library from one or more C/C++ object - files. - """ - -class LinkError(CCompilerError): - """Failure to link one or more C/C++ object files into an executable - or shared library file. - """ - -class UnknownFileError(CCompilerError): - """Attempt to process an unknown file type.""" +class DistutilsError(Exception): ... +class DistutilsModuleError(DistutilsError): ... +class DistutilsClassError(DistutilsError): ... +class DistutilsGetoptError(DistutilsError): ... +class DistutilsArgError(DistutilsError): ... +class DistutilsFileError(DistutilsError): ... +class DistutilsOptionError(DistutilsError): ... +class DistutilsSetupError(DistutilsError): ... +class DistutilsPlatformError(DistutilsError): ... +class DistutilsExecError(DistutilsError): ... +class DistutilsInternalError(DistutilsError): ... +class DistutilsTemplateError(DistutilsError): ... +class DistutilsByteCompileError(DistutilsError): ... +class CCompilerError(Exception): ... +class PreprocessError(CCompilerError): ... +class CompileError(CCompilerError): ... +class LibError(CCompilerError): ... +class LinkError(CCompilerError): ... +class UnknownFileError(CCompilerError): ... 
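The docstrings stripped above document the timestamp helpers in dep_util, the directory utilities in dir_util, and the Distutils error hierarchy, which the stubs now expose without prose. A minimal sketch of the behaviour those removed docstrings describe, assuming an interpreter that still ships distutils (pre-3.12) and purely hypothetical src/ and build/ paths; this is illustration only, not part of the patch:

from distutils.dep_util import newer, newer_group
from distutils.dir_util import copy_tree, mkpath, remove_tree
from distutils.errors import DistutilsFileError

# Hypothetical paths; distutils must still be importable (Python < 3.12).
try:
    # newer() is true when 'source' is more recently modified than 'target'
    # (or when 'target' is missing); it raises DistutilsFileError if 'source'
    # itself does not exist.
    if newer("src/module.c", "build/module.o"):
        print("module.o is out of date")
except DistutilsFileError:
    print("src/module.c is missing")

# newer_group() treats the target as out of date if any listed source is
# newer; missing="ignore" silently drops sources that do not exist.
stale = newer_group(["src/a.c", "src/b.c"], "build/libfoo.a", missing="ignore")

# mkpath() returns the directories it actually created; copy_tree() returns
# the output names of the files it copied (or would copy under dry_run=1).
created = mkpath("build/tmp", verbose=0)
copied = copy_tree("src", "build/src", update=1, verbose=0)
remove_tree("build/tmp", verbose=0)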
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi index e0253524f5b26..789bbf6ec3d12 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi @@ -1,74 +1,4 @@ -"""distutils.extension - -Provides the Extension class, used to describe C/C++ extension -modules in setup scripts. -""" - class Extension: - """Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. - *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. - include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. - depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. 
- """ - name: str sources: list[str] include_dirs: list[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi index 7254af3933ef3..f3fa2a1255a6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -1,13 +1,3 @@ -"""distutils.fancy_getopt - -Wrapper around the standard getopt module that provides the following -additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object -""" - from collections.abc import Iterable, Mapping from getopt import _SliceableT, _StrSequenceT_co from re import Pattern @@ -22,48 +12,18 @@ neg_alias_re: Final[Pattern[str]] longopt_xlate: Final[dict[int, int]] class FancyGetopt: - """Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - * boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ - def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO: kinda wrong, `getopt(object=object())` is invalid @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None = None, object: None = None - ) -> tuple[_StrSequenceT_co, OptionDummy]: - """Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - + ) -> tuple[_StrSequenceT_co, OptionDummy]: ... @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None, object: Any ) -> _StrSequenceT_co: ... # object is an arbitrary non-slotted object - def get_option_order(self) -> list[tuple[str, str]]: - """Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - - def generate_help(self, header: str | None = None) -> list[str]: - """Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ + def get_option_order(self) -> list[tuple[str, str]]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... # Same note as FancyGetopt.getopt @overload @@ -77,24 +37,8 @@ def fancy_getopt( WS_TRANS: Final[dict[int, str]] -def wrap_text(text: str, width: int) -> list[str]: - """wrap_text(text : string, width : int) -> [string] - - Split 'text' into multiple lines of no more than 'width' characters - each, and return the list of strings that results. - """ - -def translate_longopt(opt: str) -> str: - """Convert a long option name to a valid Python identifier by - changing "-" to "_". - """ +def wrap_text(text: str, width: int) -> list[str]: ... +def translate_longopt(opt: str) -> str: ... 
class OptionDummy: - """Dummy class just used as a place to hold command-line option - values as instance attributes. - """ - - def __init__(self, options: Iterable[str] = []) -> None: - """Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None. - """ + def __init__(self, options: Iterable[str] = []) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi index ba4ded2abf9f2..c763f91a958d7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi @@ -1,8 +1,3 @@ -"""distutils.file_util - -Utility functions for operating on single files. -""" - from _typeshed import BytesPath, StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal, TypeVar, overload @@ -20,32 +15,7 @@ def copy_file( link: str | None = None, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> tuple[_StrPathT | str, bool]: - """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is - copied there with the same name; otherwise, it must be a filename. (If - the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' - is true (the default), the file's mode (type and permission bits, or - whatever is analogous on the current platform) is copied. If - 'preserve_times' is true (the default), the last-modified and - last-access times are copied as well. If 'update' is true, 'src' will - only be copied if 'dst' does not exist, or if 'dst' does exist but is - older than 'src'. - - 'link' allows you to make hard links (os.link) or symbolic links - (os.symlink) instead of copying: set it to "hard" or "sym"; if it is - None (the default), files are copied. Don't set 'link' on systems that - don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. If hardlink fails, falls back to - _copy_file_contents(). - - Under Mac OS, uses the native file copy function in macostools; on - other systems, uses '_copy_file_contents()' to copy file contents. - - Return a tuple (dest_name, copied): 'dest_name' is the actual name of - the output file, and 'copied' is true if the file was copied (or would - have been copied, if 'dry_run' true). - """ - +) -> tuple[_StrPathT | str, bool]: ... @overload def copy_file( src: BytesPath, @@ -60,20 +30,9 @@ def copy_file( @overload def move_file( src: StrPath, dst: _StrPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 -) -> _StrPathT | str: - """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will - be moved into it with the same name; otherwise, 'src' is just renamed - to 'dst'. Return the new full name of the file. - - Handles cross-device moves on Unix using 'copy_file()'. What about - other systems??? - """ - +) -> _StrPathT | str: ... @overload def move_file( src: BytesPath, dst: _BytesPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 ) -> _BytesPathT | bytes: ... -def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. - """ +def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: ... 
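The file_util docstrings removed just above spell out the return conventions of the single-file helpers. A small sketch of how they read in practice, under the same pre-3.12 assumption, with hypothetical filenames and directories that are assumed to already exist:

from distutils.file_util import copy_file, move_file, write_file

# Hypothetical paths; assumes build/ and dist/ already exist.
# copy_file() returns (dest_name, copied): the actual output filename plus a
# flag that is true if the file was copied (or would have been, under dry_run=1).
dest, copied = copy_file("README.txt", "build/", update=1, verbose=0)

# move_file() returns the new full name of the file; cross-device moves on
# Unix fall back to copy_file().
new_name = move_file("build/README.txt", "dist/README.txt", verbose=0)

# write_file() writes a sequence of lines (without terminators) to a file.
write_file("dist/MANIFEST", ["README.txt", "setup.py"])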
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi index 5e450f25478c8..607a78a1fbaca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi @@ -1,39 +1,15 @@ -"""distutils.filelist - -Provides the FileList class, used for poking about the filesystem -and building lists of files. -""" - from collections.abc import Iterable from re import Pattern from typing import Literal, overload # class is entirely undocumented class FileList: - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. - - Instance attributes: - dir - directory from which files will be taken -- only used if - 'allfiles' not supplied to constructor - files - list of filenames currently being built/filtered/manipulated - allfiles - complete list of files under consideration (ie. without any - filtering applied) - """ - allfiles: Iterable[str] | None files: list[str] def __init__(self, warn: None = None, debug_print: None = None) -> None: ... def set_allfiles(self, allfiles: Iterable[str]) -> None: ... def findall(self, dir: str = ".") -> None: ... - def debug_print(self, msg: str) -> None: - """Print 'msg' to stdout if the global DEBUG (taken from the - DISTUTILS_DEBUG environment variable) flag is true. - """ - + def debug_print(self, msg: str) -> None: ... def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... def sort(self) -> None: ... @@ -42,32 +18,7 @@ class FileList: @overload def include_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: - """Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. Patterns - are not quite the same as implemented by the 'fnmatch' module: '*' - and '?' match non-special characters, where "special" is platform- - dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found, False otherwise. - """ - + ) -> bool: ... @overload def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -81,14 +32,7 @@ class FileList: @overload def exclude_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. Other parameters are the same as for - 'include_pattern()', above. - The list 'self.files' is modified in place. - Return True if files are found, False otherwise. - """ - + ) -> bool: ... 
@overload def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -100,29 +44,12 @@ class FileList: is_regex: bool | Literal[0, 1] = 0, ) -> bool: ... -def findall(dir: str = ".") -> list[str]: - """ - Find all files under 'dir' and return the list of full filenames. - Unless dir is '.', return full filenames with dir prepended. - """ - -def glob_to_re(pattern: str) -> str: - """Translate a shell-like glob pattern to a regular expression; return - a string containing the regex. Differs from 'fnmatch.translate()' in - that '*' does not match "special characters" (which are - platform-specific). - """ - +def findall(dir: str = ".") -> list[str]: ... +def glob_to_re(pattern: str) -> str: ... @overload def translate_pattern( pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 -) -> Pattern[str]: - """Translate a shell-like wildcard pattern to a compiled regular - expression. Return the compiled regex. If 'is_regex' true, - then 'pattern' is directly compiled to a regex (if it's a string) - or just returned as-is (assumes it's a regex object). - """ - +) -> Pattern[str]: ... @overload def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi index 442fc5feb7824..7246dd6be0cdf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi @@ -1,5 +1,3 @@ -"""A simple log mechanism styled after PEP 282.""" - from typing import Any, Final DEBUG: Final = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi index 8ab7990270bb0..80872a6b739f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi @@ -1,12 +1,3 @@ -"""distutils.msvccompiler - -Contains MSVCCompiler, an implementation of the abstract CCompiler class -for the Microsoft Visual Studio. -""" - from distutils.ccompiler import CCompiler -class MSVCCompiler(CCompiler): - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class. - """ +class MSVCCompiler(CCompiler): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi index c2ac88c4b25d1..ae07a49504fe1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi @@ -1,11 +1,3 @@ -"""distutils.spawn - -Provides the 'spawn()' function, a front-end to various platform- -specific functions for launching another program in a sub-process. -Also provides the 'find_executable()' to search the path for a given -executable name. -""" - from collections.abc import Iterable from typing import Literal @@ -14,26 +6,5 @@ def spawn( search_path: bool | Literal[0, 1] = 1, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, -) -> None: - """Run another program, specified as a command list 'cmd', in a new process. - - 'cmd' is just the argument list for the new process, ie. - cmd[0] is the program to run and cmd[1:] are the rest of its arguments. 
- There is no way to run a program with a name different from that of its - executable. - - If 'search_path' is true (the default), the system's executable - search path will be used to find the program; otherwise, cmd[0] - must be the exact path to the executable. If 'dry_run' is true, - the command will not actually be run. - - Raise DistutilsExecError if running the program fails in any way; just - return on success. - """ - -def find_executable(executable: str, path: str | None = None) -> str | None: - """Tries to find 'executable' in the directories listed in 'path'. - - A string listing directories separated by 'os.pathsep'; defaults to - os.environ['PATH']. Returns the complete filename or None if not found. - """ +) -> None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi index c2ba23f1607cf..4a9c45eb562a4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi @@ -1,14 +1,3 @@ -"""Provide access to Python's configuration information. The specific -configuration variables available depend heavily on the platform and -configuration. The values may be retrieved using -get_config_var(name), and the list of variables is available via -get_config_vars().keys(). Additional convenience functions are also -available. - -Written by: Fred L. Drake, Jr. -Email: -""" - import sys from collections.abc import Mapping from distutils.ccompiler import CCompiler @@ -22,85 +11,23 @@ BASE_EXEC_PREFIX: Final[str] project_base: Final[str] python_build: Final[bool] -def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: - """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in - 'string' according to 'vars' (a dictionary mapping variable names to - values). Variables not present in 'vars' are silently expanded to the - empty string. The variable values in 'vars' should not contain further - variable expansions; if 'vars' is the output of 'parse_makefile()', - you're fine. Returns a variable-expanded version of 's'. - """ - +def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ... @overload @deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") -def get_config_var(name: Literal["SO"]) -> int | str | None: - """Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - +def get_config_var(name: Literal["SO"]) -> int | str | None: ... @overload def get_config_var(name: str) -> int | str | None: ... @overload -def get_config_vars() -> dict[str, str | int]: - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - +def get_config_vars() -> dict[str, str | int]: ... @overload def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ... 
-def get_config_h_filename() -> str: - """Return the path of pyconfig.h.""" - -def get_makefile_filename() -> str: - """Return the path of the Makefile.""" - -def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: - """Return the directory containing installed Python header files. - - If 'plat_specific' is false (the default), this is the path to the - non-platform-specific header files, i.e. Python.h and so on; - otherwise, this is the path to platform-specific header files - (namely pyconfig.h). - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... +def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: ... def get_python_lib( plat_specific: bool | Literal[0, 1] = 0, standard_lib: bool | Literal[0, 1] = 0, prefix: str | None = None -) -> str: - """Return the directory containing the Python library (standard or - site additions). - - If 'plat_specific' is true, return the directory containing - platform-specific modules, i.e. any module from a non-pure-Python - module distribution; otherwise, return the platform-shared library - directory. If 'standard_lib' is true, return the directory - containing standard Python library modules; otherwise, return the - directory for site-specific modules. - - If 'prefix' is supplied, use it instead of sys.base_prefix or - sys.base_exec_prefix -- i.e., ignore 'plat_specific'. - """ - -def customize_compiler(compiler: CCompiler) -> None: - """Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. - """ +) -> str: ... +def customize_compiler(compiler: CCompiler) -> None: ... if sys.version_info < (3, 10): - def get_python_version() -> str: - """Return a string containing the major and minor Python version, - leaving off the patchlevel. Sample return values could be '1.5' - or '2.2'. - """ + def get_python_version() -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi index 7c9c6c65c699b..54951af7e55d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi @@ -1,72 +1,6 @@ -"""text_file - -provides the TextFile class, which gives an interface to text files -that (optionally) takes care of stripping comments, ignoring blank -lines, and joining lines with backslashes. -""" - from typing import IO, Literal class TextFile: - """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. 
- - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not. - """ - def __init__( self, filename: str | None = None, @@ -78,52 +12,10 @@ class TextFile: skip_blanks: bool | Literal[0, 1] = ..., join_lines: bool | Literal[0, 1] = ..., collapse_join: bool | Literal[0, 1] = ..., - ) -> None: - """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'. - """ - - def open(self, filename: str) -> None: - """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor. - """ - - def close(self) -> None: - """Close the current file and forget everything we know about it - (filename, current line number). - """ - - def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: - """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line. - """ - - def readline(self) -> str | None: - """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). 
If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not. - """ - - def readlines(self) -> list[str]: - """Read and return the list of all logical lines remaining in the - current file. - """ - - def unreadline(self, line: str) -> str: - """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead. - """ + ) -> None: ... + def open(self, filename: str) -> None: ... + def close(self) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... + def readline(self) -> str | None: ... + def readlines(self) -> list[str]: ... + def unreadline(self, line: str) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi index e1a17ecf3a682..e1d443471af36 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi @@ -1,18 +1,3 @@ -"""distutils.unixccompiler - -Contains the UnixCCompiler class, a subclass of CCompiler that handles -the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi index 8a492c6873464..0e1bb4165d99d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi @@ -1,9 +1,3 @@ -"""distutils.util - -Miscellaneous utility functions -- anything that doesn't fit into -one of the other *util.py modules. -""" - from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any, Literal @@ -11,99 +5,21 @@ from typing_extensions import TypeVarTuple, Unpack _Ts = TypeVarTuple("_Ts") -def get_host_platform() -> str: - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; eg. on Linux, the kernel version isn't - particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. 
- - """ - +def get_host_platform() -> str: ... def get_platform() -> str: ... -def convert_path(pathname: str) -> str: - """Return 'pathname' as a name that will work on the native filesystem, - i.e. split it on '/' and put it back together again using the current - directory separator. Needed because filenames in the setup script are - always supplied in Unix style, and have to be converted to the local - convention before we can actually use them in the filesystem. Raises - ValueError on non-Unix-ish systems if 'pathname' either starts or - ends with a slash. - """ - -def change_root(new_root: StrPath, pathname: StrPath) -> str: - """Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. - """ - -def check_environ() -> None: - """Ensure that 'os.environ' has all the environment variables we - guarantee that users can use in config files, command-line options, - etc. Currently this includes: - HOME - user's home directory (Unix only) - PLAT - description of the current platform, including hardware - and OS (see 'get_platform()') - """ - -def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: - """Perform shell/Perl-style variable substitution on 'string'. Every - occurrence of '$' followed by a name is considered a variable, and - variable is substituted by the value found in the 'local_vars' - dictionary, or in 'os.environ' if it's not in 'local_vars'. - 'os.environ' is first checked/augmented to guarantee that it contains - certain values: see 'check_environ()'. Raise ValueError for any - variables not found in either 'local_vars' or 'os.environ'. - """ - -def split_quoted(s: str) -> list[str]: - """Split a string up according to Unix shell-like rules for quotes and - backslashes. In short: words are delimited by spaces, as long as those - spaces are not escaped by a backslash, or inside a quoted string. - Single and double quotes are equivalent, and the quote characters can - be backslash-escaped. The backslash is stripped from any two-character - escape sequence, leaving only the escaped character. The quote - characters are stripped from any quoted string. Returns a list of - words. - """ - +def convert_path(pathname: str) -> str: ... +def change_root(new_root: StrPath, pathname: StrPath) -> str: ... +def check_environ() -> None: ... +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... +def split_quoted(s: str) -> list[str]: ... def execute( func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, -) -> None: - """Perform some action that affects the outside world (eg. by - writing to the filesystem). Such actions are special because they - are disabled by the 'dry_run' flag. This method takes care of all - that bureaucracy for you; all you have to do is supply the - function to call and an argument tuple for it (to embody the - "external action" being performed), and an optional message to - print. - """ - -def strtobool(val: str) -> Literal[0, 1]: - """Convert a string representation of truth to true (1) or false (0). - - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - +) -> None: ... +def strtobool(val: str) -> Literal[0, 1]: ... 
def byte_compile( py_files: list[str], optimize: int = 0, @@ -113,54 +29,14 @@ def byte_compile( verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, direct: bool | None = None, -) -> None: - """Byte-compile a collection of Python source files to .pyc - files in a __pycache__ subdirectory. 'py_files' is a list - of files to compile; any files that don't end in ".py" are silently - skipped. 'optimize' must be one of the following: - 0 - don't optimize - 1 - normal optimization (like "python -O") - 2 - extra optimization (like "python -OO") - If 'force' is true, all files are recompiled regardless of - timestamps. - - The source filename encoded in each bytecode file defaults to the - filenames listed in 'py_files'; you can modify these with 'prefix' and - 'basedir'. 'prefix' is a string that will be stripped off of each - source filename, and 'base_dir' is a directory name that will be - prepended (after 'prefix' is stripped). You can supply either or both - (or neither) of 'prefix' and 'base_dir', as you wish. - - If 'dry_run' is true, doesn't actually do anything that would - affect the filesystem. - - Byte-compilation is either done directly in this interpreter process - with the standard py_compile module, or indirectly by writing a - temporary script and executing it. Normally, you should let - 'byte_compile()' figure out to use direct compilation or not (see - the source for details). The 'direct' flag is used by the script - generated in indirect mode; unless you know what you're doing, leave - it set to None. - """ - -def rfc822_escape(header: str) -> str: - """Return a version of the string escaped for inclusion in an - RFC-822 header, by ensuring there are 8 spaces space after each newline. - """ - +) -> None: ... +def rfc822_escape(header: str) -> str: ... def run_2to3( files: Iterable[str], fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Unused = None, -) -> None: - """Invoke 2to3 on a list of Python files. - The files should all come from the build area, as the - modification is done in-place. To reduce the build time, - only files modified since the last invocation of this - function should be passed in the files argument. - """ - +) -> None: ... def copydir_run_2to3( src: StrPath, dest: StrPath, @@ -168,20 +44,9 @@ def copydir_run_2to3( fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None, -) -> list[str]: - """Recursively copy a directory, only copying new and changed files, - running run_2to3 over all newly copied Python modules afterward. - - If you give a template string, it's parsed like a MANIFEST.in. - """ +) -> list[str]: ... class Mixin2to3: - """Mixin class for commands that run 2to3. - To configure 2to3, setup scripts may either change - the class variables, or inherit from individual commands - to override how 2to3 is invoked. - """ - fixer_names: Iterable[str] | None options: Mapping[str, Any] | None explicit: Container[str] | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi index e6c38de01efa0..47da65ef87aab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi @@ -1,33 +1,8 @@ -"""Provides classes to represent module version numbers (one class for -each style of version numbering). 
There are currently two such classes -implemented: StrictVersion and LooseVersion. - -Every version number class implements the following interface: - * the 'parse' method takes a string and parses it to some internal - representation; if the string is an invalid version number, - 'parse' raises a ValueError exception - * the class constructor takes an optional string argument which, - if supplied, is passed to 'parse' - * __str__ reconstructs the string that was passed to 'parse' (or - an equivalent string -- ie. one that will generate an equivalent - version number instance) - * __repr__ generates Python code to recreate the version number instance - * _cmp compares the current instance with either another instance - of the same class or a string (which will be parsed to an instance - of the same class, thus must follow the same rules) -""" - from abc import abstractmethod from re import Pattern from typing_extensions import Self class Version: - """Abstract base class for version numbering classes. Just provides - constructor (__init__) and reproducer (__repr__), because those - seem to be the same for all version numbering classes; and route - rich comparisons to _cmp. - """ - def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self | str) -> bool: ... def __le__(self, other: Self | str) -> bool: ... @@ -43,41 +18,6 @@ class Version: def _cmp(self, other: Self | str) -> bool: ... class StrictVersion(Version): - """Version numbering for anal retentives and software idealists. - Implements the standard interface for version number classes as - described above. A version number consists of two or three - dot-separated numeric components, with an optional "pre-release" tag - on the end. The pre-release tag consists of the letter 'a' or 'b' - followed by a number. If the numeric components of two version - numbers are equal, then one with a pre-release tag will always - be deemed earlier (lesser) than one without. - - The following are valid version numbers (shown in the order that - would be obtained by sorting according to the supplied cmp function): - - 0.4 0.4.0 (these two are equivalent) - 0.4.1 - 0.5a1 - 0.5b3 - 0.5 - 0.9.6 - 1.0 - 1.0.4a3 - 1.0.4b1 - 1.0.4 - - The following are examples of invalid version numbers: - - 1 - 2.7.2.2 - 1.3.a4 - 1.3pl1 - 1.3c4 - - The rationale for this version numbering system will be explained - in the distutils documentation. - """ - version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None @@ -87,37 +27,6 @@ class StrictVersion(Version): def _cmp(self, other: Self | str) -> bool: ... class LooseVersion(Version): - """Version numbering for anarchists and software realists. - Implements the standard interface for version number classes as - described above. A version number consists of a series of numbers, - separated by either periods or strings of letters. When comparing - version numbers, the numeric components will be compared - numerically, and the alphabetic components lexically. The following - are all valid version numbers, in no particular order: - - 1.5.1 - 1.5.2b2 - 161 - 3.10a - 8.02 - 3.4j - 1996.07.12 - 3.2.pl0 - 3.1.1.6 - 2g6 - 11g - 0.960923 - 2.2beta29 - 1.13++ - 5.5.kw - 2.0b1pl0 - - In fact, there is no such thing as an invalid version number under - this scheme; the rules for comparison are simple and predictable, - but may not always give the results you want (for some definition - of "want"). - """ - component_re: Pattern[str] vstring: str version: tuple[str | int, ...] 
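# Editorial sketch only, not part of the upstream patch: the ordering rules that the
# stripped StrictVersion/LooseVersion docstrings describe, assuming an interpreter
# that still ships distutils (the module was removed from the stdlib in Python 3.12).
from distutils.version import LooseVersion, StrictVersion

# StrictVersion: two or three numeric components plus an optional pre-release tag;
# "0.4" and "0.4.0" are equivalent, and a pre-release sorts before the final release.
assert StrictVersion("0.4") == StrictVersion("0.4.0")
assert StrictVersion("1.0.4a3") < StrictVersion("1.0.4")

# LooseVersion: numeric components compare numerically, alphabetic ones lexically,
# and essentially any version string parses without error.
assert LooseVersion("1.5.1") < LooseVersion("1.5.2b2")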
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi index 55a37fc365282..1bb96e1a77868 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi @@ -1,42 +1,3 @@ -"""Module doctest -- a framework for running examples in docstrings. - -In simplest use, end each module M to be tested with: - -def _test(): - import doctest - doctest.testmod() - -if __name__ == "__main__": - _test() - -Then running the module as a script will cause the examples in the -docstrings to get executed and verified: - -python M.py - -This won't display anything unless an example fails, in which case the -failing example(s) and the cause(s) of the failure(s) are printed to stdout -(why not stderr? because stderr is a lame hack <0.2 wink>), and the final -line of output is "Test failed.". - -Run it with the -v switch instead: - -python M.py -v - -and a detailed report of all examples tried is printed to stdout, along -with assorted summaries at the end. - -You can force verbose mode by passing "verbose=True" to testmod, or prohibit -it by passing "verbose=False". In either of those cases, sys.argv is not -examined by testmod. - -There are a variety of other ways to run doctests, including integration -with the unittest framework, and support for running non-Python text -files containing doctests. There are also many ways to override parts -of doctest's default behaviors. See the Library Reference Manual for -details. -""" - import sys import types import unittest @@ -93,8 +54,6 @@ if sys.version_info >= (3, 13): else: class TestResults(NamedTuple): - """TestResults(failed, attempted)""" - failed: int attempted: int @@ -123,41 +82,6 @@ BLANKLINE_MARKER: Final = "" ELLIPSIS_MARKER: Final = "..." class Example: - """ - A single doctest example, consisting of source code and expected - output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. `exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that precede the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. - """ - source: str want: str exc_msg: str | None @@ -177,30 +101,6 @@ class Example: def __eq__(self, other: object) -> bool: ... 
class DocTest: - """ - A collection of doctest examples that should be run in a single - namespace. Each `DocTest` defines the following attributes: - - - examples: the list of examples. - - - globs: The namespace (aka globals) that the examples should - be run in. - - - name: A name identifying the DocTest (typically, the name of - the object whose docstring this DocTest was extracted from). - - - filename: The name of the file that this DocTest was extracted - from, or `None` if the filename is unknown. - - - lineno: The line number within filename where this DocTest - begins, or `None` if the line number is unavailable. This - line number is zero-based, with respect to the beginning of - the file. - - - docstring: The string that the examples were extracted from, - or `None` if the string is unavailable. - """ - examples: list[Example] globs: dict[str, Any] name: str @@ -215,80 +115,20 @@ class DocTest: filename: str | None, lineno: int | None, docstring: str | None, - ) -> None: - """ - Create a new DocTest containing the given examples. The - DocTest's globals are initialized with a copy of `globs`. - """ - + ) -> None: ... def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... def __eq__(self, other: object) -> bool: ... class DocTestParser: - """ - A class used to parse strings containing doctest examples. - """ - - def parse(self, string: str, name: str = "") -> list[str | Example]: - """ - Divide the given string into examples and intervening text, - and return them as a list of alternating Examples and strings. - Line numbers for the Examples are 0-based. The optional - argument `name` is a name identifying this string, and is only - used for error messages. - """ - - def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: - """ - Extract all doctest examples from the given string, and - collect them into a `DocTest` object. - - `globs`, `name`, `filename`, and `lineno` are attributes for - the new `DocTest` object. See the documentation for `DocTest` - for more information. - """ - - def get_examples(self, string: str, name: str = "") -> list[Example]: - """ - Extract all doctest examples from the given string, and return - them as a list of `Example` objects. Line numbers are - 0-based, because it's most common in doctests that nothing - interesting appears on the same line as opening triple-quote, - and so the first interesting line is called "line 1" then. - - The optional argument `name` is a name identifying this - string, and is only used for error messages. - """ + def parse(self, string: str, name: str = "") -> list[str | Example]: ... + def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... + def get_examples(self, string: str, name: str = "") -> list[Example]: ... class DocTestFinder: - """ - A class used to extract the DocTests that are relevant to a given - object, from its docstring and the docstrings of its contained - objects. Doctests can currently be extracted from the following - object types: modules, functions, classes, methods, staticmethods, - classmethods, and properties. - """ - def __init__( self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True - ) -> None: - """ - Create a new doctest finder. 
- - The optional argument `parser` specifies a class or - function that should be used to create new DocTest objects (or - objects that implement the same interface as DocTest). The - signature for this factory function should match the signature - of the DocTest constructor. - - If the optional argument `recurse` is false, then `find` will - only examine the given object, and not any contained objects. - - If the optional argument `exclude_empty` is false, then `find` - will include tests for objects with empty docstrings. - """ - + ) -> None: ... def find( self, obj: object, @@ -296,107 +136,11 @@ class DocTestFinder: module: None | bool | types.ModuleType = None, globs: dict[str, Any] | None = None, extraglobs: dict[str, Any] | None = None, - ) -> list[DocTest]: - """ - Return a list of the DocTests that are defined by the given - object's docstring, or by any of its contained objects' - docstrings. - - The optional parameter `module` is the module that contains - the given object. If the module is not specified or is None, then - the test finder will attempt to automatically determine the - correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. - - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - - Contained objects whose module does not match `module` are ignored. - - If `module` is False, no attempt to find the module will be made. - This is obscure, of use mostly in tests: if `module` is False, or - is None but cannot be found automatically, then all objects are - considered to belong to the (non-existent) module, so all contained - objects will (recursively) be searched for doctests. - - The globals for each DocTest is formed by combining `globs` - and `extraglobs` (bindings in `extraglobs` override bindings - in `globs`). A new copy of the globals dictionary is created - for each DocTest. If `globs` is not specified, then it - defaults to the module's `__dict__`, if specified, or {} - otherwise. If `extraglobs` is not specified, then it defaults - to {}. - - """ + ) -> list[DocTest]: ... _Out: TypeAlias = Callable[[str], object] class DocTestRunner: - """ - A class used to run DocTest test cases, and accumulate statistics. - The `run` method is used to process a single DocTest case. It - returns a TestResults instance. - - >>> save_colorize = _colorize.COLORIZE - >>> _colorize.COLORIZE = False - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> tests.sort(key = lambda test: test.name) - >>> for test in tests: - ... print(test.name, '->', runner.run(test)) - _TestClass -> TestResults(failed=0, attempted=2) - _TestClass.__init__ -> TestResults(failed=0, attempted=2) - _TestClass.get -> TestResults(failed=0, attempted=2) - _TestClass.square -> TestResults(failed=0, attempted=1) - - The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated TestResults - instance: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 test in _TestClass.square - 7 tests in 4 items. - 7 passed. - Test passed. 
- TestResults(failed=0, attempted=7) - - The aggregated number of tried examples and failed examples is also - available via the `tries`, `failures` and `skips` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - >>> runner.skips - 0 - - The comparison between expected outputs and actual outputs is done - by an `OutputChecker`. This comparison may be customized with a - number of option flags; see the documentation for `testmod` for - more information. If the option flags are insufficient, then the - comparison may also be customized by passing a subclass of - `OutputChecker` to the constructor. - - The test runner's display output can be controlled in two ways. - First, an output function (`out`) can be passed to - `TestRunner.run`; this function will be called with strings that - should be displayed. It defaults to `sys.stdout.write`. If - capturing the output is not sufficient, then the display output - can be also customized by subclassing DocTestRunner, and - overriding the methods `report_start`, `report_success`, - `report_unexpected_exception`, and `report_failure`. - - >>> _colorize.COLORIZE = save_colorize - """ - DIVIDER: str optionflags: int original_optionflags: int @@ -405,233 +149,34 @@ class DocTestRunner: if sys.version_info >= (3, 13): skips: int test: DocTest - def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: - """ - Create a new test runner. - - Optional keyword arg `checker` is the `OutputChecker` that - should be used to compare the expected outputs and actual - outputs of doctest examples. - - Optional keyword arg 'verbose' prints lots of stuff if true, - only failures if false; by default, it's true iff '-v' is in - sys.argv. - - Optional argument `optionflags` can be used to control how the - test runner compares expected output to actual output, and how - it displays failures. See the documentation for `testmod` for - more information. - """ - - def report_start(self, out: _Out, test: DocTest, example: Example) -> None: - """ - Report that the test runner is about to process the given - example. (Only displays a message if verbose=True) - """ - - def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: - """ - Report that the given example ran successfully. (Only - displays a message if verbose=True) - """ - - def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: - """ - Report that the given example failed. - """ - - def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: - """ - Report that the given example raised an unexpected exception. - """ - + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... + def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... + def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... def run( self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True - ) -> TestResults: - """ - Run the examples in `test`, and display the results using the - writer function `out`. - - The examples are run in the namespace `test.globs`. 
If - `clear_globs` is true (the default), then this namespace will - be cleared after the test runs, to help with garbage - collection. If you would like to examine the namespace after - the test completes, then use `clear_globs=False`. - - `compileflags` gives the set of flags that should be used by - the Python compiler when running the examples. If not - specified, then it will default to the set of future-import - flags that apply to `globs`. - - The output of each example is checked using - `DocTestRunner.check_output`, and the results are formatted by - the `DocTestRunner.report_*` methods. - """ - - def summarize(self, verbose: bool | None = None) -> TestResults: - """ - Print a summary of all the test cases that have been run by - this DocTestRunner, and return a TestResults instance. - - The optional `verbose` argument controls how detailed the - summary is. If the verbosity is not specified, then the - DocTestRunner's verbosity is used. - """ - + ) -> TestResults: ... + def summarize(self, verbose: bool | None = None) -> TestResults: ... def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: - """ - A class used to check whether the actual output from a doctest - example matches the expected output. `OutputChecker` defines two - methods: `check_output`, which compares a given pair of outputs, - and returns true if they match; and `output_difference`, which - returns a string describing the differences between two outputs. - """ - - def check_output(self, want: str, got: str, optionflags: int) -> bool: - """ - Return True iff the actual output from an example (`got`) - matches the expected output (`want`). These strings are - always considered to match if they are identical; but - depending on what option flags the test runner is using, - several non-exact match types are also possible. See the - documentation for `TestRunner` for more information about - option flags. - """ - - def output_difference(self, example: Example, got: str, optionflags: int) -> str: - """ - Return a string describing the differences between the - expected output for a given example (`example`) and the actual - output (`got`). `optionflags` is the set of option flags used - to compare `want` and `got`. - """ + def check_output(self, want: str, got: str, optionflags: int) -> bool: ... + def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... class DocTestFailure(Exception): - """A DocTest example has failed in debugging mode. - - The exception instance has variables: - - - test: the DocTest object being run - - - example: the Example object that failed - - - got: the actual output - """ - test: DocTest example: Example got: str def __init__(self, test: DocTest, example: Example, got: str) -> None: ... class UnexpectedException(Exception): - """A DocTest example has encountered an unexpected exception - - The exception instance has variables: - - - test: the DocTest object being run - - - example: the Example object that failed - - - exc_info: the exception info - """ - test: DocTest example: Example exc_info: ExcInfo def __init__(self, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... -class DebugRunner(DocTestRunner): - """Run doc tests but raise an exception as soon as there is a failure. - - If an unexpected exception occurs, an UnexpectedException is raised. - It contains the test, the example, and the original exception: - - >>> runner = DebugRunner(verbose=False) - >>> test = DocTestParser().get_doctest('>>> raise KeyError\\n42', - ... 
{}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except UnexpectedException as f: - ... failure = f - - >>> failure.test is test - True - - >>> failure.example.want - '42\\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[1] # Already has the traceback - Traceback (most recent call last): - ... - KeyError - - We wrap the original exception to give the calling application - access to the test and example information. - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> try: - ... runner.run(test) - ... except DocTestFailure as f: - ... failure = f - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\\n' - - and the actual output: - - >>> failure.got - '1\\n' - - If a failure or error occurs, the globals are left intact: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 1} - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... >>> raise KeyError - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - Traceback (most recent call last): - ... - doctest.UnexpectedException: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 2} - - But the globals are cleared if there is no error: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - TestResults(failed=0, attempted=1) - - >>> test.globs - {} - - """ +class DebugRunner(DocTestRunner): ... master: DocTestRunner | None @@ -645,71 +190,7 @@ def testmod( extraglobs: dict[str, Any] | None = None, raise_on_error: bool = False, exclude_empty: bool = False, -) -> TestResults: - """m=None, name=None, globs=None, verbose=None, report=True, - optionflags=0, extraglobs=None, raise_on_error=False, - exclude_empty=False - - Test examples in docstrings in functions and classes reachable - from module m (or the current module if m is not supplied), starting - with m.__doc__. - - Also test examples reachable from dict m.__test__ if it exists. - m.__test__ maps names to functions, classes and strings; - function and class docstrings are tested even if the name is private; - strings are tested directly, as if they were docstrings. - - Return (#failures, #tests). - - See help(doctest) for an overview. - - Optional keyword arg "name" gives the name of the module; by default - use m.__name__. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use m.__dict__. A copy of this - dict is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. This is new in 2.4. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. This is new in 2.3. 
Possible values (see the - docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - SKIP - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - +) -> TestResults: ... def testfile( filename: str, module_relative: bool = True, @@ -723,84 +204,7 @@ def testfile( raise_on_error: bool = False, parser: DocTestParser = ..., encoding: str | None = None, -) -> TestResults: - """ - Test examples in the given file. Return (#failures, #tests). - - Optional keyword arg "module_relative" specifies how filenames - should be interpreted: - - - If "module_relative" is True (the default), then "filename" - specifies a module-relative path. By default, this path is - relative to the calling module's directory; but if the - "package" argument is specified, then it is relative to that - package. To ensure os-independence, "filename" should use - "/" characters to separate path segments, and should not - be an absolute path (i.e., it may not begin with "/"). - - - If "module_relative" is False, then "filename" specifies an - os-specific path. The path may be absolute or relative (to - the current working directory). - - Optional keyword arg "name" gives the name of the test; by default - use the file's basename. - - Optional keyword argument "package" is a Python package or the - name of a Python package whose directory should be used as the - base directory for a module relative filename. If no package is - specified, then the calling module's directory is used as the base - directory for module relative filenames. It is an error to - specify "package" if "module_relative" is False. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use {}. A copy of this dict - is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. Possible values (see the docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - SKIP - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. 
- - Optional keyword arg "parser" specifies a DocTestParser (or - subclass) that should be used to extract tests from the files. - - Optional keyword arg "encoding" specifies an encoding that should - be used to convert the file to unicode. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - +) -> TestResults: ... def run_docstring_examples( f: object, globs: dict[str, Any], @@ -808,50 +212,8 @@ def run_docstring_examples( name: str = "NoName", compileflags: int | None = None, optionflags: int = 0, -) -> None: - """ - Test examples in the given object's docstring (`f`), using `globs` - as globals. Optional argument `name` is used in failure messages. - If the optional argument `verbose` is true, then generate output - even if there are no failures. - - `compileflags` gives the set of flags that should be used by the - Python compiler when running the examples. If not specified, then - it will default to the set of future-import flags that apply to - `globs`. - - Optional keyword arg `optionflags` specifies options for the - testing and output. See the documentation for `testmod` for more - information. - """ - -def set_unittest_reportflags(flags: int) -> int: - """Sets the unittest option flags. - - The old flag is returned so that a runner could restore the old - value if it wished to: - - >>> import doctest - >>> old = doctest._unittest_reportflags - >>> doctest.set_unittest_reportflags(REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) == old - True - - >>> doctest._unittest_reportflags == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - - Only reporting flags can be set: - - >>> doctest.set_unittest_reportflags(ELLIPSIS) - Traceback (most recent call last): - ... - ValueError: ('Only reporting flags allowed', 8) - - >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - """ +) -> None: ... +def set_unittest_reportflags(flags: int) -> int: ... class DocTestCase(unittest.TestCase): def __init__( @@ -879,41 +241,7 @@ def DocTestSuite( extraglobs: dict[str, Any] | None = None, test_finder: DocTestFinder | None = None, **options: Any, -) -> _DocTestSuite: - """ - Convert doctest tests for a module to a unittest test suite. - - This converts each documentation string in a module that - contains doctest tests to a unittest test case. If any of the - tests in a doc string fail, then the test case fails. An exception - is raised showing the name of the file containing the test and a - (sometimes approximate) line number. - - The `module` argument provides the module to be tested. The argument - can be either a module or a module name. - - If no argument is given, the calling module is used. - - A number of options may be provided as keyword arguments: - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. 
The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - """ +) -> _DocTestSuite: ... class DocFileCase(DocTestCase): ... @@ -926,140 +254,9 @@ def DocFileTest( encoding: str | None = None, **options: Any, ) -> DocFileCase: ... -def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: - """A unittest suite for one or more doctest files. - - The path to each doctest file is given as a string; the - interpretation of that string depends on the keyword argument - "module_relative". - - A number of options may be provided as keyword arguments: - - module_relative - If "module_relative" is True, then the given file paths are - interpreted as os-independent module-relative paths. By - default, these paths are relative to the calling module's - directory; but if the "package" argument is specified, then - they are relative to that package. To ensure os-independence, - "filename" should use "/" characters to separate path - segments, and may not be an absolute path (i.e., it may not - begin with "/"). - - If "module_relative" is False, then the given file paths are - interpreted as os-specific paths. These paths may be absolute - or relative (to the current working directory). - - package - A Python package or the name of a Python package whose directory - should be used as the base directory for module relative paths. - If "package" is not specified, then the calling module's - directory is used as the base directory for module relative - filenames. It is an error to specify "package" if - "module_relative" is False. - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - - parser - A DocTestParser (or subclass) that should be used to extract - tests from the files. - - encoding - An encoding that will be used to convert the files to unicode. - """ - -def script_from_examples(s: str) -> str: - """Extract script from text with examples. - - Converts text with examples to a Python script. Example input is - converted to regular code. Example output and all other words - are converted to comments: - - >>> text = ''' - ... Here are examples of simple math. - ... - ... Python has super accurate integer addition - ... - ... >>> 2 + 2 - ... 5 - ... - ... And very friendly error messages: - ... - ... >>> 1/0 - ... To Infinity - ... And - ... Beyond - ... - ... You can use logic if you want: - ... - ... >>> if 0: - ... ... blah - ... ... blah - ... ... - ... - ... Ho hum - ... ''' - - >>> print(script_from_examples(text)) - # Here are examples of simple math. 
- # - # Python has super accurate integer addition - # - 2 + 2 - # Expected: - ## 5 - # - # And very friendly error messages: - # - 1/0 - # Expected: - ## To Infinity - ## And - ## Beyond - # - # You can use logic if you want: - # - if 0: - blah - blah - # - # Ho hum - - """ - -def testsource(module: None | str | types.ModuleType, name: str) -> str: - """Extract the test sources from a doctest docstring as a script. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the doc string with tests to be debugged. - """ - -def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: - """Debug a single doctest docstring, in argument `src`""" - -def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: - """Debug a test script. `src` is the script, as a string.""" - -def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: - """Debug a single doctest docstring. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the docstring with tests to be debugged. - """ +def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... +def script_from_examples(s: str) -> str: ... +def testsource(module: None | str | types.ModuleType, name: str) -> str: ... +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi index 11a143b8d0ed0..53f8c350b01e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi @@ -1,5 +1,3 @@ -"""A package for parsing, handling, and generating email messages.""" - from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -35,23 +33,13 @@ _ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 _ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 @overload -def message_from_string(s: str) -> Message: - """Parse a string into a Message object model. - - Optional _class and strict are passed to the Parser constructor. - """ - +def message_from_string(s: str) -> Message: ... @overload def message_from_string(s: str, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload def message_from_string(s: str, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... @overload -def message_from_bytes(s: bytes | bytearray) -> Message: - """Parse a bytes string into a Message object model. - - Optional _class and strict are passed to the Parser constructor. - """ - +def message_from_bytes(s: bytes | bytearray) -> Message: ... @overload def message_from_bytes(s: bytes | bytearray, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload @@ -59,23 +47,13 @@ def message_from_bytes( s: bytes | bytearray, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT] ) -> _MessageT: ... @overload -def message_from_file(fp: IO[str]) -> Message: - """Read a file and parse its contents into a Message object model. 
- - Optional _class and strict are passed to the Parser constructor. - """ - +def message_from_file(fp: IO[str]) -> Message: ... @overload def message_from_file(fp: IO[str], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload def message_from_file(fp: IO[str], _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... @overload -def message_from_binary_file(fp: IO[bytes]) -> Message: - """Read a binary file and parse its contents into a Message object model. - - Optional _class and strict are passed to the Parser constructor. - """ - +def message_from_binary_file(fp: IO[bytes]) -> Message: ... @overload def message_from_binary_file(fp: IO[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi index 1c73357b5388f..dededd006e5b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi @@ -1,72 +1,3 @@ -"""Header value parser implementing various email-related RFC parsing rules. - -The parsing methods defined in this module implement various email related -parsing rules. Principal among them is RFC 5322, which is the followon -to RFC 2822 and primarily a clarification of the former. It also implements -RFC 2047 encoded word decoding. - -RFC 5322 goes to considerable trouble to maintain backward compatibility with -RFC 822 in the parse phase, while cleaning up the structure on the generation -phase. This parser supports correct RFC 5322 generation by tagging white space -as folding white space only when folding is allowed in the non-obsolete rule -sets. Actually, the parser is even more generous when accepting input than RFC -5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages. -Where possible deviations from the standard are annotated on the 'defects' -attribute of tokens that deviate. - -The general structure of the parser follows RFC 5322, and uses its terminology -where there is a direct correspondence. Where the implementation requires a -somewhat different structure than that used by the formal grammar, new terms -that mimic the closest existing terms are used. Thus, it really helps to have -a copy of RFC 5322 handy when studying this code. - -Input to the parser is a string that has already been unfolded according to -RFC 5322 rules. According to the RFC this unfolding is the very first step, and -this parser leaves the unfolding step to a higher level message parser, which -will have already detected the line breaks that need unfolding while -determining the beginning and end of each header. - -The output of the parser is a TokenList object, which is a list subclass. A -TokenList is a recursive data structure. The terminal nodes of the structure -are Terminal objects, which are subclasses of str. These do not correspond -directly to terminal objects in the formal grammar, but are instead more -practical higher level combinations of true terminals. - -All TokenList and Terminal objects have a 'value' attribute, which produces the -semantically meaningful value of that part of the parse subtree. The value of -all whitespace tokens (no matter how many sub-tokens they may contain) is a -single space, as per the RFC rules. This includes 'CFWS', which is herein -included in the general class of whitespace tokens. 
There is one exception to -the rule that whitespace tokens are collapsed into single spaces in values: in -the value of a 'bare-quoted-string' (a quoted-string with no leading or -trailing whitespace), any whitespace that appeared between the quotation marks -is preserved in the returned value. Note that in all Terminal strings quoted -pairs are turned into their unquoted values. - -All TokenList and Terminal objects also have a string value, which attempts to -be a "canonical" representation of the RFC-compliant form of the substring that -produced the parsed subtree, including minimal use of quoted pair quoting. -Whitespace runs are not collapsed. - -Comment tokens also have a 'content' attribute providing the string found -between the parens (including any nested comments) with whitespace preserved. - -All TokenList and Terminal objects have a 'defects' attribute which is a -possibly empty list all of the defects found while creating the token. Defects -may appear on any token in the tree, and a composite list of all defects in the -subtree is available through the 'all_defects' attribute of any node. (For -Terminal notes x.defects == x.all_defects.) - -Each object in a parse tree is called a 'token', and each has a 'token_type' -attribute that gives the name from the RFC 5322 grammar that it represents. -Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that -may be produced: 'ptext'. A 'ptext' is a string of printable ascii characters. -It is returned in place of lists of (ctext/quoted-pair) and -(qtext/quoted-pair). - -XXX: provide complete list of token types. -""" - from collections.abc import Iterable, Iterator from email.errors import HeaderParseError, MessageDefect from email.policy import Policy @@ -91,9 +22,7 @@ NLSET: Final[set[str]] SPECIALSNL: Final[set[str]] # Added in Python 3.9.23, 3.10.17, 3.11.12, 3.12.9, 3.13.2 -def make_quoted_pairs(value: Any) -> str: - """Escape dquote and backslash for use within a quoted-string.""" - +def make_quoted_pairs(value: Any) -> str: ... def quote_string(value: Any) -> str: ... rfc2047_matcher: Final[Pattern[str]] @@ -110,9 +39,7 @@ class TokenList(list[TokenList | Terminal]): def all_defects(self) -> list[MessageDefect]: ... def startswith_fws(self) -> bool: ... @property - def as_ew_allowed(self) -> bool: - """True if all top level tokens of this part may be RFC2047 encoded.""" - + def as_ew_allowed(self) -> bool: ... @property def comments(self) -> list[str]: ... def fold(self, *, policy: Policy) -> str: ... @@ -413,388 +340,59 @@ class ValueTerminal(Terminal): def startswith_fws(self) -> bool: ... class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... - -class _InvalidEwError(HeaderParseError): - """Invalid encoded word found while parsing headers.""" +class _InvalidEwError(HeaderParseError): ... DOT: Final[ValueTerminal] ListSeparator: Final[ValueTerminal] RouteComponentMarker: Final[ValueTerminal] -def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: - """FWS = 1*WSP - - This isn't the RFC definition. We're using fws to represent tokens where - folding can be done, but when we are parsing the *un*folding has already - been done so we don't need to watch out for CRLF. - - """ - -def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: - """encoded-word = "=?" charset "?" encoding "?" 
encoded-text "?=" """ - -def get_unstructured(value: str) -> UnstructuredTokenList: - """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - - obs-NO-WS-CTL is control characters except WSP/CR/LF. - - So, basically, we have printable runs, plus control characters or nulls in - the obsolete syntax, separated by whitespace. Since RFC 2047 uses the - obsolete syntax in its specification, but requires whitespace on either - side of the encoded words, I can see no reason to need to separate the - non-printable-non-whitespace from the printable runs if they occur, so we - parse this into xtext tokens separated by WSP tokens. - - Because an 'unstructured' value must by definition constitute the entire - value, this 'get' routine does not return a remaining value, only the - parsed TokenList. - - """ - -def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: - """ctext = - - This is not the RFC ctext, since we are handling nested comments in comment - and unquoting quoted-pairs here. We allow anything except the '()' - characters, but if we find any ASCII other than the RFC defined printable - ASCII, a NonPrintableDefect is added to the token's defects list. Since - quoted pairs are converted to their unquoted values, what is returned is - a 'ptext' token. In this case it is a WhiteSpaceTerminal, so it's value - is ' '. - - """ - -def get_qcontent(value: str) -> tuple[ValueTerminal, str]: - """qcontent = qtext / quoted-pair - - We allow anything except the DQUOTE character, but if we find any ASCII - other than the RFC defined printable ASCII, a NonPrintableDefect is - added to the token's defects list. Any quoted pairs are converted to their - unquoted values, so what is returned is a 'ptext' token. In this case it - is a ValueTerminal. - - """ - -def get_atext(value: str) -> tuple[ValueTerminal, str]: - """atext = - - We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to - the token's defects list if we find non-atext characters. - """ - -def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: - """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE - - A quoted-string without the leading or trailing white space. Its - value is the text between the quote marks, with whitespace - preserved and quoted pairs decoded. - """ - -def get_comment(value: str) -> tuple[Comment, str]: - """comment = "(" *([FWS] ccontent) [FWS] ")" - ccontent = ctext / quoted-pair / comment - - We handle nested comments here, and quoted-pair in our qp-ctext routine. - """ - -def get_cfws(value: str) -> tuple[CFWSList, str]: - """CFWS = (1*([FWS] comment) [FWS]) / FWS""" - -def get_quoted_string(value: str) -> tuple[QuotedString, str]: - """quoted-string = [CFWS] [CFWS] - - 'bare-quoted-string' is an intermediate class defined by this - parser and not by the RFC grammar. It is the quoted string - without any attached CFWS. - """ - -def get_atom(value: str) -> tuple[Atom, str]: - """atom = [CFWS] 1*atext [CFWS] - - An atom could be an rfc2047 encoded word. - """ - -def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: - """dot-text = 1*atext *("." 1*atext)""" - -def get_dot_atom(value: str) -> tuple[DotAtom, str]: - """dot-atom = [CFWS] dot-atom-text [CFWS] - - Any place we can have a dot atom, we could instead have an rfc2047 encoded - word. 
- """ - -def get_word(value: str) -> tuple[Any, str]: - """word = atom / quoted-string - - Either atom or quoted-string may start with CFWS. We have to peel off this - CFWS first to determine which type of word to parse. Afterward we splice - the leading CFWS, if any, into the parsed sub-token. - - If neither an atom or a quoted-string is found before the next special, a - HeaderParseError is raised. - - The token returned is either an Atom or a QuotedString, as appropriate. - This means the 'word' level of the formal grammar is not represented in the - parse tree; this is because having that extra layer when manipulating the - parse tree is more confusing than it is helpful. - - """ - -def get_phrase(value: str) -> tuple[Phrase, str]: - """phrase = 1*word / obs-phrase - obs-phrase = word *(word / "." / CFWS) - - This means a phrase can be a sequence of words, periods, and CFWS in any - order as long as it starts with at least one word. If anything other than - words is detected, an ObsoleteHeaderDefect is added to the token's defect - list. We also accept a phrase that starts with CFWS followed by a dot; - this is registered as an InvalidHeaderDefect, since it is not supported by - even the obsolete grammar. - - """ - -def get_local_part(value: str) -> tuple[LocalPart, str]: - """local-part = dot-atom / quoted-string / obs-local-part""" - -def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: - """obs-local-part = word *("." word)""" - -def get_dtext(value: str) -> tuple[ValueTerminal, str]: - """dtext = / obs-dtext - obs-dtext = obs-NO-WS-CTL / quoted-pair - - We allow anything except the excluded characters, but if we find any - ASCII other than the RFC defined printable ASCII, a NonPrintableDefect is - added to the token's defects list. Quoted pairs are converted to their - unquoted values, so what is returned is a ptext token, in this case a - ValueTerminal. If there were quoted-printables, an ObsoleteHeaderDefect is - added to the returned token's defect list. - - """ - -def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: - """domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]""" - -def get_domain(value: str) -> tuple[Domain, str]: - """domain = dot-atom / domain-literal / obs-domain - obs-domain = atom *("." atom)) - - """ - -def get_addr_spec(value: str) -> tuple[AddrSpec, str]: - """addr-spec = local-part "@" domain""" - -def get_obs_route(value: str) -> tuple[ObsRoute, str]: - """obs-route = obs-domain-list ":" - obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) - - Returns an obs-route token with the appropriate sub-tokens (that is, - there is no obs-domain-list in the parse tree). - """ - -def get_angle_addr(value: str) -> tuple[AngleAddr, str]: - """angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr - obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS] - - """ - -def get_display_name(value: str) -> tuple[DisplayName, str]: - """display-name = phrase - - Because this is simply a name-rule, we don't return a display-name - token containing a phrase, but rather a display-name token with - the content of the phrase. - - """ - -def get_name_addr(value: str) -> tuple[NameAddr, str]: - """name-addr = [display-name] angle-addr""" - -def get_mailbox(value: str) -> tuple[Mailbox, str]: - """mailbox = name-addr / addr-spec""" - -def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: - """Read everything up to one of the chars in endchars. - - This is outside the formal grammar. 
The InvalidMailbox TokenList that is - returned acts like a Mailbox, but the data attributes are None. - - """ - -def get_mailbox_list(value: str) -> tuple[MailboxList, str]: - """mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list - obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS]) - - For this routine we go outside the formal grammar in order to improve error - handling. We recognize the end of the mailbox list only at the end of the - value or at a ';' (the group terminator). This is so that we can turn - invalid mailboxes into InvalidMailbox tokens and continue parsing any - remaining valid mailboxes. We also allow all mailbox entries to be null, - and this condition is handled appropriately at a higher level. - - """ - -def get_group_list(value: str) -> tuple[GroupList, str]: - """group-list = mailbox-list / CFWS / obs-group-list - obs-group-list = 1*([CFWS] ",") [CFWS] - - """ - -def get_group(value: str) -> tuple[Group, str]: - """group = display-name ":" [group-list] ";" [CFWS]""" - -def get_address(value: str) -> tuple[Address, str]: - """address = mailbox / group - - Note that counter-intuitively, an address can be either a single address or - a list of addresses (a group). This is why the returned Address object has - a 'mailboxes' attribute which treats a single address as a list of length - one. When you need to differentiate between to two cases, extract the single - element, which is either a mailbox or a group token. - - """ - -def get_address_list(value: str) -> tuple[AddressList, str]: - """address_list = (address *("," address)) / obs-addr-list - obs-addr-list = *([CFWS] ",") address *("," [address / CFWS]) - - We depart from the formal grammar here by continuing to parse until the end - of the input, assuming the input to be entirely composed of an - address-list. This is always true in email parsing, and allows us - to skip invalid addresses to parse additional valid ones. - - """ - -def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: - """no-fold-literal = "[" *dtext "]" """ - -def get_msg_id(value: str) -> tuple[MsgID, str]: - """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS] - id-left = dot-atom-text / obs-id-left - id-right = dot-atom-text / no-fold-literal / obs-id-right - no-fold-literal = "[" *dtext "]" - """ - -def parse_message_id(value: str) -> MessageID: - """message-id = "Message-ID:" msg-id CRLF""" - -def parse_mime_version(value: str) -> MIMEVersion: - """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]""" - -def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: - """Read everything up to the next ';'. - - This is outside the formal grammar. The InvalidParameter TokenList that is - returned acts like a Parameter, but the data attributes are None. - - """ - -def get_ttext(value: str) -> tuple[ValueTerminal, str]: - """ttext = - - We allow any non-TOKEN_ENDS in ttext, but add defects to the token's - defects list if we find non-ttext characters. We also register defects for - *any* non-printables even though the RFC doesn't exclude all of them, - because we follow the spirit of RFC 5322. - - """ - -def get_token(value: str) -> tuple[Token, str]: - """token = [CFWS] 1*ttext [CFWS] - - The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or - tspecials. We also exclude tabs even though the RFC doesn't. - - The RFC implies the CFWS but is not explicit about it in the BNF. 
- - """ - -def get_attrtext(value: str) -> tuple[ValueTerminal, str]: - """attrtext = 1*(any non-ATTRIBUTE_ENDS character) - - We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the - token's defects list if we find non-attrtext characters. We also register - defects for *any* non-printables even though the RFC doesn't exclude all of - them, because we follow the spirit of RFC 5322. - - """ - -def get_attribute(value: str) -> tuple[Attribute, str]: - """[CFWS] 1*attrtext [CFWS] - - This version of the BNF makes the CFWS explicit, and as usual we use a - value terminal for the actual run of characters. The RFC equivalent of - attrtext is the token characters, with the subtraction of '*', "'", and '%'. - We include tab in the excluded set just as we do for token. - - """ - -def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: - """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%') - - This is a special parsing routine so that we get a value that - includes % escapes as a single string (which we decode as a single - string later). - - """ - -def get_extended_attribute(value: str) -> tuple[Attribute, str]: - """[CFWS] 1*extended_attrtext [CFWS] - - This is like the non-extended version except we allow % characters, so that - we can pick up an encoded value as a single string. - - """ - -def get_section(value: str) -> tuple[Section, str]: - """'*' digits - - The formal BNF is more complicated because leading 0s are not allowed. We - check for that and add a defect. We also assume no CFWS is allowed between - the '*' and the digits, though the RFC is not crystal clear on that. - The caller should already have dealt with leading CFWS. - - """ - -def get_value(value: str) -> tuple[Value, str]: - """quoted-string / attribute""" - -def get_parameter(value: str) -> tuple[Parameter, str]: - """attribute [section] ["*"] [CFWS] "=" value - - The CFWS is implied by the RFC but not made explicit in the BNF. This - simplified form of the BNF from the RFC is made to conform with the RFC BNF - through some extra checks. We do it this way because it makes both error - recovery and working with the resulting parse tree easier. - """ - -def parse_mime_parameters(value: str) -> MimeParameters: - """parameter *( ";" parameter ) - - That BNF is meant to indicate this routine should only be called after - finding and handling the leading ';'. There is no corresponding rule in - the formal RFC grammar, but it is more convenient for us for the set of - parameters to be treated as its own TokenList. - - This is 'parse' routine because it consumes the remaining value, but it - would never be called to parse a full header. Instead it is called to - parse everything after the non-parameter value of a specific MIME header. - - """ - -def parse_content_type_header(value: str) -> ContentType: - """maintype "/" subtype *( ";" parameter ) - - The maintype and substype are tokens. Theoretically they could - be checked against the official IANA list + x-token, but we - don't do that. - """ - -def parse_content_disposition_header(value: str) -> ContentDisposition: - """disposition-type *( ";" parameter )""" - -def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: - """mechanism""" +def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: ... +def get_unstructured(value: str) -> UnstructuredTokenList: ... 
+def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... +def get_atext(value: str) -> tuple[ValueTerminal, str]: ... +def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: ... +def get_comment(value: str) -> tuple[Comment, str]: ... +def get_cfws(value: str) -> tuple[CFWSList, str]: ... +def get_quoted_string(value: str) -> tuple[QuotedString, str]: ... +def get_atom(value: str) -> tuple[Atom, str]: ... +def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: ... +def get_dot_atom(value: str) -> tuple[DotAtom, str]: ... +def get_word(value: str) -> tuple[Any, str]: ... +def get_phrase(value: str) -> tuple[Phrase, str]: ... +def get_local_part(value: str) -> tuple[LocalPart, str]: ... +def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: ... +def get_dtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: ... +def get_domain(value: str) -> tuple[Domain, str]: ... +def get_addr_spec(value: str) -> tuple[AddrSpec, str]: ... +def get_obs_route(value: str) -> tuple[ObsRoute, str]: ... +def get_angle_addr(value: str) -> tuple[AngleAddr, str]: ... +def get_display_name(value: str) -> tuple[DisplayName, str]: ... +def get_name_addr(value: str) -> tuple[NameAddr, str]: ... +def get_mailbox(value: str) -> tuple[Mailbox, str]: ... +def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: ... +def get_mailbox_list(value: str) -> tuple[MailboxList, str]: ... +def get_group_list(value: str) -> tuple[GroupList, str]: ... +def get_group(value: str) -> tuple[Group, str]: ... +def get_address(value: str) -> tuple[Address, str]: ... +def get_address_list(value: str) -> tuple[AddressList, str]: ... +def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: ... +def get_msg_id(value: str) -> tuple[MsgID, str]: ... +def parse_message_id(value: str) -> MessageID: ... +def parse_mime_version(value: str) -> MIMEVersion: ... +def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: ... +def get_ttext(value: str) -> tuple[ValueTerminal, str]: ... +def get_token(value: str) -> tuple[Token, str]: ... +def get_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_attribute(value: str) -> tuple[Attribute, str]: ... +def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_extended_attribute(value: str) -> tuple[Attribute, str]: ... +def get_section(value: str) -> tuple[Section, str]: ... +def get_value(value: str) -> tuple[Value, str]: ... +def get_parameter(value: str) -> tuple[Parameter, str]: ... +def parse_mime_parameters(value: str) -> MimeParameters: ... +def parse_content_type_header(value: str) -> ContentType: ... +def parse_content_disposition_header(value: str) -> ContentDisposition: ... +def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi index de41124c98bdc..0fb890d424b10 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi @@ -1,8 +1,3 @@ -"""Policy framework for the email package. - -Allows fine grained feature control of how the package parses and emits data. 
-""" - from abc import ABCMeta, abstractmethod from email.errors import MessageDefect from email.header import Header @@ -23,27 +18,6 @@ class _MessageFactory(Protocol[_MessageT]): # assume that the __init__ arguments and attributes of _PolicyBase are # the same as those of Policy. class _PolicyBase(Generic[_MessageT_co]): - """Policy Object basic framework. - - This class is useless unless subclassed. A subclass should define - class attributes with defaults for any values that are to be - managed by the Policy object. The constructor will then allow - non-default values to be set for these attributes at instance - creation time. The instance will be callable, taking these same - attributes keyword arguments, and returning a new instance - identical to the called instance except for those values changed - by the keyword arguments. Instances may be added, yielding new - instances with any non-default values from the right hand - operand overriding those in the left hand operand. That is, - - A + B == A() - - The repr of an instance can be used to reconstruct the object - if and only if the repr of the values can be used to reconstruct - those values. - - """ - max_line_length: int | None linesep: str cte_type: str @@ -64,13 +38,7 @@ class _PolicyBase(Generic[_MessageT_co]): message_factory: _MessageFactory[_MessageT_co] | None = None, # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = True, - ) -> None: - """Create new Policy, possibly overriding some defaults. - - See class docstring for a list of overridable attributes. - - """ - + ) -> None: ... def clone( self, *, @@ -82,285 +50,31 @@ class _PolicyBase(Generic[_MessageT_co]): message_factory: _MessageFactory[_MessageT_co] | None = ..., # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., - ) -> Self: - """Return a new instance with specified attributes changed. - - The new instance has the same attribute values as the current object, - except for the changes passed in as keyword arguments. - - """ - - def __add__(self, other: Policy) -> Self: - """Non-default values from right operand override those from left. - - The object returned is a new instance of the subclass. - - """ + ) -> Self: ... + def __add__(self, other: Policy) -> Self: ... class Policy(_PolicyBase[_MessageT_co], metaclass=ABCMeta): - """Controls for how messages are interpreted and formatted. - - Most of the classes and many of the methods in the email package accept - Policy objects as parameters. A Policy object contains a set of values and - functions that control how input is interpreted and how output is rendered. - For example, the parameter 'raise_on_defect' controls whether or not an RFC - violation results in an error being raised or not, while 'max_line_length' - controls the maximum length of output lines when a Message is serialized. - - Any valid attribute may be overridden when a Policy is created by passing - it as a keyword argument to the constructor. Policy objects are immutable, - but a new Policy object can be created with only certain values changed by - calling the Policy instance with keyword arguments. Policy objects can - also be added, producing a new Policy object in which the non-default - attributes set in the right hand operand overwrite those specified in the - left operand. - - Settable attributes: - - raise_on_defect -- If true, then defects should be raised as errors. - Default: False. 
- - linesep -- string containing the value to use as separation - between output lines. Default '\\n'. - - cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. - - max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - - mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. This is used when the message is being - serialized by a generator. Default: False. - - message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - - verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. - This is a check against custom Header & fold() - implementations. - """ - # Every Message object has a `defects` attribute, so the following # methods will work for any Message object. - def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: - """Based on policy, either raise defect or call register_defect. - - handle_defect(obj, defect) - - defect should be a Defect subclass, but in any case must be an - Exception subclass. obj is the object on which the defect should be - registered if it is not raised. If the raise_on_defect is True, the - defect is raised as an error, otherwise the object and the defect are - passed to register_defect. - - This method is intended to be called by parsers that discover defects. - The email package parsers always call it with Defect instances. - - """ - - def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: - """Record 'defect' on 'obj'. - - Called by handle_defect if raise_on_defect is False. This method is - part of the Policy API so that Policy subclasses can implement custom - defect handling. The default implementation calls the append method of - the defects attribute of obj. The objects used by the email package by - default that get passed to this method will always have a defects - attribute with an append method. - - """ - - def header_max_count(self, name: str) -> int | None: - """Return the maximum allowed number of headers named 'name'. - - Called when a header is added to a Message object. If the returned - value is not 0 or None, and there are already a number of headers with - the name 'name' equal to the value returned, a ValueError is raised. - - Because the default behavior of Message's __setitem__ is to append the - value to the list of headers, it is easy to create duplicate headers - without realizing it. This method allows certain headers to be limited - in the number of instances of that header that may be added to a - Message programmatically. (The limit is not observed by the parser, - which will faithfully produce as many headers as exist in the message - being parsed.) - - The default implementation returns None for all header names. - """ - + def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... + def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... + def header_max_count(self, name: str) -> int | None: ... 
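Note on the Policy stubs above: the docstrings being dropped describe how raise_on_defect switches Policy.handle_defect() between registering a defect on the message and raising it. A minimal sketch of that behaviour, using only documented stdlib calls; the sample message bytes are invented for illustration:

    from email import errors, message_from_bytes, policy

    # A multipart message without a boundary parameter is a classic defect case.
    raw = b"Content-Type: multipart/mixed\n\nbody\n"

    # Default policy: the defect is recorded via register_defect() on msg.defects.
    msg = message_from_bytes(raw, policy=policy.default)
    print(msg.defects)  # e.g. [NoBoundaryInMultipartDefect(), ...]

    # raise_on_defect=True: handle_defect() raises the defect instead.
    try:
        message_from_bytes(raw, policy=policy.default.clone(raise_on_defect=True))
    except errors.MessageDefect as exc:
        print("raised:", type(exc).__name__)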
@abstractmethod - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: - """Given a list of linesep terminated strings constituting the lines of - a single header, return the (name, value) tuple that should be stored - in the model. The input lines should retain their terminating linesep - characters. The lines passed in by the email package may contain - surrogateescaped binary data. - """ - + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... @abstractmethod - def header_store_parse(self, name: str, value: str) -> tuple[str, str]: - """Given the header name and the value provided by the application - program, return the (name, value) that should be stored in the model. - """ - + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... @abstractmethod - def header_fetch_parse(self, name: str, value: str) -> str: - """Given the header name and the value from the model, return the value - to be returned to the application program that is requesting that - header. The value passed in by the email package may contain - surrogateescaped binary data if the lines were parsed by a BytesParser. - The returned value should not contain any surrogateescaped data. - - """ - + def header_fetch_parse(self, name: str, value: str) -> str: ... @abstractmethod - def fold(self, name: str, value: str) -> str: - """Given the header name and the value from the model, return a string - containing linesep characters that implement the folding of the header - according to the policy controls. The value passed in by the email - package may contain surrogateescaped binary data if the lines were - parsed by a BytesParser. The returned value should not contain any - surrogateescaped data. - - """ - + def fold(self, name: str, value: str) -> str: ... @abstractmethod - def fold_binary(self, name: str, value: str) -> bytes: - """Given the header name and the value from the model, return binary - data containing linesep characters that implement the folding of the - header according to the policy controls. The value passed in by the - email package may contain surrogateescaped binary data. - - """ + def fold_binary(self, name: str, value: str) -> bytes: ... class Compat32(Policy[_MessageT_co]): - """Controls for how messages are interpreted and formatted. - - Most of the classes and many of the methods in the email package accept - Policy objects as parameters. A Policy object contains a set of values and - functions that control how input is interpreted and how output is rendered. - For example, the parameter 'raise_on_defect' controls whether or not an RFC - violation results in an error being raised or not, while 'max_line_length' - controls the maximum length of output lines when a Message is serialized. - - Any valid attribute may be overridden when a Policy is created by passing - it as a keyword argument to the constructor. Policy objects are immutable, - but a new Policy object can be created with only certain values changed by - calling the Policy instance with keyword arguments. Policy objects can - also be added, producing a new Policy object in which the non-default - attributes set in the right hand operand overwrite those specified in the - left operand. - - Settable attributes: - - raise_on_defect -- If true, then defects should be raised as errors. - Default: False. - - linesep -- string containing the value to use as separation - between output lines. Default '\\n'. 
- - cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. - - max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - - mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. This is used when the message is being - serialized by a generator. Default: False. - - message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - - verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. - This is a check against custom Header & fold() - implementations. - This particular policy is the backward compatibility Policy. It - replicates the behavior of the email package version 5.1. - """ - - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: - """Given a list of linesep terminated strings constituting the lines of - a single header, return the (name, value) tuple that should be stored - in the model. The input lines should retain their terminating linesep - characters. The lines passed in by the email package may contain - surrogateescaped binary data. - The name is parsed as everything up to the ':' and returned unmodified. - The value is determined by stripping leading whitespace off the - remainder of the first line joined with all subsequent lines, and - stripping any trailing carriage return or linefeed characters. - - """ - - def header_store_parse(self, name: str, value: str) -> tuple[str, str]: - """Given the header name and the value provided by the application - program, return the (name, value) that should be stored in the model. - The name and value are returned unmodified. - """ - - def header_fetch_parse(self, name: str, value: str) -> str | Header: # type: ignore[override] - """Given the header name and the value from the model, return the value - to be returned to the application program that is requesting that - header. The value passed in by the email package may contain - surrogateescaped binary data if the lines were parsed by a BytesParser. - The returned value should not contain any surrogateescaped data. - - If the value contains binary data, it is converted into a Header object - using the unknown-8bit charset. Otherwise it is returned unmodified. - """ - - def fold(self, name: str, value: str) -> str: - """Given the header name and the value from the model, return a string - containing linesep characters that implement the folding of the header - according to the policy controls. The value passed in by the email - package may contain surrogateescaped binary data if the lines were - parsed by a BytesParser. The returned value should not contain any - surrogateescaped data. - - Headers are folded using the Header folding algorithm, which preserves - existing line breaks in the value, and wraps each resulting line to the - max_line_length. Non-ASCII binary data are CTE encoded using the - unknown-8bit charset. 
- - """ - - def fold_binary(self, name: str, value: str) -> bytes: - """Given the header name and the value from the model, return binary - data containing linesep characters that implement the folding of the - header according to the policy controls. The value passed in by the - email package may contain surrogateescaped binary data. - - Headers are folded using the Header folding algorithm, which preserves - existing line breaks in the value, and wraps each resulting line to the - max_line_length. If cte_type is 7bit, non-ascii binary data is CTE - encoded using the unknown-8bit charset. Otherwise the original source - header is used, with its existing line breaks and/or binary data. - - """ + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] + def fold(self, name: str, value: str) -> str: ... + def fold_binary(self, name: str, value: str) -> bytes: ... compat32: Compat32[Message[str, str]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi index 5c606ff488193..563cd7f669a22 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi @@ -1,58 +1,13 @@ -"""Base64 content transfer encoding per RFCs 2045-2047. - -This module handles the content transfer encoding method defined in RFC 2045 -to encode arbitrary 8-bit data using the three 8-bit bytes in four 7-bit -characters encoding known as Base64. - -It is used in the MIME standards for email to attach images, audio, and text -using some 8-bit character sets to messages. - -This module provides an interface to encode and decode both headers and bodies -with Base64 encoding. - -RFC 2045 defines a method for including character set information in an -'encoded-word' in a header. This method is commonly used for 8-bit real names -in To:, From:, Cc:, etc. fields, as well as Subject: lines. - -This module does not do the line wrapping or end-of-line character conversion -necessary for proper internationalized headers; it only does dumb encoding and -decoding. To deal with the various line wrapping issues, use the email.header -module. -""" - __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] from _typeshed import ReadableBuffer -def header_length(bytearray: str | bytes | bytearray) -> int: - """Return the length of s when it is encoded with base64.""" - -def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: - """Encode a single header line with Base64 encoding in a given charset. - - charset names the character set to use to encode the header. It defaults - to iso-8859-1. Base64 encoding is defined in RFC 2045. - """ +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... # First argument should be a buffer that supports slicing and len(). -def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: - """Encode a string with base64. - - Each line will be wrapped at, at most, maxlinelen characters (defaults to - 76 characters). - - Each line of encoded text will end with eol, which defaults to "\\n". 
Set - this to "\\r\\n" if you will be using the result of this function directly - in an email. - """ - -def decode(string: str | ReadableBuffer) -> bytes: - """Decode a raw base64 string, returning a bytes object. - - This function does not parse a full MIME header value encoded with - base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high - level email.header class for that functionality. - """ +def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi index 3688abc11668d..e1930835bbd11 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi @@ -16,50 +16,6 @@ ALIASES: Final[dict[str, str]] CODEC_MAP: Final[dict[str, str | None]] # undocumented class Charset: - """Map character sets to their email properties. - - This class provides information about the requirements imposed on email - for a specific character set. It also provides convenience routines for - converting between character sets, given the availability of the - applicable codecs. Given a character set, it will do its best to provide - information on how to use that character set in an email in an - RFC-compliant way. - - Certain character sets must be encoded with quoted-printable or base64 - when used in email headers or bodies. Certain character sets must be - converted outright, and are not allowed in email. Instances of this - module expose the following information about a character set: - - input_charset: The initial character set specified. Common aliases - are converted to their 'official' email names (e.g. latin_1 - is converted to iso-8859-1). Defaults to 7-bit us-ascii. - - header_encoding: If the character set must be encoded before it can be - used in an email header, this attribute will be set to - charset.QP (for quoted-printable), charset.BASE64 (for - base64 encoding), or charset.SHORTEST for the shortest of - QP or BASE64 encoding. Otherwise, it will be None. - - body_encoding: Same as header_encoding, but describes the encoding for the - mail message's body, which indeed may be different than the - header encoding. charset.SHORTEST is not allowed for - body_encoding. - - output_charset: Some character sets must be converted before they can be - used in email headers or bodies. If the input_charset is - one of them, this attribute will contain the name of the - charset output will be converted to. Otherwise, it will - be None. - - input_codec: The name of the Python codec used to convert the - input_charset to Unicode. If no conversion codec is - necessary, this attribute will be None. - - output_codec: The name of the Python codec used to convert Unicode - to the output_charset. If no conversion codec is necessary, - this attribute will have the same value as the input_codec. - """ - input_charset: str header_encoding: int body_encoding: int @@ -67,68 +23,12 @@ class Charset: input_codec: str | None output_codec: str | None def __init__(self, input_charset: str = "us-ascii") -> None: ... - def get_body_encoding(self) -> str | Callable[[Message], None]: - """Return the content-transfer-encoding used for body encoding. 
- - This is either the string 'quoted-printable' or 'base64' depending on - the encoding used, or it is a function in which case you should call - the function with a single argument, the Message object being - encoded. The function should then set the Content-Transfer-Encoding - header itself to whatever is appropriate. - - Returns "quoted-printable" if self.body_encoding is QP. - Returns "base64" if self.body_encoding is BASE64. - Returns conversion function otherwise. - """ - - def get_output_charset(self) -> str | None: - """Return the output character set. - - This is self.output_charset if that is not None, otherwise it is - self.input_charset. - """ - - def header_encode(self, string: str) -> str: - """Header-encode a string by converting it first to bytes. - - The type of encoding (base64 or quoted-printable) will be based on - this charset's `header_encoding`. - - :param string: A unicode string for the header. It must be possible - to encode this string to bytes using the character set's - output codec. - :return: The encoded string, with RFC 2047 chrome. - """ - - def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: - """Header-encode a string by converting it first to bytes. - - This is similar to `header_encode()` except that the string is fit - into maximum line lengths as given by the argument. - - :param string: A unicode string for the header. It must be possible - to encode this string to bytes using the character set's - output codec. - :param maxlengths: Maximum line length iterator. Each element - returned from this iterator will provide the next maximum line - length. This parameter is used as an argument to built-in next() - and should never be exhausted. The maximum line lengths should - not count the RFC 2047 chrome. These line lengths are only a - hint; the splitter does the best it can. - :return: Lines of encoded strings, each with RFC 2047 chrome. - """ - + def get_body_encoding(self) -> str | Callable[[Message], None]: ... + def get_output_charset(self) -> str | None: ... + def header_encode(self, string: str) -> str: ... + def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: ... @overload - def body_encode(self, string: None) -> None: - """Body-encode a string by converting it first to bytes. - - The type of encoding (base64 or quoted-printable) will be based on - self.body_encoding. If body_encoding is None, we assume the - output charset is a 7bit encoding, so re-encoding the decoded - string using the ascii codec produces the correct string version - of the content. - """ - + def body_encode(self, string: None) -> None: ... @overload def body_encode(self, string: str | bytes) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -137,41 +37,6 @@ class Charset: def add_charset( charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None -) -> None: - """Add character set properties to the global registry. - - charset is the input character set, and must be the canonical name of a - character set. - - Optional header_enc and body_enc is either charset.QP for - quoted-printable, charset.BASE64 for base64 encoding, charset.SHORTEST for - the shortest of qp or base64 encoding, or None for no encoding. SHORTEST - is only valid for header_enc. It describes how message headers and - message bodies in the input charset are to be encoded. Default is no - encoding. 
- - Optional output_charset is the character set that the output should be - in. Conversions will proceed from input charset, to Unicode, to the - output charset when the method Charset.convert() is called. The default - is to output in the same character set as the input. - - Both input_charset and output_charset must have Unicode codec entries in - the module's charset-to-codec mapping; use add_codec(charset, codecname) - to add codecs the module does not know about. See the codecs module's - documentation for more information. - """ - -def add_alias(alias: str, canonical: str) -> None: - """Add a character set alias. - - alias is the alias name, e.g. latin-1 - canonical is the character set's canonical name, e.g. iso-8859-1 - """ - -def add_codec(charset: str, codecname: str) -> None: - """Add a codec that map characters in the given charset to/from Unicode. - - charset is the canonical name of a character set. codecname is the name - of a Python codec, as appropriate for the second argument to the unicode() - built-in, or to the encode() method of a Unicode string. - """ +) -> None: ... +def add_alias(alias: str, canonical: str) -> None: ... +def add_codec(charset: str, codecname: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi index b627d00b8845f..55223bdc07621 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi @@ -1,23 +1,8 @@ -"""Encodings and related functions.""" - from email.message import Message __all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] -def encode_base64(msg: Message) -> None: - """Encode the message's payload in Base64. - - Also, add an appropriate Content-Transfer-Encoding header. - """ - -def encode_quopri(msg: Message) -> None: - """Encode the message's payload in quoted-printable. - - Also, add an appropriate Content-Transfer-Encoding header. - """ - -def encode_7or8bit(msg: Message) -> None: - """Set the Content-Transfer-Encoding header to 7bit or 8bit.""" - -def encode_noop(msg: Message) -> None: - """Do nothing.""" +def encode_base64(msg: Message) -> None: ... +def encode_quopri(msg: Message) -> None: ... +def encode_7or8bit(msg: Message) -> None: ... +def encode_noop(msg: Message) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi index 742412a0525bb..b501a58665560 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi @@ -1,92 +1,42 @@ -"""email package exception classes.""" - import sys -class MessageError(Exception): - """Base class for errors in the email package.""" - -class MessageParseError(MessageError): - """Base class for message parsing errors.""" - -class HeaderParseError(MessageParseError): - """Error while parsing headers.""" - -class BoundaryError(MessageParseError): - """Couldn't find terminating boundary.""" - -class MultipartConversionError(MessageError, TypeError): - """Conversion to a multipart is prohibited.""" - -class CharsetError(MessageError): - """An illegal charset was given.""" +class MessageError(Exception): ... +class MessageParseError(MessageError): ... +class HeaderParseError(MessageParseError): ... +class BoundaryError(MessageParseError): ... +class MultipartConversionError(MessageError, TypeError): ... 
+class CharsetError(MessageError): ... # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -class HeaderWriteError(MessageError): - """Error while writing headers.""" +class HeaderWriteError(MessageError): ... class MessageDefect(ValueError): - """Base class for a message defect.""" - def __init__(self, line: str | None = None) -> None: ... -class NoBoundaryInMultipartDefect(MessageDefect): - """A message claimed to be a multipart but had no boundary parameter.""" - -class StartBoundaryNotFoundDefect(MessageDefect): - """The claimed start boundary was never found.""" - -class FirstHeaderLineIsContinuationDefect(MessageDefect): - """A message had a continuation line as its first header line.""" - -class MisplacedEnvelopeHeaderDefect(MessageDefect): - """A 'Unix-from' header was found in the middle of a header block.""" - -class MultipartInvariantViolationDefect(MessageDefect): - """A message claimed to be a multipart but no subparts were found.""" - -class InvalidMultipartContentTransferEncodingDefect(MessageDefect): - """An invalid content transfer encoding was set on the multipart itself.""" - -class UndecodableBytesDefect(MessageDefect): - """Header contained bytes that could not be decoded""" - -class InvalidBase64PaddingDefect(MessageDefect): - """base64 encoded sequence had an incorrect length""" - -class InvalidBase64CharactersDefect(MessageDefect): - """base64 encoded sequence had characters not in base64 alphabet""" - -class InvalidBase64LengthDefect(MessageDefect): - """base64 encoded sequence had invalid length (1 mod 4)""" - -class CloseBoundaryNotFoundDefect(MessageDefect): - """A start boundary was found, but not the corresponding close boundary.""" - -class MissingHeaderBodySeparatorDefect(MessageDefect): - """Found line with no leading whitespace and no colon before blank line.""" +class NoBoundaryInMultipartDefect(MessageDefect): ... +class StartBoundaryNotFoundDefect(MessageDefect): ... +class FirstHeaderLineIsContinuationDefect(MessageDefect): ... +class MisplacedEnvelopeHeaderDefect(MessageDefect): ... +class MultipartInvariantViolationDefect(MessageDefect): ... +class InvalidMultipartContentTransferEncodingDefect(MessageDefect): ... +class UndecodableBytesDefect(MessageDefect): ... +class InvalidBase64PaddingDefect(MessageDefect): ... +class InvalidBase64CharactersDefect(MessageDefect): ... +class InvalidBase64LengthDefect(MessageDefect): ... +class CloseBoundaryNotFoundDefect(MessageDefect): ... +class MissingHeaderBodySeparatorDefect(MessageDefect): ... MalformedHeaderDefect = MissingHeaderBodySeparatorDefect -class HeaderDefect(MessageDefect): - """Base class for a header defect.""" - -class InvalidHeaderDefect(HeaderDefect): - """Header is not valid, message gives details.""" - -class HeaderMissingRequiredValue(HeaderDefect): - """A header that must have a value had none""" +class HeaderDefect(MessageDefect): ... +class InvalidHeaderDefect(HeaderDefect): ... +class HeaderMissingRequiredValue(HeaderDefect): ... class NonPrintableDefect(HeaderDefect): - """ASCII characters outside the ascii-printable range found""" - def __init__(self, non_printables: str | None) -> None: ... -class ObsoleteHeaderDefect(HeaderDefect): - """Header uses syntax declared obsolete by RFC 5322""" - -class NonASCIILocalPartDefect(HeaderDefect): - """local_part contains non-ASCII characters""" +class ObsoleteHeaderDefect(HeaderDefect): ... +class NonASCIILocalPartDefect(HeaderDefect): ... 
if sys.version_info >= (3, 10): - class InvalidDateDefect(HeaderDefect): - """Header has unparsable or invalid date""" + class InvalidDateDefect(HeaderDefect): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi index aedb2f6c2c7d9..d9279e9cd996d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi @@ -1,20 +1,3 @@ -"""FeedParser - An email feed parser. - -The feed parser implements an interface for incrementally parsing an email -message, line by line. This has advantages for certain applications, such as -those reading email messages off a socket. - -FeedParser.feed() is the primary interface for pushing new data into the -parser. It returns when there's nothing more it can do with the available -data. When you have no more data to push into the parser, call .close(). -This completes the parsing and returns the root message object. - -The other advantage of this parser is that it will never raise a parsing -exception. Instead, when it finds something unexpected, it adds a 'defect' to -the current message. Defects are just instances that live on the message -object's .defects attribute. -""" - from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -24,39 +7,16 @@ from typing import Generic, overload __all__ = ["FeedParser", "BytesFeedParser"] class FeedParser(Generic[_MessageT]): - """A feed-style parser of email.""" - @overload - def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: - """_factory is called with no arguments to create a new message obj - - The policy keyword specifies a policy object that controls a number of - aspects of the parser's operation. The default policy maintains - backward compatibility. - - """ - + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... - def feed(self, data: str) -> None: - """Push more data into the parser.""" - - def close(self) -> _MessageT: - """Parse all remaining data and return the root message object.""" + def feed(self, data: str) -> None: ... + def close(self) -> _MessageT: ... class BytesFeedParser(FeedParser[_MessageT]): - """Like FeedParser, but feed accepts bytes.""" - @overload - def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: - """_factory is called with no arguments to create a new message obj - - The policy keyword specifies a policy object that controls a number of - aspects of the parser's operation. The default policy maintains - backward compatibility. - - """ - + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... def feed(self, data: bytes | bytearray) -> None: ... 
# type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi index 9d1f405ee2bfc..d30e686299fab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi @@ -1,5 +1,3 @@ -"""Classes to generate plain text from a message object tree.""" - from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy @@ -12,12 +10,6 @@ __all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] _MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Any) class Generator(Generic[_MessageT]): - """Generates output from a Message object tree. - - This basic generator writes the message to the given file object as plain - text. - """ - maxheaderlen: int | None policy: Policy[_MessageT] | None @overload @@ -28,30 +20,7 @@ class Generator(Generic[_MessageT]): maxheaderlen: int | None = None, *, policy: None = None, - ) -> None: - """Create the generator for message flattening. - - outfp is the output file-like object for writing the message to. It - must have a write() method. - - Optional mangle_from_ is a flag that, when True (the default if policy - is not set), escapes From_ lines in the body of the message by putting - a '>' in front of them. - - Optional maxheaderlen specifies the longest length for a non-continued - header. When a header line is longer (in characters, with tabs - expanded to 8 spaces) than maxheaderlen, the header will split as - defined in the Header class. Set maxheaderlen to zero to disable - header wrapping. The default is 78, as recommended (but not required) - by RFC 2822. - - The policy keyword specifies a policy object that controls a number of - aspects of the generator's operation. If no policy is specified, - the policy associated with the Message object passed to the - flatten method is used. - - """ - + ) -> None: ... @overload def __init__( self, @@ -62,40 +31,10 @@ class Generator(Generic[_MessageT]): policy: Policy[_MessageT], ) -> None: ... def write(self, s: str) -> None: ... - def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: - """Print the message object tree rooted at msg to the output file - specified when the Generator instance was created. - - unixfrom is a flag that forces the printing of a Unix From_ delimiter - before the first object in the message tree. If the original message - has no From_ delimiter, a 'standard' one is crafted. By default, this - is False to inhibit the printing of any From_ delimiter. - - Note that for subobjects, no From_ line is printed. - - linesep specifies the characters used to indicate a new line in - the output. The default value is determined by the policy specified - when the Generator instance was created or, if none was specified, - from the policy associated with the msg. - - """ - - def clone(self, fp: SupportsWrite[str]) -> Self: - """Clone this generator with the exact same options.""" + def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: ... + def clone(self, fp: SupportsWrite[str]) -> Self: ... class BytesGenerator(Generator[_MessageT]): - """Generates a bytes version of a Message object tree. - - Functionally identical to the base Generator except that the output is - bytes and not string. When surrogates were used in the input to encode - bytes, these are decoded back to bytes for output. 
If the policy has - cte_type set to 7bit, then the message is transformed such that the - non-ASCII bytes are properly content transfer encoded, using the charset - unknown-8bit. - - The outfp object must accept bytes in its write method. - """ - @overload def __init__( self: BytesGenerator[Any], # The Policy of the message is used. @@ -104,30 +43,7 @@ class BytesGenerator(Generator[_MessageT]): maxheaderlen: int | None = None, *, policy: None = None, - ) -> None: - """Create the generator for message flattening. - - outfp is the output file-like object for writing the message to. It - must have a write() method. - - Optional mangle_from_ is a flag that, when True (the default if policy - is not set), escapes From_ lines in the body of the message by putting - a '>' in front of them. - - Optional maxheaderlen specifies the longest length for a non-continued - header. When a header line is longer (in characters, with tabs - expanded to 8 spaces) than maxheaderlen, the header will split as - defined in the Header class. Set maxheaderlen to zero to disable - header wrapping. The default is 78, as recommended (but not required) - by RFC 2822. - - The policy keyword specifies a policy object that controls a number of - aspects of the generator's operation. If no policy is specified, - the policy associated with the Message object passed to the - flatten method is used. - - """ - + ) -> None: ... @overload def __init__( self, @@ -139,12 +55,6 @@ class BytesGenerator(Generator[_MessageT]): ) -> None: ... class DecodedGenerator(Generator[_MessageT]): - """Generates a text representation of a message. - - Like the Generator base class, except that non-text parts are substituted - with a format string representing the part. - """ - @overload def __init__( self: DecodedGenerator[Any], # The Policy of the message is used. @@ -154,29 +64,7 @@ class DecodedGenerator(Generator[_MessageT]): fmt: str | None = None, *, policy: None = None, - ) -> None: - """Like Generator.__init__() except that an additional optional - argument is allowed. - - Walks through all subparts of a message. If the subpart is of main - type 'text', then it prints the decoded payload of the subpart. - - Otherwise, fmt is a format string that is used instead of the message - payload. fmt is expanded with the following keywords (in - %(keyword)s format): - - type : Full MIME type of the non-text part - maintype : Main MIME type of the non-text part - subtype : Sub-MIME type of the non-text part - filename : Filename of the non-text part - description: Description associated with the non-text part - encoding : Content transfer encoding of the non-text part - - The default value for fmt is None, meaning - - [Non-text (%(type)s) part of message omitted, filename %(filename)s] - """ - + ) -> None: ... @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi index c9dc5105f8853..a26bbb516e096 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi @@ -1,5 +1,3 @@ -"""Header encoding and decoding functionality.""" - from collections.abc import Iterable from email.charset import Charset from typing import Any, ClassVar @@ -15,89 +13,9 @@ class Header: header_name: str | None = None, continuation_ws: str = " ", errors: str = "strict", - ) -> None: - """Create a MIME-compliant header that can contain many character sets. 
- - Optional s is the initial header value. If None, the initial header - value is not set. You can later append to the header with .append() - method calls. s may be a byte string or a Unicode string, but see the - .append() documentation for semantics. - - Optional charset serves two purposes: it has the same meaning as the - charset argument to the .append() method. It also sets the default - character set for all subsequent .append() calls that omit the charset - argument. If charset is not provided in the constructor, the us-ascii - charset is used both as s's initial charset and as the default for - subsequent .append() calls. - - The maximum line length can be specified explicitly via maxlinelen. For - splitting the first line to a shorter value (to account for the field - header which isn't included in s, e.g. 'Subject') pass in the name of - the field in header_name. The default maxlinelen is 78 as recommended - by RFC 2822. - - continuation_ws must be RFC 2822 compliant folding whitespace (usually - either a space or a hard tab) which will be prepended to continuation - lines. - - errors is passed through to the .append() call. - """ - - def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: - """Append a string to the MIME header. - - Optional charset, if given, should be a Charset instance or the name - of a character set (which will be converted to a Charset instance). A - value of None (the default) means that the charset given in the - constructor is used. - - s may be a byte string or a Unicode string. If it is a byte string - (i.e. isinstance(s, str) is false), then charset is the encoding of - that byte string, and a UnicodeError will be raised if the string - cannot be decoded with that charset. If s is a Unicode string, then - charset is a hint specifying the character set of the characters in - the string. In either case, when producing an RFC 2822 compliant - header using RFC 2047 rules, the string will be encoded using the - output codec of the charset. If the string cannot be encoded to the - output codec, a UnicodeError will be raised. - - Optional 'errors' is passed as the errors argument to the decode - call if s is a byte string. - """ - - def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: - """Encode a message header into an RFC-compliant format. - - There are many issues involved in converting a given string for use in - an email header. Only certain character sets are readable in most - email clients, and as header strings can only contain a subset of - 7-bit ASCII, care must be taken to properly convert and encode (with - Base64 or quoted-printable) header strings. In addition, there is a - 75-character length limit on any given encoded header field, so - line-wrapping must be performed, even with double-byte character sets. - - Optional maxlinelen specifies the maximum length of each generated - line, exclusive of the linesep string. Individual lines may be longer - than maxlinelen if a folding point cannot be found. The first line - will be shorter by the length of the header name plus ": " if a header - name was specified at Header construction time. The default value for - maxlinelen is determined at header construction time. - - Optional splitchars is a string containing characters which should be - given extra weight by the splitting algorithm during normal header - wrapping. 
This is in very rough support of RFC 2822's 'higher level - syntactic breaks': split points preceded by a splitchar are preferred - during line splitting, with the characters preferred in the order in - which they appear in the string. Space and tab may be included in the - string to indicate whether preference should be given to one over the - other as a split point when other split chars do not appear in the line - being split. Splitchars does not affect RFC 2047 encoded lines. - - Optional linesep is a string to be used to separate the lines of - the value. The default value is the most useful for typical - Python applications, but it can be set to \\r\\n to produce RFC-compliant - line separators when needed. - """ + ) -> None: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... def __ne__(self, value: object, /) -> bool: ... @@ -105,43 +23,10 @@ class Header: # decode_header() either returns list[tuple[str, None]] if the header # contains no encoded parts, or list[tuple[bytes, str | None]] if the header # contains at least one encoded part. -def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: - """Decode a message header value without converting charset. - - For historical reasons, this function may return either: - - 1. A list of length 1 containing a pair (str, None). - 2. A list of (bytes, charset) pairs containing each of the decoded - parts of the header. Charset is None for non-encoded parts of the header, - otherwise a lower-case string containing the name of the character set - specified in the encoded string. - - header may be a string that may or may not contain RFC2047 encoded words, - or it may be a Header object. - - An email.errors.HeaderParseError may be raised when certain decoding error - occurs (e.g. a base64 decoding exception). - - This function exists for backwards compatibility only. For new code, we - recommend using email.headerregistry.HeaderRegistry instead. - """ - +def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... def make_header( decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], maxlinelen: int | None = None, header_name: str | None = None, continuation_ws: str = " ", -) -> Header: - """Create a Header from a sequence of pairs as returned by decode_header() - - decode_header() takes a header value string and returns a sequence of - pairs of the format (decoded_string, charset) where charset is the string - name of the character set. - - This function takes one of those sequence of pairs and returns a Header - instance. Optional maxlinelen, header_name, and continuation_ws are as in - the Header constructor. - - This function exists for backwards compatibility only, and is not - recommended for use in new code. - """ +) -> Header: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi index c75b8015f7f4c..bea68307e0091 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi @@ -1,9 +1,3 @@ -"""Representing and manipulating email headers via custom objects. - -This module provides an implementation of the HeaderRegistry API. 
-The implementation is designed to flexibly follow RFC5322 rules. -""" - import types from collections.abc import Iterable, Mapping from datetime import datetime as _datetime @@ -23,38 +17,6 @@ from typing import Any, ClassVar, Literal, Protocol, type_check_only from typing_extensions import Self class BaseHeader(str): - """Base class for message headers. - - Implements generic behavior and provides tools for subclasses. - - A subclass must define a classmethod named 'parse' that takes an unfolded - value string and a dictionary as its arguments. The dictionary will - contain one key, 'defects', initialized to an empty list. After the call - the dictionary must contain two additional keys: parse_tree, set to the - parse tree obtained from parsing the header, and 'decoded', set to the - string value of the idealized representation of the data from the value. - (That is, encoded words are decoded, and values that have canonical - representations are so represented.) - - The defects key is intended to collect parsing defects, which the message - parser will subsequently dispose of as appropriate. The parser should not, - insofar as practical, raise any errors. Defects should be added to the - list instead. The standard header parsers register defects for RFC - compliance issues, for obsolete RFC syntax, and for unrecoverable parsing - errors. - - The parse method may add additional keys to the dictionary. In this case - the subclass must define an 'init' method, which will be passed the - dictionary as its keyword arguments. The method should use (usually by - setting them as the value of similarly named attributes) and remove all the - extra keys added by its parse method, and then use super to call its parent - class with the remaining arguments and keywords. - - The subclass should also make sure that a 'max_count' attribute is defined - that is either None or 1. XXX: need to better define this API. - - """ - # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) max_count: ClassVar[Literal[1] | None] @property @@ -63,46 +25,12 @@ class BaseHeader(str): def defects(self) -> tuple[MessageDefect, ...]: ... def __new__(cls, name: str, value: Any) -> Self: ... def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... - def fold(self, *, policy: Policy) -> str: - """Fold header according to policy. - - The parsed representation of the header is folded according to - RFC5322 rules, as modified by the policy. If the parse tree - contains surrogateescaped bytes, the bytes are CTE encoded using - the charset 'unknown-8bit". - - Any non-ASCII characters in the parse tree are CTE encoded using - charset utf-8. XXX: make this a policy setting. - - The returned value is an ASCII-only string possibly containing linesep - characters, and ending with a linesep character. The string includes - the header name and the ': ' separator. - - """ + def fold(self, *, policy: Policy) -> str: ... class UnstructuredHeader: max_count: ClassVar[Literal[1] | None] @staticmethod - def value_parser(value: str) -> UnstructuredTokenList: - """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - - obs-NO-WS-CTL is control characters except WSP/CR/LF. - - So, basically, we have printable runs, plus control characters or nulls in - the obsolete syntax, separated by whitespace. 
Since RFC 2047 uses the - obsolete syntax in its specification, but requires whitespace on either - side of the encoded words, I can see no reason to need to separate the - non-printable-non-whitespace from the printable runs if they occur, so we - parse this into xtext tokens separated by WSP tokens. - - Because an 'unstructured' value must by definition constitute the entire - value, this 'get' routine does not return a remaining value, only the - parsed TokenList. - - """ - + def value_parser(value: str) -> UnstructuredTokenList: ... @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -110,40 +38,12 @@ class UniqueUnstructuredHeader(UnstructuredHeader): max_count: ClassVar[Literal[1]] class DateHeader: - """Header whose value consists of a single timestamp. - - Provides an additional attribute, datetime, which is either an aware - datetime using a timezone, or a naive datetime if the timezone - in the input string is -0000. Also accepts a datetime as input. - The 'value' attribute is the normalized form of the timestamp, - which means it is the output of format_datetime on the datetime. - """ - max_count: ClassVar[Literal[1] | None] def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... @property def datetime(self) -> _datetime | None: ... @staticmethod - def value_parser(value: str) -> UnstructuredTokenList: - """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - - obs-NO-WS-CTL is control characters except WSP/CR/LF. - - So, basically, we have printable runs, plus control characters or nulls in - the obsolete syntax, separated by whitespace. Since RFC 2047 uses the - obsolete syntax in its specification, but requires whitespace on either - side of the encoded words, I can see no reason to need to separate the - non-printable-non-whitespace from the printable runs if they occur, so we - parse this into xtext tokens separated by WSP tokens. - - Because an 'unstructured' value must by definition constitute the entire - value, this 'get' routine does not return a remaining value, only the - parsed TokenList. - - """ - + def value_parser(value: str) -> UnstructuredTokenList: ... @classmethod def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... @@ -191,9 +91,7 @@ class MIMEVersionHeader: @property def minor(self) -> int | None: ... @staticmethod - def value_parser(value: str) -> MIMEVersion: - """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]""" - + def value_parser(value: str) -> MIMEVersion: ... @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -213,21 +111,14 @@ class ContentTypeHeader(ParameterizedMIMEHeader): @property def subtype(self) -> str: ... @staticmethod - def value_parser(value: str) -> ContentType: - """maintype "/" subtype *( ";" parameter ) - - The maintype and substype are tokens. Theoretically they could - be checked against the official IANA list + x-token, but we - don't do that. - """ + def value_parser(value: str) -> ContentType: ... class ContentDispositionHeader(ParameterizedMIMEHeader): # init is redefined but has the same signature as parent class, so is omitted from the stub @property def content_disposition(self) -> str | None: ... @staticmethod - def value_parser(value: str) -> ContentDisposition: - """disposition-type *( ";" parameter )""" + def value_parser(value: str) -> ContentDisposition: ... 
class ContentTransferEncodingHeader: max_count: ClassVar[Literal[1]] @@ -237,16 +128,14 @@ class ContentTransferEncodingHeader: @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod - def value_parser(value: str) -> ContentTransferEncoding: - """mechanism""" + def value_parser(value: str) -> ContentTransferEncoding: ... class MessageIDHeader: max_count: ClassVar[Literal[1]] @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod - def value_parser(value: str) -> MessageID: - """message-id = "Message-ID:" msg-id CRLF""" + def value_parser(value: str) -> MessageID: ... @type_check_only class _HeaderParser(Protocol): @@ -257,39 +146,15 @@ class _HeaderParser(Protocol): def parse(cls, value: str, kwds: dict[str, Any], /) -> None: ... class HeaderRegistry: - """A header_factory and header registry.""" - registry: dict[str, type[_HeaderParser]] base_class: type[BaseHeader] default_class: type[_HeaderParser] def __init__( self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True - ) -> None: - """Create a header_factory that works with the Policy API. - - base_class is the class that will be the last class in the created - header class's __bases__ list. default_class is the class that will be - used if "name" (see __call__) does not appear in the registry. - use_default_map controls whether or not the default mapping of names to - specialized classes is copied in to the registry when the factory is - created. The default is True. - - """ - - def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: - """Register cls as the specialized class for handling "name" headers.""" - + ) -> None: ... + def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... def __getitem__(self, name: str) -> type[BaseHeader]: ... - def __call__(self, name: str, value: Any) -> BaseHeader: - """Create a header instance for header 'name' from 'value'. - - Creates a header instance by creating a specialized class for parsing - and representing the specified header by combining the factory - base_class with a specialized class from the registry or the - default_class, and passing the name and value to the constructed - class's constructor. - - """ + def __call__(self, name: str, value: Any) -> BaseHeader: ... class Address: @property @@ -299,29 +164,10 @@ class Address: @property def domain(self) -> str: ... @property - def addr_spec(self) -> str: - """The addr_spec (username@domain) portion of the address, quoted - according to RFC 5322 rules, but with no Content Transfer Encoding. - """ - + def addr_spec(self) -> str: ... def __init__( self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None - ) -> None: - """Create an object representing a full email address. - - An address can have a 'display_name', a 'username', and a 'domain'. In - addition to specifying the username and domain separately, they may be - specified together by using the addr_spec keyword *instead of* the - username and domain keywords. If an addr_spec string is specified it - must be properly quoted according to RFC 5322 rules; an error will be - raised if it is not. - - An Address object has display_name, username, domain, and addr_spec - attributes, all of which are read-only. The addr_spec and the string - value of the object are both quoted according to RFC5322 rules, but - without any Content Transfer Encoding. - - """ + ) -> None: ... 
__hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... @@ -330,20 +176,6 @@ class Group: def display_name(self) -> str | None: ... @property def addresses(self) -> tuple[Address, ...]: ... - def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: - """Create an object representing an address group. - - An address group consists of a display_name followed by colon and a - list of addresses (see Address) terminated by a semi-colon. The Group - is created by specifying a display_name and a possibly empty list of - Address objects. A Group can also be used to represent a single - address that is not in a group, which is convenient when manipulating - lists that are a combination of Groups and individual Addresses. In - this case the display_name should be set to None. In particular, the - string representation of a Group whose display_name is None is the same - as the Address object, if there is one and only one Address object in - the addresses list. - - """ + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi index 54f56d7c996de..d964d68438336 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi @@ -1,32 +1,12 @@ -"""Various types of useful iterators and generators.""" - from _typeshed import SupportsWrite from collections.abc import Iterator from email.message import Message __all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] -def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: - """Iterate over the parts, returning string payloads line-by-line. - - Optional decode (default False) is passed through to .get_payload(). - """ - -def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: - """Iterate over the subparts with a given MIME type. - - Use 'maintype' as the main MIME type to match against; this defaults to - "text". Optional 'subtype' is the MIME subtype to match against; if - omitted, only the main type is matched. - """ - -def walk(self: Message) -> Iterator[Message]: - """Walk over the message tree, yielding each subpart. - - The walk is performed in depth-first order. This method is a - generator. - """ +def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... +def walk(self: Message) -> Iterator[Message]: ... # We include the seemingly private function because it is documented in the stdlib documentation. -def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: - """A handy debugging aid""" +def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi index 308c21ad163c9..794882b140e61 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi @@ -1,5 +1,3 @@ -"""Basic message object for the email package object model.""" - from _typeshed import MaybeNone from collections.abc import Generator, Iterator, Sequence from email import _ParamsType, _ParamType @@ -35,21 +33,6 @@ class _SupportsDecodeToPayload(Protocol): def decode(self, encoding: str, errors: str, /) -> _PayloadType | _MultipartPayloadType: ... class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): - """Basic message object. - - A message object is defined as something that has a bunch of RFC 2822 - headers and a payload. It may optionally have an envelope header - (a.k.a. Unix-From or From_ header). If the message is a container (i.e. a - multipart or a message/rfc822), then the payload is a list of Message - objects, otherwise it is a string. - - Message objects implement part of the 'mapping' interface, which assumes - there is exactly one occurrence of the header per message. Some headers - do in fact appear multiple times (e.g. Received) and for those headers, - you must use the explicit API to set or get all the headers. Not all of - the mapping methods are implemented. - """ - # The policy attributes and arguments in this class and its subclasses # would ideally use Policy[Self], but this is not possible. policy: Policy[Any] # undocumented @@ -57,42 +40,14 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): epilogue: str | None defects: list[MessageDefect] def __init__(self, policy: Policy[Any] = ...) -> None: ... - def is_multipart(self) -> bool: - """Return True if the message consists of multiple parts.""" - + def is_multipart(self) -> bool: ... def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... - def attach(self, payload: _PayloadType) -> None: - """Add the given payload to the current payload. - - The current payload will always be a list of objects after this method - is called. If you want to set the payload to a scalar object, use - set_payload() instead. - """ + def attach(self, payload: _PayloadType) -> None: ... # `i: int` without a multipart payload results in an error # `| MaybeNone` acts like `| Any`: can be None for cleared or unset payload, but annoying to check @overload # multipart - def get_payload(self, i: int, decode: Literal[True]) -> None: - """Return a reference to the payload. - - The payload will either be a list object or a string. If you mutate - the list object, you modify the message's payload in place. Optional - i returns that index into the payload. - - Optional decode is a flag indicating whether the payload should be - decoded or not, according to the Content-Transfer-Encoding header - (default is False). - - When True and the message is not a multipart, the payload will be - decoded if this header's value is `quoted-printable' or `base64'. If - some other encoding is used, or the header is missing, or if the - payload has bogus data (i.e. bogus base64 or uuencoded data), the - payload is returned as-is. - - If the message is a multipart and the decode flag is True, then None - is returned. - """ - + def get_payload(self, i: int, decode: Literal[True]) -> None: ... 
@overload # multipart def get_payload(self, i: int, decode: Literal[False] = False) -> _PayloadType | MaybeNone: ... @overload # either @@ -105,370 +60,81 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): # then an invalid payload could be passed, but this is unlikely # Not[_SupportsEncodeToPayload] @overload - def set_payload(self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None) -> None: - """Set the payload to the given value. - - Optional charset sets the message's default character set. See - set_charset() for details. - """ - + def set_payload( + self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None + ) -> None: ... @overload def set_payload( self, payload: _SupportsEncodeToPayload | _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: Charset | str, ) -> None: ... - def set_charset(self, charset: _CharsetType) -> None: - """Set the charset of the payload to a given character set. - - charset can be a Charset instance, a string naming a character set, or - None. If it is a string it will be converted to a Charset instance. - If charset is None, the charset parameter will be removed from the - Content-Type field. Anything else will generate a TypeError. - - The message will be assumed to be of type text/* encoded with - charset.input_charset. It will be converted to charset.output_charset - and encoded properly, if needed, when generating the plain text - representation of the message. MIME headers (MIME-Version, - Content-Type, Content-Transfer-Encoding) will be added as needed. - """ - - def get_charset(self) -> _CharsetType: - """Return the Charset instance associated with the message's payload.""" - - def __len__(self) -> int: - """Return the total number of headers, including duplicates.""" - + def set_charset(self, charset: _CharsetType) -> None: ... + def get_charset(self) -> _CharsetType: ... + def __len__(self) -> int: ... def __contains__(self, name: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... # Same as `get` with `failobj=None`, but with the expectation that it won't return None in most scenarios # This is important for protocols using __getitem__, like SupportsKeysAndGetItem # Morally, the return type should be `AnyOf[_HeaderType, None]`, # so using "the Any trick" instead. - def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: - """Get a header value. - - Return None if the header is missing instead of raising an exception. - - Note that if the header appeared multiple times, exactly which - occurrence gets returned is undefined. Use get_all() to get all - the values matching a header field name. - """ - - def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: - """Set the value of a header. - - Note: this does not overwrite an existing header with the same field - name. Use __delitem__() first to delete any existing headers. - """ - - def __delitem__(self, name: str) -> None: - """Delete all occurrences of a header, if present. - - Does not raise an exception if the header is missing. - """ - - def keys(self) -> list[str]: - """Return a list of all the message's header field names. - - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - - def values(self) -> list[_HeaderT_co]: - """Return a list of all the message's header values. 
- - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - - def items(self) -> list[tuple[str, _HeaderT_co]]: - """Get all the message's header fields and values. - - These will be sorted in the order they appeared in the original - message, or were added to the message, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - + def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: ... + def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: ... + def __delitem__(self, name: str) -> None: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_HeaderT_co]: ... + def items(self) -> list[tuple[str, _HeaderT_co]]: ... @overload - def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: - """Get a header value. - - Like __getitem__() but return failobj instead of None when the field - is missing. - """ - + def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: ... @overload def get(self, name: str, failobj: _T) -> _HeaderT_co | _T: ... @overload - def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: - """Return a list of all the values for the named field. - - These will be sorted in the order they appeared in the original - message, and may contain duplicates. Any fields deleted and - re-inserted are always appended to the header list. - - If no such fields exist, failobj is returned (defaults to None). - """ - + def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: ... @overload def get_all(self, name: str, failobj: _T) -> list[_HeaderT_co] | _T: ... - def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: - """Extended header setting. - - name is the header field to add. keyword arguments can be used to set - additional parameters for the header field, with underscores converted - to dashes. Normally the parameter will be added as key="value" unless - value is None, in which case only the key will be added. If a - parameter value contains non-ASCII characters it can be specified as a - three-tuple of (charset, language, value), in which case it will be - encoded according to RFC2231 rules. Otherwise it will be encoded using - the utf-8 charset and a language of ''. - - Examples: - - msg.add_header('content-disposition', 'attachment', filename='bud.gif') - msg.add_header('content-disposition', 'attachment', - filename=('utf-8', '', 'Fußballer.ppt')) - msg.add_header('content-disposition', 'attachment', - filename='Fußballer.ppt')) - """ - - def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: - """Replace a header. - - Replace the first matching header found in the message, retaining - header order and case. If no matching header was found, a KeyError is - raised. - """ - - def get_content_type(self) -> str: - """Return the message's content type. - - The returned string is coerced to lower case of the form - 'maintype/subtype'. If there was no Content-Type header in the - message, the default type as given by get_default_type() will be - returned. Since according to RFC 2045, messages always have a default - type this will always return a value. - - RFC 2045 defines a message's default type to be text/plain unless it - appears inside a multipart/digest container, in which case it would be - message/rfc822. 
- """ - - def get_content_maintype(self) -> str: - """Return the message's main content type. - - This is the 'maintype' part of the string returned by - get_content_type(). - """ - - def get_content_subtype(self) -> str: - """Returns the message's sub-content type. - - This is the 'subtype' part of the string returned by - get_content_type(). - """ - - def get_default_type(self) -> str: - """Return the 'default' content type. - - Most messages have a default content type of text/plain, except for - messages that are subparts of multipart/digest containers. Such - subparts have a default content type of message/rfc822. - """ - - def set_default_type(self, ctype: str) -> None: - """Set the 'default' content type. - - ctype should be either "text/plain" or "message/rfc822", although this - is not enforced. The default content type is not stored in the - Content-Type header. - """ - + def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... + def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: ... + def get_content_type(self) -> str: ... + def get_content_maintype(self) -> str: ... + def get_content_subtype(self) -> str: ... + def get_default_type(self) -> str: ... + def set_default_type(self, ctype: str) -> None: ... @overload def get_params( self, failobj: None = None, header: str = "content-type", unquote: bool = True - ) -> list[tuple[str, str]] | None: - """Return the message's Content-Type parameters, as a list. - - The elements of the returned list are 2-tuples of key/value pairs, as - split on the '=' sign. The left hand side of the '=' is the key, - while the right hand side is the value. If there is no '=' sign in - the parameter the value is the empty string. The value is as - described in the get_param() method. - - Optional failobj is the object to return if there is no Content-Type - header. Optional header is the header to search instead of - Content-Type. If unquote is True, the value is unquoted. - """ - + ) -> list[tuple[str, str]] | None: ... @overload def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... @overload def get_param( self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True - ) -> _ParamType | None: - """Return the parameter value if found in the Content-Type header. - - Optional failobj is the object to return if there is no Content-Type - header, or the Content-Type header has no such parameter. Optional - header is the header to search instead of Content-Type. - - Parameter keys are always compared case insensitively. The return - value can either be a string, or a 3-tuple if the parameter was RFC - 2231 encoded. When it's a 3-tuple, the elements of the value are of - the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and - LANGUAGE can be None, in which case you should consider VALUE to be - encoded in the us-ascii charset. You can usually ignore LANGUAGE. - The parameter value (either the returned string, or the VALUE item in - the 3-tuple) is always unquoted, unless unquote is set to False. - - If your application doesn't care whether the parameter was RFC 2231 - encoded, it can turn the return value into a string as follows: - - rawparam = msg.get_param('foo') - param = email.utils.collapse_rfc2231_value(rawparam) - - """ - + ) -> _ParamType | None: ... @overload def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... 
- def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: - """Remove the given parameter completely from the Content-Type header. - - The header will be re-written in place without the parameter or its - value. All values will be quoted as necessary unless requote is - False. Optional header specifies an alternative to the Content-Type - header. - """ - - def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: - """Set the main type and subtype for the Content-Type header. - - type must be a string in the form "maintype/subtype", otherwise a - ValueError is raised. - - This method replaces the Content-Type header, keeping all the - parameters in place. If requote is False, this leaves the existing - header's quoting as is. Otherwise, the parameters will be quoted (the - default). - - An alternative header can be specified in the header argument. When - the Content-Type header is set, we'll always also add a MIME-Version - header. - """ - + def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... @overload - def get_filename(self, failobj: None = None) -> str | None: - """Return the filename associated with the payload if present. - - The filename is extracted from the Content-Disposition header's - 'filename' parameter, and it is unquoted. If that header is missing - the 'filename' parameter, this method falls back to looking for the - 'name' parameter. - """ - + def get_filename(self, failobj: None = None) -> str | None: ... @overload def get_filename(self, failobj: _T) -> str | _T: ... @overload - def get_boundary(self, failobj: None = None) -> str | None: - """Return the boundary associated with the payload if present. - - The boundary is extracted from the Content-Type header's 'boundary' - parameter, and it is unquoted. - """ - + def get_boundary(self, failobj: None = None) -> str | None: ... @overload def get_boundary(self, failobj: _T) -> str | _T: ... - def set_boundary(self, boundary: str) -> None: - """Set the boundary parameter in Content-Type to 'boundary'. - - This is subtly different than deleting the Content-Type header and - adding a new one with a new boundary parameter via add_header(). The - main difference is that using the set_boundary() method preserves the - order of the Content-Type header in the original message. - - HeaderParseError is raised if the message has no Content-Type header. - """ - + def set_boundary(self, boundary: str) -> None: ... @overload - def get_content_charset(self) -> str | None: - """Return the charset parameter of the Content-Type header. - - The returned string is always coerced to lower case. If there is no - Content-Type header, or if that header has no charset parameter, - failobj is returned. - """ - + def get_content_charset(self) -> str | None: ... @overload def get_content_charset(self, failobj: _T) -> str | _T: ... @overload - def get_charsets(self, failobj: None = None) -> list[str | None]: - """Return a list containing the charset(s) used in this message. - - The returned list of items describes the Content-Type headers' - charset parameter for this message and all the subparts in its - payload. 
- - Each item will either be a string (the value of the charset parameter - in the Content-Type header of that part) or the value of the - 'failobj' parameter (defaults to None), if the part does not have a - main MIME type of "text", or the charset is not defined. - - The list will contain one string for each part of the message, plus - one for the container message (i.e. self), so that a non-multipart - message will still return a list of length 1. - """ - + def get_charsets(self, failobj: None = None) -> list[str | None]: ... @overload def get_charsets(self, failobj: _T) -> list[str | _T]: ... - def walk(self) -> Generator[Self, None, None]: - """Walk over the message tree, yielding each subpart. - - The walk is performed in depth-first order. This method is a - generator. - """ - - def get_content_disposition(self) -> str | None: - """Return the message's content-disposition if it exists, or None. - - The return values can be either 'inline', 'attachment' or None - according to the rfc2183. - """ - - def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: - """Return the entire formatted message as a string. - - Optional 'unixfrom', when true, means include the Unix From_ envelope - header. For backward compatibility reasons, if maxheaderlen is - not specified it defaults to 0, so you must override it explicitly - if you want a different maxheaderlen. 'policy' is passed to the - Generator instance used to serialize the message; if it is not - specified the policy associated with the message instance is used. - - If the message object contains binary data that is not encoded - according to RFC standards, the non-compliant data will be replaced by - unicode "unknown character" code points. - """ - - def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: - """Return the entire formatted message as a bytes object. - - Optional 'unixfrom', when true, means include the Unix From_ envelope - header. 'policy' is passed to the BytesGenerator instance used to - serialize the message; if not specified the policy associated with - the message instance is used. - """ - - def __bytes__(self) -> bytes: - """Return the entire formatted message as a bytes object.""" - + def walk(self) -> Generator[Self, None, None]: ... + def get_content_disposition(self) -> str | None: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: ... + def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: ... + def __bytes__(self) -> bytes: ... def set_param( self, param: str, @@ -478,77 +144,20 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): charset: str | None = None, language: str = "", replace: bool = False, - ) -> None: - """Set a parameter in the Content-Type header. - - If the parameter already exists in the header, its value will be - replaced with the new value. - - If header is Content-Type and has not yet been defined for this - message, it will be set to "text/plain" and the new parameter and - value will be appended as per RFC 2045. - - An alternate header can be specified in the header argument, and all - parameters will be quoted as necessary unless requote is False. - - If charset is specified, the parameter will be encoded according to RFC - 2231. Optional language specifies the RFC 2231 language, defaulting - to the empty string. Both charset and language should be strings. - """ + ) -> None: ... 
# The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: - """Store name and value in the model without modification. - - This is an "internal" API, intended only for use by a parser. - """ - - def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: - """Return the (name, value) header pairs without modification. - - This is an "internal" API, intended only for use by a generator. - """ + def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: ... class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def __init__(self, policy: Policy[Any] | None = None) -> None: ... - def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: - """Return best candidate mime part for display as 'body' of message. - - Do a depth first search, starting with self, looking for the first part - matching each of the items in preferencelist, and return the part - corresponding to the first item that has a match, or None if no items - have a match. If 'related' is not included in preferencelist, consider - the root part of any multipart/related encountered as a candidate - match. Ignore parts with 'Content-Disposition: attachment'. - """ - - def attach(self, payload: Self) -> None: # type: ignore[override] - """Add the given payload to the current payload. - - The current payload will always be a list of objects after this method - is called. If you want to set the payload to a scalar object, use - set_payload() instead. - """ + def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: ... + def attach(self, payload: Self) -> None: ... # type: ignore[override] # The attachments are created via type(self) in the attach method. It's theoretically # possible to sneak other attachment types into a MIMEPart instance, but could cause # cause unforseen consequences. - def iter_attachments(self) -> Iterator[Self]: - """Return an iterator over the non-main parts of a multipart. - - Skip the first of each occurrence of text/plain, text/html, - multipart/related, or multipart/alternative in the multipart (unless - they have a 'Content-Disposition: attachment' header) and include all - remaining subparts in the returned iterator. When applied to a - multipart/related, return all parts except the root part. Return an - empty iterator when applied to a multipart/alternative or a - non-multipart. - """ - - def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: - """Return an iterator over all immediate subparts of a multipart. - - Return an empty iterator for a non-multipart. - """ - + def iter_attachments(self) -> Iterator[Self]: ... + def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: ... def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... def make_related(self, boundary: str | None = None) -> None: ... @@ -559,18 +168,7 @@ class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... 
- def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: - """Return the entire formatted message as a string. - - Optional 'unixfrom', when true, means include the Unix From_ envelope - header. maxheaderlen is retained for backward compatibility with the - base Message class, but defaults to None, meaning that the policy value - for max_line_length controls the header maximum length. 'policy' is - passed to the Generator instance used to serialize the message; if it - is not specified the policy associated with the message instance is - used. - """ - + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: ... def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi index b2249845d7450..a7ab9dc75ce24 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi @@ -1,5 +1,3 @@ -"""Class representing application/* type MIME documents.""" - from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +6,6 @@ from email.policy import Policy __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): - """Class for generating application/* MIME documents.""" - def __init__( self, _data: str | bytes | bytearray, @@ -18,18 +14,4 @@ class MIMEApplication(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: - """Create an application/* type MIME document. - - _data contains the bytes for the raw application data. - - _subtype is the MIME content type subtype, defaulting to - 'octet-stream'. - - _encoder is a function which will perform the actual encoding for - transport of the application data, defaulting to base64 encoding. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi index 532e4b21b10ef..090dfb960db6f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi @@ -1,5 +1,3 @@ -"""Class representing audio/* type MIME documents.""" - from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +6,6 @@ from email.policy import Policy __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): - """Class for generating audio/* MIME documents.""" - def __init__( self, _audiodata: str | bytes | bytearray, @@ -18,24 +14,4 @@ class MIMEAudio(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: - """Create an audio/* type MIME document. - - _audiodata contains the bytes for the raw audio data. If this data - can be decoded as au, wav, aiff, or aifc, then the - subtype will be automatically included in the Content-Type header. - Otherwise, you can specify the specific audio subtype via the - _subtype parameter. If _subtype is not given, and no subtype can be - guessed, a TypeError is raised. 
- - _encoder is a function which will perform the actual encoding for - transport of the image data. It takes one argument, which is this - Image instance. It should use get_payload() and set_payload() to - change the payload to the encoded form. It should also add any - Content-Transfer-Encoding or other headers to the message as - necessary. The default encoding is Base64. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi index 9c173a72bae95..b733709f1b5a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi @@ -1,5 +1,3 @@ -"""Base class for MIME specializations.""" - import email.message from email import _ParamsType from email.policy import Policy @@ -7,12 +5,4 @@ from email.policy import Policy __all__ = ["MIMEBase"] class MIMEBase(email.message.Message): - """Base class for MIME specializations.""" - - def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: - """This constructor adds a Content-Type: and a MIME-Version: header. - - The Content-Type: header is taken from the _maintype and _subtype - arguments. Additional parameters for this header are taken from the - keyword arguments. - """ + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi index 72c5ac48af093..b47afa6ce5925 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi @@ -1,5 +1,3 @@ -"""Class representing image/* type MIME documents.""" - from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +6,6 @@ from email.policy import Policy __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): - """Class for generating image/* type MIME documents.""" - def __init__( self, _imagedata: str | bytes | bytearray, @@ -18,23 +14,4 @@ class MIMEImage(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: - """Create an image/* type MIME document. - - _imagedata contains the bytes for the raw image data. If the data - type can be detected (jpeg, png, gif, tiff, rgb, pbm, pgm, ppm, - rast, xbm, bmp, webp, and exr attempted), then the subtype will be - automatically included in the Content-Type header. Otherwise, you can - specify the specific image subtype via the _subtype parameter. - - _encoder is a function which will perform the actual encoding for - transport of the image data. It takes one argument, which is this - Image instance. It should use get_payload() and set_payload() to - change the payload to the encoded form. It should also add any - Content-Transfer-Encoding or other headers to the message as - necessary. The default encoding is Base64. - - Any additional keyword arguments are passed to the base class - constructor, which turns them into parameters on the Content-Type - header. - """ + ) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi index eb854b713e20a..a1e370e2eab51 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi @@ -1,5 +1,3 @@ -"""Class representing message/* MIME documents.""" - from email._policybase import _MessageT from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy @@ -7,15 +5,4 @@ from email.policy import Policy __all__ = ["MIMEMessage"] class MIMEMessage(MIMENonMultipart): - """Class representing message/* MIME documents.""" - - def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: - """Create a message/* type MIME document. - - _msg is a message object and must be an instance of Message, or a - derived class of Message, otherwise a TypeError is raised. - - Optional _subtype defines the subtype of the contained message. The - default is "rfc822" (this is defined by the MIME standard, even though - the term "rfc822" is technically outdated by RFC 2822). - """ + def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi index 8ac0e60e8be3f..fb9599edbcb8f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi @@ -1,5 +1,3 @@ -"""Base class for MIME multipart/* type messages.""" - from collections.abc import Sequence from email import _ParamsType from email._policybase import _MessageT @@ -9,8 +7,6 @@ from email.policy import Policy __all__ = ["MIMEMultipart"] class MIMEMultipart(MIMEBase): - """Base class for MIME multipart/* type messages.""" - def __init__( self, _subtype: str = "mixed", @@ -19,22 +15,4 @@ class MIMEMultipart(MIMEBase): *, policy: Policy[_MessageT] | None = None, **_params: _ParamsType, - ) -> None: - """Creates a multipart/* type message. - - By default, creates a multipart/mixed message, with proper - Content-Type and MIME-Version headers. - - _subtype is the subtype of the multipart content type, defaulting to - 'mixed'. - - boundary is the multipart boundary string. By default it is - calculated as needed. - - _subparts is a sequence of initial subparts for the payload. It - must be an iterable object, such as a list. You can always - attach new subparts to the message by using the attach() method. - - Additional parameters for the Content-Type header are taken from the - keyword arguments (or passed into the _params argument). - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi index 395e5a1b362f3..5497d89b10726 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi @@ -1,8 +1,5 @@ -"""Base class for MIME type messages that are not multipart.""" - from email.mime.base import MIMEBase __all__ = ["MIMENonMultipart"] -class MIMENonMultipart(MIMEBase): - """Base class for MIME non-multipart type messages.""" +class MIMENonMultipart(MIMEBase): ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi index 08264521d4839..edfa67a092427 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi @@ -1,21 +1,9 @@ -"""Class representing text/* type MIME documents.""" - from email._policybase import Policy from email.mime.nonmultipart import MIMENonMultipart __all__ = ["MIMEText"] class MIMEText(MIMENonMultipart): - """Class for generating text/* type MIME documents.""" - - def __init__(self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None) -> None: - """Create a text/* type MIME document. - - _text is the string for this message object. - - _subtype is the MIME sub content type, defaulting to "plain". - - _charset is the character set parameter added to the Content-Type - header. This defaults to "us-ascii". Note that as a side-effect, the - Content-Transfer-Encoding header will also be set. - """ + def __init__( + self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi index 2cbc3f021849d..a4924a6cbd88f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi @@ -1,5 +1,3 @@ -"""A parser of RFC 2822 and MIME email messages.""" - from _typeshed import SupportsRead from collections.abc import Callable from email._policybase import _MessageT @@ -13,49 +11,13 @@ __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedPa class Parser(Generic[_MessageT]): @overload - def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: - """Parser of RFC 2822 and MIME email messages. - - Creates an in-memory object tree representing the email message, which - can then be manipulated and turned over to a Generator to return the - textual representation of the message. - - The string must be formatted as a block of RFC 2822 headers and header - continuation lines, optionally preceded by a 'Unix-from' header. The - header block is terminated either by the end of the string or by a - blank line. - - _class is the class to instantiate for new message objects when they - must be created. This class must have a constructor that can take - zero arguments. Default is Message.Message. - - The policy keyword specifies a policy object that controls a number of - aspects of the parser's operation. The default policy maintains - backward compatibility. - - """ - + def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: ... @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload def __init__(self, _class: Callable[[], _MessageT] | None, *, policy: Policy[_MessageT] = ...) -> None: ... - def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: - """Create a message structure from the data in a file. - - Reads all the data from the file and returns the root of the message - structure. Optional headersonly is a flag specifying whether to stop - parsing after reading the headers or not. The default is False, - meaning it parses the entire contents of the file. 
- """ - - def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: - """Create a message structure from a string. - - Returns the root of the message structure. Optional headersonly is a - flag specifying whether to stop parsing after reading the headers or - not. The default is False, meaning it parses the entire contents of - the file. - """ + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: ... + def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: ... class HeaderParser(Parser[_MessageT]): def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> _MessageT: ... @@ -64,44 +26,13 @@ class HeaderParser(Parser[_MessageT]): class BytesParser(Generic[_MessageT]): parser: Parser[_MessageT] @overload - def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: - """Parser of binary RFC 2822 and MIME email messages. - - Creates an in-memory object tree representing the email message, which - can then be manipulated and turned over to a Generator to return the - textual representation of the message. - - The input must be formatted as a block of RFC 2822 headers and header - continuation lines, optionally preceded by a 'Unix-from' header. The - header block is terminated either by the end of the input or by a - blank line. - - _class is the class to instantiate for new message objects when they - must be created. This class must have a constructor that can take - zero arguments. Default is Message.Message. - """ - + def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: ... @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... - def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: - """Create a message structure from the data in a binary file. - - Reads all the data from the file and returns the root of the message - structure. Optional headersonly is a flag specifying whether to stop - parsing after reading the headers or not. The default is False, - meaning it parses the entire contents of the file. - """ - - def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: - """Create a message structure from a byte string. - - Returns the root of the message structure. Optional headersonly is a - flag specifying whether to stop parsing after reading the headers or - not. The default is False, meaning it parses the entire contents of - the file. - """ + def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: ... class BytesHeaderParser(BytesParser[_MessageT]): def parse(self, fp: _WrappedBuffer, headersonly: bool = True) -> _MessageT: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi index 14c5bda44ca16..35c999919eede 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi @@ -1,7 +1,3 @@ -"""This will be the home for the policy that hooks in the new -code that adds all the email6 features. 
-""" - from collections.abc import Callable from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, _MessageT, compat32 as compat32 from email.contentmanager import ContentManager @@ -12,116 +8,6 @@ from typing_extensions import Self __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] class EmailPolicy(Policy[_MessageT]): - """Controls for how messages are interpreted and formatted. - - Most of the classes and many of the methods in the email package accept - Policy objects as parameters. A Policy object contains a set of values and - functions that control how input is interpreted and how output is rendered. - For example, the parameter 'raise_on_defect' controls whether or not an RFC - violation results in an error being raised or not, while 'max_line_length' - controls the maximum length of output lines when a Message is serialized. - - Any valid attribute may be overridden when a Policy is created by passing - it as a keyword argument to the constructor. Policy objects are immutable, - but a new Policy object can be created with only certain values changed by - calling the Policy instance with keyword arguments. Policy objects can - also be added, producing a new Policy object in which the non-default - attributes set in the right hand operand overwrite those specified in the - left operand. - - Settable attributes: - - raise_on_defect -- If true, then defects should be raised as errors. - Default: False. - - linesep -- string containing the value to use as separation - between output lines. Default '\\n'. - - cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. - - max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - - mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. This is used when the message is being - serialized by a generator. Default: False. - - message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - - verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. - This is a check against custom Header & fold() - implementations. - PROVISIONAL - - The API extensions enabled by this policy are currently provisional. - Refer to the documentation for details. - - This policy adds new header parsing and folding algorithms. Instead of - simple strings, headers are custom objects with custom attributes - depending on the type of the field. The folding algorithm fully - implements RFCs 2047 and 5322. - - In addition to the settable attributes listed above that apply to - all Policies, this policy adds the following additional attributes: - - utf8 -- if False (the default) message headers will be - serialized as ASCII, using encoded words to encode - any non-ASCII characters in the source strings. If - True, the message headers will be serialized using - utf8 and will not contain encoded words (see RFC - 6532 for more on this serialization format). 
- - refold_source -- if the value for a header in the Message object - came from the parsing of some source, this attribute - indicates whether or not a generator should refold - that value when transforming the message back into - stream form. The possible values are: - - none -- all source values use original folding - long -- source values that have any line that is - longer than max_line_length will be - refolded - all -- all values are refolded. - - The default is 'long'. - - header_factory -- a callable that takes two arguments, 'name' and - 'value', where 'name' is a header field name and - 'value' is an unfolded header field value, and - returns a string-like object that represents that - header. A default header_factory is provided that - understands some of the RFC5322 header field types. - (Currently address fields and date fields have - special treatment, while all other fields are - treated as unstructured. This list will be - completed before the extension is marked stable.) - - content_manager -- an object with at least two methods: get_content - and set_content. When the get_content or - set_content method of a Message object is called, - it calls the corresponding method of this object, - passing it the message object as its first argument, - and any arguments or keywords that were passed to - it as additional arguments. The default - content_manager is - :data:`~email.contentmanager.raw_data_manager`. - - """ - utf8: bool refold_source: str header_factory: Callable[[str, Any], Any] @@ -160,92 +46,11 @@ class EmailPolicy(Policy[_MessageT]): header_factory: Callable[[str, str], str] = ..., content_manager: ContentManager = ..., ) -> None: ... - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: - """Given a list of linesep terminated strings constituting the lines of - a single header, return the (name, value) tuple that should be stored - in the model. The input lines should retain their terminating linesep - characters. The lines passed in by the email package may contain - surrogateescaped binary data. - The name is parsed as everything up to the ':' and returned unmodified. - The value is determined by stripping leading whitespace off the - remainder of the first line joined with all subsequent lines, and - stripping any trailing carriage return or linefeed characters. (This - is the same as Compat32). - - """ - - def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: - """Given the header name and the value provided by the application - program, return the (name, value) that should be stored in the model. - The name is returned unchanged. If the input value has a 'name' - attribute and it matches the name ignoring case, the value is returned - unchanged. Otherwise the name and value are passed to header_factory - method, and the resulting custom header object is returned as the - value. In this case a ValueError is raised if the input value contains - CR or LF characters. - - """ - - def header_fetch_parse(self, name: str, value: str) -> Any: - """Given the header name and the value from the model, return the value - to be returned to the application program that is requesting that - header. The value passed in by the email package may contain - surrogateescaped binary data if the lines were parsed by a BytesParser. - The returned value should not contain any surrogateescaped data. - - If the value has a 'name' attribute, it is returned to unmodified. 
- Otherwise the name and the value with any linesep characters removed - are passed to the header_factory method, and the resulting custom - header object is returned. Any surrogateescaped bytes get turned - into the unicode unknown-character glyph. - - """ - - def fold(self, name: str, value: str) -> Any: - """Given the header name and the value from the model, return a string - containing linesep characters that implement the folding of the header - according to the policy controls. The value passed in by the email - package may contain surrogateescaped binary data if the lines were - parsed by a BytesParser. The returned value should not contain any - surrogateescaped data. - - Header folding is controlled by the refold_source policy setting. A - value is considered to be a 'source value' if and only if it does not - have a 'name' attribute (having a 'name' attribute means it is a header - object of some sort). If a source value needs to be refolded according - to the policy, it is converted into a custom header object by passing - the name and the value with any linesep characters removed to the - header_factory method. Folding of a custom header object is done by - calling its fold method with the current policy. - - Source values are split into lines using splitlines. If the value is - not to be refolded, the lines are rejoined using the linesep from the - policy and returned. The exception is lines containing non-ascii - binary data. In that case the value is refolded regardless of the - refold_source setting, which causes the binary data to be CTE encoded - using the unknown-8bit charset. - - """ - - def fold_binary(self, name: str, value: str) -> bytes: - """Given the header name and the value from the model, return binary - data containing linesep characters that implement the folding of the - header according to the policy controls. The value passed in by the - email package may contain surrogateescaped binary data. - - The same as fold if cte_type is 7bit, except that the returned value is - bytes. - - If cte_type is 8bit, non-ASCII binary data is converted back into - bytes. Headers with binary data are not refolded, regardless of the - refold_header setting, since there is no way to know whether the binary - data consists of single byte characters or multibyte characters. - - If utf8 is true, headers are encoded to utf8, otherwise to ascii with - non-ASCII unicode rendered as encoded words. - - """ - + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: ... + def header_fetch_parse(self, name: str, value: str) -> Any: ... + def fold(self, name: str, value: str) -> Any: ... + def fold_binary(self, name: str, value: str) -> bytes: ... def clone( self, *, @@ -261,13 +66,7 @@ class EmailPolicy(Policy[_MessageT]): refold_source: str = ..., header_factory: Callable[[str, str], str] = ..., content_manager: ContentManager = ..., - ) -> Self: - """Return a new instance with specified attributes changed. - - The new instance has the same attribute values as the current object, - except for the changes passed in as keyword arguments. - - """ + ) -> Self: ... 
default: EmailPolicy[EmailMessage] SMTP: EmailPolicy[EmailMessage] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi index 7bdd774117e8c..87d08eecc70ce 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi @@ -1,27 +1,3 @@ -"""Quoted-printable content transfer encoding per RFCs 2045-2047. - -This module handles the content transfer encoding method defined in RFC 2045 -to encode US ASCII-like 8-bit data called 'quoted-printable'. It is used to -safely encode text that is in a character set similar to the 7-bit US ASCII -character set, but that includes some 8-bit characters that are normally not -allowed in email bodies or headers. - -Quoted-printable is very space-inefficient for encoding binary files; use the -email.base64mime module for that instead. - -This module provides an interface to encode and decode both headers and bodies -with quoted-printable encoding. - -RFC 2045 defines a method for including character set information in an -'encoded-word' in a header. This method is commonly used for 8-bit real names -in To:/From:/Cc: etc. fields, as well as Subject: lines. - -This module does not do the line wrapping or end-of-line character -conversion necessary for proper internationalized headers; it only -does dumb encoding and decoding. To deal with the various line -wrapping issues, use the email.header module. -""" - from collections.abc import Iterable __all__ = [ @@ -37,79 +13,16 @@ __all__ = [ "unquote", ] -def header_check(octet: int) -> bool: - """Return True if the octet should be escaped with header quopri.""" - -def body_check(octet: int) -> bool: - """Return True if the octet should be escaped with body quopri.""" - -def header_length(bytearray: Iterable[int]) -> int: - """Return a header quoted-printable encoding length. - - Note that this does not include any RFC 2047 chrome added by - `header_encode()`. - - :param bytearray: An array of bytes (a.k.a. octets). - :return: The length in bytes of the byte array when it is encoded with - quoted-printable for headers. - """ - -def body_length(bytearray: Iterable[int]) -> int: - """Return a body quoted-printable encoding length. - - :param bytearray: An array of bytes (a.k.a. octets). - :return: The length in bytes of the byte array when it is encoded with - quoted-printable for bodies. - """ - -def unquote(s: str | bytes | bytearray) -> str: - """Turn a string in the form =AB to the ASCII character with value 0xab""" - +def header_check(octet: int) -> bool: ... +def body_check(octet: int) -> bool: ... +def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... def quote(c: str | bytes | bytearray) -> str: ... -def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: - """Encode a single header line with quoted-printable (like) encoding. - - Defined in RFC 2045, this 'Q' encoding is similar to quoted-printable, but - used specifically for email header fields to allow charsets with mostly 7 - bit characters (and some 8 bit) to remain more or less readable in non-RFC - 2045 aware mail clients. - - charset names the character set to use in the RFC 2046 header. It - defaults to iso-8859-1. 
- """ - -def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: - """Encode with quoted-printable, wrapping at maxlinelen characters. - - Each line of encoded text will end with eol, which defaults to "\\n". Set - this to "\\r\\n" if you will be using the result of this function directly - in an email. - - Each line will be wrapped at, at most, maxlinelen characters before the - eol string (maxlinelen defaults to 76 characters, the maximum value - permitted by RFC 2045). Long lines will have the 'soft line break' - quoted-printable character "=" appended to them, so the decoded text will - be identical to the original text. - - The minimum maxlinelen is 4 to have room for a quoted character ("=XX") - followed by a soft line break. Smaller values will generate a - ValueError. - - """ - -def decode(encoded: str, eol: str = "\n") -> str: - """Decode a quoted-printable string. - - Lines are separated with eol, which defaults to \\n. - """ - -def header_decode(s: str) -> str: - """Decode a string encoded with RFC 2045 MIME header 'Q' encoding. - - This function does not parse a full MIME header value encoded with - quoted-printable (like =?iso-8859-1?q?Hello_World?=) -- please use - the high level email.header class for that functionality. - """ +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(encoded: str, eol: str = "\n") -> str: ... +def header_decode(s: str) -> str: ... body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi index 009e95f2a866f..efc32a7abce29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi @@ -1,5 +1,3 @@ -"""Miscellaneous utilities.""" - import datetime import sys from _typeshed import Unused @@ -29,65 +27,21 @@ __all__ = [ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None] -def quote(str: str) -> str: - """Prepare string to be used in a quoted string. - - Turns backslash and double quote characters into quoted pairs. These - are the only characters that need to be quoted inside a quoted string. - Does not add the surrounding double quotes. - """ - -def unquote(str: str) -> str: - """Remove quotes from a string.""" +def quote(str: str) -> str: ... +def unquote(str: str) -> str: ... # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: - """ - Parse addr into its constituent realname and email address parts. - - Return a tuple of realname and email address, unless the parse fails, in - which case return a 2-tuple of ('', ''). - - If strict is True, use a strict parser which rejects malformed inputs. - """ - -def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: - """The inverse of parseaddr(), this takes a 2-tuple of the form - (realname, email_address) and returns the string value suitable - for an RFC 2822 From, To or Cc header. - - If the first element of pair is false, then the second element is - returned unmodified. - - The optional charset is the character set that is used to encode - realname in case realname is not ASCII safe. Can be an instance of str or - a Charset-like object which has a header_encode method. Default is - 'utf-8'. 
- """ +def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: - """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. - - When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in - its place. - - If strict is true, use a strict parser which rejects malformed inputs. - """ - +def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ... @overload -def parsedate(data: None) -> None: - """Convert a time string to a time tuple.""" - +def parsedate(data: None) -> None: ... @overload def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... @overload -def parsedate_tz(data: None) -> None: - """Convert a date string to a time tuple. - - Accounts for military timezones. - """ - +def parsedate_tz(data: None) -> None: ... @overload def parsedate_tz(data: str) -> _PDTZ | None: ... @@ -100,103 +54,25 @@ if sys.version_info >= (3, 10): else: def parsedate_to_datetime(data: str) -> datetime.datetime: ... -def mktime_tz(data: _PDTZ) -> int: - """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp.""" - -def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: - """Returns a date string as specified by RFC 2822, e.g.: - - Fri, 09 Nov 2001 01:08:47 -0000 - - Optional timeval if given is a floating-point time value as accepted by - gmtime() and localtime(), otherwise the current time is used. - - Optional localtime is a flag that when True, interprets timeval, and - returns a date relative to the local timezone instead of UTC, properly - taking daylight savings time into account. - - Optional argument usegmt means that the timezone is written out as - an ascii string, not numeric one (so "GMT" instead of "+0000"). This - is needed for HTTP, and is only used when localtime==False. - """ - -def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: - """Turn a datetime into a date string as specified in RFC 2822. - - If usegmt is True, dt must be an aware datetime with an offset of zero. In - this case 'GMT' will be rendered instead of the normal +0000 required by - RFC2822. This is to support HTTP headers involving date stamps. - """ +def mktime_tz(data: _PDTZ) -> int: ... +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... if sys.version_info >= (3, 14): - def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: - """Return local time as an aware datetime object. - - If called without arguments, return current time. Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - - """ + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... elif sys.version_info >= (3, 12): @overload - def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: - """Return local time as an aware datetime object. - - If called without arguments, return current time. 
Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - The isdst parameter is ignored. - - """ - + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... @overload @deprecated("The `isdst` parameter does nothing and will be removed in Python 3.14.") def localtime(dt: datetime.datetime | None = None, isdst: Unused = None) -> datetime.datetime: ... else: - def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: - """Return local time as an aware datetime object. - - If called without arguments, return current time. Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - In this case, a positive or zero value for *isdst* causes localtime to - presume initially that summer time (for example, Daylight Saving Time) - is or is not (respectively) in effect for the specified time. A - negative value for *isdst* causes the localtime() function to attempt - to divine whether summer time is in effect for the specified time. - - """ - -def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: - """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: - - <142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> - - Optional idstring if given is a string used to strengthen the - uniqueness of the message id. Optional domain if given provides the - portion of the message id after the '@'. It defaults to the locally - defined hostname. - """ - -def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: # May return list[str]. See issue #10431 for details. - """Decode string according to RFC 2231""" - -def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: - """Encode string according to RFC 2231. - - If neither charset nor language is given, then s is returned as-is. If - charset is given but not language, the string is encoded using the empty - string for language. - """ + def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... # May return list[str]. See issue #10431 for details. +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... -def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: - """Decode parameters list according to RFC 2231. - - params is a sequence of 2-tuples containing (param name, string value). - """ +def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi index 7d26d92022bf7..61f86d243c720 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi @@ -1,50 +1,9 @@ -"""Standard "encodings" Package - - Standard Python encoding modules are stored in this package - directory. - - Codec modules must have names corresponding to normalized encoding - names as defined in the normalize_encoding() function below, e.g. - 'utf-8' must be implemented by the module 'utf_8.py'. - - Each codec module must export the following interface: - - * getregentry() -> codecs.CodecInfo object - The getregentry() API must return a CodecInfo object with encoder, decoder, - incrementalencoder, incrementaldecoder, streamwriter and streamreader - attributes which adhere to the Python Codec Interface Standard. - - In addition, a module may optionally also define the following - APIs which are then used by the package's codec search function: - - * getaliases() -> sequence of encoding name strings to use as aliases - - Alias names returned by getaliases() must be normalized encoding - names as defined by normalize_encoding(). - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import sys from codecs import CodecInfo class CodecRegistryError(LookupError, SystemError): ... -def normalize_encoding(encoding: str | bytes) -> str: - """Normalize an encoding name. - - Normalization works as follows: all non-alphanumeric - characters except the dot used for Python package names are - collapsed and replaced with a single underscore, e.g. ' -;#' - becomes '_'. Leading and trailing underscores are removed. - - Note that encoding names should be ASCII only. - - """ - +def normalize_encoding(encoding: str | bytes) -> str: ... def search_function(encoding: str) -> CodecInfo | None: ... if sys.version_info >= (3, 14) and sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi index ca13370cc64c9..079af85d51ee4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi @@ -1,19 +1 @@ -"""Encoding Aliases Support - -This module is used by the encodings package search function to -map encodings names to module names. - -Note that the search function normalizes the encoding names before -doing the lookup, so the mapping will have to map normalized -encoding names to module names. - -Contents: - - The following aliases dictionary contains mappings of all IANA - character set names for which the Python core library provides - codecs. In addition to these, a few Python specific codec - aliases have also been added. - -""" - aliases: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi index 62be1000b29fc..a85585af32ed9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi @@ -1,12 +1,3 @@ -"""Python 'ascii' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
- -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi index dedf3be9c8d7b..0c4f1cb1fe599 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi @@ -1,10 +1,3 @@ -"""Python 'base64_codec' Codec - base64 content transfer encoding. - -This codec de/encodes from bytes to bytes. - -Written by Marc-Andre Lemburg (mal@lemburg.com). -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi index cf0f6ff30ed44..468346a93da98 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi @@ -1,12 +1,3 @@ -"""Python 'bz2_codec' Codec - bz2 compression encoding. - -This codec de/encodes from bytes to bytes and is therefore usable with -bytes.transform() and bytes.untransform(). - -Adapted by Raymond Hettinger from zlib_codec.py which was written -by Marc-Andre Lemburg (mal@lemburg.com). -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi index 83d14fdf043b1..a971a15860b52 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi @@ -1,15 +1,3 @@ -"""Generic Python Character Mapping Codec. - - Use this codec directly rather than through the automatic - conversion mechanisms supplied by unicode() and .encode(). - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
- -""" - import codecs from _codecs import _CharMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi index d8415c0d99c41..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi index 7d4eed710a85f..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1006 generated from 'MAPPINGS/VENDORS/MISC/CP1006.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi index 9dbbf0d295a69..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1026 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP1026.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi index 65f02593dd57a..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec for CP1125""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi index 1477632e88765..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi index 846276ec73cc0..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi index a945fdd5588a8..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi @@ -1,5 
+1,3 @@ -"""Python Character Mapping Codec cp1251 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1251.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi index 5e25a55b97fdf..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1252 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1252.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi index db602cb995629..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi index 3618e631b67fb..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi index 1a094bc49b1bc..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi index 470adf7eeb682..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi index 81a6a39c0c1a0..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1257 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1257.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git 
a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi index 51322b0474634..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi index fb2b9c0234508..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp273 generated from 'python-mappings/CP273.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi index ef2d7e50a524d..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp424 generated from 'MAPPINGS/VENDORS/MISC/CP424.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi index da6ce75fb45c5..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi index 9d7ba23c0dd09..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi index 12222a8b75e9a..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi @@ -1,8 +1,3 @@ -"""Python Character Mapping Codec cp720 generated on Windows: -Vista 6.0.6002 SP2 Multiprocessor Free with the command: - python Tools/unicode/genwincodec.py 720 -""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi index d1be95602f378..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi @@ -1,5 +1,3 @@ -"""Python 
Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi index e6c838d21660a..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi index cf3d6c9362d6d..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP850.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi index 0caf316eea694..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi index 449b6570bb78c..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi index f9e2b6cd6e5a7..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp856 generated from 'MAPPINGS/VENDORS/MISC/CP856.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi index 10d9eedf067e1..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi index 68a9a7186bf6a..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec for CP858, modified from cp850.""" - import codecs from _typeshed 
import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi index 3cb4ca72b65bc..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi index dfe41216e8cac..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP861.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi index 0846ae9a4ee63..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi index 2c8e9cd40f4d9..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP863.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi index 26402123e7fc1..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP864.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi index ff9d33db6f61a..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP865.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi index f581f58b95aab..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP866.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi 
b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi index d047ab42bbc4e..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP869.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi index 7bceeabf2180d..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp874 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP874.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi index c14444902d081..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi index 34274186368c8..3fd4fe38898a8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi @@ -1,10 +1,3 @@ -"""Python 'hex_codec' Codec - 2-digit hex content transfer encoding. - -This codec de/encodes from bytes to bytes. - -Written by Marc-Andre Lemburg (mal@lemburg.com). -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi index eeaea3e14ab56..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi @@ -1,14 +1,3 @@ -"""Python Character Mapping Codec generated from 'hp_roman8.txt' with gencodec.py. - -Based on data from ftp://dkuug.dk/i18n/charmaps/HP-ROMAN8 (Keld Simonsen) - -Original source: LaserJet IIP Printer User's Manual HP part no -33471-90901, Hewlet-Packard, June 1989. 
- -(Used with permission) - -""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi index a557aa1885a05..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi index eb77204555dfe..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi index ff75663e36a03..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi index b83a44185d724..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi index 99db89a0ef0b4..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_14 generated from 'MAPPINGS/ISO8859/8859-14.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi index 88271ed1d4ac9..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_15 generated from 'MAPPINGS/ISO8859/8859-15.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi 
b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi index 1f6c8c8db1f4a..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_16 generated from 'MAPPINGS/ISO8859/8859-16.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi index c812f8a5c1c36..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_2 generated from 'MAPPINGS/ISO8859/8859-2.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi index 8414a39157763..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi index 49291ee183024..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi index 636c07388ea7e..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi index 2664a18952f12..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi index 6716b9702261b..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi @@ -1,5 +1,3 @@ 
-"""Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi index 308832217ab1f..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi index 9d06803516a07..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi index 85393efacd982..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi index 5d8feb27ce7ea..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec koi8_t""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi index feb2736531031..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec koi8_u generated from 'python-mappings/KOI8-U.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi index 1bcefed6c332c..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec kz1048 generated from 'MAPPINGS/VENDORS/MISC/KZ1048.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi index 87419a70e2876..3b06773eac03c 100644 --- 
a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi @@ -1,12 +1,3 @@ -"""Python 'latin-1' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi index b267b1eb49171..42781b4892984 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py.""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi index b5894c73b3da0..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi index 6314158198fa4..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi index 3b73beecb1ac7..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_farsi generated from 'MAPPINGS/VENDORS/APPLE/FARSI.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi index 9b49f6b8f099d..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi index c0a02d06ce606..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' 
with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi index 3e0b50e5f8676..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi @@ -1,12 +1,3 @@ -"""Python Character Mapping Codec mac_latin2 generated from 'MAPPINGS/VENDORS/MICSFT/MAC/LATIN2.TXT' with gencodec.py. - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. -(c) Copyright 2000 Guido van Rossum. - -""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi index 387546e419e69..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi index c2f955eb496fa..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi index 4858ecacd4abd..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec mac_turkish generated from 'MAPPINGS/VENDORS/APPLE/TURKISH.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi index 9332b0587d11d..2c2917d63f6db 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi @@ -1,13 +1,3 @@ -"""Python 'mbcs' Codec for Windows - - -Cloned by Mark Hammond (mhammond@skippinet.com.au) from ascii.py, -which was written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
- -""" - import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi index 65a0e13a27e0f..376c12c445f42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi @@ -1,5 +1,3 @@ -"""Python 'oem' Codec for Windows""" - import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi index 9b1e097e9634f..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi @@ -1,9 +1,3 @@ -"""Python Character Mapping Codec for PalmOS 3.5. - -Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py. - -""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi index a0fca2c6d09e9..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi @@ -1,12 +1,3 @@ -"""Python Character Mapping Codec generated from 'PTCP154.txt' with gencodec.py. - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. -(c) Copyright 2000 Guido van Rossum. - -""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi index 69d61ef8e2faa..eb99e667b4167 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi @@ -1,46 +1,20 @@ -"""Codec for the Punycode encoding, as specified in RFC 3492 - -Written by Martin v. Löwis. -""" - import codecs from typing import Literal -def segregate(str: str) -> tuple[bytes, list[int]]: - """3.1 Basic code point segregation""" - -def selective_len(str: str, max: int) -> int: - """Return the length of str, considering only characters below max.""" - -def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: - """Return a pair (index, pos), indicating the next occurrence of - char in str. index is the position of the character considering - only ordinals up to and including char, and pos is the position in - the full string. index/pos is the starting position in the full - string. - """ - -def insertion_unsort(str: str, extended: list[int]) -> list[int]: - """3.2 Insertion unsort coding""" - +def segregate(str: str) -> tuple[bytes, list[int]]: ... +def selective_len(str: str, max: int) -> int: ... +def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: ... +def insertion_unsort(str: str, extended: list[int]) -> list[int]: ... def T(j: int, bias: int) -> int: ... digits: Literal[b"abcdefghijklmnopqrstuvwxyz0123456789"] -def generate_generalized_integer(N: int, bias: int) -> bytes: - """3.3 Generalized variable-length integers""" - +def generate_generalized_integer(N: int, bias: int) -> bytes: ... def adapt(delta: int, first: bool, numchars: int) -> int: ... 
-def generate_integers(baselen: int, deltas: list[int]) -> bytes: - """3.4 Bias adaptation""" - +def generate_integers(baselen: int, deltas: list[int]) -> bytes: ... def punycode_encode(text: str) -> bytes: ... -def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: - """3.3 Generalized variable-length integers""" - -def insertion_sort(base: str, extended: bytes, errors: str) -> str: - """3.2 Insertion sort coding""" - +def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: ... +def insertion_sort(base: str, extended: bytes, errors: str) -> str: ... def punycode_decode(text: memoryview | bytes | bytearray | str, errors: str) -> str: ... class Codec(codecs.Codec): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi index 0670265cba928..e9deadd8d463c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi @@ -1,8 +1,3 @@ -"""Codec for quoted-printable encoding. - -This codec de/encodes from bytes to bytes. -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi index 39f2d1153795f..2887739468f24 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi @@ -1,12 +1,3 @@ -"""Python 'raw-unicode-escape' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi index 60431b2597784..8d71bc9575949 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi @@ -1,10 +1,3 @@ -"""Python Character Mapping Codec for ROT13. - -This codec de/encodes from str to str. - -Written by Marc-Andre Lemburg (mal@lemburg.com). -""" - import codecs from _typeshed import SupportsRead, SupportsWrite diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi index bce7bd673ba40..f62195662ce96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi @@ -1,5 +1,3 @@ -"""Python Character Mapping Codec tis_620 generated from 'python-mappings/TIS-620.TXT' with gencodec.py.""" - import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi index d1d4f8dad8352..4775dac752f28 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi @@ -1,15 +1,3 @@ -"""Python 'undefined' Codec - - This codec will always raise a UnicodeError exception when being - used. It is intended for use by the site.py file to switch off - automatic string to Unicode coercion. 
- -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi index 6273f12b8bed8..ceaa39a3859ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi @@ -1,12 +1,3 @@ -"""Python 'unicode-escape' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi index a67441b32fc5b..3b712cde420ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi @@ -1,12 +1,3 @@ -"""Python 'utf-16' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi index 2c6e718beab06..cc7d1534fc69b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi @@ -1,12 +1,3 @@ -"""Python 'utf-16-be' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi index 106e801258593..ba103eb088e3c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi @@ -1,12 +1,3 @@ -"""Python 'utf-16-le' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
- -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi index f294f44802e51..c925be712c728 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi @@ -1,7 +1,3 @@ -""" -Python 'utf-32' Codec -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi index 360d0f69fe189..9d28f5199c501 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi @@ -1,7 +1,3 @@ -""" -Python 'utf-32-be' Codec -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi index 40149d98d20f9..5be14a91a3e6c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi @@ -1,7 +1,3 @@ -""" -Python 'utf-32-le' Codec -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi index cc1ac51151ed5..dc1162f34c287 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi @@ -1,8 +1,3 @@ -"""Python 'utf-7' Codec - -Written by Brian Quinlan (brian@sweetapp.com). -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi index ae6671e94b196..918712d804730 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi @@ -1,12 +1,3 @@ -"""Python 'utf-8' Codec - - -Written by Marc-Andre Lemburg (mal@lemburg.com). - -(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. - -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi index 1e7d176fa4400..af69217d67321 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -1,13 +1,3 @@ -"""Python 'utf-8-sig' Codec -This work similar to UTF-8 with the following changes: - -* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the - first three bytes. - -* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these - bytes will be skipped. -""" - import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi index 9f59d5921e27b..e32ba8ac0a1a7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi @@ -1,12 +1,3 @@ -"""Python 'uu_codec' Codec - UU content transfer encoding. - -This codec de/encodes from bytes to bytes. 
- -Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were -adapted from uu.py which was written by Lance Ellinghouse and -modified by Jack Jansen and Fredrik Lundh. -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi index 7926d78c0c22c..0f13d0e810e91 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi @@ -1,10 +1,3 @@ -"""Python 'zlib_codec' Codec - zlib compression encoding. - -This codec de/encodes from bytes to bytes. - -Written by Marc-Andre Lemburg (mal@lemburg.com). -""" - import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi index c203a2e65b00b..332fb1845917d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi @@ -1,10 +1,6 @@ __all__ = ["version", "bootstrap"] -def version() -> str: - """ - Returns a string specifying the bundled version of pip. - """ - +def version() -> str: ... def bootstrap( *, root: str | None = None, @@ -13,10 +9,4 @@ def bootstrap( altinstall: bool = False, default_pip: bool = False, verbosity: int = 0, -) -> None: - """ - Bootstrap pip into the current Python installation (or the given root - directory). - - Note that calling this function will alter both sys.path and os.environ. - """ +) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi index b9933de380be8..4ac860f5e611d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi @@ -57,43 +57,20 @@ _Signature: TypeAlias = Any # TODO: Unable to import Signature from inspect mod if sys.version_info >= (3, 11): class nonmember(Generic[_EnumMemberT]): - """ - Protects item from becoming an Enum member during class creation. - """ - value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... class member(Generic[_EnumMemberT]): - """ - Forces item to become an Enum member during class creation. - """ - value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... class _EnumDict(dict[str, Any]): - """ - Track enum member order and ensure member names are not reused. - - EnumType will use the names found in self._member_names as the - enumeration member names. - """ - if sys.version_info >= (3, 13): def __init__(self, cls_name: str | None = None) -> None: ... else: def __init__(self) -> None: ... - def __setitem__(self, key: str, value: Any) -> None: - """ - Changes anything not dundered or not a descriptor. - - If an enum member name is used twice, an error is raised; duplicate - values are not checked for. - - Single underscore (sunder) names are reserved. - """ + def __setitem__(self, key: str, value: Any) -> None: ... 
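# Illustrative sketch, not part of the typeshed sync: the docstrings removed
# above describe enum.nonmember ("protects item from becoming an Enum member")
# and enum.member ("forces item to become an Enum member"), both added in
# Python 3.11. A hedged usage example:
import enum
import sys

if sys.version_info >= (3, 11):
    class Color(enum.Enum):
        RED = 1
        GREEN = enum.member(2)        # explicitly forced to be a member
        SCALE = enum.nonmember(2.5)   # kept out of the member list

    assert "GREEN" in Color.__members__       # a real member
    assert "SCALE" not in Color.__members__   # a plain class-level value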
if sys.version_info >= (3, 11): # See comment above `typing.MutableMapping.update` # for why overloads are preferable to a Union here @@ -113,10 +90,6 @@ if sys.version_info >= (3, 13): # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(type): - """ - Metaclass for Enum - """ - if sys.version_info >= (3, 11): def __new__( metacls: type[_typeshed.Self], @@ -135,93 +108,28 @@ class EnumMeta(type): @classmethod def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] - def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: - """ - Return members in definition order. - """ - - def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: - """ - Return members in reverse definition order. - """ + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... if sys.version_info >= (3, 12): - def __contains__(self: type[Any], value: object) -> bool: - """Return True if `value` is in `cls`. - - `value` is in `cls` if: - 1) `value` is a member of `cls`, or - 2) `value` is the value of one of the `cls`'s members. - 3) `value` is a pseudo-member (flags) - """ + def __contains__(self: type[Any], value: object) -> bool: ... elif sys.version_info >= (3, 11): - def __contains__(self: type[Any], member: object) -> bool: - """ - Return True if member is a member of this enum - raises TypeError if member is not an enum member - - note: in 3.12 TypeError will no longer be raised, and True will also be - returned if member is the value of a member in this enum - """ + def __contains__(self: type[Any], member: object) -> bool: ... elif sys.version_info >= (3, 10): def __contains__(self: type[Any], obj: object) -> bool: ... else: def __contains__(self: type[Any], member: object) -> bool: ... - def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: - """ - Return the member matching `name`. - """ - + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... @_builtins_property - def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: - """ - Returns a mapping of member name->value. - - This mapping lists all enum members, including aliases. Note that this - is a read-only view of the internal mapping. - """ - - def __len__(self) -> int: - """ - Return the number of members (no aliases) - """ - - def __bool__(self) -> Literal[True]: - """ - classes/types should always be True. - """ - + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... def __dir__(self) -> list[str]: ... # Overload 1: Value lookup on an already existing enum class (simple case) @overload - def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: - """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - - The value lookup branch is chosen if the enum is final. - - When used for the functional API: - - `value` will be the name of the new class. + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... 
- `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. - - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. - - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. - - `type`, if set, will be mixed in as the first base class. - """ # Overload 2: Functional API for constructing new enum classes. if sys.version_info >= (3, 11): @overload @@ -235,33 +143,7 @@ class EnumMeta(type): type: type | None = None, start: int = 1, boundary: FlagBoundary | None = None, - ) -> type[Enum]: - """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - - The value lookup branch is chosen if the enum is final. - - When used for the functional API: - - `value` will be the name of the new class. - - `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. - - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. - - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. - - `type`, if set, will be mixed in as the first base class. - """ + ) -> type[Enum]: ... else: @overload def __call__( @@ -273,31 +155,8 @@ class EnumMeta(type): qualname: str | None = None, type: type | None = None, start: int = 1, - ) -> type[Enum]: - """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - - When used for the functional API: - - `value` will be the name of the new class. - - `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. - - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. + ) -> type[Enum]: ... - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. - - `type`, if set, will be mixed in as the first base class. - """ # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) # # >>> class Foo(enum.Enum): @@ -307,33 +166,7 @@ class EnumMeta(type): # if sys.version_info >= (3, 12): @overload - def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: - """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. 
Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - - The value lookup branch is chosen if the enum is final. - - When used for the functional API: - - `value` will be the name of the new class. - - `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. - - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. - - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. - - `type`, if set, will be mixed in as the first base class. - """ + def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ... if sys.version_info >= (3, 14): @property def __signature__(cls) -> _Signature: ... @@ -347,14 +180,6 @@ if sys.version_info >= (3, 11): EnumType = EnumMeta class property(types.DynamicClassAttribute): - """ - This is a descriptor, used to define attributes that act differently - when accessed through an enum member and through an enum class. - Instance access is the same as property(), but access to an attribute - through the enum class will instead look in the class' _member_map_ for - a corresponding enum member. - """ - def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... name: str clsname: str @@ -365,52 +190,10 @@ else: _magic_enum_attr = types.DynamicClassAttribute class Enum(metaclass=EnumMeta): - """ - Create a collection of name/value pairs. - - Example enumeration: - - >>> class Color(Enum): - ... RED = 1 - ... BLUE = 2 - ... GREEN = 3 - - Access them by: - - - attribute access: - - >>> Color.RED - - - - value lookup: - - >>> Color(1) - - - - name lookup: - - >>> Color['RED'] - - - Enumerations can be iterated over, and know how many members they have: - - >>> len(Color) - 3 - - >>> list(Color) - [, , ] - - Methods can be added to enumerations, and members can have their own - attributes -- see the documentation for details. - """ - @_magic_enum_attr - def name(self) -> str: - """The name of the Enum member.""" - + def name(self) -> str: ... @_magic_enum_attr - def value(self) -> Any: - """The value of the Enum member.""" + def value(self) -> Any: ... _name_: str _value_: Any _ignore_: str | list[str] @@ -419,32 +202,16 @@ class Enum(metaclass=EnumMeta): @classmethod def _missing_(cls, value: object) -> Any: ... @staticmethod - def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: - """ - Generate the next value when not given. - - name: the name of the member - start: the initial start value or None - count: the number of existing members - last_values: the list of values assigned - """ + def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... # It's not true that `__new__` will accept any argument type, # so ideally we'd use `Any` to indicate that the argument type is inexpressible. # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` # (see #7752, #2539, mypy/#5788), # and in practice using `object` here has the same effect as using `Any`. def __new__(cls, value: object) -> Self: ... - def __dir__(self) -> list[str]: - """ - Returns public methods and other interesting attributes. 
- """ - + def __dir__(self) -> list[str]: ... def __hash__(self) -> int: ... - def __format__(self, format_spec: str) -> str: - """ - Returns format using actual value type unless __str__ has been overridden. - """ - + def __format__(self, format_spec: str) -> str: ... def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... @@ -459,22 +226,13 @@ class Enum(metaclass=EnumMeta): def _add_alias_(self, name: str) -> None: ... if sys.version_info >= (3, 11): - class ReprEnum(Enum): - """ - Only changes the repr(), leaving str() and format() to the mixed-in type. - """ + class ReprEnum(Enum): ... if sys.version_info >= (3, 12): class IntEnum(int, ReprEnum): - """ - Enum where members are also (and must be) ints - """ - _value_: int @_magic_enum_attr - def value(self) -> int: - """The value of the Enum member.""" - + def value(self) -> int: ... def __new__(cls, value: int) -> Self: ... else: @@ -485,55 +243,30 @@ else: @disjoint_base class IntEnum(int, _IntEnumBase): - """ - Enum where members are also (and must be) ints - """ - _value_: int @_magic_enum_attr - def value(self) -> int: - """The value of the Enum member.""" - + def value(self) -> int: ... def __new__(cls, value: int) -> Self: ... -def unique(enumeration: _EnumerationT) -> _EnumerationT: - """ - Class decorator for enumerations ensuring unique member values. - """ +def unique(enumeration: _EnumerationT) -> _EnumerationT: ... _auto_null: Any class Flag(Enum): - """ - Support for flags - """ - _name_: str | None # type: ignore[assignment] _value_: int @_magic_enum_attr - def name(self) -> str | None: # type: ignore[override] - """The name of the Enum member.""" - + def name(self) -> str | None: ... # type: ignore[override] @_magic_enum_attr - def value(self) -> int: - """The value of the Enum member.""" - - def __contains__(self, other: Self) -> bool: - """ - Returns True if self has at least the same flags set as other. - """ - + def value(self) -> int: ... + def __contains__(self, other: Self) -> bool: ... def __bool__(self) -> bool: ... def __or__(self, other: Self) -> Self: ... def __and__(self, other: Self) -> Self: ... def __xor__(self, other: Self) -> Self: ... def __invert__(self) -> Self: ... if sys.version_info >= (3, 11): - def __iter__(self) -> Iterator[Self]: - """ - Returns flags in definition order. - """ - + def __iter__(self) -> Iterator[Self]: ... def __len__(self) -> int: ... __ror__ = __or__ __rand__ = __and__ @@ -541,27 +274,14 @@ class Flag(Enum): if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): - """ - Enum where members are also (and must be) strings - """ - def __new__(cls, value: str) -> Self: ... _value_: str @_magic_enum_attr - def value(self) -> str: - """The value of the Enum member.""" - + def value(self) -> str: ... @staticmethod - def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: - """ - Return the lower-cased version of the member name. - """ + def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ... class EnumCheck(StrEnum): - """ - various conditions to check an enumeration for - """ - CONTINUOUS = "no skipped integer values" NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags" UNIQUE = "one name per value" @@ -571,22 +291,10 @@ if sys.version_info >= (3, 11): UNIQUE: Final = EnumCheck.UNIQUE class verify: - """ - Check an enumeration for various constraints. 
(see EnumCheck) - """ - def __init__(self, *checks: EnumCheck) -> None: ... def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... class FlagBoundary(StrEnum): - """ - control how out of range values are handled - "strict" -> error is raised [default for Flag] - "conform" -> extra bits are discarded - "eject" -> lose flag status - "keep" -> keep flag status and all bits [default for IntFlag] - """ - STRICT = "strict" CONFORM = "conform" EJECT = "eject" @@ -597,39 +305,14 @@ if sys.version_info >= (3, 11): EJECT: Final = FlagBoundary.EJECT KEEP: Final = FlagBoundary.KEEP - def global_str(self: Enum) -> str: - """ - use enum_name instead of class.enum_name - """ - - def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: - """ - decorator that makes the repr() of an enum member reference its module - instead of its class; also exports all members to the enum's module's - global namespace - """ - - def global_enum_repr(self: Enum) -> str: - """ - use module.enum_name instead of class.enum_name - - the module is the last module in case of a multi-module name - """ - - def global_flag_repr(self: Flag) -> str: - """ - use module.flag_name instead of class.flag_name - - the module is the last module in case of a multi-module name - """ + def global_str(self: Enum) -> str: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... + def global_enum_repr(self: Enum) -> str: ... + def global_flag_repr(self: Flag) -> str: ... if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases - """ - Support for integer-based Flags - """ - def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -643,10 +326,6 @@ elif sys.version_info >= (3, 11): # The body of the class is the same, but the base classes are different. @disjoint_base class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases - """ - Support for integer-based Flags - """ - def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -659,10 +338,6 @@ elif sys.version_info >= (3, 11): else: @disjoint_base class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases - """ - Support for integer-based Flags - """ - def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -673,10 +348,6 @@ else: __rxor__ = __xor__ class auto: - """ - Instances are replaced with an appropriate value in Enum class suites. - """ - _value_: Any @_magic_enum_attr def value(self) -> Any: ... @@ -687,9 +358,7 @@ class auto: # shouldn't have these, but they're needed for int versions of auto (mostly the __or__). # Ideally type checkers would special case auto enough to handle this, # but until then this is a slightly inaccurate helping hand. - def __or__(self, other: int | Self) -> Self: - """Return self|value.""" - + def __or__(self, other: int | Self) -> Self: ... def __and__(self, other: int | Self) -> Self: ... def __xor__(self, other: int | Self) -> Self: ... 
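# Illustrative sketch, not part of the typeshed sync: the Enum/Flag/auto
# docstrings removed above describe value lookup (Color(1)), name lookup
# (Color['RED']), the functional API, and auto(). A hedged example mirroring
# those docstrings:
from enum import Enum, Flag, auto, unique

@unique                       # class decorator ensuring unique member values
class Color(Enum):
    RED = 1
    BLUE = 2
    GREEN = 3

assert Color(1) is Color.RED          # value lookup
assert Color["RED"] is Color.RED      # name lookup
assert len(Color) == 3                # member count, aliases excluded

# Functional API: names may be a whitespace/comma delimited string,
# with values starting at `start`.
Animal = Enum("Animal", names="CAT DOG", start=1)
assert Animal.CAT.value == 1

class Perm(Flag):                     # Flag members support |, &, ^, ~
    READ = auto()
    WRITE = auto()

assert Perm.READ in (Perm.READ | Perm.WRITE)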
__ror__ = __or__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi index 386c055ae2c4e..e025e1fd13b9b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi @@ -1,17 +1,3 @@ -"""This module makes available standard errno system symbols. - -The value of each symbol is the corresponding integer value, -e.g., on most systems, errno.ENOENT equals the integer 2. - -The dictionary errno.errorcode maps numeric codes to symbol names, -e.g., errno.errorcode[2] could be the string 'ENOENT'. - -Symbols that are not relevant to the underlying system are not defined. - -To map error codes to error messages, use the function os.strerror(), -e.g. os.strerror(2) could return 'No such file or directory'. -""" - import sys from collections.abc import Mapping from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi index 561223b39b94d..33d08995eb759 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi @@ -1,40 +1,23 @@ -"""faulthandler module.""" - import sys from _typeshed import FileDescriptorLike -def cancel_dump_traceback_later() -> None: - """Cancel the previous call to dump_traceback_later().""" - -def disable() -> None: - """Disable the fault handler.""" - -def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: - """Dump the traceback of the current thread, or of all threads if all_threads is True, into file.""" +def cancel_dump_traceback_later() -> None: ... +def disable() -> None: ... +def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... if sys.version_info >= (3, 14): - def dump_c_stack(file: FileDescriptorLike = ...) -> None: - """Dump the C stack of the current thread.""" + def dump_c_stack(file: FileDescriptorLike = ...) -> None: ... -def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: - """Dump the traceback of all threads in timeout seconds, - or each timeout seconds if repeat is True. If exit is True, call _exit(1) which is not safe. - """ +def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: ... if sys.version_info >= (3, 14): - def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: - """Enable the fault handler.""" + def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: ... else: - def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: - """Enable the fault handler.""" + def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... -def is_enabled() -> bool: - """Check if the handler is enabled.""" +def is_enabled() -> bool: ... if sys.platform != "win32": - def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: - """Register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file.""" - - def unregister(signum: int, /) -> None: - """Unregister the handler of the signal 'signum' registered by register().""" + def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: ... 
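# Illustrative sketch, not part of the typeshed sync: the faulthandler
# docstrings removed above describe enable(), dump_traceback() and
# dump_traceback_later(). A hedged example:
import faulthandler
import sys

faulthandler.enable()                        # install the fault handler
assert faulthandler.is_enabled()
faulthandler.dump_traceback(file=sys.stderr, all_threads=True)
faulthandler.dump_traceback_later(timeout=5.0, repeat=False)
faulthandler.cancel_dump_traceback_later()   # cancel the pending dump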
+ def unregister(signum: int, /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi index 561e6585c8975..5a3e89b0c6766 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi @@ -1,9 +1,3 @@ -"""This module performs file control and I/O control on file -descriptors. It is an interface to the fcntl() and ioctl() Unix -routines. File descriptors can be obtained with the fileno() method of -a file or socket object. -""" - import sys from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer from typing import Any, Final, Literal, overload @@ -143,54 +137,12 @@ if sys.platform != "win32": F_RDAHEAD: Final[int] @overload - def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: - """Perform the operation `cmd` on file descriptor fd. - - The values used for `cmd` are operating system dependent, and are available - as constants in the fcntl module, using the same names as used in - the relevant C header files. The argument arg is optional, and - defaults to 0; it may be an int or a string. If arg is given as a string, - the return value of fcntl is a string of that length, containing the - resulting value put in the arg buffer by the operating system. The length - of the arg string is not allowed to exceed 1024 bytes. If the arg given - is an integer or if none is specified, the result value is an integer - corresponding to the return value of the fcntl call in the C code. - """ - + def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: ... @overload def fcntl(fd: FileDescriptorLike, cmd: int, arg: str | ReadOnlyBuffer, /) -> bytes: ... # If arg is an int, return int @overload - def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: - """Perform the operation `request` on file descriptor `fd`. - - The values used for `request` are operating system dependent, and are available - as constants in the fcntl or termios library modules, using the same names as - used in the relevant C header files. - - The argument `arg` is optional, and defaults to 0; it may be an int or a - buffer containing character data (most likely a string or an array). - - If the argument is a mutable buffer (such as an array) and if the - mutate_flag argument (which is only allowed in this case) is true then the - buffer is (in effect) passed to the operating system and changes made by - the OS will be reflected in the contents of the buffer after the call has - returned. The return value is the integer returned by the ioctl system - call. - - If the argument is a mutable buffer and the mutable_flag argument is false, - the behavior is as if a string had been passed. - - If the argument is an immutable buffer (most likely a string) then a copy - of the buffer is passed to the operating system and the return value is a - string of the same length containing whatever the operating system put in - the buffer. The length of the arg buffer in this case is not allowed to - exceed 1024 bytes. - - If the arg given is an integer or if none is specified, the result value is - an integer corresponding to the return value of the ioctl call in the C - code. - """ + def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: ... 
# The return type works as follows: # - If arg is a read-write buffer, return int if mutate_flag is True, otherwise bytes # - If arg is a read-only buffer, return bytes (and ignore the value of mutate_flag) @@ -202,34 +154,5 @@ if sys.platform != "win32": def ioctl(fd: FileDescriptorLike, request: int, arg: WriteableBuffer, mutate_flag: Literal[False], /) -> bytes: ... @overload def ioctl(fd: FileDescriptorLike, request: int, arg: Buffer, mutate_flag: bool = True, /) -> Any: ... - def flock(fd: FileDescriptorLike, operation: int, /) -> None: - """Perform the lock operation `operation` on file descriptor `fd`. - - See the Unix manual page for flock(2) for details (On some systems, this - function is emulated using fcntl()). - """ - - def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: - """A wrapper around the fcntl() locking calls. - - `fd` is the file descriptor of the file to lock or unlock, and operation is one - of the following values: - - LOCK_UN - unlock - LOCK_SH - acquire a shared lock - LOCK_EX - acquire an exclusive lock - - When operation is LOCK_SH or LOCK_EX, it can also be bitwise ORed with - LOCK_NB to avoid blocking on lock acquisition. If LOCK_NB is used and the - lock cannot be acquired, an OSError will be raised and the exception will - have an errno attribute set to EACCES or EAGAIN (depending on the operating - system -- for portability, check for either value). - - `len` is the number of bytes to lock, with the default meaning to lock to - EOF. `start` is the byte offset, relative to `whence`, to that the lock - starts. `whence` is as with fileobj.seek(), specifically: - - 0 - relative to the start of the file (SEEK_SET) - 1 - relative to the current buffer position (SEEK_CUR) - 2 - relative to the end of the file (SEEK_END) - """ + def flock(fd: FileDescriptorLike, operation: int, /) -> None: ... + def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi index 33e29bcc32775..620cc177a415a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi @@ -1,15 +1,3 @@ -"""Utilities for comparing files and directories. - -Classes: - dircmp - -Functions: - cmp(f1, f2, shallow=True) -> int - cmpfiles(a, b, common) -> ([], [], []) - clear_cache() - -""" - import sys from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence @@ -21,84 +9,12 @@ __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: Final[list[str]] BUFSIZE: Final = 8192 -def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: - """Compare two files. - - Arguments: - - f1 -- First file name - - f2 -- Second file name - - shallow -- treat files as identical if their stat signatures (type, size, - mtime) are identical. Otherwise, files are considered different - if their sizes or contents differ. [default: True] - - Return value: - - True if the files are the same, False otherwise. - - This function uses a cache for past comparisons and the results, - with cache entries invalidated if their stat information - changes. The cache may be cleared by calling clear_cache(). - - """ - +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... 
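# Illustrative sketch, not part of the typeshed sync: the removed lockf()
# docstring above describes LOCK_SH/LOCK_EX/LOCK_UN, LOCK_NB, and the
# EACCES/EAGAIN errors. A hedged, POSIX-only example; the lock-file path is
# hypothetical:
import errno
import fcntl

with open("/tmp/example.lock", "w") as fp:
    try:
        # non-blocking exclusive lock; raises OSError if already held
        fcntl.lockf(fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError as exc:
        if exc.errno not in (errno.EACCES, errno.EAGAIN):
            raise
    else:
        fcntl.lockf(fp.fileno(), fcntl.LOCK_UN)   # release the lock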
def cmpfiles( a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True -) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: - """Compare common files in two directories. - - a, b -- directory names - common -- list of file names found in both directories - shallow -- if true, do comparison based solely on stat() information - - Returns a tuple of three lists: - files that compare equal - files that are different - filenames that aren't regular files. - - """ +) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... class dircmp(Generic[AnyStr]): - """A class that manages the comparison of 2 directories. - - dircmp(a, b, ignore=None, hide=None, *, shallow=True) - A and B are directories. - IGNORE is a list of names to ignore, - defaults to DEFAULT_IGNORES. - HIDE is a list of names to hide, - defaults to [os.curdir, os.pardir]. - SHALLOW specifies whether to just check the stat signature (do not read - the files). - defaults to True. - - High level usage: - x = dircmp(dir1, dir2) - x.report() -> prints a report on the differences between dir1 and dir2 - or - x.report_partial_closure() -> prints report on differences between dir1 - and dir2, and reports on common immediate subdirectories. - x.report_full_closure() -> like report_partial_closure, - but fully recursive. - - Attributes: - left_list, right_list: The files in dir1 and dir2, - filtered by hide and ignore. - common: a list of names in both dir1 and dir2. - left_only, right_only: names only in dir1, dir2. - common_dirs: subdirectories in both dir1 and dir2. - common_files: files in both dir1 and dir2. - common_funny: names in both dir1 and dir2 where the type differs between - dir1 and dir2, or the name is not stat-able. - same_files: list of identical files. - diff_files: list of filenames which differ. - funny_files: list of files which could not be compared. - subdirs: a dictionary of dircmp instances (or MyDirCmp instances if this - object is of type MyDirCmp, a subclass of dircmp), keyed by names - in common_dirs. - """ - if sys.version_info >= (3, 13): def __init__( self, @@ -144,11 +60,6 @@ class dircmp(Generic[AnyStr]): def phase3(self) -> None: ... def phase4(self) -> None: ... def phase4_closure(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... -def clear_cache() -> None: - """Clear the filecmp cache.""" +def clear_cache() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi index 9959c68f10ed5..910d638142751 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi @@ -1,70 +1,3 @@ -"""Helper class to quickly write a loop over all standard input files. - -Typical use is: - - import fileinput - for line in fileinput.input(encoding="utf-8"): - process(line) - -This iterates over the lines of all files listed in sys.argv[1:], -defaulting to sys.stdin if the list is empty. If a filename is '-' it -is also replaced by sys.stdin and the optional arguments mode and -openhook are ignored. To specify an alternative list of filenames, -pass it as the argument to input(). A single file name is also allowed. 
- -Functions filename(), lineno() return the filename and cumulative line -number of the line that has just been read; filelineno() returns its -line number in the current file; isfirstline() returns true iff the -line just read is the first line of its file; isstdin() returns true -iff the line was read from sys.stdin. Function nextfile() closes the -current file so that the next iteration will read the first line from -the next file (if any); lines not read from the file will not count -towards the cumulative line count; the filename is not changed until -after the first line of the next file has been read. Function close() -closes the sequence. - -Before any lines have been read, filename() returns None and both line -numbers are zero; nextfile() has no effect. After all lines have been -read, filename() and the line number functions return the values -pertaining to the last line read; nextfile() has no effect. - -All files are opened in text mode by default, you can override this by -setting the mode parameter to input() or FileInput.__init__(). -If an I/O error occurs during opening or reading a file, the OSError -exception is raised. - -If sys.stdin is used more than once, the second and further use will -return no lines, except perhaps for interactive use, or if it has been -explicitly reset (e.g. using sys.stdin.seek(0)). - -Empty files are opened and immediately closed; the only time their -presence in the list of filenames is noticeable at all is when the -last file opened is empty. - -It is possible that the last line of a file doesn't end in a newline -character; otherwise lines are returned including the trailing -newline. - -Class FileInput is the implementation; its methods filename(), -lineno(), fileline(), isfirstline(), isstdin(), nextfile() and close() -correspond to the functions in the module. In addition it has a -readline() method which returns the next input line, and a -__getitem__() method which implements the sequence behavior. The -sequence must be accessed in strictly sequential order; sequence -access and readline() cannot be mixed. - -Optional in-place filtering: if the keyword argument inplace=True is -passed to input() or to the FileInput constructor, the file is moved -to a backup file and standard output is directed to the input file. -This makes it possible to write a filter that rewrites its input file -in place. If the keyword argument backup="." is also -given, it specifies the extension for the backup file, and the backup -file remains around; by default, the extension is ".bak" and it is -deleted when the output file is closed. In-place filtering is -disabled when standard input is read. XXX The current implementation -does not work for MS-DOS 8+3 filesystems. -""" - import sys from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable @@ -109,14 +42,7 @@ if sys.version_info >= (3, 10): openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, encoding: str | None = None, errors: str | None = None, - ) -> FileInput[str]: - """Return an instance of the FileInput class, which can be iterated. - - The parameters are passed to the constructor of the FileInput class. - The returned instance, in addition to being an iterator, - keeps global state for the functions of this module,. - """ - + ) -> FileInput[str]: ... 
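# Illustrative sketch, not part of the typeshed sync: the fileinput module
# docstring removed above shows the typical loop over the files named in
# sys.argv[1:] (or sys.stdin). A hedged example based on that docstring:
import fileinput

# '-' or an empty argument list means sys.stdin; encoding requires 3.10+.
for line in fileinput.input(encoding="utf-8"):
    print(f"{fileinput.filename()}:{fileinput.filelineno()}: {line}", end="")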
@overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -150,14 +76,7 @@ else: *, mode: _TextMode = "r", openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, - ) -> FileInput[str]: - """Return an instance of the FileInput class, which can be iterated. - - The parameters are passed to the constructor of the FileInput class. - The returned instance, in addition to being an iterator, - keeps global state for the functions of this module,. - """ - + ) -> FileInput[str]: ... @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -177,71 +96,16 @@ else: openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... -def close() -> None: - """Close the sequence.""" - -def nextfile() -> None: - """ - Close the current file so that the next iteration will read the first - line from the next file (if any); lines not read from the file will - not count towards the cumulative line count. The filename is not - changed until after the first line of the next file has been read. - Before the first line has been read, this function has no effect; - it cannot be used to skip the first file. After the last line of the - last file has been read, this function has no effect. - """ - -def filename() -> str: - """ - Return the name of the file currently being read. - Before the first line has been read, returns None. - """ - -def lineno() -> int: - """ - Return the cumulative line number of the line that has just been read. - Before the first line has been read, returns 0. After the last line - of the last file has been read, returns the line number of that line. - """ - -def filelineno() -> int: - """ - Return the line number in the current file. Before the first line - has been read, returns 0. After the last line of the last file has - been read, returns the line number of that line within the file. - """ - -def fileno() -> int: - """ - Return the file number of the current file. When no file is currently - opened, returns -1. - """ - -def isfirstline() -> bool: - """ - Returns true the line just read is the first line of its file, - otherwise returns false. - """ - -def isstdin() -> bool: - """ - Returns true if the last line was read from sys.stdin, - otherwise returns false. - """ +def close() -> None: ... +def nextfile() -> None: ... +def filename() -> str: ... +def lineno() -> int: ... +def filelineno() -> int: ... +def fileno() -> int: ... +def isfirstline() -> bool: ... +def isstdin() -> bool: ... class FileInput(Generic[AnyStr]): - """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None) - - Class FileInput is the implementation of the module; its methods - filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(), - nextfile() and close() correspond to the functions of the same name - in the module. - In addition it has a readline() method which returns the next - input line, and a __getitem__() method which implements the - sequence behavior. The sequence must be accessed in strictly - sequential order; random access and readline() cannot be mixed. - """ - if sys.version_info >= (3, 10): # encoding and errors are added @overload @@ -333,11 +197,7 @@ class FileInput(Generic[AnyStr]): def fileno(self) -> int: ... def isfirstline(self) -> bool: ... def isstdin(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 10): def hook_compressed( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi index bea649b871061..345c4576497de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi @@ -1,15 +1,3 @@ -"""Filename matching with shell patterns. - -fnmatch(FILENAME, PATTERN) matches according to the local convention. -fnmatchcase(FILENAME, PATTERN) always takes case in account. - -The functions operate by translating the pattern into a regular -expression. They cache the compiled regular expressions for speed. - -The function translate(PATTERN) returns a regular expression -corresponding to PATTERN. (It does not compile it.) -""" - import sys from collections.abc import Iterable from typing import AnyStr @@ -18,38 +6,10 @@ __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] if sys.version_info >= (3, 14): __all__ += ["filterfalse"] -def fnmatch(name: AnyStr, pat: AnyStr) -> bool: - """Test whether FILENAME matches PATTERN. - - Patterns are Unix shell style: - - * matches everything - ? matches any single character - [seq] matches any character in seq - [!seq] matches any char not in seq - - An initial period in FILENAME is not special. - Both FILENAME and PATTERN are first case-normalized - if the operating system requires it. - If you don't want this, use fnmatchcase(FILENAME, PATTERN). - """ - -def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: - """Test whether FILENAME matches PATTERN, including case. - - This is a version of fnmatch() which doesn't case-normalize - its arguments. - """ - -def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: - """Construct a list from those elements of the iterable NAMES that match PAT.""" - -def translate(pat: str) -> str: - """Translate a shell PATTERN to a regular expression. - - There is no way to quote meta-characters. - """ +def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... +def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... +def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... +def translate(pat: str) -> str: ... if sys.version_info >= (3, 14): - def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: - """Construct a list from those elements of the iterable NAMES that do not match PAT.""" + def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi index 56176d6dea426..05c3c8b3dd41c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi @@ -1,23 +1,3 @@ -"""Generic output formatting. - -Formatter objects transform an abstract flow of formatting events into -specific output events on writer objects. Formatters manage several stack -structures to allow various properties of a writer object to be changed and -restored; writers need not be able to handle relative changes nor any sort -of ``change back'' operation. Specific writer properties which may be -controlled via formatter objects are horizontal alignment, font, and left -margin indentations. A mechanism is provided which supports providing -arbitrary, non-exclusive style settings to a writer as well. 
Additional -interfaces facilitate formatting events which are not reversible, such as -paragraph separation. - -Writer objects encapsulate device interfaces. Abstract devices, such as -file formats, are supported as well as physical devices. The provided -implementations all work with abstract devices. The interface makes -available mechanisms for setting the properties which formatter objects -manage and inserting data into the output. -""" - from collections.abc import Iterable from typing import IO, Any from typing_extensions import TypeAlias @@ -27,16 +7,6 @@ _FontType: TypeAlias = tuple[str, bool, bool, bool] _StylesType: TypeAlias = tuple[Any, ...] class NullFormatter: - """A formatter which does nothing. - - If the writer parameter is omitted, a NullWriter instance is created. - No methods of the writer are called by NullFormatter instances. - - Implementations should inherit from this class if implementing a writer - interface but don't need to inherit any implementation. - - """ - writer: NullWriter | None def __init__(self, writer: NullWriter | None = None) -> None: ... def end_paragraph(self, blankline: int) -> None: ... @@ -58,14 +28,6 @@ class NullFormatter: def assert_line_data(self, flag: int = 1) -> None: ... class AbstractFormatter: - """The standard formatter. - - This implementation has demonstrated wide applicability to many writers, - and may be used directly in most circumstances. It has been used to - implement a full-featured World Wide Web browser. - - """ - writer: NullWriter align: str | None align_stack: list[str | None] @@ -102,14 +64,6 @@ class AbstractFormatter: def assert_line_data(self, flag: int = 1) -> None: ... class NullWriter: - """Minimal writer interface to use in testing & inheritance. - - A writer which only provides the interface definition; no actions are - taken on any methods. This should be the base class for all writers - which do not need to inherit any implementation methods. - - """ - def flush(self) -> None: ... def new_alignment(self, align: str | None) -> None: ... def new_font(self, font: _FontType) -> None: ... @@ -123,23 +77,9 @@ class NullWriter: def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... -class AbstractWriter(NullWriter): - """A writer which can be used in debugging formatters, but not much else. - - Each method simply announces itself by printing its name and - arguments on standard output. - - """ +class AbstractWriter(NullWriter): ... class DumbWriter(NullWriter): - """Simple writer class which writes output on the file object passed in - as the file parameter or, if file is omitted, on standard output. The - output is simply word-wrapped to the number of columns specified by - the maxcol parameter. This class is suitable for reflowing a sequence - of paragraphs. - - """ - file: IO[str] maxcol: int def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi index 07e88ca468ca9..ef4066aa65b52 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi @@ -1,5 +1,3 @@ -"""Fraction, infinite-precision, rational numbers.""" - import sys from collections.abc import Callable from decimal import Decimal @@ -16,349 +14,154 @@ class _ConvertibleToIntegerRatio(Protocol): def as_integer_ratio(self) -> tuple[int | Rational, int | Rational]: ... 
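# Illustrative sketch, not part of the typeshed sync: the fnmatch docstrings
# removed above describe shell-style patterns (*, ?, [seq], [!seq]),
# fnmatchcase(), filter() and translate(). A hedged example:
import fnmatch
import re

names = ["data.csv", "data.txt", "Data.TXT"]
assert fnmatch.filter(names, "*.txt")                    # pattern-matching subset
assert fnmatch.fnmatchcase("data.txt", "*.txt")          # always case-sensitive
assert not fnmatch.fnmatchcase("Data.TXT", "*.txt")
assert re.match(fnmatch.translate("*.txt"), "data.txt")  # translate() returns a regex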
class Fraction(Rational): - """This class implements rational numbers. - - In the two-argument form of the constructor, Fraction(8, 6) will - produce a rational number equivalent to 4/3. Both arguments must - be Rational. The numerator defaults to 0 and the denominator - defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. - - Fractions can also be constructed from: - - - numeric strings similar to those accepted by the - float constructor (for example, '-2.3' or '1e10') - - - strings of the form '123/456' - - - float and Decimal instances - - - other Rational instances (including integers) - - """ - __slots__ = ("_numerator", "_denominator") @overload - def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: - """Constructs a Rational. - - Takes a string like '3/2' or '1.5', another Rational instance, a - numerator/denominator pair, or a float. - - Examples - -------- - - >>> Fraction(10, -8) - Fraction(-5, 4) - >>> Fraction(Fraction(1, 7), 5) - Fraction(1, 35) - >>> Fraction(Fraction(1, 7), Fraction(2, 3)) - Fraction(3, 14) - >>> Fraction('314') - Fraction(314, 1) - >>> Fraction('-35/4') - Fraction(-35, 4) - >>> Fraction('3.1415') # conversion from numeric string - Fraction(6283, 2000) - >>> Fraction('-47e-2') # string may include a decimal exponent - Fraction(-47, 100) - >>> Fraction(1.47) # direct construction from float (exact conversion) - Fraction(6620291452234629, 4503599627370496) - >>> Fraction(2.25) - Fraction(9, 4) - >>> Fraction(Decimal('1.47')) - Fraction(147, 100) - - """ - + def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: ... @overload def __new__(cls, numerator: float | Decimal | str) -> Self: ... if sys.version_info >= (3, 14): @overload - def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: - """Constructs a Rational. - - Takes a string like '3/2' or '1.5', another Rational instance, a - numerator/denominator pair, or a float. - - Examples - -------- - - >>> Fraction(10, -8) - Fraction(-5, 4) - >>> Fraction(Fraction(1, 7), 5) - Fraction(1, 35) - >>> Fraction(Fraction(1, 7), Fraction(2, 3)) - Fraction(3, 14) - >>> Fraction('314') - Fraction(314, 1) - >>> Fraction('-35/4') - Fraction(-35, 4) - >>> Fraction('3.1415') # conversion from numeric string - Fraction(6283, 2000) - >>> Fraction('-47e-2') # string may include a decimal exponent - Fraction(-47, 100) - >>> Fraction(1.47) # direct construction from float (exact conversion) - Fraction(6620291452234629, 4503599627370496) - >>> Fraction(2.25) - Fraction(9, 4) - >>> Fraction(Decimal('1.47')) - Fraction(147, 100) - - """ + def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: ... @classmethod - def from_float(cls, f: float) -> Self: - """Converts a finite float to a rational number, exactly. - - Beware that Fraction.from_float(0.3) != Fraction(3, 10). - - """ - + def from_float(cls, f: float) -> Self: ... @classmethod - def from_decimal(cls, dec: Decimal) -> Self: - """Converts a finite Decimal instance to a rational number, exactly.""" - - def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: - """Closest Fraction to self with denominator at most max_denominator. 
- - >>> Fraction('3.141592653589793').limit_denominator(10) - Fraction(22, 7) - >>> Fraction('3.141592653589793').limit_denominator(100) - Fraction(311, 99) - >>> Fraction(4321, 8765).limit_denominator(10000) - Fraction(4321, 8765) - - """ - - def as_integer_ratio(self) -> tuple[int, int]: - """Return a pair of integers, whose ratio is equal to the original Fraction. - - The ratio is in lowest terms and has a positive denominator. - """ + def from_decimal(cls, dec: Decimal) -> Self: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... + def as_integer_ratio(self) -> tuple[int, int]: ... if sys.version_info >= (3, 12): - def is_integer(self) -> bool: - """Return True if the Fraction is an integer.""" + def is_integer(self) -> bool: ... @property def numerator(a) -> int: ... @property def denominator(a) -> int: ... @overload - def __add__(a, b: int | Fraction) -> Fraction: - """a + b""" - + def __add__(a, b: int | Fraction) -> Fraction: ... @overload def __add__(a, b: float) -> float: ... @overload def __add__(a, b: complex) -> complex: ... @overload - def __radd__(b, a: int | Fraction) -> Fraction: - """a + b""" - + def __radd__(b, a: int | Fraction) -> Fraction: ... @overload def __radd__(b, a: float) -> float: ... @overload def __radd__(b, a: complex) -> complex: ... @overload - def __sub__(a, b: int | Fraction) -> Fraction: - """a - b""" - + def __sub__(a, b: int | Fraction) -> Fraction: ... @overload def __sub__(a, b: float) -> float: ... @overload def __sub__(a, b: complex) -> complex: ... @overload - def __rsub__(b, a: int | Fraction) -> Fraction: - """a - b""" - + def __rsub__(b, a: int | Fraction) -> Fraction: ... @overload def __rsub__(b, a: float) -> float: ... @overload def __rsub__(b, a: complex) -> complex: ... @overload - def __mul__(a, b: int | Fraction) -> Fraction: - """a * b""" - + def __mul__(a, b: int | Fraction) -> Fraction: ... @overload def __mul__(a, b: float) -> float: ... @overload def __mul__(a, b: complex) -> complex: ... @overload - def __rmul__(b, a: int | Fraction) -> Fraction: - """a * b""" - + def __rmul__(b, a: int | Fraction) -> Fraction: ... @overload def __rmul__(b, a: float) -> float: ... @overload def __rmul__(b, a: complex) -> complex: ... @overload - def __truediv__(a, b: int | Fraction) -> Fraction: - """a / b""" - + def __truediv__(a, b: int | Fraction) -> Fraction: ... @overload def __truediv__(a, b: float) -> float: ... @overload def __truediv__(a, b: complex) -> complex: ... @overload - def __rtruediv__(b, a: int | Fraction) -> Fraction: - """a / b""" - + def __rtruediv__(b, a: int | Fraction) -> Fraction: ... @overload def __rtruediv__(b, a: float) -> float: ... @overload def __rtruediv__(b, a: complex) -> complex: ... @overload - def __floordiv__(a, b: int | Fraction) -> int: - """a // b""" - + def __floordiv__(a, b: int | Fraction) -> int: ... @overload def __floordiv__(a, b: float) -> float: ... @overload - def __rfloordiv__(b, a: int | Fraction) -> int: - """a // b""" - + def __rfloordiv__(b, a: int | Fraction) -> int: ... @overload def __rfloordiv__(b, a: float) -> float: ... @overload - def __mod__(a, b: int | Fraction) -> Fraction: - """a % b""" - + def __mod__(a, b: int | Fraction) -> Fraction: ... @overload def __mod__(a, b: float) -> float: ... @overload - def __rmod__(b, a: int | Fraction) -> Fraction: - """a % b""" - + def __rmod__(b, a: int | Fraction) -> Fraction: ... @overload def __rmod__(b, a: float) -> float: ... 
@overload - def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: - """(a // b, a % b)""" - + def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... @overload def __divmod__(a, b: float) -> tuple[float, Fraction]: ... @overload - def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: - """(a // b, a % b)""" - + def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... @overload def __rdivmod__(a, b: float) -> tuple[float, Fraction]: ... if sys.version_info >= (3, 14): @overload - def __pow__(a, b: int, modulo: None = None) -> Fraction: - """a ** b - - If b is not an integer, the result will be a float or complex - since roots are generally irrational. If b is an integer, the - result will be rational. - - """ - + def __pow__(a, b: int, modulo: None = None) -> Fraction: ... @overload def __pow__(a, b: float | Fraction, modulo: None = None) -> float: ... @overload def __pow__(a, b: complex, modulo: None = None) -> complex: ... else: @overload - def __pow__(a, b: int) -> Fraction: - """a ** b - - If b is not an integer, the result will be a float or complex - since roots are generally irrational. If b is an integer, the - result will be rational. - - """ - + def __pow__(a, b: int) -> Fraction: ... @overload def __pow__(a, b: float | Fraction) -> float: ... @overload def __pow__(a, b: complex) -> complex: ... if sys.version_info >= (3, 14): @overload - def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: - """a ** b""" - + def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: ... @overload def __rpow__(b, a: complex, modulo: None = None) -> complex: ... else: @overload - def __rpow__(b, a: float | Fraction) -> float: - """a ** b""" - + def __rpow__(b, a: float | Fraction) -> float: ... @overload def __rpow__(b, a: complex) -> complex: ... - def __pos__(a) -> Fraction: - """+a: Coerces a subclass instance to Fraction""" - - def __neg__(a) -> Fraction: - """-a""" - - def __abs__(a) -> Fraction: - """abs(a)""" - - def __trunc__(a) -> int: - """math.trunc(a)""" - - def __floor__(a) -> int: - """math.floor(a)""" - - def __ceil__(a) -> int: - """math.ceil(a)""" - + def __pos__(a) -> Fraction: ... + def __neg__(a) -> Fraction: ... + def __abs__(a) -> Fraction: ... + def __trunc__(a) -> int: ... + def __floor__(a) -> int: ... + def __ceil__(a) -> int: ... @overload - def __round__(self, ndigits: None = None) -> int: - """round(self, ndigits) - - Rounds half toward even. - """ - + def __round__(self, ndigits: None = None) -> int: ... @overload def __round__(self, ndigits: int) -> Fraction: ... - def __hash__(self) -> int: # type: ignore[override] - """hash(self)""" - - def __eq__(a, b: object) -> bool: - """a == b""" - - def __lt__(a, b: _ComparableNum) -> bool: - """a < b""" - - def __gt__(a, b: _ComparableNum) -> bool: - """a > b""" - - def __le__(a, b: _ComparableNum) -> bool: - """a <= b""" - - def __ge__(a, b: _ComparableNum) -> bool: - """a >= b""" - - def __bool__(a) -> bool: - """a != 0""" - + def __hash__(self) -> int: ... # type: ignore[override] + def __eq__(a, b: object) -> bool: ... + def __lt__(a, b: _ComparableNum) -> bool: ... + def __gt__(a, b: _ComparableNum) -> bool: ... + def __le__(a, b: _ComparableNum) -> bool: ... + def __ge__(a, b: _ComparableNum) -> bool: ... + def __bool__(a) -> bool: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): - def __int__(a, _index: Callable[[SupportsIndex], int] = ...) 
-> int: - """int(a)""" + def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... # Not actually defined within fractions.py, but provides more useful # overrides @property - def real(self) -> Fraction: - """Real numbers are their real component.""" - + def real(self) -> Fraction: ... @property - def imag(self) -> Literal[0]: - """Real numbers have no imaginary component.""" - - def conjugate(self) -> Fraction: - """Conjugate is a no-op for Reals.""" + def imag(self) -> Literal[0]: ... + def conjugate(self) -> Fraction: ... if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: - """Converts a finite real number to a rational number, exactly. - - Beware that Fraction.from_number(0.3) != Fraction(3, 10). - - """ + def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi index 229f9cce34675..44bc2165fe0e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi @@ -1,33 +1,3 @@ -"""An FTP client class and some helper functions. - -Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds - -Example: - ->>> from ftplib import FTP ->>> ftp = FTP('ftp.python.org') # connect to host, default port ->>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@ -'230 Guest login ok, access restrictions apply.' ->>> ftp.retrlines('LIST') # list directory contents -total 9 -drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . -drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. -drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin -drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc -d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming -drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib -drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub -drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr --rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg -'226 Transfer complete.' ->>> ftp.quit() -'221 Goodbye.' ->>> - -A nice test that reveals some of the network dialogue would be: -python ftplib.py -d localhost -l -p -l -""" - import sys from _typeshed import SupportsRead, SupportsReadline from collections.abc import Callable, Iterable, Iterator @@ -54,29 +24,6 @@ class error_proto(Error): ... all_errors: tuple[type[Exception], ...] class FTP: - """An FTP client class. - - To create a connection, call the class using these arguments: - host, user, passwd, acct, timeout, source_address, encoding - - The first four arguments are all strings, and have default value ''. - The parameter ´timeout´ must be numeric and defaults to None if not - passed, meaning that no timeout will be set on any ftp socket(s). - If a timeout is passed, then this is now the default timeout for all ftp - socket operations for this instance. - The last parameter is the encoding of filenames, which defaults to utf-8. - - Then use self.connect() with optional host and port argument. - - To download a file, use ftp.retrlines('RETR ' + filename), - or ftp.retrbinary() with slightly different arguments. - To upload a file, use ftp.storlines() or ftp.storbinary(), - which have an open file as argument (see their definitions - below for details). - The download/upload functions first issue appropriate TYPE - and PORT or PASV commands. 
- """ - debugging: int host: str port: int @@ -104,124 +51,35 @@ class FTP: source_address: tuple[str, int] | None = None, *, encoding: str = "utf-8", - ) -> None: - """Initialization method (called by class instantiation). - Initialize host to localhost, port to standard ftp port. - Optional arguments are host (for connect()), - and user, passwd, acct (for login()). - """ - - def connect(self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None) -> str: - """Connect to host. Arguments are: - - host: hostname to connect to (string, default previous host) - - port: port to connect to (integer, default previous port) - - timeout: the timeout to set against the ftp socket(s) - - source_address: a 2-tuple (host, port) for the socket to bind - to as its source address before connecting. - """ - - def getwelcome(self) -> str: - """Get the welcome message from the server. - (this is read and squirreled away by connect()) - """ - - def set_debuglevel(self, level: int) -> None: - """Set the debugging level. - The required argument level means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - - def debug(self, level: int) -> None: - """Set the debugging level. - The required argument level means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - - def set_pasv(self, val: bool | Literal[0, 1]) -> None: - """Use passive or active mode for data transfers. - With a false argument, use the normal PORT mode, - With a true argument, use the PASV command. - """ - + ) -> None: ... + def connect( + self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None + ) -> str: ... + def getwelcome(self) -> str: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def set_pasv(self, val: bool | Literal[0, 1]) -> None: ... def sanitize(self, s: str) -> str: ... def putline(self, line: str) -> None: ... def putcmd(self, line: str) -> None: ... def getline(self) -> str: ... def getmultiline(self) -> str: ... def getresp(self) -> str: ... - def voidresp(self) -> str: - """Expect a response beginning with '2'.""" - - def abort(self) -> str: - """Abort a file transfer. Uses out-of-band data. - This does not follow the procedure from the RFC to send Telnet - IP and Synch; that doesn't seem to work with the servers I've - tried. Instead, just send the ABOR command as OOB data. - """ - - def sendcmd(self, cmd: str) -> str: - """Send a command and return the response.""" - - def voidcmd(self, cmd: str) -> str: - """Send a command and expect a response beginning with '2'.""" - - def sendport(self, host: str, port: int) -> str: - """Send a PORT command with the current host and the given - port number. - """ - - def sendeprt(self, host: str, port: int) -> str: - """Send an EPRT command with the current host and the given port number.""" - - def makeport(self) -> socket: - """Create a new socket and send a PORT command for it.""" - - def makepasv(self) -> tuple[str, int]: - """Internal: Does the PASV or EPSV handshake -> (address, port)""" - - def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: - """Login, default anonymous.""" + def voidresp(self) -> str: ... + def abort(self) -> str: ... + def sendcmd(self, cmd: str) -> str: ... 
+ def voidcmd(self, cmd: str) -> str: ... + def sendport(self, host: str, port: int) -> str: ... + def sendeprt(self, host: str, port: int) -> str: ... + def makeport(self) -> socket: ... + def makepasv(self) -> tuple[str, int]: ... + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers - def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: - """Initiate a transfer over the data connection. - - If the transfer is active, send a port command and the - transfer command, and accept the connection. If the server is - passive, send a pasv command, connect to it, and start the - transfer command. Either way, return the socket for the - connection and the expected size of the transfer. The - expected size may be None if it could not be determined. - - Optional 'rest' argument can be a string that is sent as the - argument to a REST command. This is essentially a server - marker used to tell the server to skip over any data up to the - given marker. - """ - - def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: - """Like ntransfercmd() but returns only the socket.""" - + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ... + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... def retrbinary( self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None - ) -> str: - """Retrieve data in binary mode. A new port is created for you. - - Args: - cmd: A RETR command. - callback: A single parameter callable to be called on each - block of data read. - blocksize: The maximum number of bytes to read from the - socket at one time. [default: 8192] - rest: Passed to transfercmd(). [default: None] - - Returns: - The response code. - """ - + ) -> str: ... def storbinary( self, cmd: str, @@ -229,136 +87,25 @@ class FTP: blocksize: int = 8192, callback: Callable[[bytes], object] | None = None, rest: int | str | None = None, - ) -> str: - """Store a file in binary mode. A new port is created for you. - - Args: - cmd: A STOR command. - fp: A file-like object with a read(num_bytes) method. - blocksize: The maximum data size to read from fp and send over - the connection at once. [default: 8192] - callback: An optional single parameter callable that is called on - each block of data after it is sent. [default: None] - rest: Passed to transfercmd(). [default: None] - - Returns: - The response code. - """ - - def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: - """Retrieve data in line mode. A new port is created for you. - - Args: - cmd: A RETR, LIST, or NLST command. - callback: An optional single parameter callable that is called - for each line with the trailing CRLF stripped. - [default: print_line()] - - Returns: - The response code. - """ - - def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: - """Store a file in line mode. A new port is created for you. - - Args: - cmd: A STOR command. - fp: A file-like object with a readline() method. - callback: An optional single parameter callable that is called on - each line after it is sent. [default: None] - - Returns: - The response code. 
- """ - - def acct(self, password: str) -> str: - """Send new account name.""" - - def nlst(self, *args: str) -> list[str]: - """Return a list of files in a given directory (default the current).""" + ) -> str: ... + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... + def acct(self, password: str) -> str: ... + def nlst(self, *args: str) -> list[str]: ... # Technically only the last arg can be a Callable but ... - def dir(self, *args: str | Callable[[str], object]) -> None: - """List a directory in long form. - By default list current directory to stdout. - Optional last argument is callback function; all - non-empty arguments before it are concatenated to the - LIST command. (This *should* only be used for a pathname.) - """ - - def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: - """List a directory in a standardized format by using MLSD - command (RFC-3659). If path is omitted the current directory - is assumed. "facts" is a list of strings representing the type - of information desired (e.g. ["type", "size", "perm"]). - - Return a generator object yielding a tuple of two elements - for every file found in path. - First element is the file name, the second one is a dictionary - including a variable number of "facts" depending on the server - and whether "facts" argument has been provided. - """ - - def rename(self, fromname: str, toname: str) -> str: - """Rename a file.""" - - def delete(self, filename: str) -> str: - """Delete a file.""" - - def cwd(self, dirname: str) -> str: - """Change to a directory.""" - - def size(self, filename: str) -> int | None: - """Retrieve the size of a file.""" - - def mkd(self, dirname: str) -> str: - """Make a directory, return its full pathname.""" - - def rmd(self, dirname: str) -> str: - """Remove a directory.""" - - def pwd(self) -> str: - """Return current working directory.""" - - def quit(self) -> str: - """Quit, and close the connection.""" - - def close(self) -> None: - """Close the connection without assuming anything about it.""" + def dir(self, *args: str | Callable[[str], object]) -> None: ... + def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: ... + def rename(self, fromname: str, toname: str) -> str: ... + def delete(self, filename: str) -> str: ... + def cwd(self, dirname: str) -> str: ... + def size(self, filename: str) -> int | None: ... + def mkd(self, dirname: str) -> str: ... + def rmd(self, dirname: str) -> str: ... + def pwd(self) -> str: ... + def quit(self) -> str: ... + def close(self) -> None: ... class FTP_TLS(FTP): - """A FTP subclass which adds TLS support to FTP as described - in RFC-4217. - - Connect as usual to port 21 implicitly securing the FTP control - connection before authenticating. - - Securing the data connection requires user to explicitly ask - for it by calling prot_p() method. - - Usage example: - >>> from ftplib import FTP_TLS - >>> ftps = FTP_TLS('ftp.python.org') - >>> ftps.login() # login anonymously previously securing control channel - '230 Guest login ok, access restrictions apply.' - >>> ftps.prot_p() # switch to secure data connection - '200 Protection level set to P' - >>> ftps.retrlines('LIST') # list directory content securely - total 9 - drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . - drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. 
- drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin - drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc - d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming - drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib - drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub - drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr - -rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg - '226 Transfer complete.' - >>> ftps.quit() - '221 Goodbye.' - >>> - """ - if sys.version_info >= (3, 12): def __init__( self, @@ -392,41 +139,15 @@ class FTP_TLS(FTP): certfile: str | None context: SSLContext def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... - def auth(self) -> str: - """Set up secure control connection by using TLS/SSL.""" - - def prot_p(self) -> str: - """Set up secure data connection.""" - - def prot_c(self) -> str: - """Set up clear text data connection.""" - - def ccc(self) -> str: - """Switch back to a clear-text control connection.""" - -def parse150(resp: str) -> int | None: # undocumented - """Parse the '150' response for a RETR request. - Returns the expected transfer size or None; size is not guaranteed to - be present in the 150 message. - """ - -def parse227(resp: str) -> tuple[str, int]: # undocumented - """Parse the '227' response for a PASV request. - Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)' - Return ('host.addr.as.numbers', port#) tuple. - """ - -def parse229(resp: str, peer: Any) -> tuple[str, int]: # undocumented - """Parse the '229' response for an EPSV request. - Raises error_proto if it does not contain '(|||port|)' - Return ('host.addr.as.numbers', port#) tuple. - """ - -def parse257(resp: str) -> str: # undocumented - """Parse the '257' response for a MKD or PWD request. - This is a response to a MKD or PWD request: a directory name. - Returns the directoryname in the 257 reply. - """ - -def ftpcp(source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I") -> None: # undocumented - """Copy file from one FTP-instance to another.""" + def auth(self) -> str: ... + def prot_p(self) -> str: ... + def prot_c(self) -> str: ... + def ccc(self) -> str: ... + +def parse150(resp: str) -> int | None: ... # undocumented +def parse227(resp: str) -> tuple[str, int]: ... # undocumented +def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented +def parse257(resp: str) -> str: ... # undocumented +def ftpcp( + source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" +) -> None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi index cd65e31ae8d8f..47baf917294da 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi @@ -1,5 +1,3 @@ -"""functools.py - Tools for working with functions and callable objects""" - import sys import types from _typeshed import SupportsAllComparisons, SupportsItems @@ -35,47 +33,16 @@ _RWrapper = TypeVar("_RWrapper") if sys.version_info >= (3, 14): @overload - def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: - """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. - - This effectively reduces the iterable to a single value. If initial is present, - it is placed before the items of the iterable in the calculation, and serves as - a default when the iterable is empty. 
- - For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) - calculates ((((1 + 2) + 3) + 4) + 5). - """ + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: ... else: @overload - def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: - """reduce(function, iterable[, initial], /) -> value - - Apply a function of two arguments cumulatively to the items of an iterable, from left to right. - - This effectively reduces the iterable to a single value. If initial is present, - it is placed before the items of the iterable in the calculation, and serves as - a default when the iterable is empty. - - For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) - calculates ((((1 + 2) + 3) + 4) + 5). - """ + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: ... @overload -def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: - """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. - - This effectively reduces the iterable to a single value. If initial is present, - it is placed before the items of the iterable in the calculation, and serves as - a default when the iterable is empty. - - For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) - calculates ((((1 + 2) + 3) + 4) + 5). - """ +def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: ... class _CacheInfo(NamedTuple): - """CacheInfo(hits, misses, maxsize, currsize)""" - hits: int misses: int maxsize: int | None @@ -88,57 +55,16 @@ class _CacheParameters(TypedDict): @final class _lru_cache_wrapper(Generic[_T]): - """Create a cached callable that wraps another function. - - user_function: the function being cached - - maxsize: 0 for no caching - None for unlimited cache size - n for a bounded cache - - typed: False cache f(3) and f(3.0) as identical calls - True cache f(3) and f(3.0) as distinct calls - - cache_info_type: namedtuple class with the fields: - hits misses currsize maxsize - """ - __wrapped__: Callable[..., _T] - def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: - """Call self as a function.""" - - def cache_info(self) -> _CacheInfo: - """Report cache statistics""" - - def cache_clear(self) -> None: - """Clear the cache and cache statistics""" - + def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... + def cache_info(self) -> _CacheInfo: ... + def cache_clear(self) -> None: ... def cache_parameters(self) -> _CacheParameters: ... def __copy__(self) -> _lru_cache_wrapper[_T]: ... def __deepcopy__(self, memo: Any, /) -> _lru_cache_wrapper[_T]: ... @overload -def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: - """Least-recently-used cache decorator. - - If *maxsize* is set to None, the LRU features are disabled and the cache - can grow without bound. - - If *typed* is True, arguments of different types will be cached separately. - For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as - distinct calls with distinct results. Some types such as str and int may - be cached separately even when typed is false. - - Arguments to the cached function must be hashable. - - View the cache statistics named tuple (hits, misses, maxsize, currsize) - with f.cache_info(). Clear the cache and statistics with f.cache_clear(). - Access the underlying function with f.__wrapped__. 
- - See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU) - - """ - +def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... @overload def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... @@ -189,32 +115,12 @@ if sys.version_info >= (3, 14): wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: - """Update a wrapper function to look like the wrapped function - - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) - """ - + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: - """Decorator factory to apply update_wrapper() to a wrapper function - - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). - """ + ) -> _Wrapper[_PWrapped, _RWrapped]: ... elif sys.version_info >= (3, 12): def update_wrapper( @@ -222,32 +128,12 @@ elif sys.version_info >= (3, 12): wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: - """Update a wrapper function to look like the wrapped function - - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) - """ - + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: - """Decorator factory to apply update_wrapper() to a wrapper function - - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). - """ + ) -> _Wrapper[_PWrapped, _RWrapped]: ... 
else: def update_wrapper( @@ -255,79 +141,31 @@ else: wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: - """Update a wrapper function to look like the wrapped function - - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) - """ - + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: - """Decorator factory to apply update_wrapper() to a wrapper function - - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). - """ - -def total_ordering(cls: type[_T]) -> type[_T]: - """Class decorator that fills in missing ordering methods""" - -def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: - """Convert a cmp= function into a key= function. - - mycmp - Function that compares two objects. - """ + ) -> _Wrapper[_PWrapped, _RWrapped]: ... +def total_ordering(cls: type[_T]) -> type[_T]: ... +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... @disjoint_base class partial(Generic[_T]): - """Create a new function with partial application of the given arguments - and keywords. - """ - @property - def func(self) -> Callable[..., _T]: - """function object to use in future partial calls""" - + def func(self) -> Callable[..., _T]: ... @property - def args(self) -> tuple[Any, ...]: - """tuple of arguments to future partial calls""" - + def args(self) -> tuple[Any, ...]: ... @property - def keywords(self) -> dict[str, Any]: - """dictionary of keyword arguments to future partial calls""" - + def keywords(self) -> dict[str, Any]: ... def __new__(cls, func: Callable[..., _T], /, *args: Any, **kwargs: Any) -> Self: ... - def __call__(self, /, *args: Any, **kwargs: Any) -> _T: - """Call self as a function.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor: TypeAlias = Any class partialmethod(Generic[_T]): - """Method descriptor with partial application of the given arguments - and keywords. - - Supports wrapping existing descriptors and handles non-descriptor - callables as instance methods. - """ - func: Callable[..., _T] | _Descriptor args: tuple[Any, ...] keywords: dict[str, Any] @@ -345,11 +183,7 @@ class partialmethod(Generic[_T]): def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... 
@property def __isabstractmethod__(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 11): _RegType: TypeAlias = type[Any] | types.UnionType @@ -374,35 +208,16 @@ class _SingleDispatchCallable(Generic[_T]): def _clear_cache(self) -> None: ... def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... -def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: - """Single-dispatch generic function decorator. - - Transforms a function into a generic function, which can have different - behaviours depending upon the type of its first argument. The decorated - function acts as the default implementation, and additional - implementations can be registered using the register() attribute of the - generic function. - """ +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... class singledispatchmethod(Generic[_T]): - """Single-dispatch generic method descriptor. - - Supports wrapping existing descriptors and handles non-descriptor - callables as instance methods. - """ - dispatcher: _SingleDispatchCallable[_T] func: Callable[..., _T] def __init__(self, func: Callable[..., _T]) -> None: ... @property def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: - """generic_method.register(cls, func) -> func - - Registers a new implementation for the given *cls* on a *generic_method*. - """ - + def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload @@ -420,15 +235,9 @@ class cached_property(Generic[_T_co]): def __set_name__(self, owner: type[Any], name: str) -> None: ... # __set__ is not defined at runtime, but @cached_property is designed to be settable def __set__(self, instance: object, value: _T_co) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ - -def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: - """Simple lightweight unbounded cache. Sometimes called "memoize".""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... +def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: ... def _make_key( args: tuple[Hashable, ...], kwds: SupportsItems[Any, Any], @@ -438,25 +247,11 @@ def _make_key( tuple: type = ..., type: Any = ..., len: Callable[[Sized], int] = ..., -) -> Hashable: - """Make a cache key from optionally typed positional and keyword arguments - - The key is constructed in a way that is flat as possible rather than - as a nested structure that would take more memory. - - If there is only a single argument and its data type is known to cache - its hash value, then that argument is returned without a wrapper. This - saves space and improves lookup speed. - - """ +) -> Hashable: ... if sys.version_info >= (3, 14): @final - class _PlaceholderType: - """The type of the Placeholder singleton. - - Used as a placeholder for partial arguments. - """ + class _PlaceholderType: ... 
Placeholder: Final[_PlaceholderType] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi index 6b231fa7bc287..06fb6b47c2d1d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi @@ -1,25 +1,3 @@ -"""This module provides access to the garbage collector for reference cycles. - -enable() -- Enable automatic garbage collection. -disable() -- Disable automatic garbage collection. -isenabled() -- Returns true if automatic collection is enabled. -collect() -- Do a full collection right now. -get_count() -- Return the current collection counts. -get_stats() -- Return list of dictionaries containing per-generation stats. -set_debug() -- Set debugging flags. -get_debug() -- Get debugging flags. -set_threshold() -- Set the collection thresholds. -get_threshold() -- Return the current collection thresholds. -get_objects() -- Return a list of all objects tracked by the collector. -is_tracked() -- Returns true if a given object is tracked. -is_finalized() -- Returns true if a given object has been already finalized. -get_referrers() -- Return the list of objects that refer to an object. -get_referents() -- Return the list of objects that an object refers to. -freeze() -- Freeze all tracked objects and ignore them for future collections. -unfreeze() -- Unfreeze all objects in the permanent generation. -get_freeze_count() -- Return the number of objects in the permanent generation. -""" - from collections.abc import Callable from typing import Any, Final, Literal from typing_extensions import TypeAlias @@ -35,97 +13,21 @@ _CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], callbacks: list[_CallbackType] garbage: list[Any] -def collect(generation: int = 2) -> int: - """Run the garbage collector. - - With no arguments, run a full collection. The optional argument - may be an integer specifying which generation to collect. A ValueError - is raised if the generation number is invalid. - - The number of unreachable objects is returned. - """ - -def disable() -> None: - """Disable automatic garbage collection.""" - -def enable() -> None: - """Enable automatic garbage collection.""" - -def get_count() -> tuple[int, int, int]: - """Return a three-tuple of the current collection counts.""" - -def get_debug() -> int: - """Get the garbage collection debugging flags.""" - -def get_objects(generation: int | None = None) -> list[Any]: - """Return a list of objects tracked by the collector (excluding the list returned). - - generation - Generation to extract the objects from. - - If generation is not None, return only the objects tracked by the collector - that are in that generation. - """ - -def freeze() -> None: - """Freeze all current tracked objects and ignore them for future collections. - - This can be used before a POSIX fork() call to make the gc copy-on-write friendly. - Note: collection before a POSIX fork() call may free pages for future allocation - which can cause copy-on-write. - """ - -def unfreeze() -> None: - """Unfreeze all objects in the permanent generation. - - Put all objects in the permanent generation back into oldest generation. 
- """ - -def get_freeze_count() -> int: - """Return the number of objects in the permanent generation.""" - -def get_referents(*objs: Any) -> list[Any]: - """Return the list of objects that are directly referred to by 'objs'.""" - -def get_referrers(*objs: Any) -> list[Any]: - """Return the list of objects that directly refer to any of 'objs'.""" - -def get_stats() -> list[dict[str, Any]]: - """Return a list of dictionaries containing per-generation statistics.""" - -def get_threshold() -> tuple[int, int, int]: - """Return the current collection thresholds.""" - -def is_tracked(obj: Any, /) -> bool: - """Returns true if the object is tracked by the garbage collector. - - Simple atomic objects will return false. - """ - -def is_finalized(obj: Any, /) -> bool: - """Returns true if the object has been already finalized by the GC.""" - -def isenabled() -> bool: - """Returns true if automatic garbage collection is enabled.""" - -def set_debug(flags: int, /) -> None: - """Set the garbage collection debugging flags. - - flags - An integer that can have the following bits turned on: - DEBUG_STATS - Print statistics during collection. - DEBUG_COLLECTABLE - Print collectable objects found. - DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects - found. - DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them. - DEBUG_LEAK - Debug leaking programs (everything but STATS). - - Debugging information is written to sys.stderr. - """ - -def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: - """set_threshold(threshold0, [threshold1, [threshold2]]) - Set the collection thresholds (the collection frequency). - - Setting 'threshold0' to zero disables collection. - """ +def collect(generation: int = 2) -> int: ... +def disable() -> None: ... +def enable() -> None: ... +def get_count() -> tuple[int, int, int]: ... +def get_debug() -> int: ... +def get_objects(generation: int | None = None) -> list[Any]: ... +def freeze() -> None: ... +def unfreeze() -> None: ... +def get_freeze_count() -> int: ... +def get_referents(*objs: Any) -> list[Any]: ... +def get_referrers(*objs: Any) -> list[Any]: ... +def get_stats() -> list[dict[str, Any]]: ... +def get_threshold() -> tuple[int, int, int]: ... +def is_tracked(obj: Any, /) -> bool: ... +def is_finalized(obj: Any, /) -> bool: ... +def isenabled() -> bool: ... +def set_debug(flags: int, /) -> None: ... +def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi index af647a69b4e18..3caed77a661ac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi @@ -1,9 +1,3 @@ -""" -Path operations common to more than one OS -Do not use directly. The OS specific modules import the appropriate -functions from this module themselves. -""" - import os import sys from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT @@ -34,9 +28,7 @@ if sys.version_info >= (3, 13): # Iterable[T], so that list[T] | Literal[""] could be used as a return # type. But because this only works when T is str, we need Sequence[T] instead. 
@overload -def commonprefix(m: Sequence[LiteralString]) -> LiteralString: - """Given a list of pathnames, returns the longest common leading component""" - +def commonprefix(m: Sequence[LiteralString]) -> LiteralString: ... @overload def commonprefix(m: Sequence[StrPath]) -> str: ... @overload @@ -45,59 +37,27 @@ def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... @overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... -def exists(path: FileDescriptorOrPath) -> bool: - """Test whether a path exists. Returns False for broken symbolic links""" - -def getsize(filename: FileDescriptorOrPath) -> int: - """Return the size of a file, reported by os.stat().""" - -def isfile(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a regular file""" - -def isdir(s: FileDescriptorOrPath) -> bool: - """Return true if the pathname refers to an existing directory.""" +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... +def isdir(s: FileDescriptorOrPath) -> bool: ... if sys.version_info >= (3, 12): - def islink(path: StrOrBytesPath) -> bool: - """Test whether a path is a symbolic link""" + def islink(path: StrOrBytesPath) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. -def getatime(filename: FileDescriptorOrPath) -> float: - """Return the last access time of a file, reported by os.stat().""" - -def getmtime(filename: FileDescriptorOrPath) -> float: - """Return the last modification time of a file, reported by os.stat().""" - -def getctime(filename: FileDescriptorOrPath) -> float: - """Return the metadata change time of a file, reported by os.stat().""" - -def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: - """Test whether two pathnames reference the same actual file or directory - - This is determined by the device number and i-node number and - raises an exception if an os.stat() call on either pathname fails. - """ - -def sameopenfile(fp1: int, fp2: int) -> bool: - """Test whether two open file objects reference the same file""" - -def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: - """Test whether two stat buffers reference the same file""" +def getatime(filename: FileDescriptorOrPath) -> float: ... +def getmtime(filename: FileDescriptorOrPath) -> float: ... +def getctime(filename: FileDescriptorOrPath) -> float: ... +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... +def sameopenfile(fp1: int, fp2: int) -> bool: ... +def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... if sys.version_info >= (3, 13): - def isjunction(path: StrOrBytesPath) -> bool: - """Test whether a path is a junction - Junctions are not supported on the current platform - """ - - def isdevdrive(path: StrOrBytesPath) -> bool: - """Determines whether the specified path is on a Windows Dev Drive. - Dev Drives are not supported on the current platform - """ - - def lexists(path: StrOrBytesPath) -> bool: - """Test whether a path exists. Returns True for broken symbolic links""" + def isjunction(path: StrOrBytesPath) -> bool: ... + def isdevdrive(path: StrOrBytesPath) -> bool: ... + def lexists(path: StrOrBytesPath) -> bool: ... 
# Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.4 _AllowMissingType = NewType("_AllowMissingType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi index a6e2a8f73dbc9..c15db8122cfcf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi @@ -1,19 +1,3 @@ -"""Parser for command line options. - -This module helps scripts to parse the command line arguments in -sys.argv. It supports the same conventions as the Unix getopt() -function (including the special meanings of arguments of the form '-' -and '--'). Long options similar to those supported by GNU software -may be used as well via an optional third argument. This module -provides two functions and an exception: - -getopt() -- Parse command line options -gnu_getopt() -- Like getopt(), but allow option and non-option arguments -to be intermixed. -GetoptError -- exception (class) raised with 'opt' attribute, which is the -option involved with the exception. -""" - from collections.abc import Iterable, Sequence from typing import Protocol, TypeVar, overload, type_check_only @@ -30,51 +14,10 @@ __all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] def getopt( args: _SliceableT[_StrSequenceT_co], shortopts: str, longopts: Iterable[str] | str = [] -) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: - """getopt(args, options[, long_options]) -> opts, args - - Parses command line options and parameter list. args is the - argument list to be parsed, without the leading reference to the - running program. Typically, this means "sys.argv[1:]". shortopts - is the string of option letters that the script wants to - recognize, with options that require an argument followed by a - colon and options that accept an optional argument followed by - two colons (i.e., the same format that Unix getopt() uses). If - specified, longopts is a list of strings with the names of the - long options which should be supported. The leading '--' - characters should not be included in the option name. Options - which require an argument should be followed by an equal sign - ('='). Options which accept an optional argument should be - followed by an equal sign and question mark ('=?'). - - The return value consists of two elements: the first is a list of - (option, value) pairs; the second is the list of program arguments - left after the option list was stripped (this is a trailing slice - of the first argument). Each option-and-value pair returned has - the option as its first element, prefixed with a hyphen (e.g., - '-x'), and the option argument as its second element, or an empty - string if the option has no argument. The options occur in the - list in the same order in which they were found, thus allowing - multiple occurrences. Long and short options may be mixed. - - """ - +) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: ... def gnu_getopt( args: Sequence[str], shortopts: str, longopts: Iterable[str] | str = [] -) -> tuple[list[tuple[str, str]], list[str]]: - """getopt(args, options[, long_options]) -> opts, args - - This function works like getopt(), except that GNU style scanning - mode is used by default. This means that option and non-option - arguments may be intermixed. The getopt() function stops - processing options as soon as a non-option argument is - encountered. 
- - If the first character of the option string is '+', or if the - environment variable POSIXLY_CORRECT is set, then option - processing stops as soon as a non-option argument is encountered. - - """ +) -> tuple[list[tuple[str, str]], list[str]]: ... class GetoptError(Exception): msg: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi index 3242fd5779a82..bb3013dfbf393 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi @@ -1,67 +1,14 @@ -"""Utilities to get a password and/or the current user name. - -getpass(prompt[, stream[, echo_char]]) - Prompt for a password, with echo -turned off and optional keyboard feedback. -getuser() - Get the user name from the environment or password database. - -GetPassWarning - This UserWarning is issued when getpass() cannot prevent - echoing of the password contents while reading. - -On Windows, the msvcrt module will be used. - -""" - import sys from typing import TextIO __all__ = ["getpass", "getuser", "GetPassWarning"] if sys.version_info >= (3, 14): - def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: - """Prompt for a password, with echo turned off. - - Args: - prompt: Written on stream to ask for the input. Default: 'Password: ' - stream: A writable file object to display the prompt. Defaults to - the tty. If no tty is available defaults to sys.stderr. - echo_char: A single ASCII character to mask input (e.g., '*'). - If None, input is hidden. - Returns: - The seKr3t input. - Raises: - EOFError: If our input tty or stdin was closed. - GetPassWarning: When we were unable to turn echo off on the input. - - Always restores terminal settings before returning. - """ + def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: ... else: - def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: - """Prompt for a password, with echo turned off. - - Args: - prompt: Written on stream to ask for the input. Default: 'Password: ' - stream: A writable file object to display the prompt. Defaults to - the tty. If no tty is available defaults to sys.stderr. - Returns: - The seKr3t input. - Raises: - EOFError: If our input tty or stdin was closed. - GetPassWarning: When we were unable to turn echo off on the input. - - Always restores terminal settings before returning. - """ - -def getuser() -> str: - """Get the username from the environment or password database. - - First try various environment variables, then the password - database. This works on Windows as long as USERNAME is set. - Any failure to find a username raises OSError. + def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... - .. versionchanged:: 3.13 - Previously, various exceptions beyond just :exc:`OSError` - were raised. - """ +def getuser() -> str: ... class GetPassWarning(UserWarning): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi index 9b61fdad980ab..e9ffd7a4a4a42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi @@ -1,15 +1,3 @@ -"""Internationalization and localization support. 
- -This module provides internationalization (I18N) and localization (L10N) -support for your Python programs by providing an interface to the GNU gettext -message catalog library. - -I18N refers to the operation by which a program is made aware of multiple -languages. L10N refers to the adaptation of your program, once -internationalized, to the local language and cultural habits. - -""" - import io import sys from _typeshed import StrPath @@ -198,7 +186,4 @@ if sys.version_info < (3, 11): Catalog = translation -def c2py(plural: str) -> Callable[[int], int]: - """Gets a C expression as used in PO files for plural forms and returns a - Python function that implements an equivalent expression. - """ +def c2py(plural: str) -> Callable[[int], int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi index d3a6642a4c2fc..942fd73961963 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi @@ -1,5 +1,3 @@ -"""Filename globbing utility.""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, Sequence @@ -33,21 +31,7 @@ if sys.version_info >= (3, 11): dir_fd: int | None = None, recursive: bool = False, include_hidden: bool = False, - ) -> list[AnyStr]: - """Return a list of paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. Unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns by default. - - If `include_hidden` is true, the patterns '*', '?', '**' will match hidden - directories. - - If `recursive` is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - + ) -> list[AnyStr]: ... def iglob( pathname: AnyStr, *, @@ -55,88 +39,24 @@ if sys.version_info >= (3, 11): dir_fd: int | None = None, recursive: bool = False, include_hidden: bool = False, - ) -> Iterator[AnyStr]: - """Return an iterator which yields the paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ + ) -> Iterator[AnyStr]: ... elif sys.version_info >= (3, 10): def glob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> list[AnyStr]: - """Return a list of paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - + ) -> list[AnyStr]: ... def iglob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> Iterator[AnyStr]: - """Return an iterator which yields the paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. 
- - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ + ) -> Iterator[AnyStr]: ... else: - def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: - """Return a list of paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - - def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: - """Return an iterator which yields the paths matching a pathname pattern. - - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. - - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ - -def escape(pathname: AnyStr) -> AnyStr: - """Escape all special characters.""" + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... +def escape(pathname: AnyStr) -> AnyStr: ... def has_magic(s: str | bytes) -> bool: ... # undocumented if sys.version_info >= (3, 13): - def translate(pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None) -> str: - """Translate a pathname with shell wildcards to a regular expression. - - If `recursive` is true, the pattern segment '**' will match any number of - path segments. - - If `include_hidden` is true, wildcards can match path segments beginning - with a dot ('.'). - - If a sequence of separator characters is given to `seps`, they will be - used to split the pattern into segments and match path separators. If not - given, os.path.sep and os.path.altsep (where available) are used. - """ + def translate( + pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None + ) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi index 793c8f0975775..1ca8cbe12b085 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi @@ -11,99 +11,18 @@ if sys.version_info >= (3, 11): from types import GenericAlias class TopologicalSorter(Generic[_T]): - """Provides functionality to topologically sort a graph of hashable nodes""" - @overload def __init__(self, graph: None = None) -> None: ... @overload def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... - def add(self, node: _T, *predecessors: _T) -> None: - """Add a new node and its predecessors to the graph. - - Both the *node* and all elements in *predecessors* must be hashable. - - If called multiple times with the same node argument, the set of dependencies - will be the union of all dependencies passed in. - - It is possible to add a node with no dependencies (*predecessors* is not provided) - as well as provide a dependency twice. If a node that has not been provided before - is included among *predecessors* it will be automatically added to the graph with - no predecessors of its own. - - Raises ValueError if called after "prepare". 
- """ - - def prepare(self) -> None: - """Mark the graph as finished and check for cycles in the graph. - - If any cycle is detected, "CycleError" will be raised, but "get_ready" can - still be used to obtain as many nodes as possible until cycles block more - progress. After a call to this function, the graph cannot be modified and - therefore no more nodes can be added using "add". - - Raise ValueError if nodes have already been passed out of the sorter. - - """ - - def is_active(self) -> bool: - """Return ``True`` if more progress can be made and ``False`` otherwise. - - Progress can be made if cycles do not block the resolution and either there - are still nodes ready that haven't yet been returned by "get_ready" or the - number of nodes marked "done" is less than the number that have been returned - by "get_ready". - - Raises ValueError if called without calling "prepare" previously. - """ - + def add(self, node: _T, *predecessors: _T) -> None: ... + def prepare(self) -> None: ... + def is_active(self) -> bool: ... def __bool__(self) -> bool: ... - def done(self, *nodes: _T) -> None: - """Marks a set of nodes returned by "get_ready" as processed. - - This method unblocks any successor of each node in *nodes* for being returned - in the future by a call to "get_ready". - - Raises ValueError if any node in *nodes* has already been marked as - processed by a previous call to this method, if a node was not added to the - graph by using "add" or if called without calling "prepare" previously or if - node has not yet been returned by "get_ready". - """ - - def get_ready(self) -> tuple[_T, ...]: - """Return a tuple of all the nodes that are ready. - - Initially it returns all nodes with no predecessors; once those are marked - as processed by calling "done", further calls will return all new nodes that - have all their predecessors already processed. Once no more progress can be made, - empty tuples are returned. - - Raises ValueError if called without calling "prepare" previously. - """ - - def static_order(self) -> Iterable[_T]: - """Returns an iterable of nodes in a topological order. - - The particular order that is returned may depend on the specific - order in which the items were inserted in the graph. - - Using this method does not require to call "prepare" or "done". If any - cycle is detected, :exc:`CycleError` will be raised. - """ + def done(self, *nodes: _T) -> None: ... + def get_ready(self) -> tuple[_T, ...]: ... + def static_order(self) -> Iterable[_T]: ... if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ - -class CycleError(ValueError): - """Subclass of ValueError raised by TopologicalSorter.prepare if cycles - exist in the working graph. + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - If multiple cycles exist, only one undefined choice among them will be reported - and included in the exception. The detected cycle can be accessed via the second - element in the *args* attribute of the exception instance and consists in a list - of nodes, such that each node is, in the graph, an immediate predecessor of the - next node in the list. In the reported list, the first and the last node will be - the same, to make it clear that it is cyclic. - """ +class CycleError(ValueError): ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi index 2364e81f8904a..965ecece2a56d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi @@ -1,19 +1,3 @@ -"""Access to the Unix group database. - -Group entries are reported as 4-tuples containing the following fields -from the group database, in order: - - gr_name - name of the group - gr_passwd - group password (encrypted); often empty - gr_gid - numeric ID of the group - gr_mem - list of members - -The gid is an integer, name and password are strings. (Note that most -users are not explicitly listed as members of the groups they are in -according to the password database. Check both databases to get -complete membership information.) -""" - import sys from _typeshed import structseq from typing import Any, Final, final @@ -21,47 +5,18 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_group(structseq[Any], tuple[str, str | None, int, list[str]]): - """grp.struct_group: Results from getgr*() routines. - - This object may be accessed either as a tuple of - (gr_name,gr_passwd,gr_gid,gr_mem) - or via the object attributes as named in the above tuple. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("gr_name", "gr_passwd", "gr_gid", "gr_mem") @property - def gr_name(self) -> str: - """group name""" - + def gr_name(self) -> str: ... @property - def gr_passwd(self) -> str | None: - """password""" - + def gr_passwd(self) -> str | None: ... @property - def gr_gid(self) -> int: - """group id""" - + def gr_gid(self) -> int: ... @property - def gr_mem(self) -> list[str]: - """group members""" - - def getgrall() -> list[struct_group]: - """Return a list of all available group entries, in arbitrary order. - - An entry whose name starts with '+' or '-' represents an instruction - to use YP/NIS and may not be accessible via getgrnam or getgrgid. - """ - - def getgrgid(id: int) -> struct_group: - """Return the group database entry for the given numeric group ID. - - If id is not valid, raise KeyError. - """ - - def getgrnam(name: str) -> struct_group: - """Return the group database entry for the given group name. + def gr_mem(self) -> list[str]: ... - If name is not valid, raise KeyError. - """ + def getgrall() -> list[struct_group]: ... + def getgrgid(id: int) -> struct_group: ... + def getgrnam(name: str) -> struct_group: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi index ac3fe1a3c0908..b18f76f06e3ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi @@ -1,9 +1,3 @@ -"""Functions that read and write gzipped files. - -The user of the file doesn't have to worry about the compression, -but random access is not allowed. -""" - import sys import zlib from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath, WriteableBuffer @@ -57,26 +51,7 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> GzipFile: - """Open a gzip-compressed file in binary or text mode. - - The filename argument can be an actual filename (a str or bytes object), or - an existing file object to read from or write to. - - The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for - binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is - "rb", and the default compresslevel is 9. 
- - For binary mode, this function is equivalent to the GzipFile constructor: - GzipFile(filename, mode, compresslevel). In this case, the encoding, errors - and newline arguments must not be provided. - - For text mode, a GzipFile object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error handling - behavior, and line ending(s). - - """ - +) -> GzipFile: ... @overload def open( filename: StrOrBytesPath | _WritableFileobj, @@ -106,11 +81,6 @@ def open( ) -> GzipFile | TextIOWrapper: ... class _PaddedFile: - """Minimal read-only file object that prepends a string to the contents - of an actual file. Shouldn't be used outside of gzip.py, as it lacks - essential functionality. - """ - file: _ReadableFileobj def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... def read(self, size: int) -> bytes: ... @@ -118,18 +88,9 @@ class _PaddedFile: def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... -class BadGzipFile(OSError): - """Exception raised in some cases for invalid gzip files.""" +class BadGzipFile(OSError): ... class GzipFile(BaseStream): - """The GzipFile class simulates most of the methods of a file object with - the exception of the truncate() method. - - This class only supports opening files in binary mode. If you need to open a - compressed file in text mode, use the gzip.open() function. - - """ - myfileobj: FileIO | None mode: object name: str @@ -143,41 +104,7 @@ class GzipFile(BaseStream): compresslevel: int = 9, fileobj: _ReadableFileobj | None = None, mtime: float | None = None, - ) -> None: - """Constructor for the GzipFile class. - - At least one of fileobj and filename must be given a - non-trivial value. - - The new class instance is based on fileobj, which can be a regular - file, an io.BytesIO object, or any other object which simulates a file. - It defaults to None, in which case filename is opened to provide - a file object. - - When fileobj is not None, the filename argument is only used to be - included in the gzip file header, which may include the original - filename of the uncompressed file. It defaults to the filename of - fileobj, if discernible; otherwise, it defaults to the empty string, - and in this case the original filename is not included in the header. - - The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or - 'xb' depending on whether the file will be read or written. The default - is the mode of fileobj if discernible; otherwise, the default is 'rb'. - A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and - 'wb', 'a' and 'ab', and 'x' and 'xb'. - - The compresslevel argument is an integer from 0 to 9 controlling the - level of compression; 1 is fastest and produces the least compression, - and 9 is slowest and produces the most compression. 0 is no compression - at all. The default is 9. - - The optional mtime argument is the timestamp requested by gzip. The time - is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. - If mtime is omitted or None, the current time is used. Use mtime = 0 - to generate a compressed stream that does not depend on creation time. - - """ - + ) -> None: ... @overload def __init__( self, @@ -220,32 +147,16 @@ class GzipFile(BaseStream): def filename(self) -> str: ... @property - def mtime(self) -> int | None: - """Last modification time read from stream, or None""" + def mtime(self) -> int | None: ... crc: int def write(self, data: ReadableBuffer) -> int: ... 
def read(self, size: int | None = -1) -> bytes: ... - def read1(self, size: int = -1) -> bytes: - """Implements BufferedIOBase.read1() - - Reads up to a buffer's worth of data if size is negative. - """ - + def read1(self, size: int = -1) -> bytes: ... def peek(self, n: int) -> bytes: ... def close(self) -> None: ... def flush(self, zlib_mode: int = 2) -> None: ... - def fileno(self) -> int: - """Invoke the underlying file object's fileno() method. - - This will raise AttributeError if the underlying file object - doesn't support fileno(). - """ - - def rewind(self) -> None: - """Return the uncompressed stream file position indicator to the - beginning of the file - """ - + def fileno(self) -> int: ... + def rewind(self) -> None: ... def seek(self, offset: int, whence: int = 0) -> int: ... def readline(self, size: int | None = -1) -> bytes: ... @@ -257,24 +168,9 @@ class _GzipReader(DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... if sys.version_info >= (3, 14): - def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: - """Compress data in one shot and return the compressed string. - - compresslevel sets the compression level in range of 0-9. - mtime can be used to set the modification time. - The modification time is set to 0 by default, for reproducibility. - """ + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: ... else: - def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: - """Compress data in one shot and return the compressed string. - - compresslevel sets the compression level in range of 0-9. - mtime can be used to set the modification time. The modification time is - set to the current time by default. - """ + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... -def decompress(data: ReadableBuffer) -> bytes: - """Decompress a gzip compressed string in one shot. - Return the decompressed string. - """ +def decompress(data: ReadableBuffer) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi index faeafece16a43..924136301b215 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi @@ -1,54 +1,3 @@ -"""hashlib module - A common interface to many hash functions. - -new(name, data=b'', **kwargs) - returns a new hash object implementing the - given hash function; initializing the hash - using the given binary data. - -Named constructor functions are also available, these are faster -than using new(name): - -md5(), sha1(), sha224(), sha256(), sha384(), sha512(), blake2b(), blake2s(), -sha3_224, sha3_256, sha3_384, sha3_512, shake_128, and shake_256. - -More algorithms may be available on your platform but the above are guaranteed -to exist. See the algorithms_guaranteed and algorithms_available attributes -to find out what algorithm names can be passed to new(). - -NOTE: If you want the adler32 or crc32 hash functions they are available in -the zlib module. - -Choose your hash function wisely. Some have known collision weaknesses. -sha384 and sha512 will be slow on 32 bit platforms. - -Hash objects have these methods: - - update(data): Update the hash object with the bytes in data. Repeated calls - are equivalent to a single call with the concatenation of all - the arguments. 
- - digest(): Return the digest of the bytes passed to the update() method - so far as a bytes object. - - hexdigest(): Like digest() except the digest is returned as a string - of double length, containing only hexadecimal digits. - - copy(): Return a copy (clone) of the hash object. This can be used to - efficiently compute the digests of data that share a common - initial substring. - -For example, to obtain the digest of the byte string 'Nobody inspects the -spammish repetition': - - >>> import hashlib - >>> m = hashlib.md5() - >>> m.update(b"Nobody inspects") - >>> m.update(b" the spammish repetition") - >>> m.digest() - b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9' - -More condensed: - - >>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest() - 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2' - -""" - import sys from _blake2 import blake2b as blake2b, blake2s as blake2s from _hashlib import ( @@ -117,10 +66,7 @@ else: "pbkdf2_hmac", ) -def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: - """new(name, data=b'') - Return a new hashing object using the named algorithm; - optionally initialized with data (which must be a bytes-like object). - """ +def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: ... algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] @@ -137,17 +83,7 @@ if sys.version_info >= (3, 11): def file_digest( fileobj: _BytesIOLike | _FileDigestFileObj, digest: str | Callable[[], _HashObject], /, *, _bufsize: int = 262144 - ) -> HASH: - """Hash the contents of a file-like object. Returns a digest object. - - *fileobj* must be a file-like object opened for reading in binary mode. - It accepts file objects from open(), io.BytesIO(), and SocketIO objects. - The function may bypass Python's I/O and use the file descriptor *fileno* - directly. - - *digest* must either be a hash algorithm name as a *str*, a hash - constructor, or a callable that returns a hash object. - """ + ) -> HASH: ... # Legacy typing-only alias _Hash = HASH diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi index 0eb884cb035a6..220c41f303fba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi @@ -1,35 +1,3 @@ -"""Heap queue algorithm (a.k.a. priority queue). - -Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for -all k, counting elements from 0. For the sake of comparison, -non-existing elements are considered to be infinite. The interesting -property of a heap is that a[0] is always its smallest element. - -Usage: - -heap = [] # creates an empty heap -heappush(heap, item) # pushes a new item on the heap -item = heappop(heap) # pops the smallest item from the heap -item = heap[0] # smallest item on the heap without popping it -heapify(x) # transforms list into a heap, in-place, in linear time -item = heappushpop(heap, item) # pushes a new item and then returns - # the smallest item; the heap size is unchanged -item = heapreplace(heap, item) # pops and returns smallest item, and adds - # new item; the heap size is unchanged - -Our API differs from textbook heap algorithms as follows: - -- We use 0-based indexing. This makes the relationship between the - index for a node and the indexes for its children slightly less - obvious, but is more suitable since Python uses 0-based indexing. 
- -- Our heappop() method returns the smallest item, not the largest. - -These two make it possible to view the heap as a regular Python list -without surprises: heap[0] is the smallest item, and heap.sort() -maintains the heap invariant! -""" - from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Generator, Iterable @@ -43,35 +11,7 @@ __about__: Final[str] def merge( *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False -) -> Generator[_S]: - """Merge multiple sorted inputs into a single sorted output. - - Similar to sorted(itertools.chain(*iterables)) but returns a generator, - does not pull the data into memory all at once, and assumes that each of - the input streams is already sorted (smallest to largest). - - >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25])) - [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25] - - If *key* is not None, applies a key function to each element to determine - its sort order. - - >>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len)) - ['dog', 'cat', 'fish', 'horse', 'kangaroo'] - - """ - -def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: - """Find the n largest elements in a dataset. - - Equivalent to: sorted(iterable, key=key, reverse=True)[:n] - """ - -def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: - """Find the n smallest elements in a dataset. - - Equivalent to: sorted(iterable, key=key)[:n] - """ - -def _heapify_max(heap: list[Any], /) -> None: # undocumented - """Maxheap variant of heapify.""" +) -> Generator[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def _heapify_max(heap: list[Any], /) -> None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi index 4aa8aea3cd0c6..070c59b1c166d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi @@ -1,8 +1,3 @@ -"""HMAC (Keyed-Hashing for Message Authentication) module. - -Implements the HMAC algorithm as described by RFC 2104. -""" - from _hashlib import _HashObject, compare_digest as compare_digest from _typeshed import ReadableBuffer, SizedBuffer from collections.abc import Callable @@ -20,78 +15,20 @@ digest_size: None # In reality digestmod has a default value, but the function always throws an error # if the argument is not given, so we pretend it is a required argument. @overload -def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: - """Create a new hashing object and return it. - - key: bytes or buffer, The starting key for the hash. - msg: bytes or buffer, Initial input for the hash, or None. - digestmod: A hash name suitable for hashlib.new(). *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. - - Required as of 3.8, despite its position after the optional - msg argument. Passing it as a keyword argument is - recommended, though not required for legacy API reasons. 
- - You can now feed arbitrary bytes into the object using its update() - method, and can ask for the hash value at any time by calling its digest() - or hexdigest() methods. - """ - +def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... @overload def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... class HMAC: - """RFC 2104 HMAC class. Also complies with RFC 4231. - - This supports the API for Cryptographic Hash Functions (PEP 247). - """ - __slots__ = ("_hmac", "_inner", "_outer", "block_size", "digest_size") digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: - """Create a new HMAC object. - - key: bytes or buffer, key for the keyed hash object. - msg: bytes or buffer, Initial input for the hash or None. - digestmod: A hash name suitable for hashlib.new(). *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. - - Required as of 3.8, despite its position after the optional - msg argument. Passing it as a keyword argument is - recommended, though not required for legacy API reasons. - """ - - def update(self, msg: ReadableBuffer) -> None: - """Feed data from msg into this hashing object.""" - - def digest(self) -> bytes: - """Return the hash value of this hashing object. - - This returns the hmac value as bytes. The object is - not altered in any way by this function; you can continue - updating the object after calling this function. - """ - - def hexdigest(self) -> str: - """Like digest(), but returns a string of hexadecimal digits instead.""" - - def copy(self) -> HMAC: - """Return a separate copy of this hashing object. - - An update to this copy won't affect the original object. - """ - -def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: - """Fast inline implementation of HMAC. + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... + def update(self, msg: ReadableBuffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> HMAC: ... - key: bytes or buffer, The key for the keyed hash object. - msg: bytes or buffer, Input message. - digest: A hash name suitable for hashlib.new() for best performance. *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. - """ +def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi index af5799ee0c726..afba90832535d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi @@ -1,24 +1,6 @@ -""" -General functions for HTML manipulation. -""" - from typing import AnyStr __all__ = ["escape", "unescape"] -def escape(s: AnyStr, quote: bool = True) -> AnyStr: - """ - Replace special characters "&", "<" and ">" to HTML-safe sequences. - If the optional flag quote is true (the default), the quotation mark - characters, both double quote (") and single quote (') characters are also - translated. - """ - -def unescape(s: AnyStr) -> AnyStr: - """ - Convert all named and numeric character references (e.g. >, >, - &x3e;) in the string s to the corresponding unicode characters. 
- This function uses the rules defined by the HTML 5 standard - for both valid and invalid character references, and the list of - HTML 5 named character references defined in html.entities.html5. - """ +def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... +def unescape(s: AnyStr) -> AnyStr: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi index 0f7b8b04bbeca..e5890d1ecfbd8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi @@ -1,5 +1,3 @@ -"""HTML character entity references.""" - from typing import Final __all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi index cc71c2cb41f4c..7edd39e8c7037 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi @@ -1,6 +1,3 @@ -"""A parser for HTML and XHTML.""" - -import sys from _markupbase import ParserBase from re import Pattern from typing import Final @@ -8,52 +5,14 @@ from typing import Final __all__ = ["HTMLParser"] class HTMLParser(ParserBase): - """Find tags and other markup and call handler functions. - - Usage: - p = HTMLParser() - p.feed(data) - ... - p.close() - - Start tags are handled by calling self.handle_starttag() or - self.handle_startendtag(); end tags by self.handle_endtag(). The - data between tags is passed from the parser to the derived class - by calling self.handle_data() with the data as argument (the data - may be split up in arbitrary chunks). If convert_charrefs is - True the character references are converted automatically to the - corresponding Unicode character (and self.handle_data() is no - longer split in chunks), otherwise they are passed by calling - self.handle_entityref() or self.handle_charref() with the string - containing respectively the named or numeric reference as the - argument. - """ - CDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] - if sys.version_info >= (3, 13): - # Added in 3.13.6 - RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] - - def __init__(self, *, convert_charrefs: bool = True) -> None: - """Initialize and reset this instance. - - If convert_charrefs is True (the default), all character references - are automatically converted to the corresponding Unicode characters. - """ - - def feed(self, data: str) -> None: - """Feed data to the parser. - - Call this as often as you want, with as little or as much text - as you want (may include '\\n'). - """ - - def close(self) -> None: - """Handle any buffered data.""" - - def get_starttag_text(self) -> str | None: - """Return full source of start tag: '<...>'.""" + # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 + RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] + def __init__(self, *, convert_charrefs: bool = True) -> None: ... + def feed(self, data: str) -> None: ... + def close(self) -> None: ... + def get_starttag_text(self) -> str | None: ... def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... @@ -71,11 +30,8 @@ class HTMLParser(ParserBase): def parse_html_declaration(self, i: int) -> int: ... # undocumented def parse_pi(self, i: int) -> int: ... 
# undocumented def parse_starttag(self, i: int) -> int: ... # undocumented - if sys.version_info >= (3, 13): - # `escapable` parameter added in 3.13.6 - def set_cdata_mode(self, elem: str, *, escapable: bool = False) -> None: ... # undocumented - else: - def set_cdata_mode(self, elem: str) -> None: ... # undocumented + # `escapable` parameter added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 + def set_cdata_mode(self, elem: str, *, escapable: bool = False) -> None: ... # undocumented rawdata: str # undocumented cdata_elem: str | None # undocumented convert_charrefs: bool # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi index 1b65f162193f9..f60c3909736d3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi @@ -10,25 +10,6 @@ else: __all__ = ["HTTPStatus"] class HTTPStatus(IntEnum): - """HTTP status codes and reason phrases - - Status codes from the following RFCs are all observed: - - * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - * RFC 7725: An HTTP Status Code to Report Legal Obstacles - * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) - * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) - * RFC 8297: An HTTP Status Code for Indicating Hints - * RFC 8470: Using Early Data in HTTP - """ - @property def phrase(self) -> str: ... @property @@ -124,14 +105,6 @@ class HTTPStatus(IntEnum): if sys.version_info >= (3, 11): class HTTPMethod(StrEnum): - """HTTP methods and descriptions - - Methods from the following RFCs are all observed: - - * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 - * RFC 5789: PATCH Method for HTTP - """ - @property def description(self) -> str: ... CONNECT = "CONNECT" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi index 4fe786738c744..d259e84e6f2aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi @@ -1,73 +1,3 @@ -"""HTTP/1.1 client library - - - - -HTTPConnection goes through a number of "states", which define when a client -may legally make another request or fetch the response for a particular -request. 
This diagram details these state transitions: - - (null) - | - | HTTPConnection() - v - Idle - | - | putrequest() - v - Request-started - | - | ( putheader() )* endheaders() - v - Request-sent - |\\_____________________________ - | | getresponse() raises - | response = getresponse() | ConnectionError - v v - Unread-response Idle - [Response-headers-read] - |\\____________________ - | | - | response.read() | putrequest() - v v - Idle Req-started-unread-response - ______/| - / | - response.read() | | ( putheader() )* endheaders() - v v - Request-started Req-sent-unread-response - | - | response.read() - v - Request-sent - -This diagram presents the following rules: - -- a second request may not be started until {response-headers-read} - -- a response [object] cannot be retrieved until {request-sent} - -- there is no differentiation between an unread response body and a - partially read response body - -Note: this enforcement is applied by the HTTPConnection class. The - HTTPResponse class does not enforce this state machine, which - implies sophisticated clients may accelerate the request/response - pipeline. Caution should be taken, though: accelerating the states - beyond the above pattern may imply knowledge of the server's - connection-close behavior for certain requests. For example, it - is impossible to tell whether the server will close the connection - UNTIL the response headers have been read; this means that further - requests cannot be placed into the pipeline until it is known that - the server will NOT be closing the connection. - -Logical State __state __response -------------- ------- ---------- -Idle _CS_IDLE None -Request-started _CS_REQ_STARTED None -Request-sent _CS_REQ_SENT None -Unread-response _CS_IDLE -Req-started-unread-response _CS_REQ_STARTED -Req-sent-unread-response _CS_REQ_SENT -""" - import email.message import io import ssl @@ -189,21 +119,10 @@ NETWORK_AUTHENTICATION_REQUIRED: Final = 511 responses: dict[int, str] class HTTPMessage(email.message.Message[str, str]): - def getallmatchingheaders(self, name: str) -> list[str]: # undocumented - """Find all header lines matching a given header name. - - Look through the list of headers and find all lines matching a given - header name (and their continuation lines). A list of the lines is - returned, without interpretation. If the header does not occur, an - empty list is returned. If the header occurs multiple times, all - occurrences are returned. Case is not important in the header name. - - """ + def getallmatchingheaders(self, name: str) -> list[str]: ... # undocumented @overload -def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: - """Parses only RFC2822 headers from a file pointer.""" - +def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload def parse_headers(fp: SupportsReadline[bytes]) -> HTTPMessage: ... @@ -226,86 +145,24 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incomp url: str def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... def peek(self, n: int = -1) -> bytes: ... - def read(self, amt: int | None = None) -> bytes: - """Read and return the response body, or up to the next amt bytes.""" - - def read1(self, n: int = -1) -> bytes: - """Read with at most one underlying system call. If at least one - byte is buffered, return that instead. 
- """ - - def readinto(self, b: WriteableBuffer) -> int: - """Read up to len(b) bytes into bytearray b and return the number - of bytes read. - """ - + def read(self, amt: int | None = None) -> bytes: ... + def read1(self, n: int = -1) -> bytes: ... + def readinto(self, b: WriteableBuffer) -> int: ... def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] @overload - def getheader(self, name: str) -> str | None: - """Returns the value of the header matching *name*. - - If there are multiple matching headers, the values are - combined into a single string separated by commas and spaces. - - If no matching header is found, returns *default* or None if - the *default* is not specified. - - If the headers are unknown, raises http.client.ResponseNotReady. - - """ - + def getheader(self, name: str) -> str | None: ... @overload def getheader(self, name: str, default: _T) -> str | _T: ... - def getheaders(self) -> list[tuple[str, str]]: - """Return list of (header, value) tuples.""" - - def isclosed(self) -> bool: - """True if the connection is closed.""" - + def getheaders(self) -> list[tuple[str, str]]: ... + def isclosed(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... - def info(self) -> email.message.Message: - """Returns an instance of the class mimetools.Message containing - meta-information associated with the URL. - - When the method is HTTP, these headers are those returned by - the server at the head of the retrieved HTML page (including - Content-Length and Content-Type). - - When the method is FTP, a Content-Length header will be - present if (as is now usual) the server passed back a file - length in response to the FTP retrieval request. A - Content-Type header will be present if the MIME type can be - guessed. - - When the method is local-file, returned headers will include - a Date representing the file's last-modified time, a - Content-Length giving file size, and a Content-Type - containing a guess at the file's type. See also the - description of the mimetools module. - - """ - - def geturl(self) -> str: - """Return the real URL of the page. - - In some cases, the HTTP server redirects a client to another - URL. The urlopen() function handles this transparently, but in - some cases the caller needs to know which URL the client was - redirected to. The geturl() method can be used to get at this - redirected URL. - - """ - - def getcode(self) -> int: - """Return the HTTP status code that was sent with the response, - or None if the URL is not an HTTP URL. - - """ - + def info(self) -> email.message.Message: ... + def geturl(self) -> str: ... + def getcode(self) -> int: ... def begin(self) -> None: ... class HTTPConnection: @@ -333,95 +190,21 @@ class HTTPConnection: headers: Mapping[str, _HeaderValue] = {}, *, encode_chunked: bool = False, - ) -> None: - """Send a complete request to the server.""" - - def getresponse(self) -> HTTPResponse: - """Get the response from the server. - - If the HTTPConnection is in the correct state, returns an - instance of HTTPResponse or of whatever object is returned by - the response_class variable. - - If a request has not been sent or if a previous response has - not be handled, ResponseNotReady is raised. If the HTTP - response indicates that the connection should be closed, then - it will be closed before the response is returned. 
When the - connection is closed, the underlying socket is closed. - """ - + ) -> None: ... + def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... if sys.version_info >= (3, 12): - def get_proxy_response_headers(self) -> HTTPMessage | None: - """ - Returns a dictionary with the headers of the response - received from the proxy server to the CONNECT request - sent to set the tunnel. - - If the CONNECT request was not sent, the method returns None. - """ - - def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: - """Set up host and port for HTTP CONNECT tunnelling. - - In a connection that uses HTTP CONNECT tunnelling, the host passed to - the constructor is used as a proxy server that relays all communication - to the endpoint passed to `set_tunnel`. This done by sending an HTTP - CONNECT request to the proxy server when the connection is established. + def get_proxy_response_headers(self) -> HTTPMessage | None: ... - This method must be called before the HTTP connection has been - established. - - The headers argument should be a mapping of extra HTTP headers to send - with the CONNECT request. - - As HTTP/1.1 is used for HTTP CONNECT tunnelling request, as per the RFC - (https://tools.ietf.org/html/rfc7231#section-4.3.6), a HTTP Host: - header must be provided, matching the authority-form of the request - target provided as the destination for the CONNECT request. If a - HTTP Host: header is not provided via the headers argument, one - is generated and transmitted automatically. - """ - - def connect(self) -> None: - """Connect to the host and port specified in __init__.""" - - def close(self) -> None: - """Close the connection to the HTTP server.""" - - def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: - """Send a request to the server. - - 'method' specifies an HTTP request method, e.g. 'GET'. - 'url' specifies the object being requested, e.g. '/index.html'. - 'skip_host' if True does not add automatically a 'Host:' header - 'skip_accept_encoding' if True does not add automatically an - 'Accept-Encoding:' header - """ - - def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: - """Send a request header line to the server. - - For example: h.putheader('Accept', 'text/html') - """ - - def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: - """Indicate that the last header line has been sent to the server. - - This method sends the request to the server. The optional message_body - argument can be used to pass a message body associated with the - request. - """ - - def send(self, data: _DataType | str) -> None: - """Send 'data' to the server. - ``data`` can be a string object, a bytes object, an array object, a - file-like object that supports a .read() method, or an iterable object. - """ + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... + def connect(self) -> None: ... + def close(self) -> None: ... + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... + def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: ... + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... + def send(self, data: _DataType | str) -> None: ... 
class HTTPSConnection(HTTPConnection): - """This class allows communication via SSL.""" - # Can be `None` if `.connect()` was not called: sock: ssl.SSLSocket | MaybeNone if sys.version_info >= (3, 12): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi index a2f876c853e06..31e1d3fc83785 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi @@ -1,30 +1,3 @@ -"""HTTP cookie handling for web clients. - -This module has (now fairly distant) origins in Gisle Aas' Perl module -HTTP::Cookies, from the libwww-perl library. - -Docstrings, comments and debug strings in this code refer to the -attributes of the HTTP cookie system as cookie-attributes, to distinguish -them clearly from Python attributes. - -Class diagram (note that BSDDBCookieJar and the MSIE* classes are not -distributed with the Python standard library, but are available from -http://wwwsearch.sf.net/): - - CookieJar____ - / \\ \\ - FileCookieJar \\ \\ - / | \\ \\ \\ - MozillaCookieJar | LWPCookieJar \\ \\ - | | \\ - | ---MSIEBase | \\ - | / | | \\ - | / MSIEDBCookieJar BSDDBCookieJar - |/ - MSIECookieJar - -""" - import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -49,12 +22,6 @@ _T = TypeVar("_T") class LoadError(OSError): ... class CookieJar: - """Collection of HTTP cookies. - - You may not need to know about this class: try - urllib.request.build_opener(HTTPCookieProcessor).open(url). - """ - non_word_re: ClassVar[Pattern[str]] # undocumented quote_re: ClassVar[Pattern[str]] # undocumented strict_domain_re: ClassVar[Pattern[str]] # undocumented @@ -62,176 +29,43 @@ class CookieJar: dots_re: ClassVar[Pattern[str]] # undocumented magic_re: ClassVar[Pattern[str]] # undocumented def __init__(self, policy: CookiePolicy | None = None) -> None: ... - def add_cookie_header(self, request: Request) -> None: - """Add correct Cookie: header to request (urllib.request.Request object). - - The Cookie2 header is also added unless policy.hide_cookie2 is true. - - """ - - def extract_cookies(self, response: HTTPResponse, request: Request) -> None: - """Extract cookies from response, where allowable given the request.""" - + def add_cookie_header(self, request: Request) -> None: ... + def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... def set_policy(self, policy: CookiePolicy) -> None: ... - def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: - """Return sequence of Cookie objects extracted from response object.""" - - def set_cookie(self, cookie: Cookie) -> None: - """Set a cookie, without checking whether or not it should be set.""" - - def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: - """Set a cookie if policy says it's OK to do so.""" - - def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: - """Clear some cookies. - - Invoking this method without arguments will clear all cookies. If - given a single argument, only cookies belonging to that domain will be - removed. If given two arguments, cookies belonging to the specified - path within that domain are removed. If given three arguments, then - the cookie with the specified name, path and domain is removed. - - Raises KeyError if no matching cookie exists. - - """ - - def clear_session_cookies(self) -> None: - """Discard all session cookies. 
- - Note that the .save() method won't save session cookies anyway, unless - you ask otherwise by passing a true ignore_discard argument. - - """ - - def clear_expired_cookies(self) -> None: # undocumented - """Discard all expired cookies. - - You probably don't need to call this method: expired cookies are never - sent back to the server (provided you're using DefaultCookiePolicy), - this method is called by CookieJar itself every so often, and the - .save() method won't save expired cookies anyway (unless you ask - otherwise by passing a true ignore_expires argument). - - """ - + def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... + def set_cookie(self, cookie: Cookie) -> None: ... + def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... + def clear_session_cookies(self) -> None: ... + def clear_expired_cookies(self) -> None: ... # undocumented def __iter__(self) -> Iterator[Cookie]: ... - def __len__(self) -> int: - """Return number of contained cookies.""" + def __len__(self) -> int: ... class FileCookieJar(CookieJar): - """CookieJar that can be loaded from and saved to a file.""" - filename: str | None delayload: bool - def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: - """ - Cookies are NOT loaded from the named file until either the .load() or - .revert() method is called. - - """ - - def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: - """Save cookies to a file.""" - - def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: - """Load cookies from a file.""" - - def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: - """Clear all cookies and reload cookies from a saved file. - - Raises LoadError (or OSError) if reversion is not successful; the - object's state will not be altered if this happens. - - """ + def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... class MozillaCookieJar(FileCookieJar): - """ - - WARNING: you may want to backup your browser's cookies file if you use - this class to save cookies. I *think* it works, but there have been - bugs in the past! - - This class differs from CookieJar only in the format it uses to save and - load cookies to and from a file. This class uses the Mozilla/Netscape - 'cookies.txt' format. curl and lynx use this file format, too. - - Don't expect cookies saved while the browser is running to be noticed by - the browser (in fact, Mozilla on unix will overwrite your saved cookies if - you change them on disk while it's running; on Windows, you probably can't - save at all while the browser is running). - - Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to - Netscape cookies on saving. 
- - In particular, the cookie version and port number information is lost, - together with information about whether or not Path, Port and Discard were - specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the - domain as set in the HTTP header started with a dot (yes, I'm aware some - domains in Netscape files start with a dot and some don't -- trust me, you - really don't want to know any more about this). - - Note that though Mozilla and Netscape use the same format, they use - slightly different headers. The class saves cookies using the Netscape - header by default (Mozilla can cope with that). - - """ - if sys.version_info < (3, 10): header: ClassVar[str] # undocumented class LWPCookieJar(FileCookieJar): - """ - The LWPCookieJar saves a sequence of "Set-Cookie3" lines. - "Set-Cookie3" is the format used by the libwww-perl library, not known - to be compatible with any browser, but which is easy to read and - doesn't lose information about RFC 2965 cookies. - - Additional methods - - as_lwp_str(ignore_discard=True, ignore_expired=True) - - """ - - def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: # undocumented - """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. - - ignore_discard and ignore_expires: see docstring for FileCookieJar.save - - """ + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented class CookiePolicy: - """Defines which cookies get accepted from and returned to server. - - May also modify cookies, though this is probably a bad idea. - - The subclass DefaultCookiePolicy defines the standard rules for Netscape - and RFC 2965 cookies -- override that if you want a customized policy. - - """ - netscape: bool rfc2965: bool hide_cookie2: bool - def set_ok(self, cookie: Cookie, request: Request) -> bool: - """Return true if (and only if) cookie should be accepted from server. - - Currently, pre-expired cookies never get this far -- the CookieJar - class deletes such cookies itself. - - """ - - def return_ok(self, cookie: Cookie, request: Request) -> bool: - """Return true if (and only if) cookie should be returned to server.""" - - def domain_return_ok(self, domain: str, request: Request) -> bool: - """Return false if cookies should not be returned, given cookie domain.""" - - def path_return_ok(self, path: str, request: Request) -> bool: - """Return false if cookies should not be returned, given cookie path.""" + def set_ok(self, cookie: Cookie, request: Request) -> bool: ... + def return_ok(self, cookie: Cookie, request: Request) -> bool: ... + def domain_return_ok(self, domain: str, request: Request) -> bool: ... + def path_return_ok(self, path: str, request: Request) -> bool: ... class DefaultCookiePolicy(CookiePolicy): - """Implements the standard rules for accepting and returning cookies.""" - rfc2109_as_netscape: bool strict_domain: bool strict_rfc2965_unverifiable: bool @@ -259,22 +93,12 @@ class DefaultCookiePolicy(CookiePolicy): strict_ns_set_initial_dollar: bool = False, strict_ns_set_path: bool = False, secure_protocols: Sequence[str] = ("https", "wss"), - ) -> None: - """Constructor arguments should be passed as keyword arguments only.""" - - def blocked_domains(self) -> tuple[str, ...]: - """Return the sequence of blocked domains (as a tuple).""" - - def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: - """Set the sequence of blocked domains.""" - + ) -> None: ... 
+ def blocked_domains(self) -> tuple[str, ...]: ... + def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... def is_blocked(self, domain: str) -> bool: ... - def allowed_domains(self) -> tuple[str, ...] | None: - """Return None, or the sequence of allowed domains (as a tuple).""" - - def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: - """Set the sequence of allowed domains, or None.""" - + def allowed_domains(self) -> tuple[str, ...] | None: ... + def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: ... def is_not_allowed(self, domain: str) -> bool: ... def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented def set_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented @@ -290,23 +114,6 @@ class DefaultCookiePolicy(CookiePolicy): def return_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented class Cookie: - """HTTP Cookie. - - This class represents both Netscape and RFC 2965 cookies. - - This is deliberately a very simple class. It just holds attributes. It's - possible to construct Cookie instances that don't comply with the cookie - standards. CookieJar.make_cookies is the factory function for Cookie - objects -- it deals with cookie parsing, supplying defaults, and - normalising to the representation used in this class. CookiePolicy is - responsible for checking them to see whether they should be accepted from - and returned to the server. - - Note that the port may be present in the headers, but unspecified ("Port" - rather than"Port=80", for example); if this is the case, port is None. - - """ - version: int | None name: str value: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi index e07ddcccae82f..4df12e3125d4b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi @@ -1,93 +1,3 @@ -""" -Here's a sample session to show how to use this module. -At the moment, this is the only documentation. - -The Basics ----------- - -Importing is easy... - - >>> from http import cookies - -Most of the time you start by creating a cookie. - - >>> C = cookies.SimpleCookie() - -Once you've created your Cookie, you can add values just as if it were -a dictionary. - - >>> C = cookies.SimpleCookie() - >>> C["fig"] = "newton" - >>> C["sugar"] = "wafer" - >>> C.output() - 'Set-Cookie: fig=newton\\r\\nSet-Cookie: sugar=wafer' - -Notice that the printable representation of a Cookie is the -appropriate format for a Set-Cookie: header. This is the -default behavior. You can change the header and printed -attributes by using the .output() function - - >>> C = cookies.SimpleCookie() - >>> C["rocky"] = "road" - >>> C["rocky"]["path"] = "/cookie" - >>> print(C.output(header="Cookie:")) - Cookie: rocky=road; Path=/cookie - >>> print(C.output(attrs=[], header="Cookie:")) - Cookie: rocky=road - -The load() method of a Cookie extracts cookies from a string. In a -CGI script, you would use this method to extract the cookies from the -HTTP_COOKIE environment variable. - - >>> C = cookies.SimpleCookie() - >>> C.load("chips=ahoy; vienna=finger") - >>> C.output() - 'Set-Cookie: chips=ahoy\\r\\nSet-Cookie: vienna=finger' - -The load() method is darn-tootin smart about identifying cookies -within a string. 
Escaped quotation marks, nested semicolons, and other -such trickeries do not confuse it. - - >>> C = cookies.SimpleCookie() - >>> C.load('keebler="E=everybody; L=\\\\"Loves\\\\"; fudge=\\\\012;";') - >>> print(C) - Set-Cookie: keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;" - -Each element of the Cookie also supports all of the RFC 2109 -Cookie attributes. Here's an example which sets the Path -attribute. - - >>> C = cookies.SimpleCookie() - >>> C["oreo"] = "doublestuff" - >>> C["oreo"]["path"] = "/" - >>> print(C) - Set-Cookie: oreo=doublestuff; Path=/ - -Each dictionary element has a 'value' attribute, which gives you -back the value associated with the key. - - >>> C = cookies.SimpleCookie() - >>> C["twix"] = "none for you" - >>> C["twix"].value - 'none for you' - -The SimpleCookie expects that all values should be standard strings. -Just to be sure, SimpleCookie invokes the str() builtin to convert -the value to a string, when the values are set dictionary-style. - - >>> C = cookies.SimpleCookie() - >>> C["number"] = 7 - >>> C["string"] = "seven" - >>> C["number"].value - '7' - >>> C["string"].value - 'seven' - >>> C.output() - 'Set-Cookie: number=7\\r\\nSet-Cookie: string=seven' - -Finis. -""" - from collections.abc import Iterable, Mapping from types import GenericAlias from typing import Any, Generic, TypeVar, overload @@ -99,14 +9,7 @@ _DataType: TypeAlias = str | Mapping[str, str | Morsel[Any]] _T = TypeVar("_T") @overload -def _quote(str: None) -> None: - """Quote a string for use in a cookie header. - - If the string does not need to be double-quoted, then just return the - string. Otherwise, surround the string in doublequotes and quote - (with a \\) special characters. - """ - +def _quote(str: None) -> None: ... @overload def _quote(str: str) -> str: ... @overload @@ -117,14 +20,6 @@ def _unquote(str: str) -> str: ... class CookieError(Exception): ... class Morsel(dict[str, Any], Generic[_T]): - """A class to hold ONE (key, value) pair. - - In a cookie, each such pair may have several attributes, so this class is - used to keep the attributes associated with the appropriate key,value pair. - This class also includes a coded_value attribute, which is used to hold - the network representation of the value. - """ - @property def value(self) -> str: ... @property @@ -146,51 +41,16 @@ class Morsel(dict[str, Any], Generic[_T]): def OutputString(self, attrs: list[str] | None = None) -> str: ... def __eq__(self, morsel: object) -> bool: ... def __setitem__(self, K: str, V: Any) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): - """A container class for a set of Morsels.""" - def __init__(self, input: _DataType | None = None) -> None: ... - def value_decode(self, val: str) -> tuple[_T, str]: - """real_value, coded_value = value_decode(STRING) - Called prior to setting a cookie's value from the network - representation. The VALUE is the value read from HTTP - header. - Override this function to modify the behavior of cookies. - """ - - def value_encode(self, val: _T) -> tuple[_T, str]: - """real_value, coded_value = value_encode(VALUE) - Called prior to setting a cookie's value from the dictionary - representation. The VALUE is the value being assigned. - Override this function to modify the behavior of cookies. 
- """ - - def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: - """Return a string suitable for HTTP.""" + def value_decode(self, val: str) -> tuple[_T, str]: ... + def value_encode(self, val: _T) -> tuple[_T, str]: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... __str__ = output - def js_output(self, attrs: list[str] | None = None) -> str: - """Return a string suitable for JavaScript.""" - - def load(self, rawdata: _DataType) -> None: - """Load cookies from a string (presumably HTTP_COOKIE) or - from a dictionary. Loading cookies from a dictionary 'd' - is equivalent to calling: - map(Cookie.__setitem__, d.keys(), d.values()) - """ - - def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: - """Dictionary style assignment.""" + def js_output(self, attrs: list[str] | None = None) -> str: ... + def load(self, rawdata: _DataType) -> None: ... + def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... -class SimpleCookie(BaseCookie[str]): - """ - SimpleCookie supports strings as cookie values. When setting - the value using the dictionary assignment notation, SimpleCookie - calls the builtin str() to convert the value to a string. Values - received from HTTP are kept as strings. - """ +class SimpleCookie(BaseCookie[str]): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi index f558baca6b5e9..2c1a374331bcc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi @@ -1,36 +1,3 @@ -"""HTTP server classes. - -Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see -SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and (deprecated) CGIHTTPRequestHandler for CGI scripts. - -It does, however, optionally implement HTTP/1.1 persistent connections. - -Notes on CGIHTTPRequestHandler ------------------------------- - -This class is deprecated. It implements GET and POST requests to cgi-bin scripts. - -If the os.fork() function is not present (Windows), subprocess.Popen() is used, -with slightly altered but never documented semantics. Use from a threaded -process is likely to trigger a warning at os.fork() time. - -In all cases, the implementation is intentionally naive -- all -requests are executed synchronously. - -SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL --- it may execute arbitrary Python code or external programs. - -Note that status code 200 is sent prior to execution of a CGI script, so -scripts cannot send other status codes such as 302 (redirect). - -XXX To do: - -- log requests even later (to capture byte count) -- log user-agent header and other interesting goodies -- send error log to separate file -""" - import _socket import email.message import io @@ -91,114 +58,11 @@ if sys.version_info >= (3, 14): password: _PasswordType | None = None, alpn_protocols: Iterable[str] | None = None, ) -> None: ... - def server_activate(self) -> None: - """Wrap the socket in SSLSocket.""" + def server_activate(self) -> None: ... class ThreadingHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer): ... class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): - """HTTP request handler base class. 
- - The following explanation of HTTP serves to guide you through the - code as well as to expose any misunderstandings I may have about - HTTP (so you don't need to read the code to figure out I'm wrong - :-). - - HTTP (HyperText Transfer Protocol) is an extensible protocol on - top of a reliable stream transport (e.g. TCP/IP). The protocol - recognizes three parts to a request: - - 1. One line identifying the request type and path - 2. An optional set of RFC-822-style headers - 3. An optional data part - - The headers and data are separated by a blank line. - - The first line of the request has the form - - <command> <path> <version> - - where <command> is a (case-sensitive) keyword such as GET or POST, - <path> is a string containing path information for the request, - and <version> should be the string "HTTP/1.0" or "HTTP/1.1". - <path> is encoded using the URL encoding scheme (using %xx to signify - the ASCII character with hex code xx). - - The specification specifies that lines are separated by CRLF but - for compatibility with the widest range of clients recommends - servers also handle LF. Similarly, whitespace in the request line - is treated sensibly (allowing multiple spaces between components - and allowing trailing whitespace). - - Similarly, for output, lines ought to be separated by CRLF pairs - but most clients grok LF characters just fine. - - If the first line of the request has the form - - <command> <path> - - (i.e. <version> is left out) then this is assumed to be an HTTP - 0.9 request; this form has no optional headers and data part and - the reply consists of just the data. - - The reply form of the HTTP 1.x protocol again has three parts: - - 1. One line giving the response code - 2. An optional set of RFC-822-style headers - 3. The data - - Again, the headers and data are separated by a blank line. - - The response code line has the form - - <version> <responsecode> <responsestring> - - where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"), - <responsecode> is a 3-digit response code indicating success or - failure of the request, and <responsestring> is an optional - human-readable string explaining what the response code means. - - This server parses the request and the headers, and then calls a - function specific to the request type (<command>). Specifically, - a request SPAM will be handled by a method do_SPAM(). If no - such method exists the server sends an error response to the - client. If it exists, it is called with no arguments: - - do_SPAM() - - Note that the request name is case sensitive (i.e. SPAM and spam - are different requests). - - The various request details are stored in instance variables: - - - client_address is the client IP address in the form (host, - port); - - - command, path and version are the broken-down request line; - - - headers is an instance of email.message.Message (or a derived - class) containing the header information; - - - rfile is a file object open for reading positioned at the - start of the optional input data part; - - - wfile is a file object open for writing. - - IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! - - The first thing to be written must be the response line. Then - follow 0 or more header lines, then a blank line, and then the - actual data (if any). The meaning of the header lines depends on - the command executed by the server; in most cases, when data is - returned, there should be at least one header line of the form - - Content-type: <type>/<subtype> - - where <type> and <subtype> should be registered MIME types, - e.g. "text/html" or "text/plain". 
- - """ - client_address: tuple[str, int] close_connection: bool requestline: str @@ -216,142 +80,24 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def handle_one_request(self) -> None: - """Handle a single HTTP request. - - You normally don't need to override this method; see the class - __doc__ string for information on how to handle specific HTTP - commands such as GET and POST. - - """ - - def handle_expect_100(self) -> bool: - """Decide what to do with an "Expect: 100-continue" header. - - If the client is expecting a 100 Continue response, we must - respond with either a 100 Continue or a final response before - waiting for the request body. The default is to always respond - with a 100 Continue. You can behave differently (for example, - reject unauthorized requests) by overriding this method. - - This method should either return True (possibly after sending - a 100 Continue response) or send an error response and return - False. - - """ - - def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: - """Send and log an error reply. - - Arguments are - * code: an HTTP error code - 3 digits - * message: a simple optional 1 line reason phrase. - *( HTAB / SP / VCHAR / %x80-FF ) - defaults to short entry matching the response code - * explain: a detailed message defaults to the long entry - matching the response code. - - This sends an error response (so it must be called before any - output has been generated), logs the error, and finally sends - a piece of HTML explaining the error to the user. - - """ - - def send_response(self, code: int, message: str | None = None) -> None: - """Add the response header to the headers buffer and log the - response code. - - Also send two standard headers with the server software - version and the current date. - - """ - - def send_header(self, keyword: str, value: str) -> None: - """Send a MIME header to the headers buffer.""" - - def send_response_only(self, code: int, message: str | None = None) -> None: - """Send the response header only.""" - - def end_headers(self) -> None: - """Send the blank line ending the MIME headers.""" - + def handle_one_request(self) -> None: ... + def handle_expect_100(self) -> bool: ... + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... + def send_response(self, code: int, message: str | None = None) -> None: ... + def send_header(self, keyword: str, value: str) -> None: ... + def send_response_only(self, code: int, message: str | None = None) -> None: ... + def end_headers(self) -> None: ... def flush_headers(self) -> None: ... - def log_request(self, code: int | str = "-", size: int | str = "-") -> None: - """Log an accepted request. - - This is called by send_response(). - - """ - - def log_error(self, format: str, *args: Any) -> None: - """Log an error. - - This is called when a request cannot be fulfilled. By - default it passes the message on to log_message(). - - Arguments are the same as for log_message(). - - XXX This should go to the separate error log. - - """ - - def log_message(self, format: str, *args: Any) -> None: - """Log an arbitrary message. - - This is used by all other logging functions. Override - it if you have specific logging wishes. - - The first argument, FORMAT, is a format string for the - message to be logged. 
If the format string contains - any % escapes requiring parameters, they should be - specified as subsequent arguments (it's just like - printf!). - - The client ip and current date/time are prefixed to - every message. - - Unicode control characters are replaced with escaped hex - before writing the output to stderr. - - """ - - def version_string(self) -> str: - """Return the server software version string.""" - - def date_time_string(self, timestamp: float | None = None) -> str: - """Return the current date and time formatted for a message header.""" - - def log_date_time_string(self) -> str: - """Return the current time formatted for logging.""" - - def address_string(self) -> str: - """Return the client address.""" - - def parse_request(self) -> bool: # undocumented - """Parse a request (internal). - - The request should be stored in self.raw_requestline; the results - are in self.command, self.path, self.request_version and - self.headers. - - Return True for success, False for failure; on failure, any relevant - error response has already been sent back. - - """ + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... + def log_error(self, format: str, *args: Any) -> None: ... + def log_message(self, format: str, *args: Any) -> None: ... + def version_string(self) -> str: ... + def date_time_string(self, timestamp: float | None = None) -> str: ... + def log_date_time_string(self) -> str: ... + def address_string(self) -> str: ... + def parse_request(self) -> bool: ... # undocumented class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): - """Simple HTTP request handler with GET and HEAD commands. - - This serves files from the current directory and any of its - subdirectories. The MIME type for files is determined by - calling the .guess_type() method. - - The GET and HEAD requests are identical except that the HEAD - request omits the actual contents of the file. - - """ - extensions_map: dict[str, str] if sys.version_info >= (3, 12): index_pages: ClassVar[tuple[str, ...]] @@ -364,160 +110,33 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): *, directory: StrPath | None = None, ) -> None: ... - def do_GET(self) -> None: - """Serve a GET request.""" - - def do_HEAD(self) -> None: - """Serve a HEAD request.""" - - def send_head(self) -> io.BytesIO | BinaryIO | None: # undocumented - """Common code for GET and HEAD commands. - - This sends the response code and MIME headers. - - Return value is either a file object (which has to be copied - to the outputfile by the caller unless the command was HEAD, - and must be closed by the caller under all circumstances), or - None, in which case the caller has nothing further to do. - - """ - - def list_directory(self, path: StrPath) -> io.BytesIO | None: # undocumented - """Helper to produce a directory listing (absent index.html). - - Return value is either a file object, or None (indicating an - error). In either case, the headers are sent, making the - interface the same as for send_head(). - - """ - - def translate_path(self, path: str) -> str: # undocumented - """Translate a /-separated PATH to the local filename syntax. - - Components that mean special things to the local file system - (e.g. drive or directory names) are ignored. (XXX They should - probably be diagnosed.) - - """ - - def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: # undocumented - """Copy all data between two file objects. 
- - The SOURCE argument is a file object open for reading - (or anything with a read() method) and the DESTINATION - argument is a file object open for writing (or - anything with a write() method). - - The only reason for overriding this would be to change - the block size or perhaps to replace newlines by CRLF - -- note however that this the default server uses this - to copy binary data as well. - - """ + def do_GET(self) -> None: ... + def do_HEAD(self) -> None: ... + def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented + def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented + def translate_path(self, path: str) -> str: ... # undocumented + def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented + def guess_type(self, path: StrPath) -> str: ... # undocumented - def guess_type(self, path: StrPath) -> str: # undocumented - """Guess the type of a file. - - Argument is a PATH (a filename). - - Return value is a string of the form type/subtype, - usable for a MIME Content-type header. - - The default implementation looks the file's extension - up in the table self.extensions_map, using application/octet-stream - as a default; however it would be permissible (if - slow) to look inside the data to make a better guess. - - """ - -def executable(path: StrPath) -> bool: # undocumented - """Test for executable file.""" +def executable(path: StrPath) -> bool: ... # undocumented if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): - """Complete HTTP server with GET, HEAD and POST commands. - - GET and HEAD also support running CGI scripts. - - The POST command is *only* implemented for CGI scripts. - - """ - cgi_directories: list[str] have_fork: bool # undocumented - def do_POST(self) -> None: - """Serve a POST request. - - This is only implemented for CGI scripts. - - """ - - def is_cgi(self) -> bool: # undocumented - """Test whether self.path corresponds to a CGI script. - - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. - - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. - - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). - - """ - - def is_executable(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is an executable file.""" - - def is_python(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is a Python script.""" - - def run_cgi(self) -> None: # undocumented - """Execute a CGI script.""" + def do_POST(self) -> None: ... + def is_cgi(self) -> bool: ... # undocumented + def is_executable(self, path: StrPath) -> bool: ... # undocumented + def is_python(self, path: StrPath) -> bool: ... # undocumented + def run_cgi(self) -> None: ... # undocumented else: class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): - """Complete HTTP server with GET, HEAD and POST commands. - - GET and HEAD also support running CGI scripts. - - The POST command is *only* implemented for CGI scripts. - - """ - cgi_directories: list[str] have_fork: bool # undocumented - def do_POST(self) -> None: - """Serve a POST request. 
- - This is only implemented for CGI scripts. - - """ - - def is_cgi(self) -> bool: # undocumented - """Test whether self.path corresponds to a CGI script. - - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. - - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. - - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). - - """ - - def is_executable(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is an executable file.""" - - def is_python(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is a Python script.""" - - def run_cgi(self) -> None: # undocumented - """Execute a CGI script.""" + def do_POST(self) -> None: ... + def is_cgi(self) -> bool: ... # undocumented + def is_executable(self, path: StrPath) -> bool: ... # undocumented + def is_python(self, path: StrPath) -> bool: ... # undocumented + def run_cgi(self) -> None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi index dca7377e76f22..536985a592b7f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi @@ -1,15 +1,3 @@ -"""IMAP4 client. - -Based on RFC 2060. - -Public class: IMAP4 -Public variable: Debug -Public functions: Internaldate2tuple - Int2AP - ParseFlags - Time2Internaldate -""" - import subprocess import sys import time @@ -35,53 +23,6 @@ _AnyResponseData: TypeAlias = list[None] | list[bytes | tuple[bytes, bytes]] Commands: dict[str, tuple[str, ...]] class IMAP4: - """IMAP4 client class. - - Instantiate with: IMAP4([host[, port[, timeout=None]]]) - - host - host's name (default: localhost); - port - port number (default: standard IMAP4 port). - timeout - socket timeout (default: None) - If timeout is not given or is None, - the global default socket timeout is used - - All IMAP4rev1 commands are supported by methods of the same - name (in lowercase). - - All arguments to commands are converted to strings, except for - AUTHENTICATE, and the last argument to APPEND which is passed as - an IMAP4 literal. If necessary (the string contains any - non-printing characters or white-space and isn't enclosed with - either parentheses or double quotes) each string is quoted. - However, the 'password' argument to the LOGIN command is always - quoted. If you want to avoid having an argument string quoted - (eg: the 'flags' argument to STORE) then enclose the string in - parentheses (eg: "(\\Deleted)"). - - Each command returns a tuple: (type, [data, ...]) where 'type' - is usually 'OK' or 'NO', and 'data' is either the text from the - tagged response, or untagged results from command. Each 'data' - is either a string, or a tuple. If a tuple, then the first part - is the header of the response, and the second part contains - the data (ie: 'literal' value). - - Errors raise the exception class .error(""). - IMAP4 server errors raise .abort(""), - which is a sub-class of 'error'. Mailbox status changes - from READ-WRITE to READ-ONLY raise the exception class - .readonly(""), which is a sub-class of 'abort'. - - "error" exceptions imply a program error. 
- "abort" exceptions imply the connection should be reset, and - the command re-tried. - "readonly" exceptions imply the command should be re-tried. - - Note: to use this module, you must read the RFCs pertaining to the - IMAP4 protocol, as the semantics of the arguments to each IMAP4 - command are left to the invoker, not to mention the results. Also, - most IMAP servers implement a sub-set of the commands available here. - """ - class error(Exception): ... class abort(error): ... class readonly(abort): ... @@ -100,12 +41,7 @@ class IMAP4: capabilities: tuple[str, ...] PROTOCOL_VERSION: str def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... - def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: - """Setup connection to remote server on "host:port" - (default: localhost:standard IMAP4 port). - This connection will be used by the routines: - read, readline, send, shutdown. - """ + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... if sys.version_info >= (3, 14): @property @deprecated("IMAP4.file is unsupported, can cause errors, and may be removed.") @@ -117,404 +53,72 @@ class IMAP4: host: str port: int sock: _socket - def read(self, size: int) -> bytes: - """Read 'size' bytes from remote.""" - - def readline(self) -> bytes: - """Read line from remote.""" - - def send(self, data: ReadableBuffer) -> None: - """Send data to remote.""" - - def shutdown(self) -> None: - """Close I/O established in "open".""" - - def socket(self) -> _socket: - """Return socket instance used to connect to IMAP4 server. - - socket = .socket() - """ - - def recent(self) -> _CommandResults: - """Return most recent 'RECENT' responses if any exist, - else prompt server for an update using the 'NOOP' command. - - (typ, [data]) = .recent() - - 'data' is None if no new messages, - else list of RECENT responses, most recent last. - """ - - def response(self, code: str) -> _CommandResults: - """Return data for response 'code' if received, or None. - - Old value for response 'code' is cleared. - - (code, [data]) = .response(code) - """ - - def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: - """Append message to named mailbox. - - (typ, [data]) = .append(mailbox, flags, date_time, message) - - All args except 'message' can be None. - """ - - def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: - """Authenticate command - requires response processing. - - 'mechanism' specifies which authentication mechanism is to - be used - it must appear in .capabilities in the - form AUTH=. - - 'authobject' must be a callable object: - - data = authobject(response) - - It will be called to process server continuation responses; the - response argument it is passed will be a bytes. It should return bytes - data that will be base64 encoded and sent to the server. It should - return None if the client abort response '*' should be sent instead. - """ - - def capability(self) -> _CommandResults: - """(typ, [data]) = .capability() - Fetch capabilities list from server. - """ - - def check(self) -> _CommandResults: - """Checkpoint mailbox on server. - - (typ, [data]) = .check() - """ - - def close(self) -> _CommandResults: - """Close currently selected mailbox. - - Deleted messages are removed from writable mailbox. - This is the recommended command before 'LOGOUT'. 
- - (typ, [data]) = .close() - """ - - def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: - """Copy 'message_set' messages onto end of 'new_mailbox'. - - (typ, [data]) = .copy(message_set, new_mailbox) - """ - - def create(self, mailbox: str) -> _CommandResults: - """Create new mailbox. - - (typ, [data]) = .create(mailbox) - """ - - def delete(self, mailbox: str) -> _CommandResults: - """Delete old mailbox. - - (typ, [data]) = .delete(mailbox) - """ - - def deleteacl(self, mailbox: str, who: str) -> _CommandResults: - """Delete the ACLs (remove any rights) set for who on mailbox. - - (typ, [data]) = .deleteacl(mailbox, who) - """ - - def enable(self, capability: str) -> _CommandResults: - """Send an RFC5161 enable string to the server. - - (typ, [data]) = .enable(capability) - """ - + def read(self, size: int) -> bytes: ... + def readline(self) -> bytes: ... + def send(self, data: ReadableBuffer) -> None: ... + def shutdown(self) -> None: ... + def socket(self) -> _socket: ... + def recent(self) -> _CommandResults: ... + def response(self, code: str) -> _CommandResults: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... + def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... + def capability(self) -> _CommandResults: ... + def check(self) -> _CommandResults: ... + def close(self) -> _CommandResults: ... + def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: ... + def create(self, mailbox: str) -> _CommandResults: ... + def delete(self, mailbox: str) -> _CommandResults: ... + def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... + def enable(self, capability: str) -> _CommandResults: ... def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def expunge(self) -> _CommandResults: - """Permanently remove deleted items from selected mailbox. - - Generates 'EXPUNGE' response for each deleted message. - - (typ, [data]) = .expunge() - - 'data' is list of 'EXPUNGE'd message numbers in order received. - """ - - def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: - """Fetch (parts of) messages. - - (typ, [data, ...]) = .fetch(message_set, message_parts) - - 'message_parts' should be a string of selected parts - enclosed in parentheses, eg: "(UID BODY[TEXT])". - - 'data' are tuples of message part envelope and data. - """ - - def getacl(self, mailbox: str) -> _CommandResults: - """Get the ACLs for a mailbox. - - (typ, [data]) = .getacl(mailbox) - """ - - def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: - """(typ, [data]) = .getannotation(mailbox, entry, attribute) - Retrieve ANNOTATIONs. - """ - - def getquota(self, root: str) -> _CommandResults: - """Get the quota root's resource usage and limits. - - Part of the IMAP4 QUOTA extension defined in rfc2087. - - (typ, [data]) = .getquota(root) - """ - - def getquotaroot(self, mailbox: str) -> _CommandResults: - """Get the list of quota roots for the named mailbox. - - (typ, [[QUOTAROOT responses...], [QUOTA responses]]) = .getquotaroot(mailbox) - """ + def expunge(self) -> _CommandResults: ... + def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... + def getacl(self, mailbox: str) -> _CommandResults: ... + def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... 
+ def getquota(self, root: str) -> _CommandResults: ... + def getquotaroot(self, mailbox: str) -> _CommandResults: ... if sys.version_info >= (3, 14): - def idle(self, duration: float | None = None) -> Idler: - """Return an iterable IDLE context manager producing untagged responses. - If the argument is not None, limit iteration to 'duration' seconds. - - with M.idle(duration=29 * 60) as idler: - for typ, data in idler: - print(typ, data) - - Note: 'duration' requires a socket connection (not IMAP4_stream). - """ - - def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: - """List mailbox names in directory matching pattern. - - (typ, [data]) = .list(directory='""', pattern='*') - - 'data' is list of LIST responses. - """ - - def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: - """Identify client using plaintext password. - - (typ, [data]) = .login(user, password) - - NB: 'password' will be quoted. - """ - - def login_cram_md5(self, user: str, password: str) -> _CommandResults: - """Force use of CRAM-MD5 authentication. - - (typ, [data]) = .login_cram_md5(user, password) - """ - - def logout(self) -> tuple[str, _AnyResponseData]: - """Shutdown connection to server. - - (typ, [data]) = .logout() - - Returns server 'BYE' response. - """ - - def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: - """List 'subscribed' mailbox names in directory matching pattern. - - (typ, [data, ...]) = .lsub(directory='""', pattern='*') - - 'data' are tuples of message part envelope and data. - """ - - def myrights(self, mailbox: str) -> _CommandResults: - """Show my ACLs for a mailbox (i.e. the rights that I have on mailbox). - - (typ, [data]) = .myrights(mailbox) - """ - - def namespace(self) -> _CommandResults: - """Returns IMAP namespaces ala rfc2342 - - (typ, [data, ...]) = .namespace() - """ - - def noop(self) -> tuple[str, _list[bytes]]: - """Send NOOP command. - - (typ, [data]) = .noop() - """ - - def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: - """Fetch truncated part of a message. - - (typ, [data, ...]) = .partial(message_num, message_part, start, length) - - 'data' is tuple of message part envelope and data. - """ - - def proxyauth(self, user: str) -> _CommandResults: - """Assume authentication as "user". - - Allows an authorised administrator to proxy into any user's - mailbox. - - (typ, [data]) = .proxyauth(user) - """ - - def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: - """Rename old mailbox name to new. - - (typ, [data]) = .rename(oldmailbox, newmailbox) - """ - - def search(self, charset: str | None, *criteria: str) -> _CommandResults: - """Search mailbox for matching messages. - - (typ, [data]) = .search(charset, criterion, ...) - - 'data' is space separated list of matching message numbers. - If UTF8 is enabled, charset MUST be None. - """ - - def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: - """Select a mailbox. - - Flush all untagged responses. - - (typ, [data]) = .select(mailbox='INBOX', readonly=False) - - 'data' is count of messages in mailbox ('EXISTS' response). - - Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so - other responses should be obtained via .response('FLAGS') etc. - """ - - def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: - """Set a mailbox acl. 
- - (typ, [data]) = .setacl(mailbox, who, what) - """ - - def setannotation(self, *args: str) -> _CommandResults: - """(typ, [data]) = .setannotation(mailbox[, entry, attribute]+) - Set ANNOTATIONs. - """ - - def setquota(self, root: str, limits: str) -> _CommandResults: - """Set the quota root's resource limits. - - (typ, [data]) = .setquota(root, limits) - """ - - def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: - """IMAP4rev1 extension SORT command. - - (typ, [data]) = .sort(sort_criteria, charset, search_criteria, ...) - """ - + def idle(self, duration: float | None = None) -> Idler: ... + + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... + def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... + def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... + def logout(self) -> tuple[str, _AnyResponseData]: ... + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... + def myrights(self, mailbox: str) -> _CommandResults: ... + def namespace(self) -> _CommandResults: ... + def noop(self) -> tuple[str, _list[bytes]]: ... + def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: ... + def proxyauth(self, user: str) -> _CommandResults: ... + def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... + def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... + def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... + def setannotation(self, *args: str) -> _CommandResults: ... + def setquota(self, root: str, limits: str) -> _CommandResults: ... + def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... - def status(self, mailbox: str, names: str) -> _CommandResults: - """Request named status conditions for mailbox. - - (typ, [data]) = .status(mailbox, names) - """ - - def store(self, message_set: str, command: str, flags: str) -> _CommandResults: - """Alters flag dispositions for messages in mailbox. - - (typ, [data]) = .store(message_set, command, flags) - """ - - def subscribe(self, mailbox: str) -> _CommandResults: - """Subscribe to new mailbox. - - (typ, [data]) = .subscribe(mailbox) - """ - - def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: - """IMAPrev1 extension THREAD command. - - (type, [data]) = .thread(threading_algorithm, charset, search_criteria, ...) - """ - - def uid(self, command: str, *args: str) -> _CommandResults: - """Execute "command arg ..." with messages identified by UID, - rather than message number. - - (typ, [data]) = .uid(command, arg1, arg2, ...) - - Returns response appropriate to 'command'. - """ - - def unsubscribe(self, mailbox: str) -> _CommandResults: - """Unsubscribe from old mailbox. - - (typ, [data]) = .unsubscribe(mailbox) - """ - - def unselect(self) -> _CommandResults: - """Free server's resources associated with the selected mailbox - and returns the server to the authenticated state. - This command performs the same actions as CLOSE, except - that no messages are permanently removed from the currently - selected mailbox. 
- - (typ, [data]) = .unselect() - """ - - def xatom(self, name: str, *args: str) -> _CommandResults: - """Allow simple extension commands - notified by server in CAPABILITY response. - - Assumes command is legal in current state. - - (typ, [data]) = .xatom(name, arg, ...) - - Returns response appropriate to extension command 'name'. - """ - + def status(self, mailbox: str, names: str) -> _CommandResults: ... + def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... + def subscribe(self, mailbox: str) -> _CommandResults: ... + def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def uid(self, command: str, *args: str) -> _CommandResults: ... + def unsubscribe(self, mailbox: str) -> _CommandResults: ... + def unselect(self) -> _CommandResults: ... + def xatom(self, name: str, *args: str) -> _CommandResults: ... def print_log(self) -> None: ... if sys.version_info >= (3, 14): class Idler: - """Iterable IDLE context manager: start IDLE & produce untagged responses. - - An object of this type is returned by the IMAP4.idle() method. - - Note: The name and structure of this class are subject to change. - """ - def __init__(self, imap: IMAP4, duration: float | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: object, exc_val: Unused, exc_tb: Unused) -> Literal[False]: ... def __iter__(self) -> Self: ... def __next__(self) -> tuple[str, float | None]: ... - def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: - """Yield a burst of responses no more than 'interval' seconds apart. - - with M.idle() as idler: - # get a response and any others following by < 0.1 seconds - batch = list(idler.burst()) - print(f'processing {len(batch)} responses...') - print(batch) - - Note: This generator requires a socket connection (not IMAP4_stream). - """ + def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: ... class IMAP4_SSL(IMAP4): - """IMAP4 client class over SSL connection - - Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) - - host - host's name (default: localhost); - port - port number (default: standard IMAP4 SSL port); - ssl_context - a SSLContext object that contains your certificate chain - and private key (default: None) - timeout - socket timeout (default: None) If timeout is not given or is None, - the global default socket timeout is used - - for more documentation see the docstring of the parent class IMAP4. - """ - if sys.version_info < (3, 12): keyfile: str certfile: str @@ -540,25 +144,10 @@ class IMAP4_SSL(IMAP4): else: file: IO[Any] - def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: - """Setup connection to remote server on "host:port". - (default: localhost:standard IMAP4 SSL port). - This connection will be used by the routines: - read, readline, send, shutdown. - """ - + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): - """IMAP4 client class over a stream - - Instantiate with: IMAP4_stream(command) - - "command" - a string that can be passed to subprocess.Popen() - - for more documentation see the docstring of the parent class IMAP4. - """ - command: str def __init__(self, command: str) -> None: ... 
if sys.version_info >= (3, 14): @@ -570,44 +159,16 @@ class IMAP4_stream(IMAP4): process: subprocess.Popen[bytes] writefile: IO[Any] readfile: IO[Any] - def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: - """Setup a stream connection. - This connection will be used by the routines: - read, readline, send, shutdown. - """ + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... class _Authenticator: - """Private class to provide en/decoding - for base64-based authentication conversation. - """ - mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... def encode(self, inp: bytes | bytearray | memoryview) -> str: ... def decode(self, inp: str | SizedBuffer) -> bytes: ... -def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: - """Parse an IMAP4 INTERNALDATE string. - - Return corresponding local time. The return value is a - time.struct_time tuple or None if the string has wrong format. - """ - -def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: - """Convert integer to A-P string representation.""" - -def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: - """Convert IMAP4 flags response to python tuple.""" - -def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: - """Convert date_time to IMAP4 INTERNALDATE representation. - - Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. The - date_time argument can be a number (int or float) representing - seconds since epoch (as returned by time.time()), a 9-tuple - representing local time, an instance of time.struct_time (as - returned by time.localtime()), an aware datetime instance or a - double-quoted string. In the last case, it is assumed to already - be in the correct format. - """ +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... +def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi index 3b6536960e8d7..e45ca3eb5bdbc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi @@ -1,5 +1,3 @@ -"""Recognize image file formats based on their first few bytes.""" - from _typeshed import StrPath from collections.abc import Callable from typing import Any, BinaryIO, Protocol, overload, type_check_only @@ -13,9 +11,7 @@ class _ReadableBinary(Protocol): def seek(self, offset: int, /) -> Any: ... @overload -def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: - """Return the type of image contained in a file or byte stream.""" - +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... @overload def what(file: Any, h: bytes) -> str | None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi index 119a9a497dfb3..b5b4223aa58e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi @@ -1,11 +1,3 @@ -"""This module provides the components needed to build your own __import__ -function. Undocumented functions are obsolete. - -In most cases it is preferred you consider using the importlib module's -functionality over this module. - -""" - import types from _imp import ( acquire_lock as acquire_lock, @@ -34,64 +26,16 @@ PY_FROZEN: Final = 7 PY_CODERESOURCE: Final = 8 IMP_HOOK: Final = 9 -def new_module(name: str) -> types.ModuleType: - """**DEPRECATED** - - Create a new module. - - The module is not entered into sys.modules. - - """ - -def get_magic() -> bytes: - """**DEPRECATED** - - Return the magic number for .pyc files. - """ - -def get_tag() -> str: - """Return the magic tag for .pyc files.""" - -def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: - """**DEPRECATED** - - Given the path to a .py file, return the path to its .pyc file. - - The .py file does not need to exist; this simply returns the path to the - .pyc file calculated as if the .py file were imported. - - If debug_override is not None, then it must be a boolean and is used in - place of sys.flags.optimize. - - If sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - -def source_from_cache(path: StrPath) -> str: - """**DEPRECATED** - - Given the path to a .pyc. file, return the path to its .py file. - - The .pyc file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc file. If path does - not conform to PEP 3147 format, ValueError will be raised. If - sys.implementation.cache_tag is None then NotImplementedError is raised. - - """ - -def get_suffixes() -> list[tuple[str, str, int]]: - """**DEPRECATED**""" +def new_module(name: str) -> types.ModuleType: ... +def get_magic() -> bytes: ... +def get_tag() -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def get_suffixes() -> list[tuple[str, str, int]]: ... class NullImporter: - """**DEPRECATED** - - Null import object. - - """ - def __init__(self, path: StrPath) -> None: ... - def find_module(self, fullname: Any) -> None: - """Always returns None.""" + def find_module(self, fullname: Any) -> None: ... # Technically, a text file has to support a slightly different set of operations than a binary file, # but we ignore that here. @@ -106,54 +50,14 @@ class _FileLike(Protocol): # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... -def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: - """**DEPRECATED**""" - -def load_package(name: str, path: StrPath) -> types.ModuleType: - """**DEPRECATED**""" - -def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: - """**DEPRECATED** - - Load a module, given information returned by find_module(). - - The module name must include the full package name, if any. - - """ +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_package(name: str, path: StrPath) -> types.ModuleType: ... 
+def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. def find_module( name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None -) -> tuple[IO[Any], str, tuple[str, str, int]]: - """**DEPRECATED** - - Search for a module. - - If path is omitted or None, search for a built-in, frozen or special - module and continue search in sys.path. The module name cannot - contain '.'; to search for a submodule of a package, pass the - submodule name and the package's __path__. - - """ - -def reload(module: types.ModuleType) -> types.ModuleType: - """**DEPRECATED** - - Reload the module and return it. - - The module must have been successfully imported before. - - """ - -def init_builtin(name: str) -> types.ModuleType | None: - """**DEPRECATED** - - Load and return a built-in module by name, or None is such module doesn't - exist - """ - -def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: # file argument is ignored - """**DEPRECATED** - - Load an extension module. - """ +) -> tuple[IO[Any], str, tuple[str, str, int]]: ... +def reload(module: types.ModuleType) -> types.ModuleType: ... +def init_builtin(name: str) -> types.ModuleType | None: ... +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... # file argument is ignored diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi index 11d7bf3c480fc..d60f90adee19c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi @@ -1,5 +1,3 @@ -"""A pure Python implementation of import.""" - import sys from importlib._bootstrap import __import__ as __import__ from importlib.abc import Loader @@ -9,34 +7,11 @@ from typing_extensions import deprecated __all__ = ["__import__", "import_module", "invalidate_caches", "reload"] # `importlib.import_module` return type should be kept the same as `builtins.__import__` -def import_module(name: str, package: str | None = None) -> ModuleType: - """Import a module. - - The 'package' argument is required when performing a relative import. It - specifies the package to use as the anchor point from which to resolve the - relative import to an absolute import. - - """ +def import_module(name: str, package: str | None = None) -> ModuleType: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `importlib.util.find_spec()` instead.") - def find_loader(name: str, path: str | None = None) -> Loader | None: - """Return the loader for the specified module. - - This is a backward-compatible wrapper around find_spec(). - - This function is deprecated in favor of importlib.util.find_spec(). - - """ - -def invalidate_caches() -> None: - """Call the invalidate_caches() method on all meta path finders stored in - sys.meta_path (where implemented). - """ - -def reload(module: ModuleType) -> ModuleType: - """Reload the module and return it. - - The module must have been successfully imported before. + def find_loader(name: str, path: str | None = None) -> Loader | None: ... - """ +def invalidate_caches() -> None: ... +def reload(module: ModuleType) -> ModuleType: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi index c85e8004cd37c..90ab340219172 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi @@ -1,5 +1,3 @@ -"""Subset of importlib.abc used to reduce importlib.util imports.""" - import sys import types from abc import ABCMeta @@ -8,43 +6,15 @@ from typing_extensions import deprecated if sys.version_info >= (3, 10): class Loader(metaclass=ABCMeta): - """Abstract base class for import loaders.""" - - def load_module(self, fullname: str) -> types.ModuleType: - """Return the loaded module. - - The module must be added to sys.modules and have import-related - attributes set properly. The fullname is a str. - - ImportError is raised on failure. - - This method is deprecated in favor of loader.exec_module(). If - exec_module() exists then it is used to provide a backwards-compatible - functionality for this method. - - """ + def load_module(self, fullname: str) -> types.ModuleType: ... if sys.version_info < (3, 12): @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(self, module: types.ModuleType) -> str: - """Return a module's repr. - - Used by the module type when the method does not raise - NotImplementedError. - - This method is deprecated. - - """ - - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: - """Return a module to initialize and into which to load. + def module_repr(self, module: types.ModuleType) -> str: ... - This method should raise ImportError if anything prevents it - from creating a new module. It may return None to indicate - that the spec should create the new module. - """ + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi index 116884f228c01..02427ff420620 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi @@ -1,11 +1,2 @@ -"""Core implementation of import. - -This module is NOT meant to be directly imported! It has been designed such -that it can be bootstrapped into Python as the implementation of import. As -such it requires the injection of specific modules and attributes in order to -work. One should use importlib as the public-facing version of this module. - -""" - from _frozen_importlib import * from _frozen_importlib import __import__ as __import__, _init_module_attrs as _init_module_attrs diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi index a4d2aeccd2744..6210ce7083afa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi @@ -1,11 +1,2 @@ -"""Core implementation of path-based import. - -This module is NOT meant to be directly imported! It has been designed such -that it can be bootstrapped into Python as the implementation of import. 
As -such it requires the injection of specific modules and attributes in order to -work. One should use importlib as the public-facing version of this module. - -""" - from _frozen_importlib_external import * from _frozen_importlib_external import _NamespaceLoader as _NamespaceLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi index 9bbc9e5a91438..ef7761f7119b9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi @@ -1,5 +1,3 @@ -"""Abstract base classes related to import.""" - import _ast import sys import types @@ -31,453 +29,151 @@ if sys.version_info >= (3, 10): from importlib._abc import Loader as Loader else: class Loader(metaclass=ABCMeta): - """Abstract base class for import loaders.""" - - def load_module(self, fullname: str) -> types.ModuleType: - """Return the loaded module. - - The module must be added to sys.modules and have import-related - attributes set properly. The fullname is a str. - - ImportError is raised on failure. - - This method is deprecated in favor of loader.exec_module(). If - exec_module() exists then it is used to provide a backwards-compatible - functionality for this method. - - """ - - def module_repr(self, module: types.ModuleType) -> str: - """Return a module's repr. - - Used by the module type when the method does not raise - NotImplementedError. - - This method is deprecated. - - """ - - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: - """Return a module to initialize and into which to load. - - This method should raise ImportError if anything prevents it - from creating a new module. It may return None to indicate - that the spec should create the new module. - """ + def load_module(self, fullname: str) -> types.ModuleType: ... + def module_repr(self, module: types.ModuleType) -> str: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use `MetaPathFinder` or `PathEntryFinder` instead.") - class Finder(metaclass=ABCMeta): - """Legacy abstract base class for import finders. - - It may be subclassed for compatibility with legacy third party - reimplementations of the import system. Otherwise, finder - implementations should derive from the more specific MetaPathFinder - or PathEntryFinder ABCs. - - Deprecated since Python 3.3 - """ + class Finder(metaclass=ABCMeta): ... @deprecated("Deprecated since Python 3.7. Use `importlib.resources.abc.TraversableResources` instead.") class ResourceLoader(Loader): - """Abstract base class for loaders which can return data from their - back-end storage to facilitate reading data to perform an import. - - This ABC represents one of the optional protocols specified by PEP 302. - - For directly loading resources, use TraversableResources instead. This class - primarily exists for backwards compatibility with other ABCs in this module. - - """ - @abstractmethod - def get_data(self, path: str) -> bytes: - """Abstract method which when implemented should return the bytes for - the specified path. The path must be a str. - """ + def get_data(self, path: str) -> bytes: ... 
class InspectLoader(Loader): - """Abstract base class for loaders which support inspection about the - modules they can load. - - This ABC represents one of the optional protocols specified by PEP 302. - - """ - - def is_package(self, fullname: str) -> bool: - """Optional method which when implemented should return whether the - module is a package. The fullname is a str. Returns a bool. - - Raises ImportError if the module cannot be found. - """ - - def get_code(self, fullname: str) -> types.CodeType | None: - """Method which returns the code object for the module. - - The fullname is a str. Returns a types.CodeType if possible, else - returns None if a code object does not make sense - (e.g. built-in module). Raises ImportError if the module cannot be - found. - """ - + def is_package(self, fullname: str) -> bool: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... @abstractmethod - def get_source(self, fullname: str) -> str | None: - """Abstract method which should return the source code for the - module. The fullname is a str. Returns a str. - - Raises ImportError if the module cannot be found. - """ - - def exec_module(self, module: types.ModuleType) -> None: - """Execute the module.""" - + def get_source(self, fullname: str) -> str | None: ... + def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod def source_to_code( - data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: ReadableBuffer | StrPath = "" - ) -> types.CodeType: - """Compile 'data' into a code object. - - The 'data' argument can be anything that compile() can handle. The'path' - argument should be where the data was retrieved (when applicable). - """ + data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath = "" + ) -> types.CodeType: ... class ExecutionLoader(InspectLoader): - """Abstract base class for loaders that wish to support the execution of - modules as scripts. - - This ABC represents one of the optional protocols specified in PEP 302. - - """ - @abstractmethod - def get_filename(self, fullname: str) -> str: - """Abstract method which should return the value that __file__ is to be - set to. - - Raises ImportError if the module cannot be found. - """ + def get_filename(self, fullname: str) -> str: ... class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): # type: ignore[misc] # incompatible definitions of source_to_code in the base classes - """Abstract base class for loading source code (and optionally any - corresponding bytecode). - - To support loading from source code, the abstractmethods inherited from - ResourceLoader and ExecutionLoader need to be implemented. To also support - loading from bytecode, the optional methods specified directly by this ABC - is required. - - Inherited abstractmethods not implemented in this ABC: - - * ResourceLoader.get_data - * ExecutionLoader.get_filename - - """ - @deprecated("Deprecated since Python 3.3. Use `importlib.resources.abc.SourceLoader.path_stats` instead.") - def path_mtime(self, path: str) -> float: - """Return the (int) modification time for the path (str).""" - - def set_data(self, path: str, data: bytes) -> None: - """Write the bytes to the path (if possible). - - Accepts a str path and data as bytes. - - Any needed intermediary directories are to be created. If for some - reason the file cannot be written because of permissions, fail - silently. 
- """ - - def get_source(self, fullname: str) -> str | None: - """Concrete implementation of InspectLoader.get_source.""" - - def path_stats(self, path: str) -> Mapping[str, Any]: - """Return a metadata dict for the source pointed to by the path (str). - Possible keys: - - 'mtime' (mandatory) is the numeric timestamp of last source - code modification; - - 'size' (optional) is the size in bytes of the source code. - """ + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... + def get_source(self, fullname: str) -> str | None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... # The base classes differ starting in 3.10: if sys.version_info >= (3, 10): # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(metaclass=ABCMeta): - """Abstract base class for import finders on sys.meta_path.""" - if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `MetaPathFinder.find_spec()` instead.") - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: - """Return a loader for the module. + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. - - """ - - def invalidate_caches(self) -> None: - """An optional method for clearing the finder's cache, if any. - This method is used by importlib.invalidate_caches(). - """ + def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(metaclass=ABCMeta): - """Abstract base class for path entry finders used by PathFinder.""" - if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `PathEntryFinder.find_spec()` instead.") - def find_module(self, fullname: str) -> Loader | None: - """Try to find a loader for the specified module by delegating to - self.find_loader(). - - This method is deprecated in favor of finder.find_spec(). - - """ - + def find_module(self, fullname: str) -> Loader | None: ... @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: - """Return (loader, namespace portion) for the path entry. - - The fullname is a str. The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. - - The portion will be discarded if another path entry finder - locates the module as a normal module or package. + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ - - def invalidate_caches(self) -> None: - """An optional method for clearing the finder's cache, if any. - This method is used by PathFinder.invalidate_caches(). - """ + def invalidate_caches(self) -> None: ... 
# Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... else: # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(Finder): - """Abstract base class for import finders on sys.meta_path.""" - - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: - """Return a loader for the module. - - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. - - """ - - def invalidate_caches(self) -> None: - """An optional method for clearing the finder's cache, if any. - This method is used by importlib.invalidate_caches(). - """ + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): - """Abstract base class for path entry finders used by PathFinder.""" - - def find_module(self, fullname: str) -> Loader | None: - """Try to find a loader for the specified module by delegating to - self.find_loader(). - - This method is deprecated in favor of finder.find_spec(). - - """ - - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: - """Return (loader, namespace portion) for the path entry. - - The fullname is a str. The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. - - The portion will be discarded if another path entry finder - locates the module as a normal module or package. - - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ - - def invalidate_caches(self) -> None: - """An optional method for clearing the finder's cache, if any. - This method is used by PathFinder.invalidate_caches(). - """ + def find_module(self, fullname: str) -> Loader | None: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): - """Abstract base class partially implementing the ResourceLoader and - ExecutionLoader ABCs. - """ - name: str path: str - def __init__(self, fullname: str, path: str) -> None: - """Cache the module name and the path to the file found by the - finder. - """ - - def get_data(self, path: str) -> bytes: - """Return the data from path as raw bytes.""" - - def get_filename(self, name: str | None = None) -> str: - """Return the path to the source file as found by the finder.""" - - def load_module(self, name: str | None = None) -> types.ModuleType: - """Load a module from a file. - - This method is deprecated. Use exec_module() instead. - - """ + def __init__(self, fullname: str, path: str) -> None: ... 
+ def get_data(self, path: str) -> bytes: ... + def get_filename(self, fullname: str | None = None) -> str: ... + def load_module(self, fullname: str | None = None) -> types.ModuleType: ... if sys.version_info < (3, 11): class ResourceReader(metaclass=ABCMeta): - """Abstract base class for loaders to provide resource reading support.""" - @abstractmethod - def open_resource(self, resource: str) -> IO[bytes]: - """Return an opened, file-like object for binary reading. - - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ - + def open_resource(self, resource: str) -> IO[bytes]: ... @abstractmethod - def resource_path(self, resource: str) -> str: - """Return the file system path to the specified resource. - - The 'resource' argument is expected to represent only a file name. - If the resource does not exist on the file system, raise - FileNotFoundError. - """ + def resource_path(self, resource: str) -> str: ... if sys.version_info >= (3, 10): @abstractmethod - def is_resource(self, path: str) -> bool: - """Return True if the named 'path' is a resource. - - Files are resources, directories are not. - """ + def is_resource(self, path: str) -> bool: ... else: @abstractmethod - def is_resource(self, name: str) -> bool: - """Return True if the named 'name' is consider a resource.""" + def is_resource(self, name: str) -> bool: ... @abstractmethod - def contents(self) -> Iterator[str]: - """Return an iterable of entries in `package`.""" + def contents(self) -> Iterator[str]: ... @runtime_checkable class Traversable(Protocol): - """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - """ - @abstractmethod - def is_dir(self) -> bool: - """ - Return True if self is a dir - """ - + def is_dir(self) -> bool: ... @abstractmethod - def is_file(self) -> bool: - """ - Return True if self is a file - """ - + def is_file(self) -> bool: ... @abstractmethod - def iterdir(self) -> Iterator[Traversable]: - """ - Yield Traversable objects in self - """ + def iterdir(self) -> Iterator[Traversable]: ... if sys.version_info >= (3, 11): @abstractmethod def joinpath(self, *descendants: str) -> Traversable: ... else: @abstractmethod - def joinpath(self, child: str, /) -> Traversable: - """ - Return Traversable child in self - """ + def joinpath(self, child: str, /) -> Traversable: ... + # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @overload @abstractmethod - def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: - """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). - - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ - + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... @property @abstractmethod - def name(self) -> str: - """ - The base name of this object without any parent references. - """ + def name(self) -> str: ... if sys.version_info >= (3, 10): - def __truediv__(self, child: str, /) -> Traversable: - """ - Return Traversable child in self - """ + def __truediv__(self, child: str, /) -> Traversable: ... 
else: @abstractmethod - def __truediv__(self, child: str, /) -> Traversable: - """ - Return Traversable child in self - """ + def __truediv__(self, child: str, /) -> Traversable: ... @abstractmethod - def read_bytes(self) -> bytes: - """ - Read contents of self as bytes - """ - + def read_bytes(self) -> bytes: ... @abstractmethod - def read_text(self, encoding: str | None = None) -> str: - """ - Read contents of self as text - """ + def read_text(self, encoding: str | None = None) -> str: ... class TraversableResources(ResourceReader): - """ - The required interface for providing traversable - resources. - """ - @abstractmethod - def files(self) -> Traversable: - """Return a Traversable object for the loaded package.""" - + def files(self) -> Traversable: ... def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi index 9cbc94326faf6..767046b70a3d1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi @@ -1,5 +1,3 @@ -"""The machinery of importlib: finders, loaders, hooks, etc.""" - import sys from importlib._bootstrap import BuiltinImporter as BuiltinImporter, FrozenImporter as FrozenImporter, ModuleSpec as ModuleSpec from importlib._bootstrap_external import ( @@ -21,8 +19,7 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 14): from importlib._bootstrap_external import AppleFrameworkLoader as AppleFrameworkLoader -def all_suffixes() -> list[str]: - """Returns a list of all recognized module suffixes for this process""" +def all_suffixes() -> list[str]: ... if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi index f2e832714a6f6..9286e92331c82 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -35,45 +35,21 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 10): from importlib.metadata._meta import PackageMetadata as PackageMetadata, SimplePath - def packages_distributions() -> Mapping[str, list[str]]: - """ - Return a mapping of top-level packages to their - distributions. - - >>> import collections.abc - >>> pkgs = packages_distributions() - >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) - True - """ + def packages_distributions() -> Mapping[str, list[str]]: ... + _SimplePath: TypeAlias = SimplePath else: _SimplePath: TypeAlias = Path class PackageNotFoundError(ModuleNotFoundError): - """The package was not found.""" - @property - def name(self) -> str: # type: ignore[override] - """module name""" + def name(self) -> str: ... # type: ignore[override] if sys.version_info >= (3, 13): _EntryPointBase = object elif sys.version_info >= (3, 11): class DeprecatedTuple: - """ - Provide subscript item access for backward compatibility. - - >>> recwarn = getfixture('recwarn') - >>> ep = EntryPoint(name='name', value='value', group='group') - >>> ep[:] - ('name', 'value', 'group') - >>> ep[0] - 'name' - >>> len(recwarn) - 1 - """ - def __getitem__(self, item: int) -> str: ... 
_EntryPointBase = DeprecatedTuple @@ -85,34 +61,13 @@ else: if sys.version_info >= (3, 11): class EntryPoint(_EntryPointBase): - """An entry point as defined by Python packaging conventions. - - See `the packaging docs on entry points - `_ - for more information. - - >>> ep = EntryPoint( - ... name=None, group=None, value='package.module:attr [extra1, extra2]') - >>> ep.module - 'package.module' - >>> ep.attr - 'attr' - >>> ep.extras - ['extra1', 'extra2'] - """ - pattern: ClassVar[Pattern[str]] name: str value: str group: str def __init__(self, name: str, value: str, group: str) -> None: ... - def load(self) -> Any: # Callable[[], Any] or an importable module - """Load the entry point from its definition. If only a module - is indicated by the value, return that module. Otherwise, - return the named object. - """ - + def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> list[str]: ... @property @@ -129,63 +84,19 @@ if sys.version_info >= (3, 11): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> bool: # undocumented - """ - EntryPoint matches the given parameters. - - >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') - >>> ep.matches(group='foo') - True - >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') - True - >>> ep.matches(group='foo', name='other') - False - >>> ep.matches() - True - >>> ep.matches(extras=['extra1', 'extra2']) - True - >>> ep.matches(module='bing') - True - >>> ep.matches(attr='bong') - True - """ - + ) -> bool: ... # undocumented def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: object) -> bool: ... if sys.version_info < (3, 12): - def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really - """ - Supply iter so one may construct dicts of EntryPoints by name. - """ + def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really else: @disjoint_base class EntryPoint(_EntryPointBase): - """An entry point as defined by Python packaging conventions. - - See `the packaging docs on entry points - `_ - for more information. - - >>> ep = EntryPoint( - ... name=None, group=None, value='package.module:attr [extra1, extra2]') - >>> ep.module - 'package.module' - >>> ep.attr - 'attr' - >>> ep.extras - ['extra1', 'extra2'] - """ - pattern: ClassVar[Pattern[str]] - def load(self) -> Any: # Callable[[], Any] or an importable module - """Load the entry point from its definition. If only a module - is indicated by the value, return that module. Otherwise, - return the named object. - """ - + def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> list[str]: ... @property @@ -203,45 +114,15 @@ else: module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> bool: # undocumented - """ - EntryPoint matches the given parameters. - - >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') - >>> ep.matches(group='foo') - True - >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') - True - >>> ep.matches(group='foo', name='other') - False - >>> ep.matches() - True - >>> ep.matches(extras=['extra1', 'extra2']) - True - >>> ep.matches(module='bing') - True - >>> ep.matches(attr='bong') - True - """ + ) -> bool: ... # undocumented def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really - """ - Supply iter so one may construct dicts of EntryPoints by name. 
- """ + def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really if sys.version_info >= (3, 12): class EntryPoints(tuple[EntryPoint, ...]): - """ - An immutable collection of selectable EntryPoint objects. - """ - __slots__ = () - def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] - """ - Get the EntryPoint in self matching name. - """ - + def __getitem__(self, name: str) -> EntryPoint: ... # type: ignore[override] def select( self, *, @@ -251,71 +132,20 @@ if sys.version_info >= (3, 12): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> EntryPoints: - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - + ) -> EntryPoints: ... @property - def names(self) -> set[str]: - """ - Return the set of all names of all entry points. - """ - + def names(self) -> set[str]: ... @property - def groups(self) -> set[str]: - """ - Return the set of all groups of all entry points. - """ + def groups(self) -> set[str]: ... elif sys.version_info >= (3, 10): class DeprecatedList(list[_T]): - """ - Allow an otherwise immutable object to implement mutability - for compatibility. - - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ - __slots__ = () class EntryPoints(DeprecatedList[EntryPoint]): # use as list is deprecated since 3.10 - """ - An immutable collection of selectable EntryPoint objects. - """ - # int argument is deprecated since 3.10 __slots__ = () - def __getitem__(self, name: int | str) -> EntryPoint: # type: ignore[override] - """ - Get the EntryPoint in self matching name. - """ - + def __getitem__(self, name: int | str) -> EntryPoint: ... # type: ignore[override] def select( self, *, @@ -325,52 +155,14 @@ elif sys.version_info >= (3, 10): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> EntryPoints: - """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ - + ) -> EntryPoints: ... @property - def names(self) -> set[str]: - """ - Return the set of all names of all entry points. - """ - + def names(self) -> set[str]: ... @property - def groups(self) -> set[str]: - """ - Return the set of all groups of all entry points. - - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() - """ + def groups(self) -> set[str]: ... if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class Deprecated(Generic[_KT, _VT]): - """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ - def __getitem__(self, name: _KT) -> _VT: ... @overload def get(self, name: _KT, default: None = None) -> _VT | None: ... @@ -385,23 +177,12 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. 
Use `select` instead.") class SelectableGroups(Deprecated[str, EntryPoints], dict[str, EntryPoints]): # use as dict is deprecated since 3.10 - """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ - @classmethod def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property def groups(self) -> set[str]: ... @property - def names(self) -> set[str]: - """ - for coverage: - >>> SelectableGroups().names - set() - """ - + def names(self) -> set[str]: ... @overload def select(self) -> Self: ... @overload @@ -417,12 +198,9 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): ) -> EntryPoints: ... class PackagePath(pathlib.PurePosixPath): - """A reference to a path in a package""" - def read_text(self, encoding: str = "utf-8") -> str: ... def read_binary(self) -> bytes: ... - def locate(self) -> PathLike[str]: - """Return a path-like object for this path""" + def locate(self) -> PathLike[str]: ... # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: hash: FileHash | None size: int | None @@ -440,211 +218,60 @@ else: _distribution_parent = object class Distribution(_distribution_parent): - """ - An abstract Python distribution package. - - Custom providers may derive from this class and define - the abstract methods to provide a concrete implementation - for their environment. Some providers may opt to override - the default implementation of some properties to bypass - the file-reading mechanism. - """ - @abc.abstractmethod - def read_text(self, filename: str) -> str | None: - """Attempt to load metadata file given by the name. - - Python distribution metadata is organized by blobs of text - typically represented as "files" in the metadata directory - (e.g. package-1.0.dist-info). These files include things - like: - - - METADATA: The distribution metadata including fields - like Name and Version and Description. - - entry_points.txt: A series of entry points as defined in - `the entry points spec `_. - - RECORD: A record of files according to - `this recording spec `_. - - A package may provide any set of files, including those - not listed here or none at all. - - :param filename: The name of the file in the distribution info. - :return: The text if found, otherwise None. - """ - + def read_text(self, filename: str) -> str | None: ... @abc.abstractmethod - def locate_file(self, path: StrPath) -> _SimplePath: - """ - Given a path to a file in this distribution, return a SimplePath - to it. - """ - + def locate_file(self, path: StrPath) -> _SimplePath: ... @classmethod - def from_name(cls, name: str) -> Distribution: - """Return the Distribution for the given package name. - - :param name: The name of the distribution package to search for. - :return: The Distribution instance (or subclass thereof) for the named - package, if found. - :raises PackageNotFoundError: When the named package's distribution - metadata cannot be found. - :raises ValueError: When an invalid value is supplied for name. - """ - + def from_name(cls, name: str) -> Distribution: ... @overload @classmethod - def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: - """Return an iterable of Distribution objects for all packages. - - Pass a ``context`` or pass keyword arguments for constructing - a context. - - :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for packages matching - the context. 
- """ - + def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload @classmethod def discover( cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... @staticmethod - def at(path: StrPath) -> PathDistribution: - """Return a Distribution for the indicated metadata path. + def at(path: StrPath) -> PathDistribution: ... - :param path: a string or path-like object - :return: a concrete Distribution instance for the path - """ if sys.version_info >= (3, 10): @property - def metadata(self) -> PackageMetadata: - """Return the parsed metadata for this Distribution. - - The returned object will have keys that name the various bits of - metadata per the - `Core metadata specifications `_. - - Custom providers may provide the METADATA file or override this - property. - """ - + def metadata(self) -> PackageMetadata: ... @property - def entry_points(self) -> EntryPoints: - """ - Return EntryPoints for this distribution. - - Custom providers may provide the ``entry_points.txt`` file - or override this property. - """ + def entry_points(self) -> EntryPoints: ... else: @property - def metadata(self) -> Message: - """Return the parsed metadata for this Distribution. - - The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. - """ - + def metadata(self) -> Message: ... @property def entry_points(self) -> list[EntryPoint]: ... @property - def version(self) -> str: - """Return the 'Version' metadata for the distribution package.""" - + def version(self) -> str: ... @property - def files(self) -> list[PackagePath] | None: - """Files in this distribution. - - :return: List of PackagePath for this distribution or None - - Result is `None` if the metadata file that enumerates files - (i.e. RECORD for dist-info, or installed-files.txt or - SOURCES.txt for egg-info) is missing. - Result may be empty if the metadata exists but is empty. - - Custom providers are recommended to provide a "RECORD" file (in - ``read_text``) or override this property to allow for callers to be - able to resolve filenames provided by the package. - """ - + def files(self) -> list[PackagePath] | None: ... @property - def requires(self) -> list[str] | None: - """Generated requirements specified for this Distribution""" + def requires(self) -> list[str] | None: ... if sys.version_info >= (3, 10): @property - def name(self) -> str: - """Return the 'Name' metadata for the distribution package.""" + def name(self) -> str: ... if sys.version_info >= (3, 13): @property def origin(self) -> types.SimpleNamespace | None: ... class DistributionFinder(MetaPathFinder): - """ - A MetaPathFinder capable of discovering installed distributions. - - Custom providers should implement this interface in order to - supply metadata. - """ - class Context: - """ - Keyword arguments presented by the caller to - ``distributions()`` or ``Distribution.discover()`` - to narrow the scope of a search for distributions - in all DistributionFinders. - - Each DistributionFinder may expect any parameters - and should attempt to honor the canonical - parameters defined below when appropriate. - - This mechanism gives a custom provider a means to - solicit additional details from the caller beyond - "name" and "path" when searching distributions. - For example, imagine a provider that exposes suites - of packages in either a "public" or "private" ``realm``. 
- A caller may wish to query only for distributions in - a particular realm and could call - ``distributions(realm="private")`` to signal to the - custom provider to only include distributions from that - realm. - """ - name: str | None def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... @property - def path(self) -> list[str]: - """ - The sequence of directory path that a distribution finder - should search. - - Typically refers to Python installed package paths such as - "site-packages" directories and defaults to ``sys.path``. - """ + def path(self) -> list[str]: ... @abc.abstractmethod - def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching the ``context``, - a DistributionFinder.Context instance. - """ + def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... class MetadataPathFinder(DistributionFinder): @classmethod - def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. - """ + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... if sys.version_info >= (3, 11): @classmethod def invalidate_caches(cls) -> None: ... @@ -654,135 +281,40 @@ class MetadataPathFinder(DistributionFinder): class PathDistribution(Distribution): _path: _SimplePath - def __init__(self, path: _SimplePath) -> None: - """Construct a distribution. - - :param path: SimplePath indicating the metadata directory. - """ - - def read_text(self, filename: StrPath) -> str | None: - """Attempt to load metadata file given by the name. - - Python distribution metadata is organized by blobs of text - typically represented as "files" in the metadata directory - (e.g. package-1.0.dist-info). These files include things - like: - - - METADATA: The distribution metadata including fields - like Name and Version and Description. - - entry_points.txt: A series of entry points as defined in - `the entry points spec `_. - - RECORD: A record of files according to - `this recording spec `_. - - A package may provide any set of files, including those - not listed here or none at all. - - :param filename: The name of the file in the distribution info. - :return: The text if found, otherwise None. - """ - + def __init__(self, path: _SimplePath) -> None: ... + def read_text(self, filename: StrPath) -> str | None: ... def locate_file(self, path: StrPath) -> _SimplePath: ... -def distribution(distribution_name: str) -> Distribution: - """Get the ``Distribution`` instance for the named package. - - :param distribution_name: The name of the distribution package as a string. - :return: A ``Distribution`` instance (or subclass thereof). - """ - +def distribution(distribution_name: str) -> Distribution: ... @overload -def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: - """Get all ``Distribution`` instances in the current environment. - - :return: An iterable of ``Distribution`` instances. - """ - +def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... 
@overload def distributions( *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... if sys.version_info >= (3, 10): - def metadata(distribution_name: str) -> PackageMetadata: - """Get the metadata for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: A PackageMetadata containing the parsed metadata. - """ + def metadata(distribution_name: str) -> PackageMetadata: ... else: - def metadata(distribution_name: str) -> Message: - """Get the metadata for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: An email.Message containing the parsed metadata. - """ + def metadata(distribution_name: str) -> Message: ... if sys.version_info >= (3, 12): def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... - ) -> EntryPoints: - """Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - :return: EntryPoints for all installed packages. - """ + ) -> EntryPoints: ... elif sys.version_info >= (3, 10): @overload - def entry_points() -> SelectableGroups: - """Return EntryPoint objects for all installed packages. - - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). - - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. - - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. - - :return: EntryPoints or SelectableGroups for all installed packages. - """ - + def entry_points() -> SelectableGroups: ... @overload def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... ) -> EntryPoints: ... else: - def entry_points() -> dict[str, list[EntryPoint]]: - """Return EntryPoint objects for all installed packages. - - :return: EntryPoint objects for all installed packages. - """ - -def version(distribution_name: str) -> str: - """Get the version string for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: The version string for the package as defined in the package's - "Version" metadata key. - """ - -def files(distribution_name: str) -> list[PackagePath] | None: - """Return a list of files for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: List of files composing the distribution. - """ - -def requires(distribution_name: str) -> list[str] | None: - """ - Return a list of requirements for the named package. + def entry_points() -> dict[str, list[EntryPoint]]: ... - :return: An iterable of requirements, suitable for - packaging.requirement.Requirement. - """ +def version(distribution_name: str) -> str: ... +def files(distribution_name: str) -> list[PackagePath] | None: ... +def requires(distribution_name: str) -> list[str] | None: ... 
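The importlib.metadata hunk above likewise strips all docstrings. A short, hedged sketch of how that API is commonly exercised under these stubs; it assumes Python 3.10+ for the keyword form of entry_points(), and "pip" is only an example distribution that may not be present in a given environment:

from importlib.metadata import PackageNotFoundError, entry_points, metadata, requires, version

try:
    print(version("pip"))              # the "Version" metadata field
    print(metadata("pip")["Name"])     # parsed core metadata mapping
    print(requires("pip"))             # list[str] | None of requirement strings
except PackageNotFoundError:
    print("pip is not installed in this environment")

# Keyword selection of entry points (Python 3.10+); each EntryPoint exposes
# .name, .value and .group, and .load() imports the referenced object.
for ep in entry_points(group="console_scripts"):
    print(ep.name, "->", ep.value)
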
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi index 530e2463fcc2e..9f791dab254fd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -14,31 +14,19 @@ class PackageMetadata(Protocol): def __getitem__(self, key: str) -> str: ... def __iter__(self) -> Iterator[str]: ... @property - def json(self) -> dict[str, str | list[str]]: - """ - A JSON-compatible form of the metadata. - """ - + def json(self) -> dict[str, str | list[str]]: ... @overload - def get_all(self, name: str, failobj: None = None) -> list[Any] | None: - """Helper for @overload to raise when called.""" - + def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ... @overload def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ... if sys.version_info >= (3, 12): @overload - def get(self, name: str, failobj: None = None) -> str | None: - """Helper for @overload to raise when called.""" - + def get(self, name: str, failobj: None = None) -> str | None: ... @overload def get(self, name: str, failobj: _T) -> _T | str: ... if sys.version_info >= (3, 13): class SimplePath(Protocol): - """ - A minimal subset of pathlib.Path required by Distribution. - """ - def joinpath(self, other: StrPath, /) -> SimplePath: ... def __truediv__(self, other: StrPath, /) -> SimplePath: ... # Incorrect at runtime @@ -50,10 +38,6 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 12): class SimplePath(Protocol[_T_co]): - """ - A minimal subset of pathlib.Path required by PathDistribution. - """ - # At runtime this is defined as taking `str | _T`, but that causes trouble. # See #11436. def joinpath(self, other: str, /) -> _T_co: ... @@ -65,10 +49,6 @@ elif sys.version_info >= (3, 12): else: class SimplePath(Protocol): - """ - A minimal subset of pathlib.Path required by PathDistribution. - """ - # Actually takes only self at runtime, but that's clearly wrong def joinpath(self, other: Any, /) -> SimplePath: ... # Not defined as a property at runtime, but it should be diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi index 029111a3c7f3c..4a6c73921535a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi @@ -1,10 +1,3 @@ -""" -Compatibility shim for .resources.readers as found on Python 3.10. - -Consumers that can rely on Python 3.11 should use the other -module directly. -""" - # On py311+, things are actually defined in importlib.resources.readers, # and re-exported here, # but doing it this way leads to less code duplication for us @@ -39,13 +32,7 @@ if sys.version_info >= (3, 10): class FileReader(abc.TraversableResources): path: pathlib.Path def __init__(self, loader: FileLoader) -> None: ... - def resource_path(self, resource: StrPath) -> str: - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - + def resource_path(self, resource: StrPath) -> str: ... def files(self) -> pathlib.Path: ... class ZipReader(abc.TraversableResources): @@ -53,22 +40,10 @@ if sys.version_info >= (3, 10): archive: str def __init__(self, loader: zipimporter, module: str) -> None: ... def open_resource(self, resource: str) -> BufferedReader: ... 
- def is_resource(self, path: StrPath) -> bool: - """ - Workaround for `zipfile.Path.is_file` returning true - for non-existent paths. - """ - + def is_resource(self, path: StrPath) -> bool: ... def files(self) -> zipfile.Path: ... class MultiplexedPath(abc.Traversable): - """ - Given a series of Traversable objects, implement a merged - version of the interface across all objects. Useful for - namespace packages which may be multihomed at a single - name. - """ - def __init__(self, *paths: abc.Traversable) -> None: ... def iterdir(self) -> Iterator[abc.Traversable]: ... def read_bytes(self) -> NoReturn: ... @@ -93,11 +68,5 @@ if sys.version_info >= (3, 10): class NamespaceReader(abc.TraversableResources): path: MultiplexedPath def __init__(self, namespace_path: Iterable[str]) -> None: ... - def resource_path(self, resource: str) -> str: - """ - Return the file system path to prevent - `resources.path()` from creating a temporary - copy. - """ - + def resource_path(self, resource: str) -> str: ... def files(self) -> MultiplexedPath: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi index fb0ac53ac1cf3..28adc37da4a42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi @@ -1,12 +1,3 @@ -""" -Read resources contained within a package. - -This codebase is shared between importlib.resources in the stdlib -and importlib_resources in PyPI. See -https://github.com/python/importlib_metadata/wiki/Development-Methodology -for more detail. -""" - import os import sys from collections.abc import Iterator @@ -14,7 +5,7 @@ from contextlib import AbstractContextManager from pathlib import Path from types import ModuleType from typing import Any, BinaryIO, Literal, TextIO -from typing_extensions import TypeAlias +from typing_extensions import TypeAlias, deprecated if sys.version_info >= (3, 11): from importlib.resources.abc import Traversable @@ -67,62 +58,27 @@ if sys.version_info >= (3, 13): ) else: - def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource.""" - - def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: - """Return a file-like object opened for text reading of the resource.""" - - def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource.""" - - def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: - """Return the decoded string of the resource. - - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ - - def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: - """A context manager providing a file path object to the resource. - - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). - """ - - def is_resource(package: Package, name: str) -> bool: - """True if `name` is a resource inside `package`. - - Directories are *not* resources. 
- """ - - def contents(package: Package) -> Iterator[str]: - """Return an iterable of entries in `package`. - - Note that not all entries are resources. Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ + def open_binary(package: Package, resource: Resource) -> BinaryIO: ... + def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... + def read_binary(package: Package, resource: Resource) -> bytes: ... + def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: ... + def is_resource(package: Package, name: str) -> bool: ... + if sys.version_info >= (3, 11): + @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") + def contents(package: Package) -> Iterator[str]: ... + else: + def contents(package: Package) -> Iterator[str]: ... if sys.version_info >= (3, 11): from importlib.resources._common import as_file as as_file else: - def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: - """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... if sys.version_info >= (3, 11): from importlib.resources._common import files as files else: - def files(package: Package) -> Traversable: - """ - Get a Traversable resource from a package - """ + def files(package: Package) -> Traversable: ... if sys.version_info >= (3, 11): from importlib.resources.abc import ResourceReader as ResourceReader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi index c4250d4e27ae1..11a93ca82d8df 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi @@ -17,39 +17,18 @@ if sys.version_info >= (3, 11): def package_to_anchor( func: Callable[[Anchor | None], Traversable], - ) -> Callable[[Anchor | None, Anchor | None], Traversable]: - """ - Replace 'package' parameter as 'anchor' and warn about the change. - - Other errors should fall through. - - >>> files('a', 'b') - Traceback (most recent call last): - TypeError: files() takes from 0 to 1 positional arguments but 2 were given - - Remove this compatibility in Python 3.14. - """ - + ) -> Callable[[Anchor | None, Anchor | None], Traversable]: ... @overload - def files(anchor: Anchor | None = None) -> Traversable: - """ - Get a Traversable resource for an anchor. - """ - + def files(anchor: Anchor | None = None) -> Traversable: ... @overload @deprecated("Deprecated since Python 3.12; will be removed in Python 3.15. Use `anchor` parameter instead.") def files(package: Anchor | None = None) -> Traversable: ... else: - def files(package: Package) -> Traversable: - """ - Get a Traversable resource from a package - """ + def files(package: Package) -> Traversable: ... + + def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: ... - def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: - """ - Return the package's loader if it's a ResourceReader. 
- """ if sys.version_info >= (3, 12): def resolve(cand: Anchor | None) -> types.ModuleType: ... @@ -57,20 +36,7 @@ if sys.version_info >= (3, 11): def resolve(cand: Package) -> types.ModuleType: ... if sys.version_info < (3, 12): - def get_package(package: Package) -> types.ModuleType: - """Take a package name or module object and return the module. - - Raise an exception if the resolved module is not a package. - """ - - def from_package(package: types.ModuleType) -> Traversable: - """ - Return a Traversable object for the given package. - - """ + def get_package(package: Package) -> types.ModuleType: ... - def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: - """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ + def from_package(package: types.ModuleType) -> Traversable: ... + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi index e25c9a8e3ab06..71e01bcd3d5ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi @@ -1,5 +1,3 @@ -"""Simplified function-based API for importlib.resources""" - import sys # Even though this file is 3.13+ only, Pyright will complain in stubtest for older versions. @@ -11,42 +9,23 @@ if sys.version_info >= (3, 13): from io import TextIOWrapper from pathlib import Path from typing import BinaryIO, Literal, overload - from typing_extensions import Unpack - - def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: - """Open for binary reading the *resource* within *package*.""" + from typing_extensions import Unpack, deprecated + def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: ... @overload def open_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" - ) -> TextIOWrapper: - """Open for text reading the *resource* within *package*.""" - + ) -> TextIOWrapper: ... @overload def open_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> TextIOWrapper: ... - def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: - """Read and return contents of *resource* within *package* as bytes.""" - + def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: ... @overload def read_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" - ) -> str: - """Read and return contents of *resource* within *package* as str.""" - + ) -> str: ... @overload def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: ... - def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: - """Return the path to the *resource* as an actual file system path.""" - - def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: - """Return ``True`` if there is a resource named *name* in the package, - - Otherwise returns ``False``. - """ - - def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: - """Return an iterable over the named resources within the package. - - The iterable returns :class:`str` resources (e.g. files). 
- The iterable does not recurse into subdirectories. - """ + def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: ... + def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: ... + @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") + def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi index af8faaea598a3..80d92a608604e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi @@ -6,124 +6,47 @@ from typing import IO, Any, Literal, Protocol, overload, runtime_checkable if sys.version_info >= (3, 11): class ResourceReader(metaclass=ABCMeta): - """Abstract base class for loaders to provide resource reading support.""" - @abstractmethod - def open_resource(self, resource: str) -> IO[bytes]: - """Return an opened, file-like object for binary reading. - - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ - + def open_resource(self, resource: str) -> IO[bytes]: ... @abstractmethod - def resource_path(self, resource: str) -> str: - """Return the file system path to the specified resource. - - The 'resource' argument is expected to represent only a file name. - If the resource does not exist on the file system, raise - FileNotFoundError. - """ - + def resource_path(self, resource: str) -> str: ... @abstractmethod - def is_resource(self, path: str) -> bool: - """Return True if the named 'path' is a resource. - - Files are resources, directories are not. - """ - + def is_resource(self, path: str) -> bool: ... @abstractmethod - def contents(self) -> Iterator[str]: - """Return an iterable of entries in `package`.""" + def contents(self) -> Iterator[str]: ... @runtime_checkable class Traversable(Protocol): - """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - - Any exceptions that occur when accessing the backing resource - may propagate unaltered. - """ - @abstractmethod - def is_dir(self) -> bool: - """ - Return True if self is a directory - """ - + def is_dir(self) -> bool: ... @abstractmethod - def is_file(self) -> bool: - """ - Return True if self is a file - """ - + def is_file(self) -> bool: ... @abstractmethod - def iterdir(self) -> Iterator[Traversable]: - """ - Yield Traversable objects in self - """ - + def iterdir(self) -> Iterator[Traversable]: ... @abstractmethod - def joinpath(self, *descendants: str) -> Traversable: - """ - Return Traversable resolved with any descendants applied. + def joinpath(self, *descendants: str) -> Traversable: ... - Each descendant should be a path segment relative to self - and each may contain multiple levels separated by - ``posixpath.sep`` (``/``). - """ # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @overload @abstractmethod - def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: - """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). 
- - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ - + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... @property @abstractmethod - def name(self) -> str: - """ - The base name of this object without any parent references. - """ - - def __truediv__(self, child: str, /) -> Traversable: - """ - Return Traversable child in self - """ - + def name(self) -> str: ... + def __truediv__(self, child: str, /) -> Traversable: ... @abstractmethod - def read_bytes(self) -> bytes: - """ - Read contents of self as bytes - """ - + def read_bytes(self) -> bytes: ... @abstractmethod - def read_text(self, encoding: str | None = None) -> str: - """ - Read contents of self as text - """ + def read_text(self, encoding: str | None = None) -> str: ... class TraversableResources(ResourceReader): - """ - The required interface for providing traversable - resources. - """ - @abstractmethod - def files(self) -> Traversable: - """Return a Traversable object for the loaded package.""" - + def files(self) -> Traversable: ... def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi index 27ded93f6efcb..c4c758111c2dd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi @@ -1,7 +1,3 @@ -""" -Interface adapters for low-level readers. -""" - import abc import sys from collections.abc import Iterator @@ -13,45 +9,19 @@ if sys.version_info >= (3, 11): from .abc import Traversable, TraversableResources class SimpleReader(abc.ABC): - """ - The minimum, low-level interface required from a resource - provider. - """ - @property @abc.abstractmethod - def package(self) -> str: - """ - The name of the package for which this reader loads resources. - """ - + def package(self) -> str: ... @abc.abstractmethod - def children(self) -> list[SimpleReader]: - """ - Obtain an iterable of SimpleReader for available - child containers (e.g. directories). - """ - + def children(self) -> list[SimpleReader]: ... @abc.abstractmethod - def resources(self) -> list[str]: - """ - Obtain available named resources for this virtual package. - """ - + def resources(self) -> list[str]: ... @abc.abstractmethod - def open_binary(self, resource: str) -> BinaryIO: - """ - Obtain a File-like for a named resource. - """ - + def open_binary(self, resource: str) -> BinaryIO: ... @property def name(self) -> str: ... class ResourceHandle(Traversable, metaclass=abc.ABCMeta): - """ - Handle to a named resource in a ResourceReader. - """ - parent: ResourceContainer def __init__(self, parent: ResourceContainer, name: str) -> None: ... def is_file(self) -> Literal[True]: ... @@ -73,10 +43,6 @@ if sys.version_info >= (3, 11): def joinpath(self, name: Never) -> NoReturn: ... # type: ignore[override] class ResourceContainer(Traversable, metaclass=abc.ABCMeta): - """ - Traversable container for a package's resources via its reader. - """ - reader: SimpleReader def __init__(self, reader: SimpleReader) -> None: ... def is_dir(self) -> Literal[True]: ... 
@@ -87,10 +53,4 @@ if sys.version_info >= (3, 11): def joinpath(self, *descendants: str) -> Traversable: ... class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta): - """ - A TraversableResources based on SimpleReader. Resource providers - may derive from this class to provide the TraversableResources - interface by supplying the SimpleReader interface. - """ - def files(self) -> ResourceContainer: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi index 4c6f2308448f0..58d8c6617082a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi @@ -1,10 +1,3 @@ -""" -Compatibility shim for .resources.simple as found on Python 3.10. - -Consumers that can rely on Python 3.11 should use the other -module directly. -""" - import sys if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi index 73acc3a83a030..05c4d0d1edb30 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi @@ -1,5 +1,3 @@ -"""Utility code for constructing importers, etc.""" - import importlib.machinery import sys import types @@ -23,81 +21,28 @@ if sys.version_info < (3, 12): "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """Decorator to handle selecting the proper module for loaders. - - The decorated function is passed the module to use instead of the module - name. The module passed in to the function is either from sys.modules if - it already exists or is a new module. If the module is new, then __name__ - is set the first argument to the method, __loader__ is set to self, and - __package__ is set accordingly (if self.is_package() is defined) will be set - before it is passed to the decorated function (if self.is_package() does - not work for the module it will be set post-load). - - If an exception is raised and the decorator created the module it is - subsequently removed from sys.modules. - - The decorator assumes that the decorated function takes the module name as - the second argument. - - """ - + def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """Set __loader__ on the returned module. - - This function is deprecated. - - """ - + def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: - """Set __package__ on the returned module. + def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... - This function is deprecated. 
- - """ - -def resolve_name(name: str, package: str | None) -> str: - """Resolve a relative module name to an absolute one.""" - -def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: - """Return the spec for the specified module. - - First, sys.modules is checked to see if the module was already imported. If - so, then sys.modules[name].__spec__ is returned. If that happens to be - set to None, then ValueError is raised. If the module is not in - sys.modules, then sys.meta_path is searched for a suitable spec with the - value of 'path' given to the finders. None is returned if no spec could - be found. - - If the name is for submodule (contains a dot), the parent module is - automatically imported. - - The name and package arguments work the same as importlib.import_module(). - In other words, relative module names (with leading dots) work. - - """ +def resolve_name(name: str, package: str | None) -> str: ... +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... class LazyLoader(Loader): - """A loader that creates a module which defers loading until attribute access.""" - def __init__(self, loader: Loader) -> None: ... @classmethod - def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: - """Construct a callable which returns the eager loader made lazy.""" - - def exec_module(self, module: types.ModuleType) -> None: - """Make the module load lazily.""" + def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: ... + def exec_module(self, module: types.ModuleType) -> None: ... -def source_hash(source_bytes: ReadableBuffer) -> bytes: - """Return the hash of *source_bytes* as used in hash-based pyc files.""" +def source_hash(source_bytes: ReadableBuffer) -> bytes: ... if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi index 08d3d9d299832..55ae61617af7e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi @@ -1,31 +1,3 @@ -"""Get useful information from live Python objects. - -This module encapsulates the interface provided by the internal special -attributes (co_*, im_*, tb_*, etc.) in a friendlier fashion. -It also provides some help for examining source code and class layout. 
- -Here are some of the useful functions provided by this module: - - ismodule(), isclass(), ismethod(), ispackage(), isfunction(), - isgeneratorfunction(), isgenerator(), istraceback(), isframe(), - iscode(), isbuiltin(), isroutine() - check object types - getmembers() - get members of an object that satisfy a given condition - - getfile(), getsourcefile(), getsource() - find an object's source code - getdoc(), getcomments() - get documentation on an object - getmodule() - determine the module that an object came from - getclasstree() - arrange classes so as to represent their hierarchy - - getargvalues(), getcallargs() - get info about function arguments - getfullargspec() - same, with support for Python 3 features - formatargvalues() - format an argument spec - getouterframes(), getinnerframes() - get info about frames - currentframe() - get the current stack frame - stack(), trace() - get info about frames on the stack or in a traceback - - signature() - get a Signature object for the callable -""" - import dis import enum import sys @@ -185,8 +157,6 @@ _V_contra = TypeVar("_V_contra", contravariant=True) class EndOfBlock(Exception): ... class BlockFinder: - """Provide a tokeneater() method to detect the end of a code block.""" - indent: int islambda: bool started: bool @@ -219,11 +189,7 @@ _GetMembersPredicate: TypeAlias = Callable[[Any], bool] _GetMembersReturn: TypeAlias = list[tuple[str, _T]] @overload -def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: - """Return all members of an object as (name, value) pairs sorted by name. - Optionally, only return members that satisfy a given predicate. - """ - +def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... @overload def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload @@ -231,123 +197,45 @@ def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> if sys.version_info >= (3, 11): @overload - def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: - """Return all members of an object as (name, value) pairs sorted by name - without triggering dynamic lookup via the descriptor protocol, - __getattr__ or __getattribute__. Optionally, only return members that - satisfy a given predicate. - - Note: this function may not be able to retrieve all members - that getmembers can fetch (like dynamically created attributes) - and may find members that getmembers can't (like descriptors - that raise AttributeError). It can also return descriptor objects - instead of instance members in some cases. - """ - + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... @overload def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ... 
-def getmodulename(path: StrPath) -> str | None: - """Return the module name for a given file, or None.""" - -def ismodule(object: object) -> TypeIs[ModuleType]: - """Return true if the object is a module.""" - -def isclass(object: object) -> TypeIs[type[Any]]: - """Return true if the object is a class.""" - -def ismethod(object: object) -> TypeIs[MethodType]: - """Return true if the object is an instance method.""" +def getmodulename(path: StrPath) -> str | None: ... +def ismodule(object: object) -> TypeIs[ModuleType]: ... +def isclass(object: object) -> TypeIs[type[Any]]: ... +def ismethod(object: object) -> TypeIs[MethodType]: ... if sys.version_info >= (3, 14): # Not TypeIs because it does not return True for all modules - def ispackage(object: object) -> TypeGuard[ModuleType]: - """Return true if the object is a package.""" - -def isfunction(object: object) -> TypeIs[FunctionType]: - """Return true if the object is a user-defined function. - - Function objects provide these attributes: - __doc__ documentation string - __name__ name with which this function was defined - __qualname__ qualified name of this function - __module__ name of the module the function was defined in or None - __code__ code object containing compiled function bytecode - __defaults__ tuple of any default values for arguments - __globals__ global namespace in which this function was defined - __annotations__ dict of parameter annotations - __kwdefaults__ dict of keyword only parameters with defaults - __dict__ namespace which is supporting arbitrary function attributes - __closure__ a tuple of cells or None - __type_params__ tuple of type parameters - """ + def ispackage(object: object) -> TypeGuard[ModuleType]: ... + +def isfunction(object: object) -> TypeIs[FunctionType]: ... if sys.version_info >= (3, 12): - def markcoroutinefunction(func: _F) -> _F: - """ - Decorator to ensure callable is recognised as a coroutine function. - """ + def markcoroutinefunction(func: _F) -> _F: ... @overload -def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: - """Return true if the object is a user-defined generator function. - - Generator function objects provide the same attributes as functions. - See help(isfunction) for a list of attributes. - """ - +def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: ... @overload def isgeneratorfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... @overload def isgeneratorfunction(obj: object) -> TypeGuard[Callable[..., GeneratorType[Any, Any, Any]]]: ... @overload -def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: - """Return true if the object is a coroutine function. - - Coroutine functions are normally defined with "async def" syntax, but may - be marked via markcoroutinefunction. - """ - +def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... @overload def iscoroutinefunction(obj: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... @overload def iscoroutinefunction(obj: Callable[_P, object]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, Any]]]: ... @overload def iscoroutinefunction(obj: object) -> TypeGuard[Callable[..., CoroutineType[Any, Any, Any]]]: ... -def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: - """Return true if the object is a generator. 
- - Generator objects provide these attributes: - gi_code code object - gi_frame frame object or possibly None once the generator has - been exhausted - gi_running set to 1 when generator is executing, 0 otherwise - gi_yieldfrom object being iterated by yield from or None - - __iter__() defined to support iteration over container - close() raises a new GeneratorExit exception inside the - generator to terminate the iteration - send() resumes the generator and "sends" a value that becomes - the result of the current yield-expression - throw() used to raise an exception inside the generator - """ - -def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: - """Return true if the object is a coroutine.""" - -def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: - """Return true if object can be passed to an ``await`` expression.""" - +def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: ... +def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: ... +def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: ... @overload -def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: - """Return true if the object is an asynchronous generator function. - - Asynchronous generator functions are defined with "async def" - syntax and have "yield" expressions in their body. - """ - +def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: ... @overload def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... @overload @@ -360,80 +248,14 @@ class _SupportsSet(Protocol[_T_contra, _V_contra]): class _SupportsDelete(Protocol[_T_contra]): def __delete__(self, instance: _T_contra, /) -> None: ... -def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: - """Return true if the object is an asynchronous generator.""" - -def istraceback(object: object) -> TypeIs[TracebackType]: - """Return true if the object is a traceback. - - Traceback objects provide these attributes: - tb_frame frame object at this level - tb_lasti index of last attempted instruction in bytecode - tb_lineno current line number in Python source code - tb_next next inner traceback object (called by this level) - """ - -def isframe(object: object) -> TypeIs[FrameType]: - """Return true if the object is a frame object. - - Frame objects provide these attributes: - f_back next outer frame object (this frame's caller) - f_builtins built-in namespace seen by this frame - f_code code object being executed in this frame - f_globals global namespace seen by this frame - f_lasti index of last attempted instruction in bytecode - f_lineno current line number in Python source code - f_locals local namespace seen by this frame - f_trace tracing function for this frame, or None - f_trace_lines is a tracing event triggered for each source line? - f_trace_opcodes are per-opcode events being requested? - - clear() used to clear all references to local variables - """ - -def iscode(object: object) -> TypeIs[CodeType]: - """Return true if the object is a code object. 
- - Code objects provide these attributes: - co_argcount number of arguments (not including *, ** args - or keyword only arguments) - co_code string of raw compiled bytecode - co_cellvars tuple of names of cell variables - co_consts tuple of constants used in the bytecode - co_filename name of file in which this code object was created - co_firstlineno number of first line in Python source code - co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg - | 16=nested | 32=generator | 64=nofree | 128=coroutine - | 256=iterable_coroutine | 512=async_generator - | 0x4000000=has_docstring - co_freevars tuple of names of free variables - co_posonlyargcount number of positional only arguments - co_kwonlyargcount number of keyword only arguments (not including ** arg) - co_lnotab encoded mapping of line numbers to bytecode indices - co_name name with which this code object was defined - co_names tuple of names other than arguments and function locals - co_nlocals number of local variables - co_stacksize virtual machine stack space required - co_varnames tuple of names of arguments and local variables - co_qualname fully qualified function name - - co_lines() returns an iterator that yields successive bytecode ranges - co_positions() returns an iterator of source code positions for each bytecode instruction - replace() returns a copy of the code object with a new values - """ - -def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: - """Return true if the object is a built-in function or method. - - Built-in functions and methods provide these attributes: - __doc__ documentation string - __name__ original name of this function or method - __self__ instance to which a method is bound, or None - """ +def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: ... +def istraceback(object: object) -> TypeIs[TracebackType]: ... +def isframe(object: object) -> TypeIs[FrameType]: ... +def iscode(object: object) -> TypeIs[CodeType]: ... +def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: ... if sys.version_info >= (3, 11): - def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: - """Return true if the object is a method wrapper.""" + def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: ... def isroutine( object: object, @@ -446,52 +268,12 @@ def isroutine( | WrapperDescriptorType | MethodDescriptorType | ClassMethodDescriptorType -]: - """Return true if the object is any kind of function or method.""" - -def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: - """Return true if the object is a method descriptor. - - But not if ismethod() or isclass() or isfunction() are true. - - This is new in Python 2.2, and, for example, is true of int.__add__. - An object passing this test has a __get__ attribute, but not a - __set__ attribute or a __delete__ attribute. Beyond that, the set - of attributes varies; __name__ is usually sensible, and __doc__ - often is. - - Methods implemented via descriptors that also pass one of the other - tests return false from the ismethoddescriptor() test, simply because - the other tests promise more -- you can, e.g., count on having the - __func__ attribute (etc) when an object passes ismethod(). - """ - -def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: - """Return true if the object is a member descriptor. - - Member descriptors are specialized descriptors defined in extension - modules. 
- """ - -def isabstract(object: object) -> bool: - """Return true if the object is an abstract base class (ABC).""" - -def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: - """Return true if the object is a getset descriptor. - - getset descriptors are specialized descriptors defined in extension - modules. - """ - -def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: - """Return true if the object is a data descriptor. - - Data descriptors have a __set__ or a __delete__ attribute. Examples are - properties (defined in Python) and getsets and members (defined in C). - Typically, data descriptors will also have __name__ and __doc__ attributes - (properties, getsets, and members have both of these attributes), but this - is not guaranteed. - """ +]: ... +def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: ... +def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: ... +def isabstract(object: object) -> bool: ... +def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: ... +def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: ... # # Retrieving source code @@ -500,84 +282,26 @@ _SourceObjectType: TypeAlias = ( ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] ) -def findsource(object: _SourceObjectType) -> tuple[list[str], int]: - """Return the entire source file and starting line number for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a list of all the lines - in the file and the line number indexes a line in that list. An OSError - is raised if the source code cannot be retrieved. - """ - -def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: - """Return an absolute path to the source or compiled file for an object. - - The idea is for each object to have a unique origin, so this routine - normalizes the result as much as possible. - """ +def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... # Special-case the two most common input types here # to avoid the annoyingly vague `Sequence[str]` return type @overload -def getblock(lines: list[str]) -> list[str]: - """Extract the block of code at the top of the given list of lines.""" - +def getblock(lines: list[str]) -> list[str]: ... @overload def getblock(lines: tuple[str, ...]) -> tuple[str, ...]: ... @overload def getblock(lines: Sequence[str]) -> Sequence[str]: ... -def getdoc(object: object) -> str | None: - """Get the documentation string for an object. - - All tabs are expanded to spaces. To clean up docstrings that are - indented to line up with blocks of code, any whitespace than can be - uniformly removed from the second line onwards is removed. - """ - -def getcomments(object: object) -> str | None: - """Get lines of comments immediately preceding an object's source code. - - Returns None when source can't be found. 
- """ - -def getfile(object: _SourceObjectType) -> str: - """Work out which source or compiled file an object was defined in.""" - -def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: - """Return the module an object was defined in, or None if not found.""" - -def getsourcefile(object: _SourceObjectType) -> str | None: - """Return the filename that can be used to locate an object's source. - Return None if no way can be identified to get the source. - """ - -def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: - """Return a list of source lines and starting line number for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a list of the lines - corresponding to the object and the line number indicates where in the - original source file the first line of code was found. An OSError is - raised if the source code cannot be retrieved. - """ - -def getsource(object: _SourceObjectType) -> str: - """Return the text of the source code for an object. - - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a single string. An - OSError is raised if the source code cannot be retrieved. - """ - -def cleandoc(doc: str) -> str: - """Clean up indentation from docstrings. - - Any whitespace that can be uniformly removed from the second line - onwards is removed. - """ - -def indentsize(line: str) -> int: - """Return the indent size, in spaces, at the start of a line of text.""" +def getdoc(object: object) -> str | None: ... +def getcomments(object: object) -> str | None: ... +def getfile(object: _SourceObjectType) -> str: ... +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... +def getsourcefile(object: _SourceObjectType) -> str | None: ... +def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getsource(object: _SourceObjectType) -> str: ... +def cleandoc(doc: str) -> str: ... +def indentsize(line: str) -> int: ... _IntrospectableCallable: TypeAlias = Callable[..., Any] @@ -593,8 +317,7 @@ if sys.version_info >= (3, 14): locals: Mapping[str, Any] | None = None, eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 - ) -> Signature: - """Get a signature object for the passed callable.""" + ) -> Signature: ... elif sys.version_info >= (3, 10): def signature( @@ -604,71 +327,27 @@ elif sys.version_info >= (3, 10): globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None, eval_str: bool = False, - ) -> Signature: - """Get a signature object for the passed callable.""" + ) -> Signature: ... else: - def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: - """Get a signature object for the passed callable.""" - -class _void: - """A private marker - used in Parameter & Signature.""" + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... -class _empty: - """Marker object for Signature.empty and Parameter.empty.""" +class _void: ... +class _empty: ... class Signature: - """A Signature object represents the overall signature of a function. - It stores a Parameter object for each parameter accepted by the - function, as well as information specific to the function itself. 
- - A Signature object has the following public attributes and methods: - - * parameters : OrderedDict - An ordered mapping of parameters' names to the corresponding - Parameter objects (keyword-only arguments are in the same order - as listed in `code.co_varnames`). - * return_annotation : object - The annotation for the return type of the function if specified. - If the function has no annotation for its return type, this - attribute is set to `Signature.empty`. - * bind(*args, **kwargs) -> BoundArguments - Creates a mapping from positional and keyword arguments to - parameters. - * bind_partial(*args, **kwargs) -> BoundArguments - Creates a partial mapping from positional and keyword arguments - to parameters (simulating 'functools.partial' behavior.) - """ - __slots__ = ("_return_annotation", "_parameters") def __init__( self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True - ) -> None: - """Constructs Signature from the given list of Parameter - objects and 'return_annotation'. All arguments are optional. - """ + ) -> None: ... empty = _empty @property def parameters(self) -> types.MappingProxyType[str, Parameter]: ... @property def return_annotation(self) -> Any: ... - def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: - """Get a BoundArguments object, that maps the passed `args` - and `kwargs` to the function's signature. Raises `TypeError` - if the passed arguments can not be bound. - """ - - def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: - """Get a BoundArguments object, that partially maps the - passed `args` and `kwargs` to the function's signature. - Raises `TypeError` if the passed arguments can not be bound. - """ - - def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: - """Creates a customized copy of the Signature. - Pass 'parameters' and/or 'return_annotation' arguments - to override them in the new copy. - """ + def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... __replace__ = replace if sys.version_info >= (3, 14): @classmethod @@ -681,8 +360,7 @@ class Signature: locals: Mapping[str, Any] | None = None, eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 - ) -> Self: - """Constructs Signature for the given callable object.""" + ) -> Self: ... elif sys.version_info >= (3, 10): @classmethod def from_callable( @@ -693,35 +371,14 @@ class Signature: globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None, eval_str: bool = False, - ) -> Self: - """Constructs Signature for the given callable object.""" + ) -> Self: ... else: @classmethod - def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: - """Constructs Signature for the given callable object.""" + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... if sys.version_info >= (3, 14): - def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: - """Create a string representation of the Signature object. - - If *max_width* integer is passed, - signature will try to fit into the *max_width*. - If signature is longer than *max_width*, - all parameters will be on separate lines. 
- - If *quote_annotation_strings* is False, annotations - in the signature are displayed without opening and closing quotation - marks. This is useful when the signature was created with the - STRING format or when ``from __future__ import annotations`` was used. - """ + def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: ... elif sys.version_info >= (3, 13): - def format(self, *, max_width: int | None = None) -> str: - """Create a string representation of the Signature object. - - If *max_width* integer is passed, - signature will try to fit into the *max_width*. - If signature is longer than *max_width*, - all parameters will be on separate lines. - """ + def format(self, *, max_width: int | None = None) -> str: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -735,56 +392,10 @@ elif sys.version_info >= (3, 10): globals: Mapping[str, Any] | None = None, # value types depend on the key locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, - ) -> dict[str, AnnotationForm]: # values are type expressions - """Compute the annotations dict for an object. - - obj may be a callable, class, or module. - Passing in an object of any other type raises TypeError. - - Returns a dict. get_annotations() returns a new dict every time - it's called; calling it twice on the same object will return two - different but equivalent dicts. - - This function handles several details for you: - - * If eval_str is true, values of type str will - be un-stringized using eval(). This is intended - for use with stringized annotations - ("from __future__ import annotations"). - * If obj doesn't have an annotations dict, returns an - empty dict. (Functions and methods always have an - annotations dict; classes, modules, and other types of - callables may not.) - * Ignores inherited annotations on classes. If a class - doesn't have its own annotations dict, returns an empty dict. - * All accesses to object members and dict values are done - using getattr() and dict.get() for safety. - * Always, always, always returns a freshly-created dict. - - eval_str controls whether or not values of type str are replaced - with the result of calling eval() on those values: - - * If eval_str is true, eval() is called on values of type str. - * If eval_str is false (the default), values of type str are unchanged. - - globals and locals are passed in to eval(); see the documentation - for eval() for more information. If either globals or locals is - None, this function may replace that value with a context-specific - default, contingent on type(obj): - - * If obj is a module, globals defaults to obj.__dict__. - * If obj is a class, globals defaults to - sys.modules[obj.__module__].__dict__ and locals - defaults to the obj class namespace. - * If obj is a callable, globals defaults to obj.__globals__, - although if obj is a wrapped function (using - functools.update_wrapper()) it is first unwrapped. - """ + ) -> dict[str, AnnotationForm]: ... # values are type expressions # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): - """An enumeration.""" - POSITIONAL_ONLY = 0 POSITIONAL_OR_KEYWORD = 1 VAR_POSITIONAL = 2 @@ -802,47 +413,10 @@ if sys.version_info >= (3, 12): def getasyncgenstate( agen: AsyncGenerator[Any, Any], - ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: - """Get current state of an asynchronous generator object. 
- - Possible states are: - AGEN_CREATED: Waiting to start execution. - AGEN_RUNNING: Currently being executed by the interpreter. - AGEN_SUSPENDED: Currently suspended at a yield expression. - AGEN_CLOSED: Execution has completed. - """ - - def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: - """ - Get the mapping of asynchronous generator local variables to their current - values. - - A dict is returned, with the keys the local variable names and values the - bound values. - """ + ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: ... + def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: ... class Parameter: - """Represents a parameter in a function signature. - - Has the following public attributes: - - * name : str - The name of the parameter as a string. - * default : object - The default value for the parameter if specified. If the - parameter has no default value, this attribute is set to - `Parameter.empty`. - * annotation - The annotation for the parameter if specified. If the - parameter has no annotation, this attribute is set to - `Parameter.empty`. - * kind : str - Describes how argument values are bound to the parameter. - Possible values: `Parameter.POSITIONAL_ONLY`, - `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, - `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. - """ - __slots__ = ("_name", "_kind", "_default", "_annotation") def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... empty = _empty @@ -867,8 +441,7 @@ class Parameter: kind: _ParameterKind | type[_void] = ..., default: Any = ..., annotation: Any = ..., - ) -> Self: - """Creates a customized copy of the Parameter.""" + ) -> Self: ... if sys.version_info >= (3, 13): __replace__ = replace @@ -876,22 +449,6 @@ class Parameter: def __hash__(self) -> int: ... class BoundArguments: - """Result of `Signature.bind` call. Holds the mapping of arguments - to the function's parameters. - - Has the following public attributes: - - * arguments : dict - An ordered mutable mapping of parameters' names to arguments' values. - Does not contain arguments' default values. - * signature : Signature - The Signature object that created this instance. - * args : tuple - Tuple of positional arguments values. - * kwargs : dict - Dict of keyword arguments values. - """ - __slots__ = ("arguments", "_signature", "__weakref__") arguments: OrderedDict[str, Any] @property @@ -901,16 +458,7 @@ class BoundArguments: @property def signature(self) -> Signature: ... def __init__(self, signature: Signature, arguments: OrderedDict[str, Any]) -> None: ... - def apply_defaults(self) -> None: - """Set default values for missing arguments. - - For variable-positional arguments (*args) the default is an - empty tuple. - - For variable-keyword arguments (**kwargs) the default is an - empty dict. - """ - + def apply_defaults(self) -> None: ... def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -920,71 +468,28 @@ class BoundArguments: _ClassTreeItem: TypeAlias = list[tuple[type, ...]] | list[_ClassTreeItem] -def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: - """Arrange the given list of classes into a hierarchy of nested lists. - - Where a nested list appears, it contains classes derived from the class - whose entry immediately precedes the list. Each entry is a 2-tuple - containing a class and a tuple of its base classes. 
If the 'unique' - argument is true, exactly one entry appears in the returned structure - for each class in the given list. Otherwise, classes using multiple - inheritance and their descendants will appear multiple times. - """ - -def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: - """Recursive helper function for getclasstree().""" +def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: ... +def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: ... class Arguments(NamedTuple): - """Arguments(args, varargs, varkw)""" - args: list[str] varargs: str | None varkw: str | None -def getargs(co: CodeType) -> Arguments: - """Get information about the arguments accepted by a code object. - - Three things are returned: (args, varargs, varkw), where - 'args' is the list of argument names. Keyword-only arguments are - appended. 'varargs' and 'varkw' are the names of the * and ** - arguments or None. - """ +def getargs(co: CodeType) -> Arguments: ... if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.0; removed in Python 3.11.") class ArgSpec(NamedTuple): - """ArgSpec(args, varargs, keywords, defaults)""" - args: list[str] varargs: str | None keywords: str | None defaults: tuple[Any, ...] @deprecated("Deprecated since Python 3.0; removed in Python 3.11. Use `inspect.signature()` instead.") - def getargspec(func: object) -> ArgSpec: - """Get the names and default values of a function's parameters. - - A tuple of four things is returned: (args, varargs, keywords, defaults). - 'args' is a list of the argument names, including keyword-only argument names. - 'varargs' and 'keywords' are the names of the * and ** parameters or None. - 'defaults' is an n-tuple of the default values of the last n parameters. - - This function is deprecated, as it does not support annotations or - keyword-only parameters and will raise ValueError if either is present - on the supplied callable. - - For a more structured introspection API, use inspect.signature() instead. - - Alternatively, use getfullargspec() for an API with a similar namedtuple - based interface, but full support for annotations and keyword-only - parameters. - - Deprecated since Python 3.5, use `inspect.getfullargspec()`. - """ + def getargspec(func: object) -> ArgSpec: ... class FullArgSpec(NamedTuple): - """FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)""" - args: list[str] varargs: str | None varkw: str | None @@ -993,39 +498,15 @@ class FullArgSpec(NamedTuple): kwonlydefaults: dict[str, Any] | None annotations: dict[str, Any] -def getfullargspec(func: object) -> FullArgSpec: - """Get the names and default values of a callable object's parameters. - - A tuple of seven things is returned: - (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations). - 'args' is a list of the parameter names. - 'varargs' and 'varkw' are the names of the * and ** parameters or None. - 'defaults' is an n-tuple of the default values of the last n parameters. - 'kwonlyargs' is a list of keyword-only parameter names. - 'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults. - 'annotations' is a dictionary mapping parameter names to annotations. 
- - Notable differences from inspect.signature(): - - the "self" parameter is always reported, even for bound methods - - wrapper chains defined by __wrapped__ *not* unwrapped automatically - """ +def getfullargspec(func: object) -> FullArgSpec: ... class ArgInfo(NamedTuple): - """ArgInfo(args, varargs, keywords, locals)""" - args: list[str] varargs: str | None keywords: str | None locals: dict[str, Any] -def getargvalues(frame: FrameType) -> ArgInfo: - """Get information about arguments passed into a particular frame. - - A tuple of four things is returned: (args, varargs, varkw, locals). - 'args' is a list of the argument names. - 'varargs' and 'varkw' are the names of the * and ** arguments or None. - 'locals' is the locals dictionary of the given frame. - """ +def getargvalues(frame: FrameType) -> ArgInfo: ... if sys.version_info >= (3, 14): def formatannotation(annotation: object, base_module: str | None = None, *, quote_annotation_strings: bool = True) -> str: ... @@ -1053,18 +534,7 @@ if sys.version_info < (3, 11): formatvalue: Callable[[Any], str] = ..., formatreturns: Callable[[Any], str] = ..., formatannotation: Callable[[Any], str] = ..., - ) -> str: - """Format an argument spec from the values returned by getfullargspec. - - The first seven arguments are (args, varargs, varkw, defaults, - kwonlyargs, kwonlydefaults, annotations). The other five arguments - are the corresponding optional formatting functions that are called to - turn names and values into strings. The last argument is an optional - function to format the sequence of arguments. - - Deprecated since Python 3.5: use the `signature` function and `Signature` - objects. - """ + ) -> str: ... def formatargvalues( args: list[str], @@ -1075,59 +545,18 @@ def formatargvalues( formatvarargs: Callable[[str], str] | None = ..., formatvarkw: Callable[[str], str] | None = ..., formatvalue: Callable[[Any], str] | None = ..., -) -> str: - """Format an argument spec from the 4 values returned by getargvalues. - - The first four arguments are (args, varargs, varkw, locals). The - next four arguments are the corresponding optional formatting functions - that are called to turn names and values into strings. The ninth - argument is an optional function to format the sequence of arguments. - """ - -def getmro(cls: type) -> tuple[type, ...]: - """Return tuple of base classes (including cls) in method resolution order.""" - -def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: - """Get the mapping of arguments to values. - - A dict is returned, with keys the function argument names (including the - names of the * and ** arguments, if any), and values the respective bound - values from 'positional' and 'named'. - """ +) -> str: ... +def getmro(cls: type) -> tuple[type, ...]: ... +def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: ... class ClosureVars(NamedTuple): - """ClosureVars(nonlocals, globals, builtins, unbound)""" - nonlocals: Mapping[str, Any] globals: Mapping[str, Any] builtins: Mapping[str, Any] unbound: AbstractSet[str] -def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: - """ - Get the mapping of free variables to their current values. - - Returns a named tuple of dicts mapping the current nonlocal, global - and builtin references as seen by the body of the function. A final - set of unbound names that could not be resolved is also provided. 
- """ - -def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: - """Get the object wrapped by *func*. - - Follows the chain of :attr:`__wrapped__` attributes returning the last - object in the chain. - - *stop* is an optional callback accepting an object in the wrapper chain - as its sole argument that allows the unwrapping to be terminated early if - the callback returns a true value. If the callback never returns a true - value, the last object in the chain is returned as usual. For example, - :func:`signature` uses this to stop unwrapping if any object in the - chain has a ``__signature__`` attribute defined. - - :exc:`ValueError` is raised if a cycle is encountered. - - """ +def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... # # The interpreter stack @@ -1135,8 +564,6 @@ def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any if sys.version_info >= (3, 11): class _Traceback(NamedTuple): - """_Traceback(filename, lineno, function, code_context, index)""" - filename: str lineno: int function: str @@ -1144,8 +571,6 @@ if sys.version_info >= (3, 11): index: int | None # type: ignore[assignment] class _FrameInfo(NamedTuple): - """_FrameInfo(frame, filename, lineno, function, code_context, index)""" - frame: FrameType filename: str lineno: int @@ -1213,8 +638,6 @@ if sys.version_info >= (3, 11): else: class Traceback(NamedTuple): - """Traceback(filename, lineno, function, code_context, index)""" - filename: str lineno: int function: str @@ -1222,8 +645,6 @@ else: index: int | None # type: ignore[assignment] class FrameInfo(NamedTuple): - """FrameInfo(frame, filename, lineno, function, code_context, index)""" - frame: FrameType filename: str lineno: int @@ -1231,57 +652,19 @@ else: code_context: list[str] | None index: int | None # type: ignore[assignment] -def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: - """Get information about a frame or traceback object. - - A tuple of five things is returned: the filename, the line number of - the current line, the function name, a list of lines of context from - the source code, and the index of the current line within that list. - The optional second argument specifies the number of lines of context - to return, which are centered around the current line. - """ - -def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: - """Get a list of records for a frame and all higher (calling) frames. - - Each record contains a frame object, filename, line number, function - name, a list of lines of context, and index within the context. - """ - -def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: - """Get a list of records for a traceback's frame and all lower frames. - - Each record contains a frame object, filename, line number, function - name, a list of lines of context, and index within the context. 
- """ - -def getlineno(frame: FrameType) -> int: - """Get the line number from a frame object, allowing for optimization.""" - -def currentframe() -> FrameType | None: - """Return the frame of the caller or None if this is not possible.""" - -def stack(context: int = 1) -> list[FrameInfo]: - """Return a list of records for the stack above the caller's frame.""" - -def trace(context: int = 1) -> list[FrameInfo]: - """Return a list of records for the stack below the current exception.""" +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... +def getlineno(frame: FrameType) -> int: ... +def currentframe() -> FrameType | None: ... +def stack(context: int = 1) -> list[FrameInfo]: ... +def trace(context: int = 1) -> list[FrameInfo]: ... # # Fetching attributes statically # -def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: - """Retrieve attributes without triggering dynamic lookup via the - descriptor protocol, __getattr__ or __getattribute__. - - Note: this function may not be able to retrieve all attributes - that getattr can fetch (like dynamically created attributes) - and may find attributes that getattr can't (like descriptors - that raise AttributeError). It can also return descriptor objects - instead of instance members in some cases. See the - documentation for details. - """ +def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: ... # # Current State of Generators and Coroutines @@ -1294,15 +677,7 @@ GEN_CLOSED: Final = "GEN_CLOSED" def getgeneratorstate( generator: Generator[Any, Any, Any], -) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: - """Get current state of a generator-iterator. - - Possible states are: - GEN_CREATED: Waiting to start execution. - GEN_RUNNING: Currently being executed by the interpreter. - GEN_SUSPENDED: Currently suspended at a yield expression. - GEN_CLOSED: Execution has completed. - """ +) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: ... CORO_CREATED: Final = "CORO_CREATED" CORO_RUNNING: Final = "CORO_RUNNING" @@ -1311,70 +686,21 @@ CORO_CLOSED: Final = "CORO_CLOSED" def getcoroutinestate( coroutine: Coroutine[Any, Any, Any], -) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: - """Get current state of a coroutine object. - - Possible states are: - CORO_CREATED: Waiting to start execution. - CORO_RUNNING: Currently being executed by the interpreter. - CORO_SUSPENDED: Currently suspended at an await expression. - CORO_CLOSED: Execution has completed. - """ - -def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: - """ - Get the mapping of generator local variables to their current values. - - A dict is returned, with the keys the local variable names and values the - bound values. - """ - -def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: - """ - Get the mapping of coroutine local variables to their current values. - - A dict is returned, with the keys the local variable names and values the - bound values. - """ +) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: ... +def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: ... +def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: ... 
# Create private type alias to avoid conflict with symbol of same # name created in Attribute class. _Object: TypeAlias = object class Attribute(NamedTuple): - """Attribute(name, kind, defining_class, object)""" - name: str kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type object: _Object -def classify_class_attrs(cls: type) -> list[Attribute]: - """Return list of attribute-descriptor tuples. - - For each name in dir(cls), the return list contains a 4-tuple - with these elements: - - 0. The name (a string). - - 1. The kind of attribute this is, one of these strings: - 'class method' created via classmethod() - 'static method' created via staticmethod() - 'property' created via property() - 'method' any other flavor of method or descriptor - 'data' not a method - - 2. The class which defined this attribute (a class). - - 3. The object as obtained by calling getattr; if this fails, or if the - resulting object does not live anywhere in the class' mro (including - metaclasses) then the object is looked up in the defining class's - dict (found by walking the mro). - - If one of the items in dir(cls) is stored in the metaclass it will now - be discovered and not have None be listed as the class in which it was - defined. Any items whose home class cannot be discovered are skipped. - """ +def classify_class_attrs(cls: type) -> list[Attribute]: ... class ClassFoundException(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi index 94d8ed0257f05..d301d700e9d0f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi @@ -1,38 +1,3 @@ -"""The io module provides the Python interfaces to stream handling. The -builtin open function is defined in this module. - -At the top of the I/O hierarchy is the abstract base class IOBase. It -defines the basic interface to a stream. Note, however, that there is no -separation between reading and writing to streams; implementations are -allowed to raise an OSError if they do not support a given operation. - -Extending IOBase is RawIOBase which deals simply with the reading and -writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide -an interface to OS files. - -BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its -subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer -streams that are readable, writable, and both respectively. -BufferedRandom provides a buffered interface to random access -streams. BytesIO is a simple stream of in-memory bytes. - -Another IOBase subclass, TextIOBase, deals with the encoding and decoding -of streams into text. TextIOWrapper, which extends it, is a buffered text -interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO -is an in-memory stream for text. - -Argument names are not part of the specification, and only the arguments -of open() are intended to be used as keyword arguments. - -data: - -DEFAULT_BUFFER_SIZE - - An int containing the default buffer size used by the module's buffered - I/O classes. open() uses the file's blksize (as obtained by os.stat) if - possible. -""" - import abc import sys from _io import ( @@ -95,87 +60,16 @@ SEEK_CUR: Final = 1 SEEK_END: Final = 2 class UnsupportedOperation(OSError, ValueError): ... - -class IOBase(_IOBase, metaclass=abc.ABCMeta): - """The abstract base class for all I/O classes. 
- - This class provides dummy implementations for many methods that - derived classes can override selectively; the default implementations - represent a file that cannot be read, written or seeked. - - Even though IOBase does not declare read, readinto, or write because - their signatures will vary, implementations and clients should - consider those methods part of the interface. Also, implementations - may raise UnsupportedOperation when operations they do not support are - called. - - The basic type used for binary data read from or written to a file is - bytes. Other bytes-like objects are accepted as method arguments too. - In some cases (such as readinto), a writable object is required. Text - I/O classes work with str data. - - Note that calling any method (except additional calls to close(), - which are ignored) on a closed stream should raise a ValueError. - - IOBase (and its subclasses) support the iterator protocol, meaning - that an IOBase object can be iterated over yielding the lines in a - stream. - - IOBase also supports the :keyword:`with` statement. In this example, - fp is closed after the suite of the with statement is complete: - - with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') - """ - -class RawIOBase(_RawIOBase, IOBase): - """Base class for raw binary I/O.""" - -class BufferedIOBase(_BufferedIOBase, IOBase): - """Base class for buffered IO objects. - - The main difference with RawIOBase is that the read() method - supports omitting the size argument, and does not have a default - implementation that defers to readinto(). - - In addition, read(), readinto() and write() may raise - BlockingIOError if the underlying raw stream is in non-blocking - mode and not ready; unlike their raw counterparts, they will never - return None. - - A typical implementation should not inherit from a RawIOBase - implementation, but wrap one. - """ - -class TextIOBase(_TextIOBase, IOBase): - """Base class for text I/O. - - This class provides a character and line based interface to stream - I/O. There is no readinto method because Python's character strings - are immutable. - """ +class IOBase(_IOBase, metaclass=abc.ABCMeta): ... +class RawIOBase(_RawIOBase, IOBase): ... +class BufferedIOBase(_BufferedIOBase, IOBase): ... +class TextIOBase(_TextIOBase, IOBase): ... if sys.version_info >= (3, 14): class Reader(Protocol[_T_co]): - """Protocol for simple I/O reader instances. - - This protocol only supports blocking I/O. - """ - __slots__ = () - def read(self, size: int = ..., /) -> _T_co: - """Read data from the input stream and return it. - - If *size* is specified, at most *size* items (bytes/characters) will be - read. - """ + def read(self, size: int = ..., /) -> _T_co: ... class Writer(Protocol[_T_contra]): - """Protocol for simple I/O writer instances. - - This protocol only supports blocking I/O. - """ - __slots__ = () - def write(self, data: _T_contra, /) -> int: - """Write *data* to the output stream and return the number of items written.""" + def write(self, data: _T_contra, /) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi index f8397ff5b33ed..e2f3defa2deac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi @@ -1,10 +1,3 @@ -"""A fast, lightweight IPv4/IPv6 manipulation library in Python. - -This library is used to create/poke/manipulate IPv4 and IPv6 addresses -and networks. 
- -""" - import sys from collections.abc import Iterable, Iterator from typing import Any, Final, Generic, Literal, TypeVar, overload @@ -20,144 +13,45 @@ _N = TypeVar("_N", IPv4Network, IPv6Network) _RawIPAddress: TypeAlias = int | str | bytes | IPv4Address | IPv6Address _RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface -def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Address or IPv6Address object. - - Raises: - ValueError: if the *address* passed isn't either a v4 or a v6 - address - - """ - +def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... def ip_network( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True -) -> IPv4Network | IPv6Network: - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP network. Either IPv4 or - IPv6 networks may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Network or IPv6Network object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. Or if the network has host bits set. - - """ - +) -> IPv4Network | IPv6Network: ... def ip_interface( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], -) -> IPv4Interface | IPv6Interface: - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - - Returns: - An IPv4Interface or IPv6Interface object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. - - Notes: - The IPv?Interface classes describe an Address on a particular - Network, so they're basically a combination of both the Address - and Network classes. - - """ +) -> IPv4Interface | IPv6Interface: ... class _IPAddressBase: - """The mother class.""" - __slots__ = () @property - def compressed(self) -> str: - """Return the shorthand version of the IP address as a string.""" - + def compressed(self) -> str: ... @property - def exploded(self) -> str: - """Return the longhand version of the IP address as a string.""" - + def exploded(self) -> str: ... @property - def reverse_pointer(self) -> str: - """The name of the reverse DNS pointer for the IP address, e.g.: - >>> ipaddress.ip_address("127.0.0.1").reverse_pointer - '1.0.0.127.in-addr.arpa' - >>> ipaddress.ip_address("2001:db8::1").reverse_pointer - '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' - - """ + def reverse_pointer(self) -> str: ... if sys.version_info < (3, 14): @property def version(self) -> int: ... class _BaseAddress(_IPAddressBase): - """A generic IP object. - - This IP class contains the version independent methods which are - used by single IP addresses. - """ - __slots__ = () def __add__(self, other: int) -> Self: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self, other: int) -> Self: ... - def __format__(self, fmt: str) -> str: - """Returns an IP address as a formatted string. 
- - Supported presentation types are: - 's': returns the IP address as a string (default) - 'b': converts to binary and returns a zero-padded string - 'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string - 'n': the same as 'b' for IPv4 and 'x' for IPv6 - - For binary and hex presentation types, the alternate form specifier - '#' and the grouping option '_' are supported. - """ - + def __format__(self, fmt: str) -> str: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self, other: Self) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __gt__(self, other: Self) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __le__(self, other: Self) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... class _BaseNetwork(_IPAddressBase, Generic[_A]): - """A generic IP network object. - - This IP class contains the version independent methods which are - used by networks. - """ - network_address: _A netmask: _A def __contains__(self, other: Any) -> bool: ... @@ -167,237 +61,42 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): def __hash__(self) -> int: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self, other: Self) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __gt__(self, other: Self) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __le__(self, other: Self) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" - - def address_exclude(self, other: Self) -> Iterator[Self]: - """Remove an address from a larger block. 
- - For example: - - addr1 = ip_network('192.0.2.0/28') - addr2 = ip_network('192.0.2.1/32') - list(addr1.address_exclude(addr2)) = - [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), - IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] - - or IPv6: - - addr1 = ip_network('2001:db8::1/32') - addr2 = ip_network('2001:db8::1/128') - list(addr1.address_exclude(addr2)) = - [ip_network('2001:db8::1/128'), - ip_network('2001:db8::2/127'), - ip_network('2001:db8::4/126'), - ip_network('2001:db8::8/125'), - ... - ip_network('2001:db8:8000::/33')] - - Args: - other: An IPv4Network or IPv6Network object of the same type. - - Returns: - An iterator of the IPv(4|6)Network objects which is self - minus other. - - Raises: - TypeError: If self and other are of differing address - versions, or if other is not a network object. - ValueError: If other is not completely contained by self. - - """ + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def address_exclude(self, other: Self) -> Iterator[Self]: ... @property def broadcast_address(self) -> _A: ... - def compare_networks(self, other: Self) -> int: - """Compare two IP objects. - - This is only concerned about the comparison of the integer - representation of the network addresses. This means that the - host bits aren't considered at all in this method. If you want - to compare host bits, you can easily enough do a - 'HostA._ip < HostB._ip' - - Args: - other: An IP object. - - Returns: - If the IP versions of self and other are the same, returns: - - -1 if self < other: - eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') - IPv6Network('2001:db8::1000/124') < - IPv6Network('2001:db8::2000/124') - 0 if self == other - eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') - IPv6Network('2001:db8::1000/124') == - IPv6Network('2001:db8::1000/124') - 1 if self > other - eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') - IPv6Network('2001:db8::2000/124') > - IPv6Network('2001:db8::1000/124') - - Raises: - TypeError if the IP versions are different. - - """ - - def hosts(self) -> Iterator[_A] | list[_A]: - """Generate Iterator over usable hosts in a network. - - This is like __iter__ except it doesn't return the network - or broadcast addresses. - - """ - + def compare_networks(self, other: Self) -> int: ... + def hosts(self) -> Iterator[_A] | list[_A]: ... @property - def is_global(self) -> bool: - """Test if this address is allocated for public networks. - - Returns: - A boolean, True if the address is not reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. - - """ - + def is_global(self) -> bool: ... @property - def is_link_local(self) -> bool: - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - + def is_link_local(self) -> bool: ... @property - def is_loopback(self) -> bool: - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. - - """ - + def is_loopback(self) -> bool: ... @property - def is_multicast(self) -> bool: - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - + def is_multicast(self) -> bool: ... 
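As a hedged aside (not part of the patch hunks themselves): the `_BaseNetwork` methods stubbed above lose their doctest examples in this sync, and the behaviour they described looks roughly like the following, assuming standard CPython `ipaddress` semantics and using the TEST-NET-1 documentation range.

import ipaddress

net = ipaddress.ip_network("192.0.2.0/29")
print(net.broadcast_address)              # 192.0.2.7
print(list(net.hosts()))                  # usable hosts; network/broadcast excluded
print(net.is_global, net.is_loopback)     # False False (TEST-NET-1 is reserved)

inner = ipaddress.ip_network("192.0.2.0/30")
# address_exclude() yields what is left of `net` once `inner` is removed.
print(list(net.address_exclude(inner)))   # [IPv4Network('192.0.2.4/30')]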
@property - def is_private(self) -> bool: - """Test if this network belongs to a private range. - - Returns: - A boolean, True if the network is reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. - - """ - + def is_private(self) -> bool: ... @property - def is_reserved(self) -> bool: - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. - - """ - + def is_reserved(self) -> bool: ... @property - def is_unspecified(self) -> bool: - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - + def is_unspecified(self) -> bool: ... @property - def num_addresses(self) -> int: - """Number of hosts in the current subnet.""" - - def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: - """Tell if self is partly contained in other.""" - + def num_addresses(self) -> int: ... + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... @property def prefixlen(self) -> int: ... - def subnet_of(self, other: Self) -> bool: - """Return True if this network is a subnet of other.""" - - def supernet_of(self, other: Self) -> bool: - """Return True if this network is a supernet of other.""" - - def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: - """The subnets which join to make the current subnet. - - In the case that self contains only one IP - (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 - for IPv6), yield an iterator with just ourself. - - Args: - prefixlen_diff: An integer, the amount the prefix length - should be increased by. This should not be set if - new_prefix is also set. - new_prefix: The desired new prefix length. This must be a - larger number (smaller prefix) than the existing prefix. - This should not be set if prefixlen_diff is also set. - - Returns: - An iterator of IPv(4|6) objects. - - Raises: - ValueError: The prefixlen_diff is too small or too large. - OR - prefixlen_diff and new_prefix are both set or new_prefix - is a smaller number than the current prefix (smaller - number means a larger network) - - """ - - def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: - """The supernet containing the current network. - - Args: - prefixlen_diff: An integer, the amount the prefix length of - the network should be decreased by. For example, given a - /24 network and a prefixlen_diff of 3, a supernet with a - /21 netmask is returned. - - Returns: - An IPv4 network object. - - Raises: - ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have - a negative prefix length. - OR - If prefixlen_diff and new_prefix are both set or new_prefix is a - larger number than the current prefix (larger number means a - smaller network) - - """ - + def subnet_of(self, other: Self) -> bool: ... + def supernet_of(self, other: Self) -> bool: ... + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... @property def with_hostmask(self) -> str: ... @property @@ -408,13 +107,6 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): def hostmask(self) -> _A: ... class _BaseV4: - """Base IPv4 object. - - The following methods are used by IPv4 objects in both single IP - addresses and networks. 
- - """ - __slots__ = () if sys.version_info >= (3, 14): version: Final = 4 @@ -426,166 +118,30 @@ class _BaseV4: def max_prefixlen(self) -> Literal[32]: ... class IPv4Address(_BaseV4, _BaseAddress): - """Represent and manipulate single IPv4 Addresses.""" - __slots__ = ("_ip", "__weakref__") - def __init__(self, address: object) -> None: - """ - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv4Address('192.0.2.1') == IPv4Address(3221225985). - or, more generally - IPv4Address(int(IPv4Address('192.0.2.1'))) == - IPv4Address('192.0.2.1') - - Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. - - """ - + def __init__(self, address: object) -> None: ... @property - def is_global(self) -> bool: - """``True`` if the address is defined as globally reachable by - iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ - (for IPv6) with the following exception: - - For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: - - address.is_global == address.ipv4_mapped.is_global - - ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` - IPv4 range where they are both ``False``. - """ - + def is_global(self) -> bool: ... @property - def is_link_local(self) -> bool: - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is link-local per RFC 3927. - - """ - + def is_link_local(self) -> bool: ... @property - def is_loopback(self) -> bool: - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback per RFC 3330. - - """ - + def is_loopback(self) -> bool: ... @property - def is_multicast(self) -> bool: - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is multicast. - See RFC 3171 for details. - - """ - + def is_multicast(self) -> bool: ... @property - def is_private(self) -> bool: - """``True`` if the address is defined as not globally reachable by - iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ - (for IPv6) with the following exceptions: - - * ``is_private`` is ``False`` for ``100.64.0.0/10`` - * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: - - address.is_private == address.ipv4_mapped.is_private - - ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` - IPv4 range where they are both ``False``. - """ - + def is_private(self) -> bool: ... @property - def is_reserved(self) -> bool: - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within the - reserved IPv4 Network range. - - """ - + def is_reserved(self) -> bool: ... @property - def is_unspecified(self) -> bool: - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 5735 3. - - """ - + def is_unspecified(self) -> bool: ... @property - def packed(self) -> bytes: - """The binary representation of this address.""" + def packed(self) -> bytes: ... if sys.version_info >= (3, 13): @property - def ipv6_mapped(self) -> IPv6Address: - """Return the IPv4-mapped IPv6 address. 
- - Returns: - The IPv4-mapped IPv6 address per RFC 4291. - - """ + def ipv6_mapped(self) -> IPv6Address: ... class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): - """This class represents and manipulates 32-bit IPv4 network + addresses.. - - Attributes: [examples for IPv4Network('192.0.2.0/27')] - .network_address: IPv4Address('192.0.2.0') - .hostmask: IPv4Address('0.0.0.31') - .broadcast_address: IPv4Address('192.0.2.32') - .netmask: IPv4Address('255.255.255.224') - .prefixlen: 27 - - """ - - def __init__(self, address: object, strict: bool = True) -> None: - """Instantiate a new IPv4 network object. - - Args: - address: A string or integer representing the IP [& network]. - '192.0.2.0/24' - '192.0.2.0/255.255.255.0' - '192.0.2.0/0.0.0.255' - are all functionally the same in IPv4. Similarly, - '192.0.2.1' - '192.0.2.1/255.255.255.255' - '192.0.2.1/32' - are also functionally equivalent. That is to say, failing to - provide a subnetmask will create an object with a mask of /32. - - If the mask (portion after the / in the argument) is given in - dotted quad form, it is treated as a netmask if it starts with a - non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it - starts with a zero field (e.g. 0.255.255.255 == /8), with the - single exception of an all-zero mask which is treated as a - netmask == /0. If no mask is given, a default of /32 is used. - - Additionally, an integer can be passed, so - IPv4Network('192.0.2.1') == IPv4Network(3221225985) - or, more generally - IPv4Interface(int(IPv4Interface('192.0.2.1'))) == - IPv4Interface('192.0.2.1') - - Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. - NetmaskValueError: If the netmask isn't valid for - an IPv4 address. - ValueError: If strict is True and a network address is not - supplied. - """ + def __init__(self, address: object, strict: bool = True) -> None: ... class IPv4Interface(IPv4Address): netmask: IPv4Address @@ -604,13 +160,6 @@ class IPv4Interface(IPv4Address): def with_prefixlen(self) -> str: ... class _BaseV6: - """Base IPv6 object. - - The following methods are used by IPv6 objects in both single IP - addresses and networks. - - """ - __slots__ = () if sys.version_info >= (3, 14): version: Final = 6 @@ -622,227 +171,41 @@ class _BaseV6: def max_prefixlen(self) -> Literal[128]: ... class IPv6Address(_BaseV6, _BaseAddress): - """Represent and manipulate single IPv6 Addresses.""" - __slots__ = ("_ip", "_scope_id", "__weakref__") - def __init__(self, address: object) -> None: - """Instantiate a new IPv6 address object. - - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv6Address('2001:db8::') == - IPv6Address(42540766411282592856903984951653826560) - or, more generally - IPv6Address(int(IPv6Address('2001:db8::'))) == - IPv6Address('2001:db8::') - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - - """ - + def __init__(self, address: object) -> None: ... 
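The constructor docstrings dropped above explained the `strict` handling of host bits; a small illustrative sketch of that behaviour (standard library semantics, separate from the diff) is:

import ipaddress

ipaddress.IPv4Network("192.0.2.0/24")                 # accepted: a true network address
ipaddress.IPv4Network("192.0.2.1/24", strict=False)   # host bits masked -> 192.0.2.0/24
try:
    ipaddress.IPv4Network("192.0.2.1/24")             # strict default rejects host bits
except ValueError as exc:
    print(exc)

# An interface object combines an address with its network.
iface = ipaddress.IPv4Interface("192.0.2.1/24")
print(iface.ip, iface.network, iface.with_prefixlen)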
@property - def is_global(self) -> bool: - """``True`` if the address is defined as globally reachable by - iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ - (for IPv6) with the following exception: - - For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: - - address.is_global == address.ipv4_mapped.is_global - - ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` - IPv4 range where they are both ``False``. - """ - + def is_global(self) -> bool: ... @property - def is_link_local(self) -> bool: - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - + def is_link_local(self) -> bool: ... @property - def is_loopback(self) -> bool: - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. - - """ - + def is_loopback(self) -> bool: ... @property - def is_multicast(self) -> bool: - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - + def is_multicast(self) -> bool: ... @property - def is_private(self) -> bool: - """``True`` if the address is defined as not globally reachable by - iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ - (for IPv6) with the following exceptions: - - * ``is_private`` is ``False`` for ``100.64.0.0/10`` - * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: - - address.is_private == address.ipv4_mapped.is_private - - ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` - IPv4 range where they are both ``False``. - """ - + def is_private(self) -> bool: ... @property - def is_reserved(self) -> bool: - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. - - """ - + def is_reserved(self) -> bool: ... @property - def is_unspecified(self) -> bool: - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - + def is_unspecified(self) -> bool: ... @property - def packed(self) -> bytes: - """The binary representation of this address.""" - + def packed(self) -> bytes: ... @property - def ipv4_mapped(self) -> IPv4Address | None: - """Return the IPv4 mapped address. - - Returns: - If the IPv6 address is a v4 mapped address, return the - IPv4 mapped address. Return None otherwise. - - """ - + def ipv4_mapped(self) -> IPv4Address | None: ... @property - def is_site_local(self) -> bool: - """Test if the address is reserved for site-local. - - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ - + def is_site_local(self) -> bool: ... @property - def sixtofour(self) -> IPv4Address | None: - """Return the IPv4 6to4 embedded address. 
- - Returns: - The IPv4 6to4-embedded address if present or None if the - address doesn't appear to contain a 6to4 embedded address. - - """ - + def sixtofour(self) -> IPv4Address | None: ... @property - def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: - """Tuple of embedded teredo IPs. - - Returns: - Tuple of the (server, client) IPs or None if the address - doesn't appear to be a teredo address (doesn't start with - 2001::/32) - - """ - + def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... @property - def scope_id(self) -> str | None: - """Identifier of a particular zone of the address's scope. - - See RFC 4007 for details. - - Returns: - A string identifying the zone of the address if specified, else None. - - """ - + def scope_id(self) -> str | None: ... def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): - """This class represents and manipulates 128-bit IPv6 networks. - - Attributes: [examples for IPv6('2001:db8::1000/124')] - .network_address: IPv6Address('2001:db8::1000') - .hostmask: IPv6Address('::f') - .broadcast_address: IPv6Address('2001:db8::100f') - .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') - .prefixlen: 124 - - """ - - def __init__(self, address: object, strict: bool = True) -> None: - """Instantiate a new IPv6 Network object. - - Args: - address: A string or integer representing the IPv6 network or the - IP and prefix/netmask. - '2001:db8::/128' - '2001:db8:0000:0000:0000:0000:0000:0000/128' - '2001:db8::' - are all functionally the same in IPv6. That is to say, - failing to provide a subnetmask will create an object with - a mask of /128. - - Additionally, an integer can be passed, so - IPv6Network('2001:db8::') == - IPv6Network(42540766411282592856903984951653826560) - or, more generally - IPv6Network(int(IPv6Network('2001:db8::'))) == - IPv6Network('2001:db8::') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 2001:db8::1000/124 and not an - IP address on a network, eg, 2001:db8::1/124. - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - NetmaskValueError: If the netmask isn't valid for - an IPv6 address. - ValueError: If strict was True and a network address was not - supplied. - """ - + def __init__(self, address: object, strict: bool = True) -> None: ... @property - def is_site_local(self) -> bool: - """Test if the address is reserved for site-local. - - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ + def is_site_local(self) -> bool: ... class IPv6Interface(IPv6Address): netmask: IPv6Address @@ -860,113 +223,25 @@ class IPv6Interface(IPv6Address): @property def with_prefixlen(self) -> str: ... -def v4_int_to_packed(address: int) -> bytes: - """Represent an address as 4 packed bytes in network (big-endian) order. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The integer address packed as 4 bytes in network (big-endian) order. - - Raises: - ValueError: If the integer is negative or too large to be an - IPv4 IP address. - - """ - -def v6_int_to_packed(address: int) -> bytes: - """Represent an address as 16 packed bytes in network (big-endian) order. - - Args: - address: An integer representation of an IPv6 IP address. 
- - Returns: - The integer address packed as 16 bytes in network (big-endian) order. - - """ +def v4_int_to_packed(address: int) -> bytes: ... +def v6_int_to_packed(address: int) -> bytes: ... # Third overload is technically incorrect, but convenient when first and last are return values of ip_address() @overload -def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: - """Summarize a network range given the first and last IP addresses. - - Example: - >>> list(summarize_address_range(IPv4Address('192.0.2.0'), - ... IPv4Address('192.0.2.130'))) - ... #doctest: +NORMALIZE_WHITESPACE - [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), - IPv4Network('192.0.2.130/32')] - - Args: - first: the first IPv4Address or IPv6Address in the range. - last: the last IPv4Address or IPv6Address in the range. - - Returns: - An iterator of the summarized IPv(4|6) network objects. - - Raise: - TypeError: - If the first and last objects are not IP addresses. - If the first and last objects are not the same version. - ValueError: - If the last object is not greater than the first. - If the version of the first address is not 4 or 6. - - """ - +def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... @overload def summarize_address_range( first: IPv4Address | IPv6Address, last: IPv4Address | IPv6Address ) -> Iterator[IPv4Network] | Iterator[IPv6Network]: ... -def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: - """Collapse a list of IP objects. - - Example: - collapse_addresses([IPv4Network('192.0.2.0/25'), - IPv4Network('192.0.2.128/25')]) -> - [IPv4Network('192.0.2.0/24')] - - Args: - addresses: An iterable of IPv4Network or IPv6Network objects. - - Returns: - An iterator of the collapsed IPv(4|6)Network objects. - - Raises: - TypeError: If passed a list of mixed version objects. - - """ - +def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... @overload -def get_mixed_type_key(obj: _A) -> tuple[int, _A]: - """Return a key suitable for sorting between networks and addresses. - - Address and Network objects are not sortable by default; they're - fundamentally different so the expression - - IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') - - doesn't make any sense. There are some times however, where you may wish - to have ipaddress sort these for you anyway. If you need to do this, you - can use this function as the key= argument to sorted(). - - Args: - obj: either a Network or Address object. - Returns: - appropriate key. - - """ - +def get_mixed_type_key(obj: _A) -> tuple[int, _A]: ... @overload def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... -class AddressValueError(ValueError): - """A Value Error related to the address.""" - -class NetmaskValueError(ValueError): - """A Value Error related to the netmask.""" +class AddressValueError(ValueError): ... +class NetmaskValueError(ValueError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi index 4c1eaf164d8b2..fe4ccbdf8ae97 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi @@ -1,34 +1,3 @@ -"""Functional tools for creating and using iterators. 
- -Infinite iterators: -count(start=0, step=1) --> start, start+step, start+2*step, ... -cycle(p) --> p0, p1, ... plast, p0, p1, ... -repeat(elem [,n]) --> elem, elem, elem, ... endlessly or up to n times - -Iterators terminating on the shortest input sequence: -accumulate(p[, func]) --> p0, p0+p1, p0+p1+p2 -batched(p, n) --> [p0, p1, ..., p_n-1], [p_n, p_n+1, ..., p_2n-1], ... -chain(p, q, ...) --> p0, p1, ... plast, q0, q1, ... -chain.from_iterable([p, q, ...]) --> p0, p1, ... plast, q0, q1, ... -compress(data, selectors) --> (d[0] if s[0]), (d[1] if s[1]), ... -dropwhile(predicate, seq) --> seq[n], seq[n+1], starting when predicate fails -groupby(iterable[, keyfunc]) --> sub-iterators grouped by value of keyfunc(v) -filterfalse(predicate, seq) --> elements of seq where predicate(elem) is False -islice(seq, [start,] stop [, step]) --> elements from - seq[start:stop:step] -pairwise(s) --> (s[0],s[1]), (s[1],s[2]), (s[2], s[3]), ... -starmap(fun, seq) --> fun(*seq[0]), fun(*seq[1]), ... -tee(it, n=2) --> (it1, it2 , ... itn) splits one iterator into n -takewhile(predicate, seq) --> seq[0], seq[1], until predicate fails -zip_longest(p, q, ...) --> (p[0], q[0]), (p[1], q[1]), ... - -Combinatoric generators: -product(p, q, ... [repeat=1]) --> cartesian product -permutations(p[, r]) -combinations(p, r) -combinations_with_replacement(p, r) -""" - import sys from _typeshed import MaybeNone from collections.abc import Callable, Iterable, Iterator @@ -60,218 +29,101 @@ _Predicate: TypeAlias = Callable[[_T], object] # but we can't enforce the add method @disjoint_base class count(Generic[_N]): - """Return a count object whose .__next__() method returns consecutive values. - - Equivalent to: - def count(firstval=0, step=1): - x = firstval - while 1: - yield x - x += step - """ - @overload def __new__(cls) -> count[int]: ... @overload def __new__(cls, start: _N, step: _Step = ...) -> count[_N]: ... @overload def __new__(cls, *, step: _N) -> count[_N]: ... - def __next__(self) -> _N: - """Implement next(self).""" - - def __iter__(self) -> Self: - """Implement iter(self).""" + def __next__(self) -> _N: ... + def __iter__(self) -> Self: ... @disjoint_base class cycle(Generic[_T]): - """Return elements from the iterable until it is exhausted. Then repeat the sequence indefinitely.""" - def __new__(cls, iterable: Iterable[_T], /) -> Self: ... - def __next__(self) -> _T: - """Implement next(self).""" - - def __iter__(self) -> Self: - """Implement iter(self).""" + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... @disjoint_base class repeat(Generic[_T]): - """repeat(object [,times]) -> create an iterator which returns the object - for the specified number of times. If not specified, returns the object - endlessly. - """ - @overload def __new__(cls, object: _T) -> Self: ... @overload def __new__(cls, object: _T, times: int) -> Self: ... - def __next__(self) -> _T: - """Implement next(self).""" - - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __length_hint__(self) -> int: - """Private method returning an estimate of len(list(it)).""" + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + def __length_hint__(self) -> int: ... @disjoint_base class accumulate(Generic[_T]): - """Return series of accumulated sums (or other binary function results).""" - @overload def __new__(cls, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> Self: ... 
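For the infinite iterators and accumulate() whose docstrings are stripped above, a brief hedged illustration (plain `itertools` behaviour; `islice`, which appears later in this stub, is used only to bound the infinite iterators):

import operator
from itertools import accumulate, count, islice, repeat

print(list(islice(count(10, 2), 5)))       # [10, 12, 14, 16, 18]
print(list(repeat("x", 3)))                # ['x', 'x', 'x']
print(list(accumulate([1, 2, 3, 4])))      # running sums: [1, 3, 6, 10]
# With a binary function and an initial value (keyword-only).
print(list(accumulate([1, 2, 3, 4], operator.mul, initial=1)))  # [1, 1, 2, 6, 24]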
@overload def __new__(cls, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... @disjoint_base class chain(Generic[_T]): - """Return a chain object whose .__next__() method returns elements from the - first iterable until it is exhausted, then elements from the next - iterable, until all of the iterables are exhausted. - """ - def __new__(cls, *iterables: Iterable[_T]) -> Self: ... - def __next__(self) -> _T: - """Implement next(self).""" - - def __iter__(self) -> Self: - """Implement iter(self).""" - + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable - def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: - """Alternative chain() constructor taking a single iterable argument that evaluates lazily.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @disjoint_base class compress(Generic[_T]): - """Return data elements corresponding to true selector elements. - - Forms a shorter iterator from selected data elements using the selectors to - choose the data elements. - """ - def __new__(cls, data: Iterable[_T], selectors: Iterable[Any]) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... @disjoint_base class dropwhile(Generic[_T]): - """Drop items from the iterable while predicate(item) is true. - - Afterwards, return every element until the iterable is exhausted. - """ - def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... @disjoint_base class filterfalse(Generic[_T]): - """Return those items of iterable for which function(item) is false. - - If function is None, return the items that are false. - """ - def __new__(cls, function: _Predicate[_T] | None, iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... @disjoint_base class groupby(Generic[_T_co, _S_co]): - """make an iterator that returns consecutive keys and groups from the iterable - - iterable - Elements to divide into groups according to the key function. - key - A function for computing the group category for each element. - If the key function is not specified or is None, the element itself - is used for grouping. - """ - @overload def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: ... 
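A hedged sketch of the short-circuiting iterators stubbed just above; note that groupby() only groups consecutive equal keys, so the input is sorted first:

from itertools import chain, compress, filterfalse, groupby

print(list(chain.from_iterable([[1, 2], [3], []])))   # [1, 2, 3]
print(list(compress("ABCDE", [1, 0, 1, 0, 1])))       # ['A', 'C', 'E']
print(list(filterfalse(str.isdigit, "a1b2c")))        # ['a', 'b', 'c']

words = sorted(["apple", "ant", "bee", "bear"])
for letter, group in groupby(words, key=lambda w: w[0]):
    print(letter, list(group))                        # a [...]  then  b [...]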
@disjoint_base class islice(Generic[_T]): - """islice(iterable, stop) --> islice object - islice(iterable, start, stop[, step]) --> islice object - - Return an iterator whose next() method returns selected values from an - iterable. If start is specified, will skip all preceding elements; - otherwise, start defaults to zero. Step defaults to one. If - specified as another value, step determines how many values are - skipped between successive calls. Works like a slice() on a list - but returns an iterator. - """ - @overload def __new__(cls, iterable: Iterable[_T], stop: int | None, /) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T], start: int | None, stop: int | None, step: int | None = ..., /) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... @disjoint_base class starmap(Generic[_T_co]): - """Return an iterator whose values are returned from the function evaluated with an argument tuple taken from the given sequence.""" - def __new__(cls, function: Callable[..., _T], iterable: Iterable[Iterable[Any]], /) -> starmap[_T]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... @disjoint_base class takewhile(Generic[_T]): - """Return successive entries from an iterable as long as the predicate evaluates to true for each entry.""" - def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T: - """Implement next(self).""" - -def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: - """Returns a tuple of n independent iterators.""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... +def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: ... @disjoint_base class zip_longest(Generic[_T_co]): - """Return a zip_longest object whose .__next__() method returns a tuple where - the i-th element comes from the i-th iterable argument. The .__next__() - method continues until the longest iterable in the argument sequence - is exhausted and then it raises StopIteration. When the shorter iterables - are exhausted, the fillvalue is substituted in their place. The fillvalue - defaults to None or can be specified by a keyword argument. - """ - # one iterable (fillvalue doesn't matter) @overload def __new__(cls, iter1: Iterable[_T1], /, *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ... @@ -346,29 +198,11 @@ class zip_longest(Generic[_T_co]): *iterables: Iterable[_T], fillvalue: _T, ) -> zip_longest[tuple[_T, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... @disjoint_base class product(Generic[_T_co]): - """Cartesian product of input iterables. Equivalent to nested for-loops. - - For example, product(A, B) returns the same as: ((x,y) for x in A for y in B). - The leftmost iterators are in the outermost for-loop, so the output tuples - cycle in a manner similar to an odometer (with the rightmost element changing - on every iteration). - - To compute the product of an iterable with itself, specify the number - of repetitions with the optional repeat keyword argument. 
For example, - product(A, repeat=4) means the same as product(A, A, A, A). - - product('ab', range(3)) --> ('a',0) ('a',1) ('a',2) ('b',0) ('b',1) ('b',2) - product((0,1), (0,1), (0,1)) --> (0,0,0) (0,0,1) (0,1,0) (0,1,1) (1,0,0) ... - """ - @overload def __new__(cls, iter1: Iterable[_T1], /) -> product[tuple[_T1]]: ... @overload @@ -450,19 +284,11 @@ class product(Generic[_T_co]): ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9, _T10]]: ... @overload def __new__(cls, *iterables: Iterable[_T1], repeat: int = 1) -> product[tuple[_T1, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... @disjoint_base class permutations(Generic[_T_co]): - """Return successive r-length permutations of elements in the iterable. - - permutations(range(3), 2) --> (0,1), (0,2), (1,0), (1,2), (2,0), (2,1) - """ - @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> permutations[tuple[_T, _T]]: ... @overload @@ -473,19 +299,11 @@ class permutations(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> permutations[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int | None = ...) -> permutations[tuple[_T, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... @disjoint_base class combinations(Generic[_T_co]): - """Return successive r-length combinations of elements in the iterable. - - combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3) - """ - @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... @overload @@ -496,19 +314,11 @@ class combinations(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... @disjoint_base class combinations_with_replacement(Generic[_T_co]): - """Return successive r-length combinations of elements in the iterable allowing individual elements to have successive repeats. - - combinations_with_replacement('ABC', 2) --> ('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C') - """ - @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations_with_replacement[tuple[_T, _T]]: ... @overload @@ -519,55 +329,23 @@ class combinations_with_replacement(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations_with_replacement[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations_with_replacement[tuple[_T, ...]]: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... if sys.version_info >= (3, 10): @disjoint_base class pairwise(Generic[_T_co]): - """Return an iterator of overlapping pairs taken from the input iterator. - - s -> (s0,s1), (s1,s2), (s2, s3), ... - """ - def __new__(cls, iterable: Iterable[_T], /) -> pairwise[tuple[_T, _T]]: ... 
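The combinatoric generators above also lose their inline examples; an illustrative, version-guarded sketch of their output (standard `itertools` behaviour):

import sys
from itertools import combinations, permutations, product

print(list(product("ab", range(2))))      # [('a', 0), ('a', 1), ('b', 0), ('b', 1)]
print(list(permutations(range(3), 2)))    # six ordered pairs
print(list(combinations(range(4), 3)))    # [(0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3)]

if sys.version_info >= (3, 10):
    from itertools import pairwise
    print(list(pairwise("ABCD")))         # [('A', 'B'), ('B', 'C'), ('C', 'D')]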
- def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _T_co: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... if sys.version_info >= (3, 12): @disjoint_base class batched(Generic[_T_co]): - """Batch data into tuples of length n. The last batch may be shorter than n. - - Loops over the input iterable and accumulates data into tuples - up to size n. The input is consumed lazily, just enough to - fill a batch. The result is yielded as soon as a batch is full - or when the input iterable is exhausted. - - >>> for batch in batched('ABCDEFG', 3): - ... print(batch) - ... - ('A', 'B', 'C') - ('D', 'E', 'F') - ('G',) - - If "strict" is True, raises a ValueError if the final batch is shorter - than n. - """ - if sys.version_info >= (3, 13): def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ... else: def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> tuple[_T_co, ...]: - """Implement next(self).""" + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi index b911c1315a5c5..63e9718ee1512 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi @@ -1,101 +1,3 @@ -"""JSON (JavaScript Object Notation) is a subset of -JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data -interchange format. - -:mod:`json` exposes an API familiar to users of the standard library -:mod:`marshal` and :mod:`pickle` modules. It is derived from a -version of the externally maintained simplejson library. - -Encoding basic Python object hierarchies:: - - >>> import json - >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) - '["foo", {"bar": ["baz", null, 1.0, 2]}]' - >>> print(json.dumps("\\"foo\\bar")) - "\\"foo\\bar" - >>> print(json.dumps('\\u1234')) - "\\u1234" - >>> print(json.dumps('\\\\')) - "\\\\" - >>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)) - {"a": 0, "b": 0, "c": 0} - >>> from io import StringIO - >>> io = StringIO() - >>> json.dump(['streaming API'], io) - >>> io.getvalue() - '["streaming API"]' - -Compact encoding:: - - >>> import json - >>> mydict = {'4': 5, '6': 7} - >>> json.dumps([1,2,3,mydict], separators=(',', ':')) - '[1,2,3,{"4":5,"6":7}]' - -Pretty printing:: - - >>> import json - >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)) - { - "4": 5, - "6": 7 - } - -Decoding JSON:: - - >>> import json - >>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}] - >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj - True - >>> json.loads('"\\\\"foo\\\\bar"') == '"foo\\x08ar' - True - >>> from io import StringIO - >>> io = StringIO('["streaming API"]') - >>> json.load(io)[0] == 'streaming API' - True - -Specializing JSON object decoding:: - - >>> import json - >>> def as_complex(dct): - ... if '__complex__' in dct: - ... return complex(dct['real'], dct['imag']) - ... return dct - ... - >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', - ... object_hook=as_complex) - (1+2j) - >>> from decimal import Decimal - >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') - True - -Specializing JSON object encoding:: - - >>> import json - >>> def encode_complex(obj): - ... 
if isinstance(obj, complex): - ... return [obj.real, obj.imag] - ... raise TypeError(f'Object of type {obj.__class__.__name__} ' - ... f'is not JSON serializable') - ... - >>> json.dumps(2 + 1j, default=encode_complex) - '[2.0, 1.0]' - >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) - '[2.0, 1.0]' - >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) - '[2.0, 1.0]' - - -Using json from the shell to validate and pretty-print:: - - $ echo '{"json":"obj"}' | python -m json - { - "json": "obj" - } - $ echo '{ 1.2:3.4}' | python -m json - Expecting property name enclosed in double quotes: line 1 column 3 (char 2) -""" - from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable from typing import Any @@ -118,48 +20,7 @@ def dumps( default: Callable[[Any], Any] | None = None, sort_keys: bool = False, **kwds: Any, -) -> str: - """Serialize ``obj`` to a JSON formatted ``str``. - - If ``skipkeys`` is true then ``dict`` keys that are not basic types - (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped - instead of raising a ``TypeError``. - - If ``ensure_ascii`` is false, then the return value can contain non-ASCII - characters if they appear in strings contained in ``obj``. Otherwise, all - such characters are escaped in JSON strings. - - If ``check_circular`` is false, then the circular reference check - for container types will be skipped and a circular reference will - result in an ``RecursionError`` (or worse). - - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in - strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - - If ``indent`` is a non-negative integer, then JSON array elements and - object members will be pretty-printed with that indent level. An indent - level of 0 will only insert newlines. ``None`` is the most compact - representation. - - If specified, ``separators`` should be an ``(item_separator, key_separator)`` - tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and - ``(',', ': ')`` otherwise. To get the most compact JSON representation, - you should specify ``(',', ':')`` to eliminate whitespace. - - ``default(obj)`` is a function that should return a serializable version - of obj or raise TypeError. The default simply raises TypeError. - - If *sort_keys* is true (default: ``False``), then the output of - dictionaries will be sorted by key. - - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the - ``.default()`` method to serialize additional types), specify it with - the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. - - """ - +) -> str: ... def dump( obj: Any, fp: SupportsWrite[str], @@ -174,49 +35,7 @@ def dump( default: Callable[[Any], Any] | None = None, sort_keys: bool = False, **kwds: Any, -) -> None: - """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a - ``.write()``-supporting file-like object). - - If ``skipkeys`` is true then ``dict`` keys that are not basic types - (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped - instead of raising a ``TypeError``. - - If ``ensure_ascii`` is false, then the strings written to ``fp`` can - contain non-ASCII characters if they appear in strings contained in - ``obj``. Otherwise, all such characters are escaped in JSON strings. 
- - If ``check_circular`` is false, then the circular reference check - for container types will be skipped and a circular reference will - result in an ``RecursionError`` (or worse). - - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) - in strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - - If ``indent`` is a non-negative integer, then JSON array elements and - object members will be pretty-printed with that indent level. An indent - level of 0 will only insert newlines. ``None`` is the most compact - representation. - - If specified, ``separators`` should be an ``(item_separator, key_separator)`` - tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and - ``(',', ': ')`` otherwise. To get the most compact JSON representation, - you should specify ``(',', ':')`` to eliminate whitespace. - - ``default(obj)`` is a function that should return a serializable version - of obj or raise TypeError. The default simply raises TypeError. - - If *sort_keys* is true (default: ``False``), then the output of - dictionaries will be sorted by key. - - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the - ``.default()`` method to serialize additional types), specify it with - the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. - - """ - +) -> None: ... def loads( s: str | bytes | bytearray, *, @@ -227,40 +46,7 @@ def loads( parse_constant: Callable[[str], Any] | None = None, object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, -) -> Any: - """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance - containing a JSON document) to a Python object. - - ``object_hook`` is an optional function that will be called with the - result of any object literal decode (a ``dict``). The return value of - ``object_hook`` will be used instead of the ``dict``. This feature - can be used to implement custom decoders (e.g. JSON-RPC class hinting). - - ``object_pairs_hook`` is an optional function that will be called with the - result of any object literal decoded with an ordered list of pairs. The - return value of ``object_pairs_hook`` will be used instead of the ``dict``. - This feature can be used to implement custom decoders. If ``object_hook`` - is also defined, the ``object_pairs_hook`` takes priority. - - ``parse_float``, if specified, will be called with the string - of every JSON float to be decoded. By default this is equivalent to - float(num_str). This can be used to use another datatype or parser - for JSON floats (e.g. decimal.Decimal). - - ``parse_int``, if specified, will be called with the string - of every JSON int to be decoded. By default this is equivalent to - int(num_str). This can be used to use another datatype or parser - for JSON integers (e.g. float). - - ``parse_constant``, if specified, will be called with one of the - following strings: -Infinity, Infinity, NaN. - This can be used to raise an exception if invalid JSON numbers - are encountered. - - To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` - kwarg; otherwise ``JSONDecoder`` is used. - """ - +) -> Any: ... 
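The dumps()/dump()/loads() docstrings removed above described the main serialization knobs; a compact hedged illustration of them (plain stdlib `json` behaviour, not part of the patch):

import json
from decimal import Decimal

data = {"b": [1, 2.5], "a": None}
print(json.dumps(data, separators=(",", ":"), sort_keys=True))  # most compact form
print(json.dumps(data, indent=2, sort_keys=True))               # pretty-printed

# parse_float swaps in another number type during decoding.
print(json.loads('{"price": 1.10}', parse_float=Decimal))       # {'price': Decimal('1.10')}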
def load( fp: SupportsRead[str | bytes], *, @@ -271,23 +57,5 @@ def load( parse_constant: Callable[[str], Any] | None = None, object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, -) -> Any: - """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing - a JSON document) to a Python object. - - ``object_hook`` is an optional function that will be called with the - result of any object literal decode (a ``dict``). The return value of - ``object_hook`` will be used instead of the ``dict``. This feature - can be used to implement custom decoders (e.g. JSON-RPC class hinting). - - ``object_pairs_hook`` is an optional function that will be called with the - result of any object literal decoded with an ordered list of pairs. The - return value of ``object_pairs_hook`` will be used instead of the ``dict``. - This feature can be used to implement custom decoders. If ``object_hook`` - is also defined, the ``object_pairs_hook`` takes priority. - - To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` - kwarg; otherwise ``JSONDecoder`` is used. - """ - +) -> Any: ... def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi index 9f579adab15e5..8debfe6cd65a9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi @@ -1,21 +1,9 @@ -"""Implementation of JSONDecoder""" - from collections.abc import Callable from typing import Any __all__ = ["JSONDecoder", "JSONDecodeError"] class JSONDecodeError(ValueError): - """Subclass of ValueError with the following additional properties: - - msg: The unformatted error message - doc: The JSON document being parsed - pos: The start index of doc where parsing failed - lineno: The line corresponding to pos - colno: The column corresponding to pos - - """ - msg: str doc: str pos: int @@ -24,35 +12,6 @@ class JSONDecodeError(ValueError): def __init__(self, msg: str, doc: str, pos: int) -> None: ... class JSONDecoder: - """Simple JSON decoder - - Performs the following translations in decoding by default: - - +---------------+-------------------+ - | JSON | Python | - +===============+===================+ - | object | dict | - +---------------+-------------------+ - | array | list | - +---------------+-------------------+ - | string | str | - +---------------+-------------------+ - | number (int) | int | - +---------------+-------------------+ - | number (real) | float | - +---------------+-------------------+ - | true | True | - +---------------+-------------------+ - | false | False | - +---------------+-------------------+ - | null | None | - +---------------+-------------------+ - - It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as - their corresponding ``float`` values, which is outside the JSON spec. - - """ - object_hook: Callable[[dict[str, Any]], Any] parse_float: Callable[[str], Any] parse_int: Callable[[str], Any] @@ -68,52 +27,6 @@ class JSONDecoder: parse_constant: Callable[[str], Any] | None = None, strict: bool = True, object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, - ) -> None: - """``object_hook``, if specified, will be called with the result - of every JSON object decoded and its return value will be used in - place of the given ``dict``. This can be used to provide custom - deserializations (e.g. 
to support JSON-RPC class hinting). - - ``object_pairs_hook``, if specified will be called with the result of - every JSON object decoded with an ordered list of pairs. The return - value of ``object_pairs_hook`` will be used instead of the ``dict``. - This feature can be used to implement custom decoders. - If ``object_hook`` is also defined, the ``object_pairs_hook`` takes - priority. - - ``parse_float``, if specified, will be called with the string - of every JSON float to be decoded. By default this is equivalent to - float(num_str). This can be used to use another datatype or parser - for JSON floats (e.g. decimal.Decimal). - - ``parse_int``, if specified, will be called with the string - of every JSON int to be decoded. By default this is equivalent to - int(num_str). This can be used to use another datatype or parser - for JSON integers (e.g. float). - - ``parse_constant``, if specified, will be called with one of the - following strings: -Infinity, Infinity, NaN. - This can be used to raise an exception if invalid JSON numbers - are encountered. - - If ``strict`` is false (true is the default), then control - characters will be allowed inside strings. Control characters in - this context are those with character codes in the 0-31 range, - including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``. - """ - - def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: # _w is undocumented - """Return the Python representation of ``s`` (a ``str`` instance - containing a JSON document). - - """ - - def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: - """Decode a JSON document from ``s`` (a ``str`` beginning with - a JSON document) and return a 2-tuple of the Python - representation and the index in ``s`` where the document ended. - - This can be used to decode a JSON document from a string that may - have extraneous data at the end. - - """ + ) -> None: ... + def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi index b28d343d59356..83b78666d4a70 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi @@ -1,5 +1,3 @@ -"""Implementation of JSONEncoder""" - from collections.abc import Callable, Iterator from re import Pattern from typing import Any, Final @@ -10,54 +8,12 @@ HAS_UTF8: Final[Pattern[bytes]] # undocumented ESCAPE_DCT: Final[dict[str, str]] # undocumented INFINITY: Final[float] # undocumented -def py_encode_basestring(s: str) -> str: # undocumented - """Return a JSON representation of a Python string""" - -def py_encode_basestring_ascii(s: str) -> str: # undocumented - """Return an ASCII-only JSON representation of a Python string""" - -def encode_basestring(s: str, /) -> str: # undocumented - """encode_basestring(string) -> string - - Return a JSON representation of a Python string - """ - -def encode_basestring_ascii(s: str, /) -> str: # undocumented - """encode_basestring_ascii(string) -> string - - Return an ASCII-only JSON representation of a Python string - """ +def py_encode_basestring(s: str) -> str: ... # undocumented +def py_encode_basestring_ascii(s: str) -> str: ... # undocumented +def encode_basestring(s: str, /) -> str: ... # undocumented +def encode_basestring_ascii(s: str, /) -> str: ... 
# undocumented class JSONEncoder: - """Extensible JSON encoder for Python data structures. - - Supports the following objects and types by default: - - +-------------------+---------------+ - | Python | JSON | - +===================+===============+ - | dict | object | - +-------------------+---------------+ - | list, tuple | array | - +-------------------+---------------+ - | str | string | - +-------------------+---------------+ - | int, float | number | - +-------------------+---------------+ - | True | true | - +-------------------+---------------+ - | False | false | - +-------------------+---------------+ - | None | null | - +-------------------+---------------+ - - To extend this to recognize other objects, subclass and implement a - ``.default()`` method with another method that returns a serializable - object for ``o`` if possible, otherwise it should call the superclass - implementation (to raise ``TypeError``). - - """ - item_separator: str key_separator: str @@ -78,83 +34,7 @@ class JSONEncoder: indent: int | str | None = None, separators: tuple[str, str] | None = None, default: Callable[..., Any] | None = None, - ) -> None: - """Constructor for JSONEncoder, with sensible defaults. - - If skipkeys is false, then it is a TypeError to attempt - encoding of keys that are not str, int, float, bool or None. - If skipkeys is True, such items are simply skipped. - - If ensure_ascii is true, the output is guaranteed to be str - objects with all incoming non-ASCII characters escaped. If - ensure_ascii is false, the output can contain non-ASCII characters. - - If check_circular is true, then lists, dicts, and custom encoded - objects will be checked for circular references during encoding to - prevent an infinite recursion (which would cause an RecursionError). - Otherwise, no such check takes place. - - If allow_nan is true, then NaN, Infinity, and -Infinity will be - encoded as such. This behavior is not JSON specification compliant, - but is consistent with most JavaScript based encoders and decoders. - Otherwise, it will be a ValueError to encode such floats. - - If sort_keys is true, then the output of dictionaries will be - sorted by key; this is useful for regression tests to ensure - that JSON serializations can be compared on a day-to-day basis. - - If indent is a non-negative integer, then JSON array - elements and object members will be pretty-printed with that - indent level. An indent level of 0 will only insert newlines. - None is the most compact representation. - - If specified, separators should be an (item_separator, key_separator) - tuple. The default is (', ', ': ') if *indent* is ``None`` and - (',', ': ') otherwise. To get the most compact JSON representation, - you should specify (',', ':') to eliminate whitespace. - - If specified, default is a function that gets called for objects - that can't otherwise be serialized. It should return a JSON encodable - version of the object or raise a ``TypeError``. - - """ - - def default(self, o: Any) -> Any: - """Implement this method in a subclass such that it returns - a serializable object for ``o``, or calls the base implementation - (to raise a ``TypeError``). 
- - For example, to support arbitrary iterators, you could - implement default like this:: - - def default(self, o): - try: - iterable = iter(o) - except TypeError: - pass - else: - return list(iterable) - # Let the base class default method raise the TypeError - return super().default(o) - - """ - - def encode(self, o: Any) -> str: - """Return a JSON string representation of a Python data structure. - - >>> from json.encoder import JSONEncoder - >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) - '{"foo": ["bar", "baz"]}' - - """ - - def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: - """Encode the given object and yield each string - representation as available. - - For example:: - - for chunk in JSONEncoder().iterencode(bigobject): - mysocket.write(chunk) - - """ + ) -> None: ... + def default(self, o: Any) -> Any: ... + def encode(self, o: Any) -> str: ... + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi index ea8f53c47c291..68b42e92d295e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi @@ -1,5 +1,3 @@ -"""JSON token scanner""" - from _json import make_scanner as make_scanner from re import Pattern from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi index 1ca4f2ec301b2..7e7363e797f3f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi @@ -1,7 +1 @@ -"""Command-line tool to validate and pretty-print JSON - -See `json.__main__` for a usage example (invocation as -`python -m json.tool` is supported for backwards compatibility). -""" - def main() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi index dde44c3fe1c5b..6b8bdad6beb6a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi @@ -1,29 +1,15 @@ -"""Keywords (from "Grammar/python.gram") - -This file is automatically generated; please don't muck it up! - -To update the symbols in this file, 'cd' to the top directory of -the python source tree and run: - - PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen Grammar/python.gram Grammar/Tokens Lib/keyword.py - -Alternatively, you can run 'make regen-keyword'. -""" - from collections.abc import Sequence from typing import Final __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] -def iskeyword(s: str, /) -> bool: - """x.__contains__(y) <==> y in x.""" +def iskeyword(s: str, /) -> bool: ... # a list at runtime, but you're not meant to mutate it; # type it as a sequence kwlist: Final[Sequence[str]] -def issoftkeyword(s: str, /) -> bool: - """x.__contains__(y) <==> y in x.""" +def issoftkeyword(s: str, /) -> bool: ... 
# a list at runtime, but you're not meant to mutate it; # type it as a sequence diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi index 5f2b10c0dd9f7..4c87b664eb200 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi @@ -1,11 +1,3 @@ -"""A bottom-up tree matching algorithm implementation meant to speed -up 2to3's matching process. After the tree patterns are reduced to -their rarest linear path, a linear Aho-Corasick automaton is -created. The linear automaton traverses the linear paths from the -leaves to the root of the AST and returns a set of nodes for further -matching. This reduces significantly the number of candidate nodes. -""" - from _typeshed import Incomplete, SupportsGetItem from collections import defaultdict from collections.abc import Iterable @@ -14,8 +6,6 @@ from .fixer_base import BaseFix from .pytree import Leaf, Node class BMNode: - """Class for a node of the Aho-Corasick automaton used in matching""" - count: Incomplete transition_table: Incomplete fixers: Incomplete @@ -24,44 +14,15 @@ class BMNode: def __init__(self) -> None: ... class BottomMatcher: - """The main matcher class. After instantiating the patterns should - be added using the add_fixer method - """ - match: Incomplete root: Incomplete nodes: Incomplete fixers: Incomplete logger: Incomplete def __init__(self) -> None: ... - def add_fixer(self, fixer: BaseFix) -> None: - """Reduces a fixer's pattern tree to a linear path and adds it - to the matcher(a common Aho-Corasick automaton). The fixer is - appended on the matching states and called when they are - reached - """ - - def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: - """Recursively adds a linear pattern to the AC automaton""" - - def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: - """The main interface with the bottom matcher. The tree is - traversed from the bottom using the constructed - automaton. Nodes are only checked once as the tree is - retraversed. When the automaton fails, we give it one more - shot(in case the above tree matches as a whole with the - rejected leaf), then we break for the next leaf. There is the - special case of multiple arguments(see code comments) where we - recheck the nodes - - Args: - The leaves of the AST tree to be matched - - Returns: - A dictionary of node matches with fixers as the keys - """ - - def print_ac(self) -> None: - """Prints a graphviz diagram of the BM automaton(for debugging)""" + def add_fixer(self, fixer: BaseFix) -> None: ... + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ... + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ... + def print_ac(self) -> None: ... def type_repr(type_num: int) -> str | int: ... 
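The ``BottomMatcher`` and ``BaseFix`` stubs around this point describe lib2to3's internal bottom-up matching machinery. In practice that machinery is usually driven through the public ``RefactoringTool`` interface rather than used directly; a minimal sketch, assuming an interpreter that still ships ``lib2to3`` (it was removed in Python 3.13) and reusing the ``fix_has_key`` fixer whose stub appears later in this patch::

    from lib2to3 import refactor

    # RefactoringTool compiles each fixer's PATTERN; BM_compatible fixers are fed
    # to a BottomMatcher so candidate nodes are collected bottom-up before transform().
    tool = refactor.RefactoringTool(["lib2to3.fixes.fix_has_key"])
    tree = tool.refactor_string("if d.has_key(k):\n    pass\n", "<example>")
    print(str(tree))  # "if k in d:" followed by the untouched body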
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi index 95baa2bef0869..06813c94308a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi @@ -1,5 +1,3 @@ -"""Base class for fixers (optional, but recommended).""" - from _typeshed import Incomplete, StrPath from abc import ABCMeta, abstractmethod from collections.abc import MutableMapping @@ -10,14 +8,6 @@ from .pytree import Base, Leaf, Node _N = TypeVar("_N", bound=Base) class BaseFix: - """Optional base class for fixers. - - The subclass name must be FixFooBar where FooBar is the result of - removing underscores and capitalizing the words of the fix name. - For example, the class name for a fixer named 'has_key' should be - FixHasKey. - """ - PATTERN: ClassVar[str | None] pattern: Incomplete | None pattern_tree: Incomplete | None @@ -32,96 +22,21 @@ class BaseFix: BM_compatible: ClassVar[bool] syms: Incomplete log: Incomplete - def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: - """Initializer. Subclass may override. - - Args: - options: a dict containing the options passed to RefactoringTool - that could be used to customize the fixer through the command line. - log: a list to append warnings and other messages to. - """ - - def compile_pattern(self) -> None: - """Compiles self.PATTERN into self.pattern. - - Subclass may override if it doesn't want to use - self.{pattern,PATTERN} in .match(). - """ - - def set_filename(self, filename: StrPath) -> None: - """Set the filename. - - The main refactoring tool should call this. - """ - - def match(self, node: _N) -> Literal[False] | dict[str, _N]: - """Returns match for a given parse tree node. - - Should return a true or false object (not necessarily a bool). - It may return a non-empty dict of matching sub-nodes as - returned by a matching pattern. - - Subclass may override. - """ - + def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ... + def compile_pattern(self) -> None: ... + def set_filename(self, filename: StrPath) -> None: ... + def match(self, node: _N) -> Literal[False] | dict[str, _N]: ... @abstractmethod - def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: - """Returns the transformation for a given parse tree node. - - Args: - node: the root of the parse tree that matched the fixer. - results: a dict mapping symbolic names to part of the match. - - Returns: - None, or a node that is a modified copy of the - argument node. The node argument may also be modified in-place to - effect the same change. - - Subclass *must* override. - """ - - def new_name(self, template: str = "xxx_todo_changeme") -> str: - """Return a string suitable for use as an identifier - - The new name is guaranteed not to conflict with other identifiers. - """ + def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ... + def new_name(self, template: str = "xxx_todo_changeme") -> str: ... first_log: bool def log_message(self, message: str) -> None: ... - def cannot_convert(self, node: Base, reason: str | None = None) -> None: - """Warn the user that a given chunk of code is not valid Python 3, - but that it cannot be converted automatically. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. 
- """ - - def warning(self, node: Base, reason: str) -> None: - """Used for warning the user about possible uncertainty in the - translation. - - First argument is the top-level node for the code in question. - Optional second argument is why it can't be converted. - """ - - def start_tree(self, tree: Node, filename: StrPath) -> None: - """Some fixers need to maintain tree-wide state. - This method is called once, at the start of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ - - def finish_tree(self, tree: Node, filename: StrPath) -> None: - """Some fixers need to maintain tree-wide state. - This method is called once, at the conclusion of tree fix-up. - - tree - the root node of the tree to be processed. - filename - the name of the file the tree came from. - """ + def cannot_convert(self, node: Base, reason: str | None = None) -> None: ... + def warning(self, node: Base, reason: str) -> None: ... + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def finish_tree(self, tree: Node, filename: StrPath) -> None: ... class ConditionalFix(BaseFix, metaclass=ABCMeta): - """Base class for fixers which not execute if an import is found.""" - skip_on: ClassVar[str | None] def start_tree(self, tree: Node, filename: StrPath, /) -> None: ... def should_skip(self, node: Base) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi index 2ce199a27a5e8..e53e3dd864579 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi @@ -1,8 +1,3 @@ -"""Fixer for apply(). - -This converts apply(func, v, k) into (func)(*v, **k). -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi index ce79d93f4ecb9..1bf7db2f76e98 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -1,5 +1,3 @@ -"""Fixer that replaces deprecated unittest method names.""" - from typing import ClassVar, Final, Literal from ..fixer_base import BaseFix diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi index 49c66c877dff8..8ed5ccaa7fd39 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -1,5 +1,3 @@ -"""Fixer for basestring -> str.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi index bc798b5ac7313..1efca6228ea28 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes buffer(...) into memoryview(...).""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi index 3cc91c55696a4..08c54c3bc376b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi @@ -1,29 +1,3 @@ -"""Fixer for dict methods. - -d.keys() -> list(d.keys()) -d.items() -> list(d.items()) -d.values() -> list(d.values()) - -d.iterkeys() -> iter(d.keys()) -d.iteritems() -> iter(d.items()) -d.itervalues() -> iter(d.values()) - -d.viewkeys() -> d.keys() -d.viewitems() -> d.items() -d.viewvalues() -> d.values() - -Except in certain very specific contexts: the iter() can be dropped -when the context is list(), sorted(), iter() or for...in; the list() -can be dropped when the context is list() or sorted() (but not iter() -or for...in!). Special contexts that apply to both: list(), sorted(), tuple() -set(), any(), all(), sum(). - -Note: iter(d.keys()) could be written as iter(d) but since the -original d.iterkeys() was also redundant we don't fix this. And there -are (rare) contexts where it makes a difference (e.g. when passing it -as an argument to a function that introspects the argument). -""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi index 5d12137d55b4d..30930a2c381e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi @@ -1,25 +1,3 @@ -"""Fixer for except statements with named exceptions. - -The following cases will be converted: - -- "except E, T:" where T is a name: - - except E as T: - -- "except E, T:" where T is not a name, tuple or list: - - except E as t: - T = t - - This is done because the target of an "except" clause must be a - name. - -- "except E, T:" where T is a tuple or list literal: - - except E as t: - T = t.args -""" - from collections.abc import Generator, Iterable from typing import ClassVar, Literal, TypeVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi index b8a77be440265..71e2a820a564d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi @@ -1,11 +1,3 @@ -"""Fixer for exec. - -This converts usages of the exec statement into calls to a built-in -exec() function. - -exec code in ns1, ns2 -> exec(code, ns1, ns2) -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi index 9d77b2d8db601..8122a6389b124 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -1,9 +1,3 @@ -"""Fixer for execfile. - -This converts usages of the execfile function into calls to the built-in -exec() function. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi index b0cd78d8fef06..7fc910c0a1bcd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -1,7 +1,3 @@ -""" -Convert use of sys.exitfunc to use the atexit module. -""" - from _typeshed import Incomplete, StrPath from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi index ef5907daacb52..638889be8b65b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi @@ -1,15 +1,3 @@ -"""Fixer that changes filter(F, X) into list(filter(F, X)). - -We avoid the transformation if the filter() call is directly contained -in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or -for V in <>:. - -NOTE: This is still not correct if the original code was depending on -filter(F, X) to return a string if X is a string and a tuple if X is a -tuple. That would require type inference, which we don't do. Let -Python 2.6 figure it out. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi index f9d236bd0f365..60487bb1f2a62 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -1,5 +1,3 @@ -"""Fix function attribute names (f.func_x -> f.__x__).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi index bf23287d81027..12ed93f21223d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi @@ -1,8 +1,3 @@ -"""Remove __future__ imports - -from __future__ import foo is replaced with an empty line. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi index fb20d477180ef..aa3ccf50be9e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -1,7 +1,3 @@ -""" -Fixer that changes os.getcwdu() to os.getcwd(). -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi index 1323b5209ddd6..f6f5a072e21b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -1,31 +1,3 @@ -"""Fixer for has_key(). 
- -Calls to .has_key() methods are expressed in terms of the 'in' -operator: - - d.has_key(k) -> k in d - -CAVEATS: -1) While the primary target of this fixer is dict.has_key(), the - fixer will change any has_key() method call, regardless of its - class. - -2) Cases like this will not be converted: - - m = d.has_key - if m(k): - ... - - Only *calls* to has_key() are converted. While it is possible to - convert the above to something like - - m = d.__contains__ - if m(k): - ... - - this is currently not done. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi index a654a002d46bc..6b2723d09d436 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -1,31 +1,3 @@ -"""Adjust some old Python 2 idioms to their modern counterparts. - -* Change some type comparisons to isinstance() calls: - type(x) == T -> isinstance(x, T) - type(x) is T -> isinstance(x, T) - type(x) != T -> not isinstance(x, T) - type(x) is not T -> not isinstance(x, T) - -* Change "while 1:" into "while True:". - -* Change both - - v = list(EXPR) - v.sort() - foo(v) - -and the more general - - v = EXPR - v.sort() - foo(v) - -into - - v = sorted(EXPR) - foo(v) -""" - from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi index 87577fd281fda..bf4b2d00925eb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi @@ -1,15 +1,3 @@ -"""Fixer for import statements. -If spam is being imported from the local directory, this import: - from spam import eggs -Becomes: - from .spam import eggs - -And this import: - import spam -Becomes: - from . import spam -""" - from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Literal @@ -17,10 +5,7 @@ from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Node -def traverse_imports(names) -> Generator[str, None, None]: - """ - Walks over all the names imported in a dotted_as_names node. - """ +def traverse_imports(names) -> Generator[str, None, None]: ... 
class FixImport(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi index 4aa8734f15a57..c747af529f440 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi @@ -1,5 +1,3 @@ -"""Fix incompatible imports and module references.""" - from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi index dba1376beda7c..618ecd0424d86 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -1,7 +1,3 @@ -"""Fix incompatible imports and module references that must be fixed after -fix_imports. -""" - from typing import Final from . import fix_imports diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi index 1105e93218103..fc1279535bedb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes input(...) into eval(input(...)).""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi index 4808fe189d1b5..804b7b2517a50 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi @@ -1,8 +1,3 @@ -"""Fixer for intern(). - -intern(s) -> sys.intern(s) -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi index 7d236eef8de49..31eefd6253174 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -1,11 +1,3 @@ -"""Fixer that cleans up a tuple argument to isinstance after the tokens -in it were fixed. This is mainly used to remove double occurrences of -tokens as a leftover of the long -> int / unicode -> str conversion. - -eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) - -> isinstance(x, int) -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi index 2bba3174b7b50..229d86ee71bb7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -1,12 +1,3 @@ -"""Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and -itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) - -imports from itertools are fixed in fix_itertools_import.py - -If itertools is imported as something else (ie: import itertools as it; -it.izip(spam, eggs)) method calls will not get fixed. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi index 554f71d74f00c..39a4da506867e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -1,5 +1,3 @@ -"""Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse)""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi index 5b4e0002ec8c5..9ccf2711d7d12 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns 'long' into 'int' everywhere.""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi index 866c5dc8b3ded..6e60282cf0be5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi @@ -1,21 +1,3 @@ -"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there -exists a 'from future_builtins import map' statement in the top-level -namespace. - -As a special case, map(None, X) is changed into list(X). (This is -necessary because the semantics are changed in this case -- the new -map(None, X) is equivalent to [(x,) for x in X].) - -We avoid the transformation (except for the special case mentioned -above) if the map() call is directly contained in iter(<>), list(<>), -tuple(<>), sorted(<>), ...join(<>), or for V in <>:. - -NOTE: This is still not correct if the original code was depending on -map(F, X, Y, ...) to go on until the longest argument is exhausted, -substituting None for missing values -- like zip(), it now stops as -soon as the shortest argument is exhausted. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi index 70ced90db3798..1b1ec82032b4f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -1,51 +1,15 @@ -"""Fixer for __metaclass__ = X -> (metaclass=X) methods. 
- -The various forms of classef (inherits nothing, inherits once, inherits -many) don't parse the same in the CST so we look at ALL classes for -a __metaclass__ and if we find one normalize the inherits to all be -an arglist. - -For one-liner classes ('class X: pass') there is no indent/dedent so -we normalize those into having a suite. - -Moving the __metaclass__ into the classdef can also cause the class -body to be empty so there is some special casing for that as well. - -This fixer also tries very hard to keep original indenting and spacing -in all those corner cases. - -""" - from collections.abc import Generator from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Base -def has_metaclass(parent): - """we have to check the cls_node without changing it. - There are two possibilities: - 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') - 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') - """ - -def fixup_parse_tree(cls_node) -> None: - """one-line classes don't get a suite in the parse tree so we add - one to normalize the tree - """ - -def fixup_simple_stmt(parent, i, stmt_node) -> None: - """if there is a semi-colon all the parts count as part of the same - simple_stmt. We just want the __metaclass__ part so we move - everything after the semi-colon into its own simple_stmt node - """ - +def has_metaclass(parent): ... +def fixup_parse_tree(cls_node) -> None: ... +def fixup_simple_stmt(parent, i, stmt_node) -> None: ... def remove_trailing_newline(node) -> None: ... def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... -def fixup_indent(suite) -> None: - """If an INDENT is followed by a thing with a prefix then nuke the prefix - Otherwise we get in trouble when removing __metaclass__ at suite start - """ +def fixup_indent(suite) -> None: ... class FixMetaclass(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi index b886514c168e3..ca9b71e43f856 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -1,5 +1,3 @@ -"""Fix bound method attributes (method.im_? -> method.__?__).""" - from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi index 0e851740281ef..6ff1220b04728 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns <> into !=.""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi index 87a4d61e79ebf..b13914ae8c018 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi @@ -1,5 +1,3 @@ -"""Fixer for it.next() -> next(it), per PEP 3114.""" - from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi index 60bd7dffbaf19..5c37fc12ef089 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -1,5 +1,3 @@ -"""Fixer for __nonzero__ -> __bool__ methods.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi index 71dc9016199bd..113145e395f62 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -1,5 +1,3 @@ -"""Fixer that turns 1L into 1, 0755 into 0o755.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi index 08750a96891a2..b9863d38347be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi @@ -1,14 +1,3 @@ -"""Fixer for operator functions. - -operator.isCallable(obj) -> callable(obj) -operator.sequenceIncludes(obj) -> operator.contains(obj) -operator.isSequenceType(obj) -> isinstance(obj, collections.abc.Sequence) -operator.isMappingType(obj) -> isinstance(obj, collections.abc.Mapping) -operator.isNumberType(obj) -> isinstance(obj, numbers.Number) -operator.repeat(obj, n) -> operator.mul(obj, n) -operator.irepeat(obj, n) -> operator.imul(obj, n) -""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi index ba7f01a849ccd..237df6c5ff2c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi @@ -1,8 +1,3 @@ -"""Fixer that adds parentheses where they are required - -This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi index 1d6a8c661a20e..e9564b04ac75f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi @@ -1,15 +1,3 @@ -"""Fixer for print. - -Change: - 'print' into 'print()' - 'print ...' into 'print(...)' - 'print ... ,' into 'print(..., end=" ")' - 'print >>x, ...' 
into 'print(..., file=x)' - -No changes are applied if print_function is imported from __future__ - -""" - from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi index d5fa6461e6cc8..e02c3080f4093 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi @@ -1,26 +1,3 @@ -"""Fixer for 'raise E, V, T' - -raise -> raise -raise E -> raise E -raise E, V -> raise E(V) -raise E, V, T -> raise E(V).with_traceback(T) -raise E, None, T -> raise E.with_traceback(T) - -raise (((E, E'), E''), E'''), V -> raise E(V) -raise "foo", V, T -> warns about string exceptions - - -CAVEATS: -1) "raise E, V" will be incorrectly translated if V is an exception - instance. The correct Python 3 idiom is - - raise E from V - - but since we can't detect instance-hood by syntax alone and since - any client code would have to be changed as well, we don't automate - this. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi index 717649bf41e55..d1a0eb0e0a7ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes raw_input(...) into input(...).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi index 24a992749ec95..f8ad876c21a69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -1,9 +1,3 @@ -"""Fixer for reduce(). - -Makes sure reduce() is imported from the functools module if reduce is -used in that module. -""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi index 23f98533310b1..820075438eca1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi @@ -1,8 +1,3 @@ -"""Fixer for reload(). - -reload(s) -> importlib.reload(s) -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi index ff1d30d77b589..652d8f15ea1a9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi @@ -1,9 +1,3 @@ -"""Fix incompatible renames - -Fixes: - * sys.maxint -> sys.maxsize -""" - from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi index 84569e39856b5..3b192d396dd68 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi @@ -1,5 +1,3 @@ -"""Fixer that transforms `xyzzy` into repr(xyzzy).""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi index eb550907dee2d..6962ff326f56a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -1,7 +1,3 @@ -""" -Optional fixer to transform set() calls to set literals. -""" - from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi index adb75ffcc3e4a..ba914bcab5d6b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -1,5 +1,3 @@ -"""Fixer for StandardError -> Exception.""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi index 7797fbae94bcd..0fa1a47870872 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -1,10 +1,3 @@ -"""Fixer for sys.exc_{type, value, traceback} - -sys.exc_type -> sys.exc_info()[0] -sys.exc_value -> sys.exc_info()[1] -sys.exc_traceback -> sys.exc_info()[2] -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi index 63291df410cd4..4c99855e5c373 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi @@ -1,12 +1,3 @@ -"""Fixer for generator.throw(E, V, T). - -g.throw(E) -> g.throw(E) -g.throw(E, V) -> g.throw(E(V)) -g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) - -g.throw("foo"[, V[, T]]) will warn about string exceptions. -""" - from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi index 263a340f477fd..7f4f7f4e8656e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -1,22 +1,3 @@ -"""Fixer for function definitions with tuple parameters. - -def func(((a, b), c), d): - ... - - -> - -def func(x, d): - ((a, b), c) = x - ... - -It will also support lambdas: - - lambda (x, y): x + y -> lambda t: t[0] + t[1] - - # The parens are a syntax error in Python 3 - lambda (x): x + y -> lambda x: x + y -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi index ea9f88f8fed62..e26dbec71a97d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi @@ -1,21 +1,3 @@ -"""Fixer for removing uses of the types module. - -These work for only the known names in the types module. The forms above -can include types. or not. ie, It is assumed the module is imported either as: - - import types - from types import ... # either * or specific types - -The import statements are not modified. - -There should be another fixer that handles at least the following constants: - - type([]) -> list - type(()) -> tuple - type('') -> str - -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi index 2a4d1e9ced15d..85d1315213b96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -1,13 +1,3 @@ -"""Fixer for unicode. - -* Changes unicode to str and unichr to chr. - -* If "...\\u..." is not unicode literal change it into "...\\\\u...". - -* Change u"..." into "...". - -""" - from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi index 214350c28e523..abdcc0f62970f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -1,8 +1,3 @@ -"""Fix changes imports of urllib which are now incompatible. -This is rather similar to fix_imports, but because of the more -complex nature of the fixing for urllib, it has its own fixer. -""" - from collections.abc import Generator from typing import Final, Literal @@ -14,19 +9,7 @@ def build_pattern() -> Generator[str, None, None]: ... class FixUrllib(FixImports): def build_pattern(self): ... - def transform_import(self, node, results) -> None: - """Transform for the basic import case. Replaces the old - import name with a comma separated list of its - replacements. - """ - - def transform_member(self, node, results): - """Transform for imports of specific module elements. Replaces - the module to be imported from with the appropriate new - module. 
- """ - - def transform_dot(self, node, results) -> None: - """Transform for calls to module members in code.""" - + def transform_import(self, node, results) -> None: ... + def transform_member(self, node, results): ... + def transform_dot(self, node, results) -> None: ... def transform(self, node, results) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi index 43db27641fa01..4ce5cb2c4ac16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -1,10 +1,3 @@ -"""Fixer that changes 'a ,b' into 'a, b'. - -This also changes '{a :b}' into '{a: b}', but does not touch other -uses of colons. It does not touch other uses of whitespace. - -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi index aa87720f33b63..71318b7660b68 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -1,5 +1,3 @@ -"""Fixer that changes xrange(...) into range(...).""" - from _typeshed import Incomplete, StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi index d48c2ef460e82..b4794143a0031 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -1,8 +1,3 @@ -"""Fix "for x in f.xreadlines()" -> "for x in f". - -This fixer will also convert g(f.xreadlines) into g(f.__iter__). -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi index 06a30dd1be292..805886ee31805 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi @@ -1,12 +1,3 @@ -""" -Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) -unless there exists a 'from future_builtins import zip' statement in the -top-level namespace. - -We avoid the transformation if the zip() call is directly contained in -iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. -""" - from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi index 1a176b656c3d4..5b7fdfca5d65d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi @@ -1,7 +1,3 @@ -""" -Main program for 2to3. -""" - from _typeshed import FileDescriptorOrPath from collections.abc import Container, Iterable, Iterator, Mapping, Sequence from logging import _ExcInfoType @@ -9,19 +5,9 @@ from typing import AnyStr, Literal from . import refactor as refactor -def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: - """Return a unified diff of two strings.""" +def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ... 
class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): - """ - A refactoring tool that can avoid overwriting its input files. - Prints output to stdout. - - Output files can optionally be written to a different directory and or - have an extra file suffix appended to their name for use in situations - where you do not want to replace the input files. - """ - nobackups: bool show_diffs: bool def __init__( @@ -34,25 +20,7 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): input_base_dir: str = "", output_dir: str = "", append_suffix: str = "", - ) -> None: - """ - Args: - fixers: A list of fixers to import. - options: A dict with RefactoringTool configuration. - explicit: A list of fixers to run even if they are explicit. - nobackups: If true no backup '.bak' files will be created for those - files that are being refactored. - show_diffs: Should diffs of the refactoring be printed to stdout? - input_base_dir: The base directory for all input files. This class - will strip this path prefix off of filenames before substituting - it with output_dir. Only meaningful if output_dir is supplied. - All files processed by refactor() must start with this path. - output_dir: If supplied, all converted files will be written into - this directory tree instead of input_base_dir. - append_suffix: If supplied, all files output by this tool will have - this appended to their filename. Useful for changing .py to - .py3 for example by passing append_suffix='3'. - """ + ) -> None: ... # Same as super.log_error and Logger.error def log_error( # type: ignore[override] self, @@ -71,13 +39,4 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ... # type: ignore[override] def warn(msg: object) -> None: ... -def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: - """Main program. - - Args: - fixer_pkg: the name of a package where the fixers are located. - args: optional; a list of command line arguments. If omitted, - sys.argv[1:] is used. - - Returns a suggested exit status (0, 1, 2). - """ +def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi index 8a246f1044397..de8a874f434d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -1,5 +1,3 @@ -"""The pgen2 package.""" - from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi index 957375a0bc620..dea13fb9d0f8b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -1,9 +1,3 @@ -"""Parser driver. - -This provides a high-level interface to parse a file into a syntax tree. - -""" - from _typeshed import StrPath from collections.abc import Iterable from logging import Logger @@ -20,22 +14,14 @@ class Driver: logger: Logger convert: _Convert def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... 
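The ``Driver`` methods whose docstrings are removed below (``parse_string``, ``parse_file`` and friends) are lib2to3's parsing entry points; a minimal sketch of parsing a string into a pytree, again assuming a pre-3.13 interpreter that still bundles ``lib2to3``::

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    # Build a driver for the bundled Python grammar and parse source text into
    # a pytree node; str() on the node round-trips the original source.
    d = driver.Driver(pygram.python_grammar_no_print_statement, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")
    print(type(tree).__name__, repr(str(tree)))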
- def parse_tokens(self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False) -> _NL: - """Parse a series of tokens and return the syntax tree.""" - - def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree.""" - - def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree.""" - - def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: - """Parse a file and return the syntax tree.""" - - def parse_string(self, text: str, debug: bool = False) -> _NL: - """Parse a string and return the syntax tree.""" + def parse_tokens( + self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False + ) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... def load_grammar( gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None -) -> Grammar: - """Load the grammar (maybe from a pickle).""" +) -> Grammar: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi index abfd18b546326..bef0a7922683b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,14 +1,3 @@ -"""This module defines the data structures used to represent a grammar. - -These are a bit arcane because they are derived from the data -structures used by Python's 'pgen' parser generator. - -There's also a table here mapping operators to their names in the -token module; the Python tokenize module reports all operators as the -fallback token code OP, but the parser needs the actual token code. - -""" - from _typeshed import StrPath from typing_extensions import Self, TypeAlias @@ -17,59 +6,6 @@ _DFA: TypeAlias = list[list[tuple[int, int]]] _DFAS: TypeAlias = tuple[_DFA, dict[int, int]] class Grammar: - """Pgen parsing tables conversion class. - - Once initialized, this class supplies the grammar tables for the - parsing engine implemented by parse.py. The parsing engine - accesses the instance variables directly. The class here does not - provide initialization of the tables; several subclasses exist to - do this (see the conv and pgen modules). - - The load() method reads the tables from a pickle file, which is - much faster than the other ways offered by subclasses. The pickle - file is written by calling dump() (after loading the grammar - tables using a subclass). The report() method prints a readable - representation of the tables to stdout, for debugging. - - The instance variables are as follows: - - symbol2number -- a dict mapping symbol names to numbers. Symbol - numbers are always 256 or higher, to distinguish - them from token numbers, which are between 0 and - 255 (inclusive). - - number2symbol -- a dict mapping numbers to symbol names; - these two are each other's inverse. - - states -- a list of DFAs, where each DFA is a list of - states, each state is a list of arcs, and each - arc is a (i, j) pair where i is a label and j is - a state number. 
The DFA number is the index into - this list. (This name is slightly confusing.) - Final states are represented by a special arc of - the form (0, j) where j is its own state number. - - dfas -- a dict mapping symbol numbers to (DFA, first) - pairs, where DFA is an item from the states list - above, and first is a set of tokens that can - begin this grammar rule (represented by a dict - whose values are always 1). - - labels -- a list of (x, y) pairs where x is either a token - number or a symbol number, and y is either None - or a string; the strings are keywords. The label - number is the index in this list; label numbers - are used to mark state transitions (arcs) in the - DFAs. - - start -- the number of the grammar's start symbol. - - keywords -- a dict mapping keyword strings to arc labels. - - tokens -- a dict mapping token numbers to arc labels. - - """ - symbol2number: dict[str, int] number2symbol: dict[int, str] states: list[_DFA] @@ -79,19 +15,10 @@ class Grammar: tokens: dict[int, int] symbol2label: dict[str, int] start: int - def dump(self, filename: StrPath) -> None: - """Dump the grammar tables to a pickle file.""" - - def load(self, filename: StrPath) -> None: - """Load the grammar tables from a pickle file.""" - - def copy(self) -> Self: - """ - Copy the grammar. - """ - - def report(self) -> None: - """Dump the grammar tables to standard output, for debugging.""" + def dump(self, filename: StrPath) -> None: ... + def load(self, filename: StrPath) -> None: ... + def copy(self) -> Self: ... + def report(self) -> None: ... opmap_raw: str opmap: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi index 9b991d20e4fd1..c3fabe8a5177f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi @@ -1,5 +1,3 @@ -"""Safely evaluate Python string literals without using eval().""" - from re import Match simple_escapes: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi index a1841da5f6ea1..320c5f018d43f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -1,12 +1,3 @@ -"""Parser engine for the grammar tables generated by pgen. - -The grammar table must be loaded first. - -See Parser/parser.c in the Python distribution for additional info on -how this parsing engine works. - -""" - from _typeshed import Incomplete from collections.abc import Sequence from typing_extensions import TypeAlias @@ -18,8 +9,6 @@ from .grammar import _DFAS, Grammar _Context: TypeAlias = Sequence[Incomplete] class ParseError(Exception): - """Exception to signal the parser is stuck.""" - msg: str type: int value: str | None @@ -27,95 +16,15 @@ class ParseError(Exception): def __init__(self, msg: str, type: int, value: str | None, context: _Context) -> None: ... class Parser: - """Parser engine. - - The proper usage sequence is: - - p = Parser(grammar, [converter]) # create instance - p.setup([start]) # prepare for parsing - : - if p.addtoken(...): # parse a token; may raise ParseError - break - root = p.rootnode # root of abstract syntax tree - - A Parser instance may be reused by calling setup() repeatedly. 
- - A Parser instance contains state pertaining to the current token - sequence, and should not be used concurrently by different threads - to parse separate token sequences. - - See driver.py for how to get input tokens by tokenizing a file or - string. - - Parsing is complete when addtoken() returns True; the root of the - abstract syntax tree can then be retrieved from the rootnode - instance variable. When a syntax error occurs, addtoken() raises - the ParseError exception. There is no error recovery; the parser - cannot be used after a syntax error was reported (but it can be - reinitialized by calling setup()). - - """ - grammar: Grammar convert: _Convert stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None used_names: set[str] - def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: - """Constructor. - - The grammar argument is a grammar.Grammar instance; see the - grammar module for more information. - - The parser is not ready yet for parsing; you must call the - setup() method to get it started. - - The optional convert argument is a function mapping concrete - syntax tree nodes to abstract syntax tree nodes. If not - given, no conversion is done and the syntax tree produced is - the concrete syntax tree. If given, it must be a function of - two arguments, the first being the grammar (a grammar.Grammar - instance), and the second being the concrete syntax tree node - to be converted. The syntax tree is converted from the bottom - up. - - A concrete syntax tree node is a (type, value, context, nodes) - tuple, where type is the node type (a token or symbol number), - value is None for symbols and a string for tokens, context is - None or an opaque value used for error reporting (typically a - (lineno, offset) pair), and nodes is a list of children for - symbols, and None for tokens. - - An abstract syntax tree node may be anything; this is entirely - up to the converter function. - - """ - - def setup(self, start: int | None = None) -> None: - """Prepare for parsing. - - This *must* be called before starting to parse. - - The optional argument is an alternative start symbol; it - defaults to the grammar's start symbol. - - You can use a Parser instance to parse any number of programs; - each time you call setup() the parser is reset to an initial - state determined by the (implicit or explicit) start symbol. - - """ - - def addtoken(self, type: int, value: str | None, context: _Context) -> bool: - """Add a token; return True iff this is the end of the program.""" - - def classify(self, type: int, value: str | None, context: _Context) -> int: - """Turn a token into a label. (Internal)""" - - def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: - """Shift a token. (Internal)""" - - def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: - """Push a nonterminal. (Internal)""" - - def pop(self) -> None: - """Pop a nonterminal. (Internal)""" + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... + def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... + def classify(self, type: int, value: str | None, context: _Context) -> int: ... + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... + def pop(self) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi index e465beca208e6..6898517acee64 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi @@ -1,5 +1,3 @@ -"""Token constants (from "token.h").""" - from typing import Final ENDMARKER: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index ec35feb9ad494..af54de1b51d33 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -1,28 +1,3 @@ -"""Tokenization help for Python programs. - -generate_tokens(readline) is a generator that breaks a stream of -text into Python tokens. It accepts a readline-like method which is called -repeatedly to get the next line of input (or "" for EOF). It generates -5-tuples with these members: - - the token type (see token.py) - the token (a string) - the starting (row, column) indices of the token (a 2-tuple of ints) - the ending (row, column) indices of the token (a 2-tuple of ints) - the original line (string) - -It is designed to match the working of the Python tokenizer exactly, except -that it produces COMMENT tokens for comments and gives type OP for all -operators - -Older entry points - tokenize_loop(readline, tokeneater) - tokenize(readline, tokeneater=printtoken) -are the same, except instead of generating tokens, tokeneater is a callback -function to which the 5 fields described above are passed as 5 arguments, -each time a new token is found. -""" - from collections.abc import Callable, Iterable, Iterator from typing_extensions import TypeAlias @@ -107,19 +82,7 @@ _TokenInfo: TypeAlias = tuple[int, str, _Coord, _Coord, str] class TokenError(Exception): ... class StopTokenizing(Exception): ... -def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: - """ - The tokenize() function accepts two parameters: one representing the - input stream, and one providing an output mechanism for tokenize(). - - The first parameter, readline, must be a callable object which provides - the same interface as the readline() method of built-in file objects. - Each call to the function should return one line of input as a string. - - The second parameter, tokeneater, must also be a callable object. It is - called once for each token, with five arguments, corresponding to the - tuples generated by generate_tokens(). - """ +def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ... class Untokenizer: tokens: list[str] @@ -129,38 +92,5 @@ class Untokenizer: def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... -def untokenize(iterable: Iterable[_TokenInfo]) -> str: - """Transform tokens back into Python source code. - - Each element returned by the iterable must be a token sequence - with at least two elements, a token number and token value. If - only two tokens are passed, the resulting output is poor. 
- - Round-trip invariant for full input: - Untokenized source will match input source exactly - - Round-trip invariant for limited input: - # Output text will tokenize the back to the input - t1 = [tok[:2] for tok in generate_tokens(f.readline)] - newcode = untokenize(t1) - readline = iter(newcode.splitlines(1)).next - t2 = [tok[:2] for tokin generate_tokens(readline)] - assert t1 == t2 - """ - -def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: - """ - The generate_tokens() generator requires one argument, readline, which - must be a callable object which provides the same interface as the - readline() method of built-in file objects. Each call to the function - should return one line of input as a string. Alternately, readline - can be a callable function terminating with StopIteration: - readline = open(myfile).next # Example of alternate readline - - The generator produces 5-tuples with these members: the token type; the - token string; a 2-tuple (srow, scol) of ints specifying the row and - column where the token begins in the source; a 2-tuple (erow, ecol) of - ints specifying the row and column where the token ends in the source; - and the line on which the token was found. The line passed is the - physical line. - """ +def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... +def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi index 1f43c10e7bcf4..86c74b54888af 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,14 +1,7 @@ -"""Export the Python grammar and symbols.""" - from .pgen2.grammar import Grammar class Symbols: - def __init__(self, grammar: Grammar) -> None: - """Initializer. - - Creates an attribute for each grammar symbol (nonterminal), - whose value is the symbol's type (an int >= 256). - """ + def __init__(self, grammar: Grammar) -> None: ... class python_symbols(Symbols): and_expr: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi index 1c2a2b4d97c1d..51bdbc75e1421 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi @@ -1,12 +1,3 @@ -""" -Python parse tree definitions. - -This is a very concrete parse tree; we need to keep every token and -even the comments and whitespace between tokens. - -There's also a pattern matching implementation here. -""" - from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused from abc import abstractmethod from collections.abc import Iterable, Iterator, MutableSequence @@ -26,101 +17,35 @@ HUGE: Final = 0x7FFFFFFF def type_repr(type_num: int) -> str | int: ... class Base: - """ - Abstract base class for Node and Leaf. - - This provides some default functionality and boilerplate using the - template pattern. - - A node may be a subnode of at most one parent. - """ - type: int parent: Node | None prefix: str children: list[_NL] was_changed: bool was_checked: bool - def __eq__(self, other: object) -> bool: - """ - Compare two nodes for equality. - - This calls the method _eq(). - """ + def __eq__(self, other: object) -> bool: ... 
__hash__: ClassVar[None] # type: ignore[assignment] @abstractmethod - def _eq(self, other: Base) -> bool: - """ - Compare two nodes for equality. - - This is called by __eq__ and __ne__. It is only called if the two nodes - have the same type. This must be implemented by the concrete subclass. - Nodes should be considered equal if they have the same structure, - ignoring the prefix string and other context information. - """ - + def _eq(self, other: Base) -> bool: ... @abstractmethod - def clone(self) -> Self: - """ - Return a cloned (deep) copy of self. - - This must be implemented by the concrete subclass. - """ - + def clone(self) -> Self: ... @abstractmethod - def post_order(self) -> Iterator[Self]: - """ - Return a post-order iterator for the tree. - - This must be implemented by the concrete subclass. - """ - + def post_order(self) -> Iterator[Self]: ... @abstractmethod - def pre_order(self) -> Iterator[Self]: - """ - Return a pre-order iterator for the tree. - - This must be implemented by the concrete subclass. - """ - - def replace(self, new: _NL | list[_NL]) -> None: - """Replace this node with a new one in the parent.""" - - def get_lineno(self) -> int: - """Return the line number which generated the invocant node.""" - + def pre_order(self) -> Iterator[Self]: ... + def replace(self, new: _NL | list[_NL]) -> None: ... + def get_lineno(self) -> int: ... def changed(self) -> None: ... - def remove(self) -> int | None: - """ - Remove the node from the tree. Returns the position of the node in its - parent's children before it was removed. - """ - + def remove(self) -> int | None: ... @property - def next_sibling(self) -> _NL | None: - """ - The node immediately following the invocant in their parent's children - list. If the invocant does not have a next sibling, it is None - """ - + def next_sibling(self) -> _NL | None: ... @property - def prev_sibling(self) -> _NL | None: - """ - The node immediately preceding the invocant in their parent's children - list. If the invocant does not have a previous sibling, it is None. - """ - + def prev_sibling(self) -> _NL | None: ... def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... - def get_suffix(self) -> str: - """ - Return the string immediately following the invocant node. This is - effectively equivalent to node.next_sibling.prefix - """ + def get_suffix(self) -> str: ... class Node(Base): - """Concrete implementation for interior nodes.""" - fixers_applied: MutableSequence[BaseFix] | None # Is Unbound until set in refactor.RefactoringTool future_features: frozenset[Incomplete] @@ -133,56 +58,17 @@ class Node(Base): context: Unused = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] | None = None, - ) -> None: - """ - Initializer. - - Takes a type constant (a symbol number >= 256), a sequence of - child nodes, and an optional context keyword argument. - - As a side effect, the parent pointers of the children are updated. - """ - - def _eq(self, other: Base) -> bool: - """Compare two nodes for equality.""" - - def clone(self) -> Node: - """Return a cloned (deep) copy of self.""" - - def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree.""" - - def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree.""" - - def set_child(self, i: int, child: _NL) -> None: - """ - Equivalent to 'node.children[i] = child'. This method also sets the - child's parent attribute appropriately. 
- """ - - def insert_child(self, i: int, child: _NL) -> None: - """ - Equivalent to 'node.children.insert(i, child)'. This method also sets - the child's parent attribute appropriately. - """ - - def append_child(self, child: _NL) -> None: - """ - Equivalent to 'node.children.append(child)'. This method also sets the - child's parent attribute appropriately. - """ - - def __unicode__(self) -> str: - """ - Return a pretty string representation. - - This reproduces the input source exactly. - """ + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Node: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... + def set_child(self, i: int, child: _NL) -> None: ... + def insert_child(self, i: int, child: _NL) -> None: ... + def append_child(self, child: _NL) -> None: ... + def __unicode__(self) -> str: ... class Leaf(Base): - """Concrete implementation for leaf nodes.""" - lineno: int column: int value: str @@ -194,188 +80,39 @@ class Leaf(Base): context: _Context | None = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] = [], - ) -> None: - """ - Initializer. - - Takes a type constant (a token number < 256), a string value, and an - optional context keyword argument. - """ - - def _eq(self, other: Base) -> bool: - """Compare two nodes for equality.""" + ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Leaf: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... + def __unicode__(self) -> str: ... - def clone(self) -> Leaf: - """Return a cloned (deep) copy of self.""" - - def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree.""" - - def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree.""" - - def __unicode__(self) -> str: - """ - Return a pretty string representation. - - This reproduces the input source exactly. - """ - -def convert(gr: Grammar, raw_node: _RawNode) -> _NL: - """ - Convert raw node information to a Node or Leaf instance. - - This is passed to the parser driver which calls it whenever a reduction of a - grammar rule produces a new complete node, so that the tree is build - strictly bottom-up. - """ +def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... class BasePattern: - """ - A pattern is a tree matching pattern. - - It looks for a specific node type (token or symbol), and - optionally for a specific content. - - This is an abstract base class. There are three concrete - subclasses: - - - LeafPattern matches a single leaf node; - - NodePattern matches a single node (usually non-leaf); - - WildcardPattern matches a sequence of nodes of variable length. - """ - type: int content: str | None name: str | None - def optimize(self) -> BasePattern: # sic, subclasses are free to optimize themselves into different patterns - """ - A subclass can define this as a hook for optimizations. - - Returns either self or another node with the same effect. - """ - - def match(self, node: _NL, results: _Results | None = None) -> bool: - """ - Does this pattern exactly match a node? - - Returns True if it matches, False if not. - - If results is not None, it must be a dict which will be - updated with the nodes matching named subpatterns. - - Default implementation for non-wildcard patterns. - """ - - def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: - """ - Does this pattern exactly match a sequence of nodes? 
- - Default implementation for non-wildcard patterns. - """ - - def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: - """ - Generator yielding all matches for this pattern. - - Default implementation for non-wildcard patterns. - """ + def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ... + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ... class LeafPattern(BasePattern): - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: - """ - Initializer. Takes optional type, content, and name. - - The type, if given must be a token type (< 256). If not given, - this matches any *leaf* node; the content may still be required. - - The content, if given, must be a string. - - If a name is given, the matching node is stored in the results - dict under that key. - """ + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class NodePattern(BasePattern): wildcards: bool - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: - """ - Initializer. Takes optional type, content, and name. - - The type, if given, must be a symbol type (>= 256). If the - type is None this matches *any* single node (leaf or not), - except if content is not None, in which it only matches - non-leaf nodes that also match the content pattern. - - The content, if not None, must be a sequence of Patterns that - must match the node's children exactly. If the content is - given, the type must not be None. - - If a name is given, the matching node is stored in the results - dict under that key. - """ + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class WildcardPattern(BasePattern): - """ - A wildcard pattern can match zero or more nodes. - - This has all the flexibility needed to implement patterns like: - - .* .+ .? .{m,n} - (a b c | d e | f) - (...)* (...)+ (...)? (...){m,n} - - except it always uses non-greedy matching. - """ - min: int max: int - def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: - """ - Initializer. - - Args: - content: optional sequence of subsequences of patterns; - if absent, matches one node; - if present, each subsequence is an alternative [*] - min: optional minimum number of times to match, default 0 - max: optional maximum number of times to match, default HUGE - name: optional name assigned to this match - - [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is - equivalent to (a b c | d e | f g h); if content is None, - this is equivalent to '.' in regular expression terms. - The min and max parameters work as follows: - min=0, max=maxint: .* - min=1, max=maxint: .+ - min=0, max=1: .? - min=1, max=1: . - If content is not None, replace the dot with the parenthesized - list of alternatives, e.g. (a b c | d e | f g h)* - """ + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... class NegatedPattern(BasePattern): - def __init__(self, content: str | None = None) -> None: - """ - Initializer. 
- - The argument is either a pattern or None. If it is None, this - only matches an empty sequence (effectively '$' in regex - lingo). If it is not None, this matches whenever the argument - pattern doesn't have any matches. - """ + def __init__(self, content: str | None = None) -> None: ... def generate_matches( patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL] -) -> Iterator[tuple[int, _Results]]: - """ - Generator yielding matches for a sequence of patterns and nodes. - - Args: - patterns: a sequence of patterns - nodes: a sequence of nodes - - Yields: - (count, results) tuples where: - count: the entire sequence of patterns matches nodes[:count]; - results: dict containing named submatches. - """ +) -> Iterator[tuple[int, _Results]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi index cd788ee2dc624..a7f3825406488 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi @@ -1,10 +1,3 @@ -"""Refactoring framework. - -Used as a main program, this can refactor any number of files and/or -recursively descend down directories. Imported as a module, this -provides infrastructure to write your own refactoring tool. -""" - from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem from collections.abc import Container, Generator, Iterable, Mapping from logging import Logger, _ExcInfoType @@ -18,16 +11,10 @@ from .pgen2.driver import Driver from .pgen2.grammar import Grammar from .pytree import Node -def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: - """Return a sorted list of all available fix names in the given package.""" - -def get_fixers_from_package(pkg_name: str) -> list[str]: - """ - Return the fully qualified names for fixers in the package pkg_name. - """ +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... +def get_fixers_from_package(pkg_name: str) -> list[str]: ... -class FixerError(Exception): - """A fixer could not be loaded.""" +class FixerError(Exception): ... class RefactoringTool: CLASS_PREFIX: ClassVar[str] @@ -50,159 +37,40 @@ class RefactoringTool: bmi_post_order: list[BaseFix] def __init__( self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None - ) -> None: - """Initializer. - - Args: - fixer_names: a list of fixers to import - options: a dict with configuration. - explicit: a list of fixers to run even if they are explicit. - """ - - def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: - """Inspects the options to load the requested patterns and handlers. - - Returns: - (pre_order, post_order), where pre_order is the list of fixers that - want a pre-order AST traversal, and post_order is the list that want - post-order traversal. - """ - - def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: - """Called when an error occurs.""" - + ) -> None: ... + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ... + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ... @overload - def log_message(self, msg: object) -> None: - """Hook to log a message.""" - + def log_message(self, msg: object) -> None: ... @overload def log_message(self, msg: str, *args: object) -> None: ... 
@overload def log_debug(self, msg: object) -> None: ... @overload def log_debug(self, msg: str, *args: object) -> None: ... - def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: - """Called with the old version, new version, and filename of a - refactored file. - """ - - def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: - """Refactor a list of files and directories.""" - - def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: - """Descends down a directory and refactor every Python file found. - - Python files are assumed to have a .py extension. - - Files and subdirectories starting with '.' are skipped. - """ - - def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: - """ - Do our best to decode a Python source file correctly. - """ - - def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: - """Refactors a file.""" - - def refactor_string(self, data: str, name: str) -> Node | None: - """Refactor a given input string. - - Args: - data: a string holding the code to be refactored. - name: a human-readable name for use in error/log messages. - - Returns: - An AST corresponding to the refactored input stream; None if - there were errors during the parse. - """ - + def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ... + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_string(self, data: str, name: str) -> Node | None: ... def refactor_stdin(self, doctests_only: bool = False) -> None: ... - def refactor_tree(self, tree: Node, name: str) -> bool: - """Refactors a parse tree (modifying the tree in place). - - For compatible patterns the bottom matcher module is - used. Otherwise the tree is traversed node-to-node for - matches. - - Args: - tree: a pytree.Node instance representing the root of the tree - to be refactored. - name: a human-readable name for this tree. - - Returns: - True if the tree was modified, False otherwise. - """ - - def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: - """Traverse an AST, applying a set of fixers to each node. - - This is a helper method for refactor_tree(). - - Args: - fixers: a list of fixer instances. - traversal: a generator that yields AST nodes. - - Returns: - None - """ - + def refactor_tree(self, tree: Node, name: str) -> bool: ... + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ... def processed_file( self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None - ) -> None: - """ - Called when a file has been refactored and there may be changes. - """ - - def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: - """Writes a string to a file. 
- - It first shows a unified diff between the old text and the new text, and - then rewrites the file; the latter is only done if the write option is - set. - """ + ) -> None: ... + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ... PS1: Final = ">>> " PS2: Final = "... " - def refactor_docstring(self, input: str, filename: StrPath) -> str: - """Refactors a docstring, looking for doctests. - - This returns a modified version of the input string. It looks - for doctests, which start with a ">>>" prompt, and may be - continued with "..." prompts, as long as the "..." is indented - the same as the ">>>". - - (Unfortunately we can't use the doctest module's parser, - since, like most parsers, it is not geared towards preserving - the original source.) - """ - - def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: - """Refactors one doctest. - - A doctest is given as a block of lines, the first of which starts - with ">>>" (possibly indented), while the remaining lines start - with "..." (identically indented). - - """ - + def refactor_docstring(self, input: str, filename: StrPath) -> str: ... + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ... def summarize(self) -> None: ... - def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: - """Parses a block into a tree. - - This is necessary to get correct line number / offset information - in the parser diagnostics and embedded into the parse tree. - """ - + def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ... def wrap_toks( self, block: Iterable[str], lineno: int, indent: int - ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: - """Wraps a tokenize stream to systematically modify start/end.""" - - def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: - """Generates lines as expected by tokenize from a list of lines. - - This strips the first len(indent + self.PS1) characters off each line. - """ + ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ... + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... class MultiprocessingUnsupported(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi index 368ce1b044990..5379a21e7d123 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi @@ -1,10 +1,3 @@ -"""Cache lines from Python source files. - -This is intended to read lines from modules imported -- hence if a filename -is not found, it will look down the module search path for a file by -that name. -""" - from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias @@ -18,40 +11,9 @@ _SourceLoader: TypeAlias = tuple[Callable[[], str | None]] cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented -def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: - """Get a line for a Python source file from the cache. - Update the cache if it doesn't contain an entry for this file already. 
- """ - -def clearcache() -> None: - """Clear the cache entirely.""" - -def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: - """Get the lines for a Python source file from the cache. - Update the cache if it doesn't contain an entry for this file already. - """ - -def checkcache(filename: str | None = None) -> None: - """Discard cache entries that are out of date. - (This is not checked upon each call!) - """ - -def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: - """Update a cache entry and return its list of lines. - If something's wrong, print a message, discard the cache entry, - and return an empty list. - """ - -def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: - """Seed the cache for filename with module_globals. - - The module loader will be asked for the source only when getlines is - called, not immediately. - - If there is an entry in the cache already, it is not altered. - - :return: True if a lazy load is registered in the cache, - otherwise False. To register such a load a module loader with a - get_source method must be found, the filename must be a cacheable - filename, and the filename must not be already cached. - """ +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... +def clearcache() -> None: ... +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def checkcache(filename: str | None = None) -> None: ... +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi index ed836048f435e..fae9f849b6373 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi @@ -1,15 +1,3 @@ -"""Locale support module. - -The module provides low-level access to the C lib's locale APIs and adds high -level number formatting APIs as well as a locale aliasing engine to complement -these. - -The aliasing engine includes support for many commonly used locale names and -maps them to values suitable for passing to the C lib's setlocale() function. It -also includes default encodings for all supported locale names. - -""" - import sys from _locale import ( CHAR_MAX as CHAR_MAX, @@ -141,124 +129,33 @@ if sys.platform != "win32": class Error(Exception): ... -def getdefaultlocale(envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")) -> tuple[_str | None, _str | None]: - """Tries to determine the default locale settings and returns - them as tuple (language code, encoding). - - According to POSIX, a program which has not called - setlocale(LC_ALL, "") runs using the portable 'C' locale. - Calling setlocale(LC_ALL, "") lets it use the default locale as - defined by the LANG variable. Since we don't want to interfere - with the current locale setting we thus emulate the behavior - in the way described above. - - To maintain compatibility with other platforms, not only the - LANG variable is tested, but a list of variables given as - envvars parameter. The first found to be defined will be - used. envvars defaults to the search path used in GNU gettext; - it must always contain the variable name 'LANG'. - - Except for the code 'C', the language code corresponds to RFC - 1766. 
code and encoding can be None in case the values cannot - be determined. - - """ - -def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: - """Returns the current setting for the given locale category as - tuple (language code, encoding). - - category may be one of the LC_* value except LC_ALL. It - defaults to LC_CTYPE. - - Except for the code 'C', the language code corresponds to RFC - 1766. code and encoding can be None in case the values cannot - be determined. - - """ - -def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: - """Set the locale for the given category. The locale can be - a string, an iterable of two strings (language code and encoding), - or None. - - Iterables are converted to strings using the locale aliasing - engine. Locale strings are passed directly to the C lib. - - category may be given as one of the LC_* values. - - """ - -def getpreferredencoding(do_setlocale: bool = True) -> _str: - """Return the charset that the user is likely using, - according to the system configuration. - """ - -def normalize(localename: _str) -> _str: - """Returns a normalized locale code for the given locale - name. - - The returned locale code is formatted for use with - setlocale(). - - If normalization fails, the original name is returned - unchanged. - - If the given encoding is not known, the function defaults to - the default encoding for the locale code just like setlocale() - does. - - """ +def getdefaultlocale( + envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") +) -> tuple[_str | None, _str | None]: ... +def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... +def getpreferredencoding(do_setlocale: bool = True) -> _str: ... +def normalize(localename: _str) -> _str: ... if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `locale.setlocale(locale.LC_ALL, '')` instead.") - def resetlocale(category: int = ...) -> None: - """Sets the locale for category to the default setting. - - The default setting is determined by calling - getdefaultlocale(). category defaults to LC_ALL. - - """ + def resetlocale(category: int = ...) -> None: ... else: - def resetlocale(category: int = ...) -> None: - """Sets the locale for category to the default setting. - - The default setting is determined by calling - getdefaultlocale(). category defaults to LC_ALL. - - """ + def resetlocale(category: int = ...) -> None: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.7; removed in Python 3.12. Use `locale.format_string()` instead.") - def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: - """Deprecated, use format_string instead.""" - -def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: - """Formats a string in the same way that the % formatting would use, - but takes the current locale into account. - - Grouping is applied if the third parameter is true. - Conversion uses monetary thousands separator and grouping strings if - forth parameter monetary is true. - """ - -def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: - """Formats val according to the currency settings - in the current locale. 
- """ - -def delocalize(string: _str) -> _str: - """Parses a string as a normalized number according to the locale settings.""" - -def atof(string: _str, func: Callable[[_str], float] = ...) -> float: - """Parses a string as a float according to the locale settings.""" - -def atoi(string: _str) -> int: - """Converts a string to an integer according to the locale settings.""" - -def str(val: float) -> _str: - """Convert float to string, taking the locale into account.""" + def format( + percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any + ) -> _str: ... + +def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... +def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... +def delocalize(string: _str) -> _str: ... +def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... +def atoi(string: _str) -> int: ... +def str(val: float) -> _str: ... locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi index 657c65a8b6934..8248f82ea87ac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi @@ -1,12 +1,3 @@ -""" -Logging package for Python. Based on PEP 282 and comments thereto in -comp.lang.python. - -Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. - -To use, simply 'import logging' and log away! -""" - import sys import threading from _typeshed import StrPath, SupportsWrite @@ -100,117 +91,27 @@ _levelToName: dict[int, str] _nameToLevel: dict[str, int] class Filterer: - """ - A base class for loggers and handlers which allows them to share - common code. - """ - filters: list[_FilterType] - def addFilter(self, filter: _FilterType) -> None: - """ - Add the specified filter to this handler. - """ - - def removeFilter(self, filter: _FilterType) -> None: - """ - Remove the specified filter from this handler. - """ + def addFilter(self, filter: _FilterType) -> None: ... + def removeFilter(self, filter: _FilterType) -> None: ... if sys.version_info >= (3, 12): - def filter(self, record: LogRecord) -> bool | LogRecord: - """ - Determine if a record is loggable by consulting all the filters. - - The default is to allow the record to be logged; any filter can veto - this by returning a false value. - If a filter attached to a handler returns a log record instance, - then that instance is used in place of the original log record in - any further processing of the event by that handler. - If a filter returns any other true value, the original log record - is used in any further processing of the event by that handler. - - If none of the filters return false values, this method returns - a log record. - If any of the filters return a false value, this method returns - a false value. - - .. versionchanged:: 3.2 - - Allow filters to be just callables. - - .. versionchanged:: 3.12 - Allow filters to return a LogRecord instead of - modifying it in place. - """ + def filter(self, record: LogRecord) -> bool | LogRecord: ... else: - def filter(self, record: LogRecord) -> bool: - """ - Determine if a record is loggable by consulting all the filters. - - The default is to allow the record to be logged; any filter can veto - this and the record is then dropped. 
Returns a zero value if a record - is to be dropped, else non-zero. - - .. versionchanged:: 3.2 - - Allow filters to be just callables. - """ + def filter(self, record: LogRecord) -> bool: ... class Manager: # undocumented - """ - There is [under normal circumstances] just one Manager instance, which - holds the hierarchy of loggers. - """ - root: RootLogger disable: int emittedNoHandlerWarning: bool loggerDict: dict[str, Logger | PlaceHolder] loggerClass: type[Logger] | None logRecordFactory: Callable[..., LogRecord] | None - def __init__(self, rootnode: RootLogger) -> None: - """ - Initialize the manager with the root node of the logger hierarchy. - """ - - def getLogger(self, name: str) -> Logger: - """ - Get a logger with the specified name (channel name), creating it - if it doesn't yet exist. This name is a dot-separated hierarchical - name, such as "a", "a.b", "a.b.c" or similar. - - If a PlaceHolder existed for the specified name [i.e. the logger - didn't exist but a child of it did], replace it with the created - logger and fix up the parent/child references which pointed to the - placeholder to now point to the logger. - """ - - def setLoggerClass(self, klass: type[Logger]) -> None: - """ - Set the class to be used when instantiating a logger with this Manager. - """ - - def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: - """ - Set the factory to be used when instantiating a log record with this - Manager. - """ + def __init__(self, rootnode: RootLogger) -> None: ... + def getLogger(self, name: str) -> Logger: ... + def setLoggerClass(self, klass: type[Logger]) -> None: ... + def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: ... class Logger(Filterer): - """ - Instances of the Logger class represent a single logging channel. A - "logging channel" indicates an area of an application. Exactly how an - "area" is defined is up to the application developer. Since an - application can have any number of areas, logging channels are identified - by a unique string. Application areas can be nested (e.g. an area - of "input processing" might include sub-areas "read CSV files", "read - XLS files" and "read Gnumeric files"). To cater for this natural nesting, - channel names are organized into a namespace hierarchy where levels are - separated by periods, much like the Java or Python package namespace. So - in the instance given above, channel names might be "input" for the upper - level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels. - There is no arbitrary limit to the depth of nesting. - """ - name: str # undocumented level: int # undocumented parent: Logger | None # undocumented @@ -219,44 +120,11 @@ class Logger(Filterer): disabled: bool # undocumented root: ClassVar[RootLogger] # undocumented manager: Manager # undocumented - def __init__(self, name: str, level: _Level = 0) -> None: - """ - Initialize the logger with a name and an optional level. - """ - - def setLevel(self, level: _Level) -> None: - """ - Set the logging level of this logger. level must be an int or a str. - """ - - def isEnabledFor(self, level: int) -> bool: - """ - Is this logger enabled for level 'level'? - """ - - def getEffectiveLevel(self) -> int: - """ - Get the effective level for this logger. - - Loop through this logger and its parents in the logger hierarchy, - looking for a non-zero logging level. Return the first one found. 
- """ - - def getChild(self, suffix: str) -> Self: # see python/typing#980 - """ - Get a logger which is a descendant to this one. - - This is a convenience method, such that - - logging.getLogger('abc').getChild('def.ghi') - - is the same as - - logging.getLogger('abc.def.ghi') - - It's useful, for example, when the parent logger is named using - __name__ rather than a literal string. - """ + def __init__(self, name: str, level: _Level = 0) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def getChild(self, suffix: str) -> Self: ... # see python/typing#980 if sys.version_info >= (3, 12): def getChildren(self) -> set[Logger]: ... @@ -268,16 +136,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with severity 'DEBUG'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) - """ - + ) -> None: ... def info( self, msg: object, @@ -286,16 +145,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with severity 'INFO'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.info("Houston, we have a %s", "notable problem", exc_info=True) - """ - + ) -> None: ... def warning( self, msg: object, @@ -304,16 +154,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with severity 'WARNING'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) - """ - + ) -> None: ... @deprecated("Deprecated since Python 3.3. Use `Logger.warning()` instead.") def warn( self, @@ -332,16 +173,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with severity 'ERROR'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.error("Houston, we have a %s", "major problem", exc_info=True) - """ - + ) -> None: ... def exception( self, msg: object, @@ -350,11 +182,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Convenience method for logging an ERROR with exception information. - """ - + ) -> None: ... def critical( self, msg: object, @@ -363,16 +191,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with severity 'CRITICAL'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. - - logger.critical("Houston, we have a %s", "major disaster", exc_info=True) - """ - + ) -> None: ... def log( self, level: int, @@ -382,16 +201,7 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: - """ - Log 'msg % args' with the integer severity 'level'. - - To pass exception information, use the keyword argument exc_info with - a true value, e.g. 
- - logger.log(level, "We have a %s", "mysterious problem", exc_info=True) - """ - + ) -> None: ... def _log( self, level: int, @@ -401,36 +211,12 @@ class Logger(Filterer): extra: Mapping[str, object] | None = None, stack_info: bool = False, stacklevel: int = 1, - ) -> None: # undocumented - """ - Low-level logging routine which creates a LogRecord and then calls - all the handlers of this logger to handle the record. - """ + ) -> None: ... # undocumented fatal = critical - def addHandler(self, hdlr: Handler) -> None: - """ - Add the specified handler to this logger. - """ - - def removeHandler(self, hdlr: Handler) -> None: - """ - Remove the specified handler from this logger. - """ - - def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: - """ - Find the stack frame of the caller so that we can note the source - file name, line number and function name. - """ - - def handle(self, record: LogRecord) -> None: - """ - Call the handlers for the specified record. - - This method is used for unpickled records received from a socket, as - well as those created locally. Logger-level filtering is applied. - """ - + def addHandler(self, hdlr: Handler) -> None: ... + def removeHandler(self, hdlr: Handler) -> None: ... + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... + def handle(self, record: LogRecord) -> None: ... def makeRecord( self, name: str, @@ -443,33 +229,9 @@ class Logger(Filterer): func: str | None = None, extra: Mapping[str, object] | None = None, sinfo: str | None = None, - ) -> LogRecord: - """ - A factory method which can be overridden in subclasses to create - specialized LogRecords. - """ - - def hasHandlers(self) -> bool: - """ - See if this logger has any handlers configured. - - Loop through all handlers for this logger and its parents in the - logger hierarchy. Return True if a handler was found, else False. - Stop searching up the hierarchy whenever a logger with the "propagate" - attribute set to zero is found - that will be the last logger which - is checked for the existence of handlers. - """ - - def callHandlers(self, record: LogRecord) -> None: # undocumented - """ - Pass a record to all relevant handlers. - - Loop through all handlers for this logger and its parents in the - logger hierarchy. If no handler was found, output a one-off error - message to sys.stderr. Stop searching up the hierarchy whenever a - logger with the "propagate" attribute set to zero is found - that - will be the last logger whose handlers are called. - """ + ) -> LogRecord: ... + def hasHandlers(self) -> bool: ... + def callHandlers(self, record: LogRecord) -> None: ... # undocumented CRITICAL: Final = 50 FATAL: Final = CRITICAL @@ -481,168 +243,30 @@ DEBUG: Final = 10 NOTSET: Final = 0 class Handler(Filterer): - """ - Handler instances dispatch logging events to specific destinations. - - The base handler class. Acts as a placeholder which defines the Handler - interface. Handlers can optionally use Formatter instances to format - records as desired. By default, no formatter is specified; in this case, - the 'raw' message as determined by record.message is logged. - """ - level: int # undocumented formatter: Formatter | None # undocumented lock: threading.Lock | None # undocumented name: str | None # undocumented - def __init__(self, level: _Level = 0) -> None: - """ - Initializes the instance - basically setting the formatter to None - and the filter list to empty. 
- """ - + def __init__(self, level: _Level = 0) -> None: ... def get_name(self) -> str: ... # undocumented def set_name(self, name: str) -> None: ... # undocumented - def createLock(self) -> None: - """ - Acquire a thread lock for serializing access to the underlying I/O. - """ - - def acquire(self) -> None: - """ - Acquire the I/O thread lock. - """ - - def release(self) -> None: - """ - Release the I/O thread lock. - """ - - def setLevel(self, level: _Level) -> None: - """ - Set the logging level of this handler. level must be an int or a str. - """ - - def setFormatter(self, fmt: Formatter | None) -> None: - """ - Set the formatter for this handler. - """ - - def flush(self) -> None: - """ - Ensure all logging output has been flushed. - - This version does nothing and is intended to be implemented by - subclasses. - """ - - def close(self) -> None: - """ - Tidy up any resources used by the handler. - - This version removes the handler from an internal map of handlers, - _handlers, which is used for handler lookup by name. Subclasses - should ensure that this gets called from overridden close() - methods. - """ - - def handle(self, record: LogRecord) -> bool: - """ - Conditionally emit the specified logging record. - - Emission depends on filters which may have been added to the handler. - Wrap the actual emission of the record with acquisition/release of - the I/O thread lock. - - Returns an instance of the log record that was emitted - if it passed all filters, otherwise a false value is returned. - """ - - def handleError(self, record: LogRecord) -> None: - """ - Handle errors which occur during an emit() call. - - This method should be called from handlers when an exception is - encountered during an emit() call. If raiseExceptions is false, - exceptions get silently ignored. This is what is mostly wanted - for a logging system - most users will not care about errors in - the logging system, they are more interested in application errors. - You could, however, replace this with a custom handler if you wish. - The record which was being processed is passed in to this method. - """ - - def format(self, record: LogRecord) -> str: - """ - Format the specified record. - - If a formatter is set, use it. Otherwise, use the default formatter - for the module. - """ - - def emit(self, record: LogRecord) -> None: - """ - Do whatever it takes to actually log the specified logging record. - - This version is intended to be implemented by subclasses and so - raises a NotImplementedError. - """ + def createLock(self) -> None: ... + def acquire(self) -> None: ... + def release(self) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def setFormatter(self, fmt: Formatter | None) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def handle(self, record: LogRecord) -> bool: ... + def handleError(self, record: LogRecord) -> None: ... + def format(self, record: LogRecord) -> str: ... + def emit(self, record: LogRecord) -> None: ... if sys.version_info >= (3, 12): - def getHandlerByName(name: str) -> Handler | None: - """ - Get a handler with the specified *name*, or None if there isn't one with - that name. - """ - - def getHandlerNames() -> frozenset[str]: - """ - Return all known handler names as an immutable set. - """ + def getHandlerByName(name: str) -> Handler | None: ... + def getHandlerNames() -> frozenset[str]: ... class Formatter: - """ - Formatter instances are used to convert a LogRecord to text. 
- - Formatters need to know how a LogRecord is constructed. They are - responsible for converting a LogRecord to (usually) a string which can - be interpreted by either a human or an external system. The base Formatter - allows a formatting string to be specified. If none is supplied, the - style-dependent default value, "%(message)s", "{message}", or - "${message}", is used. - - The Formatter can be initialized with a format string which makes use of - knowledge of the LogRecord attributes - e.g. the default value mentioned - above makes use of the fact that the user's message and arguments are pre- - formatted into a LogRecord's message attribute. Currently, the useful - attributes in a LogRecord are described by: - - %(name)s Name of the logger (logging channel) - %(levelno)s Numeric logging level for the message (DEBUG, INFO, - WARNING, ERROR, CRITICAL) - %(levelname)s Text logging level for the message ("DEBUG", "INFO", - "WARNING", "ERROR", "CRITICAL") - %(pathname)s Full pathname of the source file where the logging - call was issued (if available) - %(filename)s Filename portion of pathname - %(module)s Module (name portion of filename) - %(lineno)d Source line number where the logging call was issued - (if available) - %(funcName)s Function name - %(created)f Time when the LogRecord was created (time.time_ns() / 1e9 - return value) - %(asctime)s Textual time when the LogRecord was created - %(msecs)d Millisecond portion of the creation time - %(relativeCreated)d Time in milliseconds when the LogRecord was created, - relative to the time the logging module was loaded - (typically at application startup time) - %(thread)d Thread ID (if available) - %(threadName)s Thread name (if available) - %(taskName)s Task name (if available) - %(process)d Process ID (if available) - %(processName)s Process name (if available) - %(message)s The result of record.getMessage(), computed just as - the record is emitted - """ - converter: Callable[[float | None], struct_time] _fmt: str | None # undocumented datefmt: str | None # undocumented @@ -659,180 +283,36 @@ class Formatter: validate: bool = True, *, defaults: Mapping[str, Any] | None = None, - ) -> None: - """ - Initialize the formatter with specified format strings. - - Initialize the formatter either with the specified format string, or a - default as described above. Allow for specialized date formatting with - the optional datefmt argument. If datefmt is omitted, you get an - ISO8601-like (or RFC 3339-like) format. - - Use a style parameter of '%', '{' or '$' to specify that you want to - use one of %-formatting, :meth:`str.format` (``{}``) formatting or - :class:`string.Template` formatting in your format string. - - .. versionchanged:: 3.2 - Added the ``style`` parameter. - """ + ) -> None: ... else: def __init__( self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True - ) -> None: - """ - Initialize the formatter with specified format strings. - - Initialize the formatter either with the specified format string, or a - default as described above. Allow for specialized date formatting with - the optional datefmt argument. If datefmt is omitted, you get an - ISO8601-like (or RFC 3339-like) format. - - Use a style parameter of '%', '{' or '$' to specify that you want to - use one of %-formatting, :meth:`str.format` (``{}``) formatting or - :class:`string.Template` formatting in your format string. - - .. versionchanged:: 3.2 - Added the ``style`` parameter. 
- """ - - def format(self, record: LogRecord) -> str: - """ - Format the specified record as text. - - The record's attribute dictionary is used as the operand to a - string formatting operation which yields the returned string. - Before formatting the dictionary, a couple of preparatory steps - are carried out. The message attribute of the record is computed - using LogRecord.getMessage(). If the formatting string uses the - time (as determined by a call to usesTime(), formatTime() is - called to format the event time. If there is exception information, - it is formatted using formatException() and appended to the message. - """ - - def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: - """ - Return the creation time of the specified LogRecord as formatted text. - - This method should be called from format() by a formatter which - wants to make use of a formatted time. This method can be overridden - in formatters to provide for any specific requirement, but the - basic behaviour is as follows: if datefmt (a string) is specified, - it is used with time.strftime() to format the creation time of the - record. Otherwise, an ISO8601-like (or RFC 3339-like) format is used. - The resulting string is returned. This function uses a user-configurable - function to convert the creation time to a tuple. By default, - time.localtime() is used; to change this for a particular formatter - instance, set the 'converter' attribute to a function with the same - signature as time.localtime() or time.gmtime(). To change it for all - formatters, for example if you want all logging times to be shown in GMT, - set the 'converter' attribute in the Formatter class. - """ - - def formatException(self, ei: _SysExcInfoType) -> str: - """ - Format and return the specified exception information as a string. - - This default implementation just uses - traceback.print_exception() - """ + ) -> None: ... + def format(self, record: LogRecord) -> str: ... + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... + def formatException(self, ei: _SysExcInfoType) -> str: ... def formatMessage(self, record: LogRecord) -> str: ... # undocumented - def formatStack(self, stack_info: str) -> str: - """ - This method is provided as an extension point for specialized - formatting of stack information. - - The input data is a string as returned from a call to - :func:`traceback.print_stack`, but with the last trailing newline - removed. - - The base implementation just returns the value passed in. - """ - - def usesTime(self) -> bool: # undocumented - """ - Check if the format uses the creation time of the record. - """ + def formatStack(self, stack_info: str) -> str: ... + def usesTime(self) -> bool: ... # undocumented class BufferingFormatter: - """ - A formatter suitable for formatting a number of records. - """ - linefmt: Formatter - def __init__(self, linefmt: Formatter | None = None) -> None: - """ - Optionally specify a formatter which will be used to format each - individual record. - """ - - def formatHeader(self, records: Sequence[LogRecord]) -> str: - """ - Return the header string for the specified records. - """ - - def formatFooter(self, records: Sequence[LogRecord]) -> str: - """ - Return the footer string for the specified records. - """ - - def format(self, records: Sequence[LogRecord]) -> str: - """ - Format the specified records and return the result as a string. - """ + def __init__(self, linefmt: Formatter | None = None) -> None: ... 
+ def formatHeader(self, records: Sequence[LogRecord]) -> str: ... + def formatFooter(self, records: Sequence[LogRecord]) -> str: ... + def format(self, records: Sequence[LogRecord]) -> str: ... class Filter: - """ - Filter instances are used to perform arbitrary filtering of LogRecords. - - Loggers and Handlers can optionally use Filter instances to filter - records as desired. The base filter class only allows events which are - below a certain point in the logger hierarchy. For example, a filter - initialized with "A.B" will allow events logged by loggers "A.B", - "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If - initialized with the empty string, all events are passed. - """ - name: str # undocumented nlen: int # undocumented - def __init__(self, name: str = "") -> None: - """ - Initialize a filter. - - Initialize with the name of the logger which, together with its - children, will have its events allowed through the filter. If no - name is specified, allow every event. - """ + def __init__(self, name: str = "") -> None: ... if sys.version_info >= (3, 12): - def filter(self, record: LogRecord) -> bool | LogRecord: - """ - Determine if the specified record is to be logged. - - Returns True if the record should be logged, or False otherwise. - If deemed appropriate, the record may be modified in-place. - """ + def filter(self, record: LogRecord) -> bool | LogRecord: ... else: - def filter(self, record: LogRecord) -> bool: - """ - Determine if the specified record is to be logged. - - Returns True if the record should be logged, or False otherwise. - If deemed appropriate, the record may be modified in-place. - """ + def filter(self, record: LogRecord) -> bool: ... class LogRecord: - """ - A LogRecord instance represents an event being logged. - - LogRecord instances are created every time something is logged. They - contain all the information pertinent to the event being logged. The - main information passed in is in msg and args, which are combined - using str(msg) % args to create the message field of the record. The - record also includes information such as when the record was created, - the source line where the logging call was made, and any exception - information to be logged. - """ - # args can be set to None by logging.handlers.QueueHandler # (see https://bugs.python.org/issue44473) args: _ArgsType | None @@ -872,78 +352,24 @@ class LogRecord: exc_info: _SysExcInfoType | None, func: str | None = None, sinfo: str | None = None, - ) -> None: - """ - Initialize a logging record with interesting information. - """ - - def getMessage(self) -> str: - """ - Return the message for this LogRecord. - - Return the message for this LogRecord after merging any user-supplied - arguments with the message. - """ + ) -> None: ... + def getMessage(self) -> str: ... # Allows setting contextual information on LogRecord objects as per the docs, see #7833 def __setattr__(self, name: str, value: Any, /) -> None: ... _L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) class LoggerAdapter(Generic[_L]): - """ - An adapter for loggers which makes it easier to specify contextual - information in logging output. - """ - logger: _L manager: Manager # undocumented if sys.version_info >= (3, 13): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: - """ - Initialize the adapter with a logger and a dict-like object which - provides contextual information. 
This constructor signature allows - easy stacking of LoggerAdapters, if so desired. - - You can effectively pass keyword arguments as shown in the - following example: - - adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) - - By default, LoggerAdapter objects will drop the "extra" argument - passed on the individual log calls to use its own instead. - - Initializing it with merge_extra=True will instead merge both - maps when logging, the individual call extra taking precedence - over the LoggerAdapter instance extra - - .. versionchanged:: 3.13 - The *merge_extra* argument was added. - """ + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: ... elif sys.version_info >= (3, 10): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: - """ - Initialize the adapter with a logger and a dict-like object which - provides contextual information. This constructor signature allows - easy stacking of LoggerAdapters, if so desired. - - You can effectively pass keyword arguments as shown in the - following example: - - adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) - """ + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... else: - def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: - """ - Initialize the adapter with a logger and a dict-like object which - provides contextual information. This constructor signature allows - easy stacking of LoggerAdapters, if so desired. + def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... - You can effectively pass keyword arguments as shown in the - following example: - - adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) - """ if sys.version_info >= (3, 10): extra: Mapping[str, object] | None else: @@ -952,17 +378,7 @@ class LoggerAdapter(Generic[_L]): if sys.version_info >= (3, 13): merge_extra: bool - def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: - """ - Process the logging message and keyword arguments passed in to - a logging call to insert contextual information. You can either - manipulate the message itself, the keyword args or both. Return - the message and kwargs modified (or not) to suit your needs. - - Normally, you'll only need to override this one method in a - LoggerAdapter subclass for your specific needs. - """ - + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: ... def debug( self, msg: object, @@ -972,11 +388,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate a debug call to the underlying logger. - """ - + ) -> None: ... def info( self, msg: object, @@ -986,11 +398,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate an info call to the underlying logger. - """ - + ) -> None: ... def warning( self, msg: object, @@ -1000,11 +408,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate a warning call to the underlying logger. - """ - + ) -> None: ... @deprecated("Deprecated since Python 3.3. 
Use `LoggerAdapter.warning()` instead.") def warn( self, @@ -1025,11 +429,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate an error call to the underlying logger. - """ - + ) -> None: ... def exception( self, msg: object, @@ -1039,11 +439,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate an exception call to the underlying logger. - """ - + ) -> None: ... def critical( self, msg: object, @@ -1053,11 +449,7 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate a critical call to the underlying logger. - """ - + ) -> None: ... def log( self, level: int, @@ -1068,31 +460,11 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: - """ - Delegate a log call to the underlying logger, after adding - contextual information from this adapter instance. - """ - - def isEnabledFor(self, level: int) -> bool: - """ - Is this logger enabled for level 'level'? - """ - - def getEffectiveLevel(self) -> int: - """ - Get the effective level for the underlying logger. - """ - - def setLevel(self, level: _Level) -> None: - """ - Set the specified level on the underlying logger. - """ - - def hasHandlers(self) -> bool: - """ - See if the underlying logger has any handlers. - """ + ) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def setLevel(self, level: _Level) -> None: ... + def hasHandlers(self) -> bool: ... if sys.version_info >= (3, 11): def _log( self, @@ -1103,10 +475,7 @@ class LoggerAdapter(Generic[_L]): exc_info: _ExcInfoType | None = None, extra: Mapping[str, object] | None = None, stack_info: bool = False, - ) -> None: # undocumented - """ - Low-level log implementation, proxied to allow nested logger adapters. - """ + ) -> None: ... # undocumented else: def _log( self, @@ -1116,37 +485,16 @@ class LoggerAdapter(Generic[_L]): exc_info: _ExcInfoType | None = None, extra: Mapping[str, object] | None = None, stack_info: bool = False, - ) -> None: # undocumented - """ - Low-level log implementation, proxied to allow nested logger adapters. - """ + ) -> None: ... # undocumented @property def name(self) -> str: ... # undocumented if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ - -def getLogger(name: str | None = None) -> Logger: - """ - Return a logger with the specified name, creating it if necessary. - - If no name is specified, return the root logger. - """ - -def getLoggerClass() -> type[Logger]: - """ - Return the class to be used when instantiating a logger. - """ - -def getLogRecordFactory() -> Callable[..., LogRecord]: - """ - Return the factory to be used when instantiating a log record. - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... +def getLogger(name: str | None = None) -> Logger: ... +def getLoggerClass() -> type[Logger]: ... +def getLogRecordFactory() -> Callable[..., LogRecord]: ... 
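The LoggerAdapter block above drops the upstream explanation of how the adapter's extra mapping is injected into every call (and, on 3.13+, optionally merged with a per-call extra via merge_extra=True). A small sketch of the default behaviour, assuming an illustrative field name "client":

    import logging

    # %(client)s is supplied on every call by the adapter's extra mapping,
    # which the default LoggerAdapter.process() forwards as the per-call extra.
    logging.basicConfig(format="%(levelname)s %(client)s %(message)s", level=logging.INFO)

    base = logging.getLogger("example.adapter")
    adapter = logging.LoggerAdapter(base, {"client": "203.0.113.7"})

    adapter.info("connection opened")
    adapter.warning("slow response: %d ms", 740)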
def debug( msg: object, *args: object, @@ -1154,13 +502,7 @@ def debug( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'DEBUG' on the root logger. If the logger has - no handlers, call basicConfig() to add a console handler with a pre-defined - format. - """ - +) -> None: ... def info( msg: object, *args: object, @@ -1168,13 +510,7 @@ def info( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'INFO' on the root logger. If the logger has - no handlers, call basicConfig() to add a console handler with a pre-defined - format. - """ - +) -> None: ... def warning( msg: object, *args: object, @@ -1182,13 +518,7 @@ def warning( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'WARNING' on the root logger. If the logger has - no handlers, call basicConfig() to add a console handler with a pre-defined - format. - """ - +) -> None: ... @deprecated("Deprecated since Python 3.3. Use `warning()` instead.") def warn( msg: object, @@ -1205,13 +535,7 @@ def error( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'ERROR' on the root logger. If the logger has - no handlers, call basicConfig() to add a console handler with a pre-defined - format. - """ - +) -> None: ... def critical( msg: object, *args: object, @@ -1219,13 +543,7 @@ def critical( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'CRITICAL' on the root logger. If the logger - has no handlers, call basicConfig() to add a console handler with a - pre-defined format. - """ - +) -> None: ... def exception( msg: object, *args: object, @@ -1233,13 +551,7 @@ def exception( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log a message with severity 'ERROR' on the root logger, with exception - information. If the logger has no handlers, basicConfig() is called to add - a console handler with a pre-defined format. - """ - +) -> None: ... def log( level: int, msg: object, @@ -1248,47 +560,14 @@ def log( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: - """ - Log 'msg % args' with the integer severity 'level' on the root logger. If - the logger has no handlers, call basicConfig() to add a console handler - with a pre-defined format. - """ +) -> None: ... fatal = critical -def disable(level: int = 50) -> None: - """ - Disable all logging calls of severity 'level' and below. - """ - -def addLevelName(level: int, levelName: str) -> None: - """ - Associate 'levelName' with 'level'. - - This is used when converting levels to text during message formatting. - """ - +def disable(level: int = 50) -> None: ... +def addLevelName(level: int, levelName: str) -> None: ... @overload -def getLevelName(level: int) -> str: - """ - Return the textual or numeric representation of logging level 'level'. - - If the level is one of the predefined levels (CRITICAL, ERROR, WARNING, - INFO, DEBUG) then you get the corresponding string. If you have - associated levels with names using addLevelName then the name you have - associated with 'level' is returned. 
- - If a numeric value corresponding to one of the defined levels is passed - in, the corresponding string representation is returned. - - If a string representation of the level is passed in, the corresponding - numeric value is returned. - - If no matching numeric or string value is passed in, the string - 'Level %s' % level is returned. - """ - +def getLevelName(level: int) -> str: ... @overload @deprecated("The str -> int case is considered a mistake.") def getLevelName(level: str) -> Any: ... @@ -1296,14 +575,7 @@ def getLevelName(level: str) -> Any: ... if sys.version_info >= (3, 11): def getLevelNamesMapping() -> dict[str, int]: ... -def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: - """ - Make a LogRecord whose attributes are defined by the specified dictionary, - This function is useful for converting a logging event received over - a socket connection (which is sent as a dictionary) into a LogRecord - instance. - """ - +def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... def basicConfig( *, filename: StrPath | None = ..., @@ -1317,148 +589,28 @@ def basicConfig( force: bool | None = ..., encoding: str | None = ..., errors: str | None = ..., -) -> None: - """ - Do basic configuration for the logging system. - - This function does nothing if the root logger already has handlers - configured, unless the keyword argument *force* is set to ``True``. - It is a convenience method intended for use by simple scripts - to do one-shot configuration of the logging package. - - The default behaviour is to create a StreamHandler which writes to - sys.stderr, set a formatter using the BASIC_FORMAT format string, and - add the handler to the root logger. - - A number of optional keyword arguments may be specified, which can alter - the default behaviour. - - filename Specifies that a FileHandler be created, using the specified - filename, rather than a StreamHandler. - filemode Specifies the mode to open the file, if filename is specified - (if filemode is unspecified, it defaults to 'a'). - format Use the specified format string for the handler. - datefmt Use the specified date/time format. - style If a format string is specified, use this to specify the - type of format string (possible values '%', '{', '$', for - %-formatting, :meth:`str.format` and :class:`string.Template` - - defaults to '%'). - level Set the root logger level to the specified level. - stream Use the specified stream to initialize the StreamHandler. Note - that this argument is incompatible with 'filename' - if both - are present, 'stream' is ignored. - handlers If specified, this should be an iterable of already created - handlers, which will be added to the root logger. Any handler - in the list which does not have a formatter assigned will be - assigned the formatter created in this function. - force If this keyword is specified as true, any existing handlers - attached to the root logger are removed and closed, before - carrying out the configuration as specified by the other - arguments. - encoding If specified together with a filename, this encoding is passed to - the created FileHandler, causing it to be used when the file is - opened. - errors If specified together with a filename, this value is passed to the - created FileHandler, causing it to be used when the file is - opened in text mode. If not specified, the default value is - `backslashreplace`. - - Note that you could specify a stream created using open(filename, mode) - rather than passing the filename and mode in. 
However, it should be - remembered that StreamHandler does not close its stream (since it may be - using sys.stdout or sys.stderr), whereas FileHandler closes its stream - when the handler is closed. - - .. versionchanged:: 3.2 - Added the ``style`` parameter. - - .. versionchanged:: 3.3 - Added the ``handlers`` parameter. A ``ValueError`` is now thrown for - incompatible arguments (e.g. ``handlers`` specified together with - ``filename``/``filemode``, or ``filename``/``filemode`` specified - together with ``stream``, or ``handlers`` specified together with - ``stream``. - - .. versionchanged:: 3.8 - Added the ``force`` parameter. - - .. versionchanged:: 3.9 - Added the ``encoding`` and ``errors`` parameters. - """ - -def shutdown(handlerList: Sequence[Any] = ...) -> None: # handlerList is undocumented - """ - Perform any cleanup actions in the logging system (e.g. flushing - buffers). - - Should be called at application exit. - """ - -def setLoggerClass(klass: type[Logger]) -> None: - """ - Set the class to be used when instantiating a logger. The class should - define __init__() such that only a name argument is required, and the - __init__() should call Logger.__init__() - """ - -def captureWarnings(capture: bool) -> None: - """ - If capture is true, redirect all warnings to the logging package. - If capture is False, ensure that warnings are not redirected to logging - but to their original destinations. - """ - -def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: - """ - Set the factory to be used when instantiating a log record. - - :param factory: A callable which will be called to instantiate - a log record. - """ +) -> None: ... +def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented +def setLoggerClass(klass: type[Logger]) -> None: ... +def captureWarnings(capture: bool) -> None: ... +def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... lastResort: Handler | None _StreamT = TypeVar("_StreamT", bound=SupportsWrite[str]) class StreamHandler(Handler, Generic[_StreamT]): - """ - A handler class which writes logging records, appropriately formatted, - to a stream. Note that this class does not close the stream, as - sys.stdout or sys.stderr may be used. - """ - stream: _StreamT # undocumented terminator: str @overload - def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: - """ - Initialize the handler. - - If stream is not specified, sys.stderr is used. - """ - + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... @overload def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 - def setStream(self, stream: _StreamT) -> _StreamT | None: - """ - Sets the StreamHandler's stream to the specified value, - if it is different. - - Returns the old stream, if the stream was changed, or None - if it wasn't. - """ + def setStream(self, stream: _StreamT) -> _StreamT | None: ... if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class FileHandler(StreamHandler[TextIOWrapper]): - """ - A handler class which writes formatted logging records to disk files. 
- """ - baseFilename: str # undocumented mode: str # undocumented encoding: str | None # undocumented @@ -1466,59 +618,20 @@ class FileHandler(StreamHandler[TextIOWrapper]): errors: str | None # undocumented def __init__( self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: - """ - Open the specified file and use it as the stream for logging. - """ - - def _open(self) -> TextIOWrapper: # undocumented - """ - Open the current base file with the (original) mode and encoding. - Return the resulting stream. - """ + ) -> None: ... + def _open(self) -> TextIOWrapper: ... # undocumented -class NullHandler(Handler): - """ - This handler does nothing. It's intended to be used to avoid the - "No handlers could be found for logger XXX" one-off warning. This is - important for library code, which may contain code to log events. If a user - of the library does not configure logging, the one-off warning might be - produced; to avoid this, the library developer simply needs to instantiate - a NullHandler and add it to the top-level logger of the library module or - package. - """ +class NullHandler(Handler): ... class PlaceHolder: # undocumented - """ - PlaceHolder instances are used in the Manager logger hierarchy to take - the place of nodes for which no loggers have been defined. This class is - intended for internal use only and not as part of the public API. - """ - loggerMap: dict[Logger, None] - def __init__(self, alogger: Logger) -> None: - """ - Initialize with the specified logger being a child of this placeholder. - """ - - def append(self, alogger: Logger) -> None: - """ - Add the specified logger as a child of this placeholder. - """ + def __init__(self, alogger: Logger) -> None: ... + def append(self, alogger: Logger) -> None: ... # Below aren't in module docs but still visible class RootLogger(Logger): - """ - A root logger is not that different to any other logger, except that - it must have a logging level and there is only one instance of it in - the hierarchy. - """ - - def __init__(self, level: int) -> None: - """ - Initialize the logger with the name "root". - """ + def __init__(self, level: int) -> None: ... root: RootLogger @@ -1534,9 +647,7 @@ class PercentStyle: # undocumented def __init__(self, fmt: str) -> None: ... def usesTime(self) -> bool: ... - def validate(self) -> None: - """Validate the input format, ensure it matches the correct style""" - + def validate(self) -> None: ... def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi index 7c083b255cce2..72412ddc2cea5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi @@ -1,13 +1,3 @@ -""" -Configuration functions for the logging package for Python. The core package -is based on PEP 282 and comments thereto in comp.lang.python, and influenced -by Apache's log4j system. - -Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. - -To use, simply 'import logging' and log away! 
-""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence @@ -73,8 +63,7 @@ class _DictConfigArgs(TypedDict, total=False): # # Also accept a TypedDict type, to allow callers to use TypedDict # types, and for somewhat stricter type checking of dict literals. -def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: - """Configure logging using a dictionary.""" +def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... if sys.version_info >= (3, 10): def fileConfig( @@ -82,73 +71,29 @@ if sys.version_info >= (3, 10): defaults: Mapping[str, str] | None = None, disable_existing_loggers: bool = True, encoding: str | None = None, - ) -> None: - """ - Read the logging configuration from a ConfigParser-format file. - - This can be called several times from an application, allowing an end user - the ability to select from various pre-canned configurations (if the - developer provides a mechanism to present the choices and load the chosen - configuration). - """ + ) -> None: ... else: def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, defaults: Mapping[str, str] | None = None, disable_existing_loggers: bool = True, - ) -> None: - """ - Read the logging configuration from a ConfigParser-format file. - - This can be called several times from an application, allowing an end user - the ability to select from various pre-canned configurations (if the - developer provides a mechanism to present the choices and load the chosen - configuration). - """ + ) -> None: ... def valid_ident(s: str) -> Literal[True]: ... # undocumented -def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: - """ - Start up a socket server on the specified port, and listen for new - configurations. - - These will be sent as a file suitable for processing by fileConfig(). - Returns a Thread object on which you can call start() to start the server, - and which you can join() when appropriate. To stop the server, call - stopListening(). - - Use the ``verify`` argument to verify any bytes received across the wire - from a client. If specified, it should be a callable which receives a - single argument - the bytes of configuration data received across the - network - and it should return either ``None``, to indicate that the - passed in bytes could not be verified and should be discarded, or a - byte string which is then passed to the configuration machinery as - normal. Note that you can return transformed bytes, e.g. by decrypting - the bytes passed in. - """ - -def stopListening() -> None: - """ - Stop the listening server which was created with a call to listen(). - """ +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... +def stopListening() -> None: ... class ConvertingMixin: # undocumented - """For ConvertingXXX's, this mixin class provides common functions""" - def convert_with_key(self, key: Any, value: Any, replace: bool = True) -> Any: ... def convert(self, value: Any) -> Any: ... class ConvertingDict(dict[Hashable, Any], ConvertingMixin): # undocumented - """A converting dictionary wrapper.""" - def __getitem__(self, key: Hashable) -> Any: ... def get(self, key: Hashable, default: Any = None) -> Any: ... def pop(self, key: Hashable, default: Any = None) -> Any: ... class ConvertingList(list[Any], ConvertingMixin): # undocumented - """A converting list wrapper.""" - @overload def __getitem__(self, key: SupportsIndex) -> Any: ... 
@overload @@ -157,8 +102,6 @@ class ConvertingList(list[Any], ConvertingMixin): # undocumented if sys.version_info >= (3, 12): class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented - """A converting tuple wrapper.""" - @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -167,18 +110,12 @@ if sys.version_info >= (3, 12): else: @disjoint_base class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented - """A converting tuple wrapper.""" - @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload def __getitem__(self, key: slice) -> Any: ... class BaseConfigurator: - """ - The configurator base class which defines some useful defaults. - """ - CONVERT_PATTERN: Pattern[str] WORD_PATTERN: Pattern[str] DOT_PATTERN: Pattern[str] @@ -190,66 +127,24 @@ class BaseConfigurator: config: dict[str, Any] # undocumented def __init__(self, config: _DictConfigArgs | dict[str, Any]) -> None: ... - def resolve(self, s: str) -> Any: - """ - Resolve strings to objects using standard import and attribute - syntax. - """ - - def ext_convert(self, value: str) -> Any: - """Default converter for the ext:// protocol.""" - - def cfg_convert(self, value: str) -> Any: - """Default converter for the cfg:// protocol.""" - - def convert(self, value: Any) -> Any: - """ - Convert values to an appropriate type. dicts, lists and tuples are - replaced by their converting alternatives. Strings are checked to - see if they have a conversion format and are converted if they do. - """ - - def configure_custom(self, config: dict[str, Any]) -> Any: - """Configure an object with a user-supplied factory.""" - - def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: - """Utility function which converts lists to tuples.""" + def resolve(self, s: str) -> Any: ... + def ext_convert(self, value: str) -> Any: ... + def cfg_convert(self, value: str) -> Any: ... + def convert(self, value: Any) -> Any: ... + def configure_custom(self, config: dict[str, Any]) -> Any: ... + def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: ... class DictConfigurator(BaseConfigurator): - """ - Configure logging using a dictionary-like object to describe the - configuration. - """ - - def configure(self) -> None: # undocumented - """Do the configuration.""" - - def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: # undocumented - """Configure a formatter from a dictionary.""" - - def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: # undocumented - """Configure a filter from a dictionary.""" - - def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: # undocumented - """Add filters to a filterer from a list of names.""" - - def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: # undocumented - """Configure a handler from a dictionary.""" - - def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: # undocumented - """Add handlers to a logger from a list of names.""" - + def configure(self) -> None: ... # undocumented + def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: ... # undocumented + def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: ... # undocumented + def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: ... # undocumented + def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: ... 
# undocumented + def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: ... # undocumented def common_logger_config( self, logger: Logger, config: _LoggerConfiguration, incremental: bool = False - ) -> None: # undocumented - """ - Perform configuration which is common to root and non-root loggers. - """ - - def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented - """Configure a non-root logger from a dictionary.""" - - def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented - """Configure a root logger from a dictionary.""" + ) -> None: ... # undocumented + def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented dictConfigClass = DictConfigurator diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi index cbf27e4b69c8d..535f1c6851831 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi @@ -1,12 +1,3 @@ -""" -Additional handlers for the logging package for Python. The core package is -based on PEP 282 and comments thereto in comp.lang.python. - -Copyright (C) 2001-2021 Vinay Sajip. All Rights Reserved. - -To use, simply 'import logging.handlers' and log away! -""" - import datetime import http.client import ssl @@ -31,91 +22,24 @@ SYSLOG_UDP_PORT: Final = 514 SYSLOG_TCP_PORT: Final = 514 class WatchedFileHandler(FileHandler): - """ - A handler for logging to a file, which watches the file - to see if it has changed while in use. This can happen because of - usage of programs such as newsyslog and logrotate which perform - log file rotation. This handler, intended for use under Unix, - watches the file to see if it has changed since the last emit. - (A file has changed if its device or inode have changed.) - If it has changed, the old file stream is closed, and the file - opened to get a new stream. - - This handler is not appropriate for use under Windows, because - under Windows open files cannot be moved or renamed - logging - opens the files with exclusive locks - and so there is no need - for such a handler. - - This handler is based on a suggestion and patch by Chad J. - Schroeder. - """ - dev: int # undocumented ino: int # undocumented def __init__( self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... def _statstream(self) -> None: ... # undocumented - def reopenIfNeeded(self) -> None: - """ - Reopen log file if needed. - - Checks if the underlying file has changed, and if it - has, close the old stream and reopen the file to get the - current stream. - """ + def reopenIfNeeded(self) -> None: ... class BaseRotatingHandler(FileHandler): - """ - Base class for handlers that rotate log files at a certain point. - Not meant to be instantiated directly. Instead, use RotatingFileHandler - or TimedRotatingFileHandler. 
- """ - namer: Callable[[str], str] | None rotator: Callable[[str, str], None] | None def __init__( self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: - """ - Use the specified filename for streamed logging - """ - - def rotation_filename(self, default_name: str) -> str: - """ - Modify the filename of a log file when rotating. - - This is provided so that a custom filename can be provided. - - The default implementation calls the 'namer' attribute of the - handler, if it's callable, passing the default name to - it. If the attribute isn't callable (the default is None), the name - is returned unchanged. - - :param default_name: The default name for the log file. - """ - - def rotate(self, source: str, dest: str) -> None: - """ - When rotating, rotate the current log. - - The default implementation calls the 'rotator' attribute of the - handler, if it's callable, passing the source and dest arguments to - it. If the attribute isn't callable (the default is None), the source - is simply renamed to the destination. - - :param source: The source filename. This is normally the base - filename, e.g. 'test.log' - :param dest: The destination filename. This is normally - what the source is rotated to, e.g. 'test.log.1'. - """ + ) -> None: ... + def rotation_filename(self, default_name: str) -> str: ... + def rotate(self, source: str, dest: str) -> None: ... class RotatingFileHandler(BaseRotatingHandler): - """ - Handler for logging to a set of files, which switches from one file - to the next when the current file reaches a certain size. - """ - maxBytes: int # undocumented backupCount: int # undocumented def __init__( @@ -127,50 +51,11 @@ class RotatingFileHandler(BaseRotatingHandler): encoding: str | None = None, delay: bool = False, errors: str | None = None, - ) -> None: - """ - Open the specified file and use it as the stream for logging. - - By default, the file grows indefinitely. You can specify particular - values of maxBytes and backupCount to allow the file to rollover at - a predetermined size. - - Rollover occurs whenever the current log file is nearly maxBytes in - length. If backupCount is >= 1, the system will successively create - new files with the same pathname as the base file, but with extensions - ".1", ".2" etc. appended to it. For example, with a backupCount of 5 - and a base file name of "app.log", you would get "app.log", - "app.log.1", "app.log.2", ... through to "app.log.5". The file being - written to is always "app.log" - when it gets filled up, it is closed - and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. - exist, then they are renamed to "app.log.2", "app.log.3" etc. - respectively. - - If maxBytes is zero, rollover never occurs. - """ - - def doRollover(self) -> None: - """ - Do a rollover, as described in __init__(). - """ - - def shouldRollover(self, record: LogRecord) -> int: # undocumented - """ - Determine if rollover should occur. - - Basically, see if the supplied record would cause the file to exceed - the size limit we have. - """ + ) -> None: ... + def doRollover(self) -> None: ... + def shouldRollover(self, record: LogRecord) -> int: ... # undocumented class TimedRotatingFileHandler(BaseRotatingHandler): - """ - Handler for logging to a file, rotating the log file at certain timed - intervals. - - If backupCount is > 0, when rollover is done, no more than backupCount - files are kept - the oldest ones are deleted. 
- """ - when: str # undocumented backupCount: int # undocumented utc: bool # undocumented @@ -192,48 +77,12 @@ class TimedRotatingFileHandler(BaseRotatingHandler): atTime: datetime.time | None = None, errors: str | None = None, ) -> None: ... - def doRollover(self) -> None: - """ - do a rollover; in this case, a date/time stamp is appended to the filename - when the rollover happens. However, you want the file to be named for the - start of the interval, not the current time. If there is a backup count, - then we have to get a list of matching filenames, sort them and remove - the one with the oldest suffix. - """ - - def shouldRollover(self, record: LogRecord) -> int: # undocumented - """ - Determine if rollover should occur. - - record is not used, as we are just comparing times, but it is needed so - the method signatures are the same - """ - - def computeRollover(self, currentTime: int) -> int: # undocumented - """ - Work out the rollover time based on the specified time. - """ - - def getFilesToDelete(self) -> list[str]: # undocumented - """ - Determine the files to delete when rolling over. - - More specific than the earlier method, which just used glob.glob(). - """ + def doRollover(self) -> None: ... + def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + def computeRollover(self, currentTime: int) -> int: ... # undocumented + def getFilesToDelete(self) -> list[str]: ... # undocumented class SocketHandler(Handler): - """ - A handler class which writes logging records, in pickle format, to - a streaming socket. The socket is kept open across logging calls. - If the peer resets it, an attempt is made to reconnect on the next call. - The pickle which is sent is that of the LogRecord's attribute dictionary - (__dict__), so that the receiver does not need to have the logging module - installed in order to process the logging event. - - To unpickle the record at the receiving end into a LogRecord, use the - makeLogRecord function. - """ - host: str # undocumented port: int | None # undocumented address: tuple[str, int] | str # undocumented @@ -243,69 +92,16 @@ class SocketHandler(Handler): retryStart: float # undocumented retryFactor: float # undocumented retryMax: float # undocumented - def __init__(self, host: str, port: int | None) -> None: - """ - Initializes the handler with a specific host address and port. - - When the attribute *closeOnError* is set to True - if a socket error - occurs, the socket is silently closed and then reopened on the next - logging call. - """ - - def makeSocket(self, timeout: float = 1) -> socket: # timeout is undocumented - """ - A factory method which allows subclasses to define the precise - type of socket they want. - """ - - def makePickle(self, record: LogRecord) -> bytes: - """ - Pickles the record in binary format with a length prefix, and - returns it ready for transmission across the socket. - """ - - def send(self, s: ReadableBuffer) -> None: - """ - Send a pickled string to the socket. - - This function allows for partial sends which can happen when the - network is busy. - """ - - def createSocket(self) -> None: - """ - Try to create a socket, using an exponential backoff with - a max retry time. Thanks to Robert Olson for the original patch - (SF #815911) which has been slightly refactored. - """ + def __init__(self, host: str, port: int | None) -> None: ... + def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented + def makePickle(self, record: LogRecord) -> bytes: ... 
+ def send(self, s: ReadableBuffer) -> None: ... + def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): - """ - A handler class which writes logging records, in pickle format, to - a datagram socket. The pickle which is sent is that of the LogRecord's - attribute dictionary (__dict__), so that the receiver does not need to - have the logging module installed in order to process the logging event. - - To unpickle the record at the receiving end into a LogRecord, use the - makeLogRecord function. - - """ - - def makeSocket(self) -> socket: # type: ignore[override] - """ - The factory method of SocketHandler is here overridden to create - a UDP socket (SOCK_DGRAM). - """ + def makeSocket(self) -> socket: ... # type: ignore[override] class SysLogHandler(Handler): - """ - A handler class which sends formatted logging records to a syslog - server. Based on Sam Rushing's syslog module: - http://www.nightmare.com/squirl/python-ext/misc/syslog.py - Contributed by Nicolas Untz (after which minor refactoring changes - have been made). - """ - LOG_EMERG: int LOG_ALERT: int LOG_CRIT: int @@ -356,106 +152,25 @@ class SysLogHandler(Handler): facility: str | int = 1, socktype: SocketKind | None = None, timeout: float | None = None, - ) -> None: - """ - Initialize a handler. - - If address is specified as a string, a UNIX socket is used. To log to a - local syslogd, "SysLogHandler(address="/dev/log")" can be used. - If facility is not specified, LOG_USER is used. If socktype is - specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific - socket type will be used. For Unix sockets, you can also specify a - socktype of None, in which case socket.SOCK_DGRAM will be used, falling - back to socket.SOCK_STREAM. - """ + ) -> None: ... else: def __init__( self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None - ) -> None: - """ - Initialize a handler. - - If address is specified as a string, a UNIX socket is used. To log to a - local syslogd, "SysLogHandler(address="/dev/log")" can be used. - If facility is not specified, LOG_USER is used. If socktype is - specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific - socket type will be used. For Unix sockets, you can also specify a - socktype of None, in which case socket.SOCK_DGRAM will be used, falling - back to socket.SOCK_STREAM. - """ + ) -> None: ... if sys.version_info >= (3, 11): - def createSocket(self) -> None: - """ - Try to create a socket and, if it's not a datagram socket, connect it - to the other end. This method is called during handler initialization, - but it's not regarded as an error if the other end isn't listening yet - --- the method will be called again when emitting an event, - if there is no socket at that point. - """ - - def encodePriority(self, facility: int | str, priority: int | str) -> int: - """ - Encode the facility and priority. You can pass in strings or - integers - if strings are passed, the facility_names and - priority_names mapping dictionaries are used to convert them to - integers. - """ + def createSocket(self) -> None: ... - def mapPriority(self, levelName: str) -> str: - """ - Map a logging level name to a key in the priority_names map. - This is useful in two scenarios: when custom levels are being - used, and in the case where you can't do a straightforward - mapping by lowercasing the logging level name because of locale- - specific issues (see SF #1524081). 
- """ + def encodePriority(self, facility: int | str, priority: int | str) -> int: ... + def mapPriority(self, levelName: str) -> str: ... class NTEventLogHandler(Handler): - """ - A handler class which sends events to the NT Event Log. Adds a - registry entry for the specified application name. If no dllname is - provided, win32service.pyd (which contains some basic message - placeholders) is used. Note that use of these placeholders will make - your event logs big, as the entire message source is held in the log. - If you want slimmer logs, you have to pass in the name of your own DLL - which contains the message definitions you want to use in the event log. - """ - def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... - def getEventCategory(self, record: LogRecord) -> int: - """ - Return the event category for the record. - - Override this if you want to specify your own categories. This version - returns 0. - """ + def getEventCategory(self, record: LogRecord) -> int: ... # TODO: correct return value? - def getEventType(self, record: LogRecord) -> int: - """ - Return the event type for the record. - - Override this if you want to specify your own types. This version does - a mapping using the handler's typemap attribute, which is set up in - __init__() to a dictionary which contains mappings for DEBUG, INFO, - WARNING, ERROR and CRITICAL. If you are using your own levels you will - either need to override this method or place a suitable dictionary in - the handler's typemap attribute. - """ - - def getMessageID(self, record: LogRecord) -> int: - """ - Return the message ID for the event record. If you are using your - own messages, you could do this by having the msg passed to the - logger being an ID rather than a formatting string. Then, in here, - you could use a dictionary lookup to get the message ID. This - version returns 1, which is the base message ID in win32service.pyd. - """ + def getEventType(self, record: LogRecord) -> int: ... + def getMessageID(self, record: LogRecord) -> int: ... class SMTPHandler(Handler): - """ - A handler class which sends an SMTP email for each logging event. - """ - mailhost: str # undocumented mailport: int | None # undocumented username: str | None # undocumented @@ -475,90 +190,23 @@ class SMTPHandler(Handler): credentials: tuple[str, str] | None = None, secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, timeout: float = 5.0, - ) -> None: - """ - Initialize the handler. - - Initialize the instance with the from and to addresses and subject - line of the email. To specify a non-standard SMTP port, use the - (host, port) tuple format for the mailhost argument. To specify - authentication credentials, supply a (username, password) tuple - for the credentials argument. To specify the use of a secure - protocol (TLS), pass in a tuple for the secure argument. This will - only be used when authentication credentials are supplied. The tuple - will be either an empty tuple, or a single-value tuple with the name - of a keyfile, or a 2-value tuple with the names of the keyfile and - certificate file. (This tuple is passed to the - `ssl.SSLContext.load_cert_chain` method). - A timeout in seconds can be specified for the SMTP connection (the - default is one second). - """ - - def getSubject(self, record: LogRecord) -> str: - """ - Determine the subject for the email. - - If you want to specify a subject line which is record-dependent, - override this method. - """ + ) -> None: ... 
+ def getSubject(self, record: LogRecord) -> str: ... class BufferingHandler(Handler): - """ - A handler class which buffers logging records in memory. Whenever each - record is added to the buffer, a check is made to see if the buffer should - be flushed. If it should, then flush() is expected to do what's needed. - """ - capacity: int # undocumented buffer: list[LogRecord] # undocumented - def __init__(self, capacity: int) -> None: - """ - Initialize the handler with the buffer size. - """ - - def shouldFlush(self, record: LogRecord) -> bool: - """ - Should the handler flush its buffer? - - Returns true if the buffer is up to capacity. This method can be - overridden to implement custom flushing strategies. - """ + def __init__(self, capacity: int) -> None: ... + def shouldFlush(self, record: LogRecord) -> bool: ... class MemoryHandler(BufferingHandler): - """ - A handler class which buffers logging records in memory, periodically - flushing them to a target handler. Flushing occurs whenever the buffer - is full, or when an event of a certain severity or greater is seen. - """ - flushLevel: int # undocumented target: Handler | None # undocumented flushOnClose: bool # undocumented - def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: - """ - Initialize the handler with the buffer size, the level at which - flushing should occur and an optional target. - - Note that without a target being set either here or via setTarget(), - a MemoryHandler is no use to anyone! - - The ``flushOnClose`` argument is ``True`` for backward compatibility - reasons - the old behaviour is that when the handler is closed, the - buffer is flushed, even if the flush level hasn't been exceeded nor the - capacity exceeded. To prevent this, set ``flushOnClose`` to ``False``. - """ - - def setTarget(self, target: Handler | None) -> None: - """ - Set the target handler for this handler. - """ + def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... + def setTarget(self, target: Handler | None) -> None: ... class HTTPHandler(Handler): - """ - A class which sends records to a web server, using either GET or - POST semantics. - """ - host: str # undocumented url: str # undocumented method: str # undocumented @@ -573,26 +221,9 @@ class HTTPHandler(Handler): secure: bool = False, credentials: tuple[str, str] | None = None, context: ssl.SSLContext | None = None, - ) -> None: - """ - Initialize the instance with the host, the request URL, and the method - ("GET" or "POST") - """ - - def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: - """ - Default implementation of mapping the log record into a dict - that is sent as the CGI data. Overwrite in your class. - Contributed by Franz Glasner. - """ - - def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: # undocumented - """ - get a HTTP[S]Connection. - - Override when a custom connection is required, for example if - there is a proxy. - """ + ) -> None: ... + def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented @type_check_only class _QueueLike(Protocol[_T]): @@ -600,126 +231,28 @@ class _QueueLike(Protocol[_T]): def put_nowait(self, item: _T, /) -> None: ... class QueueHandler(Handler): - """ - This handler sends events to a queue. 
Typically, it would be used together - with a multiprocessing Queue to centralise logging to file in one process - (in a multi-process application), so as to avoid file write contention - between processes. - - This code is new in Python 3.2, but this class can be copy pasted into - user code for use with earlier Python versions. - """ - queue: _QueueLike[Any] - def __init__(self, queue: _QueueLike[Any]) -> None: - """ - Initialise an instance, using the passed queue. - """ - - def prepare(self, record: LogRecord) -> Any: - """ - Prepare a record for queuing. The object returned by this method is - enqueued. - - The base implementation formats the record to merge the message and - arguments, and removes unpickleable items from the record in-place. - Specifically, it overwrites the record's `msg` and - `message` attributes with the merged message (obtained by - calling the handler's `format` method), and sets the `args`, - `exc_info` and `exc_text` attributes to None. - - You might want to override this method if you want to convert - the record to a dict or JSON string, or send a modified copy - of the record while leaving the original intact. - """ - - def enqueue(self, record: LogRecord) -> None: - """ - Enqueue a record. - - The base implementation uses put_nowait. You may want to override - this method if you want to use blocking, timeouts or custom queue - implementations. - """ + def __init__(self, queue: _QueueLike[Any]) -> None: ... + def prepare(self, record: LogRecord) -> Any: ... + def enqueue(self, record: LogRecord) -> None: ... if sys.version_info >= (3, 12): listener: QueueListener | None class QueueListener: - """ - This class implements an internal threaded listener which watches for - LogRecords being added to a queue, removes them and passes them to a - list of handlers for processing. - """ - handlers: tuple[Handler, ...] # undocumented respect_handler_level: bool # undocumented queue: _QueueLike[Any] # undocumented _thread: Thread | None # undocumented - def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: - """ - Initialise an instance with the specified queue and - handlers. - """ - - def dequeue(self, block: bool) -> LogRecord: - """ - Dequeue a record and return it, optionally blocking. - - The base implementation uses get. You may want to override this method - if you want to use timeouts or work with custom queue implementations. - """ + def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... + def dequeue(self, block: bool) -> LogRecord: ... + def prepare(self, record: LogRecord) -> Any: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def enqueue_sentinel(self) -> None: ... + def handle(self, record: LogRecord) -> None: ... - def prepare(self, record: LogRecord) -> Any: - """ - Prepare a record for handling. - - This method just returns the passed-in record. You may want to - override this method if you need to do any custom marshalling or - manipulation of the record before passing it to the handlers. - """ - - def start(self) -> None: - """ - Start the listener. - - This starts up a background thread to monitor the queue for - LogRecords to process. - """ - - def stop(self) -> None: - """ - Stop the listener. - - This asks the thread to terminate, and then waits for it to do so. - Note that if you don't call this before your application exits, there - may be some records still left on the queue, which won't be processed. 
- """ - - def enqueue_sentinel(self) -> None: - """ - This is used to enqueue the sentinel record. - - The base implementation uses put_nowait. You may want to override this - method if you want to use timeouts or work with custom queue - implementations. - """ - - def handle(self, record: LogRecord) -> None: - """ - Handle a record. - - This just loops through the handlers offering them the record - to handle. - """ if sys.version_info >= (3, 14): - def __enter__(self) -> Self: - """ - For use as a context manager. Starts the listener. - """ - + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> None: - """ - For use as a context manager. Stops the listener. - """ + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi index 65b1f1898f251..b7ef607b75cbf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi @@ -1,13 +1,3 @@ -"""Interface to the liblzma compression library. - -This module provides a class for reading and writing compressed files, -classes for incremental (de)compression, and convenience functions for -one-shot (de)compression. - -These classes and functions support both the XZ and legacy LZMA -container formats, as well as raw compressed data streams. -""" - import sys from _lzma import ( CHECK_CRC32 as CHECK_CRC32, @@ -99,15 +89,6 @@ _OpenTextWritingMode: TypeAlias = Literal["wt", "xt", "at"] _PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible definitions of writelines in the base classes - """A file object providing transparent LZMA (de)compression. - - An LZMAFile can act as a wrapper for an existing file object, or - refer directly to a named file on disk. - - Note that LZMAFile provides a *binary* file interface - data read - is returned as bytes, and data to be written must be given as bytes. - """ - def __init__( self, filename: _PathOrFile | None = None, @@ -117,105 +98,14 @@ class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible def check: int = -1, preset: int | None = None, filters: _FilterChain | None = None, - ) -> None: - """Open an LZMA-compressed file in binary mode. - - filename can be either an actual file name (given as a str, - bytes, or PathLike object), in which case the named file is - opened, or it can be an existing file object to read from or - write to. - - mode can be "r" for reading (default), "w" for (over)writing, - "x" for creating exclusively, or "a" for appending. These can - equivalently be given as "rb", "wb", "xb" and "ab" respectively. - - format specifies the container format to use for the file. - If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the - default is FORMAT_XZ. - - check specifies the integrity check to use. This argument can - only be used when opening a file for writing. For FORMAT_XZ, - the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not - support integrity checks - for these formats, check must be - omitted, or be CHECK_NONE. - - When opening a file for reading, the *preset* argument is not - meaningful, and should be omitted. The *filters* argument should - also be omitted, except when format is FORMAT_RAW (in which case - it is required). 
- - When opening a file for writing, the settings used by the - compressor can be specified either as a preset compression - level (with the *preset* argument), or in detail as a custom - filter chain (with the *filters* argument). For FORMAT_XZ and - FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset - level. For FORMAT_RAW, the caller must always specify a filter - chain; the raw compressor does not support preset compression - levels. - - preset (if provided) should be an integer in the range 0-9, - optionally OR-ed with the constant PRESET_EXTREME. - - filters (if provided) should be a sequence of dicts. Each dict - should have an entry for "id" indicating ID of the filter, plus - additional entries for options to the filter. - """ - + ) -> None: ... def __enter__(self) -> Self: ... - def peek(self, size: int = -1) -> bytes: - """Return buffered data without advancing the file position. - - Always returns at least one byte of data, unless at EOF. - The exact number of bytes returned is unspecified. - """ - - def read(self, size: int | None = -1) -> bytes: - """Read up to size uncompressed bytes from the file. - - If size is negative or omitted, read until EOF is reached. - Returns b"" if the file is already at EOF. - """ - - def read1(self, size: int = -1) -> bytes: - """Read up to size uncompressed bytes, while trying to avoid - making multiple reads from the underlying stream. Reads up to a - buffer's worth of data if size is negative. - - Returns b"" if the file is at EOF. - """ - - def readline(self, size: int | None = -1) -> bytes: - """Read a line of uncompressed bytes from the file. - - The terminating newline (if present) is retained. If size is - non-negative, no more than size bytes will be read (in which - case the line may be incomplete). Returns b'' if already at EOF. - """ - - def write(self, data: ReadableBuffer) -> int: - """Write a bytes object to the file. - - Returns the number of uncompressed bytes written, which is - always the length of data in bytes. Note that due to buffering, - the file on disk may not reflect the data written until close() - is called. - """ - - def seek(self, offset: int, whence: int = 0) -> int: - """Change the file position. - - The new position is specified by offset, relative to the - position indicated by whence. Possible values for whence are: - - 0: start of stream (default): offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive - - Returns the new file position. - - Note that seeking is emulated, so depending on the parameters, - this operation may be extremely slow. - """ + def peek(self, size: int = -1) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = -1) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... @overload def open( @@ -229,31 +119,7 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> LZMAFile: - """Open an LZMA-compressed file in binary or text mode. - - filename can be either an actual file name (given as a str, bytes, - or PathLike object), in which case the named file is opened, or it - can be an existing file object to read from or write to. - - The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb", - "a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text - mode. 
- - The format, check, preset and filters arguments specify the - compression settings, as for LZMACompressor, LZMADecompressor and - LZMAFile. - - For binary mode, this function is equivalent to the LZMAFile - constructor: LZMAFile(filename, mode, ...). In this case, the - encoding, errors and newline arguments must not be provided. - - For text mode, an LZMAFile object is created, and wrapped in an - io.TextIOWrapper instance with the specified encoding, error - handling behavior, and line ending(s). - - """ - +) -> LZMAFile: ... @overload def open( filename: _PathOrFile, @@ -308,20 +174,7 @@ def open( ) -> LZMAFile | TextIOWrapper: ... def compress( data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None -) -> bytes: - """Compress a block of data. - - Refer to LZMACompressor's docstring for a description of the - optional arguments *format*, *check*, *preset* and *filters*. - - For incremental compression, use an LZMACompressor instead. - """ - -def decompress(data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> bytes: - """Decompress a block of data. - - Refer to LZMADecompressor's docstring for a description of the - optional arguments *format*, *check* and *filters*. - - For incremental decompression, use an LZMADecompressor instead. - """ +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi index 01570c1c2ed67..89bd998b4dfeb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi @@ -1,5 +1,3 @@ -"""Read/write support for Maildir, mbox, MH, Babyl, and MMDF mailboxes.""" - import email.message import io import sys @@ -49,489 +47,202 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): - """A group of messages in a particular place.""" - _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: - """Initialize a Mailbox instance.""" - + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... @overload def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... @abstractmethod - def add(self, message: _MessageData) -> str: - """Add message and return assigned key.""" - + def add(self, message: _MessageData) -> str: ... @abstractmethod - def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist.""" - + def remove(self, key: str) -> None: ... def __delitem__(self, key: str) -> None: ... - def discard(self, key: str) -> None: - """If the keyed message exists, remove it.""" - + def discard(self, key: str) -> None: ... @abstractmethod - def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist.""" - + def __setitem__(self, key: str, message: _MessageData) -> None: ... @overload - def get(self, key: str, default: None = None) -> _MessageT | None: - """Return the keyed message, or default if it doesn't exist.""" - + def get(self, key: str, default: None = None) -> _MessageT | None: ... 
@overload def get(self, key: str, default: _T) -> _MessageT | _T: ... - def __getitem__(self, key: str) -> _MessageT: - """Return the keyed message; raise KeyError if it doesn't exist.""" - + def __getitem__(self, key: str) -> _MessageT: ... @abstractmethod - def get_message(self, key: str) -> _MessageT: - """Return a Message representation or raise a KeyError.""" - - def get_string(self, key: str) -> str: - """Return a string representation or raise a KeyError. - - Uses email.message.Message to create a 7bit clean string - representation of the message. - """ - + def get_message(self, key: str) -> _MessageT: ... + def get_string(self, key: str) -> str: ... @abstractmethod - def get_bytes(self, key: str) -> bytes: - """Return a byte string representation or raise a KeyError.""" + def get_bytes(self, key: str) -> bytes: ... # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here @abstractmethod - def get_file(self, key: str) -> Any: - """Return a file-like representation or raise a KeyError.""" - + def get_file(self, key: str) -> Any: ... @abstractmethod - def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys.""" - - def keys(self) -> list[str]: - """Return a list of keys.""" - - def itervalues(self) -> Iterator[_MessageT]: - """Return an iterator over all messages.""" - + def iterkeys(self) -> Iterator[str]: ... + def keys(self) -> list[str]: ... + def itervalues(self) -> Iterator[_MessageT]: ... def __iter__(self) -> Iterator[_MessageT]: ... - def values(self) -> list[_MessageT]: - """Return a list of messages. Memory intensive.""" - - def iteritems(self) -> Iterator[tuple[str, _MessageT]]: - """Return an iterator over (key, message) tuples.""" - - def items(self) -> list[tuple[str, _MessageT]]: - """Return a list of (key, message) tuples. Memory intensive.""" - + def values(self) -> list[_MessageT]: ... + def iteritems(self) -> Iterator[tuple[str, _MessageT]]: ... + def items(self) -> list[tuple[str, _MessageT]]: ... @abstractmethod - def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise.""" - + def __contains__(self, key: str) -> bool: ... @abstractmethod - def __len__(self) -> int: - """Return a count of messages in the mailbox.""" - - def clear(self) -> None: - """Delete all messages.""" - + def __len__(self) -> int: ... + def clear(self) -> None: ... @overload - def pop(self, key: str, default: None = None) -> _MessageT | None: - """Delete the keyed message and return it, or default.""" - + def pop(self, key: str, default: None = None) -> _MessageT | None: ... @overload def pop(self, key: str, default: _T) -> _MessageT | _T: ... - def popitem(self) -> tuple[str, _MessageT]: - """Delete an arbitrary (key, message) pair and return it.""" - - def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: - """Change the messages that correspond to certain keys.""" - + def popitem(self) -> tuple[str, _MessageT]: ... + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... @abstractmethod - def flush(self) -> None: - """Write any pending changes to the disk.""" - + def flush(self) -> None: ... @abstractmethod - def lock(self) -> None: - """Lock the mailbox.""" - + def lock(self) -> None: ... @abstractmethod - def unlock(self) -> None: - """Unlock the mailbox if it is locked.""" - + def unlock(self) -> None: ... 
@abstractmethod - def close(self) -> None: - """Flush and close the mailbox.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def close(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class Maildir(Mailbox[MaildirMessage]): - """A qmail-style Maildir mailbox.""" - colon: str - def __init__(self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True) -> None: - """Initialize a Maildir instance.""" - - def add(self, message: _MessageData) -> str: - """Add message and return assigned key.""" - - def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist.""" - - def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist.""" - - def get_message(self, key: str) -> MaildirMessage: - """Return a Message representation or raise a KeyError.""" - - def get_bytes(self, key: str) -> bytes: - """Return a bytes representation or raise a KeyError.""" - - def get_file(self, key: str) -> _ProxyFile[bytes]: - """Return a file-like representation or raise a KeyError.""" + def __init__( + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True + ) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MaildirMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... if sys.version_info >= (3, 13): - def get_info(self, key: str) -> str: - """Get the keyed message's "info" as a string.""" - - def set_info(self, key: str, info: str) -> None: - """Set the keyed message's "info" string.""" - - def get_flags(self, key: str) -> str: - """Return as a string the standard flags that are set on the keyed message.""" - - def set_flags(self, key: str, flags: str) -> None: - """Set the given flags and unset all others on the keyed message.""" - - def add_flag(self, key: str, flag: str) -> None: - """Set the given flag(s) without changing others on the keyed message.""" - - def remove_flag(self, key: str, flag: str) -> None: - """Unset the given string flag(s) without changing others on the keyed message.""" - - def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys.""" - - def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise.""" - - def __len__(self) -> int: - """Return a count of messages in the mailbox.""" - - def flush(self) -> None: - """Write any pending changes to disk.""" - - def lock(self) -> None: - """Lock the mailbox.""" - - def unlock(self) -> None: - """Unlock the mailbox if it is locked.""" - - def close(self) -> None: - """Flush and close the mailbox.""" - - def list_folders(self) -> list[str]: - """Return a list of folder names.""" - - def get_folder(self, folder: str) -> Maildir: - """Return a Maildir instance for the named folder.""" - - def add_folder(self, folder: str) -> Maildir: - """Create a folder and return a Maildir instance representing it.""" - - def remove_folder(self, folder: str) -> None: - """Delete the named folder, which must be empty.""" - - def clean(self) -> None: - """Delete old files in "tmp".""" - - def next(self) -> str | None: - 
"""Return the next message in a one-time iteration.""" + def get_info(self, key: str) -> str: ... + def set_info(self, key: str, info: str) -> None: ... + def get_flags(self, key: str) -> str: ... + def set_flags(self, key: str, flags: str) -> None: ... + def add_flag(self, key: str, flag: str) -> None: ... + def remove_flag(self, key: str, flag: str) -> None: ... + + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: str) -> Maildir: ... + def add_folder(self, folder: str) -> Maildir: ... + def remove_folder(self, folder: str) -> None: ... + def clean(self) -> None: ... + def next(self) -> str | None: ... class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): - """A single-file mailbox.""" - - def add(self, message: _MessageData) -> str: - """Add message and return assigned key.""" - - def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist.""" - - def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist.""" - - def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys.""" - - def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise.""" - - def __len__(self) -> int: - """Return a count of messages in the mailbox.""" - - def lock(self) -> None: - """Lock the mailbox.""" - - def unlock(self) -> None: - """Unlock the mailbox if it is locked.""" - - def flush(self) -> None: - """Write any pending changes to disk.""" - - def close(self) -> None: - """Flush and close the mailbox.""" + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... class _mboxMMDF(_singlefileMailbox[_MessageT]): - """An mbox or MMDF mailbox.""" - - def get_message(self, key: str) -> _MessageT: - """Return a Message representation or raise a KeyError.""" - - def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: - """Return a file-like representation or raise a KeyError.""" - - def get_bytes(self, key: str, from_: bool = False) -> bytes: - """Return a string representation or raise a KeyError.""" - - def get_string(self, key: str, from_: bool = False) -> str: - """Return a string representation or raise a KeyError.""" + def get_message(self, key: str) -> _MessageT: ... + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... + def get_bytes(self, key: str, from_: bool = False) -> bytes: ... + def get_string(self, key: str, from_: bool = False) -> str: ... class mbox(_mboxMMDF[mboxMessage]): - """A classic mbox mailbox.""" - - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: - """Initialize an mbox mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... 
class MMDF(_mboxMMDF[MMDFMessage]): - """An MMDF mailbox.""" - - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: - """Initialize an MMDF mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... class MH(Mailbox[MHMessage]): - """An MH mailbox.""" - - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: - """Initialize an MH instance.""" - - def add(self, message: _MessageData) -> str: - """Add message and return assigned key.""" - - def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist.""" - - def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist.""" - - def get_message(self, key: str) -> MHMessage: - """Return a Message representation or raise a KeyError.""" - - def get_bytes(self, key: str) -> bytes: - """Return a bytes representation or raise a KeyError.""" - - def get_file(self, key: str) -> _ProxyFile[bytes]: - """Return a file-like representation or raise a KeyError.""" - - def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys.""" - - def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise.""" - - def __len__(self) -> int: - """Return a count of messages in the mailbox.""" - - def flush(self) -> None: - """Write any pending changes to the disk.""" - - def lock(self) -> None: - """Lock the mailbox.""" - - def unlock(self) -> None: - """Unlock the mailbox if it is locked.""" - - def close(self) -> None: - """Flush and close the mailbox.""" - - def list_folders(self) -> list[str]: - """Return a list of folder names.""" - - def get_folder(self, folder: StrPath) -> MH: - """Return an MH instance for the named folder.""" - - def add_folder(self, folder: StrPath) -> MH: - """Create a folder and return an MH instance representing it.""" - - def remove_folder(self, folder: StrPath) -> None: - """Delete the named folder, which must be empty.""" - - def get_sequences(self) -> dict[str, list[int]]: - """Return a name-to-key-list dictionary to define each sequence.""" - - def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: - """Set sequences using the given name-to-key-list dictionary.""" - - def pack(self) -> None: - """Re-name messages to eliminate numbering gaps. Invalidates keys.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MHMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... + def get_sequences(self) -> dict[str, list[int]]: ... 
+ def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... + def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): - """An Rmail-style Babyl mailbox.""" - - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: - """Initialize a Babyl mailbox.""" - - def get_message(self, key: str) -> BabylMessage: - """Return a Message representation or raise a KeyError.""" - - def get_bytes(self, key: str) -> bytes: - """Return a string representation or raise a KeyError.""" - - def get_file(self, key: str) -> IO[bytes]: - """Return a file-like representation or raise a KeyError.""" - - def get_labels(self) -> list[str]: - """Return a list of user-defined labels in the mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... + def get_message(self, key: str) -> BabylMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> IO[bytes]: ... + def get_labels(self) -> list[str]: ... class Message(email.message.Message): - """Message with mailbox-format-specific properties.""" - - def __init__(self, message: _MessageData | None = None) -> None: - """Initialize a Message instance.""" + def __init__(self, message: _MessageData | None = None) -> None: ... class MaildirMessage(Message): - """Message with Maildir-specific properties.""" - - def get_subdir(self) -> str: - """Return 'new' or 'cur'.""" - - def set_subdir(self, subdir: Literal["new", "cur"]) -> None: - """Set subdir to 'new' or 'cur'.""" - - def get_flags(self) -> str: - """Return as a string the flags that are set.""" - - def set_flags(self, flags: Iterable[str]) -> None: - """Set the given flags and unset all others.""" - - def add_flag(self, flag: str) -> None: - """Set the given flag(s) without changing others.""" - - def remove_flag(self, flag: str) -> None: - """Unset the given string flag(s) without changing others.""" - - def get_date(self) -> int: - """Return delivery date of message, in seconds since the epoch.""" - - def set_date(self, date: float) -> None: - """Set delivery date of message, in seconds since the epoch.""" - - def get_info(self) -> str: - """Get the message's "info" as a string.""" - - def set_info(self, info: str) -> None: - """Set the message's "info" string.""" + def get_subdir(self) -> str: ... + def set_subdir(self, subdir: Literal["new", "cur"]) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... + def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... + def get_date(self) -> int: ... + def set_date(self, date: float) -> None: ... + def get_info(self) -> str: ... + def set_info(self, info: str) -> None: ... class _mboxMMDFMessage(Message): - """Message with mbox- or MMDF-specific properties.""" - - def get_from(self) -> str: - """Return contents of "From " line.""" - - def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: - """Set "From " line, formatting and appending time_ if specified.""" - - def get_flags(self) -> str: - """Return as a string the flags that are set.""" + def get_from(self) -> str: ... + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... 
+ def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... - def set_flags(self, flags: Iterable[str]) -> None: - """Set the given flags and unset all others.""" - - def add_flag(self, flag: str) -> None: - """Set the given flag(s) without changing others.""" - - def remove_flag(self, flag: str) -> None: - """Unset the given string flag(s) without changing others.""" - -class mboxMessage(_mboxMMDFMessage): - """Message with mbox-specific properties.""" +class mboxMessage(_mboxMMDFMessage): ... class MHMessage(Message): - """Message with MH-specific properties.""" - - def get_sequences(self) -> list[str]: - """Return a list of sequences that include the message.""" - - def set_sequences(self, sequences: Iterable[str]) -> None: - """Set the list of sequences that include the message.""" - - def add_sequence(self, sequence: str) -> None: - """Add sequence to list of sequences including the message.""" - - def remove_sequence(self, sequence: str) -> None: - """Remove sequence from the list of sequences including the message.""" + def get_sequences(self) -> list[str]: ... + def set_sequences(self, sequences: Iterable[str]) -> None: ... + def add_sequence(self, sequence: str) -> None: ... + def remove_sequence(self, sequence: str) -> None: ... class BabylMessage(Message): - """Message with Babyl-specific properties.""" - - def get_labels(self) -> list[str]: - """Return a list of labels on the message.""" - - def set_labels(self, labels: Iterable[str]) -> None: - """Set the list of labels on the message.""" - - def add_label(self, label: str) -> None: - """Add label to list of labels on the message.""" - - def remove_label(self, label: str) -> None: - """Remove label from the list of labels on the message.""" - - def get_visible(self) -> Message: - """Return a Message representation of visible headers.""" - - def set_visible(self, visible: _MessageData) -> None: - """Set the Message representation of visible headers.""" + def get_labels(self) -> list[str]: ... + def set_labels(self, labels: Iterable[str]) -> None: ... + def add_label(self, label: str) -> None: ... + def remove_label(self, label: str) -> None: ... + def get_visible(self) -> Message: ... + def set_visible(self, visible: _MessageData) -> None: ... + def update_visible(self) -> None: ... - def update_visible(self) -> None: - """Update and/or sensibly generate a set of visible headers.""" - -class MMDFMessage(_mboxMMDFMessage): - """Message with MMDF-specific properties.""" +class MMDFMessage(_mboxMMDFMessage): ... class _ProxyFile(Generic[AnyStr]): - """A read-only wrapper of a file.""" - - def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: - """Initialize a _ProxyFile.""" - - def read(self, size: int | None = None) -> AnyStr: - """Read bytes.""" - - def read1(self, size: int | None = None) -> AnyStr: - """Read bytes.""" - - def readline(self, size: int | None = None) -> AnyStr: - """Read a line.""" - - def readlines(self, sizehint: int | None = None) -> list[AnyStr]: - """Read multiple lines.""" - - def __iter__(self) -> Iterator[AnyStr]: - """Iterate over lines.""" - - def tell(self) -> int: - """Return the position.""" - - def seek(self, offset: int, whence: int = 0) -> None: - """Change position.""" - - def close(self) -> None: - """Close the file.""" - - def __enter__(self) -> Self: - """Context management protocol support.""" - + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... + def read(self, size: int | None = None) -> AnyStr: ... 
+ def read1(self, size: int | None = None) -> AnyStr: ... + def readline(self, size: int | None = None) -> AnyStr: ... + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def tell(self) -> int: ... + def seek(self, offset: int, whence: int = 0) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -539,29 +250,13 @@ class _ProxyFile(Generic[AnyStr]): def flush(self) -> None: ... @property def closed(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _PartialFile(_ProxyFile[AnyStr]): - """A read-only wrapper of part of a file.""" - - def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: - """Initialize a _PartialFile.""" - -class Error(Exception): - """Raised for module-specific errors.""" - -class NoSuchMailboxError(Error): - """The specified mailbox does not exist and won't be created.""" - -class NotEmptyError(Error): - """The specified mailbox is not empty and deletion was requested.""" - -class ExternalClashError(Error): - """Another process caused an action to fail.""" + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... -class FormatError(Error): - """A file appears to have an invalid format.""" +class Error(Exception): ... +class NoSuchMailboxError(Error): ... +class NotEmptyError(Error): ... +class ExternalClashError(Error): ... +class FormatError(Error): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi index 6848c9929c9c2..ce549e01f528c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi @@ -1,5 +1,3 @@ -"""Mailcap file handling. See RFC 1524.""" - from collections.abc import Mapping, Sequence from typing_extensions import TypeAlias @@ -9,23 +7,5 @@ __all__ = ["getcaps", "findmatch"] def findmatch( caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = [] -) -> tuple[str | None, _Cap | None]: - """Find a match for a mailcap entry. - - Return a tuple containing the command line, and the mailcap entry - used; (None, None) if no match is found. This may invoke the - 'test' command of several matching entries before deciding which - entry to use. - - """ - -def getcaps() -> dict[str, list[_Cap]]: - """Return a dictionary containing the mailcap database. - - The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain') - to a list of dictionaries corresponding to mailcap entries. The list - collects all the entries for that MIME type from all available mailcap - files. Each dictionary contains key-value pairs for that MIME type, - where the viewing command is stored with the key "view". - - """ +) -> tuple[str | None, _Cap | None]: ... +def getcaps() -> dict[str, list[_Cap]]: ... 
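Note for reviewers (illustrative, not part of the patch): the mailbox stubs above lose their docstrings in this sync, so as a quick orientation, here is a minimal sketch of the mbox workflow the stripped signatures describe. The file path and message contents are placeholders chosen for the example, not anything referenced by the patch.

# Illustrative only -- not part of the vendored stubs. Exercises the
# Mailbox.add/lock/flush/get_message/close signatures typed above against
# a throwaway ./example.mbox file.
import mailbox
from email.message import EmailMessage

msg = EmailMessage()
msg["From"] = "a@example.org"
msg["To"] = "b@example.org"
msg["Subject"] = "hello"
msg.set_content("body text")

box = mailbox.mbox("example.mbox", create=True)   # mbox(path, factory=None, create=True)
try:
    box.lock()                      # Mailbox.lock() -> None
    key = box.add(msg)              # Mailbox.add(message) -> str key
    box.flush()                     # Mailbox.flush() -> None
    stored = box.get_message(key)   # mbox.get_message(key) -> mboxMessage
finally:
    box.unlock()
    box.close()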
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi index 760ed43735f69..46c421e4ce307 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi @@ -1,32 +1,3 @@ -"""This module contains functions that can read and write Python values in -a binary format. The format is specific to Python, but independent of -machine architecture issues. - -Not all Python object types are supported; in general, only objects -whose value is independent from a particular invocation of Python can be -written and read by this module. The following types are supported: -None, integers, floating-point numbers, strings, bytes, bytearrays, -tuples, lists, sets, dictionaries, and code objects, where it -should be understood that tuples, lists and dictionaries are only -supported as long as the values contained therein are themselves -supported; and recursive lists and dictionaries should not be written -(they will cause infinite loops). - -Variables: - -version -- indicates the format that the module uses. Version 0 is the - historical format, version 1 shares interned strings and version 2 - uses a binary format for floating-point numbers. - Version 3 shares common object references (New in version 3.4). - -Functions: - -dump() -- write value to a file -load() -- read value from a file -dumps() -- marshal value as a bytes object -loads() -- read value from a bytes-like object -""" - import builtins import sys import types @@ -58,142 +29,21 @@ _Marshallable: TypeAlias = ( ) if sys.version_info >= (3, 14): - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: - """Write the value on the open file. - - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. - allow_code - Allow to write code objects. - - If the value has (or contains an object that has) an unsupported type, a - ValueError exception is raised - but garbage data will also be written - to the file. The object will not be properly read back by load(). - """ - - def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: - """Return the bytes object that would be written to a file by dump(value, file). - - value - Must be a supported type. - version - Indicates the data format that dumps should use. - allow_code - Allow to write code objects. - - Raise a ValueError exception if value has (or contains an object that has) an - unsupported type. - """ + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: ... + def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: ... elif sys.version_info >= (3, 13): - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: - """Write the value on the open file. - - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. - allow_code - Allow to write code objects. - - If the value has (or contains an object that has) an unsupported type, a - ValueError exception is raised - but garbage data will also be written - to the file. The object will not be properly read back by load(). 
- """ - - def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: - """Return the bytes object that would be written to a file by dump(value, file). - - value - Must be a supported type. - version - Indicates the data format that dumps should use. - allow_code - Allow to write code objects. - - Raise a ValueError exception if value has (or contains an object that has) an - unsupported type. - """ + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: ... + def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: ... else: - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: - """Write the value on the open file. - - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. - - If the value has (or contains an object that has) an unsupported type, a - ValueError exception is raised - but garbage data will also be written - to the file. The object will not be properly read back by load(). - """ - - def dumps(value: _Marshallable, version: int = 4, /) -> bytes: - """Return the bytes object that would be written to a file by dump(value, file). - - value - Must be a supported type. - version - Indicates the data format that dumps should use. - - Raise a ValueError exception if value has (or contains an object that has) an - unsupported type. - """ + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ... + def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ... if sys.version_info >= (3, 13): - def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: - """Read one value from the open file and return it. - - file - Must be readable binary file. - allow_code - Allow to load code objects. - - If no valid value is read (e.g. because the data has a different Python - version's incompatible marshal format), raise EOFError, ValueError or - TypeError. - - Note: If an object containing an unsupported type was marshalled with - dump(), load() will substitute None for the unmarshallable type. - """ - - def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: - """Convert the bytes-like object to a value. - - allow_code - Allow to load code objects. - - If no valid value is found, raise EOFError, ValueError or TypeError. Extra - bytes in the input are ignored. - """ + def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... + def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... else: - def load(file: SupportsRead[bytes], /) -> Any: - """Read one value from the open file and return it. - - file - Must be readable binary file. - - If no valid value is read (e.g. because the data has a different Python - version's incompatible marshal format), raise EOFError, ValueError or - TypeError. - - Note: If an object containing an unsupported type was marshalled with - dump(), load() will substitute None for the unmarshallable type. - """ - - def loads(bytes: ReadableBuffer, /) -> Any: - """Convert the bytes-like object to a value. - - If no valid value is found, raise EOFError, ValueError or TypeError. Extra - bytes in the input are ignored. - """ + def load(file: SupportsRead[bytes], /) -> Any: ... + def loads(bytes: ReadableBuffer, /) -> Any: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi index e8ee56217c8c5..1903d488f7bb3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi @@ -1,7 +1,3 @@ -"""This module provides access to the mathematical functions -defined by the C standard. -""" - import sys from _typeshed import SupportsMul, SupportsRMul from collections.abc import Iterable @@ -19,278 +15,83 @@ inf: Final[float] nan: Final[float] tau: Final[float] -def acos(x: _SupportsFloatOrIndex, /) -> float: - """Return the arc cosine (measured in radians) of x. - - The result is between 0 and pi. - """ - -def acosh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic cosine of x.""" - -def asin(x: _SupportsFloatOrIndex, /) -> float: - """Return the arc sine (measured in radians) of x. - - The result is between -pi/2 and pi/2. - """ - -def asinh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic sine of x.""" - -def atan(x: _SupportsFloatOrIndex, /) -> float: - """Return the arc tangent (measured in radians) of x. - - The result is between -pi/2 and pi/2. - """ - -def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: - """Return the arc tangent (measured in radians) of y/x. - - Unlike atan(y/x), the signs of both x and y are considered. - """ - -def atanh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic tangent of x.""" +def acos(x: _SupportsFloatOrIndex, /) -> float: ... +def acosh(x: _SupportsFloatOrIndex, /) -> float: ... +def asin(x: _SupportsFloatOrIndex, /) -> float: ... +def asinh(x: _SupportsFloatOrIndex, /) -> float: ... +def atan(x: _SupportsFloatOrIndex, /) -> float: ... +def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: ... +def atanh(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 11): - def cbrt(x: _SupportsFloatOrIndex, /) -> float: - """Return the cube root of x.""" + def cbrt(x: _SupportsFloatOrIndex, /) -> float: ... @type_check_only class _SupportsCeil(Protocol[_T_co]): def __ceil__(self) -> _T_co: ... @overload -def ceil(x: _SupportsCeil[_T], /) -> _T: - """Return the ceiling of x as an Integral. - - This is the smallest integer >= x. - """ - +def ceil(x: _SupportsCeil[_T], /) -> _T: ... @overload def ceil(x: _SupportsFloatOrIndex, /) -> int: ... -def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: - """Number of ways to choose k items from n items without repetition and without order. - - Evaluates to n! / (k! * (n - k)!) when k <= n and evaluates - to zero when k > n. - - Also called the binomial coefficient because it is equivalent - to the coefficient of k-th term in polynomial expansion of the - expression (1 + x)**n. - - Raises TypeError if either of the arguments are not integers. - Raises ValueError if either of the arguments are negative. - """ - -def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return a float with the magnitude (absolute value) of x but the sign of y. - - On platforms that support signed zeros, copysign(1.0, -0.0) - returns -1.0. 
- """ - -def cos(x: _SupportsFloatOrIndex, /) -> float: - """Return the cosine of x (measured in radians).""" - -def cosh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic cosine of x.""" - -def degrees(x: _SupportsFloatOrIndex, /) -> float: - """Convert angle x from radians to degrees.""" - -def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: - """Return the Euclidean distance between two points p and q. - - The points should be specified as sequences (or iterables) of - coordinates. Both inputs must have the same dimension. - - Roughly equivalent to: - sqrt(sum((px - qx) ** 2.0 for px, qx in zip(p, q))) - """ - -def erf(x: _SupportsFloatOrIndex, /) -> float: - """Error function at x.""" - -def erfc(x: _SupportsFloatOrIndex, /) -> float: - """Complementary error function at x.""" - -def exp(x: _SupportsFloatOrIndex, /) -> float: - """Return e raised to the power of x.""" +def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: ... +def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def cos(x: _SupportsFloatOrIndex, /) -> float: ... +def cosh(x: _SupportsFloatOrIndex, /) -> float: ... +def degrees(x: _SupportsFloatOrIndex, /) -> float: ... +def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: ... +def erf(x: _SupportsFloatOrIndex, /) -> float: ... +def erfc(x: _SupportsFloatOrIndex, /) -> float: ... +def exp(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 11): - def exp2(x: _SupportsFloatOrIndex, /) -> float: - """Return 2 raised to the power of x.""" - -def expm1(x: _SupportsFloatOrIndex, /) -> float: - """Return exp(x)-1. - - This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x. - """ - -def fabs(x: _SupportsFloatOrIndex, /) -> float: - """Return the absolute value of the float x.""" - -def factorial(x: SupportsIndex, /) -> int: - """Find n!.""" + def exp2(x: _SupportsFloatOrIndex, /) -> float: ... +def expm1(x: _SupportsFloatOrIndex, /) -> float: ... +def fabs(x: _SupportsFloatOrIndex, /) -> float: ... +def factorial(x: SupportsIndex, /) -> int: ... @type_check_only class _SupportsFloor(Protocol[_T_co]): def __floor__(self) -> _T_co: ... @overload -def floor(x: _SupportsFloor[_T], /) -> _T: - """Return the floor of x as an Integral. - - This is the largest integer <= x. - """ - +def floor(x: _SupportsFloor[_T], /) -> _T: ... @overload def floor(x: _SupportsFloatOrIndex, /) -> int: ... -def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return fmod(x, y), according to platform C. - - x % y may differ. - """ - -def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: - """Return the mantissa and exponent of x, as pair (m, e). - - m is a float and e is an int, such that x = m * 2.**e. - If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0. - """ - -def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: - """Return an accurate floating-point sum of values in the iterable seq. - - Assumes IEEE-754 floating-point arithmetic. - """ - -def gamma(x: _SupportsFloatOrIndex, /) -> float: - """Gamma function at x.""" - -def gcd(*integers: SupportsIndex) -> int: - """Greatest Common Divisor.""" - -def hypot(*coordinates: _SupportsFloatOrIndex) -> float: - """Multidimensional Euclidean distance from the origin to a point. 
- - Roughly equivalent to: - sqrt(sum(x**2 for x in coordinates)) - - For a two dimensional point (x, y), gives the hypotenuse - using the Pythagorean theorem: sqrt(x*x + y*y). - - For example, the hypotenuse of a 3/4/5 right triangle is: - - >>> hypot(3.0, 4.0) - 5.0 - """ - +def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: ... +def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: ... +def gamma(x: _SupportsFloatOrIndex, /) -> float: ... +def gcd(*integers: SupportsIndex) -> int: ... +def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... def isclose( a: _SupportsFloatOrIndex, b: _SupportsFloatOrIndex, *, rel_tol: _SupportsFloatOrIndex = 1e-09, abs_tol: _SupportsFloatOrIndex = 0.0, -) -> bool: - """Determine whether two floating-point numbers are close in value. - - rel_tol - maximum difference for being considered "close", relative to the - magnitude of the input values - abs_tol - maximum difference for being considered "close", regardless of the - magnitude of the input values - - Return True if a is close in value to b, and False otherwise. - - For the values to be considered close, the difference between them - must be smaller than at least one of the tolerances. - - -inf, inf and NaN behave similarly to the IEEE 754 Standard. That - is, NaN is not close to anything, even itself. inf and -inf are - only close to themselves. - """ - -def isinf(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is a positive or negative infinity, and False otherwise.""" - -def isfinite(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is neither an infinity nor a NaN, and False otherwise.""" - -def isnan(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is a NaN (not a number), and False otherwise.""" - -def isqrt(n: SupportsIndex, /) -> int: - """Return the integer part of the square root of the input.""" - -def lcm(*integers: SupportsIndex) -> int: - """Least Common Multiple.""" - -def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: - """Return x * (2**i). - - This is essentially the inverse of frexp(). - """ - -def lgamma(x: _SupportsFloatOrIndex, /) -> float: - """Natural logarithm of absolute value of Gamma function at x.""" - -def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: - """log(x, [base=math.e]) - Return the logarithm of x to the given base. - - If the base is not specified, returns the natural logarithm (base e) of x. - """ - -def log10(x: _SupportsFloatOrIndex, /) -> float: - """Return the base 10 logarithm of x.""" - -def log1p(x: _SupportsFloatOrIndex, /) -> float: - """Return the natural logarithm of 1+x (base e). - - The result is computed in a way which is accurate for x near zero. - """ - -def log2(x: _SupportsFloatOrIndex, /) -> float: - """Return the base 2 logarithm of x.""" - -def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: - """Return the fractional and integer parts of x. - - Both results carry the sign of x and are floats. - """ +) -> bool: ... +def isinf(x: _SupportsFloatOrIndex, /) -> bool: ... +def isfinite(x: _SupportsFloatOrIndex, /) -> bool: ... +def isnan(x: _SupportsFloatOrIndex, /) -> bool: ... +def isqrt(n: SupportsIndex, /) -> int: ... +def lcm(*integers: SupportsIndex) -> int: ... +def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: ... +def lgamma(x: _SupportsFloatOrIndex, /) -> float: ... +def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... 
+def log10(x: _SupportsFloatOrIndex, /) -> float: ... +def log1p(x: _SupportsFloatOrIndex, /) -> float: ... +def log2(x: _SupportsFloatOrIndex, /) -> float: ... +def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: ... if sys.version_info >= (3, 12): - def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: - """Return the floating-point value the given number of steps after x towards y. - - If steps is not specified or is None, it defaults to 1. - - Raises a TypeError, if x or y is not a double, or if steps is not an integer. - Raises ValueError if steps is negative. - """ + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: ... else: - def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return the next floating-point value after x towards y.""" - -def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: - """Number of ways to choose k items from n items without repetition and with order. - - Evaluates to n! / (n - k)! when k <= n and evaluates - to zero when k > n. - - If k is not specified or is None, then k defaults to n - and the function returns n!. - - Raises TypeError if either of the arguments are not integers. - Raises ValueError if either of the arguments are negative. - """ + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... -def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return x**y (x to the power of y).""" +def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: ... +def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... _PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] _NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] @@ -310,75 +111,30 @@ _SupportsProdNoDefaultT = TypeVar("_SupportsProdNoDefaultT", bound=_SupportsProd # For more details on its limitations and false positives, see #13572. # Instead, just like `builtins.sum`, we explicitly handle several useful cases. @overload -def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: # type: ignore[overload-overlap] - """Calculate the product of all the elements in the input iterable. - - The default start value for the product is 1. - - When the iterable is empty, return the start value. This function is - intended specifically for use with numeric values and may reject - non-numeric types. - """ - +def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: ... # type: ignore[overload-overlap] @overload def prod(iterable: Iterable[_SupportsProdNoDefaultT], /) -> _SupportsProdNoDefaultT | Literal[1]: ... @overload def prod(iterable: Iterable[_MultiplicableT1], /, *, start: _MultiplicableT2) -> _MultiplicableT1 | _MultiplicableT2: ... -def radians(x: _SupportsFloatOrIndex, /) -> float: - """Convert angle x from degrees to radians.""" - -def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Difference between x and the closest integer multiple of y. - - Return x - n*y where n*y is the closest integer multiple of y. - In the case where x is exactly halfway between two multiples of - y, the nearest even value of n is used. The result is always exact. 
- """ - -def sin(x: _SupportsFloatOrIndex, /) -> float: - """Return the sine of x (measured in radians).""" - -def sinh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic sine of x.""" +def radians(x: _SupportsFloatOrIndex, /) -> float: ... +def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +def sin(x: _SupportsFloatOrIndex, /) -> float: ... +def sinh(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 12): - def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: - """Return the sum of products of values from two iterables p and q. - - Roughly equivalent to: - - sum(map(operator.mul, p, q, strict=True)) - - For float and mixed int/float inputs, the intermediate products - and sums are computed with extended precision. - """ + def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: ... -def sqrt(x: _SupportsFloatOrIndex, /) -> float: - """Return the square root of x.""" - -def tan(x: _SupportsFloatOrIndex, /) -> float: - """Return the tangent of x (measured in radians).""" - -def tanh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic tangent of x.""" +def sqrt(x: _SupportsFloatOrIndex, /) -> float: ... +def tan(x: _SupportsFloatOrIndex, /) -> float: ... +def tanh(x: _SupportsFloatOrIndex, /) -> float: ... # Is different from `_typeshed.SupportsTrunc`, which is not generic @type_check_only class _SupportsTrunc(Protocol[_T_co]): def __trunc__(self) -> _T_co: ... -def trunc(x: _SupportsTrunc[_T], /) -> _T: - """Truncates the Real x to the nearest Integral toward 0. - - Uses the __trunc__ magic method. - """ - -def ulp(x: _SupportsFloatOrIndex, /) -> float: - """Return the value of the least significant bit of the float x.""" +def trunc(x: _SupportsTrunc[_T], /) -> _T: ... +def ulp(x: _SupportsFloatOrIndex, /) -> float: ... if sys.version_info >= (3, 13): - def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: - """Fused multiply-add operation. - - Compute (x * y) + z with a single round. - """ + def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi index 4e54a0eb8538f..9914a34a2d6a6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi @@ -1,28 +1,3 @@ -"""Guess the MIME type of a file. - -This module defines two useful functions: - -guess_type(url, strict=True) -- guess the MIME type and encoding of a URL. - -guess_extension(type, strict=True) -- guess the extension for a given MIME type. 
- -It also contains the following, for tuning the behavior: - -Data: - -knownfiles -- list of files to parse -inited -- flag set when init() has been called -suffix_map -- dictionary mapping suffixes to suffixes -encodings_map -- dictionary mapping suffixes to encodings -types_map -- dictionary mapping suffixes to types - -Functions: - -init([files]) -- parse a list of files, default knownfiles (on Windows, the - default values are taken from the registry) -read_mime_types(file) -- parse one file, return a dictionary or None -""" - import sys from _typeshed import StrPath from collections.abc import Sequence @@ -47,73 +22,15 @@ __all__ = [ if sys.version_info >= (3, 13): __all__ += ["guess_file_type"] -def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: - """Guess the type of a file based on its URL. - - Return value is a tuple (type, encoding) where type is None if the - type can't be guessed (no or unknown suffix) or a string of the - form type/subtype, usable for a MIME Content-type header; and - encoding is None for no encoding or the name of the program used - to encode (e.g. compress or gzip). The mappings are table - driven. Encoding suffixes are case sensitive; type suffixes are - first tried case sensitive, then case insensitive. - - The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped - to ".tar.gz". (This is table-driven too, using the dictionary - suffix_map). - - Optional 'strict' argument when false adds a bunch of commonly found, but - non-standard types. - """ - -def guess_all_extensions(type: str, strict: bool = True) -> list[str]: - """Guess the extensions for a file based on its MIME type. - - Return value is a list of strings giving the possible filename - extensions, including the leading dot ('.'). The extension is not - guaranteed to have been associated with any particular data - stream, but would be mapped to the MIME type 'type' by - guess_type(). If no extension can be guessed for 'type', None - is returned. - - Optional 'strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - -def guess_extension(type: str, strict: bool = True) -> str | None: - """Guess the extension for a file based on its MIME type. - - Return value is a string giving a filename extension, including the - leading dot ('.'). The extension is not guaranteed to have been - associated with any particular data stream, but would be mapped to the - MIME type 'type' by guess_type(). If no extension can be guessed for - 'type', None is returned. - - Optional 'strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - +def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... +def guess_extension(type: str, strict: bool = True) -> str | None: ... def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... -def add_type(type: str, ext: str, strict: bool = True) -> None: - """Add a mapping between a type and an extension. - - When the extension is already known, the new - type will replace the old one. When the type - is already known the extension will be added - to the list of known extensions. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ +def add_type(type: str, ext: str, strict: bool = True) -> None: ... 
if sys.version_info >= (3, 13): - def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: - """Guess the type of a file based on its path. - - Similar to guess_type(), but takes file path instead of URL. - """ + def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... inited: bool knownfiles: list[str] @@ -123,108 +40,17 @@ types_map: dict[str, str] common_types: dict[str, str] class MimeTypes: - """MIME-types datastore. - - This datastore can handle information from mime.types-style files - and supports basic determination of MIME type from a filename or - URL, and can guess a reasonable extension given a MIME type. - """ - suffix_map: dict[str, str] encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... - def add_type(self, type: str, ext: str, strict: bool = True) -> None: - """Add a mapping between a type and an extension. - - When the extension is already known, the new - type will replace the old one. When the type - is already known the extension will be added - to the list of known extensions. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - - Valid extensions are empty or start with a '.'. - """ - - def guess_extension(self, type: str, strict: bool = True) -> str | None: - """Guess the extension for a file based on its MIME type. - - Return value is a string giving a filename extension, - including the leading dot ('.'). The extension is not - guaranteed to have been associated with any particular data - stream, but would be mapped to the MIME type 'type' by - guess_type(). If no extension can be guessed for 'type', None - is returned. - - Optional 'strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - - def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: - """Guess the type of a file which is either a URL or a path-like object. - - Return value is a tuple (type, encoding) where type is None if - the type can't be guessed (no or unknown suffix) or a string - of the form type/subtype, usable for a MIME Content-type - header; and encoding is None for no encoding or the name of - the program used to encode (e.g. compress or gzip). The - mappings are table driven. Encoding suffixes are case - sensitive; type suffixes are first tried case sensitive, then - case insensitive. - - The suffixes .tgz, .taz and .tz (case sensitive!) are all - mapped to '.tar.gz'. (This is table-driven too, using the - dictionary suffix_map.) - - Optional 'strict' argument when False adds a bunch of commonly found, - but non-standard types. - """ - - def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: - """Guess the extensions for a file based on its MIME type. - - Return value is a list of strings giving the possible filename - extensions, including the leading dot ('.'). The extension is not - guaranteed to have been associated with any particular data stream, - but would be mapped to the MIME type 'type' by guess_type(). - - Optional 'strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - - def read(self, filename: str, strict: bool = True) -> None: - """ - Read a single mime.types-format file, specified by pathname. 
- - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ - - def readfp(self, fp: IO[str], strict: bool = True) -> None: - """ - Read a single mime.types-format file. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ - - def read_windows_registry(self, strict: bool = True) -> None: - """ - Load the MIME types database from Windows registry. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ + def add_type(self, type: str, ext: str, strict: bool = True) -> None: ... + def guess_extension(self, type: str, strict: bool = True) -> str | None: ... + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... + def read(self, filename: str, strict: bool = True) -> None: ... + def readfp(self, fp: IO[str], strict: bool = True) -> None: ... + def read_windows_registry(self, strict: bool = True) -> None: ... if sys.version_info >= (3, 13): - def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: - """Guess the type of a file based on its path. - - Similar to guess_type(), but takes file path instead of URL. - """ + def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi index 42214fb31ac63..8a5baba629141 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi @@ -33,29 +33,6 @@ PAGESIZE: Final[int] @disjoint_base class mmap: - """Windows: mmap(fileno, length[, tagname[, access[, offset]]]) - - Maps length bytes from the file specified by the file handle fileno, - and returns a mmap object. If length is larger than the current size - of the file, the file is extended to contain length bytes. If length - is 0, the maximum length of the map is the current size of the file, - except that if the file is empty Windows raises an exception (you cannot - create an empty mapping on Windows). - - Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]]) - - Maps length bytes from the file specified by the file descriptor fileno, - and returns a mmap object. If length is 0, the maximum length of the map - will be the current size of the file when mmap is called. - flags specifies the nature of the mapping. MAP_PRIVATE creates a - private copy-on-write mapping, so changes to the contents of the mmap - object will be private to this process, and MAP_SHARED creates a mapping - that's shared with all other processes mapping the same areas of the file. - The default value is MAP_SHARED. - - To map anonymous memory, pass -1 as the fileno (both versions). - """ - if sys.platform == "win32": def __new__(self, fileno: int, length: int, tagname: str | None = None, access: int = 0, offset: int = 0) -> Self: ... else: @@ -90,8 +67,7 @@ class mmap: def size(self) -> int: ... def tell(self) -> int: ... def write_byte(self, byte: int) -> None: ... - def __len__(self) -> int: - """Return len(self).""" + def __len__(self) -> int: ... closed: bool if sys.platform != "win32": def madvise(self, option: int, start: int = 0, length: int = ...) -> None: ... 
@@ -101,18 +77,12 @@ class mmap: def read(self, n: int | None = None) -> bytes: ... def write(self, bytes: ReadableBuffer) -> int: ... @overload - def __getitem__(self, key: int, /) -> int: - """Return self[key].""" - + def __getitem__(self, key: int, /) -> int: ... @overload def __getitem__(self, key: slice, /) -> bytes: ... - def __delitem__(self, key: int | slice, /) -> NoReturn: - """Delete self[key].""" - + def __delitem__(self, key: int | slice, /) -> NoReturn: ... @overload - def __setitem__(self, key: int, value: int, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: int, value: int, /) -> None: ... @overload def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, @@ -123,11 +93,8 @@ class mmap: def __iter__(self) -> Iterator[int]: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object.""" + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... if sys.version_info >= (3, 13): def seekable(self) -> Literal[True]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi index 692cb04850ad9..6db665a18e691 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi @@ -1,5 +1,3 @@ -"""Find modules used by a script, using introspection.""" - import sys from collections.abc import Container, Iterable, Iterator, Sequence from types import CodeType @@ -62,27 +60,9 @@ class ModuleFinder: def find_module( self, name: str, path: str | None, parent: Module | None = None ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented - def report(self) -> None: - """Print a report to stdout, listing the found modules with their - paths, as well as modules that are missing, or seem to be missing. - """ - - def any_missing(self) -> list[str]: # undocumented - """Return a list of modules that appear to be missing. Use - any_missing_maybe() if you want to know which modules are - certain to be missing, and which *may* be missing. - """ - - def any_missing_maybe(self) -> tuple[list[str], list[str]]: # undocumented - """Return two lists, one with modules that are certainly missing - and one with modules that *may* be missing. The latter names could - either be submodules *or* just global names in the package. - - The reason it can't always be determined is that it's impossible to - tell which names are imported when "from module import *" is done - with an extension module, short of actually importing it. - """ - + def report(self) -> None: ... + def any_missing(self) -> list[str]: ... # undocumented + def any_missing_maybe(self) -> tuple[list[str], list[str]]: ... # undocumented def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented def test() -> ModuleFinder | None: ... 
# undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi index 0983860e54e6d..622f585f5beea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi @@ -37,9 +37,7 @@ if sys.platform == "win32": action: str, seqno: int | type[_Unspecified] = ..., cond: str | type[_Unspecified] = ..., - ) -> None: - """Change the sequence number of an action in a sequence list""" - + ) -> None: ... def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... def add_stream(db: _Database, name: str, path: str) -> None: ... def init_database( @@ -82,17 +80,7 @@ if sys.platform == "win32": _logical: str, default: str, componentflags: int | None = None, - ) -> None: - """Create a new directory in the Directory table. There is a current component - at each point in time for the directory, which is either explicitly created - through start_component, or implicitly when files are added for the first - time. Files are added into the current component, and into the cab file. - To create a directory, a base directory object needs to be specified (can be - None), the path to the physical directory, and a logical directory name. - Default specifies the DefaultDir slot in the directory table. componentflags - specifies the default flags that new components get. - """ - + ) -> None: ... def start_component( self, component: str | None = None, @@ -100,30 +88,11 @@ if sys.platform == "win32": flags: int | None = None, keyfile: str | None = None, uuid: str | None = None, - ) -> None: - """Add an entry to the Component table, and make this component the current for this - directory. If no component name is given, the directory name is used. If no feature - is given, the current feature is used. If no flags are given, the directory's default - flags are used. If no keyfile is given, the KeyPath is left null in the Component - table. - """ - + ) -> None: ... def make_short(self, file: str) -> str: ... - def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: - """Add a file to the current component of the directory, starting a new one - if there is no current component. By default, the file name in the source - and the file table will be identical. If the src file is specified, it is - interpreted relative to the current directory. Optionally, a version and a - language can be specified for the entry in the File table. - """ - - def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: - """Add a list of files to the current component as specified in the - glob pattern. Individual files can be excluded in the exclude list. - """ - - def remove_pyc(self) -> None: - """Remove .pyc files on uninstall""" + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... + def remove_pyc(self) -> None: ... 
class Binary: name: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi index 2a11014103a27..5feca8eab5c1c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi @@ -13,88 +13,20 @@ if sys.platform == "win32": SEM_NOALIGNMENTFAULTEXCEPT: Final = 0x0004 SEM_NOGPFAULTERRORBOX: Final = 0x0002 SEM_NOOPENFILEERRORBOX: Final = 0x8000 - def locking(fd: int, mode: int, nbytes: int, /) -> None: - """Lock part of a file based on file descriptor fd from the C runtime. - - Raises OSError on failure. The locked region of the file extends from - the current file position for nbytes bytes, and may continue beyond - the end of the file. mode must be one of the LK_* constants listed - below. Multiple regions in a file may be locked at the same time, but - may not overlap. Adjacent regions are not merged; they must be unlocked - individually. - """ - - def setmode(fd: int, mode: int, /) -> int: - """Set the line-end translation mode for the file descriptor fd. - - To set it to text mode, flags should be os.O_TEXT; for binary, it - should be os.O_BINARY. - - Return value is the previous mode. - """ - - def open_osfhandle(handle: int, flags: int, /) -> int: - """Create a C runtime file descriptor from the file handle handle. - - The flags parameter should be a bitwise OR of os.O_APPEND, os.O_RDONLY, - and os.O_TEXT. The returned file descriptor may be used as a parameter - to os.fdopen() to create a file object. - """ - - def get_osfhandle(fd: int, /) -> int: - """Return the file handle for the file descriptor fd. - - Raises OSError if fd is not recognized. - """ - - def kbhit() -> bool: - """Returns a nonzero value if a keypress is waiting to be read. Otherwise, return 0.""" - - def getch() -> bytes: - """Read a keypress and return the resulting character as a byte string. - - Nothing is echoed to the console. This call will block if a keypress is - not already available, but will not wait for Enter to be pressed. If the - pressed key was a special function key, this will return '\\000' or - '\\xe0'; the next call will return the keycode. The Control-C keypress - cannot be read with this function. - """ - - def getwch() -> str: - """Wide char variant of getch(), returning a Unicode value.""" - - def getche() -> bytes: - """Similar to getch(), but the keypress will be echoed if possible.""" - - def getwche() -> str: - """Wide char variant of getche(), returning a Unicode value.""" - - def putch(char: bytes | bytearray, /) -> None: - """Print the byte string char to the console without buffering.""" - - def putwch(unicode_char: str, /) -> None: - """Wide char variant of putch(), accepting a Unicode value.""" - - def ungetch(char: bytes | bytearray, /) -> None: - """Opposite of getch. - - Cause the byte string char to be "pushed back" into the - console buffer; it will be the next character read by - getch() or getche(). - """ - - def ungetwch(unicode_char: str, /) -> None: - """Wide char variant of ungetch(), accepting a Unicode value.""" - - def heapmin() -> None: - """Minimize the malloc() heap. - - Force the malloc() heap to clean itself up and return unused blocks - to the operating system. On failure, this raises OSError. - """ - - def SetErrorMode(mode: int, /) -> int: - """Wrapper around SetErrorMode.""" + def locking(fd: int, mode: int, nbytes: int, /) -> None: ... + def setmode(fd: int, mode: int, /) -> int: ... 
+ def open_osfhandle(handle: int, flags: int, /) -> int: ... + def get_osfhandle(fd: int, /) -> int: ... + def kbhit() -> bool: ... + def getch() -> bytes: ... + def getwch() -> str: ... + def getche() -> bytes: ... + def getwche() -> str: ... + def putch(char: bytes | bytearray, /) -> None: ... + def putwch(unicode_char: str, /) -> None: ... + def ungetch(char: bytes | bytearray, /) -> None: ... + def ungetwch(unicode_char: str, /) -> None: ... + def heapmin() -> None: ... + def SetErrorMode(mode: int, /) -> int: ... if sys.version_info >= (3, 10): - def GetErrorMode() -> int: # undocumented - """Wrapper around GetErrorMode.""" + def GetErrorMode() -> int: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi index 3fe3d082c30dc..cd4fa102c0f3e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi @@ -18,89 +18,36 @@ _RecvT_co = TypeVar("_RecvT_co", covariant=True, default=Any) class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]): def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property - def closed(self) -> bool: # undocumented - """True if the connection is closed""" - + def closed(self) -> bool: ... # undocumented @property - def readable(self) -> bool: # undocumented - """True if the connection is readable""" - + def readable(self) -> bool: ... # undocumented @property - def writable(self) -> bool: # undocumented - """True if the connection is writable""" - - def fileno(self) -> int: - """File descriptor or handle of the connection""" - - def close(self) -> None: - """Close the connection""" - - def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: - """Send the bytes data from a bytes-like object""" - - def send(self, obj: _SendT_contra) -> None: - """Send a (picklable) object""" - - def recv_bytes(self, maxlength: int | None = None) -> bytes: - """ - Receive bytes data as a bytes object. - """ - - def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: - """ - Receive bytes data into a writeable bytes-like object. - Return the number of bytes read. - """ - - def recv(self) -> _RecvT_co: - """Receive a (picklable) object""" - - def poll(self, timeout: float | None = 0.0) -> bool: - """Whether there is any input available to be read""" - + def writable(self) -> bool: ... # undocumented + def fileno(self) -> int: ... + def close(self) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... + def send(self, obj: _SendT_contra) -> None: ... + def recv_bytes(self, maxlength: int | None = None) -> bytes: ... + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... + def recv(self) -> _RecvT_co: ... + def poll(self, timeout: float | None = 0.0) -> bool: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __del__(self) -> None: ... -class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): - """ - Connection class based on an arbitrary file descriptor (Unix only), or - a socket handle (Windows). - """ +class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... 
if sys.platform == "win32": - class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): - """ - Connection class based on a Windows named pipe. - Overlapped I/O is used, so the handles must have been created - with FILE_FLAG_OVERLAPPED. - """ + class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... class Listener: - """ - Returns a listener object. - - This is a wrapper for a bound socket which is 'listening' for - connections, or for a Windows named pipe. - """ - def __init__( self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... - def accept(self) -> Connection[Incomplete, Incomplete]: - """ - Accept a connection on the bound socket or named pipe of `self`. - - Returns a `Connection` object. - """ - - def close(self) -> None: - """ - Close the bound socket or named pipe of `self`. - """ - + def accept(self) -> Connection[Incomplete, Incomplete]: ... + def close(self) -> None: ... @property def address(self) -> _Address: ... @property @@ -120,17 +67,8 @@ else: def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... def wait( object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None -) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: - """ - Wait till an object in object_list is ready/readable. - - Returns list of those objects in object_list which are ready/readable. - """ - -def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: - """ - Returns a connection to the address of a `Listener` - """ +) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: ... +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: ... # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection @@ -139,13 +77,7 @@ def Client(address: _Address, family: str | None = None, authkey: bytes | None = # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: - """ - Returns pair of connection objects at either end of a pipe - """ + def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... else: - def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: - """ - Returns pair of connection objects at either end of a pipe - """ + def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi index 21be42237d796..03d1d2e5c2203 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi @@ -37,32 +37,14 @@ class BaseContext: # N.B. The methods below are applied at runtime to generate # multiprocessing.*, so the signatures should be identical (modulo self). 
@staticmethod - def current_process() -> BaseProcess: - """ - Return process object representing the current process - """ - + def current_process() -> BaseProcess: ... @staticmethod - def parent_process() -> BaseProcess | None: - """ - Return process object representing the parent process - """ - + def parent_process() -> BaseProcess | None: ... @staticmethod - def active_children() -> list[BaseProcess]: - """ - Return list of process objects corresponding to live child processes - """ - - def cpu_count(self) -> int: - """Returns the number of CPUs in the system""" - - def Manager(self) -> SyncManager: - """Returns a manager associated with a running server process + def active_children() -> list[BaseProcess]: ... + def cpu_count(self) -> int: ... + def Manager(self) -> SyncManager: ... - The managers methods such as `Lock()`, `Condition()` and `Queue()` - can be used to create shared objects. - """ # N.B. Keep this in sync with multiprocessing.connection.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. @@ -70,71 +52,41 @@ class BaseContext: # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: - """Returns two connection object connected by a pipe""" + def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... else: - def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: - """Returns two connection object connected by a pipe""" + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... def Barrier( self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None - ) -> synchronize.Barrier: - """Returns a barrier object""" - - def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: - """Returns a bounded semaphore object""" - - def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: - """Returns a condition object""" - - def Event(self) -> synchronize.Event: - """Returns an event object""" - - def Lock(self) -> synchronize.Lock: - """Returns a non-recursive lock object""" - - def RLock(self) -> synchronize.RLock: - """Returns a recursive lock object""" - - def Semaphore(self, value: int = 1) -> synchronize.Semaphore: - """Returns a semaphore object""" - - def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: - """Returns a queue object""" - - def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: - """Returns a queue object""" - - def SimpleQueue(self) -> queues.SimpleQueue[Any]: - """Returns a queue object""" - + ) -> synchronize.Barrier: ... + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... + def Event(self) -> synchronize.Event: ... + def Lock(self) -> synchronize.Lock: ... + def RLock(self) -> synchronize.RLock: ... + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... + def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... 
def Pool( self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = (), maxtasksperchild: int | None = None, - ) -> _Pool: - """Returns a process pool object""" - + ) -> _Pool: ... @overload - def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: - """Returns a shared object""" - + def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... @overload def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... @overload - def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: - """Returns a shared array""" - + def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... @overload def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload def Value( self, typecode_or_type: type[_SimpleCData[_T]], *args: Any, lock: Literal[True] | _LockLike = True - ) -> Synchronized[_T]: - """Returns a synchronized shared object""" - + ) -> Synchronized[_T]: ... @overload def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> Synchronized[_CT]: ... @overload @@ -146,9 +98,7 @@ class BaseContext: @overload def Array( self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] - ) -> SynchronizedArray[_T]: - """Returns a synchronized shared array""" - + ) -> SynchronizedArray[_T]: ... @overload def Array( self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True @@ -169,34 +119,12 @@ class BaseContext: def Array( self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True ) -> Any: ... - def freeze_support(self) -> None: - """Check whether this is a fake forked process in a frozen executable. - If so then run code specified by commandline and exit. - """ - - def get_logger(self) -> Logger: - """Return package logger -- if it does not already exist then - it is created. - """ - - def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: - """Turn on logging and add a handler which prints to stderr""" - - def allow_connection_pickling(self) -> None: - """Install support for sending connections and sockets - between processes - """ - - def set_executable(self, executable: str) -> None: - """Sets the path to a python.exe or pythonw.exe binary used to run - child processes instead of sys.executable when using the 'spawn' - start method. Useful for people embedding Python. - """ - - def set_forkserver_preload(self, module_names: list[str]) -> None: - """Set list of module names to try to load in forkserver process. - This is really just a hint. - """ + def freeze_support(self) -> None: ... + def get_logger(self) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... + def allow_connection_pickling(self) -> None: ... + def set_executable(self, executable: str) -> None: ... + def set_forkserver_preload(self, module_names: list[str]) -> None: ... if sys.platform != "win32": @overload def get_context(self, method: None = None) -> DefaultContext: ... @@ -222,11 +150,7 @@ class BaseContext: def get_start_method(self, allow_none: bool) -> str | None: ... def set_start_method(self, method: str | None, force: bool = False) -> None: ... 
@property - def reducer(self) -> str: - """Controls how objects will be reduced to a form that can be - shared with other processes. - """ - + def reducer(self) -> str: ... @reducer.setter def reducer(self, reduction: str) -> None: ... def _check_available(self) -> None: ... @@ -240,8 +164,7 @@ class DefaultContext(BaseContext): Process: ClassVar[type[Process]] def __init__(self, context: BaseContext) -> None: ... def get_start_method(self, allow_none: bool = False) -> str: ... - def get_all_start_methods(self) -> list[str]: - """Returns a list of the supported start methods, default first.""" + def get_all_start_methods(self) -> list[str]: ... _default_context: DefaultContext diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi index 00208b1103cb0..c4af295d23161 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -10,32 +10,10 @@ MAXFDS_TO_SEND: Final = 256 SIGNED_STRUCT: Final[Struct] class ForkServer: - def set_forkserver_preload(self, modules_names: list[str]) -> None: - """Set list of module names to try to load in forkserver process.""" - - def get_inherited_fds(self) -> list[int] | None: - """Return list of fds inherited from parent process. - - This returns None if the current process was not started by fork - server. - """ - - def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: - """Request forkserver to create a child process. - - Returns a pair of fds (status_r, data_w). The calling process can read - the child process's pid and (eventually) its returncode from status_r. - The calling process should write to data_w the pickled preparation and - process data. - """ - - def ensure_running(self) -> None: - """Make sure that a fork server is running. - - This can be called from any process. Note that usually a child - process will just reuse the forkserver started by its parent, so - ensure_running() will do nothing. - """ + def set_forkserver_preload(self, modules_names: list[str]) -> None: ... + def get_inherited_fds(self) -> list[int] | None: ... + def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: ... + def ensure_running(self) -> None: ... if sys.version_info >= (3, 14): def main( @@ -46,8 +24,7 @@ if sys.version_info >= (3, 14): sys_path: list[str] | None = None, *, authkey_r: int | None = None, - ) -> None: - """Run forkserver.""" + ) -> None: ... else: def main( @@ -56,8 +33,7 @@ else: preload: Sequence[str], main_path: str | None = None, sys_path: Unused = None, - ) -> None: - """Run forkserver.""" + ) -> None: ... def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi index 894d2dfc622b3..38191a099f1ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi @@ -8,10 +8,6 @@ from typing_extensions import TypeAlias __all__ = ["BufferWrapper"] class Arena: - """ - A shared memory area backed by a temporary file (POSIX). 
- """ - size: int buffer: mmap if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi index c4c8182c1ad27..5efe69a973777 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi @@ -38,10 +38,6 @@ class Namespace: _Namespace: TypeAlias = Namespace class Token: - """ - Type to uniquely identify a shared object - """ - __slots__ = ("typeid", "address", "id") typeid: str | bytes | None address: _Address | None @@ -51,10 +47,6 @@ class Token: def __setstate__(self, state: tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]) -> None: ... class BaseProxy: - """ - A base for proxies of shared objects - """ - _address_to_local: dict[_Address, Any] _mutex: Any def __init__( @@ -68,27 +60,15 @@ class BaseProxy: manager_owned: bool = False, ) -> None: ... def __deepcopy__(self, memo: Any | None) -> Any: ... - def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: - """ - Try to call a method of the referent and return a copy of the result - """ - - def _getvalue(self) -> Any: - """ - Get a copy of the value of the referent - """ - + def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: ... + def _getvalue(self) -> Any: ... def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... class ValueProxy(BaseProxy, Generic[_T]): def get(self) -> _T: ... def set(self, value: _T) -> None: ... value: _T - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 13): class _BaseDictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -116,11 +96,7 @@ if sys.version_info >= (3, 13): def values(self) -> list[_VT]: ... # type: ignore[override] class DictProxy(_BaseDictProxy[_KT, _VT]): - def __class_getitem__(cls, args: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, args: Any, /) -> GenericAlias: ... else: class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -227,11 +203,7 @@ class ListProxy(BaseListProxy[_T]): def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[override] def __imul__(self, value: SupportsIndex, /) -> Self: ... # type: ignore[override] if sys.version_info >= (3, 13): - def __class_getitem__(cls, args: Any, /) -> Any: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, args: Any, /) -> Any: ... # Send is (kind, result) # Receive is (id, methodname, args, kwds) @@ -239,10 +211,6 @@ _ServerConnection: TypeAlias = Connection[tuple[str, Any], tuple[str, str, Itera # Returned by BaseManager.get_server() class Server: - """ - Server class which runs in a process controlled by a manager object - """ - address: _Address | None id_to_obj: dict[str, tuple[Any, set[str], dict[str, str]]] fallback_mapping: dict[str, Callable[[_ServerConnection, str, Any], Any]] @@ -255,70 +223,28 @@ class Server: authkey: bytes, serializer: str, ) -> None: ... 
- def serve_forever(self) -> None: - """ - Run the server forever - """ - + def serve_forever(self) -> None: ... def accepter(self) -> None: ... if sys.version_info >= (3, 10): - def handle_request(self, conn: _ServerConnection) -> None: - """ - Handle a new connection - """ + def handle_request(self, conn: _ServerConnection) -> None: ... else: - def handle_request(self, c: _ServerConnection) -> None: - """ - Handle a new connection - """ - - def serve_client(self, conn: _ServerConnection) -> None: - """ - Handle requests from the proxies in a particular process/thread - """ + def handle_request(self, c: _ServerConnection) -> None: ... + def serve_client(self, conn: _ServerConnection) -> None: ... def fallback_getvalue(self, conn: _ServerConnection, ident: str, obj: _T) -> _T: ... def fallback_str(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def fallback_repr(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def dummy(self, c: _ServerConnection) -> None: ... - def debug_info(self, c: _ServerConnection) -> str: - """ - Return some info --- useful to spot problems with refcounting - """ - - def number_of_objects(self, c: _ServerConnection) -> int: - """ - Number of shared objects - """ - - def shutdown(self, c: _ServerConnection) -> None: - """ - Shutdown this process - """ - - def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: - """ - Create a new shared object and return its id - """ - - def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: - """ - Return the methods of the shared object indicated by token - """ - - def accept_connection(self, c: _ServerConnection, name: str) -> None: - """ - Spawn a new thread to serve this connection - """ - + def debug_info(self, c: _ServerConnection) -> str: ... + def number_of_objects(self, c: _ServerConnection) -> int: ... + def shutdown(self, c: _ServerConnection) -> None: ... + def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: ... + def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: ... + def accept_connection(self, c: _ServerConnection, name: str) -> None: ... def incref(self, c: _ServerConnection, ident: str) -> None: ... def decref(self, c: _ServerConnection, ident: str) -> None: ... class BaseManager: - """ - Base class for managers - """ - if sys.version_info >= (3, 11): def __init__( self, @@ -338,26 +264,11 @@ class BaseManager: ctx: BaseContext | None = None, ) -> None: ... - def get_server(self) -> Server: - """ - Return server object with serve_forever() method and address attribute - """ - - def connect(self) -> None: - """ - Connect manager object to the server process - """ - - def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: - """ - Spawn a server process for this manager object - """ + def get_server(self) -> Server: ... + def connect(self) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: ... shutdown: _Finalize # only available after start() was called - def join(self, timeout: float | None = None) -> None: # undocumented - """ - Join the manager process (if it has been spawned) - """ - + def join(self, timeout: float | None = None) -> None: ... # undocumented @property def address(self) -> _Address | None: ... 
@classmethod @@ -369,27 +280,13 @@ class BaseManager: exposed: Sequence[str] | None = None, method_to_typeid: Mapping[str, str] | None = None, create_method: bool = True, - ) -> None: - """ - Register a typeid with the manager type - """ - + ) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class SyncManager(BaseManager): - """ - Subclass of `BaseManager` which supports a number of shared object types. - - The types registered are those intended for the synchronization - of threads, plus `dict`, `list` and `Namespace`. - - The `multiprocessing.Manager()` function creates started instances of - this class. - """ - def Barrier( self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None ) -> threading.Barrier: ... @@ -442,40 +339,12 @@ class SyncManager(BaseManager): class RemoteError(Exception): ... class SharedMemoryServer(Server): - def track_segment(self, c: _ServerConnection, segment_name: str) -> None: - """Adds the supplied shared memory block name to Server's tracker.""" - - def release_segment(self, c: _ServerConnection, segment_name: str) -> None: - """Calls unlink() on the shared memory block with the supplied name - and removes it from the tracker instance inside the Server. - """ - - def list_segments(self, c: _ServerConnection) -> list[str]: - """Returns a list of names of shared memory blocks that the Server - is currently tracking. - """ + def track_segment(self, c: _ServerConnection, segment_name: str) -> None: ... + def release_segment(self, c: _ServerConnection, segment_name: str) -> None: ... + def list_segments(self, c: _ServerConnection) -> list[str]: ... class SharedMemoryManager(BaseManager): - """Like SyncManager but uses SharedMemoryServer instead of Server. - - It provides methods for creating and returning SharedMemory instances - and for creating a list-like object (ShareableList) backed by shared - memory. It also provides methods that create and return Proxy Objects - that support synchronization across processes (i.e. multi-process-safe - locks and semaphores). - """ - - def get_server(self) -> SharedMemoryServer: - """Better than monkeypatching for now; merge into Server ultimately""" - - def SharedMemory(self, size: int) -> _SharedMemory: - """Returns a new SharedMemory instance with the specified size in - bytes, to be tracked by the manager. - """ - - def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: - """Returns a new ShareableList instance populated with the values - from the input sequence, to be tracked by the manager. - """ - + def get_server(self) -> SharedMemoryServer: ... + def SharedMemory(self, size: int) -> _SharedMemory: ... + def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: ... def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi index c8c6ff3d3104e..b79f9e77359ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi @@ -17,11 +17,7 @@ class ApplyResult(Generic[_T]): def wait(self, timeout: float | None = None) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # alias created during issue #17805 AsyncResult = ApplyResult @@ -45,10 +41,6 @@ class IMapIterator(Generic[_T]): class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool: - """ - Class which supports an async version of applying functions to arguments. - """ - def __init__( self, processes: int | None = None, @@ -59,12 +51,7 @@ class Pool: ) -> None: ... @staticmethod def Process(ctx: DefaultContext, *args: Any, **kwds: Any) -> Process: ... - def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: - """ - Equivalent of `func(*args, **kwds)`. - Pool must be running. - """ - + def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: ... def apply_async( self, func: Callable[..., _T], @@ -72,17 +59,8 @@ class Pool: kwds: Mapping[str, Any] = {}, callback: Callable[[_T], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> AsyncResult[_T]: - """ - Asynchronous version of `apply()` method. - """ - - def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: - """ - Apply `func` to each element in `iterable`, collecting the results - in a list that is returned. - """ - + ) -> AsyncResult[_T]: ... + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... def map_async( self, func: Callable[[_S], _T], @@ -90,28 +68,10 @@ class Pool: chunksize: int | None = None, callback: Callable[[list[_T]], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> MapResult[_T]: - """ - Asynchronous version of `map()` method. - """ - - def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: - """ - Equivalent of `map()` -- can be MUCH slower than `Pool.map()`. - """ - - def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: - """ - Like `imap()` method but ordering of results is arbitrary. - """ - - def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: - """ - Like `map()` method but the elements of the `iterable` are expected to - be iterables as well and will be unpacked as arguments. Hence - `func` and (a, b) becomes func(a, b). - """ - + ) -> MapResult[_T]: ... + def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... def starmap_async( self, func: Callable[..., _T], @@ -119,11 +79,7 @@ class Pool: chunksize: int | None = None, callback: Callable[[list[_T]], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> AsyncResult[list[_T]]: - """ - Asynchronous version of `starmap()` method. - """ - + ) -> AsyncResult[list[_T]]: ... def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi index 632a7657fd616..481b9eec5a37c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -13,10 +13,6 @@ if sys.platform == "win32": WINENV: Final[bool] class Popen: - """ - Start a subprocess to run the code of a process object - """ - finalizer: Finalize method: ClassVar[str] pid: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi index f740eb50c0eb4..4d129b27b0e87 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi @@ -4,12 +4,6 @@ from typing import Any __all__ = ["BaseProcess", "current_process", "active_children", "parent_process"] class BaseProcess: - """ - Process objects represent activity that is run in a separate process - - The class is analogous to `threading.Thread` - """ - name: str daemon: bool authkey: bytes @@ -24,80 +18,22 @@ class BaseProcess: *, daemon: bool | None = None, ) -> None: ... - def run(self) -> None: - """ - Method to be run in sub-process; can be overridden in sub-class - """ - - def start(self) -> None: - """ - Start child process - """ - - def terminate(self) -> None: - """ - Terminate process; sends SIGTERM signal or uses TerminateProcess() - """ - - def kill(self) -> None: - """ - Terminate process; sends SIGKILL signal or uses TerminateProcess() - """ - - def close(self) -> None: - """ - Close the Process object. - - This method releases resources held by the Process object. It is - an error to call this method if the child process is still running. - """ - - def join(self, timeout: float | None = None) -> None: - """ - Wait until child process terminates - """ - - def is_alive(self) -> bool: - """ - Return whether process is alive - """ - + def run(self) -> None: ... + def start(self) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def close(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + def is_alive(self) -> bool: ... @property - def exitcode(self) -> int | None: - """ - Return exit code of process or `None` if it has yet to stop - """ - + def exitcode(self) -> int | None: ... @property - def ident(self) -> int | None: - """ - Return identifier (PID) of process or `None` if it has yet to start - """ - + def ident(self) -> int | None: ... @property - def pid(self) -> int | None: - """ - Return identifier (PID) of process or `None` if it has yet to start - """ - + def pid(self) -> int | None: ... @property - def sentinel(self) -> int: - """ - Return a file descriptor (Unix) or handle (Windows) suitable for - waiting for process termination. - """ - -def current_process() -> BaseProcess: - """ - Return process object representing the current process - """ - -def active_children() -> list[BaseProcess]: - """ - Return list of process objects corresponding to live child processes - """ + def sentinel(self) -> int: ... -def parent_process() -> BaseProcess | None: - """ - Return process object representing the parent process - """ +def current_process() -> BaseProcess: ... +def active_children() -> list[BaseProcess]: ... +def parent_process() -> BaseProcess | None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi index dfdeab7538166..a6b00d744c421 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi @@ -21,11 +21,7 @@ class Queue(Generic[_T]): def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... @@ -37,8 +33,4 @@ class SimpleQueue(Generic[_T]): def empty(self) -> bool: ... def get(self) -> _T: ... def put(self, obj: _T) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi index 325d472f9a599..490ae195c20e2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi @@ -18,63 +18,39 @@ else: HAVE_SEND_HANDLE: Final[bool] class ForkingPickler(pickle.Pickler): - """Pickler subclass used by multiprocessing.""" - dispatch_table: _DispatchTableType def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod - def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: - """Register a reduce function for a type.""" - + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... @classmethod def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... loads = pickle.loads register = ForkingPickler.register -def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: - """Replacement for pickle.dump() using ForkingPickler.""" +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... if sys.platform == "win32": def duplicate( handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None - ) -> int: - """Duplicate a handle. (target_process is a handle not a pid!)""" - - def steal_handle(source_pid: int, handle: int) -> int: - """Steal a handle from process identified by source_pid.""" - - def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: - """Send a handle over a local connection.""" - - def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: - """Receive a handle over a local connection.""" + ) -> int: ... + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: ... class DupHandle: - """Picklable wrapper for a handle.""" - def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... - def detach(self) -> int: - """Get the handle. 
This should only be called once.""" + def detach(self) -> int: ... else: if sys.version_info < (3, 14): ACKNOWLEDGE: Final[bool] - def recvfds(sock: socket, size: int) -> list[int]: - """Receive an array of fds over an AF_UNIX socket.""" - - def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: - """Send a handle over a local connection.""" - - def recv_handle(conn: HasFileno) -> int: - """Receive a handle over a local connection.""" - - def sendfds(sock: socket, fds: list[int]) -> None: - """Send an array of fds over an AF_UNIX socket.""" - - def DupFd(fd: int) -> Any: # Return type is really hard to get right - """Return a wrapper for an fd.""" + def recvfds(sock: socket, size: int) -> list[int]: ... + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... + def recv_handle(conn: HasFileno) -> int: ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -95,11 +71,6 @@ else: _DupFd = DupFd class AbstractReducer(metaclass=ABCMeta): - """Abstract base class for use in implementing a Reduction class - suitable for use in replacing the standard reduction mechanism - used in multiprocessing. - """ - ForkingPickler = _ForkingPickler register = _register dump = _dump diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi index fd8a166e6637c..5fee7cf31e17f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -7,21 +7,14 @@ if sys.platform == "win32": __all__ += ["DupSocket"] class DupSocket: - """Picklable wrapper for a socket.""" - def __init__(self, sock: socket) -> None: ... - def detach(self) -> socket: - """Get the socket. This should only be called once.""" + def detach(self) -> socket: ... else: __all__ += ["DupFd"] class DupFd: - """Wrapper for fd which can be used at any time.""" - def __init__(self, fd: int) -> None: ... - def detach(self) -> int: - """Get the fd. This should only be called once.""" + def detach(self) -> int: ... -def stop(timeout: float | None = None) -> None: - """Stop the background thread and clear registered resources.""" +def stop(timeout: float | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi index 0f8cc7817d484..cb2f27a628614 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -6,18 +6,9 @@ __all__ = ["ensure_running", "register", "unregister"] class ResourceTracker: def getfd(self) -> int | None: ... - def ensure_running(self) -> None: - """Make sure that resource tracker process is running. - - This can be run from any process. Usually a child process will use - the resource created by its parent. - """ - - def register(self, name: Sized, rtype: str) -> None: - """Register name of resource with resource tracker.""" - - def unregister(self, name: Sized, rtype: str) -> None: - """Unregister name of resource with resource tracker.""" + def ensure_running(self) -> None: ... 
+ def register(self, name: Sized, rtype: str) -> None: ... + def unregister(self, name: Sized, rtype: str) -> None: ... if sys.version_info >= (3, 12): def __del__(self) -> None: ... @@ -27,5 +18,4 @@ register = _resource_tracker.register unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd -def main(fd: FileDescriptorOrPath) -> None: - """Run resource tracker.""" +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi index 1d7059758de5a..f75a372a69a2d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -1,9 +1,3 @@ -"""Provides shared memory for direct access across processes. - -The API of this package is currently provisional. Refer to the -documentation for details. -""" - import sys from collections.abc import Iterable from types import GenericAlias @@ -15,71 +9,22 @@ __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: - """Creates a new shared memory block or attaches to an existing - shared memory block. - - Every shared memory block is assigned a unique name. This enables - one process to create a shared memory block with a particular name - so that a different process can attach to that same shared memory - block using that same name. - - As a resource for sharing data across processes, shared memory blocks - may outlive the original process that created them. When one process - no longer needs access to a shared memory block that might still be - needed by other processes, the close() method should be called. - When a shared memory block is no longer needed by any process, the - unlink() method should be called to ensure proper cleanup. - """ - if sys.version_info >= (3, 13): def __init__(self, name: str | None = None, create: bool = False, size: int = 0, *, track: bool = True) -> None: ... else: def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... @property - def buf(self) -> memoryview | None: - """A memoryview of contents of the shared memory block.""" - + def buf(self) -> memoryview | None: ... @property - def name(self) -> str: - """Unique name that identifies the shared memory block.""" - + def name(self) -> str: ... @property - def size(self) -> int: - """Size in bytes.""" - - def close(self) -> None: - """Closes access to the shared memory from this instance but does - not destroy the shared memory block. - """ - - def unlink(self) -> None: - """Requests that the underlying shared memory block be destroyed. - - Unlink should be called once (and only once) across all handles - which have access to the shared memory block, even if these - handles belong to different processes. Closing and unlinking may - happen in any order, but trying to access data inside a shared - memory block after unlinking may result in memory errors, - depending on platform. - - This method has no effect on Windows, where the only way to - delete a shared memory block is to close all handles. - """ - + def size(self) -> int: ... + def close(self) -> None: ... + def unlink(self) -> None: ... def __del__(self) -> None: ... class ShareableList(Generic[_SLT]): - """Pattern for a mutable list-like object shareable via a shared - memory block. 
It differs from the built-in list type in that these - lists can not change their overall length (i.e. no append, insert, - etc.) - - Because values are packed into a memoryview as bytes, the struct - packing format for any storable value must require no more than 8 - characters to describe its format. - """ - shm: SharedMemory @overload def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... @@ -90,19 +35,7 @@ class ShareableList(Generic[_SLT]): def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... def __len__(self) -> int: ... @property - def format(self) -> str: - """The struct packing format used by all currently stored items.""" - - def count(self, value: _SLT) -> int: - """L.count(value) -> integer -- return number of occurrences of value.""" - - def index(self, value: _SLT) -> int: - """L.index(value) -> integer -- return first index of value. - Raises ValueError if the value is not present. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def format(self) -> str: ... + def count(self, value: _SLT) -> int: ... + def index(self, value: _SLT) -> int: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi index 7349cb2711eb6..e2ec15f05ea23 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -13,27 +13,15 @@ _T = TypeVar("_T") _CT = TypeVar("_CT", bound=_CData) @overload -def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: - """ - Returns a ctypes object allocated from shared memory - """ - +def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: ... @overload def RawValue(typecode_or_type: str, *args: Any) -> Any: ... @overload -def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: - """ - Returns a ctypes array allocated from shared memory - """ - +def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload -def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: - """ - Return a synchronization wrapper for a Value - """ - +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... @overload def Value( typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None @@ -49,11 +37,7 @@ def Value( @overload def Array( typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None -) -> _CT: - """ - Return a synchronization wrapper for a RawArray - """ - +) -> _CT: ... 
@overload def Array( typecode_or_type: type[c_char], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi index 99f1fc3278473..4a97532228974 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi @@ -17,41 +17,16 @@ WINSERVICE: Final[bool] def set_executable(exe: str) -> None: ... def get_executable() -> str: ... -def is_forking(argv: Sequence[str]) -> bool: - """ - Return whether commandline indicates we are forking - """ - -def freeze_support() -> None: - """ - Run code for process object if this in not the main process - """ - -def get_command_line(**kwds: Any) -> list[str]: - """ - Returns prefix of command line used for spawning a child process - """ - -def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: - """ - Run code specified by data received over pipe - """ +def is_forking(argv: Sequence[str]) -> bool: ... +def freeze_support() -> None: ... +def get_command_line(**kwds: Any) -> list[str]: ... +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... # undocumented def _main(fd: int, parent_sentinel: int) -> int: ... -def get_preparation_data(name: str) -> dict[str, Any]: - """ - Return info about parent needed by child to unpickle process object - """ +def get_preparation_data(name: str) -> dict[str, Any]: ... old_main_modules: list[ModuleType] -def prepare(data: Mapping[str, Any]) -> None: - """ - Try to get current process ready to unpickle process object - """ - -def import_main_path(main_path: str) -> None: - """ - Set sys.modules['__main__'] to module at main_path - """ +def prepare(data: Mapping[str, Any]) -> None: ... +def import_main_path(main_path: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi index c3592f8bc98c0..3583194c77e29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi @@ -48,16 +48,8 @@ if sys.version_info >= (3, 14): def warn(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... -def get_logger() -> Logger: - """ - Returns logger used by multiprocessing - """ - -def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: - """ - Turn on logging and add a handler which prints to stderr - """ - +def get_logger() -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... abstract_sockets_supported: Final[bool] @@ -66,10 +58,6 @@ def get_temp_dir() -> str: ... def register_after_fork(obj: _T, func: Callable[[_T], object]) -> None: ... class Finalize(Generic[_R_co]): - """ - Class which supports object finalization using weakrefs - """ - # "args" and "kwargs" are passed as arguments to "callback". 
@overload def __init__( @@ -100,25 +88,11 @@ class Finalize(Generic[_R_co]): _finalizer_registry: MutableMapping[Incomplete, Incomplete] = {}, sub_debug: Callable[..., object] = ..., getpid: Callable[[], int] = ..., - ) -> _R_co: - """ - Run the callback unless it has already been called or cancelled - """ - - def cancel(self) -> None: - """ - Cancel finalization of the object - """ - - def still_active(self) -> bool: - """ - Return whether this finalizer is still waiting to invoke callback - """ - -def is_exiting() -> bool: - """ - Returns true if the process is shutting down - """ + ) -> _R_co: ... + def cancel(self) -> None: ... + def still_active(self) -> bool: ... + +def is_exiting() -> bool: ... class ForkAwareThreadLock: acquire: Callable[[bool, float], bool] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi index e7aa5955b8b03..480f55a46d645 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi @@ -1,5 +1,3 @@ -"""An object-oriented interface to .netrc files.""" - import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -7,8 +5,6 @@ from typing_extensions import TypeAlias __all__ = ["netrc", "NetrcParseError"] class NetrcParseError(Exception): - """Exception raised on syntax errors in the .netrc file.""" - filename: str | None lineno: int | None msg: str @@ -24,5 +20,4 @@ class netrc: hosts: dict[str, _NetrcTuple] macros: dict[str, list[str]] def __init__(self, file: StrOrBytesPath | None = None) -> None: ... - def authenticators(self, host: str) -> _NetrcTuple | None: - """Return a (user, account, password) tuple for given host.""" + def authenticators(self, host: str) -> _NetrcTuple | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi index 699a0ff4e0b96..10eef2336a834 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi @@ -1,31 +1,9 @@ -"""This module contains functions for accessing NIS maps.""" - import sys if sys.platform != "win32": - def cat(map: str, domain: str = ...) -> dict[str, str]: - """cat(map, domain = defaultdomain) - Returns the entire map as a dictionary. Optionally domain can be - specified but it defaults to the system default domain. - """ - - def get_default_domain() -> str: - """get_default_domain() -> str - Corresponds to the C library yp_get_default_domain() call, returning - the default NIS domain. - """ - - def maps(domain: str = ...) -> list[str]: - """maps(domain = defaultdomain) - Returns an array of all available NIS maps within a domain. If domain - is not specified it defaults to the system default domain. - """ - - def match(key: str, map: str, domain: str = ...) -> str: - """match(key, map, domain = defaultdomain) - Corresponds to the C library yp_match() call, returning the value of - key in the given map. Optionally domain can be specified but it - defaults to the system default domain. - """ + def cat(map: str, domain: str = ...) -> dict[str, str]: ... + def get_default_domain() -> str: ... + def maps(domain: str = ...) -> list[str]: ... + def match(key: str, map: str, domain: str = ...) -> str: ... class error(Exception): ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi index 67e82dec00e00..1fb1e79f69a1f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi @@ -1,32 +1,3 @@ -"""An NNTP client class based on: -- RFC 977: Network News Transfer Protocol -- RFC 2980: Common NNTP Extensions -- RFC 3977: Network News Transfer Protocol (version 2) - -Example: - ->>> from nntplib import NNTP ->>> s = NNTP('news') ->>> resp, count, first, last, name = s.group('comp.lang.python') ->>> print('Group', name, 'has', count, 'articles, range', first, 'to', last) -Group comp.lang.python has 51 articles, range 5770 to 5821 ->>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last)) ->>> resp = s.quit() ->>> - -Here 'resp' is the server response line. -Error responses are turned into exceptions. - -To post an article from a file: ->>> f = open(filename, 'rb') # file containing article, including header ->>> resp = s.post(f) ->>> - -For descriptions of all methods, read the comments in the code below. -Note that all arguments and return values representing article numbers -are strings, not numbers, since they are rarely used for calculations. -""" - import datetime import socket import ssl @@ -51,47 +22,29 @@ __all__ = [ _File: TypeAlias = IO[bytes] | bytes | str | None class NNTPError(Exception): - """Base class for all nntplib exceptions""" - response: str -class NNTPReplyError(NNTPError): - """Unexpected [123]xx reply""" - -class NNTPTemporaryError(NNTPError): - """4xx errors""" - -class NNTPPermanentError(NNTPError): - """5xx errors""" - -class NNTPProtocolError(NNTPError): - """Response does not begin with [1-5]""" - -class NNTPDataError(NNTPError): - """Error in response data""" +class NNTPReplyError(NNTPError): ... +class NNTPTemporaryError(NNTPError): ... +class NNTPPermanentError(NNTPError): ... +class NNTPProtocolError(NNTPError): ... +class NNTPDataError(NNTPError): ... NNTP_PORT: Final = 119 NNTP_SSL_PORT: Final = 563 class GroupInfo(NamedTuple): - """GroupInfo(group, last, first, flag)""" - group: str last: str first: str flag: str class ArticleInfo(NamedTuple): - """ArticleInfo(number, message_id, lines)""" - number: int message_id: str lines: list[bytes] -def decode_header(header_str: str) -> str: - """Takes a unicode string representing a munged header value - and decodes it as a (possibly non-ASCII) readable value. - """ +def decode_header(header_str: str) -> str: ... class NNTP: encoding: str @@ -117,245 +70,39 @@ class NNTP: readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: - """Initialize an instance. Arguments: - - host: hostname to connect to - - port: port to connect to (default the standard NNTP port) - - user: username to authenticate with - - password: password to use with username - - readermode: if true, send 'mode reader' command after - connecting. - - usenetrc: allow loading username and password from ~/.netrc file - if not specified explicitly - - timeout: timeout (in seconds) used for socket connections - - readermode is sometimes necessary if you are connecting to an - NNTP server on the local machine and intend to call - reader-specific commands, such as `group'. If you get - unexpected NNTPPermanentErrors, you might need to set - readermode. - """ - + ) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... 
- def getwelcome(self) -> str: - """Get the welcome message from the server - (this is read and squirreled away by __init__()). - If the response code is 200, posting is allowed; - if it 201, posting is not allowed. - """ - - def getcapabilities(self) -> dict[str, _list[str]]: - """Get the server capabilities, as read by __init__(). - If the CAPABILITIES command is not supported, an empty dict is - returned. - """ - - def set_debuglevel(self, level: int) -> None: - """Set the debugging level. Argument 'level' means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - - def debug(self, level: int) -> None: - """Set the debugging level. Argument 'level' means: - 0: no debugging output (default) - 1: print commands and responses but not body text etc. - 2: also print raw lines read and sent before stripping CR/LF - """ - - def capabilities(self) -> tuple[str, dict[str, _list[str]]]: - """Process a CAPABILITIES command. Not supported by all servers. - Return: - - resp: server response if successful - - caps: a dictionary mapping capability names to lists of tokens - (for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] }) - """ - - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: - """Process a NEWGROUPS command. Arguments: - - date: a date or datetime object - Return: - - resp: server response if successful - - list: list of newsgroup names - """ - - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: - """Process a NEWNEWS command. Arguments: - - group: group name or '*' - - date: a date or datetime object - Return: - - resp: server response if successful - - list: list of message ids - """ - - def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: - """Process a LIST or LIST ACTIVE command. Arguments: - - group_pattern: a pattern indicating which groups to query - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of (group, last, first, flag) (strings) - """ - - def description(self, group: str) -> str: - """Get a description for a single group. If more than one - group matches ('group' is a pattern), return the first. If no - group matches, return an empty string. - - This elides the response code from the server, since it can - only be '215' or '285' (for xgtitle) anyway. If the response - code is needed, use the 'descriptions' method. - - NOTE: This neither checks for a wildcard in 'group' nor does - it check whether the group actually exists. - """ - - def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: - """Get descriptions for a range of groups.""" - - def group(self, name: str) -> tuple[str, int, int, int, str]: - """Process a GROUP command. Argument: - - group: the group name - Returns: - - resp: server response if successful - - count: number of articles - - first: first article number - - last: last article number - - name: the group name - """ - - def help(self, *, file: _File = None) -> tuple[str, _list[str]]: - """Process a HELP command. 
Argument: - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of strings returned by the server in response to the - HELP command - """ - - def stat(self, message_spec: Any = None) -> tuple[str, int, str]: - """Process a STAT command. Argument: - - message_spec: article number or message id (if not specified, - the current article is selected) - Returns: - - resp: server response if successful - - art_num: the article number - - message_id: the message id - """ - - def next(self) -> tuple[str, int, str]: - """Process a NEXT command. No arguments. Return as for STAT.""" - - def last(self) -> tuple[str, int, str]: - """Process a LAST command. No arguments. Return as for STAT.""" - - def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """Process a HEAD command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the headers in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of header lines) - """ - - def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """Process a BODY command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the body in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of body lines) - """ - - def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: - """Process an ARTICLE command. Argument: - - message_spec: article number or message id - - file: filename string or file object to store the article in - Returns: - - resp: server response if successful - - ArticleInfo: (article number, message id, list of article lines) - """ - - def slave(self) -> str: - """Process a SLAVE command. Returns: - - resp: server response if successful - """ - - def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: - """Process an XHDR command (optional server extension). Arguments: - - hdr: the header type (e.g. 'subject') - - str: an article nr, a message id, or a range nr1-nr2 - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of (nr, value) strings - """ - - def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: - """Process an XOVER command (optional server extension) Arguments: - - start: start of range - - end: end of range - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of dicts containing the response fields - """ - + def getwelcome(self) -> str: ... + def getcapabilities(self) -> dict[str, _list[str]]: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... + def description(self, group: str) -> str: ... + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... 
+ def group(self, name: str) -> tuple[str, int, int, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... + def next(self) -> tuple[str, int, str]: ... + def last(self) -> tuple[str, int, str]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def slave(self) -> str: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... def over( self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None - ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: - """Process an OVER command. If the command isn't supported, fall - back to XOVER. Arguments: - - message_spec: - - either a message id, indicating the article to fetch - information about - - or a (start, end) tuple, indicating a range of article numbers; - if end is None, information up to the newest message will be - retrieved - - or None, indicating the current article number must be used - - file: Filename string or file object to store the result in - Returns: - - resp: server response if successful - - list: list of dicts containing the response fields - - NOTE: the "message id" form isn't supported by XOVER - """ - - def date(self) -> tuple[str, datetime.datetime]: - """Process the DATE command. - Returns: - - resp: server response if successful - - date: datetime object - """ - - def post(self, data: bytes | Iterable[bytes]) -> str: - """Process a POST command. Arguments: - - data: bytes object, iterable or file containing the article - Returns: - - resp: server response if successful - """ - - def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: - """Process an IHAVE command. Arguments: - - message_id: message-id of the article - - data: file containing the article - Returns: - - resp: server response if successful - Note that if the server refuses the article an exception is raised. - """ - - def quit(self) -> str: - """Process a QUIT command and close the socket. Returns: - - resp: server response if successful - """ - + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def date(self) -> tuple[str, datetime.datetime]: ... + def post(self, data: bytes | Iterable[bytes]) -> str: ... + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... + def quit(self) -> str: ... def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... - def starttls(self, context: ssl.SSLContext | None = None) -> None: - """Process a STARTTLS command. Arguments: - - context: SSL context to use for the encrypted connection - """ + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -370,7 +117,4 @@ class NNTP_SSL(NNTP): readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: - """This works identically to NNTP.__init__, except for the change - in default port and the `ssl_context` argument for SSL connections. - """ + ) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi index c10b791bd4683..0c87444d18f44 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi @@ -1,9 +1,3 @@ -"""This module provides access to operating system functionality that is -standardized by the C Standard and the POSIX standard (a thinly -disguised Unix interface). Refer to the library manual and -corresponding Unix manual entries for more information on calls. -""" - import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi index 337fbe46c83d4..074df075b9727 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi @@ -1,9 +1,3 @@ -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. -""" - import sys from _typeshed import BytesPath, StrOrBytesPath, StrPath from genericpath import ( @@ -126,5 +120,4 @@ else: realpath = abspath if sys.version_info >= (3, 13): - def isreserved(path: StrOrBytesPath) -> bool: - """Return true if the pathname is reserved by the system.""" + def isreserved(path: StrOrBytesPath) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi index b4341638fe5cf..014af8a0fd2ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi @@ -1,32 +1,12 @@ -"""Convert a NT pathname to a file URL and vice versa. - -This module only exists to provide OS-specific code -for urllib.requests, thus do not use directly. -""" - import sys from typing_extensions import deprecated if sys.version_info >= (3, 14): @deprecated("The `nturl2path` module is deprecated since Python 3.14.") - def url2pathname(url: str) -> str: - """OS-specific conversion from a relative URL of the 'file' scheme - to a file system path; not recommended for general use. - """ - + def url2pathname(url: str) -> str: ... @deprecated("The `nturl2path` module is deprecated since Python 3.14.") - def pathname2url(p: str) -> str: - """OS-specific conversion from a file system path to a relative URL - of the 'file' scheme; not recommended for general use. - """ + def pathname2url(p: str) -> str: ... else: - def url2pathname(url: str) -> str: - """OS-specific conversion from a relative URL of the 'file' scheme - to a file system path; not recommended for general use. - """ - - def pathname2url(p: str) -> str: - """OS-specific conversion from a file system path to a relative URL - of the 'file' scheme; not recommended for general use. - """ + def url2pathname(url: str) -> str: ... + def pathname2url(p: str) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi index dcd77c4188c55..64fb16581e952 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi @@ -1,8 +1,3 @@ -"""Abstract Base Classes (ABCs) for numbers, according to PEP 3141. - -TODO: Fill out more detailed documentation on the operators. -""" - # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. 
# @@ -66,360 +61,157 @@ class _IntegralLike(_RealLike, Protocol): ################# class Number(metaclass=ABCMeta): - """All numbers inherit from this class. - - If you just want to check if an argument x is a number, without - caring what kind, use isinstance(x, Number). - """ - __slots__ = () @abstractmethod - def __hash__(self) -> int: - """The type of the None singleton.""" + def __hash__(self) -> int: ... # See comment at the top of the file # for why some of these return types are purposefully vague class Complex(Number, _ComplexLike): - """Complex defines the operations that work on the builtin complex type. - - In short, those are: a conversion to complex, .real, .imag, +, -, - *, /, **, abs(), .conjugate, ==, and !=. - - If it is given heterogeneous arguments, and doesn't have special - knowledge about them, it should fall back to the builtin complex - type as described below. - """ - __slots__ = () @abstractmethod - def __complex__(self) -> complex: - """Return a builtin complex instance. Called for complex(self).""" - - def __bool__(self) -> bool: - """True if self != 0. Called for bool(self).""" - + def __complex__(self) -> complex: ... + def __bool__(self) -> bool: ... @property @abstractmethod - def real(self) -> _RealLike: - """Retrieve the real component of this number. - - This should subclass Real. - """ - + def real(self) -> _RealLike: ... @property @abstractmethod - def imag(self) -> _RealLike: - """Retrieve the imaginary component of this number. - - This should subclass Real. - """ - + def imag(self) -> _RealLike: ... @abstractmethod - def __add__(self, other) -> _ComplexLike: - """self + other""" - + def __add__(self, other) -> _ComplexLike: ... @abstractmethod - def __radd__(self, other) -> _ComplexLike: - """other + self""" - + def __radd__(self, other) -> _ComplexLike: ... @abstractmethod - def __neg__(self) -> _ComplexLike: - """-self""" - + def __neg__(self) -> _ComplexLike: ... @abstractmethod - def __pos__(self) -> _ComplexLike: - """+self""" - - def __sub__(self, other) -> _ComplexLike: - """self - other""" - - def __rsub__(self, other) -> _ComplexLike: - """other - self""" - + def __pos__(self) -> _ComplexLike: ... + def __sub__(self, other) -> _ComplexLike: ... + def __rsub__(self, other) -> _ComplexLike: ... @abstractmethod - def __mul__(self, other) -> _ComplexLike: - """self * other""" - + def __mul__(self, other) -> _ComplexLike: ... @abstractmethod - def __rmul__(self, other) -> _ComplexLike: - """other * self""" - + def __rmul__(self, other) -> _ComplexLike: ... @abstractmethod - def __truediv__(self, other) -> _ComplexLike: - """self / other: Should promote to float when necessary.""" - + def __truediv__(self, other) -> _ComplexLike: ... @abstractmethod - def __rtruediv__(self, other) -> _ComplexLike: - """other / self""" - + def __rtruediv__(self, other) -> _ComplexLike: ... @abstractmethod - def __pow__(self, exponent) -> _ComplexLike: - """self ** exponent; should promote to float or complex when necessary.""" - + def __pow__(self, exponent) -> _ComplexLike: ... @abstractmethod - def __rpow__(self, base) -> _ComplexLike: - """base ** self""" - + def __rpow__(self, base) -> _ComplexLike: ... @abstractmethod - def __abs__(self) -> _RealLike: - """Returns the Real distance from 0. Called for abs(self).""" - + def __abs__(self) -> _RealLike: ... @abstractmethod - def conjugate(self) -> _ComplexLike: - """(x+y*i).conjugate() returns (x-y*i).""" - + def conjugate(self) -> _ComplexLike: ... 
@abstractmethod - def __eq__(self, other: object) -> bool: - """self == other""" + def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] # See comment at the top of the file # for why some of these return types are purposefully vague class Real(Complex, _RealLike): - """To Complex, Real adds the operations that work on real numbers. - - In short, those are: a conversion to float, trunc(), divmod, - %, <, <=, >, and >=. - - Real also provides defaults for the derived operations. - """ - __slots__ = () @abstractmethod - def __float__(self) -> float: - """Any Real can be converted to a native float object. - - Called for float(self). - """ - + def __float__(self) -> float: ... @abstractmethod - def __trunc__(self) -> _IntegralLike: - """trunc(self): Truncates self to an Integral. - - Returns an Integral i such that: - * i > 0 iff self > 0; - * abs(i) <= abs(self); - * for any Integral j satisfying the first two conditions, - abs(i) >= abs(j) [i.e. i has "maximal" abs among those]. - i.e. "truncate towards 0". - """ - + def __trunc__(self) -> _IntegralLike: ... @abstractmethod - def __floor__(self) -> _IntegralLike: - """Finds the greatest Integral <= self.""" - + def __floor__(self) -> _IntegralLike: ... @abstractmethod - def __ceil__(self) -> _IntegralLike: - """Finds the least Integral >= self.""" - + def __ceil__(self) -> _IntegralLike: ... @abstractmethod @overload - def __round__(self, ndigits: None = None) -> _IntegralLike: - """Rounds self to ndigits decimal places, defaulting to 0. - - If ndigits is omitted or None, returns an Integral, otherwise - returns a Real. Rounds half toward even. - """ - + def __round__(self, ndigits: None = None) -> _IntegralLike: ... @abstractmethod @overload def __round__(self, ndigits: int) -> _RealLike: ... - def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: - """divmod(self, other): The pair (self // other, self % other). - - Sometimes this can be computed faster than the pair of - operations. - """ - - def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: - """divmod(other, self): The pair (other // self, other % self). - - Sometimes this can be computed faster than the pair of - operations. - """ - + def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: ... + def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: ... @abstractmethod - def __floordiv__(self, other) -> _RealLike: - """self // other: The floor() of self/other.""" - + def __floordiv__(self, other) -> _RealLike: ... @abstractmethod - def __rfloordiv__(self, other) -> _RealLike: - """other // self: The floor() of other/self.""" - + def __rfloordiv__(self, other) -> _RealLike: ... @abstractmethod - def __mod__(self, other) -> _RealLike: - """self % other""" - + def __mod__(self, other) -> _RealLike: ... @abstractmethod - def __rmod__(self, other) -> _RealLike: - """other % self""" - + def __rmod__(self, other) -> _RealLike: ... @abstractmethod - def __lt__(self, other) -> bool: - """self < other - - < on Reals defines a total ordering, except perhaps for NaN. - """ - + def __lt__(self, other) -> bool: ... @abstractmethod - def __le__(self, other) -> bool: - """self <= other""" - - def __complex__(self) -> complex: - """complex(self) == complex(float(self), 0)""" - + def __le__(self, other) -> bool: ... + def __complex__(self) -> complex: ... @property - def real(self) -> _RealLike: - """Real numbers are their real component.""" - + def real(self) -> _RealLike: ... 
@property - def imag(self) -> Literal[0]: - """Real numbers have no imaginary component.""" - - def conjugate(self) -> _RealLike: - """Conjugate is a no-op for Reals.""" + def imag(self) -> Literal[0]: ... + def conjugate(self) -> _RealLike: ... # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod - def __pos__(self) -> _RealLike: - """+self""" - + def __pos__(self) -> _RealLike: ... @abstractmethod - def __neg__(self) -> _RealLike: - """-self""" + def __neg__(self) -> _RealLike: ... # See comment at the top of the file # for why some of these return types are purposefully vague class Rational(Real): - """.numerator and .denominator should be in lowest terms.""" - __slots__ = () @property @abstractmethod - def numerator(self) -> _IntegralLike: - """The numerator of a rational number in lowest terms.""" - + def numerator(self) -> _IntegralLike: ... @property @abstractmethod - def denominator(self) -> _IntegralLike: - """The denominator of a rational number in lowest terms. - - This denominator should be positive. - """ - - def __float__(self) -> float: - """float(self) = self.numerator / self.denominator - - It's important that this conversion use the integer's "true" - division rather than casting one side to float before dividing - so that ratios of huge integers convert without overflowing. - - """ + def denominator(self) -> _IntegralLike: ... + def __float__(self) -> float: ... # See comment at the top of the file # for why some of these return types are purposefully vague class Integral(Rational, _IntegralLike): - """Integral adds methods that work on integral numbers. - - In short, these are conversion to int, pow with modulus, and the - bit-string operations. - """ - __slots__ = () @abstractmethod - def __int__(self) -> int: - """int(self)""" - - def __index__(self) -> int: - """Called whenever an index is needed, such as in slicing""" - + def __int__(self) -> int: ... + def __index__(self) -> int: ... @abstractmethod - def __pow__(self, exponent, modulus=None) -> _IntegralLike: - """self ** exponent % modulus, but maybe faster. - - Accept the modulus argument if you want to support the - 3-argument version of pow(). Raise a TypeError if exponent < 0 - or any argument isn't Integral. Otherwise, just implement the - 2-argument version described in Complex. - """ - + def __pow__(self, exponent, modulus=None) -> _IntegralLike: ... @abstractmethod - def __lshift__(self, other) -> _IntegralLike: - """self << other""" - + def __lshift__(self, other) -> _IntegralLike: ... @abstractmethod - def __rlshift__(self, other) -> _IntegralLike: - """other << self""" - + def __rlshift__(self, other) -> _IntegralLike: ... @abstractmethod - def __rshift__(self, other) -> _IntegralLike: - """self >> other""" - + def __rshift__(self, other) -> _IntegralLike: ... @abstractmethod - def __rrshift__(self, other) -> _IntegralLike: - """other >> self""" - + def __rrshift__(self, other) -> _IntegralLike: ... @abstractmethod - def __and__(self, other) -> _IntegralLike: - """self & other""" - + def __and__(self, other) -> _IntegralLike: ... @abstractmethod - def __rand__(self, other) -> _IntegralLike: - """other & self""" - + def __rand__(self, other) -> _IntegralLike: ... @abstractmethod - def __xor__(self, other) -> _IntegralLike: - """self ^ other""" - + def __xor__(self, other) -> _IntegralLike: ... @abstractmethod - def __rxor__(self, other) -> _IntegralLike: - """other ^ self""" - + def __rxor__(self, other) -> _IntegralLike: ... 
@abstractmethod - def __or__(self, other) -> _IntegralLike: - """self | other""" - + def __or__(self, other) -> _IntegralLike: ... @abstractmethod - def __ror__(self, other) -> _IntegralLike: - """other | self""" - + def __ror__(self, other) -> _IntegralLike: ... @abstractmethod - def __invert__(self) -> _IntegralLike: - """~self""" - - def __float__(self) -> float: - """float(self) == float(int(self))""" - + def __invert__(self) -> _IntegralLike: ... + def __float__(self) -> float: ... @property - def numerator(self) -> _IntegralLike: - """Integers are their own numerators.""" - + def numerator(self) -> _IntegralLike: ... @property - def denominator(self) -> Literal[1]: - """Integers have a denominator of 1.""" + def denominator(self) -> Literal[1]: ... # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod - def __pos__(self) -> _IntegralLike: - """+self""" - + def __pos__(self) -> _IntegralLike: ... @abstractmethod - def __neg__(self) -> _IntegralLike: - """-self""" - + def __neg__(self) -> _IntegralLike: ... @abstractmethod - def __abs__(self) -> _IntegralLike: - """Returns the Real distance from 0. Called for abs(self).""" - + def __abs__(self) -> _IntegralLike: ... @abstractmethod @overload - def __round__(self, ndigits: None = None) -> _IntegralLike: - """Rounds self to ndigits decimal places, defaulting to 0. - - If ndigits is omitted or None, returns an Integral, otherwise - returns a Real. Rounds half toward even. - """ - + def __round__(self, ndigits: None = None) -> _IntegralLike: ... @abstractmethod @overload def __round__(self, ndigits: int) -> _IntegralLike: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi index 080a968911290..ed0e96ef1cb9c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi @@ -1,8 +1,3 @@ -""" -opcode module - potentially shared between dis and other modules which -operate on bytecodes (e.g. peephole optimizers). -""" - import sys from typing import Final, Literal @@ -49,5 +44,4 @@ opmap: Final[dict[str, int]] HAVE_ARGUMENT: Final = 43 EXTENDED_ARG: Final = 69 -def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: - """Compute the stack effect of the opcode.""" +def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi index 790288021bcd5..2f919514b0b8b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi @@ -1,12 +1,3 @@ -"""Operator interface. - -This module exports a set of functions implemented in C corresponding -to the intrinsic operators of Python. For example, operator.add(x, y) -is equivalent to the expression x+y. The function names are those -used for special methods; variants without leading and trailing -'__' are also provided for convenience. -""" - import sys from _operator import ( abs as abs, @@ -191,13 +182,6 @@ if sys.version_info >= (3, 11): # them here. @final class attrgetter(Generic[_T_co]): - """Return a callable object that fetches the given attribute(s) from its operand. - After f = attrgetter('name'), the call f(r) returns r.name. - After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date). 
- After h = attrgetter('name.first', 'name.last'), the call h(r) returns - (r.name.first, r.name.last). - """ - @overload def __new__(cls, attr: str, /) -> attrgetter[Any]: ... @overload @@ -208,16 +192,10 @@ class attrgetter(Generic[_T_co]): def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ... @overload def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... - def __call__(self, obj: Any, /) -> _T_co: - """Call self as a function.""" + def __call__(self, obj: Any, /) -> _T_co: ... @final class itemgetter(Generic[_T_co]): - """Return a callable object that fetches the given item(s) from its operand. - After f = itemgetter(2), the call f(r) returns r[2]. - After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) - """ - @overload def __new__(cls, item: _T, /) -> itemgetter[_T]: ... @overload @@ -227,19 +205,13 @@ class itemgetter(Generic[_T_co]): # "tuple[int, int]" is incompatible with protocol "SupportsIndex" # preventing [_T_co, ...] instead of [Any, ...] # - # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here. - # https://github.com/python/mypy/issues/14032 - def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: - """Call self as a function.""" + # If we can't infer a literal key from __new__ (ie: `itemgetter[Literal[0]]` for `itemgetter(0)`), + # then we can't annotate __call__'s return type or it'll break on tuples + # + # These issues are best demonstrated by the `itertools.check_itertools_recipes.unique_justseen` test. + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... @final class methodcaller: - """Return a callable object that calls the given method on its operand. - After f = methodcaller('name'), the call f(r) returns r.name(). - After g = methodcaller('name', 'date', foo=1), the call g(r) returns - r.name('date', foo=1). - """ - def __new__(cls, name: str, /, *args: Any, **kwargs: Any) -> Self: ... - def __call__(self, obj: Any) -> Any: - """Call self as a function.""" + def __call__(self, obj: Any) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi index f2cd353e92b49..c522917992800 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi @@ -1,26 +1,3 @@ -"""A powerful, extensible, and easy-to-use option parser. - -By Greg Ward - -Originally distributed as Optik. - -For support, use the optik-users@lists.sourceforge.net mailing list -(http://lists.sourceforge.net/lists/listinfo/optik-users). - -Simple usage example: - - from optparse import OptionParser - - parser = OptionParser() - parser.add_option("-f", "--file", dest="filename", - help="write report to FILE", metavar="FILE") - parser.add_option("-q", "--quiet", - action="store_false", dest="verbose", default=True, - help="don't print status messages to stdout") - - (options, args) = parser.parse_args() -""" - import builtins from _typeshed import MaybeNone, SupportsWrite from abc import abstractmethod @@ -60,83 +37,21 @@ class OptParseError(Exception): def __init__(self, msg: str) -> None: ... class BadOptionError(OptParseError): - """ - Raised if an invalid option is seen on the command line. - """ - opt_str: str def __init__(self, opt_str: str) -> None: ... class AmbiguousOptionError(BadOptionError): - """ - Raised if an ambiguous option is seen on the command line. 
- """ - possibilities: Iterable[str] def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... class OptionError(OptParseError): - """ - Raised if an Option instance is created with invalid or - inconsistent arguments. - """ - option_id: str def __init__(self, msg: str, option: Option) -> None: ... -class OptionConflictError(OptionError): - """ - Raised if conflicting options are added to an OptionParser. - """ - -class OptionValueError(OptParseError): - """ - Raised if an invalid option value is encountered on the command - line. - """ +class OptionConflictError(OptionError): ... +class OptionValueError(OptParseError): ... class HelpFormatter: - """ - Abstract base class for formatting option help. OptionParser - instances should use one of the HelpFormatter subclasses for - formatting help; by default IndentedHelpFormatter is used. - - Instance attributes: - parser : OptionParser - the controlling OptionParser instance - indent_increment : int - the number of columns to indent per nesting level - max_help_position : int - the maximum starting column for option help text - help_position : int - the calculated starting column for option help text; - initially the same as the maximum - width : int - total number of columns for output (pass None to constructor for - this value to be taken from the $COLUMNS environment variable) - level : int - current indentation level - current_indent : int - current indentation level (in columns) - help_width : int - number of columns available for option help text (calculated) - default_tag : str - text to replace with each option's default value, "%default" - by default. Set to false value to disable default value expansion. - option_strings : { Option : str } - maps Option instances to the snippet of help text explaining - the syntax of that option, e.g. "-h, --help" or - "-fFILE, --file=FILE" - _short_opt_fmt : str - format string controlling how short options with values are - printed in help text. Must be either "%s%s" ("-fFILE") or - "%s %s" ("-f FILE"), because those are the two syntaxes that - Optik supports. - _long_opt_fmt : str - similar but for long options; must be either "%s %s" ("--file FILE") - or "%s=%s" ("--file=FILE"). - """ - NO_DEFAULT_VALUE: str _long_opt_fmt: str _short_opt_fmt: str @@ -161,9 +76,7 @@ class HelpFormatter: @abstractmethod def format_heading(self, heading: str) -> str: ... def format_option(self, option: Option) -> str: ... - def format_option_strings(self, option: Option) -> str: - """Return a comma-separated list of option strings & metavariables.""" - + def format_option_strings(self, option: Option) -> str: ... @abstractmethod def format_usage(self, usage: str) -> str: ... def indent(self) -> None: ... @@ -173,8 +86,6 @@ class HelpFormatter: def store_option_strings(self, parser: OptionParser) -> None: ... class IndentedHelpFormatter(HelpFormatter): - """Format help with indented section bodies.""" - def __init__( self, indent_increment: int = 2, @@ -186,8 +97,6 @@ class IndentedHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... class TitledHelpFormatter(HelpFormatter): - """Format help with underlined section headers.""" - def __init__( self, indent_increment: int = 0, @@ -199,25 +108,6 @@ class TitledHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... 
class Option: - """ - Instance attributes: - _short_opts : [string] - _long_opts : [string] - - action : string - type : string - dest : string - default : any - nargs : int - const : any - choices : [string] - callback : function - callback_args : (any*) - callback_kwargs : { string : any } - help : string - metavar : string - """ - ACTIONS: tuple[str, ...] ALWAYS_TYPED_ACTIONS: tuple[str, ...] ATTRS: list[str] @@ -282,36 +172,6 @@ class Option: make_option = Option class OptionContainer: - """ - Abstract base class. - - Class attributes: - standard_option_list : [Option] - list of standard options that will be accepted by all instances - of this parser class (intended to be overridden by subclasses). - - Instance attributes: - option_list : [Option] - the list of Option objects contained by this OptionContainer - _short_opt : { string : Option } - dictionary mapping short option strings, eg. "-f" or "-X", - to the Option instances that implement them. If an Option - has multiple short option strings, it will appear in this - dictionary multiple times. [1] - _long_opt : { string : Option } - dictionary mapping long option strings, eg. "--file" or - "--exclude", to the Option instances that implement them. - Again, a given Option can occur multiple times in this - dictionary. [1] - defaults : { string : any } - dictionary mapping option destination names to default - values for each destination [1] - - [1] These mappings are common to (shared by) all components of the - controlling OptionParser, where they are initially created. - - """ - _long_opt: dict[str, Option] _short_opt: dict[str, Option] conflict_handler: str @@ -325,11 +185,7 @@ class OptionContainer: def _create_option_mappings(self) -> None: ... def _share_option_mappings(self, parser: OptionParser) -> None: ... @overload - def add_option(self, opt: Option, /) -> Option: - """add_option(Option) - add_option(opt_str, ..., kwarg=val, ...) - """ - + def add_option(self, opt: Option, /) -> Option: ... @overload def add_option( self, @@ -351,9 +207,7 @@ class OptionContainer: **kwargs, # Allow arbitrary keyword arguments for user defined option_class ) -> Option: ... def add_options(self, option_list: Iterable[Option]) -> None: ... - def destroy(self) -> None: - """see OptionParser.destroy().""" - + def destroy(self) -> None: ... def format_option_help(self, formatter: HelpFormatter) -> str: ... def format_description(self, formatter: HelpFormatter) -> str: ... def format_help(self, formatter: HelpFormatter) -> str: ... @@ -375,21 +229,8 @@ class OptionGroup(OptionContainer): class Values: def __init__(self, defaults: Mapping[str, object] | None = None) -> None: ... def _update(self, dict: Mapping[str, object], mode: Literal["careful", "loose"]) -> None: ... - def _update_careful(self, dict: Mapping[str, object]) -> None: - """ - Update the option values from an arbitrary dictionary, but only - use keys from dict that already have a corresponding attribute - in self. Any keys in dict without a corresponding attribute - are silently ignored. - """ - - def _update_loose(self, dict: Mapping[str, object]) -> None: - """ - Update the option values from an arbitrary dictionary, - using all keys from the dictionary regardless of whether - they have a corresponding attribute in self or not. - """ - + def _update_careful(self, dict: Mapping[str, object]) -> None: ... + def _update_loose(self, dict: Mapping[str, object]) -> None: ... def ensure_value(self, attr: str, value: object) -> Any: ... 
# return type cannot be known statically def read_file(self, filename: str, mode: Literal["careful", "loose"] = "careful") -> None: ... def read_module(self, modname: str, mode: Literal["careful", "loose"] = "careful") -> None: ... @@ -402,74 +243,6 @@ class Values: def __eq__(self, other: object) -> bool: ... class OptionParser(OptionContainer): - """ - Class attributes: - standard_option_list : [Option] - list of standard options that will be accepted by all instances - of this parser class (intended to be overridden by subclasses). - - Instance attributes: - usage : string - a usage string for your program. Before it is displayed - to the user, "%prog" will be expanded to the name of - your program (self.prog or os.path.basename(sys.argv[0])). - prog : string - the name of the current program (to override - os.path.basename(sys.argv[0])). - description : string - A paragraph of text giving a brief overview of your program. - optparse reformats this paragraph to fit the current terminal - width and prints it when the user requests help (after usage, - but before the list of options). - epilog : string - paragraph of help text to print after option help - - option_groups : [OptionGroup] - list of option groups in this parser (option groups are - irrelevant for parsing the command-line, but very useful - for generating help) - - allow_interspersed_args : bool = true - if true, positional arguments may be interspersed with options. - Assuming -a and -b each take a single argument, the command-line - -ablah foo bar -bboo baz - will be interpreted the same as - -ablah -bboo -- foo bar baz - If this flag were false, that command line would be interpreted as - -ablah -- foo bar -bboo baz - -- ie. we stop processing options as soon as we see the first - non-option argument. (This is the tradition followed by - Python's getopt module, Perl's Getopt::Std, and other argument- - parsing libraries, but it is generally annoying to users.) - - process_default_values : bool = true - if true, option default values are processed similarly to option - values from the command line: that is, they are passed to the - type-checking function for the option's type (as long as the - default value is a string). (This really only matters if you - have defined custom types; see SF bug #955889.) Set it to false - to restore the behaviour of Optik 1.4.1 and earlier. - - rargs : [string] - the argument list currently being parsed. Only set when - parse_args() is active, and continually trimmed down as - we consume arguments. Mainly there for the benefit of - callback options. - largs : [string] - the list of leftover arguments that we have skipped while - parsing options. If allow_interspersed_args is false, this - list is always empty. - values : Values - the set of option values currently being accumulated. Only - set when parse_args() is active. Also mainly for callbacks. - - Because of the 'rargs', 'largs', and 'values' attributes, - OptionParser is not thread-safe. If, for some perverse reason, you - need to parse command-line arguments simultaneously in different - threads, use different OptionParser instances. - - """ - allow_interspersed_args: bool epilog: str | None formatter: HelpFormatter @@ -502,67 +275,19 @@ class OptionParser(OptionContainer): def _get_all_options(self) -> list[Option]: ... def _get_args(self, args: list[str] | None) -> list[str]: ... def _init_parsing_state(self) -> None: ... 
- def _match_long_opt(self, opt: str) -> str: - """_match_long_opt(opt : string) -> string - - Determine which long option string 'opt' matches, ie. which one - it is an unambiguous abbreviation for. Raises BadOptionError if - 'opt' doesn't unambiguously match any long option string. - """ - + def _match_long_opt(self, opt: str) -> str: ... def _populate_option_list(self, option_list: Iterable[Option] | None, add_help: bool = True) -> None: ... - def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: - """_process_args(largs : [string], - rargs : [string], - values : Values) - - Process command-line arguments and populate 'values', consuming - options and arguments from 'rargs'. If 'allow_interspersed_args' is - false, stop at the first non-option argument. If true, accumulate any - interspersed non-option arguments in 'largs'. - """ - + def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: ... def _process_long_opt(self, rargs: list[str], values: Values) -> None: ... def _process_short_opts(self, rargs: list[str], values: Values) -> None: ... @overload def add_option_group(self, opt_group: OptionGroup, /) -> OptionGroup: ... @overload def add_option_group(self, title: str, /, description: str | None = None) -> OptionGroup: ... - def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: - """ - check_values(values : Values, args : [string]) - -> (values : Values, args : [string]) - - Check that the supplied option values and leftover arguments are - valid. Returns the option values and leftover arguments - (possibly adjusted, possibly completely new -- whatever you - like). Default implementation just returns the passed-in - values; subclasses may override as desired. - """ - - def disable_interspersed_args(self) -> None: - """Set parsing to stop on the first non-option. Use this if - you have a command processor which runs another command that - has options of its own and you want to make sure these options - don't get confused. - """ - - def enable_interspersed_args(self) -> None: - """Set parsing to not stop on the first non-option, allowing - interspersing switches with command arguments. This is the - default behavior. See also disable_interspersed_args() and the - class documentation description of the attribute - allow_interspersed_args. - """ - - def error(self, msg: str) -> NoReturn: - """error(msg : string) - - Print a usage message incorporating 'msg' to stderr and exit. - If you override this in a subclass, it should not return -- it - should either exit or raise an exception. - """ - + def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: ... + def disable_interspersed_args(self) -> None: ... + def enable_interspersed_args(self) -> None: ... + def error(self, msg: str) -> NoReturn: ... def exit(self, status: int = 0, msg: str | None = None) -> NoReturn: ... def expand_prog_name(self, s: str) -> str: ... def format_epilog(self, formatter: HelpFormatter) -> str: ... @@ -573,47 +298,10 @@ class OptionParser(OptionContainer): def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... - def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: - """ - parse_args(args : [string] = sys.argv[1:], - values : Values = None) - -> (values : Values, args : [string]) - - Parse the command-line options found in 'args' (default: - sys.argv[1:]). 
Any errors result in a call to 'error()', which - by default prints the usage message to stderr and calls - sys.exit() with an error message. On success returns a pair - (values, args) where 'values' is a Values instance (with all - your option values) and 'args' is the list of arguments left - over after parsing options. - """ - - def print_usage(self, file: SupportsWrite[str] | None = None) -> None: - """print_usage(file : file = stdout) - - Print the usage message for the current program (self.usage) to - 'file' (default stdout). Any occurrence of the string "%prog" in - self.usage is replaced with the name of the current program - (basename of sys.argv[0]). Does nothing if self.usage is empty - or not defined. - """ - - def print_help(self, file: SupportsWrite[str] | None = None) -> None: - """print_help(file : file = stdout) - - Print an extended help message, listing all options and any - help text provided with them, to 'file' (default stdout). - """ - - def print_version(self, file: SupportsWrite[str] | None = None) -> None: - """print_version(file : file = stdout) - - Print the version message for this program (self.version) to - 'file' (default stdout). As with print_usage(), any occurrence - of "%prog" in self.version is replaced by the current program's - name. Does nothing if self.version is empty or undefined. - """ - + def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: ... + def print_help(self, file: SupportsWrite[str] | None = None) -> None: ... + def print_version(self, file: SupportsWrite[str] | None = None) -> None: ... def set_default(self, dest: str, value: Any) -> None: ... # default value can be "any" type def set_defaults(self, **kwargs: Any) -> None: ... # default values can be "any" type def set_process_default_values(self, process: bool) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 88f6a919a15fc..71c79dfac399f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -1,26 +1,3 @@ -"""OS routines for NT or Posix depending on what system we're on. - -This exports: - - all functions from posix or nt, e.g. unlink, stat, etc. - - os.path is either posixpath or ntpath - - os.name is either 'posix' or 'nt' - - os.curdir is a string representing the current directory (always '.') - - os.pardir is a string representing the parent directory (always '..') - - os.sep is the (or a most common) pathname separator ('/' or '\\\\') - - os.extsep is the extension separator (always '.') - - os.altsep is the alternate pathname separator (None or '/') - - os.pathsep is the component separator used in $PATH etc - - os.linesep is the line separator in text files ('\\n' or '\\r\\n') - - os.defpath is the default search path for executables - - os.devnull is the file path of the null device ('/dev/null', etc.) - -Programs that import and use 'os' stand a better chance of being -portable between different platforms. Of course, they must then -only use functions that are defined by all platforms (e.g., unlink -and opendir), and leave all pathname manipulation to os.path -(e.g., split and join). 
-""" - import sys from _typeshed import ( AnyStr_co, @@ -798,18 +775,6 @@ TMP_MAX: Final[int] # Undocumented, but used by tempfile # ----- os classes (structures) ----- @final class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): - """stat_result: Result from stat, fstat, or lstat. - - This object may be accessed either as a tuple of - (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) - or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on. - - Posix/windows: If your platform supports st_blksize, st_blocks, st_rdev, - or st_flags, they are available as attributes only. - - See os.stat for more information. - """ - # The constructor of this class takes an iterable of variable length (though it must be at least 10). # # However, this class behaves like a tuple of 10 elements, @@ -824,40 +789,23 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo __match_args__: Final = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid", "st_gid", "st_size") @property - def st_mode(self) -> int: # protection bits, - """protection bits""" - + def st_mode(self) -> int: ... # protection bits, @property - def st_ino(self) -> int: # inode number, - """inode""" - + def st_ino(self) -> int: ... # inode number, @property - def st_dev(self) -> int: # device, - """device""" - + def st_dev(self) -> int: ... # device, @property - def st_nlink(self) -> int: # number of hard links, - """number of hard links""" - + def st_nlink(self) -> int: ... # number of hard links, @property - def st_uid(self) -> int: # user id of owner, - """user ID of owner""" - + def st_uid(self) -> int: ... # user id of owner, @property - def st_gid(self) -> int: # group id of owner, - """group ID of owner""" - + def st_gid(self) -> int: ... # group id of owner, @property - def st_size(self) -> int: # size of file, in bytes, - """total size, in bytes""" - + def st_size(self) -> int: ... # size of file, in bytes, @property - def st_atime(self) -> float: # time of most recent access, - """time of last access""" - + def st_atime(self) -> float: ... # time of most recent access, @property - def st_mtime(self) -> float: # time of most recent content modification, - """time of last modification""" + def st_mtime(self) -> float: ... # time of most recent content modification, # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) if sys.version_info >= (3, 12) and sys.platform == "win32": @property @@ -866,67 +814,46 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo Use st_birthtime instead to retrieve the file creation time. \ In the future, this property will contain the last metadata change time.""" ) - def st_ctime(self) -> float: - """time of last change""" + def st_ctime(self) -> float: ... else: @property - def st_ctime(self) -> float: - """time of last change""" + def st_ctime(self) -> float: ... @property - def st_atime_ns(self) -> int: # time of most recent access, in nanoseconds - """time of last access in nanoseconds""" - + def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds @property - def st_mtime_ns(self) -> int: # time of most recent content modification in nanoseconds - """time of last modification in nanoseconds""" + def st_mtime_ns(self) -> int: ... 
# time of most recent content modification in nanoseconds # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds @property - def st_ctime_ns(self) -> int: - """time of last change in nanoseconds""" + def st_ctime_ns(self) -> int: ... if sys.platform == "win32": @property - def st_file_attributes(self) -> int: - """Windows file attribute bits""" - + def st_file_attributes(self) -> int: ... @property - def st_reparse_tag(self) -> int: - """Windows reparse tag""" + def st_reparse_tag(self) -> int: ... if sys.version_info >= (3, 12): @property - def st_birthtime(self) -> float: # time of file creation in seconds - """time of creation""" - + def st_birthtime(self) -> float: ... # time of file creation in seconds @property - def st_birthtime_ns(self) -> int: # time of file creation in nanoseconds - """time of creation in nanoseconds""" + def st_birthtime_ns(self) -> int: ... # time of file creation in nanoseconds else: @property - def st_blocks(self) -> int: # number of blocks allocated for file - """number of blocks allocated""" - + def st_blocks(self) -> int: ... # number of blocks allocated for file @property - def st_blksize(self) -> int: # filesystem blocksize - """blocksize for filesystem I/O""" - + def st_blksize(self) -> int: ... # filesystem blocksize @property - def st_rdev(self) -> int: # type of device if an inode device - """device type (if inode device)""" + def st_rdev(self) -> int: ... # type of device if an inode device if sys.platform != "linux": # These properties are available on MacOS, but not Ubuntu. # On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): @property - def st_gen(self) -> int: # file generation number - """generation number""" - + def st_gen(self) -> int: ... # file generation number @property - def st_birthtime(self) -> float: # time of file creation in seconds - """time of creation""" + def st_birthtime(self) -> float: ... # time of file creation in seconds if sys.platform == "darwin": @property - def st_flags(self) -> int: # user defined flags for file - """user defined flags for file""" + def st_flags(self) -> int: ... # user defined flags for file # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 @@ -935,29 +862,12 @@ In the future, this property will contain the last metadata change time.""" # on the allowlist for use as a Protocol starting in 3.14. @runtime_checkable class PathLike(ABC, Protocol[AnyStr_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """Abstract base class for implementing the file system path protocol.""" - __slots__ = () @abstractmethod - def __fspath__(self) -> AnyStr_co: - """Return the file system path representation of the object.""" + def __fspath__(self) -> AnyStr_co: ... @overload -def listdir(path: StrPath | None = None) -> list[str]: - """Return a list containing the names of the files in the directory. - -path can be specified as either str, bytes, or a path-like object. If path is bytes, - the filenames returned will also be bytes; in all other circumstances - the filenames returned will be str. -If path is None, uses the path='.'. -On some platforms, path may also be specified as an open file descriptor;\\ - the file descriptor must refer to a directory. 
- If this functionality is unavailable, using it raises NotImplementedError. - -The list is in arbitrary order. It does not include the special -entries '.' and '..' even if they are present in the directory. -""" - +def listdir(path: StrPath | None = None) -> list[str]: ... @overload def listdir(path: BytesPath) -> list[bytes]: ... @overload @@ -968,48 +878,21 @@ class DirEntry(Generic[AnyStr]): # The constructor is hidden @property - def name(self) -> AnyStr: - """the entry's base filename, relative to scandir() "path" argument""" - + def name(self) -> AnyStr: ... @property - def path(self) -> AnyStr: - """the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)""" - - def inode(self) -> int: - """Return inode of the entry; cached per entry.""" - - def is_dir(self, *, follow_symlinks: bool = True) -> bool: - """Return True if the entry is a directory; cached per entry.""" - - def is_file(self, *, follow_symlinks: bool = True) -> bool: - """Return True if the entry is a file; cached per entry.""" - - def is_symlink(self) -> bool: - """Return True if the entry is a symbolic link; cached per entry.""" - - def stat(self, *, follow_symlinks: bool = True) -> stat_result: - """Return stat_result object for the entry; cached per entry.""" - - def __fspath__(self) -> AnyStr: - """Returns the path for the entry.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def path(self) -> AnyStr: ... + def inode(self) -> int: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def is_symlink(self) -> bool: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def __fspath__(self) -> AnyStr: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 12): - def is_junction(self) -> bool: - """Return True if the entry is a junction; cached per entry.""" + def is_junction(self) -> bool: ... @final class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): - """statvfs_result: Result from statvfs or fstatvfs. - - This object may be accessed either as a tuple of - (bsize, frsize, blocks, bfree, bavail, files, ffree, favail, flag, namemax), - or via the attributes f_bsize, f_frsize, f_blocks, f_bfree, and so on. - - See os.statvfs for more information. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ( "f_bsize", @@ -1048,226 +931,86 @@ class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, in def f_fsid(self) -> int: ... # ----- os function stubs ----- -def fsencode(filename: StrOrBytesPath) -> bytes: - """Encode filename (an os.PathLike, bytes, or str) to the filesystem - encoding with 'surrogateescape' error handler, return bytes unchanged. - On Windows, use 'strict' error handler if the file system encoding is - 'mbcs' (which is the default encoding). - """ - -def fsdecode(filename: StrOrBytesPath) -> str: - """Decode filename (an os.PathLike, bytes, or str) from the filesystem - encoding with 'surrogateescape' error handler, return str unchanged. On - Windows, use 'strict' error handler if the file system encoding is - 'mbcs' (which is the default encoding). - """ - +def fsencode(filename: StrOrBytesPath) -> bytes: ... +def fsdecode(filename: StrOrBytesPath) -> str: ... @overload -def fspath(path: str) -> str: - """Return the file system path representation of the object. 
- - If the object is str or bytes, then allow it to pass through as-is. If the - object defines __fspath__(), then return the result of that method. All other - types raise a TypeError. - """ - +def fspath(path: str) -> str: ... @overload def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[AnyStr]) -> AnyStr: ... -def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: - """Returns the sequence of directories that will be searched for the - named executable (similar to a shell) when launching a process. - - *env* must be an environment variable dict or None. If *env* is None, - os.environ will be used. - """ - -def getlogin() -> str: - """Return the actual login name.""" - -def getpid() -> int: - """Return the current process id.""" - -def getppid() -> int: - """Return the parent's process id. - - If the parent process has already exited, Windows machines will still - return its id; others systems will return the id of the 'init' process (1). - """ - -def strerror(code: int, /) -> str: - """Translate an error code to a message string.""" - -def umask(mask: int, /) -> int: - """Set the current numeric umask and return the previous umask.""" - +def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... +def getlogin() -> str: ... +def getpid() -> int: ... +def getppid() -> int: ... +def strerror(code: int, /) -> str: ... +def umask(mask: int, /) -> int: ... @final class uname_result(structseq[str], tuple[str, str, str, str, str]): - """uname_result: Result from os.uname(). - - This object may be accessed either as a tuple of - (sysname, nodename, release, version, machine), - or via the attributes sysname, nodename, release, version, and machine. - - See os.uname for more information. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") @property - def sysname(self) -> str: - """operating system name""" - + def sysname(self) -> str: ... @property - def nodename(self) -> str: - """name of machine on network (implementation-defined)""" - + def nodename(self) -> str: ... @property - def release(self) -> str: - """operating system release""" - + def release(self) -> str: ... @property - def version(self) -> str: - """operating system version""" - + def version(self) -> str: ... @property - def machine(self) -> str: - """hardware identifier""" + def machine(self) -> str: ... if sys.platform != "win32": - def ctermid() -> str: - """Return the name of the controlling terminal for this process.""" - - def getegid() -> int: - """Return the current process's effective group id.""" - - def geteuid() -> int: - """Return the current process's effective user id.""" - - def getgid() -> int: - """Return the current process's group id.""" - - def getgrouplist(user: str, group: int, /) -> list[int]: - """Returns a list of groups to which a user belongs. - - user - username to lookup - group - base group id of the user - """ - - def getgroups() -> list[int]: # Unix only, behaves differently on Mac - """Return list of supplemental group IDs for the process.""" - - def initgroups(username: str, gid: int, /) -> None: - """Initialize the group access list. - - Call the system initgroups() to initialize the group access list with all of - the groups of which the specified username is a member, plus the specified - group id. 
- """ - - def getpgid(pid: int) -> int: - """Call the system call getpgid(), and return the result.""" - - def getpgrp() -> int: - """Return the current process group id.""" - - def getpriority(which: int, who: int) -> int: - """Return program scheduling priority.""" - - def setpriority(which: int, who: int, priority: int) -> None: - """Set program scheduling priority.""" + def ctermid() -> str: ... + def getegid() -> int: ... + def geteuid() -> int: ... + def getgid() -> int: ... + def getgrouplist(user: str, group: int, /) -> list[int]: ... + def getgroups() -> list[int]: ... # Unix only, behaves differently on Mac + def initgroups(username: str, gid: int, /) -> None: ... + def getpgid(pid: int) -> int: ... + def getpgrp() -> int: ... + def getpriority(which: int, who: int) -> int: ... + def setpriority(which: int, who: int, priority: int) -> None: ... if sys.platform != "darwin": - def getresuid() -> tuple[int, int, int]: - """Return a tuple of the current process's real, effective, and saved user ids.""" - - def getresgid() -> tuple[int, int, int]: - """Return a tuple of the current process's real, effective, and saved group ids.""" - - def getuid() -> int: - """Return the current process's user id.""" - - def setegid(egid: int, /) -> None: - """Set the current process's effective group id.""" - - def seteuid(euid: int, /) -> None: - """Set the current process's effective user id.""" - - def setgid(gid: int, /) -> None: - """Set the current process's group id.""" - - def setgroups(groups: Sequence[int], /) -> None: - """Set the groups of the current process to list.""" - - def setpgrp() -> None: - """Make the current process the leader of its process group.""" - - def setpgid(pid: int, pgrp: int, /) -> None: - """Call the system call setpgid(pid, pgrp).""" - - def setregid(rgid: int, egid: int, /) -> None: - """Set the current process's real and effective group ids.""" + def getresuid() -> tuple[int, int, int]: ... + def getresgid() -> tuple[int, int, int]: ... + + def getuid() -> int: ... + def setegid(egid: int, /) -> None: ... + def seteuid(euid: int, /) -> None: ... + def setgid(gid: int, /) -> None: ... + def setgroups(groups: Sequence[int], /) -> None: ... + def setpgrp() -> None: ... + def setpgid(pid: int, pgrp: int, /) -> None: ... + def setregid(rgid: int, egid: int, /) -> None: ... if sys.platform != "darwin": - def setresgid(rgid: int, egid: int, sgid: int, /) -> None: - """Set the current process's real, effective, and saved group ids.""" - - def setresuid(ruid: int, euid: int, suid: int, /) -> None: - """Set the current process's real, effective, and saved user ids.""" - - def setreuid(ruid: int, euid: int, /) -> None: - """Set the current process's real and effective user ids.""" - - def getsid(pid: int, /) -> int: - """Call the system call getsid(pid) and return the result.""" - - def setsid() -> None: - """Call the system call setsid().""" + def setresgid(rgid: int, egid: int, sgid: int, /) -> None: ... + def setresuid(ruid: int, euid: int, suid: int, /) -> None: ... - def setuid(uid: int, /) -> None: - """Set the current process's user id.""" - - def uname() -> uname_result: - """Return an object identifying the current operating system. - - The object behaves like a named tuple with the following fields: - (sysname, nodename, release, version, machine) - """ + def setreuid(ruid: int, euid: int, /) -> None: ... + def getsid(pid: int, /) -> int: ... + def setsid() -> None: ... + def setuid(uid: int, /) -> None: ... + def uname() -> uname_result: ... 
@overload -def getenv(key: str) -> str | None: - """Get an environment variable, return None if it doesn't exist. - The optional second argument can specify an alternate default. - key, default and the result are str. - """ - +def getenv(key: str) -> str | None: ... @overload def getenv(key: str, default: _T) -> str | _T: ... if sys.platform != "win32": @overload - def getenvb(key: bytes) -> bytes | None: - """Get an environment variable, return None if it doesn't exist. - The optional second argument can specify an alternate default. - key, default and the result are bytes. - """ - + def getenvb(key: bytes) -> bytes | None: ... @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... - def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: - """Change or add an environment variable.""" - - def unsetenv(name: StrOrBytesPath, /) -> None: - """Delete an environment variable.""" + def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: ... + def unsetenv(name: StrOrBytesPath, /) -> None: ... else: - def putenv(name: str, value: str, /) -> None: - """Change or add an environment variable.""" - - def unsetenv(name: str, /) -> None: - """Delete an environment variable.""" + def putenv(name: str, value: str, /) -> None: ... + def unsetenv(name: str, /) -> None: ... _Opener: TypeAlias = Callable[[str, int], int] @@ -1348,232 +1091,50 @@ def fdopen( closefd: bool = ..., opener: _Opener | None = ..., ) -> IO[Any]: ... -def close(fd: int) -> None: - """Close a file descriptor.""" - -def closerange(fd_low: int, fd_high: int, /) -> None: - """Closes all file descriptors in [fd_low, fd_high), ignoring errors.""" - -def device_encoding(fd: int) -> str | None: - """Return a string describing the encoding of a terminal's file descriptor. - - The file descriptor must be attached to a terminal. - If the device is not a terminal, return None. - """ - -def dup(fd: int, /) -> int: - """Return a duplicate of a file descriptor.""" - -def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: - """Duplicate file descriptor.""" - -def fstat(fd: int) -> stat_result: - """Perform a stat system call on the given file descriptor. - - Like stat(), but for an open file descriptor. - Equivalent to os.stat(fd). - """ - -def ftruncate(fd: int, length: int, /) -> None: - """Truncate a file, specified by file descriptor, to a specific length.""" - -def fsync(fd: FileDescriptorLike) -> None: - """Force write of fd to disk.""" - -def isatty(fd: int, /) -> bool: - """Return True if the fd is connected to a terminal. - - Return True if the file descriptor is an open file descriptor - connected to the slave end of a terminal. - """ +def close(fd: int) -> None: ... +def closerange(fd_low: int, fd_high: int, /) -> None: ... +def device_encoding(fd: int) -> str | None: ... +def dup(fd: int, /) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... +def fstat(fd: int) -> stat_result: ... +def ftruncate(fd: int, length: int, /) -> None: ... +def fsync(fd: FileDescriptorLike) -> None: ... +def isatty(fd: int, /) -> bool: ... if sys.platform != "win32" and sys.version_info >= (3, 11): - def login_tty(fd: int, /) -> None: - """Prepare the tty of which fd is a file descriptor for a new login session. - - Make the calling process a session leader; make the tty the - controlling tty, the stdin, the stdout, and the stderr of the - calling process; close fd. - """ + def login_tty(fd: int, /) -> None: ... 
if sys.version_info >= (3, 11): - def lseek(fd: int, position: int, whence: int, /) -> int: - """Set the position of a file descriptor. Return the new position. - - fd - An open file descriptor, as returned by os.open(). - position - Position, interpreted relative to 'whence'. - whence - The relative position to seek from. Valid values are: - - SEEK_SET: seek from the start of the file. - - SEEK_CUR: seek from the current file position. - - SEEK_END: seek from the end of the file. - - The return value is the number of bytes relative to the beginning of the file. - """ + def lseek(fd: int, position: int, whence: int, /) -> int: ... else: - def lseek(fd: int, position: int, how: int, /) -> int: - """Set the position of a file descriptor. Return the new position. - - Return the new cursor position in number of bytes - relative to the beginning of the file. - """ + def lseek(fd: int, position: int, how: int, /) -> int: ... -def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: - """Open a file for low level IO. Returns a file descriptor (integer). - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ - -def pipe() -> tuple[int, int]: - """Create a pipe. - - Returns a tuple of two file descriptors: - (read_fd, write_fd) - """ - -def read(fd: int, length: int, /) -> bytes: - """Read from a file descriptor. Returns a bytes object.""" +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... +def pipe() -> tuple[int, int]: ... +def read(fd: int, length: int, /) -> bytes: ... if sys.version_info >= (3, 12) or sys.platform != "win32": - def get_blocking(fd: int, /) -> bool: - """Get the blocking mode of the file descriptor. - - Return False if the O_NONBLOCK flag is set, True if the flag is cleared. - """ - - def set_blocking(fd: int, blocking: bool, /) -> None: - """Set the blocking mode of the specified file descriptor. - - Set the O_NONBLOCK flag if blocking is False, - clear the O_NONBLOCK flag otherwise. - """ + def get_blocking(fd: int, /) -> bool: ... + def set_blocking(fd: int, blocking: bool, /) -> None: ... if sys.platform != "win32": - def fchown(fd: int, uid: int, gid: int) -> None: - """Change the owner and group id of the file specified by file descriptor. - - Equivalent to os.chown(fd, uid, gid). - """ - - def fpathconf(fd: int, name: str | int, /) -> int: - """Return the configuration limit name for the file descriptor fd. - - If there is no limit, return -1. - """ - - def fstatvfs(fd: int, /) -> statvfs_result: - """Perform an fstatvfs system call on the given fd. - - Equivalent to statvfs(fd). - """ - - def lockf(fd: int, command: int, length: int, /) -> None: - """Apply, test or remove a POSIX lock on an open file descriptor. - - fd - An open file descriptor. - command - One of F_LOCK, F_TLOCK, F_ULOCK or F_TEST. - length - The number of bytes to lock, starting at the current position. - """ - - def openpty() -> tuple[int, int]: # some flavors of Unix - """Open a pseudo-terminal. - - Return a tuple of (master_fd, slave_fd) containing open file descriptors - for both the master and slave ends. - """ + def fchown(fd: int, uid: int, gid: int) -> None: ... + def fpathconf(fd: int, name: str | int, /) -> int: ... + def fstatvfs(fd: int, /) -> statvfs_result: ... 
+ def lockf(fd: int, command: int, length: int, /) -> None: ... + def openpty() -> tuple[int, int]: ... # some flavors of Unix if sys.platform != "darwin": - def fdatasync(fd: FileDescriptorLike) -> None: - """Force write of fd to disk without forcing update of metadata.""" - - def pipe2(flags: int, /) -> tuple[int, int]: # some flavors of Unix - """Create a pipe with flags set atomically. - - Returns a tuple of two file descriptors: - (read_fd, write_fd) - - flags can be constructed by ORing together one or more of these values: - O_NONBLOCK, O_CLOEXEC. - """ + def fdatasync(fd: FileDescriptorLike) -> None: ... + def pipe2(flags: int, /) -> tuple[int, int]: ... # some flavors of Unix + def posix_fallocate(fd: int, offset: int, length: int, /) -> None: ... + def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: ... - def posix_fallocate(fd: int, offset: int, length: int, /) -> None: - """Ensure a file has allocated at least a particular number of bytes on disk. - - Ensure that the file specified by fd encompasses a range of bytes - starting at offset bytes from the beginning and continuing for length bytes. - """ - - def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: - """Announce an intention to access data in a specific pattern. - - Announce an intention to access data in a specific pattern, thus allowing - the kernel to make optimizations. - The advice applies to the region of the file specified by fd starting at - offset and continuing for length bytes. - advice is one of POSIX_FADV_NORMAL, POSIX_FADV_SEQUENTIAL, - POSIX_FADV_RANDOM, POSIX_FADV_NOREUSE, POSIX_FADV_WILLNEED, or - POSIX_FADV_DONTNEED. - """ - - def pread(fd: int, length: int, offset: int, /) -> bytes: - """Read a number of bytes from a file descriptor starting at a particular offset. - - Read length bytes from file descriptor fd, starting at offset bytes from - the beginning of the file. The file offset remains unchanged. - """ - - def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: - """Write bytes to a file descriptor starting at a particular offset. - - Write buffer to fd, starting at offset bytes from the beginning of - the file. Returns the number of bytes written. Does not change the - current file offset. - """ + def pread(fd: int, length: int, offset: int, /) -> bytes: ... + def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: ... # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not - def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: - """Reads from a file descriptor into a number of mutable bytes-like objects. - - Combines the functionality of readv() and pread(). As readv(), it will - transfer data into each buffer until it is full and then move on to the next - buffer in the sequence to hold the rest of the data. Its fourth argument, - specifies the file offset at which the input operation is to be performed. It - will return the total number of bytes read (which can be less than the total - capacity of all the objects). - - The flags argument contains a bitwise OR of zero or more of the following flags: - - - RWF_HIPRI - - RWF_NOWAIT - - Using non-zero flags requires Linux 4.6 or newer. - """ - - def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: - """Writes the contents of bytes-like objects to a file descriptor at a given offset. 
- - Combines the functionality of writev() and pwrite(). All buffers must be a sequence - of bytes-like objects. Buffers are processed in array order. Entire contents of first - buffer is written before proceeding to second, and so on. The operating system may - set a limit (sysconf() value SC_IOV_MAX) on the number of buffers that can be used. - This function writes the contents of each object to the file descriptor and returns - the total number of bytes written. - - The flags argument contains a bitwise OR of zero or more of the following flags: - - - RWF_DSYNC - - RWF_SYNC - - RWF_APPEND - - Using non-zero flags requires Linux 4.7 or newer. - """ + def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: ... + def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: ... if sys.platform != "darwin": if sys.version_info >= (3, 10): RWF_APPEND: Final[int] # docs say available on 3.7+, stubtest says otherwise @@ -1583,8 +1144,7 @@ if sys.platform != "win32": RWF_NOWAIT: Final[int] if sys.platform == "linux": - def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: - """Copy count bytes from file descriptor in_fd to file descriptor out_fd.""" + def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: ... else: def sendfile( out_fd: FileDescriptor, @@ -1594,241 +1154,61 @@ if sys.platform != "win32": headers: Sequence[ReadableBuffer] = (), trailers: Sequence[ReadableBuffer] = (), flags: int = 0, - ) -> int: # FreeBSD and Mac OS X only - """Copy count bytes from file descriptor in_fd to file descriptor out_fd.""" - - def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: - """Read from a file descriptor fd into an iterable of buffers. - - The buffers should be mutable buffers accepting bytes. - readv will transfer data into each buffer until it is full - and then move on to the next buffer in the sequence to hold - the rest of the data. - - readv returns the total number of bytes read, - which may be less than the total capacity of all the buffers. - """ + ) -> int: ... # FreeBSD and Mac OS X only - def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: - """Iterate over buffers, and write the contents of each to a file descriptor. - - Returns the total number of bytes written. - buffers must be a sequence of bytes-like objects. - """ + def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: ... + def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: ... if sys.version_info >= (3, 14): - def readinto(fd: int, buffer: ReadableBuffer, /) -> int: - """Read into a buffer object from a file descriptor. - - The buffer should be mutable and bytes-like. On success, returns the number of - bytes read. Less bytes may be read than the size of the buffer. The underlying - system call will be retried when interrupted by a signal, unless the signal - handler raises an exception. Other errors will not be retried and an error will - be raised. - - Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 - (which can be used to check for errors without reading data). Never returns - negative. - """ + def readinto(fd: int, buffer: ReadableBuffer, /) -> int: ... 
@final class terminal_size(structseq[int], tuple[int, int]): - """A tuple of (columns, lines) for holding terminal window size""" - if sys.version_info >= (3, 10): __match_args__: Final = ("columns", "lines") @property - def columns(self) -> int: - """width of the terminal window in characters""" - + def columns(self) -> int: ... @property - def lines(self) -> int: - """height of the terminal window in characters""" - -def get_terminal_size(fd: int = ..., /) -> terminal_size: - """Return the size of the terminal window as (columns, lines). - - The optional argument fd (default standard output) specifies - which file descriptor should be queried. - - If the file descriptor is not connected to a terminal, an OSError - is thrown. - - This function will only be defined if an implementation is - available for this system. - - shutil.get_terminal_size is the high-level function which should - normally be used, os.get_terminal_size is the low-level implementation. - """ + def lines(self) -> int: ... -def get_inheritable(fd: int, /) -> bool: - """Get the close-on-exe flag of the specified file descriptor.""" - -def set_inheritable(fd: int, inheritable: bool, /) -> None: - """Set the inheritable flag of the specified file descriptor.""" +def get_terminal_size(fd: int = ..., /) -> terminal_size: ... +def get_inheritable(fd: int, /) -> bool: ... +def set_inheritable(fd: int, inheritable: bool, /) -> None: ... if sys.platform == "win32": - def get_handle_inheritable(handle: int, /) -> bool: - """Get the close-on-exe flag of the specified file descriptor.""" - - def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: - """Set the inheritable flag of the specified handle.""" + def get_handle_inheritable(handle: int, /) -> bool: ... + def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: ... if sys.platform != "win32": # Unix only - def tcgetpgrp(fd: int, /) -> int: - """Return the process group associated with the terminal specified by fd.""" - - def tcsetpgrp(fd: int, pgid: int, /) -> None: - """Set the process group associated with the terminal specified by fd.""" - - def ttyname(fd: int, /) -> str: - """Return the name of the terminal device connected to 'fd'. - - fd - Integer file descriptor handle. - """ - -def write(fd: int, data: ReadableBuffer, /) -> int: - """Write a bytes object to a file descriptor.""" + def tcgetpgrp(fd: int, /) -> int: ... + def tcsetpgrp(fd: int, pgid: int, /) -> None: ... + def ttyname(fd: int, /) -> str: ... +def write(fd: int, data: ReadableBuffer, /) -> int: ... def access( path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True -) -> bool: - """Use the real uid/gid to test for access to a path. - - path - Path to be tested; can be string, bytes, or a path-like object. - mode - Operating-system mode bitfield. Can be F_OK to test existence, - or the inclusive-OR of R_OK, W_OK, and X_OK. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that - directory. - effective_ids - If True, access will use the effective uid/gid instead of - the real uid/gid. - follow_symlinks - If False, and the last element of the path is a symbolic link, - access will examine the symbolic link itself instead of the file - the link points to. - - dir_fd, effective_ids, and follow_symlinks may not be implemented - on your platform. If they are unavailable, using them will raise a - NotImplementedError. 
- - Note that most operations will use the effective uid/gid, therefore this - routine can be used in a suid/sgid environment to test if the invoking user - has the specified access to the path. - """ - -def chdir(path: FileDescriptorOrPath) -> None: - """Change the current working directory to the specified path. - - path may always be specified as a string. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - """ +) -> bool: ... +def chdir(path: FileDescriptorOrPath) -> None: ... if sys.platform != "win32": - def fchdir(fd: FileDescriptorLike) -> None: - """Change to the directory of the given file descriptor. - - fd must be opened on a directory, not a file. - Equivalent to os.chdir(fd). - """ - -def getcwd() -> str: - """Return a unicode string representing the current working directory.""" - -def getcwdb() -> bytes: - """Return a bytes string representing the current working directory.""" - -def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: - """Change the access permissions of a file. - - path - Path to be modified. May always be specified as a str, bytes, or a path-like object. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - mode - Operating-system mode bitfield. - Be careful when using number literals for *mode*. The conventional UNIX notation for - numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in - Python. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that - directory. - follow_symlinks - If False, and the last element of the path is a symbolic link, - chmod will modify the symbolic link itself instead of the file - the link points to. - - It is an error to use dir_fd or follow_symlinks when specifying path as - an open file descriptor. - dir_fd and follow_symlinks may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. - """ - -if sys.platform != "win32" and sys.platform != "linux": - def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: # some flavors of Unix - """Set file flags. - - If follow_symlinks is False, and the last element of the path is a symbolic - link, chflags will change flags on the symbolic link itself instead of the - file the link points to. - follow_symlinks may not be implemented on your platform. If it is - unavailable, using it will raise a NotImplementedError. - """ + def fchdir(fd: FileDescriptorLike) -> None: ... - def lchflags(path: StrOrBytesPath, flags: int) -> None: - """Set file flags. +def getcwd() -> str: ... +def getcwdb() -> bytes: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... - This function will not follow symbolic links. - Equivalent to chflags(path, flags, follow_symlinks=False). - """ +if sys.platform != "win32" and sys.platform != "linux": + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix + def lchflags(path: StrOrBytesPath, flags: int) -> None: ... 
if sys.platform != "win32": - def chroot(path: StrOrBytesPath) -> None: - """Change root directory to path.""" - - def chown(path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: - """Change the owner and group id of path to the numeric uid and gid.\\ - - path - Path to be examined; can be string, bytes, a path-like object, or open-file-descriptor int. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that - directory. - follow_symlinks - If False, and the last element of the path is a symbolic link, - stat will examine the symbolic link itself instead of the file - the link points to. - -path may always be specified as a string. -On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -If follow_symlinks is False, and the last element of the path is a symbolic - link, chown will modify the symbolic link itself instead of the file the - link points to. -It is an error to use dir_fd or follow_symlinks when specifying path as - an open file descriptor. -dir_fd and follow_symlinks may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. -""" - - def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: - """Change the owner and group id of path to the numeric uid and gid. - - This function will not follow symbolic links. - Equivalent to os.chown(path, uid, gid, follow_symlinks=False). - """ + def chroot(path: StrOrBytesPath) -> None: ... + def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... def link( src: StrOrBytesPath, @@ -1837,170 +1217,31 @@ def link( src_dir_fd: int | None = None, dst_dir_fd: int | None = None, follow_symlinks: bool = True, -) -> None: - """Create a hard link to a file. - - If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. - If follow_symlinks is False, and the last element of src is a symbolic - link, link will create a link to the symbolic link itself instead of the - file the link points to. - src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your - platform. If they are unavailable, using them will raise a - NotImplementedError. - """ - -def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: - """Perform a stat system call on the given path, without following symbolic links. - - Like stat(), but do not follow symbolic links. - Equivalent to stat(path, follow_symlinks=False). - """ - -def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: - """Create a directory. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - - The mode argument is ignored on Windows. 
Where it is used, the current umask - value is first masked out. - """ +) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... if sys.platform != "win32": - def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: # Unix only - """Create a "fifo" (a POSIX named pipe). - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ - -def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: - """makedirs(name [, mode=0o777][, exist_ok=False]) - - Super-mkdir; create a leaf directory and all intermediate ones. Works like - mkdir, except that any intermediate path segment (not just the rightmost) - will be created if it does not exist. If the target directory already - exists, raise an OSError if exist_ok is False. Otherwise no exception is - raised. This is recursive. + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only - """ +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... if sys.platform != "win32": - def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: - """Create a node in the file system. - - Create a node in the file system (file, device special file or named pipe) - at path. mode specifies both the permissions to use and the - type of node to be created, being combined (bitwise OR) with one of - S_IFREG, S_IFCHR, S_IFBLK, and S_IFIFO. If S_IFCHR or S_IFBLK is set on mode, - device defines the newly created device special file (probably using - os.makedev()). Otherwise device is ignored. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ - - def major(device: int, /) -> int: - """Extracts a device major number from a raw device number.""" - - def minor(device: int, /) -> int: - """Extracts a device minor number from a raw device number.""" - - def makedev(major: int, minor: int, /) -> int: - """Composes a raw device number from the major and minor device numbers.""" - - def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: # Unix only - """Return the configuration limit name for the file or directory path. - - If there is no limit, return -1. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - """ - -def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: - """Return a string representing the path to which the symbolic link points. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - - dir_fd may not be implemented on your platform. If it is unavailable, - using it will raise a NotImplementedError. - """ - -def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: - """Remove a file (same as unlink()). 
- - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ - -def removedirs(name: StrOrBytesPath) -> None: - """removedirs(name) - - Super-rmdir; remove a leaf directory and all empty intermediate - ones. Works like rmdir except that, if the leaf directory is - successfully removed, directories corresponding to rightmost path - segments will be pruned away until either the whole path is - consumed or an error occurs. Errors during this latter phase are - ignored -- they generally mean that a directory was not empty. - - """ - -def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: - """Rename a file or directory. - - If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. - src_dir_fd and dst_dir_fd, may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. - """ - -def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: - """renames(old, new) - - Super-rename; create directories as necessary and delete any left - empty. Works like rename, except creation of any intermediate - directories needed to make the new pathname good is attempted - first. After the rename, directories corresponding to rightmost - path segments of the old name will be pruned until either the - whole path is consumed or a nonempty directory is found. - - Note: this function can fail with the new directory structure made - if you lack permissions needed to unlink the leaf directory or - file. - - """ - -def replace(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: - """Rename a file or directory, overwriting the destination. - - If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. - src_dir_fd and dst_dir_fd, may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. - """ - -def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: - """Remove a directory. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ - + def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... + def major(device: int, /) -> int: ... + def minor(device: int, /) -> int: ... + def makedev(major: int, minor: int, /) -> int: ... + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only + +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +def removedirs(name: StrOrBytesPath) -> None: ... +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... 
+def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... +def replace( + src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... @final @type_check_only class _ScandirIterator(Generic[AnyStr]): @@ -2012,86 +1253,25 @@ class _ScandirIterator(Generic[AnyStr]): def close(self) -> None: ... @overload -def scandir(path: None = None) -> _ScandirIterator[str]: - """Return an iterator of DirEntry objects for given path. - - path can be specified as either str, bytes, or a path-like object. If path - is bytes, the names of yielded DirEntry objects will also be bytes; in - all other circumstances they will be str. - - If path is None, uses the path='.'. - """ - +def scandir(path: None = None) -> _ScandirIterator[str]: ... @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: - """Perform a stat system call on the given path. - - path - Path to be examined; can be string, bytes, a path-like object or - open-file-descriptor int. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be a relative string; path will then be relative to - that directory. - follow_symlinks - If False, and the last element of the path is a symbolic link, - stat will examine the symbolic link itself instead of the file - the link points to. - - dir_fd and follow_symlinks may not be implemented - on your platform. If they are unavailable, using them will raise a - NotImplementedError. - - It's an error to use dir_fd or follow_symlinks when specifying path as - an open file descriptor. - """ +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... if sys.platform != "win32": - def statvfs(path: FileDescriptorOrPath) -> statvfs_result: # Unix only - """Perform a statvfs system call on the given path. - - path may always be specified as a string. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - """ - -def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None) -> None: - """Create a symbolic link pointing to src named dst. + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only - target_is_directory is required on Windows if the target is to be - interpreted as a directory. (On Windows, symlink requires - Windows 6.0 or greater, and raises a NotImplementedError otherwise.) - target_is_directory is ignored on non-Windows platforms. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ +def symlink( + src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None +) -> None: ... if sys.platform != "win32": - def sync() -> None: # Unix only - """Force write of everything to disk.""" - -def truncate(path: FileDescriptorOrPath, length: int) -> None: # Unix only up to version 3.4 - """Truncate a file, specified by path, to a specific length. 
- - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - """ - -def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: - """Remove a file (same as remove()). - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. - """ + def sync() -> None: ... # Unix only +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... def utime( path: FileDescriptorOrPath, times: tuple[int, int] | tuple[float, float] | None = None, @@ -2099,96 +1279,13 @@ def utime( ns: tuple[int, int] = ..., dir_fd: int | None = None, follow_symlinks: bool = True, -) -> None: - """Set the access and modified time of path. - - path may always be specified as a string. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - - If times is not None, it must be a tuple (atime, mtime); - atime and mtime should be expressed as float seconds since the epoch. - If ns is specified, it must be a tuple (atime_ns, mtime_ns); - atime_ns and mtime_ns should be expressed as integer nanoseconds - since the epoch. - If times is None and ns is unspecified, utime uses the current time. - Specifying tuples for both times and ns is an error. - - If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. - If follow_symlinks is False, and the last element of the path is a symbolic - link, utime will modify the symbolic link itself instead of the file the - link points to. - It is an error to use dir_fd or follow_symlinks when specifying path - as an open file descriptor. - dir_fd and follow_symlinks may not be available on your platform. - If they are unavailable, using them will raise a NotImplementedError. - """ +) -> None: ... _OnError: TypeAlias = Callable[[OSError], object] def walk( top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False -) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: - """Directory tree generator. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), yields a 3-tuple - - dirpath, dirnames, filenames - - dirpath is a string, the path to the directory. dirnames is a list of - the names of the subdirectories in dirpath (including symlinks to directories, - and excluding '.' and '..'). - filenames is a list of the names of the non-directory files in dirpath. - Note that the names in the lists are just names, with no path components. - To get a full path (which begins with top) to a file or directory in - dirpath, do os.path.join(dirpath, name). - - If optional arg 'topdown' is true or not specified, the triple for a - directory is generated before the triples for any of its subdirectories - (directories are generated top down). If topdown is false, the triple - for a directory is generated after the triples for all of its - subdirectories (directories are generated bottom up). 
- - When topdown is true, the caller can modify the dirnames list in-place - (e.g., via del or slice assignment), and walk will only recurse into the - subdirectories whose names remain in dirnames; this can be used to prune the - search, or to impose a specific order of visiting. Modifying dirnames when - topdown is false has no effect on the behavior of os.walk(), since the - directories in dirnames have already been generated by the time dirnames - itself is generated. No matter the value of topdown, the list of - subdirectories is retrieved before the tuples for the directory and its - subdirectories are generated. - - By default errors from the os.scandir() call are ignored. If - optional arg 'onerror' is specified, it should be a function; it - will be called with one argument, an OSError instance. It can - report the error to continue with the walk, or raise the exception - to abort the walk. Note that the filename is available as the - filename attribute of the exception object. - - By default, os.walk does not follow symbolic links to subdirectories on - systems that support them. In order to get this functionality, set the - optional argument 'followlinks' to true. - - Caution: if you pass a relative pathname for top, don't change the - current working directory between resumptions of walk. walk never - changes the current directory, and assumes that the client doesn't - either. - - Example: - - import os - from os.path import join, getsize - for root, dirs, files in os.walk('python/Lib/xml'): - print(root, "consumes ") - print(sum(getsize(join(root, name)) for name in files), end=" ") - print("bytes in", len(files), "non-directory files") - if '__pycache__' in dirs: - dirs.remove('__pycache__') # don't visit __pycache__ directories - - """ +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... if sys.platform != "win32": @overload @@ -2199,40 +1296,7 @@ if sys.platform != "win32": *, follow_symlinks: bool = False, dir_fd: int | None = None, - ) -> Iterator[tuple[str, list[str], list[str], int]]: - """Directory tree generator. - - This behaves exactly like walk(), except that it yields a 4-tuple - - dirpath, dirnames, filenames, dirfd - - `dirpath`, `dirnames` and `filenames` are identical to walk() output, - and `dirfd` is a file descriptor referring to the directory `dirpath`. - - The advantage of fwalk() over walk() is that it's safe against symlink - races (when follow_symlinks is False). - - If dir_fd is not None, it should be a file descriptor open to a directory, - and top should be relative; top will then be relative to that directory. - (dir_fd is always supported for fwalk.) - - Caution: - Since fwalk() yields file descriptors, those are only valid until the - next iteration step, so you should dup() them if you want to keep them - for a longer period. - - Example: - - import os - for root, dirs, files, rootfd in os.fwalk('python/Lib/xml'): - print(root, "consumes", end="") - print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files), - end="") - print("bytes in", len(files), "non-directory files") - if '__pycache__' in dirs: - dirs.remove('__pycache__') # don't visit __pycache__ directories - """ - + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... @overload def fwalk( top: BytesPath, @@ -2243,34 +1307,9 @@ if sys.platform != "win32": dir_fd: int | None = None, ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... 
if sys.platform == "linux": - def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: - """Return the value of extended attribute attribute on path. - - path may be either a string, a path-like object, or an open file descriptor. - If follow_symlinks is False, and the last element of the path is a symbolic - link, getxattr will examine the symbolic link itself instead of the file - the link points to. - """ - - def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: - """Return a list of extended attributes on path. - - path may be either None, a string, a path-like object, or an open file descriptor. - if path is None, listxattr will examine the current directory. - If follow_symlinks is False, and the last element of the path is a symbolic - link, listxattr will examine the symbolic link itself instead of the file - the link points to. - """ - - def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: - """Remove extended attribute attribute on path. - - path may be either a string, a path-like object, or an open file descriptor. - If follow_symlinks is False, and the last element of the path is a symbolic - link, removexattr will modify the symbolic link itself instead of the file - the link points to. - """ - + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... def setxattr( path: FileDescriptorOrPath, attribute: StrOrBytesPath, @@ -2278,52 +1317,21 @@ if sys.platform != "win32": flags: int = 0, *, follow_symlinks: bool = True, - ) -> None: - """Set extended attribute attribute on path to value. - - path may be either a string, a path-like object, or an open file descriptor. - If follow_symlinks is False, and the last element of the path is a symbolic - link, setxattr will modify the symbolic link itself instead of the file - the link points to. - """ + ) -> None: ... -def abort() -> NoReturn: - """Abort the interpreter immediately. - - This function 'dumps core' or otherwise fails in the hardest way possible - on the hosting operating system. This function never returns. - """ +def abort() -> NoReturn: ... # These are defined as execl(file, *args) but the first *arg is mandatory. -def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: - """execl(file, *args) - - Execute the executable file with argument list args, replacing the - current process. - """ - -def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: - """execlp(file, *args) - - Execute the executable file (which is searched for along $PATH) - with argument list args, replacing the current process. - """ +def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... +def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... # These are: execle(file, *args, env) but env is pulled from the last element of the args. 
-def execle(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]]) -> NoReturn: - """execle(file, *args, env) - - Execute the executable file with argument list args and - environment env, replacing the current process. - """ - -def execlpe(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]]) -> NoReturn: - """execlpe(file, *args, env) - - Execute the executable file (which is searched for along $PATH) - with argument list args and environment env, replacing the current - process. - """ +def execle( + file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] +) -> NoReturn: ... +def execlpe( + file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] +) -> NoReturn: ... # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. @@ -2346,71 +1354,19 @@ _ExecVArgs: TypeAlias = ( # we limit to str | bytes. _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] -def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: - """Execute an executable path with arguments, replacing current process. - - path - Path of executable file. - argv - Tuple or list of strings. - """ - -def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: - """Execute an executable path with arguments, replacing current process. - - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - """ - -def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: - """execvp(file, args) - - Execute the executable file (which is searched for along $PATH) - with argument list args, replacing the current process. - args may be a list or tuple of strings. - """ - -def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: - """execvpe(file, args, env) - - Execute the executable file (which is searched for along $PATH) - with argument list args and environment env, replacing the - current process. - args may be a list or tuple of strings. - """ - -def _exit(status: int) -> NoReturn: - """Exit to the system with specified status, without normal exit processing.""" - -def kill(pid: int, signal: int, /) -> None: - """Kill a process with a signal.""" +def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... +def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def _exit(status: int) -> NoReturn: ... +def kill(pid: int, signal: int, /) -> None: ... if sys.platform != "win32": # Unix only - def fork() -> int: - """Fork a child process. - - Return 0 to child process and PID of child to parent process. - """ - - def forkpty() -> tuple[int, int]: # some flavors of Unix - """Fork a new process with a new pseudo-terminal as controlling tty. - - Returns a tuple of (pid, master_fd). - Like fork(), return pid of 0 to the child process, - and pid of child to the parent process. - To both, return fd of newly opened pseudo-terminal. 
- """ - - def killpg(pgid: int, signal: int, /) -> None: - """Kill a process group with a signal.""" - - def nice(increment: int, /) -> int: - """Add increment to the priority of process and return the new priority.""" + def fork() -> int: ... + def forkpty() -> tuple[int, int]: ... # some flavors of Unix + def killpg(pgid: int, signal: int, /) -> None: ... + def nice(increment: int, /) -> int: ... if sys.platform != "darwin" and sys.platform != "linux": def plock(op: int, /) -> None: ... @@ -2435,124 +1391,36 @@ class _wrap_close: def writelines(self, lines: Iterable[str], /) -> None: ... def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... -def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: - """spawnl(mode, file, *args) -> integer - - Execute file with arguments from args in a subprocess. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - -def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise sig - """spawnle(mode, file, *args, env) -> integer - - Execute file with arguments from args in a subprocess with the - supplied environment. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ +def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... +def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig if sys.platform != "win32": - def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: - """spawnv(mode, file, args) -> integer - - Execute file with arguments from args in a subprocess. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - - def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: - """spawnve(mode, file, args, env) -> integer - - Execute file with arguments from args in a subprocess with the - specified environment. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... else: - def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: - """Execute the program specified by path in a new process. - - mode - Mode of process creation. - path - Path of executable file. - argv - Tuple or list of strings. - """ - - def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: - """Execute the program specified by path in a new process. - - mode - Mode of process creation. - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - """ - -def system(command: StrOrBytesPath) -> int: - """Execute the command in a subshell.""" + def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... 
+def system(command: StrOrBytesPath) -> int: ... @final class times_result(structseq[float], tuple[float, float, float, float, float]): - """times_result: Result from os.times(). - - This object may be accessed either as a tuple of - (user, system, children_user, children_system, elapsed), - or via the attributes user, system, children_user, children_system, - and elapsed. - - See os.times for more information. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") @property - def user(self) -> float: - """user time""" - + def user(self) -> float: ... @property - def system(self) -> float: - """system time""" - + def system(self) -> float: ... @property - def children_user(self) -> float: - """user time of children""" - + def children_user(self) -> float: ... @property - def children_system(self) -> float: - """system time of children""" - + def children_system(self) -> float: ... @property - def elapsed(self) -> float: - """elapsed time since an arbitrary point in the past""" - -def times() -> times_result: - """Return a collection containing process timing information. - - The object returned behaves like a named tuple with these fields: - (utime, stime, cutime, cstime, elapsed_time) - All fields are floating-point numbers. - """ - -def waitpid(pid: int, options: int, /) -> tuple[int, int]: - """Wait for completion of a given child process. - - Returns a tuple of information regarding the child process: - (pid, status) + def elapsed(self) -> float: ... - The options argument is ignored on Windows. - """ +def times() -> times_result: ... +def waitpid(pid: int, options: int, /) -> tuple[int, int]: ... if sys.platform == "win32": if sys.version_info >= (3, 10): @@ -2562,114 +1430,20 @@ if sys.platform == "win32": arguments: str = "", cwd: StrOrBytesPath | None = None, show_cmd: int = 1, - ) -> None: - """Start a file with its associated application. - - When "operation" is not specified or "open", this acts like - double-clicking the file in Explorer, or giving the file name as an - argument to the DOS "start" command: the file is opened with whatever - application (if any) its extension is associated. - When another "operation" is given, it specifies what should be done with - the file. A typical operation is "print". - - "arguments" is passed to the application, but should be omitted if the - file is a document. - - "cwd" is the working directory for the operation. If "filepath" is - relative, it will be resolved against this directory. This argument - should usually be an absolute path. - - "show_cmd" can be used to override the recommended visibility option. - See the Windows ShellExecute documentation for values. - - startfile returns as soon as the associated application is launched. - There is no option to wait for the application to close, and no way - to retrieve the application's exit status. - - The filepath is relative to the current directory. If you want to use - an absolute path, make sure the first character is not a slash ("/"); - the underlying Win32 ShellExecute function doesn't work if it is. - """ + ) -> None: ... else: - def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: - """Start a file with its associated application. - - When "operation" is not specified or "open", this acts like - double-clicking the file in Explorer, or giving the file name as an - argument to the DOS "start" command: the file is opened with whatever - application (if any) its extension is associated. 
- When another "operation" is given, it specifies what should be done with - the file. A typical operation is "print". - - startfile returns as soon as the associated application is launched. - There is no option to wait for the application to close, and no way - to retrieve the application's exit status. - - The filepath is relative to the current directory. If you want to use - an absolute path, make sure the first character is not a slash ("/"); - the underlying Win32 ShellExecute function doesn't work if it is. - """ + def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ... else: - def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: - """spawnlp(mode, file, *args) -> integer - - Execute file (which is looked for along $PATH) with arguments from - args in a subprocess with the supplied environment. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - - def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise signature - """spawnlpe(mode, file, *args, env) -> integer - - Execute file (which is looked for along $PATH) with arguments from - args in a subprocess with the supplied environment. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - - def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: - """spawnvp(mode, file, args) -> integer - - Execute file (which is looked for along $PATH) with arguments from - args in a subprocess. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - - def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: - """spawnvpe(mode, file, args, env) -> integer - - Execute file (which is looked for along $PATH) with arguments from - args in a subprocess with the supplied environment. - If mode == P_NOWAIT return the pid of the process. - If mode == P_WAIT return the process's exit code if it exits normally; - otherwise return -SIG, where SIG is the signal that killed it. - """ - - def wait() -> tuple[int, int]: # Unix only - """Wait for completion of a child process. - - Returns a tuple of information about the child process: - (pid, status) - """ + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def wait() -> tuple[int, int]: ... # Unix only # Added to MacOS in 3.13 if sys.platform != "darwin" or sys.version_info >= (3, 13): @final class waitid_result(structseq[int], tuple[int, int, int, int, int]): - """waitid_result: Result from waitid. - - This object may be accessed either as a tuple of - (si_pid, si_uid, si_signo, si_status, si_code), - or via the attributes si_pid, si_uid, and so on. - - See os.waitid for more information. 
- """ - if sys.version_info >= (3, 10): __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") @@ -2684,64 +1458,20 @@ else: @property def si_code(self) -> int: ... - def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: - """Returns the result of waiting for a process or processes. - - idtype - Must be one of be P_PID, P_PGID or P_ALL. - id - The id to wait on. - options - Constructed from the ORing of one or more of WEXITED, WSTOPPED - or WCONTINUED and additionally may be ORed with WNOHANG or WNOWAIT. + def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: ... - Returns either waitid_result or None if WNOHANG is specified and there are - no children in a waitable state. - """ from resource import struct_rusage - def wait3(options: int) -> tuple[int, int, struct_rusage]: - """Wait for completion of a child process. - - Returns a tuple of information about the child process: - (pid, status, rusage) - """ - - def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: - """Wait for completion of a specific child process. - - Returns a tuple of information about the child process: - (pid, status, rusage) - """ - - def WCOREDUMP(status: int, /) -> bool: - """Return True if the process returning status was dumped to a core file.""" - - def WIFCONTINUED(status: int) -> bool: - """Return True if a particular process was continued from a job control stop. - - Return True if the process returning status was continued from a - job control stop. - """ - - def WIFSTOPPED(status: int) -> bool: - """Return True if the process returning status was stopped.""" - - def WIFSIGNALED(status: int) -> bool: - """Return True if the process returning status was terminated by a signal.""" - - def WIFEXITED(status: int) -> bool: - """Return True if the process returning status exited via the exit() system call.""" - - def WEXITSTATUS(status: int) -> int: - """Return the process return code from status.""" - - def WSTOPSIG(status: int) -> int: - """Return the signal that stopped the process that provided the status value.""" - - def WTERMSIG(status: int) -> int: - """Return the signal that terminated the process that provided the status value.""" - + def wait3(options: int) -> tuple[int, int, struct_rusage]: ... + def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: ... + def WCOREDUMP(status: int, /) -> bool: ... + def WIFCONTINUED(status: int) -> bool: ... + def WIFSTOPPED(status: int) -> bool: ... + def WIFSIGNALED(status: int) -> bool: ... + def WIFEXITED(status: int) -> bool: ... + def WEXITSTATUS(status: int) -> int: ... + def WSTOPSIG(status: int) -> int: ... + def WTERMSIG(status: int) -> int: ... def posix_spawn( path: StrOrBytesPath, argv: _ExecVArgs, @@ -2755,31 +1485,7 @@ else: setsigmask: Iterable[int] = ..., setsigdef: Iterable[int] = ..., scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: - """Execute the program specified by path in a new process. - - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - file_actions - A sequence of file action tuples. - setpgroup - The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids - If the value is `true` the POSIX_SPAWN_RESETIDS will be activated. - setsid - If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. - setsigmask - The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. 
- setsigdef - The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. - scheduler - A tuple with the scheduler policy (optional) and parameters. - """ - + ) -> int: ... def posix_spawnp( path: StrOrBytesPath, argv: _ExecVArgs, @@ -2793,30 +1499,7 @@ else: setsigmask: Iterable[int] = ..., setsigdef: Iterable[int] = ..., scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: - """Execute the program specified by path in a new process. - - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - file_actions - A sequence of file action tuples. - setpgroup - The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids - If the value is `True` the POSIX_SPAWN_RESETIDS will be activated. - setsid - If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. - setsigmask - The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. - setsigdef - The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. - scheduler - A tuple with the scheduler policy (optional) and parameters. - """ + ) -> int: ... POSIX_SPAWN_OPEN: Final = 0 POSIX_SPAWN_CLOSE: Final = 1 POSIX_SPAWN_DUP2: Final = 2 @@ -2824,120 +1507,45 @@ else: if sys.platform != "win32": @final class sched_param(structseq[int], tuple[int]): - """Currently has only one field: sched_priority - - sched_priority - A scheduling parameter. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("sched_priority",) def __new__(cls, sched_priority: int) -> Self: ... @property - def sched_priority(self) -> int: - """the scheduling priority""" - - def sched_get_priority_min(policy: int) -> int: # some flavors of Unix - """Get the minimum scheduling priority for policy.""" + def sched_priority(self) -> int: ... - def sched_get_priority_max(policy: int) -> int: # some flavors of Unix - """Get the maximum scheduling priority for policy.""" - - def sched_yield() -> None: # some flavors of Unix - """Voluntarily relinquish the CPU.""" + def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix + def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix + def sched_yield() -> None: ... # some flavors of Unix if sys.platform != "darwin": - def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: # some flavors of Unix - """Set the scheduling policy for the process identified by pid. - - If pid is 0, the calling process is changed. - param is an instance of sched_param. - """ - - def sched_getscheduler(pid: int, /) -> int: # some flavors of Unix - """Get the scheduling policy for the process identified by pid. - - Passing 0 for pid returns the scheduling policy for the calling process. - """ + def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: ... # some flavors of Unix + def sched_getscheduler(pid: int, /) -> int: ... # some flavors of Unix + def sched_rr_get_interval(pid: int, /) -> float: ... # some flavors of Unix + def sched_setparam(pid: int, param: sched_param, /) -> None: ... # some flavors of Unix + def sched_getparam(pid: int, /) -> sched_param: ... # some flavors of Unix + def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: ... # some flavors of Unix + def sched_getaffinity(pid: int, /) -> set[int]: ... # some flavors of Unix - def sched_rr_get_interval(pid: int, /) -> float: # some flavors of Unix - """Return the round-robin quantum for the process identified by pid, in seconds. - - Value returned is a float. 
- """ - - def sched_setparam(pid: int, param: sched_param, /) -> None: # some flavors of Unix - """Set scheduling parameters for the process identified by pid. - - If pid is 0, sets parameters for the calling process. - param should be an instance of sched_param. - """ - - def sched_getparam(pid: int, /) -> sched_param: # some flavors of Unix - """Returns scheduling parameters for the process identified by pid. - - If pid is 0, returns parameters for the calling process. - Return value is an instance of sched_param. - """ - - def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: # some flavors of Unix - """Set the CPU affinity of the process identified by pid to mask. - - mask should be an iterable of integers identifying CPUs. - """ - - def sched_getaffinity(pid: int, /) -> set[int]: # some flavors of Unix - """Return the affinity of the process identified by pid (or the current process if zero). - - The affinity is returned as a set of CPU identifiers. - """ - -def cpu_count() -> int | None: - """Return the number of logical CPUs in the system. - - Return None if indeterminable. - """ +def cpu_count() -> int | None: ... if sys.version_info >= (3, 13): # Documented to return `int | None`, but falls back to `len(sched_getaffinity(0))` when # available. See https://github.com/python/cpython/blob/417c130/Lib/os.py#L1175-L1186. if sys.platform != "win32" and sys.platform != "darwin": - def process_cpu_count() -> int: - """ - Get the number of CPUs of the current process. - - Return the number of logical CPUs usable by the calling thread of the - current process. Return None if indeterminable. - """ + def process_cpu_count() -> int: ... else: - def process_cpu_count() -> int | None: - """Return the number of logical CPUs in the system. - - Return None if indeterminable. - """ + def process_cpu_count() -> int | None: ... if sys.platform != "win32": # Unix only - def confstr(name: str | int, /) -> str | None: - """Return a string-valued system configuration variable.""" - - def getloadavg() -> tuple[float, float, float]: - """Return average recent system load information. - - Return the number of processes in the system run queue averaged over - the last 1, 5, and 15 minutes as a tuple of three floats. - Raises OSError if the load average was unobtainable. - """ - - def sysconf(name: str | int, /) -> int: - """Return an integer-valued system configuration variable.""" + def confstr(name: str | int, /) -> str | None: ... + def getloadavg() -> tuple[float, float, float]: ... + def sysconf(name: str | int, /) -> int: ... if sys.platform == "linux": - def getrandom(size: int, flags: int = 0) -> bytes: - """Obtain a series of random bytes.""" + def getrandom(size: int, flags: int = 0) -> bytes: ... -def urandom(size: int, /) -> bytes: - """Return a bytes object containing random bytes suitable for cryptographic use.""" +def urandom(size: int, /) -> bytes: ... if sys.platform != "win32": def register_at_fork( @@ -2945,19 +1553,7 @@ if sys.platform != "win32": before: Callable[..., Any] | None = ..., after_in_parent: Callable[..., Any] | None = ..., after_in_child: Callable[..., Any] | None = ..., - ) -> None: - """Register callables to be called when forking a new process. - - before - A callable to be called in the parent before the fork() syscall. - after_in_child - A callable to be called in the child after fork(). - after_in_parent - A callable to be called in the parent after fork(). - - 'before' callbacks are called in reverse order. 
- 'after_in_child' and 'after_in_parent' callbacks are called in order. - """ + ) -> None: ... if sys.platform == "win32": class _AddedDllDirectory: @@ -2967,16 +1563,7 @@ if sys.platform == "win32": def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def add_dll_directory(path: str) -> _AddedDllDirectory: - """Add a path to the DLL search path. - - This search path is used when resolving dependencies for imported - extension modules (the module itself is resolved through sys.path), - and also by ctypes. - - Remove the directory by calling close() on the returned object or - using it in a with statement. - """ + def add_dll_directory(path: str) -> _AddedDllDirectory: ... if sys.platform == "linux": MFD_CLOEXEC: Final[int] @@ -2997,69 +1584,20 @@ if sys.platform == "linux": MFD_HUGE_2GB: Final[int] MFD_HUGE_16GB: Final[int] def memfd_create(name: str, flags: int = ...) -> int: ... - def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: - """Copy count bytes from one file descriptor to another. - - src - Source file descriptor. - dst - Destination file descriptor. - count - Number of bytes to copy. - offset_src - Starting offset in src. - offset_dst - Starting offset in dst. - - If offset_src is None, then src is read from the current position; - respectively for offset_dst. - """ - -def waitstatus_to_exitcode(status: int) -> int: - """Convert a wait status to an exit code. + def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: ... - On Unix: - - * If WIFEXITED(status) is true, return WEXITSTATUS(status). - * If WIFSIGNALED(status) is true, return -WTERMSIG(status). - * Otherwise, raise a ValueError. - - On Windows, return status shifted right by 8 bits. - - On Unix, if the process is being traced or if waitpid() was called with - WUNTRACED option, the caller must first check if WIFSTOPPED(status) is true. - This function must not be called if WIFSTOPPED(status) is true. - """ +def waitstatus_to_exitcode(status: int) -> int: ... if sys.platform == "linux": - def pidfd_open(pid: int, flags: int = ...) -> int: - """Return a file descriptor referring to the process *pid*. - - The descriptor can be used to perform process management without races and - signals. - """ + def pidfd_open(pid: int, flags: int = ...) -> int: ... if sys.version_info >= (3, 12) and sys.platform == "linux": PIDFD_NONBLOCK: Final = 2048 if sys.version_info >= (3, 12) and sys.platform == "win32": - def listdrives() -> list[str]: - """Return a list containing the names of drives in the system. - - A drive name typically looks like 'C:\\\\'. - """ - - def listmounts(volume: str) -> list[str]: - """Return a list containing mount points for a particular volume. - - 'volume' should be a GUID path as returned from os.listvolumes. - """ - - def listvolumes() -> list[str]: - """Return a list containing the volumes in the system. - - Volumes are typically represented as a GUID path. - """ + def listdrives() -> list[str]: ... + def listmounts(volume: str) -> list[str]: ... + def listvolumes() -> list[str]: ... 
if sys.version_info >= (3, 10) and sys.platform == "linux": EFD_CLOEXEC: Final[int] @@ -3068,15 +1606,9 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": SPLICE_F_MORE: Final[int] SPLICE_F_MOVE: Final[int] SPLICE_F_NONBLOCK: Final[int] - def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: - """Creates and returns an event notification file descriptor.""" - - def eventfd_read(fd: FileDescriptor) -> int: - """Read eventfd value""" - - def eventfd_write(fd: FileDescriptor, value: int) -> None: - """Write eventfd value.""" - + def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: ... + def eventfd_read(fd: FileDescriptor) -> int: ... + def eventfd_write(fd: FileDescriptor, value: int) -> None: ... def splice( src: FileDescriptor, dst: FileDescriptor, @@ -3084,26 +1616,7 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": offset_src: int | None = ..., offset_dst: int | None = ..., flags: int = 0, - ) -> int: - """Transfer count bytes from one pipe to a descriptor or vice versa. - - src - Source file descriptor. - dst - Destination file descriptor. - count - Number of bytes to copy. - offset_src - Starting offset in src. - offset_dst - Starting offset in dst. - flags - Flags to modify the semantics of the call. - - If offset_src is None, then src is read from the current position; - respectively for offset_dst. The offset associated to the file - descriptor that refers to a pipe must be None. - """ + ) -> int: ... if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_FILES: Final[int] @@ -3120,58 +1633,14 @@ if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_SYSVSEM: Final[int] # Linux 2.6.26+ CLONE_THREAD: Final[int] CLONE_VM: Final[int] - def unshare(flags: int) -> None: - """Disassociate parts of a process (or thread) execution context. - - flags - Namespaces to be unshared. - """ - - def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: - """Move the calling thread into different namespaces. - - fd - A file descriptor to a namespace. - nstype - Type of namespace. - """ + def unshare(flags: int) -> None: ... + def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ... if sys.version_info >= (3, 13) and sys.platform != "win32": - def posix_openpt(oflag: int, /) -> int: - """Open and return a file descriptor for a master pseudo-terminal device. - - Performs a posix_openpt() C function call. The oflag argument is used to - set file status flags and file access modes as specified in the manual page - of posix_openpt() of your system. - """ - - def grantpt(fd: FileDescriptorLike, /) -> None: - """Grant access to the slave pseudo-terminal device. - - fd - File descriptor of a master pseudo-terminal device. - - Performs a grantpt() C function call. - """ - - def unlockpt(fd: FileDescriptorLike, /) -> None: - """Unlock a pseudo-terminal master/slave pair. - - fd - File descriptor of a master pseudo-terminal device. - - Performs an unlockpt() C function call. - """ - - def ptsname(fd: FileDescriptorLike, /) -> str: - """Return the name of the slave pseudo-terminal device. - - fd - File descriptor of a master pseudo-terminal device. - - If the ptsname_r() C function is available, it is called; - otherwise, performs a ptsname() C function call. - """ + def posix_openpt(oflag: int, /) -> int: ... + def grantpt(fd: FileDescriptorLike, /) -> None: ... + def unlockpt(fd: FileDescriptorLike, /) -> None: ... + def ptsname(fd: FileDescriptorLike, /) -> str: ... 
if sys.version_info >= (3, 13) and sys.platform == "linux": TFD_TIMER_ABSTIME: Final = 1 @@ -3180,90 +1649,19 @@ if sys.version_info >= (3, 13) and sys.platform == "linux": TFD_CLOEXEC: Final[int] POSIX_SPAWN_CLOSEFROM: Final[int] - def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: - """Create and return a timer file descriptor. - - clockid - A valid clock ID constant as timer file descriptor. - - time.CLOCK_REALTIME - time.CLOCK_MONOTONIC - time.CLOCK_BOOTTIME - flags - 0 or a bit mask of os.TFD_NONBLOCK or os.TFD_CLOEXEC. - - os.TFD_NONBLOCK - If *TFD_NONBLOCK* is set as a flag, read doesn't blocks. - If *TFD_NONBLOCK* is not set as a flag, read block until the timer fires. - - os.TFD_CLOEXEC - If *TFD_CLOEXEC* is set as a flag, enable the close-on-exec flag - """ - + def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: ... def timerfd_settime( fd: FileDescriptor, /, *, flags: int = 0, initial: float = 0.0, interval: float = 0.0 - ) -> tuple[float, float]: - """Alter a timer file descriptor's internal timer in seconds. - - fd - A timer file descriptor. - flags - 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. - initial - The initial expiration time, in seconds. - interval - The timer's interval, in seconds. - """ - - def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: - """Alter a timer file descriptor's internal timer in nanoseconds. - - fd - A timer file descriptor. - flags - 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. - initial - initial expiration timing in seconds. - interval - interval for the timer in seconds. - """ - - def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: - """Return a tuple of a timer file descriptor's (interval, next expiration) in float seconds. - - fd - A timer file descriptor. - """ - - def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: - """Return a tuple of a timer file descriptor's (interval, next expiration) in nanoseconds. - - fd - A timer file descriptor. - """ + ) -> tuple[float, float]: ... + def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: ... + def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: ... + def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: ... if sys.version_info >= (3, 13) or sys.platform != "win32": # Added to Windows in 3.13. - def fchmod(fd: int, mode: int) -> None: - """Change the access permissions of the file given by file descriptor fd. - - fd - The file descriptor of the file to be modified. - mode - Operating-system mode bitfield. - Be careful when using number literals for *mode*. The conventional UNIX notation for - numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in - Python. - - Equivalent to os.chmod(fd, mode). - """ + def fchmod(fd: int, mode: int) -> None: ... if sys.platform != "linux": if sys.version_info >= (3, 13) or sys.platform != "win32": # Added to Windows in 3.13. - def lchmod(path: StrOrBytesPath, mode: int) -> None: - """Change the access permissions of a file, without following symbolic links. - - If path is a symlink, this affects the link itself rather than the target. - Equivalent to chmod(path, mode, follow_symlinks=False)." - """ + def lchmod(path: StrOrBytesPath, mode: int) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi index 4ba2953ca212d..dc688a9f877f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi @@ -1,15 +1,3 @@ -"""Common operations on Posix pathnames. - -Instead of importing this module directly, import os and refer to -this module as os.path. The "os.path" name is an alias for this -module on Posix systems; on other systems (e.g. Windows), -os.path provides the same operations in a manner specific to that -platform, and is an alias to another module (e.g. ntpath). - -Some of this can actually be useful on non-Posix systems too, e.g. -for manipulation of the pathname component of URLs. -""" - import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi index 0c616fefc9053..26140c76248ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi @@ -1,55 +1,25 @@ -"""This is an interface to Python's internal parser.""" - from _typeshed import StrOrBytesPath from collections.abc import Sequence from types import CodeType from typing import Any, ClassVar, final -def expr(source: str) -> STType: - """Creates an ST object from an expression.""" - -def suite(source: str) -> STType: - """Creates an ST object from a suite.""" - -def sequence2st(sequence: Sequence[Any]) -> STType: - """Creates an ST object from a tree representation.""" - -def tuple2st(sequence: Sequence[Any]) -> STType: - """Creates an ST object from a tree representation.""" - -def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: - """Creates a list-tree representation of an ST.""" - -def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: - """Creates a tuple-tree representation of an ST.""" - -def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: - """Compiles an ST object into a code object.""" - -def isexpr(st: STType) -> bool: - """Determines if an ST object was created from an expression.""" - -def issuite(st: STType) -> bool: - """Determines if an ST object was created from a suite.""" +def expr(source: str) -> STType: ... +def suite(source: str) -> STType: ... +def sequence2st(sequence: Sequence[Any]) -> STType: ... +def tuple2st(sequence: Sequence[Any]) -> STType: ... +def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... +def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... +def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... +def isexpr(st: STType) -> bool: ... +def issuite(st: STType) -> bool: ... class ParserError(Exception): ... @final class STType: - """Intermediate representation of a Python parse tree.""" - __hash__: ClassVar[None] # type: ignore[assignment] - def compile(self, filename: StrOrBytesPath = ...) -> CodeType: - """Compile this ST object into a code object.""" - - def isexpr(self) -> bool: - """Determines if this ST object was created from an expression.""" - - def issuite(self) -> bool: - """Determines if this ST object was created from a suite.""" - - def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: - """Creates a list-tree representation of this ST.""" - - def totuple(self, line_info: bool = ..., col_info: bool = ...) 
-> tuple[Any, ...]: - """Creates a tuple-tree representation of this ST.""" + def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... + def isexpr(self) -> bool: ... + def issuite(self) -> bool: ... + def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... + def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi index bfd6d681e68cf..fa5143f202927 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi @@ -1,10 +1,3 @@ -"""Object-oriented filesystem paths. - -This module provides classes to represent abstract paths and concrete -paths with operations that have semantics appropriate for different -operating systems. -""" - import sys import types from _typeshed import ( @@ -36,15 +29,6 @@ if sys.version_info >= (3, 13): __all__ += ["UnsupportedOperation"] class PurePath(PathLike[str]): - """Base class for manipulating paths without I/O. - - PurePath represents a filesystem path and offers operations which - don't imply any actual filesystem I/O. Depending on your system, - instantiating a PurePath will return either a PurePosixPath or a - PureWindowsPath object. You can also instantiate either of these classes - directly, regardless of your system. - """ - if sys.version_info >= (3, 13): __slots__ = ( "_raw_paths", @@ -72,69 +56,29 @@ class PurePath(PathLike[str]): __slots__ = ("_drv", "_root", "_parts", "_str", "_hash", "_pparts", "_cached_cparts") if sys.version_info >= (3, 13): parser: ClassVar[types.ModuleType] - def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: - """ - Return True if this path matches the given glob-style pattern. The - pattern is matched against the entire path. - """ + def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: ... @property - def parts(self) -> tuple[str, ...]: - """An object providing sequence-like access to the - components in the filesystem path. - """ - + def parts(self) -> tuple[str, ...]: ... @property - def drive(self) -> str: - """The drive prefix (letter or UNC path), if any.""" - + def drive(self) -> str: ... @property - def root(self) -> str: - """The root of the path, if any.""" - + def root(self) -> str: ... @property - def anchor(self) -> str: - """The concatenation of the drive and root, or ''.""" - + def anchor(self) -> str: ... @property - def name(self) -> str: - """The final path component, if any.""" - + def name(self) -> str: ... @property - def suffix(self) -> str: - """ - The final component's last suffix, if any. - - This includes the leading period. For example: '.txt' - """ - + def suffix(self) -> str: ... @property - def suffixes(self) -> list[str]: - """ - A list of the final component's suffixes, if any. - - These include the leading periods. For example: ['.tar', '.gz'] - """ - + def suffixes(self) -> list[str]: ... @property - def stem(self) -> str: - """The final path component, minus its last suffix.""" + def stem(self) -> str: ... if sys.version_info >= (3, 12): - def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: - """Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. 
- """ - + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... def __init__(self, *args: StrPath) -> None: ... # pyright: ignore[reportInconsistentConstructor] else: - def __new__(cls, *args: StrPath) -> Self: - """Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. - """ + def __new__(cls, *args: StrPath) -> Self: ... def __hash__(self) -> int: ... def __fspath__(self) -> str: ... @@ -144,148 +88,58 @@ class PurePath(PathLike[str]): def __ge__(self, other: PurePath) -> bool: ... def __truediv__(self, key: StrPath) -> Self: ... def __rtruediv__(self, key: StrPath) -> Self: ... - def __bytes__(self) -> bytes: - """Return the bytes representation of the path. This is only - recommended to use under Unix. - """ - - def as_posix(self) -> str: - """Return the string representation of the path with forward (/) - slashes. - """ - - def as_uri(self) -> str: - """Return the path as a URI.""" - - def is_absolute(self) -> bool: - """True if the path is absolute (has both a root and, if applicable, - a drive). - """ + def __bytes__(self) -> bytes: ... + def as_posix(self) -> str: ... + def as_uri(self) -> str: ... + def is_absolute(self) -> bool: ... if sys.version_info >= (3, 13): @deprecated( "Deprecated since Python 3.13; will be removed in Python 3.15. " "Use `os.path.isreserved()` to detect reserved paths on Windows." ) - def is_reserved(self) -> bool: - """Return True if the path contains one of the special names reserved - by the system, if any. - """ + def is_reserved(self) -> bool: ... else: - def is_reserved(self) -> bool: - """Return True if the path contains one of the special names reserved - by the system, if any. - """ + def is_reserved(self) -> bool: ... if sys.version_info >= (3, 14): - def is_relative_to(self, other: StrPath) -> bool: - """Return True if the path is relative to another path or False.""" + def is_relative_to(self, other: StrPath) -> bool: ... elif sys.version_info >= (3, 12): - def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: - """Return True if the path is relative to another path or False.""" + def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... else: - def is_relative_to(self, *other: StrPath) -> bool: - """Return True if the path is relative to another path or False.""" + def is_relative_to(self, *other: StrPath) -> bool: ... + if sys.version_info >= (3, 12): - def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: - """ - Return True if this path matches the given pattern. If the pattern is - relative, matching is done from the right; otherwise, the entire path - is matched. The recursive wildcard '**' is *not* supported by this - method. - """ + def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: ... else: - def match(self, path_pattern: str) -> bool: - """ - Return True if this path matches the given pattern. - """ + def match(self, path_pattern: str) -> bool: ... + if sys.version_info >= (3, 14): - def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: - """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - related to the other path), raise ValueError. - - The *walk_up* parameter controls whether `..` may be used to resolve - the path. 
- """ + def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: ... elif sys.version_info >= (3, 12): - def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: - """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - related to the other path), raise ValueError. - - The *walk_up* parameter controls whether `..` may be used to resolve - the path. - """ + def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... else: - def relative_to(self, *other: StrPath) -> Self: - """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. - """ - - def with_name(self, name: str) -> Self: - """Return a new path with the file name changed.""" - - def with_stem(self, stem: str) -> Self: - """Return a new path with the stem changed.""" - - def with_suffix(self, suffix: str) -> Self: - """Return a new path with the file suffix changed. If the path - has no suffix, add given suffix. If the given suffix is an empty - string, remove the suffix from the path. - """ - - def joinpath(self, *other: StrPath) -> Self: - """Combine this path with one or several arguments, and return a - new path representing either a subpath (if all arguments are relative - paths) or a totally different path (if one of the arguments is - anchored). - """ + def relative_to(self, *other: StrPath) -> Self: ... + def with_name(self, name: str) -> Self: ... + def with_stem(self, stem: str) -> Self: ... + def with_suffix(self, suffix: str) -> Self: ... + def joinpath(self, *other: StrPath) -> Self: ... @property - def parents(self) -> Sequence[Self]: - """A sequence of this path's logical parents.""" - + def parents(self) -> Sequence[Self]: ... @property - def parent(self) -> Self: - """The logical parent of the path.""" + def parent(self) -> Self: ... if sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... if sys.version_info >= (3, 12): - def with_segments(self, *args: StrPath) -> Self: - """Construct a new path object from any number of path-like objects. - Subclasses may override this method to customize how new path objects - are created from methods like `iterdir()`. - """ + def with_segments(self, *args: StrPath) -> Self: ... class PurePosixPath(PurePath): - """PurePath subclass for non-Windows systems. - - On a POSIX system, instantiating a PurePath should return this object. - However, you can also instantiate it directly on any system. - """ - __slots__ = () class PureWindowsPath(PurePath): - """PurePath subclass for Windows systems. - - On a Windows system, instantiating a PurePath should return this object. - However, you can also instantiate it directly on any system. - """ - __slots__ = () class Path(PurePath): - """PurePath subclass that can make system calls. - - Path represents a filesystem path but unlike PurePath, also offers - methods to do system calls on path objects. Depending on your system, - instantiating a Path will return either a PosixPath or a WindowsPath - object. You can also instantiate a PosixPath or WindowsPath directly, - but cannot instantiate a WindowsPath on a POSIX system or vice versa. 
- """ - if sys.version_info >= (3, 14): __slots__ = ("_info",) elif sys.version_info >= (3, 10): @@ -299,206 +153,74 @@ class Path(PurePath): def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... @classmethod - def cwd(cls) -> Self: - """Return a new path pointing to the current working directory.""" + def cwd(cls) -> Self: ... if sys.version_info >= (3, 10): - def stat(self, *, follow_symlinks: bool = True) -> stat_result: - """ - Return the result of the stat() system call on this path, like - os.stat() does. - """ - - def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: - """ - Change the permissions of the path, like os.chmod(). - """ + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... else: - def stat(self) -> stat_result: - """ - Return the result of the stat() system call on this path, like - os.stat() does. - """ - - def chmod(self, mode: int) -> None: - """ - Change the permissions of the path, like os.chmod(). - """ + def stat(self) -> stat_result: ... + def chmod(self, mode: int) -> None: ... + if sys.version_info >= (3, 13): @classmethod - def from_uri(cls, uri: str) -> Self: - """Return a new path from the given 'file' URI.""" - - def is_dir(self, *, follow_symlinks: bool = True) -> bool: - """ - Whether this path is a directory. - """ - - def is_file(self, *, follow_symlinks: bool = True) -> bool: - """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - - def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: - """ - Open the file in text mode, read it, and close the file. - """ + def from_uri(cls, uri: str) -> Self: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ... else: def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def is_dir(self) -> bool: - """ - Whether this path is a directory. - """ - - def is_file(self) -> bool: - """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - - def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: - """ - Open the file in text mode, read it, and close the file. - """ + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + if sys.version_info >= (3, 13): - def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ - - def rglob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ + def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: ... + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False + ) -> Iterator[Self]: ... 
elif sys.version_info >= (3, 12): - def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ - - def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ + def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... else: - def glob(self, pattern: str) -> Generator[Self, None, None]: - """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ - - def rglob(self, pattern: str) -> Generator[Self, None, None]: - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - if sys.version_info >= (3, 12): - def exists(self, *, follow_symlinks: bool = True) -> bool: - """ - Whether this path exists. + def glob(self, pattern: str) -> Generator[Self, None, None]: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... - This method normally follows symlinks; to check whether a symlink exists, - add the argument follow_symlinks=False. - """ + if sys.version_info >= (3, 12): + def exists(self, *, follow_symlinks: bool = True) -> bool: ... else: - def exists(self) -> bool: - """ - Whether this path exists. - """ - - def is_symlink(self) -> bool: - """ - Whether this path is a symbolic link. - """ - - def is_socket(self) -> bool: - """ - Whether this path is a socket. - """ - - def is_fifo(self) -> bool: - """ - Whether this path is a FIFO. - """ - - def is_block_device(self) -> bool: - """ - Whether this path is a block device. - """ - - def is_char_device(self) -> bool: - """ - Whether this path is a character device. - """ + def exists(self) -> bool: ... + + def is_symlink(self) -> bool: ... + def is_socket(self) -> bool: ... + def is_fifo(self) -> bool: ... + def is_block_device(self) -> bool: ... + def is_char_device(self) -> bool: ... if sys.version_info >= (3, 12): - def is_junction(self) -> bool: - """ - Whether this path is a junction. - """ - - def iterdir(self) -> Generator[Self, None, None]: - """Yield path objects of the directory contents. - - The children are yielded in arbitrary order, and the - special entries '.' and '..' are not included. - """ - - def lchmod(self, mode: int) -> None: - """ - Like chmod(), except if the path points to a symlink, the symlink's - permissions are changed, rather than its target's. - """ - - def lstat(self) -> stat_result: - """ - Like stat(), except if the path points to a symlink, the symlink's - status information is returned, rather than its target's. - """ - - def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: - """ - Create a new directory at this given path. - """ + def is_junction(self) -> bool: ... + + def iterdir(self) -> Generator[Self, None, None]: ... + def lchmod(self, mode: int) -> None: ... + def lstat(self) -> stat_result: ... + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... 
+ if sys.version_info >= (3, 14): @property - def info(self) -> PathInfo: - """ - A PathInfo object that exposes the file type and other file attributes - of this path. - """ - + def info(self) -> PathInfo: ... @overload - def move_into(self, target_dir: _PathT) -> _PathT: # type: ignore[overload-overlap] - """ - Move this file or directory tree into the given existing directory. - """ - + def move_into(self, target_dir: _PathT) -> _PathT: ... # type: ignore[overload-overlap] @overload def move_into(self, target_dir: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload - def move(self, target: _PathT) -> _PathT: # type: ignore[overload-overlap] - """ - Recursively move this file or directory tree to the given destination. - """ - + def move(self, target: _PathT) -> _PathT: ... # type: ignore[overload-overlap] @overload def move(self, target: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload - def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] - """ - Copy this file or directory tree into the given existing directory. - """ - + def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] @overload def copy_into(self, target_dir: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] @overload - def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] - """ - Recursively copy this file or directory tree to the given destination. - """ - + def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] @overload def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] @@ -513,11 +235,7 @@ class Path(PurePath): encoding: str | None = None, errors: str | None = None, newline: str | None = None, - ) -> TextIOWrapper: - """ - Open the file pointed to by this path and return a file object, as - the built-in open() function does. - """ + ) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @overload def open( @@ -566,233 +284,72 @@ class Path(PurePath): if sys.platform == "win32": if sys.version_info >= (3, 13): # raises UnsupportedOperation: - def owner(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] - """ - Return the login name of the file owner. - """ - - def group(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] - """ - Return the group name of the file gid. - """ + def owner(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] + def group(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] else: - def owner(self: Never) -> str: # type: ignore[misc] - """ - Return the login name of the file owner. - """ - - def group(self: Never) -> str: # type: ignore[misc] - """ - Return the group name of the file gid. - """ + def owner(self: Never) -> str: ... # type: ignore[misc] + def group(self: Never) -> str: ... # type: ignore[misc] else: if sys.version_info >= (3, 13): - def owner(self, *, follow_symlinks: bool = True) -> str: - """ - Return the login name of the file owner. 
- """ - - def group(self, *, follow_symlinks: bool = True) -> str: - """ - Return the group name of the file gid. - """ + def owner(self, *, follow_symlinks: bool = True) -> str: ... + def group(self, *, follow_symlinks: bool = True) -> str: ... else: - def owner(self) -> str: - """ - Return the login name of the file owner. - """ - - def group(self) -> str: - """ - Return the group name of the file gid. - """ + def owner(self) -> str: ... + def group(self) -> str: ... + # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms if sys.platform == "win32" and sys.version_info < (3, 12): - def is_mount(self: Never) -> bool: # type: ignore[misc] - """ - Check if this path is a POSIX mount point - """ + def is_mount(self: Never) -> bool: ... # type: ignore[misc] else: - def is_mount(self) -> bool: - """ - Check if this path is a mount point - """ - - def readlink(self) -> Self: - """ - Return the path to which the symbolic link points. - """ - if sys.version_info >= (3, 10): - def rename(self, target: StrPath) -> Self: - """ - Rename this path to the target path. - - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. + def is_mount(self) -> bool: ... - Returns the new Path instance pointing to the target path. - """ + def readlink(self) -> Self: ... - def replace(self, target: StrPath) -> Self: - """ - Rename this path to the target path, overwriting if that path exists. - - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. - - Returns the new Path instance pointing to the target path. - """ - else: - def rename(self, target: str | PurePath) -> Self: - """ - Rename this path to the target path. - - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. - - Returns the new Path instance pointing to the target path. - """ - - def replace(self, target: str | PurePath) -> Self: - """ - Rename this path to the target path, overwriting if that path exists. - - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. - - Returns the new Path instance pointing to the target path. - """ - - def resolve(self, strict: bool = False) -> Self: - """ - Make the path absolute, resolving all symlinks on the way and also - normalizing it. - """ - - def rmdir(self) -> None: - """ - Remove this directory. The directory must be empty. - """ - - def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: - """ - Make this path a symlink pointing to the target path. - Note the order of arguments (link, target) is the reverse of os.symlink. - """ if sys.version_info >= (3, 10): - def hardlink_to(self, target: StrOrBytesPath) -> None: - """ - Make this path a hard link pointing to the same file as *target*. - - Note the order of arguments (self, target) is the reverse of os.link's. - """ - - def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: - """ - Create this file with the given access mode, if it doesn't exist. - """ + def rename(self, target: StrPath) -> Self: ... + def replace(self, target: StrPath) -> Self: ... 
+ else: + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... - def unlink(self, missing_ok: bool = False) -> None: - """ - Remove this file or link. - If the path is a directory, use rmdir() instead. - """ + def resolve(self, strict: bool = False) -> Self: ... + def rmdir(self) -> None: ... + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... + if sys.version_info >= (3, 10): + def hardlink_to(self, target: StrOrBytesPath) -> None: ... + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... + def unlink(self, missing_ok: bool = False) -> None: ... @classmethod - def home(cls) -> Self: - """Return a new path pointing to expanduser('~').""" - - def absolute(self) -> Self: - """Return an absolute version of this path - No normalization or symlink resolution is performed. - - Use resolve() to resolve symlinks and remove '..' segments. - """ - - def expanduser(self) -> Self: - """Return a new path with expanded ~ and ~user constructs - (as returned by os.path.expanduser) - """ - - def read_bytes(self) -> bytes: - """ - Open the file in bytes mode, read it, and close the file. - """ - - def samefile(self, other_path: StrPath) -> bool: - """Return whether other_path is the same or not as this file - (as returned by os.path.samefile()). - """ - - def write_bytes(self, data: ReadableBuffer) -> int: - """ - Open the file in bytes mode, write to it, and close the file. - """ + def home(cls) -> Self: ... + def absolute(self) -> Self: ... + def expanduser(self) -> Self: ... + def read_bytes(self) -> bytes: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): def write_text( self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None - ) -> int: - """ - Open the file in text mode, write to it, and close the file. - """ + ) -> int: ... else: - def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: - """ - Open the file in text mode, write to it, and close the file. - """ + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `hardlink_to()` instead.") - def link_to(self, target: StrOrBytesPath) -> None: - """ - Make the target path a hard link pointing to this path. - - Note this function does not make this path a hard link to *target*, - despite the implication of the function and argument names. The order - of arguments (target, link) is the reverse of Path.symlink_to, but - matches that of os.link. - - Deprecated since Python 3.10 and scheduled for removal in Python 3.12. - Use `hardlink_to()` instead. - """ + def link_to(self, target: StrOrBytesPath) -> None: ... else: - def link_to(self, target: StrOrBytesPath) -> None: - """ - Make the target path a hard link pointing to this path. - - Note this function does not make this path a hard link to *target*, - despite the implication of the function and argument names. The order - of arguments (target, link) is the reverse of Path.symlink_to, but - matches that of os.link. - - """ + def link_to(self, target: StrOrBytesPath) -> None: ... 
if sys.version_info >= (3, 12): def walk( self, top_down: bool = True, on_error: Callable[[OSError], object] | None = None, follow_symlinks: bool = False - ) -> Iterator[tuple[Self, list[str], list[str]]]: - """Walk the directory tree from this directory, similar to os.walk().""" + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... class PosixPath(Path, PurePosixPath): - """Path subclass for non-Windows systems. - - On a POSIX system, instantiating a Path should return this object. - """ - __slots__ = () class WindowsPath(Path, PureWindowsPath): - """Path subclass for Windows systems. - - On a Windows system, instantiating a Path should return this object. - """ - __slots__ = () if sys.version_info >= (3, 13): - class UnsupportedOperation(NotImplementedError): - """An exception that is raised when an unsupported operation is attempted.""" + class UnsupportedOperation(NotImplementedError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi index f5419ed28c44f..9f9a650846deb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi @@ -1,15 +1,7 @@ -""" -Protocols for supporting classes in pathlib. -""" - from typing import Protocol, runtime_checkable @runtime_checkable class PathInfo(Protocol): - """Protocol for path info objects, which support querying the file type. - Methods may return cached results. - """ - def exists(self, *, follow_symlinks: bool = True) -> bool: ... def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... def is_file(self, *, follow_symlinks: bool = True) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi index 60436b2c81a55..2f114b20572df 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi @@ -1,341 +1,3 @@ -""" -The Python Debugger Pdb -======================= - -To use the debugger in its simplest form: - - >>> import pdb - >>> pdb.run('') - -The debugger's prompt is '(Pdb) '. This will stop in the first -function call in . - -Alternatively, if a statement terminated with an unhandled exception, -you can use pdb's post-mortem facility to inspect the contents of the -traceback: - - >>> - - >>> import pdb - >>> pdb.pm() - -The commands recognized by the debugger are listed in the next -section. Most can be abbreviated as indicated; e.g., h(elp) means -that 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel', -nor as 'H' or 'Help' or 'HELP'). Optional arguments are enclosed in -square brackets. Alternatives in the command syntax are separated -by a vertical bar (|). - -A blank line repeats the previous command literally, except for -'list', where it lists the next 11 lines. - -Commands that the debugger doesn't recognize are assumed to be Python -statements and are executed in the context of the program being -debugged. Python statements can also be prefixed with an exclamation -point ('!'). This is a powerful way to inspect the program being -debugged; it is even possible to change variables or call functions. -When an exception occurs in such a statement, the exception name is -printed but the debugger's state is not changed. - -The debugger supports aliases, which can save typing. And aliases can -have parameters (see the alias help entry) which allows one a certain -level of adaptability to the context under examination. 
- -Multiple commands may be entered on a single line, separated by the -pair ';;'. No intelligence is applied to separating the commands; the -input is split at the first ';;', even if it is in the middle of a -quoted string. - -If a file ".pdbrc" exists in your home directory or in the current -directory, it is read in and executed as if it had been typed at the -debugger prompt. This is particularly useful for aliases. If both -files exist, the one in the home directory is read first and aliases -defined there can be overridden by the local file. This behavior can be -disabled by passing the "readrc=False" argument to the Pdb constructor. - -Aside from aliases, the debugger is not directly programmable; but it -is implemented as a class from which you can derive your own debugger -class, which you can make as fancy as you like. - - -Debugger commands -================= - -h(elp) - -Without argument, print the list of available commands. -With a command name as argument, print help about that command. -"help pdb" shows the full pdb documentation. -"help exec" gives help on the ! command. - -w(here) [count] - -Print a stack trace. If count is not specified, print the full stack. -If count is 0, print the current frame entry. If count is positive, -print count entries from the most recent frame. If count is negative, -print -count entries from the least recent frame. -An arrow indicates the "current frame", which determines the -context of most commands. 'bt' is an alias for this command. - -d(own) [count] - -Move the current frame count (default one) levels down in the -stack trace (to a newer frame). - -u(p) [count] - -Move the current frame count (default one) levels up in the -stack trace (to an older frame). - -b(reak) [ ([filename:]lineno | function) [, condition] ] - -Without argument, list all breaks. - -With a line number argument, set a break at this line in the -current file. With a function name, set a break at the first -executable line of that function. If a second argument is -present, it is a string specifying an expression which must -evaluate to true before the breakpoint is honored. - -The line number may be prefixed with a filename and a colon, -to specify a breakpoint in another file (probably one that -hasn't been loaded yet). The file is searched for on -sys.path; the .py suffix may be omitted. - -tbreak [ ([filename:]lineno | function) [, condition] ] - -Same arguments as break, but sets a temporary breakpoint: it -is automatically deleted when first hit. - -cl(ear) [filename:lineno | bpnumber ...] - -With a space separated list of breakpoint numbers, clear -those breakpoints. Without argument, clear all breaks (but -first ask confirmation). With a filename:lineno argument, -clear all breaks at that line in that file. - -disable bpnumber [bpnumber ...] - -Disables the breakpoints given as a space separated list of -breakpoint numbers. Disabling a breakpoint means it cannot -cause the program to stop execution, but unlike clearing a -breakpoint, it remains in the list of breakpoints and can be -(re-)enabled. - -enable bpnumber [bpnumber ...] - -Enables the breakpoints given as a space separated list of -breakpoint numbers. - -ignore bpnumber [count] - -Set the ignore count for the given breakpoint number. If -count is omitted, the ignore count is set to 0. A breakpoint -becomes active when the ignore count is zero. 
When non-zero, -the count is decremented each time the breakpoint is reached -and the breakpoint is not disabled and any associated -condition evaluates to true. - -condition bpnumber [condition] - -Set a new condition for the breakpoint, an expression which -must evaluate to true before the breakpoint is honored. If -condition is absent, any existing condition is removed; i.e., -the breakpoint is made unconditional. - -(Pdb) commands [bpnumber] -(com) ... -(com) end -(Pdb) - -Specify a list of commands for breakpoint number bpnumber. -The commands themselves are entered on the following lines. -Type a line containing just 'end' to terminate the commands. -The commands are executed when the breakpoint is hit. - -To remove all commands from a breakpoint, type commands and -follow it immediately with end; that is, give no commands. - -With no bpnumber argument, commands refers to the last -breakpoint set. - -You can use breakpoint commands to start your program up -again. Simply use the continue command, or step, or any other -command that resumes execution. - -Specifying any command resuming execution (currently continue, -step, next, return, jump, quit and their abbreviations) -terminates the command list (as if that command was -immediately followed by end). This is because any time you -resume execution (even with a simple next or step), you may -encounter another breakpoint -- which could have its own -command list, leading to ambiguities about which list to -execute. - -If you use the 'silent' command in the command list, the usual -message about stopping at a breakpoint is not printed. This -may be desirable for breakpoints that are to print a specific -message and then continue. If none of the other commands -print anything, you will see no sign that the breakpoint was -reached. - -s(tep) - -Execute the current line, stop at the first possible occasion -(either in a function that is called or in the current -function). - -n(ext) - -Continue execution until the next line in the current function -is reached or it returns. - -unt(il) [lineno] - -Without argument, continue execution until the line with a -number greater than the current one is reached. With a line -number, continue execution until a line with a number greater -or equal to that is reached. In both cases, also stop when -the current frame returns. - -j(ump) lineno - -Set the next line that will be executed. Only available in -the bottom-most frame. This lets you jump back and execute -code again, or jump forward to skip code that you don't want -to run. - -It should be noted that not all jumps are allowed -- for -instance it is not possible to jump into the middle of a -for loop or out of a finally clause. - -r(eturn) - -Continue execution until the current function returns. - -retval - -Print the return value for the last return of a function. - -run [args...] - -Restart the debugged python program. If a string is supplied -it is split with "shlex", and the result is used as the new -sys.argv. History, breakpoints, actions and debugger options -are preserved. "restart" is an alias for "run". - -c(ont(inue)) - -Continue execution, only stop when a breakpoint is encountered. - -l(ist) [first[, last] | .] - -List source code for the current file. Without arguments, -list 11 lines around the current line or continue the previous -listing. With . as argument, list 11 lines around the current -line. With one argument, list 11 lines starting at that line. 
-With two arguments, list the given range; if the second -argument is less than the first, it is a count. - -The current line in the current frame is indicated by "->". -If an exception is being debugged, the line where the -exception was originally raised or propagated is indicated by -">>", if it differs from the current line. - -ll | longlist - -List the whole source code for the current function or frame. - -a(rgs) - -Print the argument list of the current function. - -p expression - -Print the value of the expression. - -pp expression - -Pretty-print the value of the expression. - -whatis expression - -Print the type of the argument. - -source expression - -Try to get source code for the given object and display it. - -display [expression] - -Display the value of the expression if it changed, each time execution -stops in the current frame. - -Without expression, list all display expressions for the current frame. - -undisplay [expression] - -Do not display the expression any more in the current frame. - -Without expression, clear all display expressions for the current frame. - -interact - -Start an interactive interpreter whose global namespace -contains all the (global and local) names found in the current scope. - -alias [name [command]] - -Create an alias called 'name' that executes 'command'. The -command must *not* be enclosed in quotes. Replaceable -parameters can be indicated by %1, %2, and so on, while %* is -replaced by all the parameters. If no command is given, the -current alias for name is shown. If no name is given, all -aliases are listed. - -Aliases may be nested and can contain anything that can be -legally typed at the pdb prompt. Note! You *can* override -internal pdb commands with aliases! Those internal commands -are then hidden until the alias is removed. Aliasing is -recursively applied to the first word of the command line; all -other words in the line are left alone. - -As an example, here are two useful aliases (especially when -placed in the .pdbrc file): - -# Print instance variables (usage "pi classInst") -alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) -# Print instance variables in self -alias ps pi self - -unalias name - -Delete the specified alias. - -debug code - -Enter a recursive debugger that steps through the code -argument (which is an arbitrary expression or statement to be -executed in the current environment). - -q(uit) | exit - -Quit from the debugger. The program being executed is aborted. - -(!) statement - -Execute the (one-line) statement in the context of the current -stack frame. The exclamation point can be omitted unless the -first word of the statement resembles a debugger command, e.g.: -(Pdb) ! 
n=42 -(Pdb) - -To assign to a global variable you must always prefix the command with -a 'global' command, e.g.: -(Pdb) global list_options; list_options = ['-l'] -(Pdb) -""" - import signal import sys from bdb import Bdb, _Backend @@ -343,6 +5,7 @@ from cmd import Cmd from collections.abc import Callable, Iterable, Mapping, Sequence from inspect import _SourceObjectType from linecache import _ModuleGlobals +from rlcompleter import Completer from types import CodeType, FrameType, TracebackType from typing import IO, Any, ClassVar, Final, Literal, TypeVar from typing_extensions import ParamSpec, Self, TypeAlias @@ -357,90 +20,24 @@ _Mode: TypeAlias = Literal["inline", "cli"] line_prefix: Final[str] # undocumented -class Restart(Exception): - """Causes a debugger to be restarted for the debugged python program.""" - -def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: - """Execute the *statement* (given as a string or a code object) - under debugger control. - - The debugger prompt appears before any code is executed; you can set - breakpoints and type continue, or you can step through the statement - using step or next. - - The optional *globals* and *locals* arguments specify the - environment in which the code is executed; by default the - dictionary of the module __main__ is used (see the explanation of - the built-in exec() or eval() functions.). - """ - -def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: - """Evaluate the *expression* (given as a string or a code object) - under debugger control. - - When runeval() returns, it returns the value of the expression. - Otherwise this function is similar to run(). - """ +class Restart(Exception): ... +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... -def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: - """Call the function (a function or method object, not a string) - with the given arguments. - - When runcall() returns, it returns whatever the function call - returned. The debugger prompt appears as soon as the function is - entered. - """ +def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... if sys.version_info >= (3, 14): - def set_default_backend(backend: _Backend) -> None: - """Set the default backend to use for Pdb instances.""" - - def get_default_backend() -> _Backend: - """Get the default backend to use for Pdb instances.""" - - def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: - """Enter the debugger at the calling stack frame. - - This is useful to hard-code a breakpoint at a given point in a - program, even if the code is not otherwise being debugged (e.g. when - an assertion fails). If given, *header* is printed to the console - just before debugging begins. *commands* is an optional list of - pdb commands to run when the debugger starts. - """ - - async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: - """Enter the debugger at the calling stack frame, but in async mode. - - This should be used as await pdb.set_trace_async(). 
Users can do await - if they enter the debugger with this function. Otherwise it's the same - as set_trace(). - """ + def set_default_backend(backend: _Backend) -> None: ... + def get_default_backend() -> _Backend: ... + def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... else: - def set_trace(*, header: str | None = None) -> None: - """Enter the debugger at the calling stack frame. - - This is useful to hard-code a breakpoint at a given point in a - program, even if the code is not otherwise being debugged (e.g. when - an assertion fails). If given, *header* is printed to the console - just before debugging begins. - """ - -def post_mortem(t: TracebackType | None = None) -> None: - """Enter post-mortem debugging of the given *traceback*, or *exception* - object. - - If no traceback is given, it uses the one of the exception that is - currently being handled (an exception must be being handled if the - default is to be used). - - If `t` is an exception object, the `exceptions` command makes it possible to - list and inspect its chained exceptions (if any). - """ + def set_trace(*, header: str | None = None) -> None: ... -def pm() -> None: - """Enter post-mortem debugging of the traceback found in sys.last_exc.""" +def post_mortem(t: TracebackType | None = None) -> None: ... +def pm() -> None: ... class Pdb(Bdb, Cmd): # Everything here is undocumented, except for __init__ @@ -501,44 +98,21 @@ class Pdb(Bdb, Cmd): if sys.version_info >= (3, 13): user_opcode = Bdb.user_line - def bp_commands(self, frame: FrameType) -> bool: - """Call every command that was set for the current active breakpoint - (if there is one). + def bp_commands(self, frame: FrameType) -> bool: ... - Returns True if the normal interaction function must be called, - False otherwise. - """ if sys.version_info >= (3, 13): def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ... else: def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... - def displayhook(self, obj: object) -> None: - """Custom displayhook for the exec in default(), which prevents - assignment of the _ variable in the builtins. - """ - - def handle_command_def(self, line: str) -> bool: - """Handles one command line during command list definition.""" - - def defaultFile(self) -> str: - """Produce a reasonable default.""" - + def displayhook(self, obj: object) -> None: ... + def handle_command_def(self, line: str) -> bool: ... + def defaultFile(self) -> str: ... def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... if sys.version_info >= (3, 14): - def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: - """Check whether specified line seems to be executable. - - Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank - line or EOF). Warning: testing is not comprehensive. - """ + def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: ... else: - def checkline(self, filename: str, lineno: int) -> int: - """Check whether specified line seems to be executable. - - Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank - line or EOF). Warning: testing is not comprehensive. - """ + def checkline(self, filename: str, lineno: int) -> int: ... 
def _getval(self, arg: str) -> object: ... if sys.version_info >= (3, 14): @@ -547,19 +121,7 @@ class Pdb(Bdb, Cmd): def print_stack_trace(self) -> None: ... def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... - def lookupmodule(self, filename: str) -> str | None: - """Helper function for break/clear parsing -- may be overridden. - - lookupmodule() translates (possibly incomplete) file or module name - into an absolute file name. - - filename could be in format of: - * an absolute path like '/path/to/file.py' - * a relative path like 'file.py' or 'dir/file.py' - * a module name like 'module' or 'package.module' - - files and modules will be searched in sys.path. - """ + def lookupmodule(self, filename: str) -> str | None: ... if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... @@ -569,333 +131,43 @@ class Pdb(Bdb, Cmd): if sys.version_info >= (3, 13): def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... - def do_commands(self, arg: str) -> bool | None: - """(Pdb) commands [bpnumber] - (com) ... - (com) end - (Pdb) - - Specify a list of commands for breakpoint number bpnumber. - The commands themselves are entered on the following lines. - Type a line containing just 'end' to terminate the commands. - The commands are executed when the breakpoint is hit. - - To remove all commands from a breakpoint, type commands and - follow it immediately with end; that is, give no commands. - - With no bpnumber argument, commands refers to the last - breakpoint set. - - You can use breakpoint commands to start your program up - again. Simply use the continue command, or step, or any other - command that resumes execution. - - Specifying any command resuming execution (currently continue, - step, next, return, jump, quit and their abbreviations) - terminates the command list (as if that command was - immediately followed by end). This is because any time you - resume execution (even with a simple next or step), you may - encounter another breakpoint -- which could have its own - command list, leading to ambiguities about which list to - execute. - - If you use the 'silent' command in the command list, the usual - message about stopping at a breakpoint is not printed. This - may be desirable for breakpoints that are to print a specific - message and then continue. If none of the other commands - print anything, you will see no sign that the breakpoint was - reached. - """ + def do_commands(self, arg: str) -> bool | None: ... if sys.version_info >= (3, 14): - def do_break(self, arg: str, temporary: bool = False) -> bool | None: - """b(reak) [ ([filename:]lineno | function) [, condition] ] - - Without argument, list all breaks. - - With a line number argument, set a break at this line in the - current file. With a function name, set a break at the first - executable line of that function. If a second argument is - present, it is a string specifying an expression which must - evaluate to true before the breakpoint is honored. - - The line number may be prefixed with a filename and a colon, - to specify a breakpoint in another file (probably one that - hasn't been loaded yet). The file is searched for on - sys.path; the .py suffix may be omitted. - """ + def do_break(self, arg: str, temporary: bool = False) -> bool | None: ... 
else: - def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: - """b(reak) [ ([filename:]lineno | function) [, condition] ] - - Without argument, list all breaks. - - With a line number argument, set a break at this line in the - current file. With a function name, set a break at the first - executable line of that function. If a second argument is - present, it is a string specifying an expression which must - evaluate to true before the breakpoint is honored. - - The line number may be prefixed with a filename and a colon, - to specify a breakpoint in another file (probably one that - hasn't been loaded yet). The file is searched for on - sys.path; the .py suffix may be omitted. - """ - - def do_tbreak(self, arg: str) -> bool | None: - """tbreak [ ([filename:]lineno | function) [, condition] ] - - Same arguments as break, but sets a temporary breakpoint: it - is automatically deleted when first hit. - """ - - def do_enable(self, arg: str) -> bool | None: - """enable bpnumber [bpnumber ...] - - Enables the breakpoints given as a space separated list of - breakpoint numbers. - """ - - def do_disable(self, arg: str) -> bool | None: - """disable bpnumber [bpnumber ...] - - Disables the breakpoints given as a space separated list of - breakpoint numbers. Disabling a breakpoint means it cannot - cause the program to stop execution, but unlike clearing a - breakpoint, it remains in the list of breakpoints and can be - (re-)enabled. - """ - - def do_condition(self, arg: str) -> bool | None: - """condition bpnumber [condition] - - Set a new condition for the breakpoint, an expression which - must evaluate to true before the breakpoint is honored. If - condition is absent, any existing condition is removed; i.e., - the breakpoint is made unconditional. - """ - - def do_ignore(self, arg: str) -> bool | None: - """ignore bpnumber [count] - - Set the ignore count for the given breakpoint number. If - count is omitted, the ignore count is set to 0. A breakpoint - becomes active when the ignore count is zero. When non-zero, - the count is decremented each time the breakpoint is reached - and the breakpoint is not disabled and any associated - condition evaluates to true. - """ - - def do_clear(self, arg: str) -> bool | None: - """cl(ear) [filename:lineno | bpnumber ...] - - With a space separated list of breakpoint numbers, clear - those breakpoints. Without argument, clear all breaks (but - first ask confirmation). With a filename:lineno argument, - clear all breaks at that line in that file. - """ - - def do_where(self, arg: str) -> bool | None: - """w(here) [count] - - Print a stack trace. If count is not specified, print the full stack. - If count is 0, print the current frame entry. If count is positive, - print count entries from the most recent frame. If count is negative, - print -count entries from the least recent frame. - An arrow indicates the "current frame", which determines the - context of most commands. 'bt' is an alias for this command. - """ + def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: ... + + def do_tbreak(self, arg: str) -> bool | None: ... + def do_enable(self, arg: str) -> bool | None: ... + def do_disable(self, arg: str) -> bool | None: ... + def do_condition(self, arg: str) -> bool | None: ... + def do_ignore(self, arg: str) -> bool | None: ... + def do_clear(self, arg: str) -> bool | None: ... + def do_where(self, arg: str) -> bool | None: ... 
if sys.version_info >= (3, 13): - def do_exceptions(self, arg: str) -> bool | None: - """exceptions [number] - - List or change current exception in an exception chain. - - Without arguments, list all the current exception in the exception - chain. Exceptions will be numbered, with the current exception indicated - with an arrow. - - If given an integer as argument, switch to the exception at that index. - """ - - def do_up(self, arg: str) -> bool | None: - """u(p) [count] - - Move the current frame count (default one) levels up in the - stack trace (to an older frame). - """ - - def do_down(self, arg: str) -> bool | None: - """d(own) [count] - - Move the current frame count (default one) levels down in the - stack trace (to a newer frame). - """ - - def do_until(self, arg: str) -> bool | None: - """unt(il) [lineno] - - Without argument, continue execution until the line with a - number greater than the current one is reached. With a line - number, continue execution until a line with a number greater - or equal to that is reached. In both cases, also stop when - the current frame returns. - """ - - def do_step(self, arg: str) -> bool | None: - """s(tep) - - Execute the current line, stop at the first possible occasion - (either in a function that is called or in the current - function). - """ - - def do_next(self, arg: str) -> bool | None: - """n(ext) - - Continue execution until the next line in the current function - is reached or it returns. - """ - - def do_run(self, arg: str) -> bool | None: - """run [args...] - - Restart the debugged python program. If a string is supplied - it is split with "shlex", and the result is used as the new - sys.argv. History, breakpoints, actions and debugger options - are preserved. "restart" is an alias for "run". - """ - - def do_return(self, arg: str) -> bool | None: - """r(eturn) - - Continue execution until the current function returns. - """ - - def do_continue(self, arg: str) -> bool | None: - """c(ont(inue)) - - Continue execution, only stop when a breakpoint is encountered. - """ - - def do_jump(self, arg: str) -> bool | None: - """j(ump) lineno - - Set the next line that will be executed. Only available in - the bottom-most frame. This lets you jump back and execute - code again, or jump forward to skip code that you don't want - to run. - - It should be noted that not all jumps are allowed -- for - instance it is not possible to jump into the middle of a - for loop or out of a finally clause. - """ - - def do_debug(self, arg: str) -> bool | None: - """debug code - - Enter a recursive debugger that steps through the code - argument (which is an arbitrary expression or statement to be - executed in the current environment). - """ - - def do_quit(self, arg: str) -> bool | None: - """q(uit) | exit - - Quit from the debugger. The program being executed is aborted. - """ - - def do_EOF(self, arg: str) -> bool | None: - """EOF - - Handles the receipt of EOF as a command. - """ - - def do_args(self, arg: str) -> bool | None: - """a(rgs) - - Print the argument list of the current function. - """ - - def do_retval(self, arg: str) -> bool | None: - """retval - - Print the return value for the last return of a function. - """ - - def do_p(self, arg: str) -> bool | None: - """p expression - - Print the value of the expression. - """ - - def do_pp(self, arg: str) -> bool | None: - """pp expression - - Pretty-print the value of the expression. - """ - - def do_list(self, arg: str) -> bool | None: - """l(ist) [first[, last] | .] 
- - List source code for the current file. Without arguments, - list 11 lines around the current line or continue the previous - listing. With . as argument, list 11 lines around the current - line. With one argument, list 11 lines starting at that line. - With two arguments, list the given range; if the second - argument is less than the first, it is a count. - - The current line in the current frame is indicated by "->". - If an exception is being debugged, the line where the - exception was originally raised or propagated is indicated by - ">>", if it differs from the current line. - """ - - def do_whatis(self, arg: str) -> bool | None: - """whatis expression - - Print the type of the argument. - """ - - def do_alias(self, arg: str) -> bool | None: - """alias [name [command]] - - Create an alias called 'name' that executes 'command'. The - command must *not* be enclosed in quotes. Replaceable - parameters can be indicated by %1, %2, and so on, while %* is - replaced by all the parameters. If no command is given, the - current alias for name is shown. If no name is given, all - aliases are listed. - - Aliases may be nested and can contain anything that can be - legally typed at the pdb prompt. Note! You *can* override - internal pdb commands with aliases! Those internal commands - are then hidden until the alias is removed. Aliasing is - recursively applied to the first word of the command line; all - other words in the line are left alone. - - As an example, here are two useful aliases (especially when - placed in the .pdbrc file): - - # Print instance variables (usage "pi classInst") - alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) - # Print instance variables in self - alias ps pi self - """ - - def do_unalias(self, arg: str) -> bool | None: - """unalias name - - Delete the specified alias. - """ - - def do_help(self, arg: str) -> bool | None: - """h(elp) - - Without argument, print the list of available commands. - With a command name as argument, print help about that command. - "help pdb" shows the full pdb documentation. - "help exec" gives help on the ! command. - """ + def do_exceptions(self, arg: str) -> bool | None: ... + + def do_up(self, arg: str) -> bool | None: ... + def do_down(self, arg: str) -> bool | None: ... + def do_until(self, arg: str) -> bool | None: ... + def do_step(self, arg: str) -> bool | None: ... + def do_next(self, arg: str) -> bool | None: ... + def do_run(self, arg: str) -> bool | None: ... + def do_return(self, arg: str) -> bool | None: ... + def do_continue(self, arg: str) -> bool | None: ... + def do_jump(self, arg: str) -> bool | None: ... + def do_debug(self, arg: str) -> bool | None: ... + def do_quit(self, arg: str) -> bool | None: ... + def do_EOF(self, arg: str) -> bool | None: ... + def do_args(self, arg: str) -> bool | None: ... + def do_retval(self, arg: str) -> bool | None: ... + def do_p(self, arg: str) -> bool | None: ... + def do_pp(self, arg: str) -> bool | None: ... + def do_list(self, arg: str) -> bool | None: ... + def do_whatis(self, arg: str) -> bool | None: ... + def do_alias(self, arg: str) -> bool | None: ... + def do_unalias(self, arg: str) -> bool | None: ... + def do_help(self, arg: str) -> bool | None: ... do_b = do_break do_cl = do_clear do_w = do_where @@ -916,21 +188,7 @@ class Pdb(Bdb, Cmd): do_rv = do_retval do_l = do_list do_h = do_help - def help_exec(self) -> None: - """(!) statement - - Execute the (one-line) statement in the context of the current - stack frame. 
The exclamation point can be omitted unless the - first word of the statement resembles a debugger command, e.g.: - (Pdb) ! n=42 - (Pdb) - - To assign to a global variable you must always prefix the command with - a 'global' command, e.g.: - (Pdb) global list_options; list_options = ['-l'] - (Pdb) - """ - + def help_exec(self) -> None: ... def help_pdb(self) -> None: ... def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... if sys.version_info >= (3, 13): @@ -943,48 +201,22 @@ class Pdb(Bdb, Cmd): def completenames(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... # type: ignore[override] if sys.version_info >= (3, 12): def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ... + if sys.version_info >= (3, 13) and sys.version_info < (3, 14): + # Added in 3.13.8. + @property + def rlcompleter(self) -> type[Completer]: ... def _select_frame(self, number: int) -> None: ... def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... - def _print_lines(self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None) -> None: - """Print a range of lines.""" - + def _print_lines( + self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None + ) -> None: ... def _cmdloop(self) -> None: ... - def do_display(self, arg: str) -> bool | None: - """display [expression] - - Display the value of the expression if it changed, each time execution - stops in the current frame. - - Without expression, list all display expressions for the current frame. - """ - - def do_interact(self, arg: str) -> bool | None: - """interact - - Start an interactive interpreter whose global namespace - contains all the (global and local) names found in the current scope. - """ - - def do_longlist(self, arg: str) -> bool | None: - """ll | longlist - - List the whole source code for the current function or frame. - """ - - def do_source(self, arg: str) -> bool | None: - """source expression - - Try to get source code for the given object and display it. - """ - - def do_undisplay(self, arg: str) -> bool | None: - """undisplay [expression] - - Do not display the expression any more in the current frame. - - Without expression, clear all display expressions for the current frame. - """ + def do_display(self, arg: str) -> bool | None: ... + def do_interact(self, arg: str) -> bool | None: ... + def do_longlist(self, arg: str) -> bool | None: ... + def do_source(self, arg: str) -> bool | None: ... + def do_undisplay(self, arg: str) -> bool | None: ... do_ll = do_longlist def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... def _complete_bpnumber(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... @@ -1024,6 +256,4 @@ if sys.version_info < (3, 10): def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): - """String that doesn't quote its repr.""" - def __repr__(self) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi index 1348b91018be7..d94fe208f4468 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi @@ -1,28 +1,3 @@ -"""Create portable serialized representations of Python objects. - -See module copyreg for a mechanism for registering custom picklers. -See module pickletools source for extensive comments. 
- -Classes: - - Pickler - Unpickler - -Functions: - - dump(object, file) - dumps(object) -> string - load(file) -> object - loads(bytes) -> object - -Misc variables: - - __version__ - format_version - compatible_formats - -""" - from _pickle import ( PickleError as PickleError, Pickler as Pickler, @@ -134,22 +109,11 @@ bytes_types: tuple[type[Any], ...] # undocumented @final class PickleBuffer: - """Wrapper for potentially out-of-band buffers""" - def __new__(cls, buffer: ReadableBuffer) -> Self: ... - def raw(self) -> memoryview: - """Return a memoryview of the raw memory underlying this buffer. - Will raise BufferError is the buffer isn't contiguous. - """ - - def release(self) -> None: - """Release the underlying buffer exposed by the PickleBuffer object.""" - - def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object.""" - - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object.""" + def raw(self) -> memoryview: ... + def release(self) -> None: ... + def __buffer__(self, flags: int, /) -> memoryview: ... + def __release_buffer__(self, buffer: memoryview, /) -> None: ... MARK: Final = b"(" STOP: Final = b"." @@ -231,46 +195,8 @@ BYTEARRAY8: Final = b"\x96" NEXT_BUFFER: Final = b"\x97" READONLY_BUFFER: Final = b"\x98" -def encode_long(x: int) -> bytes: # undocumented - """Encode a long to a two's complement little-endian binary string. - Note that 0 is a special case, returning an empty string, to save a - byte in the LONG1 pickling context. - - >>> encode_long(0) - b'' - >>> encode_long(255) - b'\\xff\\x00' - >>> encode_long(32767) - b'\\xff\\x7f' - >>> encode_long(-256) - b'\\x00\\xff' - >>> encode_long(-32768) - b'\\x00\\x80' - >>> encode_long(-128) - b'\\x80' - >>> encode_long(127) - b'\\x7f' - >>> - """ - -def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: # undocumented - """Decode a long from a two's complement little-endian binary string. - - >>> decode_long(b'') - 0 - >>> decode_long(b"\\xff\\x00") - 255 - >>> decode_long(b"\\xff\\x7f") - 32767 - >>> decode_long(b"\\x00\\xff") - -256 - >>> decode_long(b"\\x00\\x80") - -32768 - >>> decode_long(b"\\x80") - -128 - >>> decode_long(b"\\x7f") - 127 - """ +def encode_long(x: int) -> bytes: ... # undocumented +def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: ... # undocumented # undocumented pure-Python implementations class _Pickler: @@ -286,53 +212,9 @@ class _Pickler: *, fix_imports: bool = True, buffer_callback: _BufferCallback = None, - ) -> None: - """This takes a binary file for writing a pickle data stream. - - The optional *protocol* argument tells the pickler to use the - given protocol; supported protocols are 0, 1, 2, 3, 4 and 5. - The default protocol is 5. It was introduced in Python 3.8, and - is incompatible with previous versions. - - Specifying a negative protocol version selects the highest - protocol version supported. The higher the protocol used, the - more recent the version of Python needed to read the pickle - produced. - - The *file* argument must have a write() method that accepts a - single bytes argument. It can thus be a file object opened for - binary writing, an io.BytesIO instance, or any other custom - object that meets this interface. 
- - If *fix_imports* is True and *protocol* is less than 3, pickle - will try to map the new Python 3 names to the old module names - used in Python 2, so that the pickle data stream is readable - with Python 2. - - If *buffer_callback* is None (the default), buffer views are - serialized into *file* as part of the pickle stream. - - If *buffer_callback* is not None, then it can be called any number - of times with a buffer view. If the callback returns a false value - (such as None), the given buffer is out-of-band; otherwise the - buffer is serialized in-band, i.e. inside the pickle stream. - - It is an error if *buffer_callback* is not None and *protocol* - is None or smaller than 5. - """ - - def dump(self, obj: Any) -> None: - """Write a pickled representation of obj to the open file.""" - - def clear_memo(self) -> None: - """Clears the pickler's "memo". - - The memo is the data structure that remembers which objects the - pickler has already seen, so that shared or recursive objects - are pickled by reference and not by value. This method is - useful when re-using picklers. - """ - + ) -> None: ... + def dump(self, obj: Any) -> None: ... + def clear_memo(self) -> None: ... def persistent_id(self, obj: Any) -> Any: ... class _Unpickler: @@ -345,50 +227,7 @@ class _Unpickler: encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = None, - ) -> None: - """This takes a binary file for reading a pickle data stream. - - The protocol version of the pickle is detected automatically, so - no proto argument is needed. - - The argument *file* must have two methods, a read() method that - takes an integer argument, and a readline() method that requires - no arguments. Both methods should return bytes. Thus *file* - can be a binary file object opened for reading, an io.BytesIO - object, or any other custom object that meets this interface. - - The file-like object must have two methods, a read() method - that takes an integer argument, and a readline() method that - requires no arguments. Both methods should return bytes. - Thus file-like object can be a binary file object opened for - reading, a BytesIO object, or any other custom object that - meets this interface. - - If *buffers* is not None, it should be an iterable of buffer-enabled - objects that is consumed each time the pickle stream references - an out-of-band buffer view. Such buffers have been given in order - to the *buffer_callback* of a Pickler object. - - If *buffers* is None (the default), then the buffers are taken - from the pickle stream, assuming they are serialized there. - It is an error for *buffers* to be None if the pickle stream - was produced with a non-None *buffer_callback*. - - Other optional arguments are *fix_imports*, *encoding* and - *errors*, which are used to control compatibility support for - pickle stream generated by Python 2. If *fix_imports* is True, - pickle will try to map the old Python 2 names to the new names - used in Python 3. The *encoding* and *errors* tell pickle how - to decode 8-bit string instances pickled by Python 2; these - default to 'ASCII' and 'strict', respectively. *encoding* can be - 'bytes' to read these 8-bit string instances as bytes objects. - """ - - def load(self) -> Any: - """Read a pickled object representation from the open file. - - Return the reconstituted object hierarchy specified in the file. - """ - + ) -> None: ... + def load(self) -> Any: ... def find_class(self, module: str, name: str) -> Any: ... 
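Usage sketch for the protocol-5 out-of-band machinery whose signatures appear above (buffer_callback on the pickler, buffers on the unpickler, and PickleBuffer). The ZeroCopyBytes class is a hypothetical example type modeled on the pattern shown in the pickle documentation, not something defined by this stub.

import pickle
from pickle import PickleBuffer

class ZeroCopyBytes(bytearray):
    # Opt in to out-of-band pickling when protocol 5 is in use.
    def __reduce_ex__(self, protocol):
        if protocol >= 5:
            return type(self)._rebuild, (PickleBuffer(self),), None
        return type(self)._rebuild, (bytes(self),)

    @classmethod
    def _rebuild(cls, obj):
        with memoryview(obj) as m:
            return cls(m)

blob = ZeroCopyBytes(b"spam" * 4)
out_of_band: list[PickleBuffer] = []
# append() returns None (falsy), so each buffer stays out of the pickle stream.
data = pickle.dumps(blob, protocol=5, buffer_callback=out_of_band.append)
# The same buffers must be supplied, in order, when loading.
restored = pickle.loads(data, buffers=out_of_band)
assert restored == blob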
def persistent_load(self, pid: Any) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi index e5497b1ecb2f6..8bbfaba31b671 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi @@ -1,15 +1,3 @@ -""" "Executable documentation" for the pickle module. - -Extensive comments about the pickle protocols and pickle-machine opcodes -can be found here. Some functions meant for external use: - -genops(pickle) - Generate all the opcodes in a pickle, as (opcode, arg, position) triples. - -dis(pickle, out=None, memo=None, indentlevel=4) - Print a symbolic disassembly of a pickle. -""" - import sys from collections.abc import Callable, Iterator, MutableMapping from typing import IO, Any, Final @@ -34,114 +22,33 @@ class ArgumentDescriptor: doc: str def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... -def read_uint1(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_uint1(io.BytesIO(b'\\xff')) - 255 - """ +def read_uint1(f: IO[bytes]) -> int: ... uint1: ArgumentDescriptor -def read_uint2(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_uint2(io.BytesIO(b'\\xff\\x00')) - 255 - >>> read_uint2(io.BytesIO(b'\\xff\\xff')) - 65535 - """ +def read_uint2(f: IO[bytes]) -> int: ... uint2: ArgumentDescriptor -def read_int4(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_int4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) - 255 - >>> read_int4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == -(2**31) - True - """ +def read_int4(f: IO[bytes]) -> int: ... int4: ArgumentDescriptor -def read_uint4(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_uint4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) - 255 - >>> read_uint4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == 2**31 - True - """ +def read_uint4(f: IO[bytes]) -> int: ... uint4: ArgumentDescriptor -def read_uint8(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_uint8(io.BytesIO(b'\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00')) - 255 - >>> read_uint8(io.BytesIO(b'\\xff' * 8)) == 2**64-1 - True - """ +def read_uint8(f: IO[bytes]) -> int: ... uint8: ArgumentDescriptor if sys.version_info >= (3, 12): - def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1") -> bytes | str: - """ - >>> import io - >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) - 'abcd' - - >>> read_stringnl(io.BytesIO(b"\\n")) - Traceback (most recent call last): - ... - ValueError: no string quotes around b'' - - >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) - '' - - >>> read_stringnl(io.BytesIO(b"''\\n")) - '' - - >>> read_stringnl(io.BytesIO(b'"abcd"')) - Traceback (most recent call last): - ... - ValueError: no newline found when trying to read stringnl - - Embedded escapes are undone in the result. - >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) - 'a\\n\\\\b\\x00c\\td' - """ + def read_stringnl( + f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1" + ) -> bytes | str: ... else: - def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: - """ - >>> import io - >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) - 'abcd' - - >>> read_stringnl(io.BytesIO(b"\\n")) - Traceback (most recent call last): - ... 
- ValueError: no string quotes around b'' - - >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) - '' - - >>> read_stringnl(io.BytesIO(b"''\\n")) - '' - - >>> read_stringnl(io.BytesIO(b'"abcd"')) - Traceback (most recent call last): - ... - ValueError: no newline found when trying to read stringnl - - Embedded escapes are undone in the result. - >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) - 'a\\n\\\\b\\x00c\\td' - """ + def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... stringnl: ArgumentDescriptor @@ -149,230 +56,65 @@ def read_stringnl_noescape(f: IO[bytes]) -> str: ... stringnl_noescape: ArgumentDescriptor -def read_stringnl_noescape_pair(f: IO[bytes]) -> str: - """ - >>> import io - >>> read_stringnl_noescape_pair(io.BytesIO(b"Queue\\nEmpty\\njunk")) - 'Queue Empty' - """ +def read_stringnl_noescape_pair(f: IO[bytes]) -> str: ... stringnl_noescape_pair: ArgumentDescriptor -def read_string1(f: IO[bytes]) -> str: - """ - >>> import io - >>> read_string1(io.BytesIO(b"\\x00")) - '' - >>> read_string1(io.BytesIO(b"\\x03abcdef")) - 'abc' - """ +def read_string1(f: IO[bytes]) -> str: ... string1: ArgumentDescriptor -def read_string4(f: IO[bytes]) -> str: - """ - >>> import io - >>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) - '' - >>> read_string4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) - 'abc' - >>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) - Traceback (most recent call last): - ... - ValueError: expected 50331648 bytes in a string4, but only 6 remain - """ +def read_string4(f: IO[bytes]) -> str: ... string4: ArgumentDescriptor -def read_bytes1(f: IO[bytes]) -> bytes: - """ - >>> import io - >>> read_bytes1(io.BytesIO(b"\\x00")) - b'' - >>> read_bytes1(io.BytesIO(b"\\x03abcdef")) - b'abc' - """ +def read_bytes1(f: IO[bytes]) -> bytes: ... bytes1: ArgumentDescriptor -def read_bytes4(f: IO[bytes]) -> bytes: - """ - >>> import io - >>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) - b'' - >>> read_bytes4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) - b'abc' - >>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) - Traceback (most recent call last): - ... - ValueError: expected 50331648 bytes in a bytes4, but only 6 remain - """ +def read_bytes4(f: IO[bytes]) -> bytes: ... bytes4: ArgumentDescriptor -def read_bytes8(f: IO[bytes]) -> bytes: - """ - >>> import io, struct, sys - >>> read_bytes8(io.BytesIO(b"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00abc")) - b'' - >>> read_bytes8(io.BytesIO(b"\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00abcdef")) - b'abc' - >>> bigsize8 = struct.pack(">> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS - Traceback (most recent call last): - ... - ValueError: expected ... bytes in a bytes8, but only 6 remain - """ +def read_bytes8(f: IO[bytes]) -> bytes: ... bytes8: ArgumentDescriptor -def read_unicodestringnl(f: IO[bytes]) -> str: - """ - >>> import io - >>> read_unicodestringnl(io.BytesIO(b"abc\\\\uabcd\\njunk")) == 'abc\\uabcd' - True - """ +def read_unicodestringnl(f: IO[bytes]) -> str: ... unicodestringnl: ArgumentDescriptor -def read_unicodestring1(f: IO[bytes]) -> str: - """ - >>> import io - >>> s = 'abcd\\uabcd' - >>> enc = s.encode('utf-8') - >>> enc - b'abcd\\xea\\xaf\\x8d' - >>> n = bytes([len(enc)]) # little-endian 1-byte length - >>> t = read_unicodestring1(io.BytesIO(n + enc + b'junk')) - >>> s == t - True - - >>> read_unicodestring1(io.BytesIO(n + enc[:-1])) - Traceback (most recent call last): - ... 
- ValueError: expected 7 bytes in a unicodestring1, but only 6 remain - """ +def read_unicodestring1(f: IO[bytes]) -> str: ... unicodestring1: ArgumentDescriptor -def read_unicodestring4(f: IO[bytes]) -> str: - """ - >>> import io - >>> s = 'abcd\\uabcd' - >>> enc = s.encode('utf-8') - >>> enc - b'abcd\\xea\\xaf\\x8d' - >>> n = bytes([len(enc), 0, 0, 0]) # little-endian 4-byte length - >>> t = read_unicodestring4(io.BytesIO(n + enc + b'junk')) - >>> s == t - True - - >>> read_unicodestring4(io.BytesIO(n + enc[:-1])) - Traceback (most recent call last): - ... - ValueError: expected 7 bytes in a unicodestring4, but only 6 remain - """ +def read_unicodestring4(f: IO[bytes]) -> str: ... unicodestring4: ArgumentDescriptor -def read_unicodestring8(f: IO[bytes]) -> str: - """ - >>> import io - >>> s = 'abcd\\uabcd' - >>> enc = s.encode('utf-8') - >>> enc - b'abcd\\xea\\xaf\\x8d' - >>> n = bytes([len(enc)]) + b'\\0' * 7 # little-endian 8-byte length - >>> t = read_unicodestring8(io.BytesIO(n + enc + b'junk')) - >>> s == t - True - - >>> read_unicodestring8(io.BytesIO(n + enc[:-1])) - Traceback (most recent call last): - ... - ValueError: expected 7 bytes in a unicodestring8, but only 6 remain - """ +def read_unicodestring8(f: IO[bytes]) -> str: ... unicodestring8: ArgumentDescriptor -def read_decimalnl_short(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_decimalnl_short(io.BytesIO(b"1234\\n56")) - 1234 - - >>> read_decimalnl_short(io.BytesIO(b"1234L\\n56")) - Traceback (most recent call last): - ... - ValueError: invalid literal for int() with base 10: b'1234L' - """ - -def read_decimalnl_long(f: IO[bytes]) -> int: - """ - >>> import io - - >>> read_decimalnl_long(io.BytesIO(b"1234L\\n56")) - 1234 - - >>> read_decimalnl_long(io.BytesIO(b"123456789012345678901234L\\n6")) - 123456789012345678901234 - """ +def read_decimalnl_short(f: IO[bytes]) -> int: ... +def read_decimalnl_long(f: IO[bytes]) -> int: ... decimalnl_short: ArgumentDescriptor decimalnl_long: ArgumentDescriptor -def read_floatnl(f: IO[bytes]) -> float: - """ - >>> import io - >>> read_floatnl(io.BytesIO(b"-1.25\\n6")) - -1.25 - """ +def read_floatnl(f: IO[bytes]) -> float: ... floatnl: ArgumentDescriptor -def read_float8(f: IO[bytes]) -> float: - """ - >>> import io, struct - >>> raw = struct.pack(">d", -1.25) - >>> raw - b'\\xbf\\xf4\\x00\\x00\\x00\\x00\\x00\\x00' - >>> read_float8(io.BytesIO(raw + b"\\n")) - -1.25 - """ +def read_float8(f: IO[bytes]) -> float: ... float8: ArgumentDescriptor -def read_long1(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_long1(io.BytesIO(b"\\x00")) - 0 - >>> read_long1(io.BytesIO(b"\\x02\\xff\\x00")) - 255 - >>> read_long1(io.BytesIO(b"\\x02\\xff\\x7f")) - 32767 - >>> read_long1(io.BytesIO(b"\\x02\\x00\\xff")) - -256 - >>> read_long1(io.BytesIO(b"\\x02\\x00\\x80")) - -32768 - """ +def read_long1(f: IO[bytes]) -> int: ... long1: ArgumentDescriptor -def read_long4(f: IO[bytes]) -> int: - """ - >>> import io - >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x00")) - 255 - >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x7f")) - 32767 - >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\xff")) - -256 - >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\x80")) - -32768 - >>> read_long1(io.BytesIO(b"\\x00\\x00\\x00\\x00")) - 0 - """ +def read_long4(f: IO[bytes]) -> int: ... 
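For context, a short sketch (illustration only): the argument readers stubbed above are internal helpers behind pickletools' public genops(), optimize() and dis() entry points, which follow below.

import io
import pickle
import pickletools

data = pickle.dumps({"answer": 42}, protocol=4)

# genops() yields (OpcodeInfo, decoded_argument, byte_position) triples.
for opcode, arg, pos in pickletools.genops(data):
    print(pos, opcode.name, arg)

# optimize() strips unused PUT opcodes; dis() prints a symbolic disassembly.
pickletools.dis(pickletools.optimize(data), out=io.StringIO(), annotate=30)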
long4: ArgumentDescriptor @@ -424,74 +166,12 @@ class OpcodeInfo: opcodes: list[OpcodeInfo] -def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: - """Generate all the opcodes in a pickle. - - 'pickle' is a file-like object, or string, containing the pickle. - - Each opcode in the pickle is generated, from the current pickle position, - stopping after a STOP opcode is delivered. A triple is generated for - each opcode: - - opcode, arg, pos - - opcode is an OpcodeInfo record, describing the current opcode. - - If the opcode has an argument embedded in the pickle, arg is its decoded - value, as a Python object. If the opcode doesn't have an argument, arg - is None. - - If the pickle has a tell() method, pos was the value of pickle.tell() - before reading the current opcode. If the pickle is a bytes object, - it's wrapped in a BytesIO object, and the latter's tell() result is - used. Else (the pickle doesn't have a tell(), and it's not obvious how - to query its current position) pos is None. - """ - -def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: - """Optimize a pickle string by removing unused PUT opcodes""" - +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... def dis( pickle: bytes | bytearray | IO[bytes], out: IO[str] | None = None, memo: MutableMapping[int, Any] | None = None, indentlevel: int = 4, annotate: int = 0, -) -> None: - """Produce a symbolic disassembly of a pickle. - - 'pickle' is a file-like object, or string, containing a (at least one) - pickle. The pickle is disassembled from the current position, through - the first STOP opcode encountered. - - Optional arg 'out' is a file-like object to which the disassembly is - printed. It defaults to sys.stdout. - - Optional arg 'memo' is a Python dict, used as the pickle's memo. It - may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes. - Passing the same memo object to another dis() call then allows disassembly - to proceed across multiple pickles that were all created by the same - pickler with the same memo. Ordinarily you don't need to worry about this. - - Optional arg 'indentlevel' is the number of blanks by which to indent - a new MARK level. It defaults to 4. - - Optional arg 'annotate' if nonzero instructs dis() to add short - description of the opcode on each line of disassembled output. - The value given to 'annotate' must be an integer and is used as a - hint for the column where annotation should start. The default - value is 0, meaning no annotations. - - In addition to printing the disassembly, some sanity checks are made: - - + All embedded opcode arguments "make sense". - - + Explicit and implicit pop operations have enough items on the stack. - - + When an opcode implicitly refers to a markobject, a markobject is - actually on the stack. - - + A memo entry isn't referenced before it's defined. - - + The markobject isn't stored in the memo. - """ +) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi index 360cb519ff272..fe680bfddf5f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi @@ -1,93 +1,16 @@ -"""Conversion pipeline templates. 
- -The problem: ------------- - -Suppose you have some data that you want to convert to another format, -such as from GIF image format to PPM image format. Maybe the -conversion involves several steps (e.g. piping it through compress or -uuencode). Some of the conversion steps may require that their input -is a disk file, others may be able to read standard input; similar for -their output. The input to the entire conversion may also be read -from a disk file or from an open file, and similar for its output. - -The module lets you construct a pipeline template by sticking one or -more conversion steps together. It will take care of creating and -removing temporary files if they are necessary to hold intermediate -data. You can then use the template to do conversions from many -different sources to many different destinations. The temporary -file names used are different each time the template is used. - -The templates are objects so you can create templates for many -different conversion steps and store them in a dictionary, for -instance. - - -Directions: ------------ - -To create a template: - t = Template() - -To add a conversion step to a template: - t.append(command, kind) -where kind is a string of two characters: the first is '-' if the -command reads its standard input or 'f' if it requires a file; the -second likewise for the output. The command must be valid /bin/sh -syntax. If input or output files are required, they are passed as -$IN and $OUT; otherwise, it must be possible to use the command in -a pipeline. - -To add a conversion step at the beginning: - t.prepend(command, kind) - -To convert a file to another file using a template: - sts = t.copy(infile, outfile) -If infile or outfile are the empty string, standard input is read or -standard output is written, respectively. The return value is the -exit status of the conversion pipeline. - -To open a file for reading or writing through a conversion pipeline: - fp = t.open(file, mode) -where mode is 'r' to read the file, or 'w' to write it -- just like -for the built-in function open() or for os.popen(). - -To create a new template object initialized to a given one: - t2 = t.clone() -""" - import os __all__ = ["Template"] class Template: - """Class representing a pipeline template.""" - - def reset(self) -> None: - """t.reset() restores a pipeline template to its initial state.""" - - def clone(self) -> Template: - """t.clone() returns a new pipeline template with identical - initial state as the current one. - """ - - def debug(self, flag: bool) -> None: - """t.debug(flag) turns debugging on or off.""" - - def append(self, cmd: str, kind: str) -> None: - """t.append(cmd, kind) adds a new step at the end.""" - - def prepend(self, cmd: str, kind: str) -> None: - """t.prepend(cmd, kind) adds a new step at the front.""" - - def open(self, file: str, rw: str) -> os._wrap_close: - """t.open(file, rw) returns a pipe or file object open for - reading or writing; the file is the other end of the pipeline. - """ - + def reset(self) -> None: ... + def clone(self) -> Template: ... + def debug(self, flag: bool) -> None: ... + def append(self, cmd: str, kind: str) -> None: ... + def prepend(self, cmd: str, kind: str) -> None: ... + def open(self, file: str, rw: str) -> os._wrap_close: ... def copy(self, infile: str, outfile: str) -> int: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. -def quote(s: str) -> str: - """Return a shell-escaped version of the string *s*.""" +def quote(s: str) -> str: ... 
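A minimal sketch of the Template API stubbed above, assuming a POSIX shell with tr(1) available and a throwaway path under the system temp directory (pipes itself was removed in Python 3.13, so this targets older versions).

import pathlib
import pipes
import tempfile

t = pipes.Template()
t.append("tr a-z A-Z", "--")            # "--": the step reads stdin and writes stdout

dest = pathlib.Path(tempfile.gettempdir()) / "pipes_demo.txt"
f = t.open(str(dest), "w")              # write end of the conversion pipeline
f.write("hello, pipeline\n")
f.close()                               # closing waits for the shell pipeline to finish
print(dest.read_text())                 # "HELLO, PIPELINE"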
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi index 955d0862fa4ea..7c70dcc4c5ab1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi @@ -1,5 +1,3 @@ -"""Utilities to support packages.""" - import sys from _typeshed import StrOrBytesPath, SupportsRead from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol @@ -25,219 +23,37 @@ if sys.version_info < (3, 12): _PathT = TypeVar("_PathT", bound=Iterable[str]) class ModuleInfo(NamedTuple): - """A namedtuple with minimal info about a module.""" - module_finder: MetaPathFinderProtocol | PathEntryFinderProtocol name: str ispkg: bool -def extend_path(path: _PathT, name: str) -> _PathT: - """Extend a package's path. - - Intended use is to place the following code in a package's __init__.py: - - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) - - For each directory on sys.path that has a subdirectory that - matches the package name, add the subdirectory to the package's - __path__. This is useful if one wants to distribute different - parts of a single logical package as multiple directories. - - It also looks for *.pkg files beginning where * matches the name - argument. This feature is similar to *.pth files (see site.py), - except that it doesn't special-case lines starting with 'import'. - A *.pkg file is trusted at face value: apart from checking for - duplicates, all entries found in a *.pkg file are added to the - path, regardless of whether they are exist the filesystem. (This - is a feature.) - - If the input path is not a list (as is the case for frozen - packages) it is returned unchanged. The input path is not - modified; an extended copy is returned. Items are only appended - to the copy at the end. - - It is assumed that sys.path is a sequence. Items of sys.path that - are not (unicode or 8-bit) strings referring to existing - directories are ignored. Unicode items of sys.path that cause - errors when used as filenames may cause this function to raise an - exception (in line with os.path.isdir() behavior). - """ +def extend_path(path: _PathT, name: str) -> _PathT: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpImporter: - """PEP 302 Finder that wraps Python's "classic" import algorithm - - ImpImporter(dirname) produces a PEP 302 finder that searches that - directory. ImpImporter(None) produces a PEP 302 finder that searches - the current sys.path, plus any modules that are frozen or built-in. - - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. - """ - def __init__(self, path: StrOrBytesPath | None = None) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpLoader: - """PEP 302 Loader that wraps Python's "classic" import algorithm""" - def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14. 
Use `importlib.util.find_spec()` instead.") - def find_loader(fullname: str) -> LoaderProtocol | None: - """Find a "loader" object for fullname - - This is a backwards compatibility wrapper around - importlib.util.find_spec that converts most failures to ImportError - and only returns the loader rather than the full spec - """ - + def find_loader(fullname: str) -> LoaderProtocol | None: ... @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") - def get_loader(module_or_name: str) -> LoaderProtocol | None: - """Get a "loader" object for module_or_name - - Returns None if the module cannot be found or imported. - If the named module is not already imported, its containing package - (if any) is imported, in order to establish the package __path__. - """ + def get_loader(module_or_name: str) -> LoaderProtocol | None: ... else: - def find_loader(fullname: str) -> LoaderProtocol | None: - """Find a "loader" object for fullname - - This is a backwards compatibility wrapper around - importlib.util.find_spec that converts most failures to ImportError - and only returns the loader rather than the full spec - """ - - def get_loader(module_or_name: str) -> LoaderProtocol | None: - """Get a "loader" object for module_or_name - - Returns None if the module cannot be found or imported. - If the named module is not already imported, its containing package - (if any) is imported, in order to establish the package __path__. - """ - -def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: - """Retrieve a finder for the given path item - - The returned finder is cached in sys.path_importer_cache - if it was newly created by a path hook. - - The cache (or part of it) can be cleared manually if a - rescan of sys.path_hooks is necessary. - """ - -def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: - """Yield finders for the given module name - - If fullname contains a '.', the finders will be for the package - containing fullname, otherwise they will be all registered top level - finders (i.e. those on both sys.meta_path and sys.path_hooks). - - If the named module is in a package, that package is imported as a side - effect of invoking this function. - - If no module name is specified, all top level finders are produced. - """ - -def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: - """Yields ModuleInfo for all submodules on path, - or, if path is None, all top-level modules on sys.path. - - 'path' should be either None or a list of paths to look for - modules in. - - 'prefix' is a string to output on the front of every module name - on output. - """ + def find_loader(fullname: str) -> LoaderProtocol | None: ... + def get_loader(module_or_name: str) -> LoaderProtocol | None: ... +def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: ... +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ... +def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None -) -> Iterator[ModuleInfo]: - """Yields ModuleInfo for all modules recursively - on path, or, if path is None, all accessible modules. 
- - 'path' should be either None or a list of paths to look for - modules in. - - 'prefix' is a string to output on the front of every module name - on output. - - Note that this function must import all *packages* (NOT all - modules!) on the given path, in order to access the __path__ - attribute to find submodules. - - 'onerror' is a function which gets called with one argument (the - name of the package which was being imported) if any exception - occurs while trying to import a package. If no onerror function is - supplied, ImportErrors are caught and ignored, while all other - exceptions are propagated, terminating the search. - - Examples: - - # list all modules python can access - walk_packages() - - # list all submodules of ctypes - walk_packages(ctypes.__path__, ctypes.__name__+'.') - """ - -def get_data(package: str, resource: str) -> bytes | None: - """Get a resource from a package. - - This is a wrapper round the PEP 302 loader get_data API. The package - argument should be the name of a package, in standard module format - (foo.bar). The resource argument should be in the form of a relative - filename, using '/' as the path separator. The parent directory name '..' - is not allowed, and nor is a rooted name (starting with a '/'). - - The function returns a binary string, which is the contents of the - specified resource. - - For packages located in the filesystem, which have already been imported, - this is the rough equivalent of - - d = os.path.dirname(sys.modules[package].__file__) - data = open(os.path.join(d, resource), 'rb').read() - - If the package cannot be located or loaded, or it uses a PEP 302 loader - which does not support get_data(), then None is returned. - """ - -def resolve_name(name: str) -> Any: - """ - Resolve a name to an object. - - It is expected that `name` will be a string in one of the following - formats, where W is shorthand for a valid Python identifier and dot stands - for a literal period in these pseudo-regexes: - - W(.W)* - W(.W)*:(W(.W)*)? - - The first form is intended for backward compatibility only. It assumes that - some part of the dotted name is a package, and the rest is an object - somewhere within that package, possibly nested inside other objects. - Because the place where the package stops and the object hierarchy starts - can't be inferred by inspection, repeated attempts to import must be done - with this form. - - In the second form, the caller makes the division point clear through the - provision of a single colon: the dotted name to the left of the colon is a - package to be imported, and the dotted name to the right is the object - hierarchy within that package. Only one import is needed in this form. If - it ends with the colon, then a module object is returned. - - The function will return an object (which might be a module), or raise one - of the following exceptions: - - ValueError - if `name` isn't in a recognised format - ImportError - if an import failed when it shouldn't have - AttributeError - if a failure occurred when traversing the object hierarchy - within the imported package to get to the desired object. - """ +) -> Iterator[ModuleInfo]: ... +def get_data(package: str, resource: str) -> bytes | None: ... +def resolve_name(name: str) -> Any: ... 
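A quick usage sketch for the module-discovery helpers stubbed above (logging is used only as a convenient stdlib package to walk; any package would do).

import pkgutil

# Top-level modules and packages visible on sys.path.
for info in pkgutil.iter_modules():
    print(info.name, info.ispkg)

# Recursive walk of one package; this imports subpackages to read __path__.
import logging
for info in pkgutil.walk_packages(logging.__path__, prefix="logging."):
    print(info.name)

# The "pkg.mod:object" form resolves with a single import.
handler_cls = pkgutil.resolve_name("logging.handlers:RotatingFileHandler")
print(handler_cls)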
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi index cc2e1aa5694ec..69d702bb155cd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi @@ -1,44 +1,14 @@ -"""This module tries to retrieve as much platform-identifying data as -possible. It makes this information available via function APIs. - -If called from the command line, it prints the platform -information concatenated as single string to stdout. The output -format is usable as part of a filename. - -""" - import sys from typing import NamedTuple, type_check_only from typing_extensions import Self, deprecated, disjoint_base -def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: - """Tries to determine the libc version that the file executable - (which defaults to the Python interpreter) is linked against. - - Returns a tuple of strings (lib,version) which default to the - given parameters in case the lookup fails. - - Note that the function has intimate knowledge of how different - libc versions add symbols to the executable and thus is probably - only usable for executables compiled using gcc. - - The file is read and scanned in chunks of chunksize bytes. - - """ - +def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... def win32_edition() -> str: ... def win32_is_iot() -> bool: ... def mac_ver( release: str = "", versioninfo: tuple[str, str, str] = ("", "", ""), machine: str = "" -) -> tuple[str, tuple[str, str, str], str]: - """Get macOS version information and return it as tuple (release, - versioninfo, machine) with versioninfo being a tuple (version, - dev_stage, non_release_version). - - Entries which cannot be determined are set to the parameter values - which default to ''. All tuple entries are strings. - """ +) -> tuple[str, tuple[str, str, str], str]: ... if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -47,17 +17,7 @@ if sys.version_info >= (3, 13): vendor: str = "", vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", ""), - ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: - """Version interface for Jython. - - Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being - a tuple (vm_name, vm_release, vm_vendor) and osinfo being a - tuple (os_name, os_version, os_arch). - - Values which cannot be determined are set to the defaults - given as parameters (which all default to ''). - - """ + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... else: def java_ver( @@ -65,47 +25,10 @@ else: vendor: str = "", vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", ""), - ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: - """Version interface for Jython. - - Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being - a tuple (vm_name, vm_release, vm_vendor) and osinfo being a - tuple (os_name, os_version, os_arch). - - Values which cannot be determined are set to the defaults - given as parameters (which all default to ''). 
- - """ - -def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: - """Returns (system, release, version) aliased to common - marketing names used for some systems. - - It also does some reordering of the information in some cases - where it would otherwise cause confusion. - - """ - -def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: - """Queries the given executable (defaults to the Python interpreter - binary) for various architecture information. + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... - Returns a tuple (bits, linkage) which contains information about - the bit architecture and the linkage format used for the - executable. Both values are returned as strings. - - Values that cannot be determined are returned as given by the - parameter presets. If bits is given as '', the sizeof(pointer) - (or sizeof(long) on Python version < 1.5.2) is used as - indicator for the supported pointer size. - - The function relies on the system's "file" command to do the - actual work. This is available on most if not all Unix - platforms. On some non-Unix platforms where the "file" command - does not exist and the executable is set to the Python interpreter - binary defaults from _default_architecture are used. - - """ +def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... # This class is not exposed. It calls itself platform.uname_result_base. # At runtime it only has 5 fields. @@ -125,186 +48,43 @@ class _uname_result_base(NamedTuple): # is lazily evaluated rather than being passed in to the constructor. if sys.version_info >= (3, 12): class uname_result(_uname_result_base): - """ - A uname_result that's largely compatible with a - simple namedtuple except that 'processor' is - resolved late and cached to avoid calling "uname" - except when needed. - """ - __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] - def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: - """Create new instance of uname_result_base(system, node, release, version, machine)""" - + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... @property def processor(self) -> str: ... else: @disjoint_base class uname_result(_uname_result_base): - """ - A uname_result that's largely compatible with a - simple namedtuple except that 'processor' is - resolved late and cached to avoid calling "uname" - except when needed. - """ - if sys.version_info >= (3, 10): __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] - def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: - """Create new instance of uname_result_base(system, node, release, version, machine)""" - + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... @property def processor(self) -> str: ... -def uname() -> uname_result: - """Fairly portable uname interface. Returns a tuple - of strings (system, node, release, version, machine, processor) - identifying the underlying platform. - - Note that unlike the os.uname function this also returns - possible processor information as an additional tuple entry. - - Entries which cannot be determined are set to ''. 
- - """ - -def system() -> str: - """Returns the system/OS name, e.g. 'Linux', 'Windows' or 'Java'. - - An empty string is returned if the value cannot be determined. - - """ - -def node() -> str: - """Returns the computer's network name (which may not be fully - qualified) - - An empty string is returned if the value cannot be determined. - - """ - -def release() -> str: - """Returns the system's release, e.g. '2.2.0' or 'NT' - - An empty string is returned if the value cannot be determined. - - """ - -def version() -> str: - """Returns the system's release version, e.g. '#3 on degas' - - An empty string is returned if the value cannot be determined. - - """ - -def machine() -> str: - """Returns the machine type, e.g. 'i386' - - An empty string is returned if the value cannot be determined. - - """ - -def processor() -> str: - """Returns the (true) processor name, e.g. 'amdk6' - - An empty string is returned if the value cannot be - determined. Note that many platforms do not provide this - information or simply return the same value as for machine(), - e.g. NetBSD does this. - - """ - -def python_implementation() -> str: - """Returns a string identifying the Python implementation. - - Currently, the following implementations are identified: - 'CPython' (C implementation of Python), - 'Jython' (Java implementation of Python), - 'PyPy' (Python implementation of Python). - - """ - -def python_version() -> str: - """Returns the Python version as string 'major.minor.patchlevel' - - Note that unlike the Python sys.version, the returned value - will always include the patchlevel (it defaults to 0). - - """ - -def python_version_tuple() -> tuple[str, str, str]: - """Returns the Python version as tuple (major, minor, patchlevel) - of strings. - - Note that unlike the Python sys.version, the returned value - will always include the patchlevel (it defaults to 0). - - """ - -def python_branch() -> str: - """Returns a string identifying the Python implementation - branch. - - For CPython this is the SCM branch from which the - Python binary was built. - - If not available, an empty string is returned. - - """ - -def python_revision() -> str: - """Returns a string identifying the Python implementation - revision. - - For CPython this is the SCM revision from which the - Python binary was built. - - If not available, an empty string is returned. - - """ - -def python_build() -> tuple[str, str]: - """Returns a tuple (buildno, builddate) stating the Python - build number and date as strings. - - """ - -def python_compiler() -> str: - """Returns a string identifying the compiler used for compiling - Python. - - """ - -def platform(aliased: bool = False, terse: bool = False) -> str: - """Returns a single string identifying the underlying platform - with as much useful information as possible (but no more :). - - The output is intended to be human readable rather than - machine parseable. It may look different on different - platforms and this is intended. - - If "aliased" is true, the function will use aliases for - various platforms that report system names which differ from - their common names, e.g. SunOS will be reported as - Solaris. The system_alias() function is used to implement - this. - - Setting terse to true causes the function to return only the - absolute minimum information needed to identify the platform. - - """ +def uname() -> uname_result: ... +def system() -> str: ... +def node() -> str: ... +def release() -> str: ... +def version() -> str: ... +def machine() -> str: ... 
+def processor() -> str: ... +def python_implementation() -> str: ... +def python_version() -> str: ... +def python_version_tuple() -> tuple[str, str, str]: ... +def python_branch() -> str: ... +def python_revision() -> str: ... +def python_build() -> tuple[str, str]: ... +def python_compiler() -> str: ... +def platform(aliased: bool = False, terse: bool = False) -> str: ... if sys.version_info >= (3, 10): - def freedesktop_os_release() -> dict[str, str]: - """Return operation system identification from freedesktop.org os-release""" + def freedesktop_os_release() -> dict[str, str]: ... if sys.version_info >= (3, 13): class AndroidVer(NamedTuple): - """AndroidVer(release, api_level, manufacturer, model, device, is_emulator)""" - release: str api_level: int manufacturer: str @@ -313,8 +93,6 @@ if sys.version_info >= (3, 13): is_emulator: bool class IOSVersionInfo(NamedTuple): - """IOSVersionInfo(system, release, model, is_simulator)""" - system: str release: str model: str @@ -328,14 +106,7 @@ if sys.version_info >= (3, 13): device: str = "", is_emulator: bool = False, ) -> AndroidVer: ... - def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: - """Get iOS version information, and return it as a namedtuple: - (system, release, model, is_simulator). - - If values can't be determined, they are set to values provided as - parameters. - """ + def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... if sys.version_info >= (3, 14): - def invalidate_caches() -> None: - """Invalidate the cached results.""" + def invalidate_caches() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi index 845d5a7d2d4bf..dc3247ee47fb8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi @@ -1,60 +1,3 @@ -"""plistlib.py -- a tool to generate and parse MacOSX .plist files. - -The property list (.plist) file format is a simple XML pickle supporting -basic object types, like dictionaries, lists, numbers and strings. -Usually the top level object is a dictionary. - -To write out a plist file, use the dump(value, file) -function. 'value' is the top level object, 'file' is -a (writable) file object. - -To parse a plist from a file, use the load(file) function, -with a (readable) file object as the only argument. It -returns the top level object (again, usually a dictionary). - -To work with plist data in bytes objects, you can use loads() -and dumps(). - -Values can be strings, integers, floats, booleans, tuples, lists, -dictionaries (but only with string keys), Data, bytes, bytearray, or -datetime.datetime objects. 
- -Generate Plist example: - - import datetime - import plistlib - - pl = dict( - aString = "Doodah", - aList = ["A", "B", 12, 32.1, [1, 2, 3]], - aFloat = 0.1, - anInt = 728, - aDict = dict( - anotherString = "", - aThirdString = "M\\xe4ssig, Ma\\xdf", - aTrueValue = True, - aFalseValue = False, - ), - someData = b"", - someMoreData = b"" * 10, - aDate = datetime.datetime.now() - ) - print(plistlib.dumps(pl).decode()) - -Parse Plist example: - - import plistlib - - plist = b''' - - foo - bar - - ''' - pl = plistlib.loads(plist) - print(pl["foo"]) -""" - import sys from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping @@ -66,8 +9,6 @@ from typing_extensions import Self __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] class PlistFormat(Enum): - """An enumeration.""" - FMT_XML = 1 FMT_BINARY = 2 @@ -80,32 +21,20 @@ if sys.version_info >= (3, 13): fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ..., aware_datetime: bool = False, - ) -> Any: - """Read a .plist file. 'fp' should be a readable and binary file object. - Return the unpacked root object (which usually is a dictionary). - """ - + ) -> Any: ... def loads( value: ReadableBuffer | str, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ..., aware_datetime: bool = False, - ) -> Any: - """Read a .plist file from a bytes object. - Return the unpacked root object (which usually is a dictionary). - """ + ) -> Any: ... else: - def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: - """Read a .plist file. 'fp' should be a readable and binary file object. - Return the unpacked root object (which usually is a dictionary). - """ - - def loads(value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: - """Read a .plist file from a bytes object. - Return the unpacked root object (which usually is a dictionary). - """ + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... if sys.version_info >= (3, 13): def dump( @@ -116,11 +45,7 @@ if sys.version_info >= (3, 13): sort_keys: bool = True, skipkeys: bool = False, aware_datetime: bool = False, - ) -> None: - """Write 'value' to a .plist file. 'fp' should be a writable, - binary file object. - """ - + ) -> None: ... def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, @@ -128,8 +53,7 @@ if sys.version_info >= (3, 13): skipkeys: bool = False, sort_keys: bool = True, aware_datetime: bool = False, - ) -> bytes: - """Return a bytes object with the contents for a .plist file.""" + ) -> bytes: ... else: def dump( @@ -139,19 +63,14 @@ else: fmt: PlistFormat = ..., sort_keys: bool = True, skipkeys: bool = False, - ) -> None: - """Write 'value' to a .plist file. 'fp' should be a writable, - binary file object. - """ - + ) -> None: ... def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., skipkeys: bool = False, sort_keys: bool = True, - ) -> bytes: - """Return a bytes object with the contents for a .plist file.""" + ) -> bytes: ... 
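A round-trip sketch for the load/dump signatures above, reusing the sample data from the module docstring being removed here.

import datetime
import plistlib

pl = {
    "aString": "Doodah",
    "aList": ["A", "B", 12, 32.1, [1, 2, 3]],
    "aDate": datetime.datetime(2025, 1, 1),
}
blob = plistlib.dumps(pl, fmt=plistlib.FMT_BINARY, sort_keys=True)
assert plistlib.loads(blob) == pl
print(plistlib.dumps(pl).decode())      # default FMT_XML output, human-readable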
class UID: data: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi index c5c59d710f586..9ff2b764aeb68 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi @@ -1,8 +1,3 @@ -"""A POP3 client class. - -Based on the J. Myers POP3 draft, Jan. 96 -""" - import socket import ssl import sys @@ -25,49 +20,6 @@ CRLF: Final = b"\r\n" HAVE_SSL: Final[bool] class POP3: - """This class supports both the minimal and optional command sets. - Arguments can be strings or integers (where appropriate) - (e.g.: retr(1) and retr('1') both work equally well. - - Minimal Command Set: - USER name user(name) - PASS string pass_(string) - STAT stat() - LIST [msg] list(msg = None) - RETR msg retr(msg) - DELE msg dele(msg) - NOOP noop() - RSET rset() - QUIT quit() - - Optional Commands (some servers support these): - RPOP name rpop(name) - APOP name digest apop(name, digest) - TOP msg n top(msg, n) - UIDL [msg] uidl(msg = None) - CAPA capa() - STLS stls() - UTF8 utf8() - - Raises one exception: 'error_proto'. - - Instantiate with: - POP3(hostname, port=110) - - NB: the POP protocol locks the mailbox from user - authorization until QUIT, so be sure to get in, suck - the messages, and quit, each time you access the - mailbox. - - POP is a line-based protocol, which means large mail - messages consume lots of python cycles reading them - line-by-line. - - If it's available on your mail server, use IMAP4 - instead, it doesn't suffer from the two problems - above. - """ - encoding: str host: str port: int @@ -77,140 +29,34 @@ class POP3: def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... - def user(self, user: str) -> bytes: - """Send user name, return response - - (should indicate password required). - """ - - def pass_(self, pswd: str) -> bytes: - """Send password, return response - - (response includes message count, mailbox size). - - NB: mailbox is locked by server from here to 'quit()' - """ - - def stat(self) -> tuple[int, int]: - """Get mailbox status. - - Result is tuple of 2 ints (message count, mailbox size) - """ - - def list(self, which: Any | None = None) -> _LongResp: - """Request listing, return result. - - Result without a message number argument is in form - ['response', ['mesg_num octets', ...], octets]. - - Result when a message number argument is given is a - single response: the "scan listing" for that message. - """ - - def retr(self, which: Any) -> _LongResp: - """Retrieve whole message number 'which'. - - Result is in form ['response', ['line', ...], octets]. - """ - - def dele(self, which: Any) -> bytes: - """Delete message number 'which'. - - Result is 'response'. - """ - - def noop(self) -> bytes: - """Does nothing. - - One supposes the response indicates the server is alive. - """ - - def rset(self) -> bytes: - """Unmark all messages marked for deletion.""" - - def quit(self) -> bytes: - """Signoff: commit changes on server, unlock mailbox, close connection.""" - - def close(self) -> None: - """Close the connection without assuming anything about it.""" - - def rpop(self, user: str) -> bytes: - """Send RPOP command to access the mailbox with an alternate user.""" + def user(self, user: str) -> bytes: ... + def pass_(self, pswd: str) -> bytes: ... + def stat(self) -> tuple[int, int]: ... 
+ def list(self, which: Any | None = None) -> _LongResp: ... + def retr(self, which: Any) -> _LongResp: ... + def dele(self, which: Any) -> bytes: ... + def noop(self) -> bytes: ... + def rset(self) -> bytes: ... + def quit(self) -> bytes: ... + def close(self) -> None: ... + def rpop(self, user: str) -> bytes: ... timestamp: Pattern[str] - def apop(self, user: str, password: str) -> bytes: - """Authorisation - - - only possible if server has supplied a timestamp in initial greeting. - - Args: - user - mailbox user; - password - mailbox password. - - NB: mailbox is locked by server from here to 'quit()' - """ - - def top(self, which: Any, howmuch: int) -> _LongResp: - """Retrieve message header of message number 'which' - and first 'howmuch' lines of message body. - - Result is in form ['response', ['line', ...], octets]. - """ - + def apop(self, user: str, password: str) -> bytes: ... + def top(self, which: Any, howmuch: int) -> _LongResp: ... @overload - def uidl(self) -> _LongResp: - """Return message digest (unique id) list. - - If 'which', result contains unique id for that message - in the form 'response mesgnum uid', otherwise result is - the list ['response', ['mesgnum uid', ...], octets] - """ - + def uidl(self) -> _LongResp: ... @overload def uidl(self, which: Any) -> bytes: ... - def utf8(self) -> bytes: - """Try to enter UTF-8 mode (see RFC 6856). Returns server response.""" - - def capa(self) -> dict[str, _list[str]]: - """Return server capabilities (RFC 2449) as a dictionary - >>> c=poplib.POP3('localhost') - >>> c.capa() - {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'], - 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [], - 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [], - 'UIDL': [], 'RESP-CODES': []} - >>> - - Really, according to RFC 2449, the cyrus folks should avoid - having the implementation split into multiple arguments... - """ - - def stls(self, context: ssl.SSLContext | None = None) -> bytes: - """Start a TLS session on the active connection as specified in RFC 2595. - - context - a ssl.SSLContext - """ + def utf8(self) -> bytes: ... + def capa(self) -> dict[str, _list[str]]: ... + def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... class POP3_SSL(POP3): - """POP3 client class over SSL connection - - Instantiate with: POP3_SSL(hostname, port=995, context=None) - - hostname - the hostname of the pop3 over ssl server - port - port number - context - a ssl.SSLContext - - See the methods of the parent class POP3 for more documentation. - """ - if sys.version_info >= (3, 12): def __init__( self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None ) -> None: ... - def stls(self, context: Any = None) -> NoReturn: - """The method unconditionally raises an exception since the - STLS command doesn't make any sense on an already established - SSL/TLS session. - """ + def stls(self, context: Any = None) -> NoReturn: ... else: def __init__( self, @@ -223,8 +69,4 @@ class POP3_SSL(POP3): ) -> None: ... # "context" is actually the last argument, # but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: - """The method unconditionally raises an exception since the - STLS command doesn't make any sense on an already established - SSL/TLS session. - """ + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... 
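A minimal usage sketch (not part of this patch) of the poplib session flow that the stripped POP3 docstring describes: authenticate, read the mailbox, then quit promptly because the server keeps the mailbox locked until quit(). The host, user, and password are placeholder parameters, not values taken from the stubs.

import poplib

def fetch_all_messages(host: str, user: str, password: str) -> list[bytes]:
    # Keep the session short: the mailbox stays locked from authentication
    # until quit().
    conn = poplib.POP3_SSL(host, 995, timeout=30)
    messages: list[bytes] = []
    try:
        conn.user(user)
        conn.pass_(password)
        count, _mailbox_size = conn.stat()            # (message count, mailbox size)
        for i in range(1, count + 1):
            _resp, lines, _octets = conn.retr(i)      # ('response', [line, ...], octets)
            messages.append(b"\r\n".join(lines))
    finally:
        conn.quit()                                   # commit changes and unlock the mailbox
    return messages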
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi index 52aeeae72d2b7..6d0d76ab82176 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi @@ -1,9 +1,3 @@ -"""This module provides access to operating system functionality that is -standardized by the C Standard and the POSIX standard (a thinly -disguised Unix interface). Refer to the library manual and -corresponding Unix manual entries for more information on calls. -""" - import sys if sys.platform != "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi index d501e9538ff6d..84e1b1e028bde 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi @@ -1,15 +1,3 @@ -"""Common operations on Posix pathnames. - -Instead of importing this module directly, import os and refer to -this module as os.path. The "os.path" name is an alias for this -module on Posix systems; on other systems (e.g. Windows), -os.path provides the same operations in a manner specific to that -platform, and is an alias to another module (e.g. ntpath). - -Some of this can actually be useful on non-Posix systems too, e.g. -for manipulation of the pathname component of URLs. -""" - import sys from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Iterable @@ -94,55 +82,35 @@ devnull: LiteralString # Overloads are necessary to work around python/mypy#17952 & python/mypy#11880 @overload -def abspath(path: PathLike[AnyStr]) -> AnyStr: - """Return an absolute path.""" - +def abspath(path: PathLike[AnyStr]) -> AnyStr: ... @overload def abspath(path: AnyStr) -> AnyStr: ... @overload -def basename(p: PathLike[AnyStr]) -> AnyStr: - """Returns the final component of a pathname""" - +def basename(p: PathLike[AnyStr]) -> AnyStr: ... @overload def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def dirname(p: PathLike[AnyStr]) -> AnyStr: - """Returns the directory component of a pathname""" - +def dirname(p: PathLike[AnyStr]) -> AnyStr: ... @overload def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def expanduser(path: PathLike[AnyStr]) -> AnyStr: - """Expand ~ and ~user constructions. If user or $HOME is unknown, - do nothing. - """ - +def expanduser(path: PathLike[AnyStr]) -> AnyStr: ... @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload -def expandvars(path: PathLike[AnyStr]) -> AnyStr: - """Expand shell variables of form $var and ${var}. Unknown variables - are left unchanged. - """ - +def expandvars(path: PathLike[AnyStr]) -> AnyStr: ... @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload -def normcase(s: PathLike[AnyStr]) -> AnyStr: - """Normalize case of pathname. Has no effect under Posix""" - +def normcase(s: PathLike[AnyStr]) -> AnyStr: ... @overload def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def normpath(path: PathLike[AnyStr]) -> AnyStr: - """Normalize path, eliminating double slashes, etc.""" - +def normpath(path: PathLike[AnyStr]) -> AnyStr: ... @overload def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def commonpath(paths: Iterable[LiteralString]) -> LiteralString: - """Given a sequence of path names, returns the longest common sub-path.""" - +def commonpath(paths: Iterable[LiteralString]) -> LiteralString: ... 
@overload def commonpath(paths: Iterable[StrPath]) -> str: ... @overload @@ -152,83 +120,41 @@ def commonpath(paths: Iterable[BytesPath]) -> bytes: ... # but must be defined as pos-only in the stub or cross-platform code doesn't type-check, # as the parameter name is different in ntpath.join() @overload -def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: - """Join two or more pathname components, inserting '/' as needed. - If any component is an absolute path, all previous path components - will be discarded. An empty last part will result in a path that - ends with a separator. - """ - +def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: ... @overload def join(a: StrPath, /, *paths: StrPath) -> str: ... @overload def join(a: BytesPath, /, *paths: BytesPath) -> bytes: ... @overload -def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: - """Return the canonical path of the specified filename, eliminating any - symbolic links encountered in the path. - """ - +def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: ... @overload def realpath(filename: AnyStr, *, strict: bool | _AllowMissingType = False) -> AnyStr: ... @overload -def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: - """Return a relative version of a path""" - +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... @overload def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... @overload def relpath(path: StrPath, start: StrPath | None = None) -> str: ... @overload -def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: - """Split a pathname. Returns tuple "(head, tail)" where "tail" is - everything after the final slash. Either part may be empty. - """ - +def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload -def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: - """Split a pathname into drive and path. On Posix, drive is always - empty. - """ - +def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload -def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: - """Split the extension from a pathname. - - Extension is everything from the last dot to the end, ignoring - leading dots. Returns "(root, ext)"; ext may be empty. - """ - +def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... -def isabs(s: StrOrBytesPath) -> bool: - """Test whether a path is absolute""" - -def islink(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a symbolic link""" - -def ismount(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a mount point""" - -def lexists(path: FileDescriptorOrPath) -> bool: - """Test whether a path exists. Returns True for broken symbolic links""" +def isabs(s: StrOrBytesPath) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... 
if sys.version_info >= (3, 12): - def isjunction(path: StrOrBytesPath) -> bool: - """Test whether a path is a junction - Junctions are not supported on the current platform - """ - + def isjunction(path: StrOrBytesPath) -> bool: ... @overload - def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: - """Split a pathname into drive, root and tail. - - The tail contains anything after the root. - """ - + def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi index 6146ea8d584fc..1e80462e25657 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi @@ -1,29 +1,3 @@ -"""Support to pretty-print lists, tuples, & dictionaries recursively. - -Very simple, but useful, especially in debugging data structures. - -Classes -------- - -PrettyPrinter() - Handle pretty-printing operations onto a stream using a configured - set of formatting parameters. - -Functions ---------- - -pformat() - Format a Python object into a pretty-printed representation. - -pprint() - Pretty-print a Python object to a stream [default is sys.stdout]. - -saferepr() - Generate a 'standard' repr()-like value, but protect against recursive - data structures. - -""" - import sys from _typeshed import SupportsWrite from collections import deque @@ -41,8 +15,7 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> str: - """Format a Python object into a pretty-printed representation.""" + ) -> str: ... else: def pformat( @@ -53,8 +26,7 @@ else: *, compact: bool = False, sort_dicts: bool = True, - ) -> str: - """Format a Python object into a pretty-printed representation.""" + ) -> str: ... if sys.version_info >= (3, 10): def pp( @@ -67,8 +39,7 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = False, underscore_numbers: bool = False, - ) -> None: - """Pretty-print a Python object""" + ) -> None: ... else: def pp( @@ -80,8 +51,7 @@ else: *, compact: bool = False, sort_dicts: bool = False, - ) -> None: - """Pretty-print a Python object""" + ) -> None: ... if sys.version_info >= (3, 10): def pprint( @@ -94,8 +64,7 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> None: - """Pretty-print a Python object to a stream [default is sys.stdout].""" + ) -> None: ... else: def pprint( @@ -107,17 +76,11 @@ else: *, compact: bool = False, sort_dicts: bool = True, - ) -> None: - """Pretty-print a Python object to a stream [default is sys.stdout].""" - -def isreadable(object: object) -> bool: - """Determine if saferepr(object) is readable by eval().""" - -def isrecursive(object: object) -> bool: - """Determine if object requires a recursive representation.""" + ) -> None: ... -def saferepr(object: object) -> str: - """Version of repr() which can handle recursive data structures.""" +def isreadable(object: object) -> bool: ... +def isrecursive(object: object) -> bool: ... +def saferepr(object: object) -> str: ... 
class PrettyPrinter: if sys.version_info >= (3, 10): @@ -131,33 +94,7 @@ class PrettyPrinter: compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> None: - """Handle pretty printing operations onto a stream using a set of - configured parameters. - - indent - Number of spaces to indent for each level of nesting. - - width - Attempted maximum number of columns in the output. - - depth - The maximum depth to print out nested structures. - - stream - The desired output stream. If omitted (or false), the standard - output stream available at construction will be used. - - compact - If true, several items will be combined in one line. - - sort_dicts - If true, dict keys are sorted. - - underscore_numbers - If true, digit groups are separated with underscores. - - """ + ) -> None: ... else: def __init__( self, @@ -168,41 +105,13 @@ class PrettyPrinter: *, compact: bool = False, sort_dicts: bool = True, - ) -> None: - """Handle pretty printing operations onto a stream using a set of - configured parameters. - - indent - Number of spaces to indent for each level of nesting. - - width - Attempted maximum number of columns in the output. - - depth - The maximum depth to print out nested structures. - - stream - The desired output stream. If omitted (or false), the standard - output stream available at construction will be used. - - compact - If true, several items will be combined in one line. - - sort_dicts - If true, dict keys are sorted. - - """ + ) -> None: ... def pformat(self, object: object) -> str: ... def pprint(self, object: object) -> None: ... def isreadable(self, object: object) -> bool: ... def isrecursive(self, object: object) -> bool: ... - def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: - """Format object for a specific context, returning a string - and flags indicating whether the representation is 'readable' - and whether the object represents a recursive construct. - """ - + def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... def _format( self, object: object, stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi index dff0edf5e6c6e..696193d9dc169 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi @@ -1,5 +1,3 @@ -"""Class for profiling Python code.""" - from _typeshed import StrOrBytesPath from collections.abc import Callable, Mapping from typing import Any, TypeVar @@ -7,70 +5,16 @@ from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: - """Run statement under profiler optionally saving results in filename - - This function takes a single argument that can be passed to the - "exec" statement, and an optional file name. In all cases this - routine attempts to "exec" its first argument and gather profiling - statistics from the execution. If no file name is present, then this - function automatically prints a simple profiling report, sorted by the - standard name string (file/line/function-name) that is presented in - each line. - """ - +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... 
def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 -) -> None: - """Run statement under profiler, supplying your own globals and locals, - optionally saving results in filename. - - statement and filename have the same semantics as profile.run - """ +) -> None: ... _T = TypeVar("_T") _P = ParamSpec("_P") _Label: TypeAlias = tuple[str, int, str] class Profile: - """Profiler class. - - self.cur is always a tuple. Each such tuple corresponds to a stack - frame that is currently active (self.cur[-2]). The following are the - definitions of its members. We use this external "parallel stack" to - avoid contaminating the program that we are profiling. (old profiler - used to write into the frames local dictionary!!) Derived classes - can change the definition of some entries, as long as they leave - [-2:] intact (frame and previous tuple). In case an internal error is - detected, the -3 element is used as the function name. - - [ 0] = Time that needs to be charged to the parent frame's function. - It is used so that a function call will not have to access the - timing data for the parent frame. - [ 1] = Total time spent in this frame's function, excluding time in - subfunctions (this latter is tallied in cur[2]). - [ 2] = Total time spent in subfunctions, excluding time executing the - frame's function (this latter is tallied in cur[1]). - [-3] = Name of the function that corresponds to this frame. - [-2] = Actual frame that we correspond to (used to sync exception handling). - [-1] = Our parent 6-tuple (corresponds to frame.f_back). - - Timing data for each function is stored as a 5-tuple in the dictionary - self.timings[]. The index is always the name stored in self.cur[-3]. - The following are the definitions of the members: - - [0] = The number of times this function was called, not counting direct - or indirect recursion, - [1] = Number of times this function appears on the stack, minus one - [2] = Total time spent internal to this function - [3] = Cumulative time that this function was present on the stack. In - non-recursive functions, this is the total execution time from start - to finish of each invocation of a function, including time spent in - all subfunctions. - [4] = A dictionary indicating for each function name, the number of times - it was called by us. - """ - bias: int stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... 
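A small illustrative sketch (not part of this patch) of the profile API whose docstrings are stripped above: runctx() executes a statement under the profiler with explicit namespaces, while Profile.runcall() profiles a single call and dump_stats() saves the data for pstats. The fib function and the "fib.prof" filename are placeholder examples.

import profile
import pstats

def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

if __name__ == "__main__":
    # Run a statement under the profiler and print a report sorted by
    # cumulative time.
    profile.runctx("fib(20)", globals(), {}, sort="cumulative")

    # Profile a single call and keep the raw stats for later inspection.
    prof = profile.Profile()
    prof.runcall(fib, 22)
    prof.dump_stats("fib.prof")
    pstats.Stats("fib.prof").sort_stats("cumulative").print_stats(5)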
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi index fe07c90aea1af..c4dee1f6b8f69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi @@ -1,5 +1,3 @@ -"""Class for printing reports on profiled python code.""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -20,8 +18,6 @@ _Selector: TypeAlias = str | float | int if sys.version_info >= (3, 11): class SortKey(StrEnum): - """An enumeration.""" - CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -34,8 +30,6 @@ if sys.version_info >= (3, 11): else: class SortKey(str, Enum): - """An enumeration.""" - CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -48,8 +42,6 @@ else: @dataclass(unsafe_hash=True) class FunctionProfile: - """FunctionProfile(ncalls: str, tottime: float, percall_tottime: float, cumtime: float, percall_cumtime: float, file_name: str, line_number: int)""" - ncalls: str tottime: float percall_tottime: float @@ -60,46 +52,12 @@ class FunctionProfile: @dataclass(unsafe_hash=True) class StatsProfile: - """Class for keeping track of an item in inventory.""" - total_tt: float func_profiles: dict[str, FunctionProfile] _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] class Stats: - """This class is used for creating reports from data generated by the - Profile class. It is a "friend" of that class, and imports data either - by direct access to members of Profile class, or by reading in a dictionary - that was emitted (via marshal) from the Profile class. - - The big change from the previous Profiler (in terms of raw functionality) - is that an "add()" method has been provided to combine Stats from - several distinct profile runs. Both the constructor and the add() - method now take arbitrarily many file names as arguments. - - All the print methods now take an argument that indicates how many lines - to print. If the arg is a floating-point number between 0 and 1.0, then - it is taken as a decimal percentage of the available lines to be printed - (e.g., .1 means print 10% of all available lines). If it is an integer, - it is taken to mean the number of lines of data that you wish to have - printed. - - The sort_stats() method now processes some additional options (i.e., in - addition to the old -1, 0, 1, or 2 that are respectively interpreted as - 'stdname', 'calls', 'time', and 'cumulative'). It takes either an - arbitrary number of quoted strings or SortKey enum to select the sort - order. - - For example sort_stats('time', 'name') or sort_stats(SortKey.TIME, - SortKey.NAME) sorts on the major key of 'internal function time', and on - the minor key of 'the name of the function'. Look at the two tables in - sort_stats() and get_sort_arg_defs(self) for more examples. - - All methods return self, so you can string together commands like: - Stats('foo', 'goo').strip_dirs().sort_stats('calls'). print_stats(5).print_callers(5) - """ - sort_arg_dict_default: _SortArgDict def __init__( self, @@ -112,12 +70,8 @@ class Stats: def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... def get_top_level_stats(self) -> None: ... def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... 
- def dump_stats(self, filename: StrOrBytesPath) -> None: - """Write the profile data to a file we know how to load back.""" - - def get_sort_arg_defs(self) -> _SortArgDict: - """Expand all abbreviations that are unique.""" - + def dump_stats(self, filename: StrOrBytesPath) -> None: ... + def get_sort_arg_defs(self) -> _SortArgDict: ... @overload def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload @@ -126,13 +80,7 @@ class Stats: def strip_dirs(self) -> Self: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... - def get_stats_profile(self) -> StatsProfile: - """This method returns an instance of StatsProfile, which contains a mapping - of function names to instances of FunctionProfile. Each FunctionProfile - instance holds information related to the function's profile such as how - long the function took to run, how many times it was called, etc... - """ - + def get_stats_profile(self) -> StatsProfile: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... def print_stats(self, *amount: _Selector) -> Self: ... def print_callees(self, *amount: _Selector) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi index 28d5ae4280124..d1c78f9e3dd67 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi @@ -1,5 +1,3 @@ -"""Pseudo terminal utilities.""" - import sys from collections.abc import Callable, Iterable from typing import Final @@ -14,44 +12,17 @@ if sys.platform != "win32": STDERR_FILENO: Final = 2 CHILD: Final = 0 - def openpty() -> tuple[int, int]: - """openpty() -> (master_fd, slave_fd) - Open a pty master/slave pair, using os.openpty() if possible. - """ + def openpty() -> tuple[int, int]: ... + if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") - def master_open() -> tuple[int, str]: - """master_open() -> (master_fd, slave_name) - Open a pty master and return the fd, and the filename of the slave end. - Deprecated, use openpty() instead. - """ - + def master_open() -> tuple[int, str]: ... @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") - def slave_open(tty_name: str) -> int: - """slave_open(tty_name) -> slave_fd - Open the pty slave and acquire the controlling terminal, returning - opened filedescriptor. - Deprecated, use openpty() instead. - """ + def slave_open(tty_name: str) -> int: ... else: - def master_open() -> tuple[int, str]: - """master_open() -> (master_fd, slave_name) - Open a pty master and return the fd, and the filename of the slave end. - Deprecated, use openpty() instead. - """ - - def slave_open(tty_name: str) -> int: - """slave_open(tty_name) -> slave_fd - Open the pty slave and acquire the controlling terminal, returning - opened filedescriptor. - Deprecated, use openpty() instead. - """ - - def fork() -> tuple[int, int]: - """fork() -> (pid, master_fd) - Fork and make the child a session leader with a controlling terminal. - """ + def master_open() -> tuple[int, str]: ... + def slave_open(tty_name: str) -> int: ... - def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: - """Create a spawned process.""" + def fork() -> tuple[int, int]: ... 
+ def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi index ceb7dc7cf7453..a84ba324718af 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi @@ -1,13 +1,3 @@ -"""This module provides access to the Unix password database. -It is available on all Unix versions. - -Password database entries are reported as 7-tuples containing the following -items from the password database (see `'), in order: -pw_name, pw_passwd, pw_uid, pw_gid, pw_gecos, pw_dir, pw_shell. -The uid and gid items are integers, all others are strings. An -exception is raised if the entry asked for cannot be found. -""" - import sys from _typeshed import structseq from typing import Any, Final, final @@ -15,58 +5,24 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): - """pwd.struct_passwd: Results from getpw*() routines. - - This object may be accessed either as a tuple of - (pw_name,pw_passwd,pw_uid,pw_gid,pw_gecos,pw_dir,pw_shell) - or via the object attributes as named in the above tuple. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("pw_name", "pw_passwd", "pw_uid", "pw_gid", "pw_gecos", "pw_dir", "pw_shell") @property - def pw_name(self) -> str: - """user name""" - + def pw_name(self) -> str: ... @property - def pw_passwd(self) -> str: - """password""" - + def pw_passwd(self) -> str: ... @property - def pw_uid(self) -> int: - """user id""" - + def pw_uid(self) -> int: ... @property - def pw_gid(self) -> int: - """group id""" - + def pw_gid(self) -> int: ... @property - def pw_gecos(self) -> str: - """real name""" - + def pw_gecos(self) -> str: ... @property - def pw_dir(self) -> str: - """home directory""" - + def pw_dir(self) -> str: ... @property - def pw_shell(self) -> str: - """shell program""" - - def getpwall() -> list[struct_passwd]: - """Return a list of all available password database entries, in arbitrary order. - - See help(pwd) for more on password database entries. - """ - - def getpwuid(uid: int, /) -> struct_passwd: - """Return the password database entry for the given numeric user ID. - - See `help(pwd)` for more on password database entries. - """ - - def getpwnam(name: str, /) -> struct_passwd: - """Return the password database entry for the given user name. + def pw_shell(self) -> str: ... - See `help(pwd)` for more on password database entries. - """ + def getpwall() -> list[struct_passwd]: ... + def getpwuid(uid: int, /) -> struct_passwd: ... + def getpwnam(name: str, /) -> struct_passwd: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi index 65c5f48879fa2..334ce79b5dd04 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi @@ -1,8 +1,3 @@ -"""Routine to "compile" a .py file to a .pyc file. - -This module has intimate knowledge of the format of .pyc files. -""" - import enum import sys from typing import AnyStr @@ -10,33 +5,6 @@ from typing import AnyStr __all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] class PyCompileError(Exception): - """Exception raised when an error occurs while attempting to - compile the file. 
- - To raise this exception, use - - raise PyCompileError(exc_type,exc_value,file[,msg]) - - where - - exc_type: exception type to be used in error message - type name can be accesses as class variable - 'exc_type_name' - - exc_value: exception value to be used in error message - can be accesses as class variable 'exc_value' - - file: name of file being compiled to be used in error message - can be accesses as class variable 'file' - - msg: string message to be written as error message - If no value is given, a default exception message will be - given, consistent with 'standard' py_compile output. - message (or default) can be accesses as class variable - 'msg' - - """ - exc_type_name: str exc_value: BaseException file: str @@ -44,8 +12,6 @@ class PyCompileError(Exception): def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): - """An enumeration.""" - TIMESTAMP = 1 CHECKED_HASH = 2 UNCHECKED_HASH = 3 @@ -59,63 +25,10 @@ def compile( optimize: int = -1, invalidation_mode: PycInvalidationMode | None = None, quiet: int = 0, -) -> AnyStr | None: - """Byte-compile one Python source file to Python bytecode. - - :param file: The source file name. - :param cfile: The target byte compiled file name. When not given, this - defaults to the PEP 3147/PEP 488 location. - :param dfile: Purported file name, i.e. the file name that shows up in - error messages. Defaults to the source file name. - :param doraise: Flag indicating whether or not an exception should be - raised when a compile error is found. If an exception occurs and this - flag is set to False, a string indicating the nature of the exception - will be printed, and the function will return to the caller. If an - exception occurs and this flag is set to True, a PyCompileError - exception will be raised. - :param optimize: The optimization level for the compiler. Valid values - are -1, 0, 1 and 2. A value of -1 means to use the optimization - level of the current interpreter, as given by -O command line options. - :param invalidation_mode: - :param quiet: Return full output with False or 0, errors only with 1, - and no output with 2. - - :return: Path to the resulting byte compiled file. - - Note that it isn't necessary to byte-compile Python modules for - execution efficiency -- Python itself byte-compiles a module when - it is loaded, and if it can, writes out the bytecode to the - corresponding .pyc file. - - However, if a Python installation is shared between users, it is a - good idea to byte-compile all modules upon installation, since - other users may not be able to write in the source directories, - and thus they won't be able to write the .pyc file, and then - they would be byte-compiling every module each time it is loaded. - This can slow down program start-up considerably. - - See compileall.py for a script/module that uses this module to - byte-compile all installed files (or all files in selected - directories). - - Do note that FileExistsError is raised if cfile ends up pointing at a - non-regular file or symlink. Because the compilation uses a file renaming, - the resulting file would be regular and thus not the same type of file as - it was previously. - """ +) -> AnyStr | None: ... if sys.version_info >= (3, 10): def main() -> None: ... else: - def main(args: list[str] | None = None) -> int: - """Compile several source files. 
- - The files named in 'args' (or on the command line, if 'args' is - not specified) are compiled and the resulting bytecode is cached - in the normal manner. This function does not search a directory - structure to locate source files; it only compiles files named - explicitly. If '-' is the only parameter in args, the list of - files is taken from standard input. - - """ + def main(args: list[str] | None = None) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi index ad8a1aead66a0..504a5d5f115a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi @@ -1,54 +1,9 @@ -"""Parse a Python module and describe its classes and functions. - -Parse enough of a Python file to recognize imports and class and -function definitions, and to find out the superclasses of a class. - -The interface consists of a single function: - readmodule_ex(module, path=None) -where module is the name of a Python module, and path is an optional -list of directories where the module is to be searched. If present, -path is prepended to the system search path sys.path. The return value -is a dictionary. The keys of the dictionary are the names of the -classes and functions defined in the module (including classes that are -defined via the from XXX import YYY construct). The values are -instances of classes Class and Function. One special key/value pair is -present for packages: the key '__path__' has a list as its value which -contains the package search path. - -Classes and Functions have a common superclass: _Object. Every instance -has the following attributes: - module -- name of the module; - name -- name of the object; - file -- file in which the object is defined; - lineno -- line in the file where the object's definition starts; - end_lineno -- line in the file where the object's definition ends; - parent -- parent of this object, if any; - children -- nested objects contained in this object. -The 'children' attribute is a dictionary mapping names to objects. - -Instances of Function describe functions with the attributes from _Object, -plus the following: - is_async -- if a function is defined with an 'async' prefix - -Instances of Class describe classes with the attributes from _Object, -plus the following: - super -- list of super classes (Class instances if possible); - methods -- mapping of method names to beginning line numbers. -If the name of a super class is not recognized, the corresponding -entry in the list of super classes is not a class instance but a -string giving the name of the super class. Since import statements -are recognized and imported modules are scanned as well, this -shouldn't happen often. -""" - import sys from collections.abc import Mapping, Sequence __all__ = ["readmodule", "readmodule_ex", "Class", "Function"] class _Object: - """Information about Python class or function.""" - module: str name: str file: int @@ -71,8 +26,6 @@ class _Object: def __init__(self, module: str, name: str, file: str, lineno: int, parent: _Object | None) -> None: ... class Function(_Object): - """Information about a Python function, including methods.""" - if sys.version_info >= (3, 10): is_async: bool @@ -95,8 +48,6 @@ class Function(_Object): def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... 
class Class(_Object): - """Information about a Python class.""" - super: list[Class | str] | None methods: dict[str, int] parent: Class | None @@ -119,16 +70,5 @@ class Class(_Object): self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None ) -> None: ... -def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: - """Return Class objects for the top-level classes in module. - - This is the original interface, before Functions were added. - """ - -def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: - """Return a dictionary with all functions and classes in module. - - Search for module in PATH + sys.path. - If possible, include imported superclasses. - Do this by reading source, without importing (and executing) it. - """ +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... +def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi index cd5a5c1b94a60..935f9420f88c0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi @@ -1,42 +1,3 @@ -"""Generate Python documentation in HTML or text for interactive use. - -At the Python interactive prompt, calling help(thing) on a Python object -documents the object, and calling help() starts up an interactive -help session. - -Or, at the shell command line outside of Python: - -Run "pydoc " to show documentation on something. may be -the name of a function, module, package, or a dotted reference to a -class or function within a module or module in a package. If the -argument contains a path segment delimiter (e.g. slash on Unix, -backslash on Windows) it is treated as the path to a Python source file. - -Run "pydoc -k " to search for a keyword in the synopsis lines -of all available modules. - -Run "pydoc -n " to start an HTTP server with the given -hostname (default: localhost) on the local machine. - -Run "pydoc -p " to start an HTTP server on the given port on the -local machine. Port number 0 can be used to get an arbitrary unused port. - -Run "pydoc -b" to start an HTTP server on an arbitrary unused port and -open a web browser to interactively browse documentation. Combine with -the -n and -p options to control the hostname and port used. - -Run "pydoc -w " to write out the HTML documentation for a module -to a file named ".html". - -Module docs for core modules are assumed to be in - - https://docs.python.org/X.Y/library/ - -This can be overridden by setting the PYTHONDOCS environment variable -to a different URL or to a local directory containing the Library -Reference Manual pages. -""" - import sys from _typeshed import OptExcInfo, SupportsWrite, Unused from abc import abstractmethod @@ -60,112 +21,57 @@ __credits__: Final[str] class _Pager(Protocol): def __call__(self, text: str, title: str = "") -> None: ... 
-def pathdirs() -> list[str]: - """Convert sys.path into a list of absolute, existing, unique paths.""" - -def getdoc(object: object) -> str: - """Get the doc string or comments for an object.""" - -def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: - """Split a doc string into a synopsis line (if any) and the rest.""" - -def classname(object: object, modname: str) -> str: - """Get a class name and qualify it with a module name if necessary.""" - -def isdata(object: object) -> bool: - """Check if an object is of a type that probably means it's data.""" - -def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: - """Do a series of global replacements on a string.""" - -def cram(text: str, maxlen: int) -> str: - """Omit part of a string if needed to make it fit in a maximum length.""" - -def stripid(text: str) -> str: - """Remove the hexadecimal id from a Python object representation.""" - +def pathdirs() -> list[str]: ... +def getdoc(object: object) -> str: ... +def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ... +def classname(object: object, modname: str) -> str: ... +def isdata(object: object) -> bool: ... +def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... +def cram(text: str, maxlen: int) -> str: ... +def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... -def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: - """Decide whether to show documentation on a variable.""" - -def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: - """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods.""" +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... +def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13.") - def ispackage(path: str) -> bool: # undocumented - """Guess whether a path refers to a package directory.""" + def ispackage(path: str) -> bool: ... # undocumented else: - def ispackage(path: str) -> bool: # undocumented - """Guess whether a path refers to a package directory.""" - -def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: - """Return the one-line summary of a file object, if present""" + def ispackage(path: str) -> bool: ... # undocumented -def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: - """Get the one-line summary out of a module file.""" +def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: ... class ErrorDuringImport(Exception): - """Errors that occurred while trying to import something to document it.""" - filename: str exc: type[BaseException] | None value: BaseException | None tb: TracebackType | None def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... -def importfile(path: str) -> ModuleType: - """Import a Python source file or compiled file given its path.""" - -def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: - """Import a module; handle errors; return None if the module isn't found. - - If the module *is* found but an exception occurs, it's wrapped in an - ErrorDuringImport exception and reraised. Unlike __import__, if a - package path is specified, the module at the end of the path is returned, - not the package at the beginning. 
If the optional 'forceload' argument - is 1, we reload the module from disk (unless it's a dynamic extension). - """ +def importfile(path: str) -> ModuleType: ... +def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: ... class Doc: PYTHONDOCS: str - def document(self, object: object, name: str | None = None, *args: Any) -> str: - """Generate documentation for an object.""" - - def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: - """Raise an exception for unimplemented types.""" - + def document(self, object: object, name: str | None = None, *args: Any) -> str: ... + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... @abstractmethod - def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docclass(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docother(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - + def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docdata(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types.""" - - def getdocloc(self, object: object, basedir: str = ...) -> str | None: - """Return the location of module docs or None""" + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... + def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... class HTMLRepr(Repr): - """Class for safely making an HTML representation of a Python object.""" - def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... @@ -176,17 +82,12 @@ class HTMLRepr(Repr): def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): - """Formatter class for HTML documentation.""" - _repr_instance: HTMLRepr repr = _repr_instance.repr escape = _repr_instance.escape - def page(self, title: str, contents: str) -> str: - """Format an HTML page.""" + def page(self, title: str, contents: str) -> str: ... if sys.version_info >= (3, 11): - def heading(self, title: str, extras: str = "") -> str: - """Format a page heading.""" - + def heading(self, title: str, extras: str = "") -> str: ... def section( self, title: str, @@ -196,15 +97,10 @@ class HTMLDoc(Doc): prelude: str = "", marginalia: str | None = None, gap: str = " ", - ) -> str: - """Format a section with a heading.""" - - def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: - """Format a list of items into a multi-column list.""" + ) -> str: ... 
+ def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... else: - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: - """Format a page heading.""" - + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... def section( self, title: str, @@ -215,31 +111,16 @@ class HTMLDoc(Doc): prelude: str = "", marginalia: str | None = None, gap: str = " ", - ) -> str: - """Format a section with a heading.""" - - def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: - """Format a list of items into a multi-column list.""" - - def bigsection(self, title: str, *args: Any) -> str: - """Format a section with a big heading.""" - - def preformat(self, text: str) -> str: - """Format literal preformatted text.""" + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... + def bigsection(self, title: str, *args: Any) -> str: ... + def preformat(self, text: str) -> str: ... def grey(self, text: str) -> str: ... - def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: - """Make a link for an identifier, given name-to-URL mappings.""" - - def classlink(self, object: object, modname: str) -> str: - """Make a link for a class.""" - - def modulelink(self, object: object) -> str: - """Make a link for a module.""" - - def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: - """Make a link for a module or package to display in an index.""" - + def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... + def classlink(self, object: object, modname: str) -> str: ... + def modulelink(self, object: object) -> str: ... + def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: ... def markup( self, text: str, @@ -247,17 +128,11 @@ class HTMLDoc(Doc): funcs: Mapping[str, str] = {}, classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, - ) -> str: - """Mark up some plain text, given a context of symbols to look for. - Each context dictionary maps object names to anchor names. - """ - - def formattree(self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None) -> str: - """Produce HTML for a class tree as given by inspect.getclasstree().""" - - def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: - """Produce HTML documentation for a module object.""" - + ) -> str: ... + def formattree( + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... def docclass( self, object: object, @@ -266,14 +141,9 @@ class HTMLDoc(Doc): funcs: Mapping[str, str] = {}, classes: Mapping[str, str] = {}, *ignored: Unused, - ) -> str: - """Produce HTML documentation for a class object.""" - - def formatvalue(self, object: object) -> str: - """Format an argument default value as text.""" - - def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: - """Produce HTML documentation for a data object.""" + ) -> str: ... + def formatvalue(self, object: object) -> str: ... + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... 
if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -285,18 +155,13 @@ class HTMLDoc(Doc): methods: Mapping[str, str] = {}, cl: type | None = None, homecls: type | None = None, - ) -> str: - """Produce HTML documentation for a function or method object.""" - + ) -> str: ... def docproperty( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: - """Produce html documentation for a data descriptor.""" - + ) -> str: ... def docdata( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: - """Produce html documentation for a data descriptor.""" + ) -> str: ... else: def docroutine( # type: ignore[override] self, @@ -307,27 +172,16 @@ class HTMLDoc(Doc): classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, cl: type | None = None, - ) -> str: - """Produce HTML documentation for a function or method object.""" - - def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce html documentation for a data descriptor.""" - - def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce html documentation for a data descriptor.""" + ) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] if sys.version_info >= (3, 11): - def parentlink(self, object: type | ModuleType, modname: str) -> str: - """Make a link for the enclosing class or module.""" - - def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: - """Generate an HTML index for a directory of modules.""" + def parentlink(self, object: type | ModuleType, modname: str) -> str: ... - def filelink(self, url: str, path: str) -> str: - """Make a link to source file.""" + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... + def filelink(self, url: str, path: str) -> str: ... class TextRepr(Repr): - """Class for safely making a text representation of a Python object.""" - def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... @@ -335,29 +189,16 @@ class TextRepr(Repr): def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): - """Formatter class for text documentation.""" - _repr_instance: TextRepr repr = _repr_instance.repr - def bold(self, text: str) -> str: - """Format a string in bold by overstriking.""" - - def indent(self, text: str, prefix: str = " ") -> str: - """Indent text by prepending a given prefix to each line.""" - - def section(self, title: str, contents: str) -> str: - """Format a section with a given heading.""" - + def bold(self, text: str) -> str: ... + def indent(self, text: str, prefix: str = " ") -> str: ... + def section(self, title: str, contents: str) -> str: ... 
def formattree( self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" - ) -> str: - """Render in text a class tree as returned by inspect.getclasstree().""" - - def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: - """Produce text documentation for a given class object.""" - - def formatvalue(self, object: object) -> str: - """Format an argument default value as text.""" + ) -> str: ... + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... + def formatvalue(self, object: object) -> str: ... if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -366,22 +207,14 @@ class TextDoc(Doc): mod: str | None = None, cl: Any | None = None, homecls: Any | None = None, - ) -> str: - """Produce text documentation for a function or method object.""" - - def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: - """Produce text documentation for a given module object.""" - + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... def docproperty( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: - """Produce text documentation for a data descriptor.""" - + ) -> str: ... def docdata( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: - """Produce text documentation for a data descriptor.""" - + ) -> str: ... def docother( self, object: object, @@ -391,21 +224,12 @@ class TextDoc(Doc): *ignored: Unused, maxlen: int | None = None, doc: Any | None = None, - ) -> str: - """Produce text documentation for a data object.""" + ) -> str: ... else: - def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a function or method object.""" - - def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a given module object.""" - - def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a data descriptor.""" - - def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a data descriptor.""" - + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... 
# type: ignore[override] def docother( # type: ignore[override] self, object: object, @@ -414,41 +238,25 @@ class TextDoc(Doc): parent: str | None = None, maxlen: int | None = None, doc: Any | None = None, - ) -> str: - """Produce text documentation for a data object.""" + ) -> str: ... if sys.version_info >= (3, 13): - def pager(text: str, title: str = "") -> None: - """The first time this is called, determine what kind of pager to use.""" + def pager(text: str, title: str = "") -> None: ... else: - def pager(text: str) -> None: - """The first time this is called, determine what kind of pager to use.""" + def pager(text: str) -> None: ... -def plain(text: str) -> str: - """Remove boldface formatting from text.""" - -def describe(thing: Any) -> str: - """Produce a short description of the given thing.""" - -def locate(path: str, forceload: bool = ...) -> object: - """Locate an object by name or dotted path, importing as necessary.""" +def plain(text: str) -> str: ... +def describe(thing: Any) -> str: ... +def locate(path: str, forceload: bool = ...) -> object: ... if sys.version_info >= (3, 13): - def get_pager() -> _Pager: - """Decide what method to use for paging through text.""" - - def pipe_pager(text: str, cmd: str, title: str = "") -> None: - """Page through text by feeding it to another program.""" - - def tempfile_pager(text: str, cmd: str, title: str = "") -> None: - """Page through text by invoking a program on a temporary file.""" - - def tty_pager(text: str, title: str = "") -> None: - """Page through text on a text terminal.""" + def get_pager() -> _Pager: ... + def pipe_pager(text: str, cmd: str, title: str = "") -> None: ... + def tempfile_pager(text: str, cmd: str, title: str = "") -> None: ... + def tty_pager(text: str, title: str = "") -> None: ... + def plain_pager(text: str, title: str = "") -> None: ... - def plain_pager(text: str, title: str = "") -> None: - """Simply print unformatted text. This is the ultimate fallback.""" # For backwards compatibility. getpager = get_pager pipepager = pipe_pager @@ -456,31 +264,19 @@ if sys.version_info >= (3, 13): ttypager = tty_pager plainpager = plain_pager else: - def getpager() -> Callable[[str], None]: - """Decide what method to use for paging through text.""" - - def pipepager(text: str, cmd: str) -> None: - """Page through text by feeding it to another program.""" - - def tempfilepager(text: str, cmd: str) -> None: - """Page through text by invoking a program on a temporary file.""" - - def ttypager(text: str) -> None: - """Page through text on a text terminal.""" - - def plainpager(text: str) -> None: - """Simply print unformatted text. This is the ultimate fallback.""" + def getpager() -> Callable[[str], None]: ... + def pipepager(text: str, cmd: str) -> None: ... + def tempfilepager(text: str, cmd: str) -> None: ... + def ttypager(text: str) -> None: ... + def plainpager(text: str) -> None: ... text: TextDoc html: HTMLDoc -def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: - """Given an object or a path to an object, get the object and its name.""" - +def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... def render_doc( thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None -) -> str: - """Render text documentation, given an object or a path to an object.""" +) -> str: ... 
if sys.version_info >= (3, 11): def doc( @@ -489,8 +285,7 @@ if sys.version_info >= (3, 11): forceload: bool = ..., output: SupportsWrite[str] | None = None, is_cli: bool = False, - ) -> None: - """Display text documentation, given an object or a path to an object.""" + ) -> None: ... else: def doc( @@ -498,14 +293,10 @@ else: title: str = "Python Library Documentation: %s", forceload: bool = ..., output: SupportsWrite[str] | None = None, - ) -> None: - """Display text documentation, given an object or a path to an object.""" - -def writedoc(thing: str | object, forceload: bool = ...) -> None: - """Write HTML documentation to a file in the current directory.""" + ) -> None: ... -def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: - """Write out HTML documentation for all modules in a directory tree.""" +def writedoc(thing: str | object, forceload: bool = ...) -> None: ... +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... class Helper: keywords: dict[str, str | tuple[str, str]] @@ -518,8 +309,7 @@ class Helper: def output(self) -> IO[str]: ... def __call__(self, request: str | Helper | object = ...) -> None: ... def interact(self) -> None: ... - def getline(self, prompt: str) -> str: - """Read one line, using input() when appropriate.""" + def getline(self, prompt: str) -> str: ... if sys.version_info >= (3, 11): def help(self, request: Any, is_cli: bool = False) -> None: ... else: @@ -537,8 +327,6 @@ class Helper: help: Helper class ModuleScanner: - """An interruptible scanner that searches module synopses.""" - quit: bool def run( self, @@ -548,9 +336,6 @@ class ModuleScanner: onerror: Callable[[str], object] | None = None, ) -> None: ... -def apropos(key: str) -> None: - """Print all the one-line module summaries that contain a substring.""" - +def apropos(key: str) -> None: ... def ispath(x: object) -> TypeGuard[str]: ... -def cli() -> None: - """Command-line interface (looks at sys.argv to decide what to do).""" +def cli() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi index d58d06ba3151a..21e676052098d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,5 +1,3 @@ -"""Python wrapper for Expat parser.""" - from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable from pyexpat import errors as errors, model as model @@ -21,56 +19,16 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - """XML parser""" - - def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: - """Parse XML data. - - 'isfinal' should be true at end of input. - """ - - def ParseFile(self, file: SupportsRead[bytes], /) -> int: - """Parse XML data from file-like object.""" - - def SetBase(self, base: str, /) -> None: - """Set the base URL for the parser.""" - - def GetBase(self) -> str | None: - """Return base URL string for the parser.""" - - def GetInputContext(self) -> bytes | None: - """Return the untranslated text of the input that caused the current event. - - If the event was generated by a large amount of text (such as a start tag - for an element with many attributes), not all of the text may be available. 
- """ - - def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: - """Create a parser for parsing an external entity based on the information passed to the ExternalEntityRefHandler.""" - - def SetParamEntityParsing(self, flag: int, /) -> int: - """Controls parsing of parameter entities (including the external DTD subset). - - Possible flag values are XML_PARAM_ENTITY_PARSING_NEVER, - XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE and - XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag - was successful. - """ - - def UseForeignDTD(self, flag: bool = True, /) -> None: - """Allows the application to provide an artificial external subset if one is not specified as part of the document instance. - - This readily allows the use of a 'default' document type controlled by the - application, while still getting the advantage of providing document type - information to the parser. 'flag' defaults to True if not provided. - """ - - def GetReparseDeferralEnabled(self) -> bool: - """Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0.""" - - def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: - """Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0.""" - + def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: ... + def ParseFile(self, file: SupportsRead[bytes], /) -> int: ... + def SetBase(self, base: str, /) -> None: ... + def GetBase(self) -> str | None: ... + def GetInputContext(self) -> bytes | None: ... + def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: ... + def SetParamEntityParsing(self, flag: int, /) -> int: ... + def UseForeignDTD(self, flag: bool = True, /) -> None: ... + def GetReparseDeferralEnabled(self) -> bool: ... + def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: ... @property def intern(self) -> dict[str, str]: ... buffer_size: int @@ -114,13 +72,11 @@ class XMLParserType: ExternalEntityRefHandler: Callable[[str, str | None, str | None, str | None], int] | None SkippedEntityHandler: Callable[[str, bool], Any] | None -def ErrorString(code: int, /) -> str: - """Returns string error for given number.""" +def ErrorString(code: int, /) -> str: ... # intern is undocumented def ParserCreate( encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None -) -> XMLParserType: - """Return a new XML parser object.""" +) -> XMLParserType: ... 
expat_CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi index 3b5bbeb23e998..493ae03456044 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi @@ -1,5 +1,3 @@ -"""Constants used to describe error conditions.""" - import sys from typing import Final from typing_extensions import LiteralString diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi index 3de4eec9d8dd8..bac8f3692ce58 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi @@ -1,5 +1,3 @@ -"""Constants used to interpret content model information.""" - from typing import Final XML_CTYPE_ANY: Final = 2 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi index 8ab929e446a33..65e2ac1559adf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi @@ -1,5 +1,3 @@ -"""A multi-producer, multi-consumer queue.""" - import sys from _queue import Empty as Empty, SimpleQueue as SimpleQueue from _typeshed import SupportsRichComparisonT @@ -13,19 +11,12 @@ if sys.version_info >= (3, 13): _T = TypeVar("_T") -class Full(Exception): - """Exception raised by Queue.put(block=0)/put_nowait().""" +class Full(Exception): ... if sys.version_info >= (3, 13): - class ShutDown(Exception): - """Raised when put/get with shut-down queue.""" + class ShutDown(Exception): ... class Queue(Generic[_T]): - """Create a queue object with a given maximum size. - - If maxsize is <= 0, the queue size is infinite. - """ - maxsize: int mutex: Lock # undocumented @@ -40,130 +31,25 @@ class Queue(Generic[_T]): queue: Any # undocumented def __init__(self, maxsize: int = 0) -> None: ... def _init(self, maxsize: int) -> None: ... - def empty(self) -> bool: - """Return True if the queue is empty, False otherwise (not reliable!). - - This method is likely to be removed at some point. Use qsize() == 0 - as a direct substitute, but be aware that either approach risks a race - condition where a queue can grow before the result of empty() or - qsize() can be used. - - To create code that needs to wait for all queued tasks to be - completed, the preferred technique is to use the join() method. - """ - - def full(self) -> bool: - """Return True if the queue is full, False otherwise (not reliable!). - - This method is likely to be removed at some point. Use qsize() >= n - as a direct substitute, but be aware that either approach risks a race - condition where a queue can shrink before the result of full() or - qsize() can be used. - """ - - def get(self, block: bool = True, timeout: float | None = None) -> _T: - """Remove and return an item from the queue. - - If optional args 'block' is true and 'timeout' is None (the default), - block if necessary until an item is available. If 'timeout' is - a non-negative number, it blocks at most 'timeout' seconds and raises - the Empty exception if no item was available within that time. - Otherwise ('block' is false), return an item if one is immediately - available, else raise the Empty exception ('timeout' is ignored - in that case). - - Raises ShutDown if the queue has been shut down and is empty, - or if the queue has been shut down immediately. 
- """ - - def get_nowait(self) -> _T: - """Remove and return an item from the queue without blocking. - - Only get an item if one is immediately available. Otherwise - raise the Empty exception. - """ + def empty(self) -> bool: ... + def full(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... if sys.version_info >= (3, 13): - def shutdown(self, immediate: bool = False) -> None: - """Shut-down the queue, making queue gets and puts raise ShutDown. - - By default, gets will only raise once the queue is empty. Set - 'immediate' to True to make gets raise immediately instead. - - All blocked callers of put() and get() will be unblocked. If - 'immediate', a task is marked as done for each item remaining in - the queue, which may unblock callers of join(). - """ + def shutdown(self, immediate: bool = False) -> None: ... def _get(self) -> _T: ... - def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: - """Put an item into the queue. - - If optional args 'block' is true and 'timeout' is None (the default), - block if necessary until a free slot is available. If 'timeout' is - a non-negative number, it blocks at most 'timeout' seconds and raises - the Full exception if no free slot was available within that time. - Otherwise ('block' is false), put an item on the queue if a free slot - is immediately available, else raise the Full exception ('timeout' - is ignored in that case). - - Raises ShutDown if the queue has been shut down. - """ - - def put_nowait(self, item: _T) -> None: - """Put an item into the queue without blocking. - - Only enqueue the item if a free slot is immediately available. - Otherwise raise the Full exception. - """ - + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... def _put(self, item: _T) -> None: ... - def join(self) -> None: - """Blocks until all items in the Queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer thread calls task_done() - to indicate the item was retrieved and all work on it is complete. - - When the count of unfinished tasks drops to zero, join() unblocks. - """ - - def qsize(self) -> int: - """Return the approximate size of the queue (not reliable!).""" - + def join(self) -> None: ... + def qsize(self) -> int: ... def _qsize(self) -> int: ... - def task_done(self) -> None: - """Indicate that a formerly enqueued task is complete. - - Used by Queue consumer threads. For each get() used to fetch a task, - a subsequent call to task_done() tells the queue that the processing - on the task is complete. - - If a join() is currently blocking, it will resume when all items - have been processed (meaning that a task_done() call was received - for every item that had been put() into the queue). - - shutdown(immediate=True) calls task_done() for each remaining item in - the queue. - - Raises a ValueError if called more times than there were items - placed in the queue. - """ - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def task_done(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
class PriorityQueue(Queue[SupportsRichComparisonT]): - """Variant of Queue that retrieves open entries in priority order (lowest first). - - Entries are typically tuples of the form: (priority number, data). - """ - queue: list[SupportsRichComparisonT] class LifoQueue(Queue[_T]): - """Variant of Queue that retrieves most recently added entries first.""" - queue: list[_T] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi index 4594503f76e3c..be6892fcbcd78 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi @@ -1,5 +1,3 @@ -"""Conversions to/from quoted-printable transport encoding as per RFC 1521.""" - from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite from typing import Protocol, type_check_only @@ -8,21 +6,7 @@ __all__ = ["encode", "decode", "encodestring", "decodestring"] @type_check_only class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... -def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: - """Read 'input', apply quoted-printable encoding, and write to 'output'. - - 'input' and 'output' are binary file objects. The 'quotetabs' flag - indicates whether embedded tabs and spaces should be quoted. Note that - line-ending tabs and spaces are always encoded, as per RFC 1521. - The 'header' flag indicates whether we are encoding spaces as _ as per RFC - 1522. - """ - +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... -def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: - """Read 'input', apply quoted-printable decoding, and write to 'output'. - 'input' and 'output' are binary file objects. - If 'header' is true, decode underscore as space (per RFC 1522). - """ - +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi index 1ba0e2106407a..a797794b8050f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi @@ -1,51 +1,3 @@ -"""Random variable generators. - - bytes - ----- - uniform bytes (values between 0 and 255) - - integers - -------- - uniform within range - - sequences - --------- - pick random element - pick random sample - pick weighted random sample - generate random permutation - - distributions on the real line: - ------------------------------ - uniform - triangular - normal (Gaussian) - lognormal - negative exponential - gamma - beta - pareto - Weibull - - distributions on the circle (angles 0 to 2pi) - --------------------------------------------- - circular uniform - von Mises - - discrete distributions - ---------------------- - binomial - - -General notes on the underlying Mersenne Twister core generator: - -* The period is 2**19937-1. -* It is one of the most extensively tested generators in existence. -* The random() method is implemented in C, executes in a single Python step, - and is, therefore, threadsafe. 
- -""" - import _random import sys from _typeshed import SupportsLenAndGetItem @@ -88,25 +40,8 @@ if sys.version_info >= (3, 12): _T = TypeVar("_T") class Random(_random.Random): - """Random number generator base class used by bound module functions. - - Used to instantiate instances of Random to get generators that don't - share state. - - Class Random can also be subclassed if you want to use a different basic - generator of your own devising: in that case, override the following - methods: random(), seed(), getstate(), and setstate(). - Optionally, implement a getrandbits() method so that randrange() - can cover arbitrarily large ranges. - - """ - VERSION: ClassVar[int] - def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: # noqa: Y041 - """Initialize an instance. - - Optional argument x controls seeding, as for Random.seed(). - """ + def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: ... # noqa: Y041 # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. @@ -114,47 +49,13 @@ class Random(_random.Random): # this is a workaround for pyright correctly flagging an inconsistent inherited constructor, see #14624 def __new__(cls, x: int | float | str | bytes | bytearray | None = None) -> Self: ... # noqa: Y041 - def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: # type: ignore[override] # noqa: Y041 - """Initialize internal state from a seed. - - The only supported seed types are None, int, float, - str, bytes, and bytearray. - - None or no argument seeds from current time or from an operating - system specific randomness source if available. - - If *a* is an int, all bits are used. - - For version 2 (the default), all of the bits are used if *a* is a str, - bytes, or bytearray. For version 1 (provided for reproducing random - sequences from older versions of Python), the algorithm for str and - bytes generates a narrower range of seeds. - - """ - - def getstate(self) -> tuple[Any, ...]: - """Return internal state; can be passed to setstate() later.""" - - def setstate(self, state: tuple[Any, ...]) -> None: - """Restore internal state from object returned by getstate().""" - - def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: - """Choose a random item from range(stop) or range(start, stop[, step]). - - Roughly equivalent to ``choice(range(start, stop, step))`` but - supports arbitrarily large ranges and is optimized for common cases. - - """ - - def randint(self, a: int, b: int) -> int: - """Return random integer in range [a, b], including both end points.""" - - def randbytes(self, n: int) -> bytes: - """Generate n random bytes.""" - - def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: - """Choose a random element from a non-empty sequence.""" - + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 + def getstate(self) -> tuple[Any, ...]: ... + def setstate(self, state: tuple[Any, ...]) -> None: ... + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... + def randint(self, a: int, b: int) -> int: ... + def randbytes(self, n: int) -> bytes: ... + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... 
def choices( self, population: SupportsLenAndGetItem[_T], @@ -162,273 +63,47 @@ class Random(_random.Random): *, cum_weights: Sequence[float | Fraction] | None = None, k: int = 1, - ) -> list[_T]: - """Return a k sized list of population elements chosen with replacement. - - If the relative weights or cumulative weights are not specified, - the selections are made with equal probability. - - """ + ) -> list[_T]: ... if sys.version_info >= (3, 11): - def shuffle(self, x: MutableSequence[Any]) -> None: - """Shuffle list x in place, and return None.""" + def shuffle(self, x: MutableSequence[Any]) -> None: ... else: - def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: - """Shuffle list x in place, and return None. - - Optional argument random is a 0-argument function returning a - random float in [0.0, 1.0); if it is the default None, the - standard random.random will be used. - - """ + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... if sys.version_info >= (3, 11): - def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: - """Chooses k unique random elements from a population sequence. - - Returns a new list containing elements from the population while - leaving the original population unchanged. The resulting list is - in selection order so that all sub-slices will also be valid random - samples. This allows raffle winners (the sample) to be partitioned - into grand prize and second place winners (the subslices). - - Members of the population need not be hashable or unique. If the - population contains repeats, then each occurrence is a possible - selection in the sample. - - Repeated elements can be specified one at a time or with the optional - counts parameter. For example: - - sample(['red', 'blue'], counts=[4, 2], k=5) - - is equivalent to: - - sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) - - To choose a sample from a range of integers, use range() for the - population argument. This is especially fast and space efficient - for sampling from a large population: - - sample(range(10000000), 60) - - """ + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... else: - def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: - """Chooses k unique random elements from a population sequence or set. - - Returns a new list containing elements from the population while - leaving the original population unchanged. The resulting list is - in selection order so that all sub-slices will also be valid random - samples. This allows raffle winners (the sample) to be partitioned - into grand prize and second place winners (the subslices). - - Members of the population need not be hashable or unique. If the - population contains repeats, then each occurrence is a possible - selection in the sample. - - Repeated elements can be specified one at a time or with the optional - counts parameter. For example: - - sample(['red', 'blue'], counts=[4, 2], k=5) - - is equivalent to: - - sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) - - To choose a sample from a range of integers, use range() for the - population argument. 
This is especially fast and space efficient - for sampling from a large population: - - sample(range(10000000), 60) - - """ - - def uniform(self, a: float, b: float) -> float: - """Get a random number in the range [a, b) or [a, b] depending on rounding. - - The mean (expected value) and variance of the random variable are: - - E[X] = (a + b) / 2 - Var[X] = (b - a) ** 2 / 12 - - """ + def sample( + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None + ) -> list[_T]: ... - def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: - """Triangular distribution. - - Continuous distribution bounded by given lower and upper limits, - and having a given mode value in-between. - - http://en.wikipedia.org/wiki/Triangular_distribution - - The mean (expected value) and variance of the random variable are: - - E[X] = (low + high + mode) / 3 - Var[X] = (low**2 + high**2 + mode**2 - low*high - low*mode - high*mode) / 18 - - """ + def uniform(self, a: float, b: float) -> float: ... + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... if sys.version_info >= (3, 12): - def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: - """Binomial random variable. - - Gives the number of successes for *n* independent trials - with the probability of success in each trial being *p*: - - sum(random() < p for i in range(n)) - - Returns an integer in the range: - - 0 <= X <= n - - The integer is chosen with the probability: - - P(X == k) = math.comb(n, k) * p ** k * (1 - p) ** (n - k) - - The mean (expected value) and variance of the random variable are: - - E[X] = n * p - Var[X] = n * p * (1 - p) - - """ + def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: ... - def betavariate(self, alpha: float, beta: float) -> float: - """Beta distribution. - - Conditions on the parameters are alpha > 0 and beta > 0. - Returned values range between 0 and 1. - - The mean (expected value) and variance of the random variable are: - - E[X] = alpha / (alpha + beta) - Var[X] = alpha * beta / ((alpha + beta)**2 * (alpha + beta + 1)) - - """ + def betavariate(self, alpha: float, beta: float) -> float: ... if sys.version_info >= (3, 12): - def expovariate(self, lambd: float = 1.0) -> float: - """Exponential distribution. - - lambd is 1.0 divided by the desired mean. It should be - nonzero. (The parameter would be called "lambda", but that is - a reserved word in Python.) Returned values range from 0 to - positive infinity if lambd is positive, and from negative - infinity to 0 if lambd is negative. - - The mean (expected value) and variance of the random variable are: - - E[X] = 1 / lambd - Var[X] = 1 / lambd ** 2 - - """ + def expovariate(self, lambd: float = 1.0) -> float: ... else: - def expovariate(self, lambd: float) -> float: - """Exponential distribution. - - lambd is 1.0 divided by the desired mean. It should be - nonzero. (The parameter would be called "lambda", but that is - a reserved word in Python.) Returned values range from 0 to - positive infinity if lambd is positive, and from negative - infinity to 0 if lambd is negative. - - """ - - def gammavariate(self, alpha: float, beta: float) -> float: - """Gamma distribution. Not the gamma function! - - Conditions on the parameters are alpha > 0 and beta > 0. 
- - The probability distribution function is: - - x ** (alpha - 1) * math.exp(-x / beta) - pdf(x) = -------------------------------------- - math.gamma(alpha) * beta ** alpha + def expovariate(self, lambd: float) -> float: ... - The mean (expected value) and variance of the random variable are: - - E[X] = alpha * beta - Var[X] = alpha * beta ** 2 - - """ + def gammavariate(self, alpha: float, beta: float) -> float: ... if sys.version_info >= (3, 11): - def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: - """Gaussian distribution. - - mu is the mean, and sigma is the standard deviation. This is - slightly faster than the normalvariate() function. - - Not thread-safe without a lock around calls. - - """ - - def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: - """Normal distribution. - - mu is the mean, and sigma is the standard deviation. - - """ + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... else: - def gauss(self, mu: float, sigma: float) -> float: - """Gaussian distribution. - - mu is the mean, and sigma is the standard deviation. This is - slightly faster than the normalvariate() function. - - Not thread-safe without a lock around calls. - - """ - - def normalvariate(self, mu: float, sigma: float) -> float: - """Normal distribution. - - mu is the mean, and sigma is the standard deviation. - - """ + def gauss(self, mu: float, sigma: float) -> float: ... + def normalvariate(self, mu: float, sigma: float) -> float: ... - def lognormvariate(self, mu: float, sigma: float) -> float: - """Log normal distribution. - - If you take the natural logarithm of this distribution, you'll get a - normal distribution with mean mu and standard deviation sigma. - mu can have any value, and sigma must be greater than zero. - - """ - - def vonmisesvariate(self, mu: float, kappa: float) -> float: - """Circular data distribution. - - mu is the mean angle, expressed in radians between 0 and 2*pi, and - kappa is the concentration parameter, which must be greater than or - equal to zero. If kappa is equal to zero, this distribution reduces - to a uniform random angle over the range 0 to 2*pi. - - """ - - def paretovariate(self, alpha: float) -> float: - """Pareto distribution. alpha is the shape parameter.""" - - def weibullvariate(self, alpha: float, beta: float) -> float: - """Weibull distribution. - - alpha is the scale parameter and beta is the shape parameter. - - """ + def lognormvariate(self, mu: float, sigma: float) -> float: ... + def vonmisesvariate(self, mu: float, kappa: float) -> float: ... + def paretovariate(self, alpha: float) -> float: ... + def weibullvariate(self, alpha: float, beta: float) -> float: ... # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): - """Alternate random number generator using sources provided - by the operating system (such as /dev/urandom on Unix or - CryptGenRandom on Windows). - - Not available on all systems (see os.urandom() for details). - - """ - - def getrandbits(self, k: int) -> int: # k can be passed by keyword - """getrandbits(k) -> x. Generates an int with k random bits.""" - - def getstate(self, *args: Any, **kwds: Any) -> NoReturn: - """Method should not be called for a system random number generator.""" - - def setstate(self, *args: Any, **kwds: Any) -> NoReturn: - """Method should not be called for a system random number generator.""" + def getrandbits(self, k: int) -> int: ... 
# k can be passed by keyword + def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... + def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... _inst: Random seed = _inst.seed diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi index af6be98d28d49..fb2a06d5e4c81 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi @@ -1,111 +1,3 @@ -"""Support for regular expressions (RE). - -This module provides regular expression matching operations similar to -those found in Perl. It supports both 8-bit and Unicode strings; both -the pattern and the strings being processed can contain null bytes and -characters outside the US ASCII range. - -Regular expressions can contain both special and ordinary characters. -Most ordinary characters, like "A", "a", or "0", are the simplest -regular expressions; they simply match themselves. You can -concatenate ordinary characters, so last matches the string 'last'. - -The special characters are: - "." Matches any character except a newline. - "^" Matches the start of the string. - "$" Matches the end of the string or just before the newline at - the end of the string. - "*" Matches 0 or more (greedy) repetitions of the preceding RE. - Greedy means that it will match as many repetitions as possible. - "+" Matches 1 or more (greedy) repetitions of the preceding RE. - "?" Matches 0 or 1 (greedy) of the preceding RE. - *?,+?,?? Non-greedy versions of the previous three special characters. - {m,n} Matches from m to n repetitions of the preceding RE. - {m,n}? Non-greedy version of the above. - "\\\\" Either escapes special characters or signals a special sequence. - [] Indicates a set of characters. - A "^" as the first character indicates a complementing set. - "|" A|B, creates an RE that will match either A or B. - (...) Matches the RE inside the parentheses. - The contents can be retrieved or matched later in the string. - (?aiLmsux) The letters set the corresponding flags defined below. - (?:...) Non-grouping version of regular parentheses. - (?P...) The substring matched by the group is accessible by name. - (?P=name) Matches the text matched earlier by the group named name. - (?#...) A comment; ignored. - (?=...) Matches if ... matches next, but doesn't consume the string. - (?!...) Matches if ... doesn't match next. - (?<=...) Matches if preceded by ... (must be fixed length). - (?= 3, 11) or # sre_constants. Typeshed has it here because its __module__ attribute is set to "re". class error(Exception): - """Exception raised for invalid regular expressions. - - Attributes: - - msg: The unformatted error message - pattern: The regular expression pattern - pos: The index in the pattern where compilation failed (may be None) - lineno: The line corresponding to pos (may be None) - colno: The column corresponding to pos (may be None) - """ - msg: str pattern: str | bytes | None pos: int | None @@ -182,51 +63,30 @@ class error(Exception): @final class Match(Generic[AnyStr]): - """The result of re.match() and re.search(). - Match objects always have a boolean value of True. - """ - @property - def pos(self) -> int: - """The index into the string at which the RE engine started looking for a match.""" - + def pos(self) -> int: ... @property - def endpos(self) -> int: - """The index into the string beyond which the RE engine will not go.""" - + def endpos(self) -> int: ... 
@property - def lastindex(self) -> int | None: - """The integer index of the last matched capturing group.""" - + def lastindex(self) -> int | None: ... @property - def lastgroup(self) -> str | None: - """The name of the last matched capturing group.""" - + def lastgroup(self) -> str | None: ... @property - def string(self) -> AnyStr: - """The string passed to match() or search().""" + def string(self) -> AnyStr: ... + # The regular expression object whose match() or search() method produced # this match instance. @property - def re(self) -> Pattern[AnyStr]: - """The regular expression object.""" - + def re(self) -> Pattern[AnyStr]: ... @overload - def expand(self: Match[str], template: str) -> str: - """Return the string obtained by doing backslash substitution on the string template, as done by the sub() method.""" - + def expand(self: Match[str], template: str) -> str: ... @overload def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... @overload def expand(self, template: AnyStr) -> AnyStr: ... # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def group(self, group: Literal[0] = 0, /) -> AnyStr: - """group([group1, ...]) -> str or tuple. - Return subgroup(s) of the match by indices or names. - For 0 returns the entire match. - """ - + def group(self, group: Literal[0] = 0, /) -> AnyStr: ... @overload def group(self, group: str | int, /) -> AnyStr | MaybeNone: ... @overload @@ -234,93 +94,53 @@ class Match(Generic[AnyStr]): # Each item of groups()'s return tuple is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload - def groups(self) -> tuple[AnyStr | MaybeNone, ...]: - """Return a tuple containing all the subgroups of the match, from 1. - - default - Is used for groups that did not participate in the match. - """ - + def groups(self) -> tuple[AnyStr | MaybeNone, ...]: ... @overload def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... # Each value in groupdict()'s return dict is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload - def groupdict(self) -> dict[str, AnyStr | MaybeNone]: - """Return a dictionary containing all the named subgroups of the match, keyed by the subgroup name. - - default - Is used for groups that did not participate in the match. - """ - + def groupdict(self) -> dict[str, AnyStr | MaybeNone]: ... @overload def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... - def start(self, group: int | str = 0, /) -> int: - """Return index of the start of the substring matched by group.""" - - def end(self, group: int | str = 0, /) -> int: - """Return index of the end of the substring matched by group.""" - - def span(self, group: int | str = 0, /) -> tuple[int, int]: - """For match object m, return the 2-tuple (m.start(group), m.end(group)).""" - + def start(self, group: int | str = 0, /) -> int: ... + def end(self, group: int | str = 0, /) -> int: ... + def span(self, group: int | str = 0, /) -> tuple[int, int]: ... @property def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def __getitem__(self, key: Literal[0], /) -> AnyStr: - """Return self[key].""" - + def __getitem__(self, key: Literal[0], /) -> AnyStr: ... @overload def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ... def __copy__(self) -> Match[AnyStr]: ... def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ... 
- def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @final class Pattern(Generic[AnyStr]): - """Compiled regular expression object.""" - @property - def flags(self) -> int: - """The regex matching flags.""" - + def flags(self) -> int: ... @property - def groupindex(self) -> Mapping[str, int]: - """A dictionary mapping group names to group numbers.""" - + def groupindex(self) -> Mapping[str, int]: ... @property - def groups(self) -> int: - """The number of capturing groups in the pattern.""" - + def groups(self) -> int: ... @property - def pattern(self) -> AnyStr: - """The pattern string from which the RE object was compiled.""" - + def pattern(self) -> AnyStr: ... @overload - def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: - """Scan through string looking for a match, and return a corresponding match object instance. - - Return None if no position in the string matches. - """ - + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... @overload def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: - """Matches zero or more characters at the beginning of the string.""" - + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... @overload def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: - """Matches against all of the string.""" - + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... @overload def fullmatch( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -328,29 +148,20 @@ class Pattern(Generic[AnyStr]): @overload def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: - """Split string by the occurrences of pattern.""" - + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: ... @overload def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | MaybeNone]: ... @overload def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | MaybeNone]: ... # return type depends on the number of groups in the pattern @overload - def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: - """Return a list of all non-overlapping matches of pattern in string.""" - + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... 
@overload def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... @overload - def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: - """Return an iterator over all non-overlapping matches for the RE pattern in string. - - For each match, the iterator returns a match object. - """ - + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... @overload def finditer( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -358,9 +169,7 @@ class Pattern(Generic[AnyStr]): @overload def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... @overload - def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: - """Return the string obtained by replacing the leftmost non-overlapping occurrences of pattern in string by the replacement repl.""" - + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... @overload def sub( self: Pattern[bytes], @@ -371,9 +180,7 @@ class Pattern(Generic[AnyStr]): @overload def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... @overload - def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: - """Return the tuple (new_string, number_of_subs_made) found by replacing the leftmost non-overlapping occurrences of pattern with the replacement repl.""" - + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... @overload def subn( self: Pattern[bytes], @@ -387,14 +194,11 @@ class Pattern(Generic[AnyStr]): def __deepcopy__(self, memo: Any, /) -> Pattern[AnyStr]: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # ----- re variables and constants ----- class RegexFlag(enum.IntFlag): - """An enumeration.""" - A = sre_compile.SRE_FLAG_ASCII ASCII = A DEBUG = sre_compile.SRE_FLAG_DEBUG @@ -446,85 +250,39 @@ _FlagsType: TypeAlias = int | RegexFlag # pattern arguments do *not* accept arbitrary buffers such as bytearray, # because the pattern must be hashable. @overload -def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: - """Compile a regular expression pattern, returning a Pattern object.""" - +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload -def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: - """Scan through string looking for a match to the pattern, returning - a Match object, or None if no match was found. - """ - +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: - """Try to apply the pattern at the start of the string, returning - a Match object, or None if no match was found. 
- """ - +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: - """Try to apply the pattern to all of the string, returning - a Match object, or None if no match was found. - """ - +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: - """Split the source string by the occurrences of the pattern, - returning a list containing the resulting substrings. If - capturing parentheses are used in pattern, then the text of all - groups in the pattern are also returned as part of the resulting - list. If maxsplit is nonzero, at most maxsplit splits occur, - and the remainder of the string is returned as the final element - of the list. - """ - +def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: ... @overload def split( pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 ) -> list[bytes | MaybeNone]: ... @overload -def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: - """Return a list of all non-overlapping matches in the string. - - If one or more capturing groups are present in the pattern, return - a list of groups; this will be a list of tuples if the pattern - has more than one group. - - Empty matches are included in the result. - """ - +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... @overload def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... @overload -def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: - """Return an iterator over all non-overlapping matches in the - string. For each match, the iterator returns a Match object. - - Empty matches are included in the result. - """ - +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... @overload def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... @overload def sub( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 -) -> str: - """Return the string obtained by replacing the leftmost - non-overlapping occurrences of the pattern in string by the - replacement repl. repl can be either a string or a callable; - if a string, backslash escapes in it are processed. If it is - a callable, it's passed the Match object and must return - a replacement string to be used. - """ - +) -> str: ... @overload def sub( pattern: bytes | Pattern[bytes], @@ -536,17 +294,7 @@ def sub( @overload def subn( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 -) -> tuple[str, int]: - """Return a 2-tuple containing (new_string, number). 
- new_string is the string obtained by replacing the leftmost - non-overlapping occurrences of the pattern in the source - string by the replacement repl. number is the number of - substitutions that were made. repl can be either a string or a - callable; if a string, backslash escapes in it are processed. - If it is a callable, it's passed the Match object and must - return a replacement string to be used. - """ - +) -> tuple[str, int]: ... @overload def subn( pattern: bytes | Pattern[bytes], @@ -555,19 +303,12 @@ def subn( count: int = 0, flags: _FlagsType = 0, ) -> tuple[bytes, int]: ... -def escape(pattern: AnyStr) -> AnyStr: - """ - Escape special characters in a string. - """ - -def purge() -> None: - """Clear the regular expression caches""" +def escape(pattern: AnyStr) -> AnyStr: ... +def purge() -> None: ... if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `re.compile()` instead.") - def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented - """Compile a template pattern, returning a Pattern object, deprecated""" + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... # undocumented else: - def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented - """Compile a template pattern, returning a Pattern object""" + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi index 0358c632ee3d0..7325c267b32c2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi @@ -1,5 +1,3 @@ -"""Importing this module enables command line editing using libedit readline.""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -10,122 +8,33 @@ if sys.platform != "win32": _Completer: TypeAlias = Callable[[str, int], str | None] _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] - def parse_and_bind(string: str, /) -> None: - """Execute the init line provided in the string argument.""" - - def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: - """Execute a readline initialization file. - - The default filename is the last filename used. - """ - - def get_line_buffer() -> str: - """Return the current contents of the line buffer.""" - - def insert_text(string: str, /) -> None: - """Insert text into the line buffer at the cursor position.""" - - def redisplay() -> None: - """Change what's displayed on the screen to reflect contents of the line buffer.""" - - def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: - """Load a readline history file. - - The default filename is ~/.history. - """ - - def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: - """Save a readline history file. - - The default filename is ~/.history. - """ - - def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: - """Append the last nelements items of the history list to file. - - The default filename is ~/.history. 
- """ - - def get_history_length() -> int: - """Return the maximum number of lines that will be written to the history file.""" - - def set_history_length(length: int, /) -> None: - """Set the maximal number of lines which will be written to the history file. - - A negative length is used to inhibit history truncation. - """ - - def clear_history() -> None: - """Clear the current readline history.""" - - def get_current_history_length() -> int: - """Return the current (not the maximum) length of history.""" - - def get_history_item(index: int, /) -> str: - """Return the current contents of history item at one-based index.""" - - def remove_history_item(pos: int, /) -> None: - """Remove history item given by its zero-based position.""" - - def replace_history_item(pos: int, line: str, /) -> None: - """Replaces history item given by its position with contents of line. - - pos is zero-based. - """ - - def add_history(string: str, /) -> None: - """Add an item to the history buffer.""" - - def set_auto_history(enabled: bool, /) -> None: - """Enables or disables automatic history.""" - - def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: - """Set or remove the function invoked by the rl_startup_hook callback. - - The function is called with no arguments just - before readline prints the first prompt. - """ - - def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: - """Set or remove the function invoked by the rl_pre_input_hook callback. - - The function is called with no arguments after the first prompt - has been printed and just before readline starts reading input - characters. - """ - - def set_completer(function: _Completer | None = None, /) -> None: - """Set or remove the completer function. - - The function is called as function(text, state), - for state in 0, 1, 2, ..., until it returns a non-string. - It should return the next possible completion starting with 'text'. - """ - - def get_completer() -> _Completer | None: - """Get the current completer function.""" - - def get_completion_type() -> int: - """Get the type of completion being attempted.""" - - def get_begidx() -> int: - """Get the beginning index of the completion scope.""" - - def get_endidx() -> int: - """Get the ending index of the completion scope.""" - - def set_completer_delims(string: str, /) -> None: - """Set the word delimiters for completion.""" - - def get_completer_delims() -> str: - """Get the word delimiters for completion.""" - - def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: - """Set or remove the completion display function. + def parse_and_bind(string: str, /) -> None: ... + def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def get_line_buffer() -> str: ... + def insert_text(string: str, /) -> None: ... + def redisplay() -> None: ... + def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... + def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: ... + def get_history_length() -> int: ... + def set_history_length(length: int, /) -> None: ... + def clear_history() -> None: ... + def get_current_history_length() -> int: ... + def get_history_item(index: int, /) -> str: ... + def remove_history_item(pos: int, /) -> None: ... + def replace_history_item(pos: int, line: str, /) -> None: ... + def add_history(string: str, /) -> None: ... 
+ def set_auto_history(enabled: bool, /) -> None: ... + def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: ... + def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: ... + def set_completer(function: _Completer | None = None, /) -> None: ... + def get_completer() -> _Completer | None: ... + def get_completion_type() -> int: ... + def get_begidx() -> int: ... + def get_endidx() -> int: ... + def set_completer_delims(string: str, /) -> None: ... + def get_completer_delims() -> str: ... + def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: ... - The function is called as - function(substitution, [matches], longest_match_length) - once each time matches need to be displayed. - """ if sys.version_info >= (3, 13): backend: Literal["readline", "editline"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi index e94cc4db69fdc..68ada65693485 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi @@ -1,5 +1,3 @@ -"""Redo the builtin repr() (representation) but with limits on most sizes.""" - import sys from array import array from collections import deque @@ -11,8 +9,7 @@ __all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc: TypeAlias = Callable[[Any], str] -def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: - """Decorator to make a repr function return fillvalue for a recursive call""" +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... class Repr: maxlevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi index f5b27d28cef0e..f99cd5b088056 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi @@ -30,14 +30,6 @@ if sys.platform != "win32": class struct_rusage( structseq[float], tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] ): - """struct_rusage: Result from getrusage. - - This object may be accessed either as a tuple of - (utime,stime,maxrss,ixrss,idrss,isrss,minflt,majflt, - nswap,inblock,oublock,msgsnd,msgrcv,nsignals,nvcsw,nivcsw) - or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ( "ru_utime", @@ -59,68 +51,37 @@ if sys.platform != "win32": ) @property - def ru_utime(self) -> float: - """user time used""" - + def ru_utime(self) -> float: ... @property - def ru_stime(self) -> float: - """system time used""" - + def ru_stime(self) -> float: ... @property - def ru_maxrss(self) -> int: - """max. resident set size""" - + def ru_maxrss(self) -> int: ... @property - def ru_ixrss(self) -> int: - """shared memory size""" - + def ru_ixrss(self) -> int: ... @property - def ru_idrss(self) -> int: - """unshared data size""" - + def ru_idrss(self) -> int: ... @property - def ru_isrss(self) -> int: - """unshared stack size""" - + def ru_isrss(self) -> int: ... @property - def ru_minflt(self) -> int: - """page faults not requiring I/O""" - + def ru_minflt(self) -> int: ... @property - def ru_majflt(self) -> int: - """page faults requiring I/O""" - + def ru_majflt(self) -> int: ... @property - def ru_nswap(self) -> int: - """number of swap outs""" - + def ru_nswap(self) -> int: ... 
@property - def ru_inblock(self) -> int: - """block input operations""" - + def ru_inblock(self) -> int: ... @property - def ru_oublock(self) -> int: - """block output operations""" - + def ru_oublock(self) -> int: ... @property - def ru_msgsnd(self) -> int: - """IPC messages sent""" - + def ru_msgsnd(self) -> int: ... @property - def ru_msgrcv(self) -> int: - """IPC messages received""" - + def ru_msgrcv(self) -> int: ... @property - def ru_nsignals(self) -> int: - """signals received""" - + def ru_nsignals(self) -> int: ... @property - def ru_nvcsw(self) -> int: - """voluntary context switches""" - + def ru_nvcsw(self) -> int: ... @property - def ru_nivcsw(self) -> int: - """involuntary context switches""" + def ru_nivcsw(self) -> int: ... def getpagesize() -> int: ... def getrlimit(resource: int, /) -> tuple[int, int]: ... @@ -130,6 +91,5 @@ if sys.platform != "win32": if sys.version_info >= (3, 12): def prlimit(pid: int, resource: int, limits: tuple[int, int] | None = None, /) -> tuple[int, int]: ... else: - def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: - """prlimit(pid, resource, [limits])""" + def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: ... error = OSError diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi index 37185ea5d74be..8d9477e3ee452 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi @@ -1,79 +1,9 @@ -"""Word completion for GNU readline. - -The completer completes keywords, built-ins and globals in a selectable -namespace (which defaults to __main__); when completing NAME.NAME..., it -evaluates (!) the expression up to the last dot and completes its attributes. - -It's very cool to do "import sys" type "sys.", hit the completion key (twice), -and see the list of names defined by the sys module! - -Tip: to use the tab key as the completion key, call - - readline.parse_and_bind("tab: complete") - -Notes: - -- Exceptions raised by the completer function are *ignored* (and generally cause - the completion to fail). This is a feature -- since readline sets the tty - device in raw (or cbreak) mode, printing a traceback wouldn't work well - without some complicated hoopla to save, reset and restore the tty state. - -- The evaluation of the NAME.NAME... form may cause arbitrary application - defined code to be executed if an object with a __getattr__ hook is found. - Since it is the responsibility of the application (or the user) to enable this - feature, I consider this an acceptable risk. More complicated expressions - (e.g. function calls or indexing operations) are *not* evaluated. - -- When the original stdin is not a tty device, GNU readline is never - used, and this module (and the readline module) are silently inactive. - -""" - from typing import Any __all__ = ["Completer"] class Completer: - def __init__(self, namespace: dict[str, Any] | None = None) -> None: - """Create a new completer for the command line. - - Completer([namespace]) -> completer instance. - - If unspecified, the default namespace where completions are performed - is __main__ (technically, __main__.__dict__). Namespaces should be - given as dictionaries. 
- - Completer instances should be used as the completion mechanism of - readline via the set_completer() call: - - readline.set_completer(Completer(my_namespace).complete) - """ - - def complete(self, text: str, state: int) -> str | None: - """Return the next possible completion for 'text'. - - This is called successively with state == 0, 1, 2, ... until it - returns None. The completion should begin with 'text'. - - """ - - def attr_matches(self, text: str) -> list[str]: - """Compute matches when text contains a dot. - - Assuming the text is of the form NAME.NAME....[NAME], and is - evaluable in self.namespace, it will be evaluated and its attributes - (as revealed by dir()) are used as possible completions. (For class - instances, class members are also considered.) - - WARNING: this can still invoke arbitrary C code, if an object - with a __getattr__ hook is evaluated. - - """ - - def global_matches(self, text: str) -> list[str]: - """Compute matches when text is a simple name. - - Return a list of all keywords, built-in functions and names currently - defined in self.namespace that match. - - """ + def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... + def complete(self, text: str, state: int) -> str | None: ... + def attr_matches(self, text: str) -> list[str]: ... + def global_matches(self, text: str) -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi index a52a3bd7f3f40..d4406ea4ac41e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi @@ -1,12 +1,3 @@ -"""runpy.py - locating and running Python code using the module namespace - -Provides support for locating and running Python scripts using the Python -module namespace instead of the native filesystem. - -This allows Python code to play nicely with non-filesystem based PEP 302 -importers when locating support scripts as well as when importing modules. -""" - from _typeshed import Unused from types import ModuleType from typing import Any @@ -15,8 +6,6 @@ from typing_extensions import Self __all__ = ["run_module", "run_path"] class _TempModule: - """Temporarily replace a module in sys.modules with an empty namespace""" - mod_name: str module: ModuleType def __init__(self, mod_name: str) -> None: ... @@ -31,39 +20,5 @@ class _ModifiedArgv0: def run_module( mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False -) -> dict[str, Any]: - """Execute a module's code without importing it. - - mod_name -- an absolute module name or package name. - - Optional arguments: - init_globals -- dictionary used to pre-populate the module’s - globals dictionary before the code is executed. - - run_name -- if not None, this will be used for setting __name__; - otherwise, __name__ will be set to mod_name + '__main__' if the - named module is a package and to just mod_name otherwise. - - alter_sys -- if True, sys.argv[0] is updated with the value of - __file__ and sys.modules[__name__] is updated with a temporary - module object for the module being executed. Both are - restored to their original values before the function returns. - - Returns the resulting module globals dictionary. - """ - -def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: - """Execute code located at the specified filesystem location. 
- - path_name -- filesystem location of a Python script, zipfile, - or directory containing a top level __main__.py script. - - Optional arguments: - init_globals -- dictionary used to pre-populate the module’s - globals dictionary before the code is executed. - - run_name -- if not None, this will be used to set __name__; - otherwise, '' will be used for __name__. - - Returns the resulting module globals dictionary. - """ +) -> dict[str, Any]: ... +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi index 0567d514ac3dc..52f87ab68ff54 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi @@ -1,28 +1,3 @@ -"""A generally useful event scheduler class. - -Each instance of this class manages its own queue. -No multi-threading is implied; you are supposed to hack that -yourself, or use a single instance per application. - -Each instance is parametrized with two functions, one that is -supposed to return the current time, one that is supposed to -implement a delay. You can implement real-time scheduling by -substituting time and sleep from built-in module time, or you can -implement simulated time by writing your own functions. This can -also be used to integrate scheduling with STDWIN events; the delay -function is allowed to modify the queue. Time can be expressed as -integers or floating-point numbers, as long as it is consistent. - -Events are specified by tuples (time, priority, action, argument, kwargs). -As in UNIX, lower priority numbers mean higher priority; in this -way the queue can be maintained as a priority queue. Execution of the -event means calling the action function, passing it the argument -sequence in "argument" (remember that in Python, multiple function -arguments are be packed in a sequence) and keyword parameters in "kwargs". -The action function may be an instance method so it -has another way to reference private data (besides global variables). -""" - import sys from collections.abc import Callable from typing import Any, ClassVar, NamedTuple, type_check_only @@ -34,8 +9,6 @@ _ActionCallback: TypeAlias = Callable[..., Any] if sys.version_info >= (3, 10): class Event(NamedTuple): - """Event(time, priority, sequence, action, argument, kwargs)""" - time: float priority: Any sequence: int @@ -59,71 +32,15 @@ class scheduler: timefunc: Callable[[], float] delayfunc: Callable[[float], object] - def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: - """Initialize a new instance, passing the time and delay - functions - """ - + def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... def enterabs( self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... - ) -> Event: - """Enter a new event in the queue at an absolute time. - - Returns an ID for the event which can be used to remove it, - if necessary. - - """ - + ) -> Event: ... def enter( self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... - ) -> Event: - """A variant that specifies the time as a relative time. - - This is actually the more commonly used interface. 
- - """ - - def run(self, blocking: bool = True) -> float | None: - """Execute events until the queue is empty. - If blocking is False executes the scheduled events due to - expire soonest (if any) and then return the deadline of the - next scheduled call in the scheduler. - - When there is a positive delay until the first event, the - delay function is called and the event is left in the queue; - otherwise, the event is removed from the queue and executed - (its action function is called, passing it the argument). If - the delay function returns prematurely, it is simply - restarted. - - It is legal for both the delay function and the action - function to modify the queue or to raise an exception; - exceptions are not caught but the scheduler's state remains - well-defined so run() may be called again. - - A questionable hack is added to allow other threads to run: - just after an event is executed, a delay of 0 is executed, to - avoid monopolizing the CPU when other threads are also - runnable. - - """ - - def cancel(self, event: Event) -> None: - """Remove an event from the queue. - - This must be presented the ID as returned by enter(). - If the event is not in the queue, this raises ValueError. - - """ - - def empty(self) -> bool: - """Check whether the queue is empty.""" - + ) -> Event: ... + def run(self, blocking: bool = True) -> float | None: ... + def cancel(self, event: Event) -> None: ... + def empty(self) -> bool: ... @property - def queue(self) -> list[Event]: - """An ordered list of upcoming events. - - Events are named tuples with fields for: - time, priority, action, arguments, kwargs - - """ + def queue(self) -> list[Event]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi index d6c5de9b37615..4861b6f09340e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi @@ -1,11 +1,3 @@ -"""Generate cryptographically strong pseudo-random numbers suitable for -managing secrets such as account authentication, tokens, and similar. - -See PEP 506 for more information. -https://peps.python.org/pep-0506/ - -""" - from _typeshed import SupportsLenAndGetItem from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom @@ -15,45 +7,9 @@ __all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "to _T = TypeVar("_T") -def randbelow(exclusive_upper_bound: int) -> int: - """Return a random int in the range [0, n).""" - -def randbits(k: int) -> int: - """getrandbits(k) -> x. Generates an int with k random bits.""" - -def choice(seq: SupportsLenAndGetItem[_T]) -> _T: - """Choose a random element from a non-empty sequence.""" - -def token_bytes(nbytes: int | None = None) -> bytes: - """Return a random byte string containing *nbytes* bytes. - - If *nbytes* is ``None`` or not supplied, a reasonable - default is used. - - >>> token_bytes(16) #doctest:+SKIP - b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' - - """ - -def token_hex(nbytes: int | None = None) -> str: - """Return a random text string, in hexadecimal. - - The string has *nbytes* random bytes, each byte converted to two - hex digits. If *nbytes* is ``None`` or not supplied, a reasonable - default is used. - - >>> token_hex(16) #doctest:+SKIP - 'f9bf78b9a18ce6d46a0cd2b0b86df9da' - - """ - -def token_urlsafe(nbytes: int | None = None) -> str: - """Return a random URL-safe text string, in Base64 encoding. 
- - The string has *nbytes* random bytes. If *nbytes* is ``None`` - or not supplied, a reasonable default is used. - - >>> token_urlsafe(16) #doctest:+SKIP - 'Drmhze6EPcv0fN_81Bj-nA' - - """ +def randbelow(exclusive_upper_bound: int) -> int: ... +def randbits(k: int) -> int: ... +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... +def token_bytes(nbytes: int | None = None) -> bytes: ... +def token_hex(nbytes: int | None = None) -> str: ... +def token_urlsafe(nbytes: int | None = None) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi index 93d3b5a72f05f..587bc75376ef1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi @@ -1,9 +1,3 @@ -"""This module supports asynchronous I/O on multiple file descriptors. - -*** IMPORTANT NOTICE *** -On Windows, only sockets are supported; on Unix, all file descriptors. -""" - import sys from _typeshed import FileDescriptorLike from collections.abc import Iterable @@ -31,12 +25,6 @@ if sys.platform != "win32": # This is actually a function that returns an instance of a class. # The class is not accessible directly, and also calls itself select.poll. class poll: - """Returns a polling object. - - This object supports registering and unregistering file descriptors, and then - polling them for I/O events. - """ - # default value is select.POLLIN | select.POLLPRI | select.POLLOUT def register(self, fd: FileDescriptorLike, eventmask: int = 7, /) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int, /) -> None: ... @@ -45,30 +33,7 @@ if sys.platform != "win32": def select( rlist: Iterable[Any], wlist: Iterable[Any], xlist: Iterable[Any], timeout: float | None = None, / -) -> tuple[list[Any], list[Any], list[Any]]: - """Wait until one or more file descriptors are ready for some kind of I/O. - - The first three arguments are iterables of file descriptors to be waited for: - rlist -- wait until ready for reading - wlist -- wait until ready for writing - xlist -- wait for an "exceptional condition" - If only one kind of condition is required, pass [] for the other lists. - - A file descriptor is either a socket or file object, or a small integer - gotten from a fileno() method call on one of those. - - The optional 4th argument specifies a timeout in seconds; it may be - a floating-point number to specify fractions of seconds. If it is absent - or None, the call will never time out. - - The return value is a tuple of three lists corresponding to the first three - arguments; each contains the subset of the corresponding file descriptors - that are ready. - - *** IMPORTANT NOTICE *** - On Windows, only sockets are supported; on Unix, all file - descriptors can be used. - """ +) -> tuple[list[Any], list[Any], list[Any]]: ... error = OSError @@ -76,22 +41,6 @@ if sys.platform != "linux" and sys.platform != "win32": # BSD only @final class kevent: - """kevent(ident, filter=KQ_FILTER_READ, flags=KQ_EV_ADD, fflags=0, data=0, udata=0) - - This object is the equivalent of the struct kevent for the C API. - - See the kqueue manpage for more detailed information about the meaning - of the arguments. - - One minor note: while you might hope that udata could store a - reference to a python object, it cannot, because it is impossible to - keep a proper reference count of the object once it's passed into the - kernel. Therefore, I have restricted it to only storing an integer. 
I - recommend ignoring it and simply using the 'ident' field to key off - of. You could also set up a dictionary on the python side to store a - udata->object mapping. - """ - data: Any fflags: int filter: int @@ -112,48 +61,15 @@ if sys.platform != "linux" and sys.platform != "win32": # BSD only @final class kqueue: - """Kqueue syscall wrapper. - - For example, to start watching a socket for input: - >>> kq = kqueue() - >>> sock = socket() - >>> sock.connect((host, port)) - >>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_ADD)], 0) - - To wait one second for it to become writeable: - >>> kq.control(None, 1, 1000) - - To stop listening: - >>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_DELETE)], 0) - """ - closed: bool def __init__(self) -> None: ... - def close(self) -> None: - """Close the kqueue control file descriptor. - - Further operations on the kqueue object will raise an exception. - """ - - def control(self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, /) -> list[kevent]: - """Calls the kernel kevent function. - - changelist - Must be an iterable of kevent objects describing the changes to be made - to the kernel's watch list or None. - maxevents - The maximum number of events that the kernel will return. - timeout - The maximum time to wait in seconds, or else None to wait forever. - This accepts floats for smaller timeouts, too. - """ - - def fileno(self) -> int: - """Return the kqueue control file descriptor.""" - + def close(self) -> None: ... + def control( + self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, / + ) -> list[kevent]: ... + def fileno(self) -> int: ... @classmethod - def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: - """Create a kqueue object from a given control fd.""" + def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: ... KQ_EV_ADD: Final[int] KQ_EV_CLEAR: Final[int] @@ -198,15 +114,6 @@ if sys.platform != "linux" and sys.platform != "win32": if sys.platform == "linux": @final class epoll: - """select.epoll(sizehint=-1, flags=0) - - Returns an epolling object - - sizehint must be a positive integer or -1 for the default size. The - sizehint is used to optimize internal data structures. It doesn't limit - the maximum number of monitored events. - """ - def __new__(self, sizehint: int = ..., flags: int = ...) -> Self: ... def __enter__(self) -> Self: ... def __exit__( @@ -216,58 +123,15 @@ if sys.platform == "linux": exc_tb: TracebackType | None = None, /, ) -> None: ... - def close(self) -> None: - """Close the epoll control file descriptor. - - Further operations on the epoll object will raise an exception. - """ + def close(self) -> None: ... closed: bool - def fileno(self) -> int: - """Return the epoll control file descriptor.""" - - def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: - """Registers a new fd or raises an OSError if the fd is already registered. - - fd - the target file descriptor of the operation - eventmask - a bit set composed of the various EPOLL constants - - The epoll interface supports all file descriptors that support poll. - """ - - def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: - """Modify event mask for a registered file descriptor. - - fd - the target file descriptor of the operation - eventmask - a bit set composed of the various EPOLL constants - """ - - def unregister(self, fd: FileDescriptorLike) -> None: - """Remove a registered file descriptor from the epoll object. 
- - fd - the target file descriptor of the operation - """ - - def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: - """Wait for events on the epoll file descriptor. - - timeout - the maximum time to wait in seconds (as float); - a timeout of None or -1 makes poll wait indefinitely - maxevents - the maximum number of events returned; -1 means no limit - - Returns a list containing any descriptors that have events to report, - as a list of (fd, events) 2-tuples. - """ - + def fileno(self) -> int: ... + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... @classmethod - def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: - """Create an epoll object from a given control fd.""" + def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: ... EPOLLERR: Final[int] EPOLLEXCLUSIVE: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi index ed95b56b13847..bcca4e341b9a1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi @@ -1,9 +1,3 @@ -"""Selectors module. - -This module allows high-level and efficient I/O multiplexing, built upon the -`select` module primitives. -""" - import sys from _typeshed import FileDescriptor, FileDescriptorLike, Unused from abc import ABCMeta, abstractmethod @@ -17,151 +11,43 @@ EVENT_READ: Final = 1 EVENT_WRITE: Final = 2 class SelectorKey(NamedTuple): - """SelectorKey(fileobj, fd, events, data) - - Object used to associate a file object to its backing - file descriptor, selected event mask, and attached data. - """ - fileobj: FileDescriptorLike fd: FileDescriptor events: _EventMask data: Any class BaseSelector(metaclass=ABCMeta): - """Selector abstract base class. - - A selector supports registering file objects to be monitored for specific - I/O events. - - A file object is a file descriptor or any object with a `fileno()` method. - An arbitrary object can be attached to the file object, which can be used - for example to store context information, a callback, etc. - - A selector can use various implementations (select(), poll(), epoll()...) - depending on the platform. The default `Selector` class uses the most - efficient implementation on the current platform. - """ - @abstractmethod - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: - """Register a file object. - - Parameters: - fileobj -- file object or file descriptor - events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) - data -- attached data - - Returns: - SelectorKey instance - - Raises: - ValueError if events is invalid - KeyError if fileobj is already registered - OSError if fileobj is closed or otherwise is unacceptable to - the underlying system call (if a system call is made) - - Note: - OSError may or may not be raised - """ - + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod - def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: - """Unregister a file object. 
- - Parameters: - fileobj -- file object or file descriptor - - Returns: - SelectorKey instance - - Raises: - KeyError if fileobj is not registered - - Note: - If fileobj is registered but has since been closed this does - *not* raise OSError (even if the wrapped syscall does) - """ - - def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: - """Change a registered file object monitored events or attached data. - - Parameters: - fileobj -- file object or file descriptor - events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) - data -- attached data - - Returns: - SelectorKey instance - - Raises: - Anything that unregister() or register() raises - """ - + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: - """Perform the actual selection, until some monitored file objects are - ready or a timeout expires. - - Parameters: - timeout -- if timeout > 0, this specifies the maximum wait time, in - seconds - if timeout <= 0, the select() call won't block, and will - report the currently ready file objects - if timeout is None, select() will block until a monitored - file object becomes ready - - Returns: - list of (key, events) for ready file objects - `events` is a bitwise mask of EVENT_READ|EVENT_WRITE - """ - - def close(self) -> None: - """Close the selector. - - This must be called to make sure that any underlying resource is freed. - """ - - def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: - """Return the key associated to a registered file object. - - Returns: - SelectorKey for this file object - """ - + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def close(self) -> None: ... + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... @abstractmethod - def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: - """Return a mapping of file objects to selector keys.""" - + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... class _BaseSelectorImpl(BaseSelector, metaclass=ABCMeta): - """Base selector implementation.""" - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class SelectSelector(_BaseSelectorImpl): - """Select-based selector.""" - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... class _PollLikeSelector(_BaseSelectorImpl): - """Base class shared between poll, epoll and devpoll selectors.""" - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": - class PollSelector(_PollLikeSelector): - """Poll-based selector.""" + class PollSelector(_PollLikeSelector): ... if sys.platform == "linux": class EpollSelector(_PollLikeSelector): - """Epoll-based selector.""" - def fileno(self) -> int: ... 
if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": @@ -171,8 +57,6 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win if sys.platform != "win32" and sys.platform != "linux": class KqueueSelector(_BaseSelectorImpl): - """Kqueue-based selector.""" - def fileno(self) -> int: ... def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... @@ -180,8 +64,6 @@ if sys.platform != "win32" and sys.platform != "linux": # The runtime logic is more fine-grained than a `sys.platform` check; # not really expressible in the stubs class DefaultSelector(_BaseSelectorImpl): - """Epoll-based selector.""" - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": def fileno(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi index 7fd7398162125..654c2ea097f78 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi @@ -1,61 +1,3 @@ -"""Manage shelves of pickled objects. - -A "shelf" is a persistent, dictionary-like object. The difference -with dbm databases is that the values (not the keys!) in a shelf can -be essentially arbitrary Python objects -- anything that the "pickle" -module can handle. This includes most class instances, recursive data -types, and objects containing lots of shared sub-objects. The keys -are ordinary strings. - -To summarize the interface (key is a string, data is an arbitrary -object): - - import shelve - d = shelve.open(filename) # open, with (g)dbm filename -- no suffix - - d[key] = data # store data at key (overwrites old data if - # using an existing key) - data = d[key] # retrieve a COPY of the data at key (raise - # KeyError if no such key) -- NOTE that this - # access returns a *copy* of the entry! - del d[key] # delete data stored at key (raises KeyError - # if no such key) - flag = key in d # true if the key exists - list = d.keys() # a list of all existing keys (slow!) - - d.close() # close it - -Dependent on the implementation, closing a persistent dictionary may -or may not be necessary to flush changes to disk. - -Normally, d[key] returns a COPY of the entry. This needs care when -mutable entries are mutated: for example, if d[key] is a list, - d[key].append(anitem) -does NOT modify the entry d[key] itself, as stored in the persistent -mapping -- it only modifies the copy, which is then immediately -discarded, so that the append has NO effect whatsoever. To append an -item to d[key] in a way that will affect the persistent mapping, use: - data = d[key] - data.append(anitem) - d[key] = data - -To avoid the problem with mutable entries, you may pass the keyword -argument writeback=True in the call to shelve.open. When you use: - d = shelve.open(filename, writeback=True) -then d keeps a cache of all entries you access, and writes them all back -to the persistent mapping when you call d.close(). This ensures that -such usage as d[key].append(anitem) works as intended. - -However, using keyword argument writeback=True may consume vast amount -of memory for the cache, and it may make d.close() very slow, if you -access many of d's entries after opening it in this way: d has no way to -check which of the entries you access are mutable and/or which ones you -actually mutate, so it must cache, and write back at close, all of the -entries that you access. 
You can call d.sync() to write back all the -entries in the cache, and empty the cache (d.sync() also synchronizes -the persistent dictionary on disk, if feasible). -""" - import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -70,12 +12,6 @@ _T = TypeVar("_T") _VT = TypeVar("_VT") class Shelf(MutableMapping[str, _VT]): - """Base class for shelf implementations. - - This is initialized with a dictionary-like object. - See the module's __doc__ string for an overview of the interface. - """ - def __init__( self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" ) -> None: ... @@ -100,18 +36,6 @@ class Shelf(MutableMapping[str, _VT]): def sync(self) -> None: ... class BsdDbShelf(Shelf[_VT]): - """Shelf implementation using the "BSD" db interface. - - This adds methods first(), next(), previous(), last() and - set_location() that have no counterpart in [g]dbm databases. - - The actual database must be opened using one of the "bsddb" - modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or - bsddb.rnopen) and passed to the constructor. - - See the module's __doc__ string for an overview of the interface. - """ - def set_location(self, key: str) -> tuple[str, _VT]: ... def next(self) -> tuple[str, _VT]: ... def previous(self) -> tuple[str, _VT]: ... @@ -119,12 +43,6 @@ class BsdDbShelf(Shelf[_VT]): def last(self) -> tuple[str, _VT]: ... class DbfilenameShelf(Shelf[_VT]): - """Shelf implementation using the "dbm" generic dbm interface. - - This is initialized with the filename for the dbm database. - See the module's __doc__ string for an overview of the interface. - """ - if sys.version_info >= (3, 11): def __init__( self, filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False @@ -133,29 +51,9 @@ class DbfilenameShelf(Shelf[_VT]): def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: - """Open a persistent dictionary for reading and writing. - - The filename parameter is the base filename for the underlying - database. As a side-effect, an extension may be added to the - filename and more than one file may be created. The optional flag - parameter has the same interpretation as the flag parameter of - dbm.open(). The optional protocol parameter specifies the - version of the pickle protocol. - - See the module's __doc__ string for an overview of the interface. - """ + def open( + filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False + ) -> Shelf[Any]: ... else: - def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: - """Open a persistent dictionary for reading and writing. - - The filename parameter is the base filename for the underlying - database. As a side-effect, an extension may be added to the - filename and more than one file may be created. The optional flag - parameter has the same interpretation as the flag parameter of - dbm.open(). The optional protocol parameter specifies the - version of the pickle protocol. - - See the module's __doc__ string for an overview of the interface. 
- """ + def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi index 732cd5b06ac6d..1c27483782fb5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi @@ -1,5 +1,3 @@ -"""A lexical analyzer class for simple shell-like syntaxes.""" - import sys from collections import deque from collections.abc import Iterable @@ -16,28 +14,20 @@ class _ShlexInstream(Protocol): def close(self) -> object: ... if sys.version_info >= (3, 12): - def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax.""" + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... else: @overload - def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax.""" - + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... @overload @deprecated("Passing None for 's' to shlex.split() is deprecated and will raise an error in Python 3.12.") def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: ... -def join(split_command: Iterable[str]) -> str: - """Return a shell-escaped string from *split_command*.""" - -def quote(s: str) -> str: - """Return a shell-escaped version of the string *s*.""" +def join(split_command: Iterable[str]) -> str: ... +def quote(s: str) -> str: ... # TODO: Make generic over infile once PEP 696 is implemented. class shlex: - """A lexical analyzer class for simple shell-like syntaxes.""" - commenters: str wordchars: str whitespace: str @@ -62,24 +52,12 @@ class shlex: posix: bool = False, punctuation_chars: bool | str = False, ) -> None: ... - def get_token(self) -> str | None: - """Get a token from the input stream (or from stack if it's nonempty)""" - - def push_token(self, tok: str) -> None: - """Push a token onto the stack popped by the get_token method""" - + def get_token(self) -> str | None: ... + def push_token(self, tok: str) -> None: ... def read_token(self) -> str | None: ... - def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: - """Hook called on a filename to be sourced.""" - - def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: - """Push an input source onto the lexer's input source stack.""" - - def pop_source(self) -> None: - """Pop the input source stack.""" - - def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: - """Emit a C-compiler-like, Emacs-friendly error-message leader.""" - + def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: ... + def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: ... + def pop_source(self) -> None: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: ... def __iter__(self) -> Self: ... def __next__(self) -> str: ... 
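For orientation, a minimal usage sketch of the shlex helpers whose docstrings the hunk above removes; this is illustrative only (not part of the patch) and the input strings are made up:

    import shlex

    # split() tokenizes with shell-like rules; quote() escapes a single token;
    # join() is the inverse of split() for a list of tokens (Python 3.8+).
    args = shlex.split("grep -e 'a b' file.txt")   # ['grep', '-e', 'a b', 'file.txt']
    safe = shlex.quote("a b; rm -rf /")            # "'a b; rm -rf /'"
    cmd = shlex.join(["echo", "hello world"])      # "echo 'hello world'"

    # The shlex class exposes the same lexer token by token (it is iterable,
    # matching the __iter__/__next__ stubs above).
    lexer = shlex.shlex("alpha beta  # trailing comment", posix=True)
    lexer.whitespace_split = True
    tokens = list(lexer)                           # ['alpha', 'beta']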
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi index 8a79660dff987..cc26cfc556a00 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi @@ -1,9 +1,3 @@ -"""Utility functions for copying and archiving files and directory trees. - -XXX The functions here don't copy the resource fork or other metadata on Mac. - -""" - import os import sys from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, MaybeNone, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite @@ -47,101 +41,31 @@ _StrPathT = TypeVar("_StrPathT", bound=StrPath) _BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) class Error(OSError): ... - -class SameFileError(Error): - """Raised when source and destination are the same file.""" - -class SpecialFileError(OSError): - """Raised when trying to do a kind of operation (e.g. copying) which is - not supported on a special file (e.g. a named pipe) - """ +class SameFileError(Error): ... +class SpecialFileError(OSError): ... if sys.version_info >= (3, 14): ExecError = RuntimeError # Deprecated in Python 3.14; removal scheduled for Python 3.16 else: - class ExecError(OSError): - """Raised when a command could not be executed""" - -class ReadError(OSError): - """Raised when an archive cannot be read""" - -class RegistryError(Exception): - """Raised when a registry operation with the archiving - and unpacking registries fails - """ - -def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: - """copy data from file-like object fsrc to file-like object fdst""" - -def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: - """Copy data from src to dst in the most efficient way possible. - - If follow_symlinks is not set and src is a symbolic link, a new - symlink will be created instead of copying the file it points to. - - """ + class ExecError(OSError): ... -def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: - """Copy mode bits from src to dst. - - If follow_symlinks is not set, symlinks aren't followed if and only - if both `src` and `dst` are symlinks. If `lchmod` isn't available - (e.g. Linux) this method does nothing. - - """ - -def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: - """Copy file metadata - - Copy the permission bits, last access time, last modification time, and - flags from `src` to `dst`. On Linux, copystat() also copies the "extended - attributes" where possible. The file contents, owner, and group are - unaffected. `src` and `dst` are path-like objects or path names given as - strings. - - If the optional flag `follow_symlinks` is not set, symlinks aren't - followed if and only if both `src` and `dst` are symlinks. - """ +class ReadError(OSError): ... +class RegistryError(Exception): ... +def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... 
@overload -def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: - """Copy data and mode bits ("cp src dst"). Return the file's destination. - - The destination may be a directory. - - If follow_symlinks is false, symlinks won't be followed. This - resembles GNU's "cp -P src dst". - - If source and destination are the same file, a SameFileError will be - raised. - - """ - +def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... @overload def copy(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... @overload -def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: - """Copy data and metadata. Return the file's destination. - - Metadata is copied with copystat(). Please see the copystat function - for more information. - - The destination may be a directory. - - If follow_symlinks is false, symlinks won't be followed. This - resembles GNU's "cp -P src dst". - """ - +def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... @overload def copy2(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... -def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: - """Function that can be used as copytree() ignore parameter. - - Patterns is a sequence of glob-style patterns - that are used to exclude files - """ - +def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... def copytree( src: StrPath, dst: _StrPathT, @@ -150,45 +74,7 @@ def copytree( copy_function: Callable[[str, str], object] = ..., ignore_dangling_symlinks: bool = False, dirs_exist_ok: bool = False, -) -> _StrPathT: - """Recursively copy a directory tree and return the destination directory. - - If exception(s) occur, an Error is raised with a list of reasons. - - If the optional symlinks flag is true, symbolic links in the - source tree result in symbolic links in the destination tree; if - it is false, the contents of the files pointed to by symbolic - links are copied. If the file pointed to by the symlink doesn't - exist, an exception will be added in the list of errors raised in - an Error exception at the end of the copy process. - - You can set the optional ignore_dangling_symlinks flag to true if you - want to silence this exception. Notice that this has no effect on - platforms that don't support os.symlink. - - The optional ignore argument is a callable. If given, it - is called with the `src` parameter, which is the directory - being visited by copytree(), and `names` which is the list of - `src` contents, as returned by os.listdir(): - - callable(src, names) -> ignored_names - - Since copytree() is called recursively, the callable will be - called once for each directory that is copied. It returns a - list of names relative to the `src` directory that should - not be copied. - - The optional copy_function argument is a callable that will be used - to copy each file. It will be called with the source path and the - destination path as arguments. By default, copy2() is used, but any - function that supports the same signature (like copy()) can be used. - - If dirs_exist_ok is false (the default) and `dst` already exists, a - `FileExistsError` is raised. 
If `dirs_exist_ok` is true, the copying - operation will continue if it encounters existing directories, and files - within the `dst` tree will be overwritten by corresponding files from the - `src` tree. - """ +) -> _StrPathT: ... _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], str, ExcInfo], object] _OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object] @@ -250,46 +136,14 @@ _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], # N.B. shutil.move appears to take bytes arguments, however, # this does not work when dst is (or is within) an existing directory. # (#6832) -def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: - """Recursively move a file or directory to another location. This is - similar to the Unix "mv" command. Return the file or directory's - destination. - - If dst is an existing directory or a symlink to a directory, then src is - moved inside that directory. The destination path in that directory must - not already exist. - - If dst already exists but is not a directory, it may be overwritten - depending on os.rename() semantics. - - If the destination is on our current filesystem, then rename() is used. - Otherwise, src is copied to the destination and then removed. Symlinks are - recreated under the new name if os.rename() fails because of cross - filesystem renames. - - The optional `copy_function` argument is a callable that will be used - to copy the source or it will be delegated to `copytree`. - By default, copy2() is used, but any function that supports the same - signature (like copy()) can be used. - - A lot more could be done here... A look at a mv.c shows a lot of - the issues this implementation glosses over. - - """ +def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: ... class _ntuple_diskusage(NamedTuple): - """usage(total, used, free)""" - total: int used: int free: int -def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: - """Return disk usage statistics about the given path. - - Returned value is a named tuple with attributes 'total', 'used' and - 'free', which are the amount of total, used and free space, in bytes. - """ +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... # While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's @@ -303,20 +157,7 @@ if sys.version_info >= (3, 13): *, dir_fd: int | None = None, follow_symlinks: bool = True, - ) -> None: - """Change owner user and group of the given path. - - user and group can be the uid/gid or the user/group names, and in that case, - they are converted to their respective uid/gid. - - If dir_fd is set, it should be an open file descriptor to the directory to - be used as the root of *path* if it is relative. - - If follow_symlinks is set to False and the last element of the path is a - symbolic link, chown will modify the link itself and not the file being - referenced by the link. - """ - + ) -> None: ... @overload def chown( path: FileDescriptorOrPath, @@ -337,13 +178,7 @@ if sys.version_info >= (3, 13): else: @overload - def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: - """Change owner user and group of the given path. - - user and group can be the uid/gid or the user/group names, and in that case, - they are converted to their respective uid/gid. 
- """ - + def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: ... @overload def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ... @overload @@ -354,29 +189,10 @@ else: if sys.platform == "win32" and sys.version_info < (3, 12): @overload @deprecated("On Windows before Python 3.12, using a PathLike as `cmd` would always fail or return `None`.") - def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - """ + def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: ... @overload -def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. - - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - """ - +def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: ... @overload def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... def make_archive( @@ -389,43 +205,12 @@ def make_archive( owner: str | None = None, group: str | None = None, logger: Any | None = None, -) -> str: - """Create an archive file (eg. zip or tar). - - 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "gztar", - "bztar", "xztar", or "zstdtar". Or any other registered format. - - 'root_dir' is a directory that will be the root directory of the - archive; ie. we typically chdir into 'root_dir' before creating the - archive. 'base_dir' is the directory where we start archiving from; - ie. 'base_dir' will be the common prefix of all files and - directories in the archive. 'root_dir' and 'base_dir' both default - to the current directory. Returns the name of the archive file. - - 'owner' and 'group' are used when creating a tar archive. By default, - uses the current owner and group. - """ - -def get_archive_formats() -> list[tuple[str, str]]: - """Returns a list of supported formats for archiving and unarchiving. - - Each element of the returned sequence is a tuple (name, description) - """ - +) -> str: ... +def get_archive_formats() -> list[tuple[str, str]]: ... @overload def register_archive_format( name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" -) -> None: - """Registers an archive format. - - name is the name of the format. function is the callable that will be - used to create archives. If provided, extra_args is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_archive_formats() function. - """ - +) -> None: ... @overload def register_archive_format( name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" @@ -433,25 +218,7 @@ def register_archive_format( def unregister_archive_format(name: str) -> None: ... 
def unpack_archive( filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None, *, filter: _TarfileFilter | None = None -) -> None: - """Unpack an archive. - - `filename` is the name of the archive. - - `extract_dir` is the name of the target directory, where the archive - is unpacked. If not provided, the current working directory is used. - - `format` is the archive format: one of "zip", "tar", "gztar", "bztar", - "xztar", or "zstdtar". Or any other registered format. If not provided, - unpack_archive will use the filename extension and see if an unpacker - was registered for that extension. - - In case none is found, a ValueError is raised. - - If `filter` is given, it is passed to the underlying - extraction function. - """ - +) -> None: ... @overload def register_unpack_format( name: str, @@ -459,53 +226,11 @@ def register_unpack_format( function: Callable[..., object], extra_args: Sequence[tuple[str, Any]], description: str = "", -) -> None: - """Registers an unpack format. - - `name` is the name of the format. `extensions` is a list of extensions - corresponding to the format. - - `function` is the callable that will be - used to unpack archives. The callable will receive archives to unpack. - If it's unable to handle an archive, it needs to raise a ReadError - exception. - - If provided, `extra_args` is a sequence of - (name, value) tuples that will be passed as arguments to the callable. - description can be provided to describe the format, and will be returned - by the get_unpack_formats() function. - """ - +) -> None: ... @overload def register_unpack_format( name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... -def unregister_unpack_format(name: str) -> None: - """Removes the pack format from the registry.""" - -def get_unpack_formats() -> list[tuple[str, list[str], str]]: - """Returns a list of supported formats for unpacking. - - Each element of the returned sequence is a tuple - (name, extensions, description) - """ - -def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: - """Get the size of the terminal window. - - For each of the two dimensions, the environment variable, COLUMNS - and LINES respectively, is checked. If the variable is defined and - the value is a positive integer, it is used. - - When COLUMNS or LINES is not defined, which is the common case, - the terminal connected to sys.__stdout__ is queried - by invoking os.get_terminal_size. - - If the terminal size cannot be successfully queried, either because - the system doesn't support querying, or because we are not - connected to a terminal, the value given in fallback parameter - is used. Fallback defaults to (80, 24) which is the default - size used by many terminal emulators. - - The value returned is a named tuple of type os.terminal_size. - """ +def unregister_unpack_format(name: str) -> None: ... +def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... +def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: ... 
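The shutil hunks above leave the archive API itself untouched; only the docstrings are dropped. A minimal round-trip sketch, assuming hypothetical "project" and "restored" directories:

import shutil

# Registered (name, description) pairs, as returned by get_archive_formats().
print(shutil.get_archive_formats())

# Create "project.tar.gz" from the tree rooted at ./project (hypothetical directory).
archive_path = shutil.make_archive("project", "gztar", root_dir="project")

# Unpack into ./restored; the format is inferred from the file extension.
shutil.unpack_archive(archive_path, extract_dir="restored")

# Terminal size with the documented (80, 24) fallback; returns an os.terminal_size.
columns, lines = shutil.get_terminal_size()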
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi index 0a10912372264..c2668bd8b32d9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi @@ -9,8 +9,6 @@ from typing_extensions import Never, TypeAlias NSIG: int class Signals(IntEnum): - """An enumeration.""" - SIGABRT = 6 SIGFPE = 8 SIGILL = 4 @@ -60,8 +58,6 @@ class Signals(IntEnum): SIGSTKFLT = 16 class Handlers(IntEnum): - """An enumeration.""" - SIG_DFL = 0 SIG_IGN = 1 @@ -71,55 +67,15 @@ SIG_IGN: Final = Handlers.SIG_IGN _SIGNUM: TypeAlias = int | Signals _HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None -def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: - """The default handler for SIGINT installed by Python. - - It raises KeyboardInterrupt. - """ +def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: ... if sys.version_info >= (3, 10): # arguments changed in 3.10.2 - def getsignal(signalnum: _SIGNUM) -> _HANDLER: - """Return the current action for the given signal. - - The return value can be: - SIG_IGN -- if the signal is being ignored - SIG_DFL -- if the default action for the signal is in effect - None -- if an unknown handler is in effect - anything else -- the callable Python object used as a handler - """ - - def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: - """Set the action for the given signal. - - The action can be SIG_DFL, SIG_IGN, or a callable Python object. - The previous action is returned. See getsignal() for possible return values. - - *** IMPORTANT NOTICE *** - A signal handler function is called with two arguments: - the first is the signal number, the second is the interrupted stack frame. - """ + def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... else: - def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: - """Return the current action for the given signal. - - The return value can be: - SIG_IGN -- if the signal is being ignored - SIG_DFL -- if the default action for the signal is in effect - None -- if an unknown handler is in effect - anything else -- the callable Python object used as a handler - """ - - def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: - """Set the action for the given signal. - - The action can be SIG_DFL, SIG_IGN, or a callable Python object. - The previous action is returned. See getsignal() for possible return values. - - *** IMPORTANT NOTICE *** - A signal handler function is called with two arguments: - the first is the signal number, the second is the interrupted stack frame. - """ + def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: ... 
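The signal()/getsignal() pair stubbed above behaves as before; a small sketch of installing and later restoring a handler (POSIX-oriented, SIGTERM assumed to be available, must run in the main thread):

import signal

def on_sigterm(signum, frame):
    # Handlers receive the signal number and the interrupted stack frame.
    print(f"received {signal.Signals(signum).name}")

previous = signal.signal(signal.SIGTERM, on_sigterm)   # returns the old handler
assert signal.getsignal(signal.SIGTERM) is on_sigterm  # reported by getsignal()
signal.signal(signal.SIGTERM, previous)                # restore the old disposition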
SIGABRT: Final = Signals.SIGABRT SIGFPE: Final = Signals.SIGFPE @@ -167,8 +123,6 @@ else: ITIMER_VIRTUAL: int class Sigmasks(IntEnum): - """An enumeration.""" - SIG_BLOCK = 0 SIG_UNBLOCK = 1 SIG_SETMASK = 2 @@ -176,62 +130,22 @@ else: SIG_BLOCK: Final = Sigmasks.SIG_BLOCK SIG_UNBLOCK: Final = Sigmasks.SIG_UNBLOCK SIG_SETMASK: Final = Sigmasks.SIG_SETMASK - def alarm(seconds: int, /) -> int: - """Arrange for SIGALRM to arrive after the given number of seconds.""" - - def getitimer(which: int, /) -> tuple[float, float]: - """Returns current value of given itimer.""" - - def pause() -> None: - """Wait until a signal arrives.""" - - def pthread_kill(thread_id: int, signalnum: int, /) -> None: - """Send a signal to a thread.""" + def alarm(seconds: int, /) -> int: ... + def getitimer(which: int, /) -> tuple[float, float]: ... + def pause() -> None: ... + def pthread_kill(thread_id: int, signalnum: int, /) -> None: ... if sys.version_info >= (3, 10): # arguments changed in 3.10.2 - def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: - """Fetch and/or change the signal mask of the calling thread.""" + def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: ... else: - def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: - """Fetch and/or change the signal mask of the calling thread.""" - - def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: - """Sets given itimer (one of ITIMER_REAL, ITIMER_VIRTUAL or ITIMER_PROF). - - The timer will fire after value seconds and after that every interval seconds. - The itimer can be cleared by setting seconds to zero. - - Returns old values as a tuple: (delay, interval). - """ - - def siginterrupt(signalnum: int, flag: bool, /) -> None: - """Change system call restart behaviour. - - If flag is False, system calls will be restarted when interrupted by - signal sig, else system calls will be interrupted. - """ - - def sigpending() -> Any: - """Examine pending signals. + def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: ... - Returns a set of signal numbers that are pending for delivery to - the calling thread. - """ + def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: ... + def siginterrupt(signalnum: int, flag: bool, /) -> None: ... + def sigpending() -> Any: ... if sys.version_info >= (3, 10): # argument changed in 3.10.2 - def sigwait(sigset: Iterable[int]) -> _SIGNUM: - """Wait for a signal. - - Suspend execution of the calling thread until the delivery of one of the - signals specified in the signal set sigset. The function accepts the signal - and returns the signal number. - """ + def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... else: - def sigwait(sigset: Iterable[int], /) -> _SIGNUM: - """Wait for a signal. - - Suspend execution of the calling thread until the delivery of one of the - signals specified in the signal set sigset. The function accepts the signal - and returns the signal number. - """ + def sigwait(sigset: Iterable[int], /) -> _SIGNUM: ... if sys.platform != "darwin": SIGCLD: Final = Signals.SIGCHLD # alias SIGPOLL: Final = Signals.SIGIO # alias @@ -243,83 +157,31 @@ else: @final class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): - """struct_siginfo: Result from sigwaitinfo or sigtimedwait. - - This object may be accessed either as a tuple of - (si_signo, si_code, si_errno, si_pid, si_uid, si_status, si_band), - or via the attributes si_signo, si_code, and so on. 
- """ - if sys.version_info >= (3, 10): __match_args__: Final = ("si_signo", "si_code", "si_errno", "si_pid", "si_uid", "si_status", "si_band") @property - def si_signo(self) -> int: - """signal number""" - + def si_signo(self) -> int: ... @property - def si_code(self) -> int: - """signal code""" - + def si_code(self) -> int: ... @property - def si_errno(self) -> int: - """errno associated with this signal""" - + def si_errno(self) -> int: ... @property - def si_pid(self) -> int: - """sending process ID""" - + def si_pid(self) -> int: ... @property - def si_uid(self) -> int: - """real user ID of sending process""" - + def si_uid(self) -> int: ... @property - def si_status(self) -> int: - """exit value or signal""" - + def si_status(self) -> int: ... @property - def si_band(self) -> int: - """band event for SIGPOLL""" - - def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: - """Like sigwaitinfo(), but with a timeout. - - The timeout is specified in seconds, with floating-point numbers allowed. - """ - - def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: - """Wait synchronously until one of the signals in *sigset* is delivered. - - Returns a struct_siginfo containing information about the signal. - """ - -def strsignal(signalnum: _SIGNUM, /) -> str | None: - """Return the system description of the given signal. - - Returns the description of signal *signalnum*, such as "Interrupt" - for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no - description. Raises :exc:`ValueError` if *signalnum* is invalid. - """ - -def valid_signals() -> set[Signals]: - """Return a set of valid signal numbers on this platform. - - The signal numbers returned by this function can be safely passed to - functions like `pthread_sigmask`. - """ - -def raise_signal(signalnum: _SIGNUM, /) -> None: - """Send a signal to the executing process.""" - -def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: - """Sets the fd to be written to (with the signal number) when a signal comes in. + def si_band(self) -> int: ... - A library can use this to wakeup select or poll. - The previous fd or -1 is returned. + def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: ... + def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: ... - The fd must be non-blocking. - """ +def strsignal(signalnum: _SIGNUM, /) -> str | None: ... +def valid_signals() -> set[Signals]: ... +def raise_signal(signalnum: _SIGNUM, /) -> None: ... +def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: ... if sys.platform == "linux": - def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: - """Send a signal to a process referred to by a pid file descriptor.""" + def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi index 9c302c7617a71..6e39677aaea0e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi @@ -1,74 +1,3 @@ -"""Append module search paths for third-party packages to sys.path. - -**************************************************************** -* This module is automatically imported during initialization. * -**************************************************************** - -This will append site-specific paths to the module search path. 
On -Unix (including Mac OSX), it starts with sys.prefix and -sys.exec_prefix (if different) and appends -lib/python/site-packages. -On other platforms (such as Windows), it tries each of the -prefixes directly, as well as with lib/site-packages appended. The -resulting directories, if they exist, are appended to sys.path, and -also inspected for path configuration files. - -If a file named "pyvenv.cfg" exists one directory above sys.executable, -sys.prefix and sys.exec_prefix are set to that directory and -it is also checked for site-packages (sys.base_prefix and -sys.base_exec_prefix will always be the "real" prefixes of the Python -installation). If "pyvenv.cfg" (a bootstrap configuration file) contains -the key "include-system-site-packages" set to anything other than "false" -(case-insensitive), the system-level prefixes will still also be -searched for site-packages; otherwise they won't. - -All of the resulting site-specific directories, if they exist, are -appended to sys.path, and also inspected for path configuration -files. - -A path configuration file is a file whose name has the form -.pth; its contents are additional directories (one per line) -to be added to sys.path. Non-existing directories (or -non-directories) are never added to sys.path; no directory is added to -sys.path more than once. Blank lines and lines beginning with -'#' are skipped. Lines starting with 'import' are executed. - -For example, suppose sys.prefix and sys.exec_prefix are set to -/usr/local and there is a directory /usr/local/lib/python2.5/site-packages -with three subdirectories, foo, bar and spam, and two path -configuration files, foo.pth and bar.pth. Assume foo.pth contains the -following: - - # foo package configuration - foo - bar - bletch - -and bar.pth contains: - - # bar package configuration - bar - -Then the following directories are added to sys.path, in this order: - - /usr/local/lib/python2.5/site-packages/bar - /usr/local/lib/python2.5/site-packages/foo - -Note that bletch is omitted because it doesn't exist; bar precedes foo -because bar.pth comes alphabetically before foo.pth; and spam is -omitted because it is not mentioned in either path configuration file. - -The readline module is also automatically configured to enable -completion for systems that support it. This can be overridden in -sitecustomize, usercustomize or PYTHONSTARTUP. Starting Python in -isolated mode (-I) disables automatic readline configuration. - -After these operations, an attempt is made to import a module -named sitecustomize, which can perform arbitrary additional -site-specific customizations. If this import fails with an -ImportError exception, it is silently ignored. -""" - import sys from _typeshed import StrPath from collections.abc import Iterable @@ -78,115 +7,30 @@ ENABLE_USER_SITE: bool | None USER_SITE: str | None USER_BASE: str | None -def main() -> None: - """Add standard site-specific directories to the module search path. - - This function is called automatically when this module is imported, - unless the python interpreter was started with the -S flag. - """ - -def abs_paths() -> None: # undocumented - """Set all module __file__ and __cached__ attributes to an absolute path""" - -def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: # undocumented - """Process a .pth file within the site-packages directory: - For each line in the file, either combine it with sitedir to a path - and add that to known_paths, or execute it if it starts with 'import '. 
- """ - -def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: - """Add 'sitedir' argument to sys.path if missing and handle .pth files in - 'sitedir' - """ - -def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: # undocumented - """Add site-packages to sys.path""" - -def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: # undocumented - """Add a per user site-package to sys.path - - Each user has its own python directory with site-packages in the - home directory. - """ - -def check_enableusersite() -> bool | None: # undocumented - """Check if user site directory is safe for inclusion - - The function tests for the command line flag (including environment var), - process uid/gid equal to effective uid/gid. - - None: Disabled for security reasons - False: Disabled by user (command line option) - True: Safe and enabled - """ +def main() -> None: ... +def abs_paths() -> None: ... # undocumented +def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented +def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def check_enableusersite() -> bool | None: ... # undocumented if sys.version_info >= (3, 13): - def gethistoryfile() -> str: # undocumented - """Check if the PYTHON_HISTORY environment variable is set and define - it as the .python_history file. If PYTHON_HISTORY is not set, use the - default .python_history file. - """ + def gethistoryfile() -> str: ... # undocumented -def enablerlcompleter() -> None: # undocumented - """Enable default readline configuration on interactive prompts, by - registering a sys.__interactivehook__. - """ +def enablerlcompleter() -> None: ... # undocumented if sys.version_info >= (3, 13): - def register_readline() -> None: # undocumented - """Configure readline completion on interactive prompts. - - If the readline module can be imported, the hook will set the Tab key - as completion key and register ~/.python_history as history file. - This can be overridden in the sitecustomize or usercustomize module, - or in a PYTHONSTARTUP file. - """ - -def execsitecustomize() -> None: # undocumented - """Run custom site specific code, if available.""" - -def execusercustomize() -> None: # undocumented - """Run custom user specific code, if available.""" - -def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: - """Returns a list containing all global site-packages directories. - - For each directory present in ``prefixes`` (or the global ``PREFIXES``), - this function will find its `site-packages` subdirectory depending on the - system environment, and will return a list of full paths. - """ - -def getuserbase() -> str: - """Returns the `user base` directory path. - - The `user base` directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. - """ - -def getusersitepackages() -> str: - """Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. - """ + def register_readline() -> None: ... # undocumented +def execsitecustomize() -> None: ... 
# undocumented +def execusercustomize() -> None: ... # undocumented +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... +def getuserbase() -> str: ... +def getusersitepackages() -> str: ... def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented -def removeduppaths() -> set[str]: # undocumented - """Remove duplicate entries from sys.path along with making them - absolute - """ - -def setcopyright() -> None: # undocumented - """Set 'copyright' and 'credits' in builtins""" - +def removeduppaths() -> set[str]: ... # undocumented +def setcopyright() -> None: ... # undocumented def sethelper() -> None: ... # undocumented -def setquit() -> None: # undocumented - """Define new builtins 'quit' and 'exit'. - - These are objects which make the interpreter exit when called. - The repr of each object contains a hint at how it works. - - """ - +def setquit() -> None: ... # undocumented def venv(known_paths: set[str] | None) -> set[str] | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi index b201659d1b2fe..dee7e949f42fa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi @@ -1,48 +1,3 @@ -"""An RFC 5321 smtp proxy with optional RFC 1870 and RFC 6531 extensions. - -Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]] - -Options: - - --nosetuid - -n - This program generally tries to setuid `nobody', unless this flag is - set. The setuid call will fail if this program is not run as root (in - which case, use this flag). - - --version - -V - Print the version number and exit. - - --class classname - -c classname - Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by - default. - - --size limit - -s limit - Restrict the total size of the incoming message to "limit" number of - bytes via the RFC 1870 SIZE extension. Defaults to 33554432 bytes. - - --smtputf8 - -u - Enable the SMTPUTF8 extension and behave as an RFC 6531 smtp proxy. - - --debug - -d - Turn on debugging prints. - - --help - -h - Print this message and exit. - -Version: %(__version__)s - -If localhost is not given then `localhost' is used, and if localport is not -given then 8025 is used. If remotehost is not given then `localhost' is used, -and if remoteport is not given, then 25 is used. -""" - import asynchat import asyncore import socket @@ -122,37 +77,9 @@ class SMTPServer(asyncore.dispatcher): decode_data: bool = False, ) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... - def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any) -> str | None: - """Override this abstract method to handle messages from the client. - - peer is a tuple containing (ipaddr, port) of the client that made the - socket connection to our smtp port. - - mailfrom is the raw address the client claims the message is coming - from. - - rcpttos is a list of raw addresses the client wishes to deliver the - message to. - - data is a string containing the entire full text of the message, - headers (if supplied) and all. It has been `de-transparencied' - according to RFC 821, Section 4.5.2. In other words, a line - containing a `.' followed by other text has had the leading dot - removed. - - kwargs is a dictionary containing additional information. 
It is - empty if decode_data=True was given as init parameter, otherwise - it will contain the following keys: - 'mail_options': list of parameters to the mail command. All - elements are uppercase strings. Example: - ['BODY=8BITMIME', 'SMTPUTF8']. - 'rcpt_options': same, for the rcpt command. - - This function should return None for a normal `250 Ok' response; - otherwise, it should return the desired response string in RFC 821 - format. - - """ + def process_message( + self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any + ) -> str | None: ... class DebuggingServer(SMTPServer): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi index d2df39ff39189..6a8467689367a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi @@ -1,35 +1,3 @@ -"""SMTP/ESMTP client class. - -This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP -Authentication) and RFC 2487 (Secure SMTP over TLS). - -Notes: - -Please remember, when doing ESMTP, that the names of the SMTP service -extensions are NOT the same thing as the option keywords for the RCPT -and MAIL commands! - -Example: - - >>> import smtplib - >>> s=smtplib.SMTP("localhost") - >>> print(s.help()) - This is Sendmail version 8.8.4 - Topics: - HELO EHLO MAIL RCPT DATA - RSET NOOP QUIT HELP VRFY - EXPN VERB ETRN DSN - For more info use "HELP ". - To report bugs in the implementation send email to - sendmail-bugs@sendmail.org. - For local information send email to Postmaster at your site. - End of HELP info - >>> s.putcmd("vrfy","someone@here") - >>> s.getreply() - (250, "Somebody OverHere ") - >>> s.quit() -""" - import sys from _socket import _Address as _SourceAddress from _typeshed import ReadableBuffer, SizedBuffer @@ -69,91 +37,34 @@ bCRLF: Final[bytes] OLDSTYLE_AUTH: Final[Pattern[str]] -class SMTPException(OSError): - """Base class for all exceptions raised by this module.""" - -class SMTPNotSupportedError(SMTPException): - """The command or option is not supported by the SMTP server. - - This exception is raised when an attempt is made to run a command or a - command with an option which is not supported by the server. - """ - -class SMTPServerDisconnected(SMTPException): - """Not connected to any SMTP server. - - This exception is raised when the server unexpectedly disconnects, - or when an attempt is made to use the SMTP instance before - connecting it to a server. - """ +class SMTPException(OSError): ... +class SMTPNotSupportedError(SMTPException): ... +class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): - """Base class for all exceptions that include an SMTP error code. - - These exceptions are generated in some instances when the SMTP - server returns an error code. The error code is stored in the - `smtp_code' attribute of the error, and the `smtp_error' attribute - is set to the error message. - """ - smtp_code: int smtp_error: bytes | str args: tuple[int, bytes | str] | tuple[int, bytes, str] def __init__(self, code: int, msg: bytes | str) -> None: ... class SMTPSenderRefused(SMTPResponseException): - """Sender address refused. - - In addition to the attributes set by on all SMTPResponseException - exceptions, this sets 'sender' to the string that the SMTP refused. - """ - smtp_error: bytes sender: str args: tuple[int, bytes, str] def __init__(self, code: int, msg: bytes, sender: str) -> None: ... 
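The process_message() hook summarised above is the extension point of smtpd.SMTPServer; a rough sketch for interpreters that still ship smtpd (the module was removed in Python 3.12), with a hypothetical local host/port and no upstream relay:

import asyncore
import smtpd

class PrintingServer(smtpd.SMTPServer):
    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
        # kwargs carries mail_options/rcpt_options when decode_data is False.
        print(f"from={mailfrom} to={rcpttos} peer={peer} options={kwargs}")
        return None  # None -> "250 Ok"; return an RFC 821 response string to refuse.

server = PrintingServer(("localhost", 8025), None)  # remoteaddr=None: no relaying
asyncore.loop()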
class SMTPRecipientsRefused(SMTPException): - """All recipient addresses refused. - - The errors for each recipient are accessible through the attribute - 'recipients', which is a dictionary of exactly the same sort as - SMTP.sendmail() returns. - """ - recipients: _SendErrs args: tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... -class SMTPDataError(SMTPResponseException): - """The SMTP server didn't accept the data.""" - -class SMTPConnectError(SMTPResponseException): - """Error during connection establishment.""" - -class SMTPHeloError(SMTPResponseException): - """The server refused our HELO reply.""" - -class SMTPAuthenticationError(SMTPResponseException): - """Authentication error. - - Most probably the server didn't accept the username/password - combination provided. - """ - -def quoteaddr(addrstring: str) -> str: - """Quote a subset of the email addresses defined by RFC 821. - - Should be able to handle anything email.utils.parseaddr can handle. - """ - -def quotedata(data: str) -> str: - """Quote data for email. - - Double leading '.', and change Unix newline '\\n', or Mac '\\r' into - internet CRLF end-of-line. - """ +class SMTPDataError(SMTPResponseException): ... +class SMTPConnectError(SMTPResponseException): ... +class SMTPHeloError(SMTPResponseException): ... +class SMTPAuthenticationError(SMTPResponseException): ... +def quoteaddr(addrstring: str) -> str: ... +def quotedata(data: str) -> str: ... @type_check_only class _AuthObject(Protocol): @overload @@ -162,35 +73,6 @@ class _AuthObject(Protocol): def __call__(self, challenge: bytes, /) -> str: ... class SMTP: - """This class manages a connection to an SMTP or ESMTP server. - SMTP Objects: - SMTP objects have the following attributes: - helo_resp - This is the message given by the server in response to the - most recent HELO command. - - ehlo_resp - This is the message given by the server in response to the - most recent EHLO command. This is usually multiline. - - does_esmtp - This is a True value _after you do an EHLO command_, if the - server supports ESMTP. - - esmtp_features - This is a dictionary, which, if the server supports ESMTP, - will _after you do an EHLO command_, contain the names of the - SMTP service extensions this server supports, and their - parameters (if any). - - Note, all extension names are mapped to lower case in the - dictionary. - - See each method's docstrings for details. In general, there is a - method of the same name to perform each SMTP command. There is also a - method called 'sendmail' that will do an entire mail transaction. - """ - debuglevel: int sock: socket | None # Type of file should match what socket.makefile() returns @@ -212,240 +94,46 @@ class SMTP: local_hostname: str | None = None, timeout: float = ..., source_address: _SourceAddress | None = None, - ) -> None: - """Initialize a new instance. - - If specified, `host` is the name of the remote host to which to - connect. If specified, `port` specifies the port to which to connect. - By default, smtplib.SMTP_PORT is used. If a host is specified the - connect method is called, and if it returns anything other than a - success code an SMTPConnectError is raised. If specified, - `local_hostname` is used as the FQDN of the local host in the HELO/EHLO - command. Otherwise, the local hostname is found using - socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host, - port) for the socket to bind to as its source address before - connecting. 
If the host is '' and port is 0, the OS default behavior - will be used. - - """ - + ) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... - def set_debuglevel(self, debuglevel: int) -> None: - """Set the debug output level. - - A non-false value results in debug messages for connection and for all - messages sent to and received from the server. - - """ - - def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: - """Connect to a host on a given port. - - If the hostname ends with a colon (':') followed by a number, and - there is no port specified, that suffix will be stripped off and the - number interpreted as the port number to use. - - Note: This method is automatically invoked by __init__, if a host is - specified during instantiation. - - """ - - def send(self, s: ReadableBuffer | str) -> None: - """Send 's' to the server.""" - - def putcmd(self, cmd: str, args: str = "") -> None: - """Send a command to the server.""" - - def getreply(self) -> _Reply: - """Get a reply from the server. - - Returns a tuple consisting of: - - - server response code (e.g. '250', or such, if all goes well) - Note: returns -1 if it can't read response code. - - - server response string corresponding to response code (multiline - responses are converted to a single, multiline string). - - Raises SMTPServerDisconnected if end-of-file is reached. - """ - - def docmd(self, cmd: str, args: str = "") -> _Reply: - """Send a command, and return its response code.""" - - def helo(self, name: str = "") -> _Reply: - """SMTP 'helo' command. - Hostname to send for this command defaults to the FQDN of the local - host. - """ - - def ehlo(self, name: str = "") -> _Reply: - """SMTP 'ehlo' command. - Hostname to send for this command defaults to the FQDN of the local - host. - """ - - def has_extn(self, opt: str) -> bool: - """Does the server support a given SMTP service extension?""" - - def help(self, args: str = "") -> bytes: - """SMTP 'help' command. - Returns help text from server. - """ - - def rset(self) -> _Reply: - """SMTP 'rset' command -- resets session.""" - - def noop(self) -> _Reply: - """SMTP 'noop' command -- doesn't do anything :>""" - - def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: - """SMTP 'mail' command -- begins mail xfer session. - - This method may raise the following exceptions: - - SMTPNotSupportedError The options parameter includes 'SMTPUTF8' - but the SMTPUTF8 extension is not supported by - the server. - """ - - def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: - """SMTP 'rcpt' command -- indicates 1 recipient for this mail.""" - - def data(self, msg: ReadableBuffer | str) -> _Reply: - """SMTP 'DATA' command -- sends message data to server. - - Automatically quotes lines beginning with a period per rfc821. - Raises SMTPDataError if there is an unexpected reply to the - DATA command; the return value from this method is the final - response code received when the all data is sent. If msg - is a string, lone '\\r' and '\\n' characters are converted to - '\\r\\n' characters. If msg is bytes, it is transmitted as is. - """ - - def verify(self, address: str) -> _Reply: - """SMTP 'verify' command -- checks for address validity.""" + def set_debuglevel(self, debuglevel: int) -> None: ... 
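The SMTP class stubbed above supports the context-manager protocol and the EHLO feature query; a hedged usage sketch, assuming hypothetical addresses and a local test server on port 8025:

import smtplib
from email.message import EmailMessage

msg = EmailMessage()
msg["From"] = "me@example.org"      # hypothetical addresses
msg["To"] = "you@example.org"
msg["Subject"] = "testing"
msg.set_content("This is a test.")

# __enter__/__exit__ make SMTP usable as a context manager; quit() runs on exit.
with smtplib.SMTP("localhost", 8025) as s:
    s.set_debuglevel(1)             # echo the protocol dialogue to stderr
    s.ehlo()                        # populates esmtp_features, queried by has_extn()
    if s.has_extn("starttls"):
        s.starttls()
        s.ehlo()                    # re-identify on the now-encrypted channel
    s.send_message(msg)             # delivery failures raise the SMTP* exceptions above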
+ def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... + def send(self, s: ReadableBuffer | str) -> None: ... + def putcmd(self, cmd: str, args: str = "") -> None: ... + def getreply(self) -> _Reply: ... + def docmd(self, cmd: str, args: str = "") -> _Reply: ... + def helo(self, name: str = "") -> _Reply: ... + def ehlo(self, name: str = "") -> _Reply: ... + def has_extn(self, opt: str) -> bool: ... + def help(self, args: str = "") -> bytes: ... + def rset(self) -> _Reply: ... + def noop(self) -> _Reply: ... + def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: ... + def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... + def verify(self, address: str) -> _Reply: ... vrfy = verify - def expn(self, address: str) -> _Reply: - """SMTP 'expn' command -- expands a mailing list.""" - - def ehlo_or_helo_if_needed(self) -> None: - """Call self.ehlo() and/or self.helo() if needed. - - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. - - This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - """ + def expn(self, address: str) -> _Reply: ... + def ehlo_or_helo_if_needed(self) -> None: ... user: str password: str - def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: - """Authentication command - requires response processing. - - 'mechanism' specifies which authentication mechanism is to - be used - the valid values are those listed in the 'auth' - element of 'esmtp_features'. - - 'authobject' must be a callable object taking a single argument: - - data = authobject(challenge) - - It will be called to process the server's challenge response; the - challenge argument it is passed will be a bytes. It should return - an ASCII string that will be base64 encoded and sent to the server. - - Keyword arguments: - - initial_response_ok: Allow sending the RFC 4954 initial-response - to the AUTH command, if the authentication methods supports it. - """ - + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... @overload - def auth_cram_md5(self, challenge: None = None) -> None: - """Authobject to use with CRAM-MD5 authentication. Requires self.user - and self.password to be set. - """ - + def auth_cram_md5(self, challenge: None = None) -> None: ... @overload def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... - def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: - """Authobject to use with PLAIN authentication. Requires self.user and - self.password to be set. - """ - - def auth_login(self, challenge: ReadableBuffer | None = None) -> str: - """Authobject to use with LOGIN authentication. Requires self.user and - self.password to be set. - """ - - def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: - """Log in on an SMTP server that requires authentication. - - The arguments are: - - user: The user name to authenticate with. - - password: The password for the authentication. - - Keyword arguments: - - initial_response_ok: Allow sending the RFC 4954 initial-response - to the AUTH command, if the authentication methods supports it. - - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. 
- - This method will return normally if the authentication was successful. - - This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - SMTPAuthenticationError The server didn't accept the username/ - password combination. - SMTPNotSupportedError The AUTH command is not supported by the - server. - SMTPException No suitable authentication method was - found. - """ + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... if sys.version_info >= (3, 12): - def starttls(self, *, context: SSLContext | None = None) -> _Reply: - """Puts the connection to the SMTP server into TLS mode. - - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. - - If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the context parameter, - the identity of the SMTP server and client can be checked. This, - however, depends on whether the socket module really checks the - certificates. - - This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - """ + def starttls(self, *, context: SSLContext | None = None) -> _Reply: ... else: - def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: - """Puts the connection to the SMTP server into TLS mode. - - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. - - If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the keyfile and certfile parameters, - the identity of the SMTP server and client can be checked. This, - however, depends on whether the socket module really checks the - certificates. - - This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - """ + def starttls( + self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None + ) -> _Reply: ... def sendmail( self, @@ -454,70 +142,7 @@ class SMTP: msg: SizedBuffer | str, mail_options: Sequence[str] = (), rcpt_options: Sequence[str] = (), - ) -> _SendErrs: - """This command performs an entire mail transaction. - -The arguments are: - - from_addr : The address sending this mail. - - to_addrs : A list of addresses to send this mail to. A bare - string will be treated as a list with 1 address. - - msg : The message to send. - - mail_options : List of ESMTP options (such as 8bitmime) for the - mail command. - - rcpt_options : List of ESMTP options (such as DSN commands) for - all the rcpt commands. - -msg may be a string containing characters in the ASCII range, or a byte -string. A string is encoded to bytes using the ascii codec, and lone -\\r and \\n characters are converted to \\r\\n characters. - -If there has been no previous EHLO or HELO command this session, this -method tries ESMTP EHLO first. If the server does ESMTP, message size -and each of the specified options will be passed to it. If EHLO -fails, HELO will be tried and ESMTP options suppressed. - -This method will return normally if the mail is accepted for at least -one recipient. It returns a dictionary, with one entry for each -recipient that was refused. 
Each entry contains a tuple of the SMTP -error code and the accompanying error message sent by the server. - -This method may raise the following exceptions: - - SMTPHeloError The server didn't reply properly to - the helo greeting. - SMTPRecipientsRefused The server rejected ALL recipients - (no mail was sent). - SMTPSenderRefused The server didn't accept the from_addr. - SMTPDataError The server replied with an unexpected - error code (other than a refusal of - a recipient). - SMTPNotSupportedError The mail_options parameter includes 'SMTPUTF8' - but the SMTPUTF8 extension is not supported by - the server. - -Note: the connection will be open even after an exception is raised. - -Example: - - >>> import smtplib - >>> s=smtplib.SMTP("localhost") - >>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"] - >>> msg = '''\\ - ... From: Me@my.org - ... Subject: testin'... - ... - ... This is a test ''' - >>> s.sendmail("me@my.org",tolist,msg) - { "three@three.org" : ( 550 ,"User unknown" ) } - >>> s.quit() - -In the above example, the message was accepted for delivery to three -of the four addresses, and one was rejected, with the error code -550. If all addresses are accepted, then the method will return an -empty dictionary. - -""" - + ) -> _SendErrs: ... def send_message( self, msg: _Message, @@ -525,44 +150,11 @@ empty dictionary. to_addrs: str | Sequence[str] | None = None, mail_options: Sequence[str] = (), rcpt_options: Sequence[str] = (), - ) -> _SendErrs: - """Converts message to a bytestring and passes it to sendmail. - - The arguments are as for sendmail, except that msg is an - email.message.Message object. If from_addr is None or to_addrs is - None, these arguments are taken from the headers of the Message as - described in RFC 2822 (a ValueError is raised if there is more than - one set of 'Resent-' headers). Regardless of the values of from_addr and - to_addr, any Bcc field (or Resent-Bcc field, when the Message is a - resent) of the Message object won't be transmitted. The Message - object is then serialized using email.generator.BytesGenerator and - sendmail is called to transmit the message. If the sender or any of - the recipient addresses contain non-ASCII and the server advertises the - SMTPUTF8 capability, the policy is cloned with utf8 set to True for the - serialization, and SMTPUTF8 and BODY=8BITMIME are asserted on the send. - If the server does not support SMTPUTF8, an SMTPNotSupported error is - raised. Otherwise the generator is called without modifying the - policy. - - """ - - def close(self) -> None: - """Close the connection to the SMTP server.""" - - def quit(self) -> _Reply: - """Terminate the SMTP session.""" + ) -> _SendErrs: ... + def close(self) -> None: ... + def quit(self) -> _Reply: ... class SMTP_SSL(SMTP): - """This is a subclass derived from SMTP that connects over an SSL - encrypted socket (to use this class you need a socket module that was - compiled with SSL support). If host is not specified, '' (the local - host) is used. If port is omitted, the standard SMTP-over-SSL port - (465) is used. local_hostname and source_address have the same meaning - as they do in the SMTP class. context also optional, can contain a - SSLContext. - - """ - keyfile: str | None certfile: str | None context: SSLContext @@ -593,20 +185,6 @@ class SMTP_SSL(SMTP): LMTP_PORT: Final = 2003 class LMTP(SMTP): - """LMTP - Local Mail Transfer Protocol - - The LMTP protocol, which is very similar to ESMTP, is heavily based - on the standard SMTP client. 
It's common to use Unix sockets for - LMTP, so our connect() method must support that as well as a regular - host:port server. local_hostname and source_address have the same - meaning as they do in the SMTP class. To specify a Unix socket, - you must use an absolute path as the host, starting with a '/'. - - Authentication is supported, using the regular SMTP mechanism. When - using a Unix socket, LMTP generally don't support or require any - authentication, but your mileage might vary. - """ - def __init__( self, host: str = "", @@ -614,5 +192,4 @@ class LMTP(SMTP): local_hostname: str | None = None, source_address: _SourceAddress | None = None, timeout: float = ..., - ) -> None: - """Initialize a new instance.""" + ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi index e6863787046a8..f4d487607fbb2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi @@ -1,48 +1,14 @@ -"""Routines to help recognizing sound files. - -Function whathdr() recognizes various types of sound file headers. -It understands almost all headers that SOX can decode. - -The return tuple contains the following items, in this order: -- file type (as SOX understands it) -- sampling rate (0 if unknown or hard to decode) -- number of channels (0 if unknown or hard to decode) -- number of frames in the file (-1 if unknown or hard to decode) -- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW - -If the file doesn't have a recognizable type, it returns None. -If the file can't be opened, OSError is raised. - -To compute the total time, divide the number of frames by the -sampling rate (a frame contains a sample for each channel). - -Function what() calls whathdr(). (It used to also use some -heuristics for raw data, but this doesn't work very well.) - -Finally, the function test() is a simple main program that calls -what() for all files mentioned on the argument list. For directory -arguments it calls what() for all files in that directory. Default -argument is "." (testing all files in the current directory). The -option -r tells it to recurse down directories found inside -explicitly given directories. -""" - from _typeshed import StrOrBytesPath from typing import NamedTuple __all__ = ["what", "whathdr"] class SndHeaders(NamedTuple): - """SndHeaders(filetype, framerate, nchannels, nframes, sampwidth)""" - filetype: str framerate: int nchannels: int nframes: int sampwidth: int | str -def what(filename: StrOrBytesPath) -> SndHeaders | None: - """Guess the type of a sound file.""" - -def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: - """Recognize sound headers.""" +def what(filename: StrOrBytesPath) -> SndHeaders | None: ... +def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi index ad2af8629207e..b10b3560b91fa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi @@ -1,50 +1,3 @@ -"""This module provides socket operations and some related functions. -On Unix, it supports IP (Internet Protocol) and Unix domain sockets. -On other systems, it only supports IP. Functions specific for a -socket are available as methods of the socket object. 
- -Functions: - -socket() -- create a new socket object -socketpair() -- create a pair of new socket objects [*] -fromfd() -- create a socket object from an open file descriptor [*] -send_fds() -- Send file descriptor to the socket. -recv_fds() -- Receive file descriptors from the socket. -fromshare() -- create a socket object from data received from socket.share() [*] -gethostname() -- return the current hostname -gethostbyname() -- map a hostname to its IP number -gethostbyaddr() -- map an IP number or hostname to DNS info -getservbyname() -- map a service name and a protocol name to a port number -getprotobyname() -- map a protocol name (e.g. 'tcp') to a number -ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order -htons(), htonl() -- convert 16, 32 bit int from host to network byte order -inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format -inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) -socket.getdefaulttimeout() -- get the default timeout value -socket.setdefaulttimeout() -- set the default timeout value -create_connection() -- connects to an address, with an optional timeout and - optional source address. -create_server() -- create a TCP socket and bind it to a specified address. - - [*] not available on all platforms! - -Special objects: - -SocketType -- type object for socket objects -error -- exception raised for I/O errors -has_ipv6 -- boolean value indicating if IPv6 is supported - -IntEnum constants: - -AF_INET, AF_UNIX -- socket domains (first argument to socket() call) -SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) - -Integer constants: - -Many other constants may be defined; these may be used in calls to -the setsockopt() and getsockopt() methods. -""" - # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. @@ -1123,8 +1076,6 @@ else: class timeout(error): ... class AddressFamily(IntEnum): - """An enumeration.""" - AF_INET = 2 AF_INET6 = 10 AF_APPLETALK = 5 @@ -1229,8 +1180,6 @@ if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darw AF_DIVERT: Final = AddressFamily.AF_DIVERT class SocketKind(IntEnum): - """An enumeration.""" - SOCK_STREAM = 1 SOCK_DGRAM = 2 SOCK_RAW = 3 @@ -1250,8 +1199,6 @@ if sys.platform == "linux": SOCK_NONBLOCK: Final = SocketKind.SOCK_NONBLOCK class MsgFlag(IntFlag): - """An enumeration.""" - MSG_CTRUNC = 8 MSG_DONTROUTE = 4 MSG_OOB = 1 @@ -1313,8 +1260,6 @@ if sys.platform != "win32" and sys.platform != "linux": MSG_EOF: Final = MsgFlag.MSG_EOF class AddressInfo(IntFlag): - """An enumeration.""" - AI_ADDRCONFIG = 32 AI_ALL = 16 AI_CANONNAME = 2 @@ -1355,28 +1300,14 @@ class _SendableFile(Protocol): # def fileno(self) -> int: ... class socket(_socket.socket): - """A subclass of _socket.socket adding the makefile() method.""" - __slots__ = ["__weakref__", "_io_refs", "_closed"] def __init__( self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None ) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def dup(self) -> Self: - """dup() -> socket object - - Duplicate the socket. Return a new socket object connected to the same - system resource. The new socket is non-inheritable. 
- """ - - def accept(self) -> tuple[socket, _RetAddress]: - """accept() -> (socket object, address info) - - Wait for an incoming connection. Return a new socket - representing the connection, and the address of the client. - For IP sockets, the address info is a pair (hostaddr, port). - """ + def dup(self) -> Self: ... + def accept(self) -> tuple[socket, _RetAddress]: ... # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @overload @@ -1388,14 +1319,7 @@ class socket(_socket.socket): encoding: str | None = None, errors: str | None = None, newline: str | None = None, - ) -> SocketIO: - """makefile(...) -> an I/O stream connected to the socket - - The arguments are as for io.open() after the filename, except the only - supported mode values are 'r' (default), 'w', 'b', or a combination of - those. - """ - + ) -> SocketIO: ... @overload def makefile( self, @@ -1446,130 +1370,43 @@ class socket(_socket.socket): errors: str | None = None, newline: str | None = None, ) -> TextIOWrapper: ... - def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: - """sendfile(file[, offset[, count]]) -> sent - - Send a file until EOF is reached by using high-performance - os.sendfile() and return the total number of bytes which - were sent. - *file* must be a regular file object opened in binary mode. - If os.sendfile() is not available (e.g. Windows) or file is - not a regular file socket.send() will be used instead. - *offset* tells from where to start reading the file. - If specified, *count* is the total number of bytes to transmit - as opposed to sending the file until EOF is reached. - File position is updated on return or also in case of error in - which case file.tell() can be used to figure out the number of - bytes which were sent. - The socket must be of SOCK_STREAM type. - Non-blocking sockets are not supported. - """ - + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... @property - def family(self) -> AddressFamily: - """Read-only access to the address family for this socket.""" - + def family(self) -> AddressFamily: ... @property - def type(self) -> SocketKind: - """Read-only access to the socket type.""" + def type(self) -> SocketKind: ... + def get_inheritable(self) -> bool: ... + def set_inheritable(self, inheritable: bool) -> None: ... - def get_inheritable(self) -> bool: - """Get the inheritable flag of the socket""" - - def set_inheritable(self, inheritable: bool) -> None: - """Set the inheritable flag of the socket""" - -def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: - """fromfd(fd, family, type[, proto]) -> socket object - - Create a socket object from a duplicate of the given file - descriptor. The remaining arguments are the same as for socket(). - """ +def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... if sys.platform != "win32": def send_fds( sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None - ) -> int: - """send_fds(sock, buffers, fds[, flags[, address]]) -> integer - - Send the list of file descriptors fds over an AF_UNIX socket. 
- """ - - def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: - """recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file - descriptors, msg_flags, address) - - Receive up to maxfds file descriptors returning the message - data and a list containing the descriptors. - """ + ) -> int: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... if sys.platform == "win32": - def fromshare(info: bytes) -> socket: - """fromshare(info) -> socket object - - Create a socket object from the bytes object returned by - socket.share(pid). - """ + def fromshare(info: bytes) -> socket: ... if sys.platform == "win32": - def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: - """socketpair([family[, type[, proto]]]) -> (socket object, socket object) - Create a pair of socket objects from the sockets returned by the platform - socketpair() function. - The arguments are the same as for socket() except the default family is AF_UNIX - if defined on the platform; otherwise, the default is AF_INET. - """ + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... else: def socketpair( family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 - ) -> tuple[socket, socket]: - """socketpair([family[, type[, proto]]]) -> (socket object, socket object) - Create a pair of socket objects from the sockets returned by the platform - socketpair() function. - The arguments are the same as for socket() except the default family is AF_UNIX - if defined on the platform; otherwise, the default is AF_INET. - """ + ) -> tuple[socket, socket]: ... class SocketIO(RawIOBase): - """Raw I/O implementation for stream sockets. - - This class supports the makefile() method on sockets. It provides - the raw I/O interface on top of a socket object. - """ - def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... - def readinto(self, b: WriteableBuffer) -> int | None: - """Read up to len(b) bytes into the writable buffer *b* and return - the number of bytes read. If the socket is non-blocking and no bytes - are available, None is returned. - - If *b* is non-empty, a 0 return value indicates that the connection - was shutdown at the other end. - """ - - def write(self, b: ReadableBuffer) -> int | None: - """Write the given bytes or bytearray object *b* to the socket - and return the number of bytes written. This can be less than - len(b) if not all data could be written. If the socket is - non-blocking and no bytes could be written None is returned. - """ - + def readinto(self, b: WriteableBuffer) -> int | None: ... + def write(self, b: ReadableBuffer) -> int | None: ... @property def name(self) -> int: ... # return value is really "int" @property def mode(self) -> Literal["rb", "wb", "rwb"]: ... -def getfqdn(name: str = "") -> str: - """Get fully qualified domain name from name. - - An empty argument is interpreted as meaning the local host. - - First the hostname returned by gethostbyaddr() is checked, then - possibly existing aliases. In case no FQDN is available and `name` - was given, it is returned unchanged. If `name` was empty, '0.0.0.0' or '::', - hostname from gethostname() is returned. - """ +def getfqdn(name: str = "") -> str: ... 
if sys.version_info >= (3, 11): def create_connection( @@ -1578,77 +1415,19 @@ if sys.version_info >= (3, 11): source_address: _Address | None = None, *, all_errors: bool = False, - ) -> socket: - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - A host of '' or port 0 tells the OS to use the default. When a connection - cannot be created, raises the last error if *all_errors* is False, - and an ExceptionGroup of all errors if *all_errors* is True. - """ + ) -> socket: ... else: def create_connection( address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None - ) -> socket: - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - A host of '' or port 0 tells the OS to use the default. - """ - -def has_dualstack_ipv6() -> bool: - """Return True if the platform supports creating a SOCK_STREAM socket - which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections. - """ + ) -> socket: ... +def has_dualstack_ipv6() -> bool: ... def create_server( address: _Address, *, family: int = ..., backlog: int | None = None, reuse_port: bool = False, dualstack_ipv6: bool = False -) -> socket: - """Convenience function which creates a SOCK_STREAM type socket - bound to *address* (a 2-tuple (host, port)) and return the socket - object. - - *family* should be either AF_INET or AF_INET6. - *backlog* is the queue size passed to socket.listen(). - *reuse_port* dictates whether to use the SO_REUSEPORT socket option. - *dualstack_ipv6*: if true and the platform supports it, it will - create an AF_INET6 socket able to accept both IPv4 or IPv6 - connections. When false it will explicitly disable this option on - platforms that enable it by default (e.g. Linux). - - >>> with create_server(('', 8000)) as server: - ... while True: - ... conn, addr = server.accept() - ... # handle new connection - """ +) -> socket: ... # The 5th tuple item is the socket address, for IP4, IP6, or IP6 if Python is compiled with --disable-ipv6, respectively. def getaddrinfo( host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 -) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: - """Resolve host and port into list of address info entries. - - Translate the host/port argument into a sequence of 5-tuples that contain - all the necessary arguments for creating a socket connected to that service. - host is a domain name, a string representation of an IPv4/v6 address or - None. 
port is a string service name such as 'http', a numeric port number or - None. By passing None as the value of host and port, you can pass NULL to - the underlying C API. - - The family, type and proto arguments can be optionally specified in order to - narrow the list of addresses returned. Passing zero as a value for each of - these arguments selects the full range of results. - """ +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi index efd983449177e..f321d14a792b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi @@ -1,123 +1,3 @@ -"""Generic socket server classes. - -This module tries to capture the various aspects of defining a server: - -For socket-based servers: - -- address family: - - AF_INET{,6}: IP (Internet Protocol) sockets (default) - - AF_UNIX: Unix domain sockets - - others, e.g. AF_DECNET are conceivable (see -- socket type: - - SOCK_STREAM (reliable stream, e.g. TCP) - - SOCK_DGRAM (datagrams, e.g. UDP) - -For request-based servers (including socket-based): - -- client address verification before further looking at the request - (This is actually a hook for any processing that needs to look - at the request before anything else, e.g. logging) -- how to handle multiple requests: - - synchronous (one request is handled at a time) - - forking (each request is handled by a new process) - - threading (each request is handled by a new thread) - -The classes in this module favor the server type that is simplest to -write: a synchronous TCP/IP server. This is bad class design, but -saves some typing. (There's also the issue that a deep class hierarchy -slows down method lookups.) - -There are five classes in an inheritance diagram, four of which represent -synchronous servers of four types: - - +------------+ - | BaseServer | - +------------+ - | - v - +-----------+ +------------------+ - | TCPServer |------->| UnixStreamServer | - +-----------+ +------------------+ - | - v - +-----------+ +--------------------+ - | UDPServer |------->| UnixDatagramServer | - +-----------+ +--------------------+ - -Note that UnixDatagramServer derives from UDPServer, not from -UnixStreamServer -- the only difference between an IP and a Unix -stream server is the address family, which is simply repeated in both -unix server classes. - -Forking and threading versions of each type of server can be created -using the ForkingMixIn and ThreadingMixIn mix-in classes. For -instance, a threading UDP server class is created as follows: - - class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass - -The Mix-in class must come first, since it overrides a method defined -in UDPServer! Setting the various member variables also changes -the behavior of the underlying server mechanism. - -To implement a service, you must derive a class from -BaseRequestHandler and redefine its handle() method. You can then run -various versions of the service by combining one of the server classes -with your request handler class. - -The request handler class must be different for datagram or stream -services. This can be hidden by using the request handler -subclasses StreamRequestHandler or DatagramRequestHandler. - -Of course, you still have to use your head! 
- -For instance, it makes no sense to use a forking server if the service -contains state in memory that can be modified by requests (since the -modifications in the child process would never reach the initial state -kept in the parent process and passed to each child). In this case, -you can use a threading server, but you will probably have to use -locks to avoid two requests that come in nearly simultaneous to apply -conflicting changes to the server state. - -On the other hand, if you are building e.g. an HTTP server, where all -data is stored externally (e.g. in the file system), a synchronous -class will essentially render the service "deaf" while one request is -being handled -- which may be for a very long time if a client is slow -to read all the data it has requested. Here a threading or forking -server is appropriate. - -In some cases, it may be appropriate to process part of a request -synchronously, but to finish processing in a forked child depending on -the request data. This can be implemented by using a synchronous -server and doing an explicit fork in the request handler class -handle() method. - -Another approach to handling multiple simultaneous requests in an -environment that supports neither threads nor fork (or where these are -too expensive or inappropriate for the service) is to maintain an -explicit table of partially finished requests and to use a selector to -decide which request to work on next (or whether to handle a new -incoming request). This is particularly important for stream services -where each client can potentially be connected for a long time (if -threads or subprocesses cannot be used). - -Future work: -- Standard classes for Sun RPC (which uses either UDP or TCP) -- Standard mix-in classes to implement various authentication - and encryption schemes - -XXX Open problems: -- What to do with out-of-band data? - -BaseServer: -- split generic "request" functionality out into BaseServer class. - Copyright (C) 2000 Luke Kenneth Casson Leighton - - example: read entries from a SQL database (requires overriding - get_request() to return a table entry from the database). - entry is processed by a RequestHandlerClass. - -""" - import sys import types from _socket import _Address, _RetAddress @@ -159,189 +39,32 @@ _AfInet6Address: TypeAlias = tuple[str | bytes | bytearray, int, int, int] # ad # This can possibly be generic at some point: class BaseServer: - """Base class for server classes. 
- - Methods for the caller: - - - __init__(server_address, RequestHandlerClass) - - serve_forever(poll_interval=0.5) - - shutdown() - - handle_request() # if you do not use serve_forever() - - fileno() -> int # for selector - - Methods that may be overridden: - - - server_bind() - - server_activate() - - get_request() -> request, client_address - - handle_timeout() - - verify_request(request, client_address) - - server_close() - - process_request(request, client_address) - - shutdown_request(request) - - close_request(request) - - service_actions() - - handle_error() - - Methods for derived classes: - - - finish_request(request, client_address) - - Class variables that may be overridden by derived classes or - instances: - - - timeout - - address_family - - socket_type - - allow_reuse_address - - allow_reuse_port - - Instance variables: - - - RequestHandlerClass - - socket - - """ - server_address: _Address timeout: float | None RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] def __init__( self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] - ) -> None: - """Constructor. May be extended, do not override.""" - - def handle_request(self) -> None: - """Handle one request, possibly blocking. - - Respects self.timeout. - """ - - def serve_forever(self, poll_interval: float = 0.5) -> None: - """Handle one request at a time until shutdown. - - Polls for shutdown every poll_interval seconds. Ignores - self.timeout. If you need to do periodic tasks, do them in - another thread. - """ - - def shutdown(self) -> None: - """Stops the serve_forever loop. - - Blocks until the loop has finished. This must be called while - serve_forever() is running in another thread, or it will - deadlock. - """ - - def server_close(self) -> None: - """Called to clean-up the server. - - May be overridden. - - """ - - def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Finish one request by instantiating RequestHandlerClass.""" - + ) -> None: ... + def handle_request(self) -> None: ... + def serve_forever(self, poll_interval: float = 0.5) -> None: ... + def shutdown(self) -> None: ... + def server_close(self) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def get_request(self) -> tuple[Any, Any]: ... # Not implemented here, but expected to exist on subclasses - def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: - """Handle an error gracefully. May be overridden. - - The default is to print a traceback and continue. - - """ - - def handle_timeout(self) -> None: - """Called if no new request arrives within self.timeout. - - Overridden by ForkingMixIn. - """ - - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Call finish_request. - - Overridden by ForkingMixIn and ThreadingMixIn. - - """ - - def server_activate(self) -> None: - """Called by constructor to activate the server. - - May be overridden. - - """ - - def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: - """Verify the request. May be overridden. - - Return True if we should proceed with this request. - - """ - + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def handle_timeout(self) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_activate(self) -> None: ... 
+ def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... - def service_actions(self) -> None: - """Called by the serve_forever() loop. - - May be overridden by a subclass / Mixin to implement any code that - needs to be run during the loop. - """ - - def shutdown_request(self, request: _RequestType) -> None: # undocumented - """Called to shutdown and close an individual request.""" - - def close_request(self, request: _RequestType) -> None: # undocumented - """Called to clean up an individual request.""" + def service_actions(self) -> None: ... + def shutdown_request(self, request: _RequestType) -> None: ... # undocumented + def close_request(self, request: _RequestType) -> None: ... # undocumented class TCPServer(BaseServer): - """Base class for various socket-based server classes. - - Defaults to synchronous IP stream (i.e., TCP). - - Methods for the caller: - - - __init__(server_address, RequestHandlerClass, bind_and_activate=True) - - serve_forever(poll_interval=0.5) - - shutdown() - - handle_request() # if you don't use serve_forever() - - fileno() -> int # for selector - - Methods that may be overridden: - - - server_bind() - - server_activate() - - get_request() -> request, client_address - - handle_timeout() - - verify_request(request, client_address) - - process_request(request, client_address) - - shutdown_request(request) - - close_request(request) - - handle_error() - - Methods for derived classes: - - - finish_request(request, client_address) - - Class variables that may be overridden by derived classes or - instances: - - - timeout - - address_family - - socket_type - - request_queue_size (only for stream sockets) - - allow_reuse_address - - allow_reuse_port - - Instance variables: - - - server_address - - RequestHandlerClass - - socket - - """ - address_family: int socket: _socket allow_reuse_address: bool @@ -355,33 +78,12 @@ class TCPServer(BaseServer): server_address: _AfInetAddress | _AfInet6Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: - """Constructor. May be extended, do not override.""" - - def fileno(self) -> int: - """Return socket file number. - - Interface required by selector. - - """ - - def get_request(self) -> tuple[_socket, _RetAddress]: - """Get the request and client address from the socket. - - May be overridden. - - """ - - def server_bind(self) -> None: - """Called by constructor to bind the socket. - - May be overridden. - - """ + ) -> None: ... + def fileno(self) -> int: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... + def server_bind(self) -> None: ... class UDPServer(TCPServer): - """UDP server class.""" - max_packet_size: ClassVar[int] def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] @@ -393,8 +95,7 @@ if sys.platform != "win32": server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: - """Constructor. May be extended, do not override.""" + ) -> None: ... 
class UnixDatagramServer(UDPServer): server_address: _AfUnixAddress # type: ignore[assignment] @@ -403,52 +104,25 @@ if sys.platform != "win32": server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: - """Constructor. May be extended, do not override.""" + ) -> None: ... if sys.platform != "win32": class ForkingMixIn: - """Mix-in class to handle each request in a new process.""" - timeout: float | None # undocumented active_children: set[int] | None # undocumented max_children: int # undocumented block_on_close: bool - def collect_children(self, *, blocking: bool = False) -> None: # undocumented - """Internal routine to wait for children that have exited.""" - - def handle_timeout(self) -> None: # undocumented - """Wait for zombies after self.timeout seconds of inactivity. - - May be extended, do not override. - """ - - def service_actions(self) -> None: # undocumented - """Collect the zombie child processes regularly in the ForkingMixIn. - - service_actions is called in the BaseServer's serve_forever loop. - """ - - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Fork a new subprocess to process the request.""" - + def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented + def handle_timeout(self) -> None: ... # undocumented + def service_actions(self) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... class ThreadingMixIn: - """Mix-in class to handle each request in a new thread.""" - daemon_threads: bool block_on_close: bool - def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: # undocumented - """Same as in BaseServer but as a thread. - - In addition, exception handling is done here. - - """ - - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Start a new thread to process the request.""" - + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... if sys.platform != "win32": @@ -466,22 +140,6 @@ if sys.platform != "win32": class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: - """Base class for request handler classes. - - This class is instantiated for each request to be handled. The - constructor sets the instance variables request, client_address - and server, and then calls the handle() method. To implement a - specific service, all you need to do is to derive a class which - defines a handle() method. - - The handle() method can find the request as self.request, the - client address as self.client_address, and the server (in case it - needs access to per-server information) as self.server. Since a - separate instance is created for each request, the handle() method - can define other arbitrary instance variables. - - """ - # `request` is technically of type _RequestType, # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see @@ -497,8 +155,6 @@ class BaseRequestHandler: def finish(self) -> None: ... 
class StreamRequestHandler(BaseRequestHandler): - """Define self.rfile and self.wfile for stream sockets.""" - rbufsize: ClassVar[int] # undocumented wbufsize: ClassVar[int] # undocumented timeout: ClassVar[float | None] # undocumented @@ -508,8 +164,6 @@ class StreamRequestHandler(BaseRequestHandler): wfile: BufferedIOBase class DatagramRequestHandler(BaseRequestHandler): - """Define self.rfile and self.wfile for datagram sockets.""" - packet: bytes # undocumented socket: _socket # undocumented rfile: BufferedIOBase diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi index 70e8e3ea4c1b2..3a5d39997dcc7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi @@ -1,14 +1,3 @@ -"""This module provides access to the Unix shadow password database. -It is available on various Unix versions. - -Shadow password database entries are reported as 9-tuples of type struct_spwd, -containing the following items from the password database (see `'): -sp_namp, sp_pwdp, sp_lstchg, sp_min, sp_max, sp_warn, sp_inact, sp_expire, sp_flag. -The sp_namp and sp_pwdp are strings, the rest are integers. -An exception is raised if the entry asked for cannot be found. -You have to be root to be able to use this module. -""" - import sys from _typeshed import structseq from typing import Any, Final, final @@ -16,13 +5,6 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): - """spwd.struct_spwd: Results from getsp*() routines. - - This object may be accessed either as a 9-tuple of - (sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag) - or via the object attributes as named in the above tuple. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ( "sp_namp", @@ -37,57 +19,28 @@ if sys.platform != "win32": ) @property - def sp_namp(self) -> str: - """login name""" - + def sp_namp(self) -> str: ... @property - def sp_pwdp(self) -> str: - """encrypted password""" - + def sp_pwdp(self) -> str: ... @property - def sp_lstchg(self) -> int: - """date of last change""" - + def sp_lstchg(self) -> int: ... @property - def sp_min(self) -> int: - """min #days between changes""" - + def sp_min(self) -> int: ... @property - def sp_max(self) -> int: - """max #days between changes""" - + def sp_max(self) -> int: ... @property - def sp_warn(self) -> int: - """#days before pw expires to warn user about it""" - + def sp_warn(self) -> int: ... @property - def sp_inact(self) -> int: - """#days after pw expires until account is disabled""" - + def sp_inact(self) -> int: ... @property - def sp_expire(self) -> int: - """#days since 1970-01-01 when account expires""" - + def sp_expire(self) -> int: ... @property - def sp_flag(self) -> int: - """reserved""" + def sp_flag(self) -> int: ... # Deprecated aliases below. @property - def sp_nam(self) -> str: - """login name; deprecated""" - + def sp_nam(self) -> str: ... @property - def sp_pwd(self) -> str: - """encrypted password; deprecated""" - - def getspall() -> list[struct_spwd]: - """Return a list of all available shadow password database entries, in arbitrary order. - - See `help(spwd)` for more on shadow password database entries. - """ - - def getspnam(arg: str, /) -> struct_spwd: - """Return the shadow password database entry for the given user name. + def sp_pwd(self) -> str: ... 
- See `help(spwd)` for more on shadow password database entries. - """ + def getspall() -> list[struct_spwd]: ... + def getspnam(arg: str, /) -> struct_spwd: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi index e378b4d434026..6b0f1ba949106 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi @@ -1,37 +1,3 @@ -""" -The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant -interface to the SQLite library, and requires SQLite 3.15.2 or newer. - -To use the module, start by creating a database Connection object: - - import sqlite3 - cx = sqlite3.connect("test.db") # test.db will be created or opened - -The special path name ":memory:" can be provided to connect to a transient -in-memory database: - - cx = sqlite3.connect(":memory:") # connect to a database in RAM - -Once a connection has been established, create a Cursor object and call -its execute() method to perform SQL queries: - - cu = cx.cursor() - - # create a table - cu.execute("create table lang(name, first_appeared)") - - # insert values into a table - cu.execute("insert into lang values (?, ?)", ("C", 1972)) - - # execute a query and iterate over the result - for row in cu.execute("select * from lang"): - print(row) - - cx.close() - -The sqlite3 module is written by Gerhard Häring . -""" - import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence @@ -304,8 +270,6 @@ class Warning(Exception): ... @disjoint_base class Connection: - """SQLite database connection object.""" - @property def DataError(self) -> type[DataError]: ... @property @@ -364,59 +328,19 @@ class Connection: uri: bool = False, ) -> None: ... - def close(self) -> None: - """Close the database connection. - - Any pending transaction is not committed implicitly. - """ + def close(self) -> None: ... if sys.version_info >= (3, 11): - def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: - """Open and return a BLOB object. - - table - Table name. - column - Column name. - row - Row index. - readonly - Open the BLOB without write permissions. - name - Database name. - """ - - def commit(self) -> None: - """Commit any pending transaction to the database. + def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: ... - If there is no open transaction, this method is a no-op. - """ - - def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: - """Creates a new aggregate. - - Note: Passing keyword arguments 'name', 'n_arg' and 'aggregate_class' - to _sqlite3.Connection.create_aggregate() is deprecated. Parameters - 'name', 'n_arg' and 'aggregate_class' will become positional-only in - Python 3.15. - """ + def commit(self) -> None: ... + def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... if sys.version_info >= (3, 11): # num_params determines how many params will be passed to the aggregate class. We provide an overload # for the case where num_params = 1, which is expected to be the common case. 
@overload def create_window_function( self, name: str, num_params: Literal[1], aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None, / - ) -> None: - """Creates or redefines an aggregate window function. Non-standard. - - name - The name of the SQL aggregate window function to be created or - redefined. - num_params - The number of arguments the step and inverse methods takes. - aggregate_class - A class with step(), finalize(), value(), and inverse() methods. - Set to None to clear the window function. - """ + ) -> None: ... # And for num_params = -1, which means the aggregate must accept any number of parameters. @overload def create_window_function( @@ -427,98 +351,36 @@ class Connection: self, name: str, num_params: int, aggregate_class: Callable[[], _WindowAggregateClass] | None, / ) -> None: ... - def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: - """Creates a collation function.""" - + def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: ... def create_function( self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False - ) -> None: - """Creates a new function. - - Note: Passing keyword arguments 'name', 'narg' and 'func' to - _sqlite3.Connection.create_function() is deprecated. Parameters - 'name', 'narg' and 'func' will become positional-only in Python 3.15. - """ - + ) -> None: ... @overload - def cursor(self, factory: None = None) -> Cursor: - """Return a cursor for the connection.""" - + def cursor(self, factory: None = None) -> Cursor: ... @overload def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: ... - def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: - """Executes an SQL statement.""" - - def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: - """Repeatedly executes an SQL statement.""" - - def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once.""" - - def interrupt(self) -> None: - """Abort any pending database operation.""" + def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: ... + def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: ... + def executescript(self, sql_script: str, /) -> Cursor: ... + def interrupt(self) -> None: ... if sys.version_info >= (3, 13): - def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: - """Returns iterator to the dump of the database in an SQL text format. - - filter - An optional LIKE pattern for database objects to dump - """ + def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: ... else: - def iterdump(self) -> Generator[str, None, None]: - """Returns iterator to the dump of the database in an SQL text format.""" - - def rollback(self) -> None: - """Roll back to the start of any pending transaction. - - If there is no open transaction, this method is a no-op. - """ + def iterdump(self) -> Generator[str, None, None]: ... + def rollback(self) -> None: ... def set_authorizer( self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None - ) -> None: - """Set authorizer callback. - - Note: Passing keyword argument 'authorizer_callback' to - _sqlite3.Connection.set_authorizer() is deprecated. Parameter - 'authorizer_callback' will become positional-only in Python 3.15. 
- """ - - def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: - """Set progress handler callback. - - progress_handler - A callable that takes no arguments. - If the callable returns non-zero, the current query is terminated, - and an exception is raised. - n - The number of SQLite virtual machine instructions that are - executed between invocations of 'progress_handler'. - - If 'progress_handler' is None or 'n' is 0, the progress handler is disabled. - - Note: Passing keyword argument 'progress_handler' to - _sqlite3.Connection.set_progress_handler() is deprecated. Parameter - 'progress_handler' will become positional-only in Python 3.15. - """ - - def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: - """Set a trace callback called for each SQL statement (passed as unicode). - - Note: Passing keyword argument 'trace_callback' to - _sqlite3.Connection.set_trace_callback() is deprecated. Parameter - 'trace_callback' will become positional-only in Python 3.15. - """ + ) -> None: ... + def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: ... + def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: ... # enable_load_extension and load_extension is not available on python distributions compiled # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 - def enable_load_extension(self, enable: bool, /) -> None: - """Enable dynamic loading of SQLite extension modules.""" + def enable_load_extension(self, enable: bool, /) -> None: ... if sys.version_info >= (3, 12): - def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: - """Load SQLite extension module.""" + def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: ... else: - def load_extension(self, name: str, /) -> None: - """Load SQLite extension module.""" + def load_extension(self, name: str, /) -> None: ... def backup( self, @@ -528,93 +390,24 @@ class Connection: progress: Callable[[int, int, int], object] | None = None, name: str = "main", sleep: float = 0.25, - ) -> None: - """Makes a backup of the database.""" + ) -> None: ... if sys.version_info >= (3, 11): - def setlimit(self, category: int, limit: int, /) -> int: - """Set connection run-time limits. - - category - The limit category to be set. - limit - The new limit. If the new limit is a negative number, the limit is - unchanged. - - Attempts to increase a limit above its hard upper bound are silently truncated - to the hard upper bound. Regardless of whether or not the limit was changed, - the prior value of the limit is returned. - """ - - def getlimit(self, category: int, /) -> int: - """Get connection run-time limits. - - category - The limit category to be queried. - """ - - def serialize(self, *, name: str = "main") -> bytes: - """Serialize a database into a byte string. - - name - Which database to serialize. - - For an ordinary on-disk database file, the serialization is just a copy of the - disk file. For an in-memory database or a "temp" database, the serialization is - the same sequence of bytes which would be written to disk if that database - were backed up to disk. - """ - - def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: - """Load a serialized database. - - data - The serialized database content. - name - Which database to reopen with the deserialization. 
- - The deserialize interface causes the database connection to disconnect from the - target database, and then reopen it as an in-memory database based on the given - serialized data. - - The deserialize interface will fail with SQLITE_BUSY if the database is - currently in a read transaction or is involved in a backup operation. - """ + def setlimit(self, category: int, limit: int, /) -> int: ... + def getlimit(self, category: int, /) -> int: ... + def serialize(self, *, name: str = "main") -> bytes: ... + def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: ... if sys.version_info >= (3, 12): - def getconfig(self, op: int, /) -> bool: - """Query a boolean connection configuration option. - - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. - """ - - def setconfig(self, op: int, enable: bool = True, /) -> bool: - """Set a boolean connection configuration option. - - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. - """ - - def __call__(self, sql: str, /) -> _Statement: - """Call self as a function.""" - - def __enter__(self) -> Self: - """Called when the connection is used as a context manager. - - Returns itself as a convenience to the caller. - """ + def getconfig(self, op: int, /) -> bool: ... + def setconfig(self, op: int, enable: bool = True, /) -> bool: ... + def __call__(self, sql: str, /) -> _Statement: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / - ) -> Literal[False]: - """Called when the connection is used as a context manager. - - If there was any exception, a rollback takes place; otherwise we commit. - """ + ) -> Literal[False]: ... @disjoint_base class Cursor: - """SQLite database cursor class.""" - arraysize: int @property def connection(self) -> Connection: ... @@ -627,68 +420,35 @@ class Cursor: @property def rowcount(self) -> int: ... def __init__(self, cursor: Connection, /) -> None: ... - def close(self) -> None: - """Closes the cursor.""" - - def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: - """Executes an SQL statement.""" - - def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: - """Repeatedly executes an SQL statement.""" - - def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once.""" - - def fetchall(self) -> list[Any]: - """Fetches all rows from the resultset.""" - - def fetchmany(self, size: int | None = 1) -> list[Any]: - """Fetches several rows from the resultset. - - size - The default value is set by the Cursor.arraysize attribute. - """ + def close(self) -> None: ... + def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: ... + def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: ... + def executescript(self, sql_script: str, /) -> Cursor: ... + def fetchall(self) -> list[Any]: ... + def fetchmany(self, size: int | None = 1) -> list[Any]: ... # Returns either a row (as created by the row_factory) or None, but # putting None in the return annotation causes annoying false positives. - def fetchone(self) -> Any: - """Fetches one row from the resultset.""" - - def setinputsizes(self, sizes: Unused, /) -> None: # does nothing - """Required by DB-API. Does nothing in sqlite3.""" - - def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: # does nothing - """Required by DB-API. 
Does nothing in sqlite3.""" - - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> Any: - """Implement next(self).""" + def fetchone(self) -> Any: ... + def setinputsizes(self, sizes: Unused, /) -> None: ... # does nothing + def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: ... # does nothing + def __iter__(self) -> Self: ... + def __next__(self) -> Any: ... @final class PrepareProtocol: - """PEP 246 style object adaption protocol type.""" - def __init__(self, *args: object, **kwargs: object) -> None: ... @disjoint_base class Row(Sequence[Any]): def __new__(cls, cursor: Cursor, data: tuple[Any, ...], /) -> Self: ... - def keys(self) -> list[str]: - """Returns the keys of the row.""" - + def keys(self) -> list[str]: ... @overload - def __getitem__(self, key: int | str, /) -> Any: - """Return self[key].""" - + def __getitem__(self, key: int | str, /) -> Any: ... @overload def __getitem__(self, key: slice, /) -> tuple[Any, ...]: ... def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[Any]: - """Implement iter(self).""" - - def __len__(self) -> int: - """Return len(self).""" + def __iter__(self) -> Iterator[Any]: ... + def __len__(self) -> int: ... # These return NotImplemented for anything that is not a Row. def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: object, /) -> bool: ... @@ -705,49 +465,14 @@ class _Statement: ... if sys.version_info >= (3, 11): @final class Blob: - def close(self) -> None: - """Close the blob.""" - - def read(self, length: int = -1, /) -> bytes: - """Read data at the current offset position. - - length - Read length in bytes. - - If the end of the blob is reached, the data up to end of file will be returned. - When length is not specified, or is negative, Blob.read() will read until the - end of the blob. - """ - - def write(self, data: ReadableBuffer, /) -> None: - """Write data at the current offset. - - This function cannot change the blob length. Writing beyond the end of the - blob will result in an exception being raised. - """ - - def tell(self) -> int: - """Return the current access position for the blob.""" + def close(self) -> None: ... + def read(self, length: int = -1, /) -> bytes: ... + def write(self, data: ReadableBuffer, /) -> None: ... + def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, offset: int, origin: int = 0, /) -> None: - """Set the current access position to offset. - - The origin argument defaults to os.SEEK_SET (absolute blob positioning). - Other values for origin are os.SEEK_CUR (seek relative to the current position) - and os.SEEK_END (seek relative to the blob's end). - """ - - def __len__(self) -> int: - """Return len(self).""" - - def __enter__(self) -> Self: - """Blob context manager enter.""" - - def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: - """Blob context manager exit.""" - - def __getitem__(self, key: SupportsIndex | slice, /) -> int: - """Return self[key].""" - - def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: - """Set self[key] to value.""" + def seek(self, offset: int, origin: int = 0, /) -> None: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: ... + def __getitem__(self, key: SupportsIndex | slice, /) -> int: ... + def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: ... 
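Illustrative aside (not part of the patch): the sqlite3 hunk above strips the stub docstrings down to bare signatures. As a reminder of the DB-API surface those signatures describe (connect() returning a Connection, Connection.execute() returning a Cursor, Cursor iteration, and the commit/rollback context-manager behaviour typed via __exit__ -> Literal[False]), here is a minimal usage sketch adapted from the module docstring removed above; the file name "example.db" and the "lang" table are illustrative only.

    import sqlite3

    # connect() returns a Connection; ":memory:" would give a transient in-memory DB.
    cx = sqlite3.connect("example.db")

    # Used as a context manager, a Connection commits on success and rolls back on
    # error; it does not close the connection (its __exit__ returns False, as typed above).
    with cx:
        cx.execute("CREATE TABLE IF NOT EXISTS lang(name TEXT, first_appeared INTEGER)")
        cx.execute("INSERT INTO lang VALUES (?, ?)", ("C", 1972))

    # Connection.execute() returns a Cursor, which is itself iterable.
    for row in cx.execute("SELECT name, first_appeared FROM lang"):
        print(row)

    cx.close()
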
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi index b3205231f6f5d..d8f0b7937e994 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi @@ -1,5 +1,3 @@ -"""Internal support module for sre""" - from re import Pattern from sre_constants import * from sre_constants import _NamedIntConstant diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi index d1c52ccfb1025..9a1da4ac89e7e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi @@ -1,5 +1,3 @@ -"""Internal support module for sre""" - import sys from re import error as error from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi index 89d33fcdd2379..eaacbff312a92 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi @@ -1,5 +1,3 @@ -"""Internal support module for sre""" - import sys from collections.abc import Iterable from re import Match, Pattern as _Pattern diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi index b4540c8ca8c97..faa98cb399200 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi @@ -1,93 +1,3 @@ -"""This module provides some more Pythonic support for SSL. - -Object types: - - SSLSocket -- subtype of socket.socket which does SSL over the socket - -Exceptions: - - SSLError -- exception raised for I/O errors - -Functions: - - cert_time_to_seconds -- convert time string used for certificate - notBefore and notAfter functions to integer - seconds past the Epoch (the time values - returned from time.time()) - - get_server_certificate (addr, ssl_version, ca_certs, timeout) -- Retrieve the - certificate from the server at the specified - address and return it as a PEM-encoded string - - -Integer constants: - -SSL_ERROR_ZERO_RETURN -SSL_ERROR_WANT_READ -SSL_ERROR_WANT_WRITE -SSL_ERROR_WANT_X509_LOOKUP -SSL_ERROR_SYSCALL -SSL_ERROR_SSL -SSL_ERROR_WANT_CONNECT - -SSL_ERROR_EOF -SSL_ERROR_INVALID_ERROR_CODE - -The following group define certificate requirements that one side is -allowing/requiring from the other side: - -CERT_NONE - no certificates from the other side are required (or will - be looked at if provided) -CERT_OPTIONAL - certificates are not required, but if provided will be - validated, and if validation fails, the connection will - also fail -CERT_REQUIRED - certificates are required, and will be validated, and - if validation fails, the connection will also fail - -The following constants identify various SSL protocol variants: - -PROTOCOL_SSLv2 -PROTOCOL_SSLv3 -PROTOCOL_SSLv23 -PROTOCOL_TLS -PROTOCOL_TLS_CLIENT -PROTOCOL_TLS_SERVER -PROTOCOL_TLSv1 -PROTOCOL_TLSv1_1 -PROTOCOL_TLSv1_2 - -The following constants identify various SSL alert message descriptions as per -http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6 - -ALERT_DESCRIPTION_CLOSE_NOTIFY -ALERT_DESCRIPTION_UNEXPECTED_MESSAGE -ALERT_DESCRIPTION_BAD_RECORD_MAC -ALERT_DESCRIPTION_RECORD_OVERFLOW -ALERT_DESCRIPTION_DECOMPRESSION_FAILURE -ALERT_DESCRIPTION_HANDSHAKE_FAILURE -ALERT_DESCRIPTION_BAD_CERTIFICATE 
-ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE -ALERT_DESCRIPTION_CERTIFICATE_REVOKED -ALERT_DESCRIPTION_CERTIFICATE_EXPIRED -ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN -ALERT_DESCRIPTION_ILLEGAL_PARAMETER -ALERT_DESCRIPTION_UNKNOWN_CA -ALERT_DESCRIPTION_ACCESS_DENIED -ALERT_DESCRIPTION_DECODE_ERROR -ALERT_DESCRIPTION_DECRYPT_ERROR -ALERT_DESCRIPTION_PROTOCOL_VERSION -ALERT_DESCRIPTION_INSUFFICIENT_SECURITY -ALERT_DESCRIPTION_INTERNAL_ERROR -ALERT_DESCRIPTION_USER_CANCELLED -ALERT_DESCRIPTION_NO_RENEGOTIATION -ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION -ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE -ALERT_DESCRIPTION_UNRECOGNIZED_NAME -ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE -ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE -ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY -""" - import enum import socket import sys @@ -155,33 +65,16 @@ class _Cipher(TypedDict): symmetric: str class SSLError(OSError): - """An error occurred in the SSL implementation.""" - library: str reason: str -class SSLZeroReturnError(SSLError): - """SSL/TLS session closed cleanly.""" - -class SSLWantReadError(SSLError): - """Non-blocking SSL socket needs to read more data - before the requested operation can be completed. - """ - -class SSLWantWriteError(SSLError): - """Non-blocking SSL socket needs to write more data - before the requested operation can be completed. - """ - -class SSLSyscallError(SSLError): - """System error when attempting SSL operation.""" - -class SSLEOFError(SSLError): - """SSL/TLS connection terminated abruptly.""" +class SSLZeroReturnError(SSLError): ... +class SSLWantReadError(SSLError): ... +class SSLWantWriteError(SSLError): ... +class SSLSyscallError(SSLError): ... +class SSLEOFError(SSLError): ... class SSLCertVerificationError(SSLError, ValueError): - """A certificate could not be verified.""" - verify_code: int verify_message: str @@ -202,63 +95,22 @@ if sys.version_info < (3, 12): ciphers: str | None = None, ) -> SSLSocket: ... @deprecated("Deprecated since Python 3.7; removed in Python 3.12.") - def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed. - - The function matches IP addresses rather than dNSNames if hostname is a - valid ipaddress string. IPv4 addresses are supported on all platforms. - IPv6 addresses are supported on platforms with IPv6 support (AF_INET6 - and inet_pton). + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... - CertificateError is raised on failure. On success, the function - returns nothing. - """ - -def cert_time_to_seconds(cert_time: str) -> int: - """Return the time in seconds since the Epoch, given the timestring - representing the "notBefore" or "notAfter" date from a certificate - in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale). - - "notBefore" or "notAfter" dates must use UTC (RFC 5280). - - Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec - UTC should be specified as GMT (see ASN1_TIME_print()) - """ +def cert_time_to_seconds(cert_time: str) -> int: ... if sys.version_info >= (3, 10): def get_server_certificate( addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... - ) -> str: - """Retrieve the certificate from the server at the specified address, - and return it as a PEM-encoded string. - If 'ca_certs' is specified, validate the server cert against it. 
- If 'ssl_version' is specified, use it in the connection attempt. - If 'timeout' is specified, use it in the connection attempt. - """ + ) -> str: ... else: - def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: - """Retrieve the certificate from the server at the specified address, - and return it as a PEM-encoded string. - If 'ca_certs' is specified, validate the server cert against it. - If 'ssl_version' is specified, use it in the connection attempt. - """ - -def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: - """Takes a certificate in binary DER format and returns the - PEM version of it as a string. - """ - -def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: - """Takes a certificate in ASCII PEM format and returns the - DER-encoded version of it as a byte sequence - """ + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... -class DefaultVerifyPaths(NamedTuple): - """DefaultVerifyPaths(cafile, capath, openssl_cafile_env, openssl_cafile, openssl_capath_env, openssl_capath)""" +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... +def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... +class DefaultVerifyPaths(NamedTuple): cafile: str capath: str openssl_cafile_env: str @@ -266,12 +118,9 @@ class DefaultVerifyPaths(NamedTuple): openssl_capath_env: str openssl_capath: str -def get_default_verify_paths() -> DefaultVerifyPaths: - """Return paths to default cafile and capath.""" +def get_default_verify_paths() -> DefaultVerifyPaths: ... class VerifyMode(enum.IntEnum): - """An enumeration.""" - CERT_NONE = 0 CERT_OPTIONAL = 1 CERT_REQUIRED = 2 @@ -281,8 +130,6 @@ CERT_OPTIONAL: Final = VerifyMode.CERT_OPTIONAL CERT_REQUIRED: Final = VerifyMode.CERT_REQUIRED class VerifyFlags(enum.IntFlag): - """An enumeration.""" - VERIFY_DEFAULT = 0 VERIFY_CRL_CHECK_LEAF = 4 VERIFY_CRL_CHECK_CHAIN = 12 @@ -303,8 +150,6 @@ if sys.version_info >= (3, 10): VERIFY_X509_PARTIAL_CHAIN: Final = VerifyFlags.VERIFY_X509_PARTIAL_CHAIN class _SSLMethod(enum.IntEnum): - """An enumeration.""" - PROTOCOL_SSLv23 = 2 PROTOCOL_SSLv2 = ... PROTOCOL_SSLv3 = ... @@ -326,8 +171,6 @@ PROTOCOL_TLS_CLIENT: Final = _SSLMethod.PROTOCOL_TLS_CLIENT PROTOCOL_TLS_SERVER: Final = _SSLMethod.PROTOCOL_TLS_SERVER class Options(enum.IntFlag): - """An enumeration.""" - OP_ALL = 2147483728 OP_NO_SSLv2 = 0 OP_NO_SSLv3 = 33554432 @@ -373,8 +216,6 @@ HAS_NEVER_CHECK_COMMON_NAME: Final[bool] CHANNEL_BINDING_TYPES: Final[list[str]] class AlertDescription(enum.IntEnum): - """An enumeration.""" - ALERT_DESCRIPTION_ACCESS_DENIED = 49 ALERT_DESCRIPTION_BAD_CERTIFICATE = 42 ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = 114 @@ -440,20 +281,13 @@ class _ASN1ObjectBase(NamedTuple): oid: str class _ASN1Object(_ASN1ObjectBase): - """ASN.1 object identifier lookup""" - def __new__(cls, oid: str) -> Self: ... @classmethod - def fromnid(cls, nid: int) -> Self: - """Create _ASN1Object from OpenSSL numeric ID""" - + def fromnid(cls, nid: int) -> Self: ... @classmethod - def fromname(cls, name: str) -> Self: - """Create _ASN1Object from short name, long name or OID""" + def fromname(cls, name: str) -> Self: ... class Purpose(_ASN1Object, enum.Enum): - """SSLContext purpose flags with X509v3 Extended Key Usage objects""" - # Normally this class would inherit __new__ from _ASN1Object, but # because this is an enum, the inherited __new__ is replaced at runtime with # Enum.__new__. 
@@ -462,30 +296,15 @@ class Purpose(_ASN1Object, enum.Enum): CLIENT_AUTH = (130, "clientAuth", "TLS Web Client Authentication", "1.3.6.1.5.5.7.3.1") # pyright: ignore[reportCallIssue] class SSLSocket(socket.socket): - """This class implements a subtype of socket.socket that wraps - the underlying OS socket in an SSL context when necessary, and - provides read and write methods over that channel. - """ - context: SSLContext server_side: bool server_hostname: str | None session: SSLSession | None @property - def session_reused(self) -> bool | None: - """Was the client session reused during handshake""" - + def session_reused(self) -> bool | None: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def connect(self, addr: socket._Address) -> None: - """Connects to remote ADDR, and then wraps the connection in - an SSL channel. - """ - - def connect_ex(self, addr: socket._Address) -> int: - """Connects to remote ADDR, and then wraps the connection in - an SSL channel. - """ - + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... @@ -499,111 +318,40 @@ class SSLSocket(socket.socket): @overload def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... - def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: - """Read up to LEN bytes and return them. - Return zero-length string on EOF. - """ - - def write(self, data: ReadableBuffer) -> int: - """Write DATA to the underlying SSL channel. Returns - number of bytes of DATA actually transmitted. - """ - - def do_handshake(self, block: bool = False) -> None: # block is undocumented - """Start the SSL/TLS handshake.""" - + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def do_handshake(self, block: bool = False) -> None: ... # block is undocumented @overload - def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: - """Returns a formatted version of the data in the certificate provided - by the other end of the SSL channel. - - Return None if no certificate was provided, {} if a certificate was - provided, but not validated. - """ - + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... - def cipher(self) -> tuple[str, str, int] | None: - """Return the currently selected cipher as a 3-tuple ``(name, - ssl_version, secret_bits)``. - """ - - def shared_ciphers(self) -> list[tuple[str, str, int]] | None: - """Return a list of ciphers shared by the client during the handshake or - None if this is not a valid server connection. - """ - - def compression(self) -> str | None: - """Return the current compression algorithm in use, or ``None`` if - compression was not negotiated or not supported by one of the peers. - """ - - def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: - """Get channel binding data for current connection. Raise ValueError - if the requested `cb_type` is not supported. 
Return bytes of the data - or None if the data is not available (e.g. before the handshake). - """ - - def selected_alpn_protocol(self) -> str | None: - """Return the currently selected ALPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if ALPN is not supported by one - of the peers. - """ + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + def selected_alpn_protocol(self) -> str | None: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") - def selected_npn_protocol(self) -> str | None: - """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. - """ + def selected_npn_protocol(self) -> str | None: ... else: - def selected_npn_protocol(self) -> str | None: - """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. - """ - - def accept(self) -> tuple[SSLSocket, socket._RetAddress]: - """Accepts a new connection from a remote client, and returns - a tuple containing that new connection wrapped with a server-side - SSL channel, and the address of the remote client. - """ - - def unwrap(self) -> socket.socket: - """Start the SSL shutdown handshake.""" - - def version(self) -> str | None: - """Return a string identifying the protocol version used by the - current SSL channel. - """ - - def pending(self) -> int: - """Return the number of bytes that can be read immediately.""" + def selected_npn_protocol(self) -> str | None: ... + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... + def unwrap(self) -> socket.socket: ... + def version(self) -> str | None: ... + def pending(self) -> int: ... def verify_client_post_handshake(self) -> None: ... # These methods always raise `NotImplementedError`: def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: - """Returns verified certificate chain provided by the other - end of the SSL channel as a list of DER-encoded bytes. - - If certificate verification was disabled method acts the same as - ``SSLSocket.get_unverified_chain``. - """ - - def get_unverified_chain(self) -> list[bytes]: - """Returns raw certificate chain provided by the other - end of the SSL channel as a list of DER-encoded bytes. - """ + def get_verified_chain(self) -> list[bytes]: ... + def get_unverified_chain(self) -> list[bytes]: ... class TLSVersion(enum.IntEnum): - """An enumeration.""" - MINIMUM_SUPPORTED = -2 MAXIMUM_SUPPORTED = -1 SSLv3 = 768 @@ -613,10 +361,6 @@ class TLSVersion(enum.IntEnum): TLSv1_3 = 772 class SSLContext(_SSLContext): - """An SSLContext holds various SSL-related configuration options and - data, such as certificates and possibly a private key. - """ - options: Options verify_flags: VerifyFlags verify_mode: VerifyMode @@ -651,16 +395,7 @@ class SSLContext(_SSLContext): cadata: str | ReadableBuffer | None = None, ) -> None: ... 
@overload - def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: - """Returns a list of dicts with information of loaded CA certs. - - If the optional argument is True, returns a DER-encoded copy of the CA - certificate. - - NOTE: Certificates in a capath directory aren't loaded unless they have - been used at least once. - """ - + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -702,13 +437,7 @@ def create_default_context( cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, -) -> SSLContext: - """Create a SSLContext object with default settings. - - NOTE: The protocol and settings may change anytime without prior - deprecation. The values represent a fair balance between maximum - compatibility and security. - """ +) -> SSLContext: ... if sys.version_info >= (3, 10): def _create_unverified_context( @@ -722,14 +451,7 @@ if sys.version_info >= (3, 10): cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, - ) -> SSLContext: - """Create a SSLContext object for Python stdlib modules - - All Python stdlib modules shall use this function to create SSLContext - objects in order to keep common settings in one place. The configuration - is less restrict than create_default_context()'s to increase backward - compatibility. - """ + ) -> SSLContext: ... else: def _create_unverified_context( @@ -743,148 +465,49 @@ else: cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, - ) -> SSLContext: - """Create a SSLContext object for Python stdlib modules - - All Python stdlib modules shall use this function to create SSLContext - objects in order to keep common settings in one place. The configuration - is less restrict than create_default_context()'s to increase backward - compatibility. - """ + ) -> SSLContext: ... _create_default_https_context = create_default_context class SSLObject: - """This class implements an interface on top of a low-level SSL object as - implemented by OpenSSL. This object captures the state of an SSL connection - but does not provide any network IO itself. IO needs to be performed - through separate "BIO" objects which are OpenSSL's IO abstraction layer. - - This class does not have a public constructor. Instances are returned by - ``SSLContext.wrap_bio``. This class is typically used by framework authors - that want to implement asynchronous IO for SSL through memory buffers. - - When compared to ``SSLSocket``, this object lacks the following features: - - * Any form of network IO, including methods such as ``recv`` and ``send``. - * The ``do_handshake_on_connect`` and ``suppress_ragged_eofs`` machinery. - """ - context: SSLContext @property - def server_side(self) -> bool: - """Whether this is a server-side socket.""" - + def server_side(self) -> bool: ... @property - def server_hostname(self) -> str | None: - """The currently set server hostname (for SNI), or ``None`` if no - server hostname is set. - """ + def server_hostname(self) -> str | None: ... session: SSLSession | None @property - def session_reused(self) -> bool: - """Was the client session reused during handshake""" - + def session_reused(self) -> bool: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... 
- def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: - """Read up to 'len' bytes from the SSL object and return them. - - If 'buffer' is provided, read into this buffer and return the number of - bytes read. - """ - - def write(self, data: ReadableBuffer) -> int: - """Write 'data' to the SSL object and return the number of bytes - written. - - The 'data' argument must support the buffer interface. - """ - + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... @overload - def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: - """Returns a formatted version of the data in the certificate provided - by the other end of the SSL channel. - - Return None if no certificate was provided, {} if a certificate was - provided, but not validated. - """ - + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... - def selected_alpn_protocol(self) -> str | None: - """Return the currently selected ALPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if ALPN is not supported by one - of the peers. - """ + def selected_alpn_protocol(self) -> str | None: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") - def selected_npn_protocol(self) -> str | None: - """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. - """ + def selected_npn_protocol(self) -> str | None: ... else: - def selected_npn_protocol(self) -> str | None: - """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. - """ - - def cipher(self) -> tuple[str, str, int] | None: - """Return the currently selected cipher as a 3-tuple ``(name, - ssl_version, secret_bits)``. - """ - - def shared_ciphers(self) -> list[tuple[str, str, int]] | None: - """Return a list of ciphers shared by the client during the handshake or - None if this is not a valid server connection. - """ - - def compression(self) -> str | None: - """Return the current compression algorithm in use, or ``None`` if - compression was not negotiated or not supported by one of the peers. - """ - - def pending(self) -> int: - """Return the number of bytes that can be read immediately.""" - - def do_handshake(self) -> None: - """Start the SSL/TLS handshake.""" - - def unwrap(self) -> None: - """Start the SSL shutdown handshake.""" - - def version(self) -> str | None: - """Return a string identifying the protocol version used by the - current SSL channel. - """ - - def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: - """Get channel binding data for current connection. Raise ValueError - if the requested `cb_type` is not supported. Return bytes of the data - or None if the data is not available (e.g. before the handshake). - """ - + def selected_npn_protocol(self) -> str | None: ... + + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def pending(self) -> int: ... + def do_handshake(self) -> None: ... + def unwrap(self) -> None: ... 
+ def version(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... def verify_client_post_handshake(self) -> None: ... if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: - """Returns verified certificate chain provided by the other - end of the SSL channel as a list of DER-encoded bytes. - - If certificate verification was disabled method acts the same as - ``SSLSocket.get_unverified_chain``. - """ - - def get_unverified_chain(self) -> list[bytes]: - """Returns raw certificate chain provided by the other - end of the SSL channel as a list of DER-encoded bytes. - """ + def get_verified_chain(self) -> list[bytes]: ... + def get_unverified_chain(self) -> list[bytes]: ... class SSLErrorNumber(enum.IntEnum): - """An enumeration.""" - SSL_ERROR_EOF = 8 SSL_ERROR_INVALID_ERROR_CODE = 10 SSL_ERROR_SSL = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi index cee0bea41f991..face28ab0cbb6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi @@ -1,8 +1,3 @@ -"""Constants/functions for interpreting results of os.stat() and os.lstat(). - -Suggested usage: from stat import * -""" - import sys from _stat import * from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi index f5f731c46275e..ba9e5f1b6b71f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi @@ -1,109 +1,3 @@ -""" -Basic statistics module. - -This module provides functions for calculating statistics of data, including -averages, variance, and standard deviation. - -Calculating averages --------------------- - -================== ================================================== -Function Description -================== ================================================== -mean Arithmetic mean (average) of data. -fmean Fast, floating-point arithmetic mean. -geometric_mean Geometric mean of data. -harmonic_mean Harmonic mean of data. -median Median (middle value) of data. -median_low Low median of data. -median_high High median of data. -median_grouped Median, or 50th percentile, of grouped data. -mode Mode (most common value) of data. -multimode List of modes (most common values of data). -quantiles Divide data into intervals with equal probability. -================== ================================================== - -Calculate the arithmetic mean ("the average") of data: - ->>> mean([-1.0, 2.5, 3.25, 5.75]) -2.625 - - -Calculate the standard median of discrete data: - ->>> median([2, 3, 4, 5]) -3.5 - - -Calculate the median, or 50th percentile, of data grouped into class intervals -centred on the data values provided. E.g. if your data points are rounded to -the nearest whole number: - ->>> median_grouped([2, 2, 3, 3, 3, 4]) #doctest: +ELLIPSIS -2.8333333333... - -This should be interpreted in this way: you have two data points in the class -interval 1.5-2.5, three data points in the class interval 2.5-3.5, and one in -the class interval 3.5-4.5. The median of these data points is 2.8333... 
- - -Calculating variability or spread ---------------------------------- - -================== ============================================= -Function Description -================== ============================================= -pvariance Population variance of data. -variance Sample variance of data. -pstdev Population standard deviation of data. -stdev Sample standard deviation of data. -================== ============================================= - -Calculate the standard deviation of sample data: - ->>> stdev([2.5, 3.25, 5.5, 11.25, 11.75]) #doctest: +ELLIPSIS -4.38961843444... - -If you have previously calculated the mean, you can pass it as the optional -second argument to the four "spread" functions to avoid recalculating it: - ->>> data = [1, 2, 2, 4, 4, 4, 5, 6] ->>> mu = mean(data) ->>> pvariance(data, mu) -2.5 - - -Statistics for relations between two inputs -------------------------------------------- - -================== ==================================================== -Function Description -================== ==================================================== -covariance Sample covariance for two variables. -correlation Pearson's correlation coefficient for two variables. -linear_regression Intercept and slope for simple linear regression. -================== ==================================================== - -Calculate covariance, Pearson's correlation, and simple linear regression -for two inputs: - ->>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] ->>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] ->>> covariance(x, y) -0.75 ->>> correlation(x, y) #doctest: +ELLIPSIS -0.31622776601... ->>> linear_regression(x, y) #doctest: -LinearRegression(slope=0.1, intercept=1.5) - - -Exceptions ----------- - -A single exception is defined: StatisticsError is a subclass of ValueError. - -""" - import sys from _typeshed import SupportsRichComparisonT from collections.abc import Callable, Hashable, Iterable, Sequence @@ -150,650 +44,96 @@ _Seed: TypeAlias = int | float | str | bytes | bytearray # noqa: Y041 class StatisticsError(ValueError): ... if sys.version_info >= (3, 11): - def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: - """Convert data to floats and compute the arithmetic mean. - - This runs faster than the mean() function and it always returns a float. - If the input dataset is empty, it raises a StatisticsError. - - >>> fmean([3.5, 4.0, 5.25]) - 4.25 - - """ + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... else: - def fmean(data: Iterable[SupportsFloat]) -> float: - """Convert data to floats and compute the arithmetic mean. - - This runs faster than the mean() function and it always returns a float. - If the input dataset is empty, it raises a StatisticsError. - - >>> fmean([3.5, 4.0, 5.25]) - 4.25 - """ - -def geometric_mean(data: Iterable[SupportsFloat]) -> float: - """Convert data to floats and compute the geometric mean. - - Raises a StatisticsError if the input dataset is empty - or if it contains a negative value. - - Returns zero if the product of inputs is zero. - - No special efforts are made to achieve exact results. - (However, this may change in the future.) - - >>> round(geometric_mean([54, 24, 36]), 9) - 36.0 - - """ - -def mean(data: Iterable[_NumberT]) -> _NumberT: - """Return the sample arithmetic mean of data. 
- - >>> mean([1, 2, 3, 4, 4]) - 2.8 - - >>> from fractions import Fraction as F - >>> mean([F(3, 7), F(1, 21), F(5, 3), F(1, 3)]) - Fraction(13, 21) + def fmean(data: Iterable[SupportsFloat]) -> float: ... - >>> from decimal import Decimal as D - >>> mean([D("0.5"), D("0.75"), D("0.625"), D("0.375")]) - Decimal('0.5625') - - If ``data`` is empty, StatisticsError will be raised. - - """ +def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... +def mean(data: Iterable[_NumberT]) -> _NumberT: ... if sys.version_info >= (3, 10): - def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: - """Return the harmonic mean of data. - - The harmonic mean is the reciprocal of the arithmetic mean of the - reciprocals of the data. It can be used for averaging ratios or - rates, for example speeds. - - Suppose a car travels 40 km/hr for 5 km and then speeds-up to - 60 km/hr for another 5 km. What is the average speed? - - >>> harmonic_mean([40, 60]) - 48.0 - - Suppose a car travels 40 km/hr for 5 km, and when traffic clears, - speeds-up to 60 km/hr for the remaining 30 km of the journey. What - is the average speed? - - >>> harmonic_mean([40, 60], weights=[5, 30]) - 56.0 - - If ``data`` is empty, or any element is less than zero, - ``harmonic_mean`` will raise ``StatisticsError``. - - """ + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... else: - def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: - """Return the harmonic mean of data. - - The harmonic mean, sometimes called the subcontrary mean, is the - reciprocal of the arithmetic mean of the reciprocals of the data, - and is often appropriate when averaging quantities which are rates - or ratios, for example speeds. Example: - - Suppose an investor purchases an equal value of shares in each of - three companies, with P/E (price/earning) ratios of 2.5, 3 and 10. - What is the average P/E ratio for the investor's portfolio? - - >>> harmonic_mean([2.5, 3, 10]) # For an equal investment portfolio. - 3.6 - - Using the arithmetic mean would give an average of about 5.167, which - is too high. - - If ``data`` is empty, or any element is less than zero, - ``harmonic_mean`` will raise ``StatisticsError``. - """ + def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... -def median(data: Iterable[_NumberT]) -> _NumberT: - """Return the median (middle value) of numeric data. - - When the number of data points is odd, return the middle data point. - When the number of data points is even, the median is interpolated by - taking the average of the two middle values: - - >>> median([1, 3, 5]) - 3 - >>> median([1, 3, 5, 7]) - 4.0 - - """ - -def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: - """Return the low median of numeric data. - - When the number of data points is odd, the middle value is returned. - When it is even, the smaller of the two middle values is returned. - - >>> median_low([1, 3, 5]) - 3 - >>> median_low([1, 3, 5, 7]) - 3 - - """ - -def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: - """Return the high median of data. - - When the number of data points is odd, the middle value is returned. - When it is even, the larger of the two middle values is returned. - - >>> median_high([1, 3, 5]) - 3 - >>> median_high([1, 3, 5, 7]) - 5 - - """ +def median(data: Iterable[_NumberT]) -> _NumberT: ... 
+def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... +def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... if sys.version_info >= (3, 11): - def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: - """Estimates the median for numeric data binned around the midpoints - of consecutive, fixed-width intervals. - - The *data* can be any iterable of numeric data with each value being - exactly the midpoint of a bin. At least one value must be present. - - The *interval* is width of each bin. - - For example, demographic information may have been summarized into - consecutive ten-year age groups with each group being represented - by the 5-year midpoints of the intervals: - - >>> demographics = Counter({ - ... 25: 172, # 20 to 30 years old - ... 35: 484, # 30 to 40 years old - ... 45: 387, # 40 to 50 years old - ... 55: 22, # 50 to 60 years old - ... 65: 6, # 60 to 70 years old - ... }) - - The 50th percentile (median) is the 536th person out of the 1071 - member cohort. That person is in the 30 to 40 year old age group. - - The regular median() function would assume that everyone in the - tricenarian age group was exactly 35 years old. A more tenable - assumption is that the 484 members of that age group are evenly - distributed between 30 and 40. For that, we use median_grouped(). - - >>> data = list(demographics.elements()) - >>> median(data) - 35 - >>> round(median_grouped(data, interval=10), 1) - 37.5 - - The caller is responsible for making sure the data points are separated - by exact multiples of *interval*. This is essential for getting a - correct result. The function does not check this precondition. - - Inputs may be any numeric type that can be coerced to a float during - the interpolation step. - - """ + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... else: - def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: - """Return the 50th percentile (median) of grouped continuous data. - - >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) - 3.7 - >>> median_grouped([52, 52, 53, 54]) - 52.5 - - This calculates the median as the 50th percentile, and should be - used when your data is continuous and grouped. In the above example, - the values 1, 2, 3, etc. actually represent the midpoint of classes - 0.5-1.5, 1.5-2.5, 2.5-3.5, etc. The middle value falls somewhere in - class 3.5-4.5, and interpolation is used to estimate it. - - Optional argument ``interval`` represents the class interval, and - defaults to 1. Changing the class interval naturally will change the - interpolated 50th percentile value: - - >>> median_grouped([1, 3, 3, 5, 7], interval=1) - 3.25 - >>> median_grouped([1, 3, 3, 5, 7], interval=2) - 3.5 - - This function does not check whether the data points are at least - ``interval`` apart. - """ - -def mode(data: Iterable[_HashableT]) -> _HashableT: - """Return the most common data point from discrete or nominal data. - - ``mode`` assumes discrete data, and returns a single value. 
This is the - standard treatment of the mode as commonly taught in schools: - - >>> mode([1, 1, 2, 3, 3, 3, 3, 4]) - 3 - - This also works with nominal (non-numeric) data: - - >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) - 'red' - - If there are multiple modes with same frequency, return the first one - encountered: - - >>> mode(['red', 'red', 'green', 'blue', 'blue']) - 'red' - - If *data* is empty, ``mode``, raises StatisticsError. - - """ - -def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: - """Return a list of the most frequently occurring values. - - Will return more than one result if there are multiple modes - or an empty list if *data* is empty. - - >>> multimode('aabbbbbbbbcc') - ['b'] - >>> multimode('aabbbbccddddeeffffgg') - ['b', 'd', 'f'] - >>> multimode('') - [] - - """ - -def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: - """Return the square root of the population variance. - - See ``pvariance`` for arguments and other details. - - >>> pstdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) - 0.986893273527251 - - """ - -def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: - """Return the population variance of ``data``. - - data should be a sequence or iterable of Real-valued numbers, with at least one - value. The optional argument mu, if given, should be the mean of - the data. If it is missing or None, the mean is automatically calculated. - - Use this function to calculate the variance from the entire population. - To estimate the variance from a sample, the ``variance`` function is - usually a better choice. - - Examples: - - >>> data = [0.0, 0.25, 0.25, 1.25, 1.5, 1.75, 2.75, 3.25] - >>> pvariance(data) - 1.25 - - If you have already calculated the mean of the data, you can pass it as - the optional second argument to avoid recalculating it: - - >>> mu = mean(data) - >>> pvariance(data, mu) - 1.25 - - Decimals and Fractions are supported: - - >>> from decimal import Decimal as D - >>> pvariance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) - Decimal('24.815') - - >>> from fractions import Fraction as F - >>> pvariance([F(1, 4), F(5, 4), F(1, 2)]) - Fraction(13, 72) - - """ - -def quantiles(data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive") -> list[_NumberT]: - """Divide *data* into *n* continuous intervals with equal probability. - - Returns a list of (n - 1) cut points separating the intervals. - - Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. - Set *n* to 100 for percentiles which gives the 99 cuts points that - separate *data* in to 100 equal sized groups. - - The *data* can be any iterable containing sample. - The cut points are linearly interpolated between data points. - - If *method* is set to *inclusive*, *data* is treated as population - data. The minimum value is treated as the 0th percentile and the - maximum value is treated as the 100th percentile. - - """ - -def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: - """Return the square root of the sample variance. - - See ``variance`` for arguments and other details. - - >>> stdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) - 1.0810874155219827 - - """ - -def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: - """Return the sample variance of data. - - data should be an iterable of Real-valued numbers, with at least two - values. The optional argument xbar, if given, should be the mean of - the data. 
If it is missing or None, the mean is automatically calculated. - - Use this function when your data is a sample from a population. To - calculate the variance from the entire population, see ``pvariance``. - - Examples: - - >>> data = [2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5] - >>> variance(data) - 1.3720238095238095 - - If you have already calculated the mean of your data, you can pass it as - the optional second argument ``xbar`` to avoid recalculating it: - - >>> m = mean(data) - >>> variance(data, m) - 1.3720238095238095 - - This function does not check that ``xbar`` is actually the mean of - ``data``. Giving arbitrary values for ``xbar`` may lead to invalid or - impossible results. - - Decimals and Fractions are supported: - - >>> from decimal import Decimal as D - >>> variance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) - Decimal('31.01875') - - >>> from fractions import Fraction as F - >>> variance([F(1, 6), F(1, 2), F(5, 3)]) - Fraction(67, 108) - - """ + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... + +def mode(data: Iterable[_HashableT]) -> _HashableT: ... +def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def quantiles( + data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" +) -> list[_NumberT]: ... +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... class NormalDist: - """Normal distribution of a random variable""" - __slots__ = {"_mu": "Arithmetic mean of a normal distribution", "_sigma": "Standard deviation of a normal distribution"} - def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: - """NormalDist where mu is the mean and sigma is the standard deviation.""" - + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... @property - def mean(self) -> float: - """Arithmetic mean of the normal distribution.""" - + def mean(self) -> float: ... @property - def median(self) -> float: - """Return the median of the normal distribution""" - + def median(self) -> float: ... @property - def mode(self) -> float: - """Return the mode of the normal distribution - - The mode is the value x where which the probability density - function (pdf) takes its maximum value. - """ - + def mode(self) -> float: ... @property - def stdev(self) -> float: - """Standard deviation of the normal distribution.""" - + def stdev(self) -> float: ... @property - def variance(self) -> float: - """Square of the standard deviation.""" - + def variance(self) -> float: ... @classmethod - def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: - """Make a normal distribution instance from sample data.""" - - def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: - """Generate *n* samples for a given mean and standard deviation.""" - - def pdf(self, x: float) -> float: - """Probability density function. P(x <= X < x+dx) / dx""" - - def cdf(self, x: float) -> float: - """Cumulative distribution function. P(X <= x)""" - - def inv_cdf(self, p: float) -> float: - """Inverse cumulative distribution function. 
x : P(X <= x) = p - - Finds the value of the random variable such that the probability of - the variable being less than or equal to that value equals the given - probability. - - This function is also called the percent point function or quantile - function. - """ - - def overlap(self, other: NormalDist) -> float: - """Compute the overlapping coefficient (OVL) between two normal distributions. - - Measures the agreement between two normal probability distributions. - Returns a value between 0.0 and 1.0 giving the overlapping area in - the two underlying probability density functions. - - >>> N1 = NormalDist(2.4, 1.6) - >>> N2 = NormalDist(3.2, 2.0) - >>> N1.overlap(N2) - 0.8035050657330205 - """ - - def quantiles(self, n: int = 4) -> list[float]: - """Divide into *n* continuous intervals with equal probability. - - Returns a list of (n - 1) cut points separating the intervals. - - Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. - Set *n* to 100 for percentiles which gives the 99 cuts points that - separate the normal distribution in to 100 equal sized groups. - """ - - def zscore(self, x: float) -> float: - """Compute the Standard Score. (x - mean) / stdev - - Describes *x* in terms of the number of standard deviations - above or below the mean of the normal distribution. - """ - - def __eq__(x1, x2: object) -> bool: - """Two NormalDist objects are equal if their mu and sigma are both equal.""" - - def __add__(x1, x2: float | NormalDist) -> NormalDist: - """Add a constant or another NormalDist instance. - - If *other* is a constant, translate mu by the constant, - leaving sigma unchanged. - - If *other* is a NormalDist, add both the means and the variances. - Mathematically, this works only if the two distributions are - independent or if they are jointly normally distributed. - """ - - def __sub__(x1, x2: float | NormalDist) -> NormalDist: - """Subtract a constant or another NormalDist instance. - - If *other* is a constant, translate by the constant mu, - leaving sigma unchanged. - - If *other* is a NormalDist, subtract the means and add the variances. - Mathematically, this works only if the two distributions are - independent or if they are jointly normally distributed. - """ - - def __mul__(x1, x2: float) -> NormalDist: - """Multiply both mu and sigma by a constant. - - Used for rescaling, perhaps to change measurement units. - Sigma is scaled with the absolute value of the constant. - """ - - def __truediv__(x1, x2: float) -> NormalDist: - """Divide both mu and sigma by a constant. - - Used for rescaling, perhaps to change measurement units. - Sigma is scaled with the absolute value of the constant. - """ - - def __pos__(x1) -> NormalDist: - """Return a copy of the instance.""" - - def __neg__(x1) -> NormalDist: - """Negates mu while keeping sigma the same.""" + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... + def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: ... + def pdf(self, x: float) -> float: ... + def cdf(self, x: float) -> float: ... + def inv_cdf(self, p: float) -> float: ... + def overlap(self, other: NormalDist) -> float: ... + def quantiles(self, n: int = 4) -> list[float]: ... + def zscore(self, x: float) -> float: ... + def __eq__(x1, x2: object) -> bool: ... + def __add__(x1, x2: float | NormalDist) -> NormalDist: ... + def __sub__(x1, x2: float | NormalDist) -> NormalDist: ... + def __mul__(x1, x2: float) -> NormalDist: ... + def __truediv__(x1, x2: float) -> NormalDist: ... 
+ def __pos__(x1) -> NormalDist: ... + def __neg__(x1) -> NormalDist: ... __radd__ = __add__ - def __rsub__(x1, x2: float | NormalDist) -> NormalDist: - """Subtract a NormalDist from a constant or another NormalDist.""" + def __rsub__(x1, x2: float | NormalDist) -> NormalDist: ... __rmul__ = __mul__ - def __hash__(self) -> int: - """NormalDist objects hash equal if their mu and sigma are both equal.""" + def __hash__(self) -> int: ... if sys.version_info >= (3, 12): - def correlation(x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear") -> float: - """Pearson's correlation coefficient - - Return the Pearson's correlation coefficient for two inputs. Pearson's - correlation coefficient *r* takes values between -1 and +1. It measures - the strength and direction of a linear relationship. - - >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] - >>> correlation(x, x) - 1.0 - >>> correlation(x, y) - -1.0 - - If *method* is "ranked", computes Spearman's rank correlation coefficient - for two inputs. The data is replaced by ranks. Ties are averaged - so that equal values receive the same rank. The resulting coefficient - measures the strength of a monotonic relationship. - - Spearman's rank correlation coefficient is appropriate for ordinal - data or for continuous data that doesn't meet the linear proportion - requirement for Pearson's correlation coefficient. - - """ + def correlation( + x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear" + ) -> float: ... elif sys.version_info >= (3, 10): - def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: - """Pearson's correlation coefficient - - Return the Pearson's correlation coefficient for two inputs. Pearson's - correlation coefficient *r* takes values between -1 and +1. It measures the - strength and direction of the linear relationship, where +1 means very - strong, positive linear relationship, -1 very strong, negative linear - relationship, and 0 no linear relationship. - - >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] - >>> correlation(x, x) - 1.0 - >>> correlation(x, y) - -1.0 - - """ + def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... if sys.version_info >= (3, 10): - def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: - """Covariance - - Return the sample covariance of two inputs *x* and *y*. Covariance - is a measure of the joint variability of two inputs. - - >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] - >>> covariance(x, y) - 0.75 - >>> z = [9, 8, 7, 6, 5, 4, 3, 2, 1] - >>> covariance(x, z) - -7.5 - >>> covariance(z, x) - -7.5 - - """ + def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... class LinearRegression(NamedTuple): - """LinearRegression(slope, intercept)""" - slope: float intercept: float if sys.version_info >= (3, 11): def linear_regression( regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /, *, proportional: bool = False - ) -> LinearRegression: - """Slope and intercept for simple linear regression. - - Return the slope and intercept of simple linear regression - parameters estimated using ordinary least squares. 
Simple linear - regression describes relationship between an independent variable - *x* and a dependent variable *y* in terms of a linear function: - - y = slope * x + intercept + noise - - where *slope* and *intercept* are the regression parameters that are - estimated, and noise represents the variability of the data that was - not explained by the linear regression (it is equal to the - difference between predicted and actual values of the dependent - variable). - - The parameters are returned as a named tuple. - - >>> x = [1, 2, 3, 4, 5] - >>> noise = NormalDist().samples(5, seed=42) - >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] - >>> linear_regression(x, y) #doctest: +ELLIPSIS - LinearRegression(slope=3.17495..., intercept=1.00925...) - - If *proportional* is true, the independent variable *x* and the - dependent variable *y* are assumed to be directly proportional. - The data is fit to a line passing through the origin. - - Since the *intercept* will always be 0.0, the underlying linear - function simplifies to: - - y = slope * x + noise - - >>> y = [3 * x[i] + noise[i] for i in range(5)] - >>> linear_regression(x, y, proportional=True) #doctest: +ELLIPSIS - LinearRegression(slope=2.90475..., intercept=0.0) - - """ + ) -> LinearRegression: ... elif sys.version_info >= (3, 10): - def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: - """Slope and intercept for simple linear regression. - - Return the slope and intercept of simple linear regression - parameters estimated using ordinary least squares. Simple linear - regression describes relationship between an independent variable - *x* and a dependent variable *y* in terms of linear function: - - y = slope * x + intercept + noise - - where *slope* and *intercept* are the regression parameters that are - estimated, and noise represents the variability of the data that was - not explained by the linear regression (it is equal to the - difference between predicted and actual values of the dependent - variable). - - The parameters are returned as a named tuple. - - >>> x = [1, 2, 3, 4, 5] - >>> noise = NormalDist().samples(5, seed=42) - >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] - >>> linear_regression(x, y) #doctest: +ELLIPSIS - LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) - - """ + def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: ... if sys.version_info >= (3, 13): _Kernel: TypeAlias = Literal[ @@ -811,123 +151,9 @@ if sys.version_info >= (3, 13): "triweight", "cosine", ] - def kde(data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False) -> Callable[[float], float]: - """Kernel Density Estimation: Create a continuous probability density - function or cumulative distribution function from discrete samples. - - The basic idea is to smooth the data using a kernel function - to help draw inferences about a population from a sample. - - The degree of smoothing is controlled by the scaling parameter h - which is called the bandwidth. Smaller values emphasize local - features while larger values give smoother results. - - The kernel determines the relative weights of the sample data - points. Generally, the choice of kernel shape does not matter - as much as the more influential bandwidth smoothing parameter. 
- - Kernels that give some weight to every sample point: - - normal (gauss) - logistic - sigmoid - - Kernels that only give weight to sample points within - the bandwidth: - - rectangular (uniform) - triangular - parabolic (epanechnikov) - quartic (biweight) - triweight - cosine - - If *cumulative* is true, will return a cumulative distribution function. - - A StatisticsError will be raised if the data sequence is empty. - - Example - ------- - - Given a sample of six data points, construct a continuous - function that estimates the underlying probability density: - - >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] - >>> f_hat = kde(sample, h=1.5) - - Compute the area under the curve: - - >>> area = sum(f_hat(x) for x in range(-20, 20)) - >>> round(area, 4) - 1.0 - - Plot the estimated probability density function at - evenly spaced points from -6 to 10: - - >>> for x in range(-6, 11): - ... density = f_hat(x) - ... plot = ' ' * int(density * 400) + 'x' - ... print(f'{x:2}: {density:.3f} {plot}') - ... - -6: 0.002 x - -5: 0.009 x - -4: 0.031 x - -3: 0.070 x - -2: 0.111 x - -1: 0.125 x - 0: 0.110 x - 1: 0.086 x - 2: 0.068 x - 3: 0.059 x - 4: 0.066 x - 5: 0.082 x - 6: 0.082 x - 7: 0.058 x - 8: 0.028 x - 9: 0.009 x - 10: 0.002 x - - Estimate P(4.5 < X <= 7.5), the probability that a new sample value - will be between 4.5 and 7.5: - - >>> cdf = kde(sample, h=1.5, cumulative=True) - >>> round(cdf(7.5) - cdf(4.5), 2) - 0.22 - - References - ---------- - - Kernel density estimation and its application: - https://www.itm-conferences.org/articles/itmconf/pdf/2018/08/itmconf_sam2018_00037.pdf - - Kernel functions in common use: - https://en.wikipedia.org/wiki/Kernel_(statistics)#kernel_functions_in_common_use - - Interactive graphical demonstration and exploration: - https://demonstrations.wolfram.com/KernelDensityEstimation/ - - Kernel estimation of cumulative distribution function of a random variable with bounded support - https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf - - """ - + def kde( + data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False + ) -> Callable[[float], float]: ... def kde_random( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: _Seed | None = None - ) -> Callable[[], float]: - """Return a function that makes a random selection from the estimated - probability density function created by kde(data, h, kernel). - - Providing a *seed* allows reproducible selections within a single - thread. The seed may be an integer, float, str, or bytes. - - A StatisticsError will be raised if the *data* sequence is empty. - - Example: - - >>> data = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] - >>> rand = kde_random(data, h=1.5, seed=8675309) - >>> new_selections = [rand() for i in range(10)] - >>> [round(x, 1) for x in new_selections] - [0.7, 6.2, 1.2, 6.9, 7.0, 1.8, 2.5, -0.5, -1.8, 5.6] - - """ + ) -> Callable[[], float]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi index 6d87737eff51f..c8b32a98e26d7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi @@ -1,19 +1,3 @@ -"""A collection of string constants. 
- -Public module variables: - -whitespace -- a string containing all ASCII whitespace -ascii_lowercase -- a string containing all ASCII lowercase letters -ascii_uppercase -- a string containing all ASCII uppercase letters -ascii_letters -- a string containing all ASCII letters -digits -- a string containing all ASCII decimal digits -hexdigits -- a string containing all ASCII hexadecimal digits -octdigits -- a string containing all ASCII octal digits -punctuation -- a string containing all ASCII punctuation characters -printable -- a string containing all ASCII characters considered printable - -""" - import sys from _typeshed import StrOrLiteralStr from collections.abc import Iterable, Mapping, Sequence @@ -46,21 +30,9 @@ octdigits: Final = "01234567" punctuation: Final = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~""" printable: Final[LiteralString] # string too long -def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: - """capwords(s [,sep]) -> string - - Split the argument into words using split, capitalize each - word using capitalize, and join the capitalized words using - join. If the optional second argument sep is absent or None, - runs of whitespace characters are replaced by a single space - and leading and trailing whitespace are removed, otherwise - sep is used to split and join the words. - - """ +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... class Template: - """A string class for supporting $-substitutions.""" - template: str delimiter: ClassVar[str] idpattern: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi index 8690a8093b048..9906d31c63915 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi @@ -1,5 +1,3 @@ -"""Support for template string literals (t-strings).""" - from collections.abc import Iterator from types import GenericAlias from typing import Any, Literal, TypeVar, final, overload @@ -8,29 +6,18 @@ _T = TypeVar("_T") @final class Template: # TODO: consider making `Template` generic on `TypeVarTuple` - """Template object""" - strings: tuple[str, ...] interpolations: tuple[Interpolation, ...] def __new__(cls, *args: str | Interpolation) -> Template: ... - def __iter__(self) -> Iterator[str | Interpolation]: - """Implement iter(self).""" - - def __add__(self, other: Template, /) -> Template: - """Return self+value.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - + def __iter__(self) -> Iterator[str | Interpolation]: ... + def __add__(self, other: Template, /) -> Template: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @property - def values(self) -> tuple[Any, ...]: # Tuple of interpolation values, which can have any type - """Values of interpolations""" + def values(self) -> tuple[Any, ...]: ... # Tuple of interpolation values, which can have any type @final class Interpolation: - """Interpolation object""" - value: Any # TODO: consider making `Interpolation` generic in runtime expression: str conversion: Literal["a", "r", "s"] | None @@ -41,12 +28,9 @@ class Interpolation: def __new__( cls, value: Any, expression: str = "", conversion: Literal["a", "r", "s"] | None = None, format_spec: str = "" ) -> Interpolation: ... 
- def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... @overload -def convert(obj: _T, /, conversion: None) -> _T: - """Convert *obj* using formatted string literal semantics.""" - +def convert(obj: _T, /, conversion: None) -> _T: ... @overload def convert(obj: object, /, conversion: Literal["r", "s", "a"]) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi index 75a354552168e..d67955e499c85 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi @@ -1,9 +1,3 @@ -"""Library that exposes various tables found in the StringPrep RFC 3454. - -There are two kinds of tables: sets, for which a member test is provided, -and mappings, for which a mapping function is provided. -""" - from typing import Final b1_set: Final[set[int]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi index 9cc366060b83d..2c26908746ecc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi @@ -1,33 +1,3 @@ -"""Functions to convert between Python values and C structs. -Python bytes objects are used to hold the data representing the C struct -and also as format strings (explained below) to describe the layout of data -in the C struct. - -The optional first format char indicates byte order, size and alignment: - @: native order, size & alignment (default) - =: native order, std. size & alignment - <: little-endian, std. size & alignment - >: big-endian, std. size & alignment - !: same as > - -The remaining chars indicate types of args and must match exactly; -these can be preceded by a decimal repeat count: - x: pad byte (no data); c:char; b:signed byte; B:unsigned byte; - ?: _Bool (requires C99; if not available, char is used instead) - h:short; H:unsigned short; i:int; I:unsigned int; - l:long; L:unsigned long; f:float; d:double; e:half-float. -Special cases (preceding decimal count indicates length): - s:string (array of char); p: pascal string (with count byte). -Special cases (only available in native format): - n:ssize_t; N:size_t; - P:an integer type that is wide enough to hold a pointer. -Special case (not in native mode unless 'long long' in platform C): - q:long long; Q:unsigned long long -Whitespace between formats is ignored. - -The variable struct.error is an exception raised on errors. -""" - from _struct import * __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi index 60e4906577077..e1e25bcb50cbe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi @@ -1,37 +1,3 @@ -"""Subprocesses with accessible I/O streams - -This module allows you to spawn processes, connect to their -input/output/error pipes, and obtain their return codes. - -For a complete description of this module see the Python documentation. - -Main API -======== -run(...): Runs a command, waits for it to complete, then returns a - CompletedProcess instance. 
-Popen(...): A class for flexibly executing a command in a new process - -Constants ---------- -DEVNULL: Special value that indicates that os.devnull should be used -PIPE: Special value that indicates a pipe should be created -STDOUT: Special value that indicates that stderr should go to stdout - - -Older API -========= -call(...): Runs a command, waits for it to complete, then returns - the return code. -check_call(...): Same as call() but raises CalledProcessError() - if return code is not 0 -check_output(...): Same as check_call() but returns the contents of - stdout instead of a return code -getoutput(...): Runs a command in the shell, waits for it to complete, - then returns the output -getstatusoutput(...): Runs a command in the shell, waits for it to complete, - then returns a (exitcode, output) tuple -""" - import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence @@ -109,17 +75,6 @@ if sys.version_info >= (3, 11): _USE_POSIX_SPAWN: Final[bool] class CompletedProcess(Generic[_T]): - """A process that has finished running. - - This is returned by run(). - - Attributes: - args: The list or str args passed to run(). - returncode: The exit code of the process, negative for signals. - stdout: The standard output (None if not captured). - stderr: The standard error (None if not captured). - """ - # morally: _CMD args: Any returncode: int @@ -128,14 +83,8 @@ class CompletedProcess(Generic[_T]): stdout: _T stderr: _T def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... - def check_returncode(self) -> None: - """Raise CalledProcessError if the exit code is non-zero.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def check_returncode(self) -> None: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -172,36 +121,7 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> CompletedProcess[str]: - """Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout (seconds) is given and the process takes too long, - a TimeoutExpired exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. 
Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - + ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, @@ -408,36 +328,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: - """Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - + ) -> CompletedProcess[str]: ... @overload def run( args: _CMD, @@ -638,35 +529,7 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> CompletedProcess[str]: - """Run command with arguments and return a CompletedProcess instance. - - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. - - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. - - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. - - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. - - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. - - The other arguments are the same as for the Popen constructor. - """ - + ) -> CompletedProcess[str]: ... 
@overload def run( args: _CMD, @@ -860,14 +723,7 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> int: - """Run command with arguments. Wait for command to complete or - for timeout seconds, then return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ + ) -> int: ... elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -898,14 +754,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: - """Run command with arguments. Wait for command to complete or - timeout, then return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ + ) -> int: ... else: def call( @@ -934,14 +783,7 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> int: - """Run command with arguments. Wait for command to complete or - timeout, then return the returncode attribute. - - The arguments are the same as for the Popen constructor. Example: - - retcode = call(["ls", "-l"]) - """ + ) -> int: ... # Same args as Popen.__init__ if sys.version_info >= (3, 11): @@ -974,16 +816,7 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> int: - """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the call function. Example: - - check_call(["ls", "-l"]) - """ + ) -> int: ... elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -1014,16 +847,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: - """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the call function. Example: - - check_call(["ls", "-l"]) - """ + ) -> int: ... else: def check_call( @@ -1052,16 +876,7 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> int: - """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. - - The arguments are the same as for the call function. Example: - - check_call(["ls", "-l"]) - """ + ) -> int: ... if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -1095,42 +910,7 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> str: - """Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. 
Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\\n") - b'when in the course of barman events\\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - + ) -> str: ... @overload def check_output( args: _CMD, @@ -1319,42 +1099,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: - """Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\\n") - b'when in the course of barman events\\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - + ) -> str: ... @overload def check_output( args: _CMD, @@ -1536,42 +1281,7 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> str: - """Run command with arguments and return its output. - - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. - - The arguments are the same as for the Popen constructor. 
Example: - - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' - - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. - - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\\n' - - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: - - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\\n") - b'when in the course of barman events\\n' - - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ - + ) -> str: ... @overload def check_output( args: _CMD, @@ -1726,13 +1436,6 @@ DEVNULL: Final[int] class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): - """This exception is raised when the timeout expires while waiting for a - child process. - - Attributes: - cmd, output, stdout, stderr, timeout - """ - def __init__( self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None ) -> None: ... @@ -1745,13 +1448,6 @@ class TimeoutExpired(SubprocessError): stderr: bytes | None class CalledProcessError(SubprocessError): - """Raised when run() is called with check=True and the process - returns a non-zero exit status. - - Attributes: - cmd, returncode, stdout, stderr, output - """ - returncode: int # morally: _CMD cmd: Any @@ -1766,62 +1462,6 @@ class CalledProcessError(SubprocessError): ) -> None: ... class Popen(Generic[AnyStr]): - """Execute a child program in a new process. - - For a complete description of the arguments see the Python documentation. - - Arguments: - args: A string, or a sequence of program arguments. - - bufsize: supplied as the buffering argument to the open() function when - creating the stdin/stdout/stderr pipe file objects - - executable: A replacement program to execute. - - stdin, stdout and stderr: These specify the executed programs' standard - input, standard output and standard error file handles, respectively. - - preexec_fn: (POSIX only) An object to be called in the child process - just before the child is executed. - - close_fds: Controls closing or inheriting of file descriptors. - - shell: If true, the command will be executed through the shell. - - cwd: Sets the current directory before the child is executed. - - env: Defines the environment variables for the new process. - - text: If true, decode stdin, stdout and stderr using the given encoding - (if set) or the system default otherwise. - - universal_newlines: Alias of text, provided for backwards compatibility. 
- - startupinfo and creationflags (Windows only) - - restore_signals (POSIX only) - - start_new_session (POSIX only) - - process_group (POSIX only) - - group (POSIX only) - - extra_groups (POSIX only) - - user (POSIX only) - - umask (POSIX only) - - pass_fds (POSIX only) - - encoding and errors: Text mode encoding and error handling to use for - file objects stdin, stdout and stderr. - - Attributes: - stdin, stdout, stderr, pid, returncode - """ - args: _CMD stdin: IO[AnyStr] | None stdout: IO[AnyStr] | None @@ -1862,9 +1502,7 @@ class Popen(Generic[AnyStr]): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> None: - """Create new Popen instance.""" - + ) -> None: ... @overload def __init__( self: Popen[str], @@ -2052,9 +1690,7 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: - """Create new Popen instance.""" - + ) -> None: ... @overload def __init__( self: Popen[str], @@ -2235,9 +1871,7 @@ class Popen(Generic[AnyStr]): group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> None: - """Create new Popen instance.""" - + ) -> None: ... @overload def __init__( self: Popen[str], @@ -2385,148 +2019,32 @@ class Popen(Generic[AnyStr]): umask: int = -1, ) -> None: ... - def poll(self) -> int | None: - """Check if child process has terminated. Set and return returncode - attribute. - """ - - def wait(self, timeout: float | None = None) -> int: - """Wait for child process to terminate; returns self.returncode.""" + def poll(self) -> int | None: ... + def wait(self, timeout: float | None = None) -> int: ... # morally the members of the returned tuple should be optional # TODO: this should allow ReadableBuffer for Popen[bytes], but adding # overloads for that runs into a mypy bug (python/mypy#14070). - def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: - """Interact with process: Send data to stdin and close it. - Read data from stdout and stderr, until end-of-file is - reached. Wait for process to terminate. - - The optional "input" argument should be data to be sent to the - child process, or None, if no data should be sent to the child. - communicate() returns a tuple (stdout, stderr). - - By default, all communication is in bytes, and therefore any - "input" should be bytes, and the (stdout, stderr) will be bytes. - If in text mode (indicated by self.text_mode), any "input" should - be a string, and (stdout, stderr) will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or - universal_newlines. - """ - - def send_signal(self, sig: int) -> None: - """Send a signal to the process.""" - - def terminate(self) -> None: - """Terminate the process with SIGTERM""" - - def kill(self) -> None: - """Kill the process with SIGKILL""" - + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... + def send_signal(self, sig: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: - """Return (exitcode, output) of executing cmd in a shell. - - Execute the string 'cmd' in a shell with 'check_output' and - return a 2-tuple (status, output). The locale encoding is used - to decode the output and process newlines. - - A trailing newline is stripped from the output. - The exit status for the command can be interpreted - according to the rules for the function 'wait'. Example: - - >>> import subprocess - >>> subprocess.getstatusoutput('ls /bin/ls') - (0, '/bin/ls') - >>> subprocess.getstatusoutput('cat /bin/junk') - (1, 'cat: /bin/junk: No such file or directory') - >>> subprocess.getstatusoutput('/bin/junk') - (127, 'sh: /bin/junk: not found') - >>> subprocess.getstatusoutput('/bin/kill $$') - (-15, '') - """ - - def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: - """Return output (stdout or stderr) of executing cmd in a shell. - - Like getstatusoutput(), except the exit status is ignored and the return - value is a string containing the command's output. Example: - - >>> import subprocess - >>> subprocess.getoutput('ls /bin/ls') - '/bin/ls' - """ + def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... + def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: ... else: - def getstatusoutput(cmd: _CMD) -> tuple[int, str]: - """Return (exitcode, output) of executing cmd in a shell. - - Execute the string 'cmd' in a shell with 'check_output' and - return a 2-tuple (status, output). The locale encoding is used - to decode the output and process newlines. - - A trailing newline is stripped from the output. - The exit status for the command can be interpreted - according to the rules for the function 'wait'. Example: - - >>> import subprocess - >>> subprocess.getstatusoutput('ls /bin/ls') - (0, '/bin/ls') - >>> subprocess.getstatusoutput('cat /bin/junk') - (1, 'cat: /bin/junk: No such file or directory') - >>> subprocess.getstatusoutput('/bin/junk') - (127, 'sh: /bin/junk: not found') - >>> subprocess.getstatusoutput('/bin/kill $$') - (-15, '') - """ - - def getoutput(cmd: _CMD) -> str: - """Return output (stdout or stderr) of executing cmd in a shell. - - Like getstatusoutput(), except the exit status is ignored and the return - value is a string containing the command's output. Example: - - >>> import subprocess - >>> subprocess.getoutput('ls /bin/ls') - '/bin/ls' - """ - -def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: # undocumented - """ - Translate a sequence of arguments into a command line - string, using the same rules as the MS C runtime: - - 1) Arguments are delimited by white space, which is either a - space or a tab. - - 2) A string surrounded by double quotation marks is - interpreted as a single argument, regardless of white space - contained within. A quoted string can be embedded in an - argument. - - 3) A double quotation mark preceded by a backslash is - interpreted as a literal double quotation mark. - - 4) Backslashes are interpreted literally, unless they - immediately precede a double quotation mark. + def getstatusoutput(cmd: _CMD) -> tuple[int, str]: ... + def getoutput(cmd: _CMD) -> str: ... 
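[Editor's note] The Popen docstrings removed above cover communicate(), the context-manager protocol, and the getoutput()/getstatusoutput() helpers. A small usage sketch under those documented semantics; the "cat" and "ls" commands are illustrative only:

    import subprocess

    # Popen as a context manager; communicate() sends input, closes stdin,
    # and returns (stdout, stderr) once the child exits.
    with subprocess.Popen(
        ["cat"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, text=True
    ) as proc:
        out, _ = proc.communicate(input="hello\n", timeout=5)

    # getstatusoutput() runs a shell command and returns (exitcode, output).
    status, listing = subprocess.getstatusoutput("ls /bin/ls")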
- 5) If backslashes immediately precede a double quotation mark, - every pair of backslashes is interpreted as a literal - backslash. If the number of backslashes is odd, the last - backslash escapes the next double quotation mark as - described in rule 3. - """ +def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented if sys.platform == "win32": if sys.version_info >= (3, 13): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi index 5dbcffbe8914d..f83a0a4c520e7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi @@ -1,108 +1,3 @@ -"""Stuff to parse Sun and NeXT audio files. - -An audio file consists of a header followed by the data. The structure -of the header is as follows. - - +---------------+ - | magic word | - +---------------+ - | header size | - +---------------+ - | data size | - +---------------+ - | encoding | - +---------------+ - | sample rate | - +---------------+ - | # of channels | - +---------------+ - | info | - | | - +---------------+ - -The magic word consists of the 4 characters '.snd'. Apart from the -info field, all header fields are 4 bytes in size. They are all -32-bit unsigned integers encoded in big-endian byte order. - -The header size really gives the start of the data. -The data size is the physical size of the data. From the other -parameters the number of frames can be calculated. -The encoding gives the way in which audio samples are encoded. -Possible values are listed below. -The info field currently consists of an ASCII string giving a -human-readable description of the audio file. The info field is -padded with NUL bytes to the header size. - -Usage. - -Reading audio files: - f = sunau.open(file, 'r') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods read(), seek(), and close(). -When the setpos() and rewind() methods are not used, the seek() -method is not necessary. - -This returns an instance of a class with the following public methods: - getnchannels() -- returns number of audio channels (1 for - mono, 2 for stereo) - getsampwidth() -- returns sample width in bytes - getframerate() -- returns sampling frequency - getnframes() -- returns number of audio frames - getcomptype() -- returns compression type ('NONE' or 'ULAW') - getcompname() -- returns human-readable version of - compression type ('not compressed' matches 'NONE') - getparams() -- returns a namedtuple consisting of all of the - above in the above order - getmarkers() -- returns None (for compatibility with the - aifc module) - getmark(id) -- raises an error since the mark does not - exist (for compatibility with the aifc module) - readframes(n) -- returns at most n frames of audio - rewind() -- rewind to the beginning of the audio stream - setpos(pos) -- seek to the specified position - tell() -- return the current position - close() -- close the instance (make it unusable) -The position returned by tell() and the position given to setpos() -are compatible and have nothing to do with the actual position in the -file. -The close() method is called automatically when the class instance -is destroyed. - -Writing audio files: - f = sunau.open(file, 'w') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods write(), tell(), seek(), and -close(). 
- -This returns an instance of a class with the following public methods: - setnchannels(n) -- set the number of channels - setsampwidth(n) -- set the sample width - setframerate(n) -- set the frame rate - setnframes(n) -- set the number of frames - setcomptype(type, name) - -- set the compression type and the - human-readable compression type - setparams(tuple)-- set all parameters at once - tell() -- return current position in output file - writeframesraw(data) - -- write audio frames without pathing up the - file header - writeframes(data) - -- write audio frames and patch up the file header - close() -- patch up the file header and close the - output file -You should set the parameters before the first writeframesraw or -writeframes. The total number of frames does not need to be set, -but when it is set to the correct value, the header does not have to -be patched up. -It is best to first set all parameters, perhaps possibly the -compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes(b'') or -close() to patch up the sizes in the header. -The close() method is called automatically when the class instance -is destroyed. -""" - from _typeshed import Unused from typing import IO, Any, Final, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias @@ -127,8 +22,6 @@ AUDIO_FILE_ENCODING_ALAW_8: Final = 27 AUDIO_UNKNOWN_SIZE: Final = 0xFFFFFFFF class _sunau_params(NamedTuple): - """_sunau_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" - nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi index 3eb6e5d171a89..5344ce504c6c7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi @@ -1,5 +1,3 @@ -"""Non-terminal symbols of Python grammar (from "graminit.h").""" - from typing import Final single_input: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi index 14d5f762b5e2a..a727b878688ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi @@ -1,5 +1,3 @@ -"""Interface to the compiler's internal symbol tables""" - import sys from _collections_abc import dict_keys from collections.abc import Sequence @@ -11,12 +9,7 @@ __all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] if sys.version_info >= (3, 13): __all__ += ["SymbolTableType"] -def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: - """Return the toplevel *SymbolTable* for the source code. - - *filename* is the name of the file with the code - and *compile_type* is the *compile()* mode argument. - """ +def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: ... if sys.version_info >= (3, 13): from enum import StrEnum @@ -33,171 +26,63 @@ if sys.version_info >= (3, 13): class SymbolTable: def __init__(self, raw_table: Any, filename: str) -> None: ... if sys.version_info >= (3, 13): - def get_type(self) -> SymbolTableType: - """Return the type of the symbol table. - - The value returned is one of the values in - the ``SymbolTableType`` enumeration. - """ + def get_type(self) -> SymbolTableType: ... else: - def get_type(self) -> str: - """Return the type of the symbol table. 
- - The values returned are 'class', 'module', 'function', - 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. - """ - - def get_id(self) -> int: - """Return an identifier for the table.""" - - def get_name(self) -> str: - """Return the table's name. - - This corresponds to the name of the class, function - or 'top' if the table is for a class, function or - global respectively. - """ - - def get_lineno(self) -> int: - """Return the number of the first line in the - block for the table. - """ - - def is_optimized(self) -> bool: - """Return *True* if the locals in the table - are optimizable. - """ - - def is_nested(self) -> bool: - """Return *True* if the block is a nested class - or function. - """ - - def has_children(self) -> bool: - """Return *True* if the block has nested namespaces.""" - - def get_identifiers(self) -> dict_keys[str, int]: - """Return a view object containing the names of symbols in the table.""" - - def lookup(self, name: str) -> Symbol: - """Lookup a *name* in the table. - - Returns a *Symbol* instance. - """ - - def get_symbols(self) -> list[Symbol]: - """Return a list of *Symbol* instances for - names in the table. - """ - - def get_children(self) -> list[SymbolTable]: - """Return a list of the nested symbol tables.""" + def get_type(self) -> str: ... + + def get_id(self) -> int: ... + def get_name(self) -> str: ... + def get_lineno(self) -> int: ... + def is_optimized(self) -> bool: ... + def is_nested(self) -> bool: ... + def has_children(self) -> bool: ... + def get_identifiers(self) -> dict_keys[str, int]: ... + def lookup(self, name: str) -> Symbol: ... + def get_symbols(self) -> list[Symbol]: ... + def get_children(self) -> list[SymbolTable]: ... class Function(SymbolTable): - def get_parameters(self) -> tuple[str, ...]: - """Return a tuple of parameters to the function.""" - - def get_locals(self) -> tuple[str, ...]: - """Return a tuple of locals in the function.""" - - def get_globals(self) -> tuple[str, ...]: - """Return a tuple of globals in the function.""" - - def get_frees(self) -> tuple[str, ...]: - """Return a tuple of free variables in the function.""" - - def get_nonlocals(self) -> tuple[str, ...]: - """Return a tuple of nonlocals in the function.""" + def get_parameters(self) -> tuple[str, ...]: ... + def get_locals(self) -> tuple[str, ...]: ... + def get_globals(self) -> tuple[str, ...]: ... + def get_frees(self) -> tuple[str, ...]: ... + def get_nonlocals(self) -> tuple[str, ...]: ... class Class(SymbolTable): if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") - def get_methods(self) -> tuple[str, ...]: - """Return a tuple of methods declared in the class.""" + def get_methods(self) -> tuple[str, ...]: ... else: - def get_methods(self) -> tuple[str, ...]: - """Return a tuple of methods declared in the class.""" + def get_methods(self) -> tuple[str, ...]: ... class Symbol: def __init__( self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False ) -> None: ... - def is_nonlocal(self) -> bool: - """Return *True* if the symbol is nonlocal.""" - - def get_name(self) -> str: - """Return a name of a symbol.""" - - def is_referenced(self) -> bool: - """Return *True* if the symbol is used in - its block. - """ - - def is_parameter(self) -> bool: - """Return *True* if the symbol is a parameter.""" + def is_nonlocal(self) -> bool: ... + def get_name(self) -> str: ... + def is_referenced(self) -> bool: ... 
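[Editor's note] The symtable docstrings removed above describe symtable(), SymbolTable, Function and Symbol. A short sketch of how those stubs are typically exercised; the sample source string is illustrative:

    import symtable

    table = symtable.symtable("def f(x):\n    return x + y\n", "<example>", "exec")
    func = table.get_children()[0]      # the Function table for f
    print(func.get_parameters())        # ('x',)
    print(func.get_globals())           # 'y' is referenced but never bound here
    sym = func.lookup("x")
    print(sym.is_parameter(), sym.is_local())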
+ def is_parameter(self) -> bool: ... if sys.version_info >= (3, 14): - def is_type_parameter(self) -> bool: - """Return *True* if the symbol is a type parameter.""" - - def is_global(self) -> bool: - """Return *True* if the symbol is global.""" - - def is_declared_global(self) -> bool: - """Return *True* if the symbol is declared global - with a global statement. - """ - - def is_local(self) -> bool: - """Return *True* if the symbol is local.""" + def is_type_parameter(self) -> bool: ... - def is_annotated(self) -> bool: - """Return *True* if the symbol is annotated.""" - - def is_free(self) -> bool: - """Return *True* if a referenced symbol is - not assigned to. - """ + def is_global(self) -> bool: ... + def is_declared_global(self) -> bool: ... + def is_local(self) -> bool: ... + def is_annotated(self) -> bool: ... + def is_free(self) -> bool: ... if sys.version_info >= (3, 14): - def is_free_class(self) -> bool: - """Return *True* if a class-scoped symbol is free from - the perspective of a method. - """ - - def is_imported(self) -> bool: - """Return *True* if the symbol is created from - an import statement. - """ + def is_free_class(self) -> bool: ... - def is_assigned(self) -> bool: - """Return *True* if a symbol is assigned to.""" + def is_imported(self) -> bool: ... + def is_assigned(self) -> bool: ... if sys.version_info >= (3, 14): - def is_comp_iter(self) -> bool: - """Return *True* if the symbol is a comprehension iteration variable.""" - - def is_comp_cell(self) -> bool: - """Return *True* if the symbol is a cell in an inlined comprehension.""" - - def is_namespace(self) -> bool: - """Returns *True* if name binding introduces new namespace. - - If the name is used as the target of a function or class - statement, this will be true. - - Note that a single name can be bound to multiple objects. If - is_namespace() is true, the name may also be bound to other - objects, like an int or list, that does not introduce a new - namespace. - """ - - def get_namespaces(self) -> Sequence[SymbolTable]: - """Return a list of namespaces bound to this name""" - - def get_namespace(self) -> SymbolTable: - """Return the single namespace bound to this name. + def is_comp_iter(self) -> bool: ... + def is_comp_cell(self) -> bool: ... - Raises ValueError if the name is bound to multiple namespaces - or no namespace. - """ + def is_namespace(self) -> bool: ... + def get_namespaces(self) -> Sequence[SymbolTable]: ... + def get_namespace(self) -> SymbolTable: ... class SymbolTableFactory: def new(self, table: Any, filename: str) -> SymbolTable: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index 21514c7609d2d..7807b0eab01f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -1,75 +1,3 @@ -"""This module provides access to some objects used or maintained by the -interpreter and to functions that interact strongly with the interpreter. - -Dynamic objects: - -argv -- command line arguments; argv[0] is the script pathname if known -path -- module search path; path[0] is the script directory, else '' -modules -- dictionary of loaded modules - -displayhook -- called to show results in an interactive session -excepthook -- called to handle any uncaught exception other than SystemExit - To customize printing in an interactive session or to install a custom - top-level exception handler, assign other functions to replace these. 
- -stdin -- standard input file object; used by input() -stdout -- standard output file object; used by print() -stderr -- standard error object; used for error messages - By assigning other file objects (or objects that behave like files) - to these, it is possible to redirect all of the interpreter's I/O. - -last_exc - the last uncaught exception - Only available in an interactive session after a - traceback has been printed. -last_type -- type of last uncaught exception -last_value -- value of last uncaught exception -last_traceback -- traceback of last uncaught exception - These three are the (deprecated) legacy representation of last_exc. - -Static objects: - -builtin_module_names -- tuple of module names built into this interpreter -copyright -- copyright notice pertaining to this interpreter -exec_prefix -- prefix used to find the machine-specific Python library -executable -- absolute path of the executable binary of the Python interpreter -float_info -- a named tuple with information about the float implementation. -float_repr_style -- string indicating the style of repr() output for floats -hash_info -- a named tuple with information about the hash algorithm. -hexversion -- version information encoded as a single integer -implementation -- Python implementation information. -int_info -- a named tuple with information about the int implementation. -maxsize -- the largest supported length of containers. -maxunicode -- the value of the largest Unicode code point -platform -- platform identifier -prefix -- prefix used to find the Python library -thread_info -- a named tuple with information about the thread implementation. -version -- the version of this interpreter as a string -version_info -- version information as a named tuple -__stdin__ -- the original stdin; don't touch! -__stdout__ -- the original stdout; don't touch! -__stderr__ -- the original stderr; don't touch! -__displayhook__ -- the original displayhook; don't touch! -__excepthook__ -- the original excepthook; don't touch! - -Functions: - -displayhook() -- print an object to the screen, and save it in builtins._ -excepthook() -- print an exception and its traceback to sys.stderr -exception() -- return the current thread's active exception -exc_info() -- return information about the current thread's active exception -exit() -- exit the interpreter by raising SystemExit -getdlopenflags() -- returns flags to be used for dlopen() calls -getprofile() -- get the global profiling function -getrefcount() -- return the reference count for an object (plus one :-) -getrecursionlimit() -- return the max recursion depth for the interpreter -getsizeof() -- return the size of an object in bytes -gettrace() -- get the global debug tracing function -setdlopenflags() -- set the flags to be used for dlopen() calls -setprofile() -- set the global profiling function -setrecursionlimit() -- set the max recursion depth for the interpreter -settrace() -- set the global debug tracing function -""" - import sys from _typeshed import MaybeNone, OptExcInfo, ProfileFunction, StrOrBytesPath, TraceFunction, structseq from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol @@ -174,11 +102,6 @@ flags: _flags @final @type_check_only class _flags(_UninstantiableStructseq, tuple[int, ...]): - """sys.flags - - Flags provided through command line arguments or environment vars. 
- """ - # `safe_path` was added in py311 if sys.version_info >= (3, 11): __match_args__: Final = ( @@ -223,92 +146,56 @@ class _flags(_UninstantiableStructseq, tuple[int, ...]): ) @property - def debug(self) -> int: - """-d""" - + def debug(self) -> int: ... @property - def inspect(self) -> int: - """-i""" - + def inspect(self) -> int: ... @property - def interactive(self) -> int: - """-i""" - + def interactive(self) -> int: ... @property - def optimize(self) -> int: - """-O or -OO""" - + def optimize(self) -> int: ... @property - def dont_write_bytecode(self) -> int: - """-B""" - + def dont_write_bytecode(self) -> int: ... @property - def no_user_site(self) -> int: - """-s""" - + def no_user_site(self) -> int: ... @property - def no_site(self) -> int: - """-S""" - + def no_site(self) -> int: ... @property - def ignore_environment(self) -> int: - """-E""" - + def ignore_environment(self) -> int: ... @property - def verbose(self) -> int: - """-v""" - + def verbose(self) -> int: ... @property - def bytes_warning(self) -> int: - """-b""" - + def bytes_warning(self) -> int: ... @property - def quiet(self) -> int: - """-q""" - + def quiet(self) -> int: ... @property - def hash_randomization(self) -> int: - """-R""" - + def hash_randomization(self) -> int: ... @property - def isolated(self) -> int: - """-I""" - + def isolated(self) -> int: ... @property - def dev_mode(self) -> bool: - """-X dev""" - + def dev_mode(self) -> bool: ... @property - def utf8_mode(self) -> int: - """-X utf8""" + def utf8_mode(self) -> int: ... if sys.version_info >= (3, 10): @property - def warn_default_encoding(self) -> int: - """-X warn_default_encoding""" + def warn_default_encoding(self) -> int: ... if sys.version_info >= (3, 11): @property - def safe_path(self) -> bool: - """-P""" + def safe_path(self) -> bool: ... if sys.version_info >= (3, 13): @property - def gil(self) -> Literal[0, 1]: - """-X gil""" + def gil(self) -> Literal[0, 1]: ... if sys.version_info >= (3, 14): @property - def thread_inherit_context(self) -> Literal[0, 1]: - """-X thread_inherit_context""" - + def thread_inherit_context(self) -> Literal[0, 1]: ... @property - def context_aware_warnings(self) -> Literal[0, 1]: - """-X context_aware_warnings""" + def context_aware_warnings(self) -> Literal[0, 1]: ... # Whether or not this exists on lower versions of Python # may depend on which patch release you're using # (it was backported to all Python versions on 3.8+ as a security fix) # Added in: 3.9.14, 3.10.7 # and present in all versions of 3.11 and later. @property - def int_max_str_digits(self) -> int: - """-X int_max_str_digits""" + def int_max_str_digits(self) -> int: ... float_info: _float_info @@ -316,13 +203,6 @@ float_info: _float_info @final @type_check_only class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): - """sys.float_info - - A named tuple holding information about the float type. It contains low level - information about the precision and internal representation. Please study - your system's :file:`float.h` for more information. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ( "max", @@ -339,48 +219,27 @@ class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, ) @property - def max(self) -> float: # DBL_MAX - """DBL_MAX -- maximum representable finite float""" - + def max(self) -> float: ... 
# DBL_MAX @property - def max_exp(self) -> int: # DBL_MAX_EXP - """DBL_MAX_EXP -- maximum int e such that radix**(e-1) is representable""" - + def max_exp(self) -> int: ... # DBL_MAX_EXP @property - def max_10_exp(self) -> int: # DBL_MAX_10_EXP - """DBL_MAX_10_EXP -- maximum int e such that 10**e is representable""" - + def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP @property - def min(self) -> float: # DBL_MIN - """DBL_MIN -- Minimum positive normalized float""" - + def min(self) -> float: ... # DBL_MIN @property - def min_exp(self) -> int: # DBL_MIN_EXP - """DBL_MIN_EXP -- minimum int e such that radix**(e-1) is a normalized float""" - + def min_exp(self) -> int: ... # DBL_MIN_EXP @property - def min_10_exp(self) -> int: # DBL_MIN_10_EXP - """DBL_MIN_10_EXP -- minimum int e such that 10**e is a normalized float""" - + def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP @property - def dig(self) -> int: # DBL_DIG - """DBL_DIG -- maximum number of decimal digits that can be faithfully represented in a float""" - + def dig(self) -> int: ... # DBL_DIG @property - def mant_dig(self) -> int: # DBL_MANT_DIG - """DBL_MANT_DIG -- mantissa digits""" - + def mant_dig(self) -> int: ... # DBL_MANT_DIG @property - def epsilon(self) -> float: # DBL_EPSILON - """DBL_EPSILON -- Difference between 1 and the next representable float""" - + def epsilon(self) -> float: ... # DBL_EPSILON @property - def radix(self) -> int: # FLT_RADIX - """FLT_RADIX -- radix of exponent""" - + def radix(self) -> int: ... # FLT_RADIX @property - def rounds(self) -> int: # FLT_ROUNDS - """FLT_ROUNDS -- rounding mode used for arithmetic operations""" + def rounds(self) -> int: ... # FLT_ROUNDS hash_info: _hash_info @@ -388,50 +247,27 @@ hash_info: _hash_info @final @type_check_only class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): - """hash_info - - A named tuple providing parameters used for computing - hashes. The attributes are read only. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("width", "modulus", "inf", "nan", "imag", "algorithm", "hash_bits", "seed_bits", "cutoff") @property - def width(self) -> int: - """width of the type used for hashing, in bits""" - + def width(self) -> int: ... @property - def modulus(self) -> int: - """prime number giving the modulus on which the hash function is based""" - + def modulus(self) -> int: ... @property - def inf(self) -> int: - """value to be used for hash of a positive infinity""" - + def inf(self) -> int: ... @property - def nan(self) -> int: - """value to be used for hash of a nan""" - + def nan(self) -> int: ... @property - def imag(self) -> int: - """multiplier used for the imaginary part of a complex number""" - + def imag(self) -> int: ... @property - def algorithm(self) -> str: - """name of the algorithm for hashing of str, bytes and memoryviews""" - + def algorithm(self) -> str: ... @property - def hash_bits(self) -> int: - """internal output size of hash algorithm""" - + def hash_bits(self) -> int: ... @property - def seed_bits(self) -> int: - """seed size of hash algorithm""" - + def seed_bits(self) -> int: ... @property - def cutoff(self) -> int: # undocumented - """small string optimization cutoff""" + def cutoff(self) -> int: ... 
# undocumented implementation: _implementation @@ -454,30 +290,17 @@ int_info: _int_info @final @type_check_only class _int_info(structseq[int], tuple[int, int, int, int]): - """sys.int_info - - A named tuple that holds information about Python's - internal representation of integers. The attributes are read only. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("bits_per_digit", "sizeof_digit", "default_max_str_digits", "str_digits_check_threshold") @property - def bits_per_digit(self) -> int: - """size of a digit in bits""" - + def bits_per_digit(self) -> int: ... @property - def sizeof_digit(self) -> int: - """size in bytes of the C type used to represent a digit""" - + def sizeof_digit(self) -> int: ... @property - def default_max_str_digits(self) -> int: - """maximum string conversion digits limitation""" - + def default_max_str_digits(self) -> int: ... @property - def str_digits_check_threshold(self) -> int: - """minimum positive value for int_max_str_digits""" + def str_digits_check_threshold(self) -> int: ... _ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"] _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None @@ -486,25 +309,15 @@ _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None @final @type_check_only class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]): - """sys.thread_info - - A named tuple holding information about the thread implementation. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("name", "lock", "version") @property - def name(self) -> _ThreadInfoName: - """name of the thread implementation""" - + def name(self) -> _ThreadInfoName: ... @property - def lock(self) -> _ThreadInfoLock: - """name of the lock implementation""" - + def lock(self) -> _ThreadInfoLock: ... @property - def version(self) -> str | None: - """name and version of the thread library""" + def version(self) -> str | None: ... thread_info: _thread_info _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @@ -513,189 +326,62 @@ _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @final @type_check_only class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]): - """sys.version_info - - Version information as a named tuple. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("major", "minor", "micro", "releaselevel", "serial") @property - def major(self) -> int: - """Major release number""" - + def major(self) -> int: ... @property - def minor(self) -> int: - """Minor release number""" - + def minor(self) -> int: ... @property - def micro(self) -> int: - """Patch release number""" - + def micro(self) -> int: ... @property - def releaselevel(self) -> _ReleaseLevel: - """'alpha', 'beta', 'candidate', or 'final'""" - + def releaselevel(self) -> _ReleaseLevel: ... @property - def serial(self) -> int: - """Serial release number""" + def serial(self) -> int: ... version_info: _version_info -def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: - """Call func(*args), while tracing is enabled. - - The tracing state is saved, and restored afterwards. This is intended - to be called from a debugger from a checkpoint, to recursively debug - some other code. - """ +def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: ... if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13. 
Use `_clear_internal_caches()` instead.") - def _clear_type_cache() -> None: - """Clear the internal type lookup cache.""" + def _clear_type_cache() -> None: ... else: - def _clear_type_cache() -> None: - """Clear the internal type lookup cache.""" - -def _current_frames() -> dict[int, FrameType]: - """Return a dict mapping each thread's thread id to its current stack frame. - - This function should be used for specialized purposes only. - """ - -def _getframe(depth: int = 0, /) -> FrameType: - """Return a frame object from the call stack. + def _clear_type_cache() -> None: ... - If optional integer depth is given, return the frame object that many - calls below the top of the stack. If that is deeper than the call - stack, ValueError is raised. The default for depth is zero, returning - the frame at the top of the call stack. - - This function should be used for internal and specialized purposes - only. - """ +def _current_frames() -> dict[int, FrameType]: ... +def _getframe(depth: int = 0, /) -> FrameType: ... if sys.version_info >= (3, 12): - def _getframemodulename(depth: int = 0) -> str | None: - """Return the name of the module for a calling frame. - - The default depth returns the module containing the call to this API. - A more typical use in a library will pass a depth of 1 to get the user's - module rather than the library module. - - If no frame, module, or name can be found, returns None. - """ - -def _debugmallocstats() -> None: - """Print summary info to stderr about the state of pymalloc's structures. - - In Py_DEBUG mode, also perform some expensive internal consistency - checks. - """ - -def __displayhook__(object: object, /) -> None: - """Print an object to sys.stdout and also save it in builtins._""" - -def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: - """Handle an exception by displaying it with a traceback on sys.stderr.""" - -def exc_info() -> OptExcInfo: - """Return current exception information: (type, value, traceback). + def _getframemodulename(depth: int = 0) -> str | None: ... - Return information about the most recent exception caught by an except - clause in the current stack frame or in an older stack frame. - """ +def _debugmallocstats() -> None: ... +def __displayhook__(object: object, /) -> None: ... +def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: ... +def exc_info() -> OptExcInfo: ... if sys.version_info >= (3, 11): - def exception() -> BaseException | None: - """Return the current exception. + def exception() -> BaseException | None: ... - Return the most recent exception caught by an except clause - in the current stack frame or in an older stack frame, or None - if no such exception exists. - """ - -def exit(status: _ExitCode = None, /) -> NoReturn: - """Exit the interpreter by raising SystemExit(status). - - If the status is omitted or None, it defaults to zero (i.e., success). - If the status is an integer, it will be used as the system exit status. - If it is another kind of object, it will be printed and the system - exit status will be one (i.e., failure). - """ - -def getallocatedblocks() -> int: - """Return the number of memory blocks currently allocated.""" - -def getdefaultencoding() -> str: - """Return the current default encoding used by the Unicode implementation.""" +def exit(status: _ExitCode = None, /) -> NoReturn: ... +def getallocatedblocks() -> int: ... +def getdefaultencoding() -> str: ... 
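[Editor's note] The sys docstrings dropped above cover exc_info(), exit(), getsizeof(), getrecursionlimit() and related introspection helpers. A minimal sketch of the documented behavior; printed values vary by platform and build:

    import sys

    print(sys.getdefaultencoding())     # usually 'utf-8'
    print(sys.getsizeof([]), "bytes")   # size of an empty list object
    print(sys.getrecursionlimit())      # commonly 1000 by default

    try:
        1 / 0
    except ZeroDivisionError:
        exc_type, exc_value, tb = sys.exc_info()   # (type, value, traceback)
        print(exc_type.__name__, exc_value)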
if sys.platform != "win32": - def getdlopenflags() -> int: - """Return the current value of the flags that are used for dlopen calls. - - The flag constants are defined in the os module. - """ - -def getfilesystemencoding() -> str: - """Return the encoding used to convert Unicode filenames to OS filenames.""" - -def getfilesystemencodeerrors() -> str: - """Return the error mode used Unicode to OS filename conversion.""" - -def getrefcount(object: Any, /) -> int: - """Return the reference count of object. - - The count returned is generally one higher than you might expect, - because it includes the (temporary) reference as an argument to - getrefcount(). - """ - -def getrecursionlimit() -> int: - """Return the current value of the recursion limit. - - The recursion limit is the maximum depth of the Python interpreter - stack. This limit prevents infinite recursion from causing an overflow - of the C stack and crashing Python. - """ - -def getsizeof(obj: object, default: int = ...) -> int: - """getsizeof(object [, default]) -> int - - Return the size of object in bytes. - """ - -def getswitchinterval() -> float: - """Return the current thread switch interval; see sys.setswitchinterval().""" - -def getprofile() -> ProfileFunction | None: - """Return the profiling function set with sys.setprofile. - - See the profiler chapter in the library manual. - """ - -def setprofile(function: ProfileFunction | None, /) -> None: - """Set the profiling function. - - It will be called on each function call and return. See the profiler - chapter in the library manual. - """ - -def gettrace() -> TraceFunction | None: - """Return the global debug tracing function set with sys.settrace. - - See the debugger chapter in the library manual. - """ - -def settrace(function: TraceFunction | None, /) -> None: - """Set the global debug tracing function. - - It will be called on each function call. See the debugger chapter - in the library manual. - """ + def getdlopenflags() -> int: ... + +def getfilesystemencoding() -> str: ... +def getfilesystemencodeerrors() -> str: ... +def getrefcount(object: Any, /) -> int: ... +def getrecursionlimit() -> int: ... +def getsizeof(obj: object, default: int = ...) -> int: ... +def getswitchinterval() -> float: ... +def getprofile() -> ProfileFunction | None: ... +def setprofile(function: ProfileFunction | None, /) -> None: ... +def gettrace() -> TraceFunction | None: ... +def settrace(function: TraceFunction | None, /) -> None: ... if sys.platform == "win32": # A tuple of length 5, even though it has more than 5 attributes. @@ -723,78 +409,25 @@ if sys.platform == "win32": @property def platform_version(self) -> tuple[int, int, int]: ... - def getwindowsversion() -> _WinVersion: - """Return info about the running version of Windows as a named tuple. + def getwindowsversion() -> _WinVersion: ... - The members are named: major, minor, build, platform, service_pack, - service_pack_major, service_pack_minor, suite_mask, product_type and - platform_version. For backward compatibility, only the first 5 items - are available by indexing. All elements are numbers, except - service_pack and platform_type which are strings, and platform_version - which is a 3-tuple. Platform is always 2. Product_type may be 1 for a - workstation, 2 for a domain controller, 3 for a server. - Platform_version is a 3-tuple containing a version number that is - intended for identifying the OS rather than feature detection. - """ - -def intern(string: str, /) -> str: - """``Intern'' the given string. 
- - This enters the string in the (global) table of interned strings whose - purpose is to speed up dictionary lookups. Return the string itself or - the previously interned string object with the same value. - """ +def intern(string: str, /) -> str: ... if sys.version_info >= (3, 13): - def _is_gil_enabled() -> bool: - """Return True if the GIL is currently enabled and False otherwise.""" - - def _clear_internal_caches() -> None: - """Clear all internal performance-related caches.""" - - def _is_interned(string: str, /) -> bool: - """Return True if the given string is "interned".""" + def _is_gil_enabled() -> bool: ... + def _clear_internal_caches() -> None: ... + def _is_interned(string: str, /) -> bool: ... -def is_finalizing() -> bool: - """Return True if Python is exiting.""" - -def breakpointhook(*args: Any, **kwargs: Any) -> Any: - """This hook function is called by built-in breakpoint().""" +def is_finalizing() -> bool: ... +def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... __breakpointhook__ = breakpointhook # Contains the original value of breakpointhook if sys.platform != "win32": - def setdlopenflags(flags: int, /) -> None: - """Set the flags used by the interpreter for dlopen calls. - - This is used, for example, when the interpreter loads extension - modules. Among other things, this will enable a lazy resolving of - symbols when importing a module, if called as sys.setdlopenflags(0). - To share symbols across extension modules, call as - sys.setdlopenflags(os.RTLD_GLOBAL). Symbolic names for the flag - modules can be found in the os module (RTLD_xxx constants, e.g. - os.RTLD_LAZY). - """ - -def setrecursionlimit(limit: int, /) -> None: - """Set the maximum depth of the Python interpreter stack to n. - - This limit prevents infinite recursion from causing an overflow of the C - stack and crashing Python. The highest possible limit is platform- - dependent. - """ - -def setswitchinterval(interval: float, /) -> None: - """Set the ideal thread switching delay inside the Python interpreter. - - The actual frequency of switching threads can be lower if the - interpreter executes long sequences of uninterruptible code - (this is implementation-specific and workload-dependent). - - The parameter must represent the desired switching delay in seconds - A typical value is 0.005 (5 milliseconds). - """ + def setdlopenflags(flags: int, /) -> None: ... +def setrecursionlimit(limit: int, /) -> None: ... +def setswitchinterval(interval: float, /) -> None: ... def gettotalrefcount() -> int: ... # Debug builds only # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. @@ -808,23 +441,9 @@ class UnraisableHookArgs(Protocol): unraisablehook: Callable[[UnraisableHookArgs], Any] -def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: - """Handle an unraisable exception. - - The unraisable argument has the following attributes: - - * exc_type: Exception type. - * exc_value: Exception value, can be None. - * exc_traceback: Exception traceback, can be None. - * err_msg: Error message, can be None. - * object: Object causing the exception, can be None. - """ - -def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: - """Adds a new audit hook callback.""" - -def audit(event: str, /, *args: Any) -> None: - """Passes the event to any audit hooks that are attached.""" +def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: ... +def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... 
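[Editor's note] The removed docstrings in this hunk cover addaudithook() and audit(). A brief sketch of the audit-hook flow described there; the event name and payload are illustrative:

    import sys

    def hook(event: str, args: tuple) -> None:
        # Audit hooks receive every auditing event raised by the runtime
        # and by explicit sys.audit() calls like the one below.
        if event == "example.request":
            print("audited:", args)

    sys.addaudithook(hook)
    sys.audit("example.request", "payload")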
+def audit(event: str, /, *args: Any) -> None: ... _AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None @@ -840,17 +459,8 @@ class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHo @property def finalizer(self) -> _AsyncgenHook: ... -def get_asyncgen_hooks() -> _asyncgen_hooks: - """Return the installed asynchronous generators hooks. - - This returns a namedtuple of the form (firstiter, finalizer). - """ - -def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: - """set_asyncgen_hooks([firstiter] [, finalizer]) - - Set a finalizer for async generators objects. - """ +def get_asyncgen_hooks() -> _asyncgen_hooks: ... +def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... if sys.platform == "win32": if sys.version_info >= (3, 13): @@ -858,94 +468,36 @@ if sys.platform == "win32": "Deprecated since Python 3.13; will be removed in Python 3.16. " "Use the `PYTHONLEGACYWINDOWSFSENCODING` environment variable instead." ) - def _enablelegacywindowsfsencoding() -> None: - """Changes the default filesystem encoding to mbcs:replace. - - This is done for consistency with earlier versions of Python. See PEP - 529 for more information. - - This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING - environment variable before launching Python. - """ + def _enablelegacywindowsfsencoding() -> None: ... else: - def _enablelegacywindowsfsencoding() -> None: - """Changes the default filesystem encoding to mbcs:replace. - - This is done for consistency with earlier versions of Python. See PEP - 529 for more information. - - This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING - environment variable before launching Python. - """ + def _enablelegacywindowsfsencoding() -> None: ... -def get_coroutine_origin_tracking_depth() -> int: - """Check status of origin tracking for coroutine objects in this thread.""" - -def set_coroutine_origin_tracking_depth(depth: int) -> None: - """Enable or disable origin tracking for coroutine objects in this thread. - - Coroutine objects will track 'depth' frames of traceback information - about where they came from, available in their cr_origin attribute. - - Set a depth of 0 to disable. - """ +def get_coroutine_origin_tracking_depth() -> int: ... +def set_coroutine_origin_tracking_depth(depth: int) -> None: ... # The following two functions were added in 3.11.0, 3.10.7, and 3.9.14, # as part of the response to CVE-2020-10735 -def set_int_max_str_digits(maxdigits: int) -> None: - """Set the maximum string digits limit for non-binary int<->str conversions.""" - -def get_int_max_str_digits() -> int: - """Return the maximum string digits limit for non-binary int<->str conversions.""" +def set_int_max_str_digits(maxdigits: int) -> None: ... +def get_int_max_str_digits() -> int: ... if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): - def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: - """Return the number of elements of the unicode interned dictionary""" + def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: ... else: - def getunicodeinternedsize() -> int: - """Return the number of elements of the unicode interned dictionary""" + def getunicodeinternedsize() -> int: ... - def deactivate_stack_trampoline() -> None: - """Deactivate the current stack profiler trampoline backend. - - If no stack profiler is activated, this function has no effect. 
- """ - - def is_stack_trampoline_active() -> bool: - """Return *True* if a stack profiler trampoline is active.""" + def deactivate_stack_trampoline() -> None: ... + def is_stack_trampoline_active() -> bool: ... # It always exists, but raises on non-linux platforms: if sys.platform == "linux": - def activate_stack_trampoline(backend: str, /) -> None: - """Activate stack profiler trampoline *backend*.""" + def activate_stack_trampoline(backend: str, /) -> None: ... else: - def activate_stack_trampoline(backend: str, /) -> NoReturn: - """Activate stack profiler trampoline *backend*.""" + def activate_stack_trampoline(backend: str, /) -> NoReturn: ... + from . import _monitoring monitoring = _monitoring if sys.version_info >= (3, 14): - def is_remote_debug_enabled() -> bool: - """Return True if remote debugging is enabled, False otherwise.""" - - def remote_exec(pid: int, script: StrOrBytesPath) -> None: - """Executes a file containing Python code in a given remote Python process. - - This function returns immediately, and the code will be executed by the - target process's main thread at the next available opportunity, similarly - to how signals are handled. There is no interface to determine when the - code has been executed. The caller is responsible for making sure that - the file still exists whenever the remote process tries to read it and that - it hasn't been overwritten. - - The remote process must be running a CPython interpreter of the same major - and minor version as the local process. If either the local or remote - interpreter is pre-release (alpha, beta, or release candidate) then the - local and remote interpreters must be the same exact version. - - Args: - pid (int): The process ID of the target Python process. - script (str|bytes): The path to a file containing - the Python code to be executed. - """ + def is_remote_debug_enabled() -> bool: ... + def remote_exec(pid: int, script: StrOrBytesPath) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi index 747771c9e9aba..807a979050e80 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi @@ -1,5 +1,3 @@ -"""Access to Python's configuration information.""" - import sys from typing import IO, Any, Literal, overload from typing_extensions import deprecated @@ -20,75 +18,24 @@ __all__ = [ @overload @deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") -def get_config_var(name: Literal["SO"]) -> Any: - """Return the value of a single variable using the dictionary returned by - 'get_config_vars()'. - - Equivalent to get_config_vars().get(name) - """ - +def get_config_var(name: Literal["SO"]) -> Any: ... @overload def get_config_var(name: str) -> Any: ... @overload -def get_config_vars() -> dict[str, Any]: - """With no arguments, return a dictionary of all configuration - variables relevant for the current platform. - - On Unix, this means every variable defined in Python's installed Makefile; - On Windows it's a much smaller set. - - With arguments, return a list of values that result from looking up - each argument in the configuration variable dictionary. - """ - +def get_config_vars() -> dict[str, Any]: ... @overload def get_config_vars(arg: str, /, *args: str) -> list[Any]: ... -def get_scheme_names() -> tuple[str, ...]: - """Return a tuple containing the schemes names.""" +def get_scheme_names() -> tuple[str, ...]: ... 
if sys.version_info >= (3, 10): def get_default_scheme() -> str: ... def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... -def get_path_names() -> tuple[str, ...]: - """Return a tuple containing the paths names.""" - -def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: - """Return a path corresponding to the scheme. - - ``scheme`` is the install scheme name. - """ - -def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: - """Return a mapping containing an install scheme. - - ``scheme`` is the install scheme name. If not provided, it will - return the default scheme for the current platform. - """ - +def get_path_names() -> tuple[str, ...]: ... +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... def get_python_version() -> str: ... -def get_platform() -> str: - """Return a string that identifies the current platform. - - This is used mainly to distinguish platform-specific build directories and - platform-specific built distributions. Typically includes the OS name and - version and the architecture (as supplied by 'os.uname()'), although the - exact information included depends on the OS; on Linux, the kernel version - isn't particularly important. - - Examples of returned values: - linux-i586 - linux-alpha (?) - solaris-2.6-sun4u - - Windows will return one of: - win-amd64 (64-bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) - win-arm64 (64-bit Windows on ARM64 (aka AArch64) - win32 (all others - specifically, sys.platform is returned) - - For other non-POSIX platforms, currently just returns 'sys.platform'. - - """ +def get_platform() -> str: ... if sys.version_info >= (3, 11): def is_python_build(check_home: object = None) -> bool: ... @@ -96,16 +43,6 @@ if sys.version_info >= (3, 11): else: def is_python_build(check_home: bool = False) -> bool: ... -def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: - """Parse a config.h-style file. - - A dictionary containing name/value pairs is returned. If an - optional dictionary is passed in as the second argument, it is - used instead of a new dictionary. - """ - -def get_config_h_filename() -> str: - """Return the path of pyconfig.h.""" - -def get_makefile_filename() -> str: - """Return the path of the Makefile.""" +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi index 8300ab0f7ce23..1e0d0d3839022 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi @@ -46,26 +46,12 @@ if sys.platform != "win32": LOG_RAS: Final = 120 LOG_REMOTEAUTH: Final = 104 - def LOG_MASK(pri: int, /) -> int: - """Calculates the mask for the individual priority pri.""" - - def LOG_UPTO(pri: int, /) -> int: - """Calculates the mask for all priorities up to and including pri.""" - - def closelog() -> None: - """Reset the syslog module values and call the system library closelog().""" - - def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) 
-> None: - """Set logging options of subsequent syslog() calls.""" - - def setlogmask(maskpri: int, /) -> int: - """Set the priority mask to maskpri and return the previous mask value.""" - + def LOG_MASK(pri: int, /) -> int: ... + def LOG_UPTO(pri: int, /) -> int: ... + def closelog() -> None: ... + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... + def setlogmask(maskpri: int, /) -> int: ... @overload - def syslog(priority: int, message: str) -> None: - """syslog([priority=LOG_INFO,] message) - Send the string message to the system logger. - """ - + def syslog(priority: int, message: str) -> None: ... @overload def syslog(message: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi index 84a7f691aeae8..8a8592f441242 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi @@ -1,15 +1,3 @@ -"""The Tab Nanny despises ambiguous indentation. She knows no mercy. - -tabnanny -- Detection of ambiguous indentation - -For the time being this module is intended to be called as a script. -However it is possible to import it into an IDE and use the function -check() described below. - -Warning: The API provided by this module is likely to change in future -releases; such changes may not be backward compatible. -""" - from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -19,24 +7,10 @@ verbose: int filename_only: int class NannyNag(Exception): - """ - Raised by process_tokens() if detecting an ambiguous indent. - Captured and handled in check(). - """ - def __init__(self, lineno: int, msg: str, line: str) -> None: ... def get_lineno(self) -> int: ... def get_msg(self) -> str: ... def get_line(self) -> str: ... -def check(file: StrOrBytesPath) -> None: - """check(file_or_dir) - - If file_or_dir is a directory and not a symbolic link, then recursively - descend the directory tree named by file_or_dir, checking all .py files - along the way. If file_or_dir is an ordinary Python source file, it is - checked for whitespace related problems. The diagnostic messages are - written to standard output using the print statement. - """ - +def check(file: StrOrBytesPath) -> None: ... def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi index 389e24eb4c1d6..f6623ea9929d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi @@ -1,5 +1,3 @@ -"""Read from and write to tar format archives.""" - import bz2 import io import sys @@ -118,8 +116,6 @@ class ExFileObject(io.BufferedReader): # undocumented def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... class TarFile: - """The TarFile Class provides an interface to tar archives.""" - OPEN_METH: ClassVar[Mapping[str, str]] name: StrOrBytesPath | None mode: Literal["r", "a", "w", "x"] @@ -154,15 +150,7 @@ class TarFile: errorlevel: int | None = None, copybufsize: int | None = None, # undocumented stream: bool = False, - ) -> None: - """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. 'mode' - defaults to 'r'. 
- If 'fileobj' is given, it is used for reading or writing data. If it - can be determined, 'mode' is overridden by 'fileobj's mode. - 'fileobj' is not closed, when TarFile is closed. - """ + ) -> None: ... else: def __init__( self, @@ -179,23 +167,13 @@ class TarFile: debug: int | None = None, errorlevel: int | None = None, copybufsize: int | None = None, # undocumented - ) -> None: - """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. `mode' - defaults to 'r'. - If `fileobj' is given, it is used for reading or writing data. If it - can be determined, `mode' is overridden by `fileobj's mode. - `fileobj' is not closed, when TarFile is closed. - """ + ) -> None: ... def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self) -> Iterator[TarInfo]: - """Provide an iterator object.""" - + def __iter__(self) -> Iterator[TarInfo]: ... @overload @classmethod def open( @@ -214,47 +192,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. - - mode: - 'r' or 'r:*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'r:xz' open for reading with lzma compression - 'r:zst' open for reading with zstd compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - 'w:xz' open for writing with lzma compression - 'w:zst' open for writing with zstd compression - - 'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created - 'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created - 'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created - 'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created - 'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - - 'r|*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'r|xz' open an lzma compressed stream of tar blocks - 'r|zst' open a zstd compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - 'w|xz' open an lzma compressed stream for writing - 'w|zst' open a zstd compressed stream for writing - """ + ) -> Self: ... if sys.version_info >= (3, 14): @overload @classmethod @@ -277,47 +215,7 @@ class TarFile: level: None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> Self: - """Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. 
- - mode: - 'r' or 'r:*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'r:xz' open for reading with lzma compression - 'r:zst' open for reading with zstd compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - 'w:xz' open for writing with lzma compression - 'w:zst' open for writing with zstd compression - - 'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created - 'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created - 'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created - 'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created - 'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - - 'r|*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'r|xz' open an lzma compressed stream of tar blocks - 'r|zst' open a zstd compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - 'w|xz' open an lzma compressed stream for writing - 'w|zst' open a zstd compressed stream for writing - """ + ) -> Self: ... @overload @classmethod @@ -458,48 +356,7 @@ class TarFile: errorlevel: int | None = ..., options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> Self: - """Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. 
- - mode: - 'r' or 'r:*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'r:xz' open for reading with lzma compression - 'r:zst' open for reading with zstd compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - 'w:xz' open for writing with lzma compression - 'w:zst' open for writing with zstd compression - - 'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created - 'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created - 'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created - 'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created - 'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - - 'r|*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'r|xz' open an lzma compressed stream of tar blocks - 'r|zst' open a zstd compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - 'w|xz' open an lzma compressed stream for writing - 'w|zst' open a zstd compressed stream for writing - """ - + ) -> Self: ... @overload @classmethod def open( @@ -654,9 +511,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open uncompressed tar archive name for reading or writing.""" - + ) -> Self: ... @overload @classmethod def gzopen( @@ -674,11 +529,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open gzip compressed tar archive name for reading or writing. - Appending is not allowed. - """ - + ) -> Self: ... @overload @classmethod def gzopen( @@ -714,11 +565,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open bzip2 compressed tar archive name for reading or writing. - Appending is not allowed. - """ - + ) -> Self: ... @overload @classmethod def bz2open( @@ -753,10 +600,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open lzma compressed tar archive name for reading or writing. - Appending is not allowed. - """ + ) -> Self: ... if sys.version_info >= (3, 14): @overload @classmethod @@ -777,11 +621,7 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: - """Open zstd compressed tar archive name for reading or writing. - Appending is not allowed. - """ - + ) -> Self: ... @overload @classmethod def zstopen( @@ -803,35 +643,11 @@ class TarFile: errorlevel: int | None = ..., ) -> Self: ... - def getmember(self, name: str) -> TarInfo: - """Return a TarInfo object for member 'name'. If 'name' can not be - found in the archive, KeyError is raised. 
If a member occurs more - than once in the archive, its last occurrence is assumed to be the - most up-to-date version. - """ - - def getmembers(self) -> _list[TarInfo]: - """Return the members of the archive as a list of TarInfo objects. The - list has the same order as the members in the archive. - """ - - def getnames(self) -> _list[str]: - """Return the members of the archive as a list of their names. It has - the same order as the list returned by getmembers(). - """ - - def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: - """Print a table of contents to sys.stdout. If 'verbose' is False, only - the names of the members are printed. If it is True, an 'ls -l'-like - output is produced. 'members' is optional and must be a subset of the - list returned by getmembers(). - """ - - def next(self) -> TarInfo | None: - """Return the next member of the archive as a TarInfo object, when - TarFile is opened for reading. Return None if there is no more - available. - """ + def getmember(self, name: str) -> TarInfo: ... + def getmembers(self) -> _list[TarInfo]: ... + def getnames(self) -> _list[str]: ... + def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: ... + def next(self) -> TarInfo | None: ... # Calling this method without `filter` is deprecated, but it may be set either on the class or in an # individual call, so we can't mark it as @deprecated here. def extractall( @@ -841,19 +657,7 @@ class TarFile: *, numeric_owner: bool = False, filter: _TarfileFilter | None = None, - ) -> None: - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. 'path' specifies a different directory - to extract to. 'members' is optional and must be a subset of the - list returned by getmembers(). If 'numeric_owner' is True, only - the numbers for user/group names are used and not the names. - - The 'filter' function will be called on each member just - before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. - """ + ) -> None: ... # Same situation as for `extractall`. def extract( self, @@ -863,20 +667,7 @@ class TarFile: *, numeric_owner: bool = False, filter: _TarfileFilter | None = None, - ) -> None: - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. 'member' may be a filename or a TarInfo object. You can - specify a different directory using 'path'. File attributes (owner, - mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' - is True, only the numbers for user/group names are used and not - the names. - - The 'filter' function will be called before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. - """ - + ) -> None: ... def _extract_member( self, tarinfo: TarInfo, @@ -886,69 +677,20 @@ class TarFile: *, filter_function: _FilterFunction | None = None, extraction_root: str | None = None, - ) -> None: # undocumented - """Extract the filtered TarInfo object tarinfo to a physical - file called targetpath. - - filter_function is only used when extracting a *different* - member (e.g. as fallback to creating a symlink) - """ - - def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: - """Extract a member from the archive as a file object. 
'member' may be - a filename or a TarInfo object. If 'member' is a regular file or - a link, an io.BufferedReader object is returned. For all other - existing members, None is returned. If 'member' does not appear - in the archive, KeyError is raised. - """ - - def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a directory called targetpath.""" - - def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a file called targetpath.""" - - def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a file from a TarInfo object with an unknown type - at targetpath. - """ - - def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a fifo called targetpath.""" - - def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a character or block device called targetpath.""" - - def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. - """ - + ) -> None: ... # undocumented + def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... + def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented def makelink_with_filter( self, tarinfo: TarInfo, targetpath: StrOrBytesPath, filter_function: _FilterFunction, extraction_root: str - ) -> None: # undocumented - """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. - - filter_function is only used when extracting a *different* - member (e.g. as fallback to creating a link). - """ - - def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: # undocumented - """Set owner of targetpath according to tarinfo. If numeric_owner - is True, use .gid/.uid instead of .gname/.uname. If numeric_owner - is False, fall back to .gid/.uid when the search based on name - fails. - """ - - def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Set file permissions of targetpath according to tarinfo.""" - - def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Set modification time of targetpath according to tarinfo.""" - + ) -> None: ... # undocumented + def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: ... # undocumented + def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented def add( self, name: StrPath, @@ -956,67 +698,23 @@ class TarFile: recursive: bool = True, *, filter: Callable[[TarInfo], TarInfo | None] | None = None, - ) -> None: - """Add the file 'name' to the archive. 
'name' may be any type of file - (directory, fifo, symbolic link, etc.). If given, 'arcname' - specifies an alternative name for the file in the archive. - Directories are added recursively by default. This can be avoided by - setting 'recursive' to False. 'filter' is a function - that expects a TarInfo object argument and returns the changed - TarInfo object, if it returns None the TarInfo object will be - excluded from the archive. - """ - - def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: - """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents - a non zero-size regular file, the 'fileobj' argument should be a binary file, - and tarinfo.size bytes are read from it and added to the archive. - You can create TarInfo objects directly, or by using gettarinfo(). - """ - + ) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: ... def gettarinfo( self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None - ) -> TarInfo: - """Create a TarInfo object from the result of os.stat or equivalent - on an existing file. The file is either named by 'name', or - specified as a file object 'fileobj' with a file descriptor. If - given, 'arcname' specifies an alternative name for the file in the - archive, otherwise, the name is taken from the 'name' attribute of - 'fileobj', or the 'name' argument. The name should be a text - string. - """ - - def close(self) -> None: - """Close the TarFile. In write-mode, two finishing zero blocks are - appended to the archive. - """ + ) -> TarInfo: ... + def close(self) -> None: ... open = TarFile.open -def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: - """Return True if name points to a tar archive that we - are able to handle, else return False. - - 'name' should be a string, file, or file-like object. - """ - -class TarError(Exception): - """Base exception.""" +def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... -class ReadError(TarError): - """Exception for unreadable tar archives.""" - -class CompressionError(TarError): - """Exception for unavailable compression methods.""" - -class StreamError(TarError): - """Exception for unsupported operations on stream-like TarFiles.""" - -class ExtractError(TarError): - """General exception for extract errors.""" - -class HeaderError(TarError): - """Base exception for header errors.""" +class TarError(Exception): ... +class ReadError(TarError): ... +class CompressionError(TarError): ... +class StreamError(TarError): ... +class ExtractError(TarError): ... +class HeaderError(TarError): ... class FilterError(TarError): # This attribute is only set directly on the subclasses, but the documentation guarantees @@ -1046,13 +744,6 @@ def tar_filter(member: TarInfo, dest_path: str) -> TarInfo: ... def data_filter(member: TarInfo, dest_path: str) -> TarInfo: ... class TarInfo: - """Informational class which holds the details about an - archive member given by a tar header block. - TarInfo objects are returned by TarFile.getmember(), - TarFile.getmembers() and TarFile.gettarinfo() and are - usually created internally. - """ - __slots__ = ( "name", "mode", @@ -1093,10 +784,7 @@ class TarInfo: uname: str gname: str pax_headers: Mapping[str, str] - def __init__(self, name: str = "") -> None: - """Construct a TarInfo object. name is the optional name - of the member. - """ + def __init__(self, name: str = "") -> None: ... 
if sys.version_info >= (3, 13): @property @deprecated("Deprecated since Python 3.13; will be removed in Python 3.16.") @@ -1108,19 +796,11 @@ class TarInfo: tarfile: TarFile | None @classmethod - def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: - """Construct a TarInfo object from a 512 byte bytes object.""" - + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod - def fromtarfile(cls, tarfile: TarFile) -> Self: - """Return the next TarInfo object from TarFile object - tarfile. - """ - + def fromtarfile(cls, tarfile: TarFile) -> Self: ... @property - def linkpath(self) -> str: - """In pax headers, "linkname" is called "linkpath".""" - + def linkpath(self) -> str: ... @linkpath.setter def linkpath(self, linkname: str) -> None: ... def replace( @@ -1135,55 +815,25 @@ class TarInfo: uname: str = ..., gname: str = ..., deep: bool = True, - ) -> Self: - """Return a deep copy of self with the given attributes replaced.""" - - def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: - """Return the TarInfo's attributes as a dictionary.""" - - def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: - """Return a tar header as a string of 512 byte blocks.""" - - def create_ustar_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str) -> bytes: - """Return the object as a ustar header block.""" - - def create_gnu_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str) -> bytes: - """Return the object as a GNU header block sequence.""" - - def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: - """Return the object as a ustar header block. If it cannot be - represented this way, prepend a pax extended header sequence - with supplement information. - """ - + ) -> Self: ... + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... + def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + def create_ustar_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_gnu_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: ... @classmethod - def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: - """Return the object as a pax global header block sequence.""" - - def isfile(self) -> bool: - """Return True if the Tarinfo object is a regular file.""" - - def isreg(self) -> bool: - """Return True if the Tarinfo object is a regular file.""" - + def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: ... + def isfile(self) -> bool: ... + def isreg(self) -> bool: ... def issparse(self) -> bool: ... 
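
For reference, a minimal usage sketch of the TarFile/TarInfo API stubbed here; the mode strings follow the open() forms described above ("w:gz" writes with gzip compression, "r:*" reads with transparent compression), and the archive and directory paths are hypothetical:

import tarfile

with tarfile.open("example.tar.gz", "w:gz") as tar:    # create a gzip-compressed archive
    tar.add("src", arcname="src")                       # directories are added recursively

with tarfile.open("example.tar.gz", "r:*") as tar:      # transparent decompression on read
    for member in tar.getmembers():                     # TarInfo objects in archive order
        print(member.name, member.size, member.isdir())
    tar.extractall(path="out", filter="data")           # "data" filter applies safe extraction defaults
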
- def isdir(self) -> bool: - """Return True if it is a directory.""" - - def issym(self) -> bool: - """Return True if it is a symbolic link.""" - - def islnk(self) -> bool: - """Return True if it is a hard link.""" - - def ischr(self) -> bool: - """Return True if it is a character device.""" - - def isblk(self) -> bool: - """Return True if it is a block device.""" - - def isfifo(self) -> bool: - """Return True if it is a FIFO.""" - - def isdev(self) -> bool: - """Return True if it is one of character device, block device or FIFO.""" + def isdir(self) -> bool: ... + def issym(self) -> bool: ... + def islnk(self) -> bool: ... + def ischr(self) -> bool: ... + def isblk(self) -> bool: ... + def isfifo(self) -> bool: ... + def isdev(self) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi index 24255fbb39cec..88aa43d248996 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi @@ -1,36 +1,3 @@ -"""TELNET client class. - -Based on RFC 854: TELNET Protocol Specification, by J. Postel and -J. Reynolds - -Example: - ->>> from telnetlib import Telnet ->>> tn = Telnet('www.python.org', 79) # connect to finger port ->>> tn.write(b'guido\\r\\n') ->>> print(tn.read_all()) -Login Name TTY Idle When Where -guido Guido van Rossum pts/2 snag.cnri.reston.. - ->>> - -Note that read_all() won't read until eof -- it just reads some data --- but it guarantees to read at least one byte unless EOF is hit. - -It is possible to pass a Telnet object to a selector in order to wait until -more data is available. Note that in this case, read_eager() may return b'' -even if there was data on the socket, because the protocol negotiation may have -eaten the data. This is why EOFError is needed in some cases to distinguish -between "no data" and "connection closed" (since the socket also appears ready -for reading when it is closed). - -To do: -- option negotiation -- timeout should be intrinsic to the connection object instead of an - option on one of the read calls only - -""" - import socket from collections.abc import Callable, MutableSequence, Sequence from re import Match, Pattern @@ -120,241 +87,37 @@ EXOPL: Final = b"\xff" NOOPT: Final = b"\x00" class Telnet: - """Telnet interface class. - - An instance of this class represents a connection to a telnet - server. The instance is initially not connected; the open() - method must be used to establish a connection. Alternatively, the - host name and optional port number can be passed to the - constructor, too. - - Don't try to reopen an already connected instance. - - This class has many read_*() methods. Note that some of them - raise EOFError when the end of the connection is read, because - they can return an empty string for other reasons. See the - individual doc strings. - - read_until(expected, [timeout]) - Read until the expected string has been seen, or a timeout is - hit (default is no timeout); may block. - - read_all() - Read all data until EOF; may block. - - read_some() - Read at least one byte or EOF; may block. - - read_very_eager() - Read all data available already queued or on the socket, - without blocking. - - read_eager() - Read either data already queued or some data available on the - socket, without blocking. - - read_lazy() - Read all data in the raw queue (processing it first), without - doing any socket I/O. 
- - read_very_lazy() - Reads all data in the cooked queue, without doing any socket - I/O. - - read_sb_data() - Reads available data between SB ... SE sequence. Don't block. - - set_option_negotiation_callback(callback) - Each time a telnet option is read on the input flow, this callback - (if set) is called with the following parameters : - callback(telnet socket, command, option) - option will be chr(0) when there is no option. - No other action is done afterwards by telnetlib. - - """ - host: str | None # undocumented sock: socket.socket | None # undocumented - def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: - """Constructor. - - When called without arguments, create an unconnected instance. - With a hostname argument, it connects the instance; port number - and timeout are optional. - """ - - def open(self, host: str, port: int = 0, timeout: float = ...) -> None: - """Connect to a host. - - The optional second argument is the port number, which - defaults to the standard telnet port (23). - - Don't try to reopen an already connected instance. - """ - - def msg(self, msg: str, *args: Any) -> None: - """Print a debug message, when the debug level is > 0. - - If extra arguments are present, they are substituted in the - message using the standard string formatting operator. - - """ - - def set_debuglevel(self, debuglevel: int) -> None: - """Set the debug level. - - The higher it is, the more debug output you get (on sys.stdout). - - """ - - def close(self) -> None: - """Close the connection.""" - - def get_socket(self) -> socket.socket: - """Return the socket object used internally.""" - - def fileno(self) -> int: - """Return the fileno() of the socket object used internally.""" - - def write(self, buffer: bytes) -> None: - """Write a string to the socket, doubling any IAC characters. - - Can block if the connection is blocked. May raise - OSError if the connection is closed. - - """ - - def read_until(self, match: bytes, timeout: float | None = None) -> bytes: - """Read until a given string is encountered or until timeout. - - When no match is found, return whatever is available instead, - possibly the empty string. Raise EOFError if the connection - is closed and no cooked data is available. - - """ - - def read_all(self) -> bytes: - """Read all data until EOF; block until connection closed.""" - - def read_some(self) -> bytes: - """Read at least one byte of cooked data unless EOF is hit. - - Return b'' if EOF is hit. Block if no data is immediately - available. - - """ - - def read_very_eager(self) -> bytes: - """Read everything that's possible without blocking in I/O (eager). - - Raise EOFError if connection closed and no cooked data - available. Return b'' if no cooked data available otherwise. - Don't block unless in the midst of an IAC sequence. - - """ - - def read_eager(self) -> bytes: - """Read readily available data. - - Raise EOFError if connection closed and no cooked data - available. Return b'' if no cooked data available otherwise. - Don't block unless in the midst of an IAC sequence. - - """ - - def read_lazy(self) -> bytes: - """Process and return data that's already in the queues (lazy). - - Raise EOFError if connection closed and no data available. - Return b'' if no cooked data available otherwise. Don't block - unless in the midst of an IAC sequence. - - """ - - def read_very_lazy(self) -> bytes: - """Return any data available in the cooked queue (very lazy). - - Raise EOFError if connection closed and no data available. 
- Return b'' if no cooked data available otherwise. Don't block. - - """ - - def read_sb_data(self) -> bytes: - """Return any data available in the SB ... SE queue. - - Return b'' if no SB ... SE available. Should only be called - after seeing a SB or SE command. When a new SB command is - found, old unread SB data will be discarded. Don't block. - - """ - - def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: - """Provide a callback function called after each receipt of a telnet option.""" - - def process_rawq(self) -> None: - """Transfer from raw queue to cooked queue. - - Set self.eof when connection is closed. Don't block unless in - the midst of an IAC sequence. - - """ - - def rawq_getchar(self) -> bytes: - """Get next char from raw queue. - - Block if no data is immediately available. Raise EOFError - when connection is closed. - - """ - - def fill_rawq(self) -> None: - """Fill raw queue from exactly one recv() system call. - - Block if no data is immediately available. Set self.eof when - connection is closed. - - """ - - def sock_avail(self) -> bool: - """Test whether data is available on the socket.""" - - def interact(self) -> None: - """Interaction function, emulates a very dumb telnet client.""" - - def mt_interact(self) -> None: - """Multithreaded version of interact().""" - - def listener(self) -> None: - """Helper for mt_interact() -- this executes in the other thread.""" - + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... + def msg(self, msg: str, *args: Any) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def close(self) -> None: ... + def get_socket(self) -> socket.socket: ... + def fileno(self) -> int: ... + def write(self, buffer: bytes) -> None: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... + def read_all(self) -> bytes: ... + def read_some(self) -> bytes: ... + def read_very_eager(self) -> bytes: ... + def read_eager(self) -> bytes: ... + def read_lazy(self) -> bytes: ... + def read_very_lazy(self) -> bytes: ... + def read_sb_data(self) -> bytes: ... + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: ... + def process_rawq(self) -> None: ... + def rawq_getchar(self) -> bytes: ... + def fill_rawq(self) -> None: ... + def sock_avail(self) -> bool: ... + def interact(self) -> None: ... + def mt_interact(self) -> None: ... + def listener(self) -> None: ... def expect( self, list: MutableSequence[Pattern[bytes] | bytes] | Sequence[Pattern[bytes]], timeout: float | None = None - ) -> tuple[int, Match[bytes] | None, bytes]: - """Read until one from a list of a regular expressions matches. - - The first argument is a list of regular expressions, either - compiled (re.Pattern instances) or uncompiled (strings). - The optional second argument is a timeout, in seconds; default - is no timeout. - - Return a tuple of three items: the index in the list of the - first regular expression that matches; the re.Match object - returned; and the text read up till and including the match. - - If EOF is read and no text was read, raise EOFError. - Otherwise, when nothing matches, return (-1, None, text) where - text is the text received so far (may be the empty string if a - timeout happened). - - If a regular expression ends with a greedy match (e.g. 
'.*') - or if more than one expression can match the same input, the - results are undeterministic, and may depend on the I/O timing. - - """ - + ) -> tuple[int, Match[bytes] | None, bytes]: ... def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __del__(self) -> None: - """Destructor -- close the connection.""" + def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi index 7b0af097ed1d6..26491074ff71d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi @@ -1,28 +1,3 @@ -"""Temporary files. - -This module provides generic, low- and high-level interfaces for -creating temporary files and directories. All of the interfaces -provided by this module can be used without fear of race conditions -except for 'mktemp'. 'mktemp' is subject to race conditions and -should not be used; it is provided for backward compatibility only. - -The default path names are returned as str. If you supply bytes as -input, all return values will be in bytes. Ex: - - >>> tempfile.mkstemp() - (4, '/tmp/tmptpu9nin8') - >>> tempfile.mkdtemp(suffix=b'') - b'/tmp/tmppbi8f0hy' - -This module also provides some data items to the user: - - TMP_MAX - maximum number of names that will be tried before - giving up. - tempdir - If this is set to a string before the first use of - any routine from this module, it will be considered as - another candidate location to store temporary files. -""" - import io import sys from _typeshed import ( @@ -77,30 +52,7 @@ if sys.version_info >= (3, 12): *, errors: str | None = None, delete_on_close: bool = True, - ) -> _TemporaryFileWrapper[str]: - """Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is automatically deleted (default True). - 'delete_on_close' -- if 'delete', whether the file is deleted on close - (default True) or otherwise either on context manager exit - (if context manager was used) or on object finalization. . - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - - On POSIX, NamedTemporaryFiles cannot be automatically deleted if - the creating process is terminated abruptly with a SIGKILL signal. - Windows can delete the file even in this case. - """ - + ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -143,27 +95,7 @@ else: delete: bool = True, *, errors: str | None = None, - ) -> _TemporaryFileWrapper[str]: - """Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). 
- 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is deleted on close (default True). - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - - On POSIX, NamedTemporaryFiles cannot be automatically deleted if - the creating process is terminated abruptly with a SIGKILL signal. - Windows can delete the file even in this case. - """ - + ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -206,21 +138,7 @@ else: dir: GenericPath[AnyStr] | None = None, *, errors: str | None = None, - ) -> io.TextIOWrapper: - """Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface. The file has no - name, and will cease to exist when it is closed. - """ - + ) -> io.TextIOWrapper: ... @overload def TemporaryFile( mode: OpenBinaryMode, @@ -294,13 +212,6 @@ else: ) -> IO[Any]: ... class _TemporaryFileWrapper(IO[AnyStr]): - """Temporary file wrapper - - This class provides a wrapper around files opened for - temporary use. In particular, it seeks to automatically - remove the file when it is no longer needed. - """ - file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool @@ -312,10 +223,7 @@ class _TemporaryFileWrapper(IO[AnyStr]): def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... - def close(self) -> None: - """ - Close the temporary file, possibly deleting it. - """ + def close(self) -> None: ... # These methods don't exist directly on this object, but # are delegated to the underlying IO object through __getattr__. # We need to add them here so that this class is concrete. @@ -364,11 +272,6 @@ else: # It does not actually derive from IO[AnyStr], but it does mostly behave # like one. class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): - """Temporary file wrapper, specialized to switch from BytesIO - or StringIO to a real file when it exceeds a certain size or - when a fileno is needed. - """ - _file: IO[AnyStr] @property def encoding(self) -> str: ... # undocumented @@ -492,35 +395,10 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def readable(self) -> bool: ... def seekable(self) -> bool: ... def writable(self) -> bool: ... - def __next__(self) -> AnyStr: # type: ignore[override] - """Implement next(self).""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __next__(self) -> AnyStr: ... # type: ignore[override] + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
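
For reference, a minimal usage sketch of NamedTemporaryFile and TemporaryDirectory as typed here; file names and contents are illustrative:

import os
import tempfile

with tempfile.NamedTemporaryFile(mode="w+", suffix=".txt") as tmp:
    tmp.write("scratch data")        # file is deleted automatically when the handle closes
    tmp.seek(0)
    print(tmp.name, tmp.read())

with tempfile.TemporaryDirectory(prefix="demo-") as tmpdir:
    path = os.path.join(tmpdir, "notes.txt")
    with open(path, "w") as f:
        f.write("removed when the context manager exits")
    print(os.listdir(tmpdir))        # ['notes.txt']
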
class TemporaryDirectory(Generic[AnyStr]): - """Create and return a temporary directory. This has the same - behavior as mkdtemp but can be used as a context manager. For - example: - - with TemporaryDirectory() as tmpdir: - ... - - Upon exiting the context, the directory and everything contained - in it are removed (unless delete=False is passed or an exception - is raised during cleanup and ignore_cleanup_errors is not True). - - Optional Arguments: - suffix - A str suffix for the directory name. (see mkdtemp) - prefix - A str prefix for the directory name. (see mkdtemp) - dir - A directory to create this temp dir in. (see mkdtemp) - ignore_cleanup_errors - False; ignore exceptions during cleanup? - delete - True; whether the directory is automatically deleted. - """ - name: AnyStr if sys.version_info >= (3, 12): @overload @@ -576,45 +454,13 @@ class TemporaryDirectory(Generic[AnyStr]): def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # The overloads overlap, but they should still work fine. @overload def mkstemp( suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False -) -> tuple[int, str]: - """User-callable function to create and return a unique temporary - file. The return value is a pair (fd, name) where fd is the - file descriptor returned by os.open, and name is the filename. - - If 'suffix' is not None, the file name will end with that suffix, - otherwise there will be no suffix. - - If 'prefix' is not None, the file name will begin with that prefix, - otherwise a default prefix is used. - - If 'dir' is not None, the file will be created in that directory, - otherwise a default directory is used. - - If 'text' is specified and true, the file is opened in text - mode. Else (the default) the file is opened in binary mode. - - If any of 'suffix', 'prefix' and 'dir' are not None, they must be the - same type. If they are bytes, the returned name will be bytes; str - otherwise. - - The file is readable and writable only by the creating user ID. - If the operating system uses permission bits to indicate whether a - file is executable, the file is executable by no one. The file - descriptor is not inherited by children of this process. - - Caller is responsible for deleting the file when done with it. - """ - +) -> tuple[int, str]: ... @overload def mkstemp( suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False @@ -622,44 +468,12 @@ def mkstemp( # The overloads overlap, but they should still work fine. @overload -def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: - """User-callable function to create and return a unique temporary - directory. The return value is the pathname of the directory. - - Arguments are as for mkstemp, except that the 'text' argument is - not accepted. - - The directory is readable, writable, and searchable only by the - creating user. - - Caller is responsible for deleting the directory when done with it. - """ - +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... 
@overload def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... @deprecated("Deprecated since Python 2.3. Use `mkstemp()` or `NamedTemporaryFile(delete=False)` instead.") -def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: - """User-callable function to return a unique temporary file name. The - file is not created. - - Arguments are similar to mkstemp, except that the 'text' argument is - not accepted, and suffix=None, prefix=None and bytes file names are not - supported. - - THIS FUNCTION IS UNSAFE AND SHOULD NOT BE USED. The file name may - refer to a file that did not exist at some point, but by the time - you get around to creating it, someone else may have beaten you to - the punch. - """ - -def gettempdirb() -> bytes: - """Returns tempfile.tempdir as bytes.""" - -def gettempprefixb() -> bytes: - """The default prefix for temporary directories as bytes.""" - -def gettempdir() -> str: - """Returns tempfile.tempdir as str.""" - -def gettempprefix() -> str: - """The default prefix for temporary directories as string.""" +def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... +def gettempdirb() -> bytes: ... +def gettempprefixb() -> bytes: ... +def gettempdir() -> str: ... +def gettempprefix() -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi index a8c402ab07309..a35be5dfe740a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi @@ -1,13 +1,3 @@ -"""This module provides an interface to the Posix calls for tty I/O control. -For a complete description of these calls, see the Posix or Unix manual -pages. It is only available for those Unix versions that support Posix -termios style tty I/O control. - -All functions in this module take a file descriptor fd as their first -argument. This can be an integer file descriptor, such as returned by -sys.stdin.fileno(), or a file object, such as sys.stdin itself. -""" - import sys from _typeshed import FileDescriptorLike from typing import Any, Final @@ -301,65 +291,14 @@ if sys.platform != "win32": INIT_C_CC: Final[int] NSWTCH: Final[int] - def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: - """Get the tty attributes for file descriptor fd. - - Returns a list [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] - where cc is a list of the tty special characters (each a string of - length 1, except the items with indices VMIN and VTIME, which are - integers when these fields are defined). The interpretation of the - flags and the speeds as well as the indexing in the cc array must be - done using the symbolic constants defined in this module. - """ - - def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: - """Set the tty attributes for file descriptor fd. - - The attributes to be set are taken from the attributes argument, which - is a list like the one returned by tcgetattr(). The when argument - determines when the attributes are changed: termios.TCSANOW to - change immediately, termios.TCSADRAIN to change after transmitting all - queued output, or termios.TCSAFLUSH to change after transmitting all - queued output and discarding all queued input. - """ - - def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: - """Send a break on file descriptor fd. 
- - A zero duration sends a break for 0.25-0.5 seconds; a nonzero duration - has a system dependent meaning. - """ - - def tcdrain(fd: FileDescriptorLike, /) -> None: - """Wait until all output written to file descriptor fd has been transmitted.""" - - def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: - """Discard queued data on file descriptor fd. - - The queue selector specifies which queue: termios.TCIFLUSH for the input - queue, termios.TCOFLUSH for the output queue, or termios.TCIOFLUSH for - both queues. - """ - - def tcflow(fd: FileDescriptorLike, action: int, /) -> None: - """Suspend or resume input or output on file descriptor fd. - - The action argument can be termios.TCOOFF to suspend output, - termios.TCOON to restart output, termios.TCIOFF to suspend input, - or termios.TCION to restart input. - """ + def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: ... + def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: ... + def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: ... + def tcdrain(fd: FileDescriptorLike, /) -> None: ... + def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: ... + def tcflow(fd: FileDescriptorLike, action: int, /) -> None: ... if sys.version_info >= (3, 11): - def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: - """Get the tty winsize for file descriptor fd. - - Returns a tuple (ws_row, ws_col). - """ - - def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: - """Set the tty winsize for file descriptor fd. - - The winsize to be set is taken from the winsize argument, which - is a two-item tuple (ws_row, ws_col) like the one returned by tcgetwinsize(). - """ + def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: ... + def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: ... class error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi index d24ccdebc33fb..c00cce3c2d577 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi @@ -1,59 +1,9 @@ -"""Text wrapping and filling.""" - from collections.abc import Callable from re import Pattern __all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] class TextWrapper: - """ - Object for wrapping/filling text. The public interface consists of - the wrap() and fill() methods; the other methods are just there for - subclasses to override in order to tweak the default behaviour. - If you want to completely replace the main wrapping algorithm, - you'll probably have to override _wrap_chunks(). - - Several instance attributes control various aspects of wrapping: - width (default: 70) - the maximum width of wrapped lines (unless break_long_words - is false) - initial_indent (default: "") - string that will be prepended to the first line of wrapped - output. Counts towards the line's width. - subsequent_indent (default: "") - string that will be prepended to all lines save the first - of wrapped output; also counts towards each line's width. - expand_tabs (default: true) - Expand tabs in input text to spaces before further processing. - Each tab will become 0 .. 'tabsize' spaces, depending on its position - in its line. If false, each tab is treated as a single character. - tabsize (default: 8) - Expand tabs in input text to 0 .. 'tabsize' spaces, unless - 'expand_tabs' is false. 
- replace_whitespace (default: true) - Replace all whitespace characters in the input text by spaces - after tab expansion. Note that if expand_tabs is false and - replace_whitespace is true, every tab will be converted to a - single space! - fix_sentence_endings (default: false) - Ensure that sentence-ending punctuation is always followed - by two spaces. Off by default because the algorithm is - (unavoidably) imperfect. - break_long_words (default: true) - Break words longer than 'width'. If false, those words will not - be broken, and some lines might be longer than 'width'. - break_on_hyphens (default: true) - Allow breaking hyphenated words. If true, wrapping will occur - preferably on whitespaces and right after hyphens part of - compound words. - drop_whitespace (default: true) - Drop leading and trailing whitespace from lines. - max_lines (default: None) - Truncate wrapped lines. - placeholder (default: ' [...]') - Append to the last line of truncated text. - """ - width: int initial_indent: str subsequent_indent: str @@ -92,81 +42,14 @@ class TextWrapper: placeholder: str = " [...]", ) -> None: ... # Private methods *are* part of the documented API for subclasses. - def _munge_whitespace(self, text: str) -> str: - """_munge_whitespace(text : string) -> string - - Munge whitespace in text: expand tabs and convert all other - whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz" - becomes " foo bar baz". - """ - - def _split(self, text: str) -> list[str]: - """_split(text : string) -> [string] - - Split the text to wrap into indivisible chunks. Chunks are - not quite the same as words; see _wrap_chunks() for full - details. As an example, the text - Look, goof-ball -- use the -b option! - breaks into the following chunks: - 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', - 'use', ' ', 'the', ' ', '-b', ' ', 'option!' - if break_on_hyphens is True, or in: - 'Look,', ' ', 'goof-ball', ' ', '--', ' ', - 'use', ' ', 'the', ' ', '-b', ' ', option!' - otherwise. - """ - - def _fix_sentence_endings(self, chunks: list[str]) -> None: - """_fix_sentence_endings(chunks : [string]) - - Correct for sentence endings buried in 'chunks'. Eg. when the - original text contains "... foo.\\nBar ...", munge_whitespace() - and split() will convert that to [..., "foo.", " ", "Bar", ...] - which has one too few spaces; this method simply changes the one - space to two. - """ - - def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: - """_handle_long_word(chunks : [string], - cur_line : [string], - cur_len : int, width : int) - - Handle a chunk of text (most likely a word, not whitespace) that - is too long to fit in any line. - """ - - def _wrap_chunks(self, chunks: list[str]) -> list[str]: - """_wrap_chunks(chunks : [string]) -> [string] - - Wrap a sequence of text chunks and return a list of lines of - length 'self.width' or less. (If 'break_long_words' is false, - some lines may be longer than this.) Chunks correspond roughly - to words and the whitespace between them: each chunk is - indivisible (modulo 'break_long_words'), but a line break can - come between any two chunks. Chunks should not have internal - whitespace; ie. a chunk is either all whitespace or a "word". - Whitespace chunks will be removed from the beginning and end of - lines, but apart from that whitespace is preserved. - """ - + def _munge_whitespace(self, text: str) -> str: ... + def _split(self, text: str) -> list[str]: ... 
+ def _fix_sentence_endings(self, chunks: list[str]) -> None: ... + def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: ... + def _wrap_chunks(self, chunks: list[str]) -> list[str]: ... def _split_chunks(self, text: str) -> list[str]: ... - def wrap(self, text: str) -> list[str]: - """wrap(text : string) -> [string] - - Reformat the single paragraph in 'text' so it fits in lines of - no more than 'self.width' columns, and return a list of wrapped - lines. Tabs in 'text' are expanded with string.expandtabs(), - and all other whitespace characters (including newline) are - converted to space. - """ - - def fill(self, text: str) -> str: - """fill(text : string) -> string - - Reformat the single paragraph in 'text' to fit in lines of no - more than 'self.width' columns, and return a new string - containing the entire wrapped paragraph. - """ + def wrap(self, text: str) -> list[str]: ... + def fill(self, text: str) -> str: ... def wrap( text: str, @@ -183,17 +66,7 @@ def wrap( drop_whitespace: bool = True, max_lines: int | None = None, placeholder: str = " [...]", -) -> list[str]: - """Wrap a single paragraph of text, returning a list of wrapped lines. - - Reformat the single paragraph in 'text' so it fits in lines of no - more than 'width' columns, and return a list of wrapped lines. By - default, tabs in 'text' are expanded with string.expandtabs(), and - all other whitespace characters (including newline) are converted to - space. See TextWrapper class for available keyword args to customize - wrapping behaviour. - """ - +) -> list[str]: ... def fill( text: str, width: int = 70, @@ -209,16 +82,7 @@ def fill( drop_whitespace: bool = True, max_lines: int | None = None, placeholder: str = " [...]", -) -> str: - """Fill a single paragraph of text, returning a new string. - - Reformat the single paragraph in 'text' to fit in lines of no more - than 'width' columns, and return a new string containing the entire - wrapped paragraph. As with wrap(), tabs are expanded and other - whitespace characters converted to space. See TextWrapper class for - available keyword args to customize wrapping behaviour. - """ - +) -> str: ... def shorten( text: str, width: int, @@ -234,38 +98,6 @@ def shorten( drop_whitespace: bool = True, # Omit `max_lines: int = None`, it is forced to 1 here. placeholder: str = " [...]", -) -> str: - """Collapse and truncate the given text to fit in the given width. - - The text first has its whitespace collapsed. If it then fits in - the *width*, it is returned as is. Otherwise, as many words - as possible are joined and then the placeholder is appended:: - - >>> textwrap.shorten("Hello world!", width=12) - 'Hello world!' - >>> textwrap.shorten("Hello world!", width=11) - 'Hello [...]' - """ - -def dedent(text: str) -> str: - """Remove any common leading whitespace from every line in `text`. - - This can be used to make triple-quoted strings line up with the left - edge of the display, while still presenting them in the source code - in indented form. - - Note that tabs and spaces are both treated as whitespace, but they - are not equal: the lines " hello" and "\\thello" are - considered to have no common leading whitespace. - - Entirely blank lines are normalized to a newline character. - """ - -def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: - """Adds 'prefix' to the beginning of selected lines in 'text'. 
- - If 'predicate' is provided, 'prefix' will only be added to the lines - where 'predicate(line)' is True. If 'predicate' is not provided, - it will default to adding 'prefix' to all non-empty lines that do not - consist solely of whitespace characters. - """ +) -> str: ... +def dedent(text: str) -> str: ... +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi index 95244ee608683..28fa5267a9975 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi @@ -1,5 +1,3 @@ -"""Thread module emulating a subset of Java's threading model.""" - import _thread import sys from _thread import _excepthook, _ExceptHookArgs, get_native_id as get_native_id @@ -47,120 +45,27 @@ if sys.version_info >= (3, 12): _profile_hook: ProfileFunction | None -def active_count() -> int: - """Return the number of Thread objects currently alive. - - The returned count is equal to the length of the list returned by - enumerate(). - - """ - +def active_count() -> int: ... @deprecated("Deprecated since Python 3.10. Use `active_count()` instead.") -def activeCount() -> int: - """Return the number of Thread objects currently alive. - - This function is deprecated, use active_count() instead. - - """ - -def current_thread() -> Thread: - """Return the current Thread object, corresponding to the caller's thread of control. - - If the caller's thread of control was not created through the threading - module, a dummy thread object with limited functionality is returned. - - """ - +def activeCount() -> int: ... +def current_thread() -> Thread: ... @deprecated("Deprecated since Python 3.10. Use `current_thread()` instead.") -def currentThread() -> Thread: - """Return the current Thread object, corresponding to the caller's thread of control. - - This function is deprecated, use current_thread() instead. - - """ - -def get_ident() -> int: - """Return a non-zero integer that uniquely identifies the current thread - amongst other threads that exist simultaneously. - This may be used to identify per-thread resources. - Even though on some platforms threads identities may appear to be - allocated consecutive numbers starting at 1, this behavior should not - be relied upon, and the number should be seen purely as a magic cookie. - A thread's identity may be reused for another thread after it exits. - """ - -def enumerate() -> list[Thread]: - """Return a list of all Thread objects currently alive. - - The list includes daemonic threads, dummy thread objects created by - current_thread(), and the main thread. It excludes terminated threads and - threads that have not yet been started. - - """ - -def main_thread() -> Thread: - """Return the main thread object. - - In normal conditions, the main thread is the thread from which the - Python interpreter was started. - """ - -def settrace(func: TraceFunction) -> None: - """Set a trace function for all threads started from the threading module. - - The func will be passed to sys.settrace() for each thread, before its run() - method is called. - """ - -def setprofile(func: ProfileFunction | None) -> None: - """Set a profile function for all threads started from the threading module. - - The func will be passed to sys.setprofile() for each thread, before its - run() method is called. - """ +def currentThread() -> Thread: ... +def get_ident() -> int: ... 
+def enumerate() -> list[Thread]: ... +def main_thread() -> Thread: ... +def settrace(func: TraceFunction) -> None: ... +def setprofile(func: ProfileFunction | None) -> None: ... if sys.version_info >= (3, 12): - def setprofile_all_threads(func: ProfileFunction | None) -> None: - """Set a profile function for all threads started from the threading module - and all Python threads that are currently executing. - - The func will be passed to sys.setprofile() for each thread, before its - run() method is called. - """ - - def settrace_all_threads(func: TraceFunction) -> None: - """Set a trace function for all threads started from the threading module - and all Python threads that are currently executing. - - The func will be passed to sys.settrace() for each thread, before its run() - method is called. - """ + def setprofile_all_threads(func: ProfileFunction | None) -> None: ... + def settrace_all_threads(func: TraceFunction) -> None: ... if sys.version_info >= (3, 10): - def gettrace() -> TraceFunction | None: - """Get the trace function as set by threading.settrace().""" + def gettrace() -> TraceFunction | None: ... + def getprofile() -> ProfileFunction | None: ... - def getprofile() -> ProfileFunction | None: - """Get the profiler function as set by threading.setprofile().""" - -def stack_size(size: int = 0, /) -> int: - """Return the thread stack size used when creating new threads. The - optional size argument specifies the stack size (in bytes) to be used - for subsequently created threads, and must be 0 (use platform or - configured default) or a positive integer value of at least 32,768 (32k). - If changing the thread stack size is unsupported, a ThreadError - exception is raised. If the specified size is invalid, a ValueError - exception is raised, and the stack size is unmodified. 32k bytes - currently the minimum supported stack size value to guarantee - sufficient stack space for the interpreter itself. - - Note that some platforms may have particular restrictions on values for - the stack size, such as requiring a minimum stack size larger than 32 KiB or - requiring allocation in multiples of the system memory page size - - platform documentation should be referred to for more information - (4 KiB pages are common; using multiples of 4096 for the stack size is - the suggested approach in the absence of more specific information). - """ +def stack_size(size: int = 0, /) -> int: ... TIMEOUT_MAX: Final[float] @@ -168,24 +73,9 @@ ThreadError = _thread.error local = _thread._local class Thread: - """A class that represents a thread of control. - - This class can be safely subclassed in a limited fashion. There are two ways - to specify the activity: by passing a callable object to the constructor, or - by overriding the run() method in a subclass. - - """ - name: str @property - def ident(self) -> int | None: - """Thread identifier of this thread or None if it has not been started. - - This is a nonzero integer. See the get_ident() function. Thread - identifiers may be recycled when a thread exits and another thread is - created. The identifier is available even after the thread has exited. - - """ + def ident(self) -> int | None: ... daemon: bool if sys.version_info >= (3, 14): def __init__( @@ -198,36 +88,7 @@ class Thread: *, daemon: bool | None = None, context: ContextVar[Any] | None = None, - ) -> None: - """This constructor should always be called with keyword arguments. 
Arguments are: - - *group* should be None; reserved for future extension when a ThreadGroup - class is implemented. - - *target* is the callable object to be invoked by the run() - method. Defaults to None, meaning nothing is called. - - *name* is the thread name. By default, a unique name is constructed of - the form "Thread-N" where N is a small decimal number. - - *args* is a list or tuple of arguments for the target invocation. Defaults to (). - - *kwargs* is a dictionary of keyword arguments for the target - invocation. Defaults to {}. - - *context* is the contextvars.Context value to use for the thread. - The default value is None, which means to check - sys.flags.thread_inherit_context. If that flag is true, use a copy - of the context of the caller. If false, use an empty context. To - explicitly start with an empty context, pass a new instance of - contextvars.Context(). To explicitly start with a copy of the current - context, pass the value from contextvars.copy_context(). - - If a subclass overrides the constructor, it must make sure to invoke - the base class constructor (Thread.__init__()) before doing anything - else to the thread. - - """ + ) -> None: ... else: def __init__( self, @@ -238,124 +99,22 @@ class Thread: kwargs: Mapping[str, Any] | None = None, *, daemon: bool | None = None, - ) -> None: - """This constructor should always be called with keyword arguments. Arguments are: - - *group* should be None; reserved for future extension when a ThreadGroup - class is implemented. - - *target* is the callable object to be invoked by the run() - method. Defaults to None, meaning nothing is called. - - *name* is the thread name. By default, a unique name is constructed of - the form "Thread-N" where N is a small decimal number. - - *args* is a list or tuple of arguments for the target invocation. Defaults to (). - - *kwargs* is a dictionary of keyword arguments for the target - invocation. Defaults to {}. - - If a subclass overrides the constructor, it must make sure to invoke - the base class constructor (Thread.__init__()) before doing anything - else to the thread. - - """ - - def start(self) -> None: - """Start the thread's activity. - - It must be called at most once per thread object. It arranges for the - object's run() method to be invoked in a separate thread of control. - - This method will raise a RuntimeError if called more than once on the - same thread object. - - """ - - def run(self) -> None: - """Method representing the thread's activity. - - You may override this method in a subclass. The standard run() method - invokes the callable object passed to the object's constructor as the - target argument, if any, with sequential and keyword arguments taken - from the args and kwargs arguments, respectively. - - """ - - def join(self, timeout: float | None = None) -> None: - """Wait until the thread terminates. - - This blocks the calling thread until the thread whose join() method is - called terminates -- either normally or through an unhandled exception - or until the optional timeout occurs. - - When the timeout argument is present and not None, it should be a - floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). As join() always returns None, you must call - is_alive() after join() to decide whether a timeout happened -- if the - thread is still alive, the join() call timed out. - - When the timeout argument is not present or None, the operation will - block until the thread terminates. 
- - A thread can be join()ed many times. - - join() raises a RuntimeError if an attempt is made to join the current - thread as that would cause a deadlock. It is also an error to join() a - thread before it has been started and attempts to do so raises the same - exception. - - """ + ) -> None: ... + def start(self) -> None: ... + def run(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... @property - def native_id(self) -> int | None: # only available on some platforms - """Native integral thread ID of this thread, or None if it has not been started. - - This is a non-negative integer. See the get_native_id() function. - This represents the Thread ID as reported by the kernel. - - """ - - def is_alive(self) -> bool: - """Return whether the thread is alive. - - This method returns True just before the run() method starts until just - after the run() method terminates. See also the module function - enumerate(). - - """ - + def native_id(self) -> int | None: ... # only available on some platforms + def is_alive(self) -> bool: ... @deprecated("Deprecated since Python 3.10. Read the `daemon` attribute instead.") - def isDaemon(self) -> bool: - """Return whether this thread is a daemon. - - This method is deprecated, use the daemon attribute instead. - - """ - + def isDaemon(self) -> bool: ... @deprecated("Deprecated since Python 3.10. Set the `daemon` attribute instead.") - def setDaemon(self, daemonic: bool) -> None: - """Set whether this thread is a daemon. - - This method is deprecated, use the .daemon property instead. - - """ - + def setDaemon(self, daemonic: bool) -> None: ... @deprecated("Deprecated since Python 3.10. Read the `name` attribute instead.") - def getName(self) -> str: - """Return a string used for identification purposes only. - - This method is deprecated, use the name attribute instead. - - """ - + def getName(self) -> str: ... @deprecated("Deprecated since Python 3.10. Set the `name` attribute instead.") - def setName(self, name: str) -> None: - """Set the name string for this thread. - - This method is deprecated, use the name attribute instead. - - """ + def setName(self, name: str) -> None: ... class _DummyThread(Thread): def __init__(self) -> None: ... @@ -366,80 +125,18 @@ Lock = _thread.LockType # Python implementation of RLock. @final class _RLock: - """This class implements reentrant lock objects. - - A reentrant lock must be released by the thread that acquired it. Once a - thread has acquired a reentrant lock, the same thread may acquire it - again without blocking; the thread must release it once for each time it - has acquired it. - - """ - _count: int - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - """Acquire a lock, blocking or non-blocking. - - When invoked without arguments: if this thread already owns the lock, - increment the recursion level by one, and return immediately. Otherwise, - if another thread owns the lock, block until the lock is unlocked. Once - the lock is unlocked (not owned by any thread), then grab ownership, set - the recursion level to one, and return. If more than one thread is - blocked waiting until the lock is unlocked, only one at a time will be - able to grab ownership of the lock. There is no return value in this - case. - - When invoked with the blocking argument set to true, do the same thing - as when called without arguments, and return true. - - When invoked with the blocking argument set to false, do not block. 
If a - call without an argument would block, return false immediately; - otherwise, do the same thing as when called without arguments, and - return true. - - When invoked with the floating-point timeout argument set to a positive - value, block for at most the number of seconds specified by timeout - and as long as the lock cannot be acquired. Return true if the lock has - been acquired, false if the timeout has elapsed. - - """ - - def release(self) -> None: - """Release a lock, decrementing the recursion level. - - If after the decrement it is zero, reset the lock to unlocked (not owned - by any thread), and if any other threads are blocked waiting for the - lock to become unlocked, allow exactly one of them to proceed. If after - the decrement the recursion level is still nonzero, the lock remains - locked and owned by the calling thread. - - Only call this method when the calling thread owns the lock. A - RuntimeError is raised if this method is called when the lock is - unlocked. - - There is no return value. - - """ + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... if sys.version_info >= (3, 14): - def locked(self) -> bool: - """Return whether this object is locked.""" + def locked(self) -> bool: ... RLock = _thread.RLock # Actually a function at runtime. class Condition: - """Class that implements a condition variable. - - A condition variable allows one or more threads to wait until they are - notified by another thread. - - If the lock argument is given and not None, it must be a Lock or RLock - object, and it is used as the underlying lock. Otherwise, a new RLock object - is created and used as the underlying lock. - - """ - def __init__(self, lock: Lock | _RLock | RLock | None = None) -> None: ... def __enter__(self) -> bool: ... def __exit__( @@ -447,218 +144,35 @@ class Condition: ) -> None: ... def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = None) -> bool: - """Wait until notified or until a timeout occurs. - - If the calling thread has not acquired the lock when this method is - called, a RuntimeError is raised. - - This method releases the underlying lock, and then blocks until it is - awakened by a notify() or notify_all() call for the same condition - variable in another thread, or until the optional timeout occurs. Once - awakened or timed out, it re-acquires the lock and returns. - - When the timeout argument is present and not None, it should be a - floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). - - When the underlying lock is an RLock, it is not released using its - release() method, since this may not actually unlock the lock when it - was acquired multiple times recursively. Instead, an internal interface - of the RLock class is used, which really unlocks it even when it has - been recursively acquired several times. Another internal interface is - then used to restore the recursion level when the lock is reacquired. - - """ - - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: - """Wait until a condition evaluates to True. - - predicate should be a callable which result will be interpreted as a - boolean value. A timeout may be provided giving the maximum time to - wait. 
- - """ - - def notify(self, n: int = 1) -> None: - """Wake up one or more threads waiting on this condition, if any. - - If the calling thread has not acquired the lock when this method is - called, a RuntimeError is raised. - - This method wakes up at most n of the threads waiting for the condition - variable; it is a no-op if no threads are waiting. - - """ - - def notify_all(self) -> None: - """Wake up all threads waiting on this condition. - - If the calling thread has not acquired the lock when this method - is called, a RuntimeError is raised. - - """ - + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... @deprecated("Deprecated since Python 3.10. Use `notify_all()` instead.") - def notifyAll(self) -> None: - """Wake up all threads waiting on this condition. - - This method is deprecated, use notify_all() instead. - - """ + def notifyAll(self) -> None: ... class Semaphore: - """This class implements semaphore objects. - - Semaphores manage a counter representing the number of release() calls minus - the number of acquire() calls, plus an initial value. The acquire() method - blocks if necessary until it can return without making the counter - negative. If not given, value defaults to 1. - - """ - _value: int def __init__(self, value: int = 1) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: - """Acquire a semaphore, decrementing the internal counter by one. - - When invoked without arguments: if the internal counter is larger than - zero on entry, decrement it by one and return immediately. If it is zero - on entry, block, waiting until some other thread has called release() to - make it larger than zero. This is done with proper interlocking so that - if multiple acquire() calls are blocked, release() will wake exactly one - of them up. The implementation may pick one at random, so the order in - which blocked threads are awakened should not be relied on. There is no - return value in this case. - - When invoked with blocking set to true, do the same thing as when called - without arguments, and return true. - - When invoked with blocking set to false, do not block. If a call without - an argument would block, return false immediately; otherwise, do the - same thing as when called without arguments, and return true. - - When invoked with a timeout other than None, it will block for at - most timeout seconds. If acquire does not complete successfully in - that interval, return false. Return true otherwise. - - """ - - def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: - """Acquire a semaphore, decrementing the internal counter by one. - - When invoked without arguments: if the internal counter is larger than - zero on entry, decrement it by one and return immediately. If it is zero - on entry, block, waiting until some other thread has called release() to - make it larger than zero. This is done with proper interlocking so that - if multiple acquire() calls are blocked, release() will wake exactly one - of them up. The implementation may pick one at random, so the order in - which blocked threads are awakened should not be relied on. There is no - return value in this case. 
- - When invoked with blocking set to true, do the same thing as when called - without arguments, and return true. - - When invoked with blocking set to false, do not block. If a call without - an argument would block, return false immediately; otherwise, do the - same thing as when called without arguments, and return true. - - When invoked with a timeout other than None, it will block for at - most timeout seconds. If acquire does not complete successfully in - that interval, return false. Return true otherwise. - - """ - - def release(self, n: int = 1) -> None: - """Release a semaphore, incrementing the internal counter by one or more. + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def release(self, n: int = 1) -> None: ... - When the counter is zero on entry and another thread is waiting for it - to become larger than zero again, wake up that thread. - - """ - -class BoundedSemaphore(Semaphore): - """Implements a bounded semaphore. - - A bounded semaphore checks to make sure its current value doesn't exceed its - initial value. If it does, ValueError is raised. In most situations - semaphores are used to guard resources with limited capacity. - - If the semaphore is released too many times it's a sign of a bug. If not - given, value defaults to 1. - - Like regular semaphores, bounded semaphores manage a counter representing - the number of release() calls minus the number of acquire() calls, plus an - initial value. The acquire() method blocks if necessary until it can return - without making the counter negative. If not given, value defaults to 1. - - """ +class BoundedSemaphore(Semaphore): ... class Event: - """Class implementing event objects. - - Events manage a flag that can be set to true with the set() method and reset - to false with the clear() method. The wait() method blocks until the flag is - true. The flag is initially false. - - """ - - def is_set(self) -> bool: - """Return true if and only if the internal flag is true.""" - + def is_set(self) -> bool: ... @deprecated("Deprecated since Python 3.10. Use `is_set()` instead.") - def isSet(self) -> bool: - """Return true if and only if the internal flag is true. - - This method is deprecated, use is_set() instead. - - """ - - def set(self) -> None: - """Set the internal flag to true. - - All threads waiting for it to become true are awakened. Threads - that call wait() once the flag is true will not block at all. - - """ - - def clear(self) -> None: - """Reset the internal flag to false. - - Subsequently, threads calling wait() will block until set() is called to - set the internal flag to true again. - - """ - - def wait(self, timeout: float | None = None) -> bool: - """Block until the internal flag is true. - - If the internal flag is true on entry, return immediately. Otherwise, - block until another thread calls set() to set the flag to true, or until - the optional timeout occurs. - - When the timeout argument is present and not None, it should be a - floating-point number specifying a timeout for the operation in seconds - (or fractions thereof). - - This method returns the internal flag on exit, so it will always return - True except if a timeout is given and the operation times out. - - """ + def isSet(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... 
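For quick reference, a minimal usage sketch of the threading.Event API stubbed above; this is illustrative only (the `stop` and `worker` names are invented here) and is not part of the stub file or of this patch:

    import threading
    import time

    stop = threading.Event()            # internal flag starts false

    def worker() -> None:
        while not stop.is_set():        # run until the flag is set
            time.sleep(0.1)             # stand-in for real work

    t = threading.Thread(target=worker, daemon=True)
    t.start()
    time.sleep(0.5)
    stop.set()                          # wake any waiters and end the loop
    t.join()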
excepthook = _excepthook ExceptHookArgs = _ExceptHookArgs class Timer(Thread): - """Call a function after a specified number of seconds: - - t = Timer(30.0, f, args=None, kwargs=None) - t.start() - t.cancel() # stop the timer's action if it's still waiting - - """ - args: Iterable[Any] # undocumented finished: Event # undocumented function: Callable[..., Any] # undocumented @@ -672,64 +186,18 @@ class Timer(Thread): args: Iterable[Any] | None = None, kwargs: Mapping[str, Any] | None = None, ) -> None: ... - def cancel(self) -> None: - """Stop the timer if it hasn't finished yet.""" + def cancel(self) -> None: ... class Barrier: - """Implements a Barrier. - - Useful for synchronizing a fixed number of threads at known synchronization - points. Threads block on 'wait()' and are simultaneously awoken once they - have all made that call. - - """ - @property - def parties(self) -> int: - """Return the number of threads required to trip the barrier.""" - + def parties(self) -> int: ... @property - def n_waiting(self) -> int: - """Return the number of threads currently waiting at the barrier.""" - + def n_waiting(self) -> int: ... @property - def broken(self) -> bool: - """Return True if the barrier is in a broken state.""" - - def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: - """Create a barrier, initialised to 'parties' threads. - - 'action' is a callable which, when supplied, will be called by one of - the threads after they have all entered the barrier and just prior to - releasing them all. If a 'timeout' is provided, it is used as the - default for all subsequent 'wait()' calls. - - """ - - def wait(self, timeout: float | None = None) -> int: - """Wait for the barrier. - - When the specified number of threads have started waiting, they are all - simultaneously awoken. If an 'action' was provided for the barrier, one - of the threads will have executed that callback prior to returning. - Returns an individual index number from 0 to 'parties-1'. - - """ - - def reset(self) -> None: - """Reset the barrier to the initial state. - - Any threads currently waiting will get the BrokenBarrier exception - raised. - - """ - - def abort(self) -> None: - """Place the barrier into a 'broken' state. - - Useful in case of error. Any currently waiting threads and threads - attempting to 'wait()' will have BrokenBarrierError raised. - - """ + def broken(self) -> bool: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... + def reset(self) -> None: ... + def abort(self) -> None: ... class BrokenBarrierError(RuntimeError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi index a9e363b9f60a6..5665efbba69d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi @@ -1,27 +1,3 @@ -"""This module provides various functions to manipulate time values. - -There are two standard representations of time. One is the number -of seconds since the Epoch, in UTC (a.k.a. GMT). It may be an integer -or a floating-point number (to represent fractions of seconds). -The epoch is the point where the time starts, the return value of time.gmtime(0). -It is January 1, 1970, 00:00:00 (UTC) on all platforms. - -The other representation is a tuple of 9 integers giving local time. 
-The tuple items are: - year (including century, e.g. 1998) - month (1-12) - day (1-31) - hours (0-23) - minutes (0-59) - seconds (0-59) - weekday (0-6, Monday is 0) - Julian day (day in the year, 1-366) - DST (Daylight Savings Time) flag (-1, 0 or 1) -If the DST flag is 0, the time is given in the regular time zone; -if it is 1, the time is given in the DST time zone; -if it is -1, mktime() should guess based on the date and time. -""" - import sys from _typeshed import structseq from typing import Any, Final, Literal, Protocol, final, type_check_only @@ -64,191 +40,45 @@ if sys.platform == "linux": # https://github.com/python/typeshed/pull/6560#discussion_r767162532 @final class struct_time(structseq[Any | int], _TimeTuple): - """The time value as returned by gmtime(), localtime(), and strptime(), and - accepted by asctime(), mktime() and strftime(). May be considered as a - sequence of 9 integers. - - Note that several fields' values are not the same as those defined by - the C language standard for struct tm. For example, the value of the - field tm_year is the actual year, not year - 1900. See individual - fields' descriptions for details. - """ - if sys.version_info >= (3, 10): __match_args__: Final = ("tm_year", "tm_mon", "tm_mday", "tm_hour", "tm_min", "tm_sec", "tm_wday", "tm_yday", "tm_isdst") @property - def tm_year(self) -> int: - """year, for example, 1993""" - + def tm_year(self) -> int: ... @property - def tm_mon(self) -> int: - """month of year, range [1, 12]""" - + def tm_mon(self) -> int: ... @property - def tm_mday(self) -> int: - """day of month, range [1, 31]""" - + def tm_mday(self) -> int: ... @property - def tm_hour(self) -> int: - """hours, range [0, 23]""" - + def tm_hour(self) -> int: ... @property - def tm_min(self) -> int: - """minutes, range [0, 59]""" - + def tm_min(self) -> int: ... @property - def tm_sec(self) -> int: - """seconds, range [0, 61])""" - + def tm_sec(self) -> int: ... @property - def tm_wday(self) -> int: - """day of week, range [0, 6], Monday is 0""" - + def tm_wday(self) -> int: ... @property - def tm_yday(self) -> int: - """day of year, range [1, 366]""" - + def tm_yday(self) -> int: ... @property - def tm_isdst(self) -> int: - """1 if summer time is in effect, 0 if not, and -1 if unknown""" + def tm_isdst(self) -> int: ... # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. @property - def tm_zone(self) -> str: - """abbreviation of timezone name""" - + def tm_zone(self) -> str: ... @property - def tm_gmtoff(self) -> int: - """offset from UTC in seconds""" - -def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: - """asctime([tuple]) -> string - - Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'. - When the time tuple is not present, current time as returned by localtime() - is used. - """ - -def ctime(seconds: float | None = None, /) -> str: - """ctime(seconds) -> string - - Convert a time in seconds since the Epoch to a string in local time. - This is equivalent to asctime(localtime(seconds)). When the time tuple is - not present, current time as returned by localtime() is used. - """ - -def gmtime(seconds: float | None = None, /) -> struct_time: - """gmtime([seconds]) -> (tm_year, tm_mon, tm_mday, tm_hour, tm_min, - tm_sec, tm_wday, tm_yday, tm_isdst) - - Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a. - GMT). When 'seconds' is not passed in, convert the current time instead. 
- - If the platform supports the tm_gmtoff and tm_zone, they are available as - attributes only. - """ - -def localtime(seconds: float | None = None, /) -> struct_time: - """localtime([seconds]) -> (tm_year,tm_mon,tm_mday,tm_hour,tm_min, - tm_sec,tm_wday,tm_yday,tm_isdst) - - Convert seconds since the Epoch to a time tuple expressing local time. - When 'seconds' is not passed in, convert the current time instead. - """ - -def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: - """mktime(tuple) -> floating-point number - - Convert a time tuple in local time to seconds since the Epoch. - Note that mktime(gmtime(0)) will not generally return zero for most - time zones; instead the returned value will either be equal to that - of the timezone or altzone attributes on the time module. - """ - -def sleep(seconds: float, /) -> None: - """sleep(seconds) - - Delay execution for a given number of seconds. The argument may be - a floating-point number for subsecond precision. - """ - -def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: - """strftime(format[, tuple]) -> string - - Convert a time tuple to a string according to a format specification. - See the library reference manual for formatting codes. When the time tuple - is not present, current time as returned by localtime() is used. - - Commonly used format codes: - - %Y Year with century as a decimal number. - %m Month as a decimal number [01,12]. - %d Day of the month as a decimal number [01,31]. - %H Hour (24-hour clock) as a decimal number [00,23]. - %M Minute as a decimal number [00,59]. - %S Second as a decimal number [00,61]. - %z Time zone offset from UTC. - %a Locale's abbreviated weekday name. - %A Locale's full weekday name. - %b Locale's abbreviated month name. - %B Locale's full month name. - %c Locale's appropriate date and time representation. - %I Hour (12-hour clock) as a decimal number [01,12]. - %p Locale's equivalent of either AM or PM. - - Other codes may be available on your platform. See documentation for - the C library strftime function. - """ - -def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: - """strptime(string, format) -> struct_time - - Parse a string to a time tuple according to a format specification. - See the library reference manual for formatting codes (same as - strftime()). - - Commonly used format codes: - - %Y Year with century as a decimal number. - %m Month as a decimal number [01,12]. - %d Day of the month as a decimal number [01,31]. - %H Hour (24-hour clock) as a decimal number [00,23]. - %M Minute as a decimal number [00,59]. - %S Second as a decimal number [00,61]. - %z Time zone offset from UTC. - %a Locale's abbreviated weekday name. - %A Locale's full weekday name. - %b Locale's abbreviated month name. - %B Locale's full month name. - %c Locale's appropriate date and time representation. - %I Hour (12-hour clock) as a decimal number [01,12]. - %p Locale's equivalent of either AM or PM. - - Other codes may be available on your platform. See documentation for - the C library strftime function. - """ - -def time() -> float: - """time() -> floating-point number - - Return the current time in seconds since the Epoch. - Fractions of a second may be present if the system clock provides them. - """ + def tm_gmtoff(self) -> int: ... + +def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... +def ctime(seconds: float | None = None, /) -> str: ... +def gmtime(seconds: float | None = None, /) -> struct_time: ... 
+def localtime(seconds: float | None = None, /) -> struct_time: ... +def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: ... +def sleep(seconds: float, /) -> None: ... +def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... +def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: ... +def time() -> float: ... if sys.platform != "win32": - def tzset() -> None: # Unix only - """tzset() - - Initialize, or reinitialize, the local timezone to the value stored in - os.environ['TZ']. The TZ environment variable should be specified in - standard Unix timezone format as documented in the tzset man page - (eg. 'US/Eastern', 'Europe/Amsterdam'). Unknown timezones will silently - fall back to UTC. If the TZ environment variable is not set, the local - timezone is set to the systems best guess of wallclock time. - Changing the TZ environment variable without calling tzset *may* change - the local timezone used by methods such as localtime, but this behaviour - should not be relied on. - """ + def tzset() -> None: ... # Unix only @type_check_only class _ClockInfo(Protocol): @@ -257,97 +87,26 @@ class _ClockInfo(Protocol): monotonic: bool resolution: float -def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: - """get_clock_info(name: str) -> dict - - Get information of the specified clock. - """ - -def monotonic() -> float: - """monotonic() -> float - - Monotonic clock, cannot go backward. - """ - -def perf_counter() -> float: - """perf_counter() -> float - - Performance counter for benchmarking. - """ - -def process_time() -> float: - """process_time() -> float - - Process time for profiling: sum of the kernel and user-space CPU time. - """ +def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: ... +def monotonic() -> float: ... +def perf_counter() -> float: ... +def process_time() -> float: ... if sys.platform != "win32": - def clock_getres(clk_id: int, /) -> float: # Unix only - """clock_getres(clk_id) -> floating-point number - - Return the resolution (precision) of the specified clock clk_id. - """ - - def clock_gettime(clk_id: int, /) -> float: # Unix only - """Return the time of the specified clock clk_id as a float.""" - - def clock_settime(clk_id: int, time: float, /) -> None: # Unix only - """clock_settime(clk_id, time) - - Set the time of the specified clock clk_id. - """ + def clock_getres(clk_id: int, /) -> float: ... # Unix only + def clock_gettime(clk_id: int, /) -> float: ... # Unix only + def clock_settime(clk_id: int, time: float, /) -> None: ... # Unix only if sys.platform != "win32": - def clock_gettime_ns(clk_id: int, /) -> int: - """Return the time of the specified clock clk_id as nanoseconds (int).""" - - def clock_settime_ns(clock_id: int, time: int, /) -> int: - """clock_settime_ns(clk_id, time) - - Set the time of the specified clock clk_id with nanoseconds. - """ + def clock_gettime_ns(clk_id: int, /) -> int: ... + def clock_settime_ns(clock_id: int, time: int, /) -> int: ... if sys.platform == "linux": - def pthread_getcpuclockid(thread_id: int, /) -> int: - """pthread_getcpuclockid(thread_id) -> int - - Return the clk_id of a thread's CPU time clock. - """ - -def monotonic_ns() -> int: - """monotonic_ns() -> int - - Monotonic clock, cannot go backward, as nanoseconds. 
- """ - -def perf_counter_ns() -> int: - """perf_counter_ns() -> int - - Performance counter for benchmarking as nanoseconds. - """ - -def process_time_ns() -> int: - """process_time() -> int - - Process time for profiling as nanoseconds: - sum of the kernel and user-space CPU time. - """ - -def time_ns() -> int: - """time_ns() -> int - - Return the current time in nanoseconds since the Epoch. - """ - -def thread_time() -> float: - """thread_time() -> float - - Thread time for profiling: sum of the kernel and user-space CPU time. - """ - -def thread_time_ns() -> int: - """thread_time() -> int - - Thread time for profiling as nanoseconds: - sum of the kernel and user-space CPU time. - """ + def pthread_getcpuclockid(thread_id: int, /) -> int: ... + +def monotonic_ns() -> int: ... +def perf_counter_ns() -> int: ... +def process_time_ns() -> int: ... +def time_ns() -> int: ... +def thread_time() -> float: ... +def thread_time_ns() -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi index a24cf2c71bbb0..a5da943c84848 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi @@ -1,51 +1,3 @@ -"""Tool for measuring execution time of small code snippets. - -This module avoids a number of common traps for measuring execution -times. See also Tim Peters' introduction to the Algorithms chapter in -the Python Cookbook, published by O'Reilly. - -Library usage: see the Timer class. - -Command line usage: - python timeit.py [-n N] [-r N] [-s S] [-p] [-h] [--] [statement] - -Options: - -n/--number N: how many times to execute 'statement' (default: see below) - -r/--repeat N: how many times to repeat the timer (default 5) - -s/--setup S: statement to be executed once initially (default 'pass'). - Execution time of this setup statement is NOT timed. - -p/--process: use time.process_time() (default is time.perf_counter()) - -v/--verbose: print raw timing results; repeat for more digits precision - -u/--unit: set the output time unit (nsec, usec, msec, or sec) - -h/--help: print this usage message and exit - --: separate options from statement, use when statement starts with - - statement: statement to be timed (default 'pass') - -A multi-line statement may be given by specifying each line as a -separate argument; indented lines are possible by enclosing an -argument in quotes and using leading spaces. Multiple -s options are -treated similarly. - -If -n is not given, a suitable number of loops is calculated by trying -increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the -total time is at least 0.2 seconds. - -Note: there is a certain baseline overhead associated with executing a -pass statement. It differs between versions. The code here doesn't try -to hide it, but you should be aware of it. The baseline overhead can be -measured by invoking the program without arguments. - -Classes: - - Timer - -Functions: - - timeit(string, string) -> float - repeat(string, string) -> list - default_timer() -> float -""" - from collections.abc import Callable, Sequence from typing import IO, Any from typing_extensions import TypeAlias @@ -58,94 +10,17 @@ _Stmt: TypeAlias = str | Callable[[], object] default_timer: _Timer class Timer: - """Class for timing execution speed of small code snippets. - - The constructor takes a statement to be timed, an additional - statement used for setup, and a timer function. 
Both statements - default to 'pass'; the timer function is platform-dependent (see - module doc string). If 'globals' is specified, the code will be - executed within that namespace (as opposed to inside timeit's - namespace). - - To measure the execution time of the first statement, use the - timeit() method. The repeat() method is a convenience to call - timeit() multiple times and return a list of results. - - The statements may contain newlines, as long as they don't contain - multi-line string literals. - """ - def __init__( self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None - ) -> None: - """Constructor. See class doc string.""" - - def print_exc(self, file: IO[str] | None = None) -> None: - """Helper to print a traceback from the timed code. - - Typical use: - - t = Timer(...) # outside the try/except - try: - t.timeit(...) # or t.repeat(...) - except: - t.print_exc() - - The advantage over the standard traceback is that source lines - in the compiled template will be displayed. - - The optional file argument directs where the traceback is - sent; it defaults to sys.stderr. - """ - - def timeit(self, number: int = 1000000) -> float: - """Time 'number' executions of the main statement. - - To be precise, this executes the setup statement once, and - then returns the time it takes to execute the main statement - a number of times, as float seconds if using the default timer. The - argument is the number of times through the loop, defaulting - to one million. The main statement, the setup statement and - the timer function to be used are passed to the constructor. - """ - - def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: - """Call timeit() a few times. - - This is a convenience function that calls the timeit() - repeatedly, returning a list of results. The first argument - specifies how many times to call timeit(), defaulting to 5; - the second argument specifies the timer argument, defaulting - to one million. - - Note: it's tempting to calculate mean and standard deviation - from the result vector and report these. However, this is not - very useful. In a typical case, the lowest value gives a - lower bound for how fast your machine can run the given code - snippet; higher values in the result vector are typically not - caused by variability in Python's speed, but by other - processes interfering with your timing accuracy. So the min() - of the result is probably the only number you should be - interested in. After that, you should look at the entire - vector and apply common sense rather than statistics. - """ - - def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: - """Return the number of loops and time taken so that total time >= 0.2. - - Calls the timeit method with increasing numbers from the sequence - 1, 2, 5, 10, 20, 50, ... until the time taken is at least 0.2 - second. Returns (number, time_taken). - - If *callback* is given and is not None, it will be called after - each trial with two arguments: ``callback(number, time_taken)``. - """ + ) -> None: ... + def print_exc(self, file: IO[str] | None = None) -> None: ... + def timeit(self, number: int = 1000000) -> float: ... + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ... + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ... 
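For quick reference, a minimal usage sketch of the timeit.Timer methods stubbed above; the statement strings are arbitrary examples, not taken from this patch:

    import timeit

    t = timeit.Timer(stmt="sorted(data)", setup="data = list(range(1000))")
    number, _ = t.autorange()               # pick a loop count that runs for >= 0.2 s
    results = t.repeat(repeat=5, number=number)
    print(min(results) / number)            # best-case per-call time, in seconds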
def timeit( stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None -) -> float: - """Convenience function to create Timer object and call timeit method.""" - +) -> float: ... def repeat( stmt: _Stmt = "pass", setup: _Stmt = "pass", @@ -153,23 +28,5 @@ def repeat( repeat: int = 5, number: int = 1000000, globals: dict[str, Any] | None = None, -) -> list[float]: - """Convenience function to create Timer object and call repeat method.""" - -def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: - """Main program, used when run as a script. - - The optional 'args' argument specifies the command line to be parsed, - defaulting to sys.argv[1:]. - - The return value is an exit code to be passed to sys.exit(); it - may be None to indicate success. - - When an exception happens during timing, a traceback is printed to - stderr and the return value is 1. Exceptions at other times - (including the template compilation) are not caught. - - '_wrap_timer' is an internal interface used for unit testing. If it - is not None, it must be a callable that accepts a timer function - and returns another timer function (used for unit testing). - """ +) -> list[float]: ... +def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 80fe7a95459f3..ef57faa2b0097 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1,35 +1,3 @@ -"""Wrapper functions for Tcl/Tk. - -Tkinter provides classes which allow the display, positioning and -control of widgets. Toplevel widgets are Tk and Toplevel. Other -widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton, -Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox -LabelFrame and PanedWindow. - -Properties of the widgets are specified with keyword arguments. -Keyword arguments have the same name as the corresponding resource -under Tk. - -Widgets are positioned with one of the geometry managers Place, Pack -or Grid. These managers can be called with methods place, pack, grid -available in every Widget. - -Actions are bound to events by resources (e.g. keyword argument -command) or with the method bind. - -Example (Hello, World): -import tkinter -from tkinter.constants import * -tk = tkinter.Tk() -frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2) -frame.pack(fill=BOTH,expand=1) -label = tkinter.Label(frame, text="Hello, World") -label.pack(fill=X, expand=1) -button = tkinter.Button(frame,text="Exit",command=tk.destroy) -button.pack(side=BOTTOM) -tk.mainloop() -""" - import _tkinter import sys from _typeshed import Incomplete, MaybeNone, StrOrBytesPath @@ -225,8 +193,6 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 11): class EventType(StrEnum): - """An enumeration.""" - Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -269,8 +235,6 @@ if sys.version_info >= (3, 11): else: class EventType(str, Enum): - """An enumeration.""" - Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -316,48 +280,6 @@ _W = TypeVar("_W", bound=Misc) _W_co = TypeVar("_W_co", covariant=True, bound=Misc, default=Misc) class Event(Generic[_W_co]): - """Container for the properties of an event. 
- - Instances of this type are generated if one of the following events occurs: - - KeyPress, KeyRelease - for keyboard events - ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events - Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate, - Colormap, Gravity, Reparent, Property, Destroy, Activate, - Deactivate - for window events. - - If a callback function for one of these events is registered - using bind, bind_all, bind_class, or tag_bind, the callback is - called with an Event as first argument. It will have the - following attributes (in braces are the event types for which - the attribute is valid): - - serial - serial number of event - num - mouse button pressed (ButtonPress, ButtonRelease) - focus - whether the window has the focus (Enter, Leave) - height - height of the exposed window (Configure, Expose) - width - width of the exposed window (Configure, Expose) - keycode - keycode of the pressed key (KeyPress, KeyRelease) - state - state of the event as a number (ButtonPress, ButtonRelease, - Enter, KeyPress, KeyRelease, - Leave, Motion) - state - state as a string (Visibility) - time - when the event occurred - x - x-position of the mouse - y - y-position of the mouse - x_root - x-position of the mouse on the screen - (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) - y_root - y-position of the mouse on the screen - (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) - char - pressed character (KeyPress, KeyRelease) - send_event - see X/Windows documentation - keysym - keysym of the event as a string (KeyPress, KeyRelease) - keysym_num - keysym of the event as a number (KeyPress, KeyRelease) - type - type of the event as a number - widget - widget in which the event occurred - delta - delta of wheel movement (MouseWheel) - """ - serial: int num: int focus: bool @@ -378,248 +300,67 @@ class Event(Generic[_W_co]): widget: _W_co delta: int if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... -def NoDefaultRoot() -> None: - """Inhibit setting of default root window. - - Call this function to inhibit that the first instance of - Tk is used for windows without an explicit parent window. - """ +def NoDefaultRoot() -> None: ... class Variable: - """Class to define value holders for e.g. buttons. - - Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations - that constrain the type of the value returned from get(). - """ - - def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: - """Construct a variable - - MASTER can be given as master widget. - VALUE is an optional value (defaults to "") - NAME is an optional Tcl name (defaults to PY_VARnum). - - If NAME matches an existing variable and VALUE is omitted - then the existing value is retained. - """ - - def set(self, value) -> None: - """Set the variable to VALUE.""" + def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: ... + def set(self, value) -> None: ... initialize = set - def get(self): - """Return value of variable.""" - - def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: - """Define a trace callback for the variable. - - Mode is one of "read", "write", "unset", or a list or tuple of - such strings. 
- Callback must be a function which is called when the variable is - read, written or unset. - - Return the name of the callback. - """ - - def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: - """Delete the trace callback for a variable. - - Mode is one of "read", "write", "unset" or a list or tuple of - such strings. Must be same as were specified in trace_add(). - cbname is the name of the callback returned from trace_add(). - """ - - def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: - """Return all trace callback information.""" + def get(self): ... + def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: ... + def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: ... + def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: ... if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") - def trace(self, mode, callback) -> str: - """Define a trace callback for the variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CALLBACK must be a function which is called when - the variable is read, written or undefined. - - Return the name of the callback. - - This deprecated method wraps a deprecated Tcl method removed - in Tcl 9.0. Use trace_add() instead. - """ - + def trace(self, mode, callback) -> str: ... @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") - def trace_variable(self, mode, callback) -> str: - """Define a trace callback for the variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CALLBACK must be a function which is called when - the variable is read, written or undefined. - - Return the name of the callback. - - This deprecated method wraps a deprecated Tcl method removed - in Tcl 9.0. Use trace_add() instead. - """ - + def trace_variable(self, mode, callback) -> str: ... @deprecated("Deprecated since Python 3.14. Use `trace_remove()` instead.") - def trace_vdelete(self, mode, cbname) -> None: - """Delete the trace callback for a variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CBNAME is the name of the callback returned from trace_variable or trace. - - This deprecated method wraps a deprecated Tcl method removed - in Tcl 9.0. Use trace_remove() instead. - """ - + def trace_vdelete(self, mode, cbname) -> None: ... @deprecated("Deprecated since Python 3.14. Use `trace_info()` instead.") - def trace_vinfo(self) -> list[Incomplete]: - """Return all trace callback information. - - This deprecated method wraps a deprecated Tcl method removed - in Tcl 9.0. Use trace_info() instead. - """ + def trace_vinfo(self) -> list[Incomplete]: ... else: - def trace(self, mode, callback) -> str: - """Define a trace callback for the variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CALLBACK must be a function which is called when - the variable is read, written or undefined. - - Return the name of the callback. - - This deprecated method wraps a deprecated Tcl method that will - likely be removed in the future. Use trace_add() instead. - """ - - def trace_variable(self, mode, callback) -> str: - """Define a trace callback for the variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CALLBACK must be a function which is called when - the variable is read, written or undefined. 
- - Return the name of the callback. - - This deprecated method wraps a deprecated Tcl method that will - likely be removed in the future. Use trace_add() instead. - """ - - def trace_vdelete(self, mode, cbname) -> None: - """Delete the trace callback for a variable. - - MODE is one of "r", "w", "u" for read, write, undefine. - CBNAME is the name of the callback returned from trace_variable or trace. - - This deprecated method wraps a deprecated Tcl method that will - likely be removed in the future. Use trace_remove() instead. - """ - - def trace_vinfo(self) -> list[Incomplete]: - """Return all trace callback information. - - This deprecated method wraps a deprecated Tcl method that will - likely be removed in the future. Use trace_info() instead. - """ + def trace(self, mode, callback) -> str: ... + def trace_variable(self, mode, callback) -> str: ... + def trace_vdelete(self, mode, cbname) -> None: ... + def trace_vinfo(self) -> list[Incomplete]: ... def __eq__(self, other: object) -> bool: ... - def __del__(self) -> None: - """Unset the variable in Tcl.""" + def __del__(self) -> None: ... __hash__: ClassVar[None] # type: ignore[assignment] class StringVar(Variable): - """Value holder for strings variables.""" - - def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: - """Construct a string variable. - - MASTER can be given as master widget. - VALUE is an optional value (defaults to "") - NAME is an optional Tcl name (defaults to PY_VARnum). - - If NAME matches an existing variable and VALUE is omitted - then the existing value is retained. - """ - - def set(self, value: str) -> None: - """Set the variable to VALUE.""" + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... + def set(self, value: str) -> None: ... initialize = set - def get(self) -> str: - """Return value of variable as string.""" + def get(self) -> str: ... class IntVar(Variable): - """Value holder for integer variables.""" - - def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: - """Construct an integer variable. - - MASTER can be given as master widget. - VALUE is an optional value (defaults to 0) - NAME is an optional Tcl name (defaults to PY_VARnum). - - If NAME matches an existing variable and VALUE is omitted - then the existing value is retained. - """ - - def set(self, value: int) -> None: - """Set the variable to VALUE.""" + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... + def set(self, value: int) -> None: ... initialize = set - def get(self) -> int: - """Return the value of the variable as an integer.""" + def get(self) -> int: ... class DoubleVar(Variable): - """Value holder for float variables.""" - - def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: - """Construct a float variable. - - MASTER can be given as master widget. - VALUE is an optional value (defaults to 0.0) - NAME is an optional Tcl name (defaults to PY_VARnum). - - If NAME matches an existing variable and VALUE is omitted - then the existing value is retained. - """ - - def set(self, value: float) -> None: - """Set the variable to VALUE.""" + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... + def set(self, value: float) -> None: ... 
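[Editorial illustration, not part of the patch.] The trace_add/trace_remove signatures kept above constrain the mode to the literals "array", "read", "write" and "unset" and type the callback as three strings, returning the Tcl callback name. A minimal usage sketch against those signatures (names such as on_write are invented for illustration):

    import tkinter as tk

    root = tk.Tk()
    var = tk.StringVar(master=root, value="hello")

    def on_write(name: str, index: str, mode: str) -> None:
        # Runs whenever the variable is written; read the new value via get().
        print("new value:", var.get())

    cbname = var.trace_add("write", on_write)  # returns the Tcl callback name
    var.set("world")                           # fires on_write
    var.trace_remove("write", cbname)          # same mode plus the returned name
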
initialize = set - def get(self) -> float: - """Return the value of the variable as a float.""" + def get(self) -> float: ... class BooleanVar(Variable): - """Value holder for boolean variables.""" - - def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: - """Construct a boolean variable. - - MASTER can be given as master widget. - VALUE is an optional value (defaults to False) - NAME is an optional Tcl name (defaults to PY_VARnum). - - If NAME matches an existing variable and VALUE is omitted - then the existing value is retained. - """ - - def set(self, value: bool) -> None: - """Set the variable to VALUE.""" + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... + def set(self, value: bool) -> None: ... initialize = set - def get(self) -> bool: - """Return the value of the variable as a bool.""" + def get(self) -> bool: ... -def mainloop(n: int = 0) -> None: - """Run the main loop of Tcl.""" +def mainloop(n: int = 0) -> None: ... getint = int getdouble = float -def getboolean(s) -> bool: - """Convert Tcl object to True or False.""" +def getboolean(s) -> bool: ... _Ts = TypeVarTuple("_Ts") @@ -635,596 +376,140 @@ class _BusyInfo(TypedDict): cursor: _Cursor class Misc: - """Internal class. - - Base class which defines methods common for interior widgets. - """ - master: Misc | None tk: _tkinter.TkappType children: dict[str, Widget] - def destroy(self) -> None: - """Internal function. - - Delete all Tcl commands created for - this widget in the Tcl interpreter. - """ - - def deletecommand(self, name: str) -> None: - """Internal function. - - Delete the Tcl command provided in NAME. - """ - - def tk_strictMotif(self, boolean=None): - """Set Tcl internal variable, whether the look and feel - should adhere to Motif. - - A parameter of 1 means adhere to Motif (e.g. no color - change if mouse passes over slider). - Returns the set value. - """ - - def tk_bisque(self) -> None: - """Change the color scheme to light brown as used in Tk 3.6 and before.""" - - def tk_setPalette(self, *args, **kw) -> None: - """Set a new color scheme for all widget elements. - - A single color as argument will cause that all colors of Tk - widget elements are derived from this. - Alternatively several keyword parameters and its associated - colors can be given. The following keywords are valid: - activeBackground, foreground, selectColor, - activeForeground, highlightBackground, selectBackground, - background, highlightColor, selectForeground, - disabledForeground, insertBackground, troughColor. - """ - - def wait_variable(self, name: str | Variable = "PY_VAR") -> None: - """Wait until the variable is modified. - - A parameter of type IntVar, StringVar, DoubleVar or - BooleanVar must be given. - """ + def destroy(self) -> None: ... + def deletecommand(self, name: str) -> None: ... + def tk_strictMotif(self, boolean=None): ... + def tk_bisque(self) -> None: ... + def tk_setPalette(self, *args, **kw) -> None: ... + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... waitvar = wait_variable - def wait_window(self, window: Misc | None = None) -> None: - """Wait until a WIDGET is destroyed. - - If no parameter is given self is used. - """ - - def wait_visibility(self, window: Misc | None = None) -> None: - """Wait until the visibility of a WIDGET changes - (e.g. it appears). - - If no parameter is given self is used. 
- """ - - def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: - """Set Tcl variable NAME to VALUE.""" - - def getvar(self, name: str = "PY_VAR"): - """Return value of Tcl variable NAME.""" - + def wait_window(self, window: Misc | None = None) -> None: ... + def wait_visibility(self, window: Misc | None = None) -> None: ... + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... + def getvar(self, name: str = "PY_VAR"): ... def getint(self, s) -> int: ... def getdouble(self, s) -> float: ... - def getboolean(self, s) -> bool: - """Return a boolean value for Tcl boolean values true and false given as parameter.""" - - def focus_set(self) -> None: - """Direct input focus to this widget. - - If the application currently does not have the focus - this widget will get the focus if the application gets - the focus through the window manager. - """ + def getboolean(self, s) -> bool: ... + def focus_set(self) -> None: ... focus = focus_set - def focus_force(self) -> None: - """Direct input focus to this widget even if the - application does not have the focus. Use with - caution! - """ - - def focus_get(self) -> Misc | None: - """Return the widget which has currently the focus in the - application. - - Use focus_displayof to allow working with several - displays. Return None if application does not have - the focus. - """ - - def focus_displayof(self) -> Misc | None: - """Return the widget which has currently the focus on the - display where this widget is located. - - Return None if the application does not have the focus. - """ - - def focus_lastfor(self) -> Misc | None: - """Return the widget which would have the focus if top level - for this widget gets the focus from the window manager. - """ - - def tk_focusFollowsMouse(self) -> None: - """The widget under mouse will get automatically focus. Can not - be disabled easily. - """ - - def tk_focusNext(self) -> Misc | None: - """Return the next widget in the focus order which follows - widget which has currently the focus. - - The focus order first goes to the next child, then to - the children of the child recursively and then to the - next sibling which is higher in the stacking order. A - widget is omitted if it has the takefocus resource set - to 0. - """ - - def tk_focusPrev(self) -> Misc | None: - """Return previous widget in the focus order. See tk_focusNext for details.""" + def focus_force(self) -> None: ... + def focus_get(self) -> Misc | None: ... + def focus_displayof(self) -> Misc | None: ... + def focus_lastfor(self) -> Misc | None: ... + def tk_focusFollowsMouse(self) -> None: ... + def tk_focusNext(self) -> Misc | None: ... + def tk_focusPrev(self) -> Misc | None: ... # .after() can be called without the "func" argument, but it is basically never what you want. # It behaves like time.sleep() and freezes the GUI app. - def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: - """Call function once after given time. - - MS specifies the time in milliseconds. FUNC gives the - function which shall be called. Additional parameters - are given as parameters to the function call. Return - identifier to cancel scheduling with after_cancel. - """ + def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... 
# after_idle is essentially partialmethod(after, "idle") - def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: - """Call FUNC once if the Tcl main loop has no event to - process. - - Return an identifier to cancel the scheduling with - after_cancel. - """ - - def after_cancel(self, id: str) -> None: - """Cancel scheduling of function identified with ID. - - Identifier returned by after or after_idle must be - given as first parameter. - """ + def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... + def after_cancel(self, id: str) -> None: ... if sys.version_info >= (3, 13): - def after_info(self, id: str | None = None) -> tuple[str, ...]: - """Return information about existing event handlers. - - With no argument, return a tuple of the identifiers for all existing - event handlers created by the after and after_idle commands for this - interpreter. If id is supplied, it specifies an existing handler; id - must have been the return value from some previous call to after or - after_idle and it must not have triggered yet or been canceled. If the - id doesn't exist, a TclError is raised. Otherwise, the return value is - a tuple containing (script, type) where script is a reference to the - function to be called by the event handler and type is either 'idle' - or 'timer' to indicate what kind of event handler it is. - """ + def after_info(self, id: str | None = None) -> tuple[str, ...]: ... - def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: - """Ring a display's bell.""" + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... if sys.version_info >= (3, 13): # Supports options from `_BusyInfo`` - def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: - """Return the value of busy configuration option. - - The widget must have been previously made busy by - tk_busy_hold(). Option may have any of the values accepted by - tk_busy_hold(). - """ + def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: ... busy_cget = tk_busy_cget - def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: - """Query or modify the busy configuration options. - - The widget must have been previously made busy by - tk_busy_hold(). Options may have any of the values accepted by - tk_busy_hold(). - - Please note that the option database is referenced by the widget - name or class. For example, if a Frame widget with name "frame" - is to be made busy, the busy cursor can be specified for it by - either call: - - w.option_add('*frame.busyCursor', 'gumby') - w.option_add('*Frame.BusyCursor', 'gumby') - """ + def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: ... tk_busy_config = tk_busy_configure busy_configure = tk_busy_configure busy_config = tk_busy_configure - def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: - """Return a list of widgets that are currently busy. - - If a pattern is given, only busy widgets whose path names match - a pattern are returned. - """ + def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: ... busy_current = tk_busy_current - def tk_busy_forget(self) -> None: - """Make this widget no longer busy. - - User events will again be received by the widget. - """ + def tk_busy_forget(self) -> None: ... busy_forget = tk_busy_forget - def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: - """Make this widget appear busy. - - The specified widget and its descendants will be blocked from - user interactions. 
Normally update() should be called - immediately afterward to insure that the hold operation is in - effect before the application starts its processing. - - The only supported configuration option is: - - cursor: the cursor to be displayed when the widget is made - busy. - """ + def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: ... tk_busy = tk_busy_hold busy_hold = tk_busy_hold busy = tk_busy_hold - def tk_busy_status(self) -> bool: - """Return True if the widget is busy, False otherwise.""" + def tk_busy_status(self) -> bool: ... busy_status = tk_busy_status - def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: - """Retrieve data from the clipboard on window's display. - - The window keyword defaults to the root window of the Tkinter - application. - - The type keyword specifies the form in which the data is - to be returned and should be an atom name such as STRING - or FILE_NAME. Type defaults to STRING, except on X11, where the default - is to try UTF8_STRING and fall back to STRING. - - This command is equivalent to: - - selection_get(CLIPBOARD) - """ - - def clipboard_clear(self, *, displayof: Misc = ...) -> None: - """Clear the data in the Tk clipboard. - - A widget specified for the optional displayof keyword - argument specifies the target display. - """ - - def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: - """Append STRING to the Tk clipboard. - - A widget specified at the optional displayof keyword - argument specifies the target display. The clipboard - can be retrieved with selection_get. - """ - - def grab_current(self): - """Return widget which has currently the grab in this application - or None. - """ - - def grab_release(self) -> None: - """Release grab for this widget if currently set.""" - - def grab_set(self) -> None: - """Set grab for this widget. - - A grab directs all events to this and descendant - widgets in the application. - """ - - def grab_set_global(self) -> None: - """Set global grab for this widget. - - A global grab directs all events to this and - descendant widgets on the display. Use with caution - - other applications do not get events anymore. - """ - - def grab_status(self) -> Literal["local", "global"] | None: - """Return None, "local" or "global" if this widget has - no, a local or a global grab. - """ - + def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... + def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... + def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... + def grab_current(self): ... + def grab_release(self) -> None: ... + def grab_set(self) -> None: ... + def grab_set_global(self) -> None: ... + def grab_status(self) -> Literal["local", "global"] | None: ... def option_add( self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None - ) -> None: - """Set a VALUE (second parameter) for an option - PATTERN (first parameter). - - An optional third parameter gives the numeric priority - (defaults to 80). - """ - - def option_clear(self) -> None: - """Clear the option database. - - It will be reloaded if option_add is called. - """ - - def option_get(self, name, className): - """Return the value for an option NAME for this widget - with CLASSNAME. - - Values with higher priority override lower values. 
- """ - - def option_readfile(self, fileName, priority=None) -> None: - """Read file FILENAME into the option database. - - An optional second parameter gives the numeric - priority. - """ - - def selection_clear(self, **kw) -> None: - """Clear the current X selection.""" - - def selection_get(self, **kw): - """Return the contents of the current X selection. - - A keyword parameter selection specifies the name of - the selection and defaults to PRIMARY. A keyword - parameter displayof specifies a widget on the display - to use. A keyword parameter type specifies the form of data to be - fetched, defaulting to STRING except on X11, where UTF8_STRING is tried - before STRING. - """ - - def selection_handle(self, command, **kw) -> None: - """Specify a function COMMAND to call if the X - selection owned by this widget is queried by another - application. - - This function must return the contents of the - selection. The function will be called with the - arguments OFFSET and LENGTH which allows the chunking - of very long selections. The following keyword - parameters can be provided: - selection - name of the selection (default PRIMARY), - type - type of the selection (e.g. STRING, FILE_NAME). - """ - - def selection_own(self, **kw) -> None: - """Become owner of X selection. - - A keyword parameter selection specifies the name of - the selection (default PRIMARY). - """ - - def selection_own_get(self, **kw): - """Return owner of X selection. - - The following keyword parameter can - be provided: - selection - name of the selection (default PRIMARY), - type - type of the selection (e.g. STRING, FILE_NAME). - """ - - def send(self, interp, cmd, *args): - """Send Tcl command CMD to different interpreter INTERP to be executed.""" - - def lower(self, belowThis=None) -> None: - """Lower this widget in the stacking order.""" - - def tkraise(self, aboveThis=None) -> None: - """Raise this widget in the stacking order.""" + ) -> None: ... + def option_clear(self) -> None: ... + def option_get(self, name, className): ... + def option_readfile(self, fileName, priority=None) -> None: ... + def selection_clear(self, **kw) -> None: ... + def selection_get(self, **kw): ... + def selection_handle(self, command, **kw) -> None: ... + def selection_own(self, **kw) -> None: ... + def selection_own_get(self, **kw): ... + def send(self, interp, cmd, *args): ... + def lower(self, belowThis=None) -> None: ... + def tkraise(self, aboveThis=None) -> None: ... 
lift = tkraise if sys.version_info >= (3, 11): - def info_patchlevel(self) -> _VersionInfoType: - """Returns the exact version of the Tcl library.""" - - def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: - """Return integer which represents atom NAME.""" - - def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: - """Return name of atom with identifier ID.""" - - def winfo_cells(self) -> int: - """Return number of cells in the colormap for this widget.""" - - def winfo_children(self) -> list[Widget | Toplevel]: - """Return a list of all widgets which are children of this widget.""" - - def winfo_class(self) -> str: - """Return window class name of this widget.""" - - def winfo_colormapfull(self) -> bool: - """Return True if at the last color request the colormap was full.""" - - def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: - """Return the widget which is at the root coordinates ROOTX, ROOTY.""" - - def winfo_depth(self) -> int: - """Return the number of bits per pixel.""" - - def winfo_exists(self) -> bool: - """Return true if this widget exists.""" - - def winfo_fpixels(self, number: float | str) -> float: - """Return the number of pixels for the given distance NUMBER - (e.g. "3c") as float. - """ - - def winfo_geometry(self) -> str: - """Return geometry string for this widget in the form "widthxheight+X+Y".""" - - def winfo_height(self) -> int: - """Return height of this widget.""" - - def winfo_id(self) -> int: - """Return identifier ID for this widget.""" - - def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: - """Return the name of all Tcl interpreters for this display.""" - - def winfo_ismapped(self) -> bool: - """Return true if this widget is mapped.""" - - def winfo_manager(self) -> str: - """Return the window manager name for this widget.""" - - def winfo_name(self) -> str: - """Return the name of this widget.""" - - def winfo_parent(self) -> str: # return value needs nametowidget() - """Return the name of the parent of this widget.""" - - def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): - """Return the pathname of the widget given by ID.""" - - def winfo_pixels(self, number: float | str) -> int: - """Rounded integer value of winfo_fpixels.""" - - def winfo_pointerx(self) -> int: - """Return the x coordinate of the pointer on the root window.""" - - def winfo_pointerxy(self) -> tuple[int, int]: - """Return a tuple of x and y coordinates of the pointer on the root window.""" - - def winfo_pointery(self) -> int: - """Return the y coordinate of the pointer on the root window.""" - - def winfo_reqheight(self) -> int: - """Return requested height of this widget.""" - - def winfo_reqwidth(self) -> int: - """Return requested width of this widget.""" - - def winfo_rgb(self, color: str) -> tuple[int, int, int]: - """Return a tuple of integer RGB values in range(65536) for color in this widget.""" - - def winfo_rootx(self) -> int: - """Return x coordinate of upper left corner of this widget on the - root window. - """ - - def winfo_rooty(self) -> int: - """Return y coordinate of upper left corner of this widget on the - root window. - """ - - def winfo_screen(self) -> str: - """Return the screen name of this widget.""" - - def winfo_screencells(self) -> int: - """Return the number of the cells in the colormap of the screen - of this widget. 
- """ - - def winfo_screendepth(self) -> int: - """Return the number of bits per pixel of the root window of the - screen of this widget. - """ - - def winfo_screenheight(self) -> int: - """Return the number of pixels of the height of the screen of this widget - in pixel. - """ - - def winfo_screenmmheight(self) -> int: - """Return the number of pixels of the height of the screen of - this widget in mm. - """ - - def winfo_screenmmwidth(self) -> int: - """Return the number of pixels of the width of the screen of - this widget in mm. - """ - - def winfo_screenvisual(self) -> str: - """Return one of the strings directcolor, grayscale, pseudocolor, - staticcolor, staticgray, or truecolor for the default - colormodel of this screen. - """ - - def winfo_screenwidth(self) -> int: - """Return the number of pixels of the width of the screen of - this widget in pixel. - """ - - def winfo_server(self) -> str: - """Return information of the X-Server of the screen of this widget in - the form "XmajorRminor vendor vendorVersion". - """ - - def winfo_toplevel(self) -> Tk | Toplevel: - """Return the toplevel widget of this widget.""" - - def winfo_viewable(self) -> bool: - """Return true if the widget and all its higher ancestors are mapped.""" - - def winfo_visual(self) -> str: - """Return one of the strings directcolor, grayscale, pseudocolor, - staticcolor, staticgray, or truecolor for the - colormodel of this widget. - """ - - def winfo_visualid(self) -> str: - """Return the X identifier for the visual for this widget.""" - - def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: - """Return a list of all visuals available for the screen - of this widget. - - Each item in the list consists of a visual name (see winfo_visual), a - depth and if includeids is true is given also the X identifier. - """ - - def winfo_vrootheight(self) -> int: - """Return the height of the virtual root window associated with this - widget in pixels. If there is no virtual root window return the - height of the screen. - """ - - def winfo_vrootwidth(self) -> int: - """Return the width of the virtual root window associated with this - widget in pixel. If there is no virtual root window return the - width of the screen. - """ - - def winfo_vrootx(self) -> int: - """Return the x offset of the virtual root relative to the root - window of the screen of this widget. - """ - - def winfo_vrooty(self) -> int: - """Return the y offset of the virtual root relative to the root - window of the screen of this widget. - """ - - def winfo_width(self) -> int: - """Return the width of this widget.""" - - def winfo_x(self) -> int: - """Return the x coordinate of the upper left corner of this widget - in the parent. - """ - - def winfo_y(self) -> int: - """Return the y coordinate of the upper left corner of this widget - in the parent. - """ - - def update(self) -> None: - """Enter event loop until all pending events have been processed by Tcl.""" - - def update_idletasks(self) -> None: - """Enter event loop until all idle callbacks have been called. This - will update the display of windows but not process events caused by - the user. - """ - - @overload - def bindtags(self, tagList: None = None) -> tuple[str, ...]: - """Set or get the list of bindtags for this widget. - - With no argument return the list of all bindtags associated with - this widget. With a list of strings as argument the bindtags are - set to this list. The bindtags determine in which order events are - processed (see bind). 
- """ - + def info_patchlevel(self) -> _VersionInfoType: ... + + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... + def winfo_cells(self) -> int: ... + def winfo_children(self) -> list[Widget | Toplevel]: ... + def winfo_class(self) -> str: ... + def winfo_colormapfull(self) -> bool: ... + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... + def winfo_depth(self) -> int: ... + def winfo_exists(self) -> bool: ... + def winfo_fpixels(self, number: float | str) -> float: ... + def winfo_geometry(self) -> str: ... + def winfo_height(self) -> int: ... + def winfo_id(self) -> int: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... + def winfo_ismapped(self) -> bool: ... + def winfo_manager(self) -> str: ... + def winfo_name(self) -> str: ... + def winfo_parent(self) -> str: ... # return value needs nametowidget() + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... + def winfo_pixels(self, number: float | str) -> int: ... + def winfo_pointerx(self) -> int: ... + def winfo_pointerxy(self) -> tuple[int, int]: ... + def winfo_pointery(self) -> int: ... + def winfo_reqheight(self) -> int: ... + def winfo_reqwidth(self) -> int: ... + def winfo_rgb(self, color: str) -> tuple[int, int, int]: ... + def winfo_rootx(self) -> int: ... + def winfo_rooty(self) -> int: ... + def winfo_screen(self) -> str: ... + def winfo_screencells(self) -> int: ... + def winfo_screendepth(self) -> int: ... + def winfo_screenheight(self) -> int: ... + def winfo_screenmmheight(self) -> int: ... + def winfo_screenmmwidth(self) -> int: ... + def winfo_screenvisual(self) -> str: ... + def winfo_screenwidth(self) -> int: ... + def winfo_server(self) -> str: ... + def winfo_toplevel(self) -> Tk | Toplevel: ... + def winfo_viewable(self) -> bool: ... + def winfo_visual(self) -> str: ... + def winfo_visualid(self) -> str: ... + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... + def winfo_vrootheight(self) -> int: ... + def winfo_vrootwidth(self) -> int: ... + def winfo_vrootx(self) -> int: ... + def winfo_vrooty(self) -> int: ... + def winfo_width(self) -> int: ... + def winfo_x(self) -> int: ... + def winfo_y(self) -> int: ... + def update(self) -> None: ... + def update_idletasks(self) -> None: ... + @overload + def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... @overload def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... # bind with isinstance(func, str) doesn't return anything, but all other @@ -1235,46 +520,7 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to this widget at event SEQUENCE a call to function FUNC. - - SEQUENCE is a string of concatenated event - patterns. An event pattern is of the form - where MODIFIER is one - of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, - Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, - B3, Alt, Button4, B4, Double, Button5, B5 Triple, - Mod1, M1. 
TYPE is one of Activate, Enter, Map, - ButtonPress, Button, Expose, Motion, ButtonRelease - FocusIn, MouseWheel, Circulate, FocusOut, Property, - Colormap, Gravity Reparent, Configure, KeyPress, Key, - Unmap, Deactivate, KeyRelease Visibility, Destroy, - Leave and DETAIL is the button number for ButtonPress, - ButtonRelease and DETAIL is the Keysym for KeyPress and - KeyRelease. Examples are - for pressing Control and mouse button 1 or - for pressing A and the Alt key (KeyPress can be omitted). - An event pattern can also be a virtual event of the form - <> where AString can be arbitrary. This - event can be generated by event_generate. - If events are concatenated they must appear shortly - after each other. - - FUNC will be called if the event sequence occurs with an - instance of Event as argument. If the return value of FUNC is - "break" no further bound function is invoked. - - An additional boolean parameter ADD specifies whether FUNC will - be called additionally to the other bound function or whether - it will replace the previous function. - - Bind will return an identifier to allow deletion of the bound function with - unbind without memory leak. - - If FUNC or SEQUENCE is omitted the bound function or list - of bound events are returned. - """ - + ) -> str: ... @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -1287,13 +533,7 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to all widgets at an event SEQUENCE a call to function FUNC. - An additional boolean parameter ADD specifies whether FUNC will - be called additionally to the other bound function or whether - it will replace the previous function. See bind for the return value. - """ - + ) -> str: ... @overload def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -1305,99 +545,34 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to widgets with bindtag CLASSNAME at event - SEQUENCE a call of function FUNC. An additional - boolean parameter ADD specifies whether FUNC will be - called additionally to the other bound function or - whether it will replace the previous function. See bind for - the return value. - """ - + ) -> str: ... @overload def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def unbind(self, sequence: str, funcid: str | None = None) -> None: - """Unbind for this widget the event SEQUENCE. - - If FUNCID is given, only unbind the function identified with FUNCID - and also delete the corresponding Tcl command. - - Otherwise destroy the current binding for SEQUENCE, leaving SEQUENCE - unbound. - """ - - def unbind_all(self, sequence: str) -> None: - """Unbind for all widgets for event SEQUENCE all functions.""" - - def unbind_class(self, className: str, sequence: str) -> None: - """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE - all functions. - """ - - def mainloop(self, n: int = 0) -> None: - """Call the mainloop of Tk.""" - - def quit(self) -> None: - """Quit the Tcl interpreter. 
All widgets will be destroyed.""" - + def unbind(self, sequence: str, funcid: str | None = None) -> None: ... + def unbind_all(self, sequence: str) -> None: ... + def unbind_class(self, className: str, sequence: str) -> None: ... + def mainloop(self, n: int = 0) -> None: ... + def quit(self) -> None: ... @property - def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: - """Internal function.""" - - def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: - """Return the Tkinter instance of a widget identified by - its Tcl name NAME. - """ - + def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... + def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... def register( self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 - ) -> str: - """Return a newly created Tcl function. If this - function is called, the Python function FUNC will - be executed. An optional function SUBST can - be given which will be executed before FUNC. - """ - - def keys(self) -> list[str]: - """Return a list of all resource names of this widget.""" - + ) -> str: ... + def keys(self) -> list[str]: ... @overload - def pack_propagate(self, flag: bool) -> bool | None: - """Set or get the status for propagation of geometry information. - - A boolean argument specifies whether the geometry information - of the slaves will determine the size of this widget. If no argument - is given the current setting will be returned. - """ - + def pack_propagate(self, flag: bool) -> bool | None: ... @overload def pack_propagate(self) -> None: ... propagate = pack_propagate - def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: - """The anchor value controls how to place the grid within the - master when no row/column has any weight. - - The default anchor is nw. - """ + def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: ... anchor = grid_anchor @overload def grid_bbox( self, column: None = None, row: None = None, col2: None = None, row2: None = None - ) -> tuple[int, int, int, int] | None: - """Return a tuple of integer coordinates for the bounding - box of this widget controlled by the geometry manager grid. - - If COLUMN, ROW is given the bounding box applies from - the cell with row and column 0 to the specified - cell. If COL2 and ROW2 are given the bounding box - starts at that cell. - - The returned integers specify the offset of the upper left - corner in the master widget and the width and height. - """ - + ) -> tuple[int, int, int, int] | None: ... @overload def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... @overload @@ -1412,14 +587,7 @@ class Misc: pad: float | str = ..., uniform: str = ..., weight: int = ..., - ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check - """Configure column INDEX of a grid. - - Valid resources are minsize (minimum size of the column), - weight (how much does additional space propagate to this column) - and pad (how much space to let additionally). - """ - + ) -> _GridIndexInfo | MaybeNone: ... 
# can be None but annoying to check def grid_rowconfigure( self, index: int | str | list[int] | tuple[int, ...], @@ -1429,60 +597,23 @@ class Misc: pad: float | str = ..., uniform: str = ..., weight: int = ..., - ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check - """Configure row INDEX of a grid. - - Valid resources are minsize (minimum size of the row), - weight (how much does additional space propagate to this row) - and pad (how much space to let additionally). - """ + ) -> _GridIndexInfo | MaybeNone: ... # can be None but annoying to check columnconfigure = grid_columnconfigure rowconfigure = grid_rowconfigure - def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: - """Return a tuple of column and row which identify the cell - at which the pixel at position X and Y inside the master - widget is located. - """ - + def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: ... @overload - def grid_propagate(self, flag: bool) -> None: - """Set or get the status for propagation of geometry information. - - A boolean argument specifies whether the geometry information - of the slaves will determine the size of this widget. If no argument - is given, the current setting will be returned. - """ - + def grid_propagate(self, flag: bool) -> None: ... @overload def grid_propagate(self) -> bool: ... - def grid_size(self) -> tuple[int, int]: - """Return a tuple of the number of column and rows in the grid.""" + def grid_size(self) -> tuple[int, int]: ... size = grid_size # Widget because Toplevel or Tk is never a slave - def pack_slaves(self) -> list[Widget]: - """Return a list of all slaves of this widget - in its packing order. - """ - - def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: - """Return a list of all slaves of this widget - in its packing order. - """ - - def place_slaves(self) -> list[Widget]: - """Return a list of all slaves of this widget - in its packing order. - """ + def pack_slaves(self) -> list[Widget]: ... + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... + def place_slaves(self) -> list[Widget]: ... slaves = pack_slaves - def event_add(self, virtual: str, *sequences: str) -> None: - """Bind a virtual event VIRTUAL (of the form <>) - to an event SEQUENCE such that the virtual event is triggered - whenever SEQUENCE occurs. - """ - - def event_delete(self, virtual: str, *sequences: str) -> None: - """Unbind a virtual event VIRTUAL from SEQUENCE.""" - + def event_add(self, virtual: str, *sequences: str) -> None: ... + def event_delete(self, virtual: str, *sequences: str) -> None: ... def event_generate( self, sequence: str, @@ -1514,101 +645,44 @@ class Misc: when: Literal["now", "tail", "head", "mark"] = ..., x: float | str = ..., y: float | str = ..., - ) -> None: - """Generate an event SEQUENCE. Additional - keyword arguments specify parameter of the event - (e.g. x, y, rootx, rooty). - """ - - def event_info(self, virtual: str | None = None) -> tuple[str, ...]: - """Return a list of all virtual events or the information - about the SEQUENCE bound to the virtual event VIRTUAL. - """ - - def image_names(self) -> tuple[str, ...]: - """Return a list of all existing image names.""" - - def image_types(self) -> tuple[str, ...]: - """Return a list of all available image types (e.g. photo bitmap).""" + ) -> None: ... + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... + def image_names(self) -> tuple[str, ...]: ... 
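[Editorial illustration, not part of the patch.] The bind() overloads above lose a long docstring describing event patterns such as <Control-Button-1> and the Event instance passed to the callback, and grid_columnconfigure() loses the note about its minsize/weight/pad resources. A combined sketch against the typed signatures (the string annotation avoids subscripting Event at runtime before 3.14):

    import tkinter as tk

    root = tk.Tk()
    root.grid_columnconfigure(0, weight=1)  # let column 0 absorb extra width

    def on_click(event: "tk.Event[tk.Misc]") -> None:
        # event.x / event.y are the pointer coordinates inside the widget.
        print("clicked at", event.x, event.y)

    funcid = root.bind("<Control-Button-1>", on_click, add="+")  # returns a funcid
    # root.unbind("<Control-Button-1>", funcid)                  # drop only this binding
    root.mainloop()
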
+ def image_types(self) -> tuple[str, ...]: ... # See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... - def __getitem__(self, key: str) -> Any: - """Return the resource value for a KEY given as string.""" - - def cget(self, key: str) -> Any: - """Return the resource value for a KEY given as string.""" - - def configure(self, cnf: Any = None) -> Any: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ + def __getitem__(self, key: str) -> Any: ... + def cget(self, key: str) -> Any: ... + def configure(self, cnf: Any = None) -> Any: ... # TODO: config is an alias of configure, but adding that here creates # conflict with the type of config in the subclasses. See #13149 class CallWrapper: - """Internal class. Stores function to call when some user - defined Tcl function is called e.g. after an event occurred. - """ - func: Incomplete subst: Incomplete widget: Incomplete - def __init__(self, func, subst, widget) -> None: - """Store FUNC, SUBST and WIDGET as members.""" - - def __call__(self, *args): - """Apply first function SUBST to arguments, than FUNC.""" + def __init__(self, func, subst, widget) -> None: ... + def __call__(self, *args): ... class XView: - """Mix-in class for querying and changing the horizontal position - of a widget's window. - """ - @overload - def xview(self) -> tuple[float, float]: - """Query and change the horizontal position of the view.""" - + def xview(self) -> tuple[float, float]: ... @overload def xview(self, *args) -> None: ... - def xview_moveto(self, fraction: float) -> None: - """Adjusts the view in the window so that FRACTION of the - total width of the canvas is off-screen to the left. - """ - + def xview_moveto(self, fraction: float) -> None: ... @overload - def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: - """Shift the x-view according to NUMBER which is measured in "units" - or "pages" (WHAT). - """ - + def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... @overload def xview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... class YView: - """Mix-in class for querying and changing the vertical position - of a widget's window. - """ - @overload - def yview(self) -> tuple[float, float]: - """Query and change the vertical position of the view.""" - + def yview(self) -> tuple[float, float]: ... @overload def yview(self, *args) -> None: ... - def yview_moveto(self, fraction: float) -> None: - """Adjusts the view in the window so that FRACTION of the - total height of the canvas is off-screen to the top. - """ - + def yview_moveto(self, fraction: float) -> None: ... @overload - def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: - """Shift the y-view according to NUMBER which is measured in - "units" or "pages" (WHAT). - """ - + def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... @overload def yview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... @@ -1645,15 +719,8 @@ else: type: str class Wm: - """Provides functions for the communication with the window manager.""" - @overload - def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: - """Instruct the window manager to set the aspect ratio (width/height) - of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. 
Return a tuple - of the actual values if no argument is given. - """ - + def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... @overload def wm_aspect( self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None @@ -1661,79 +728,23 @@ class Wm: aspect = wm_aspect if sys.version_info >= (3, 13): @overload - def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: ... @overload def wm_attributes(self, *, return_python_dict: Literal[True]) -> _WmAttributes: ... else: @overload - def wm_attributes(self) -> tuple[Any, ...]: - """This subcommand returns or sets platform specific attributes - - The first form returns a list of the platform specific flags and - their values. The second form returns the value for the specific - option. The third form sets one or more of the values. The values - are as follows: - - On Windows, -disabled gets or sets whether the window is in a - disabled state. -toolwindow gets or sets the style of the window - to toolwindow (as defined in the MSDN). -topmost gets or sets - whether this is a topmost window (displays above all other - windows). - - On Macintosh, XXXXX - - On Unix, there are currently no special attribute values. - """ + def wm_attributes(self) -> tuple[Any, ...]: ... @overload - def wm_attributes(self, option: Literal["-alpha"], /) -> float: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-alpha"], /) -> float: ... @overload def wm_attributes(self, option: Literal["-fullscreen"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["-topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["-modified"], /) -> bool: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-modified"], /) -> bool: ... 
@overload def wm_attributes(self, option: Literal["-notify"], /) -> bool: ... @overload @@ -1744,20 +755,7 @@ class Wm: def wm_attributes(self, option: Literal["-type"], /) -> str: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: ... @overload def wm_attributes(self, option: Literal["-disabled"], /) -> bool: ... @overload @@ -1765,58 +763,19 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["-type"], /) -> str: ... if sys.version_info >= (3, 13): @overload - def wm_attributes(self, option: Literal["alpha"], /) -> float: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["alpha"], /) -> float: ... @overload def wm_attributes(self, option: Literal["fullscreen"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["modified"], /) -> bool: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["modified"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["notify"], /) -> bool: ... 
@overload @@ -1827,20 +786,7 @@ class Wm: def wm_attributes(self, option: Literal["type"], /) -> str: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: ... @overload def wm_attributes(self, option: Literal["disabled"], /) -> bool: ... @overload @@ -1848,20 +794,7 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["zoomed"], /) -> bool: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["zoomed"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["type"], /) -> str: ... @@ -1875,20 +808,7 @@ class Wm: def wm_attributes(self, option: Literal["-topmost"], value: bool, /) -> Literal[""]: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: ... @overload def wm_attributes(self, option: Literal["-notify"], value: bool, /) -> Literal[""]: ... @overload @@ -1897,20 +817,7 @@ class Wm: def wm_attributes(self, option: Literal["-transparent"], value: bool, /) -> Literal[""]: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. 
- """ - + def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: ... @overload def wm_attributes(self, option: Literal["-disabled"], value: bool, /) -> Literal[""]: ... @overload @@ -1918,20 +825,7 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: ... @overload def wm_attributes(self, option: Literal["-type"], value: str, /) -> Literal[""]: ... @@ -1950,19 +844,7 @@ class Wm: titlepath: str = ..., topmost: bool = ..., transparent: bool = ..., - ) -> None: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ + ) -> None: ... elif sys.platform == "win32": @overload def wm_attributes( @@ -1974,266 +856,110 @@ class Wm: fullscreen: bool = ..., toolwindow: bool = ..., topmost: bool = ..., - ) -> None: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ + ) -> None: ... else: # X11 @overload def wm_attributes( self, *, alpha: float = ..., topmost: bool = ..., zoomed: bool = ..., fullscreen: bool = ..., type: str = ... - ) -> None: - """Return or sets platform specific attributes. + ) -> None: ... - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ attributes = wm_attributes - def wm_client(self, name: str | None = None) -> str: - """Store NAME in WM_CLIENT_MACHINE property of this widget. Return - current value. - """ + def wm_client(self, name: str | None = None) -> str: ... 
client = wm_client @overload - def wm_colormapwindows(self) -> list[Misc]: - """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property - of this widget. This list contains windows whose colormaps differ from their - parents. Return current list of widgets if WLIST is empty. - """ - + def wm_colormapwindows(self) -> list[Misc]: ... @overload def wm_colormapwindows(self, wlist: list[Misc] | tuple[Misc, ...], /) -> None: ... @overload def wm_colormapwindows(self, first_wlist_item: Misc, /, *other_wlist_items: Misc) -> None: ... colormapwindows = wm_colormapwindows - def wm_command(self, value: str | None = None) -> str: - """Store VALUE in WM_COMMAND property. It is the command - which shall be used to invoke the application. Return current - command if VALUE is None. - """ + def wm_command(self, value: str | None = None) -> str: ... command = wm_command # Some of these always return empty string, but return type is set to None to prevent accidentally using it - def wm_deiconify(self) -> None: - """Deiconify this widget. If it was never mapped it will not be mapped. - On Windows it will raise this widget and give it the focus. - """ + def wm_deiconify(self) -> None: ... deiconify = wm_deiconify - def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: - """Set focus model to MODEL. "active" means that this widget will claim - the focus itself, "passive" means that the window manager shall give - the focus. Return current focus model if MODEL is None. - """ + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... focusmodel = wm_focusmodel - def wm_forget(self, window: Wm) -> None: - """The window will be unmapped from the screen and will no longer - be managed by wm. toplevel windows will be treated like frame - windows once they are no longer managed by wm, however, the menu - option configuration will be remembered and the menus will return - once the widget is managed again. - """ + def wm_forget(self, window: Wm) -> None: ... forget = wm_forget - def wm_frame(self) -> str: - """Return identifier for decorative frame of this widget if present.""" + def wm_frame(self) -> str: ... frame = wm_frame @overload - def wm_geometry(self, newGeometry: None = None) -> str: - """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return - current value if None is given. - """ - + def wm_geometry(self, newGeometry: None = None) -> str: ... @overload def wm_geometry(self, newGeometry: str) -> None: ... geometry = wm_geometry - def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): - """Instruct the window manager that this widget shall only be - resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and - height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the - number of grid units requested in Tk_GeometryRequest. - """ + def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): ... grid = wm_grid - def wm_group(self, pathName=None): - """Set the group leader widgets for related widgets to PATHNAME. Return - the group leader of this widget if None is given. - """ + def wm_group(self, pathName=None): ... group = wm_group - def wm_iconbitmap(self, bitmap=None, default=None): - """Set bitmap for the iconified widget to BITMAP. Return - the bitmap if None is given. 
- - Under Windows, the DEFAULT parameter can be used to set the icon - for the widget and any descendants that don't have an icon set - explicitly. DEFAULT can be the relative path to a .ico file - (example: root.iconbitmap(default='myicon.ico') ). See Tk - documentation for more information. - """ + def wm_iconbitmap(self, bitmap=None, default=None): ... iconbitmap = wm_iconbitmap - def wm_iconify(self) -> None: - """Display widget as icon.""" + def wm_iconify(self) -> None: ... iconify = wm_iconify - def wm_iconmask(self, bitmap=None): - """Set mask for the icon bitmap of this widget. Return the - mask if None is given. - """ + def wm_iconmask(self, bitmap=None): ... iconmask = wm_iconmask - def wm_iconname(self, newName=None) -> str: - """Set the name of the icon for this widget. Return the name if - None is given. - """ + def wm_iconname(self, newName=None) -> str: ... iconname = wm_iconname - def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: - """Sets the titlebar icon for this window based on the named photo - images passed through args. If default is True, this is applied to - all future created toplevels as well. - - The data in the images is taken as a snapshot at the time of - invocation. If the images are later changed, this is not reflected - to the titlebar icons. Multiple images are accepted to allow - different images sizes to be provided. The window manager may scale - provided icons to an appropriate size. - - On Windows, the images are packed into a Windows icon structure. - This will override an icon specified to wm_iconbitmap, and vice - versa. - - On X, the images are arranged into the _NET_WM_ICON X property, - which most modern window managers support. An icon specified by - wm_iconbitmap may exist simultaneously. - - On Macintosh, this currently does nothing. - """ + def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: ... iconphoto = wm_iconphoto - def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: - """Set the position of the icon of this widget to X and Y. Return - a tuple of the current values of X and X if None is given. - """ + def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... iconposition = wm_iconposition - def wm_iconwindow(self, pathName=None): - """Set widget PATHNAME to be displayed instead of icon. Return the current - value if None is given. - """ + def wm_iconwindow(self, pathName=None): ... iconwindow = wm_iconwindow - def wm_manage(self, widget) -> None: - """The widget specified will become a stand alone top-level window. - The window will be decorated with the window managers title bar, - etc. - """ + def wm_manage(self, widget) -> None: ... manage = wm_manage @overload - def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: - """Set max WIDTH and HEIGHT for this widget. If the window is gridded - the values are given in grid units. Return the current values if None - is given. - """ - + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload - def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: - """Set min WIDTH and HEIGHT for this widget. If the window is gridded - the values are given in grid units. Return the current values if None - is given. 
- """ - + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @overload - def wm_overrideredirect(self, boolean: None = None) -> bool | None: # returns True or None - """Instruct the window manager to ignore this widget - if BOOLEAN is given with 1. Return the current value if None - is given. - """ - + def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect - def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: - """Instruct the window manager that the position of this widget shall - be defined by the user if WHO is "user", and by its own policy if WHO is - "program". - """ + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... positionfrom = wm_positionfrom @overload - def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: - """Bind function FUNC to command NAME for this widget. - Return the function bound to NAME if None is given. NAME could be - e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW". - """ - + def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... @overload def wm_protocol(self, name: str, func: None = None) -> str: ... @overload def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... protocol = wm_protocol @overload - def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: - """Instruct the window manager whether this width can be resized - in WIDTH or HEIGHT. Both values are boolean values. - """ - + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable - def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: - """Instruct the window manager that the size of this widget shall - be defined by the user if WHO is "user", and by its own policy if WHO is - "program". - """ + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... sizefrom = wm_sizefrom @overload - def wm_state(self, newstate: None = None) -> str: - """Query or set the state of this widget as one of normal, icon, - iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only). - """ - + def wm_state(self, newstate: None = None) -> str: ... @overload def wm_state(self, newstate: str) -> None: ... state = wm_state @overload - def wm_title(self, string: None = None) -> str: - """Set the title of this widget.""" - + def wm_title(self, string: None = None) -> str: ... @overload def wm_title(self, string: str) -> None: ... title = wm_title @overload - def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: - """Instruct the window manager that this widget is transient - with regard to widget MASTER. - """ - + def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... @overload def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... transient = wm_transient - def wm_withdraw(self) -> None: - """Withdraw this widget from the screen such that it is unmapped - and forgotten by the window manager. Re-draw it with wm_deiconify. - """ + def wm_withdraw(self) -> None: ... 
withdraw = wm_withdraw class Tk(Misc, Wm): - """Toplevel widget of Tk which represents mostly the main window - of an application. It has an associated Tcl interpreter. - """ - master: None def __init__( # Make sure to keep in sync with other functions that use the same @@ -2246,13 +972,7 @@ class Tk(Misc, Wm): useTk: bool = True, sync: bool = False, use: str | None = None, - ) -> None: - """Return a new top level widget on screen SCREENNAME. A new Tcl interpreter will - be created. BASENAME will be used for the identification of the profile file (see - readprofile). - It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME - is the name of the widget class. - """ + ) -> None: ... # Keep this in sync with ttktheme.ThemedTk. See issue #13858 @overload def configure( @@ -2275,27 +995,12 @@ class Tk(Misc, Wm): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def destroy(self) -> None: - """Destroy this and all descendants widgets. This will - end the application of this Tcl interpreter. - """ - - def readprofile(self, baseName: str, className: str) -> None: - """Internal function. It reads .BASENAME.tcl and .CLASSNAME.tcl into - the Tcl Interpreter and calls exec on the contents of .BASENAME.py and - .CLASSNAME.py if such a file exists in the home directory. - """ + def destroy(self) -> None: ... + def readprofile(self, baseName: str, className: str) -> None: ... report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object] # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo # Please keep in sync with _tkinter.TkappType. @@ -2350,11 +1055,6 @@ class _PackInfo(_InMiscTotal): pady: int | tuple[int, int] class Pack: - """Geometry manager Pack. - - Base class to use the methods pack_* in every widget. - """ - # _PackInfo is not the valid type for cnf because pad stuff accepts any # screen units instead of int only. I didn't bother to create another # TypedDict for cnf because it appears to be a legacy thing that was @@ -2375,30 +1075,9 @@ class Pack: pady: float | str | tuple[float | str, float | str] = ..., in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: - """Pack a widget in the parent widget. Use as options: - after=widget - pack it after you have packed widget - anchor=NSEW (or subset) - position widget according to - given direction - before=widget - pack it before you will pack widget - expand=bool - expand widget if parent size grows - fill=NONE or X or Y or BOTH - fill widget if widget grows - in=master - use master to contain this widget - in_=master - see 'in' option description - ipadx=amount - add internal padding in x direction - ipady=amount - add internal padding in y direction - padx=amount - add padding in x direction - pady=amount - add padding in y direction - side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget. 
- """ - - def pack_forget(self) -> None: - """Unmap this widget and do not use it for the packing order.""" - - def pack_info(self) -> _PackInfo: # errors if widget hasn't been packed - """Return information about the packing options - for this widget. - """ + ) -> None: ... + def pack_forget(self) -> None: ... + def pack_info(self) -> _PackInfo: ... # errors if widget hasn't been packed pack = pack_configure forget = pack_forget propagate = Misc.pack_propagate @@ -2417,11 +1096,6 @@ class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed rely: str # can be float()ed if not empty string class Place: - """Geometry manager Place. - - Base class to use the methods place_* in every widget. - """ - def place_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -2439,36 +1113,9 @@ class Place: rely: str | float = ..., in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: - """Place a widget in the parent widget. Use as options: - in=master - master relative to which the widget is placed - in_=master - see 'in' option description - x=amount - locate anchor of this widget at position x of master - y=amount - locate anchor of this widget at position y of master - relx=amount - locate anchor of this widget between 0.0 and 1.0 - relative to width of master (1.0 is right edge) - rely=amount - locate anchor of this widget between 0.0 and 1.0 - relative to height of master (1.0 is bottom edge) - anchor=NSEW (or subset) - position anchor according to given direction - width=amount - width of this widget in pixel - height=amount - height of this widget in pixel - relwidth=amount - width of this widget between 0.0 and 1.0 - relative to width of master (1.0 is the same width - as the master) - relheight=amount - height of this widget between 0.0 and 1.0 - relative to height of master (1.0 is the same - height as the master) - bordermode="inside" or "outside" - whether to take border width of - master widget into account - """ - - def place_forget(self) -> None: - """Unmap this widget.""" - - def place_info(self) -> _PlaceInfo: - """Return information about the placing options - for this widget. - """ + ) -> None: ... + def place_forget(self) -> None: ... + def place_info(self) -> _PlaceInfo: ... place = place_configure info = place_info @@ -2485,11 +1132,6 @@ class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded sticky: str # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty class Grid: - """Geometry manager Grid. - - Base class to use the methods grid_* in every widget. - """ - def grid_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -2505,57 +1147,22 @@ class Grid: sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: - """Position a widget in the parent widget in a grid. 
Use as options: - column=number - use cell identified with given column (starting with 0) - columnspan=number - this widget will span several columns - in=master - use master to contain this widget - in_=master - see 'in' option description - ipadx=amount - add internal padding in x direction - ipady=amount - add internal padding in y direction - padx=amount - add padding in x direction - pady=amount - add padding in y direction - row=number - use cell identified with given row (starting with 0) - rowspan=number - this widget will span several rows - sticky=NSEW - if cell is larger on which sides will this - widget stick to the cell boundary - """ - - def grid_forget(self) -> None: - """Unmap this widget.""" - - def grid_remove(self) -> None: - """Unmap this widget but remember the grid options.""" - - def grid_info(self) -> _GridInfo: - """Return information about the options - for positioning this widget in a grid. - """ + ) -> None: ... + def grid_forget(self) -> None: ... + def grid_remove(self) -> None: ... + def grid_info(self) -> _GridInfo: ... grid = grid_configure location = Misc.grid_location size = Misc.grid_size class BaseWidget(Misc): - """Internal class.""" - master: Misc widgetName: str - def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None: - """Construct a widget with the parent widget MASTER, a name WIDGETNAME - and appropriate options. - """ - - def destroy(self) -> None: - """Destroy this and all descendants widgets.""" + def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None: ... + def destroy(self) -> None: ... # This class represents any widget except Toplevel or Tk. class Widget(BaseWidget, Pack, Place, Grid): - """Internal class. - - Base class for a widget which can be positioned with the geometry managers - Pack, Place or Grid. - """ - # Allow bind callbacks to take e.g. Event[Label] instead of Event[Misc]. # Tk and Toplevel get notified for their child widgets' events, but other # widgets don't. @@ -2565,54 +1172,13 @@ class Widget(BaseWidget, Pack, Place, Grid): sequence: str | None = None, func: Callable[[Event[_W]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to this widget at event SEQUENCE a call to function FUNC. - - SEQUENCE is a string of concatenated event - patterns. An event pattern is of the form - where MODIFIER is one - of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, - Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, - B3, Alt, Button4, B4, Double, Button5, B5 Triple, - Mod1, M1. TYPE is one of Activate, Enter, Map, - ButtonPress, Button, Expose, Motion, ButtonRelease - FocusIn, MouseWheel, Circulate, FocusOut, Property, - Colormap, Gravity Reparent, Configure, KeyPress, Key, - Unmap, Deactivate, KeyRelease Visibility, Destroy, - Leave and DETAIL is the button number for ButtonPress, - ButtonRelease and DETAIL is the Keysym for KeyPress and - KeyRelease. Examples are - for pressing Control and mouse button 1 or - for pressing A and the Alt key (KeyPress can be omitted). - An event pattern can also be a virtual event of the form - <> where AString can be arbitrary. This - event can be generated by event_generate. - If events are concatenated they must appear shortly - after each other. - - FUNC will be called if the event sequence occurs with an - instance of Event as argument. If the return value of FUNC is - "break" no further bound function is invoked. 
- - An additional boolean parameter ADD specifies whether FUNC will - be called additionally to the other bound function or whether - it will replace the previous function. - - Bind will return an identifier to allow deletion of the bound function with - unbind without memory leak. - - If FUNC or SEQUENCE is omitted the bound function or list - of bound events are returned. - """ - + ) -> str: ... @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... class Toplevel(BaseWidget, Wm): - """Toplevel widget, e.g. for dialogs.""" - # Toplevel and Tk have the same options because they correspond to the same # Tcl/Tk toplevel widget. For some reason, config and configure must be # copy/pasted here instead of aliasing as 'config = Tk.config'. @@ -2644,15 +1210,7 @@ class Toplevel(BaseWidget, Wm): use: int = ..., visual: str | tuple[str, int] = "", width: float | str = 0, - ) -> None: - """Construct a toplevel widget with the parent MASTER. - - Valid resource names: background, bd, bg, borderwidth, class, - colormap, container, cursor, height, highlightbackground, - highlightcolor, highlightthickness, menu, relief, screen, takefocus, - use, visual, width. - """ - + ) -> None: ... @overload def configure( self, @@ -2674,21 +1232,12 @@ class Toplevel(BaseWidget, Wm): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Button(Widget): - """Button widget.""" - def __init__( self, master: Misc | None = None, @@ -2736,26 +1285,7 @@ class Button(Widget): underline: int = -1, width: float | str = 0, wraplength: float | str = 0, - ) -> None: - """Construct a button widget with the parent MASTER. - - STANDARD OPTIONS - - activebackground, activeforeground, anchor, - background, bitmap, borderwidth, cursor, - disabledforeground, font, foreground - highlightbackground, highlightcolor, - highlightthickness, image, justify, - padx, pady, relief, repeatdelay, - repeatinterval, takefocus, text, - textvariable, underline, wraplength - - WIDGET-SPECIFIC OPTIONS - - command, compound, default, height, - overrelief, state, width - """ - + ) -> None: ... @overload def configure( self, @@ -2797,40 +1327,14 @@ class Button(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def flash(self) -> None: - """Flash the button. - - This is accomplished by redisplaying - the button several times, alternating between active and - normal colors. 
At the end of the flash the button is left - in the same normal/active state as when the command was - invoked. This command is ignored if the button's state is - disabled. - """ - - def invoke(self) -> Any: - """Invoke the command associated with the button. - - The return value is the return value from the command, - or an empty string if there is no command associated with - the button. This command is ignored if the button's state - is disabled. - """ + def flash(self) -> None: ... + def invoke(self) -> Any: ... class Canvas(Widget, XView, YView): - """Canvas widget to display graphical elements like lines or text.""" - def __init__( self, master: Misc | None = None, @@ -2870,18 +1374,7 @@ class Canvas(Widget, XView, YView): xscrollincrement: float | str = 0, yscrollcommand: str | Callable[[float, float], object] = "", yscrollincrement: float | str = 0, - ) -> None: - """Construct a canvas widget with the parent MASTER. - - Valid resource names: background, bd, bg, borderwidth, closeenough, - confine, cursor, height, highlightbackground, highlightcolor, - highlightthickness, insertbackground, insertborderwidth, - insertofftime, insertontime, insertwidth, offset, relief, - scrollregion, selectbackground, selectborderwidth, selectforeground, - state, takefocus, width, xscrollcommand, xscrollincrement, - yscrollcommand, yscrollincrement. - """ - + ) -> None: ... @overload def configure( self, @@ -2917,90 +1410,32 @@ class Canvas(Widget, XView, YView): xscrollincrement: float | str = ..., yscrollcommand: str | Callable[[float, float], object] = ..., yscrollincrement: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def addtag(self, *args): # internal method - """Internal function.""" - - def addtag_above(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items above TAGORID.""" - - def addtag_all(self, newtag: str) -> None: - """Add tag NEWTAG to all items.""" - - def addtag_below(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items below TAGORID.""" - + def addtag(self, *args): ... # internal method + def addtag_above(self, newtag: str, tagOrId: str | int) -> None: ... + def addtag_all(self, newtag: str) -> None: ... + def addtag_below(self, newtag: str, tagOrId: str | int) -> None: ... def addtag_closest( self, newtag: str, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None - ) -> None: - """Add tag NEWTAG to item which is closest to pixel at X, Y. - If several match take the top-most. - All items closer than HALO are considered overlapping (all are - closest). If START is specified the next below this tag is taken. - """ - - def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: - """Add tag NEWTAG to all items in the rectangle defined - by X1,Y1,X2,Y2. - """ - - def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: - """Add tag NEWTAG to all items which overlap the rectangle - defined by X1,Y1,X2,Y2. 
- """ - - def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items with TAGORID.""" - - def find(self, *args): # internal method - """Internal function.""" - - def find_above(self, tagOrId: str | int) -> tuple[int, ...]: - """Return items above TAGORID.""" - - def find_all(self) -> tuple[int, ...]: - """Return all items.""" - - def find_below(self, tagOrId: str | int) -> tuple[int, ...]: - """Return all items below TAGORID.""" - + ) -> None: ... + def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... + def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... + def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: ... + def find(self, *args): ... # internal method + def find_above(self, tagOrId: str | int) -> tuple[int, ...]: ... + def find_all(self) -> tuple[int, ...]: ... + def find_below(self, tagOrId: str | int) -> tuple[int, ...]: ... def find_closest( self, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None - ) -> tuple[int, ...]: - """Return item which is closest to pixel at X, Y. - If several match take the top-most. - All items closer than HALO are considered overlapping (all are - closest). If START is specified the next below this tag is taken. - """ - - def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: - """Return all items in rectangle defined - by X1,Y1,X2,Y2. - """ - - def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: - """Return all items which overlap the rectangle - defined by X1,Y1,X2,Y2. - """ - - def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: - """Return all items with TAGORID.""" + ) -> tuple[int, ...]: ... + def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: ... + def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: ... + def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: ... # Incompatible with Misc.bbox(), tkinter violates LSP - def bbox(self, *args: str | int) -> tuple[int, int, int, int]: # type: ignore[override] - """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle - which encloses all items with tags specified as arguments. - """ - + def bbox(self, *args: str | int) -> tuple[int, int, int, int]: ... # type: ignore[override] @overload def tag_bind( self, @@ -3008,39 +1443,18 @@ class Canvas(Widget, XView, YView): sequence: str | None = None, func: Callable[[Event[Canvas]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC. - - An additional boolean parameter ADD specifies whether FUNC will be - called additionally to the other bound function or whether it will - replace the previous function. See bind for the return value. - """ - + ) -> str: ... @overload def tag_bind( self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None ) -> None: ... @overload def tag_bind(self, tagOrId: str | int, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... 
- def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: - """Unbind for all items with TAGORID for event SEQUENCE the - function identified with FUNCID. - """ - - def canvasx(self, screenx, gridspacing=None): - """Return the canvas x coordinate of pixel position SCREENX rounded - to nearest multiple of GRIDSPACING units. - """ - - def canvasy(self, screeny, gridspacing=None): - """Return the canvas y coordinate of pixel position SCREENY rounded - to nearest multiple of GRIDSPACING units. - """ - + def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: ... + def canvasx(self, screenx, gridspacing=None): ... + def canvasy(self, screeny, gridspacing=None): ... @overload - def coords(self, tagOrId: str | int, /) -> list[float]: - """Return a list of coordinates for the item given in ARGS.""" - + def coords(self, tagOrId: str | int, /) -> list[float]: ... @overload def coords(self, tagOrId: str | int, args: list[int] | list[float] | tuple[float, ...], /) -> None: ... @overload @@ -3048,15 +1462,9 @@ class Canvas(Widget, XView, YView): # create_foo() methods accept coords as a list or tuple, or as separate arguments. # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. # Keyword arguments should be the same in all overloads of each method. - def create_arc(self, *args, **kw) -> int: - """Create arc shaped region with coordinates x1,y1,x2,y2.""" - - def create_bitmap(self, *args, **kw) -> int: - """Create bitmap with coordinates x1,y1.""" - - def create_image(self, *args, **kw) -> int: - """Create image item with coordinates x1,y1.""" - + def create_arc(self, *args, **kw) -> int: ... + def create_bitmap(self, *args, **kw) -> int: ... + def create_image(self, *args, **kw) -> int: ... @overload def create_line( self, @@ -3088,9 +1496,7 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: - """Create line with coordinates x1,y1,...,xn,yn.""" - + ) -> int: ... @overload def create_line( self, @@ -3189,9 +1595,7 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: - """Create oval with coordinates x1,y1,x2,y2.""" - + ) -> int: ... @overload def create_oval( self, @@ -3295,9 +1699,7 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: - """Create polygon with coordinates x1,y1,...,xn,yn.""" - + ) -> int: ... @overload def create_polygon( self, @@ -3404,9 +1806,7 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: - """Create rectangle with coordinates x1,y1,x2,y2.""" - + ) -> int: ... @overload def create_rectangle( self, @@ -3497,9 +1897,7 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: float | str = ..., - ) -> int: - """Create text with coordinates x1,y1.""" - + ) -> int: ... @overload def create_text( self, @@ -3535,9 +1933,7 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., window: Widget = ..., - ) -> int: - """Create window with coordinates x1,y1,x2,y2.""" - + ) -> int: ... 
@overload def create_window( self, @@ -3551,135 +1947,47 @@ class Canvas(Widget, XView, YView): width: float | str = ..., window: Widget = ..., ) -> int: ... - def dchars(self, *args) -> None: - """Delete characters of text items identified by tag or id in ARGS (possibly - several times) from FIRST to LAST character (including). - """ - - def delete(self, *tagsOrCanvasIds: str | int) -> None: - """Delete items identified by all tag or ids contained in ARGS.""" - + def dchars(self, *args) -> None: ... + def delete(self, *tagsOrCanvasIds: str | int) -> None: ... @overload - def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: - """Delete tag or id given as last arguments in ARGS from items - identified by first argument in ARGS. - """ - + def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: ... @overload def dtag(self, id: int, tag_to_delete: str, /) -> None: ... - def focus(self, *args): - """Set focus to the first item specified in ARGS.""" - - def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: - """Return tags associated with the first item specified in ARGS.""" - - def icursor(self, *args) -> None: - """Set cursor at position POS in the item identified by TAGORID. - In ARGS TAGORID must be first. - """ - - def index(self, *args): - """Return position of cursor as integer in item specified in ARGS.""" - - def insert(self, *args) -> None: - """Insert TEXT in item TAGORID at position POS. ARGS must - be TAGORID POS TEXT. - """ - - def itemcget(self, tagOrId, option): - """Return the resource value for an OPTION for item TAGORID.""" + def focus(self, *args): ... + def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: ... + def icursor(self, *args) -> None: ... + def index(self, *args): ... + def insert(self, *args) -> None: ... + def itemcget(self, tagOrId, option): ... # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( self, tagOrId: str | int, cnf: dict[str, Any] | None = None, **kw: Any - ) -> dict[str, tuple[str, str, str, str, str]] | None: - """Configure resources of an item TAGORID. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method without arguments. - """ + ) -> dict[str, tuple[str, str, str, str, str]] | None: ... itemconfig = itemconfigure - def move(self, *args) -> None: - """Move an item TAGORID given in ARGS.""" - - def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: - """Move the items given by TAGORID in the canvas coordinate - space so that the first coordinate pair of the bottommost - item with tag TAGORID is located at position (X,Y). - X and Y may be the empty string, in which case the - corresponding coordinate will be unchanged. All items matching - TAGORID remain in the same positions relative to each other. - """ - - def postscript(self, cnf={}, **kw): - """Print the contents of the canvas to a postscript - file. Valid options: colormap, colormode, file, fontmap, - height, pageanchor, pageheight, pagewidth, pagex, pagey, - rotate, width, x, y. - """ + def move(self, *args) -> None: ... + def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... + def postscript(self, cnf={}, **kw): ... 
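Note (illustrative sketch, not part of the patch): the create_*() methods above return integer item ids that the item-manipulation methods then accept, for example:

import tkinter

root = tkinter.Tk()
canvas = tkinter.Canvas(root, width=240, height=160)
canvas.pack(fill="both", expand=True)

line = canvas.create_line(10, 10, 230, 150, fill="blue", width=2)
rect = canvas.create_rectangle(20, 20, 90, 70, outline="red")
label = canvas.create_text(120, 80, text="hello")

print(canvas.coords(rect))             # current coordinates, e.g. [20.0, 20.0, 90.0, 70.0]
print(canvas.gettags(rect))            # tags attached to the item
canvas.itemconfigure(rect, fill="yellow")
canvas.move(line, 5, -5)               # shift by (dx, dy)
canvas.moveto(label, 10, 10)           # move so the item's first coordinate pair lands at (10, 10)
canvas.delete(line)                    # remove the item entirely
root.mainloop()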
# tkinter does: # lower = tag_lower # lift = tkraise = tag_raise # # But mypy doesn't like aliasing here (maybe because Misc defines the same names) - def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: - """Lower an item TAGORID given in ARGS - (optional below another item). - """ - - def lower(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] - """Lower an item TAGORID given in ARGS - (optional below another item). - """ - - def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: - """Raise an item TAGORID given in ARGS - (optional above another item). - """ - - def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] - """Raise an item TAGORID given in ARGS - (optional above another item). - """ - - def lift(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] - """Raise an item TAGORID given in ARGS - (optional above another item). - """ - - def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: - """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE.""" - - def scan_mark(self, x, y) -> None: - """Remember the current X, Y coordinates.""" - - def scan_dragto(self, x, y, gain: int = 10) -> None: - """Adjust the view of the canvas to GAIN times the - difference between X and Y and the coordinates given in - scan_mark. - """ - - def select_adjust(self, tagOrId, index) -> None: - """Adjust the end of the selection near the cursor of an item TAGORID to index.""" - - def select_clear(self) -> None: - """Clear the selection if it is in this widget.""" - - def select_from(self, tagOrId, index) -> None: - """Set the fixed end of a selection in item TAGORID to INDEX.""" - - def select_item(self): - """Return the item which has the selection.""" - - def select_to(self, tagOrId, index) -> None: - """Set the variable end of a selection in item TAGORID to INDEX.""" - - def type(self, tagOrId: str | int) -> int | None: - """Return the type of the item TAGORID.""" + def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... + def lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: ... + def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def lift(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] + def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y, gain: int = 10) -> None: ... + def select_adjust(self, tagOrId, index) -> None: ... + def select_clear(self) -> None: ... + def select_from(self, tagOrId, index) -> None: ... + def select_item(self): ... + def select_to(self, tagOrId, index) -> None: ... + def type(self, tagOrId: str | int) -> int | None: ... class Checkbutton(Widget): - """Checkbutton widget which is either in on- or off-state.""" - def __init__( self, master: Misc | None = None, @@ -3738,18 +2046,7 @@ class Checkbutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = 0, wraplength: float | str = 0, - ) -> None: - """Construct a checkbutton widget with the parent MASTER. 
- - Valid resource names: activebackground, activeforeground, anchor, - background, bd, bg, bitmap, borderwidth, command, cursor, - disabledforeground, fg, font, foreground, height, - highlightbackground, highlightcolor, highlightthickness, image, - indicatoron, justify, offvalue, onvalue, padx, pady, relief, - selectcolor, selectimage, state, takefocus, text, textvariable, - underline, variable, width, wraplength. - """ - + ) -> None: ... @overload def configure( self, @@ -3797,35 +2094,17 @@ class Checkbutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self) -> None: - """Put the button in off-state.""" - - def flash(self) -> None: - """Flash the button.""" - - def invoke(self) -> Any: - """Toggle the button and invoke a command if given as resource.""" - - def select(self) -> None: - """Put the button in on-state.""" - - def toggle(self) -> None: - """Toggle the button.""" + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... + def toggle(self) -> None: ... class Entry(Widget, XView): - """Entry widget which allows displaying simple text.""" - def __init__( self, master: Misc | None = None, @@ -3869,19 +2148,7 @@ class Entry(Widget, XView): vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", # same as validatecommand width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct an entry widget with the parent MASTER. - - Valid resource names: background, bd, bg, borderwidth, cursor, - exportselection, fg, font, foreground, highlightbackground, - highlightcolor, highlightthickness, insertbackground, - insertborderwidth, insertofftime, insertontime, insertwidth, - invalidcommand, invcmd, justify, relief, selectbackground, - selectborderwidth, selectforeground, show, state, takefocus, - textvariable, validate, validatecommand, vcmd, width, - xscrollcommand. - """ - + ) -> None: ... @overload def configure( self, @@ -3924,60 +2191,23 @@ class Entry(Widget, XView): vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
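Note (illustrative sketch, not part of the patch): a Checkbutton is usually paired with a Variable; select(), deselect() and toggle() change its state without running the command, while invoke() behaves like a user click:

import tkinter

root = tkinter.Tk()
flag = tkinter.BooleanVar(master=root, value=False)

check = tkinter.Checkbutton(
    root,
    text="Enable feature",
    variable=flag,
    onvalue=True,
    offvalue=False,
    command=lambda: print("now:", flag.get()),
)
check.pack(padx=10, pady=10)

check.select()     # on-state, command is not called
check.toggle()     # flip the state
check.invoke()     # toggle and call the command, like a click
print(flag.get())
root.mainloop()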
config = configure - def delete(self, first: str | int, last: str | int | None = None) -> None: - """Delete text from FIRST to LAST (not included).""" - - def get(self) -> str: - """Return the text.""" - - def icursor(self, index: str | int) -> None: - """Insert cursor at INDEX.""" - - def index(self, index: str | int) -> int: - """Return position of cursor.""" - - def insert(self, index: str | int, string: str) -> None: - """Insert STRING at INDEX.""" - - def scan_mark(self, x) -> None: - """Remember the current X, Y coordinates.""" - - def scan_dragto(self, x) -> None: - """Adjust the view of the canvas to 10 times the - difference between X and Y and the coordinates given in - scan_mark. - """ - - def selection_adjust(self, index: str | int) -> None: - """Adjust the end of the selection near the cursor to INDEX.""" - - def selection_clear(self) -> None: # type: ignore[override] - """Clear the selection if it is in this widget.""" - - def selection_from(self, index: str | int) -> None: - """Set the fixed end of a selection to INDEX.""" - - def selection_present(self) -> bool: - """Return True if there are characters selected in the entry, False - otherwise. - """ - - def selection_range(self, start: str | int, end: str | int) -> None: - """Set the selection from START to END (not included).""" - - def selection_to(self, index: str | int) -> None: - """Set the variable end of a selection to INDEX.""" + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self) -> str: ... + def icursor(self, index: str | int) -> None: ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, string: str) -> None: ... + def scan_mark(self, x) -> None: ... + def scan_dragto(self, x) -> None: ... + def selection_adjust(self, index: str | int) -> None: ... + def selection_clear(self) -> None: ... # type: ignore[override] + def selection_from(self, index: str | int) -> None: ... + def selection_present(self) -> bool: ... + def selection_range(self, start: str | int, end: str | int) -> None: ... + def selection_to(self, index: str | int) -> None: ... select_adjust = selection_adjust select_clear = selection_clear select_from = selection_from @@ -3986,8 +2216,6 @@ class Entry(Widget, XView): select_to = selection_to class Frame(Widget): - """Frame widget which may contain other widgets and can have a 3D border.""" - def __init__( self, master: Misc | None = None, @@ -4013,14 +2241,7 @@ class Frame(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, visual: str | tuple[str, int] = "", # can't be changed with configure() width: float | str = 0, - ) -> None: - """Construct a frame widget with the parent MASTER. - - Valid resource names: background, bd, bg, borderwidth, class, - colormap, container, cursor, height, highlightbackground, - highlightcolor, highlightthickness, relief, takefocus, visual, width. - """ - + ) -> None: ... @overload def configure( self, @@ -4041,21 +2262,12 @@ class Frame(Widget): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
@overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): - """Label widget which can display text and bitmaps.""" - def __init__( self, master: Misc | None = None, @@ -4093,25 +2305,7 @@ class Label(Widget): underline: int = -1, width: float | str = 0, wraplength: float | str = 0, - ) -> None: - """Construct a label widget with the parent MASTER. - - STANDARD OPTIONS - - activebackground, activeforeground, anchor, - background, bitmap, borderwidth, cursor, - disabledforeground, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, image, justify, - padx, pady, relief, takefocus, text, - textvariable, underline, wraplength - - WIDGET-SPECIFIC OPTIONS - - height, state, width - - """ - + ) -> None: ... @overload def configure( self, @@ -4148,21 +2342,12 @@ class Label(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Listbox(Widget, XView, YView): - """Listbox widget which can display a list of strings.""" - def __init__( self, master: Misc | None = None, @@ -4213,16 +2398,7 @@ class Listbox(Widget, XView, YView): width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct a listbox widget with the parent MASTER. - - Valid resource names: background, bd, bg, borderwidth, cursor, - exportselection, fg, font, foreground, height, highlightbackground, - highlightcolor, highlightthickness, relief, selectbackground, - selectborderwidth, selectforeground, selectmode, setgrid, takefocus, - width, xscrollcommand, yscrollcommand, listvariable. - """ - + ) -> None: ... @overload def configure( self, @@ -4257,89 +2433,35 @@ class Listbox(Widget, XView, YView): width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index: str | int) -> None: - """Activate item identified by INDEX.""" - - def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: # type: ignore[override] - """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle - which encloses the item identified by the given index. 
- """ - - def curselection(self): - """Return the indices of currently selected item.""" - - def delete(self, first: str | int, last: str | int | None = None) -> None: - """Delete items from FIRST to LAST (included).""" - - def get(self, first: str | int, last: str | int | None = None): - """Get list of items from FIRST to LAST (included).""" - - def index(self, index: str | int) -> int: - """Return index of item identified with INDEX.""" - - def insert(self, index: str | int, *elements: str | float) -> None: - """Insert ELEMENTS at INDEX.""" - - def nearest(self, y): - """Get index of item which is nearest to y coordinate Y.""" - - def scan_mark(self, x, y) -> None: - """Remember the current X, Y coordinates.""" - - def scan_dragto(self, x, y) -> None: - """Adjust the view of the listbox to 10 times the - difference between X and Y and the coordinates given in - scan_mark. - """ - - def see(self, index: str | int) -> None: - """Scroll such that INDEX is visible.""" - - def selection_anchor(self, index: str | int) -> None: - """Set the fixed end oft the selection to INDEX.""" + def activate(self, index: str | int) -> None: ... + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def curselection(self): ... + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self, first: str | int, last: str | int | None = None): ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, *elements: str | float) -> None: ... + def nearest(self, y): ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y) -> None: ... + def see(self, index: str | int) -> None: ... + def selection_anchor(self, index: str | int) -> None: ... select_anchor = selection_anchor - def selection_clear(self, first: str | int, last: str | int | None = None) -> None: # type: ignore[override] - """Clear the selection from FIRST to LAST (included).""" + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] select_clear = selection_clear - def selection_includes(self, index: str | int): - """Return True if INDEX is part of the selection.""" + def selection_includes(self, index: str | int): ... select_includes = selection_includes - def selection_set(self, first: str | int, last: str | int | None = None) -> None: - """Set the selection from FIRST to LAST (included) without - changing the currently selected elements. - """ + def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... select_set = selection_set - def size(self) -> int: # type: ignore[override] - """Return the number of elements in the listbox.""" - - def itemcget(self, index: str | int, option): - """Return the resource value for an ITEM and an OPTION.""" - - def itemconfigure(self, index: str | int, cnf=None, **kw): - """Configure resources of an ITEM. - - The values for resources are specified as keyword arguments. - To get an overview about the allowed keyword arguments - call the method without arguments. - Valid resource names: background, bg, foreground, fg, - selectbackground, selectforeground. - """ + def size(self) -> int: ... # type: ignore[override] + def itemcget(self, index: str | int, option): ... + def itemconfigure(self, index: str | int, cnf=None, **kw): ... 
itemconfig = itemconfigure class Menu(Widget): - """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus.""" - def __init__( self, master: Misc | None = None, @@ -4370,15 +2492,7 @@ class Menu(Widget): tearoffcommand: Callable[[str, str], object] | str = "", title: str = "", type: Literal["menubar", "tearoff", "normal"] = "normal", - ) -> None: - """Construct menu widget with the parent MASTER. - - Valid resource names: activebackground, activeborderwidth, - activeforeground, background, bd, bg, borderwidth, cursor, - disabledforeground, fg, font, foreground, postcommand, relief, - selectcolor, takefocus, tearoff, tearoffcommand, title, type. - """ - + ) -> None: ... @overload def configure( self, @@ -4405,29 +2519,14 @@ class Menu(Widget): tearoffcommand: Callable[[str, str], object] | str = ..., title: str = ..., type: Literal["menubar", "tearoff", "normal"] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: - """Post the menu at position X,Y with entry ENTRY.""" - - def activate(self, index: str | int) -> None: - """Activate entry at INDEX.""" - - def add(self, itemType, cnf={}, **kw): # docstring says "Internal function." - """Internal function.""" - - def insert(self, index, itemType, cnf={}, **kw): # docstring says "Internal function." - """Internal function.""" - + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... + def activate(self, index: str | int) -> None: ... + def add(self, itemType, cnf={}, **kw): ... # docstring says "Internal function." + def insert(self, index, itemType, cnf={}, **kw): ... # docstring says "Internal function." def add_cascade( self, cnf: dict[str, Any] | None = {}, @@ -4448,9 +2547,7 @@ class Menu(Widget): menu: Menu = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: - """Add hierarchical menu item.""" - + ) -> None: ... def add_checkbutton( self, cnf: dict[str, Any] | None = {}, @@ -4476,9 +2573,7 @@ class Menu(Widget): state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., variable: Variable = ..., - ) -> None: - """Add checkbutton menu item.""" - + ) -> None: ... def add_command( self, cnf: dict[str, Any] | None = {}, @@ -4498,9 +2593,7 @@ class Menu(Widget): label: str = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: - """Add command menu item.""" - + ) -> None: ... def add_radiobutton( self, cnf: dict[str, Any] | None = {}, @@ -4525,12 +2618,8 @@ class Menu(Widget): underline: int = ..., value: Any = ..., variable: Variable = ..., - ) -> None: - """Add radio menu item.""" - - def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: - """Add separator.""" - + ) -> None: ... + def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... def insert_cascade( self, index: str | int, @@ -4552,9 +2641,7 @@ class Menu(Widget): menu: Menu = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: - """Add hierarchical menu item at INDEX.""" - + ) -> None: ... 
def insert_checkbutton( self, index: str | int, @@ -4581,9 +2668,7 @@ class Menu(Widget): state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., variable: Variable = ..., - ) -> None: - """Add checkbutton menu item at INDEX.""" - + ) -> None: ... def insert_command( self, index: str | int, @@ -4604,9 +2689,7 @@ class Menu(Widget): label: str = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: - """Add command menu item at INDEX.""" - + ) -> None: ... def insert_radiobutton( self, index: str | int, @@ -4632,51 +2715,23 @@ class Menu(Widget): underline: int = ..., value: Any = ..., variable: Variable = ..., - ) -> None: - """Add radio menu item at INDEX.""" - - def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: - """Add separator at INDEX.""" - - def delete(self, index1: str | int, index2: str | int | None = None) -> None: - """Delete menu items between INDEX1 and INDEX2 (included).""" - - def entrycget(self, index: str | int, option: str) -> Any: - """Return the resource value of a menu item for OPTION at INDEX.""" - + ) -> None: ... + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... + def entrycget(self, index: str | int, option: str) -> Any: ... def entryconfigure( self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure a menu item at INDEX.""" + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... entryconfig = entryconfigure - def index(self, index: str | int) -> int | None: - """Return the index of a menu item identified by INDEX.""" - - def invoke(self, index: str | int) -> Any: - """Invoke a menu item identified by INDEX and execute - the associated command. - """ - - def post(self, x: int, y: int) -> None: - """Display a menu at position X,Y.""" - - def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: - """Return the type of the menu item at INDEX.""" - - def unpost(self) -> None: - """Unmap a menu.""" - - def xposition(self, index: str | int) -> int: - """Return the x-position of the leftmost pixel of the menu item - at INDEX. - """ - - def yposition(self, index: str | int) -> int: - """Return the y-position of the topmost pixel of the menu item at INDEX.""" + def index(self, index: str | int) -> int | None: ... + def invoke(self, index: str | int) -> Any: ... + def post(self, x: int, y: int) -> None: ... + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... + def unpost(self) -> None: ... + def xposition(self, index: str | int) -> int: ... + def yposition(self, index: str | int) -> int: ... class Menubutton(Widget): - """Menubutton widget, obsolete since Tk8.0.""" - def __init__( self, master: Misc | None = None, @@ -4757,21 +2812,12 @@ class Menubutton(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
@overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Message(Widget): - """Message widget to display multiline text. Obsolete since Label does it too.""" - def __init__( self, master: Misc | None = None, @@ -4829,21 +2875,12 @@ class Message(Widget): text: float | str = ..., textvariable: Variable = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Radiobutton(Widget): - """Radiobutton widget which shows only one of several buttons in on-state.""" - def __init__( self, master: Misc | None = None, @@ -4891,18 +2928,7 @@ class Radiobutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = 0, wraplength: float | str = 0, - ) -> None: - """Construct a radiobutton widget with the parent MASTER. - - Valid resource names: activebackground, activeforeground, anchor, - background, bd, bg, bitmap, borderwidth, command, cursor, - disabledforeground, fg, font, foreground, height, - highlightbackground, highlightcolor, highlightthickness, image, - indicatoron, justify, padx, pady, relief, selectcolor, selectimage, - state, takefocus, text, textvariable, underline, value, variable, - width, wraplength. - """ - + ) -> None: ... @overload def configure( self, @@ -4949,32 +2975,16 @@ class Radiobutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self) -> None: - """Put the button in off-state.""" - - def flash(self) -> None: - """Flash the button.""" - - def invoke(self) -> Any: - """Toggle the button and invoke a command if given as resource.""" - - def select(self) -> None: - """Put the button in on-state.""" + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... class Scale(Widget): - """Scale widget which can display a numerical scale.""" - def __init__( self, master: Misc | None = None, @@ -5016,17 +3026,7 @@ class Scale(Widget): troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: float | str = 15, - ) -> None: - """Construct a scale widget with the parent MASTER. - - Valid resource names: activebackground, background, bigincrement, bd, - bg, borderwidth, command, cursor, digits, fg, font, foreground, from, - highlightbackground, highlightcolor, highlightthickness, label, - length, orient, relief, repeatdelay, repeatinterval, resolution, - showvalue, sliderlength, sliderrelief, state, takefocus, - tickinterval, to, troughcolor, variable, width. - """ - + ) -> None: ... 
@overload def configure( self, @@ -5066,37 +3066,16 @@ class Scale(Widget): troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def get(self) -> float: - """Get the current value as integer or float.""" - - def set(self, value) -> None: - """Set the value to VALUE.""" - - def coords(self, value: float | None = None) -> tuple[int, int]: - """Return a tuple (X,Y) of the point along the centerline of the - trough that corresponds to VALUE or the current value if None is - given. - """ - - def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: - """Return where the point X,Y lies. Valid return values are "slider", - "though1" and "though2". - """ + def get(self) -> float: ... + def set(self, value) -> None: ... + def coords(self, value: float | None = None) -> tuple[int, int]: ... + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... class Scrollbar(Widget): - """Scrollbar widget which displays a slider at a certain position.""" - def __init__( self, master: Misc | None = None, @@ -5128,17 +3107,7 @@ class Scrollbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", troughcolor: str = ..., width: float | str = ..., - ) -> None: - """Construct a scrollbar widget with the parent MASTER. - - Valid resource names: activebackground, activerelief, - background, bd, bg, borderwidth, command, cursor, - elementborderwidth, highlightbackground, - highlightcolor, highlightthickness, jump, orient, - relief, repeatdelay, repeatinterval, takefocus, - troughcolor, width. - """ - + ) -> None: ... @overload def configure( self, @@ -5164,60 +3133,23 @@ class Scrollbar(Widget): repeatinterval: int = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., troughcolor: str = ..., - width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - - @overload - def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - config = configure - def activate(self, index=None): - """Marks the element indicated by index as active. - The only index values understood by this method are "arrow1", - "slider", or "arrow2". If any other value is specified then no - element of the scrollbar will be active. If index is not specified, - the method returns the name of the element that is currently active, - or None if no element is active. - """ - - def delta(self, deltax: int, deltay: int) -> float: - """Return the fractional change of the scrollbar setting if it - would be moved by DELTAX or DELTAY pixels. - """ - - def fraction(self, x: int, y: int) -> float: - """Return the fractional value which corresponds to a slider - position of X,Y. - """ - - def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: - """Return the element under position X,Y as one of - "arrow1","slider","arrow2" or "". 
- """ - - def get(self) -> tuple[float, float, float, float] | tuple[float, float]: - """Return the current fractional values (upper and lower end) - of the slider position. - """ - - def set(self, first: float | str, last: float | str) -> None: - """Set the fractional values of the slider position (upper and - lower ends as value between 0 and 1). - """ + width: float | str = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def activate(self, index=None): ... + def delta(self, deltax: int, deltay: int) -> float: ... + def fraction(self, x: int, y: int) -> float: ... + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ... + def set(self, first: float | str, last: float | str) -> None: ... -_TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc _WhatToCount: TypeAlias = Literal[ "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels" ] class Text(Widget, XView, YView): - """Text widget which can display text in various forms.""" - def __init__( self, master: Misc | None = None, @@ -5273,30 +3205,7 @@ class Text(Widget, XView, YView): wrap: Literal["none", "char", "word"] = "char", xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct a text widget with the parent MASTER. - - STANDARD OPTIONS - - background, borderwidth, cursor, - exportselection, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, insertbackground, - insertborderwidth, insertofftime, - insertontime, insertwidth, padx, pady, - relief, selectbackground, - selectborderwidth, selectforeground, - setgrid, takefocus, - xscrollcommand, yscrollcommand, - - WIDGET-SPECIFIC OPTIONS - - autoseparators, height, maxundo, - spacing1, spacing2, spacing3, - state, tabs, undo, width, wrap, - - """ - + ) -> None: ... @overload def configure( self, @@ -5347,55 +3256,41 @@ class Text(Widget, XView, YView): wrap: Literal["none", "char", "word"] = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, index: _TextIndex) -> tuple[int, int, int, int] | None: # type: ignore[override] - """Return a tuple of (x,y,width,height) which gives the bounding - box of the visible part of the character at the given index. - """ - - def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: - """Return whether between index INDEX1 and index INDEX2 the - relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. - """ + def bbox(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int] | None: ... 
# type: ignore[override] + def compare( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + op: Literal["<", "<=", "==", ">=", ">", "!="], + index2: str | float | _tkinter.Tcl_Obj | Widget, + ) -> bool: ... if sys.version_info >= (3, 13): @overload - def count(self, index1: _TextIndex, index2: _TextIndex, *, return_ints: Literal[True]) -> int: - """Counts the number of relevant things between the two indices. - - If INDEX1 is after INDEX2, the result will be a negative number - (and this holds for each of the possible options). - - The actual items which are counted depends on the options given. - The result is a tuple of integers, one for the result of each - counting option given, if more than one option is specified or - return_ints is false (default), otherwise it is an integer. - Valid counting options are "chars", "displaychars", - "displayindices", "displaylines", "indices", "lines", "xpixels" - and "ypixels". The default value, if no option is specified, is - "indices". There is an additional possible option "update", - which if given then all subsequent options ensure that any - possible out of date information is recalculated. - """ - + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + *, + return_ints: Literal[True], + ) -> int: ... @overload def count( - self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], /, *, return_ints: Literal[True] + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg: _WhatToCount | Literal["update"], + /, + *, + return_ints: Literal[True], ) -> int: ... @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: Literal["update"], arg2: _WhatToCount, /, @@ -5405,8 +3300,8 @@ class Text(Widget, XView, YView): @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount, arg2: Literal["update"], /, @@ -5415,13 +3310,20 @@ class Text(Widget, XView, YView): ) -> int: ... @overload def count( - self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /, *, return_ints: Literal[True] + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + *, + return_ints: Literal[True], ) -> tuple[int, int]: ... @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount | Literal["update"], arg2: _WhatToCount | Literal["update"], arg3: _WhatToCount | Literal["update"], @@ -5430,12 +3332,18 @@ class Text(Widget, XView, YView): return_ints: Literal[True], ) -> tuple[int, ...]: ... @overload - def count(self, index1: _TextIndex, index2: _TextIndex, *, return_ints: Literal[False] = False) -> tuple[int] | None: ... + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + *, + return_ints: Literal[False] = False, + ) -> tuple[int] | None: ... 
@overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg: _WhatToCount | Literal["update"], /, *, @@ -5444,8 +3352,8 @@ class Text(Widget, XView, YView): @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: Literal["update"], arg2: _WhatToCount, /, @@ -5455,8 +3363,8 @@ class Text(Widget, XView, YView): @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount, arg2: Literal["update"], /, @@ -5466,8 +3374,8 @@ class Text(Widget, XView, YView): @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount, arg2: _WhatToCount, /, @@ -5477,8 +3385,8 @@ class Text(Widget, XView, YView): @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount | Literal["update"], arg2: _WhatToCount | Literal["update"], arg3: _WhatToCount | Literal["update"], @@ -5488,35 +3396,49 @@ class Text(Widget, XView, YView): ) -> tuple[int, ...]: ... else: @overload - def count(self, index1: _TextIndex, index2: _TextIndex) -> tuple[int] | None: - """Counts the number of relevant things between the two indices. - If index1 is after index2, the result will be a negative number - (and this holds for each of the possible options). - - The actual items which are counted depends on the options given by - args. The result is a list of integers, one for the result of each - counting option given. Valid counting options are "chars", - "displaychars", "displayindices", "displaylines", "indices", - "lines", "xpixels" and "ypixels". There is an additional possible - option "update", which if given then all subsequent options ensure - that any possible out of date information is recalculated. - """ - + def count( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget + ) -> tuple[int] | None: ... @overload def count( - self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], / + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg: _WhatToCount | Literal["update"], + /, ) -> tuple[int] | None: ... @overload - def count(self, index1: _TextIndex, index2: _TextIndex, arg1: Literal["update"], arg2: _WhatToCount, /) -> int | None: ... + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: Literal["update"], + arg2: _WhatToCount, + /, + ) -> int | None: ... @overload - def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: Literal["update"], /) -> int | None: ... + def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: Literal["update"], + /, + ) -> int | None: ... @overload - def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /) -> tuple[int, int]: ... 
+ def count( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + arg1: _WhatToCount, + arg2: _WhatToCount, + /, + ) -> tuple[int, int]: ... @overload def count( self, - index1: _TextIndex, - index2: _TextIndex, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, arg1: _WhatToCount | Literal["update"], arg2: _WhatToCount | Literal["update"], arg3: _WhatToCount | Literal["update"], @@ -5525,27 +3447,18 @@ class Text(Widget, XView, YView): ) -> tuple[int, ...]: ... @overload - def debug(self, boolean: None = None) -> bool: - """Turn on the internal consistency checks of the B-Tree inside the text - widget according to BOOLEAN. - """ - + def debug(self, boolean: None = None) -> bool: ... @overload def debug(self, boolean: bool) -> None: ... - def delete(self, index1: _TextIndex, index2: _TextIndex | None = None) -> None: - """Delete the characters between INDEX1 and INDEX2 (not included).""" - - def dlineinfo(self, index: _TextIndex) -> tuple[int, int, int, int, int] | None: - """Return tuple (x,y,width,height,baseline) giving the bounding box - and baseline position of the visible part of the line containing - the character at INDEX. - """ - + def delete( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None + ) -> None: ... + def dlineinfo(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int, int] | None: ... @overload def dump( self, - index1: _TextIndex, - index2: _TextIndex | None = None, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, command: None = None, *, all: bool = ..., @@ -5554,25 +3467,12 @@ class Text(Widget, XView, YView): tag: bool = ..., text: bool = ..., window: bool = ..., - ) -> list[tuple[str, str, str]]: - """Return the contents of the widget between index1 and index2. - - The type of contents returned in filtered based on the keyword - parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are - given and true, then the corresponding items are returned. The result - is a list of triples of the form (key, value, index). If none of the - keywords are true then 'all' is used by default. - - If the 'command' argument is given, it is called once for each element - of the list of triples, with the values of each triple serving as the - arguments to the function. In this case the list is not returned. - """ - + ) -> list[tuple[str, str, str]]: ... @overload def dump( self, - index1: _TextIndex, - index2: _TextIndex | None, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None, command: Callable[[str, str, str], object] | str, *, all: bool = ..., @@ -5585,8 +3485,8 @@ class Text(Widget, XView, YView): @overload def dump( self, - index1: _TextIndex, - index2: _TextIndex | None = None, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, *, command: Callable[[str, str, str], object] | str, all: bool = ..., @@ -5596,82 +3496,36 @@ class Text(Widget, XView, YView): text: bool = ..., window: bool = ..., ) -> None: ... - def edit(self, *args): # docstring says "Internal method" - """Internal method - - This method controls the undo mechanism and - the modified flag. The exact behavior of the - command depends on the option argument that - follows the edit argument. 
The following forms - of the command are currently supported: - - edit_modified, edit_redo, edit_reset, edit_separator - and edit_undo - - """ - + def edit(self, *args): ... # docstring says "Internal method" @overload - def edit_modified(self, arg: None = None) -> bool: # actually returns Literal[0, 1] - """Get or Set the modified flag - - If arg is not specified, returns the modified - flag of the widget. The insert, delete, edit undo and - edit redo commands or the user can set or clear the - modified flag. If boolean is specified, sets the - modified flag of the widget to arg. - """ - + def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] @overload def edit_modified(self, arg: bool) -> None: ... # actually returns empty string - def edit_redo(self) -> None: # actually returns empty string - """Redo the last undone edit - - When the undo option is true, reapplies the last - undone edits provided no other edits were done since - then. Generates an error when the redo stack is empty. - Does nothing when the undo option is false. - """ - - def edit_reset(self) -> None: # actually returns empty string - """Clears the undo and redo stacks""" - - def edit_separator(self) -> None: # actually returns empty string - """Inserts a separator (boundary) on the undo stack. - - Does nothing when the undo option is false - """ - - def edit_undo(self) -> None: # actually returns empty string - """Undoes the last edit action - - If the undo option is true. An edit action is defined - as all the insert and delete commands that are recorded - on the undo stack in between two separators. Generates - an error when the undo stack is empty. Does nothing - when the undo option is false - """ - - def get(self, index1: _TextIndex, index2: _TextIndex | None = None) -> str: - """Return the text from INDEX1 to INDEX2 (not included).""" - + def edit_redo(self) -> None: ... # actually returns empty string + def edit_reset(self) -> None: ... # actually returns empty string + def edit_separator(self) -> None: ... # actually returns empty string + def edit_undo(self) -> None: ... # actually returns empty string + def get( + self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None + ) -> str: ... @overload - def image_cget(self, index: _TextIndex, option: Literal["image", "name"]) -> str: - """Return the value of OPTION of an embedded image at INDEX.""" - + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["image", "name"]) -> str: ... @overload - def image_cget(self, index: _TextIndex, option: Literal["padx", "pady"]) -> int: ... + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... @overload - def image_cget(self, index: _TextIndex, option: Literal["align"]) -> Literal["baseline", "bottom", "center", "top"]: ... + def image_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["align"] + ) -> Literal["baseline", "bottom", "center", "top"]: ... @overload - def image_cget(self, index: _TextIndex, option: str) -> Any: ... + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... @overload - def image_configure(self, index: _TextIndex, cnf: str) -> tuple[str, str, str, str, str | int]: - """Configure an embedded image at INDEX.""" - + def image_configure( + self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str + ) -> tuple[str, str, str, str, str | int]: ... 
@overload def image_configure( self, - index: _TextIndex, + index: str | float | _tkinter.Tcl_Obj | Widget, cnf: dict[str, Any] | None = None, *, align: Literal["baseline", "bottom", "center", "top"] = ..., @@ -5682,7 +3536,7 @@ class Text(Widget, XView, YView): ) -> dict[str, tuple[str, str, str, str, str | int]] | None: ... def image_create( self, - index: _TextIndex, + index: str | float | _tkinter.Tcl_Obj | Widget, cnf: dict[str, Any] | None = {}, *, align: Literal["baseline", "bottom", "center", "top"] = ..., @@ -5690,77 +3544,38 @@ class Text(Widget, XView, YView): name: str = ..., padx: float | str = ..., pady: float | str = ..., - ) -> str: - """Create an embedded image at INDEX.""" - - def image_names(self) -> tuple[str, ...]: - """Return all names of embedded images in this widget.""" - - def index(self, index: _TextIndex) -> str: - """Return the index in the form line.char for INDEX.""" - - def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: - """Insert CHARS before the characters at INDEX. An additional - tag can be given in ARGS. Additional CHARS and tags can follow in ARGS. - """ - + ) -> str: ... + def image_names(self) -> tuple[str, ...]: ... + def index(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str: ... + def insert( + self, index: str | float | _tkinter.Tcl_Obj | Widget, chars: str, *args: str | list[str] | tuple[str, ...] + ) -> None: ... @overload - def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: - """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT). - Return the current value if None is given for DIRECTION. - """ - + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string - def mark_names(self) -> tuple[str, ...]: - """Return all mark names.""" - - def mark_set(self, markName: str, index: _TextIndex) -> None: - """Set mark MARKNAME before the character at INDEX.""" - - def mark_unset(self, *markNames: str) -> None: - """Delete all marks in MARKNAMES.""" - - def mark_next(self, index: _TextIndex) -> str | None: - """Return the name of the next mark after INDEX.""" - - def mark_previous(self, index: _TextIndex) -> str | None: - """Return the name of the previous mark before INDEX.""" + def mark_names(self) -> tuple[str, ...]: ... + def mark_set(self, markName: str, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... + def mark_unset(self, *markNames: str) -> None: ... + def mark_next(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... + def mark_previous(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... # **kw of peer_create is same as the kwargs of Text.__init__ - def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: - """Creates a peer text widget with the given newPathName, and any - optional standard configuration options. By default the peer will - have the same start and end line as the parent widget, but - these can be overridden with the standard configuration options. - """ - - def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: - """Returns a list of peers of this widget (this does not include - the widget itself). 
- """ - - def replace(self, index1: _TextIndex, index2: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: - """Replaces the range of characters between index1 and index2 with - the given characters and tags specified by args. - - See the method insert for some more information about args, and the - method delete for information about the indices. - """ - - def scan_mark(self, x: int, y: int) -> None: - """Remember the current X, Y coordinates.""" - - def scan_dragto(self, x: int, y: int) -> None: - """Adjust the view of the text to 10 times the - difference between X and Y and the coordinates given in - scan_mark. - """ - + def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: ... + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... + def replace( + self, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget, + chars: str, + *args: str | list[str] | tuple[str, ...], + ) -> None: ... + def scan_mark(self, x: int, y: int) -> None: ... + def scan_dragto(self, x: int, y: int) -> None: ... def search( self, pattern: str, - index: _TextIndex, - stopindex: _TextIndex | None = None, + index: str | float | _tkinter.Tcl_Obj | Widget, + stopindex: str | float | _tkinter.Tcl_Obj | Widget | None = None, forwards: bool | None = None, backwards: bool | None = None, exact: bool | None = None, @@ -5768,19 +3583,11 @@ class Text(Widget, XView, YView): nocase: bool | None = None, count: Variable | None = None, elide: bool | None = None, - ) -> str: # returns empty string for not found - """Search PATTERN beginning from INDEX until STOPINDEX. - Return the index of the first character of a match or an - empty string. - """ - - def see(self, index: _TextIndex) -> None: - """Scroll such that the character at INDEX is visible.""" - - def tag_add(self, tagName: str, index1: _TextIndex, *args: _TextIndex) -> None: - """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS. - Additional pairs of indices may follow in ARGS. - """ + ) -> str: ... # returns empty string for not found + def see(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... + def tag_add( + self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, *args: str | float | _tkinter.Tcl_Obj | Widget + ) -> None: ... # tag_bind stuff is very similar to Canvas @overload def tag_bind( @@ -5789,24 +3596,12 @@ class Text(Widget, XView, YView): sequence: str | None, func: Callable[[Event[Text]], object] | None, add: Literal["", "+"] | bool | None = None, - ) -> str: - """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC. - - An additional boolean parameter ADD specifies whether FUNC will be - called additionally to the other bound function or whether it will - replace the previous function. See bind for the return value. - """ - + ) -> str: ... @overload def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: - """Unbind for all characters with TAGNAME for event SEQUENCE the - function identified with FUNCID. - """ + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... 
# allowing any string for cget instead of just Literals because there's no other way to look up tag options - def tag_cget(self, tagName: str, option: str): - """Return the value of OPTION for tag TAGNAME.""" - + def tag_cget(self, tagName: str, option: str): ... @overload def tag_configure( self, @@ -5841,66 +3636,56 @@ class Text(Widget, XView, YView): underline: bool = ..., underlinefg: str = ..., wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure a tag TAGNAME.""" - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure - def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: # error if no tag names given - """Delete all tags in TAGNAMES.""" - - def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: - """Change the priority of tag TAGNAME such that it is lower - than the priority of BELOWTHIS. - """ - - def tag_names(self, index: _TextIndex | None = None) -> tuple[str, ...]: - """Return a list of all tag names.""" - - def tag_nextrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> tuple[str, str] | tuple[()]: - """Return a list of start and end index for the first sequence of - characters between INDEX1 and INDEX2 which all have tag TAGNAME. - The text is searched forward from INDEX1. - """ - - def tag_prevrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> tuple[str, str] | tuple[()]: - """Return a list of start and end index for the first sequence of - characters between INDEX1 and INDEX2 which all have tag TAGNAME. - The text is searched backwards from INDEX1. - """ - - def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: - """Change the priority of tag TAGNAME such that it is higher - than the priority of ABOVETHIS. - """ - - def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: - """Return a list of ranges of text which have tag TAGNAME.""" + def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: ... # error if no tag names given + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... + def tag_names(self, index: str | float | _tkinter.Tcl_Obj | Widget | None = None) -> tuple[str, ...]: ... + def tag_nextrange( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> tuple[str, str] | tuple[()]: ... + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... # tag_remove and tag_delete are different - def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> None: - """Remove tag TAGNAME from all characters between INDEX1 and INDEX2.""" - + def tag_remove( + self, + tagName: str, + index1: str | float | _tkinter.Tcl_Obj | Widget, + index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, + ) -> None: ... 
@overload - def window_cget(self, index: _TextIndex, option: Literal["padx", "pady"]) -> int: - """Return the value of OPTION of an embedded window at INDEX.""" - + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... @overload - def window_cget(self, index: _TextIndex, option: Literal["stretch"]) -> bool: ... # actually returns Literal[0, 1] + def window_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["stretch"] + ) -> bool: ... # actually returns Literal[0, 1] @overload - def window_cget(self, index: _TextIndex, option: Literal["align"]) -> Literal["baseline", "bottom", "center", "top"]: ... + def window_cget( + self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["align"] + ) -> Literal["baseline", "bottom", "center", "top"]: ... @overload # window is set to a widget, but read as the string name. - def window_cget(self, index: _TextIndex, option: Literal["create", "window"]) -> str: ... + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["create", "window"]) -> str: ... @overload - def window_cget(self, index: _TextIndex, option: str) -> Any: ... + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... @overload - def window_configure(self, index: _TextIndex, cnf: str) -> tuple[str, str, str, str, str | int]: - """Configure an embedded window at INDEX.""" - + def window_configure( + self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str + ) -> tuple[str, str, str, str, str | int]: ... @overload def window_configure( self, - index: _TextIndex, + index: str | float | _tkinter.Tcl_Obj | Widget, cnf: dict[str, Any] | None = None, *, align: Literal["baseline", "bottom", "center", "top"] = ..., @@ -5913,7 +3698,7 @@ class Text(Widget, XView, YView): window_config = window_configure def window_create( self, - index: _TextIndex, + index: str | float | _tkinter.Tcl_Obj | Widget, cnf: dict[str, Any] | None = {}, *, align: Literal["baseline", "bottom", "center", "top"] = ..., @@ -5922,25 +3707,16 @@ class Text(Widget, XView, YView): pady: float | str = ..., stretch: bool | Literal[0, 1] = ..., window: Misc | str = ..., - ) -> None: - """Create a window at INDEX.""" - - def window_names(self) -> tuple[str, ...]: - """Return all names of embedded windows in this widget.""" - - def yview_pickplace(self, *what): # deprecated - """Obsolete function, use see.""" + ) -> None: ... + def window_names(self) -> tuple[str, ...]: ... + def yview_pickplace(self, *what): ... # deprecated class _setit: - """Internal class. It wraps the command in the widget OptionMenu.""" - def __init__(self, var, value, callback=None) -> None: ... def __call__(self, *args) -> None: ... # manual page: tk_optionMenu class OptionMenu(Menubutton): - """OptionMenu which allows the user to select a value from a menu.""" - menuname: Incomplete def __init__( # differs from other widgets @@ -5951,12 +3727,7 @@ class OptionMenu(Menubutton): *values: str, # kwarg only from now on command: Callable[[StringVar], object] | None = ..., - ) -> None: - """Construct an optionmenu widget with the parent MASTER, with - the resource textvariable set to VARIABLE, the initially selected - value VALUE, the other menu values VALUES and an additional - keyword argument command. - """ + ) -> None: ... 
# configure, config, cget are inherited from Menubutton # destroy and __getitem__ are overridden, signature does not change @@ -5976,8 +3747,6 @@ class _BitmapImageLike(_Image): ... class _PhotoImageLike(_Image): ... class Image(_Image): - """Base class for images.""" - name: Incomplete tk: _tkinter.TkappType def __init__(self, imgtype, name=None, cnf={}, master: Misc | _tkinter.TkappType | None = None, **kw) -> None: ... @@ -5986,12 +3755,9 @@ class Image(_Image): def __getitem__(self, key): ... configure: Incomplete config: Incomplete - def type(self): - """Return the type of the image, e.g. "photo" or "bitmap".""" + def type(self): ... class PhotoImage(Image, _PhotoImageLike): - """Widget which can display images in PGM, PPM, GIF, PNG format.""" - # This should be kept in sync with PIL.ImageTK.PhotoImage.__init__() def __init__( self, @@ -6006,13 +3772,7 @@ class PhotoImage(Image, _PhotoImageLike): height: int = ..., palette: int | str = ..., width: int = ..., - ) -> None: - """Create an image with NAME. - - Valid resource names: data, format, file, gamma, height, palette, - width. - """ - + ) -> None: ... def configure( self, *, @@ -6023,15 +3783,10 @@ class PhotoImage(Image, _PhotoImageLike): height: int = ..., palette: int | str = ..., width: int = ..., - ) -> None: - """Configure the image.""" + ) -> None: ... config = configure - def blank(self) -> None: - """Display a transparent image.""" - - def cget(self, option: str) -> str: - """Return the value of OPTION.""" - + def blank(self) -> None: ... + def cget(self, option: str) -> str: ... def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' if sys.version_info >= (3, 13): def copy( @@ -6040,42 +3795,9 @@ class PhotoImage(Image, _PhotoImageLike): from_coords: Iterable[int] | None = None, zoom: int | tuple[int, int] | list[int] | None = None, subsample: int | tuple[int, int] | list[int] | None = None, - ) -> PhotoImage: - """Return a new PhotoImage with the same image as this widget. - - The FROM_COORDS option specifies a rectangular sub-region of the - source image to be copied. It must be a tuple or a list of 1 to 4 - integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally - opposite corners of the rectangle. If x2 and y2 are not specified, - the default value is the bottom-right corner of the source image. - The pixels copied will include the left and top edges of the - specified rectangle but not the bottom or right edges. If the - FROM_COORDS option is not given, the default is the whole source - image. - - If SUBSAMPLE or ZOOM are specified, the image is transformed as in - the subsample() or zoom() methods. The value must be a single - integer or a pair of integers. - """ - - def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: - """Return a new PhotoImage based on the same image as this widget - but use only every Xth or Yth pixel. If Y is not given, the - default value is the same as X. - - The FROM_COORDS option specifies a rectangular sub-region of the - source image to be copied, as in the copy() method. - """ - - def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: - """Return a new PhotoImage with the same image as this widget - but zoom it with a factor of X in the X direction and Y in the Y - direction. If Y is not given, the default value is the same as X. 
- - The FROM_COORDS option specifies a rectangular sub-region of the - source image to be copied, as in the copy() method. - """ - + ) -> PhotoImage: ... + def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... + def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... def copy_replace( self, sourceImage: PhotoImage | str, @@ -6087,67 +3809,13 @@ class PhotoImage(Image, _PhotoImageLike): subsample: int | tuple[int, int] | list[int] | None = None, # `None` defaults to overlay. compositingrule: Literal["overlay", "set"] | None = None, - ) -> None: - """Copy a region from the source image (which must be a PhotoImage) to - this image, possibly with pixel zooming and/or subsampling. If no - options are specified, this command copies the whole of the source - image into this image, starting at coordinates (0, 0). - - The FROM_COORDS option specifies a rectangular sub-region of the - source image to be copied. It must be a tuple or a list of 1 to 4 - integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally - opposite corners of the rectangle. If x2 and y2 are not specified, - the default value is the bottom-right corner of the source image. - The pixels copied will include the left and top edges of the - specified rectangle but not the bottom or right edges. If the - FROM_COORDS option is not given, the default is the whole source - image. - - The TO option specifies a rectangular sub-region of the destination - image to be affected. It must be a tuple or a list of 1 to 4 - integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally - opposite corners of the rectangle. If x2 and y2 are not specified, - the default value is (x1,y1) plus the size of the source region - (after subsampling and zooming, if specified). If x2 and y2 are - specified, the source region will be replicated if necessary to fill - the destination region in a tiled fashion. - - If SHRINK is true, the size of the destination image should be - reduced, if necessary, so that the region being copied into is at - the bottom-right corner of the image. - - If SUBSAMPLE or ZOOM are specified, the image is transformed as in - the subsample() or zoom() methods. The value must be a single - integer or a pair of integers. - - The COMPOSITINGRULE option specifies how transparent pixels in the - source image are combined with the destination image. When a - compositing rule of 'overlay' is set, the old contents of the - destination image are visible, as if the source image were printed - on a piece of transparent film and placed over the top of the - destination. When a compositing rule of 'set' is set, the old - contents of the destination image are discarded and the source image - is used as-is. The default compositing rule is 'overlay'. - """ + ) -> None: ... else: - def copy(self) -> PhotoImage: - """Return a new PhotoImage with the same image as this widget.""" - - def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: - """Return a new PhotoImage with the same image as this widget - but zoom it with a factor of x in the X direction and y in the Y - direction. If y is not given, the default value is the same as x. - """ - - def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: - """Return a new PhotoImage based on the same image as this widget - but use only every Xth or Yth pixel. If y is not given, the - default value is the same as x. 
- """ - - def get(self, x: int, y: int) -> tuple[int, int, int]: - """Return the color (red, green, blue) of the pixel at X,Y.""" + def copy(self) -> PhotoImage: ... + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def get(self, x: int, y: int) -> tuple[int, int, int]: ... def put( self, data: ( @@ -6161,10 +3829,7 @@ class PhotoImage(Image, _PhotoImageLike): | tuple[tuple[str, ...], ...] ), to: tuple[int, int] | tuple[int, int, int, int] | None = None, - ) -> None: - """Put row formatted colors to image starting from - position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) - """ + ) -> None: ... if sys.version_info >= (3, 13): def read( self, @@ -6174,29 +3839,7 @@ class PhotoImage(Image, _PhotoImageLike): from_coords: Iterable[int] | None = None, to: Iterable[int] | None = None, shrink: bool = False, - ) -> None: - """Reads image data from the file named FILENAME into the image. - - The FORMAT option specifies the format of the image data in the - file. - - The FROM_COORDS option specifies a rectangular sub-region of the image - file data to be copied to the destination image. It must be a tuple - or a list of 1 to 4 integers (x1, y1, x2, y2). (x1, y1) and - (x2, y2) specify diagonally opposite corners of the rectangle. If - x2 and y2 are not specified, the default value is the bottom-right - corner of the source image. The default, if this option is not - specified, is the whole of the image in the image file. - - The TO option specifies the coordinates of the top-left corner of - the region of the image into which data from filename are to be - read. The default is (0, 0). - - If SHRINK is true, the size of the destination image will be - reduced, if necessary, so that the region into which the image file - data are read is at the bottom-right corner of the image. - """ - + ) -> None: ... def write( self, filename: StrOrBytesPath, @@ -6205,59 +3848,11 @@ class PhotoImage(Image, _PhotoImageLike): *, background: str | None = None, grayscale: bool = False, - ) -> None: - """Writes image data from the image to a file named FILENAME. - - The FORMAT option specifies the name of the image file format - handler to be used to write the data to the file. If this option - is not given, the format is guessed from the file extension. - - The FROM_COORDS option specifies a rectangular region of the image - to be written to the image file. It must be a tuple or a list of 1 - to 4 integers (x1, y1, x2, y2). If only x1 and y1 are specified, - the region extends from (x1,y1) to the bottom-right corner of the - image. If all four coordinates are given, they specify diagonally - opposite corners of the rectangular region. The default, if this - option is not given, is the whole image. - - If BACKGROUND is specified, the data will not contain any - transparency information. In all transparent pixels the color will - be replaced by the specified color. - - If GRAYSCALE is true, the data will not contain color information. - All pixel data will be transformed into grayscale. - """ - + ) -> None: ... @overload def data( self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False - ) -> bytes: - """Returns image data. - - The FORMAT option specifies the name of the image file format - handler to be used. 
If this option is not given, this method uses - a format that consists of a tuple (one element per row) of strings - containing space-separated (one element per pixel/column) colors - in “#RRGGBB” format (where RR is a pair of hexadecimal digits for - the red channel, GG for green, and BB for blue). - - The FROM_COORDS option specifies a rectangular region of the image - to be returned. It must be a tuple or a list of 1 to 4 integers - (x1, y1, x2, y2). If only x1 and y1 are specified, the region - extends from (x1,y1) to the bottom-right corner of the image. If - all four coordinates are given, they specify diagonally opposite - corners of the rectangular region, including (x1, y1) and excluding - (x2, y2). The default, if this option is not given, is the whole - image. - - If BACKGROUND is specified, the data will not contain any - transparency information. In all transparent pixels the color will - be replaced by the specified color. - - If GRAYSCALE is true, the data will not contain color information. - All pixel data will be transformed into grayscale. - """ - + ) -> bytes: ... @overload def data( self, @@ -6269,20 +3864,14 @@ class PhotoImage(Image, _PhotoImageLike): ) -> tuple[str, ...]: ... else: - def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: - """Write image to file FILENAME in FORMAT starting from - position FROM_COORDS. - """ - - def transparency_get(self, x: int, y: int) -> bool: - """Return True if the pixel at x,y is transparent.""" + def write( + self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None + ) -> None: ... - def transparency_set(self, x: int, y: int, boolean: bool) -> None: - """Set the transparency of the pixel at x,y.""" + def transparency_get(self, x: int, y: int) -> bool: ... + def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... class BitmapImage(Image, _BitmapImageLike): - """Widget which can display images in XBM format.""" - # This should be kept in sync with PIL.ImageTK.BitmapImage.__init__() def __init__( self, @@ -6296,18 +3885,12 @@ class BitmapImage(Image, _BitmapImageLike): foreground: str = ..., maskdata: str = ..., maskfile: StrOrBytesPath = ..., - ) -> None: - """Create a bitmap with NAME. - - Valid resource names: background, data, file, foreground, maskdata, maskfile. - """ + ) -> None: ... def image_names() -> tuple[str, ...]: ... def image_types() -> tuple[str, ...]: ... class Spinbox(Widget, XView): - """spinbox widget.""" - def __init__( self, master: Misc | None = None, @@ -6365,34 +3948,7 @@ class Spinbox(Widget, XView): width: int = 20, wrap: bool = False, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct a spinbox widget with the parent MASTER. - - STANDARD OPTIONS - - activebackground, background, borderwidth, - cursor, exportselection, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, insertbackground, - insertborderwidth, insertofftime, - insertontime, insertwidth, justify, relief, - repeatdelay, repeatinterval, - selectbackground, selectborderwidth - selectforeground, takefocus, textvariable - xscrollcommand. - - WIDGET-SPECIFIC OPTIONS - - buttonbackground, buttoncursor, - buttondownrelief, buttonuprelief, - command, disabledbackground, - disabledforeground, format, from, - invalidcommand, increment, - readonlybackground, state, to, - validate, validatecommand values, - width, wrap, - """ - + ) -> None: ... 
@overload def configure( self, @@ -6448,141 +4004,32 @@ class Spinbox(Widget, XView): width: int = ..., wrap: bool = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, index) -> tuple[int, int, int, int] | None: # type: ignore[override] - """Return a tuple of X1,Y1,X2,Y2 coordinates for a - rectangle which encloses the character given by index. - - The first two elements of the list give the x and y - coordinates of the upper-left corner of the screen - area covered by the character (in pixels relative - to the widget) and the last two elements give the - width and height of the character, in pixels. The - bounding box may refer to a region outside the - visible area of the window. - """ - - def delete(self, first, last=None) -> Literal[""]: - """Delete one or more elements of the spinbox. - - First is the index of the first character to delete, - and last is the index of the character just after - the last one to delete. If last isn't specified it - defaults to first+1, i.e. a single character is - deleted. This command returns an empty string. - """ - - def get(self) -> str: - """Returns the spinbox's string""" - - def icursor(self, index): - """Alter the position of the insertion cursor. - - The insertion cursor will be displayed just before - the character given by index. Returns an empty string - """ - - def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: - """Returns the name of the widget at position x, y - - Return value is one of: none, buttondown, buttonup, entry - """ - - def index(self, index: str | int) -> int: - """Returns the numerical index corresponding to index""" - - def insert(self, index: str | int, s: str) -> Literal[""]: - """Insert string s at index - - Returns an empty string. - """ + def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def delete(self, first, last=None) -> Literal[""]: ... + def get(self) -> str: ... + def icursor(self, index): ... + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, s: str) -> Literal[""]: ... # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented - def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: - """Causes the specified element to be invoked - - The element could be buttondown or buttonup - triggering the action associated with it. - """ - - def scan(self, *args): - """Internal function.""" - - def scan_mark(self, x): - """Records x and the current view in the spinbox window; - - used in conjunction with later scan dragto commands. - Typically this command is associated with a mouse button - press in the widget. It returns an empty string. - """ - - def scan_dragto(self, x): - """Compute the difference between the given x argument - and the x argument to the last scan mark command - - It then adjusts the view left or right by 10 times the - difference in x-coordinates. 
This command is typically - associated with mouse motion events in the widget, to - produce the effect of dragging the spinbox at high speed - through the window. The return value is an empty string. - """ - - def selection(self, *args) -> tuple[int, ...]: - """Internal function.""" - - def selection_adjust(self, index): - """Locate the end of the selection nearest to the character - given by index, - - Then adjust that end of the selection to be at index - (i.e including but not going beyond index). The other - end of the selection is made the anchor point for future - select to commands. If the selection isn't currently in - the spinbox, then a new selection is created to include - the characters between index and the most recent selection - anchor point, inclusive. - """ - - def selection_clear(self): # type: ignore[override] - """Clear the selection - - If the selection isn't in this widget then the - command has no effect. - """ - - def selection_element(self, element=None): - """Sets or gets the currently selected element. - - If a spinbutton element is specified, it will be - displayed depressed. - """ - - def selection_from(self, index: int) -> None: - """Set the fixed end of a selection to INDEX.""" - - def selection_present(self) -> None: - """Return True if there are characters selected in the spinbox, False - otherwise. - """ - - def selection_range(self, start: int, end: int) -> None: - """Set the selection from START to END (not included).""" - - def selection_to(self, index: int) -> None: - """Set the variable end of a selection to INDEX.""" + def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: ... + def scan(self, *args): ... + def scan_mark(self, x): ... + def scan_dragto(self, x): ... + def selection(self, *args) -> tuple[int, ...]: ... + def selection_adjust(self, index): ... + def selection_clear(self): ... # type: ignore[override] + def selection_element(self, element=None): ... + def selection_from(self, index: int) -> None: ... + def selection_present(self) -> None: ... + def selection_range(self, start: int, end: int) -> None: ... + def selection_to(self, index: int) -> None: ... class LabelFrame(Widget): - """labelframe widget.""" - def __init__( self, master: Misc | None = None, @@ -6615,23 +4062,7 @@ class LabelFrame(Widget): text: float | str = "", visual: str | tuple[str, int] = "", # can't be changed with configure() width: float | str = 0, - ) -> None: - """Construct a labelframe widget with the parent MASTER. - - STANDARD OPTIONS - - borderwidth, cursor, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, padx, pady, relief, - takefocus, text - - WIDGET-SPECIFIC OPTIONS - - background, class, colormap, container, - height, labelanchor, labelwidget, - visual, width - """ - + ) -> None: ... @overload def configure( self, @@ -6658,21 +4089,12 @@ class LabelFrame(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., text: float | str = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure class PanedWindow(Widget): - """panedwindow widget.""" - def __init__( self, master: Misc | None = None, @@ -6700,21 +4122,7 @@ class PanedWindow(Widget): sashwidth: float | str = 3, showhandle: bool = False, width: float | str = "", - ) -> None: - """Construct a panedwindow widget with the parent MASTER. - - STANDARD OPTIONS - - background, borderwidth, cursor, height, - orient, relief, width - - WIDGET-SPECIFIC OPTIONS - - handlepad, handlesize, opaqueresize, - sashcursor, sashpad, sashrelief, - sashwidth, showhandle, - """ - + ) -> None: ... @overload def configure( self, @@ -6741,156 +4149,25 @@ class PanedWindow(Widget): sashwidth: float | str = ..., showhandle: bool = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def add(self, child: Widget, **kw) -> None: - """Add a child widget to the panedwindow in a new pane. - - The child argument is the name of the child widget - followed by pairs of arguments that specify how to - manage the windows. The possible options and values - are the ones accepted by the paneconfigure method. - """ - - def remove(self, child) -> None: - """Remove the pane containing child from the panedwindow - - All geometry management options for child will be forgotten. - """ + def add(self, child: Widget, **kw) -> None: ... + def remove(self, child) -> None: ... forget = remove # type: ignore[assignment] - def identify(self, x: int, y: int): - """Identify the panedwindow component at point x, y - - If the point is over a sash or a sash handle, the result - is a two element list containing the index of the sash or - handle, and a word indicating whether it is over a sash - or a handle, such as {0 sash} or {2 handle}. If the point - is over any other part of the panedwindow, the result is - an empty list. - """ - - def proxy(self, *args) -> tuple[Incomplete, ...]: - """Internal function.""" - - def proxy_coord(self) -> tuple[Incomplete, ...]: - """Return the x and y pair of the most recent proxy location""" - - def proxy_forget(self) -> tuple[Incomplete, ...]: - """Remove the proxy from the display.""" - - def proxy_place(self, x, y) -> tuple[Incomplete, ...]: - """Place the proxy at the given x and y coordinates.""" - - def sash(self, *args) -> tuple[Incomplete, ...]: - """Internal function.""" - - def sash_coord(self, index) -> tuple[Incomplete, ...]: - """Return the current x and y pair for the sash given by index. - - Index must be an integer between 0 and 1 less than the - number of panes in the panedwindow. The coordinates given are - those of the top left corner of the region containing the sash. - pathName sash dragto index x y This command computes the - difference between the given coordinates and the coordinates - given to the last sash coord command for the given sash. It then - moves that sash the computed difference. The return value is the - empty string. - """ - - def sash_mark(self, index) -> tuple[Incomplete, ...]: - """Records x and y for the sash given by index; - - Used in conjunction with later dragto commands to move the sash. 
- """ - - def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: - """Place the sash given by index at the given coordinates""" - - def panecget(self, child, option): - """Query a management option for window. - - Option may be any value allowed by the paneconfigure subcommand - """ - - def paneconfigure(self, tagOrId, cnf=None, **kw): - """Query or modify the management options for window. - - If no option is specified, returns a list describing all - of the available options for pathName. If option is - specified with no value, then the command returns a list - describing the one named option (this list will be identical - to the corresponding sublist of the value returned if no - option is specified). If one or more option-value pairs are - specified, then the command modifies the given widget - option(s) to have the given value(s); in this case the - command returns an empty string. The following options - are supported: - - after window - Insert the window after the window specified. window - should be the name of a window already managed by pathName. - before window - Insert the window before the window specified. window - should be the name of a window already managed by pathName. - height size - Specify a height for the window. The height will be the - outer dimension of the window including its border, if - any. If size is an empty string, or if -height is not - specified, then the height requested internally by the - window will be used initially; the height may later be - adjusted by the movement of sashes in the panedwindow. - Size may be any value accepted by Tk_GetPixels. - minsize n - Specifies that the size of the window cannot be made - less than n. This constraint only affects the size of - the widget in the paned dimension -- the x dimension - for horizontal panedwindows, the y dimension for - vertical panedwindows. May be any value accepted by - Tk_GetPixels. - padx n - Specifies a non-negative value indicating how much - extra space to leave on each side of the window in - the X-direction. The value may have any of the forms - accepted by Tk_GetPixels. - pady n - Specifies a non-negative value indicating how much - extra space to leave on each side of the window in - the Y-direction. The value may have any of the forms - accepted by Tk_GetPixels. - sticky style - If a window's pane is larger than the requested - dimensions of the window, this option may be used - to position (or stretch) the window within its pane. - Style is a string that contains zero or more of the - characters n, s, e or w. The string can optionally - contains spaces or commas, but they are ignored. Each - letter refers to a side (north, south, east, or west) - that the window will "stick" to. If both n and s - (or e and w) are specified, the window will be - stretched to fill the entire height (or width) of - its cavity. - width size - Specify a width for the window. The width will be - the outer dimension of the window including its - border, if any. If size is an empty string, or - if -width is not specified, then the width requested - internally by the window will be used initially; the - width may later be adjusted by the movement of sashes - in the panedwindow. Size may be any value accepted by - Tk_GetPixels. - - """ + def identify(self, x: int, y: int): ... + def proxy(self, *args) -> tuple[Incomplete, ...]: ... + def proxy_coord(self) -> tuple[Incomplete, ...]: ... + def proxy_forget(self) -> tuple[Incomplete, ...]: ... + def proxy_place(self, x, y) -> tuple[Incomplete, ...]: ... 
+ def sash(self, *args) -> tuple[Incomplete, ...]: ... + def sash_coord(self, index) -> tuple[Incomplete, ...]: ... + def sash_mark(self, index) -> tuple[Incomplete, ...]: ... + def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: ... + def panecget(self, child, option): ... + def paneconfigure(self, tagOrId, cnf=None, **kw): ... paneconfig = paneconfigure - def panes(self): - """Returns an ordered list of the child panes.""" + def panes(self): ... def _test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi index 22a917d2104ee..d0d6de8426562 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi @@ -5,28 +5,8 @@ from typing import ClassVar __all__ = ["Chooser", "askcolor"] class Chooser(Dialog): - """Create a dialog for the tk_chooseColor command. - - Args: - master: The master widget for this dialog. If not provided, - defaults to options['parent'] (if defined). - options: Dictionary of options for the tk_chooseColor call. - initialcolor: Specifies the selected color when the - dialog is first displayed. This can be a tk color - string or a 3-tuple of ints in the range (0, 255) - for an RGB triplet. - parent: The parent window of the color dialog. The - color dialog is displayed on top of this. - title: A string for the title of the dialog box. - """ - command: ClassVar[str] def askcolor( color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... -) -> tuple[None, None] | tuple[tuple[int, int, int], str]: - """Display dialog window for selection of a color. - - Convenience wrapper for the Chooser class. Displays the color - chooser dialog with color as the initial value. - """ +) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi index 18906bd604937..521f451a9b2c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi @@ -1,104 +1,3 @@ -"""Drag-and-drop support for Tkinter. - -This is very preliminary. I currently only support dnd *within* one -application, between different windows (or within the same window). - -I am trying to make this as generic as possible -- not dependent on -the use of a particular widget or icon type, etc. I also hope that -this will work with Pmw. - -To enable an object to be dragged, you must create an event binding -for it that starts the drag-and-drop process. Typically, you should -bind to a callback function that you write. The function -should call Tkdnd.dnd_start(source, event), where 'source' is the -object to be dragged, and 'event' is the event that invoked the call -(the argument to your callback function). Even though this is a class -instantiation, the returned instance should not be stored -- it will -be kept alive automatically for the duration of the drag-and-drop. - -When a drag-and-drop is already in process for the Tk interpreter, the -call is *ignored*; this normally averts starting multiple simultaneous -dnd processes, e.g. because different button callbacks all -dnd_start(). - -The object is *not* necessarily a widget -- it can be any -application-specific object that is meaningful to potential -drag-and-drop targets. 
-
-Potential drag-and-drop targets are discovered as follows. Whenever
-the mouse moves, and at the start and end of a drag-and-drop move, the
-Tk widget directly under the mouse is inspected. This is the target
-widget (not to be confused with the target object, yet to be
-determined). If there is no target widget, there is no dnd target
-object. If there is a target widget, and it has an attribute
-dnd_accept, this should be a function (or any callable object). The
-function is called as dnd_accept(source, event), where 'source' is the
-object being dragged (the object passed to dnd_start() above), and
-'event' is the most recent event object (generally a <Motion> event;
-it can also be <ButtonPress> or <ButtonRelease>). If the dnd_accept()
-function returns something other than None, this is the new dnd target
-object. If dnd_accept() returns None, or if the target widget has no
-dnd_accept attribute, the target widget's parent is considered as the
-target widget, and the search for a target object is repeated from
-there. If necessary, the search is repeated all the way up to the
-root widget. If none of the target widgets can produce a target
-object, there is no target object (the target object is None).
-
-The target object thus produced, if any, is called the new target
-object. It is compared with the old target object (or None, if there
-was no old target widget). There are several cases ('source' is the
-source object, and 'event' is the most recent event object):
-
-- Both the old and new target objects are None. Nothing happens.
-
-- The old and new target objects are the same object. Its method
-dnd_motion(source, event) is called.
-
-- The old target object was None, and the new target object is not
-None. The new target object's method dnd_enter(source, event) is
-called.
-
-- The new target object is None, and the old target object is not
-None. The old target object's method dnd_leave(source, event) is
-called.
-
-- The old and new target objects differ and neither is None. The old
-target object's method dnd_leave(source, event), and then the new
-target object's method dnd_enter(source, event) is called.
-
-Once this is done, the new target object replaces the old one, and the
-Tk mainloop proceeds. The return value of the methods mentioned above
-is ignored; if they raise an exception, the normal exception handling
-mechanisms take over.
-
-The drag-and-drop processes can end in two ways: a final target object
-is selected, or no final target object is selected. When a final
-target object is selected, it will always have been notified of the
-potential drop by a call to its dnd_enter() method, as described
-above, and possibly one or more calls to its dnd_motion() method; its
-dnd_leave() method has not been called since the last call to
-dnd_enter(). The target is notified of the drop by a call to its
-method dnd_commit(source, event).
-
-If no final target object is selected, and there was an old target
-object, its dnd_leave(source, event) method is called to complete the
-dnd sequence.
-
-Finally, the source object is notified that the drag-and-drop process
-is over, by a call to source.dnd_end(target, event), specifying either
-the selected target object, or None if no target object was selected.
-The source object can use this to implement the commit action; this is
-sometimes simpler than to do it in the target's dnd_commit(). The
-target's dnd_commit() method could then simply be aliased to
-dnd_leave().
- -At any time during a dnd sequence, the application can cancel the -sequence by calling the cancel() method on the object returned by -dnd_start(). This will call dnd_leave() if a target is currently -active; it will never call dnd_commit(). - -""" - from tkinter import Event, Misc, Tk, Widget from typing import ClassVar, Protocol, type_check_only diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi index af3c7c38aa0cf..b6ef8f45d0350 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi @@ -1,17 +1,3 @@ -"""File selection dialog classes. - -Classes: - -- FileDialog -- LoadFileDialog -- SaveFileDialog - -This module also presents tk common file dialogues, it provides interfaces -to the native file dialogues available in Tk 4.2 and newer, and the -directory dialogue available in Tk 8.3 and newer. -These interfaces were written by Fredrik Lundh, May 1997. -""" - from _typeshed import Incomplete, StrOrBytesPath, StrPath from collections.abc import Hashable, Iterable from tkinter import Button, Entry, Event, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog @@ -36,27 +22,6 @@ __all__ = [ dialogstates: dict[Hashable, tuple[str, str]] class FileDialog: - """Standard file selection dialog -- no checks on selected file. - - Usage: - - d = FileDialog(master) - fname = d.go(dir_or_file, pattern, default, key) - if fname is None: ...canceled... - else: ...open file... - - All arguments to go() are optional. - - The 'key' argument specifies a key in the global dictionary - 'dialogstates', which keeps track of the values for the directory - and pattern arguments, overriding the values passed in (it does - not keep track of the default argument!). If no key is specified, - the dialog keeps no memory of previous state. Note that memory is - kept even when the dialog is canceled. (All this emulates the - behavior of the Macintosh file selection dialogs.) - - """ - title: str master: Misc directory: str | None @@ -92,32 +57,22 @@ class FileDialog: def set_selection(self, file: StrPath) -> None: ... class LoadFileDialog(FileDialog): - """File selection dialog which checks that the file exists.""" - title: str def ok_command(self) -> None: ... class SaveFileDialog(FileDialog): - """File selection dialog which checks that the file may be created.""" - title: str def ok_command(self) -> None: ... class _Dialog(commondialog.Dialog): ... class Open(_Dialog): - """Ask for a filename to open""" - command: ClassVar[str] class SaveAs(_Dialog): - """Ask for a filename to save as""" - command: ClassVar[str] class Directory(commondialog.Dialog): - """Ask for a directory""" - command: ClassVar[str] # TODO: command kwarg available on macos @@ -131,9 +86,7 @@ def asksaveasfilename( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> str: # can be empty string - """Ask for a filename to save as""" - +) -> str: ... # can be empty string def askopenfilename( *, defaultextension: str | None = "", @@ -143,9 +96,7 @@ def askopenfilename( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> str: # can be empty string - """Ask for a filename to open""" - +) -> str: ... 
# can be empty string def askopenfilenames( *, defaultextension: str | None = "", @@ -155,17 +106,10 @@ def askopenfilenames( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> Literal[""] | tuple[str, ...]: - """Ask for multiple filenames to open - - Returns a list of filenames or empty list if - cancel button selected - """ - +) -> Literal[""] | tuple[str, ...]: ... def askdirectory( *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = False, parent: Misc | None = ..., title: str | None = ... -) -> str: # can be empty string - """Ask for a directory, and return the file name""" +) -> str: ... # can be empty string # TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) def asksaveasfile( @@ -179,9 +123,7 @@ def asksaveasfile( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> IO[Incomplete] | None: - """Ask for a filename to save as, and returned the opened file""" - +) -> IO[Incomplete] | None: ... def askopenfile( mode: str = "r", *, @@ -192,9 +134,7 @@ def askopenfile( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> IO[Incomplete] | None: - """Ask for a filename to open, and returned the opened file""" - +) -> IO[Incomplete] | None: ... def askopenfiles( mode: str = "r", *, @@ -205,13 +145,5 @@ def askopenfiles( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> tuple[IO[Incomplete], ...]: # can be empty tuple - """Ask for multiple filenames and return the open file - objects - - returns a list of open file objects or an empty list if - cancel selected - """ - -def test() -> None: - """Simple test program.""" +) -> tuple[IO[Incomplete], ...]: ... # can be empty tuple +def test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi index 879f51b1e6e60..327ba7a2432e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi @@ -40,27 +40,6 @@ class _MetricsDict(TypedDict): fixed: bool class Font: - """Represents a named font. - - Constructor options are: - - font -- font specifier (name, system font, or (family, size, style)-tuple) - name -- name to use for this font configuration (defaults to a unique name) - exists -- does a named font by this name already exist? - Creates a new named font if False, points to the existing font if True. - Raises _tkinter.TclError if the assertion is false. - - the following are ignored if font is specified: - - family -- font 'family', e.g. Courier, Times, Helvetica - size -- font size in points - weight -- font thickness: NORMAL, BOLD - slant -- font slant: ROMAN, ITALIC - underline -- font underlining: false (0), true (1) - overstrike -- font strikeout: false (0), true (1) - - """ - name: str delete_font: bool counter: ClassVar[itertools.count[int]] # undocumented @@ -83,9 +62,7 @@ class Font: __hash__: ClassVar[None] # type: ignore[assignment] def __setitem__(self, key: str, value: Any) -> None: ... @overload - def cget(self, option: Literal["family"]) -> str: - """Get font attribute""" - + def cget(self, option: Literal["family"]) -> str: ... @overload def cget(self, option: Literal["size"]) -> int: ... @overload @@ -98,9 +75,7 @@ class Font: def cget(self, option: str) -> Any: ... 
__getitem__ = cget @overload - def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: - """Return actual font attributes""" - + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... @overload def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... @overload @@ -122,40 +97,24 @@ class Font: slant: Literal["roman", "italic"] = ..., underline: bool = ..., overstrike: bool = ..., - ) -> _FontDict | None: - """Modify font attributes""" + ) -> _FontDict | None: ... configure = config - def copy(self) -> Font: - """Return a distinct copy of the current font""" - + def copy(self) -> Font: ... @overload - def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: - """Return font metrics. - - For best performance, create a dummy widget - using this font before calling this method. - """ - + def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: ... @overload def metrics(self, option: Literal["fixed"], /, *, displayof: tkinter.Misc | None = ...) -> bool: ... @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... - def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: - """Return text width""" - + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... def __eq__(self, other: object) -> bool: ... def __del__(self) -> None: ... -def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: - """Get font families (as a tuple)""" - -def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: - """Get names of defined fonts (as a tuple)""" +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... if sys.version_info >= (3, 10): - def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: - """Given the name of a tk named font, returns a Font representation.""" + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... else: - def nametofont(name: str) -> Font: - """Given the name of a tk named font, returns a Font representation.""" + def nametofont(name: str) -> Font: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi index 424e8903d6d49..cd95f0de5f803 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi @@ -22,8 +22,6 @@ YES: Final = "yes" NO: Final = "no" class Message(Dialog): - """A message box""" - command: ClassVar[str] def showinfo( @@ -34,9 +32,7 @@ def showinfo( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: - """Show an info message""" - +) -> str: ... def showwarning( title: str | None = None, message: str | None = None, @@ -45,9 +41,7 @@ def showwarning( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: - """Show a warning message""" - +) -> str: ... 
def showerror( title: str | None = None, message: str | None = None, @@ -56,9 +50,7 @@ def showerror( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: - """Show an error message""" - +) -> str: ... def askquestion( title: str | None = None, message: str | None = None, @@ -67,9 +59,7 @@ def askquestion( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["yes", "no"] = ..., parent: Misc = ..., -) -> str: - """Ask a question""" - +) -> str: ... def askokcancel( title: str | None = None, message: str | None = None, @@ -78,9 +68,7 @@ def askokcancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok", "cancel"] = ..., parent: Misc = ..., -) -> bool: - """Ask if operation should proceed; return true if the answer is ok""" - +) -> bool: ... def askyesno( title: str | None = None, message: str | None = None, @@ -89,9 +77,7 @@ def askyesno( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["yes", "no"] = ..., parent: Misc = ..., -) -> bool: - """Ask a question; return true if the answer is yes""" - +) -> bool: ... def askyesnocancel( title: str | None = None, message: str | None = None, @@ -100,9 +86,7 @@ def askyesnocancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["cancel", "yes", "no"] = ..., parent: Misc = ..., -) -> bool | None: - """Ask a question; return true if the answer is yes, None if cancelled.""" - +) -> bool | None: ... def askretrycancel( title: str | None = None, message: str | None = None, @@ -111,5 +95,4 @@ def askretrycancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["retry", "cancel"] = ..., parent: Misc = ..., -) -> bool: - """Ask if operation should be retried; return true if the answer is yes""" +) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi index 028a6fb395d5c..6f1abc7144877 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -1,16 +1,3 @@ -"""A ScrolledText widget feels like a text widget but also has a -vertical scroll bar on its right. (Later, options may be added to -add a horizontal bar as well, to make the bars disappear -automatically when not needed, to move them to the other side of the -window, etc.) - -Configuration options are passed to the Text widget. -A Frame widget is inserted between the master and the text, to hold -the Scrollbar widget. -Most methods calls are inherited from the Text widget; Pack, Grid and -Place methods are redirected to the Frame widget however. -""" - from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi index af5375e2a635d..45dce21a6b1c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi @@ -1,65 +1,13 @@ -"""This modules handles dialog boxes. 
- -It contains the following public symbols: - -SimpleDialog -- A simple but flexible modal dialog box - -Dialog -- a base class for dialogs - -askinteger -- get an integer from the user - -askfloat -- get a float from the user - -askstring -- get a string from the user -""" - from tkinter import Event, Frame, Misc, Toplevel class Dialog(Toplevel): - """Class to open dialogs. - - This class is intended as a base class for custom dialogs - """ - - def __init__(self, parent: Misc | None, title: str | None = None) -> None: - """Initialize a dialog. - - Arguments: - - parent -- a parent window (the application window) - - title -- the dialog title - """ - - def body(self, master: Frame) -> Misc | None: - """create dialog body. - - return widget that should have initial focus. - This method should be overridden, and is called - by the __init__ method. - """ - - def buttonbox(self) -> None: - """add standard button box. - - override if you do not want the standard buttons - """ - + def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... + def body(self, master: Frame) -> Misc | None: ... + def buttonbox(self) -> None: ... def ok(self, event: Event[Misc] | None = None) -> None: ... def cancel(self, event: Event[Misc] | None = None) -> None: ... - def validate(self) -> bool: - """validate the data - - This method is called automatically to validate the data before the - dialog is destroyed. By default, it always validates OK. - """ - - def apply(self) -> None: - """process the data - - This method is called automatically to process the data, *after* - the dialog is destroyed. By default, it does nothing. - """ + def validate(self) -> bool: ... + def apply(self) -> None: ... class SimpleDialog: def __init__( @@ -85,18 +33,7 @@ def askfloat( minvalue: float | None = ..., maxvalue: float | None = ..., parent: Misc | None = ..., -) -> float | None: - """get a float from the user - - Arguments: - - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class - - Return value is a float - """ - +) -> float | None: ... def askinteger( title: str | None, prompt: str, @@ -105,18 +42,7 @@ def askinteger( minvalue: int | None = ..., maxvalue: int | None = ..., parent: Misc | None = ..., -) -> int | None: - """get an integer from the user - - Arguments: - - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class - - Return value is an integer - """ - +) -> int | None: ... def askstring( title: str | None, prompt: str, @@ -125,14 +51,4 @@ def askstring( show: str | None = ..., # minvalue/maxvalue is accepted but not useful. parent: Misc | None = ..., -) -> str | None: - """get a string from the user - - Arguments: - - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class - - Return value is a string - """ +) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi index e8e383809e950..7891364fa02c6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi @@ -35,130 +35,19 @@ TCL_IDLE_EVENTS: Final = 32 TCL_ALL_EVENTS: Final = 0 class tixCommand: - """The tix commands provide access to miscellaneous elements - of Tix's internal state and the Tix application context. - Most of the information manipulated by these commands pertains - to the application as a whole, or to a screen or - display, rather than to a particular window. 
- - This is a mixin class, assumed to be mixed to Tkinter.Tk - that supports the self.tk.call method. - """ - - def tix_addbitmapdir(self, directory: str) -> None: - """Tix maintains a list of directories under which - the tix_getimage and tix_getbitmap commands will - search for image files. The standard bitmap directory - is $TIX_LIBRARY/bitmaps. The addbitmapdir command - adds directory into this list. By using this - command, the image files of an applications can - also be located using the tix_getimage or tix_getbitmap - command. - """ - - def tix_cget(self, option: str) -> Any: - """Returns the current value of the configuration - option given by option. Option may be any of the - options described in the CONFIGURATION OPTIONS section. - """ - - def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: - """Query or modify the configuration options of the Tix application - context. If no option is specified, returns a dictionary all of the - available options. If option is specified with no value, then the - command returns a list describing the one named option (this list - will be identical to the corresponding sublist of the value - returned if no option is specified). If one or more option-value - pairs are specified, then the command modifies the given option(s) - to have the given value(s); in this case the command returns an - empty string. Option may be any of the configuration options. - """ - - def tix_filedialog(self, dlgclass: str | None = None) -> str: - """Returns the file selection dialog that may be shared among - different calls from this application. This command will create a - file selection dialog widget when it is called the first time. This - dialog will be returned by all subsequent calls to tix_filedialog. - An optional dlgclass parameter can be passed to specified what type - of file selection dialog widget is desired. Possible options are - tix FileSelectDialog or tixExFileSelectDialog. - """ - - def tix_getbitmap(self, name: str) -> str: - """Locates a bitmap file of the name name.xpm or name in one of the - bitmap directories (see the tix_addbitmapdir command above). By - using tix_getbitmap, you can avoid hard coding the pathnames of the - bitmap files in your application. When successful, it returns the - complete pathname of the bitmap file, prefixed with the character - '@'. The returned value can be used to configure the -bitmap - option of the TK and Tix widgets. - """ - - def tix_getimage(self, name: str) -> str: - """Locates an image file of the name name.xpm, name.xbm or name.ppm - in one of the bitmap directories (see the addbitmapdir command - above). If more than one file with the same name (but different - extensions) exist, then the image type is chosen according to the - depth of the X display: xbm images are chosen on monochrome - displays and color images are chosen on color displays. By using - tix_ getimage, you can avoid hard coding the pathnames of the - image files in your application. When successful, this command - returns the name of the newly created image, which can be used to - configure the -image option of the Tk and Tix widgets. - """ - - def tix_option_get(self, name: str) -> Any: - """Gets the options maintained by the Tix - scheme mechanism. 
Available options include: - - active_bg active_fg bg - bold_font dark1_bg dark1_fg - dark2_bg dark2_fg disabled_fg - fg fixed_font font - inactive_bg inactive_fg input1_bg - input2_bg italic_font light1_bg - light1_fg light2_bg light2_fg - menu_font output1_bg output2_bg - select_bg select_fg selector - """ - - def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: - """Resets the scheme and fontset of the Tix application to - newScheme and newFontSet, respectively. This affects only those - widgets created after this call. Therefore, it is best to call the - resetoptions command before the creation of any widgets in a Tix - application. - - The optional parameter newScmPrio can be given to reset the - priority level of the Tk options set by the Tix schemes. - - Because of the way Tk handles the X option database, after Tix has - been has imported and inited, it is not possible to reset the color - schemes and font sets using the tix config command. Instead, the - tix_resetoptions command must be used. - """ + def tix_addbitmapdir(self, directory: str) -> None: ... + def tix_cget(self, option: str) -> Any: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... + def tix_getbitmap(self, name: str) -> str: ... + def tix_getimage(self, name: str) -> str: ... + def tix_option_get(self, name: str) -> Any: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... class Tk(tkinter.Tk, tixCommand): - """Toplevel widget of Tix which represents mostly the main window - of an application. It has an associated Tcl interpreter. - """ - def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): - """A TixWidget class is used to package all (or most) Tix widgets. - - Widget initialization is extended in two ways: - 1) It is possible to give a list of options which must be part of - the creation command (so called Tix 'static' options). These cannot be - given as a 'config' command later. - 2) It is possible to give the name of an existing TK widget. These are - child widgets created automatically by a Tix mega-widget. The Tk call - to create these widgets is therefore bypassed in TixWidget.__init__ - - Both options are for use by subclasses only. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -168,38 +57,17 @@ class TixWidget(tkinter.Widget): kw: dict[str, Any] = {}, ) -> None: ... def __getattr__(self, name: str): ... - def set_silent(self, value: str) -> None: - """Set a variable without calling its action routine""" - - def subwidget(self, name: str) -> tkinter.Widget: - """Return the named subwidget (which must have been created by - the sub-class). - """ - - def subwidgets_all(self) -> list[tkinter.Widget]: - """Return all subwidgets.""" - - def config_all(self, option: Any, value: Any) -> None: - """Set configuration options for all subwidgets (and self).""" - + def set_silent(self, value: str) -> None: ... + def subwidget(self, name: str) -> tkinter.Widget: ... + def subwidgets_all(self) -> list[tkinter.Widget]: ... + def config_all(self, option: Any, value: Any) -> None: ... def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): - """Subwidget class. 
- - This is used to mirror child widgets automatically created - by Tix/Tk as part of a mega-widget in Python (which is not informed - of this) - """ - def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: - """DisplayStyle - handle configuration options shared by - (multiple) Display Items - """ - def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... @@ -207,47 +75,16 @@ class DisplayStyle: def config(self, cnf: dict[str, Any] = {}, **kw): ... class Balloon(TixWidget): - """Balloon help widget. - - Subwidget Class - --------- ----- - label Label - message Message - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: - """Bind balloon widget to another. - One balloon widget may be bound to several widgets at the same time - """ - + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): - """ButtonBox - A container for pushbuttons. - Subwidgets are the buttons added with the add method. - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: - """Add a button with given name to box.""" - + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): - """ComboBox - an Entry field with a dropdown menu. The user can select a - choice by either typing in the entry subwidget or selecting from the - listbox subwidget. - - Subwidget Class - --------- ----- - entry Entry - arrow Button - slistbox ScrolledListBox - tick Button - cross Button : present if created with the fancy option - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... @@ -255,67 +92,21 @@ class ComboBox(TixWidget): def pick(self, index: int) -> None: ... class Control(TixWidget): - """Control - An entry field with value change arrows. The user can - adjust the value by pressing the two arrow buttons or by entering - the value directly into the entry. The new value will be checked - against the user-defined upper and lower limits. - - Subwidget Class - --------- ----- - incr Button - decr Button - entry Entry - label Label - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): - """LabelEntry - Entry field with label. Packages an entry widget - and a label into one mega widget. It can be used to simplify the creation - of ``entry-form'' type of interface. - - Subwidgets Class - ---------- ----- - label Label - entry Entry - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class LabelFrame(TixWidget): - """LabelFrame - Labelled Frame container. Packages a frame widget - and a label into one mega widget. 
To create widgets inside a - LabelFrame widget, one creates the new widgets relative to the - frame subwidget and manage them inside the frame subwidget. - - Subwidgets Class - ---------- ----- - label Label - frame Frame - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Meter(TixWidget): - """The Meter widget can be used to show the progress of a background - job which may take a long time to execute. - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class OptionMenu(TixWidget): - """OptionMenu - creates a menu button of options. - - Subwidget Class - --------- ----- - menubutton Menubutton - menu Menu - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... @@ -324,165 +115,52 @@ class OptionMenu(TixWidget): def enable(self, name: str) -> None: ... class PopupMenu(TixWidget): - """PopupMenu widget can be used as a replacement of the tk_popup command. - The advantage of the Tix PopupMenu widget is it requires less application - code to manipulate. - - - Subwidgets Class - ---------- ----- - menubutton Menubutton - menu Menu - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... class Select(TixWidget): - """Select - Container of button subwidgets. It can be used to provide - radio-box or check-box style of selection options for the user. - - Subwidgets are buttons added dynamically using the add method. - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help)""" - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): - """DirList - displays a list view of a directory, its previous - directories and its sub-directories. The user can choose one of - the directories displayed in the list or change to another directory. - - Subwidget Class - --------- ----- - hlist HList - hsb Scrollbar - vsb Scrollbar - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirTree(TixWidget): - """DirTree - Directory Listing in a hierarchical view. - Displays a tree view of a directory, its previous directories and its - sub-directories. The user can choose one of the directories displayed - in the list or change to another directory. - - Subwidget Class - --------- ----- - hlist HList - hsb Scrollbar - vsb Scrollbar - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirSelectDialog(TixWidget): - """The DirSelectDialog widget presents the directories in the file - system in a dialog window. 
The user can use this dialog window to - navigate through the file system to select the desired directory. - - Subwidgets Class - ---------- ----- - dirbox DirSelectDialog - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def popup(self) -> None: ... def popdown(self) -> None: ... class DirSelectBox(TixWidget): - """DirSelectBox - Motif style file select box. - It is generally used for - the user to choose a file. FileSelectBox stores the files mostly - recently selected into a ComboBox widget so that they can be quickly - selected again. - - Subwidget Class - --------- ----- - selection ComboBox - filter ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... class ExFileSelectBox(TixWidget): - """ExFileSelectBox - MS Windows style file select box. - It provides a convenient method for the user to select files. - - Subwidget Class - --------- ----- - cancel Button - ok Button - hidden Checkbutton - types ComboBox - dir ComboBox - file ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def filter(self) -> None: ... def invoke(self) -> None: ... class FileSelectBox(TixWidget): - """ExFileSelectBox - Motif style file select box. - It is generally used for - the user to choose a file. FileSelectBox stores the files mostly - recently selected into a ComboBox widget so that they can be quickly - selected again. - - Subwidget Class - --------- ----- - selection ComboBox - filter ComboBox - dirlist ScrolledListBox - filelist ScrolledListBox - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def apply_filter(self) -> None: ... def invoke(self) -> None: ... class FileEntry(TixWidget): - """FileEntry - Entry field with button that invokes a FileSelectDialog. - The user can type in the filename manually. Alternatively, the user can - press the button widget that sits next to the entry, which will bring - up a file selection dialog. - - Subwidgets Class - ---------- ----- - button Button - entry Entry - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self) -> None: ... def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): - """HList - Hierarchy display widget can be used to display any data - that have a hierarchical structure, for example, file system directory - trees. The list entries are indented and connected by branch lines - according to their places in the hierarchy. - - Subwidgets - None - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... @@ -539,89 +217,24 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): - """The CheckList widget - displays a list of items to be selected by the user. CheckList acts - similarly to the Tk checkbutton or radiobutton widgets, except it is - capable of handling many more items than checkbuttons or radiobuttons. - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... 
- def autosetmode(self) -> None: - """This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. - """ - - def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close.""" - - def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath.""" - - def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open.""" - - def getselection(self, mode: str = "on") -> tuple[str, ...]: - """Returns a list of items whose status matches status. If status is - not specified, the list of items in the "on" status will be returned. - Mode can be on, off, default - """ - - def getstatus(self, entrypath: str) -> str: - """Returns the current status of entryPath.""" - - def setstatus(self, entrypath: str, mode: str = "on") -> None: - """Sets the status of entryPath to be status. A bitmap will be - displayed next to the entry its status is on, off or default. - """ + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... + def getstatus(self, entrypath: str) -> str: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... class Tree(TixWidget): - """Tree - The tixTree widget can be used to display hierarchical - data in a tree form. The user can adjust - the view of the tree by opening or closing parts of the tree. - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def autosetmode(self) -> None: - """This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. - """ - - def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close.""" - - def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath.""" - - def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open.""" - - def setmode(self, entrypath: str, mode: str = "none") -> None: - """This command is used to indicate whether the entry given by - entryPath has children entries and whether the children are visible. mode - must be one of open, close or none. If mode is set to open, a (+) - indicator is drawn next the entry. If mode is set to close, a (-) - indicator is drawn next the entry. If mode is set to none, no - indicators will be drawn for this entry. The default mode is none. The - open mode indicates the entry has hidden children and this entry can be - opened by the user. The close mode indicates that all the children of the - entry are now visible and the entry can be closed by the user. - """ + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... 
class TList(TixWidget, tkinter.XView, tkinter.YView): - """TList - Hierarchy display widget which can be - used to display data in a tabular format. The list entries of a TList - widget are similar to the entries in the Tk listbox widget. The main - differences are (1) the TList widget can display the list entries in a - two dimensional format and (2) you can use graphical images as well as - multiple colors and fonts for the list entries. - - Subwidgets - None - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... @@ -648,17 +261,6 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def selection_set(self, first: int, last: int | None = None) -> None: ... class PanedWindow(TixWidget): - """PanedWindow - Multi-pane container widget - allows the user to interactively manipulate the sizes of several - panes. The panes can be arranged either vertically or horizontally.The - user changes the sizes of the panes by dragging the resize handle - between two panes. - - Subwidgets Class - ---------- ----- - g/p widgets added dynamically with the add method. - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -668,14 +270,6 @@ class PanedWindow(TixWidget): def panes(self) -> list[tkinter.Widget]: ... class ListNoteBook(TixWidget): - """A ListNoteBook widget is very similar to the TixNoteBook widget: - it can be used to display many windows in a limited space using a - notebook metaphor. The notebook is divided into a stack of pages - (windows). At one time only one of these pages can be shown. - The user can navigate through these pages by - choosing the name of the desired page in the hlist subwidget. - """ - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def page(self, name: str) -> tkinter.Widget: ... @@ -683,14 +277,6 @@ class ListNoteBook(TixWidget): def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): - """NoteBook - Multi-page container widget (tabbed notebook metaphor). - - Subwidgets Class - ---------- ----- - nbframe NoteBookFrame - page widgets added dynamically with the add method - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -700,20 +286,9 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... class InputOnly(TixWidget): - """InputOnly - Invisible widget. Unix only. - - Subwidgets - None - """ - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Form: - """The Tix Form geometry manager - - Widgets can be arranged by specifying attachments to other widgets. - See Tix documentation for complete details - """ - def __setitem__(self, key: str, value: Any) -> None: ... def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi index 8c6d70c398fe3..1d72acd995126 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi @@ -1,17 +1,3 @@ -"""Ttk wrapper. - -This module provides classes to allow using Tk themed widget set. - -Ttk is based on a revised and enhanced version of -TIP #48 (http://tip.tcl.tk/48) specified style engine. - -Its basic idea is to separate, to the extent possible, the code -implementing a widget's behavior from the code implementing its -appearance. Widget class bindings are primarily responsible for -maintaining the widget state and invoking callbacks, all aspects -of the widgets appearance lies at Themes. -""" - import _tkinter import sys import tkinter @@ -49,19 +35,8 @@ __all__ = [ "Spinbox", ] -def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: - """Returns adict with its values converted from Tcl objects to Python - objects. - """ - -def setup_master(master: tkinter.Misc | None = None): - """If master is not None, itself is returned. If master is None, - the default master is returned if there is one, otherwise a new - master is created and returned. - - If it is not allowed to use the default root and master is None, - RuntimeError is raised. - """ +def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... +def setup_master(master: tkinter.Misc | None = None): ... _Padding: TypeAlias = ( float @@ -137,82 +112,24 @@ _ThemeSettingsValue = TypedDict( _ThemeSettings: TypeAlias = dict[str, _ThemeSettingsValue] class Style: - """Manipulate style database.""" - master: tkinter.Misc tk: _tkinter.TkappType def __init__(self, master: tkinter.Misc | None = None) -> None: ... # For these methods, values given vary between options. Returned values # seem to be str, but this might not always be the case. @overload - def configure(self, style: str) -> dict[str, Any] | None: # Returns None if no configuration. - """Query or sets the default value of the specified option(s) in - style. - - Each key in kw is an option and each value is either a string or - a sequence identifying the value for that option. - """ - + def configure(self, style: str) -> dict[str, Any] | None: ... # Returns None if no configuration. @overload def configure(self, style: str, query_opt: str, **kw: Any) -> Any: ... @overload def configure(self, style: str, query_opt: None = None, **kw: Any) -> None: ... @overload - def map(self, style: str, query_opt: str) -> _Statespec: - """Query or sets dynamic values of the specified option(s) in - style. - - Each key in kw is an option and each value should be a list or a - tuple (usually) containing statespecs grouped in tuples, or list, - or something else of your preference. A statespec is compound of - one or more states and then a value. - """ - + def map(self, style: str, query_opt: str) -> _Statespec: ... @overload def map(self, style: str, query_opt: None = None, **kw: Iterable[_Statespec]) -> dict[str, _Statespec]: ... - def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: - """Returns the value specified for option in style. - - If state is specified it is expected to be a sequence of one - or more states. If the default argument is set, it is used as - a fallback value in case no specification for option is found. 
- """ - + def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: ... @overload - def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: # Always seems to return an empty list - """Define the widget layout for given style. If layoutspec is - omitted, return the layout specification for given style. - - layoutspec is expected to be a list or an object different than - None that evaluates to False if you want to "turn off" that style. - If it is a list (or tuple, or something else), each item should be - a tuple where the first item is the layout name and the second item - should have the format described below: - - LAYOUTS - - A layout can contain the value None, if takes no options, or - a dict of options specifying how to arrange the element. - The layout mechanism uses a simplified version of the pack - geometry manager: given an initial cavity, each element is - allocated a parcel. Valid options/values are: - - side: whichside - Specifies which side of the cavity to place the - element; one of top, right, bottom or left. If - omitted, the element occupies the entire cavity. - - sticky: nswe - Specifies where the element is placed inside its - allocated parcel. - - children: [sublayout... ] - Specifies a list of elements to place inside the - element. Each element is a tuple (or other sequence) - where the first item is the layout name, and the other - is a LAYOUT. - """ - + def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: ... # Always seems to return an empty list @overload def layout(self, style: str, layoutspec: None = None) -> _LayoutSpec: ... @overload @@ -228,9 +145,7 @@ class Style: padding: _Padding = ..., sticky: str = ..., width: float | str = ..., - ) -> None: - """Create a new element in the current theme of given etype.""" - + ) -> None: ... @overload def element_create(self, elementname: str, etype: Literal["from"], themename: str, fromelement: str = ..., /) -> None: ... if sys.platform == "win32" and sys.version_info >= (3, 13): # and tk version >= 8.6 @@ -248,9 +163,7 @@ class Style: /, *, padding: _Padding = ..., - ) -> None: - """Create a new element in the current theme of given etype.""" - + ) -> None: ... @overload def element_create( self, @@ -277,98 +190,23 @@ class Style: height: float | str, ) -> None: ... - def element_names(self) -> tuple[str, ...]: - """Returns the list of elements defined in the current theme.""" - - def element_options(self, elementname: str) -> tuple[str, ...]: - """Return the list of elementname's options.""" - - def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: - """Creates a new theme. - - It is an error if themename already exists. If parent is - specified, the new theme will inherit styles, elements and - layouts from the specified parent theme. If settings are present, - they are expected to have the same syntax used for theme_settings. - """ - - def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: - """Temporarily sets the current theme to themename, apply specified - settings and then restore the previous theme. - - Each key in settings is a style and each value may contain the - keys 'configure', 'map', 'layout' and 'element create' and they - are expected to have the same format as specified by the methods - configure, map, layout and element_create respectively. 
- """ - - def theme_names(self) -> tuple[str, ...]: - """Returns a list of all known themes.""" - + def element_names(self) -> tuple[str, ...]: ... + def element_options(self, elementname: str) -> tuple[str, ...]: ... + def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: ... + def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: ... + def theme_names(self) -> tuple[str, ...]: ... @overload - def theme_use(self, themename: str) -> None: - """If themename is None, returns the theme in use, otherwise, set - the current theme to themename, refreshes all widgets and emits - a <> event. - """ - + def theme_use(self, themename: str) -> None: ... @overload def theme_use(self, themename: None = None) -> str: ... class Widget(tkinter.Widget): - """Base class for Tk themed widgets.""" - - def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: - """Constructs a Ttk Widget with the parent master. - - STANDARD OPTIONS - - class, cursor, takefocus, style - - SCROLLABLE WIDGET OPTIONS - - xscrollcommand, yscrollcommand - - LABEL WIDGET OPTIONS - - text, textvariable, underline, image, compound, width - - WIDGET STATES - - active, disabled, focus, pressed, selected, background, - readonly, alternate, invalid - """ - - def identify(self, x: int, y: int) -> str: - """Returns the name of the element at position x, y, or the empty - string if the point does not lie within any element. - - x and y are pixel coordinates relative to the widget. - """ - - def instate(self, statespec, callback=None, *args, **kw): - """Test the widget's state. - - If callback is not specified, returns True if the widget state - matches statespec and False otherwise. If callback is specified, - then it will be invoked with *args, **kw if the widget state - matches statespec. statespec is expected to be a sequence. - """ - - def state(self, statespec=None): - """Modify or inquire widget state. - - Widget state is returned if statespec is None, otherwise it is - set according to the statespec flags and then a new state spec - is returned indicating which flags were changed. statespec is - expected to be a sequence. - """ + def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def instate(self, statespec, callback=None, *args, **kw): ... + def state(self, statespec=None): ... class Button(Widget): - """Ttk Button widget, displays a textual label and/or image, and - evaluates a command when pressed. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -388,19 +226,7 @@ class Button(Widget): textvariable: tkinter.Variable = ..., underline: int = -1, width: int | Literal[""] = "", - ) -> None: - """Construct a Ttk Button widget with the parent master. - - STANDARD OPTIONS - - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width - - WIDGET-SPECIFIC OPTIONS - - command, default, width - """ - + ) -> None: ... @overload def configure( self, @@ -419,23 +245,13 @@ class Button(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
@overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def invoke(self) -> Any: - """Invokes the command associated with the button.""" + def invoke(self) -> Any: ... class Checkbutton(Widget): - """Ttk Checkbutton widget which is either in on- or off-state.""" - def __init__( self, master: tkinter.Misc | None = None, @@ -460,19 +276,7 @@ class Checkbutton(Widget): # here: https://sourceforge.net/projects/tcl/files/Tcl/8.6.9/tcltk-release-notes-8.6.9.txt/view variable: tkinter.Variable = ..., width: int | Literal[""] = "", - ) -> None: - """Construct a Ttk Checkbutton widget with the parent master. - - STANDARD OPTIONS - - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width - - WIDGET-SPECIFIC OPTIONS - - command, offvalue, onvalue, variable - """ - + ) -> None: ... @overload def configure( self, @@ -493,32 +297,13 @@ class Checkbutton(Widget): underline: int = ..., variable: tkinter.Variable = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def invoke(self) -> Any: - """Toggles between the selected and deselected states and - invokes the associated command. If the widget is currently - selected, sets the option variable to the offvalue option - and deselects the widget; otherwise, sets the option variable - to the option onvalue. - - Returns the result of the associated command. - """ + def invoke(self) -> Any: ... class Entry(Widget, tkinter.Entry): - """Ttk Entry widget displays a one-line text string and allows that - string to be edited by the user. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -542,23 +327,7 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Constructs a Ttk Entry widget with the parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus, xscrollcommand - - WIDGET-SPECIFIC OPTIONS - - exportselection, invalidcommand, justify, show, state, - textvariable, validate, validatecommand, width - - VALIDATION MODES - - none, key, focus, focusin, focusout, all - """ - + ) -> None: ... @overload # type: ignore[override] def configure( self, @@ -580,14 +349,7 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) @@ -612,37 +374,14 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] 
| Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def bbox(self, index) -> tuple[int, int, int, int]: # type: ignore[override] - """Return a tuple of (x, y, width, height) which describes the - bounding box of the character given by index. - """ - - def identify(self, x: int, y: int) -> str: - """Returns the name of the element at position x, y, or the - empty string if the coordinates are outside the window. - """ - - def validate(self): - """Force revalidation, independent of the conditions specified - by the validate option. Returns False if validation fails, True - if it succeeds. Sets or clears the invalid state accordingly. - """ + def bbox(self, index) -> tuple[int, int, int, int]: ... # type: ignore[override] + def identify(self, x: int, y: int) -> str: ... + def validate(self): ... class Combobox(Entry): - """Ttk Combobox widget combines a text field with a pop-down list of - values. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -668,19 +407,7 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = 20, xscrollcommand: str | Callable[[float, float], object] = ..., # undocumented - ) -> None: - """Construct a Ttk Combobox widget with the parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - exportselection, justify, height, postcommand, state, - textvariable, values, width - """ - + ) -> None: ... @overload # type: ignore[override] def configure( self, @@ -705,14 +432,7 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) @@ -740,31 +460,13 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def current(self, newindex: int | None = None) -> int: - """If newindex is supplied, sets the combobox value to the - element at position newindex in the list of values. Otherwise, - returns the index of the current value in the list of values - or -1 if the current value does not appear in the list. 
- """ - - def set(self, value: Any) -> None: - """Sets the value of the combobox to value.""" + def current(self, newindex: int | None = None) -> int: ... + def set(self, value: Any) -> None: ... class Frame(Widget): - """Ttk Frame widget is a container, used to group other widgets - together. - """ - # This should be kept in sync with tkinter.ttk.LabeledScale.__init__() # (all of these keyword-only arguments are also present there) def __init__( @@ -782,18 +484,7 @@ class Frame(Widget): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: float | str = 0, - ) -> None: - """Construct a Ttk Frame with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - borderwidth, relief, padding, width, height - """ - + ) -> None: ... @overload def configure( self, @@ -808,21 +499,12 @@ class Frame(Widget): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): - """Ttk Label widget displays a textual label and/or image.""" - def __init__( self, master: tkinter.Misc | None = None, @@ -849,20 +531,7 @@ class Label(Widget): underline: int = -1, width: int | Literal[""] = "", wraplength: float | str = ..., - ) -> None: - """Construct a Ttk Label with parent master. - - STANDARD OPTIONS - - class, compound, cursor, image, style, takefocus, text, - textvariable, underline, width - - WIDGET-SPECIFIC OPTIONS - - anchor, background, font, foreground, justify, padding, - relief, text, wraplength - """ - + ) -> None: ... @overload def configure( self, @@ -888,24 +557,12 @@ class Label(Widget): underline: int = ..., width: int | Literal[""] = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Labelframe(Widget): - """Ttk Labelframe widget is a container used to group other widgets - together. It has an optional label, which may be a plain text string - or another widget. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -925,18 +582,7 @@ class Labelframe(Widget): text: float | str = "", underline: int = -1, width: float | str = 0, - ) -> None: - """Construct a Ttk Labelframe with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - labelanchor, text, underline, padding, labelwidget, width, - height - """ - + ) -> None: ... @overload def configure( self, @@ -955,14 +601,7 @@ class Labelframe(Widget): text: float | str = ..., underline: int = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. 
To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -970,10 +609,6 @@ class Labelframe(Widget): LabelFrame = Labelframe class Menubutton(Widget): - """Ttk Menubutton widget displays a textual label and/or image, and - displays a menu when pressed. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -993,19 +628,7 @@ class Menubutton(Widget): textvariable: tkinter.Variable = ..., underline: int = -1, width: int | Literal[""] = "", - ) -> None: - """Construct a Ttk Menubutton with parent master. - - STANDARD OPTIONS - - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width - - WIDGET-SPECIFIC OPTIONS - - direction, menu - """ - + ) -> None: ... @overload def configure( self, @@ -1024,24 +647,12 @@ class Menubutton(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Notebook(Widget): - """Ttk Notebook widget manages a collection of windows and displays - a single one at a time. Each child window is associated with a tab, - which the user may select to change the currently-displayed window. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1054,36 +665,7 @@ class Notebook(Widget): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = 0, - ) -> None: - """Construct a Ttk Notebook with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - height, padding, width - - TAB OPTIONS - - state, sticky, padding, text, image, compound, underline - - TAB IDENTIFIERS (tab_id) - - The tab_id argument found in several methods may take any of - the following forms: - - * An integer between zero and the number of tabs - * The name of a child window - * A positional specification of the form "@x,y", which - defines the tab - * The string "current", which identifies the - currently-selected tab - * The string "end", which returns the number of tabs (only - valid for method index) - """ - + ) -> None: ... @overload def configure( self, @@ -1095,14 +677,7 @@ class Notebook(Widget): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -1119,91 +694,18 @@ class Notebook(Widget): image=..., compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., underline: int = ..., - ) -> None: - """Adds a new tab to the notebook. 
- - If window is currently managed by the notebook but hidden, it is - restored to its previous position. - """ - - def forget(self, tab_id) -> None: # type: ignore[override] - """Removes the tab specified by tab_id, unmaps and unmanages the - associated window. - """ - - def hide(self, tab_id) -> None: - """Hides the tab specified by tab_id. - - The tab will not be displayed, but the associated window remains - managed by the notebook and its configuration remembered. Hidden - tabs may be restored with the add command. - """ - - def identify(self, x: int, y: int) -> str: - """Returns the name of the tab element at position x, y, or the - empty string if none. - """ - - def index(self, tab_id): - """Returns the numeric index of the tab specified by tab_id, or - the total number of tabs if tab_id is the string "end". - """ - - def insert(self, pos, child, **kw) -> None: - """Inserts a pane at the specified position. - - pos is either the string end, an integer index, or the name of - a managed child. If child is already managed by the notebook, - moves it to the specified position. - """ - - def select(self, tab_id=None): - """Selects the specified tab. - - The associated child window will be displayed, and the - previously-selected window (if different) is unmapped. If tab_id - is omitted, returns the widget name of the currently selected - pane. - """ - - def tab(self, tab_id, option=None, **kw): - """Query or modify the options of the specific tab_id. - - If kw is not given, returns a dict of the tab option values. If option - is specified, returns the value of that option. Otherwise, sets the - options to the corresponding values. - """ - - def tabs(self): - """Returns a list of windows managed by the notebook.""" - - def enable_traversal(self) -> None: - """Enable keyboard traversal for a toplevel window containing - this notebook. - - This will extend the bindings for the toplevel window containing - this notebook as follows: - - Control-Tab: selects the tab following the currently selected - one - - Shift-Control-Tab: selects the tab preceding the currently - selected one - - Alt-K: where K is the mnemonic (underlined) character of any - tab, will select that tab. - - Multiple notebooks in a single toplevel may be enabled for - traversal, including nested notebooks. However, notebook traversal - only works properly if all panes are direct children of the - notebook. - """ + ) -> None: ... + def forget(self, tab_id) -> None: ... # type: ignore[override] + def hide(self, tab_id) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def index(self, tab_id): ... + def insert(self, pos, child, **kw) -> None: ... + def select(self, tab_id=None): ... + def tab(self, tab_id, option=None, **kw): ... + def tabs(self): ... + def enable_traversal(self) -> None: ... class Panedwindow(Widget, tkinter.PanedWindow): - """Ttk Panedwindow widget displays a number of subwindows, stacked - either vertically or horizontally. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1217,31 +719,8 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: int = 0, - ) -> None: - """Construct a Ttk Panedwindow with parent master. 
- - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - orient, width, height - - PANE OPTIONS - - weight - """ - - def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: - """Add a child widget to the panedwindow in a new pane. - - The child argument is the name of the child widget - followed by pairs of arguments that specify how to - manage the windows. The possible options and values - are the ones accepted by the paneconfigure method. - """ - + ) -> None: ... + def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: ... @overload # type: ignore[override] def configure( self, @@ -1252,14 +731,7 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) @@ -1273,54 +745,17 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... forget = tkinter.PanedWindow.forget - def insert(self, pos, child, **kw) -> None: - """Inserts a pane at the specified positions. - - pos is either the string end, and integer index, or the name - of a child. If child is already managed by the paned window, - moves it to the specified position. - """ - - def pane(self, pane, option=None, **kw): - """Query or modify the options of the specified pane. - - pane is either an integer index or the name of a managed subwindow. - If kw is not given, returns a dict of the pane option values. If - option is specified then the value for that option is returned. - Otherwise, sets the options to the corresponding values. - """ - - def sashpos(self, index, newpos=None): - """If newpos is specified, sets the position of sash number index. - - May adjust the positions of adjacent sashes to ensure that - positions are monotonically increasing. Sash positions are further - constrained to be between 0 and the total size of the widget. - - Returns the new position of sash number index. - """ + def insert(self, pos, child, **kw) -> None: ... + def pane(self, pane, option=None, **kw): ... + def sashpos(self, index, newpos=None): ... PanedWindow = Panedwindow class Progressbar(Widget): - """Ttk Progressbar widget shows the status of a long-running - operation. They can operate in two modes: determinate mode shows the - amount completed relative to the total amount of work to be done, and - indeterminate mode provides an animated display to let the user know - that something is happening. 
- """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1337,18 +772,7 @@ class Progressbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", value: float = 0.0, variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> None: - """Construct a Ttk Progressbar with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - orient, length, mode, maximum, value, variable, phase - """ - + ) -> None: ... @overload def configure( self, @@ -1364,40 +788,15 @@ class Progressbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def start(self, interval: Literal["idle"] | int | None = None) -> None: - """Begin autoincrement mode: schedules a recurring timer event - that calls method step every interval milliseconds. - - interval defaults to 50 milliseconds (20 steps/second) if omitted. - """ - - def step(self, amount: float | None = None) -> None: - """Increments the value option by amount. - - amount defaults to 1.0 if omitted. - """ - - def stop(self) -> None: - """Stop autoincrement mode: cancels any recurring timer event - initiated by start. - """ + def start(self, interval: Literal["idle"] | int | None = None) -> None: ... + def step(self, amount: float | None = None) -> None: ... + def stop(self) -> None: ... class Radiobutton(Widget): - """Ttk Radiobutton widgets are used in groups to show or change a - set of mutually-exclusive options. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1418,19 +817,7 @@ class Radiobutton(Widget): value: Any = "1", variable: tkinter.Variable | Literal[""] = ..., width: int | Literal[""] = "", - ) -> None: - """Construct a Ttk Radiobutton with parent master. - - STANDARD OPTIONS - - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width - - WIDGET-SPECIFIC OPTIONS - - command, value, variable - """ - + ) -> None: ... @overload def configure( self, @@ -1450,31 +837,14 @@ class Radiobutton(Widget): value: Any = ..., variable: tkinter.Variable | Literal[""] = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def invoke(self) -> Any: - """Sets the option variable to the option value, selects the - widget, and invokes the associated command. - - Returns the result of the command, or an empty string if - no command is specified. - """ + def invoke(self) -> Any: ... 
# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scale(Widget, tkinter.Scale): # type: ignore[misc] - """Ttk Scale widget is typically used to control the numeric value of - a linked variable that varies uniformly over some range. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1492,18 +862,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] to: float = 1.0, value: float = 0, variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> None: - """Construct a Ttk Scale with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - command, from, length, orient, to, value, variable - """ - + ) -> None: ... @overload # type: ignore[override] def configure( self, @@ -1520,13 +879,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] to: float = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Modify or query scale options. - - Setting a value for any of the "from", "from_" or "to" options - generates a <> event. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why) @@ -1546,28 +899,13 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] to: float = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def get(self, x: int | None = None, y: int | None = None) -> float: - """Get the current value of the value option, or the value - corresponding to the coordinates x, y if they are specified. - - x and y are pixel coordinates relative to the scale widget - origin. - """ + def get(self, x: int | None = None, y: int | None = None) -> float: ... # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] - """Ttk Scrollbar controls the viewport of a scrollable widget.""" - def __init__( self, master: tkinter.Misc | None = None, @@ -1579,18 +917,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = "vertical", style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: - """Construct a Ttk Scrollbar with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - command, orient - """ - + ) -> None: ... @overload # type: ignore[override] def configure( self, @@ -1601,14 +928,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. 
- """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) @@ -1622,22 +942,11 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... class Separator(Widget): - """Ttk Separator widget displays a horizontal or vertical separator - bar. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1648,18 +957,7 @@ class Separator(Widget): orient: Literal["horizontal", "vertical"] = "horizontal", style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: - """Construct a Ttk Separator with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus - - WIDGET-SPECIFIC OPTIONS - - orient - """ - + ) -> None: ... @overload def configure( self, @@ -1669,23 +967,12 @@ class Separator(Widget): orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Sizegrip(Widget): - """Ttk Sizegrip allows the user to resize the containing toplevel - window by pressing and dragging the grip. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1695,14 +982,7 @@ class Sizegrip(Widget): name: str = ..., style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: - """Construct a Ttk Sizegrip with parent master. - - STANDARD OPTIONS - - class, cursor, state, style, takefocus - """ - + ) -> None: ... @overload def configure( self, @@ -1711,25 +991,12 @@ class Sizegrip(Widget): cursor: tkinter._Cursor = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Spinbox(Entry): - """Ttk Spinbox is an Entry with increment and decrement arrows - - It is commonly used for number entry or to select from a list of - string values. 
- """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1759,19 +1026,7 @@ class Spinbox(Entry): width: int = ..., # undocumented wrap: bool = False, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct a Ttk Spinbox widget with the parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus, validate, - validatecommand, xscrollcommand, invalidcommand - - WIDGET-SPECIFIC OPTIONS - - to, from_, increment, values, wrap, format, command - """ - + ) -> None: ... @overload # type: ignore[override] def configure( self, @@ -1800,19 +1055,11 @@ class Spinbox(Entry): width: int = ..., wrap: bool = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure # type: ignore[assignment] - def set(self, value: Any) -> None: - """Sets the value of the Spinbox to value.""" + def set(self, value: Any) -> None: ... @type_check_only class _TreeviewItemDict(TypedDict): @@ -1847,13 +1094,6 @@ class _TreeviewColumnDict(TypedDict): id: str class Treeview(Widget, tkinter.XView, tkinter.YView): - """Ttk Treeview widget displays a hierarchical collection of items. - - Each item has a textual label, an optional image, and an optional list - of data values. The data values are displayed in successive columns - after the tree label. - """ - def __init__( self, master: tkinter.Misc | None = None, @@ -1875,27 +1115,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: - """Construct a Ttk Treeview with parent master. - - STANDARD OPTIONS - - class, cursor, style, takefocus, xscrollcommand, - yscrollcommand - - WIDGET-SPECIFIC OPTIONS - - columns, displaycolumns, height, padding, selectmode, show - - ITEM OPTIONS - - text, image, values, open, tags - - TAG OPTIONS - - foreground, background, font, image - """ - + ) -> None: ... @overload def configure( self, @@ -1912,49 +1132,15 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure resources of a widget. - - The values for resources are specified as keyword - arguments. To get an overview about - the allowed keyword arguments call the method keys. - """ - + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: # type: ignore[override] - """Returns the bounding box (relative to the treeview widget's - window) of the specified item in the form x y width height. - - If column is specified, returns the bounding box of that cell. 
- If the item is not visible (i.e., if it is a descendant of a - closed item or is scrolled offscreen), returns an empty string. - """ - - def get_children(self, item: str | int | None = None) -> tuple[str, ...]: - """Returns a tuple of children belonging to item. - - If item is not specified, returns root children. - """ - - def set_children(self, item: str | int, *newchildren: str | int) -> None: - """Replaces item's child with newchildren. - - Children present in item that are not present in newchildren - are detached from tree. No items in newchildren may be an - ancestor of item. - """ - + def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | int | None = None) -> tuple[str, ...]: ... + def set_children(self, item: str | int, *newchildren: str | int) -> None: ... @overload - def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: - """Query or modify the options for the specified column. - - If kw is not given, returns a dict of the column option values. If - option is specified then the value for that option is returned. - Otherwise, sets the options to the corresponding values. - """ - + def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: ... @overload def column(self, column: str | int, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 @overload @@ -1975,56 +1161,15 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., # id is read-only ) -> _TreeviewColumnDict | None: ... - def delete(self, *items: str | int) -> None: - """Delete all specified items and all their descendants. The root - item may not be deleted. - """ - - def detach(self, *items: str | int) -> None: - """Unlinks all of the specified items from the tree. - - The items and all of their descendants are still present, and may - be reinserted at another point in the tree, but will not be - displayed. The root item may not be detached. - """ - - def exists(self, item: str | int) -> bool: - """Returns True if the specified item is present in the tree, - False otherwise. - """ - + def delete(self, *items: str | int) -> None: ... + def detach(self, *items: str | int) -> None: ... + def exists(self, item: str | int) -> bool: ... @overload # type: ignore[override] - def focus(self, item: None = None) -> str: # can return empty string - """If item is specified, sets the focus item to item. Otherwise, - returns the current focus item, or '' if there is none. - """ - + def focus(self, item: None = None) -> str: ... # can return empty string @overload def focus(self, item: str | int) -> Literal[""]: ... @overload - def heading(self, column: str | int, option: Literal["text"]) -> str: - """Query or modify the heading options for the specified column. - - If kw is not given, returns a dict of the heading option values. If - option is specified then the value for that option is returned. - Otherwise, sets the options to the corresponding values. - - Valid options/values are: - text: text - The text to display in the column heading - image: image_name - Specifies an image to display to the right of the column - heading - anchor: anchor - Specifies how the heading text should be aligned. One of - the standard Tk anchor values - command: callback - A callback to be invoked when the heading label is - pressed. 
- - To configure the tree column heading, call this with column = "#0" - """ - + def heading(self, column: str | int, option: Literal["text"]) -> str: ... @overload def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ... @overload @@ -2047,43 +1192,12 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): command: str | Callable[[], object] = ..., ) -> None: ... # Internal Method. Leave untyped: - def identify(self, component, x, y): # type: ignore[override] - """Returns a description of the specified component under the - point given by x and y, or the empty string if no such component - is present at that position. - """ - - def identify_row(self, y: int) -> str: - """Returns the item ID of the item at position y.""" - - def identify_column(self, x: int) -> str: - """Returns the data column identifier of the cell at position x. - - The tree column has ID #0. - """ - - def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: - """Returns one of: - - heading: Tree heading area. - separator: Space between two columns headings; - tree: The tree area. - cell: A data cell. - - * Availability: Tk 8.6 - """ - - def identify_element(self, x: int, y: int) -> str: # don't know what possible return values are - """Returns the element at position x, y. - - * Availability: Tk 8.6 - """ - - def index(self, item: str | int) -> int: - """Returns the integer index of item within its parent's list - of children. - """ - + def identify(self, component, x, y): ... # type: ignore[override] + def identify_row(self, y: int) -> str: ... + def identify_column(self, x: int) -> str: ... + def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... + def identify_element(self, x: int, y: int) -> str: ... # don't know what possible return values are + def index(self, item: str | int) -> int: ... def insert( self, parent: str, @@ -2096,31 +1210,9 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): values: list[Any] | tuple[Any, ...] = ..., open: bool = ..., tags: str | list[str] | tuple[str, ...] = ..., - ) -> str: - """Creates a new item and return the item identifier of the newly - created item. - - parent is the item ID of the parent item, or the empty string - to create a new top-level item. index is an integer, or the value - end, specifying where in the list of parent's children to insert - the new item. If index is less than or equal to zero, the new node - is inserted at the beginning, if index is greater than or equal to - the current number of children, it is inserted at the end. If iid - is specified, it is used as the item identifier, iid must not - already exist in the tree. Otherwise, a new unique identifier - is generated. - """ - + ) -> str: ... @overload - def item(self, item: str | int, option: Literal["text"]) -> str: - """Query or modify the options for the specified item. - - If no options are given, a dict with options/values for the item - is returned. If option is specified then the value for that option - is returned. Otherwise, sets the options to the corresponding - values as given by kw. - """ - + def item(self, item: str | int, option: Literal["text"]) -> str: ... @overload def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ... @overload @@ -2145,75 +1237,31 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): open: bool = ..., tags: str | list[str] | tuple[str, ...] = ..., ) -> None: ... 
- def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: - """Moves item to position index in parent's list of children. - - It is illegal to move an item under one of its descendants. If - index is less than or equal to zero, item is moved to the - beginning, if greater than or equal to the number of children, - it is moved to the end. If item was detached it is reattached. - """ + def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: ... reattach = move - def next(self, item: str | int) -> str: # returning empty string means last item - """Returns the identifier of item's next sibling, or '' if item - is the last child of its parent. - """ - - def parent(self, item: str | int) -> str: - """Returns the ID of the parent of item, or '' if item is at the - top level of the hierarchy. - """ - - def prev(self, item: str | int) -> str: # returning empty string means first item - """Returns the identifier of item's previous sibling, or '' if - item is the first child of its parent. - """ - - def see(self, item: str | int) -> None: - """Ensure that item is visible. - - Sets all of item's ancestors open option to True, and scrolls - the widget if necessary so that item is within the visible - portion of the tree. - """ - - def selection(self) -> tuple[str, ...]: - """Returns the tuple of selected items.""" - + def next(self, item: str | int) -> str: ... # returning empty string means last item + def parent(self, item: str | int) -> str: ... + def prev(self, item: str | int) -> str: ... # returning empty string means first item + def see(self, item: str | int) -> None: ... + def selection(self) -> tuple[str, ...]: ... @overload - def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """The specified items becomes the new selection.""" - + def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... @overload def selection_set(self, *items: str | int) -> None: ... @overload - def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Add all of the specified items to the selection.""" - + def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... @overload def selection_add(self, *items: str | int) -> None: ... @overload - def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Remove all of the specified items from the selection.""" - + def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... @overload def selection_remove(self, *items: str | int) -> None: ... @overload - def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Toggle the selection state of each specified item.""" - + def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... @overload def selection_toggle(self, *items: str | int) -> None: ... @overload - def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: - """Query or set the value of given item. - - With one argument, return a dictionary of column/value pairs - for the specified item. With two arguments, return the current - value of the specified column. With three arguments, set the - value of given column in given item to the specified value. 
- """ - + def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: ... @overload def set(self, item: str | int, column: str | int, value: None = None) -> Any: ... @overload @@ -2223,26 +1271,13 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def tag_bind( self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None - ) -> str: - """Bind a callback for the given event sequence to the tag tagname. - When an event is delivered to an item, the callbacks for each - of the item's tags option are called. - """ - + ) -> str: ... @overload def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... @overload def tag_bind(self, tagname: str, *, callback: str) -> None: ... @overload - def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: - """Query or modify the options for the specified tagname. - - If kw is not given, returns a dict of the option settings for tagname. - If option is specified, returns the value for that option for the - specified tagname. Otherwise, sets the options to the corresponding - values for the given tagname. - """ - + def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: ... @overload def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... @overload @@ -2260,25 +1295,11 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._Image | str = ..., ) -> _TreeviewTagDict | MaybeNone: ... # can be None but annoying to check @overload - def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: - """If item is specified, returns 1 or 0 depending on whether the - specified item has the given tagname. Otherwise, returns a list of - all items which have the specified tag. - - * Availability: Tk 8.6 - """ - + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... @overload def tag_has(self, tagname: str, item: str | int) -> bool: ... class LabeledScale(Frame): - """A Ttk Scale widget with a Ttk Label widget indicating its - current value. - - The Ttk Scale can be accessed through instance.scale, and Ttk Label - can be accessed through instance.label - """ - label: Label scale: Scale # This should be kept in sync with tkinter.ttk.Frame.__init__() @@ -2302,25 +1323,11 @@ class LabeledScale(Frame): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: float | str = 0, - ) -> None: - """Construct a horizontal LabeledScale with parent master, a - variable to be associated with the Ttk Scale widget and its range. - If variable is not specified, a tkinter.IntVar is created. - - WIDGET-SPECIFIC OPTIONS - - compound: 'top' or 'bottom' - Specifies how to display the label relative to the scale. - Defaults to 'top'. - """ + ) -> None: ... # destroy is overridden, signature does not change value: Any class OptionMenu(Menubutton): - """Themed OptionMenu, based after tkinter's OptionMenu, which allows - the user to select a value from a menu. 
- """ - def __init__( self, master: tkinter.Misc | None, @@ -2331,24 +1338,7 @@ class OptionMenu(Menubutton): style: str = "", direction: Literal["above", "below", "left", "right", "flush"] = "below", command: Callable[[tkinter.StringVar], object] | None = None, - ) -> None: - """Construct a themed OptionMenu widget with master as the parent, - the resource textvariable set to variable, the initially selected - value specified by the default parameter, the menu values given by - *values and additional keywords. - - WIDGET-SPECIFIC OPTIONS - - style: stylename - Menubutton style. - direction: 'above', 'below', 'left', 'right', or 'flush' - Menubutton direction. - command: callback - A callback that will be invoked after selecting an item. - """ + ) -> None: ... # configure, config, cget, destroy are inherited from Menubutton # destroy and __setitem__ are overridden, signature does not change - def set_menu(self, default: str | None = None, *values: str) -> None: - """Build a new menu of radiobuttons with *values and optionally - a default value. - """ + def set_menu(self, default: str | None = None, *values: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi index 3da230b17b471..fd1b10da1d12e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi @@ -1,5 +1,3 @@ -"""Token constants.""" - import sys from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi index 9dad927315954..00a24b4eea07d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi @@ -1,25 +1,3 @@ -"""Tokenization help for Python programs. - -tokenize(readline) is a generator that breaks a stream of bytes into -Python tokens. It decodes the bytes according to PEP-0263 for -determining source file encoding. - -It accepts a readline-like method which is called repeatedly to get the -next line of input (or b"" for EOF). It generates 5-tuples with these -members: - - the token type (see token.py) - the token (a string) - the starting (row, column) indices of the token (a 2-tuple of ints) - the ending (row, column) indices of the token (a 2-tuple of ints) - the original line (string) - -It is designed to match the working of the Python tokenizer exactly, except -that it produces COMMENT tokens for comments and gives type OP for all -operators. Additionally, all token lists start with an ENCODING token -which tells you which encoding was used to decode the bytes stream. -""" - import sys from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence @@ -163,12 +141,7 @@ class Untokenizer: encoding: str | None def add_whitespace(self, start: _Position) -> None: ... if sys.version_info >= (3, 12): - def add_backslash_continuation(self, start: _Position) -> None: - """Add backslash continuation characters if the row has increased - without encountering a newline token. - - This also inserts the correct amount of whitespace before the backslash. - """ + def add_backslash_continuation(self, start: _Position) -> None: ... def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... @@ -176,71 +149,11 @@ class Untokenizer: def escape_brackets(self, token: str) -> str: ... 
# Returns str, unless the ENCODING token is present, in which case it returns bytes. -def untokenize(iterable: Iterable[_Token]) -> str | Any: - """Transform tokens back into Python source code. - It returns a bytes object, encoded using the ENCODING - token, which is the first token sequence output by tokenize. - - Each element returned by the iterable must be a token sequence - with at least two elements, a token number and token value. If - only two tokens are passed, the resulting output is poor. - - The result is guaranteed to tokenize back to match the input so - that the conversion is lossless and round-trips are assured. - The guarantee applies only to the token type and token string as - the spacing between tokens (column positions) may change. - """ - -def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: - """ - The detect_encoding() function is used to detect the encoding that should - be used to decode a Python source file. It requires one argument, readline, - in the same way as the tokenize() generator. - - It will call readline a maximum of twice, and return the encoding used - (as a string) and a list of any lines (left as bytes) it has read in. - - It detects the encoding from the presence of a utf-8 bom or an encoding - cookie as specified in pep-0263. If both a bom and a cookie are present, - but disagree, a SyntaxError will be raised. If the encoding cookie is an - invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, - 'utf-8-sig' is returned. - - If no encoding is specified, then the default of 'utf-8' will be returned. - """ - -def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: - """ - The tokenize() generator requires one argument, readline, which - must be a callable object which provides the same interface as the - readline() method of built-in file objects. Each call to the function - should return one line of input as bytes. Alternatively, readline - can be a callable function terminating with StopIteration: - readline = open(myfile, 'rb').__next__ # Example of alternate readline - - The generator produces 5-tuples with these members: the token type; the - token string; a 2-tuple (srow, scol) of ints specifying the row and - column where the token begins in the source; a 2-tuple (erow, ecol) of - ints specifying the row and column where the token ends in the source; - and the line on which the token was found. The line passed is the - physical line. - - The first token sequence will always be an ENCODING token - which tells you which encoding was used to decode the bytes stream. - """ - -def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: - """Tokenize a source reading Python code as unicode strings. - - This has the same API as tokenize(), except that it expects the *readline* - callable to return str objects instead of bytes. - """ - -def open(filename: FileDescriptorOrPath) -> TextIO: - """Open a file in read only mode using the encoding detected by - detect_encoding(). - """ - +def untokenize(iterable: Iterable[_Token]) -> str | Any: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... +def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... +def open(filename: FileDescriptorOrPath) -> TextIO: ... def group(*choices: str) -> str: ... 
# undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi index 81a39b0ded553..4ff4097f8313a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi @@ -8,16 +8,6 @@ __all__ = ("loads", "load", "TOMLDecodeError") if sys.version_info >= (3, 14): class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML. - - Adds the following attributes to ValueError: - msg: The unformatted error message - doc: The TOML document being parsed - pos: The index of doc where parsing failed - lineno: The line corresponding to pos - colno: The column corresponding to pos - """ - msg: str doc: str pos: int @@ -30,11 +20,7 @@ if sys.version_info >= (3, 14): def __init__(self, msg: str | type = ..., doc: str | type = ..., pos: int | type = ..., *args: Any) -> None: ... else: - class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML.""" - -def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: - """Parse TOML from a binary file object.""" + class TOMLDecodeError(ValueError): ... -def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: - """Parse TOML from a string.""" +def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... +def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi index f2362a65d32be..7e7cc1e9ac54a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi @@ -1,24 +1,3 @@ -"""program/module to trace Python program or function execution - -Sample use, command line: - trace.py -c -f counts --ignore-dir '$prefix' spam.py eggs - trace.py -t --ignore-dir '$prefix' spam.py eggs - trace.py --trackcalls spam.py eggs - -Sample use, programmatically - import sys - - # create a Trace object, telling it what to ignore, and whether to - # do tracing or line-counting or both. - tracer = trace.Trace(ignoredirs=[sys.base_prefix, sys.base_exec_prefix,], - trace=0, count=1) - # run the new command using the given tracer - tracer.run('main()') - # make a report, placing output in /tmp - r = tracer.results() - r.write_results(show_missing=True, coverdir="/tmp") -""" - import sys import types from _typeshed import Incomplete, StrPath, TraceFunction @@ -47,8 +26,7 @@ class CoverageResults: callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, outfile: StrPath | None = None, ) -> None: ... # undocumented - def update(self, other: CoverageResults) -> None: - """Merge in the data from another CoverageResults""" + def update(self, other: CoverageResults) -> None: ... if sys.version_info >= (3, 13): def write_results( self, @@ -57,40 +35,14 @@ class CoverageResults: coverdir: StrPath | None = None, *, ignore_missing_files: bool = False, - ) -> None: - """ - Write the coverage results. - - :param show_missing: Show lines that had no hits. - :param summary: Include coverage summary per module. - :param coverdir: If None, the results of each module are placed in its - directory, otherwise it is included in the directory - specified. 
- :param ignore_missing_files: If True, counts for files that no longer - exist are silently ignored. Otherwise, a missing file - will raise a FileNotFoundError. - """ + ) -> None: ... else: - def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: - """ - Write the coverage results. - - :param show_missing: Show lines that had no hits. - :param summary: Include coverage summary per module. - :param coverdir: If None, the results of each module are placed in its - directory, otherwise it is included in the directory - specified. - """ + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... def write_results_file( self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None - ) -> tuple[int, int]: - """Return a coverage results file in path.""" - - def is_ignored_filename(self, filename: str) -> bool: # undocumented - """Return True if the filename does not refer to a file - we want to have reported. - """ + ) -> tuple[int, int]: ... + def is_ignored_filename(self, filename: str) -> bool: ... # undocumented class _Ignore: def __init__(self, modules: Iterable[str] | None = None, dirs: Iterable[StrPath] | None = None) -> None: ... @@ -118,50 +70,16 @@ class Trace: infile: StrPath | None = None, outfile: StrPath | None = None, timing: bool = False, - ) -> None: - """ - @param count true iff it should count number of times each - line is executed - @param trace true iff it should print out each line that is - being counted - @param countfuncs true iff it should just output a list of - (filename, modulename, funcname,) for functions - that were called at least once; This overrides - 'count' and 'trace' - @param ignoremods a list of the names of modules to ignore - @param ignoredirs a list of the names of directories to ignore - all of the (recursive) contents of - @param infile file from which to read stored counts to be - added into the results - @param outfile file in which to write the results - @param timing true iff timing information be displayed - """ - + ) -> None: ... def run(self, cmd: str | types.CodeType) -> None: ... def runctx( self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None ) -> None: ... def runfunc(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... def file_module_function_of(self, frame: types.FrameType) -> _FileModuleFunction: ... - def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: - """Handler for call events. - - Adds information about who called who to the self._callers dict. - """ - - def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: - """Handler for call events. - - Adds (filename, modulename, funcname) to the self._calledfuncs dict. - """ - - def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: - """Handler for call events. - - If the code block being entered is to be ignored, returns 'None', - else returns self.localtrace. - """ - + def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... 
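The Trace.__init__ parameters whose docstring is dropped above map directly onto the programmatic usage that the removed trace module docstring described; a minimal sketch that counts executed lines of a function (the target function and the temporary coverdir are illustrative):

import tempfile
import trace

def demo(n: int) -> int:      # illustrative target function
    total = 0
    for i in range(n):
        total += i
    return total

tracer = trace.Trace(count=1, trace=0)        # count line hits, do not echo lines
tracer.runfunc(demo, 10)
results = tracer.results()                    # a CoverageResults instance
with tempfile.TemporaryDirectory() as coverdir:
    # writes per-module .cover files into coverdir; show_missing marks unexecuted lines
    results.write_results(show_missing=True, coverdir=coverdir)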
def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi index 4aa3b7a49284c..d587295cd1cf7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi @@ -1,5 +1,3 @@ -"""Extract, format and print information about Python stack traces.""" - import sys from _typeshed import SupportsWrite, Unused from collections.abc import Generator, Iterable, Iterator, Mapping @@ -34,14 +32,7 @@ if sys.version_info >= (3, 14): _FrameSummaryTuple: TypeAlias = tuple[str, int, str, str | None] -def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: - """Print up to 'limit' stack trace entries from the traceback 'tb'. - - If 'limit' is omitted or None, all entries are printed. If 'file' - is omitted or None, the output goes to sys.stderr; otherwise - 'file' should be an open file or file-like object with a write() - method. - """ +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): @overload @@ -53,18 +44,7 @@ if sys.version_info >= (3, 10): limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True, - ) -> None: - """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. - - This differs from print_tb() in the following ways: (1) if - traceback is not None, it prints a header "Traceback (most recent - call last):"; (2) it prints the exception type and value after the - stack trace; (3) if type is SyntaxError and value has the - appropriate format, it prints the line where the syntax error - occurred with a caret on the next line indicating the approximate - position of the error. - """ - + ) -> None: ... @overload def print_exception( exc: BaseException, /, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True @@ -77,16 +57,7 @@ if sys.version_info >= (3, 10): tb: TracebackType | None = ..., limit: int | None = None, chain: bool = True, - ) -> list[str]: - """Format a stack trace and the exception information. - - The arguments have the same meaning as the corresponding arguments - to print_exception(). The return value is a list of strings, each - ending in a newline and some containing internal newlines. When - these lines are concatenated and printed, exactly the same text is - printed as does print_exception(). - """ - + ) -> list[str]: ... @overload def format_exception(exc: BaseException, /, *, limit: int | None = None, chain: bool = True) -> list[str]: ... @@ -98,171 +69,44 @@ else: limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True, - ) -> None: - """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. - - This differs from print_tb() in the following ways: (1) if - traceback is not None, it prints a header "Traceback (most recent - call last):"; (2) it prints the exception type and value after the - stack trace; (3) if type is SyntaxError and value has the - appropriate format, it prints the line where the syntax error - occurred with a caret on the next line indicating the approximate - position of the error. - """ - + ) -> None: ... 
def format_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, limit: int | None = None, chain: bool = True, - ) -> list[str]: - """Format a stack trace and the exception information. - - The arguments have the same meaning as the corresponding arguments - to print_exception(). The return value is a list of strings, each - ending in a newline and some containing internal newlines. When - these lines are concatenated and printed, exactly the same text is - printed as does print_exception(). - """ - -def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """Shorthand for 'print_exception(sys.exception(), limit=limit, file=file, chain=chain)'.""" - -def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """This is a shorthand for 'print_exception(sys.last_exc, limit=limit, file=file, chain=chain)'.""" - -def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: - """Print a stack trace from its invocation point. - - The optional 'f' argument can be used to specify an alternate - stack frame at which to start. The optional 'limit' and 'file' - arguments have the same meaning as for print_exception(). - """ - -def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: - """ - Return a StackSummary object representing a list of - pre-processed entries from traceback. - - This is useful for alternate formatting of stack traces. If - 'limit' is omitted or None, all entries are extracted. A - pre-processed stack trace entry is a FrameSummary object - containing attributes filename, lineno, name, and line - representing the information that is usually printed for a stack - trace. The line is a string with leading and trailing - whitespace stripped; if the source is not available it is None. - """ + ) -> list[str]: ... -def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: - """Extract the raw traceback from the current stack frame. - - The return value has the same format as for extract_tb(). The - optional 'f' and 'limit' arguments have the same meaning as for - print_stack(). Each item in the list is a quadruple (filename, - line number, function name, text), and the entries are in order - from oldest to newest stack frame. - """ - -def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: - """Format a list of tuples or FrameSummary objects for printing. - - Given a list of tuples or FrameSummary objects as returned by - extract_tb() or extract_stack(), return a list of strings ready - for printing. - - Each string in the resulting list corresponds to the item with the - same index in the argument list. Each string ends in a newline; - the strings may contain internal newlines as well, for those items - whose source text line is not None. - """ - -def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: - """Print the list of tuples as returned by extract_tb() or - extract_stack() as a formatted stack trace to the given file. - """ +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... 
+def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... +def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: ... +def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 13): @overload - def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: - """Format the exception part of a traceback. - - The return value is a list of strings, each ending in a newline. - - The list contains the exception's message, which is - normally a single string; however, for :exc:`SyntaxError` exceptions, it - contains several lines that (when printed) display detailed information - about where the syntax error occurred. Following the message, the list - contains the exception's ``__notes__``. - - When *show_group* is ``True``, and the exception is an instance of - :exc:`BaseExceptionGroup`, the nested exceptions are included as - well, recursively, with indentation relative to their nesting depth. - """ - + def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: ... @overload def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... elif sys.version_info >= (3, 10): @overload - def format_exception_only(exc: BaseException | None, /) -> list[str]: - """Format the exception part of a traceback. - - The return value is a list of strings, each ending in a newline. - - The list contains the exception's message, which is - normally a single string; however, for :exc:`SyntaxError` exceptions, it - contains several lines that (when printed) display detailed information - about where the syntax error occurred. Following the message, the list - contains the exception's ``__notes__``. - """ - + def format_exception_only(exc: BaseException | None, /) -> list[str]: ... @overload def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: ... else: - def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: - """Format the exception part of a traceback. - - The arguments are the exception type and value such as given by - sys.last_type and sys.last_value. The return value is a list of - strings, each ending in a newline. - - Normally, the list contains a single string; however, for - SyntaxError exceptions, it contains several lines that (when - printed) display detailed information about where the syntax - error occurred. - - The message indicating which exception occurred is always the last - string in the list. - - """ + def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... 
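The version-dependent format_exception()/format_exception_only() overloads above are reached most portably through TracebackException, whose hunks follow below; a minimal sketch of formatting a caught exception:

import traceback

try:
    1 / 0
except ZeroDivisionError as exc:
    te = traceback.TracebackException.from_exception(exc)
    print("".join(te.format()))                  # full traceback plus final message
    print("".join(te.format_exception_only()))   # just the "ZeroDivisionError: ..." line(s)
    # On 3.10+ the module-level helper also accepts the exception object directly:
    # lines = traceback.format_exception(exc)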
-def format_exc(limit: int | None = None, chain: bool = True) -> str: - """Like print_exc() but return a string.""" - -def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: - """A shorthand for 'format_list(extract_tb(tb, limit))'.""" - -def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: - """Shorthand for 'format_list(extract_stack(f, limit))'.""" - -def clear_frames(tb: TracebackType | None) -> None: - """Clear all references to local variables in the frames of a traceback.""" - -def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: - """Walk a stack yielding the frame and line number for each frame. - - This will follow f.f_back from the given frame. If no frame is given, the - current stack is used. Usually used with StackSummary.extract. - """ - -def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: - """Walk a traceback yielding the frame and line number for each frame. - - This will follow tb.tb_next (and thus is in the opposite order to - walk_stack). Usually used with StackSummary.extract. - """ +def format_exc(limit: int | None = None, chain: bool = True) -> str: ... +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... +def clear_frames(tb: TracebackType | None) -> None: ... +def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... +def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... if sys.version_info >= (3, 11): class _ExceptionPrintContext: @@ -270,45 +114,6 @@ if sys.version_info >= (3, 11): def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... class TracebackException: - """An exception ready for rendering. - - The traceback module captures enough attributes from the original exception - to this intermediary form to ensure that no references are held, while - still being able to fully print or format it. - - max_group_width and max_group_depth control the formatting of exception - groups. The depth refers to the nesting level of the group, and the width - refers to the size of a single exception group's exceptions array. The - formatted output is truncated when either limit is exceeded. - - Use `from_exception` to create TracebackException instances from exception - objects, or the constructor to create TracebackException instances from - individual components. - - - :attr:`__cause__` A TracebackException of the original *__cause__*. - - :attr:`__context__` A TracebackException of the original *__context__*. - - :attr:`exceptions` For exception groups - a list of TracebackException - instances for the nested *exceptions*. ``None`` for other exceptions. - - :attr:`__suppress_context__` The *__suppress_context__* value from the - original exception. - - :attr:`stack` A `StackSummary` representing the traceback. - - :attr:`exc_type` (deprecated) The class of the original traceback. - - :attr:`exc_type_str` String display of exc_type - - :attr:`filename` For syntax errors - the filename where the error - occurred. - - :attr:`lineno` For syntax errors - the linenumber where the error - occurred. - - :attr:`end_lineno` For syntax errors - the end linenumber where the error - occurred. Can be `None` if not present. - - :attr:`text` For syntax errors - the text where the error - occurred. 
- - :attr:`offset` For syntax errors - the offset into the text where the - error occurred. - - :attr:`end_offset` For syntax errors - the end offset into the text where - the error occurred. Can be `None` if not present. - - :attr:`msg` For syntax errors - the compiler error message. - """ - __cause__: TracebackException | None __context__: TracebackException | None if sys.version_info >= (3, 11): @@ -406,8 +211,7 @@ class TracebackException: compact: bool = False, max_group_width: int = 15, max_group_depth: int = 10, - ) -> Self: - """Create a TracebackException from an exception.""" + ) -> Self: ... elif sys.version_info >= (3, 10): @classmethod def from_exception( @@ -418,93 +222,29 @@ class TracebackException: lookup_lines: bool = True, capture_locals: bool = False, compact: bool = False, - ) -> Self: - """Create a TracebackException from an exception.""" + ) -> Self: ... else: @classmethod def from_exception( cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False - ) -> Self: - """Create a TracebackException from an exception.""" + ) -> Self: ... def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 11): - def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: - """Format the exception. - - If chain is not *True*, *__cause__* and *__context__* will not be formatted. - - The return value is a generator of strings, each ending in a newline and - some containing internal newlines. `print_exception` is a wrapper around - this method which just prints the lines to a file. - - The message indicating which exception occurred is always the last - string in the output. - """ + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... else: - def format(self, *, chain: bool = True) -> Generator[str, None, None]: - """Format the exception. - - If chain is not *True*, *__cause__* and *__context__* will not be formatted. - - The return value is a generator of strings, each ending in a newline and - some containing internal newlines. `print_exception` is a wrapper around - this method which just prints the lines to a file. + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... - The message indicating which exception occurred is always the last - string in the output. - """ if sys.version_info >= (3, 13): - def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: - """Format the exception part of the traceback. - - The return value is a generator of strings, each ending in a newline. - - Generator yields the exception message. - For :exc:`SyntaxError` exceptions, it - also yields (before the exception message) - several lines that (when printed) - display detailed information about where the syntax error occurred. - Following the message, generator also yields - all the exception's ``__notes__``. - - When *show_group* is ``True``, and the exception is an instance of - :exc:`BaseExceptionGroup`, the nested exceptions are included as - well, recursively, with indentation relative to their nesting depth. - """ + def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: ... else: - def format_exception_only(self) -> Generator[str, None, None]: - """Format the exception part of the traceback. 
- - The return value is a generator of strings, each ending in a newline. + def format_exception_only(self) -> Generator[str, None, None]: ... - Generator yields the exception message. - For :exc:`SyntaxError` exceptions, it - also yields (before the exception message) - several lines that (when printed) - display detailed information about where the syntax error occurred. - Following the message, generator also yields - all the exception's ``__notes__``. - """ if sys.version_info >= (3, 11): - def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """Print the result of self.format(chain=chain) to 'file'.""" + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... class FrameSummary: - """Information about a single frame from a traceback. - - - :attr:`filename` The filename for the frame. - - :attr:`lineno` The line within filename for the frame that was - active when the frame was captured. - - :attr:`name` The name of the function or method that was executing - when the frame was captured. - - :attr:`line` The text from the linecache module for the - of code that was running when the frame was captured. - - :attr:`locals` Either None if locals were not supplied, or a dict - mapping the name to the repr() of the variable. - """ - if sys.version_info >= (3, 13): __slots__ = ( "filename", @@ -535,16 +275,7 @@ class FrameSummary: end_lineno: int | None = None, colno: int | None = None, end_colno: int | None = None, - ) -> None: - """Construct a FrameSummary. - - :param lookup_line: If True, `linecache` is consulted for the source - code line. Otherwise, the line will be looked up when first needed. - :param locals: If supplied the frame locals, which will be captured as - object representations. - :param line: If provided, use this instead of looking up the line in - the linecache. - """ + ) -> None: ... end_lineno: int | None colno: int | None end_colno: int | None @@ -558,16 +289,7 @@ class FrameSummary: lookup_line: bool = True, locals: Mapping[str, str] | None = None, line: str | None = None, - ) -> None: - """Construct a FrameSummary. - - :param lookup_line: If True, `linecache` is consulted for the source - code line. Otherwise, the line will be looked up when first needed. - :param locals: If supplied the frame locals, which will be captured as - object representations. - :param line: If provided, use this instead of looking up the line in - the linecache. - """ + ) -> None: ... filename: str lineno: int | None name: str @@ -592,8 +314,6 @@ class FrameSummary: __hash__: ClassVar[None] # type: ignore[assignment] class StackSummary(list[FrameSummary]): - """A list of FrameSummary objects, representing a stack of frames.""" - @classmethod def extract( cls, @@ -602,42 +322,10 @@ class StackSummary(list[FrameSummary]): limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False, - ) -> StackSummary: - """Create a StackSummary from a traceback or stack object. - - :param frame_gen: A generator that yields (frame, lineno) tuples - whose summaries are to be included in the stack. - :param limit: None to include all frames or the number of frames to - include. - :param lookup_lines: If True, lookup lines for each frame immediately, - otherwise lookup is deferred until the frame is rendered. - :param capture_locals: If True, the local variables from each frame will - be captured as object representations into the FrameSummary. - """ - + ) -> StackSummary: ... 
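StackSummary.extract() above is usually paired with walk_stack()/walk_tb() from earlier in this file; a minimal sketch that captures and renders the current call stack:

import traceback

def inner() -> None:
    # walk_stack(None) yields (frame, lineno) pairs for the current stack;
    # extract() turns them into FrameSummary objects ready for formatting
    summary = traceback.StackSummary.extract(traceback.walk_stack(None))
    for line in summary.format():
        print(line, end="")

def outer() -> None:
    inner()

outer()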
@classmethod - def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: - """ - Create a StackSummary object from a supplied list of - FrameSummary objects or old-style list of tuples. - """ + def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: ... if sys.version_info >= (3, 11): - def format_frame_summary(self, frame_summary: FrameSummary) -> str: - """Format the lines for a single FrameSummary. - - Returns a string representing one frame involved in the stack. This - gets called for every frame to be printed in the stack summary. - """ - - def format(self) -> list[str]: - """Format the stack ready for printing. - - Returns a list of strings ready for printing. Each string in the - resulting list corresponds to a single frame from the stack. - Each string ends in a newline; the strings may contain internal - newlines as well, for those items with source text lines. + def format_frame_summary(self, frame_summary: FrameSummary) -> str: ... - For long sequences of the same frame and line, the first few - repetitions are shown, followed by a summary line stating the exact - number of further repetitions. - """ + def format(self) -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi index 5a5bb840853c4..31d8f74456395 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi @@ -4,19 +4,8 @@ from collections.abc import Sequence from typing import Any, SupportsIndex, overload from typing_extensions import TypeAlias -def get_object_traceback(obj: object) -> Traceback | None: - """ - Get the traceback where the Python object *obj* was allocated. - Return a Traceback instance. - - Return None if the tracemalloc module is not tracing memory allocations or - did not trace the allocation of the object. - """ - -def take_snapshot() -> Snapshot: - """ - Take a snapshot of traces of memory blocks allocated by Python. - """ +def get_object_traceback(obj: object) -> Traceback | None: ... +def take_snapshot() -> Snapshot: ... class BaseFilter: inclusive: bool @@ -43,10 +32,6 @@ class Filter(BaseFilter): ) -> None: ... class Statistic: - """ - Statistic difference on memory allocations between two Snapshot instance. - """ - __slots__ = ("traceback", "size", "count") count: int size: int @@ -56,11 +41,6 @@ class Statistic: def __hash__(self) -> int: ... class StatisticDiff: - """ - Statistic difference on memory allocations between an old and a new - Snapshot instance. - """ - __slots__ = ("traceback", "size", "size_diff", "count", "count_diff") count: int count_diff: int @@ -74,10 +54,6 @@ class StatisticDiff: _FrameTuple: TypeAlias = tuple[str, int] class Frame: - """ - Frame of a traceback. - """ - __slots__ = ("_frame",) @property def filename(self) -> str: ... @@ -88,31 +64,17 @@ class Frame: def __hash__(self) -> int: ... def __lt__(self, other: Frame) -> bool: ... if sys.version_info >= (3, 11): - def __gt__(self, other: Frame) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __ge__(self, other: Frame) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __le__(self, other: Frame) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __gt__(self, other: Frame) -> bool: ... + def __ge__(self, other: Frame) -> bool: ... 
+ def __le__(self, other: Frame) -> bool: ... else: - def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] class Trace: - """ - Trace of a memory block. - """ - __slots__ = ("_trace",) @property def domain(self) -> int: ... @@ -125,11 +87,6 @@ class Trace: def __hash__(self) -> int: ... class Traceback(Sequence[Frame]): - """ - Sequence of Frame instances sorted from the oldest frame - to the most recent frame. - """ - __slots__ = ("_frames", "_total_nframe") @property def total_nframe(self) -> int | None: ... @@ -145,59 +102,21 @@ class Traceback(Sequence[Frame]): def __hash__(self) -> int: ... def __lt__(self, other: Traceback) -> bool: ... if sys.version_info >= (3, 11): - def __gt__(self, other: Traceback) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __ge__(self, other: Traceback) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __le__(self, other: Traceback) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __gt__(self, other: Traceback) -> bool: ... + def __ge__(self, other: Traceback) -> bool: ... + def __le__(self, other: Traceback) -> bool: ... else: - def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" - - def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b).""" - - def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" + def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... class Snapshot: - """ - Snapshot of traces of memory blocks allocated by Python. - """ - def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... - def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: - """ - Compute the differences with an old snapshot old_snapshot. Get - statistics as a sorted list of StatisticDiff instances, grouped by - group_by. - """ - - def dump(self, filename: str) -> None: - """ - Write the snapshot into a file. - """ - - def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: - """ - Create a new Snapshot instance with a filtered traces sequence, filters - is a list of Filter or DomainFilter instances. If filters is an empty - list, return a new Snapshot instance with a copy of the traces. 
- """ - + def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... + def dump(self, filename: str) -> None: ... + def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... @staticmethod - def load(filename: str) -> Snapshot: - """ - Load a snapshot from a file. - """ - - def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: - """ - Group statistics by key_type. Return a sorted list of Statistic - instances. - """ + def load(filename: str) -> Snapshot: ... + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... traceback_limit: int traces: Sequence[Trace] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi index 0219428d92bba..ca3f0013b20ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi @@ -1,5 +1,3 @@ -"""Terminal utilities.""" - import sys import termios from typing import IO, Final @@ -24,14 +22,9 @@ if sys.platform != "win32": ISPEED: Final = 4 OSPEED: Final = 5 CC: Final = 6 - def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into raw mode.""" + def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... - def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into cbreak mode.""" if sys.version_info >= (3, 12): - def cfmakeraw(mode: termios._Attr) -> None: - """Make termios mode raw.""" - - def cfmakecbreak(mode: termios._Attr) -> None: - """Make termios mode cbreak.""" + def cfmakeraw(mode: termios._Attr) -> None: ... + def cfmakecbreak(mode: termios._Attr) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi index 87b56b7e49ffc..9b9b329bd74bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi @@ -1,80 +1,3 @@ -""" -Turtle graphics is a popular way for introducing programming to -kids. It was part of the original Logo programming language developed -by Wally Feurzig and Seymour Papert in 1966. - -Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it -the command turtle.forward(15), and it moves (on-screen!) 15 pixels in -the direction it is facing, drawing a line as it moves. Give it the -command turtle.right(25), and it rotates in-place 25 degrees clockwise. - -By combining together these and similar commands, intricate shapes and -pictures can easily be drawn. - ------ turtle.py - -This module is an extended reimplementation of turtle.py from the -Python standard distribution up to Python 2.5. (See: https://www.python.org) - -It tries to keep the merits of turtle.py and to be (nearly) 100% -compatible with it. This means in the first place to enable the -learning programmer to use all the commands, classes and methods -interactively when using the module from within IDLE run with -the -n switch. - -Roughly it has the following features added: - -- Better animation of the turtle movements, especially of turning the - turtle. So the turtles can more easily be used as a visual feedback - instrument by the (beginning) programmer. - -- Different turtle shapes, image files as turtle shapes, user defined - and user controllable turtle shapes, among them compound - (multicolored) shapes. 
Turtle shapes can be stretched and tilted, which - makes turtles very versatile geometrical objects. - -- Fine control over turtle movement and screen updates via delay(), - and enhanced tracer() and speed() methods. - -- Aliases for the most commonly used commands, like fd for forward etc., - following the early Logo traditions. This reduces the boring work of - typing long sequences of commands, which often occur in a natural way - when kids try to program fancy pictures on their first encounter with - turtle graphics. - -- Turtles now have an undo()-method with configurable undo-buffer. - -- Some simple commands/methods for creating event driven programs - (mouse-, key-, timer-events). Especially useful for programming games. - -- A scrollable Canvas class. The default scrollable Canvas can be - extended interactively as needed while playing around with the turtle(s). - -- A TurtleScreen class with methods controlling background color or - background image, window and canvas size and other properties of the - TurtleScreen. - -- There is a method, setworldcoordinates(), to install a user defined - coordinate-system for the TurtleScreen. - -- The implementation uses a 2-vector class named Vec2D, derived from tuple. - This class is public, so it can be imported by the application programmer, - which makes certain types of computations very natural and compact. - -- Appearance of the TurtleScreen and the Turtles at startup/import can be - configured by means of a turtle.cfg configuration file. - The default configuration mimics the appearance of the old turtle module. - -- If configured appropriately the module reads in docstrings from a docstring - dictionary in some different language, supplied separately and replaces - the English ones by those read in. There is a utility function - write_docstringdict() to write a dictionary with the original (English) - docstrings to disc, so it can serve as a template for translations. - -Behind the scenes there are some features included with possible -extensions in mind. These will be commented and documented elsewhere. -""" - import sys from _typeshed import StrPath from collections.abc import Callable, Generator, Sequence @@ -242,20 +165,6 @@ _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] if sys.version_info >= (3, 12): class Vec2D(tuple[float, float]): - """A 2 dimensional vector class, used as a helper class - for implementing turtle graphics. - May be useful for turtle graphics programs also. - Derived from tuple, so a vector is a tuple! - - Provides (for a, b vectors, k number): - a+b vector addition - a-b vector subtraction - a*b inner product - k*a and a*k multiplication with scalar - |a| absolute value of a - a.rotate(angle) rotation - """ - def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -266,26 +175,11 @@ if sys.version_info >= (3, 12): def __sub__(self, other: tuple[float, float]) -> Vec2D: ... def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... - def rotate(self, angle: float) -> Vec2D: - """rotate self counterclockwise by angle""" + def rotate(self, angle: float) -> Vec2D: ... else: @disjoint_base class Vec2D(tuple[float, float]): - """A 2 dimensional vector class, used as a helper class - for implementing turtle graphics. - May be useful for turtle graphics programs also. - Derived from tuple, so a vector is a tuple! 
- - Provides (for a, b vectors, k number): - a+b vector addition - a-b vector subtraction - a*b inner product - k*a and a*k multiplication with scalar - |a| absolute value of a - a.rotate(angle) rotation - """ - def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -296,17 +190,10 @@ else: def __sub__(self, other: tuple[float, float]) -> Vec2D: ... def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... - def rotate(self, angle: float) -> Vec2D: - """rotate self counterclockwise by angle""" + def rotate(self, angle: float) -> Vec2D: ... # Does not actually inherit from Canvas, but dynamically gets all methods of Canvas class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] - """Modeled after the scrolled canvas class from Grayons's Tkinter book. - - Used as the default canvas, which pops up automatically when - using turtle graphics functions or the Turtle class. - """ - bg: str hscroll: Scrollbar vscroll: Scrollbar @@ -315,890 +202,130 @@ class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] ) -> None: ... canvwidth: int canvheight: int - def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: - """Adjust canvas and scrollbars according to given canvas size.""" + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... class TurtleScreenBase: - """Provide the basic graphics functionality. - Interface between Tkinter and turtle.py. - - To port turtle.py to some different graphics toolkit - a corresponding TurtleScreenBase class has to be implemented. - """ - cv: Canvas canvwidth: int canvheight: int xscale: float yscale: float def __init__(self, cv: Canvas) -> None: ... - def mainloop(self) -> None: - """Starts event loop - calling Tkinter's mainloop function. - - No argument. - - Must be last statement in a turtle graphics program. - Must NOT be used if a script is run from within IDLE in -n mode - (No subprocess) - for interactive use of turtle graphics. - - Example (for a TurtleScreen instance named screen): - >>> screen.mainloop() - - """ - - def textinput(self, title: str, prompt: str) -> str | None: - """Pop up a dialog window for input of a string. - - Arguments: title is the title of the dialog window, - prompt is a text mostly describing what information to input. - - Return the string input - If the dialog is canceled, return None. - - Example (for a TurtleScreen instance named screen): - >>> screen.textinput("NIM", "Name of first player:") - - """ - + def mainloop(self) -> None: ... + def textinput(self, title: str, prompt: str) -> str | None: ... def numinput( self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None - ) -> float | None: - """Pop up a dialog window for input of a number. - - Arguments: title is the title of the dialog window, - prompt is a text mostly describing what numerical information to input. - default: default value - minval: minimum value for input - maxval: maximum value for input - - The number input must be in the range minval .. maxval if these are - given. If not, a hint is issued and the dialog remains open for - correction. Return the number input. - If the dialog is canceled, return None. 
- - Example (for a TurtleScreen instance named screen): - >>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) - - """ + ) -> float | None: ... -class Terminator(Exception): - """Will be raised in TurtleScreen.update, if _RUNNING becomes False. - - This stops execution of a turtle graphics script. - Main purpose: use in the Demo-Viewer turtle.Demo.py. - """ - -class TurtleGraphicsError(Exception): - """Some TurtleGraphics Error""" +class Terminator(Exception): ... +class TurtleGraphicsError(Exception): ... class Shape: - """Data structure modeling shapes. - - attribute _type is one of "polygon", "image", "compound" - attribute _data is - depending on _type a poygon-tuple, - an image or a list constructed using the addcomponent method. - """ - - def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = None) -> None: ... - def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: - """Add component to a shape of type compound. - - Arguments: poly is a polygon, i. e. a tuple of number pairs. - fill is the fillcolor of the component, - outline is the outline color of the component. - - call (for a Shapeobject namend s): - -- s.addcomponent(((0,0), (10,10), (-10,10)), "red", "blue") - - Example: - >>> poly = ((0,0),(10,-5),(0,10),(-10,-5)) - >>> s = Shape("compound") - >>> s.addcomponent(poly, "red", "blue") - >>> # .. add more components and then use register_shape() - """ + def __init__( + self, type_: Literal["polygon", "image", "compound"], data: _PolygonCoords | PhotoImage | None = None + ) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... class TurtleScreen(TurtleScreenBase): - """Provides screen oriented methods like bgcolor etc. - - Only relies upon the methods of TurtleScreenBase and NOT - upon components of the underlying graphics toolkit - - which is Tkinter in this case. - """ - - def __init__(self, cv: Canvas, mode: str = "standard", colormode: float = 1.0, delay: int = 10) -> None: ... - def clear(self) -> None: - """Delete all drawings and all turtles from the TurtleScreen. - - No argument. - - Reset empty TurtleScreen to its initial state: white background, - no backgroundimage, no eventbindings and tracing on. - - Example (for a TurtleScreen instance named screen): - >>> screen.clear() - - Note: this method is not available as function. - """ - + def __init__( + self, cv: Canvas, mode: Literal["standard", "logo", "world"] = "standard", colormode: float = 1.0, delay: int = 10 + ) -> None: ... + def clear(self) -> None: ... @overload - def mode(self, mode: None = None) -> str: - """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. - - Optional argument: - mode -- one of the strings 'standard', 'logo' or 'world' - - Mode 'standard' is compatible with turtle.py. - Mode 'logo' is compatible with most Logo-Turtle-Graphics. - Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in - this mode angles appear distorted if x/y unit-ratio doesn't equal 1. - If mode is not given, return the current mode. - - Mode Initial turtle heading positive angles - ------------|-------------------------|------------------- - 'standard' to the right (east) counterclockwise - 'logo' upward (north) clockwise - - Examples: - >>> mode('logo') # resets turtle heading to north - >>> mode() - 'logo' - """ - + def mode(self, mode: None = None) -> str: ... @overload - def mode(self, mode: str) -> None: ... 
- def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: - """Set up a user defined coordinate-system. - - Arguments: - llx -- a number, x-coordinate of lower left corner of canvas - lly -- a number, y-coordinate of lower left corner of canvas - urx -- a number, x-coordinate of upper right corner of canvas - ury -- a number, y-coordinate of upper right corner of canvas - - Set up user coodinat-system and switch to mode 'world' if necessary. - This performs a screen.reset. If mode 'world' is already active, - all drawings are redrawn according to the new coordinates. - - But ATTENTION: in user-defined coordinatesystems angles may appear - distorted. (see Screen.mode()) - - Example (for a TurtleScreen instance named screen): - >>> screen.setworldcoordinates(-10,-0.5,50,1.5) - >>> for _ in range(36): - ... left(10) - ... forward(0.5) - """ - - def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: - """Adds a turtle shape to TurtleScreen's shapelist. - - Arguments: - (1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! - (2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! - (3) name is an arbitrary string and shape is a tuple - of pairs of coordinates. Installs the corresponding - polygon shape - (4) name is an arbitrary string and shape is a - (compound) Shape object. Installs the corresponding - compound shape. - To use a shape, you have to issue the command shape(shapename). - - call: register_shape("turtle.gif") - --or: register_shape("tri", ((0,0), (10,10), (-10,10))) - - Example (for a TurtleScreen instance named screen): - >>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3))) - - """ - + def mode(self, mode: Literal["standard", "logo", "world"]) -> None: ... + def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload - def colormode(self, cmode: None = None) -> float: - """Return the colormode or set it to 1.0 or 255. - - Optional argument: - cmode -- one of the values 1.0 or 255 - - r, g, b values of colortriples have to be in range 0..cmode. - - Example (for a TurtleScreen instance named screen): - >>> screen.colormode() - 1.0 - >>> screen.colormode(255) - >>> pencolor(240,160,80) - """ - + def colormode(self, cmode: None = None) -> float: ... @overload def colormode(self, cmode: float) -> None: ... - def reset(self) -> None: - """Reset all Turtles on the Screen to their initial state. - - No argument. - - Example (for a TurtleScreen instance named screen): - >>> screen.reset() - """ - - def turtles(self) -> list[Turtle]: - """Return the list of turtles on the screen. - - Example (for a TurtleScreen instance named screen): - >>> screen.turtles() - [] - """ - + def reset(self) -> None: ... + def turtles(self) -> list[Turtle]: ... @overload - def bgcolor(self) -> _AnyColor: - """Set or return backgroundcolor of the TurtleScreen. - - Arguments (if given): a color string or three numbers - in the range 0..colormode or a 3-tuple of such numbers. 
- - Example (for a TurtleScreen instance named screen): - >>> screen.bgcolor("orange") - >>> screen.bgcolor() - 'orange' - >>> screen.bgcolor(0.5,0,0.5) - >>> screen.bgcolor() - '#800080' - """ - + def bgcolor(self) -> _AnyColor: ... @overload def bgcolor(self, color: _Color) -> None: ... @overload def bgcolor(self, r: float, g: float, b: float) -> None: ... @overload - def tracer(self, n: None = None) -> int: - """Turns turtle animation on/off and set delay for update drawings. - - Optional arguments: - n -- nonnegative integer - delay -- nonnegative integer - - If n is given, only each n-th regular screen update is really performed. - (Can be used to accelerate the drawing of complex graphics.) - Second arguments sets delay value (see RawTurtle.delay()) - - Example (for a TurtleScreen instance named screen): - >>> screen.tracer(8, 25) - >>> dist = 2 - >>> for i in range(200): - ... fd(dist) - ... rt(90) - ... dist += 2 - """ - + def tracer(self, n: None = None) -> int: ... @overload def tracer(self, n: int, delay: int | None = None) -> None: ... @overload - def delay(self, delay: None = None) -> int: - """Return or set the drawing delay in milliseconds. - - Optional argument: - delay -- positive integer - - Example (for a TurtleScreen instance named screen): - >>> screen.delay(15) - >>> screen.delay() - 15 - """ - + def delay(self, delay: None = None) -> int: ... @overload def delay(self, delay: int) -> None: ... if sys.version_info >= (3, 14): @contextmanager - def no_animation(self) -> Generator[None]: - """Temporarily turn off auto-updating the screen. - - This is useful for drawing complex shapes where even the fastest setting - is too slow. Once this context manager is exited, the drawing will - be displayed. - - Example (for a TurtleScreen instance named screen - and a Turtle instance named turtle): - >>> with screen.no_animation(): - ... turtle.circle(50) - """ - - def update(self) -> None: - """Perform a TurtleScreen update.""" - - def window_width(self) -> int: - """Return the width of the turtle window. - - Example (for a TurtleScreen instance named screen): - >>> screen.window_width() - 640 - """ - - def window_height(self) -> int: - """Return the height of the turtle window. - - Example (for a TurtleScreen instance named screen): - >>> screen.window_height() - 480 - """ - - def getcanvas(self) -> Canvas: - """Return the Canvas of this TurtleScreen. - - No argument. - - Example (for a Screen instance named screen): - >>> cv = screen.getcanvas() - >>> cv - - """ - - def getshapes(self) -> list[str]: - """Return a list of names of all currently available turtle shapes. - - No argument. - - Example (for a TurtleScreen instance named screen): - >>> screen.getshapes() - ['arrow', 'blank', 'circle', ... , 'turtle'] - """ - - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: - """Bind fun to mouse-click event on canvas. - - Arguments: - fun -- a function with two arguments, the coordinates of the - clicked point on the canvas. - btn -- the number of the mouse-button, defaults to 1 - - Example (for a TurtleScreen instance named screen) - - >>> screen.onclick(goto) - >>> # Subsequently clicking into the TurtleScreen will - >>> # make the turtle move to the clicked point. - >>> screen.onclick(None) - """ - - def onkey(self, fun: Callable[[], object], key: str) -> None: - """Bind fun to key-release event of key. - - Arguments: - fun -- a function with no arguments - key -- a string: key (e.g. "a") or key-symbol (e.g. 
"space") - - In order to be able to register key-events, TurtleScreen - must have focus. (See method listen.) - - Example (for a TurtleScreen instance named screen): - - >>> def f(): - ... fd(50) - ... lt(60) - ... - >>> screen.onkey(f, "Up") - >>> screen.listen() - - Subsequently the turtle can be moved by repeatedly pressing - the up-arrow key, consequently drawing a hexagon - - """ - - def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: - """Set focus on TurtleScreen (in order to collect key-events) - - No arguments. - Dummy arguments are provided in order - to be able to pass listen to the onclick method. - - Example (for a TurtleScreen instance named screen): - >>> screen.listen() - """ - - def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: - """Install a timer, which calls fun after t milliseconds. - - Arguments: - fun -- a function with no arguments. - t -- a number >= 0 - - Example (for a TurtleScreen instance named screen): - - >>> running = True - >>> def f(): - ... if running: - ... fd(50) - ... lt(60) - ... screen.ontimer(f, 250) - ... - >>> f() # makes the turtle marching around - >>> running = False - """ + def no_animation(self) -> Generator[None]: ... + def update(self) -> None: ... + def window_width(self) -> int: ... + def window_height(self) -> int: ... + def getcanvas(self) -> Canvas: ... + def getshapes(self) -> list[str]: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... + def onkey(self, fun: Callable[[], object], key: str) -> None: ... + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... @overload - def bgpic(self, picname: None = None) -> str: - """Set background image or return name of current backgroundimage. - - Optional argument: - picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". - - If picname is a filename, set the corresponding image as background. - If picname is "nopic", delete backgroundimage, if present. - If picname is None, return the filename of the current backgroundimage. - - Example (for a TurtleScreen instance named screen): - >>> screen.bgpic() - 'nopic' - >>> screen.bgpic("landscape.gif") - >>> screen.bgpic() - 'landscape.gif' - """ - + def bgpic(self, picname: None = None) -> str: ... @overload def bgpic(self, picname: str) -> None: ... @overload - def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: - """Resize the canvas the turtles are drawing on. - - Optional arguments: - canvwidth -- positive integer, new width of canvas in pixels - canvheight -- positive integer, new height of canvas in pixels - bg -- colorstring or color-tuple, new backgroundcolor - If no arguments are given, return current (canvaswidth, canvasheight) - - Do not alter the drawing window. To observe hidden parts of - the canvas use the scrollbars. (Can make visible those parts - of a drawing, which were outside the canvas before!) - - Example (for a Turtle instance named turtle): - >>> turtle.screensize(2000,1500) - >>> # e.g. to search for an erroneously escaped turtle ;-) - """ + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... 
if sys.version_info >= (3, 14): - def save(self, filename: StrPath, *, overwrite: bool = False) -> None: - """Save the drawing as a PostScript file - - Arguments: - filename -- a string, the path of the created file. - Must end with '.ps' or '.eps'. - - Optional arguments: - overwrite -- boolean, if true, then existing files will be overwritten - - Example (for a TurtleScreen instance named screen): - >>> screen.save('my_drawing.eps') - """ + def save(self, filename: StrPath, *, overwrite: bool = False) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape - def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: - """Bind fun to key-press event of key if key is given, - or to any key-press-event if no key is given. - - Arguments: - fun -- a function with no arguments - key -- a string: key (e.g. "a") or key-symbol (e.g. "space") - - In order to be able to register key-events, TurtleScreen - must have focus. (See method listen.) - - Example (for a TurtleScreen instance named screen - and a Turtle instance named turtle): - - >>> def f(): - ... fd(50) - ... lt(60) - ... - >>> screen.onkeypress(f, "Up") - >>> screen.listen() - - Subsequently the turtle can be moved by repeatedly pressing - the up-arrow key, or by keeping pressed the up-arrow key. - consequently drawing a hexagon. - """ + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey class TNavigator: - """Navigation part of the RawTurtle. - Implements methods for turtle movement. - """ - START_ORIENTATION: dict[str, Vec2D] DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int DEFAULT_ANGLEORIENT: int - def __init__(self, mode: str = "standard") -> None: ... - def reset(self) -> None: - """reset turtle to its initial values - - Will be overwritten by parent class - """ - - def degrees(self, fullcircle: float = 360.0) -> None: - """Set angle measurement units to degrees. - - Optional argument: - fullcircle - a number - - Set angle measurement units, i. e. set number - of 'degrees' for a full circle. Default value is - 360 degrees. - - Example (for a Turtle instance named turtle): - >>> turtle.left(90) - >>> turtle.heading() - 90 - - Change angle measurement unit to grad (also known as gon, - grade, or gradian and equals 1/100-th of the right angle.) - >>> turtle.degrees(400.0) - >>> turtle.heading() - 100 - - """ - - def radians(self) -> None: - """Set the angle measurement units to radians. - - No arguments. - - Example (for a Turtle instance named turtle): - >>> turtle.heading() - 90 - >>> turtle.radians() - >>> turtle.heading() - 1.5707963267948966 - """ + def __init__(self, mode: Literal["standard", "logo", "world"] = "standard") -> None: ... + def reset(self) -> None: ... + def degrees(self, fullcircle: float = 360.0) -> None: ... + def radians(self) -> None: ... if sys.version_info >= (3, 12): - def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: - """To be overwritten by child class RawTurtle. - Includes no TPen references. - """ - - def forward(self, distance: float) -> None: - """Move the turtle forward by the specified distance. - - Aliases: forward | fd - - Argument: - distance -- a number (integer or float) - - Move the turtle forward by the specified distance, in the direction - the turtle is headed. 
- - Example (for a Turtle instance named turtle): - >>> turtle.position() - (0.00, 0.00) - >>> turtle.forward(25) - >>> turtle.position() - (25.00,0.00) - >>> turtle.forward(-75) - >>> turtle.position() - (-50.00,0.00) - """ - - def back(self, distance: float) -> None: - """Move the turtle backward by distance. - - Aliases: back | backward | bk - - Argument: - distance -- a number - - Move the turtle backward by distance, opposite to the direction the - turtle is headed. Do not change the turtle's heading. - - Example (for a Turtle instance named turtle): - >>> turtle.position() - (0.00, 0.00) - >>> turtle.backward(30) - >>> turtle.position() - (-30.00, 0.00) - """ - - def right(self, angle: float) -> None: - """Turn turtle right by angle units. - - Aliases: right | rt - - Argument: - angle -- a number (integer or float) - - Turn turtle right by angle units. (Units are by default degrees, - but can be set via the degrees() and radians() functions.) - Angle orientation depends on mode. (See this.) - - Example (for a Turtle instance named turtle): - >>> turtle.heading() - 22.0 - >>> turtle.right(45) - >>> turtle.heading() - 337.0 - """ - - def left(self, angle: float) -> None: - """Turn turtle left by angle units. - - Aliases: left | lt - - Argument: - angle -- a number (integer or float) - - Turn turtle left by angle units. (Units are by default degrees, - but can be set via the degrees() and radians() functions.) - Angle orientation depends on mode. (See this.) - - Example (for a Turtle instance named turtle): - >>> turtle.heading() - 22.0 - >>> turtle.left(45) - >>> turtle.heading() - 67.0 - """ - - def pos(self) -> Vec2D: - """Return the turtle's current location (x,y), as a Vec2D-vector. - - Aliases: pos | position - - No arguments. - - Example (for a Turtle instance named turtle): - >>> turtle.pos() - (0.00, 240.00) - """ - - def xcor(self) -> float: - """Return the turtle's x coordinate. - - No arguments. - - Example (for a Turtle instance named turtle): - >>> reset() - >>> turtle.left(60) - >>> turtle.forward(100) - >>> print(turtle.xcor()) - 50.0 - """ - - def ycor(self) -> float: - """Return the turtle's y coordinate - --- - No arguments. - - Example (for a Turtle instance named turtle): - >>> reset() - >>> turtle.left(60) - >>> turtle.forward(100) - >>> print(turtle.ycor()) - 86.6025403784 - """ + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + def forward(self, distance: float) -> None: ... + def back(self, distance: float) -> None: ... + def right(self, angle: float) -> None: ... + def left(self, angle: float) -> None: ... + def pos(self) -> Vec2D: ... + def xcor(self) -> float: ... + def ycor(self) -> float: ... @overload - def goto(self, x: tuple[float, float], y: None = None) -> None: - """Move turtle to an absolute position. - - Aliases: setpos | setposition | goto: - - Arguments: - x -- a number or a pair/vector of numbers - y -- a number None - - call: goto(x, y) # two coordinates - --or: goto((x, y)) # a pair (tuple) of coordinates - --or: goto(vec) # e.g. as returned by pos() - - Move turtle to an absolute position. If the pen is down, - a line will be drawn. The turtle's orientation does not change. 
- - Example (for a Turtle instance named turtle): - >>> tp = turtle.pos() - >>> tp - (0.00, 0.00) - >>> turtle.setpos(60,30) - >>> turtle.pos() - (60.00,30.00) - >>> turtle.setpos((20,80)) - >>> turtle.pos() - (20.00,80.00) - >>> turtle.setpos(tp) - >>> turtle.pos() - (0.00,0.00) - """ - + def goto(self, x: tuple[float, float], y: None = None) -> None: ... @overload def goto(self, x: float, y: float) -> None: ... - def home(self) -> None: - """Move turtle to the origin - coordinates (0,0). - - No arguments. - - Move turtle to the origin - coordinates (0,0) and set its - heading to its start-orientation (which depends on mode). - - Example (for a Turtle instance named turtle): - >>> turtle.home() - """ - - def setx(self, x: float) -> None: - """Set the turtle's first coordinate to x - - Argument: - x -- a number (integer or float) - - Set the turtle's first coordinate to x, leave second coordinate - unchanged. - - Example (for a Turtle instance named turtle): - >>> turtle.position() - (0.00, 240.00) - >>> turtle.setx(10) - >>> turtle.position() - (10.00, 240.00) - """ - - def sety(self, y: float) -> None: - """Set the turtle's second coordinate to y - - Argument: - y -- a number (integer or float) - - Set the turtle's first coordinate to x, second coordinate remains - unchanged. - - Example (for a Turtle instance named turtle): - >>> turtle.position() - (0.00, 40.00) - >>> turtle.sety(-10) - >>> turtle.position() - (0.00, -10.00) - """ - + def home(self) -> None: ... + def setx(self, x: float) -> None: ... + def sety(self, y: float) -> None: ... @overload - def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: - """Return the distance from the turtle to (x,y) in turtle step units. - - Arguments: - x -- a number or a pair/vector of numbers or a turtle instance - y -- a number None None - - call: distance(x, y) # two coordinates - --or: distance((x, y)) # a pair (tuple) of coordinates - --or: distance(vec) # e.g. as returned by pos() - --or: distance(mypen) # where mypen is another turtle - - Example (for a Turtle instance named turtle): - >>> turtle.pos() - (0.00, 0.00) - >>> turtle.distance(30,40) - 50.0 - >>> pen = Turtle() - >>> pen.forward(77) - >>> turtle.distance(pen) - 77.0 - """ - + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(self, x: float, y: float) -> float: ... @overload - def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: - """Return the angle of the line from the turtle's position to (x, y). - - Arguments: - x -- a number or a pair/vector of numbers or a turtle instance - y -- a number None None - - call: distance(x, y) # two coordinates - --or: distance((x, y)) # a pair (tuple) of coordinates - --or: distance(vec) # e.g. as returned by pos() - --or: distance(mypen) # where mypen is another turtle - - Return the angle, between the line from turtle-position to position - specified by x, y and the turtle's start orientation. (Depends on - modes - "standard" or "logo") - - Example (for a Turtle instance named turtle): - >>> turtle.pos() - (10.00, 10.00) - >>> turtle.towards(0,0) - 225.0 - """ - + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(self, x: float, y: float) -> float: ... - def heading(self) -> float: - """Return the turtle's current heading. - - No arguments. 
- - Example (for a Turtle instance named turtle): - >>> turtle.left(67) - >>> turtle.heading() - 67.0 - """ - - def setheading(self, to_angle: float) -> None: - """Set the orientation of the turtle to to_angle. - - Aliases: setheading | seth - - Argument: - to_angle -- a number (integer or float) - - Set the orientation of the turtle to to_angle. - Here are some common directions in degrees: - - standard - mode: logo-mode: - -------------------|-------------------- - 0 - east 0 - north - 90 - north 90 - east - 180 - west 180 - south - 270 - south 270 - west - - Example (for a Turtle instance named turtle): - >>> turtle.setheading(90) - >>> turtle.heading() - 90 - """ - - def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: - """Draw a circle with given radius. - - Arguments: - radius -- a number - extent (optional) -- a number - steps (optional) -- an integer - - Draw a circle with given radius. The center is radius units left - of the turtle; extent - an angle - determines which part of the - circle is drawn. If extent is not given, draw the entire circle. - If extent is not a full circle, one endpoint of the arc is the - current pen position. Draw the arc in counterclockwise direction - if radius is positive, otherwise in clockwise direction. Finally - the direction of the turtle is changed by the amount of extent. - - As the circle is approximated by an inscribed regular polygon, - steps determines the number of steps to use. If not given, - it will be calculated automatically. Maybe used to draw regular - polygons. - - call: circle(radius) # full circle - --or: circle(radius, extent) # arc - --or: circle(radius, extent, steps) - --or: circle(radius, steps=6) # 6-sided polygon - - Example (for a Turtle instance named turtle): - >>> turtle.circle(50) - >>> turtle.circle(120, 180) # semicircle - """ - - def speed(self, s: int | None = 0) -> int | None: - """dummy method - to be overwritten by child class""" + def heading(self) -> float: ... + def setheading(self, to_angle: float) -> None: ... + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... + def speed(self, s: int | None = 0) -> int | None: ... fd = forward bk = back backward = back @@ -1210,229 +337,36 @@ class TNavigator: seth = setheading class TPen: - """Drawing part of the RawTurtle. - Implements drawing properties. - """ - - def __init__(self, resizemode: str = "noresize") -> None: ... + def __init__(self, resizemode: Literal["auto", "user", "noresize"] = "noresize") -> None: ... @overload - def resizemode(self, rmode: None = None) -> str: - """Set resizemode to one of the values: "auto", "user", "noresize". - - (Optional) Argument: - rmode -- one of the strings "auto", "user", "noresize" - - Different resizemodes have the following effects: - - "auto" adapts the appearance of the turtle - corresponding to the value of pensize. - - "user" adapts the appearance of the turtle according to the - values of stretchfactor and outlinewidth (outline), - which are set by shapesize() - - "noresize" no adaption of the turtle's appearance takes place. - If no argument is given, return current resizemode. - resizemode("user") is called by a call of shapesize with arguments. - - - Examples (for a Turtle instance named turtle): - >>> turtle.resizemode("noresize") - >>> turtle.resizemode() - 'noresize' - """ - + def resizemode(self, rmode: None = None) -> str: ... @overload - def resizemode(self, rmode: str) -> None: ... 
+ def resizemode(self, rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload - def pensize(self, width: None = None) -> int: - """Set or return the line thickness. - - Aliases: pensize | width - - Argument: - width -- positive number - - Set the line thickness to width or return it. If resizemode is set - to "auto" and turtleshape is a polygon, that polygon is drawn with - the same line thickness. If no argument is given, current pensize - is returned. - - Example (for a Turtle instance named turtle): - >>> turtle.pensize() - 1 - >>> turtle.pensize(10) # from here on lines of width 10 are drawn - """ - + def pensize(self, width: None = None) -> int: ... @overload def pensize(self, width: int) -> None: ... - def penup(self) -> None: - """Pull the pen up -- no drawing when moving. - - Aliases: penup | pu | up - - No argument - - Example (for a Turtle instance named turtle): - >>> turtle.penup() - """ - - def pendown(self) -> None: - """Pull the pen down -- drawing when moving. - - Aliases: pendown | pd | down - - No argument. - - Example (for a Turtle instance named turtle): - >>> turtle.pendown() - """ - - def isdown(self) -> bool: - """Return True if pen is down, False if it's up. - - No argument. - - Example (for a Turtle instance named turtle): - >>> turtle.penup() - >>> turtle.isdown() - False - >>> turtle.pendown() - >>> turtle.isdown() - True - """ - + def penup(self) -> None: ... + def pendown(self) -> None: ... + def isdown(self) -> bool: ... @overload - def speed(self, speed: None = None) -> int: - """Return or set the turtle's speed. - - Optional argument: - speed -- an integer in the range 0..10 or a speedstring (see below) - - Set the turtle's speed to an integer value in the range 0 .. 10. - If no argument is given: return current speed. - - If input is a number greater than 10 or smaller than 0.5, - speed is set to 0. - Speedstrings are mapped to speedvalues in the following way: - 'fastest' : 0 - 'fast' : 10 - 'normal' : 6 - 'slow' : 3 - 'slowest' : 1 - speeds from 1 to 10 enforce increasingly faster animation of - line drawing and turtle turning. - - Attention: - speed = 0 : *no* animation takes place. forward/back makes turtle jump - and likewise left/right make the turtle turn instantly. - - Example (for a Turtle instance named turtle): - >>> turtle.speed(3) - """ - + def speed(self, speed: None = None) -> int: ... @overload def speed(self, speed: _Speed) -> None: ... @overload - def pencolor(self) -> _AnyColor: - """Return or set the pencolor. - - Arguments: - Four input formats are allowed: - - pencolor() - Return the current pencolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - pencolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - pencolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - pencolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - - If turtleshape is a polygon, the outline of that polygon is drawn - with the newly set pencolor. - - Example (for a Turtle instance named turtle): - >>> turtle.pencolor('brown') - >>> tup = (0.2, 0.8, 0.55) - >>> turtle.pencolor(tup) - >>> turtle.pencolor() - '#33cc8c' - """ - + def pencolor(self) -> _AnyColor: ... @overload def pencolor(self, color: _Color) -> None: ... 
@overload def pencolor(self, r: float, g: float, b: float) -> None: ... @overload - def fillcolor(self) -> _AnyColor: - """Return or set the fillcolor. - - Arguments: - Four input formats are allowed: - - fillcolor() - Return the current fillcolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - fillcolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - fillcolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - fillcolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - - If turtleshape is a polygon, the interior of that polygon is drawn - with the newly set fillcolor. - - Example (for a Turtle instance named turtle): - >>> turtle.fillcolor('violet') - >>> col = turtle.pencolor() - >>> turtle.fillcolor(col) - >>> turtle.fillcolor(0, .5, 0) - """ - + def fillcolor(self) -> _AnyColor: ... @overload def fillcolor(self, color: _Color) -> None: ... @overload def fillcolor(self, r: float, g: float, b: float) -> None: ... @overload - def color(self) -> tuple[_AnyColor, _AnyColor]: - """Return or set the pencolor and fillcolor. - - Arguments: - Several input formats are allowed. - They use 0, 1, 2, or 3 arguments as follows: - - color() - Return the current pencolor and the current fillcolor - as a pair of color specification strings as are returned - by pencolor and fillcolor. - color(colorstring), color((r,g,b)), color(r,g,b) - inputs as in pencolor, set both, fillcolor and pencolor, - to the given value. - color(colorstring1, colorstring2), - color((r1,g1,b1), (r2,g2,b2)) - equivalent to pencolor(colorstring1) and fillcolor(colorstring2) - and analogously, if the other input format is used. - - If turtleshape is a polygon, outline and interior of that polygon - is drawn with the newly set colors. - For more info see: pencolor, fillcolor - - Example (for a Turtle instance named turtle): - >>> turtle.color('red', 'green') - >>> turtle.color() - ('red', 'green') - >>> colormode(255) - >>> color((40, 80, 120), (160, 200, 240)) - >>> color() - ('#285078', '#a0c8f0') - """ - + def color(self) -> tuple[_AnyColor, _AnyColor]: ... @overload def color(self, color: _Color) -> None: ... @overload @@ -1440,98 +374,14 @@ class TPen: @overload def color(self, color1: _Color, color2: _Color) -> None: ... if sys.version_info >= (3, 12): - def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: - """To be overwritten by child class RawTurtle. - Includes no TNavigator references. - """ - - def showturtle(self) -> None: - """Makes the turtle visible. - - Aliases: showturtle | st - - No argument. - - Example (for a Turtle instance named turtle): - >>> turtle.hideturtle() - >>> turtle.showturtle() - """ + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... - def hideturtle(self) -> None: - """Makes the turtle invisible. - - Aliases: hideturtle | ht - - No argument. - - It's a good idea to do this while you're in the - middle of a complicated drawing, because hiding - the turtle speeds up the drawing observably. - - Example (for a Turtle instance named turtle): - >>> turtle.hideturtle() - """ - - def isvisible(self) -> bool: - """Return True if the Turtle is shown, False if it's hidden. - - No argument. 
- - Example (for a Turtle instance named turtle): - >>> turtle.hideturtle() - >>> print(turtle.isvisible()) - False - """ + def showturtle(self) -> None: ... + def hideturtle(self) -> None: ... + def isvisible(self) -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload - def pen(self) -> _PenState: - """Return or set the pen's attributes. - - Arguments: - pen -- a dictionary with some or all of the below listed keys. - **pendict -- one or more keyword-arguments with the below - listed keys as keywords. - - Return or set the pen's attributes in a 'pen-dictionary' - with the following key/value pairs: - "shown" : True/False - "pendown" : True/False - "pencolor" : color-string or color-tuple - "fillcolor" : color-string or color-tuple - "pensize" : positive number - "speed" : number in range 0..10 - "resizemode" : "auto" or "user" or "noresize" - "stretchfactor": (positive number, positive number) - "shearfactor": number - "outline" : positive number - "tilt" : number - - This dictionary can be used as argument for a subsequent - pen()-call to restore the former pen-state. Moreover one - or more of these attributes can be provided as keyword-arguments. - This can be used to set several pen attributes in one statement. - - - Examples (for a Turtle instance named turtle): - >>> turtle.pen(fillcolor="black", pencolor="red", pensize=10) - >>> turtle.pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - >>> penstate=turtle.pen() - >>> turtle.color("yellow","") - >>> turtle.penup() - >>> turtle.pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - >>> p.pen(penstate, fillcolor="green") - >>> p.pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - """ - + def pen(self) -> _PenState: ... @overload def pen( self, @@ -1543,7 +393,7 @@ class TPen: fillcolor: _Color = ..., pensize: int = ..., speed: int = ..., - resizemode: str = ..., + resizemode: Literal["auto", "user", "noresize"] = ..., stretchfactor: tuple[float, float] = ..., outline: int = ..., tilt: float = ..., @@ -1557,11 +407,6 @@ class TPen: ht = hideturtle class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods in base classes - """Animation part of the RawTurtle. - Puts RawTurtle upon a TurtleScreen and provides tools for - its animation. - """ - screen: TurtleScreen screens: ClassVar[list[TurtleScreen]] def __init__( @@ -1571,566 +416,80 @@ class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods undobuffersize: int = 1000, visible: bool = True, ) -> None: ... - def reset(self) -> None: - """Delete the turtle's drawings and restore its default values. - - No argument. - - Delete the turtle's drawings from the screen, re-center the turtle - and set variables to the default values. - - Example (for a Turtle instance named turtle): - >>> turtle.position() - (0.00,-22.00) - >>> turtle.heading() - 100.0 - >>> turtle.reset() - >>> turtle.position() - (0.00,0.00) - >>> turtle.heading() - 0.0 - """ - - def setundobuffer(self, size: int | None) -> None: - """Set or disable undobuffer. 
- - Argument: - size -- an integer or None - - If size is an integer an empty undobuffer of given size is installed. - Size gives the maximum number of turtle-actions that can be undone - by the undo() function. - If size is None, no undobuffer is present. - - Example (for a Turtle instance named turtle): - >>> turtle.setundobuffer(42) - """ - - def undobufferentries(self) -> int: - """Return count of entries in the undobuffer. - - No argument. - - Example (for a Turtle instance named turtle): - >>> while undobufferentries(): - ... undo() - """ - - def clear(self) -> None: - """Delete the turtle's drawings from the screen. Do not move turtle. - - No arguments. - - Delete the turtle's drawings from the screen. Do not move turtle. - State and position of the turtle as well as drawings of other - turtles are not affected. - - Examples (for a Turtle instance named turtle): - >>> turtle.clear() - """ - - def clone(self) -> Self: - """Create and return a clone of the turtle. - - No argument. - - Create and return a clone of the turtle with same position, heading - and turtle properties. - - Example (for a Turtle instance named mick): - mick = Turtle() - joe = mick.clone() - """ - + def reset(self) -> None: ... + def setundobuffer(self, size: int | None) -> None: ... + def undobufferentries(self) -> int: ... + def clear(self) -> None: ... + def clone(self) -> Self: ... @overload - def shape(self, name: None = None) -> str: - """Set turtle shape to shape with given name / return current shapename. - - Optional argument: - name -- a string, which is a valid shapename - - Set turtle shape to shape with given name or, if name is not given, - return name of current shape. - Shape with name must exist in the TurtleScreen's shape dictionary. - Initially there are the following polygon shapes: - 'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. - To learn about how to deal with shapes see Screen-method register_shape. - - Example (for a Turtle instance named turtle): - >>> turtle.shape() - 'arrow' - >>> turtle.shape("turtle") - >>> turtle.shape() - 'turtle' - """ - + def shape(self, name: None = None) -> str: ... @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapesize(self) -> tuple[float, float, float]: - """Set/return turtle's stretchfactors/outline. Set resizemode to "user". - - Optional arguments: - stretch_wid : positive number - stretch_len : positive number - outline : positive number - - Return or set the pen's attributes x/y-stretchfactors and/or outline. - Set resizemode to "user". - If and only if resizemode is set to "user", the turtle will be displayed - stretched according to its stretchfactors: - stretch_wid is stretchfactor perpendicular to orientation - stretch_len is stretchfactor in direction of turtles orientation. - outline determines the width of the shapes's outline. - - Examples (for a Turtle instance named turtle): - >>> turtle.resizemode("user") - >>> turtle.shapesize(5, 5, 12) - >>> turtle.shapesize(outline=8) - """ - + def shapesize(self) -> tuple[float, float, float]: ... @overload def shapesize( self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None ) -> None: ... @overload - def shearfactor(self, shear: None = None) -> float: - """Set or return the current shearfactor. - - Optional argument: shear -- number, tangent of the shear angle - - Shear the turtleshape according to the given shearfactor shear, - which is the tangent of the shear angle. 
DO NOT change the - turtle's heading (direction of movement). - If shear is not given: return the current shearfactor, i. e. the - tangent of the shear angle, by which lines parallel to the - heading of the turtle are sheared. - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("circle") - >>> turtle.shapesize(5,2) - >>> turtle.shearfactor(0.5) - >>> turtle.shearfactor() - >>> 0.5 - """ - + def shearfactor(self, shear: None = None) -> float: ... @overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapetransform(self) -> tuple[float, float, float, float]: - """Set or return the current transformation matrix of the turtle shape. - - Optional arguments: t11, t12, t21, t22 -- numbers. - - If none of the matrix elements are given, return the transformation - matrix. - Otherwise set the given elements and transform the turtleshape - according to the matrix consisting of first row t11, t12 and - second row t21, 22. - Modify stretchfactor, shearfactor and tiltangle according to the - given matrix. - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("square") - >>> turtle.shapesize(4,2) - >>> turtle.shearfactor(-0.5) - >>> turtle.shapetransform() - (4.0, -1.0, -0.0, 2.0) - """ - + def shapetransform(self) -> tuple[float, float, float, float]: ... @overload def shapetransform( self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... - def get_shapepoly(self) -> _PolygonCoords | None: - """Return the current shape polygon as tuple of coordinate pairs. + def get_shapepoly(self) -> _PolygonCoords | None: ... - No argument. - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("square") - >>> turtle.shapetransform(4, -1, 0, 2) - >>> turtle.get_shapepoly() - ((50, -20), (30, 20), (-50, 20), (-30, -20)) - - """ if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") - def settiltangle(self, angle: float) -> None: - """Rotate the turtleshape to point in the specified direction - - Argument: angle -- number - - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). - - Deprecated since Python 3.1 - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("circle") - >>> turtle.shapesize(5,2) - >>> turtle.settiltangle(45) - >>> turtle.stamp() - >>> turtle.fd(50) - >>> turtle.settiltangle(-45) - >>> turtle.stamp() - >>> turtle.fd(50) - """ + def settiltangle(self, angle: float) -> None: ... @overload - def tiltangle(self, angle: None = None) -> float: - """Set or return the current tilt-angle. - - Optional argument: angle -- number - - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). - If angle is not given: return the current tilt-angle, i. e. the angle - between the orientation of the turtleshape and the heading of the - turtle (its direction of movement). 
- - Examples (for a Turtle instance named turtle): - >>> turtle.shape("circle") - >>> turtle.shapesize(5, 2) - >>> turtle.tiltangle() - 0.0 - >>> turtle.tiltangle(45) - >>> turtle.tiltangle() - 45.0 - >>> turtle.stamp() - >>> turtle.fd(50) - >>> turtle.tiltangle(-45) - >>> turtle.tiltangle() - 315.0 - >>> turtle.stamp() - >>> turtle.fd(50) - """ - + def tiltangle(self, angle: None = None) -> float: ... @overload def tiltangle(self, angle: float) -> None: ... - def tilt(self, angle: float) -> None: - """Rotate the turtleshape by angle. - - Argument: - angle - a number - - Rotate the turtleshape by angle from its current tilt-angle, - but do NOT change the turtle's heading (direction of movement). - - Examples (for a Turtle instance named turtle): - >>> turtle.shape("circle") - >>> turtle.shapesize(5,2) - >>> turtle.tilt(30) - >>> turtle.fd(50) - >>> turtle.tilt(30) - >>> turtle.fd(50) - """ + def tilt(self, angle: float) -> None: ... # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. - def stamp(self) -> Any: - """Stamp a copy of the turtleshape onto the canvas and return its id. - - No argument. - - Stamp a copy of the turtle shape onto the canvas at the current - turtle position. Return a stamp_id for that stamp, which can be - used to delete it by calling clearstamp(stamp_id). - - Example (for a Turtle instance named turtle): - >>> turtle.color("blue") - >>> turtle.stamp() - 13 - >>> turtle.fd(50) - """ - - def clearstamp(self, stampid: int | tuple[int, ...]) -> None: - """Delete stamp with given stampid - - Argument: - stampid - an integer, must be return value of previous stamp() call. - - Example (for a Turtle instance named turtle): - >>> turtle.color("blue") - >>> astamp = turtle.stamp() - >>> turtle.fd(50) - >>> turtle.clearstamp(astamp) - """ - - def clearstamps(self, n: int | None = None) -> None: - """Delete all or first/last n of turtle's stamps. - - Optional argument: - n -- an integer - - If n is None, delete all of pen's stamps, - else if n > 0 delete first n stamps - else if n < 0 delete last n stamps. - - Example (for a Turtle instance named turtle): - >>> for i in range(8): - ... turtle.stamp(); turtle.fd(30) - ... - >>> turtle.clearstamps(2) - >>> turtle.clearstamps(-2) - >>> turtle.clearstamps() - """ - - def filling(self) -> bool: - """Return fillstate (True if filling, False else). - - No argument. - - Example (for a Turtle instance named turtle): - >>> turtle.begin_fill() - >>> if turtle.filling(): - ... turtle.pensize(5) - ... else: - ... turtle.pensize(3) - """ + def stamp(self) -> Any: ... + def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... + def clearstamps(self, n: int | None = None) -> None: ... + def filling(self) -> bool: ... if sys.version_info >= (3, 14): @contextmanager - def fill(self) -> Generator[None]: - """A context manager for filling a shape. - - Implicitly ensures the code block is wrapped with - begin_fill() and end_fill(). - - Example (for a Turtle instance named turtle): - >>> turtle.color("black", "red") - >>> with turtle.fill(): - ... turtle.circle(60) - """ - - def begin_fill(self) -> None: - """Called just before drawing a shape to be filled. - - No argument. - - Example (for a Turtle instance named turtle): - >>> turtle.color("black", "red") - >>> turtle.begin_fill() - >>> turtle.circle(60) - >>> turtle.end_fill() - """ - - def end_fill(self) -> None: - """Fill the shape drawn after the call begin_fill(). - - No argument. 
- - Example (for a Turtle instance named turtle): - >>> turtle.color("black", "red") - >>> turtle.begin_fill() - >>> turtle.circle(60) - >>> turtle.end_fill() - """ + def fill(self) -> Generator[None]: ... + def begin_fill(self) -> None: ... + def end_fill(self) -> None: ... @overload - def dot(self, size: int | _Color | None = None) -> None: - """Draw a dot with diameter size, using color. - - Optional arguments: - size -- an integer >= 1 (if given) - color -- a colorstring or a numeric color tuple - - Draw a circular dot with diameter size, using color. - If size is not given, the maximum of pensize+4 and 2*pensize is used. - - Example (for a Turtle instance named turtle): - >>> turtle.dot() - >>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50) - """ - + def dot(self, size: int | _Color | None = None) -> None: ... @overload def dot(self, size: int | None, color: _Color, /) -> None: ... @overload def dot(self, size: int | None, r: float, g: float, b: float, /) -> None: ... def write( self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal") - ) -> None: - """Write text at the current turtle position. - - Arguments: - arg -- info, which is to be written to the TurtleScreen - move (optional) -- True/False - align (optional) -- one of the strings "left", "center" or right" - font (optional) -- a triple (fontname, fontsize, fonttype) - - Write text - the string representation of arg - at the current - turtle position according to align ("left", "center" or right") - and with the given font. - If move is True, the pen is moved to the bottom-right corner - of the text. By default, move is False. - - Example (for a Turtle instance named turtle): - >>> turtle.write('Home = ', True, align="center") - >>> turtle.write((0,0), True) - """ + ) -> None: ... if sys.version_info >= (3, 14): @contextmanager - def poly(self) -> Generator[None]: - """A context manager for recording the vertices of a polygon. - - Implicitly ensures that the code block is wrapped with - begin_poly() and end_poly() - - Example (for a Turtle instance named turtle) where we create a - triangle as the polygon and move the turtle 100 steps forward: - >>> with turtle.poly(): - ... for side in range(3) - ... turtle.forward(50) - ... turtle.right(60) - >>> turtle.forward(100) - """ - - def begin_poly(self) -> None: - """Start recording the vertices of a polygon. - - No argument. - - Start recording the vertices of a polygon. Current turtle position - is first point of polygon. - - Example (for a Turtle instance named turtle): - >>> turtle.begin_poly() - """ - - def end_poly(self) -> None: - """Stop recording the vertices of a polygon. - - No argument. - - Stop recording the vertices of a polygon. Current turtle position is - last point of polygon. This will be connected with the first point. - - Example (for a Turtle instance named turtle): - >>> turtle.end_poly() - """ - - def get_poly(self) -> _PolygonCoords | None: - """Return the lastly recorded polygon. - - No argument. - - Example (for a Turtle instance named turtle): - >>> p = turtle.get_poly() - >>> turtle.register_shape("myFavouriteShape", p) - """ - - def getscreen(self) -> TurtleScreen: - """Return the TurtleScreen object, the turtle is drawing on. - - No argument. - - Return the TurtleScreen object, the turtle is drawing on. - So TurtleScreen-methods can be called for that object. 
- - Example (for a Turtle instance named turtle): - >>> ts = turtle.getscreen() - >>> ts - - >>> ts.bgcolor("pink") - """ - - def getturtle(self) -> Self: - """Return the Turtleobject itself. - - No argument. - - Only reasonable use: as a function to return the 'anonymous turtle': + def poly(self) -> Generator[None]: ... - Example: - >>> pet = getturtle() - >>> pet.fd(50) - >>> pet - - >>> turtles() - [] - """ + def begin_poly(self) -> None: ... + def end_poly(self) -> None: ... + def get_poly(self) -> _PolygonCoords | None: ... + def getscreen(self) -> TurtleScreen: ... + def getturtle(self) -> Self: ... getpen = getturtle - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: - """Bind fun to mouse-click event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - add -- True or False. If True, new binding will be added, otherwise - it will replace a former binding. - - Example for the anonymous turtle, i. e. the procedural way: - - >>> def turn(x, y): - ... left(360) - ... - >>> onclick(turn) # Now clicking into the turtle will turn it. - >>> onclick(None) # event-binding will be removed - """ - - def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: - """Bind fun to mouse-button-release event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - - Example (for a MyTurtle instance named joe): - >>> class MyTurtle(Turtle): - ... def glow(self,x,y): - ... self.fillcolor("red") - ... def unglow(self,x,y): - ... self.fillcolor("") - ... - >>> joe = MyTurtle() - >>> joe.onclick(joe.glow) - >>> joe.onrelease(joe.unglow) - - Clicking on joe turns fillcolor red, unclicking turns it to - transparent. - """ - - def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: - """Bind fun to mouse-move event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - - Every sequence of mouse-move-events on a turtle is preceded by a - mouse-click event on that turtle. - - Example (for a Turtle instance named turtle): - >>> turtle.ondrag(turtle.goto) - - Subsequently clicking and dragging a Turtle will move it - across the screen thereby producing handdrawings (if pen is - down). - """ - - def undo(self) -> None: - """undo (repeatedly) the last turtle action. - - No argument. - - undo (repeatedly) the last turtle action. - Number of available undo actions is determined by the size of - the undobuffer. - - Example (for a Turtle instance named turtle): - >>> for i in range(4): - ... turtle.fd(50); turtle.lt(80) - ... - >>> for i in range(8): - ... turtle.undo() - ... - """ + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def undo(self) -> None: ... 
turtlesize = shapesize class _Screen(TurtleScreen): @@ -2142,993 +501,128 @@ class _Screen(TurtleScreen): height: int | float = 0.75, # noqa: Y041 startx: int | None = None, starty: int | None = None, - ) -> None: - """Set the size and position of the main window. - - Arguments: - width: as integer a size in pixels, as float a fraction of the screen. - Default is 50% of screen. - height: as integer the height in pixels, as float a fraction of the - screen. Default is 75% of screen. - startx: if positive, starting position in pixels from the left - edge of the screen, if negative from the right edge - Default, startx=None is to center window horizontally. - starty: if positive, starting position in pixels from the top - edge of the screen, if negative from the bottom edge - Default, starty=None is to center window vertically. - - Examples (for a Screen instance named screen): - >>> screen.setup (width=200, height=200, startx=0, starty=0) - - sets window to 200x200 pixels, in upper left of screen - - >>> screen.setup(width=.75, height=0.5, startx=None, starty=None) - - sets window to 75% of screen by 50% of screen and centers - """ - - def title(self, titlestring: str) -> None: - """Set title of turtle-window - - Argument: - titlestring -- a string, to appear in the titlebar of the - turtle graphics window. - - This is a method of Screen-class. Not available for TurtleScreen- - objects. - - Example (for a Screen instance named screen): - >>> screen.title("Welcome to the turtle-zoo!") - """ - - def bye(self) -> None: - """Shut the turtlegraphics window. - - Example (for a TurtleScreen instance named screen): - >>> screen.bye() - """ - - def exitonclick(self) -> None: - """Go into mainloop until the mouse is clicked. - - No arguments. - - Bind bye() method to mouseclick on TurtleScreen. - If "using_IDLE" - value in configuration dictionary is False - (default value), enter mainloop. - If IDLE with -n switch (no subprocess) is used, this value should be - set to True in turtle.cfg. In this case IDLE's mainloop - is active also for the client script. - - This is a method of the Screen-class and not available for - TurtleScreen instances. - - Example (for a Screen instance named screen): - >>> screen.exitonclick() - - """ + ) -> None: ... + def title(self, titlestring: str) -> None: ... + def bye(self) -> None: ... + def exitonclick(self) -> None: ... class Turtle(RawTurtle): - """RawTurtle auto-creating (scrolled) canvas. - - When a Turtle object is created or a function derived from some - Turtle method is called a TurtleScreen object is automatically created. - """ - def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... RawPen = RawTurtle Pen = Turtle -def write_docstringdict(filename: str = "turtle_docstringdict") -> None: - """Create and write docstring-dictionary to file. - - Optional argument: - filename -- a string, used as filename - default value is turtle_docstringdict - - Has to be called explicitly, (not used by the turtle-graphics classes) - The docstring dictionary will be written to the Python script .py - It is intended to serve as a template for translation of the docstrings - into different languages. - """ +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... # Functions copied from TurtleScreenBase: -def mainloop() -> None: - """Starts event loop - calling Tkinter's mainloop function. - - No argument. - - Must be last statement in a turtle graphics program. 
- Must NOT be used if a script is run from within IDLE in -n mode - (No subprocess) - for interactive use of turtle graphics. - - Example: - >>> mainloop() - - """ - -def textinput(title: str, prompt: str) -> str | None: - """Pop up a dialog window for input of a string. - - Arguments: title is the title of the dialog window, - prompt is a text mostly describing what information to input. - - Return the string input - If the dialog is canceled, return None. - - Example: - >>> textinput("NIM", "Name of first player:") - - """ - +def mainloop() -> None: ... +def textinput(title: str, prompt: str) -> str | None: ... def numinput( title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None -) -> float | None: - """Pop up a dialog window for input of a number. - - Arguments: title is the title of the dialog window, - prompt is a text mostly describing what numerical information to input. - default: default value - minval: minimum value for input - maxval: maximum value for input - - The number input must be in the range minval .. maxval if these are - given. If not, a hint is issued and the dialog remains open for - correction. Return the number input. - If the dialog is canceled, return None. - - Example: - >>> numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) - - """ +) -> float | None: ... # Functions copied from TurtleScreen: -def clear() -> None: - """Delete the turtle's drawings from the screen. Do not move - - No arguments. - - Delete the turtle's drawings from the screen. Do not move - State and position of the turtle as well as drawings of other - turtles are not affected. - - Examples: - >>> clear() - """ - +def clear() -> None: ... @overload -def mode(mode: None = None) -> str: - """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. - - Optional argument: - mode -- one of the strings 'standard', 'logo' or 'world' - - Mode 'standard' is compatible with turtle.py. - Mode 'logo' is compatible with most Logo-Turtle-Graphics. - Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in - this mode angles appear distorted if x/y unit-ratio doesn't equal 1. - If mode is not given, return the current mode. - - Mode Initial turtle heading positive angles - ------------|-------------------------|------------------- - 'standard' to the right (east) counterclockwise - 'logo' upward (north) clockwise - - Examples: - >>> mode('logo') # resets turtle heading to north - >>> mode() - 'logo' - """ - +def mode(mode: None = None) -> str: ... @overload -def mode(mode: str) -> None: ... -def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: - """Set up a user defined coordinate-system. - - Arguments: - llx -- a number, x-coordinate of lower left corner of canvas - lly -- a number, y-coordinate of lower left corner of canvas - urx -- a number, x-coordinate of upper right corner of canvas - ury -- a number, y-coordinate of upper right corner of canvas - - Set up user coodinat-system and switch to mode 'world' if necessary. - This performs a reset. If mode 'world' is already active, - all drawings are redrawn according to the new coordinates. - - But ATTENTION: in user-defined coordinatesystems angles may appear - distorted. (see Screen.mode()) - - Example: - >>> setworldcoordinates(-10,-0.5,50,1.5) - >>> for _ in range(36): - ... left(10) - ... 
forward(0.5) - """ - -def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: - """Adds a turtle shape to TurtleScreen's shapelist. - - Arguments: - (1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! - (2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! - (3) name is an arbitrary string and shape is a tuple - of pairs of coordinates. Installs the corresponding - polygon shape - (4) name is an arbitrary string and shape is a - (compound) Shape object. Installs the corresponding - compound shape. - To use a shape, you have to issue the command shape(shapename). - - call: register_shape("turtle.gif") - --or: register_shape("tri", ((0,0), (10,10), (-10,10))) - - Example: - >>> register_shape("triangle", ((5,-3),(0,5),(-5,-3))) - - """ - +def mode(mode: Literal["standard", "logo", "world"]) -> None: ... +def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload -def colormode(cmode: None = None) -> float: - """Return the colormode or set it to 1.0 or 255. - - Optional argument: - cmode -- one of the values 1.0 or 255 - - r, g, b values of colortriples have to be in range 0..cmode. - - Example: - >>> colormode() - 1.0 - >>> colormode(255) - >>> pencolor(240,160,80) - """ - +def colormode(cmode: None = None) -> float: ... @overload def colormode(cmode: float) -> None: ... -def reset() -> None: - """Delete the turtle's drawings and restore its default values. - - No argument. - - Delete the turtle's drawings from the screen, re-center the turtle - and set variables to the default values. - - Example: - >>> position() - (0.00,-22.00) - >>> heading() - 100.0 - >>> reset() - >>> position() - (0.00,0.00) - >>> heading() - 0.0 - """ - -def turtles() -> list[Turtle]: - """Return the list of turtles on the - - Example: - >>> turtles() - [] - """ - +def reset() -> None: ... +def turtles() -> list[Turtle]: ... @overload -def bgcolor() -> _AnyColor: - """Set or return backgroundcolor of the TurtleScreen. - - Arguments (if given): a color string or three numbers - in the range 0..colormode or a 3-tuple of such numbers. - - Example: - >>> bgcolor("orange") - >>> bgcolor() - 'orange' - >>> bgcolor(0.5,0,0.5) - >>> bgcolor() - '#800080' - """ - +def bgcolor() -> _AnyColor: ... @overload def bgcolor(color: _Color) -> None: ... @overload def bgcolor(r: float, g: float, b: float) -> None: ... @overload -def tracer(n: None = None) -> int: - """Turns turtle animation on/off and set delay for update drawings. - - Optional arguments: - n -- nonnegative integer - delay -- nonnegative integer - - If n is given, only each n-th regular screen update is really performed. - (Can be used to accelerate the drawing of complex graphics.) - Second arguments sets delay value (see RawTurtle.delay()) - - Example: - >>> tracer(8, 25) - >>> dist = 2 - >>> for i in range(200): - ... fd(dist) - ... rt(90) - ... dist += 2 - """ - +def tracer(n: None = None) -> int: ... @overload def tracer(n: int, delay: int | None = None) -> None: ... 
@overload -def delay(delay: None = None) -> int: - """Return or set the drawing delay in milliseconds. - - Optional argument: - delay -- positive integer - - Example: - >>> delay(15) - >>> delay() - 15 - """ - +def delay(delay: None = None) -> int: ... @overload def delay(delay: int) -> None: ... if sys.version_info >= (3, 14): @contextmanager - def no_animation() -> Generator[None]: - """Temporarily turn off auto-updating the - - This is useful for drawing complex shapes where even the fastest setting - is too slow. Once this context manager is exited, the drawing will - be displayed. - - Example (for a TurtleScreen instance named screen - and a Turtle instance named turtle): - >>> with no_animation(): - ... turtle.circle(50) - """ - -def update() -> None: - """Perform a TurtleScreen update.""" - -def window_width() -> int: - """Return the width of the turtle window. - - Example: - >>> window_width() - 640 - """ - -def window_height() -> int: - """Return the height of the turtle window. - - Example: - >>> window_height() - 480 - """ - -def getcanvas() -> Canvas: - """Return the Canvas of this TurtleScreen. - - No argument. - - Example: - >>> cv = getcanvas() - >>> cv - - """ - -def getshapes() -> list[str]: - """Return a list of names of all currently available turtle shapes. - - No argument. - - Example: - >>> getshapes() - ['arrow', 'blank', 'circle', ... , 'turtle'] - """ - -def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: - """Bind fun to mouse-click event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - add -- True or False. If True, new binding will be added, otherwise - it will replace a former binding. - - Example for the anonymous turtle, i. e. the procedural way: - - >>> def turn(x, y): - ... left(360) - ... - >>> onclick(turn) # Now clicking into the turtle will turn it. - >>> onclick(None) # event-binding will be removed - """ - -def onkey(fun: Callable[[], object], key: str) -> None: - """Bind fun to key-release event of key. - - Arguments: - fun -- a function with no arguments - key -- a string: key (e.g. "a") or key-symbol (e.g. "space") - - In order to be able to register key-events, TurtleScreen - must have focus. (See method listen.) - - Example: - - >>> def f(): - ... fd(50) - ... lt(60) - ... - >>> onkey(f, "Up") - >>> listen() - - Subsequently the turtle can be moved by repeatedly pressing - the up-arrow key, consequently drawing a hexagon - - """ - -def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: - """Set focus on TurtleScreen (in order to collect key-events) - - No arguments. - Dummy arguments are provided in order - to be able to pass listen to the onclick method. - - Example: - >>> listen() - """ - -def ontimer(fun: Callable[[], object], t: int = 0) -> None: - """Install a timer, which calls fun after t milliseconds. - - Arguments: - fun -- a function with no arguments. - t -- a number >= 0 - - Example: - - >>> running = True - >>> def f(): - ... if running: - ... fd(50) - ... lt(60) - ... ontimer(f, 250) - ... - >>> f() # makes the turtle marching around - >>> running = False - """ + def no_animation() -> Generator[None]: ... +def update() -> None: ... +def window_width() -> int: ... +def window_height() -> int: ... +def getcanvas() -> Canvas: ... +def getshapes() -> list[str]: ... 
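The functional turtle API above keeps paired overloads in which a call with no argument reports the current value and a call with an argument applies it and returns None (mode, colormode, tracer, delay, bgcolor), and this patch narrows mode's argument to Literal["standard", "logo", "world"]. A minimal stand-alone sketch of that pattern, not taken from the patch; the module-level _current_mode variable and the print calls are invented for illustration.

# Sketch of the getter/setter overload pattern used throughout these stubs:
# no argument -> return the current value, an argument -> apply it, return None.
from typing import Literal, overload

_current_mode: Literal["standard", "logo", "world"] = "standard"

@overload
def mode(mode: None = None) -> str: ...
@overload
def mode(mode: Literal["standard", "logo", "world"]) -> None: ...
def mode(mode: Literal["standard", "logo", "world"] | None = None) -> str | None:
    global _current_mode
    if mode is None:
        return _current_mode      # getter form
    _current_mode = mode          # setter form; only the three literals type-check
    return None

print(mode())    # "standard"
mode("logo")
print(mode())    # "logo"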
+def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def onkey(fun: Callable[[], object], key: str) -> None: ... +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... +def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... @overload -def bgpic(picname: None = None) -> str: - """Set background image or return name of current backgroundimage. - - Optional argument: - picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". - - If picname is a filename, set the corresponding image as background. - If picname is "nopic", delete backgroundimage, if present. - If picname is None, return the filename of the current backgroundimage. - - Example: - >>> bgpic() - 'nopic' - >>> bgpic("landscape.gif") - >>> bgpic() - 'landscape.gif' - """ - +def bgpic(picname: None = None) -> str: ... @overload def bgpic(picname: str) -> None: ... @overload -def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: - """Resize the canvas the turtles are drawing on. - - Optional arguments: - canvwidth -- positive integer, new width of canvas in pixels - canvheight -- positive integer, new height of canvas in pixels - bg -- colorstring or color-tuple, new backgroundcolor - If no arguments are given, return current (canvaswidth, canvasheight) - - Do not alter the drawing window. To observe hidden parts of - the canvas use the scrollbars. (Can make visible those parts - of a drawing, which were outside the canvas before!) - - Example (for a Turtle instance named turtle): - >>> turtle.screensize(2000,1500) - >>> # e.g. to search for an erroneously escaped turtle ;-) - """ - +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... @overload def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... if sys.version_info >= (3, 14): - def save(filename: StrPath, *, overwrite: bool = False) -> None: - """Save the drawing as a PostScript file - - Arguments: - filename -- a string, the path of the created file. - Must end with '.ps' or '.eps'. - - Optional arguments: - overwrite -- boolean, if true, then existing files will be overwritten - - Example: - >>> save('my_drawing.eps') - """ + def save(filename: StrPath, *, overwrite: bool = False) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape -def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: - """Bind fun to key-press event of key if key is given, - or to any key-press-event if no key is given. - - Arguments: - fun -- a function with no arguments - key -- a string: key (e.g. "a") or key-symbol (e.g. "space") - - In order to be able to register key-events, TurtleScreen - must have focus. (See method listen.) - - Example (for a TurtleScreen instance named screen - and a Turtle instance named turtle): - - >>> def f(): - ... fd(50) - ... lt(60) - ... - >>> onkeypress(f, "Up") - >>> listen() - - Subsequently the turtle can be moved by repeatedly pressing - the up-arrow key, or by keeping pressed the up-arrow key. - consequently drawing a hexagon. - """ +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey # Functions copied from _Screen: -def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: - """Set the size and position of the main window. 
- - Arguments: - width: as integer a size in pixels, as float a fraction of the - Default is 50% of - height: as integer the height in pixels, as float a fraction of the - Default is 75% of - startx: if positive, starting position in pixels from the left - edge of the screen, if negative from the right edge - Default, startx=None is to center window horizontally. - starty: if positive, starting position in pixels from the top - edge of the screen, if negative from the bottom edge - Default, starty=None is to center window vertically. - - Examples: - >>> setup (width=200, height=200, startx=0, starty=0) - - sets window to 200x200 pixels, in upper left of screen - - >>> setup(width=.75, height=0.5, startx=None, starty=None) - - sets window to 75% of screen by 50% of screen and centers - """ - -def title(titlestring: str) -> None: - """Set title of turtle-window - - Argument: - titlestring -- a string, to appear in the titlebar of the - turtle graphics window. - - This is a method of Screen-class. Not available for TurtleScreen- - objects. - - Example: - >>> title("Welcome to the turtle-zoo!") - """ - -def bye() -> None: - """Shut the turtlegraphics window. - - Example: - >>> bye() - """ - -def exitonclick() -> None: - """Go into mainloop until the mouse is clicked. - - No arguments. - - Bind bye() method to mouseclick on TurtleScreen. - If "using_IDLE" - value in configuration dictionary is False - (default value), enter mainloop. - If IDLE with -n switch (no subprocess) is used, this value should be - set to True in turtle.cfg. In this case IDLE's mainloop - is active also for the client script. - - This is a method of the Screen-class and not available for - TurtleScreen instances. - - Example: - >>> exitonclick() - - """ - -def Screen() -> _Screen: - """Return the singleton screen object. - If none exists at the moment, create a new one and return it, - else return the existing one. - """ +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... +def title(titlestring: str) -> None: ... +def bye() -> None: ... +def exitonclick() -> None: ... +def Screen() -> _Screen: ... # Functions copied from TNavigator: -def degrees(fullcircle: float = 360.0) -> None: - """Set angle measurement units to degrees. - - Optional argument: - fullcircle - a number - - Set angle measurement units, i. e. set number - of 'degrees' for a full circle. Default value is - 360 degrees. - - Example: - >>> left(90) - >>> heading() - 90 - - Change angle measurement unit to grad (also known as gon, - grade, or gradian and equals 1/100-th of the right angle.) - >>> degrees(400.0) - >>> heading() - 100 - - """ - -def radians() -> None: - """Set the angle measurement units to radians. - - No arguments. - - Example: - >>> heading() - 90 - >>> radians() - >>> heading() - 1.5707963267948966 - """ - -def forward(distance: float) -> None: - """Move the turtle forward by the specified distance. - - Aliases: forward | fd - - Argument: - distance -- a number (integer or float) - - Move the turtle forward by the specified distance, in the direction - the turtle is headed. - - Example: - >>> position() - (0.00, 0.00) - >>> forward(25) - >>> position() - (25.00,0.00) - >>> forward(-75) - >>> position() - (-50.00,0.00) - """ - -def back(distance: float) -> None: - """Move the turtle backward by distance. - - Aliases: back | backward | bk - - Argument: - distance -- a number - - Move the turtle backward by distance, opposite to the direction the - turtle is headed. 
Do not change the turtle's heading. - - Example: - >>> position() - (0.00, 0.00) - >>> backward(30) - >>> position() - (-30.00, 0.00) - """ - -def right(angle: float) -> None: - """Turn turtle right by angle units. - - Aliases: right | rt - - Argument: - angle -- a number (integer or float) - - Turn turtle right by angle units. (Units are by default degrees, - but can be set via the degrees() and radians() functions.) - Angle orientation depends on mode. (See this.) - - Example: - >>> heading() - 22.0 - >>> right(45) - >>> heading() - 337.0 - """ - -def left(angle: float) -> None: - """Turn turtle left by angle units. - - Aliases: left | lt - - Argument: - angle -- a number (integer or float) - - Turn turtle left by angle units. (Units are by default degrees, - but can be set via the degrees() and radians() functions.) - Angle orientation depends on mode. (See this.) - - Example: - >>> heading() - 22.0 - >>> left(45) - >>> heading() - 67.0 - """ - -def pos() -> Vec2D: - """Return the turtle's current location (x,y), as a Vec2D-vector. - - Aliases: pos | position - - No arguments. - - Example: - >>> pos() - (0.00, 240.00) - """ - -def xcor() -> float: - """Return the turtle's x coordinate. - - No arguments. - - Example: - >>> reset() - >>> left(60) - >>> forward(100) - >>> print(xcor()) - 50.0 - """ - -def ycor() -> float: - """Return the turtle's y coordinate - --- - No arguments. - - Example: - >>> reset() - >>> left(60) - >>> forward(100) - >>> print(ycor()) - 86.6025403784 - """ - +def degrees(fullcircle: float = 360.0) -> None: ... +def radians() -> None: ... +def forward(distance: float) -> None: ... +def back(distance: float) -> None: ... +def right(angle: float) -> None: ... +def left(angle: float) -> None: ... +def pos() -> Vec2D: ... +def xcor() -> float: ... +def ycor() -> float: ... @overload -def goto(x: tuple[float, float], y: None = None) -> None: - """Move turtle to an absolute position. - - Aliases: setpos | setposition | goto: - - Arguments: - x -- a number or a pair/vector of numbers - y -- a number None - - call: goto(x, y) # two coordinates - --or: goto((x, y)) # a pair (tuple) of coordinates - --or: goto(vec) # e.g. as returned by pos() - - Move turtle to an absolute position. If the pen is down, - a line will be drawn. The turtle's orientation does not change. - - Example: - >>> tp = pos() - >>> tp - (0.00, 0.00) - >>> setpos(60,30) - >>> pos() - (60.00,30.00) - >>> setpos((20,80)) - >>> pos() - (20.00,80.00) - >>> setpos(tp) - >>> pos() - (0.00,0.00) - """ - +def goto(x: tuple[float, float], y: None = None) -> None: ... @overload def goto(x: float, y: float) -> None: ... -def home() -> None: - """Move turtle to the origin - coordinates (0,0). - - No arguments. - - Move turtle to the origin - coordinates (0,0) and set its - heading to its start-orientation (which depends on mode). - - Example: - >>> home() - """ - -def setx(x: float) -> None: - """Set the turtle's first coordinate to x - - Argument: - x -- a number (integer or float) - - Set the turtle's first coordinate to x, leave second coordinate - unchanged. - - Example: - >>> position() - (0.00, 240.00) - >>> setx(10) - >>> position() - (10.00, 240.00) - """ - -def sety(y: float) -> None: - """Set the turtle's second coordinate to y - - Argument: - y -- a number (integer or float) - - Set the turtle's first coordinate to x, second coordinate remains - unchanged. - - Example: - >>> position() - (0.00, 40.00) - >>> sety(-10) - >>> position() - (0.00, -10.00) - """ - +def home() -> None: ... 
+def setx(x: float) -> None: ... +def sety(y: float) -> None: ... @overload -def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: - """Return the distance from the turtle to (x,y) in turtle step units. - - Arguments: - x -- a number or a pair/vector of numbers or a turtle instance - y -- a number None None - - call: distance(x, y) # two coordinates - --or: distance((x, y)) # a pair (tuple) of coordinates - --or: distance(vec) # e.g. as returned by pos() - --or: distance(mypen) # where mypen is another turtle - - Example: - >>> pos() - (0.00, 0.00) - >>> distance(30,40) - 50.0 - >>> pen = Turtle() - >>> pen.forward(77) - >>> distance(pen) - 77.0 - """ - +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(x: float, y: float) -> float: ... @overload -def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: - """Return the angle of the line from the turtle's position to (x, y). - - Arguments: - x -- a number or a pair/vector of numbers or a turtle instance - y -- a number None None - - call: distance(x, y) # two coordinates - --or: distance((x, y)) # a pair (tuple) of coordinates - --or: distance(vec) # e.g. as returned by pos() - --or: distance(mypen) # where mypen is another turtle - - Return the angle, between the line from turtle-position to position - specified by x, y and the turtle's start orientation. (Depends on - modes - "standard" or "logo") - - Example: - >>> pos() - (10.00, 10.00) - >>> towards(0,0) - 225.0 - """ - +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(x: float, y: float) -> float: ... -def heading() -> float: - """Return the turtle's current heading. - - No arguments. - - Example: - >>> left(67) - >>> heading() - 67.0 - """ - -def setheading(to_angle: float) -> None: - """Set the orientation of the turtle to to_angle. - - Aliases: setheading | seth - - Argument: - to_angle -- a number (integer or float) - - Set the orientation of the turtle to to_angle. - Here are some common directions in degrees: - - standard - mode: logo-mode: - -------------------|-------------------- - 0 - east 0 - north - 90 - north 90 - east - 180 - west 180 - south - 270 - south 270 - west - - Example: - >>> setheading(90) - >>> heading() - 90 - """ - -def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: - """Draw a circle with given radius. - - Arguments: - radius -- a number - extent (optional) -- a number - steps (optional) -- an integer - - Draw a circle with given radius. The center is radius units left - of the turtle; extent - an angle - determines which part of the - circle is drawn. If extent is not given, draw the entire circle. - If extent is not a full circle, one endpoint of the arc is the - current pen position. Draw the arc in counterclockwise direction - if radius is positive, otherwise in clockwise direction. Finally - the direction of the turtle is changed by the amount of extent. - - As the circle is approximated by an inscribed regular polygon, - steps determines the number of steps to use. If not given, - it will be calculated automatically. Maybe used to draw regular - polygons. - - call: circle(radius) # full circle - --or: circle(radius, extent) # arc - --or: circle(radius, extent, steps) - --or: circle(radius, steps=6) # 6-sided polygon - - Example: - >>> circle(50) - >>> circle(120, 180) # semicircle - """ +def heading() -> float: ... +def setheading(to_angle: float) -> None: ... 
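goto, distance and towards above retain overloads that accept either a single (x, y) pair (or another turtle) or two separate coordinates. A small sketch, not from the patch, of an implementation that satisfies both call forms; measuring distance from the origin is used only to keep the example self-contained.

# Sketch of the "point or two coordinates" overload pair used by
# goto/distance/towards in the stubs above; names here are illustrative.
from __future__ import annotations

import math
from typing import overload

@overload
def distance(x: tuple[float, float], y: None = None) -> float: ...
@overload
def distance(x: float, y: float) -> float: ...
def distance(x: tuple[float, float] | float, y: float | None = None) -> float:
    # Normalise both call forms to a single (px, py) pair.
    if isinstance(x, tuple):
        px, py = x
    else:
        assert y is not None
        px, py = x, y
    return math.hypot(px, py)     # distance from the origin, for the sketch

print(distance((3.0, 4.0)))   # 5.0
print(distance(3.0, 4.0))     # 5.0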
+def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... fd = forward bk = back @@ -3142,317 +636,47 @@ seth = setheading # Functions copied from TPen: @overload -def resizemode(rmode: None = None) -> str: - """Set resizemode to one of the values: "auto", "user", "noresize". - - (Optional) Argument: - rmode -- one of the strings "auto", "user", "noresize" - - Different resizemodes have the following effects: - - "auto" adapts the appearance of the turtle - corresponding to the value of pensize. - - "user" adapts the appearance of the turtle according to the - values of stretchfactor and outlinewidth (outline), - which are set by shapesize() - - "noresize" no adaption of the turtle's appearance takes place. - If no argument is given, return current resizemode. - resizemode("user") is called by a call of shapesize with arguments. - - - Examples: - >>> resizemode("noresize") - >>> resizemode() - 'noresize' - """ - +def resizemode(rmode: None = None) -> str: ... @overload -def resizemode(rmode: str) -> None: ... +def resizemode(rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload -def pensize(width: None = None) -> int: - """Set or return the line thickness. - - Aliases: pensize | width - - Argument: - width -- positive number - - Set the line thickness to width or return it. If resizemode is set - to "auto" and turtleshape is a polygon, that polygon is drawn with - the same line thickness. If no argument is given, current pensize - is returned. - - Example: - >>> pensize() - 1 - >>> pensize(10) # from here on lines of width 10 are drawn - """ - +def pensize(width: None = None) -> int: ... @overload def pensize(width: int) -> None: ... -def penup() -> None: - """Pull the pen up -- no drawing when moving. - - Aliases: penup | pu | up - - No argument - - Example: - >>> penup() - """ - -def pendown() -> None: - """Pull the pen down -- drawing when moving. - - Aliases: pendown | pd | down - - No argument. - - Example: - >>> pendown() - """ - -def isdown() -> bool: - """Return True if pen is down, False if it's up. - - No argument. - - Example: - >>> penup() - >>> isdown() - False - >>> pendown() - >>> isdown() - True - """ - +def penup() -> None: ... +def pendown() -> None: ... +def isdown() -> bool: ... @overload -def speed(speed: None = None) -> int: - """Return or set the turtle's speed. - - Optional argument: - speed -- an integer in the range 0..10 or a speedstring (see below) - - Set the turtle's speed to an integer value in the range 0 .. 10. - If no argument is given: return current speed. - - If input is a number greater than 10 or smaller than 0.5, - speed is set to 0. - Speedstrings are mapped to speedvalues in the following way: - 'fastest' : 0 - 'fast' : 10 - 'normal' : 6 - 'slow' : 3 - 'slowest' : 1 - speeds from 1 to 10 enforce increasingly faster animation of - line drawing and turtle turning. - - Attention: - speed = 0 : *no* animation takes place. forward/back makes turtle jump - and likewise left/right make the turtle turn instantly. - - Example: - >>> speed(3) - """ - +def speed(speed: None = None) -> int: ... @overload def speed(speed: _Speed) -> None: ... @overload -def pencolor() -> _AnyColor: - """Return or set the pencolor. - - Arguments: - Four input formats are allowed: - - pencolor() - Return the current pencolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. 
- - pencolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - pencolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - pencolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - - If turtleshape is a polygon, the outline of that polygon is drawn - with the newly set pencolor. - - Example: - >>> pencolor('brown') - >>> tup = (0.2, 0.8, 0.55) - >>> pencolor(tup) - >>> pencolor() - '#33cc8c' - """ - +def pencolor() -> _AnyColor: ... @overload def pencolor(color: _Color) -> None: ... @overload def pencolor(r: float, g: float, b: float) -> None: ... @overload -def fillcolor() -> _AnyColor: - """Return or set the fillcolor. - - Arguments: - Four input formats are allowed: - - fillcolor() - Return the current fillcolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - fillcolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - fillcolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - fillcolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - - If turtleshape is a polygon, the interior of that polygon is drawn - with the newly set fillcolor. - - Example: - >>> fillcolor('violet') - >>> col = pencolor() - >>> fillcolor(col) - >>> fillcolor(0, .5, 0) - """ - +def fillcolor() -> _AnyColor: ... @overload def fillcolor(color: _Color) -> None: ... @overload def fillcolor(r: float, g: float, b: float) -> None: ... @overload -def color() -> tuple[_AnyColor, _AnyColor]: - """Return or set the pencolor and fillcolor. - - Arguments: - Several input formats are allowed. - They use 0, 1, 2, or 3 arguments as follows: - - color() - Return the current pencolor and the current fillcolor - as a pair of color specification strings as are returned - by pencolor and fillcolor. - color(colorstring), color((r,g,b)), color(r,g,b) - inputs as in pencolor, set both, fillcolor and pencolor, - to the given value. - color(colorstring1, colorstring2), - color((r1,g1,b1), (r2,g2,b2)) - equivalent to pencolor(colorstring1) and fillcolor(colorstring2) - and analogously, if the other input format is used. - - If turtleshape is a polygon, outline and interior of that polygon - is drawn with the newly set colors. - For more info see: pencolor, fillcolor - - Example: - >>> color('red', 'green') - >>> color() - ('red', 'green') - >>> colormode(255) - >>> color((40, 80, 120), (160, 200, 240)) - >>> color() - ('#285078', '#a0c8f0') - """ - +def color() -> tuple[_AnyColor, _AnyColor]: ... @overload def color(color: _Color) -> None: ... @overload def color(r: float, g: float, b: float) -> None: ... @overload def color(color1: _Color, color2: _Color) -> None: ... -def showturtle() -> None: - """Makes the turtle visible. - - Aliases: showturtle | st - - No argument. - - Example: - >>> hideturtle() - >>> showturtle() - """ - -def hideturtle() -> None: - """Makes the turtle invisible. - - Aliases: hideturtle | ht - - No argument. - - It's a good idea to do this while you're in the - middle of a complicated drawing, because hiding - the turtle speeds up the drawing observably. 
- - Example: - >>> hideturtle() - """ - -def isvisible() -> bool: - """Return True if the Turtle is shown, False if it's hidden. - - No argument. - - Example: - >>> hideturtle() - >>> print(isvisible()) - False - """ +def showturtle() -> None: ... +def hideturtle() -> None: ... +def isvisible() -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload -def pen() -> _PenState: - """Return or set the pen's attributes. - - Arguments: - pen -- a dictionary with some or all of the below listed keys. - **pendict -- one or more keyword-arguments with the below - listed keys as keywords. - - Return or set the pen's attributes in a 'pen-dictionary' - with the following key/value pairs: - "shown" : True/False - "pendown" : True/False - "pencolor" : color-string or color-tuple - "fillcolor" : color-string or color-tuple - "pensize" : positive number - "speed" : number in range 0..10 - "resizemode" : "auto" or "user" or "noresize" - "stretchfactor": (positive number, positive number) - "shearfactor": number - "outline" : positive number - "tilt" : number - - This dictionary can be used as argument for a subsequent - pen()-call to restore the former pen-state. Moreover one - or more of these attributes can be provided as keyword-arguments. - This can be used to set several pen attributes in one statement. - - - Examples: - >>> pen(fillcolor="black", pencolor="red", pensize=10) - >>> pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - >>> penstate=pen() - >>> color("yellow","") - >>> penup() - >>> pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - >>> p.pen(penstate, fillcolor="green") - >>> p.pen() - {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, - 'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', - 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} - """ - +def pen() -> _PenState: ... @overload def pen( pen: _PenState | None = None, @@ -3463,7 +687,7 @@ def pen( fillcolor: _Color = ..., pensize: int = ..., speed: int = ..., - resizemode: str = ..., + resizemode: Literal["auto", "user", "noresize"] = ..., stretchfactor: tuple[float, float] = ..., outline: int = ..., tilt: float = ..., @@ -3479,559 +703,88 @@ ht = hideturtle # Functions copied from RawTurtle: -def setundobuffer(size: int | None) -> None: - """Set or disable undobuffer. - - Argument: - size -- an integer or None - - If size is an integer an empty undobuffer of given size is installed. - Size gives the maximum number of turtle-actions that can be undone - by the undo() function. - If size is None, no undobuffer is present. - - Example: - >>> setundobuffer(42) - """ - -def undobufferentries() -> int: - """Return count of entries in the undobuffer. - - No argument. - - Example: - >>> while undobufferentries(): - ... undo() - """ - +def setundobuffer(size: int | None) -> None: ... +def undobufferentries() -> int: ... @overload -def shape(name: None = None) -> str: - """Set turtle shape to shape with given name / return current shapename. - - Optional argument: - name -- a string, which is a valid shapename - - Set turtle shape to shape with given name or, if name is not given, - return name of current shape. - Shape with name must exist in the TurtleScreen's shape dictionary. 
- Initially there are the following polygon shapes: - 'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. - To learn about how to deal with shapes see Screen-method register_shape. - - Example: - >>> shape() - 'arrow' - >>> shape("turtle") - >>> shape() - 'turtle' - """ - +def shape(name: None = None) -> str: ... @overload def shape(name: str) -> None: ... if sys.version_info >= (3, 12): - def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: - """Instantly move turtle to an absolute position. - - Arguments: - x -- a number or None - y -- a number None - fill_gap -- a boolean This argument must be specified by name. - - call: teleport(x, y) # two coordinates - --or: teleport(x) # teleport to x position, keeping y as is - --or: teleport(y=y) # teleport to y position, keeping x as is - --or: teleport(x, y, fill_gap=True) - # teleport but fill the gap in between - - Move turtle to an absolute position. Unlike goto(x, y), a line will not - be drawn. The turtle's orientation does not change. If currently - filling, the polygon(s) teleported from will be filled after leaving, - and filling will begin again after teleporting. This can be disabled - with fill_gap=True, which makes the imaginary line traveled during - teleporting act as a fill barrier like in goto(x, y). - - Example: - >>> tp = pos() - >>> tp - (0.00,0.00) - >>> teleport(60) - >>> pos() - (60.00,0.00) - >>> teleport(y=10) - >>> pos() - (60.00,10.00) - >>> teleport(20, 30) - >>> pos() - (20.00,30.00) - """ + def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... # Unsafely overlaps when no arguments are provided @overload -def shapesize() -> tuple[float, float, float]: - """Set/return turtle's stretchfactors/outline. Set resizemode to "user". - - Optional arguments: - stretch_wid : positive number - stretch_len : positive number - outline : positive number - - Return or set the pen's attributes x/y-stretchfactors and/or outline. - Set resizemode to "user". - If and only if resizemode is set to "user", the turtle will be displayed - stretched according to its stretchfactors: - stretch_wid is stretchfactor perpendicular to orientation - stretch_len is stretchfactor in direction of turtles orientation. - outline determines the width of the shapes's outline. - - Examples: - >>> resizemode("user") - >>> shapesize(5, 5, 12) - >>> shapesize(outline=8) - """ - +def shapesize() -> tuple[float, float, float]: ... @overload def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... @overload -def shearfactor(shear: None = None) -> float: - """Set or return the current shearfactor. - - Optional argument: shear -- number, tangent of the shear angle - - Shear the turtleshape according to the given shearfactor shear, - which is the tangent of the shear angle. DO NOT change the - turtle's heading (direction of movement). - If shear is not given: return the current shearfactor, i. e. the - tangent of the shear angle, by which lines parallel to the - heading of the turtle are sheared. - - Examples: - >>> shape("circle") - >>> shapesize(5,2) - >>> shearfactor(0.5) - >>> shearfactor() - >>> 0.5 - """ - +def shearfactor(shear: None = None) -> float: ... @overload def shearfactor(shear: float) -> None: ... 
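The "# Unsafely overlaps when no arguments are provided" comments kept around shapesize (above) and shapetransform (below) refer to the getter/setter overloads both matching a zero-argument call with different return types. A hedged sketch of that situation, outside the patch; checkers such as mypy will typically flag the overlap, which is exactly what the comment records.

# Sketch of why the shapesize/shapetransform overloads are marked as
# unsafely overlapping: a zero-argument call matches both the getter and
# the all-optional setter, and only the first overload "wins".
from typing import overload

@overload
def shapesize() -> tuple[float, float, float]: ...             # getter
@overload
def shapesize(stretch_wid: float | None = None,
              stretch_len: float | None = None,
              outline: float | None = None) -> None: ...        # setter
def shapesize(stretch_wid=None, stretch_len=None, outline=None):
    if stretch_wid is None and stretch_len is None and outline is None:
        return (1.0, 1.0, 1.0)    # pretend current values, for the sketch
    return None

size = shapesize()       # checkers resolve this to the first overload
print(size)              # (1.0, 1.0, 1.0)
shapesize(2.0, 3.0)      # setter form returns None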
# Unsafely overlaps when no arguments are provided @overload -def shapetransform() -> tuple[float, float, float, float]: - """Set or return the current transformation matrix of the turtle shape. - - Optional arguments: t11, t12, t21, t22 -- numbers. - - If none of the matrix elements are given, return the transformation - matrix. - Otherwise set the given elements and transform the turtleshape - according to the matrix consisting of first row t11, t12 and - second row t21, 22. - Modify stretchfactor, shearfactor and tiltangle according to the - given matrix. - - Examples: - >>> shape("square") - >>> shapesize(4,2) - >>> shearfactor(-0.5) - >>> shapetransform() - (4.0, -1.0, -0.0, 2.0) - """ - +def shapetransform() -> tuple[float, float, float, float]: ... @overload def shapetransform( t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... -def get_shapepoly() -> _PolygonCoords | None: - """Return the current shape polygon as tuple of coordinate pairs. - - No argument. - - Examples: - >>> shape("square") - >>> shapetransform(4, -1, 0, 2) - >>> get_shapepoly() - ((50, -20), (30, 20), (-50, 20), (-30, -20)) - - """ +def get_shapepoly() -> _PolygonCoords | None: ... if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") - def settiltangle(angle: float) -> None: - """Rotate the turtleshape to point in the specified direction - - Argument: angle -- number - - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). - - Deprecated since Python 3.1 - - Examples: - >>> shape("circle") - >>> shapesize(5,2) - >>> settiltangle(45) - >>> stamp() - >>> fd(50) - >>> settiltangle(-45) - >>> stamp() - >>> fd(50) - """ + def settiltangle(angle: float) -> None: ... @overload -def tiltangle(angle: None = None) -> float: - """Set or return the current tilt-angle. - - Optional argument: angle -- number - - Rotate the turtleshape to point in the direction specified by angle, - regardless of its current tilt-angle. DO NOT change the turtle's - heading (direction of movement). - If angle is not given: return the current tilt-angle, i. e. the angle - between the orientation of the turtleshape and the heading of the - turtle (its direction of movement). - - Examples: - >>> shape("circle") - >>> shapesize(5, 2) - >>> tiltangle() - 0.0 - >>> tiltangle(45) - >>> tiltangle() - 45.0 - >>> stamp() - >>> fd(50) - >>> tiltangle(-45) - >>> tiltangle() - 315.0 - >>> stamp() - >>> fd(50) - """ - +def tiltangle(angle: None = None) -> float: ... @overload def tiltangle(angle: float) -> None: ... -def tilt(angle: float) -> None: - """Rotate the turtleshape by angle. - - Argument: - angle - a number - - Rotate the turtleshape by angle from its current tilt-angle, - but do NOT change the turtle's heading (direction of movement). - - Examples: - >>> shape("circle") - >>> shapesize(5,2) - >>> tilt(30) - >>> fd(50) - >>> tilt(30) - >>> fd(50) - """ +def tilt(angle: float) -> None: ... # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. -def stamp() -> Any: - """Stamp a copy of the turtleshape onto the canvas and return its id. - - No argument. - - Stamp a copy of the turtle shape onto the canvas at the current - turtle position. 
Return a stamp_id for that stamp, which can be - used to delete it by calling clearstamp(stamp_id). - - Example: - >>> color("blue") - >>> stamp() - 13 - >>> fd(50) - """ - -def clearstamp(stampid: int | tuple[int, ...]) -> None: - """Delete stamp with given stampid - - Argument: - stampid - an integer, must be return value of previous stamp() call. - - Example: - >>> color("blue") - >>> astamp = stamp() - >>> fd(50) - >>> clearstamp(astamp) - """ - -def clearstamps(n: int | None = None) -> None: - """Delete all or first/last n of turtle's stamps. - - Optional argument: - n -- an integer - - If n is None, delete all of pen's stamps, - else if n > 0 delete first n stamps - else if n < 0 delete last n stamps. - - Example: - >>> for i in range(8): - ... stamp(); fd(30) - ... - >>> clearstamps(2) - >>> clearstamps(-2) - >>> clearstamps() - """ - -def filling() -> bool: - """Return fillstate (True if filling, False else). - - No argument. - - Example: - >>> begin_fill() - >>> if filling(): - ... pensize(5) - ... else: - ... pensize(3) - """ +def stamp() -> Any: ... +def clearstamp(stampid: int | tuple[int, ...]) -> None: ... +def clearstamps(n: int | None = None) -> None: ... +def filling() -> bool: ... if sys.version_info >= (3, 14): @contextmanager - def fill() -> Generator[None]: - """A context manager for filling a shape. - - Implicitly ensures the code block is wrapped with - begin_fill() and end_fill(). - - Example: - >>> color("black", "red") - >>> with fill(): - ... circle(60) - """ - -def begin_fill() -> None: - """Called just before drawing a shape to be filled. - - No argument. - - Example: - >>> color("black", "red") - >>> begin_fill() - >>> circle(60) - >>> end_fill() - """ - -def end_fill() -> None: - """Fill the shape drawn after the call begin_fill(). - - No argument. - - Example: - >>> color("black", "red") - >>> begin_fill() - >>> circle(60) - >>> end_fill() - """ + def fill() -> Generator[None]: ... +def begin_fill() -> None: ... +def end_fill() -> None: ... @overload -def dot(size: int | _Color | None = None) -> None: - """Draw a dot with diameter size, using color. - - Optional arguments: - size -- an integer >= 1 (if given) - color -- a colorstring or a numeric color tuple - - Draw a circular dot with diameter size, using color. - If size is not given, the maximum of pensize+4 and 2*pensize is used. - - Example: - >>> dot() - >>> fd(50); dot(20, "blue"); fd(50) - """ - +def dot(size: int | _Color | None = None) -> None: ... @overload def dot(size: int | None, color: _Color, /) -> None: ... @overload def dot(size: int | None, r: float, g: float, b: float, /) -> None: ... -def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: - """Write text at the current turtle position. - - Arguments: - arg -- info, which is to be written to the TurtleScreen - move (optional) -- True/False - align (optional) -- one of the strings "left", "center" or right" - font (optional) -- a triple (fontname, fontsize, fonttype) - - Write text - the string representation of arg - at the current - turtle position according to align ("left", "center" or right") - and with the given font. - If move is True, the pen is moved to the bottom-right corner - of the text. By default, move is False. - - Example: - >>> write('Home = ', True, align="center") - >>> write((0,0), True) - """ +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: ... 
if sys.version_info >= (3, 14): @contextmanager - def poly() -> Generator[None]: - """A context manager for recording the vertices of a polygon. - - Implicitly ensures that the code block is wrapped with - begin_poly() and end_poly() - - Example (for a Turtle instance named turtle) where we create a - triangle as the polygon and move the turtle 100 steps forward: - >>> with poly(): - ... for side in range(3) - ... forward(50) - ... right(60) - >>> forward(100) - """ - -def begin_poly() -> None: - """Start recording the vertices of a polygon. - - No argument. - - Start recording the vertices of a polygon. Current turtle position - is first point of polygon. - - Example: - >>> begin_poly() - """ - -def end_poly() -> None: - """Stop recording the vertices of a polygon. + def poly() -> Generator[None]: ... - No argument. - - Stop recording the vertices of a polygon. Current turtle position is - last point of polygon. This will be connected with the first point. - - Example: - >>> end_poly() - """ - -def get_poly() -> _PolygonCoords | None: - """Return the lastly recorded polygon. - - No argument. - - Example: - >>> p = get_poly() - >>> register_shape("myFavouriteShape", p) - """ - -def getscreen() -> TurtleScreen: - """Return the TurtleScreen object, the turtle is drawing on. - - No argument. - - Return the TurtleScreen object, the turtle is drawing on. - So TurtleScreen-methods can be called for that object. - - Example: - >>> ts = getscreen() - >>> ts - - >>> ts.bgcolor("pink") - """ - -def getturtle() -> Turtle: - """Return the Turtleobject itself. - - No argument. - - Only reasonable use: as a function to return the 'anonymous turtle': - - Example: - >>> pet = getturtle() - >>> pet.fd(50) - >>> pet - - >>> turtles() - [] - """ +def begin_poly() -> None: ... +def end_poly() -> None: ... +def get_poly() -> _PolygonCoords | None: ... +def getscreen() -> TurtleScreen: ... +def getturtle() -> Turtle: ... getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: - """Bind fun to mouse-button-release event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - - Example (for a MyTurtle instance named joe): - >>> class MyTurtle(Turtle): - ... def glow(self,x,y): - ... self.fillcolor("red") - ... def unglow(self,x,y): - ... self.fillcolor("") - ... - >>> joe = MyTurtle() - >>> joe.onclick(joe.glow) - >>> joe.onrelease(joe.unglow) - - Clicking on joe turns fillcolor red, unclicking turns it to - transparent. - """ - -def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: - """Bind fun to mouse-move event on this turtle on canvas. - - Arguments: - fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. - btn -- number of the mouse-button defaults to 1 (left mouse button). - - Every sequence of mouse-move-events on a turtle is preceded by a - mouse-click event on that - - Example: - >>> ondrag(goto) - - Subsequently clicking and dragging a Turtle will move it - across the screen thereby producing handdrawings (if pen is - down). - """ - -def undo() -> None: - """undo (repeatedly) the last turtle action. - - No argument. - - undo (repeatedly) the last turtle action. - Number of available undo actions is determined by the size of - the undobuffer. 
- - Example: - >>> for i in range(4): - ... fd(50); lt(80) - ... - >>> for i in range(8): - ... undo() - ... - """ +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def undo() -> None: ... turtlesize = shapesize # Functions copied from RawTurtle with a few tweaks: -def clone() -> Turtle: - """Create and return a clone of the - - No argument. - - Create and return a clone of the turtle with same position, heading - and turtle properties. - - Example (for a Turtle instance named mick): - mick = Turtle() - joe = mick.clone() - """ +def clone() -> Turtle: ... # Extra functions present only in the global scope: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi index b5769ad86ebdb..649e463ff71f8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi @@ -1,7 +1,3 @@ -""" -Define names for built-in types that aren't directly accessible as a builtin. -""" - import sys from _typeshed import AnnotationForm, MaybeNone, SupportsKeysAndGetItem from _typeshed.importlib import LoaderProtocol @@ -69,28 +65,12 @@ if sys.version_info >= (3, 13): _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") -_KT = TypeVar("_KT") +_KT_co = TypeVar("_KT_co", covariant=True) _VT_co = TypeVar("_VT_co", covariant=True) # Make sure this class definition stays roughly in line with `builtins.function` @final class FunctionType: - """Create a function object. - - code - a code object - globals - the globals dictionary - name - a string that overrides the name from the code object - argdefs - a tuple that specifies the default argument values - closure - a tuple that supplies the bindings for free variables - kwdefaults - a dictionary that specifies the default keyword argument values - """ - @property def __closure__(self) -> tuple[CellType, ...] | None: ... __code__: CodeType @@ -131,13 +111,9 @@ class FunctionType: closure: tuple[CellType, ...] | None = None, ) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... @overload - def __get__(self, instance: None, owner: type, /) -> FunctionType: - """Return an attribute of instance, which is of type owner.""" - + def __get__(self, instance: None, owner: type, /) -> FunctionType: ... @overload def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ... @@ -145,8 +121,6 @@ LambdaType = FunctionType @final class CodeType: - """Create a code object. Not for the faint of heart.""" - def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @property @@ -287,8 +261,7 @@ class CodeType: co_qualname: str = ..., co_linetable: bytes = ..., co_exceptiontable: bytes = ..., - ) -> Self: - """Return a copy of the code object with new values for the specified fields.""" + ) -> Self: ... elif sys.version_info >= (3, 10): def replace( self, @@ -309,8 +282,7 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_linetable: bytes = ..., - ) -> Self: - """Return a copy of the code object with new values for the specified fields.""" + ) -> Self: ... 
else: def replace( self, @@ -331,64 +303,37 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_lnotab: bytes = ..., - ) -> Self: - """Return a copy of the code object with new values for the specified fields.""" + ) -> Self: ... + if sys.version_info >= (3, 13): __replace__ = replace @final -class MappingProxyType(Mapping[_KT, _VT_co]): - """Read-only proxy of a mapping.""" - +class MappingProxyType(Mapping[_KT_co, _VT_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] __hash__: ClassVar[None] # type: ignore[assignment] - def __new__(cls, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> Self: ... - def __getitem__(self, key: _KT, /) -> _VT_co: - """Return self[key].""" - - def __iter__(self) -> Iterator[_KT]: - """Implement iter(self).""" - - def __len__(self) -> int: - """Return len(self).""" - + def __new__(cls, mapping: SupportsKeysAndGetItem[_KT_co, _VT_co]) -> Self: ... + def __getitem__(self, key: _KT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __iter__(self) -> Iterator[_KT_co]: ... + def __len__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... - def copy(self) -> dict[_KT, _VT_co]: - """D.copy() -> a shallow copy of D""" - - def keys(self) -> KeysView[_KT]: - """D.keys() -> a set-like object providing a view on D's keys""" - - def values(self) -> ValuesView[_VT_co]: - """D.values() -> an object providing a view on D's values""" - - def items(self) -> ItemsView[_KT, _VT_co]: - """D.items() -> a set-like object providing a view on D's items""" - + def copy(self) -> dict[_KT_co, _VT_co]: ... + def keys(self) -> KeysView[_KT_co]: ... + def values(self) -> ValuesView[_VT_co]: ... + def items(self) -> ItemsView[_KT_co, _VT_co]: ... @overload - def get(self, key: _KT, /) -> _VT_co | None: - """Return the value for key if key is in the mapping, else default.""" - + def get(self, key: _KT_co, /) -> _VT_co | None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload - def get(self, key: _KT, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + def get(self, key: _KT_co, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload - def get(self, key: _KT, default: _T2, /) -> _VT_co | _T2: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" - - def __reversed__(self) -> Iterator[_KT]: - """D.__reversed__() -> reverse iterator""" - - def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: - """Return self|value.""" - - def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT | _T1, _VT_co | _T2]: - """Return value|self.""" + def get(self, key: _KT_co, default: _T2, /) -> _VT_co | _T2: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __reversed__(self) -> Iterator[_KT_co]: ... + def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... + def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... 
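The types.pyi hunk above replaces the invariant _KT with a covariant _KT_co on MappingProxyType and silences the usual "covariant type used in parameter position" errors with type: ignore / pyright: ignore. One consequence of key covariance, sketched outside the patch: a read-only proxy keyed by a subtype is accepted where a wider key type is expected. The describe function below is hypothetical, written only for this illustration.

# Sketch: with a covariant key TypeVar, a read-only proxy keyed by a
# subtype is accepted where the supertype key is expected.
from types import MappingProxyType

def describe(m: MappingProxyType[object, int]) -> None:
    for key, value in m.items():
        print(f"{key!r} -> {value}")

by_name: MappingProxyType[str, int] = MappingProxyType({"a": 1, "b": 2})
describe(by_name)   # accepted once the key TypeVar is covariant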
if sys.version_info >= (3, 12): @disjoint_base class SimpleNamespace: - """A simple attribute-based namespace.""" - __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): def __init__( @@ -402,16 +347,10 @@ if sys.version_info >= (3, 12): def __setattr__(self, name: str, value: Any, /) -> None: ... def __delattr__(self, name: str, /) -> None: ... if sys.version_info >= (3, 13): - def __replace__(self, **kwargs: Any) -> Self: - """Return a copy of the namespace object with new values for the specified attributes.""" + def __replace__(self, **kwargs: Any) -> Self: ... else: class SimpleNamespace: - """A simple attribute-based namespace. - - SimpleNamespace(**kwargs) - """ - __hash__: ClassVar[None] # type: ignore[assignment] def __init__(self, **kwargs: Any) -> None: ... def __eq__(self, value: object, /) -> bool: ... @@ -421,11 +360,6 @@ else: @disjoint_base class ModuleType: - """Create a module object. - - The name must be a string; the optional doc argument can have any type. - """ - __name__: str __file__: str | None @property @@ -453,15 +387,6 @@ class ModuleType: @final class CellType: - """Create a new cell object. - - contents - the contents of the cell. If not specified, the cell will be empty, - and - further attempts to access its cell_contents attribute will - raise a ValueError. - """ - def __new__(cls, contents: object = ..., /) -> Self: ... __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -479,47 +404,28 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): @property def gi_running(self) -> bool: ... @property - def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: - """object being iterated by yield from, or None""" + def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: ... if sys.version_info >= (3, 11): @property def gi_suspended(self) -> bool: ... __name__: str __qualname__: str - def __iter__(self) -> Self: - """Implement iter(self).""" - - def __next__(self) -> _YieldT_co: - """Implement next(self).""" - - def send(self, arg: _SendT_contra, /) -> _YieldT_co: - """send(arg) -> send 'arg' into generator, - return next yielded value or raise StopIteration. - """ - + def __iter__(self) -> Self: ... + def __next__(self) -> _YieldT_co: ... + def send(self, arg: _SendT_contra, /) -> _YieldT_co: ... @overload - def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: - """throw(value) - throw(type[,value[,tb]]) - - Raise exception in generator, return next yielded value or raise - StopIteration. - the (type, val, tb) signature is deprecated, - and may be removed in a future version of Python. - """ - + def throw( + self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / + ) -> _YieldT_co: ... @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> Any: ... @final class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property - def ag_await(self) -> Awaitable[Any] | None: - """object being awaited on, or None""" - + def ag_await(self) -> Awaitable[Any] | None: ... @property def ag_code(self) -> CodeType: ... @property @@ -532,34 +438,17 @@ class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property def ag_suspended(self) -> bool: ... 
- def __aiter__(self) -> Self: - """Return an awaitable, that resolves in asynchronous iterator.""" - - def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: - """Return a value or raise StopAsyncIteration.""" - - def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: - """asend(v) -> send 'v' in generator.""" - + def __aiter__(self) -> Self: ... + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... + def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... @overload async def athrow( self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: - """athrow(value) - athrow(type[,value[,tb]]) - - raise exception in generator. - the (type, val, tb) signature is deprecated, - and may be removed in a future version of Python. - """ - + ) -> _YieldT_co: ... @overload async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... - def aclose(self) -> Coroutine[Any, Any, None]: - """aclose() -> raise GeneratorExit inside generator.""" - - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def aclose(self) -> Coroutine[Any, Any, None]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... # Non-default variations to accommodate coroutines _SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) @@ -570,9 +459,7 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): __name__: str __qualname__: str @property - def cr_await(self) -> Any | None: - """object being awaited on, or None""" - + def cr_await(self) -> Any | None: ... @property def cr_code(self) -> CodeType: ... if sys.version_info >= (3, 12): @@ -590,38 +477,20 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): @property def cr_suspended(self) -> bool: ... - def close(self) -> None: - """close() -> raise GeneratorExit inside coroutine.""" - - def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: - """Return an iterator to be used in await expression.""" - - def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: - """send(arg) -> send 'arg' into coroutine, - return next iterated value or raise StopIteration. - """ - + def close(self) -> None: ... + def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: ... + def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: ... @overload - def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: - """throw(value) - throw(type[,value[,traceback]]) - - Raise exception in coroutine, return next iterated value or raise - StopIteration. - the (type, val, tb) signature is deprecated, - and may be removed in a future version of Python. - """ - + def throw( + self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / + ) -> _YieldT_co: ... @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> Any: ... @final class MethodType: - """Create a bound instance method object.""" - @property def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function @property @@ -629,23 +498,18 @@ class MethodType: @property def __defaults__(self) -> tuple[Any, ...] | None: ... 
# inherited from the added function @property - def __func__(self) -> Callable[..., Any]: - """the function (or other callable) implementing a method""" - + def __func__(self) -> Callable[..., Any]: ... @property - def __self__(self) -> object: - """the instance to which a method is bound""" - + def __self__(self) -> object: ... @property def __name__(self) -> str: ... # inherited from the added function @property def __qualname__(self) -> str: ... # inherited from the added function def __new__(cls, func: Callable[..., Any], instance: object, /) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + if sys.version_info >= (3, 13): - def __get__(self, instance: object, owner: type | None = None, /) -> Self: - """Return an attribute of instance, which is of type owner.""" + def __get__(self, instance: object, owner: type | None = None, /) -> Self: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -658,9 +522,7 @@ class BuiltinFunctionType: def __name__(self) -> str: ... @property def __qualname__(self) -> str: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -674,11 +536,8 @@ class WrapperDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner.""" + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... @final class MethodWrapperType: @@ -690,9 +549,7 @@ class MethodWrapperType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -705,11 +562,8 @@ class MethodDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner.""" + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... @final class ClassMethodDescriptorType: @@ -719,16 +573,11 @@ class ClassMethodDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function.""" - - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner.""" + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... 
@final class TracebackType: - """Create a new traceback object.""" - def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... tb_next: TracebackType | None # the rest are read-only @@ -744,39 +593,27 @@ class FrameType: @property def f_back(self) -> FrameType | None: ... @property - def f_builtins(self) -> dict[str, Any]: - """Return the built-in variables in the frame.""" - + def f_builtins(self) -> dict[str, Any]: ... @property - def f_code(self) -> CodeType: - """Return the code object being executed in this frame.""" - + def f_code(self) -> CodeType: ... @property - def f_globals(self) -> dict[str, Any]: - """Return the global variables in the frame.""" - + def f_globals(self) -> dict[str, Any]: ... @property - def f_lasti(self) -> int: - """Return the index of the last attempted instruction in the frame.""" + def f_lasti(self) -> int: ... # see discussion in #6769: f_lineno *can* sometimes be None, # but you should probably file a bug report with CPython if you encounter it being None in the wild. # An `int | None` annotation here causes too many false-positive errors, so applying `int | Any`. @property - def f_lineno(self) -> int | MaybeNone: - """Return the current line number in the frame.""" - + def f_lineno(self) -> int | MaybeNone: ... @property - def f_locals(self) -> dict[str, Any]: - """Return the mapping used by the frame to look up local variables.""" + def f_locals(self) -> dict[str, Any]: ... f_trace: Callable[[FrameType, str, Any], Any] | None f_trace_lines: bool f_trace_opcodes: bool - def clear(self) -> None: - """Clear all references held by the frame.""" + def clear(self) -> None: ... if sys.version_info >= (3, 14): @property - def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: - """Return the generator or coroutine associated with this frame, or None.""" + def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: ... @final class GetSetDescriptorType: @@ -786,14 +623,9 @@ class GetSetDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner.""" - - def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value.""" - - def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance.""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... @final class MemberDescriptorType: @@ -803,82 +635,27 @@ class MemberDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner.""" - - def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value.""" - - def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance.""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __set__(self, instance: Any, value: Any, /) -> None: ... + def __delete__(self, instance: Any, /) -> None: ... 
def new_class( name: str, bases: Iterable[object] = (), kwds: dict[str, Any] | None = None, exec_body: Callable[[dict[str, Any]], object] | None = None, -) -> type: - """Create a class object dynamically using the appropriate metaclass.""" - -def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: - """Resolve MRO entries dynamically as specified by PEP 560.""" - +) -> type: ... +def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... def prepare_class( name: str, bases: tuple[type, ...] = (), kwds: dict[str, Any] | None = None -) -> tuple[type, dict[str, Any], dict[str, Any]]: - """Call the __prepare__ method of the appropriate metaclass. - - Returns (metaclass, namespace, kwds) as a 3-tuple - - *metaclass* is the appropriate metaclass - *namespace* is the prepared class namespace - *kwds* is an updated copy of the passed in kwds argument with any - 'metaclass' entry removed. If no kwds argument is passed in, this will - be an empty dict. - """ +) -> tuple[type, dict[str, Any], dict[str, Any]]: ... if sys.version_info >= (3, 12): - def get_original_bases(cls: type, /) -> tuple[Any, ...]: - """Return the class's "original" bases prior to modification by `__mro_entries__`. - - Examples:: - - from typing import TypeVar, Generic, NamedTuple, TypedDict - - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... - Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) - - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) - """ + def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... # Does not actually inherit from property, but saying it does makes sure that # pyright handles this class correctly. class DynamicClassAttribute(property): - """Route attribute access on a class to __getattr__. - - This is a descriptor, used to define attributes that act differently when - accessed through an instance and through a class. Instance access remains - normal, but access to an attribute through a class will be routed to the - class's __getattr__ method; this is done by raising AttributeError. - - This allows one to have properties active on an instance, and have virtual - attributes on the class with the same name. (Enum used this between Python - versions 3.4 - 3.9 .) - - Subclass from this to use a different method of accessing virtual attributes - and still be treated properly by the inspect module. (Enum uses this since - Python 3.10 .) - - """ - fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], object] | None # type: ignore[assignment] fdel: Callable[[Any], object] | None # type: ignore[assignment] @@ -904,30 +681,19 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable @overload -def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: - """Convert regular generator function to a coroutine.""" - +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... @overload def coroutine(func: _Fn) -> _Fn: ... @disjoint_base class GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ - @property def __origin__(self) -> type | TypeAliasType: ... 
@property def __args__(self) -> tuple[Any, ...]: ... @property - def __parameters__(self) -> tuple[Any, ...]: - """Type variables in the GenericAlias.""" - + def __parameters__(self) -> tuple[Any, ...]: ... def __new__(cls, origin: type, args: Any, /) -> Self: ... - def __getitem__(self, typeargs: Any, /) -> GenericAlias: - """Return self[key].""" - + def __getitem__(self, typeargs: Any, /) -> GenericAlias: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... @@ -937,69 +703,43 @@ class GenericAlias: @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... if sys.version_info >= (3, 10): - def __or__(self, value: Any, /) -> UnionType: - """Return self|value.""" + def __or__(self, value: Any, /) -> UnionType: ... + def __ror__(self, value: Any, /) -> UnionType: ... - def __ror__(self, value: Any, /) -> UnionType: - """Return value|self.""" # GenericAlias delegates attr access to `__origin__` def __getattr__(self, name: str) -> Any: ... if sys.version_info >= (3, 10): @final class NoneType: - """The type of the None singleton.""" - - def __bool__(self) -> Literal[False]: - """True if self else False""" + def __bool__(self) -> Literal[False]: ... @final - class EllipsisType: - """The type of the Ellipsis singleton.""" + class EllipsisType: ... from builtins import _NotImplementedType NotImplementedType = _NotImplementedType @final class UnionType: - """Represent a union type - - E.g. for int | str - """ - @property def __args__(self) -> tuple[Any, ...]: ... @property - def __parameters__(self) -> tuple[Any, ...]: - """Type variables in the types.UnionType.""" + def __parameters__(self) -> tuple[Any, ...]: ... # `(int | str) | Literal["foo"]` returns a generic alias to an instance of `_SpecialForm` (`Union`). # Normally we'd express this using the return type of `_SpecialForm.__ror__`, # but because `UnionType.__or__` accepts `Any`, type checkers will use # the return type of `UnionType.__or__` to infer the result of this operation # rather than `_SpecialForm.__ror__`. To mitigate this, we use `| Any` # in the return type of `UnionType.__(r)or__`. - def __or__(self, value: Any, /) -> UnionType | Any: - """Return self|value.""" - - def __ror__(self, value: Any, /) -> UnionType | Any: - """Return value|self.""" - + def __or__(self, value: Any, /) -> UnionType | Any: ... + def __ror__(self, value: Any, /) -> UnionType | Any: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... # you can only subscript a `UnionType` instance if at least one of the elements # in the union is a generic alias instance that has a non-empty `__parameters__` - def __getitem__(self, parameters: Any) -> object: - """Return self[key].""" + def __getitem__(self, parameters: Any) -> object: ... if sys.version_info >= (3, 13): @final - class CapsuleType: - """Capsule objects let you wrap a C "void *" pointer in a Python - object. They're a way of passing data through the Python interpreter - without creating your own custom type. - - Capsules are used for communication between extension modules. - They provide a way for an extension module to export a C interface - to other extension modules, so that extension modules can use the - Python import mechanism to link to one another. - """ + class CapsuleType: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi index 2555ba39341a4..2ca65dad4562f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi @@ -1,23 +1,3 @@ -""" -The typing module: Support for gradual typing as defined by PEP 484 and subsequent PEPs. - -Among other things, the module includes the following: -* Generic, Protocol, and internal machinery to support generic aliases. - All subscripted types like X[int], Union[int, str] are generic aliases. -* Various "special forms" that have unique meanings in type annotations: - NoReturn, Never, ClassVar, Self, Concatenate, Unpack, and others. -* Classes whose instances can be type arguments to generic classes and functions: - TypeVar, ParamSpec, TypeVarTuple. -* Public helper functions: get_type_hints, overload, cast, final, and others. -* Several protocols to support duck-typing: - SupportsFloat, SupportsIndex, SupportsAbs, and others. -* Special types: NewType, NamedTuple, TypedDict. -* Deprecated aliases for builtin types and collections.abc ABCs. - -Any name not present in __all__ is an implementation detail -that may be changed without notice. Use at your own risk! -""" - # Since this module defines "overload" it is not recognized by Ruff as typing.overload # TODO: The collections import is required, otherwise mypy crashes. # https://github.com/python/mypy/issues/16744 @@ -163,97 +143,14 @@ if sys.version_info >= (3, 13): # due to an import cycle. Below instead we use Any with a comment. # from _typeshed import AnnotationForm -class Any: - """Special type indicating an unconstrained type. - - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - checks. - """ +class Any: ... class _Final: - """Mixin to prohibit subclassing.""" - __slots__ = ("__weakref__",) -def final(f: _T) -> _T: - """Decorator to indicate final methods and final classes. - - Use this decorator to indicate to type checkers that the decorated - method cannot be overridden, and decorated class cannot be subclassed. - - For example:: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. The decorator - attempts to set the ``__final__`` attribute to ``True`` on the decorated - object to allow runtime introspection. - """ - +def final(f: _T) -> _T: ... @final class TypeVar: - """Type variable. - - The preferred way to construct a type variable is via the dedicated - syntax for generic functions, classes, and type aliases:: - - class Sequence[T]: # T is a TypeVar - ... - - This syntax can also be used to create bound and constrained type - variables:: - - # S is a TypeVar bound to str - class StrSequence[S: str]: - ... - - # A is a TypeVar constrained to str or bytes - class StrOrBytesSequence[A: (str, bytes)]: - ... - - Type variables can also have defaults: - - class IntDefault[T = int]: - ... 
- - However, if desired, reusable type variables can also be constructed - manually, like so:: - - T = TypeVar('T') # Can be anything - S = TypeVar('S', bound=str) # Can be any subtype of str - A = TypeVar('A', str, bytes) # Must be exactly str or bytes - D = TypeVar('D', default=int) # Defaults to int - - Type variables exist primarily for the benefit of static type - checkers. They serve as the parameters for generic types as well - as for generic function and type alias definitions. - - The variance of type variables is inferred by type checkers when they - are created through the type parameter syntax and when - ``infer_variance=True`` is passed. Manually created type variables may - be explicitly marked covariant or contravariant by passing - ``covariant=True`` or ``contravariant=True``. By default, manually - created type variables are invariant. See PEP 484 and PEP 695 for more - details. - """ - @property def __name__(self) -> str: ... @property @@ -310,11 +207,8 @@ class TypeVar: contravariant: bool = False, ) -> None: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any, /) -> _SpecialForm: # AnnotationForm - """Return self|value.""" - - def __ror__(self, left: Any, /) -> _SpecialForm: # AnnotationForm - """Return value|self.""" + def __or__(self, right: Any, /) -> _SpecialForm: ... # AnnotationForm + def __ror__(self, left: Any, /) -> _SpecialForm: ... # AnnotationForm if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... if sys.version_info >= (3, 13): @@ -365,52 +259,11 @@ if sys.version_info >= (3, 11): @final class TypeVarTuple: - """Type variable tuple. A specialized form of type variable that enables - variadic generics. - - The preferred way to construct a type variable tuple is via the - dedicated syntax for generic functions, classes, and type aliases, - where a single '*' indicates a type variable tuple:: - - def move_first_element_to_last[T, *Ts](tup: tuple[T, *Ts]) -> tuple[*Ts, T]: - return (*tup[1:], tup[0]) - - Type variables tuples can have default values: - - type AliasWithDefault[*Ts = (str, int)] = tuple[*Ts] - - For compatibility with Python 3.11 and earlier, TypeVarTuple objects - can also be created as follows:: - - Ts = TypeVarTuple('Ts') # Can be given any name - DefaultTs = TypeVarTuple('Ts', default=(str, int)) - - Just as a TypeVar (type variable) is a placeholder for a single type, - a TypeVarTuple is a placeholder for an *arbitrary* number of types. For - example, if we define a generic class using a TypeVarTuple:: - - class C[*Ts]: ... - - Then we can parameterize that class with an arbitrary number of type - arguments:: - - C[int] # Fine - C[int, str] # Also fine - C[()] # Even this is fine - - For more details, see PEP 646. - - Note that only TypeVarTuples defined in the global scope can be - pickled. - """ - @property def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: # AnnotationForm - """The default value for this TypeVarTuple.""" - + def __default__(self) -> Any: ... # AnnotationForm def has_default(self) -> bool: ... if sys.version_info >= (3, 13): def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm @@ -419,9 +272,7 @@ if sys.version_info >= (3, 11): else: def __init__(self, name: str) -> None: ... - def __iter__(self) -> Any: - """Implement iter(self).""" - + def __iter__(self) -> Any: ... def __typing_subst__(self, arg: Never, /) -> Never: ... 
def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... if sys.version_info >= (3, 14): @@ -431,20 +282,6 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 10): @final class ParamSpecArgs: - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec:: - - >>> P = ParamSpec("P") - >>> P.args.__origin__ is P - True - - This type is meant for runtime introspection and has no special meaning - to static type checkers. - """ - @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -457,20 +294,6 @@ if sys.version_info >= (3, 10): @final class ParamSpecKwargs: - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. - - ParamSpecKwargs objects have a reference back to their ParamSpec:: - - >>> P = ParamSpec("P") - >>> P.kwargs.__origin__ is P - True - - This type is meant for runtime introspection and has no special meaning - to static type checkers. - """ - @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -483,58 +306,6 @@ if sys.version_info >= (3, 10): @final class ParamSpec: - """Parameter specification variable. - - The preferred way to construct a parameter specification is via the - dedicated syntax for generic functions, classes, and type aliases, - where the use of '**' creates a parameter specification:: - - type IntFunc[**P] = Callable[P, int] - - The following syntax creates a parameter specification that defaults - to a callable accepting two positional-only arguments of types int - and str: - - type IntFuncDefault[**P = (int, str)] = Callable[P, int] - - For compatibility with Python 3.11 and earlier, ParamSpec objects - can also be created as follows:: - - P = ParamSpec('P') - DefaultP = ParamSpec('DefaultP', default=(int, str)) - - Parameter specification variables exist primarily for the benefit of - static type checkers. They are used to forward the parameter types of - one callable to another callable, a pattern commonly found in - higher-order functions and decorators. They are only valid when used - in ``Concatenate``, or as the first argument to ``Callable``, or as - parameters for user-defined Generics. See class Generic for more - information on generic types. - - An example for annotating a decorator:: - - def add_logging[**P, T](f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner - - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y - - Parameter specification variables can be introspected. e.g.:: - - >>> P = ParamSpec("P") - >>> P.__name__ - 'P' - - Note that only parameter specification variables defined in the global - scope can be pickled. - """ - @property def __name__(self) -> str: ... @property @@ -548,8 +319,7 @@ if sys.version_info >= (3, 10): def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: # AnnotationForm - """The default value for this ParamSpec.""" + def __default__(self) -> Any: ... # AnnotationForm if sys.version_info >= (3, 13): def __new__( cls, @@ -591,21 +361,15 @@ if sys.version_info >= (3, 10): ) -> None: ... 
@property - def args(self) -> ParamSpecArgs: - """Represents positional arguments.""" - + def args(self) -> ParamSpecArgs: ... @property - def kwargs(self) -> ParamSpecKwargs: - """Represents keyword arguments.""" + def kwargs(self) -> ParamSpecKwargs: ... if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... - def __or__(self, right: Any, /) -> _SpecialForm: - """Return self|value.""" - - def __ror__(self, left: Any, /) -> _SpecialForm: - """Return value|self.""" + def __or__(self, right: Any, /) -> _SpecialForm: ... + def __ror__(self, left: Any, /) -> _SpecialForm: ... if sys.version_info >= (3, 13): def has_default(self) -> bool: ... if sys.version_info >= (3, 14): @@ -617,27 +381,6 @@ if sys.version_info >= (3, 10): TypeGuard: _SpecialForm class NewType: - """NewType creates simple unique types with almost zero runtime overhead. - - NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy callable that simply returns its argument. - - Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ - def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm if sys.version_info >= (3, 11): @staticmethod @@ -650,24 +393,7 @@ if sys.version_info >= (3, 10): __supertype__: type | NewType else: - def NewType(name: str, tp: Any) -> Any: - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: - - UserId = NewType('UserId', int) - - def name_by_id(user_id: UserId) -> str: - ... - - UserId('user') # Fails type check - - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - - num = UserId(5) + 1 # type: int - """ + def NewType(name: str, tp: Any) -> Any: ... _F = TypeVar("_F", bound=Callable[..., Any]) _P = _ParamSpec("_P") @@ -684,54 +410,9 @@ _KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. _TC = TypeVar("_TC", bound=type[object]) -def overload(func: _F) -> _F: - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. - - For example:: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload:: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - ... # implementation goes here - - The overloads for a function can be retrieved at runtime using the - get_overloads() function. - """ - -def no_type_check(arg: _F) -> _F: - """Decorator to indicate that annotations are not type hints. 
- - The argument must be a class or function; if it is a class, it - applies recursively to all methods and classes defined in that class - (but not to methods defined in its superclasses or subclasses). - - This mutates the function(s) or class(es) in place. - """ - -def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: - """Decorator to give another decorator the @no_type_check effect. - - This wraps the decorator with something that wraps the decorated - function in @no_type_check. - """ +def overload(func: _F) -> _F: ... +def no_type_check(arg: _F) -> _F: ... +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # This itself is only available during type checking def type_check_only(func_or_cls: _FT) -> _FT: ... @@ -779,78 +460,45 @@ class _ProtocolMeta(ABCMeta): # Abstract base classes. -def runtime_checkable(cls: _TC) -> _TC: - """Mark a protocol class as a runtime protocol. - - Such protocol can be used with isinstance() and issubclass(). - Raise TypeError if applied to a non-protocol class. - This allows a simple-minded structural check very similar to - one trick ponies in collections.abc such as Iterable. - - For example:: - - @runtime_checkable - class Closable(Protocol): - def close(self): ... - - assert isinstance(open('/some/file'), Closable) - - Warning: this will check only the presence of the required methods, - not their type signatures! - """ - +def runtime_checkable(cls: _TC) -> _TC: ... @runtime_checkable class SupportsInt(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __int__.""" - __slots__ = () @abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __float__.""" - __slots__ = () @abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __complex__.""" - __slots__ = () @abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __bytes__.""" - __slots__ = () @abstractmethod def __bytes__(self) -> bytes: ... @runtime_checkable class SupportsIndex(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __index__.""" - __slots__ = () @abstractmethod def __index__(self) -> int: ... @runtime_checkable class SupportsAbs(Protocol[_T_co]): - """An ABC with one abstract method __abs__ that is covariant in its return type.""" - __slots__ = () @abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): - """An ABC with one abstract method __round__ that is covariant in its return type.""" - __slots__ = () @overload @abstractmethod @@ -880,9 +528,7 @@ class Iterable(Protocol[_T_co]): @runtime_checkable class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod - def __next__(self) -> _T_co: - """Return the next item from the iterator. When exhausted, raise StopIteration""" - + def __next__(self) -> _T_co: ... def __iter__(self) -> Iterator[_T_co]: ... @runtime_checkable @@ -896,35 +542,21 @@ _ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) @runtime_checkable class Generator(Iterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra, _ReturnT_co]): - def __next__(self) -> _YieldT_co: - """Return the next item from the generator. - When exhausted, raise StopIteration. - """ - + def __next__(self) -> _YieldT_co: ... 
@abstractmethod - def send(self, value: _SendT_contra, /) -> _YieldT_co: - """Send a value into the generator. - Return next yielded value or raise StopIteration. - """ - + def send(self, value: _SendT_contra, /) -> _YieldT_co: ... @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> _YieldT_co: - """Raise an exception in the generator. - Return next yielded value or raise StopIteration. - """ - + ) -> _YieldT_co: ... @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def close(self) -> _ReturnT_co | None: - """Raise GeneratorExit inside generator.""" + def close(self) -> _ReturnT_co | None: ... else: - def close(self) -> None: - """Raise GeneratorExit inside generator.""" + def close(self) -> None: ... def __iter__(self) -> Generator[_YieldT_co, _SendT_contra, _ReturnT_co]: ... @@ -935,12 +567,10 @@ else: from contextlib import AbstractAsyncContextManager, AbstractContextManager @runtime_checkable - class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): - """An abstract base class for context managers.""" + class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ... @runtime_checkable - class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): - """An abstract base class for asynchronous context managers.""" + class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ... @runtime_checkable class Awaitable(Protocol[_T_co]): @@ -956,26 +586,17 @@ class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, __qualname__: str @abstractmethod - def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: - """Send a value into the coroutine. - Return next yielded value or raise StopIteration. - """ - + def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: ... @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> _YieldT_co: - """Raise an exception in the coroutine. - Return next yielded value or raise StopIteration. - """ - + ) -> _YieldT_co: ... @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... @abstractmethod - def close(self) -> None: - """Raise GeneratorExit inside coroutine.""" + def close(self) -> None: ... # NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. # The parameters correspond to Generator, but the 4th is the original type. @@ -996,40 +617,25 @@ class AsyncIterable(Protocol[_T_co]): @runtime_checkable class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod - def __anext__(self) -> Awaitable[_T_co]: - """Return the next item or raise StopAsyncIteration when exhausted.""" - + def __anext__(self) -> Awaitable[_T_co]: ... def __aiter__(self) -> AsyncIterator[_T_co]: ... @runtime_checkable class AsyncGenerator(AsyncIterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra]): - def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: - """Return the next item from the asynchronous generator. - When exhausted, raise StopAsyncIteration. - """ - + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... 
@abstractmethod - def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: - """Send a value into the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - + def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... @overload @abstractmethod def athrow( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> Coroutine[Any, Any, _YieldT_co]: - """Raise an exception in the asynchronous generator. - Return next yielded value or raise StopAsyncIteration. - """ - + ) -> Coroutine[Any, Any, _YieldT_co]: ... @overload @abstractmethod def athrow( self, typ: BaseException, val: None = None, tb: TracebackType | None = None, / ) -> Coroutine[Any, Any, _YieldT_co]: ... - def aclose(self) -> Coroutine[Any, Any, None]: - """Raise GeneratorExit inside coroutine.""" + def aclose(self) -> Coroutine[Any, Any, None]: ... @runtime_checkable class Container(Protocol[_T_co]): @@ -1044,12 +650,6 @@ class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): def __len__(self) -> int: ... class Sequence(Reversible[_T_co], Collection[_T_co]): - """All the operations on a read-only sequence. - - Concrete subclasses must override __new__ or __init__, - __getitem__, and __len__. - """ - @overload @abstractmethod def __getitem__(self, index: int) -> _T_co: ... @@ -1057,32 +657,15 @@ class Sequence(Reversible[_T_co], Collection[_T_co]): @abstractmethod def __getitem__(self, index: slice) -> Sequence[_T_co]: ... # Mixin methods - def index(self, value: Any, start: int = 0, stop: int = ...) -> int: - """S.index(value, [start, [stop]]) -> integer -- return first index of value. - Raises ValueError if the value is not present. - - Supporting start and stop arguments is optional, but - recommended. - """ - - def count(self, value: Any) -> int: - """S.count(value) -> integer -- return number of occurrences of value""" - + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... + def count(self, value: Any) -> int: ... def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... class MutableSequence(Sequence[_T]): - """All the operations on a read-write sequence. - - Concrete subclasses must provide __new__ or __init__, - __getitem__, __setitem__, __delitem__, __len__, and insert(). - """ - @abstractmethod - def insert(self, index: int, value: _T) -> None: - """S.insert(index, value) -- insert value before index""" - + def insert(self, index: int, value: _T) -> None: ... @overload @abstractmethod def __getitem__(self, index: int) -> _T: ... @@ -1102,58 +685,18 @@ class MutableSequence(Sequence[_T]): @abstractmethod def __delitem__(self, index: slice) -> None: ... # Mixin methods - def append(self, value: _T) -> None: - """S.append(value) -- append value to the end of the sequence""" - - def clear(self) -> None: - """S.clear() -> None -- remove all items from S""" - - def extend(self, values: Iterable[_T]) -> None: - """S.extend(iterable) -- extend sequence by appending elements from the iterable""" - - def reverse(self) -> None: - """S.reverse() -- reverse *IN PLACE*""" - - def pop(self, index: int = -1) -> _T: - """S.pop([index]) -> item -- remove and return item at index (default last). - Raise IndexError if list is empty or index is out of range. - """ - - def remove(self, value: _T) -> None: - """S.remove(value) -- remove first occurrence of value. 
- Raise ValueError if the value is not present. - """ - + def append(self, value: _T) -> None: ... + def clear(self) -> None: ... + def extend(self, values: Iterable[_T]) -> None: ... + def reverse(self) -> None: ... + def pop(self, index: int = -1) -> _T: ... + def remove(self, value: _T) -> None: ... def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... class AbstractSet(Collection[_T_co]): - """A set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__ and __len__. - - To override the comparisons (presumably for speed, as the - semantics are fixed), redefine __le__ and __ge__, - then the other operations will automatically follow suit. - """ - @abstractmethod def __contains__(self, x: object) -> bool: ... - def _hash(self) -> int: - """Compute the hash value of a set. - - Note that we don't define __hash__: not all sets are hashable. - But if you define a hashable set type, its __hash__ should - call this function. - - This must be compatible __eq__. - - All sets ought to compare equal if they contain the same - elements, regardless of how they are implemented, and - regardless of the order of the elements; so there's not much - freedom for __eq__ or __hash__. We match the algorithm used - by the built-in frozenset type. - """ + def _hash(self) -> int: ... # Mixin methods def __le__(self, other: AbstractSet[Any]) -> bool: ... def __lt__(self, other: AbstractSet[Any]) -> bool: ... @@ -1164,38 +707,17 @@ class AbstractSet(Collection[_T_co]): def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... def __eq__(self, other: object) -> bool: ... - def isdisjoint(self, other: Iterable[Any]) -> bool: - """Return True if two sets have a null intersection.""" + def isdisjoint(self, other: Iterable[Any]) -> bool: ... class MutableSet(AbstractSet[_T]): - """A mutable set is a finite, iterable container. - - This class provides concrete generic implementations of all - methods except for __contains__, __iter__, __len__, - add(), and discard(). - - To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and - then the other operations will automatically follow suit. - """ - @abstractmethod - def add(self, value: _T) -> None: - """Add an element.""" - + def add(self, value: _T) -> None: ... @abstractmethod - def discard(self, value: _T) -> None: - """Remove an element. Do not raise an exception if absent.""" + def discard(self, value: _T) -> None: ... # Mixin methods - def clear(self) -> None: - """This is slow (creates N new iterators!) but effective.""" - - def pop(self) -> _T: - """Return the popped value. Raise KeyError if empty.""" - - def remove(self, value: _T) -> None: - """Remove an element. If not a member, raise a KeyError.""" - + def clear(self) -> None: ... + def pop(self) -> _T: ... + def remove(self, value: _T) -> None: ... def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] @@ -1238,68 +760,36 @@ class ValuesView(MappingView, Collection[_VT_co]): def __iter__(self) -> Iterator[_VT_co]: ... class Mapping(Collection[_KT], Generic[_KT, _VT_co]): - """A Mapping is a generic container for associating key/value - pairs. 
- - This class provides concrete generic implementations of all - methods except for __getitem__, __iter__, and __len__. - """ - # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, key: _KT, /) -> _VT_co: ... # Mixin methods @overload - def get(self, key: _KT, /) -> _VT_co | None: - """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.""" - + def get(self, key: _KT, /) -> _VT_co | None: ... @overload def get(self, key: _KT, /, default: _VT_co) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload def get(self, key: _KT, /, default: _T) -> _VT_co | _T: ... - def items(self) -> ItemsView[_KT, _VT_co]: - """D.items() -> a set-like object providing a view on D's items""" - - def keys(self) -> KeysView[_KT]: - """D.keys() -> a set-like object providing a view on D's keys""" - - def values(self) -> ValuesView[_VT_co]: - """D.values() -> an object providing a view on D's values""" - + def items(self) -> ItemsView[_KT, _VT_co]: ... + def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... def __contains__(self, key: object, /) -> bool: ... def __eq__(self, other: object, /) -> bool: ... class MutableMapping(Mapping[_KT, _VT]): - """A MutableMapping is a generic container for associating - key/value pairs. - - This class provides concrete generic implementations of all - methods except for __getitem__, __setitem__, __delitem__, - __iter__, and __len__. - """ - @abstractmethod def __setitem__(self, key: _KT, value: _VT, /) -> None: ... @abstractmethod def __delitem__(self, key: _KT, /) -> None: ... - def clear(self) -> None: - """D.clear() -> None. Remove all items from D.""" - + def clear(self) -> None: ... @overload - def pop(self, key: _KT, /) -> _VT: - """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. - If key is not found, d is returned if given, otherwise KeyError is raised. - """ - + def pop(self, key: _KT, /) -> _VT: ... @overload def pop(self, key: _KT, /, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, /, default: _T) -> _VT | _T: ... - def popitem(self) -> tuple[_KT, _VT]: - """D.popitem() -> (k, v), remove and return some (key, value) pair - as a 2-tuple; but raise KeyError if D is empty. - """ + def popitem(self) -> tuple[_KT, _VT]: ... # This overload should be allowed only if the value type is compatible with None. # # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: @@ -1307,9 +797,7 @@ class MutableMapping(Mapping[_KT, _VT]): # -- collections.ChainMap.setdefault # -- weakref.WeakKeyDictionary.setdefault @overload - def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: - """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D""" - + def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT, /) -> _VT: ... # 'update' used to take a Union, but using overloading is better. @@ -1333,13 +821,7 @@ class MutableMapping(Mapping[_KT, _VT]): # -- weakref.WeakValueDictionary.__ior__ # -- weakref.WeakKeyDictionary.__ior__ @overload - def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: - """D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. 
- If E present and has a .keys() method, does: for k in E.keys(): D[k] = E[k] - If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v - In either case, this is followed by: for k, v in F.items(): D[k] = v - """ - + def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... @overload def update(self: SupportsGetItem[str, _VT], m: SupportsKeysAndGetItem[str, _VT], /, **kwargs: _VT) -> None: ... @overload @@ -1357,18 +839,6 @@ TYPE_CHECKING: Final[bool] # This differs from runtime, but better reflects the fact that in reality # classes deriving from IO use different names for the arguments. class IO(Generic[AnyStr]): - """Generic base class for TextIO and BinaryIO. - - This is an abstract, generic version of the return of open(). - - NOTE: This does not distinguish between the different possible - classes (text vs. binary, read vs. write vs. read/write, - append-only, unbuffered). The TextIO and BinaryIO subclasses - below capture the distinctions between text vs. binary, which is - pervasive in the interface; however we currently do not offer a - way to track the other distinctions in the type system. - """ - # At runtime these are all abstract properties, # but making them abstract in the stub is hugely disruptive, for not much gain. # See #8726 @@ -1431,15 +901,11 @@ class IO(Generic[AnyStr]): ) -> None: ... class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode.""" - __slots__ = () @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): - """Typed version of the return of open() in text mode.""" - # See comment regarding the @properties in the `IO` class __slots__ = () @property @@ -1479,37 +945,7 @@ if sys.version_info >= (3, 14): include_extras: bool = False, *, format: Format | None = None, - ) -> dict[str, Any]: # AnnotationForm - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. For classes, the search - order is globals first then locals. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ + ) -> dict[str, Any]: ... # AnnotationForm else: def get_type_hints( @@ -1517,171 +953,33 @@ else: globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False, - ) -> dict[str, Any]: # AnnotationForm - """Return type hints for an object. 
- - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. For classes, the search - order is globals first then locals. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - -def get_args(tp: Any) -> tuple[Any, ...]: # AnnotationForm - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. - - Examples:: + ) -> dict[str, Any]: ... # AnnotationForm - >>> T = TypeVar('T') - >>> assert get_args(Dict[str, int]) == (str, int) - >>> assert get_args(int) == () - >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) - >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - >>> assert get_args(Callable[[], T][int]) == ([], int) - """ +def get_args(tp: Any) -> tuple[Any, ...]: ... # AnnotationForm if sys.version_info >= (3, 10): @overload - def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - + def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload def get_origin(tp: UnionType) -> type[UnionType]: ... @overload -def get_origin(tp: GenericAlias) -> type: - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - +def get_origin(tp: GenericAlias) -> type: ... @overload def get_origin(tp: Any) -> Any | None: ... 
# AnnotationForm @overload -def cast(typ: type[_T], val: Any) -> _T: - """Cast a value to a type. - - This returns the value unchanged. To the type checker this - signals that the return value has the designated type, but at - runtime we intentionally don't check anything (we want this - to be as fast as possible). - """ - +def cast(typ: type[_T], val: Any) -> _T: ... @overload def cast(typ: str, val: Any) -> Any: ... @overload def cast(typ: object, val: Any) -> Any: ... if sys.version_info >= (3, 11): - def reveal_type(obj: _T, /) -> _T: - """Ask a static type checker to reveal the inferred type of an expression. - - When a static type checker encounters a call to ``reveal_type()``, - it will emit the inferred type of the argument:: - - x: int = 1 - reveal_type(x) - - Running a static type checker (e.g., mypy) on this example - will produce output similar to 'Revealed type is "builtins.int"'. - - At runtime, the function prints the runtime type of the - argument and returns the argument unchanged. - """ - - def assert_never(arg: Never, /) -> Never: - """Statically assert that a line of code is unreachable. - - Example:: - - def int_or_str(arg: int | str) -> None: - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - assert_never(arg) - - If a type checker finds that a call to assert_never() is - reachable, it will emit an error. - - At runtime, this throws an exception when called. - """ - - def assert_type(val: _T, typ: Any, /) -> _T: # AnnotationForm - """Ask a static type checker to confirm that the value is of the given type. - - At runtime this does nothing: it returns the first argument unchanged with no - checks or side effects, no matter the actual type of the argument. - - When a static type checker encounters a call to assert_type(), it - emits an error if the value is not of the specified type:: - - def greet(name: str) -> None: - assert_type(name, str) # OK - assert_type(name, int) # type checker error - """ - - def clear_overloads() -> None: - """Clear all overloads in the registry.""" - - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: - """Return all defined overloads for *func* as a sequence.""" - + def reveal_type(obj: _T, /) -> _T: ... + def assert_never(arg: Never, /) -> Never: ... + def assert_type(val: _T, typ: Any, /) -> _T: ... # AnnotationForm + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... def dataclass_transform( *, eq_default: bool = True, @@ -1690,94 +988,12 @@ if sys.version_info >= (3, 11): frozen_default: bool = False, # on 3.11, runtime accepts it as part of kwargs field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: Any, - ) -> IdentityFunction: - """Decorator to mark an object as providing dataclass-like behaviour. - - The decorator can be applied to a function, class, or metaclass. - - Example usage with a decorator function:: - - @dataclass_transform() - def create_model[T](cls: type[T]) -> type[T]: - ... - return cls - - @create_model - class CustomerModel: - id: int - name: str - - On a base class:: - - @dataclass_transform() - class ModelBase: ... - - class CustomerModel(ModelBase): - id: int - name: str - - On a metaclass:: - - @dataclass_transform() - class ModelMeta(type): ... - - class ModelBase(metaclass=ModelMeta): ... 
- - class CustomerModel(ModelBase): - id: int - name: str - - The ``CustomerModel`` classes defined above will - be treated by type checkers similarly to classes created with - ``@dataclasses.dataclass``. - For example, type checkers will assume these classes have - ``__init__`` methods that accept ``id`` and ``name``. - - The arguments to this decorator can be used to customize this behavior: - - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be - ``True`` or ``False`` if it is omitted by the caller. - - ``order_default`` indicates whether the ``order`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``kw_only_default`` indicates whether the ``kw_only`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``field_specifiers`` specifies a static list of supported classes - or functions that describe fields, similar to ``dataclasses.field()``. - - Arbitrary other keyword arguments are accepted in order to allow for - possible future extensions. - - At runtime, this decorator records its arguments in the - ``__dataclass_transform__`` attribute on the decorated object. - It has no other runtime effect. - - See PEP 681 for more details. - """ + ) -> IdentityFunction: ... # Type constructors # Obsolete, will be changed to a function. Use _typeshed._type_checker_internals.NamedTupleFallback instead. class NamedTuple(tuple[Any, ...]): - """Typed version of namedtuple. - - Usage:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has an extra __annotations__ attribute, giving a - dict that maps field names to types. (The field names are also in - the _fields attribute, which is part of the namedtuple API.) - An alternative equivalent functional syntax is also accepted:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] # __orig_bases__ sometimes exists on <3.12, but not consistently @@ -1827,15 +1043,11 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... @overload - def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: - """Return self|value.""" - + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload - def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: - """Return value|self.""" - + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of __or__ and __ior__ @@ -1852,32 +1064,11 @@ if sys.version_info >= (3, 14): locals: Mapping[str, Any] | None = None, type_params: tuple[TypeVar, ParamSpec, TypeVarTuple] | None = None, format: Format | None = None, - ) -> Any: # AnnotationForm - """Evaluate a forward reference as a type hint. - - This is similar to calling the ForwardRef.evaluate() method, - but unlike that method, evaluate_forward_ref() also - recursively evaluates forward references nested within the type hint. 
- - *forward_ref* must be an instance of ForwardRef. *owner*, if given, - should be the object that holds the annotations that the forward reference - derived from, such as a module, class object, or function. It is used to - infer the namespaces to use for looking up names. *globals* and *locals* - can also be explicitly given to provide the global and local namespaces. - *type_params* is a tuple of type parameters that are in scope when - evaluating the forward reference. This parameter should be provided (though - it may be an empty tuple) if *owner* is not given and the forward reference - does not already have an owner set. *format* specifies the format of the - annotation and is a member of the annotationlib.Format enum, defaulting to - VALUE. - - """ + ) -> Any: ... # AnnotationForm else: @final class ForwardRef(_Final): - """Internal wrapper to hold a forward reference.""" - __slots__ = ( "__forward_arg__", "__forward_code__", @@ -1937,89 +1128,28 @@ else: def __ror__(self, other: Any) -> _SpecialForm: ... if sys.version_info >= (3, 10): - def is_typeddict(tp: object) -> bool: - """Check if an annotation is a TypedDict class. - - For example:: - - >>> from typing import TypedDict - >>> class Film(TypedDict): - ... title: str - ... year: int - ... - >>> is_typeddict(Film) - True - >>> is_typeddict(dict) - False - """ - -def _type_repr(obj: object) -> str: - """Return the repr() of an object, special-casing types (internal helper). - - If obj is a type, we return a shorter version than the default - type.__repr__, based on the module and qualified name, which is - typically enough to uniquely identify a type. For everything - else, we fall back on repr(obj). - """ - -if sys.version_info >= (3, 12): - def override(method: _F, /) -> _F: - """Indicate that a method is intended to override a method in a base class. - - Usage:: + def is_typeddict(tp: object) -> bool: ... - class Base: - def method(self) -> None: - pass +def _type_repr(obj: object) -> str: ... - class Child(Base): - @override - def method(self) -> None: - super().method() - - When this decorator is applied to a method, the type checker will - validate that it overrides a method or attribute with the same name on a - base class. This helps prevent bugs that may occur when a base class is - changed without an equivalent change to a child class. - - There is no runtime checking of this property. The decorator attempts to - set the ``__override__`` attribute to ``True`` on the decorated object to - allow runtime introspection. - - See PEP 698 for details. - """ +if sys.version_info >= (3, 12): + _TypeParameter: typing_extensions.TypeAlias = ( + TypeVar + | typing_extensions.TypeVar + | ParamSpec + | typing_extensions.ParamSpec + | TypeVarTuple + | typing_extensions.TypeVarTuple + ) + def override(method: _F, /) -> _F: ... @final class TypeAliasType: - """Type alias. - - Type aliases are created through the type statement:: - - type Alias = int - - In this example, Alias and int will be treated equivalently by static - type checkers. - - At runtime, Alias is an instance of TypeAliasType. The __name__ - attribute holds the name of the type alias. The value of the type alias - is stored in the __value__ attribute. It is evaluated lazily, so the - value is computed only if the attribute is accessed. - - Type aliases can also be generic:: - - type ListOrSet[T] = list[T] | set[T] - - In this case, the type parameters of the alias are stored in the - __type_params__ attribute. - - See PEP 695 for more information. 
- """ - - def __new__(cls, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = ()) -> Self: ... + def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ... @property def __value__(self) -> Any: ... # AnnotationForm @property - def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... + def __type_params__(self) -> tuple[_TypeParameter, ...]: ... @property def __parameters__(self) -> tuple[Any, ...]: ... # AnnotationForm @property @@ -2027,49 +1157,16 @@ if sys.version_info >= (3, 12): # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] - def __getitem__(self, parameters: Any, /) -> GenericAlias: # AnnotationForm - """Return self[key].""" - - def __or__(self, right: Any, /) -> _SpecialForm: - """Return self|value.""" - - def __ror__(self, left: Any, /) -> _SpecialForm: - """Return value|self.""" + def __getitem__(self, parameters: Any, /) -> GenericAlias: ... # AnnotationForm + def __or__(self, right: Any, /) -> _SpecialForm: ... + def __ror__(self, left: Any, /) -> _SpecialForm: ... if sys.version_info >= (3, 14): @property def evaluate_value(self) -> EvaluateFunc: ... if sys.version_info >= (3, 13): - def is_protocol(tp: type, /) -> bool: - """Return True if the given type is a Protocol. - - Example:: - - >>> from typing import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False - """ - - def get_protocol_members(tp: type, /) -> frozenset[str]: - """Return the set of members defined in a Protocol. - - Example:: - - >>> from typing import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) == frozenset({'a', 'b'}) - True - - Raise a TypeError for arguments that are not Protocols. - """ - + def is_protocol(tp: type, /) -> bool: ... + def get_protocol_members(tp: type, /) -> frozenset[str]: ... @final @type_check_only class _NoDefaultType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi index bc5347a4b397f..5fd3f4578a8bd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi @@ -214,78 +214,14 @@ _T_contra = _TypeVar("_T_contra", contravariant=True) # on older versions of Python. Protocol: _SpecialForm -def runtime_checkable(cls: _TC) -> _TC: - """Mark a protocol class as a runtime protocol. - - Such protocol can be used with isinstance() and issubclass(). - Raise TypeError if applied to a non-protocol class. - This allows a simple-minded structural check very similar to - one trick ponies in collections.abc such as Iterable. - - For example:: - - @runtime_checkable - class Closable(Protocol): - def close(self): ... - - assert isinstance(open('/some/file'), Closable) - - Warning: this will check only the presence of the required methods, - not their type signatures! - """ +def runtime_checkable(cls: _TC) -> _TC: ... # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable Final: _SpecialForm -def final(f: _F) -> _F: - """Decorator to indicate final methods and final classes. 
- - Use this decorator to indicate to type checkers that the decorated - method cannot be overridden, and decorated class cannot be subclassed. - - For example:: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. The decorator - attempts to set the ``__final__`` attribute to ``True`` on the decorated - object to allow runtime introspection. - """ - -def disjoint_base(cls: _TC) -> _TC: - """This decorator marks a class as a disjoint base. - - Child classes of a disjoint base cannot inherit from other disjoint bases that are - not parent classes of the disjoint base. - - For example: - - @disjoint_base - class Disjoint1: pass - - @disjoint_base - class Disjoint2: pass - - class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error - - Type checkers can use knowledge of disjoint bases to detect unreachable code - and determine when two types can overlap. - - See PEP 800. - """ +def final(f: _F) -> _F: ... +def disjoint_base(cls: _TC) -> _TC: ... Literal: _SpecialForm @@ -303,7 +239,7 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __readonly_keys__: ClassVar[frozenset[str]] __mutable_keys__: ClassVar[frozenset[str]] # PEP 728 - __closed__: ClassVar[bool] + __closed__: ClassVar[bool | None] __extra_items__: ClassVar[AnnotationForm] def copy(self) -> Self: ... # Using Never so that only calls using mypy plugin hook that specialize the signature @@ -317,15 +253,11 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... @overload - def __or__(self, value: Self, /) -> Self: - """Return self|value.""" - + def __or__(self, value: Self, /) -> Self: ... @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload - def __ror__(self, value: Self, /) -> Self: - """Return value|self.""" - + def __ror__(self, value: Self, /) -> Self: ... @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of `__ior__` and `__or__`: @@ -339,95 +271,16 @@ if sys.version_info >= (3, 13): else: def get_type_hints( obj: Any, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False - ) -> dict[str, AnnotationForm]: - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' - (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. 
If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - -def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. - - Examples:: + ) -> dict[str, AnnotationForm]: ... - >>> T = TypeVar('T') - >>> assert get_args(Dict[str, int]) == (str, int) - >>> assert get_args(int) == () - >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) - >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - >>> assert get_args(Callable[[], T][int]) == ([], int) - """ +def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... if sys.version_info >= (3, 10): @overload - def get_origin(tp: UnionType) -> type[UnionType]: - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ + def get_origin(tp: UnionType) -> type[UnionType]: ... @overload -def get_origin(tp: GenericAlias) -> type: - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, - Annotated, and others. Return None for unsupported types. - - Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P - """ - +def get_origin(tp: GenericAlias) -> type: ... @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload @@ -449,36 +302,12 @@ if sys.version_info >= (3, 10): else: @final class ParamSpecArgs: - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec: - - P.args.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... @final class ParamSpecKwargs: - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. - - ParamSpecKwargs objects have a reference back to their ParamSpec: - - P.kwargs.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... 
@@ -486,17 +315,7 @@ else: Concatenate: _SpecialForm TypeAlias: _SpecialForm TypeGuard: _SpecialForm - def is_typeddict(tp: object) -> bool: - """Check if an annotation is a TypedDict class - - For example:: - class Film(TypedDict): - title: str - year: int - - is_typeddict(Film) # => True - is_typeddict(Union[list, str]) # => False - """ + def is_typeddict(tp: object) -> bool: ... # New and changed things in 3.11 if sys.version_info >= (3, 11): @@ -519,63 +338,12 @@ if sys.version_info >= (3, 11): else: Self: _SpecialForm Never: _SpecialForm - def reveal_type(obj: _T, /) -> _T: - """Reveal the inferred type of a variable. - - When a static type checker encounters a call to ``reveal_type()``, - it will emit the inferred type of the argument:: - - x: int = 1 - reveal_type(x) - - Running a static type checker (e.g., ``mypy``) on this example - will produce output similar to 'Revealed type is "builtins.int"'. - - At runtime, the function prints the runtime type of the - argument and returns it unchanged. - - """ - - def assert_never(arg: Never, /) -> Never: - """Assert to the type checker that a line of code is unreachable. - - Example:: - - def int_or_str(arg: int | str) -> None: - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - assert_never(arg) - - If a type checker finds that a call to assert_never() is - reachable, it will emit an error. - - At runtime, this throws an exception when called. - - """ - - def assert_type(val: _T, typ: AnnotationForm, /) -> _T: - """Assert (to the type checker) that the value is of the given type. - - When the type checker encounters a call to assert_type(), it - emits an error if the value is not of the specified type:: - - def greet(name: str) -> None: - assert_type(name, str) # ok - assert_type(name, int) # type checker error - - At runtime this returns the first argument unchanged and otherwise - does nothing. - """ - - def clear_overloads() -> None: - """Clear all overloads in the registry.""" + def reveal_type(obj: _T, /) -> _T: ... + def assert_never(arg: Never, /) -> Never: ... + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: - """Return all defined overloads for *func* as a sequence.""" Required: _SpecialForm NotRequired: _SpecialForm LiteralString: _SpecialForm @@ -589,90 +357,9 @@ else: frozen_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: object, - ) -> IdentityFunction: - """Decorator that marks a function, class, or metaclass as providing - dataclass-like behavior. - - Example: - - from typing_extensions import dataclass_transform - - _T = TypeVar("_T") - - # Used on a decorator function - @dataclass_transform() - def create_model(cls: type[_T]) -> type[_T]: - ... - return cls - - @create_model - class CustomerModel: - id: int - name: str - - # Used on a base class - @dataclass_transform() - class ModelBase: ... - - class CustomerModel(ModelBase): - id: int - name: str - - # Used on a metaclass - @dataclass_transform() - class ModelMeta(type): ... - - class ModelBase(metaclass=ModelMeta): ... - - class CustomerModel(ModelBase): - id: int - name: str - - Each of the ``CustomerModel`` classes defined in this example will now - behave similarly to a dataclass created with the ``@dataclasses.dataclass`` - decorator. 
For example, the type checker will synthesize an ``__init__`` - method. - - The arguments to this decorator can be used to customize this behavior: - - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be - True or False if it is omitted by the caller. - - ``order_default`` indicates whether the ``order`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``kw_only_default`` indicates whether the ``kw_only`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``field_specifiers`` specifies a static list of supported classes - or functions that describe fields, similar to ``dataclasses.field()``. - - At runtime, this decorator records its arguments in the - ``__dataclass_transform__`` attribute on the decorated object. - - See PEP 681 for details. - - """ + ) -> IdentityFunction: ... class NamedTuple(tuple[Any, ...]): - """Typed version of namedtuple. - - Usage:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has an extra __annotations__ attribute, giving a - dict that maps field names to types. (The field names are also in - the _fields attribute, which is part of the namedtuple API.) - An alternative equivalent functional syntax is also accepted:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] __orig_bases__: ClassVar[tuple[Any, ...]] @@ -686,19 +373,6 @@ else: def _replace(self, **kwargs: Any) -> Self: ... class NewType: - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy callable that simply returns its argument. Usage:: - UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... - UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - num = UserId(5) + 1 # type: int - """ - def __init__(self, name: str, tp: AnnotationForm) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType @@ -720,110 +394,38 @@ if sys.version_info >= (3, 12): override as override, ) else: - def override(arg: _F, /) -> _F: - """Indicate that a method is intended to override a method in a base class. - - Usage: - - class Base: - def method(self) -> None: - pass - - class Child(Base): - @override - def method(self) -> None: - super().method() - - When this decorator is applied to a method, the type checker will - validate that it overrides a method with the same name on a base class. - This helps prevent bugs that may occur when a base class is changed - without an equivalent change to a child class. - - There is no runtime checking of these properties. The decorator - sets the ``__override__`` attribute to ``True`` on the decorated object - to allow runtime introspection. - - See PEP 698 for details. - - """ - - def get_original_bases(cls: type, /) -> tuple[Any, ...]: - """Return the class's "original" bases prior to modification by `__mro_entries__`. - - Examples:: - - from typing import TypeVar, Generic - from typing_extensions import NamedTuple, TypedDict + def override(arg: _F, /) -> _F: ... 
+ def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... - Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) - - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) - """ # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @runtime_checkable class Buffer(Protocol, abc.ABC): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """Base class for classes that implement the buffer protocol. - - The buffer protocol allows Python objects to expose a low-level - memory buffer interface. Before Python 3.12, it is not possible - to implement the buffer protocol in pure Python code, or even - to check whether a class implements the buffer protocol. In - Python 3.12 and higher, the ``__buffer__`` method allows access - to the buffer protocol from Python code, and the - ``collections.abc.Buffer`` ABC allows checking whether a class - implements the buffer protocol. - - To indicate support for the buffer protocol in earlier versions, - inherit from this ABC, either in a stub file or at runtime, - or use ABC registration. This ABC provides no methods, because - there is no Python-accessible methods shared by pre-3.12 buffer - classes. It is useful primarily for static checks. - - """ - # Not actually a Protocol at runtime; see # https://github.com/python/typeshed/issues/10224 for why we're defining it this way def __buffer__(self, flags: int, /) -> memoryview: ... @runtime_checkable class SupportsInt(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __int__.""" - __slots__ = () @abc.abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __float__.""" - __slots__ = () @abc.abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __complex__.""" - __slots__ = () @abc.abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __bytes__.""" - __slots__ = () @abc.abstractmethod def __bytes__(self) -> bytes: ... @@ -836,20 +438,12 @@ else: @runtime_checkable class SupportsAbs(Protocol[_T_co]): - """ - An ABC with one abstract method __abs__ that is covariant in its return type. - """ - __slots__ = () @abc.abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): - """ - An ABC with one abstract method __round__ that is covariant in its return type. - """ - __slots__ = () @overload @abc.abstractmethod @@ -863,31 +457,15 @@ if sys.version_info >= (3, 14): else: @runtime_checkable class Reader(Protocol[_T_co]): - """Protocol for simple I/O reader instances. - - This protocol only supports blocking I/O. - """ - __slots__ = () @abc.abstractmethod - def read(self, size: int = ..., /) -> _T_co: - """Read data from the input stream and return it. - - If *size* is specified, at most *size* items (bytes/characters) will be - read. 
- """ + def read(self, size: int = ..., /) -> _T_co: ... @runtime_checkable class Writer(Protocol[_T_contra]): - """Protocol for simple I/O writer instances. - - This protocol only supports blocking I/O. - """ - __slots__ = () @abc.abstractmethod - def write(self, data: _T_contra, /) -> int: - """Write *data* to the output stream and return the number of items written.""" + def write(self, data: _T_contra, /) -> int: ... if sys.version_info >= (3, 13): from types import CapsuleType as CapsuleType @@ -903,96 +481,17 @@ if sys.version_info >= (3, 13): ) from warnings import deprecated as deprecated else: - def is_protocol(tp: type, /) -> bool: - """Return True if the given type is a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False - """ - - def get_protocol_members(tp: type, /) -> frozenset[str]: - """Return the set of members defined in a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) - frozenset({'a', 'b'}) - - Raise a TypeError for arguments that are not Protocols. - """ - + def is_protocol(tp: type, /) -> bool: ... + def get_protocol_members(tp: type, /) -> frozenset[str]: ... @final @type_check_only class _NoDefaultType: ... NoDefault: _NoDefaultType @final - class CapsuleType: - """Capsule objects let you wrap a C "void *" pointer in a Python - object. They're a way of passing data through the Python interpreter - without creating your own custom type. - - Capsules are used for communication between extension modules. - They provide a way for an extension module to export a C interface - to other extension modules, so that extension modules can use the - Python import mechanism to link to one another. - """ + class CapsuleType: ... class deprecated: - """Indicate that a class, function or overload is deprecated. - - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. - - Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... - - The warning specified by *category* will be emitted at runtime - on use of deprecated objects. For functions, that happens on calls; - for classes, on instantiation and on creation of subclasses. - If the *category* is ``None``, no warning is emitted at runtime. - The *stacklevel* determines where the - warning is emitted. If it is ``1`` (the default), the warning - is emitted at the direct caller of the deprecated object; if it - is higher, it is emitted further up the stack. - Static type checker behavior is not affected by the *category* - and *stacklevel* arguments. - - The deprecation message passed to the decorator is saved in the - ``__deprecated__`` attribute on the decorated object. - If applied to an overload, the decorator - must be after the ``@overload`` decorator for the attribute to - exist on the overload as returned by ``get_overloads()``. - - See PEP 702 for details. - - """ - message: LiteralString category: type[Warning] | None stacklevel: int @@ -1001,8 +500,6 @@ else: @final class TypeVar: - """Type variable.""" - @property def __name__(self) -> str: ... 
@property @@ -1030,18 +527,13 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: - """Return self|value.""" - - def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self.""" + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... @final class ParamSpec: - """Parameter specification.""" - @property def __name__(self) -> str: ... @property @@ -1070,16 +562,11 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: - """Return self|value.""" - - def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self.""" + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... @final class TypeVarTuple: - """Type variable tuple.""" - @property def __name__(self) -> str: ... @property @@ -1098,33 +585,6 @@ if sys.version_info >= (3, 14): else: @final class TypeAliasType: - """Create named, parameterized type aliases. - - This provides a backport of the new `type` statement in Python 3.12: - - type ListOrSet[T] = list[T] | set[T] - - is equivalent to: - - T = TypeVar("T") - ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) - - The name ListOrSet can then be used as an alias for the type it refers to. - - The type_params argument should contain all the type parameters used - in the value of the type alias. If the alias is not generic, this - argument is omitted. - - Static type checkers should only support type aliases declared using - TypeAliasType that follow these rules: - - - The first argument (the name) must be a string literal. - - The TypeAliasType instance must be immediately assigned to a variable - of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, - as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). - - """ - def __init__( self, name: str, value: AnnotationForm, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () ) -> None: ... @@ -1140,18 +600,7 @@ else: def __name__(self) -> str: ... # It's writable on types, but not on instances of TypeAliasType. @property - def __module__(self) -> str | None: # type: ignore[override] - """str(object='') -> str - str(bytes_or_buffer[, encoding[, errors]]) -> str - - Create a new string object from the given object. If encoding or - errors is specified, then the object must expose a data buffer - that will be decoded using the given encoding and error handler. - Otherwise, returns the result of object.__str__() (if defined) - or repr(object). - encoding defaults to 'utf-8'. - errors defaults to 'strict'. - """ + def __module__(self) -> str | None: ... # type: ignore[override] # Returns typing._GenericAlias, which isn't stubbed. def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> AnnotationForm: ... def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... @@ -1161,23 +610,6 @@ else: # PEP 727 class Doc: - """Define the documentation of a type annotation using ``Annotated``, to be - used in class attributes, function and method parameters, return values, - and variables. 
- - The value should be a positional-only string literal to allow static tools - like editors and documentation generators to use it. - - This complements docstrings. - - The string value passed is available in the attribute ``documentation``. - - Example:: - - >>> from typing_extensions import Annotated, Doc - >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... - """ - documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... @@ -1199,8 +631,6 @@ if sys.version_info >= (3, 14): from annotationlib import Format as Format, get_annotations as get_annotations, type_repr as type_repr else: class Format(enum.IntEnum): - """An enumeration.""" - VALUE = 1 VALUE_WITH_FAKE_GLOBALS = 2 FORWARDREF = 3 @@ -1214,43 +644,7 @@ else: locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, format: Literal[Format.STRING], - ) -> dict[str, str]: - """Compute the annotations dict for an object. - - obj may be a callable, class, or module. - Passing in an object of any other type raises TypeError. - - Returns a dict. get_annotations() returns a new dict every time - it's called; calling it twice on the same object will return two - different but equivalent dicts. - - This is a backport of `inspect.get_annotations`, which has been - in the standard library since Python 3.10. See the standard library - documentation for more: - - https://docs.python.org/3/library/inspect.html#inspect.get_annotations - - This backport adds the *format* argument introduced by PEP 649. The - three formats supported are: - * VALUE: the annotations are returned as-is. This is the default and - it is compatible with the behavior on previous Python versions. - * FORWARDREF: return annotations as-is if possible, but replace any - undefined names with ForwardRef objects. The implementation proposed by - PEP 649 relies on language changes that cannot be backported; the - typing-extensions implementation simply returns the same result as VALUE. - * STRING: return annotations as strings, in a format close to the original - source. Again, this behavior cannot be replicated directly in a backport. - As an approximation, typing-extensions retrieves the annotations under - VALUE semantics and then stringifies them. - - The purpose of this backport is to allow users who would like to use - FORWARDREF or STRING semantics once PEP 649 is implemented, but who also - want to support earlier Python versions, to simply write: - - typing_extensions.get_annotations(obj, format=Format.FORWARDREF) - - """ - + ) -> dict[str, str]: ... @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -1279,30 +673,7 @@ else: type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, format: Literal[Format.STRING], _recursive_guard: Container[str] = ..., - ) -> str: - """Evaluate a forward reference as a type hint. - - This is similar to calling the ForwardRef.evaluate() method, - but unlike that method, evaluate_forward_ref() also: - - * Recursively evaluates forward references nested within the type hint. - * Rejects certain objects that are not valid type hints. - * Replaces type hints that evaluate to None with types.NoneType. - * Supports the *FORWARDREF* and *STRING* formats. - - *forward_ref* must be an instance of ForwardRef. *owner*, if given, - should be the object that holds the annotations that the forward reference - derived from, such as a module, class object, or function. 
It is used to - infer the namespaces to use for looking up names. *globals* and *locals* - can also be explicitly given to provide the global and local namespaces. - *type_params* is a tuple of type parameters that are in scope when - evaluating the forward reference. This parameter must be provided (though - it may be an empty tuple) if *owner* is not given and the forward reference - does not already have an owner set. *format* specifies the format of the - annotation and is a member of the annotationlib.Format enum. - - """ - + ) -> str: ... @overload def evaluate_forward_ref( forward_ref: ForwardRef, @@ -1325,25 +696,10 @@ else: format: Format | None = None, _recursive_guard: Container[str] = ..., ) -> AnnotationForm: ... - def type_repr(value: object) -> str: - """Convert a Python value to a format suitable for use with the STRING format. - - This is intended as a helper for tools that support the STRING format but do - not have access to the code that originally produced the annotations. It uses - repr() for most objects. - - """ + def type_repr(value: object) -> str: ... # PEP 661 class Sentinel: - """Create a unique sentinel object. - - *name* should be the name of the variable to which the return value shall be assigned. - - *repr*, if supplied, will be used for the repr of the sentinel object. - If not provided, "" will be used. - """ - def __init__(self, name: str, repr: str | None = None) -> None: ... if sys.version_info >= (3, 14): def __or__(self, other: Any) -> UnionType: ... # other can be any type form legal for unions diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi index f0ef68d8478f8..9fff042f0b964 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi @@ -1,12 +1,3 @@ -"""This module provides access to the Unicode Character Database which -defines character properties for all Unicode characters. The data in -this database is based on the UnicodeData.txt file version -16.0.0 which is publicly available from ftp://ftp.unicode.org/. - -The module uses the same names and symbols as defined by the -UnicodeData File Format 16.0.0. -""" - import sys from _typeshed import ReadOnlyBuffer from typing import Any, Final, Literal, TypeVar, final, overload @@ -22,100 +13,32 @@ _T = TypeVar("_T") _NormalizationForm: TypeAlias = Literal["NFC", "NFD", "NFKC", "NFKD"] -def bidirectional(chr: str, /) -> str: - """Returns the bidirectional class assigned to the character chr as string. - - If no such value is defined, an empty string is returned. - """ - -def category(chr: str, /) -> str: - """Returns the general category assigned to the character chr as string.""" - -def combining(chr: str, /) -> int: - """Returns the canonical combining class assigned to the character chr as integer. - - Returns 0 if no combining class is defined. - """ - +def bidirectional(chr: str, /) -> str: ... +def category(chr: str, /) -> str: ... +def combining(chr: str, /) -> int: ... @overload -def decimal(chr: str, /) -> int: - """Converts a Unicode character into its equivalent decimal value. - - Returns the decimal value assigned to the character chr as integer. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - +def decimal(chr: str, /) -> int: ... @overload def decimal(chr: str, default: _T, /) -> int | _T: ... 
-def decomposition(chr: str, /) -> str: - """Returns the character decomposition mapping assigned to the character chr as string. - - An empty string is returned in case no such mapping is defined. - """ - +def decomposition(chr: str, /) -> str: ... @overload -def digit(chr: str, /) -> int: - """Converts a Unicode character into its equivalent digit value. - - Returns the digit value assigned to the character chr as integer. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - +def digit(chr: str, /) -> int: ... @overload def digit(chr: str, default: _T, /) -> int | _T: ... _EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] -def east_asian_width(chr: str, /) -> _EastAsianWidth: - """Returns the east asian width assigned to the character chr as string.""" - -def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: - """Return whether the Unicode string unistr is in the normal form 'form'. - - Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. - """ - -def lookup(name: str | ReadOnlyBuffer, /) -> str: - """Look up character by name. - - If a character with the given name is found, return the - corresponding character. If not found, KeyError is raised. - """ - -def mirrored(chr: str, /) -> int: - """Returns the mirrored property assigned to the character chr as integer. - - Returns 1 if the character has been identified as a "mirrored" - character in bidirectional text, 0 otherwise. - """ - +def east_asian_width(chr: str, /) -> _EastAsianWidth: ... +def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: ... +def lookup(name: str | ReadOnlyBuffer, /) -> str: ... +def mirrored(chr: str, /) -> int: ... @overload -def name(chr: str, /) -> str: - """Returns the name assigned to the character chr as a string. - - If no name is defined, default is returned, or, if not given, - ValueError is raised. - """ - +def name(chr: str, /) -> str: ... @overload def name(chr: str, default: _T, /) -> str | _T: ... -def normalize(form: _NormalizationForm, unistr: str, /) -> str: - """Return the normal form 'form' for the Unicode string unistr. - - Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. - """ - +def normalize(form: _NormalizationForm, unistr: str, /) -> str: ... @overload -def numeric(chr: str, /) -> float: - """Converts a Unicode character into its equivalent numeric value. - - Returns the numeric value assigned to the character chr as float. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - +def numeric(chr: str, /) -> float: ... @overload def numeric(chr: str, default: _T, /) -> float | _T: ... @final @@ -123,96 +46,28 @@ class UCD: # The methods below are constructed from the same array in C # (unicodedata_functions) and hence identical to the functions above. unidata_version: str - def bidirectional(self, chr: str, /) -> str: - """Returns the bidirectional class assigned to the character chr as string. - - If no such value is defined, an empty string is returned. - """ - - def category(self, chr: str, /) -> str: - """Returns the general category assigned to the character chr as string.""" - - def combining(self, chr: str, /) -> int: - """Returns the canonical combining class assigned to the character chr as integer. - - Returns 0 if no combining class is defined. - """ - + def bidirectional(self, chr: str, /) -> str: ... + def category(self, chr: str, /) -> str: ... + def combining(self, chr: str, /) -> int: ... 
@overload - def decimal(self, chr: str, /) -> int: - """Converts a Unicode character into its equivalent decimal value. - - Returns the decimal value assigned to the character chr as integer. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - + def decimal(self, chr: str, /) -> int: ... @overload def decimal(self, chr: str, default: _T, /) -> int | _T: ... - def decomposition(self, chr: str, /) -> str: - """Returns the character decomposition mapping assigned to the character chr as string. - - An empty string is returned in case no such mapping is defined. - """ - + def decomposition(self, chr: str, /) -> str: ... @overload - def digit(self, chr: str, /) -> int: - """Converts a Unicode character into its equivalent digit value. - - Returns the digit value assigned to the character chr as integer. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - + def digit(self, chr: str, /) -> int: ... @overload def digit(self, chr: str, default: _T, /) -> int | _T: ... - def east_asian_width(self, chr: str, /) -> _EastAsianWidth: - """Returns the east asian width assigned to the character chr as string.""" - - def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: - """Return whether the Unicode string unistr is in the normal form 'form'. - - Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. - """ - - def lookup(self, name: str | ReadOnlyBuffer, /) -> str: - """Look up character by name. - - If a character with the given name is found, return the - corresponding character. If not found, KeyError is raised. - """ - - def mirrored(self, chr: str, /) -> int: - """Returns the mirrored property assigned to the character chr as integer. - - Returns 1 if the character has been identified as a "mirrored" - character in bidirectional text, 0 otherwise. - """ - + def east_asian_width(self, chr: str, /) -> _EastAsianWidth: ... + def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: ... + def lookup(self, name: str | ReadOnlyBuffer, /) -> str: ... + def mirrored(self, chr: str, /) -> int: ... @overload - def name(self, chr: str, /) -> str: - """Returns the name assigned to the character chr as a string. - - If no name is defined, default is returned, or, if not given, - ValueError is raised. - """ - + def name(self, chr: str, /) -> str: ... @overload def name(self, chr: str, default: _T, /) -> str | _T: ... - def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: - """Return the normal form 'form' for the Unicode string unistr. - - Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. - """ - + def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: ... @overload - def numeric(self, chr: str, /) -> float: - """Converts a Unicode character into its equivalent numeric value. - - Returns the numeric value assigned to the character chr as float. - If no such value is defined, default is returned, or, if not given, - ValueError is raised. - """ - + def numeric(self, chr: str, /) -> float: ... @overload def numeric(self, chr: str, default: _T, /) -> float | _T: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi index 0d0cf6b52855d..546ea77bb4ca2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi @@ -1,49 +1,3 @@ -""" -Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's -Smalltalk testing framework (used with permission). - -This module contains the core framework classes that form the basis of -specific test cases and suites (TestCase, TestSuite etc.), and also a -text-based utility class for running the tests and reporting the results - (TextTestRunner). - -Simple usage: - - import unittest - - class IntegerArithmeticTestCase(unittest.TestCase): - def testAdd(self): # test method names begin with 'test' - self.assertEqual((1 + 2), 3) - self.assertEqual(0 + 1, 1) - def testMultiply(self): - self.assertEqual((0 * 10), 0) - self.assertEqual((5 * 8), 40) - - if __name__ == '__main__': - unittest.main() - -Further information is available in the bundled documentation, and from - - http://docs.python.org/library/unittest.html - -Copyright (c) 1999-2003 Steve Purcell -Copyright (c) 2003 Python Software Foundation -This module is free software, and you may redistribute it and/or modify -it under the same terms as Python itself, so long as this copyright message -and disclaimer are retained in their original form. - -IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, -SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF -THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. - -THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, -AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, -SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. -""" - import sys from unittest.async_case import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi index dddae531323d8..011a970d8bbce 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi @@ -7,14 +7,10 @@ from unittest.case import TestCase, _BaseTestCaseContext _L = TypeVar("_L", None, _LoggingWatcher) class _LoggingWatcher(NamedTuple): - """_LoggingWatcher(records, output)""" - records: list[logging.LogRecord] output: list[str] class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): - """A context manager for assertLogs() and assertNoLogs()""" - LOGGING_FORMAT: ClassVar[str] logger_name: str level: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi index 07dc69509e045..0b3fb9122c7b9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi @@ -20,11 +20,6 @@ class IsolatedAsyncioTestCase(TestCase): async def asyncTearDown(self) -> None: ... def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... 
if sys.version_info >= (3, 11): - async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: - """Enters the supplied asynchronous context manager. - - If successful, also adds its __aexit__ method as a cleanup - function and returns the result of the __aenter__ method. - """ + async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi index 9f738eb049046..a602196e73c64 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi @@ -1,5 +1,3 @@ -"""Test case implementation""" - import logging import sys import unittest.result @@ -42,90 +40,26 @@ class _AssertRaisesBaseContext(_BaseTestCaseContext): # This returns Self if args is the empty list, and None otherwise. # but it's not possible to construct an overload which expresses that - def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: - """ - If args is empty, assertRaises/Warns is being used as a - context manager, so check for a 'msg' kwarg and return self. - If args is not empty, call a callable passing positional and keyword - arguments. - """ - -def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: - """Same as addCleanup, except the cleanup items are called even if - setUpModule fails (unlike tearDownModule). - """ + def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: ... -def doModuleCleanups() -> None: - """Execute all module cleanup functions. Normally called for you after - tearDownModule. - """ +def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... +def doModuleCleanups() -> None: ... if sys.version_info >= (3, 11): - def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: - """Same as enterContext, but module-wide.""" + def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: ... def expectedFailure(test_item: _FT) -> _FT: ... -def skip(reason: str) -> Callable[[_FT], _FT]: - """ - Unconditionally skip a test. - """ - -def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: - """ - Skip a test if the condition is true. - """ - -def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: - """ - Skip a test unless the condition is true. - """ +def skip(reason: str) -> Callable[[_FT], _FT]: ... +def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... +def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... class SkipTest(Exception): - """ - Raise this exception in a test to skip it. - - Usually you can use TestCase.skipTest() or one of the skipping decorators - instead of raising this directly. - """ - def __init__(self, reason: str) -> None: ... @type_check_only class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... class TestCase: - """A class whose instances are single test cases. - - By default, the test code itself should be placed in a method named - 'runTest'. - - If the fixture may be used for many test cases, create as - many test methods as are needed. When instantiating such a TestCase - subclass, specify in the constructor arguments the name of the test method - that the instance is to execute. - - Test authors should subclass TestCase for their own tests. 
Construction - and deconstruction of the test's environment ('fixture') can be - implemented by overriding the 'setUp' and 'tearDown' methods respectively. - - If it is necessary to override the __init__ method, the base class - __init__ method must always be called. It is important that subclasses - should not change the signature of their __init__ method, since instances - of the classes are instantiated automatically by parts of the framework - in order to be run. - - When subclassing TestCase, you can set these attributes: - * failureException: determines which exception will be raised when - the instance's assertion methods fail; test methods raising this - exception will be deemed to have 'failed' rather than 'errored'. - * longMessage: determines whether long messages (including repr of - objects used in assert methods) will be printed on failure in *addition* - to any explicit message passed. - * maxDiff: sets the maximum length of a diff in failure messages - by assert methods using difflib. It is looked up as an instance - attribute so can be configured by individual tests if required. - """ - failureException: type[BaseException] longMessage: bool maxDiff: int | None @@ -133,110 +67,49 @@ class TestCase: _testMethodName: str # undocumented _testMethodDoc: str - def __init__(self, methodName: str = "runTest") -> None: - """Create an instance of the class that will use the named test - method when executed. Raises a ValueError if the instance does - not have a method with the specified name. - """ - + def __init__(self, methodName: str = "runTest") -> None: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... - def setUp(self) -> None: - """Hook method for setting up the test fixture before exercising it.""" - - def tearDown(self) -> None: - """Hook method for deconstructing the test fixture after testing it.""" - + def setUp(self) -> None: ... + def tearDown(self) -> None: ... @classmethod - def setUpClass(cls) -> None: - """Hook method for setting up class fixture before running tests in the class.""" - + def setUpClass(cls) -> None: ... @classmethod - def tearDownClass(cls) -> None: - """Hook method for deconstructing the class fixture after running all tests in the class.""" - + def tearDownClass(cls) -> None: ... def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... - def skipTest(self, reason: Any) -> NoReturn: - """Skip this test.""" - - def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: - """Return a context manager that will return the enclosed block - of code in a subtest identified by the optional message and - keyword parameters. A failure in the subtest marks the test - case as failed but resumes execution at the end of the enclosed - block, allowing further test code to be executed. - """ - - def debug(self) -> None: - """Run the test without collecting errors in a TestResult""" + def skipTest(self, reason: Any) -> NoReturn: ... + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... + def debug(self) -> None: ... if sys.version_info < (3, 11): def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... - def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: - """Fail if the two objects are unequal as determined by the '==' - operator. 
- """ - - def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: - """Fail if the two objects are equal as determined by the '!=' - operator. - """ - - def assertTrue(self, expr: Any, msg: Any = None) -> None: - """Check that the expression is true.""" - - def assertFalse(self, expr: Any, msg: Any = None) -> None: - """Check that the expression is false.""" - - def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: - """Just like self.assertTrue(a is b), but with a nicer default message.""" - - def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: - """Just like self.assertTrue(a is not b), but with a nicer default message.""" - - def assertIsNone(self, obj: object, msg: Any = None) -> None: - """Same as self.assertTrue(obj is None), with a nicer default message.""" - - def assertIsNotNone(self, obj: object, msg: Any = None) -> None: - """Included for symmetry with assertIsNone.""" - - def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: - """Just like self.assertTrue(a in b), but with a nicer default message.""" - - def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: - """Just like self.assertTrue(a not in b), but with a nicer default message.""" - - def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: - """Same as self.assertTrue(isinstance(obj, cls)), with a nicer - default message. - """ - - def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: - """Included for symmetry with assertIsInstance.""" - + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertTrue(self, expr: Any, msg: Any = None) -> None: ... + def assertFalse(self, expr: Any, msg: Any = None) -> None: ... + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNone(self, obj: object, msg: Any = None) -> None: ... + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... @overload - def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a > b), but with a nicer default message.""" - + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... @overload def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... @overload - def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a >= b), but with a nicer default message.""" - + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... @overload def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... 
@overload - def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a < b), but with a nicer default message.""" - + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... @overload def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a <= b), but with a nicer default message.""" - + def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: ... @overload def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` @@ -249,33 +122,7 @@ class TestCase: callable: Callable[..., object], *args: Any, **kwargs: Any, - ) -> None: - """Fail unless an exception of class expected_exception is raised - by the callable when invoked with specified positional and - keyword arguments. If a different type of exception is - raised, it will not be caught, and the test case will be - deemed to have suffered an error, exactly as for an - unexpected exception. - - If called with the callable and arguments omitted, will return a - context object used like this:: - - with self.assertRaises(SomeException): - do_something() - - An optional keyword argument 'msg' can be provided when assertRaises - is used as a context object. - - The context manager keeps a reference to the exception as - the 'exception' attribute. This allows you to inspect the - exception after the assertion:: - - with self.assertRaises(SomeException) as cm: - do_something() - the_exception = cm.exception - self.assertEqual(the_exception.error_code, 3) - """ - + ) -> None: ... @overload def assertRaises( self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... @@ -288,19 +135,7 @@ class TestCase: callable: Callable[..., object], *args: Any, **kwargs: Any, - ) -> None: - """Asserts that the message in a raised exception matches a regex. - - Args: - expected_exception: Exception class expected to be raised. - expected_regex: Regex (re.Pattern object or string) expected - to be found in error message. - args: Function to be called and extra positional args. - kwargs: Extra kwargs. - msg: Optional message used in case of failure. Can only be used - when assertRaisesRegex is used as a context manager. - """ - + ) -> None: ... @overload def assertRaisesRegex( self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... @@ -312,35 +147,7 @@ class TestCase: callable: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs, - ) -> None: - """Fail unless a warning of class warnClass is triggered - by the callable when invoked with specified positional and - keyword arguments. If a different type of warning is - triggered, it will not be handled: depending on the other - warning filtering rules in effect, it might be silenced, printed - out, or raised as an exception. - - If called with the callable and arguments omitted, will return a - context object used like this:: - - with self.assertWarns(SomeWarning): - do_something() - - An optional keyword argument 'msg' can be provided when assertWarns - is used as a context object. 
- - The context manager keeps a reference to the first matching - warning as the 'warning' attribute; similarly, the 'filename' - and 'lineno' attributes give you information about the line - of Python code from which the warning was triggered. - This allows you to inspect the warning after the assertion:: - - with self.assertWarns(SomeWarning) as cm: - do_something() - the_warning = cm.warning - self.assertEqual(the_warning.some_attribute, 147) - """ - + ) -> None: ... @overload def assertWarns( self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... @@ -353,73 +160,21 @@ class TestCase: callable: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs, - ) -> None: - """Asserts that the message in a triggered warning matches a regexp. - Basic functioning is similar to assertWarns() with the addition - that only warnings whose messages also match the regular expression - are considered successful matches. - - Args: - expected_warning: Warning class expected to be triggered. - expected_regex: Regex (re.Pattern object or string) expected - to be found in error message. - args: Function to be called and extra positional args. - kwargs: Extra kwargs. - msg: Optional message used in case of failure. Can only be used - when assertWarnsRegex is used as a context manager. - """ - + ) -> None: ... @overload def assertWarnsRegex( self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( self, logger: str | logging.Logger | None = None, level: int | str | None = None - ) -> _AssertLogsContext[_LoggingWatcher]: - """Fail unless a log message of level *level* or higher is emitted - on *logger_name* or its children. If omitted, *level* defaults to - INFO and *logger* defaults to the root logger. - - This method must be used as a context manager, and will yield - a recording object with two attributes: `output` and `records`. - At the end of the context manager, the `output` attribute will - be a list of the matching formatted log messages and the - `records` attribute will be a list of the corresponding LogRecord - objects. - - Example:: - - with self.assertLogs('foo', level='INFO') as cm: - logging.getLogger('foo').info('first message') - logging.getLogger('foo.bar').error('second message') - self.assertEqual(cm.output, ['INFO:foo:first message', - 'ERROR:foo.bar:second message']) - """ + ) -> _AssertLogsContext[_LoggingWatcher]: ... if sys.version_info >= (3, 10): def assertNoLogs( self, logger: str | logging.Logger | None = None, level: int | str | None = None - ) -> _AssertLogsContext[None]: - """Fail unless no log messages of level *level* or higher are emitted - on *logger_name* or its children. - - This method must be used as a context manager. - """ + ) -> _AssertLogsContext[None]: ... @overload - def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: - """Fail if the two objects are unequal as determined by their - difference rounded to the given number of decimal places - (default 7) and comparing to zero, or by comparing that the - difference between the two objects is more than the given - delta. - - Note that decimal places (from zero) are usually not the same - as significant digits (measured from the most significant digit). - - If the two objects compare equal then they will automatically - compare almost equal. 
- """ - + def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -443,18 +198,7 @@ class TestCase: delta: None = None, ) -> None: ... @overload - def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: - """Fail if the two objects are equal as determined by their - difference rounded to the given number of decimal places - (default 7) and comparing to zero, or by comparing that the - difference between the two objects is less than the given delta. - - Note that decimal places (from zero) are usually not the same - as significant digits (measured from the most significant digit). - - Objects that are equal automatically fail. - """ - + def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertNotAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -477,164 +221,42 @@ class TestCase: msg: Any = None, delta: None = None, ) -> None: ... - def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: - """Fail the test unless the text matches the regular expression.""" - - def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: - """Fail the test if the text matches the regular expression.""" - - def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: - """Asserts that two iterables have the same elements, the same number of - times, without regard to order. - - self.assertEqual(Counter(list(first)), - Counter(list(second))) - - Example: - - [0, 1, 1] and [1, 0, 1] compare equal. - - [0, 0, 1] and [0, 1] compare unequal. - - """ - - def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: - """Add a type specific assertEqual style function to compare a type. - - This method is for use by TestCase subclasses that need to register - their own type equality functions to provide nicer error messages. - - Args: - typeobj: The data type to call this function on when both values - are of the same type in assertEqual(). - function: The callable taking two arguments and an optional - msg= argument that raises self.failureException with a - useful error message when the two arguments are not equal. - """ - - def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: - """Assert that two multi-line strings are equal.""" - + def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... + def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... def assertSequenceEqual( self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None - ) -> None: - """An equality assertion for ordered sequences (like lists and tuples). 
- - For the purposes of this function, a valid ordered sequence type is one - which can be indexed, has a length, and has an equality operator. - - Args: - seq1: The first sequence to compare. - seq2: The second sequence to compare. - seq_type: The expected datatype of the sequences, or None if no - datatype should be enforced. - msg: Optional message to use on failure instead of a list of - differences. - """ - - def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: - """A list-specific equality assertion. - - Args: - list1: The first list to compare. - list2: The second list to compare. - msg: Optional message to use on failure instead of a list of - differences. - - """ - - def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: - """A tuple-specific equality assertion. - - Args: - tuple1: The first tuple to compare. - tuple2: The second tuple to compare. - msg: Optional message to use on failure instead of a list of - differences. - """ - - def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: - """A set-specific equality assertion. - - Args: - set1: The first set to compare. - set2: The second set to compare. - msg: Optional message to use on failure instead of a list of - differences. - - assertSetEqual uses ducktyping to support different types of sets, and - is optimized for sets specifically (parameters must support a - difference method). - """ + ) -> None: ... + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... # assertDictEqual accepts only true dict instances. We can't use that here, since that would make # assertDictEqual incompatible with TypedDict. def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... - def fail(self, msg: Any = None) -> NoReturn: - """Fail immediately, with the given message.""" - + def fail(self, msg: Any = None) -> NoReturn: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... - def shortDescription(self) -> str | None: - """Returns a one-line description of the test, or None if no - description has been provided. + def shortDescription(self) -> str | None: ... + def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... - The default implementation of this method returns the first line of - the specified test method's docstring. - """ - - def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: - """Add a function, with arguments, to be called when the test is - completed. Functions added are called on a LIFO basis and are - called after tearDown on test failure or success. - - Cleanup items are called even if setUp fails (unlike tearDown). - """ if sys.version_info >= (3, 11): - def enterContext(self, cm: AbstractContextManager[_T]) -> _T: - """Enters the supplied context manager. - - If successful, also adds its __exit__ method as a cleanup - function and returns the result of the __enter__ method. - """ - - def doCleanups(self) -> None: - """Execute all cleanup functions. Normally called for you after - tearDown. 
- """ + def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... + def doCleanups(self) -> None: ... @classmethod - def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: - """Same as addCleanup, except the cleanup items are called even if - setUpClass fails (unlike tearDownClass). - """ - + def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... @classmethod - def doClassCleanups(cls) -> None: - """Execute all class cleanup functions. Normally called for you after - tearDownClass. - """ + def doClassCleanups(cls) -> None: ... + if sys.version_info >= (3, 11): @classmethod - def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: - """Same as enterContext, but class-wide.""" - - def _formatMessage(self, msg: str | None, standardMsg: str) -> str: # undocumented - """Honour the longMessage attribute when generating failure messages. - If longMessage is False this means: - * Use only an explicit message if it is provided - * Otherwise use the standard message for the assert - - If longMessage is True: - * Use the standard message - * If an explicit message is provided, plus ' : ' and the explicit message - """ - - def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: # undocumented - """Get a detailed comparison function for the types of the two args. + def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: ... - Returns: A callable accepting (first, second, msg=None) that will - raise a failure exception if first != second with a useful human - readable error message for those types. - """ + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented + def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented if sys.version_info < (3, 12): failUnlessEqual = assertEqual assertEquals = assertEqual @@ -651,8 +273,10 @@ class TestCase: assertRegexpMatches = assertRegex assertNotRegexpMatches = assertNotRegex assertRaisesRegexp = assertRaisesRegex - def assertDictContainsSubset(self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None) -> None: - """Checks whether dictionary is a superset of subset.""" + def assertDictContainsSubset( + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None + ) -> None: ... + if sys.version_info >= (3, 10): # Runtime has *args, **kwargs, but will error if any are supplied def __init_subclass__(cls, *args: Never, **kwargs: Never) -> None: ... @@ -668,14 +292,6 @@ class TestCase: def assertNotEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... class FunctionTestCase(TestCase): - """A test case that wraps a test function. - - This is useful for slipping pre-existing test functions into the - unittest framework. Optionally, set-up and tidy-up functions can be - supplied. As with TestCase, the tidy-up ('tearDown') function will - always be called if the set-up ('setUp') function ran successfully. - """ - def __init__( self, testFunc: Callable[[], object], @@ -688,22 +304,14 @@ class FunctionTestCase(TestCase): def __eq__(self, other: object) -> bool: ... class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): - """A context manager used to implement TestCase.assertRaises* methods.""" - exception: _E def __enter__(self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _AssertWarnsContext(_AssertRaisesBaseContext): - """A context manager used to implement TestCase.assertWarns* methods.""" - warning: WarningMessage filename: str lineno: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi index 9473618bb255f..81de40c898496 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi @@ -1,5 +1,3 @@ -"""Loading unittests.""" - import sys import unittest.case import unittest.suite @@ -15,72 +13,23 @@ _SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite VALID_MODULE_NAME: Final[Pattern[str]] class TestLoader: - """ - This class is responsible for loading tests according to various criteria - and returning them wrapped in a TestSuite - """ - errors: list[type[BaseException]] testMethodPrefix: str sortTestMethodsUsing: _SortComparisonMethod testNamePatterns: list[str] | None suiteClass: _SuiteClass - def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in testCaseClass""" + def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... if sys.version_info >= (3, 12): - def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in the given module""" + def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: ... else: - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in the given module""" - - def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases given a string specifier. - - The name may resolve either to a module, a test case class, a - test method within a test case class, or a callable object which - returns a TestCase or TestSuite instance. - - The method optionally resolves the names relative to a given module. - """ - - def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases found using the given sequence - of string specifiers. See 'loadTestsFromName()'. - """ - - def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: - """Return a sorted sequence of method names found within testCaseClass""" - - def discover(self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None) -> unittest.suite.TestSuite: - """Find and return all test modules from the specified start - directory, recursing into subdirectories to find them and return all - tests found within them. Only test files that match the pattern will - be loaded. (Using shell style pattern matching.) - - All test modules must be importable from the top level of the project. 
- If the start directory is not the top level directory then the top - level directory must be specified separately. - - If a test package name (directory with '__init__.py') matches the - pattern then the package will be checked for a 'load_tests' function. If - this exists then it will be called with (loader, tests, pattern) unless - the package has already had load_tests called from the same discovery - invocation, in which case the package module object is not scanned for - tests - this ensures that when a package uses discover to further - discover child tests that infinite recursion does not happen. - - If load_tests exists then discovery does *not* recurse into the package, - load_tests is responsible for loading all tests in the package. - - The pattern is deliberately not stored as a loader attribute so that - packages can continue discovery themselves. top_level_dir is stored so - load_tests does not need to pass this argument in to loader.discover(). - - Paths are sorted before being imported to ensure reproducible execution - order even on filesystems with non-alphabetical ordering like ext3/4. - """ - + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ... + + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... + def discover( + self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None + ) -> unittest.suite.TestSuite: ... def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... defaultTestLoader: TestLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi index a4e8e9cb02bdf..23ead1638ecc2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi @@ -1,5 +1,3 @@ -"""Unittest main program""" - import sys import unittest.case import unittest.loader @@ -19,10 +17,6 @@ class _TestRunner(Protocol): # not really documented class TestProgram: - """A command-line program that runs a set of tests; this is primarily - for making test modules conveniently executable. - """ - result: unittest.result.TestResult module: None | str | ModuleType verbosity: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi index 615eec5fc45fe..f3e58bcd1c009 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi @@ -55,14 +55,10 @@ else: FILTER_DIR: bool # controls the way mock objects respond to `dir` function class _SentinelObject: - """A unique, named, sentinel object.""" - name: Any def __init__(self, name: Any) -> None: ... class _Sentinel: - """Access attributes to return a named object, usable as a sentinel.""" - def __getattr__(self, name: str) -> Any: ... sentinel: _Sentinel @@ -74,25 +70,6 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs if sys.version_info >= (3, 12): class _Call(tuple[Any, ...]): - """ - A tuple for holding the results of a call to a mock, either in the form - `(args, kwargs)` or `(name, args, kwargs)`. 
- - If args or kwargs are empty then a call tuple will compare equal to - a tuple without those values. This makes comparisons less verbose:: - - _Call(('name', (), {})) == ('name',) - _Call(('name', (1,), {})) == ('name', (1,)) - _Call(((), {'a': 'b'})) == ({'a': 'b'},) - - The `_Call` object provides a useful shortcut for comparing with call:: - - _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) - _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) - - If the _Call has no name then it will match any name. - """ - def __new__( cls, value: _CallValue = (), @@ -119,34 +96,11 @@ if sys.version_info >= (3, 12): def args(self) -> tuple[Any, ...]: ... @property def kwargs(self) -> Mapping[str, Any]: ... - def call_list(self) -> Any: - """For a call object that represents multiple calls, `call_list` - returns a list of all the intermediate calls as well as the - final call. - """ + def call_list(self) -> Any: ... else: @disjoint_base class _Call(tuple[Any, ...]): - """ - A tuple for holding the results of a call to a mock, either in the form - `(args, kwargs)` or `(name, args, kwargs)`. - - If args or kwargs are empty then a call tuple will compare equal to - a tuple without those values. This makes comparisons less verbose:: - - _Call(('name', (), {})) == ('name',) - _Call(('name', (1,), {})) == ('name', (1,)) - _Call(((), {'a': 'b'})) == ({'a': 'b'},) - - The `_Call` object provides a useful shortcut for comparing with call:: - - _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) - _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) - - If the _Call has no name then it will match any name. - """ - def __new__( cls, value: _CallValue = (), @@ -173,11 +127,7 @@ else: def args(self) -> tuple[Any, ...]: ... @property def kwargs(self) -> Mapping[str, Any]: ... - def call_list(self) -> Any: - """For a call object that represents multiple calls, `call_list` - returns a list of all the intermediate calls as well as the - final call. - """ + def call_list(self) -> Any: ... call: _Call @@ -190,8 +140,6 @@ class Base: # We subclass with "Any" because mocks are explicitly designed to stand in for other types, # something that can't be expressed with our static type system. class NonCallableMock(Base, Any): - """A non-callable version of `Mock`""" - if sys.version_info >= (3, 12): def __new__( cls, @@ -229,93 +177,22 @@ class NonCallableMock(Base, Any): def __getattr__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... def __setattr__(self, name: str, value: Any) -> None: ... - def __dir__(self) -> list[str]: - """Filter the output of `dir(mock)` to only useful members.""" - - def assert_called_with(self, *args: Any, **kwargs: Any) -> None: - """assert that the last call was made with the specified arguments. - - Raises an AssertionError if the args and keyword args passed in are - different to the last call to the mock. - """ - - def assert_not_called(self) -> None: - """assert that the mock was never called.""" - - def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: - """assert that the mock was called exactly once and that that call was - with the specified arguments. - """ - + def __dir__(self) -> list[str]: ... + def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_not_called(self) -> None: ... + def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... 
- def assert_called(self) -> None: - """assert that the mock was called at least once""" - - def assert_called_once(self) -> None: - """assert that the mock was called only once.""" - - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: - """Restore the mock object to its initial state.""" - + def assert_called(self) -> None: ... + def assert_called_once(self) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... def _extract_mock_name(self) -> str: ... - def _get_call_signature_from_name(self, name: str) -> Any: - """ - * If call objects are asserted against a method/function like obj.meth1 - then there could be no name for the call object to lookup. Hence just - return the spec_signature of the method/function being asserted against. - * If the name is not empty then remove () and split by '.' to get - list of names to iterate through the children until a potential - match is found. A child mock is created only during attribute access - so if we get a _SpecState then no attributes of the spec were accessed - and can be safely exited. - """ - - def assert_any_call(self, *args: Any, **kwargs: Any) -> None: - """assert the mock has been called with the specified arguments. - - The assert passes if the mock has *ever* been called, unlike - `assert_called_with` and `assert_called_once_with` that only pass if - the call is the most recent one. - """ - - def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: - """assert the mock has been called with the specified calls. - The `mock_calls` list is checked for the calls. - - If `any_order` is False (the default) then the calls must be - sequential. There can be extra calls before or after the - specified calls. - - If `any_order` is True then the calls can be in any order, but - they must all appear in `mock_calls`. - """ - - def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: - """Add a spec to a mock. `spec` can either be an object or a - list of strings. Only attributes on the `spec` can be fetched as - attributes from the mock. - - If `spec_set` is True then only attributes on the spec can be set. - """ - + def _get_call_signature_from_name(self, name: str) -> Any: ... + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... + def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... - def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: - """ - Attach a mock as an attribute of this one, replacing its name and - parent. Calls to the attached mock will be recorded in the - `method_calls` and `mock_calls` attributes of this one. - """ - - def configure_mock(self, **kwargs: Any) -> None: - """Set attributes on the mock through keyword arguments. - - Attributes plus return values and side effects can be set on child - mocks using standard dot notation and unpacking a dictionary in the - method call: - - >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} - >>> mock.configure_mock(**attrs) - """ + def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... + def configure_mock(self, **kwargs: Any) -> None: ... 
return_value: Any side_effect: Any called: bool @@ -324,43 +201,12 @@ class NonCallableMock(Base, Any): call_args_list: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... - def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: - """ - Given a call (or simply an (args, kwargs) tuple), return a - comparison key suitable for matching with other calls. - This is a best effort method which relies on the spec's signature, - if available, or falls back on the arguments themselves. - """ - - def _get_child_mock(self, **kw: Any) -> NonCallableMock: - """Create the child mocks for attributes and return value. - By default child mocks will be the same type as the parent. - Subclasses of Mock may want to override this to customize the way - child mocks are made. - - For non-callable mocks the callable variant will be used (rather than - any custom subclass). - """ + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... + def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... if sys.version_info >= (3, 13): - def _calls_repr(self) -> str: - """Renders self.mock_calls as a string. - - Example: " - Calls: [call(1), call(2)]." - - If self.mock_calls is empty, an empty string is returned. The - output will be truncated if very long. - """ + def _calls_repr(self) -> str: ... else: - def _calls_repr(self, prefix: str = "Calls") -> str: - """Renders self.mock_calls as a string. - - Example: " - Calls: [call(1), call(2)]." - - If self.mock_calls is empty, an empty string is returned. The - output will be truncated if very long. - """ + def _calls_repr(self, prefix: str = "Calls") -> str: ... class CallableMixin(Base): side_effect: Any @@ -380,63 +226,7 @@ class CallableMixin(Base): ) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... -class Mock(CallableMixin, NonCallableMock): - """ - Create a new `Mock` object. `Mock` takes several optional arguments - that specify the behaviour of the Mock object: - - * `spec`: This can be either a list of strings or an existing object (a - class or instance) that acts as the specification for the mock object. If - you pass in an object then a list of strings is formed by calling dir on - the object (excluding unsupported magic attributes and methods). Accessing - any attribute not in this list will raise an `AttributeError`. - - If `spec` is an object (rather than a list of strings) then - `mock.__class__` returns the class of the spec object. This allows mocks - to pass `isinstance` tests. - - * `spec_set`: A stricter variant of `spec`. If used, attempting to *set* - or get an attribute on the mock that isn't on the object passed as - `spec_set` will raise an `AttributeError`. - - * `side_effect`: A function to be called whenever the Mock is called. See - the `side_effect` attribute. Useful for raising exceptions or - dynamically changing return values. The function is called with the same - arguments as the mock, and unless it returns `DEFAULT`, the return - value of this function is used as the return value. - - If `side_effect` is an iterable then each call to the mock will return - the next value from the iterable. If any of the members of the iterable - are exceptions they will be raised instead of returned. - - * `return_value`: The value returned when the mock is called. By default - this is a new Mock (created on first access). See the - `return_value` attribute. 
- - * `unsafe`: By default, accessing any attribute whose name starts with - *assert*, *assret*, *asert*, *aseert*, or *assrt* raises an AttributeError. - Additionally, an AttributeError is raised when accessing - attributes that match the name of an assertion method without the prefix - `assert_`, e.g. accessing `called_once` instead of `assert_called_once`. - Passing `unsafe=True` will allow access to these attributes. - - * `wraps`: Item for the mock object to wrap. If `wraps` is not None then - calling the Mock will pass the call through to the wrapped object - (returning the real result). Attribute access on the mock will return a - Mock object that wraps the corresponding attribute of the wrapped object - (so attempting to access an attribute that doesn't exist will raise an - `AttributeError`). - - If the mock has an explicit `return_value` set then calls are not passed - to the wrapped object and the `return_value` is returned instead. - - * `name`: If the mock has a name then it will be used in the repr of the - mock. This can be useful for debugging. The name is propagated to child - mocks. - - Mocks can also be called with arbitrary keyword arguments. These will be - used to set attributes on the mock after it is created. - """ +class Mock(CallableMixin, NonCallableMock): ... class _patch(Generic[_T]): attribute_name: Any @@ -499,19 +289,12 @@ class _patch(Generic[_T]): target: Any temp_original: Any is_local: bool - def __enter__(self) -> _T: - """Perform the patch.""" - + def __enter__(self) -> _T: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> None: - """Undo the patch.""" - - def start(self) -> _T: - """Activate a patch, returning any created mock.""" - - def stop(self) -> None: - """Stop an active patch.""" + ) -> None: ... + def start(self) -> _T: ... + def stop(self) -> None: ... # This class does not exist at runtime, it's a hack to make this work: # @patch("foo") @@ -526,35 +309,6 @@ class _patch_pass_arg(_patch[_T]): def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... class _patch_dict: - """ - Patch a dictionary, or dictionary like object, and restore the dictionary - to its original state after the test, where the restored dictionary is - a copy of the dictionary as it was before the test. - - `in_dict` can be a dictionary or a mapping like container. If it is a - mapping then it must at least support getting, setting and deleting items - plus iterating over keys. - - `in_dict` can also be a string specifying the name of the dictionary, which - will then be fetched by importing it. - - `values` can be a dictionary of values to set in the dictionary. `values` - can also be an iterable of `(key, value)` pairs. - - If `clear` is True then the dictionary will be cleared before the new - values are set. - - `patch.dict` can also be called with arbitrary keyword arguments to set - values in the dictionary:: - - with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()): - ... - - `patch.dict` can be used as a context manager, decorator or class - decorator. When used as a class decorator `patch.dict` honours - `patch.TEST_PREFIX` for choosing which methods to wrap. - """ - in_dict: Any values: Any clear: Any @@ -565,11 +319,8 @@ class _patch_dict: def decorate_async_callable(self, f: _AF) -> _AF: ... def decorate_class(self, klass: Any) -> Any: ... 
- def __enter__(self) -> Any: - """Patch the dict.""" - - def __exit__(self, *args: object) -> Any: - """Unpatch the dict.""" + def __enter__(self) -> Any: ... + def __exit__(self, *args: object) -> Any: ... start: Any stop: Any @@ -723,72 +474,20 @@ patch: _patcher class MagicMixin(Base): def __init__(self, *args: Any, **kw: Any) -> None: ... -class NonCallableMagicMock(MagicMixin, NonCallableMock): - """A version of `MagicMock` that isn't callable.""" - -class MagicMock(MagicMixin, Mock): - """ - MagicMock is a subclass of Mock with default implementations - of most of the magic methods. You can use MagicMock without having to - configure the magic methods yourself. - - If you use the `spec` or `spec_set` arguments then *only* magic - methods that exist in the spec will be created. - - Attributes and the return value of a `MagicMock` will also be `MagicMocks`. - """ +class NonCallableMagicMock(MagicMixin, NonCallableMock): ... +class MagicMock(MagicMixin, Mock): ... class AsyncMockMixin(Base): def __init__(self, *args: Any, **kwargs: Any) -> None: ... async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... - def assert_awaited(self) -> None: - """ - Assert that the mock was awaited at least once. - """ - - def assert_awaited_once(self) -> None: - """ - Assert that the mock was awaited exactly once. - """ - - def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: - """ - Assert that the last await was with the specified arguments. - """ - - def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: - """ - Assert that the mock was awaited exactly once and with the specified - arguments. - """ - - def assert_any_await(self, *args: Any, **kwargs: Any) -> None: - """ - Assert the mock has ever been awaited with the specified arguments. - """ - - def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: - """ - Assert the mock has been awaited with the specified calls. - The :attr:`await_args_list` list is checked for the awaits. - - If `any_order` is False (the default) then the awaits must be - sequential. There can be extra calls before or after the - specified awaits. - - If `any_order` is True then the awaits can be in any order, but - they must all appear in :attr:`await_args_list`. - """ - - def assert_not_awaited(self) -> None: - """ - Assert that the mock was never awaited. - """ - - def reset_mock(self, *args: Any, **kwargs: Any) -> None: - """ - See :func:`.Mock.reset_mock()` - """ + def assert_awaited(self) -> None: ... + def assert_awaited_once(self) -> None: ... + def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... + def assert_not_awaited(self) -> None: ... + def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... await_count: int await_args: _Call | None await_args_list: _CallList @@ -797,54 +496,10 @@ class AsyncMagicMixin(MagicMixin): def __init__(self, *args: Any, **kw: Any) -> None: ... class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): - """ - Enhance :class:`Mock` with features allowing to mock - an async function. 
- - The :class:`AsyncMock` object will behave so the object is - recognized as an async function, and the result of a call is an awaitable: - - >>> mock = AsyncMock() - >>> inspect.iscoroutinefunction(mock) - True - >>> inspect.isawaitable(mock()) - True - - - The result of ``mock()`` is an async function which will have the outcome - of ``side_effect`` or ``return_value``: - - - if ``side_effect`` is a function, the async function will return the - result of that function, - - if ``side_effect`` is an exception, the async function will raise the - exception, - - if ``side_effect`` is an iterable, the async function will return the - next value of the iterable, however, if the sequence of result is - exhausted, ``StopIteration`` is raised immediately, - - if ``side_effect`` is not defined, the async function will return the - value defined by ``return_value``, hence, by default, the async function - returns a new :class:`AsyncMock` object. - - If the outcome of ``side_effect`` or ``return_value`` is an async function, - the mock async function obtained when the mock object is called will be this - async function itself (and not an async function returning an async - function). - - The test author can also specify a wrapped object with ``wraps``. In this - case, the :class:`Mock` object behavior is the same as with an - :class:`.Mock` object: the wrapped object may have methods - defined as async function functions. - - Based on Martin Richard's asynctest project. - """ - # Improving the `reset_mock` signature. # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal. # But, `NonCallableMock` super-class has the better version. - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: - """ - See :func:`.Mock.reset_mock()` - """ + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... class MagicProxy(Base): name: str @@ -855,8 +510,6 @@ class MagicProxy(Base): # See https://github.com/python/typeshed/issues/14701 class _ANY(Any): - """A helper object that compares equal to everything.""" - def __eq__(self, other: object) -> Literal[True]: ... def __ne__(self, other: object) -> Literal[False]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -873,29 +526,7 @@ if sys.version_info >= (3, 10): *, unsafe: bool = False, **kwargs: Any, - ) -> Any: - """Create a mock object using another object as a spec. Attributes on the - mock will use the corresponding attribute on the `spec` object as their - spec. - - Functions or methods being mocked will have their arguments checked - to check that they are called with the correct signature. - - If `spec_set` is True then attempting to set attributes that don't exist - on the spec object will raise an `AttributeError`. - - If a class is used as a spec then the return value of the mock (the - instance of the class) will have the same spec. You can use a class as the - spec for an instance object by passing `instance=True`. The returned mock - will only be callable if instances of the mock are callable. - - `create_autospec` will raise a `RuntimeError` if passed some common - misspellings of the arguments autospec and spec_set. Pass the argument - `unsafe` with the value True to disable that check. - - `create_autospec` also takes arbitrary keyword arguments that are passed to - the constructor of the created mock. - """ + ) -> Any: ... 
else: def create_autospec( @@ -905,25 +536,7 @@ else: _parent: Any | None = None, _name: Any | None = None, **kwargs: Any, - ) -> Any: - """Create a mock object using another object as a spec. Attributes on the - mock will use the corresponding attribute on the `spec` object as their - spec. - - Functions or methods being mocked will have their arguments checked - to check that they are called with the correct signature. - - If `spec_set` is True then attempting to set attributes that don't exist - on the spec object will raise an `AttributeError`. - - If a class is used as a spec then the return value of the mock (the - instance of the class) will have the same spec. You can use a class as the - spec for an instance object by passing `instance=True`. The returned mock - will only be callable if instances of the mock are callable. - - `create_autospec` also takes arbitrary keyword arguments that are passed to - the constructor of the created mock. - """ + ) -> Any: ... class _SpecState: spec: Any @@ -942,29 +555,9 @@ class _SpecState: instance: Any = False, ) -> None: ... -def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: - """ - A helper function to create a mock to replace the use of `open`. It works - for `open` called directly or used as a context manager. - - The `mock` argument is the mock object to configure. If `None` (the - default) then a `MagicMock` will be created for you, with the API limited - to methods or attributes available on standard file handles. - - `read_data` is a string for the `read`, `readline` and `readlines` of the - file handle to return. This is an empty string by default. - """ +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... class PropertyMock(Mock): - """ - A mock intended to be used as a property, or other descriptor, on a class. - `PropertyMock` provides `__get__` and `__set__` methods so you can specify - a return value when it is fetched. - - Fetching a `PropertyMock` instance from an object calls the mock, with - no args. Setting it calls the mock with the value being set. - """ - def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... def __set__(self, obj: Any, val: Any) -> None: ... @@ -974,46 +567,10 @@ if sys.version_info >= (3, 13): def __init__(self, /, *args: Any, timeout: float | None | _SentinelObject = ..., **kwargs: Any) -> None: ... # Same as `NonCallableMock.reset_mock.` - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: - """ - See :func:`.Mock.reset_mock()` - """ - - def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: - """Wait until the mock object is called. - - `timeout` - time to wait for in seconds, waits forever otherwise. - Defaults to the constructor provided timeout. - Use None to block undefinetively. - """ - - def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: - """Wait until the mock object is called with given args. - - Waits for the timeout in seconds provided in the constructor. - """ - - class ThreadingMock(ThreadingMixin, MagicMixin, Mock): - """ - A mock that can be used to wait until on calls happening - in a different thread. - - The constructor can take a `timeout` argument which - controls the timeout in seconds for all `wait` calls of the mock. - - You can change the default timeout of all instances via the - `ThreadingMock.DEFAULT_TIMEOUT` attribute. - - If no timeout is set, it will block undefinetively. 
- """ - -def seal(mock: Any) -> None: - """Disable the automatic generation of child mocks. + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: ... + def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: ... - Given an input Mock, seals it to ensure no further mocks will be generated - when accessing an attribute that was not already defined. + class ThreadingMock(ThreadingMixin, MagicMixin, Mock): ... - The operation recursively seals the mock passed in, meaning that - the mock itself, any mocks generated by accessing one of its attributes, - and all assigned mocks without a name or spec will be sealed. - """ +def seal(mock: Any) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi index 121d275d3b0de..0761baaa2830b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi @@ -1,5 +1,3 @@ -"""Test result object""" - import sys import unittest.case from _typeshed import OptExcInfo @@ -17,17 +15,6 @@ STDERR_LINE: Final[str] def failfast(method: _F) -> _F: ... class TestResult: - """Holder for test result information. - - Test results are automatically managed by the TestCase and TestSuite - classes, and do not need to be explicitly manipulated by writers of tests. - - Each instance holds the total number of tests run, and collections of - failures and errors that occurred among those test runs. The collections - contain tuples of (testcase, exceptioninfo), where exceptioninfo is the - formatted traceback of the error that occurred. - """ - errors: list[tuple[unittest.case.TestCase, str]] failures: list[tuple[unittest.case.TestCase, str]] skipped: list[tuple[unittest.case.TestCase, str]] @@ -42,64 +29,19 @@ class TestResult: collectedDurations: _DurationsType def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... - def printErrors(self) -> None: - """Called by TestRunner after test run""" - - def wasSuccessful(self) -> bool: - """Tells whether or not this result was a success.""" - - def stop(self) -> None: - """Indicates that the tests should be aborted.""" - - def startTest(self, test: unittest.case.TestCase) -> None: - """Called when the given test is about to be run""" - - def stopTest(self, test: unittest.case.TestCase) -> None: - """Called when the given test has been run""" - - def startTestRun(self) -> None: - """Called once before any tests are executed. - - See startTest for a method called before each test. - """ - - def stopTestRun(self) -> None: - """Called once after all tests are executed. - - See stopTest for a method called after each test. - """ - - def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: - """Called when an error has occurred. 'err' is a tuple of values as - returned by sys.exc_info(). - """ - - def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: - """Called when an error has occurred. 'err' is a tuple of values as - returned by sys.exc_info(). 
- """ - - def addSuccess(self, test: unittest.case.TestCase) -> None: - """Called when a test has completed successfully""" - - def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: - """Called when a test is skipped.""" - - def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: - """Called when an expected failure/error occurred.""" - - def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: - """Called when a test was expected to fail, but succeed.""" - - def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: - """Called at the end of a subtest. - 'err' is None if the subtest ended successfully, otherwise it's a - tuple of values as returned by sys.exc_info(). - """ + def printErrors(self) -> None: ... + def wasSuccessful(self) -> bool: ... + def stop(self) -> None: ... + def startTest(self, test: unittest.case.TestCase) -> None: ... + def stopTest(self, test: unittest.case.TestCase) -> None: ... + def startTestRun(self) -> None: ... + def stopTestRun(self) -> None: ... + def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: ... + def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ... if sys.version_info >= (3, 12): - def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: - """Called when a test finished to run, regardless of its outcome. - *test* is the test case corresponding to the test method. - *elapsed* is the time represented in seconds, and it includes the - execution of cleanup functions. - """ + def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi index 3f5a9406879f2..f76771f55e131 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi @@ -1,5 +1,3 @@ -"""Running tests""" - import sys import unittest.case import unittest.result @@ -24,8 +22,6 @@ class _TextTestStream(_SupportsWriteAndFlush, Protocol): # But that's not feasible to do Generically # We can expand the attributes if requested class _WritelnDecorator: - """Used to decorate file-like objects with a handy 'writeln' method""" - def __init__(self, stream: _SupportsWriteAndFlush) -> None: ... def writeln(self, arg: str | None = None) -> None: ... def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__ @@ -39,11 +35,6 @@ class _WritelnDecorator: _StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator) class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): - """A test result class that can print formatted text results to a stream. - - Used by TextTestRunner. 
- """ - descriptions: bool # undocumented dots: bool # undocumented separator1: str @@ -52,10 +43,7 @@ class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): stream: _StreamT # undocumented if sys.version_info >= (3, 12): durations: int | None - def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: - """Construct a TextTestResult. Subclasses should accept **kwargs - to ensure compatibility as the interface changes. - """ + def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: ... else: def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ... @@ -63,12 +51,6 @@ class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... class TextTestRunner: - """A test runner class that displays results in textual form. - - It prints out the names of tests as they are run, errors as they - occur, and a summary of the results at the end of the test run. - """ - resultclass: _ResultClassType stream: _WritelnDecorator descriptions: bool @@ -92,12 +74,7 @@ class TextTestRunner: *, tb_locals: bool = False, durations: int | None = None, - ) -> None: - """Construct a TextTestRunner. - - Subclasses should accept **kwargs to ensure compatibility as the - interface changes. - """ + ) -> None: ... else: def __init__( self, @@ -110,13 +87,7 @@ class TextTestRunner: warnings: str | None = None, *, tb_locals: bool = False, - ) -> None: - """Construct a TextTestRunner. - - Subclasses should accept **kwargs to ensure compatibility as the - interface changes. - """ + ) -> None: ... def _makeResult(self) -> TextTestResult: ... - def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: - """Run the given test case or test suite.""" + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi index 3445b85ce3f9e..443396164b6fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi @@ -1,5 +1,3 @@ -"""TestSuite""" - import unittest.case import unittest.result from collections.abc import Iterable, Iterator @@ -9,8 +7,6 @@ from typing_extensions import TypeAlias _TestType: TypeAlias = unittest.case.TestCase | TestSuite class BaseTestSuite: - """A simple test suite that doesn't provide class or module shared fixtures.""" - _tests: list[unittest.case.TestCase] _removed_tests: int def __init__(self, tests: Iterable[_TestType] = ()) -> None: ... @@ -18,22 +14,11 @@ class BaseTestSuite: def addTest(self, test: _TestType) -> None: ... def addTests(self, tests: Iterable[_TestType]) -> None: ... def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... - def debug(self) -> None: - """Run the tests without collecting errors in a TestResult""" - + def debug(self) -> None: ... def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] class TestSuite(BaseTestSuite): - """A test suite is a composite test consisting of a number of TestCases. - - For use, create an instance of TestSuite, then add test case instances. 
- When all tests have been added, the suite can be passed to a test - runner, such as TextTestRunner. It will run the individual test cases - in the order in which they were added, aggregating the results. When - subclassing, do not forget to call the base class constructor. - """ - def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi index 0bed55dbccf72..31c830e8268a7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi @@ -1,5 +1,3 @@ -"""Various utility functions.""" - from collections.abc import MutableSequence, Sequence from typing import Any, Final, TypeVar from typing_extensions import TypeAlias @@ -18,28 +16,8 @@ def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... def safe_repr(obj: object, short: bool = False) -> str: ... def strclass(cls: type) -> str: ... -def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: - """Finds elements in only one or the other of two, sorted input lists. - - Returns a two-element tuple of lists. The first list contains those - elements in the "expected" list but not in the "actual" list, and the - second contains those elements in the "actual" list but not in the - "expected" list. Duplicate elements in either input list are ignored. - """ - -def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: - """Same behavior as sorted_list_difference but - for lists of unorderable items (like dicts). - - As it does a linear search per item (remove) it - has O(n*n) performance. - """ - -def three_way_cmp(x: Any, y: Any) -> int: - """Return -1 if x < y, 0 if x == y and 1 if x > y""" - -def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: - """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ""" - -def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: - """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ""" +def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... +def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: ... +def three_way_cmp(x: Any, y: Any) -> int: ... +def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... +def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi index 42386f7f06c5d..2173d7e6efaa5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi @@ -1,16 +1,3 @@ -"""Exception classes raised by urllib. - -The base exception class is URLError, which inherits from OSError. It -doesn't define any behavior of its own, but is the base class for all -exceptions defined in this package. - -HTTPError is an exception class that is also a valid HTTP response -instance. 
It behaves this way because HTTP protocol errors are valid -responses, with a status code, headers, and a body. In some contexts, -an application may want to handle an exception like a regular -response. -""" - from email.message import Message from typing import IO from urllib.response import addinfourl @@ -24,8 +11,6 @@ class URLError(OSError): def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): - """Raised when HTTP error occurs, but also acts like non-error return""" - @property def headers(self) -> Message: ... @headers.setter @@ -39,7 +24,5 @@ class HTTPError(URLError, addinfourl): def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... class ContentTooShortError(URLError): - """Exception raised when downloaded size does not match content-length.""" - content: tuple[str, Message] def __init__(self, message: str, content: tuple[str, Message]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi index 10b9bcf0b6ac0..364892ecdf698 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi @@ -1,36 +1,3 @@ -"""Parse (absolute and relative) URLs. - -urlparse module is based upon the following RFC specifications. - -RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding -and L. Masinter, January 2005. - -RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter -and L.Masinter, December 1999. - -RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T. -Berners-Lee, R. Fielding, and L. Masinter, August 1998. - -RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998. - -RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June -1995. - -RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M. -McCahill, December 1994 - -RFC 3986 is considered the current standard and any future changes to -urlparse module should conform with it. The urlparse module is -currently not entirely compliant with this RFC due to defacto -scenarios for parsing, and for backward compatibility purposes, some -parsing quirks from older RFCs are retained. The testcases in -test_urlparse.py provides a good indicator of parsing behavior. - -The WHATWG URL Parser spec should also be considered. We are not compliant with -it either due to existing user code API behavior expectations (Hyrum's Law). -It serves as a useful guide when making changes. -""" - import sys from collections.abc import Iterable, Mapping, Sequence from types import GenericAlias @@ -72,20 +39,14 @@ if sys.version_info < (3, 11): MAX_CACHE_SIZE: Final[int] class _ResultMixinStr: - """Standard approach to encoding parsed results from str to bytes""" - __slots__ = () def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... class _ResultMixinBytes: - """Standard approach to decoding parsed results from bytes to str""" - __slots__ = () def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): - """Shared methods for the parsed result objects containing a netloc element""" - __slots__ = () @property def username(self) -> AnyStr | None: ... 
@@ -95,11 +56,7 @@ class _NetlocResultMixinBase(Generic[AnyStr]): def hostname(self) -> AnyStr | None: ... @property def port(self) -> int | None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """Represent a PEP 585 generic type - - E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). - """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): __slots__ = () @@ -108,24 +65,10 @@ class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): __slots__ = () class _DefragResultBase(NamedTuple, Generic[AnyStr]): - """ - DefragResult(url, fragment) - - A 2-tuple that contains the url without fragment identifier and the fragment - identifier as a separate argument. - """ - url: AnyStr fragment: AnyStr class _SplitResultBase(NamedTuple, Generic[AnyStr]): - """ - SplitResult(scheme, netloc, path, query, fragment) - - A 5-tuple that contains the different components of a URL. Similar to - ParseResult, but does not split params. - """ - scheme: AnyStr netloc: AnyStr path: AnyStr @@ -133,12 +76,6 @@ class _SplitResultBase(NamedTuple, Generic[AnyStr]): fragment: AnyStr class _ParseResultBase(NamedTuple, Generic[AnyStr]): - """ - ParseResult(scheme, netloc, path, params, query, fragment) - - A 6-tuple that contains components of a parsed URL. - """ - scheme: AnyStr netloc: AnyStr path: AnyStr @@ -174,36 +111,7 @@ def parse_qs( errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", -) -> dict[AnyStr, list[AnyStr]]: - """Parse a query given as a string argument. - - Arguments: - - qs: percent-encoded query string to be parsed - - keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. - - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. - - encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. - - max_num_fields: int. If set, then throws a ValueError if there - are more than n fields read by parse_qsl(). - - separator: str. The symbol to use for separating the query arguments. - Defaults to &. - - Returns a dictionary. - """ - +) -> dict[AnyStr, list[AnyStr]]: ... def parse_qsl( qs: AnyStr | None, keep_blank_values: bool = False, @@ -212,123 +120,21 @@ def parse_qsl( errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", -) -> list[tuple[AnyStr, AnyStr]]: - """Parse a query given as a string argument. - - Arguments: - - qs: percent-encoded query string to be parsed - - keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. - A true value indicates that blanks should be retained as blank - strings. The default false value indicates that blank values - are to be ignored and treated as if they were not included. - - strict_parsing: flag indicating what to do with parsing errors. If - false (the default), errors are silently ignored. If true, - errors raise a ValueError exception. 
- - encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. - - max_num_fields: int. If set, then throws a ValueError - if there are more than n fields read by parse_qsl(). - - separator: str. The symbol to use for separating the query arguments. - Defaults to &. - - Returns a list, as G-d intended. - """ - +) -> list[tuple[AnyStr, AnyStr]]: ... @overload -def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: - """quote('abc def') -> 'abc%20def' - - Each part of a URL, e.g. the path info, the query, etc., has a - different set of reserved characters that must be quoted. The - quote function offers a cautious (not minimal) way to quote a - string for most of these parts. - - RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists - the following (un)reserved characters. - - unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" - reserved = gen-delims / sub-delims - gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" - sub-delims = "!" / "$" / "&" / "'" / "(" / ")" - / "*" / "+" / "," / ";" / "=" - - Each of the reserved characters is reserved in some component of a URL, - but not necessarily in all of them. - - The quote function %-escapes all characters that are neither in the - unreserved chars ("always safe") nor the additional chars set via the - safe arg. - - The default for the safe arg is '/'. The character is reserved, but in - typical usage the quote function is being called on a path where the - existing slash characters are to be preserved. - - Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. - Now, "~" is included in the set of unreserved characters. - - string and safe may be either str or bytes objects. encoding and errors - must not be specified if string is a bytes object. - - The optional encoding and errors parameters specify how to deal with - non-ASCII characters, as accepted by the str.encode method. - By default, encoding='utf-8' (characters are encoded with UTF-8), and - errors='strict' (unsupported characters raise a UnicodeEncodeError). - """ - +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... @overload def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... -def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: - """Like quote(), but accepts a bytes object rather than a str, and does - not perform string-to-bytes encoding. It always returns an ASCII string. - quote_from_bytes(b'abc def?') -> 'abc%20def%3f' - """ - +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... @overload -def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: - """Like quote(), but also replace ' ' with '+', as required for quoting - HTML form values. Plus signs in the original string are escaped unless - they are included in safe. It also does not have safe default to '/'. - """ - +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... @overload def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... -def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: - """Replace %xx escapes by their single-character equivalent. 
The optional - encoding and errors parameters specify how to decode percent-encoded - sequences into Unicode characters, as accepted by the bytes.decode() - method. - By default, percent-encoded sequences are decoded with UTF-8, and invalid - sequences are replaced by a placeholder character. - - unquote('abc%20def') -> 'abc def'. - """ - -def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: - """unquote_to_bytes('abc%20def') -> b'abc def'.""" - -def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: - """Like unquote(), but also replace plus signs by spaces, as required for - unquoting HTML form values. - - unquote_plus('%7e/abc+def') -> '~/abc def' - """ - +def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... @overload -def urldefrag(url: str) -> DefragResult: - """Removes any existing fragment from URL. - - Returns a tuple of the defragmented URL and the fragment. If - the URL contained no fragments, the second element is the - empty string. - """ - +def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... @@ -358,149 +164,38 @@ def urlencode( encoding: str | None = None, errors: str | None = None, quote_via: _QuoteVia = ..., -) -> str: - """Encode a dict or sequence of two-element tuples into a URL query string. - - If any values in the query arg are sequences and doseq is true, each - sequence element is converted to a separate parameter. - - If the query arg is a sequence of two-element tuples, the order of the - parameters in the output will match the order of parameters in the - input. - - The components of a query arg may each be either a string or a bytes type. - - The safe, encoding, and errors parameters are passed down to the function - specified by quote_via (encoding and errors only if a component is a str). - """ - -def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: - """Join a base URL and a possibly relative URL to form an absolute - interpretation of the latter. - """ - +) -> str: ... +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... @overload -def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: - """Parse a URL into 6 components: - :///;?# - - The result is a named 6-tuple with fields corresponding to the - above. It is either a ParseResult or ParseResultBytes object, - depending on the type of the url parameter. - - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. - - The scheme argument provides the default value of the scheme - component when no scheme is found in url. - - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query. - - Note that % escapes are not expanded. - """ - +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... @overload def urlparse( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> ParseResultBytes: ... 
@overload -def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: - """Parse a URL into 5 components: - :///?# - - The result is a named 5-tuple with fields corresponding to the - above. It is either a SplitResult or SplitResultBytes object, - depending on the type of the url parameter. - - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. - - The scheme argument provides the default value of the scheme - component when no scheme is found in url. - - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query. - - Note that % escapes are not expanded. - """ +def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... if sys.version_info >= (3, 11): @overload - def urlsplit(url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True) -> SplitResultBytes: - """Parse a URL into 5 components: - :///?# - - The result is a named 5-tuple with fields corresponding to the - above. It is either a SplitResult or SplitResultBytes object, - depending on the type of the url parameter. - - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. - - The scheme argument provides the default value of the scheme - component when no scheme is found in url. - - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query. - - Note that % escapes are not expanded. - """ + def urlsplit( + url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... else: @overload def urlsplit( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True - ) -> SplitResultBytes: - """Parse a URL into 5 components: - :///?# - - The result is a named 5-tuple with fields corresponding to the - above. It is either a SplitResult or SplitResultBytes object, - depending on the type of the url parameter. - - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. - - The scheme argument provides the default value of the scheme - component when no scheme is found in url. - - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query. - - Note that % escapes are not expanded. - """ + ) -> SplitResultBytes: ... # Requires an iterable of length 6 @overload -def urlunparse(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] - """Put a parsed URL back together again. This may result in a - slightly different, but equivalent URL, if the URL that was parsed - originally had redundant delimiters, e.g. a ? with an empty query - (the draft states that these are equivalent). - """ - +def urlunparse(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] @overload def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... # Requires an iterable of length 5 @overload -def urlunsplit(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] - """Combine the elements of a tuple as returned by urlsplit() into a - complete URL as a string. The data argument can be any five-item iterable. 
- This may result in a slightly different, but equivalent URL, if the URL that - was parsed originally had unnecessary delimiters (for example, a ? with an - empty query; the RFC states that these are equivalent). - """ - +def urlunsplit(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] @overload def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... -def unwrap(url: str) -> str: - """Transform a string like '' into 'scheme://host/path'. - - The string is returned unchanged if it's not a wrapped URL. - """ +def unwrap(url: str) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi index 4d9636102ed5c..876b9d3f165cd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi @@ -1,72 +1,3 @@ -"""An extensible library for opening URLs using a variety of protocols - -The simplest way to use this module is to call the urlopen function, -which accepts a string containing a URL or a Request object (described -below). It opens the URL and returns the results as file-like -object; the returned object has some extra methods described below. - -The OpenerDirector manages a collection of Handler objects that do -all the actual work. Each Handler implements a particular protocol or -option. The OpenerDirector is a composite object that invokes the -Handlers needed to open the requested URL. For example, the -HTTPHandler performs HTTP GET and POST requests and deals with -non-error returns. The HTTPRedirectHandler automatically deals with -HTTP 301, 302, 303, 307, and 308 redirect errors, and the -HTTPDigestAuthHandler deals with digest authentication. - -urlopen(url, data=None) -- Basic usage is the same as original -urllib. pass the url and optionally data to post to an HTTP URL, and -get a file-like object back. One difference is that you can also pass -a Request instance instead of URL. Raises a URLError (subclass of -OSError); for HTTP errors, raises an HTTPError, which can also be -treated as a valid response. - -build_opener -- Function that creates a new OpenerDirector instance. -Will install the default handlers. Accepts one or more Handlers as -arguments, either instances or Handler classes that it will -instantiate. If one of the argument is a subclass of the default -handler, the argument will be installed instead of the default. - -install_opener -- Installs a new opener as the default opener. - -objects of interest: - -OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages -the Handler classes, while dealing with requests and responses. - -Request -- An object that encapsulates the state of a request. The -state can be as simple as the URL. It can also include extra HTTP -headers, e.g. a User-Agent. 
- -BaseHandler -- - -internals: -BaseHandler and parent -_call_chain conventions - -Example usage: - -import urllib.request - -# set up authentication info -authinfo = urllib.request.HTTPBasicAuthHandler() -authinfo.add_password(realm='PDQ Application', - uri='https://mahler:8092/site-updates.py', - user='klem', - passwd='geheim$parole') - -proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) - -# build a new opener that adds authentication and caching FTP handlers -opener = urllib.request.build_opener(proxy_support, authinfo, - urllib.request.CacheFTPHandler) - -# install it -urllib.request.install_opener(opener) - -f = urllib.request.urlopen('https://www.python.org/') -""" - import ssl import sys from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead @@ -124,47 +55,7 @@ _DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | if sys.version_info >= (3, 13): def urlopen( url: str | Request, data: _DataType | None = None, timeout: float | None = ..., *, context: ssl.SSLContext | None = None - ) -> _UrlopenRet: - """Open the URL url, which can be either a string or a Request object. - - *data* must be an object specifying additional data to be sent to - the server, or None if no such data is needed. See Request for - details. - - urllib.request module uses HTTP/1.1 and includes a "Connection:close" - header in its HTTP requests. - - The optional *timeout* parameter specifies a timeout in seconds for - blocking operations like the connection attempt (if not specified, the - global default timeout setting will be used). This only works for HTTP, - HTTPS and FTP connections. - - If *context* is specified, it must be a ssl.SSLContext instance describing - the various SSL options. See HTTPSConnection for more details. - - - This function always returns an object which can work as a - context manager and has the properties url, headers, and status. - See urllib.response.addinfourl for more detail on these properties. - - For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse - object slightly modified. In addition to the three new methods above, the - msg attribute contains the same information as the reason attribute --- - the reason phrase returned by the server --- instead of the response - headers as it is specified in the documentation for HTTPResponse. - - For FTP, file, and data URLs, this function returns a - urllib.response.addinfourl object. - - Note that None may be returned if no handler handles the request (though - the default installed global OpenerDirector uses UnknownHandler to ensure - this never happens). - - In addition, if proxy settings are detected (for example, when a *_proxy - environment variable like http_proxy is set), ProxyHandler is default - installed and makes sure the requests are handled through the proxy. - - """ + ) -> _UrlopenRet: ... else: def urlopen( @@ -176,144 +67,32 @@ else: capath: str | None = None, cadefault: bool = False, context: ssl.SSLContext | None = None, - ) -> _UrlopenRet: - """Open the URL url, which can be either a string or a Request object. - - *data* must be an object specifying additional data to be sent to - the server, or None if no such data is needed. See Request for - details. - - urllib.request module uses HTTP/1.1 and includes a "Connection:close" - header in its HTTP requests. 
- - The optional *timeout* parameter specifies a timeout in seconds for - blocking operations like the connection attempt (if not specified, the - global default timeout setting will be used). This only works for HTTP, - HTTPS and FTP connections. - - If *context* is specified, it must be a ssl.SSLContext instance describing - the various SSL options. See HTTPSConnection for more details. - - The optional *cafile* and *capath* parameters specify a set of trusted CA - certificates for HTTPS requests. cafile should point to a single file - containing a bundle of CA certificates, whereas capath should point to a - directory of hashed certificate files. More information can be found in - ssl.SSLContext.load_verify_locations(). - - The *cadefault* parameter is ignored. - - - This function always returns an object which can work as a - context manager and has the properties url, headers, and status. - See urllib.response.addinfourl for more detail on these properties. - - For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse - object slightly modified. In addition to the three new methods above, the - msg attribute contains the same information as the reason attribute --- - the reason phrase returned by the server --- instead of the response - headers as it is specified in the documentation for HTTPResponse. - - For FTP, file, and data URLs and requests explicitly handled by legacy - URLopener and FancyURLopener classes, this function returns a - urllib.response.addinfourl object. - - Note that None may be returned if no handler handles the request (though - the default installed global OpenerDirector uses UnknownHandler to ensure - this never happens). - - In addition, if proxy settings are detected (for example, when a *_proxy - environment variable like http_proxy is set), ProxyHandler is default - installed and makes sure the requests are handled through the proxy. - - """ + ) -> _UrlopenRet: ... def install_opener(opener: OpenerDirector) -> None: ... -def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: - """Create an opener object from a list of handlers. - - The opener will use several default handlers, including support - for HTTP, FTP and when applicable HTTPS. - - If any of the handlers passed as arguments are subclasses of the - default handlers, the default handlers will not be used. - """ +def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... if sys.version_info >= (3, 14): - def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: - """Convert the given file URL to a local file system path. - - The 'file:' scheme prefix must be omitted unless *require_scheme* - is set to true. - - The URL authority may be resolved with gethostbyname() if - *resolve_host* is set to true. - """ - - def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: - """Convert the given local file system path to a file URL. - - The 'file:' scheme prefix is omitted unless *add_scheme* - is set to true. - """ + def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: ... + def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: ... else: if sys.platform == "win32": from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname else: - def url2pathname(pathname: str) -> str: - """OS-specific conversion from a relative URL of the 'file' scheme - to a file system path; not recommended for general use. 
- """ - - def pathname2url(pathname: str) -> str: - """OS-specific conversion from a file system path to a relative URL - of the 'file' scheme; not recommended for general use. - """ + def url2pathname(pathname: str) -> str: ... + def pathname2url(pathname: str) -> str: ... -def getproxies() -> dict[str, str]: - """Return a dictionary of scheme -> proxy server URL mappings. - - Scan the environment for variables named _proxy; - this seems to be the standard convention. - """ - -def getproxies_environment() -> dict[str, str]: - """Return a dictionary of scheme -> proxy server URL mappings. - - Scan the environment for variables named _proxy; - this seems to be the standard convention. - """ - -def parse_http_list(s: str) -> list[str]: - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. A non-quoted string could have quotes in the - middle. Neither commas nor quotes count if they are escaped. - Only double-quotes count, not single-quotes. - """ - -def parse_keqv_list(l: list[str]) -> dict[str, str]: - """Parse list of key=value strings where keys are not duplicated.""" +def getproxies() -> dict[str, str]: ... +def getproxies_environment() -> dict[str, str]: ... +def parse_http_list(s: str) -> list[str]: ... +def parse_keqv_list(l: list[str]) -> dict[str, str]: ... if sys.platform == "win32" or sys.platform == "darwin": - def proxy_bypass(host: str) -> Any: # undocumented - """Return True, if host should be bypassed. - - Checks proxy settings gathered from the environment, if specified, - or the registry. - - """ + def proxy_bypass(host: str) -> Any: ... # undocumented else: - def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: # undocumented - """Test if proxies should not be used for a particular host. - - Checks the proxy dict for the value of no_proxy, which should - be a list of comma separated DNS suffixes, or '*' for all hosts. - - """ + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented class Request: @property @@ -341,9 +120,7 @@ class Request: unverifiable: bool = False, method: str | None = None, ) -> None: ... - def get_method(self) -> str: - """Return a string indicating the HTTP request method.""" - + def get_method(self) -> str: ... def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... @@ -382,17 +159,7 @@ class HTTPRedirectHandler(BaseHandler): inf_msg: ClassVar[str] # undocumented def redirect_request( self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage, newurl: str - ) -> Request | None: - """Return a Request or None in response to a redirect. - - This is called by the http_error_30x methods when a - redirection response is received. If a redirection should - take place, return a new Request to allow http_error_30x to - perform the redirect. Otherwise, raise HTTPError if no-one - else should try to handle this url. Return None if you can't - but another Handler might. - """ - + ) -> Request | None: ... def http_error_301(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... 
def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -418,14 +185,8 @@ class ProxyHandler(BaseHandler): class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... - def is_suburi(self, base: str, test: str) -> bool: # undocumented - """Check if test is below base in a URI tree - - Both args must be URIs in reduced form. - """ - - def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: # undocumented - """Accept authority or URI and extract only the authority and path.""" + def is_suburi(self, base: str, test: str) -> bool: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: ... # undocumented class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -469,12 +230,6 @@ class AbstractDigestAuthHandler: def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): - """An authentication protocol defined by RFC 2069 - - Digest authentication improves on basic authentication because it - does not transmit passwords in the clear. - """ - auth_header: ClassVar[str] # undocumented def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -503,11 +258,7 @@ class AbstractHTTPHandler(BaseHandler): # undocumented def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... - def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: - """Return an HTTPResponse object for the request, using http_class. - - http_class must implement the HTTPConnection API from http.client. - """ + def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... class HTTPHandler(AbstractHTTPHandler): def http_open(self, req: Request) -> HTTPResponse: ... @@ -536,8 +287,6 @@ class DataHandler(BaseHandler): def data_open(self, req: Request) -> addinfourl: ... class ftpwrapper: # undocumented - """Class used by open_ftp() for cache of open FTP connections.""" - def __init__( self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True ) -> None: ... @@ -564,8 +313,6 @@ class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> NoReturn: ... class HTTPErrorProcessor(BaseHandler): - """Process HTTP error responses.""" - def http_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... def https_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... @@ -574,132 +321,61 @@ def urlretrieve( filename: StrOrBytesPath | None = None, reporthook: Callable[[int, int, int], object] | None = None, data: _DataType = None, -) -> tuple[str, HTTPMessage]: - """ - Retrieve a URL into a temporary location on disk. - - Requires a URL argument. If a filename is passed, it is used as - the temporary file location. The reporthook argument should be - a callable that accepts a block number, a read size, and the - total file size of the URL target. The data argument should be - valid URL encoded data. 
- - If a filename is passed and the URL points to a local resource, - the result is a copy from local file to new file. - - Returns a tuple containing the path to the newly created - data file as well as the resulting HTTPMessage object. - """ - -def urlcleanup() -> None: - """Clean up temporary files from urlretrieve calls.""" +) -> tuple[str, HTTPMessage]: ... +def urlcleanup() -> None: ... if sys.version_info < (3, 14): @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class URLopener: - """Class to open URLs. - This is a class rather than just a subroutine because we may need - more than one set of global protocol-specific options. - Note -- this is a base class for those who don't want the - automatic handling of errors type 302 (relocated) and 401 - (authorization needed). - """ - version: ClassVar[str] def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... - def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: - """Use URLopener().open(file) instead of open(file, 'r').""" - - def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: - """Overridable interface to open unknown URL type.""" - + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... def retrieve( self, url: str, filename: str | None = None, reporthook: Callable[[int, int, int], object] | None = None, data: ReadableBuffer | None = None, - ) -> tuple[str, Message | None]: - """retrieve(url) returns (filename, headers) for a local object - or (tempfilename, headers) for a remote object. - """ - - def addheader(self, *args: tuple[str, str]) -> None: # undocumented - """Add a header to be used by the HTTP interface only - e.g. u.addheader('Accept', 'sound/basic') - """ - + ) -> tuple[str, Message | None]: ... + def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None - ) -> _UrlopenRet: # undocumented - """Handle http errors. - - Derived class can override this, or provide specific handlers - named http_error_DDD where DDD is the 3-digit error code. - """ - + ) -> _UrlopenRet: ... 
# undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> _UrlopenRet: # undocumented - """Default error handler: close the connection and raise OSError.""" - - def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: # undocumented - """Use "data" URL.""" - - def open_file(self, url: str) -> addinfourl: # undocumented - """Use local file or FTP depending on form of URL.""" - - def open_ftp(self, url: str) -> addinfourl: # undocumented - """Use FTP protocol.""" - - def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented - """Use HTTP protocol.""" - - def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented - """Use HTTPS protocol.""" - - def open_local_file(self, url: str) -> addinfourl: # undocumented - """Use local file.""" - - def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: # undocumented - """Overridable interface to open unknown URL type.""" - + ) -> _UrlopenRet: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented + def open_file(self, url: str) -> addinfourl: ... # undocumented + def open_ftp(self, url: str) -> addinfourl: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_local_file(self, url: str) -> addinfourl: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented def __del__(self) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class FancyURLopener(URLopener): - """Derived class with handlers for errors we can handle (perhaps).""" - - def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: - """Override this in a GUI environment!""" - + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented def http_error_301( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 301 -- also relocated (permanently).""" - + ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_302( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 302 -- relocated (temporarily).""" - + ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_303( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 303 -- also relocated (essentially identical to 302).""" - + ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_307( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 307 -- relocated, but turn POST into error.""" + ) -> _UrlopenRet | addinfourl | None: ... 
# undocumented if sys.version_info >= (3, 11): def http_error_308( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 308 -- relocated, but turn POST into error.""" + ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_401( self, @@ -710,11 +386,7 @@ if sys.version_info < (3, 14): headers: HTTPMessage, data: ReadableBuffer | None = None, retry: bool = False, - ) -> _UrlopenRet | None: # undocumented - """Error 401 -- authentication required. - This function supports Basic authentication only. - """ - + ) -> _UrlopenRet | None: ... # undocumented def http_error_407( self, url: str, @@ -724,16 +396,10 @@ if sys.version_info < (3, 14): headers: HTTPMessage, data: ReadableBuffer | None = None, retry: bool = False, - ) -> _UrlopenRet | None: # undocumented - """Error 407 -- proxy authentication required. - This function supports Basic authentication only. - """ - + ) -> _UrlopenRet | None: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> addinfourl: # undocumented - """Default error handling -- don't raise an exception.""" - + ) -> addinfourl: ... # undocumented def redirect_internal( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None ) -> _UrlopenRet | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi index 0012cd166d853..65df9cdff58ff 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi @@ -1,11 +1,3 @@ -"""Response classes used by urllib. - -The base class, addbase, defines a minimal file-like interface, -including read() and readline(). The typical response object is an -addinfourl instance, which defines an info() method that returns -headers and a geturl() method that returns the url. -""" - import tempfile from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable @@ -16,8 +8,6 @@ from typing import IO, Any __all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(tempfile._TemporaryFileWrapper[bytes]): - """Base class for addinfo and addclosehook. Is a good idea for garbage collection.""" - fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... def __exit__( @@ -31,22 +21,16 @@ class addbase(tempfile._TemporaryFileWrapper[bytes]): def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): - """Class to add a close hook to an open file.""" - closehook: Callable[..., object] hookargs: tuple[Any, ...] def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... class addinfo(addbase): - """class to add an info() method to an open file.""" - headers: Message def __init__(self, fp: IO[bytes], headers: Message) -> None: ... def info(self) -> Message: ... 
class addinfourl(addinfo): - """class to add info() and geturl() methods to an open file.""" - url: str code: int | None @property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi index 111de19789d2e..14ceef550dab6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi @@ -1,63 +1,20 @@ -"""robotparser.py - -Copyright (C) 2000 Bastian Kleineidam - -You can choose between two licenses when using this package: -1) GNU GPLv2 -2) PSF license for Python 2.2 - -The robots.txt Exclusion Protocol is implemented as specified in -http://www.robotstxt.org/norobots-rfc.txt -""" - from collections.abc import Iterable from typing import NamedTuple __all__ = ["RobotFileParser"] class RequestRate(NamedTuple): - """RequestRate(requests, seconds)""" - requests: int seconds: int class RobotFileParser: - """This class provides a set of methods to read, parse and answer - questions about a single robots.txt file. - - """ - def __init__(self, url: str = "") -> None: ... - def set_url(self, url: str) -> None: - """Sets the URL referring to a robots.txt file.""" - - def read(self) -> None: - """Reads the robots.txt URL and feeds it to the parser.""" - - def parse(self, lines: Iterable[str]) -> None: - """Parse the input lines from a robots.txt file. - - We allow that a user-agent: line is not preceded by - one or more blank lines. - """ - - def can_fetch(self, useragent: str, url: str) -> bool: - """using the parsed robots.txt decide if useragent can fetch url""" - - def mtime(self) -> int: - """Returns the time the robots.txt file was last fetched. - - This is useful for long-running web spiders that need to - check for new robots.txt files periodically. - - """ - - def modified(self) -> None: - """Sets the time the robots.txt file was last fetched to the - current time. - - """ - + def set_url(self, url: str) -> None: ... + def read(self) -> None: ... + def parse(self, lines: Iterable[str]) -> None: ... + def can_fetch(self, useragent: str, url: str) -> bool: ... + def mtime(self) -> int: ... + def modified(self) -> None: ... def crawl_delay(self, useragent: str) -> str | None: ... def request_rate(self, useragent: str) -> RequestRate | None: ... def site_maps(self) -> list[str] | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi index 158a8affaa4f4..324053e04337c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi @@ -1,9 +1,3 @@ -"""Implementation of the UUencode and UUdecode functions. - -encode(in_file, out_file [,name, mode], *, backtick=False) -decode(in_file [, out_file, mode, quiet]) -""" - from typing import BinaryIO from typing_extensions import TypeAlias @@ -13,8 +7,7 @@ _File: TypeAlias = str | BinaryIO class Error(Exception): ... -def encode(in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False) -> None: - """Uuencode file""" - -def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: - """Decode uuencoded file""" +def encode( + in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi index 86a27772a7950..303fb10eaf537 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi @@ -1,61 +1,3 @@ -"""UUID objects (universally unique identifiers) according to RFC 4122/9562. - -This module provides immutable UUID objects (class UUID) and functions for -generating UUIDs corresponding to a specific UUID version as specified in -RFC 4122/9562, e.g., uuid1() for UUID version 1, uuid3() for UUID version 3, -and so on. - -Note that UUID version 2 is deliberately omitted as it is outside the scope -of the RFC. - -If all you want is a unique ID, you should probably call uuid1() or uuid4(). -Note that uuid1() may compromise privacy since it creates a UUID containing -the computer's network address. uuid4() creates a random UUID. - -Typical usage: - - >>> import uuid - - # make a UUID based on the host ID and current time - >>> uuid.uuid1() # doctest: +SKIP - UUID('a8098c1a-f86e-11da-bd1a-00112444be1e') - - # make a UUID using an MD5 hash of a namespace UUID and a name - >>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') - UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e') - - # make a random UUID - >>> uuid.uuid4() # doctest: +SKIP - UUID('16fd2706-8baf-433b-82eb-8c7fada847da') - - # make a UUID using a SHA-1 hash of a namespace UUID and a name - >>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') - UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d') - - # make a UUID from a string of hex digits (braces and hyphens ignored) - >>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') - - # convert a UUID to a string of hex digits in standard form - >>> str(x) - '00010203-0405-0607-0809-0a0b0c0d0e0f' - - # get the raw 16 bytes of the UUID - >>> x.bytes - b'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f' - - # make a UUID from a 16-byte string - >>> uuid.UUID(bytes=x.bytes) - UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') - - # get the Nil UUID - >>> uuid.NIL - UUID('00000000-0000-0000-0000-000000000000') - - # get the Max UUID - >>> uuid.MAX - UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') -""" - import builtins import sys from enum import Enum @@ -65,74 +7,11 @@ from typing_extensions import LiteralString, TypeAlias _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] class SafeUUID(Enum): - """An enumeration.""" - safe = 0 unsafe = -1 unknown = None class UUID: - """Instances of the UUID class represent UUIDs as specified in RFC 4122. - UUID objects are immutable, hashable, and usable as dictionary keys. - Converting a UUID to a string with str() yields something in the form - '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts - five possible forms: a similar string of hexadecimal digits, or a tuple - of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and - 48-bit values respectively) as an argument named 'fields', or a string - of 16 bytes (with all the integer fields in big-endian order) as an - argument named 'bytes', or a string of 16 bytes (with the first three - fields in little-endian order) as an argument named 'bytes_le', or a - single 128-bit integer as an argument named 'int'. 
- - UUIDs have these read-only attributes: - - bytes the UUID as a 16-byte string (containing the six - integer fields in big-endian byte order) - - bytes_le the UUID as a 16-byte string (with time_low, time_mid, - and time_hi_version in little-endian byte order) - - fields a tuple of the six integer fields of the UUID, - which are also available as six individual attributes - and two derived attributes. Those attributes are not - always relevant to all UUID versions: - - The 'time_*' attributes are only relevant to version 1. - - The 'clock_seq*' and 'node' attributes are only relevant - to versions 1 and 6. - - The 'time' attribute is only relevant to versions 1, 6 - and 7. - - time_low the first 32 bits of the UUID - time_mid the next 16 bits of the UUID - time_hi_version the next 16 bits of the UUID - clock_seq_hi_variant the next 8 bits of the UUID - clock_seq_low the next 8 bits of the UUID - node the last 48 bits of the UUID - - time the 60-bit timestamp for UUIDv1/v6, - or the 48-bit timestamp for UUIDv7 - clock_seq the 14-bit sequence number - - hex the UUID as a 32-character hexadecimal string - - int the UUID as a 128-bit integer - - urn the UUID as a URN as specified in RFC 4122/9562 - - variant the UUID variant (one of the constants RESERVED_NCS, - RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) - - version the UUID version number (1 through 8, meaningful only - when the variant is RFC_4122) - - is_safe An enum indicating whether the UUID has been generated in - a way that is safe for multiprocessing applications, via - uuid_generate_time_safe(3). - """ - __slots__ = ("int", "is_safe", "__weakref__") def __init__( self, @@ -144,36 +23,7 @@ class UUID: version: builtins.int | None = None, *, is_safe: SafeUUID = SafeUUID.unknown, - ) -> None: - """Create a UUID from either a string of 32 hexadecimal digits, - a string of 16 bytes as the 'bytes' argument, a string of 16 bytes - in little-endian order as the 'bytes_le' argument, a tuple of six - integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, - 8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as - the 'fields' argument, or a single 128-bit integer as the 'int' - argument. When a string of hex digits is given, curly braces, - hyphens, and a URN prefix are all optional. For example, these - expressions all yield the same UUID: - - UUID('{12345678-1234-5678-1234-567812345678}') - UUID('12345678123456781234567812345678') - UUID('urn:uuid:12345678-1234-5678-1234-567812345678') - UUID(bytes='\\x12\\x34\\x56\\x78'*4) - UUID(bytes_le='\\x78\\x56\\x34\\x12\\x34\\x12\\x78\\x56' + - '\\x12\\x34\\x56\\x78\\x12\\x34\\x56\\x78') - UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) - UUID(int=0x12345678123456781234567812345678) - - Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must - be given. The 'version' argument is optional; if given, the resulting - UUID will have its variant and version set according to RFC 4122, - overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. - - is_safe is an enum exposed as an attribute on the instance. It - indicates whether the UUID has been generated in a way that is safe - for multiprocessing applications, via uuid_generate_time_safe(3). - """ - + ) -> None: ... @property def is_safe(self) -> SafeUUID: ... @property @@ -216,67 +66,27 @@ class UUID: def __ge__(self, other: UUID) -> bool: ... def __hash__(self) -> builtins.int: ... -def getnode() -> int: - """Get the hardware address as a 48-bit positive integer. 
- - The first time this runs, it may launch a separate program, which could - be quite slow. If all attempts to obtain the hardware address fail, we - choose a random 48-bit number with its eighth bit set to 1 as recommended - in RFC 4122. - """ - -def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: - """Generate a UUID from a host ID, sequence number, and the current time. - If 'node' is not given, getnode() is used to obtain the hardware - address. If 'clock_seq' is given, it is used as the sequence number; - otherwise a random 14-bit sequence number is chosen. - """ +def getnode() -> int: ... +def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... if sys.version_info >= (3, 14): - def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: - """Similar to :func:`uuid1` but where fields are ordered differently - for improved DB locality. - - More precisely, given a 60-bit timestamp value as specified for UUIDv1, - for UUIDv6 the first 48 most significant bits are stored first, followed - by the 4-bit version (same position), followed by the remaining 12 bits - of the original 60-bit timestamp. - """ - - def uuid7() -> UUID: - """Generate a UUID from a Unix timestamp in milliseconds and random bits. - - UUIDv7 objects feature monotonicity within a millisecond. - """ - - def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: - """Generate a UUID from three custom blocks. - - * 'a' is the first 48-bit chunk of the UUID (octets 0-5); - * 'b' is the mid 12-bit chunk (octets 6-7); - * 'c' is the last 62-bit chunk (octets 8-15). - - When a value is not specified, a pseudo-random value is generated. - """ + def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: ... + def uuid7() -> UUID: ... + def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: ... if sys.version_info >= (3, 12): - def uuid3(namespace: UUID, name: str | bytes) -> UUID: - """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... else: - def uuid3(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + def uuid3(namespace: UUID, name: str) -> UUID: ... -def uuid4() -> UUID: - """Generate a random UUID.""" +def uuid4() -> UUID: ... if sys.version_info >= (3, 12): - def uuid5(namespace: UUID, name: str | bytes) -> UUID: - """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" + def uuid5(namespace: UUID, name: str | bytes) -> UUID: ... else: - def uuid5(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" + def uuid5(namespace: UUID, name: str) -> UUID: ... if sys.version_info >= (3, 14): NIL: Final[UUID] @@ -292,5 +102,4 @@ RESERVED_MICROSOFT: Final[LiteralString] RESERVED_FUTURE: Final[LiteralString] if sys.version_info >= (3, 12): - def main() -> None: - """Run the uuid command line interface.""" + def main() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi index aa09f42e017e1..14db88523dba4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi @@ -1,9 +1,3 @@ -""" -Virtual environment (venv) package for Python. Based on PEP 405. - -Copyright (C) 2011-2014 Vinay Sajip. 
-Licensed to the PSF under a contributor agreement. -""" import logging import sys from _typeshed import StrOrBytesPath @@ -16,32 +10,6 @@ logger: logging.Logger CORE_VENV_DEPS: Final[tuple[str, ...]] class EnvBuilder: - """ -This class exists to allow virtual environment creation to be -customized. The constructor parameters determine the builder's -behaviour when called upon to create a virtual environment. - -By default, the builder makes the system (global) site-packages dir -*un*available to the created environment. - -If invoked using the Python -m option, the default is to use copying -on Windows platforms but symlinks elsewhere. If instantiated some -other way, the default is to *not* use symlinks. - -:param system_site_packages: If True, the system (global) site-packages - dir is available to created environments. -:param clear: If True, delete the contents of the environment directory if - it already exists, before environment creation. -:param symlinks: If True, attempt to symlink rather than copy files into - virtual environment. -:param upgrade: If True, upgrade an existing virtual environment. -:param with_pip: If True, ensure pip is installed in the virtual - environment -:param prompt: Alternative terminal prefix for the environment. -:param upgrade_deps: Update the base venv modules to the latest on PyPI -:param scm_ignore_files: Create ignore files for the SCMs specified by the - iterable. -""" system_site_packages: bool clear: bool symlinks: bool @@ -74,100 +42,22 @@ other way, the default is to *not* use symlinks. upgrade_deps: bool = False, ) -> None: ... - def create(self, env_dir: StrOrBytesPath) -> None: - """ -Create a virtual environment in a directory. - -:param env_dir: The target directory to create an environment in. - -""" + def create(self, env_dir: StrOrBytesPath) -> None: ... def clear_directory(self, path: StrOrBytesPath) -> None: ... # undocumented - def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: - """ -Create the directories for the environment. - -Returns a context object which holds paths in the environment, -for use by subsequent logic. -""" - def create_configuration(self, context: SimpleNamespace) -> None: - """ -Create a configuration file indicating where the environment's Python -was copied from, and whether the system site-packages should be made -available in the environment. - -:param context: The information for the environment creation request - being processed. -""" + def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... + def create_configuration(self, context: SimpleNamespace) -> None: ... def symlink_or_copy( self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False - ) -> None: # undocumented - """ -Try symlinking a file, and if that fails, fall back to copying. -(Unused on Windows, because we can't just copy a failed symlink file: we -switch to a different set of files instead.) -""" - def setup_python(self, context: SimpleNamespace) -> None: - """ -Set up a Python executable in the environment. - -:param context: The information for the environment creation request - being processed. -""" - def _setup_pip(self, context: SimpleNamespace) -> None: # undocumented - """Installs or upgrades pip in a virtual environment -""" - def setup_scripts(self, context: SimpleNamespace) -> None: - """ -Set up scripts into the created environment from a directory. - -This method installs the default scripts into the environment -being created. 
You can prevent the default installation by overriding -this method if you really need to, or if you need to specify -a different location for the scripts to install. By default, the -'scripts' directory in the venv package is used as the source of -scripts to install. -""" - def post_setup(self, context: SimpleNamespace) -> None: - """ -Hook for post-setup modification of the venv. Subclasses may install -additional packages or scripts here, add activation shell scripts, etc. - -:param context: The information for the environment creation request - being processed. -""" - def replace_variables(self, text: str, context: SimpleNamespace) -> str: # undocumented - """ -Replace variable placeholders in script text with context-specific -variables. - -Return the text passed in , but with variables replaced. - -:param text: The text in which to replace placeholder variables. -:param context: The information for the environment creation request - being processed. -""" - def install_scripts(self, context: SimpleNamespace, path: str) -> None: - """ -Install scripts into the created environment from a directory. - -:param context: The information for the environment creation request - being processed. -:param path: Absolute pathname of a directory containing script. - Scripts in the 'common' subdirectory of this directory, - and those in the directory named for the platform - being run on, are installed in the created environment. - Placeholder variables are replaced with environment- - specific values. -""" + ) -> None: ... # undocumented + def setup_python(self, context: SimpleNamespace) -> None: ... + def _setup_pip(self, context: SimpleNamespace) -> None: ... # undocumented + def setup_scripts(self, context: SimpleNamespace) -> None: ... + def post_setup(self, context: SimpleNamespace) -> None: ... + def replace_variables(self, text: str, context: SimpleNamespace) -> str: ... # undocumented + def install_scripts(self, context: SimpleNamespace, path: str) -> None: ... def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... if sys.version_info >= (3, 13): - def create_git_ignore_file(self, context: SimpleNamespace) -> None: - """ -Create a .gitignore file in the environment directory. - -The contents of the file cause the entire environment directory to be -ignored by git. -""" + def create_git_ignore_file(self, context: SimpleNamespace) -> None: ... if sys.version_info >= (3, 13): def create( @@ -180,9 +70,7 @@ if sys.version_info >= (3, 13): upgrade_deps: bool = False, *, scm_ignore_files: Iterable[str] = ..., - ) -> None: - """Create a virtual environment in a directory. -""" + ) -> None: ... else: def create( @@ -193,8 +81,6 @@ else: with_pip: bool = False, prompt: str | None = None, upgrade_deps: bool = False, - ) -> None: - """Create a virtual environment in a directory. -""" + ) -> None: ... def main(args: Sequence[str] | None = None) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi index 0a4a3af9922e6..49c98cb07540e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi @@ -1,5 +1,3 @@ -"""Python part of the warnings subsystem.""" - import re import sys from _warnings import warn as warn, warn_explicit as warn_explicit @@ -38,42 +36,17 @@ def showwarning( lineno: int, file: TextIO | None = None, line: str | None = None, -) -> None: - """Hook to write a warning to a file; replace if you like.""" - -def formatwarning(message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None) -> str: - """Function to format a warning the standard way.""" - +) -> None: ... +def formatwarning( + message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None +) -> str: ... def filterwarnings( action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False -) -> None: - """Insert an entry into the list of warnings filters (at the front). - - 'action' -- one of "error", "ignore", "always", "all", "default", "module", - or "once" - 'message' -- a regex that the warning message must match - 'category' -- a class that the warning must be a subclass of - 'module' -- a regex that the module name must match - 'lineno' -- an integer line number, 0 matches all warnings - 'append' -- if true, append to the list of filters - """ - -def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: - """Insert a simple entry into the list of warnings filters (at the front). - - A simple filter matches all modules and messages. - 'action' -- one of "error", "ignore", "always", "all", "default", "module", - or "once" - 'category' -- a class that the warning must be a subclass of - 'lineno' -- an integer line number, 0 matches all warnings - 'append' -- if true, append to the list of filters - """ - -def resetwarnings() -> None: - """Clear the list of warning filters, so that no filters are active.""" +) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... +def resetwarnings() -> None: ... -class _OptionError(Exception): - """Exception used by option processing helpers.""" +class _OptionError(Exception): ... class WarningMessage: message: Warning | str @@ -95,24 +68,6 @@ class WarningMessage: ) -> None: ... class catch_warnings(Generic[_W_co]): - """A context manager that copies and restores the warnings filter upon - exiting the context. - - The 'record' argument specifies whether warnings should be captured by a - custom implementation of warnings.showwarning() and be appended to a list - returned by the context manager. Otherwise None is returned by the context - manager. The objects appended to the list are arguments whose attributes - mirror the arguments to showwarning(). - - The 'module' argument is to specify an alternative module to the module - named 'warnings' and imported under that name. This argument is only useful - when testing the warnings module itself. - - If the 'action' argument is not None, the remaining arguments are passed - to warnings.simplefilter() as if it were called immediately on entering the - context. 
- """ - if sys.version_info >= (3, 11): @overload def __init__( @@ -124,12 +79,7 @@ class catch_warnings(Generic[_W_co]): category: type[Warning] = ..., lineno: int = 0, append: bool = False, - ) -> None: - """Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - """ - + ) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage]], @@ -154,15 +104,7 @@ class catch_warnings(Generic[_W_co]): ) -> None: ... else: @overload - def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: - """Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. - - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. - - """ - + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None @@ -177,48 +119,6 @@ class catch_warnings(Generic[_W_co]): if sys.version_info >= (3, 13): class deprecated: - """Indicate that a class, function or overload is deprecated. - - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. - - Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... - - The warning specified by *category* will be emitted at runtime - on use of deprecated objects. For functions, that happens on calls; - for classes, on instantiation and on creation of subclasses. - If the *category* is ``None``, no warning is emitted at runtime. - The *stacklevel* determines where the - warning is emitted. If it is ``1`` (the default), the warning - is emitted at the direct caller of the deprecated object; if it - is higher, it is emitted further up the stack. - Static type checker behavior is not affected by the *category* - and *stacklevel* arguments. - - The deprecation message passed to the decorator is saved in the - ``__deprecated__`` attribute on the decorated object. - If applied to an overload, the decorator - must be after the ``@overload`` decorator for the attribute to - exist on the overload as returned by ``get_overloads()``. - - See PEP 702 for details. - - """ - message: LiteralString category: type[Warning] | None stacklevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi index 9819c4fa1907b..fd7dbfade884b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi @@ -1,76 +1,3 @@ -"""Stuff to parse WAVE files. - -Usage. - -Reading WAVE files: - f = wave.open(file, 'r') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods read(), seek(), and close(). -When the setpos() and rewind() methods are not used, the seek() -method is not necessary. 
- -This returns an instance of a class with the following public methods: - getnchannels() -- returns number of audio channels (1 for - mono, 2 for stereo) - getsampwidth() -- returns sample width in bytes - getframerate() -- returns sampling frequency - getnframes() -- returns number of audio frames - getcomptype() -- returns compression type ('NONE' for linear samples) - getcompname() -- returns human-readable version of - compression type ('not compressed' linear samples) - getparams() -- returns a namedtuple consisting of all of the - above in the above order - getmarkers() -- returns None (for compatibility with the - old aifc module) - getmark(id) -- raises an error since the mark does not - exist (for compatibility with the old aifc module) - readframes(n) -- returns at most n frames of audio - rewind() -- rewind to the beginning of the audio stream - setpos(pos) -- seek to the specified position - tell() -- return the current position - close() -- close the instance (make it unusable) -The position returned by tell() and the position given to setpos() -are compatible and have nothing to do with the actual position in the -file. -The close() method is called automatically when the class instance -is destroyed. - -Writing WAVE files: - f = wave.open(file, 'w') -where file is either the name of a file or an open file pointer. -The open file pointer must have methods write(), tell(), seek(), and -close(). - -This returns an instance of a class with the following public methods: - setnchannels(n) -- set the number of channels - setsampwidth(n) -- set the sample width - setframerate(n) -- set the frame rate - setnframes(n) -- set the number of frames - setcomptype(type, name) - -- set the compression type and the - human-readable compression type - setparams(tuple) - -- set all parameters at once - tell() -- return current position in output file - writeframesraw(data) - -- write audio frames without patching up the - file header - writeframes(data) - -- write audio frames and patch up the file header - close() -- patch up the file header and close the - output file -You should set the parameters before the first writeframesraw or -writeframes. The total number of frames does not need to be set, -but when it is set to the correct value, the header does not have to -be patched up. -It is best to first set all parameters, perhaps possibly the -compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes(b'') or -close() to patch up the sizes in the header. -The close() method is called automatically when the class instance -is destroyed. -""" - import sys from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload @@ -85,8 +12,6 @@ class Error(Exception): ... 
WAVE_FORMAT_PCM: Final = 0x0001 class _wave_params(NamedTuple): - """_wave_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" - nchannels: int sampwidth: int framerate: int @@ -95,36 +20,6 @@ class _wave_params(NamedTuple): compname: str class Wave_read: - """Variables used in this class: - - These variables are available to the user though appropriate - methods of this class: - _file -- the open file with methods read(), close(), and seek() - set through the __init__() method - _nchannels -- the number of audio channels - available through the getnchannels() method - _nframes -- the number of audio frames - available through the getnframes() method - _sampwidth -- the number of bytes per audio sample - available through the getsampwidth() method - _framerate -- the sampling frequency - available through the getframerate() method - _comptype -- the AIFF-C compression type ('NONE' if AIFF) - available through the getcomptype() method - _compname -- the human-readable AIFF-C compression type - available through the getcomptype() method - _soundpos -- the position in the audio stream - available through the tell() method, set through the - setpos() method - - These variables are used internally only: - _fmt_chunk_read -- 1 iff the FMT chunk has been read - _data_seek_needed -- 1 iff positioned correctly in audio - file for readframes() - _data_chunk -- instantiation of a chunk class for the DATA chunk - _framesize -- size of one frame in the file - """ - def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... @@ -153,31 +48,6 @@ class Wave_read: def readframes(self, nframes: int) -> bytes: ... class Wave_write: - """Variables used in this class: - - These variables are user settable through appropriate methods - of this class: - _file -- the open file with methods write(), close(), tell(), seek() - set through the __init__() method - _comptype -- the AIFF-C compression type ('NONE' in AIFF) - set through the setcomptype() or setparams() method - _compname -- the human-readable AIFF-C compression type - set through the setcomptype() or setparams() method - _nchannels -- the number of audio channels - set through the setnchannels() or setparams() method - _sampwidth -- the number of bytes per audio sample - set through the setsampwidth() or setparams() method - _framerate -- the sampling frequency - set through the setframerate() or setparams() method - _nframes -- the number of audio frames written to the header - set through the setnframes() or setparams() method - - These variables are used internally only: - _datalength -- the size of the audio samples written to the header - _nframeswritten -- the number of frames actually written - _datawritten -- the size of the audio samples actually written - """ - def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi index 4dbec668b1679..76ab86b957a13 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi @@ -1,10 +1,3 @@ -"""Weak reference support for Python. 
- -This module is an implementation of PEP 205: - -https://peps.python.org/pep-0205/ -""" - from _typeshed import SupportsKeysAndGetItem from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy from _weakrefset import WeakSet as WeakSet @@ -63,24 +56,16 @@ class ProxyType(Generic[_T]): # "weakproxy" class ReferenceType(Generic[_T]): # "weakref" __callback__: Callable[[Self], Any] def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ... - def __call__(self) -> _T | None: - """Call self as a function.""" - + def __call__(self) -> _T | None: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... ref = ReferenceType # everything below here is implemented in weakref.py class WeakMethod(ref[_CallableT]): - """ - A custom `weakref.ref` subclass which simulates a weak reference to - a bound method, working around the lifetime problem of bound methods. - """ - __slots__ = ("_func_ref", "_meth_type", "_alive", "__weakref__") def __new__(cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... @@ -89,12 +74,6 @@ class WeakMethod(ref[_CallableT]): def __hash__(self) -> int: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): - """Mapping class that references values weakly. - - Entries in the dictionary will be discarded when no strong - reference to the value exists anymore - """ - @overload def __init__(self) -> None: ... @overload @@ -129,28 +108,8 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] - def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: - """Return an iterator that yields the weak references to the values. - - The references are not guaranteed to be 'live' at the time - they are used, so the result of calling the references needs - to be checked before being used. This can be used to avoid - creating references that will cause the garbage collector to - keep the values around longer than needed. - - """ - - def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: - """Return a list of weak references to the values. - - The references are not guaranteed to be 'live' at the time - they are used, so the result of calling the references needs - to be checked before being used. This can be used to avoid - creating references that will cause the garbage collector to - keep the values around longer than needed. - - """ - + def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... + def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @@ -173,31 +132,12 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): - """Specialized reference that includes a key corresponding to the value. - - This is used in the WeakValueDictionary to avoid having to create - a function object for each key stored in the mapping. A shared - callback object can use the 'key' attribute of a KeyedRef instead - of getting a reference to the key from an enclosing scope. 
- - """ - __slots__ = ("key",) key: _KT def __new__(type, ob: _T, callback: Callable[[Self], Any], key: _KT) -> Self: ... def __init__(self, ob: _T, callback: Callable[[Self], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): - """Mapping class that references keys weakly. - - Entries in the dictionary will be discarded when there is no - longer a strong reference to the key. This can be used to - associate additional data with an object owned by other parts of - an application without adding attributes to those objects. This - can be especially useful with objects that override attribute - accesses. - """ - @overload def __init__(self, dict: None = None) -> None: ... @overload @@ -221,16 +161,7 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] - def keyrefs(self) -> list[ref[_KT]]: - """Return a list of weak references to the keys. - - The references are not guaranteed to be 'live' at the time - they are used, so the result of calling the references needs - to be checked before being used. This can be used to avoid - creating references that will cause the garbage collector to - keep the keys around longer than needed. - - """ + def keyrefs(self) -> list[ref[_KT]]: ... # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences @overload def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... @@ -257,37 +188,11 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class finalize(Generic[_P, _T]): - """Class for finalization of weakrefable objects - - finalize(obj, func, *args, **kwargs) returns a callable finalizer - object which will be called when obj is garbage collected. The - first time the finalizer is called it evaluates func(*arg, **kwargs) - and returns the result. After this the finalizer is dead, and - calling it just returns None. - - When the program exits any remaining finalizers for which the - atexit attribute is true will be run in reverse order of creation. - By default atexit is true. - """ - __slots__ = () def __init__(self, obj: _T, func: Callable[_P, Any], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... - def __call__(self, _: Any = None) -> Any | None: - """If alive then mark as dead and return func(*args, **kwargs); - otherwise return None - """ - - def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: - """If alive then mark as dead and return (obj, func, args, kwargs); - otherwise return None - """ - - def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: - """If alive then return (obj, func, args, kwargs); - otherwise return None - """ - + def __call__(self, _: Any = None) -> Any | None: ... + def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... + def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... @property - def alive(self) -> bool: - """Whether finalizer is alive""" + def alive(self) -> bool: ... 
atexit: bool diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi index f02ee7477a512..56c30f8727277 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi @@ -1,5 +1,3 @@ -"""Interfaces for launching and remotely controlling web browsers.""" - import sys from abc import abstractmethod from collections.abc import Callable, Sequence @@ -12,40 +10,13 @@ class Error(Exception): ... def register( name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False -) -> None: - """Register a browser connector.""" - -def get(using: str | None = None) -> BaseBrowser: - """Return a browser launcher instance appropriate for the environment.""" - -def open(url: str, new: int = 0, autoraise: bool = True) -> bool: - """Display url using the default browser. - - If possible, open url in a location determined by new. - - 0: the same browser window (the default). - - 1: a new browser window. - - 2: a new browser page ("tab"). - If possible, autoraise raises the window (the default) or not. - - If opening the browser succeeds, return True. - If there is a problem, return False. - """ - -def open_new(url: str) -> bool: - """Open url in a new window of the default browser. - - If not possible, then open url in the only browser window. - """ - -def open_new_tab(url: str) -> bool: - """Open url in a new page ("tab") of the default browser. - - If not possible, then the behavior becomes equivalent to open_new(). - """ +) -> None: ... +def get(using: str | None = None) -> BaseBrowser: ... +def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... +def open_new(url: str) -> bool: ... +def open_new_tab(url: str) -> bool: ... class BaseBrowser: - """Parent class for all browsers. Do not use directly.""" - args: list[str] name: str basename: str @@ -56,21 +27,12 @@ class BaseBrowser: def open_new_tab(self, url: str) -> bool: ... class GenericBrowser(BaseBrowser): - """Class for all browsers started with a command - and without remote functionality. - """ - def __init__(self, name: str | Sequence[str]) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... -class BackgroundBrowser(GenericBrowser): - """Class for all browsers which are to be started in the - background. - """ +class BackgroundBrowser(GenericBrowser): ... class UnixBrowser(BaseBrowser): - """Parent class for all Unix browsers with remote functionality.""" - def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool @@ -80,34 +42,20 @@ class UnixBrowser(BaseBrowser): remote_action_newwin: str remote_action_newtab: str -class Mozilla(UnixBrowser): - """Launcher class for Mozilla browsers.""" +class Mozilla(UnixBrowser): ... if sys.version_info < (3, 12): class Galeon(UnixBrowser): - """Launcher class for Galeon/Epiphany browsers.""" - raise_opts: list[str] class Grail(BaseBrowser): def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... -class Chrome(UnixBrowser): - """Launcher class for Google Chrome browser.""" - -class Opera(UnixBrowser): - """Launcher class for Opera browser.""" - -class Elinks(UnixBrowser): - """Launcher class for Elinks browsers.""" +class Chrome(UnixBrowser): ... +class Opera(UnixBrowser): ... +class Elinks(UnixBrowser): ... 
class Konqueror(BaseBrowser): - """Controller for the KDE File Manager (kfm, or Konqueror). - - See the output of ``kfmclient --commands`` - for more information on the Konqueror remote-control interface. - """ - def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "win32": @@ -119,31 +67,11 @@ if sys.platform == "darwin": if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") class MacOSX(BaseBrowser): - """Launcher class for Aqua browsers on Mac OS X - - Optionally specify a browser name on instantiation. Note that this - will not work for Aqua browsers if the user has moved the application - package after installation. - - If no browser is specified, the default browser, as specified in the - Internet System Preferences panel, will be used. - """ - def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... else: class MacOSX(BaseBrowser): - """Launcher class for Aqua browsers on Mac OS X - - Optionally specify a browser name on instantiation. Note that this - will not work for Aqua browsers if the user has moved the application - package after installation. - - If no browser is specified, the default browser, as specified in the - Internet System Preferences panel, will be used. - """ - def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi index ae3009add5e55..53457112ee968 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi @@ -1,42 +1,3 @@ -"""This module provides access to the Windows registry API. - -Functions: - -CloseKey() - Closes a registry key. -ConnectRegistry() - Establishes a connection to a predefined registry handle - on another computer. -CreateKey() - Creates the specified key, or opens it if it already exists. -DeleteKey() - Deletes the specified key. -DeleteValue() - Removes a named value from the specified registry key. -EnumKey() - Enumerates subkeys of the specified open registry key. -EnumValue() - Enumerates values of the specified open registry key. -ExpandEnvironmentStrings() - Expand the env strings in a REG_EXPAND_SZ - string. -FlushKey() - Writes all the attributes of the specified key to the registry. -LoadKey() - Creates a subkey under HKEY_USER or HKEY_LOCAL_MACHINE and - stores registration information from a specified file into that - subkey. -OpenKey() - Opens the specified key. -OpenKeyEx() - Alias of OpenKey(). -QueryValue() - Retrieves the value associated with the unnamed value for a - specified key in the registry. -QueryValueEx() - Retrieves the type and data for a specified value name - associated with an open registry key. -QueryInfoKey() - Returns information about the specified key. -SaveKey() - Saves the specified key, and all its subkeys a file. -SetValue() - Associates a value with a specified key. -SetValueEx() - Stores data in the value field of an open registry key. - -Special objects: - -HKEYType -- type object for HKEY objects -error -- exception raised for Win32 errors - -Integer constants: -Many constants are defined - see the documentation for each function -to see what constants are used, and where. 
-""" - import sys from _typeshed import ReadableBuffer, Unused from types import TracebackType @@ -45,363 +6,29 @@ from typing_extensions import Self, TypeAlias if sys.platform == "win32": _KeyType: TypeAlias = HKEYType | int - def CloseKey(hkey: _KeyType, /) -> None: - """Closes a previously opened registry key. - - hkey - A previously opened key. - - Note that if the key is not closed using this method, it will be - closed when the hkey object is destroyed by Python. - """ - - def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: - """Establishes a connection to the registry on another computer. - - computer_name - The name of the remote computer, of the form r"\\\\computername". If - None, the local computer is used. - key - The predefined key to connect to. - - The return value is the handle of the opened key. - If the function fails, an OSError exception is raised. - """ - - def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: - """Creates or opens the specified key. - - key - An already open key, or one of the predefined HKEY_* constants. - sub_key - The name of the key this method opens or creates. - - If key is one of the predefined keys, sub_key may be None. In that case, - the handle returned is the same key handle passed in to the function. - - If the key already exists, this function opens the existing key. - - The return value is the handle of the opened key. - If the function fails, an OSError exception is raised. - """ - - def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: - """Creates or opens the specified key. - - key - An already open key, or one of the predefined HKEY_* constants. - sub_key - The name of the key this method opens or creates. - reserved - A reserved integer, and must be zero. Default is zero. - access - An integer that specifies an access mask that describes the - desired security access for the key. Default is KEY_WRITE. - - If key is one of the predefined keys, sub_key may be None. In that case, - the handle returned is the same key handle passed in to the function. - - If the key already exists, this function opens the existing key - - The return value is the handle of the opened key. - If the function fails, an OSError exception is raised. - """ - - def DeleteKey(key: _KeyType, sub_key: str, /) -> None: - """Deletes the specified key. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that must be the name of a subkey of the key identified by - the key parameter. This value must not be None, and the key may not - have subkeys. - - This method can not delete keys with subkeys. - - If the function succeeds, the entire key, including all of its values, - is removed. If the function fails, an OSError exception is raised. - """ - - def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: - """Deletes the specified key (intended for 64-bit OS). - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that must be the name of a subkey of the key identified by - the key parameter. This value must not be None, and the key may not - have subkeys. - access - An integer that specifies an access mask that describes the - desired security access for the key. Default is KEY_WOW64_64KEY. - reserved - A reserved integer, and must be zero. Default is zero. 
- - While this function is intended to be used for 64-bit OS, it is also - available on 32-bit systems. - - This method can not delete keys with subkeys. - - If the function succeeds, the entire key, including all of its values, - is removed. If the function fails, an OSError exception is raised. - On unsupported Windows versions, NotImplementedError is raised. - """ - - def DeleteValue(key: _KeyType, value: str, /) -> None: - """Removes a named value from a registry key. - - key - An already open key, or any one of the predefined HKEY_* constants. - value - A string that identifies the value to remove. - """ - - def EnumKey(key: _KeyType, index: int, /) -> str: - """Enumerates subkeys of an open registry key. - - key - An already open key, or any one of the predefined HKEY_* constants. - index - An integer that identifies the index of the key to retrieve. - - The function retrieves the name of one subkey each time it is called. - It is typically called repeatedly until an OSError exception is - raised, indicating no more values are available. - """ - - def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: - """Enumerates values of an open registry key. - - key - An already open key, or any one of the predefined HKEY_* constants. - index - An integer that identifies the index of the value to retrieve. - - The function retrieves the name of one subkey each time it is called. - It is typically called repeatedly, until an OSError exception - is raised, indicating no more values. - - The result is a tuple of 3 items: - value_name - A string that identifies the value. - value_data - An object that holds the value data, and whose type depends - on the underlying registry type. - data_type - An integer that identifies the type of the value data. - """ - - def ExpandEnvironmentStrings(string: str, /) -> str: - """Expand environment vars.""" - - def FlushKey(key: _KeyType, /) -> None: - """Writes all the attributes of a key to the registry. - - key - An already open key, or any one of the predefined HKEY_* constants. - - It is not necessary to call FlushKey to change a key. Registry changes - are flushed to disk by the registry using its lazy flusher. Registry - changes are also flushed to disk at system shutdown. Unlike - CloseKey(), the FlushKey() method returns only when all the data has - been written to the registry. - - An application should only call FlushKey() if it requires absolute - certainty that registry changes are on disk. If you don't know whether - a FlushKey() call is required, it probably isn't. - """ - - def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: - """Insert data into the registry from a file. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub-key to load. - file_name - The name of the file to load registry data from. This file must - have been created with the SaveKey() function. Under the file - allocation table (FAT) file system, the filename may not have an - extension. - - Creates a subkey under the specified key and stores registration - information from a specified file into that subkey. - - A call to LoadKey() fails if the calling process does not have the - SE_RESTORE_PRIVILEGE privilege. - - If key is a handle returned by ConnectRegistry(), then the path - specified in fileName is relative to the remote computer. - - The MSDN docs imply key must be in the HKEY_USER or HKEY_LOCAL_MACHINE - tree. 
- """ - - def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: - """Opens the specified key. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub_key to open. - reserved - A reserved integer that must be zero. Default is zero. - access - An integer that specifies an access mask that describes the desired - security access for the key. Default is KEY_READ. - - The result is a new handle to the specified key. - If the function fails, an OSError exception is raised. - """ - - def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: - """Opens the specified key. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub_key to open. - reserved - A reserved integer that must be zero. Default is zero. - access - An integer that specifies an access mask that describes the desired - security access for the key. Default is KEY_READ. - - The result is a new handle to the specified key. - If the function fails, an OSError exception is raised. - """ - - def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: - """Returns information about a key. - - key - An already open key, or any one of the predefined HKEY_* constants. - - The result is a tuple of 3 items: - An integer that identifies the number of sub keys this key has. - An integer that identifies the number of values this key has. - An integer that identifies when the key was last modified (if available) - as 100's of nanoseconds since Jan 1, 1600. - """ - - def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: - """Retrieves the unnamed value for a key. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that holds the name of the subkey with which the value - is associated. If this parameter is None or empty, the function - retrieves the value set by the SetValue() method for the key - identified by key. - - Values in the registry have name, type, and data components. This method - retrieves the data for a key's first value that has a NULL name. - But since the underlying API call doesn't return the type, you'll - probably be happier using QueryValueEx; this function is just here for - completeness. - """ - - def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: - """Retrieves the type and value of a specified sub-key. - - key - An already open key, or any one of the predefined HKEY_* constants. - name - A string indicating the value to query. - - Behaves mostly like QueryValue(), but also returns the type of the - specified value name associated with the given open registry key. - - The return value is a tuple of the value and the type_id. - """ - - def SaveKey(key: _KeyType, file_name: str, /) -> None: - """Saves the specified key, and all its subkeys to the specified file. - - key - An already open key, or any one of the predefined HKEY_* constants. - file_name - The name of the file to save registry data to. This file cannot - already exist. If this filename includes an extension, it cannot be - used on file allocation table (FAT) file systems by the LoadKey(), - ReplaceKey() or RestoreKey() methods. - - If key represents a key on a remote computer, the path described by - file_name is relative to the remote computer. - - The caller of this method must possess the SeBackupPrivilege - security privilege. 
This function passes NULL for security_attributes - to the API. - """ - - def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: - """Associates a value with a specified key. - - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that names the subkey with which the value is associated. - type - An integer that specifies the type of the data. Currently this must - be REG_SZ, meaning only strings are supported. - value - A string that specifies the new value. - - If the key specified by the sub_key parameter does not exist, the - SetValue function creates it. - - Value lengths are limited by available memory. Long values (more than - 2048 bytes) should be stored as files with the filenames stored in - the configuration registry to help the registry perform efficiently. - - The key identified by the key parameter must have been opened with - KEY_SET_VALUE access. - """ - + def CloseKey(hkey: _KeyType, /) -> None: ... + def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: ... + def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... + def DeleteKey(key: _KeyType, sub_key: str, /) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... + def DeleteValue(key: _KeyType, value: str, /) -> None: ... + def EnumKey(key: _KeyType, index: int, /) -> str: ... + def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: ... + def ExpandEnvironmentStrings(string: str, /) -> str: ... + def FlushKey(key: _KeyType, /) -> None: ... + def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: ... + def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: ... + def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: ... + def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: ... + def SaveKey(key: _KeyType, file_name: str, /) -> None: ... + def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: ... @overload # type=REG_DWORD|REG_QWORD - def SetValueEx(key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, /) -> None: - """Stores data in the value field of an open registry key. - - key - An already open key, or any one of the predefined HKEY_* constants. - value_name - A string containing the name of the value to set, or None. - reserved - Can be anything - zero is always passed to the API. - type - An integer that specifies the type of the data, one of: - REG_BINARY -- Binary data in any form. - REG_DWORD -- A 32-bit number. - REG_DWORD_LITTLE_ENDIAN -- A 32-bit number in little-endian format. Equivalent to REG_DWORD - REG_DWORD_BIG_ENDIAN -- A 32-bit number in big-endian format. - REG_EXPAND_SZ -- A null-terminated string that contains unexpanded - references to environment variables (for example, - %PATH%). - REG_LINK -- A Unicode symbolic link. - REG_MULTI_SZ -- A sequence of null-terminated strings, terminated - by two null characters. Note that Python handles - this termination automatically. - REG_NONE -- No defined value type. - REG_QWORD -- A 64-bit number. - REG_QWORD_LITTLE_ENDIAN -- A 64-bit number in little-endian format. 
Equivalent to REG_QWORD. - REG_RESOURCE_LIST -- A device-driver resource list. - REG_SZ -- A null-terminated string. - value - A string that specifies the new value. - - This method can also set additional value and type information for the - specified key. The key identified by the key parameter must have been - opened with KEY_SET_VALUE access. - - To open the key, use the CreateKeyEx() or OpenKeyEx() methods. - - Value lengths are limited by available memory. Long values (more than - 2048 bytes) should be stored as files with the filenames stored in - the configuration registry to help the registry perform efficiently. - """ - + def SetValueEx( + key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, / + ) -> None: ... @overload # type=REG_SZ|REG_EXPAND_SZ def SetValueEx( key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[1, 2], value: str | None, / @@ -428,38 +55,10 @@ if sys.platform == "win32": value: int | str | list[str] | ReadableBuffer | None, /, ) -> None: ... - def DisableReflectionKey(key: _KeyType, /) -> None: - """Disables registry reflection for 32bit processes running on a 64bit OS. - - key - An already open key, or any one of the predefined HKEY_* constants. - - Will generally raise NotImplementedError if executed on a 32bit OS. - - If the key is not on the reflection list, the function succeeds but has - no effect. Disabling reflection for a key does not affect reflection - of any subkeys. - """ - - def EnableReflectionKey(key: _KeyType, /) -> None: - """Restores registry reflection for the specified disabled key. - - key - An already open key, or any one of the predefined HKEY_* constants. - - Will generally raise NotImplementedError if executed on a 32bit OS. - Restoring reflection for a key does not affect reflection of any - subkeys. - """ - - def QueryReflectionKey(key: _KeyType, /) -> bool: - """Returns the reflection state for the specified key as a bool. - - key - An already open key, or any one of the predefined HKEY_* constants. + def DisableReflectionKey(key: _KeyType, /) -> None: ... + def EnableReflectionKey(key: _KeyType, /) -> None: ... + def QueryReflectionKey(key: _KeyType, /) -> bool: ... - Will generally raise NotImplementedError if executed on a 32bit OS. - """ HKEY_CLASSES_ROOT: Final[int] HKEY_CURRENT_USER: Final[int] HKEY_LOCAL_MACHINE: Final[int] @@ -520,56 +119,14 @@ if sys.platform == "win32": # Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason @final class HKEYType: - """PyHKEY Object - A Python object, representing a win32 registry key. - - This object wraps a Windows HKEY object, automatically closing it when - the object is destroyed. To guarantee cleanup, you can call either - the Close() method on the PyHKEY, or the CloseKey() method. - - All functions which accept a handle object also accept an integer -- - however, use of the handle object is encouraged. - - Functions: - Close() - Closes the underlying handle. - Detach() - Returns the integer Win32 handle, detaching it from the object - - Properties: - handle - The integer Win32 handle. - - Operations: - __bool__ - Handles with an open object return true, otherwise false. - __int__ - Converting a handle to an integer returns the Win32 handle. - rich comparison - Handle objects are compared using the handle value. - """ - - def __bool__(self) -> bool: - """True if self else False""" - - def __int__(self) -> int: - """int(self)""" - + def __bool__(self) -> bool: ... 
+ def __int__(self) -> int: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> bool | None: ... - def Close(self) -> None: - """Closes the underlying Windows handle. - - If the handle is already closed, no error is raised. - """ - - def Detach(self) -> int: - """Detaches the Windows handle from the handle object. - - The result is the value of the handle before it is detached. If the - handle is already detached, this will return zero. - - After calling this function, the handle is effectively invalidated, - but the handle is not closed. You would call this function when you - need the underlying win32 handle to exist beyond the lifetime of the - handle object. - """ - + def Close(self) -> None: ... + def Detach(self) -> int: ... def __hash__(self) -> int: ... @property def handle(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi index cd80dab55aee6..39dfa7b8b9c42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi @@ -1,22 +1,3 @@ -"""PlaySound(sound, flags) - play a sound -SND_FILENAME - sound is a wav file name -SND_ALIAS - sound is a registry sound association name -SND_LOOP - Play the sound repeatedly; must also specify SND_ASYNC -SND_MEMORY - sound is a memory image of a wav file -SND_PURGE - stop all instances of the specified sound -SND_ASYNC - PlaySound returns immediately -SND_NODEFAULT - Do not play a default beep if the sound can not be found -SND_NOSTOP - Do not interrupt any sounds currently playing -SND_NOWAIT - Return immediately if the sound driver is busy -SND_APPLICATION - sound is an application-specific alias in the registry. -SND_SENTRY - Triggers a SoundSentry event when the sound is played. -SND_SYNC - Play the sound synchronously, default behavior. -SND_SYSTEM - Assign sound to the audio session for system notification sounds. - -Beep(frequency, duration) - Make a beep through the PC speaker. -MessageBeep(type) - Call Windows MessageBeep. -""" - import sys from _typeshed import ReadableBuffer from typing import Final, Literal, overload @@ -48,30 +29,10 @@ if sys.platform == "win32": MB_ICONSTOP: Final = 16 MB_ICONWARNING: Final = 48 - def Beep(frequency: int, duration: int) -> None: - """A wrapper around the Windows Beep API. - - frequency - Frequency of the sound in hertz. - Must be in the range 37 through 32,767. - duration - How long the sound should play, in milliseconds. - """ + def Beep(frequency: int, duration: int) -> None: ... # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload - def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: - """A wrapper around the Windows PlaySound API. - - sound - The sound to play; a filename, data, or None. - flags - Flag values, ored together. See module documentation. - """ - + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... @overload def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... - def MessageBeep(type: int = 0) -> None: - """Call Windows MessageBeep(x). - - x defaults to MB_OK. - """ + def MessageBeep(type: int = 0) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi index 59ee48fddec23..e69de29bb2d1d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi @@ -1,25 +0,0 @@ -"""wsgiref -- a WSGI (PEP 3333) Reference Library - -Current Contents: - -* util -- Miscellaneous useful functions and wrappers - -* headers -- Manage response headers - -* handlers -- base classes for server/gateway implementations - -* simple_server -- a simple BaseHTTPServer that supports WSGI - -* validate -- validation wrapper that sits between an app and a server - to detect errors in either - -* types -- collection of WSGI-related types for static type checking - -To-Do: - -* cgi_gateway -- Run WSGI apps under CGI (pending a deployment standard) - -* cgi_wrapper -- Run CGI apps under WSGI - -* router -- a simple middleware component that handles URL traversal -""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi index 82f7f560a74e1..ebead540018e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi @@ -1,5 +1,3 @@ -"""Base classes for server/gateway implementations""" - from _typeshed import OptExcInfo from _typeshed.wsgi import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment from abc import abstractmethod @@ -12,12 +10,9 @@ from .util import FileWrapper __all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISCGIHandler", "read_environ"] def format_date_time(timestamp: float | None) -> str: ... # undocumented -def read_environ() -> dict[str, str]: - """Read environment, fixing HTTP variables""" +def read_environ() -> dict[str, str]: ... class BaseHandler: - """Manage the invocation of a WSGI application""" - wsgi_version: tuple[int, int] # undocumented wsgi_multithread: bool wsgi_multiprocess: bool @@ -36,147 +31,38 @@ class BaseHandler: error_status: str error_headers: list[tuple[str, str]] error_body: bytes - def run(self, application: WSGIApplication) -> None: - """Invoke the application""" - - def setup_environ(self) -> None: - """Set up the environment for one request""" - - def finish_response(self) -> None: - """Send any iterable data, then close self and the iterable - - Subclasses intended for use in asynchronous servers will - want to redefine this method, such that it sets up callbacks - in the event loop to iterate over the data, and to call - 'self.close()' once the response is finished. - """ - - def get_scheme(self) -> str: - """Return the URL scheme being used""" - - def set_content_length(self) -> None: - """Compute Content-Length or switch to chunked encoding if possible""" - - def cleanup_headers(self) -> None: - """Make any necessary header changes or defaults - - Subclasses can extend this to add other defaults. - """ - + def run(self, application: WSGIApplication) -> None: ... + def setup_environ(self) -> None: ... + def finish_response(self) -> None: ... + def get_scheme(self) -> str: ... + def set_content_length(self) -> None: ... + def cleanup_headers(self) -> None: ... 
def start_response( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None - ) -> Callable[[bytes], None]: - """'start_response()' callable as specified by PEP 3333""" - - def send_preamble(self) -> None: - """Transmit version/status/date/server, via self._write()""" - - def write(self, data: bytes) -> None: - """'write()' callable as specified by PEP 3333""" - - def sendfile(self) -> bool: - """Platform-specific file transmission - - Override this method in subclasses to support platform-specific - file transmission. It is only called if the application's - return iterable ('self.result') is an instance of - 'self.wsgi_file_wrapper'. - - This method should return a true value if it was able to actually - transmit the wrapped file-like object using a platform-specific - approach. It should return a false value if normal iteration - should be used instead. An exception can be raised to indicate - that transmission was attempted, but failed. - - NOTE: this method should call 'self.send_headers()' if - 'self.headers_sent' is false and it is going to attempt direct - transmission of the file. - """ - - def finish_content(self) -> None: - """Ensure headers and content have both been sent""" - - def close(self) -> None: - """Close the iterable (if needed) and reset all instance vars - - Subclasses may want to also drop the client connection. - """ - - def send_headers(self) -> None: - """Transmit headers to the client, via self._write()""" - - def result_is_file(self) -> bool: - """True if 'self.result' is an instance of 'self.wsgi_file_wrapper'""" - - def client_is_modern(self) -> bool: - """True if client can accept status and headers""" - - def log_exception(self, exc_info: OptExcInfo) -> None: - """Log the 'exc_info' tuple in the server log - - Subclasses may override to retarget the output or change its format. - """ - - def handle_error(self) -> None: - """Log current error, and send error output to client if possible""" - - def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: - """WSGI mini-app to create error output - - By default, this just uses the 'error_status', 'error_headers', - and 'error_body' attributes to generate an output page. It can - be overridden in a subclass to dynamically generate diagnostics, - choose an appropriate message for the user's preferred language, etc. - - Note, however, that it's not recommended from a security perspective to - spit out diagnostics to any old user; ideally, you should have to do - something special to enable diagnostic output, which is why we don't - include any here! - """ - + ) -> Callable[[bytes], None]: ... + def send_preamble(self) -> None: ... + def write(self, data: bytes) -> None: ... + def sendfile(self) -> bool: ... + def finish_content(self) -> None: ... + def close(self) -> None: ... + def send_headers(self) -> None: ... + def result_is_file(self) -> bool: ... + def client_is_modern(self) -> bool: ... + def log_exception(self, exc_info: OptExcInfo) -> None: ... + def handle_error(self) -> None: ... + def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... @abstractmethod - def _write(self, data: bytes) -> None: - """Override in subclass to buffer data for send to client - - It's okay if this method actually transmits the data; BaseHandler - just separates write and flush operations for greater efficiency - when the underlying system actually has such a distinction. 
- """ - + def _write(self, data: bytes) -> None: ... @abstractmethod - def _flush(self) -> None: - """Override in subclass to force sending of recent '_write()' calls - - It's okay if this method is a no-op (i.e., if '_write()' actually - sends the data. - """ - + def _flush(self) -> None: ... @abstractmethod - def get_stdin(self) -> InputStream: - """Override in subclass to return suitable 'wsgi.input'""" - + def get_stdin(self) -> InputStream: ... @abstractmethod - def get_stderr(self) -> ErrorStream: - """Override in subclass to return suitable 'wsgi.errors'""" - + def get_stderr(self) -> ErrorStream: ... @abstractmethod - def add_cgi_vars(self) -> None: - """Override in subclass to insert CGI variables in 'self.environ'""" + def add_cgi_vars(self) -> None: ... class SimpleHandler(BaseHandler): - """Handler that's just initialized with streams, environment, etc. - - This handler subclass is intended for synchronous HTTP/1.0 origin servers, - and handles sending the entire response output, given the correct inputs. - - Usage:: - - handler = SimpleHandler( - inp,out,err,env, multithread=False, multiprocess=True - ) - handler.run(app) - """ - stdin: InputStream stdout: IO[bytes] stderr: ErrorStream @@ -196,50 +82,10 @@ class SimpleHandler(BaseHandler): def _write(self, data: bytes) -> None: ... def _flush(self) -> None: ... -class BaseCGIHandler(SimpleHandler): - """CGI-like systems using input/output/error streams and environ mapping - - Usage:: - - handler = BaseCGIHandler(inp,out,err,env) - handler.run(app) - - This handler class is useful for gateway protocols like ReadyExec and - FastCGI, that have usable input/output/error streams and an environment - mapping. It's also the base class for CGIHandler, which just uses - sys.stdin, os.environ, and so on. - - The constructor also takes keyword arguments 'multithread' and - 'multiprocess' (defaulting to 'True' and 'False' respectively) to control - the configuration sent to the application. It sets 'origin_server' to - False (to enable CGI-like output), and assumes that 'wsgi.run_once' is - False. - """ +class BaseCGIHandler(SimpleHandler): ... class CGIHandler(BaseCGIHandler): - """CGI-based invocation via sys.stdin/stdout/stderr and os.environ - - Usage:: - - CGIHandler().run(app) - - The difference between this class and BaseCGIHandler is that it always - uses 'wsgi.run_once' of 'True', 'wsgi.multithread' of 'False', and - 'wsgi.multiprocess' of 'True'. It does not take any initialization - parameters, but always uses 'sys.stdin', 'os.environ', and friends. - - If you need to override any of these parameters, use BaseCGIHandler - instead. - """ - def __init__(self) -> None: ... class IISCGIHandler(BaseCGIHandler): - """CGI-based invocation with workaround for IIS path bug - - This handler should be used in preference to CGIHandler when deploying on - Microsoft IIS without having set the config allowPathInfo option (IIS>=7) - or metabase allowPathInfoForScriptMappings (IIS<7). - """ - def __init__(self) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi index 6019972f31b1e..9febad4b32775 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi @@ -1,10 +1,3 @@ -"""Manage HTTP Response Headers - -Much of this module is red-handedly pilfered from email.message in the stdlib, -so portions are Copyright (C) 2001 Python Software Foundation, and were -written by Barry Warsaw. -""" - from re import Pattern from typing import Final, overload from typing_extensions import TypeAlias @@ -14,97 +7,20 @@ _HeaderList: TypeAlias = list[tuple[str, str]] tspecials: Final[Pattern[str]] # undocumented class Headers: - """Manage a collection of HTTP response headers""" - def __init__(self, headers: _HeaderList | None = None) -> None: ... - def __len__(self) -> int: - """Return the total number of headers, including duplicates.""" - - def __setitem__(self, name: str, val: str) -> None: - """Set the value of a header.""" - - def __delitem__(self, name: str) -> None: - """Delete all occurrences of a header, if present. - - Does *not* raise an exception if the header is missing. - """ - - def __getitem__(self, name: str) -> str | None: - """Get the first header value for 'name' - - Return None if the header is missing instead of raising an exception. - - Note that if the header appeared multiple times, the first exactly which - occurrence gets returned is undefined. Use getall() to get all - the values matching a header field name. - """ - - def __contains__(self, name: str) -> bool: - """Return true if the message contains the header.""" - - def get_all(self, name: str) -> list[str]: - """Return a list of all the values for the named field. - - These will be sorted in the order they appeared in the original header - list or were added to this instance, and may contain duplicates. Any - fields deleted and re-inserted are always appended to the header list. - If no fields exist with the given name, returns an empty list. - """ - + def __len__(self) -> int: ... + def __setitem__(self, name: str, val: str) -> None: ... + def __delitem__(self, name: str) -> None: ... + def __getitem__(self, name: str) -> str | None: ... + def __contains__(self, name: str) -> bool: ... + def get_all(self, name: str) -> list[str]: ... @overload - def get(self, name: str, default: str) -> str: - """Get the first header value for 'name', or return 'default'""" - + def get(self, name: str, default: str) -> str: ... @overload def get(self, name: str, default: str | None = None) -> str | None: ... - def keys(self) -> list[str]: - """Return a list of all the header field names. - - These will be sorted in the order they appeared in the original header - list, or were added to this instance, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - - def values(self) -> list[str]: - """Return a list of all header values. - - These will be sorted in the order they appeared in the original header - list, or were added to this instance, and may contain duplicates. - Any fields deleted and re-inserted are always appended to the header - list. - """ - - def items(self) -> _HeaderList: - """Get all the header fields and values. - - These will be sorted in the order they were in the original header - list, or were added to this instance, and may contain duplicates. 
- Any fields deleted and re-inserted are always appended to the header - list. - """ - + def keys(self) -> list[str]: ... + def values(self) -> list[str]: ... + def items(self) -> _HeaderList: ... def __bytes__(self) -> bytes: ... - def setdefault(self, name: str, value: str) -> str: - """Return first matching header value for 'name', or 'value' - - If there is no header named 'name', add a new header with name 'name' - and value 'value'. - """ - - def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: - """Extended header setting. - - _name is the header field to add. keyword arguments can be used to set - additional parameters for the header field, with underscores converted - to dashes. Normally the parameter will be added as key="value" unless - value is None, in which case only the key will be added. - - Example: - - h.add_header('content-disposition', 'attachment', filename='bud.gif') - - Note that unlike the corresponding 'email.message' method, this does - *not* handle '(charset, language, value)' tuples: all values must be - strings or None. - """ + def setdefault(self, name: str, value: str) -> str: ... + def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi index 87bdda21f280c..bdf58719c8289 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi @@ -1,15 +1,3 @@ -"""BaseHTTPServer that implements the Python WSGI protocol (PEP 3333) - -This is both an example of how WSGI can be implemented, and a basis for running -simple web applications on a local machine, such as might be done when testing -or debugging an application. It has not been reviewed for security issues, -however, and we strongly recommend that you use a "real" web server for -production use. - -For example usage, see the 'if __name__=="__main__"' block at the end of the -module. See also the BaseHTTPServer module docs for other API information. -""" - from _typeshed.wsgi import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment from http.server import BaseHTTPRequestHandler, HTTPServer from typing import Final, TypeVar, overload @@ -26,8 +14,6 @@ class ServerHandler(SimpleHandler): # undocumented server_software: str class WSGIServer(HTTPServer): - """BaseHTTPServer that implements the Python WSGI protocol""" - application: WSGIApplication | None base_environ: WSGIEnvironment # only available after call to setup_environ() def setup_environ(self) -> None: ... @@ -44,9 +30,7 @@ def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[by _S = TypeVar("_S", bound=WSGIServer) @overload -def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: - """Create a new WSGI server listening on `host` and `port` for `app`""" - +def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: ... @overload def make_server( host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi index cbb98184ec87b..57276fd05ea84 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi @@ -1,5 +1,3 @@ -"""WSGI-related types for static type checking""" - from _typeshed import OptExcInfo from collections.abc import Callable, Iterable, Iterator from typing import Any, Protocol @@ -8,8 +6,6 @@ from typing_extensions import TypeAlias __all__ = ["StartResponse", "WSGIEnvironment", "WSGIApplication", "InputStream", "ErrorStream", "FileWrapper"] class StartResponse(Protocol): - """start_response() callable as defined in PEP 3333""" - def __call__( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., / ) -> Callable[[bytes], object]: ... @@ -18,16 +14,12 @@ WSGIEnvironment: TypeAlias = dict[str, Any] WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] class InputStream(Protocol): - """WSGI input stream as defined in PEP 3333""" - def read(self, size: int = ..., /) -> bytes: ... def readline(self, size: int = ..., /) -> bytes: ... def readlines(self, hint: int = ..., /) -> list[bytes]: ... def __iter__(self) -> Iterator[bytes]: ... class ErrorStream(Protocol): - """WSGI error stream as defined in PEP 3333""" - def flush(self) -> object: ... def write(self, s: str, /) -> object: ... def writelines(self, seq: list[str], /) -> object: ... @@ -37,6 +29,4 @@ class _Readable(Protocol): # Optional: def close(self) -> object: ... class FileWrapper(Protocol): - """WSGI file wrapper as defined in PEP 3333""" - def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi index a89336a1c1aaa..3966e17b0d28d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi @@ -1,5 +1,3 @@ -"""Miscellaneous WSGI-related Utilities""" - import sys from _typeshed.wsgi import WSGIEnvironment from collections.abc import Callable @@ -10,8 +8,6 @@ if sys.version_info >= (3, 13): __all__ += ["is_hop_by_hop"] class FileWrapper: - """Wrapper to convert file-like objects to iterables""" - filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists @@ -22,41 +18,9 @@ class FileWrapper: def __iter__(self) -> FileWrapper: ... def __next__(self) -> bytes: ... -def guess_scheme(environ: WSGIEnvironment) -> str: - """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'""" - -def application_uri(environ: WSGIEnvironment) -> str: - """Return the application's base URI (no PATH_INFO or QUERY_STRING)""" - -def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: - """Return the full request URI, optionally including the query string""" - -def shift_path_info(environ: WSGIEnvironment) -> str | None: - """Shift a name from PATH_INFO to SCRIPT_NAME, returning it - - If there are no remaining path segments in PATH_INFO, return None. - Note: 'environ' is modified in-place; use a copy if you need to keep - the original PATH_INFO or SCRIPT_NAME. - - Note: when PATH_INFO is just a '/', this returns '' and appends a trailing - '/' to SCRIPT_NAME, even though empty path segments are normally ignored, - and SCRIPT_NAME doesn't normally end in a '/'. 
This is intentional - behavior, to ensure that an application can tell the difference between - '/x' and '/x/' when traversing to objects. - """ - -def setup_testing_defaults(environ: WSGIEnvironment) -> None: - """Update 'environ' with trivial defaults for testing purposes - - This adds various parameters required for WSGI, including HTTP_HOST, - SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO, - and all of the wsgi.* variables. It only supplies default values, - and does not replace any existing settings for these variables. - - This routine is intended to make it easier for unit tests of WSGI - servers and applications to set up dummy environments. It should *not* - be used by actual WSGI servers or applications, since the data is fake! - """ - -def is_hop_by_hop(header_name: str) -> bool: - """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header""" +def guess_scheme(environ: WSGIEnvironment) -> str: ... +def application_uri(environ: WSGIEnvironment) -> str: ... +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... +def shift_path_info(environ: WSGIEnvironment) -> str | None: ... +def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... +def is_hop_by_hop(header_name: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi index b537918d4368f..fa8a6bbb8d039 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi @@ -1,110 +1,3 @@ -""" -Middleware to check for obedience to the WSGI specification. - -Some of the things this checks: - -* Signature of the application and start_response (including that - keyword arguments are not used). - -* Environment checks: - - - Environment is a dictionary (and not a subclass). - - - That all the required keys are in the environment: REQUEST_METHOD, - SERVER_NAME, SERVER_PORT, wsgi.version, wsgi.input, wsgi.errors, - wsgi.multithread, wsgi.multiprocess, wsgi.run_once - - - That HTTP_CONTENT_TYPE and HTTP_CONTENT_LENGTH are not in the - environment (these headers should appear as CONTENT_LENGTH and - CONTENT_TYPE). - - - Warns if QUERY_STRING is missing, as the cgi module acts - unpredictably in that case. - - - That CGI-style variables (that don't contain a .) have - (non-unicode) string values - - - That wsgi.version is a tuple - - - That wsgi.url_scheme is 'http' or 'https' (@@: is this too - restrictive?) - - - Warns if the REQUEST_METHOD is not known (@@: probably too - restrictive). - - - That SCRIPT_NAME and PATH_INFO are empty or start with / - - - That at least one of SCRIPT_NAME or PATH_INFO are set. - - - That CONTENT_LENGTH is a positive integer. - - - That SCRIPT_NAME is not '/' (it should be '', and PATH_INFO should - be '/'). - - - That wsgi.input has the methods read, readline, readlines, and - __iter__ - - - That wsgi.errors has the methods flush, write, writelines - -* The status is a string, contains a space, starts with an integer, - and that integer is in range (> 100). - -* That the headers is a list (not a subclass, not another kind of - sequence). - -* That the items of the headers are tuples of strings. - -* That there is no 'status' header (that is used in CGI, but not in - WSGI). - -* That the headers don't contain newlines or colons, end in _ or -, or - contain characters codes below 037. 
- -* That Content-Type is given if there is content (CGI often has a - default content type, but WSGI does not). - -* That no Content-Type is given when there is no content (@@: is this - too restrictive?) - -* That the exc_info argument to start_response is a tuple or None. - -* That all calls to the writer are with strings, and no other methods - on the writer are accessed. - -* That wsgi.input is used properly: - - - .read() is called with exactly one argument - - - That it returns a string - - - That readline, readlines, and __iter__ return strings - - - That .close() is not called - - - No other methods are provided - -* That wsgi.errors is used properly: - - - .write() and .writelines() is called with a string - - - That .close() is not called, and no other methods are provided. - -* The response iterator: - - - That it is not a string (it should be a list of a single string; a - string will work, but perform horribly). - - - That .__next__() returns a string - - - That the iterator is not iterated over until start_response has - been called (that can signal either a server or application - error). - - - That .close() is called (doesn't raise exception, only prints to - sys.stderr, because we only know it isn't called when the object - is garbage collected). -""" - from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication from collections.abc import Callable, Iterable, Iterator from typing import Any, NoReturn @@ -112,21 +5,9 @@ from typing_extensions import TypeAlias __all__ = ["validator"] -class WSGIWarning(Warning): - """ - Raised in response to WSGI-spec-related warnings - """ +class WSGIWarning(Warning): ... -def validator(application: WSGIApplication) -> WSGIApplication: - """ - When applied between a WSGI server and a WSGI application, this - middleware will check for WSGI compliance on a number of levels. - This middleware does not modify the request or response in any - way, but will raise an AssertionError if anything seems off - (except for a failure to close the application iterator, which - will be printed to stderr -- there's no way to raise an exception - at that point). - """ +def validator(application: WSGIApplication) -> WSGIApplication: ... class InputWrapper: input: InputStream diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi index bf5ccda81c32d..78f3ecec8d78b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi @@ -1,9 +1,3 @@ -"""Implements (a subset of) Sun XDR -- eXternal Data Representation. - -See: RFC 1014 - -""" - from collections.abc import Callable, Sequence from typing import TypeVar @@ -12,24 +6,12 @@ __all__ = ["Error", "Packer", "Unpacker", "ConversionError"] _T = TypeVar("_T") class Error(Exception): - """Exception class for this module. Use: - - except xdrlib.Error as var: - # var has the Error instance for the exception - - Public ivars: - msg -- contains the message - - """ - msg: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: - """Pack various data representations into a buffer.""" - def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... @@ -51,8 +33,6 @@ class Packer: def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... class Unpacker: - """Unpacks various data representations from the given buffer.""" - def __init__(self, data: bytes) -> None: ... 
def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi index a99fc38e3fd9e..7a240965136e5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi @@ -1,21 +1,3 @@ -"""Core XML support for Python. - -This package contains four sub-packages: - -dom -- The W3C Document Object Model. This supports DOM Level 1 + - Namespaces. - -parsers -- Python wrappers for XML parsers (currently only supports Expat). - -sax -- The Simple API for XML, developed by XML-Dev, led by David - Megginson and ported to Python by Lars Marius Garshol. This - supports the SAX 2 API. - -etree -- The ElementTree XML library. This is a subset of the full - ElementTree XML release. - -""" - # At runtime, listing submodules in __all__ without them being imported is # valid, and causes them to be included in a star import. See #6523 __all__ = ["dom", "parsers", "sax", "etree"] # noqa: F822 # pyright: ignore[reportUnsupportedDunderAll] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi index 47f645764b214..7b301373f5288 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi @@ -2,10 +2,6 @@ from typing import Final from xml.dom.minidom import Node class NodeFilter: - """ - This is the DOM2 NodeFilter interface. It contains only constants. - """ - FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi index 2022c8dc422e9..5dbb6c536f617 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi @@ -1,26 +1,8 @@ -"""W3C Document Object Model implementation for Python. - -The Python mapping of the Document Object Model is documented in the -Python Library Reference in the section on the xml.dom package. - -This package contains the following modules: - -minidom -- A simple implementation of the Level 1 DOM with namespace - support added (based on the Level 2 specification) and other - minor Level 2 functionality. - -pulldom -- DOM builder supporting on-demand tree-building for selected - subtrees of the document. - -""" - from typing import Any, Final, Literal from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation class Node: - """Class giving the NodeType constants.""" - __slots__ = () ELEMENT_NODE: Final = 1 ATTRIBUTE_NODE: Final = 2 @@ -54,10 +36,6 @@ INVALID_ACCESS_ERR: Final = 15 VALIDATION_ERR: Final = 16 class DOMException(Exception): - """Abstract base class for DOM exceptions. - Exceptions with specific codes are specializations of this class. - """ - code: int def __init__(self, *args: Any, **kw: Any) -> None: ... def _get_code(self) -> int: ... 
@@ -111,8 +89,6 @@ class ValidationErr(DOMException): code: Literal[16] class UserDataHandler: - """Class giving the operation constants for UserDataHandler.handle().""" - NODE_CLONED: Final = 1 NODE_IMPORTED: Final = 2 NODE_DELETED: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi index 4d6cf5f45ca06..346a4bf63bd4d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi @@ -1,34 +1,8 @@ -"""Registration facilities for DOM. This module should not be used -directly. Instead, the functions getDOMImplementation and -registerDOMImplementation should be imported from xml.dom. -""" - from _typeshed.xml import DOMImplementation from collections.abc import Callable, Iterable well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] -def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: - """registerDOMImplementation(name, factory) - - Register the factory function with the name. The factory function - should return an object which implements the DOMImplementation - interface. The factory function can either return the same object, - or a new one (e.g. if that implementation supports some - customization). - """ - -def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: - """getDOMImplementation(name = None, features = ()) -> DOM implementation. - - Return a suitable DOM implementation. The name is either - well-known, the module name of a DOM implementation, or None. If - it is not None, imports the corresponding module and returns - DOMImplementation object if the import succeeds. - - If name is not given, consider the available implementations to - find one with the required feature set. If no implementation can - be found, raise an ImportError. The features list must be a sequence - of (feature, version) pairs which are passed to hasFeature. - """ +def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... +def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi index e5536237d7187..2b9ac88769700 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -1,9 +1,3 @@ -"""Facility to use the Expat parser to load a minidom instance -from a string or file. - -This avoids all the overhead of SAX and pulldom to gain performance. -""" - from _typeshed import ReadableBuffer, SupportsRead from typing import Any, Final, NoReturn from typing_extensions import TypeAlias @@ -34,33 +28,15 @@ class ElementInfo: def isIdNS(self, euri: str, ename: str, auri: str, aname: str) -> bool: ... class ExpatBuilder: - """Document builder that uses Expat to build a ParsedXML.DOM document - instance. - """ - document: Document # Created in self.reset() curNode: DocumentFragment | Element | Document # Created in self.reset() def __init__(self, options: Options | None = None) -> None: ... 
- def createParser(self) -> XMLParserType: - """Create a new parser object.""" - - def getParser(self) -> XMLParserType: - """Return the parser object, creating a new one if needed.""" - - def reset(self) -> None: - """Free all data structures used during DOM construction.""" - - def install(self, parser: XMLParserType) -> None: - """Install the callbacks needed to build the DOM into the parser.""" - - def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: - """Parse a document from a file object, returning the document - node. - """ - - def parseString(self, string: str | ReadableBuffer) -> Document: - """Parse a document from a string, returning the document node.""" - + def createParser(self) -> XMLParserType: ... + def getParser(self) -> XMLParserType: ... + def reset(self) -> None: ... + def install(self, parser: XMLParserType) -> None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... def start_doctype_decl_handler( self, doctypeName: str, systemId: str | None, publicId: str | None, has_internal_subset: bool ) -> None: ... @@ -91,10 +67,6 @@ class ExpatBuilder: def xml_decl_handler(self, version: str, encoding: str | None, standalone: int) -> None: ... class FilterVisibilityController: - """Wrapper around a DOMBuilderFilter which implements the checks - to make the whatToShow filter attribute work. - """ - __slots__ = ("filter",) filter: DOMBuilderFilter def __init__(self, filter: DOMBuilderFilter) -> None: ... @@ -116,62 +88,29 @@ class Skipper(FilterCrutch): def end_element_handler(self, *args: Any) -> None: ... class FragmentBuilder(ExpatBuilder): - """Builder which constructs document fragments given XML source - text and a context node. - - The context node is expected to provide information about the - namespace declarations which are in scope at the start of the - fragment. - """ - fragment: DocumentFragment | None originalDocument: Document context: Node def __init__(self, context: Node, options: Options | None = None) -> None: ... def reset(self) -> None: ... - def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: # type: ignore[override] - """Parse a document fragment from a file object, returning the - fragment node. - """ - - def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: # type: ignore[override] - """Parse a document fragment from a string, returning the - fragment node. - """ - + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: ... # type: ignore[override] + def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: ... # type: ignore[override] def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... class Namespaces: - """Mix-in class for builders; adds support for namespaces.""" - - def createParser(self) -> XMLParserType: - """Create a new namespace-handling parser.""" - - def install(self, parser: XMLParserType) -> None: - """Insert the namespace-handlers onto the parser.""" - - def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: - """Push this namespace declaration on our storage.""" - + def createParser(self) -> XMLParserType: ... + def install(self, parser: XMLParserType) -> None: ... + def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: ... def start_element_handler(self, name: str, attributes: list[str]) -> None: ... 
def end_element_handler(self, name: str) -> None: ... # only exists if __debug__ -class ExpatBuilderNS(Namespaces, ExpatBuilder): - """Document builder that supports namespaces.""" - -class FragmentBuilderNS(Namespaces, FragmentBuilder): - """Fragment builder that supports namespaces.""" - -class ParseEscape(Exception): - """Exception raised to short-circuit parsing in InternalSubsetExtractor.""" +class ExpatBuilderNS(Namespaces, ExpatBuilder): ... +class FragmentBuilderNS(Namespaces, FragmentBuilder): ... +class ParseEscape(Exception): ... class InternalSubsetExtractor(ExpatBuilder): - """XML processor which can rip out the internal document type subset.""" - subset: str | list[str] | None = None - def getSubset(self) -> str: - """Return the internal subset as a string.""" - + def getSubset(self) -> str: ... def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler( # type: ignore[override] @@ -180,30 +119,8 @@ class InternalSubsetExtractor(ExpatBuilder): def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name: str, attrs: list[str]) -> NoReturn: ... -def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: - """Parse a document, returning the resulting Document node. - - 'file' may be either a file name or an open file object. - """ - -def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: - """Parse a document from a string, returning the resulting - Document node. - """ - -def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: - """Parse a fragment of a document, given the context from which it - was originally extracted. context should be the parent of the - node(s) which are in the fragment. - - 'file' may be either a file name or an open file object. - """ - -def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: - """Parse a fragment of a document from a string, given the context - from which it was originally extracted. context should be the - parent of the node(s) which are in the fragment. - """ - -def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: - """Create a builder based on an Options object.""" +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: ... +def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: ... +def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: ... +def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi index a0dffd6cc5434..6fcaee019dc20 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi @@ -1,9 +1,3 @@ -"""Python version compatibility support for minidom. - -This module contains internal implementation details and -should not be imported; use xml.dom.minidom instead. 
-""" - from collections.abc import Iterable from typing import Any, Literal, TypeVar @@ -16,17 +10,13 @@ StringTypes: tuple[type[str]] class NodeList(list[_T]): __slots__ = () @property - def length(self) -> int: - """The number of nodes in the NodeList.""" - + def length(self) -> int: ... def item(self, index: int) -> _T | None: ... class EmptyNodeList(tuple[()]): __slots__ = () @property - def length(self) -> Literal[0]: - """The number of nodes in the NodeList.""" - + def length(self) -> Literal[0]: ... def item(self, index: int) -> None: ... def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi index 6547439155c1c..e0431417aa3c0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,20 +1,3 @@ -"""Simple implementation of the Level 1 DOM. - -Namespaces and other minor Level 2 features are also supported. - -parse("foo.xml") - -parseString("") - -Todo: -===== - * convenience methods for getting elements and text. - * more testing - * bring some of the writer and linearizer code into conformance with this - interface - * SAX 2 namespaces -""" - import xml.dom from _collections_abc import dict_keys, dict_values from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite @@ -67,12 +50,8 @@ class _UserDataHandler(Protocol): def parse( file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None -) -> Document: - """Parse a file into a DOM by filename or file object.""" - -def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: - """Parse a file into a DOM from a string.""" - +) -> Document: ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: ... @overload def getDOMImplementation(features: None = None) -> DOMImplementation: ... @overload @@ -104,17 +83,11 @@ class Node(xml.dom.Node): attributes: NamedNodeMap | None # non-null only for Element @property - def firstChild(self) -> _NodesThatAreChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _NodesThatAreChildren | None: ... @property - def lastChild(self) -> _NodesThatAreChildren | None: - """Last child node, or None.""" - + def lastChild(self) -> _NodesThatAreChildren | None: ... @property - def localName(self) -> str | None: # non-null only for Element and Attr - """Namespace-local name of this node.""" - + def localName(self) -> str | None: ... # non-null only for Element and Attr def __bool__(self) -> Literal[True]: ... @overload def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... @@ -192,18 +165,14 @@ class DocumentFragment(Node): previousSibling: None childNodes: NodeList[_DocumentFragmentChildren] @property - def firstChild(self) -> _DocumentFragmentChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _DocumentFragmentChildren | None: ... @property - def lastChild(self) -> _DocumentFragmentChildren | None: - """Last child node, or None.""" + def lastChild(self) -> _DocumentFragmentChildren | None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" - + def localName(self) -> None: ... def __init__(self) -> None: ... 
def insertBefore( # type: ignore[override] self, newChild: _DFChildrenPlusFragment, refChild: _DocumentFragmentChildren | None @@ -230,17 +199,15 @@ class Attr(Node): previousSibling: None childNodes: NodeList[_AttrChildren] @property - def firstChild(self) -> _AttrChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _AttrChildren | None: ... @property - def lastChild(self) -> _AttrChildren | None: - """Last child node, or None.""" + def lastChild(self) -> _AttrChildren | None: ... + namespaceURI: str | None prefix: str | None @property - def localName(self) -> str: - """Namespace-local name of this attribute.""" + def localName(self) -> str: ... + name: str value: str specified: bool @@ -251,13 +218,9 @@ class Attr(Node): ) -> None: ... def unlink(self) -> None: ... @property - def isId(self) -> bool: - """True if this attribute is an ID.""" - + def isId(self) -> bool: ... @property - def schemaType(self) -> TypeInfo: - """Schema type for this attribute.""" - + def schemaType(self) -> TypeInfo: ... def insertBefore(self, newChild: _AttrChildrenPlusFragment, refChild: _AttrChildren | None) -> _AttrChildrenPlusFragment: ... # type: ignore[override] def appendChild(self, node: _AttrChildrenPlusFragment) -> _AttrChildrenPlusFragment: ... # type: ignore[override] @overload # type: ignore[override] @@ -269,20 +232,10 @@ class Attr(Node): # In the DOM, this interface isn't specific to Attr, but our implementation is # because that's the only place we use it. class NamedNodeMap: - """The attribute list is a transient interface to the underlying - dictionaries. Mutations here will change the underlying element's - dictionary. - - Ordering is imposed artificially and does not reflect the order of - attributes as found in an input document. - """ - __slots__ = ("_attrs", "_attrsNS", "_ownerElement") def __init__(self, attrs: dict[str, Attr], attrsNS: dict[_NSName, Attr], ownerElement: Element) -> None: ... @property - def length(self) -> int: - """Number of nodes in the NamedNodeMap.""" - + def length(self) -> int: ... def item(self, index: int) -> Node | None: ... def items(self) -> list[tuple[str, str]]: ... def itemsNS(self) -> list[tuple[_NSName, str]]: ... @@ -338,24 +291,22 @@ class Element(Node): nodeName: str # same as Element.tagName nodeValue: None @property - def attributes(self) -> NamedNodeMap: # type: ignore[override] - """NamedNodeMap of attributes on the element.""" + def attributes(self) -> NamedNodeMap: ... # type: ignore[override] + parentNode: Document | Element | DocumentFragment | None nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: NodeList[_ElementChildren] @property - def firstChild(self) -> _ElementChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _ElementChildren | None: ... @property - def lastChild(self) -> _ElementChildren | None: - """Last child node, or None.""" + def lastChild(self) -> _ElementChildren | None: ... + namespaceURI: str | None prefix: str | None @property - def localName(self) -> str: - """Namespace-local name of this element.""" + def localName(self) -> str: ... + schemaType: TypeInfo tagName: str @@ -363,16 +314,7 @@ class Element(Node): self, tagName: str, namespaceURI: str | None = None, prefix: str | None = None, localName: str | None = None ) -> None: ... def unlink(self) -> None: ... 
- def getAttribute(self, attname: str) -> str: - """Returns the value of the specified attribute. - - Returns the value of the element's attribute named attname as - a string. An empty string is returned if the element does not - have such an attribute. Note that an empty string may also be - returned as an explicitly given attribute value, use the - hasAttribute method to distinguish these two cases. - """ - + def getAttribute(self, attname: str) -> str: ... def getAttributeNS(self, namespaceURI: str | None, localName: str) -> str: ... def setAttribute(self, attname: str, value: str) -> None: ... def setAttributeNS(self, namespaceURI: str | None, qualifiedName: str, value: str) -> None: ... @@ -384,29 +326,11 @@ class Element(Node): def removeAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... def removeAttributeNode(self, node: Attr) -> Attr: ... removeAttributeNodeNS = removeAttributeNode - def hasAttribute(self, name: str) -> bool: - """Checks whether the element has an attribute with the specified name. - - Returns True if the element has an attribute with the specified name. - Otherwise, returns False. - """ - + def hasAttribute(self, name: str) -> bool: ... def hasAttributeNS(self, namespaceURI: str | None, localName: str) -> bool: ... - def getElementsByTagName(self, name: str) -> NodeList[Element]: - """Returns all descendant elements with the given tag name. - - Returns the list of all descendant elements (not direct children - only) with the specified tag name. - """ - + def getElementsByTagName(self, name: str) -> NodeList[Element]: ... def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... - def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: - """Write an XML element to a file-like object - - Write the element to the writer object that must provide - a write method (e.g. a file or StringIO object). - """ - + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def hasAttributes(self) -> bool: ... def setIdAttribute(self, name: str) -> None: ... def setIdAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... @@ -424,21 +348,13 @@ class Element(Node): def removeChild(self, oldChild: _ElementChildrenVar) -> _ElementChildrenVar: ... # type: ignore[override] class Childless: - """Mixin that makes childless-ness easy to implement and avoids - the complexity of the Node methods that deal with children. - """ - __slots__ = () attributes: None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" - + def lastChild(self) -> None: ... def appendChild(self, node: _NodesThatAreChildren | DocumentFragment) -> NoReturn: ... def hasChildNodes(self) -> Literal[False]: ... def insertBefore( @@ -460,17 +376,15 @@ class ProcessingInstruction(Childless, Node): previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" + def lastChild(self) -> None: ... 
+ namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... + target: str data: str @@ -487,16 +401,14 @@ class CharacterData(Childless, Node): previousSibling: _NodesThatAreChildren | None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... + ownerDocument: Document | None data: str def __init__(self) -> None: ... @property - def length(self) -> int: - """Length of the string data.""" - + def length(self) -> int: ... def __len__(self) -> int: ... def substringData(self, offset: int, count: int) -> str: ... def appendData(self, arg: str) -> None: ... @@ -516,28 +428,23 @@ class Text(CharacterData): previousSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" + def lastChild(self) -> None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... + data: str def splitText(self, offset: int) -> Self: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def replaceWholeText(self, content: str) -> Self | None: ... @property - def isWhitespaceInElementContent(self) -> bool: - """True iff this text node contains only whitespace and is in element content.""" - + def isWhitespaceInElementContent(self) -> bool: ... @property - def wholeText(self) -> str: - """The text of all logically-adjacent text nodes.""" + def wholeText(self) -> str: ... class Comment(CharacterData): nodeType: ClassVar[Literal[8]] @@ -550,18 +457,14 @@ class Comment(CharacterData): previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" + def lastChild(self) -> None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" - + def localName(self) -> None: ... def __init__(self, data: str) -> None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... @@ -591,12 +494,9 @@ class ReadOnlySequentialNamedNodeMap(Generic[_N]): def setNamedItem(self, node: Node) -> NoReturn: ... def setNamedItemNS(self, node: Node) -> NoReturn: ... @property - def length(self) -> int: - """Number of entries in the NamedNodeMap.""" + def length(self) -> int: ... class Identified: - """Mix-in class that supports the publicId and systemId attributes.""" - __slots__ = ("publicId", "systemId") publicId: str | None systemId: str | None @@ -612,17 +512,15 @@ class DocumentType(Identified, Childless, Node): previousSibling: _DocumentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" + def lastChild(self) -> None: ... 
+ namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... + name: str | None internalSubset: str | None entities: ReadOnlySequentialNamedNodeMap[Entity] @@ -643,17 +541,15 @@ class Entity(Identified, Node): previousSibling: None childNodes: NodeList[_EntityChildren] @property - def firstChild(self) -> _EntityChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _EntityChildren | None: ... @property - def lastChild(self) -> _EntityChildren | None: - """Last child node, or None.""" + def lastChild(self) -> _EntityChildren | None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... + actualEncoding: str | None encoding: str | None version: str | None @@ -676,18 +572,14 @@ class Notation(Identified, Childless, Node): previousSibling: _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: - """The type of the None singleton.""" - + def firstChild(self) -> None: ... @property - def lastChild(self) -> None: - """The type of the None singleton.""" + def lastChild(self) -> None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" - + def localName(self) -> None: ... def __init__(self, name: str, publicId: str | None, systemId: str | None) -> None: ... class DOMImplementation(DOMImplementationLS): @@ -697,30 +589,15 @@ class DOMImplementation(DOMImplementationLS): def getInterface(self, feature: str) -> Self | None: ... class ElementInfo: - """Object that represents content-model information for an element. - - This implementation is not expected to be used in practice; DOM - builders should provide implementations which do the right thing - using information available to it. - - """ - __slots__ = ("tagName",) tagName: str def __init__(self, name: str) -> None: ... def getAttributeType(self, aname: str) -> TypeInfo: ... def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... def isElementContent(self) -> bool: ... - def isEmpty(self) -> bool: - """Returns true iff this element is declared to have an EMPTY - content model. - """ - - def isId(self, aname: str) -> bool: - """Returns true iff the named attribute is a DTD-style ID.""" - - def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: - """Returns true iff the identified attribute is a DTD-style ID.""" + def isEmpty(self) -> bool: ... + def isId(self, aname: str) -> bool: ... + def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: ... _DocumentChildrenPlusFragment = TypeVar("_DocumentChildrenPlusFragment", bound=_DocumentChildren | DocumentFragment) @@ -736,17 +613,15 @@ class Document(Node, DocumentLS): nextSibling: None childNodes: NodeList[_DocumentChildren] @property - def firstChild(self) -> _DocumentChildren | None: - """First child node, or None.""" - + def firstChild(self) -> _DocumentChildren | None: ... @property - def lastChild(self) -> _DocumentChildren | None: - """Last child node, or None.""" + def lastChild(self) -> _DocumentChildren | None: ... + namespaceURI: None prefix: None @property - def localName(self) -> None: - """Namespace-local name of this node.""" + def localName(self) -> None: ... 
+ implementation: DOMImplementation actualEncoding: str | None encoding: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi index 2518ca34ccd53..df7a3ad0eddb0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi @@ -69,8 +69,7 @@ class PullDOM(ContentHandler): def startDocument(self) -> None: ... def buildDocument(self, uri: str | None, tagname: str | None) -> Element: ... def endDocument(self) -> None: ... - def clear(self) -> None: - """clear(): Explicitly release parsing structures""" + def clear(self) -> None: ... class ErrorHandler: def warning(self, exception: BaseException) -> None: ... @@ -91,8 +90,7 @@ class DOMEventStream: def getEvent(self) -> _Event | None: ... def expandNode(self, node: Document) -> None: ... def reset(self) -> None: ... - def clear(self) -> None: - """clear(): Explicitly release parsing objects""" + def clear(self) -> None: ... class SAX2DOM(PullDOM): def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi index 837803c4724ae..f19f7050b08df 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -1,5 +1,3 @@ -"""Implementation of the DOM Level 3 'LS-Load' feature.""" - from _typeshed import SupportsRead from typing import Any, Final, Literal, NoReturn from xml.dom.minidom import Document, Node, _DOMErrorHandler @@ -7,12 +5,6 @@ from xml.dom.minidom import Document, Node, _DOMErrorHandler __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] class Options: - """Features object that has variables set for each DOMBuilder feature. - - The DOMBuilder class uses an instance of this class to pass settings to - the ExpatBuilder class. - """ - namespaces: int namespace_declarations: bool validation: bool @@ -66,10 +58,6 @@ class DOMInputSource: baseURI: str | None class DOMBuilderFilter: - """Element filter which can be used to tailor construction of - a DOM instance. - """ - FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 @@ -79,8 +67,6 @@ class DOMBuilderFilter: def startContainer(self, element: Node) -> Literal[1, 2, 3, 4]: ... class DocumentLS: - """Mixin to create documents that conform to the load/save spec.""" - async_: bool def abort(self) -> NoReturn: ... def load(self, uri: str) -> NoReturn: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi index b56f88621468f..d42db1bc0c571 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,38 +1,3 @@ -"""Lightweight XML support for Python. - -XML is an inherently hierarchical data format, and the most natural way to -represent it is with a tree. This module has two classes for this purpose: - - 1. ElementTree represents the whole XML document as a tree and - - 2. Element represents a single node in this tree. - -Interactions with the whole document (reading and writing to/from files) are -usually done on the ElementTree level. 
Interactions with a single XML element -and its sub-elements are done on the Element level. - -Element is a flexible container object designed to store hierarchical data -structures in memory. It can be described as a cross between a list and a -dictionary. Each Element has a number of properties associated with it: - - 'tag' - a string containing the element's name. - - 'attributes' - a Python dictionary storing the element's attributes. - - 'text' - a string containing the element's text content. - - 'tail' - an optional string containing text after the element's end tag. - - And a number of child elements stored in a Python sequence. - -To create an element instance, use the Element constructor, -or the SubElement factory function. - -You can also use the ElementTree class to wrap an element structure -and convert it to and from XML. - -""" - import sys from _collections_abc import dict_keys from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite @@ -82,9 +47,7 @@ class ParseError(SyntaxError): position: tuple[int, int] # In reality it works based on `.tag` attribute duck typing. -def iselement(element: object) -> TypeGuard[Element]: - """Return True if *element* appears to be an Element.""" - +def iselement(element: object) -> TypeGuard[Element]: ... @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -98,20 +61,7 @@ def canonicalize( qname_aware_attrs: Iterable[str] | None = None, exclude_attrs: Iterable[str] | None = None, exclude_tags: Iterable[str] | None = None, -) -> str: - """Convert XML to its C14N 2.0 serialised form. - - If *out* is provided, it must be a file or file-like object that receives - the serialised canonical XML output (text, not bytes) through its ``.write()`` - method. To write to a file, open it in text mode with encoding "utf-8". - If *out* is not provided, this function returns the output as text string. - - Either *xml_data* (an XML string) or *from_file* (a file path or - file-like object) must be provided as input. - - The configuration options are the same as for the ``C14NWriterTarget``. - """ - +) -> str: ... @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -143,7 +93,7 @@ class Element(Generic[_Tag]): def __init__(self, tag: _Tag, attrib: dict[str, str] = {}, **extra: str) -> None: ... def append(self, subelement: Element[Any], /) -> None: ... def clear(self) -> None: ... - def extend(self, elements: Iterable[Element], /) -> None: ... + def extend(self, elements: Iterable[Element[Any]], /) -> None: ... def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... @overload @@ -154,7 +104,7 @@ class Element(Generic[_Tag]): def get(self, key: str, default: None = None) -> str | None: ... @overload def get(self, key: str, default: _T) -> str | _T: ... - def insert(self, index: int, subelement: Element, /) -> None: ... + def insert(self, index: int, subelement: Element[Any], /) -> None: ... def items(self) -> ItemsView[str, str]: ... def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... @overload @@ -165,74 +115,34 @@ class Element(Generic[_Tag]): def keys(self) -> dict_keys[str, str]: ... # makeelement returns the type of self in Python impl, but not in C impl def makeelement(self, tag: _OtherTag, attrib: dict[str, str], /) -> Element[_OtherTag]: ... - def remove(self, subelement: Element, /) -> None: ... 
+ def remove(self, subelement: Element[Any], /) -> None: ... def set(self, key: str, value: str, /) -> None: ... def __copy__(self) -> Element[_Tag]: ... # returns the type of self in Python impl, but not in C impl def __deepcopy__(self, memo: Any, /) -> Element: ... # Only exists in C impl - def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key].""" - + def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> Element: - """Return self[key].""" - + def __getitem__(self, key: SupportsIndex, /) -> Element: ... @overload def __getitem__(self, key: slice, /) -> list[Element]: ... - def __len__(self) -> int: - """Return len(self).""" + def __len__(self) -> int: ... # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. def __iter__(self) -> Iterator[Element]: ... @overload - def __setitem__(self, key: SupportsIndex, value: Element, /) -> None: - """Set self[key] to value.""" - + def __setitem__(self, key: SupportsIndex, value: Element[Any], /) -> None: ... @overload - def __setitem__(self, key: slice, value: Iterable[Element], /) -> None: ... + def __setitem__(self, key: slice, value: Iterable[Element[Any]], /) -> None: ... # Doesn't really exist in earlier versions, where __len__ is called implicitly instead @deprecated("Testing an element's truth value is deprecated.") - def __bool__(self) -> bool: - """True if self else False""" - -def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... -def Comment(text: str | None = None) -> Element[_ElementCallable]: - """Comment element factory. - - This function creates a special element which the standard serializer - serializes as an XML comment. + def __bool__(self) -> bool: ... - *text* is a string containing the comment string. - - """ - -def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: - """Processing Instruction element factory. - - This function creates a special element which the standard serializer - serializes as an XML comment. - - *target* is a string containing the processing instruction, *text* is a - string containing the processing instruction contents, if any. - - """ +def SubElement(parent: Element[Any], tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... +def Comment(text: str | None = None) -> Element[_ElementCallable]: ... +def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: ... PI = ProcessingInstruction class QName: - """Qualified name wrapper. - - This class can be used to wrap a QName attribute value in order to get - proper namespace handing on output. - - *text_or_uri* is a string containing the QName value either in the form - {uri}local, or if the tag argument is given, the URI part of a QName. - - *tag* is an optional argument which if given, will make the first - argument (text_or_uri) be interpreted as a URI, and this argument (tag) - be interpreted as a local name. - - """ - text: str def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... def __lt__(self, other: QName | str) -> bool: ... @@ -245,95 +155,18 @@ class QName: _Root = TypeVar("_Root", Element, Element | None, default=Element | None) class ElementTree(Generic[_Root]): - """An XML element hierarchy. - - This class also provides support for serialization to and from - standard XML. 
- - *element* is an optional root element node, - *file* is an optional file handle or file name of an XML file whose - contents will be used to initialize the tree with. - - """ - - def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... - def getroot(self) -> _Root: - """Return root element of this tree.""" - - def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: - """Load external XML document into element tree. - - *source* is a file name or file object, *parser* is an optional parser - instance that defaults to XMLParser. - - ParseError is raised if the parser fails to parse the document. - - Returns the root element of the given source document. - - """ - - def iter(self, tag: str | None = None) -> Generator[Element, None, None]: - """Create and return tree iterator for the root element. - - The iterator loops over all elements in this tree, in document order. - - *tag* is a string with the tag name to iterate over - (default is to return all elements). - - """ - - def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: - """Find first matching element by tag name or path. - - Same as getroot().find(path), which is Element.find() - - *path* is a string having either an element tag or an XPath, - *namespaces* is an optional mapping from namespace prefix to full name. - - Return the first matching element, or None if no element was found. - - """ - + def __init__(self, element: Element[Any] | None = None, file: _FileRead | None = None) -> None: ... + def getroot(self) -> _Root: ... + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... @overload - def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: - """Find first matching element by tag name or path. - - Same as getroot().findtext(path), which is Element.findtext() - - *path* is a string having either an element tag or an XPath, - *namespaces* is an optional mapping from namespace prefix to full name. - - Return the first matching element, or None if no element was found. - - """ - + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @overload def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... - def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: - """Find all matching subelements by tag name or path. - - Same as getroot().findall(path), which is Element.findall(). - - *path* is a string having either an element tag or an XPath, - *namespaces* is an optional mapping from namespace prefix to full name. - - Return list containing all matching elements in document order. - - """ - + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... @overload - def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: # type: ignore[overload-overlap] - """Find all matching subelements by tag name or path. - - Same as getroot().iterfind(path), which is element.iterfind() - - *path* is a string having either an element tag or an XPath, - *namespaces* is an optional mapping from namespace prefix to full name. - - Return an iterable yielding all matching elements in document order. 
- - """ - + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] @overload def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( @@ -345,75 +178,25 @@ class ElementTree(Generic[_Root]): method: Literal["xml", "html", "text", "c14n"] | None = None, *, short_empty_elements: bool = True, - ) -> None: - """Write element tree to a file as XML. - - Arguments: - *file_or_filename* -- file name or a file object opened for writing - - *encoding* -- the output encoding (default: US-ASCII) - - *xml_declaration* -- bool indicating if an XML declaration should be - added to the output. If None, an XML declaration - is added if encoding IS NOT either of: - US-ASCII, UTF-8, or Unicode - - *default_namespace* -- sets the default XML namespace (for "xmlns") - - *method* -- either "xml" (default), "html, "text", or "c14n" - - *short_empty_elements* -- controls the formatting of elements - that contain no content. If True (default) - they are emitted as a single self-closed - tag, otherwise they are emitted as a pair - of start/end tags - - """ - + ) -> None: ... def write_c14n(self, file: _FileWriteC14N) -> None: ... HTML_EMPTY: Final[set[str]] -def register_namespace(prefix: str, uri: str) -> None: - """Register a namespace prefix. - - The registry is global, and any existing mapping for either the - given prefix or the namespace URI will be removed. - - *prefix* is the namespace prefix, *uri* is a namespace uri. Tags and - attributes in this namespace will be serialized with prefix if possible. - - ValueError is raised if prefix is reserved or is invalid. - - """ - +def register_namespace(prefix: str, uri: str) -> None: ... @overload def tostring( - element: Element, + element: Element[Any], encoding: None = None, method: Literal["xml", "html", "text", "c14n"] | None = None, *, xml_declaration: bool | None = None, default_namespace: str | None = None, short_empty_elements: bool = True, -) -> bytes: - """Generate string representation of XML element. - - All subelements are included. If encoding is "unicode", a string - is returned. Otherwise a bytestring is returned. - - *element* is an Element instance, *encoding* is an optional output - encoding defaulting to US-ASCII, *method* is an optional output which can - be one of "xml" (default), "html", "text" or "c14n", *default_namespace* - sets the default XML namespace (for "xmlns"). - - Returns an (optionally) encoded string containing the XML data. - - """ - +) -> bytes: ... @overload def tostring( - element: Element, + element: Element[Any], encoding: Literal["unicode"], method: Literal["xml", "html", "text", "c14n"] | None = None, *, @@ -423,7 +206,7 @@ def tostring( ) -> str: ... @overload def tostring( - element: Element, + element: Element[Any], encoding: str, method: Literal["xml", "html", "text", "c14n"] | None = None, *, @@ -433,7 +216,7 @@ def tostring( ) -> Any: ... @overload def tostringlist( - element: Element, + element: Element[Any], encoding: None = None, method: Literal["xml", "html", "text", "c14n"] | None = None, *, @@ -443,7 +226,7 @@ def tostringlist( ) -> list[bytes]: ... @overload def tostringlist( - element: Element, + element: Element[Any], encoding: Literal["unicode"], method: Literal["xml", "html", "text", "c14n"] | None = None, *, @@ -453,7 +236,7 @@ def tostringlist( ) -> list[str]: ... 
@overload def tostringlist( - element: Element, + element: Element[Any], encoding: str, method: Literal["xml", "html", "text", "c14n"] | None = None, *, @@ -461,42 +244,9 @@ def tostringlist( default_namespace: str | None = None, short_empty_elements: bool = True, ) -> list[Any]: ... -def dump(elem: Element | ElementTree[Any]) -> None: - """Write element tree or element structure to sys.stdout. - - This function should be used for debugging only. - - *elem* is either an ElementTree, or a single Element. The exact output - format is implementation dependent. In this version, it's written as an - ordinary XML file. - - """ - -def indent(tree: Element | ElementTree[Any], space: str = " ", level: int = 0) -> None: - """Indent an XML document by inserting newlines and indentation space - after elements. - - *tree* is the ElementTree or Element to modify. The (root) element - itself will not be changed, but the tail text of all elements in its - subtree will be adapted. - - *space* is the whitespace to insert for each indentation level, two - space characters by default. - - *level* is the initial indentation level. Setting this to a higher - value than 0 can be used for indenting subtrees that are more deeply - nested inside of a document. - """ - -def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: - """Parse XML document into element tree. - - *source* is a filename or file object containing XML data, - *parser* is an optional parser instance defaulting to XMLParser. - - Return an ElementTree instance. - - """ +def dump(elem: Element[Any] | ElementTree[Any]) -> None: ... +def indent(tree: Element[Any] | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... +def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: ... # This class is defined inside the body of iterparse @type_check_only @@ -507,80 +257,24 @@ class _IterParseIterator(Iterator[tuple[str, Element]], Protocol): if sys.version_info >= (3, 11): def __del__(self) -> None: ... -def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: - """Incrementally parse XML document into ElementTree. - - This class also reports what's going on to the user based on the - *events* it is initialized with. The supported events are the strings - "start", "end", "start-ns" and "end-ns" (the "ns" events are used to get - detailed namespace information). If *events* is omitted, only - "end" events are reported. - - *source* is a filename or file object containing XML data, *events* is - a list of events to report back, *parser* is an optional parser instance. - - Returns an iterator providing (event, elem) pairs. - - """ +def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: ... _EventQueue: TypeAlias = tuple[str] | tuple[str, tuple[str, str]] | tuple[str, None] class XMLPullParser(Generic[_E]): def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser[_E] | None = None) -> None: ... - def feed(self, data: str | ReadableBuffer) -> None: - """Feed encoded data to parser.""" - - def close(self) -> None: - """Finish feeding data to parser. - - Unlike XMLParser, does not return the root element. Use - read_events() to consume elements from XMLPullParser. - """ - - def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: - """Return an iterator over currently available (event, elem) pairs. 
- - Events are consumed from the internal event queue as they are - retrieved from the iterator. - """ - + def feed(self, data: str | ReadableBuffer) -> None: ... + def close(self) -> None: ... + def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: ... def flush(self) -> None: ... -def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: - """Parse XML document from string constant. - - This function can be used to embed "XML Literals" in Python code. - - *text* is a string containing XML data, *parser* is an - optional parser instance, defaulting to the standard XMLParser. - - Returns an Element instance. - - """ - -def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: - """Parse XML document from string constant for its IDs. - - *text* is a string containing XML data, *parser* is an - optional parser instance, defaulting to the standard XMLParser. - - Returns an (Element, dict) tuple, in which the - dict maps element id:s to elements. - - """ +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: - """Parse XML document from sequence of string fragments. - - *sequence* is a list of other sequence, *parser* is an optional parser - instance, defaulting to the standard XMLParser. - - Returns an Element instance. - - """ +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -619,28 +313,6 @@ class TreeBuilder: def pi(self, target: str, text: str | None = None, /) -> Element[Any]: ... class C14NWriterTarget: - """ - Canonicalization writer target for the XMLParser. - - Serialises parse events to XML C14N 2.0. - - The *write* function is used for writing out the resulting data stream - as text (not bytes). To write to a file, open it in text mode with encoding - "utf-8" and pass its ``.write`` method. - - Configuration options: - - - *with_comments*: set to true to include comments - - *strip_text*: set to true to strip whitespace before and after text content - - *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}" - - *qname_aware_tags*: a set of qname aware tag names in which prefixes - should be replaced in text content - - *qname_aware_attrs*: a set of qname aware attribute names in which prefixes - should be replaced in text content - - *exclude_attrs*: a set of attribute names that should not be serialised - - *exclude_tags*: a set of tag names that should not be serialised - """ - def __init__( self, write: Callable[[str], object], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi index 82eee29371d62..cebdb6a30014b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi @@ -1,10 +1 @@ -"""Python interfaces to XML parsers. - -This package contains one module: - -expat -- Python wrapper for James Clark's Expat parser, with namespace - support. 
- -""" - from xml.parsers import expat as expat diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi index c4ec99a4b18dc..d9b7ea5369998 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi @@ -1,5 +1,3 @@ -"""Interface to the Expat non-validating XML parser.""" - from pyexpat import * # This is actually implemented in the C module pyexpat, but considers itself to live here. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi index 185a8df99ccd5..e22d769ec3403 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi @@ -1,3 +1 @@ -"""Constants used to describe error conditions.""" - from pyexpat.errors import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi index 23131be71d8f2..d8f44b47c51b0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi @@ -1,3 +1 @@ -"""Constants used to interpret content model information.""" - from pyexpat.model import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi index 48fd8107f293a..679466fa34d2c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,24 +1,3 @@ -"""Simple API for XML (SAX) implementation for Python. - -This module provides an implementation of the SAX 2 interface; -information about the Java version of the interface can be found at -http://www.megginson.com/SAX/. The Python version of the interface is -documented at <...>. - -This package contains the following modules: - -handler -- Base classes and constants which define the SAX 2 API for - the 'client-side' of SAX for Python. - -saxutils -- Implementation of the convenience classes commonly used to - work with SAX. - -xmlreader -- Base classes and constants which define the SAX 2 API for - the parsers used with SAX for Python. - -expatreader -- Driver that allows use of the Expat parser with SAX. -""" - import sys from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable @@ -42,15 +21,7 @@ _Source: TypeAlias = StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[st default_parser_list: Final[list[str]] -def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: - """Creates and returns a SAX parser. - - Creates the first parser it is able to instantiate of the ones - given in the iterable created by chaining parser_list and - default_parser_list. The iterables must contain the names of Python - modules containing both a SAX parser and a create_parser function. - """ - +def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ... def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi index 1803a2abc1bed..e9cc8856a9c8d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi @@ -1,88 +1,19 @@ -"""Different kinds of SAX Exceptions""" - from typing import NoReturn from xml.sax.xmlreader import Locator class SAXException(Exception): - """Encapsulate an XML error or warning. This class can contain - basic error or warning information from either the XML parser or - the application: you can subclass it to provide additional - functionality, or to add localization. Note that although you will - receive a SAXException as the argument to the handlers in the - ErrorHandler interface, you are not actually required to raise - the exception; instead, you can simply read the information in - it. - """ - - def __init__(self, msg: str, exception: Exception | None = None) -> None: - """Creates an exception. The message is required, but the exception - is optional. - """ - - def getMessage(self) -> str: - """Return a message for this exception.""" - - def getException(self) -> Exception | None: - """Return the embedded exception, or None if there was none.""" - - def __getitem__(self, ix: object) -> NoReturn: - """Avoids weird error messages if someone does exception[ix] by - mistake, since Exception has __getitem__ defined. - """ + def __init__(self, msg: str, exception: Exception | None = None) -> None: ... + def getMessage(self) -> str: ... + def getException(self) -> Exception | None: ... + def __getitem__(self, ix: object) -> NoReturn: ... class SAXParseException(SAXException): - """Encapsulate an XML parse error or warning. - - This exception will include information for locating the error in - the original XML document. Note that although the application will - receive a SAXParseException as the argument to the handlers in the - ErrorHandler interface, the application is not actually required - to raise the exception; instead, it can simply read the - information in it and take a different action. - - Since this exception is a subclass of SAXException, it inherits - the ability to wrap another exception. - """ - - def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: - """Creates the exception. The exception parameter is allowed to be None.""" - - def getColumnNumber(self) -> int | None: - """The column number of the end of the text where the exception - occurred. - """ - - def getLineNumber(self) -> int | None: - """The line number of the end of the text where the exception occurred.""" - - def getPublicId(self) -> str | None: - """Get the public identifier of the entity where the exception occurred.""" - - def getSystemId(self) -> str | None: - """Get the system identifier of the entity where the exception occurred.""" - -class SAXNotRecognizedException(SAXException): - """Exception class for an unrecognized identifier. - - An XMLReader will raise this exception when it is confronted with an - unrecognized feature or property. SAX applications and extensions may - use this class for similar purposes. - """ - -class SAXNotSupportedException(SAXException): - """Exception class for an unsupported operation. - - An XMLReader will raise this exception when a service it cannot - perform is requested (specifically setting a state or value). SAX - applications and extensions may use this class for similar - purposes. 
- """ - -class SAXReaderNotAvailable(SAXNotSupportedException): - """Exception class for a missing driver. - - An XMLReader module (driver) should raise this exception when it - is first imported, e.g. when a support module cannot be imported. - It also may be raised during parsing, e.g. if executing an external - program is not permitted. - """ + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... + +class SAXNotRecognizedException(SAXException): ... +class SAXNotSupportedException(SAXException): ... +class SAXReaderNotAvailable(SAXNotSupportedException): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi index 42e85c503ad99..3f9573a25f9aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi @@ -1,8 +1,3 @@ -""" -SAX driver for the pyexpat C module. This driver works with -pyexpat.__version__ == '2.22'. -""" - import sys from _typeshed import ReadableBuffer from collections.abc import Mapping @@ -25,12 +20,6 @@ class _ClosedParser: ErrorLineNumber: int class ExpatLocator(xmlreader.Locator): - """Locator for use with the ExpatParser class. - - This uses a weak reference to the parser object to avoid creating - a circular reference between the parser and the content handler. - """ - def __init__(self, parser: ExpatParser) -> None: ... def getColumnNumber(self) -> int | None: ... def getLineNumber(self) -> int: ... @@ -38,12 +27,8 @@ class ExpatLocator(xmlreader.Locator): def getSystemId(self) -> str | None: ... class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): - """SAX driver for the pyexpat C module.""" - def __init__(self, namespaceHandling: _BoolType = 0, bufsize: int = 65516) -> None: ... - def parse(self, source: xmlreader.InputSource | _Source) -> None: - """Parse an XML document from a URL or an InputSource.""" - + def parse(self, source: xmlreader.InputSource | _Source) -> None: ... def prepareParser(self, source: xmlreader.InputSource) -> None: ... def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... def getFeature(self, name: str) -> _BoolType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi index 97f8c2f4fa0e4..5ecbfa6f1272c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi @@ -1,14 +1,3 @@ -""" -This module contains the core classes of version 2.0 of SAX for Python. -This file provides only default classes with absolutely minimum -functionality, from which drivers and applications can be subclassed. - -Many of these classes are empty and are included only as documentation -of the interfaces. - -$Id$ -""" - import sys from typing import Final, NoReturn, Protocol, type_check_only from xml.sax import xmlreader @@ -22,24 +11,9 @@ class _ErrorHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def warning(self, exception: BaseException) -> None: ... class ErrorHandler: - """Basic interface for SAX error handlers. 
- - If you create an object that implements this interface, then - register the object with your XMLReader, the parser will call the - methods in your object to report all warnings and errors. There - are three levels of errors available: warnings, (possibly) - recoverable errors, and unrecoverable errors. All methods take a - SAXParseException as the only parameter. - """ - - def error(self, exception: BaseException) -> NoReturn: - """Handle a recoverable error.""" - - def fatalError(self, exception: BaseException) -> NoReturn: - """Handle a non-recoverable error.""" - - def warning(self, exception: BaseException) -> None: - """Handle a warning.""" + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... @type_check_only class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -58,172 +32,19 @@ class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def skippedEntity(self, name: str) -> None: ... class ContentHandler: - """Interface for receiving logical document content events. - - This is the main callback interface in SAX, and the one most - important to applications. The order of events in this interface - mirrors the order of the information in the document. - """ - - def setDocumentLocator(self, locator: xmlreader.Locator) -> None: - """Called by the parser to give the application a locator for - locating the origin of document events. - - SAX parsers are strongly encouraged (though not absolutely - required) to supply a locator: if it does so, it must supply - the locator to the application by invoking this method before - invoking any of the other methods in the DocumentHandler - interface. - - The locator allows the application to determine the end - position of any document-related event, even if the parser is - not reporting an error. Typically, the application will use - this information for reporting its own errors (such as - character content that does not match an application's - business rules). The information returned by the locator is - probably not sufficient for use with a search engine. - - Note that the locator will return correct information only - during the invocation of the events in this interface. The - application should not attempt to use it at any other time. - """ - - def startDocument(self) -> None: - """Receive notification of the beginning of a document. - - The SAX parser will invoke this method only once, before any - other methods in this interface or in DTDHandler (except for - setDocumentLocator). - """ - - def endDocument(self) -> None: - """Receive notification of the end of a document. - - The SAX parser will invoke this method only once, and it will - be the last method invoked during the parse. The parser shall - not invoke this method until it has either abandoned parsing - (because of an unrecoverable error) or reached the end of - input. - """ - - def startPrefixMapping(self, prefix: str | None, uri: str) -> None: - """Begin the scope of a prefix-URI Namespace mapping. - - The information from this event is not necessary for normal - Namespace processing: the SAX XML reader will automatically - replace prefixes for element and attribute names when the - http://xml.org/sax/features/namespaces feature is true (the - default). 
- - There are cases, however, when applications need to use - prefixes in character data or in attribute values, where they - cannot safely be expanded automatically; the - start/endPrefixMapping event supplies the information to the - application to expand prefixes in those contexts itself, if - necessary. - - Note that start/endPrefixMapping events are not guaranteed to - be properly nested relative to each-other: all - startPrefixMapping events will occur before the corresponding - startElement event, and all endPrefixMapping events will occur - after the corresponding endElement event, but their order is - not guaranteed. - """ - - def endPrefixMapping(self, prefix: str | None) -> None: - """End the scope of a prefix-URI mapping. - - See startPrefixMapping for details. This event will always - occur after the corresponding endElement event, but the order - of endPrefixMapping events is not otherwise guaranteed. - """ - - def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: - """Signals the start of an element in non-namespace mode. - - The name parameter contains the raw XML 1.0 name of the - element type as a string and the attrs parameter holds an - instance of the Attributes class containing the attributes of - the element. - """ - - def endElement(self, name: str) -> None: - """Signals the end of an element in non-namespace mode. - - The name parameter contains the name of the element type, just - as with the startElement event. - """ - - def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: - """Signals the start of an element in namespace mode. - - The name parameter contains the name of the element type as a - (uri, localname) tuple, the qname parameter the raw XML 1.0 - name used in the source document, and the attrs parameter - holds an instance of the Attributes class containing the - attributes of the element. - - The uri part of the name tuple is None for elements which have - no namespace. - """ - - def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: - """Signals the end of an element in namespace mode. - - The name parameter contains the name of the element type, just - as with the startElementNS event. - """ - - def characters(self, content: str) -> None: - """Receive notification of character data. - - The Parser will call this method to report each chunk of - character data. SAX parsers may return all contiguous - character data in a single chunk, or they may split it into - several chunks; however, all of the characters in any single - event must come from the same external entity so that the - Locator provides useful information. - """ - - def ignorableWhitespace(self, whitespace: str) -> None: - """Receive notification of ignorable whitespace in element content. - - Validating Parsers must use this method to report each chunk - of ignorable whitespace (see the W3C XML 1.0 recommendation, - section 2.10): non-validating parsers may also use this method - if they are capable of parsing and using content models. - - SAX parsers may return all contiguous whitespace in a single - chunk, or they may split it into several chunks; however, all - of the characters in any single event must come from the same - external entity, so that the Locator provides useful - information. - """ - - def processingInstruction(self, target: str, data: str) -> None: - """Receive notification of a processing instruction. 
- - The Parser will invoke this method once for each processing - instruction found: note that processing instructions may occur - before or after the main document element. - - A SAX parser should never report an XML declaration (XML 1.0, - section 2.8) or a text declaration (XML 1.0, section 4.3.1) - using this method. - """ - - def skippedEntity(self, name: str) -> None: - """Receive notification of a skipped entity. - - The Parser will invoke this method once for each entity - skipped. Non-validating processors may skip entities if they - have not seen the declarations (because, for example, the - entity was declared in an external DTD subset). All processors - may skip external entities, depending on the values of the - http://xml.org/sax/features/external-general-entities and the - http://xml.org/sax/features/external-parameter-entities - properties. - """ + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... + def endPrefixMapping(self, prefix: str | None) -> None: ... + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... + def endElement(self, name: str) -> None: ... + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... + def characters(self, content: str) -> None: ... + def ignorableWhitespace(self, whitespace: str) -> None: ... + def processingInstruction(self, target: str, data: str) -> None: ... + def skippedEntity(self, name: str) -> None: ... @type_check_only class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -231,35 +52,15 @@ class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... class DTDHandler: - """Handle DTD events. - - This interface specifies only those DTD events required for basic - parsing (unparsed entities and attributes). - """ - - def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: - """Handle a notation declaration event.""" - - def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: - """Handle an unparsed entity declaration event.""" + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... @type_check_only class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... class EntityResolver: - """Basic interface for resolving entities. If you create an object - implementing this interface, then register the object with your - Parser, the parser will call the method in your object to - resolve all external entities. Note that DefaultHandler implements - this interface with the default behaviour. - """ - - def resolveEntity(self, publicId: str | None, systemId: str) -> str: - """Resolve the system identifier of an entity and return either - the system identifier to read from as a string, or an InputSource - to read from. - """ + def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... 
feature_namespaces: Final = "http://xml.org/sax/features/namespaces" feature_namespace_prefixes: Final = "http://xml.org/sax/features/namespace-prefixes" @@ -278,49 +79,8 @@ all_properties: Final[list[str]] if sys.version_info >= (3, 10): class LexicalHandler: - """Optional SAX2 handler for lexical events. - - This handler is used to obtain lexical information about an XML - document, that is, information about how the document was encoded - (as opposed to what it contains, which is reported to the - ContentHandler), such as comments and CDATA marked section - boundaries. - - To set the LexicalHandler of an XMLReader, use the setProperty - method with the property identifier - 'http://xml.org/sax/properties/lexical-handler'. - """ - - def comment(self, content: str) -> None: - """Reports a comment anywhere in the document (including the - DTD and outside the document element). - - content is a string that holds the contents of the comment. - """ - - def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: - """Report the start of the DTD declarations, if the document - has an associated DTD. - - A startEntity event will be reported before declaration events - from the external DTD subset are reported, and this can be - used to infer from which subset DTD declarations derive. - - name is the name of the document element type, public_id the - public identifier of the DTD (or None if none were supplied) - and system_id the system identifier of the external subset (or - None if none were supplied). - """ - - def endDTD(self) -> None: - """Signals the end of DTD declarations.""" - - def startCDATA(self) -> None: - """Reports the beginning of a CDATA marked section. - - The contents of the CDATA marked section will be reported - through the characters event. - """ - - def endCDATA(self) -> None: - """Reports the end of a CDATA marked section.""" + def comment(self, content: str) -> None: ... + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: ... + def endDTD(self) -> None: ... + def startCDATA(self) -> None: ... + def endCDATA(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi index 9873218936be9..a29588faae2ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi @@ -1,7 +1,3 @@ -"""A library of useful helper classes to the SAX classes, for the -convenience of application and driver writers. -""" - from _typeshed import SupportsWrite from codecs import StreamReaderWriter, StreamWriter from collections.abc import Mapping @@ -9,33 +5,9 @@ from io import RawIOBase, TextIOBase from typing import Literal, NoReturn from xml.sax import _Source, handler, xmlreader -def escape(data: str, entities: Mapping[str, str] = {}) -> str: - """Escape &, <, and > in a string of data. - - You can escape other strings of data by passing a dictionary as - the optional entities parameter. The keys and values must all be - strings; each key will be replaced with its corresponding value. - """ - -def unescape(data: str, entities: Mapping[str, str] = {}) -> str: - """Unescape &, <, and > in a string of data. - - You can unescape other strings of data by passing a dictionary as - the optional entities parameter. The keys and values must all be - strings; each key will be replaced with its corresponding value. 
- """ - -def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: - """Escape and quote an attribute value. - - Escape &, <, and > in a string of data, then quote it for use as - an attribute value. The " character will be escaped as well, if - necessary. - - You can escape other strings of data by passing a dictionary as - the optional entities parameter. The keys and values must all be - strings; each key will be replaced with its corresponding value. - """ +def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... +def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ... +def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: ... class XMLGenerator(handler.ContentHandler): def __init__( @@ -44,9 +16,7 @@ class XMLGenerator(handler.ContentHandler): encoding: str = "iso-8859-1", short_empty_elements: bool = False, ) -> None: ... - def _qname(self, name: tuple[str | None, str]) -> str: - """Builds a qualified name from a (ns_url, localname) pair""" - + def _qname(self, name: tuple[str | None, str]) -> str: ... def startDocument(self) -> None: ... def endDocument(self) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... @@ -60,14 +30,6 @@ class XMLGenerator(handler.ContentHandler): def processingInstruction(self, target: str, data: str) -> None: ... class XMLFilterBase(xmlreader.XMLReader): - """This class is designed to sit between an XMLReader and the - client application's event handlers. By default, it does nothing - but pass requests up to the reader and events on to the handlers - unmodified, but subclasses can override specific methods to modify - the event stream or the configuration requests as they pass - through. - """ - def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... # ErrorHandler methods def error(self, exception: BaseException) -> NoReturn: ... @@ -103,7 +65,4 @@ class XMLFilterBase(xmlreader.XMLReader): def getParent(self) -> xmlreader.XMLReader | None: ... def setParent(self, parent: xmlreader.XMLReader) -> None: ... -def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: - """This function takes an InputSource and an optional base URL and - returns a fully resolved InputSource object ready for reading. - """ +def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi index 27ada179270c9..e7d04ddeadb80 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -1,7 +1,3 @@ -"""An XML Reader is the SAX 2 name for an XML parser. XML Parsers -should be based on this code. -""" - from _typeshed import ReadableBuffer from collections.abc import Mapping from typing import Generic, Literal, TypeVar, overload @@ -10,228 +6,52 @@ from xml.sax import _Source, _SupportsReadClose from xml.sax.handler import _ContentHandlerProtocol, _DTDHandlerProtocol, _EntityResolverProtocol, _ErrorHandlerProtocol class XMLReader: - """Interface for reading an XML document using callbacks. - - XMLReader is the interface that an XML parser's SAX2 driver must - implement. This interface allows an application to set and query - features and properties in the parser, to register event handlers - for document processing, and to initiate a document parse. 
- - All SAX interfaces are assumed to be synchronous: the parse - methods must not return until parsing is complete, and readers - must wait for an event-handler callback to return before reporting - the next event. - """ - - def parse(self, source: InputSource | _Source) -> None: - """Parse an XML document from a system identifier or an InputSource.""" - - def getContentHandler(self) -> _ContentHandlerProtocol: - """Returns the current ContentHandler.""" - - def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: - """Registers a new object to receive document content events.""" - - def getDTDHandler(self) -> _DTDHandlerProtocol: - """Returns the current DTD handler.""" - - def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: - """Register an object to receive basic DTD-related events.""" - - def getEntityResolver(self) -> _EntityResolverProtocol: - """Returns the current EntityResolver.""" - - def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: - """Register an object to resolve external entities.""" - - def getErrorHandler(self) -> _ErrorHandlerProtocol: - """Returns the current ErrorHandler.""" - - def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: - """Register an object to receive error-message events.""" - - def setLocale(self, locale: str) -> None: - """Allow an application to set the locale for errors and warnings. - - SAX parsers are not required to provide localization for errors - and warnings; if they cannot support the requested locale, - however, they must raise a SAX exception. Applications may - request a locale change in the middle of a parse. - """ - - def getFeature(self, name: str) -> Literal[0, 1] | bool: - """Looks up and returns the state of a SAX2 feature.""" - - def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: - """Sets the state of a SAX2 feature.""" - - def getProperty(self, name: str) -> object: - """Looks up and returns the value of a SAX2 property.""" - - def setProperty(self, name: str, value: object) -> None: - """Sets the value of a SAX2 property.""" + def parse(self, source: InputSource | _Source) -> None: ... + def getContentHandler(self) -> _ContentHandlerProtocol: ... + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... + def getDTDHandler(self) -> _DTDHandlerProtocol: ... + def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: ... + def getEntityResolver(self) -> _EntityResolverProtocol: ... + def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: ... + def getErrorHandler(self) -> _ErrorHandlerProtocol: ... + def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: ... + def setLocale(self, locale: str) -> None: ... + def getFeature(self, name: str) -> Literal[0, 1] | bool: ... + def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: ... + def getProperty(self, name: str) -> object: ... + def setProperty(self, name: str, value: object) -> None: ... class IncrementalParser(XMLReader): - """This interface adds three extra methods to the XMLReader - interface that allow XML parsers to support incremental - parsing. Support for this interface is optional, since not all - underlying XML parsers support this functionality. - - When the parser is instantiated it is ready to begin accepting - data from the feed method immediately. 
After parsing has been - finished with a call to close the reset method must be called to - make the parser ready to accept new data, either from feed or - using the parse method. - - Note that these methods must _not_ be called during parsing, that - is, after parse has been called and before it returns. - - By default, the class also implements the parse method of the XMLReader - interface using the feed, close and reset methods of the - IncrementalParser interface as a convenience to SAX 2.0 driver - writers. - """ - def __init__(self, bufsize: int = 65536) -> None: ... def parse(self, source: InputSource | _Source) -> None: ... - def feed(self, data: str | ReadableBuffer) -> None: - """This method gives the raw XML data in the data parameter to - the parser and makes it parse the data, emitting the - corresponding events. It is allowed for XML constructs to be - split across several calls to feed. - - feed may raise SAXException. - """ - - def prepareParser(self, source: InputSource) -> None: - """This method is called by the parse implementation to allow - the SAX 2.0 driver to prepare itself for parsing. - """ - - def close(self) -> None: - """This method is called when the entire XML document has been - passed to the parser through the feed method, to notify the - parser that there are no more data. This allows the parser to - do the final checks on the document and empty the internal - data buffer. - - The parser will not be ready to parse another document until - the reset method has been called. - - close may raise SAXException. - """ - - def reset(self) -> None: - """This method is called after close has been called to reset - the parser so that it is ready to parse new documents. The - results of calling parse or feed after close without calling - reset are undefined. - """ + def feed(self, data: str | ReadableBuffer) -> None: ... + def prepareParser(self, source: InputSource) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... class Locator: - """Interface for associating a SAX event with a document - location. A locator object will return valid results only during - calls to DocumentHandler methods; at any other time, the - results are unpredictable. - """ - - def getColumnNumber(self) -> int | None: - """Return the column number where the current event ends.""" - - def getLineNumber(self) -> int | None: - """Return the line number where the current event ends.""" - - def getPublicId(self) -> str | None: - """Return the public identifier for the current event.""" - - def getSystemId(self) -> str | None: - """Return the system identifier for the current event.""" + def getColumnNumber(self) -> int | None: ... + def getLineNumber(self) -> int | None: ... + def getPublicId(self) -> str | None: ... + def getSystemId(self) -> str | None: ... class InputSource: - """Encapsulation of the information needed by the XMLReader to - read entities. - - This class may include information about the public identifier, - system identifier, byte stream (possibly with character encoding - information) and/or the character stream of an entity. - - Applications will create objects of this class for use in the - XMLReader.parse method and for returning from - EntityResolver.resolveEntity. - - An InputSource belongs to the application, the XMLReader is not - allowed to modify InputSource objects passed to it from the - application, although it may make copies and modify those. - """ - def __init__(self, system_id: str | None = None) -> None: ... 
- def setPublicId(self, public_id: str | None) -> None: - """Sets the public identifier of this InputSource.""" - - def getPublicId(self) -> str | None: - """Returns the public identifier of this InputSource.""" - - def setSystemId(self, system_id: str | None) -> None: - """Sets the system identifier of this InputSource.""" - - def getSystemId(self) -> str | None: - """Returns the system identifier of this InputSource.""" - - def setEncoding(self, encoding: str | None) -> None: - """Sets the character encoding of this InputSource. - - The encoding must be a string acceptable for an XML encoding - declaration (see section 4.3.3 of the XML recommendation). - - The encoding attribute of the InputSource is ignored if the - InputSource also contains a character stream. - """ - - def getEncoding(self) -> str | None: - """Get the character encoding of this InputSource.""" - - def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: - """Set the byte stream (a Python file-like object which does - not perform byte-to-character conversion) for this input - source. - - The SAX parser will ignore this if there is also a character - stream specified, but it will use a byte stream in preference - to opening a URI connection itself. - - If the application knows the character encoding of the byte - stream, it should set it with the setEncoding method. - """ - - def getByteStream(self) -> _SupportsReadClose[bytes] | None: - """Get the byte stream for this input source. - - The getEncoding method will return the character encoding for - this byte stream, or None if unknown. - """ - - def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: - """Set the character stream for this input source. (The stream - must be a Python 2.0 Unicode-wrapped file-like that performs - conversion to Unicode strings.) - - If there is a character stream specified, the SAX parser will - ignore any byte stream and will not attempt to open a URI - connection to the system identifier. - """ - - def getCharacterStream(self) -> _SupportsReadClose[str] | None: - """Get the character stream for this input source.""" + def setPublicId(self, public_id: str | None) -> None: ... + def getPublicId(self) -> str | None: ... + def setSystemId(self, system_id: str | None) -> None: ... + def getSystemId(self) -> str | None: ... + def setEncoding(self, encoding: str | None) -> None: ... + def getEncoding(self) -> str | None: ... + def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: ... + def getByteStream(self) -> _SupportsReadClose[bytes] | None: ... + def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: ... + def getCharacterStream(self) -> _SupportsReadClose[str] | None: ... _AttrKey = TypeVar("_AttrKey", default=str) class AttributesImpl(Generic[_AttrKey]): - def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: - """Non-NS-aware implementation. - - attrs should be of the form {name : value}. - """ - + def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: ... def getLength(self) -> int: ... def getType(self, name: str) -> str: ... def getValue(self, name: _AttrKey) -> str: ... @@ -255,13 +75,7 @@ class AttributesImpl(Generic[_AttrKey]): _NSName: TypeAlias = tuple[str | None, str] class AttributesNSImpl(AttributesImpl[_NSName]): - def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: - """NS-aware implementation. - - attrs should be of the form {(ns_uri, lname): value, ...}. 
- qnames of the form {(ns_uri, lname): qname, ...}. - """ - + def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ... def getValue(self, name: _NSName) -> str: ... def getNameByQName(self, name: str) -> _NSName: ... def getQNameByName(self, name: _NSName) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi index 7a00f503ef763..42420ee85848f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi @@ -1,45 +1,3 @@ -""" -An XML-RPC client interface for Python. - -The marshalling and response parser code can also be used to -implement XML-RPC servers. - -Exported exceptions: - - Error Base class for client errors - ProtocolError Indicates an HTTP protocol error - ResponseError Indicates a broken response package - Fault Indicates an XML-RPC fault package - -Exported classes: - - ServerProxy Represents a logical connection to an XML-RPC server - - MultiCall Executor of boxcared xmlrpc requests - DateTime dateTime wrapper for an ISO 8601 string or time tuple or - localtime integer value to generate a "dateTime.iso8601" - XML-RPC value - Binary binary data wrapper - - Marshaller Generate an XML-RPC params chunk from a Python data structure - Unmarshaller Unmarshal an XML-RPC response from incoming XML event message - Transport Handles an HTTP transaction to an XML-RPC server - SafeTransport Handles an HTTPS transaction to an XML-RPC server - -Exported constants: - - (none) - -Exported functions: - - getparser Create instance of the fastest available parser & attach - to an unmarshalling object - dumps Convert an argument tuple or a Fault instance to an XML-RPC - request (or response, if the methodresponse option is used). - loads Convert an XML-RPC packet to unmarshalled data plus a method - name (None if not present). -""" - import gzip import http.client import time @@ -94,24 +52,18 @@ METHOD_NOT_FOUND: Final[int] # undocumented INVALID_METHOD_PARAMS: Final[int] # undocumented INTERNAL_ERROR: Final[int] # undocumented -class Error(Exception): - """Base class for client errors.""" +class Error(Exception): ... class ProtocolError(Error): - """Indicates an HTTP protocol error.""" - url: str errcode: int errmsg: str headers: dict[str, str] def __init__(self, url: str, errcode: int, errmsg: str, headers: dict[str, str]) -> None: ... -class ResponseError(Error): - """Indicates a broken response package.""" +class ResponseError(Error): ... class Fault(Error): - """Indicates an XML-RPC fault package.""" - faultCode: int faultString: str def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... @@ -123,11 +75,6 @@ def _iso8601_format(value: datetime) -> str: ... # undocumented def _strftime(value: _XMLDate) -> str: ... # undocumented class DateTime: - """DateTime wrapper for an ISO 8601 string or time tuple or - localtime integer value to generate 'dateTime.iso8601' XML-RPC - value. - """ - value: str # undocumented def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -145,8 +92,6 @@ def _datetime(data: Any) -> DateTime: ... # undocumented def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: - """Wrapper for binary data.""" - data: bytes def __init__(self, data: bytes | bytearray | None = None) -> None: ... 
def decode(self, data: ReadableBuffer) -> None: ... @@ -166,15 +111,6 @@ class ExpatParser: # undocumented _WriteCallback: TypeAlias = Callable[[str], object] class Marshaller: - """Generate an XML-RPC params chunk from a Python data structure. - - Create a Marshaller instance for each set of parameters, and use - the "dumps" method to convert your data (represented as a tuple) - to an XML-RPC params chunk. To write a fault response, pass a - Fault instance instead. You may prefer to use the "dumps" module - function for this purpose. - """ - dispatch: dict[type[_Marshallable] | Literal["_arbitrary_instance"], Callable[[Marshaller, Any, _WriteCallback], None]] memo: dict[Any, None] data: None @@ -198,14 +134,6 @@ class Marshaller: def dump_instance(self, value: object, write: _WriteCallback) -> None: ... class Unmarshaller: - """Unmarshal an XML-RPC response, based on incoming XML event - messages (start, data, end). Call close() to get the resulting - data structure. - - Note that this reader is fairly tolerant, and gladly accepts bogus - XML-RPC data without complaining (but not bogus XML). - """ - dispatch: dict[str, Callable[[Unmarshaller, str], None]] _type: str | None @@ -249,31 +177,11 @@ class _MultiCallMethod: # undocumented def __call__(self, *args: _Marshallable) -> None: ... class MultiCallIterator: # undocumented - """Iterates over the results of a multicall. Exceptions are - raised in response to xmlrpc faults. - """ - results: list[list[_Marshallable]] def __init__(self, results: list[list[_Marshallable]]) -> None: ... def __getitem__(self, i: int) -> _Marshallable: ... class MultiCall: - """server -> an object used to boxcar method calls - - server should be a ServerProxy object. - - Methods can be added to the MultiCall using normal - method call syntax e.g.: - - multicall = MultiCall(server_proxy) - multicall.add(2,3) - multicall.get_address("Guido") - - To execute the multicall, call the MultiCall object e.g.: - - add_result, address = multicall() - """ - __server: ServerProxy __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... @@ -285,70 +193,21 @@ FastMarshaller: Marshaller | None FastParser: ExpatParser | None FastUnmarshaller: Unmarshaller | None -def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: - """getparser() -> parser, unmarshaller - - Create an instance of the fastest available parser, and attach it - to an unmarshalling object. Return both objects. - """ - +def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... def dumps( params: Fault | tuple[_Marshallable, ...], methodname: str | None = None, methodresponse: bool | None = None, encoding: str | None = None, allow_none: bool = False, -) -> str: - """data [,options] -> marshalled data - - Convert an argument tuple or a Fault instance to an XML-RPC - request (or response, if the methodresponse option is used). - - In addition to the data object, the following options can be given - as keyword arguments: - - methodname: the method name for a methodCall packet - - methodresponse: true to create a methodResponse packet. - If this option is used with a tuple, the tuple must be - a singleton (i.e. it can contain only one element). - - encoding: the packet encoding (default is UTF-8) - - All byte strings in the data structure are assumed to use the - packet encoding. Unicode strings are automatically converted, - where necessary. 
- """ - +) -> str: ... def loads( data: str | ReadableBuffer, use_datetime: bool = False, use_builtin_types: bool = False -) -> tuple[tuple[_Marshallable, ...], str | None]: - """data -> unmarshalled data, method name - - Convert an XML-RPC packet to unmarshalled data plus a method - name (None if not present). - - If the XML-RPC packet represents a fault condition, this function - raises a Fault exception. - """ - -def gzip_encode(data: ReadableBuffer) -> bytes: # undocumented - """data -> gzip encoded data - - Encode data using the gzip content encoding as described in RFC 1952 - """ - -def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: # undocumented - """gzip encoded data -> unencoded data - - Decode data using the gzip content encoding as described in RFC 1952 - """ +) -> tuple[tuple[_Marshallable, ...], str | None]: ... +def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented class GzipDecodedResponse(gzip.GzipFile): # undocumented - """a file-like object to decode a response encoded with the gzip - method, as described in RFC 1952. - """ - io: BytesIO def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... @@ -360,8 +219,6 @@ class _Method: # undocumented def __call__(self, *args: _Marshallable) -> _Marshallable: ... class Transport: - """Handles an HTTP transaction to an XML-RPC server.""" - user_agent: str accept_gzip_encoding: bool encode_threshold: int | None @@ -393,8 +250,6 @@ class Transport: def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): - """Handles an HTTPS transaction to an XML-RPC server.""" - def __init__( self, use_datetime: bool = False, @@ -406,27 +261,6 @@ class SafeTransport(Transport): def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... class ServerProxy: - """uri [,options] -> a logical connection to an XML-RPC server - - uri is the connection point on the server, given as - scheme://host/target. - - The standard implementation always supports the "http" scheme. If - SSL socket support is available (Python 2.0), it also supports - "https". - - If the target part and the slash preceding it are both omitted, - "/RPC2" is assumed. - - The following options can be given as keyword arguments: - - transport: a transport factory - encoding: the request encoding (default is UTF-8) - - All 8-bit strings passed to the server proxy are assumed to use - the given encoding. - """ - __host: str __handler: str __transport: Transport @@ -449,11 +283,7 @@ class ServerProxy: ) -> None: ... def __getattr__(self, name: str) -> _Method: ... @overload - def __call__(self, attr: Literal["close"]) -> Callable[[], None]: - """A workaround to get special attributes on the ServerProxy - without interfering with the magic __getattr__ - """ - + def __call__(self, attr: Literal["close"]) -> Callable[[], None]: ... @overload def __call__(self, attr: Literal["transport"]) -> Transport: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi index 8b0c3fc8ee8d3..286aaf980fbf5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi @@ -1,106 +1,3 @@ -"""XML-RPC Servers. 
- -This module can be used to create simple XML-RPC servers -by creating a server and either installing functions, a -class instance, or by extending the SimpleXMLRPCServer -class. - -It can also be used to handle XML-RPC requests in a CGI -environment using CGIXMLRPCRequestHandler. - -The Doc* classes can be used to create XML-RPC servers that -serve pydoc-style documentation in response to HTTP -GET requests. This documentation is dynamically generated -based on the functions and methods registered with the -server. - -A list of possible usage patterns follows: - -1. Install functions: - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_function(pow) -server.register_function(lambda x,y: x+y, 'add') -server.serve_forever() - -2. Install an instance: - -class MyFuncs: - def __init__(self): - # make all of the sys functions available through sys.func_name - import sys - self.sys = sys - def _listMethods(self): - # implement this method so that system.listMethods - # knows to advertise the sys methods - return list_public_methods(self) + \\ - ['sys.' + method for method in list_public_methods(self.sys)] - def pow(self, x, y): return pow(x, y) - def add(self, x, y) : return x + y - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_introspection_functions() -server.register_instance(MyFuncs()) -server.serve_forever() - -3. Install an instance with custom dispatch method: - -class Math: - def _listMethods(self): - # this method must be present for system.listMethods - # to work - return ['add', 'pow'] - def _methodHelp(self, method): - # this method must be present for system.methodHelp - # to work - if method == 'add': - return "add(2,3) => 5" - elif method == 'pow': - return "pow(x, y[, z]) => number" - else: - # By convention, return empty - # string if no help is available - return "" - def _dispatch(self, method, params): - if method == 'pow': - return pow(*params) - elif method == 'add': - return params[0] + params[1] - else: - raise ValueError('bad method') - -server = SimpleXMLRPCServer(("localhost", 8000)) -server.register_introspection_functions() -server.register_instance(Math()) -server.serve_forever() - -4. Subclass SimpleXMLRPCServer: - -class MathServer(SimpleXMLRPCServer): - def _dispatch(self, method, params): - try: - # We are forcing the 'export_' prefix on methods that are - # callable through XML-RPC to prevent potential security - # problems - func = getattr(self, 'export_' + method) - except AttributeError: - raise Exception('method "%s" is not supported' % method) - else: - return func(*params) - - def export_add(self, x, y): - return x + y - -server = MathServer(("localhost", 8000)) -server.serve_forever() - -5. CGI script: - -server = CGIXMLRPCRequestHandler() -server.register_function(pow) -server.handle_request() -""" - import http.server import pydoc import socketserver @@ -142,191 +39,43 @@ _DispatchProtocol: TypeAlias = ( _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN ) -def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: # undocumented - """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d - - Resolves a dotted attribute name to an object. Raises - an AttributeError if any attribute in the chain starts with a '_'. - - If the optional allow_dotted_names argument is false, dots are not - supported and this function operates similar to getattr(obj, attr). 
- """ - -def list_public_methods(obj: Any) -> list[str]: # undocumented - """Returns a list of attribute strings, found in the specified - object, which represent callable attributes - """ +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... # undocumented +def list_public_methods(obj: Any) -> list[str]: ... # undocumented class SimpleXMLRPCDispatcher: # undocumented - """Mix-in class that dispatches XML-RPC requests. - - This class is used to register XML-RPC method handlers - and then to dispatch them. This class doesn't need to be - instanced directly when used by SimpleXMLRPCServer but it - can be instanced when used by the MultiPathXMLRPCServer - """ - funcs: dict[str, _DispatchProtocol] instance: Any | None allow_none: bool encoding: str use_builtin_types: bool def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... - def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: - """Registers an instance to respond to XML-RPC requests. - - Only one instance can be installed at a time. - - If the registered instance has a _dispatch method then that - method will be called with the name of the XML-RPC method and - its parameters as a tuple - e.g. instance._dispatch('add',(2,3)) - - If the registered instance does not have a _dispatch method - then the instance will be searched to find a matching method - and, if found, will be called. Methods beginning with an '_' - are considered private and will not be called by - SimpleXMLRPCServer. - - If a registered function matches an XML-RPC request, then it - will be called instead of the registered instance. - - If the optional allow_dotted_names argument is true and the - instance does not have a _dispatch method, method names - containing dots are supported and resolved, as long as none of - the name segments start with an '_'. - - *** SECURITY WARNING: *** - - Enabling the allow_dotted_names options allows intruders - to access your module's global variables and may allow - intruders to execute arbitrary code on your machine. Only - use this option on a secure, closed network. - - """ - - def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: - """Registers a function to respond to XML-RPC requests. - - The optional name argument can be used to set a Unicode name - for the function. - """ - - def register_introspection_functions(self) -> None: - """Registers the XML-RPC introspection methods in the system - namespace. - - see http://xmlrpc.usefulinc.com/doc/reserved.html - """ - - def register_multicall_functions(self) -> None: - """Registers the XML-RPC multicall method in the system - namespace. - - see http://www.xmlrpc.com/discuss/msgReader$1208 - """ - + def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... + def register_introspection_functions(self) -> None: ... + def register_multicall_functions(self) -> None: ... def _marshaled_dispatch( self, data: str | ReadableBuffer, dispatch_method: Callable[[str, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, path: Any | None = None, - ) -> str: # undocumented - """Dispatches an XML-RPC method from marshalled (XML) data. 
- - XML-RPC methods are dispatched from the marshalled (XML) data - using the _dispatch method and the result is returned as - marshalled data. For backwards compatibility, a dispatch - function can be provided as an argument (see comment in - SimpleXMLRPCRequestHandler.do_POST) but overriding the - existing method through subclassing is the preferred means - of changing method dispatch behavior. - """ - - def system_listMethods(self) -> list[str]: # undocumented - """system.listMethods() => ['add', 'subtract', 'multiple'] - - Returns a list of the methods supported by the server. - """ - - def system_methodSignature(self, method_name: str) -> str: # undocumented - """system.methodSignature('add') => [double, int, int] - - Returns a list describing the signature of the method. In the - above example, the add method takes two integers as arguments - and returns a double result. - - This server does NOT support system.methodSignature. - """ - - def system_methodHelp(self, method_name: str) -> str: # undocumented - """system.methodHelp('add') => "Adds two integers together" - - Returns a string containing documentation for the specified method. - """ - - def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: # undocumented - """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => [[4], ...] - - Allows the caller to package multiple XML-RPC calls into a single - request. - - See http://www.xmlrpc.com/discuss/msgReader$1208 - """ - - def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: # undocumented - """Dispatches the XML-RPC method. - - XML-RPC calls are forwarded to a registered function that - matches the called XML-RPC method name. If no such function - exists then the call is forwarded to the registered instance, - if available. - - If the registered instance has a _dispatch method then that - method will be called with the name of the XML-RPC method and - its parameters as a tuple - e.g. instance._dispatch('add',(2,3)) - - If the registered instance does not have a _dispatch method - then the instance will be searched to find a matching method - and, if found, will be called. - - Methods beginning with an '_' are considered private and will - not be called. - """ + ) -> str: ... # undocumented + def system_listMethods(self) -> list[str]: ... # undocumented + def system_methodSignature(self, method_name: str) -> str: ... # undocumented + def system_methodHelp(self, method_name: str) -> str: ... # undocumented + def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: ... # undocumented + def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: ... # undocumented class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): - """Simple XML-RPC request handler class. - - Handles all HTTP POST requests and attempts to decode them as - XML-RPC requests. - """ - rpc_paths: ClassVar[tuple[str, ...]] encode_threshold: int # undocumented aepattern: Pattern[str] # undocumented def accept_encodings(self) -> dict[str, float]: ... def is_rpc_path_valid(self) -> bool: ... - def do_POST(self) -> None: - """Handles the HTTP POST request. - - Attempts to interpret all HTTP POST requests as XML-RPC calls, - which are forwarded to the server's _dispatch method for handling. - """ - + def do_POST(self) -> None: ... def decode_request_content(self, data: bytes) -> bytes | None: ... def report_404(self) -> None: ... 
class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): - """Simple XML-RPC server. - - Simple XML-RPC server that allows functions and a single instance - to be installed to handle requests. The default implementation - attempts to dispatch XML-RPC calls to the functions or instance - installed in the server. Override the _dispatch method inherited - from SimpleXMLRPCDispatcher to change this behavior. - """ - _send_traceback_handler: bool def __init__( self, @@ -340,14 +89,6 @@ class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): ) -> None: ... class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented - """Multipath XML-RPC Server - This specialization of SimpleXMLRPCServer allows the user to create - multiple Dispatcher instances and assign them to different - HTTP request paths. This makes it possible to run two or more - 'virtual XML-RPC servers' at the same port. - Make sure that the requestHandler accepts the paths in question. - """ - dispatchers: dict[str, SimpleXMLRPCDispatcher] def __init__( self, @@ -363,30 +104,12 @@ class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): - """Simple handler for XML-RPC data passed through CGI.""" - def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... - def handle_xmlrpc(self, request_text: str) -> None: - """Handle a single XML-RPC request""" - - def handle_get(self) -> None: - """Handle a single HTTP GET request. - - Default implementation indicates an error because - XML-RPC uses the POST method. - """ - - def handle_request(self, request_text: str | None = None) -> None: - """Handle a single XML-RPC request passed through a CGI post method. - - If no XML data is given then it is read from stdin. The resulting - XML-RPC response is printed to stdout along with the correct HTTP - headers. - """ + def handle_xmlrpc(self, request_text: str) -> None: ... + def handle_get(self) -> None: ... + def handle_request(self, request_text: str | None = None) -> None: ... class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented - """Class used to generate pydoc HTML document for a server""" - def docroutine( # type: ignore[override] self, object: object, @@ -396,67 +119,22 @@ class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, cl: type | None = None, - ) -> str: - """Produce HTML documentation for a function or method object.""" - - def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: - """Produce HTML documentation for an XML-RPC server.""" + ) -> str: ... + def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... class XMLRPCDocGenerator: # undocumented - """Generates documentation for an XML-RPC server. - - This class is designed as mix-in and should not - be constructed directly. 
- """ - server_name: str server_documentation: str server_title: str - def set_server_title(self, server_title: str) -> None: - """Set the HTML title of the generated server documentation""" - - def set_server_name(self, server_name: str) -> None: - """Set the name of the generated HTML server documentation""" - - def set_server_documentation(self, server_documentation: str) -> None: - """Set the documentation string for the entire server.""" - - def generate_html_documentation(self) -> str: - """generate_html_documentation() => html documentation for the server - - Generates HTML documentation for the server using introspection for - installed functions and instances that do not implement the - _dispatch method. Alternatively, instances can choose to implement - the _get_method_argstring(method_name) method to provide the - argument string used in the documentation and the - _methodHelp(method_name) method to provide the help text used - in the documentation. - """ + def set_server_title(self, server_title: str) -> None: ... + def set_server_name(self, server_name: str) -> None: ... + def set_server_documentation(self, server_documentation: str) -> None: ... + def generate_html_documentation(self) -> str: ... class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): - """XML-RPC and documentation request handler class. - - Handles all HTTP POST requests and attempts to decode them as - XML-RPC requests. - - Handles all HTTP GET requests and interprets them as requests - for documentation. - """ - - def do_GET(self) -> None: - """Handles the HTTP GET request. - - Interpret all HTTP GET requests as requests for server - documentation. - """ + def do_GET(self) -> None: ... class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): - """XML-RPC and HTML documentation server. - - Adds the ability to serve server documentation to the capabilities - of SimpleXMLRPCServer. - """ - def __init__( self, addr: tuple[str, int], @@ -469,8 +147,4 @@ class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): ) -> None: ... class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): - """Handler for XML-RPC data and documentation requests passed through - CGI - """ - def __init__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi index 17d744cf83fc5..78a50b85f405a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi @@ -1,5 +1,3 @@ -"""This is a template module just for instruction.""" - import sys from typing import Any, ClassVar, final @@ -7,21 +5,12 @@ class Str(str): ... @final class Xxo: - """A class that explicitly stores attributes in an internal dict""" - - def demo(self) -> None: - """demo(o) -> o""" + def demo(self) -> None: ... if sys.version_info >= (3, 11) and sys.platform != "win32": x_exports: int -def foo(i: int, j: int, /) -> Any: - """foo(i,j) - - Return the sum of i and j. - """ - -def new() -> Xxo: - """new() -> new Xx object""" +def foo(i: int, j: int, /) -> Any: ... +def new() -> Xxo: ... if sys.version_info >= (3, 10): class Error(Exception): ... @@ -32,5 +21,4 @@ else: class Null: __hash__: ClassVar[None] # type: ignore[assignment] - def roj(b: Any, /) -> None: - """roj(a,b) -> None""" + def roj(b: Any, /) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi index 354ecc600e871..c7cf1704b1359 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi @@ -16,24 +16,5 @@ def create_archive( main: str | None = None, filter: Callable[[Path], bool] | None = None, compressed: bool = False, -) -> None: - """Create an application archive from SOURCE. - - The SOURCE can be the name of a directory, or a filename or a file-like - object referring to an existing archive. - - The content of SOURCE is packed into an application archive in TARGET, - which can be a filename or a file-like object. If SOURCE is a directory, - TARGET can be omitted and will default to the name of SOURCE with .pyz - appended. - - The created application archive will have a shebang line specifying - that it should run with INTERPRETER (there will be no shebang line if - INTERPRETER is None), and a __main__.py which runs MAIN (if MAIN is - not specified, an existing __main__.py will be used). It is an error - to specify MAIN for anything other than a directory source with no - __main__.py, and it is an error to omit MAIN if the directory has no - __main__.py. - """ - +) -> None: ... def get_interpreter(archive: _Path) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi index 0389fe1cba78f..e573d04dba051 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi @@ -1,9 +1,3 @@ -""" -Read and write ZIP files. - -XXX references to utf-8 need further investigation. -""" - import io import sys from _typeshed import SizedBuffer, StrOrBytesPath, StrPath @@ -45,11 +39,7 @@ class BadZipFile(Exception): ... BadZipfile = BadZipFile error = BadZipfile -class LargeZipFile(Exception): - """ - Raised when writing a zipfile, the zipfile requires ZIP64 extensions - and those extensions are disabled. - """ +class LargeZipFile(Exception): ... @type_check_only class _ZipStream(Protocol): @@ -71,10 +61,6 @@ class _ClosableZipStream(_ZipStream, Protocol): def close(self) -> object: ... class ZipExtFile(io.BufferedIOBase): - """File-like object for reading an archive member. - Is returned by ZipFile.open(). - """ - MAX_N: int MIN_READ_SIZE: int MAX_SEEK_READ: int @@ -104,23 +90,10 @@ class ZipExtFile(io.BufferedIOBase): pwd: bytes | None = None, close_fileobj: Literal[False] = False, ) -> None: ... - def read(self, n: int | None = -1) -> bytes: - """Read and return up to n bytes. - If the argument is omitted, None, or negative, data is read and returned until EOF is reached. - """ - - def readline(self, limit: int = -1) -> bytes: # type: ignore[override] - """Read and return a line from the stream. - - If limit is specified, at most limit bytes will be read. - """ - - def peek(self, n: int = 1) -> bytes: - """Returns buffered bytes without advancing the position.""" - - def read1(self, n: int | None) -> bytes: # type: ignore[override] - """Read up to n bytes with at most one read() system call.""" - + def read(self, n: int | None = -1) -> bytes: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + def peek(self, n: int = 1) -> bytes: ... + def read1(self, n: int | None) -> bytes: ... # type: ignore[override] def seek(self, offset: int, whence: int = 0) -> int: ... 
@type_check_only @@ -146,32 +119,6 @@ class _ZipWritable(Protocol): def write(self, b: bytes, /) -> int: ... class ZipFile: - """Class with methods to open, read, write, close, list zip files. - - z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True, - compresslevel=None) - - file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. - mode: The mode can be either read 'r', write 'w', exclusive create 'x', - or append 'a'. - compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), - ZIP_BZIP2 (requires bz2), ZIP_LZMA (requires lzma), or - ZIP_ZSTANDARD (requires compression.zstd). - allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. - compresslevel: None (default for the given compression type) or an integer - specifying the level to pass to the compressor. - When using ZIP_STORED or ZIP_LZMA this keyword has no effect. - When using ZIP_DEFLATED integers 0 through 9 are accepted. - When using ZIP_BZIP2 integers 1 through 9 are accepted. - When using ZIP_ZSTANDARD integers -7 though 22 are common, - see the CompressionParameter enum in compression.zstd for - details. - - """ - filename: str | None debug: int comment: bytes @@ -196,10 +143,7 @@ class ZipFile: *, strict_timestamps: bool = True, metadata_encoding: str | None = None, - ) -> None: - """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', - or append 'a'. - """ + ) -> None: ... # metadata_encoding is only allowed for read mode @overload def __init__( @@ -248,11 +192,7 @@ class ZipFile: compresslevel: int | None = None, *, strict_timestamps: bool = True, - ) -> None: - """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', - or append 'a'. - """ - + ) -> None: ... @overload def __init__( self, @@ -291,132 +231,47 @@ class ZipFile: def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def close(self) -> None: - """Close the file, and for mode 'w', 'x' and 'a' write the ending - records. - """ - - def getinfo(self, name: str) -> ZipInfo: - """Return the instance of ZipInfo given 'name'.""" - - def infolist(self) -> list[ZipInfo]: - """Return a list of class ZipInfo instances for files in the - archive. - """ - - def namelist(self) -> list[str]: - """Return a list of file names in the archive.""" - + def close(self) -> None: ... + def getinfo(self, name: str) -> ZipInfo: ... + def infolist(self) -> list[ZipInfo]: ... + def namelist(self) -> list[str]: ... def open( self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False - ) -> IO[bytes]: - """Return file-like object for 'name'. - - name is a string for the file name within the ZIP file, or a ZipInfo - object. - - mode should be 'r' to read a file already in the ZIP file, or 'w' to - write to a file newly added to the archive. - - pwd is the password to decrypt files (only used for reading). - - When writing, if the file size is not known in advance but may exceed - 2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large - files. If the size is known in advance, it is best to pass a ZipInfo - instance for name, with zinfo.file_size set. 
- """ - - def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. 'member' may be a filename or a ZipInfo object. You can - specify a different directory using 'path'. You can specify the - password to decrypt the file using 'pwd'. - """ - + ) -> IO[bytes]: ... + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... def extractall( self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None - ) -> None: - """Extract all members from the archive to the current working - directory. 'path' specifies a different directory to extract to. - 'members' is optional and must be a subset of the list returned - by namelist(). You can specify the password to decrypt all files - using 'pwd'. - """ - - def printdir(self, file: _Writer | None = None) -> None: - """Print a table of contents for the zip file.""" - - def setpassword(self, pwd: bytes) -> None: - """Set default password for encrypted files.""" - - def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: - """Return file bytes for name. 'pwd' is the password to decrypt - encrypted files. - """ - - def testzip(self) -> str | None: - """Read all the files and check the CRC. - - Return None if all files could be read successfully, or the name - of the offending file otherwise. - """ - + ) -> None: ... + def printdir(self, file: _Writer | None = None) -> None: ... + def setpassword(self, pwd: bytes) -> None: ... + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... + def testzip(self) -> str | None: ... def write( self, filename: StrPath, arcname: StrPath | None = None, compress_type: int | None = None, compresslevel: int | None = None, - ) -> None: - """Put the bytes from filename into the archive under the name - arcname. - """ - + ) -> None: ... def writestr( self, zinfo_or_arcname: str | ZipInfo, data: SizedBuffer | str, compress_type: int | None = None, compresslevel: int | None = None, - ) -> None: - """Write a file into the archive. The contents is 'data', which - may be either a 'str' or a 'bytes' instance; if it is a 'str', - it is encoded as UTF-8 first. - 'zinfo_or_arcname' is either a ZipInfo instance or - the name of the file in the archive. - """ + ) -> None: ... if sys.version_info >= (3, 11): - def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: - """Creates a directory inside the zip archive.""" + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... - def __del__(self) -> None: - """Call the "close()" method in case the user forgot.""" + def __del__(self) -> None: ... class PyZipFile(ZipFile): - """Class to create ZIP archives with Python library files and packages.""" - def __init__( self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 ) -> None: ... - def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: - """Add all files from "pathname" to the ZIP archive. - - If pathname is a package directory, search the directory and - all package subdirectories recursively for all *.py and enter - the modules into the archive. If pathname is a plain - directory, listdir *.py and enter all modules. 
Else, pathname - must be a Python *.py file and the module will be put into the - archive. Added modules are always module.pyc. - This method will compile the module.py into module.pyc if - necessary. - If filterfunc(pathname) is given, it is called with every argument. - When it is False, the file or directory is skipped. - """ + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... class ZipInfo: - """Class with attributes describing each file in the ZIP archive.""" - __slots__ = ( "orig_filename", "filename", @@ -463,146 +318,27 @@ class ZipInfo: def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ... @classmethod - def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: - """Construct an appropriate ZipInfo for a file on the filesystem. - - filename should be the path to a file or directory on the filesystem. - - arcname is the name which it will have within the archive (by default, - this will be the same as filename, but without a drive letter and with - leading path separators removed). - """ - - def is_dir(self) -> bool: - """Return True if this archive member is a directory.""" - - def FileHeader(self, zip64: bool | None = None) -> bytes: - """Return the per-file header as a bytes object. - - When the optional zip64 arg is None rather than a bool, we will - decide based upon the file_size and compress_size, if known, - False otherwise. - """ + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... + def is_dir(self) -> bool: ... + def FileHeader(self, zip64: bool | None = None) -> bytes: ... if sys.version_info >= (3, 12): from zipfile._path import CompleteDirs as CompleteDirs, Path as Path else: class CompleteDirs(ZipFile): - """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. - """ - - def resolve_dir(self, name: str) -> str: - """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ - + def resolve_dir(self, name: str) -> str: ... @overload @classmethod - def make(cls, source: ZipFile) -> CompleteDirs: - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - + def make(cls, source: ZipFile) -> CompleteDirs: ... @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... class Path: - """ - A pathlib-compatible interface for zip files. - - Consider a zip file with this structure:: - - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> root = Path(zf) - - From there, several path operations are available. 
- - Directory iteration (including the zip file itself): - - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') - - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text() - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. - - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ - root: CompleteDirs at: str - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: - """ - Construct a Path from a ZipFile or filename. - - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ - + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... @property def name(self) -> str: ... @property @@ -629,13 +365,7 @@ else: write_through: bool = ..., *, pwd: bytes | None = None, - ) -> TextIOWrapper: - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ - + ) -> TextIOWrapper: ... @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... @@ -663,11 +393,7 @@ else: def __truediv__(self, add: StrPath) -> Path: ... -def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: - """Quickly see if a file is a ZIP file by checking the magic number. - - The filename argument may be a file or file-like object too. - """ +def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... ZIP64_LIMIT: Final[int] ZIP_FILECOUNT_LIMIT: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi index 4b6743b707e05..4c7b39ec4c6ca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi @@ -1,12 +1,3 @@ -""" -A Path-like interface for zipfiles. - -This codebase is shared between zipfile.Path in the stdlib -and zipp in PyPI. See -https://github.com/python/importlib_metadata/wiki/Development-Methodology -for more detail. -""" - import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -22,166 +13,26 @@ if sys.version_info >= (3, 12): __all__ = ["Path"] class InitializedState: - """ - Mix-in to save the initialization state for pickling. - """ - def __init__(self, *args: object, **kwargs: object) -> None: ... def __getstate__(self) -> tuple[list[object], dict[object, object]]: ... def __setstate__(self, state: Sequence[tuple[list[object], dict[object, object]]]) -> None: ... class CompleteDirs(InitializedState, ZipFile): - """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. 
- - >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt'])) - ['foo/', 'foo/bar/'] - >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/'])) - ['foo/'] - """ - - def resolve_dir(self, name: str) -> str: - """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ - + def resolve_dir(self, name: str) -> str: ... @overload @classmethod - def make(cls, source: ZipFile) -> CompleteDirs: - """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ - + def make(cls, source: ZipFile) -> CompleteDirs: ... @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... if sys.version_info >= (3, 13): @classmethod - def inject(cls, zf: _ZF) -> _ZF: - """ - Given a writable zip file zf, inject directory entries for - any directories implied by the presence of children. - """ + def inject(cls, zf: _ZF) -> _ZF: ... class Path: - """ - A :class:`importlib.resources.abc.Traversable` interface for zip files. - - Implements many of the features users enjoy from - :class:`pathlib.Path`. - - Consider a zip file with this structure:: - - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt - - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' - - Path accepts the zipfile object itself or a filename - - >>> path = Path(zf) - - From there, several path operations are available. - - Directory iteration (including the zip file itself): - - >>> a, b = path.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') - - name property: - - >>> b.name - 'b' - - join with divide operator: - - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' - - Read text: - - >>> c.read_text(encoding='utf-8') - 'content of c' - - existence: - - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False - - Coercion to string: - - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' - - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. - - >>> str(path) - 'mem/abcde.zip/' - >>> path.name - 'abcde.zip' - >>> path.filename == pathlib.Path('mem/abcde.zip') - True - >>> str(path.parent) - 'mem' - - If the zipfile has no filename, such attributes are not - valid and accessing them will raise an Exception. - - >>> zf.filename = None - >>> path.name - Traceback (most recent call last): - ... - TypeError: ... - - >>> path.filename - Traceback (most recent call last): - ... - TypeError: ... - - >>> path.parent - Traceback (most recent call last): - ... - TypeError: ... - - # workaround python/cpython#106763 - >>> pass - """ - root: CompleteDirs at: str - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: - """ - Construct a Path from a ZipFile or filename. - - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ - + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... @property def name(self) -> str: ... 
@property @@ -205,13 +56,7 @@ if sys.version_info >= (3, 12): write_through: bool = ..., *, pwd: bytes | None = None, - ) -> TextIOWrapper: - """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ - + ) -> TextIOWrapper: ... @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... def iterdir(self) -> Iterator[Self]: ... @@ -230,18 +75,9 @@ if sys.version_info >= (3, 12): def joinpath(self, *other: StrPath) -> Path: ... def glob(self, pattern: str) -> Iterator[Self]: ... def rglob(self, pattern: str) -> Iterator[Self]: ... - def is_symlink(self) -> Literal[False]: - """ - Return whether this path is a symlink. - """ - + def is_symlink(self) -> Literal[False]: ... def relative_to(self, other: Path, *extra: StrPath) -> str: ... def match(self, path_pattern: str) -> bool: ... - def __eq__(self, other: object) -> bool: - """ - >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo' - False - """ - + def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi index 8c607b19d1d8c..f6a661be8cdf4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi @@ -4,121 +4,23 @@ from re import Match if sys.version_info >= (3, 13): class Translator: - """ - >>> Translator('xyz') - Traceback (most recent call last): - ... - AssertionError: Invalid separators - - >>> Translator('') - Traceback (most recent call last): - ... - AssertionError: Invalid separators - """ - if sys.platform == "win32": def __init__(self, seps: str = "\\/") -> None: ... else: def __init__(self, seps: str = "/") -> None: ... - def translate(self, pattern: str) -> str: - """ - Given a glob pattern, produce a regex that matches it. - """ - - def extend(self, pattern: str) -> str: - """ - Extend regex for pattern-wide concerns. - - Apply '(?s:)' to create a non-matching group that - matches newlines (valid on Unix). - - Append '\\z' to imply fullmatch even when match is used. - """ - - def match_dirs(self, pattern: str) -> str: - """ - Ensure that zipfile.Path directory names are matched. - - zipfile.Path directory names always end in a slash. - """ - - def translate_core(self, pattern: str) -> str: - """ - Given a glob pattern, produce a regex that matches it. - - >>> t = Translator() - >>> t.translate_core('*.txt').replace('\\\\\\\\', '') - '[^/]*\\\\.txt' - >>> t.translate_core('a?txt') - 'a[^/]txt' - >>> t.translate_core('**/*').replace('\\\\\\\\', '') - '.*/[^/][^/]*' - """ - - def replace(self, match: Match[str]) -> str: - """ - Perform the replacements for a match from :func:`separate`. - """ - - def restrict_rglob(self, pattern: str) -> None: - """ - Raise ValueError if ** appears in anything but a full path segment. - - >>> Translator().translate('**foo') - Traceback (most recent call last): - ... - ValueError: ** must appear alone in a path segment - """ - - def star_not_empty(self, pattern: str) -> str: - """ - Ensure that * will not match an empty segment. - """ + def translate(self, pattern: str) -> str: ... + def extend(self, pattern: str) -> str: ... + def match_dirs(self, pattern: str) -> str: ... + def translate_core(self, pattern: str) -> str: ... 
+ def replace(self, match: Match[str]) -> str: ... + def restrict_rglob(self, pattern: str) -> None: ... + def star_not_empty(self, pattern: str) -> str: ... else: - def translate(pattern: str) -> str: - """ - Given a glob pattern, produce a regex that matches it. - - >>> translate('*.txt') - '[^/]*\\\\.txt' - >>> translate('a?txt') - 'a.txt' - >>> translate('**/*') - '.*/[^/]*' - """ - - def match_dirs(pattern: str) -> str: - """ - Ensure that zipfile.Path directory names are matched. - - zipfile.Path directory names always end in a slash. - """ - - def translate_core(pattern: str) -> str: - """ - Given a glob pattern, produce a regex that matches it. - - >>> translate('*.txt') - '[^/]*\\\\.txt' - >>> translate('a?txt') - 'a.txt' - >>> translate('**/*') - '.*/[^/]*' - """ - - def replace(match: Match[str]) -> str: - """ - Perform the replacements for a match from :func:`separate`. - """ - -def separate(pattern: str) -> Iterator[Match[str]]: - """ - Separate out character sets to avoid translating their contents. + def translate(pattern: str) -> str: ... + def match_dirs(pattern: str) -> str: ... + def translate_core(pattern: str) -> str: ... + def replace(match: Match[str]) -> str: ... - >>> [m.group(0) for m in separate('*.txt')] - ['*.txt'] - >>> [m.group(0) for m in separate('a[?]txt')] - ['a', '[?]', 'txt'] - """ +def separate(pattern: str) -> Iterator[Match[str]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi index d0025b07f0521..22af3c272759b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi @@ -1,15 +1,3 @@ -"""zipimport provides support for importing Python modules from Zip archives. - -This module exports two objects: -- zipimporter: a class; its constructor takes a path to a Zip archive. -- ZipImportError: exception raised by zipimporter objects. It's a - subclass of ImportError, so it can be caught as ImportError, too. - -It is usually not needed to use the zipimport module explicitly; it is -used by the builtin import mechanism for sys.path items that are paths -to Zip archives. -""" - import sys from _typeshed import StrOrBytesPath from importlib.machinery import ModuleSpec @@ -31,20 +19,6 @@ __all__ = ["ZipImportError", "zipimporter"] class ZipImportError(ImportError): ... class zipimporter(_LoaderBasics): - """zipimporter(archivepath) -> zipimporter object - - Create a new zipimporter instance. 'archivepath' must be a path to - a zipfile, or to a specific path inside a zipfile. For example, it can be - '/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a - valid directory inside the archive. - - 'ZipImportError is raised if 'archivepath' doesn't point to a valid Zip - archive. - - The 'archive' attribute of zipimporter objects contains the name of the - zipfile targeted. - """ - archive: str prefix: str if sys.version_info >= (3, 11): @@ -55,132 +29,31 @@ class zipimporter(_LoaderBasics): if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: - """find_loader(fullname, path=None) -> self, str or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. 
It returns the zipimporter - instance itself if the module was found, a string containing the - full path name if it's possibly a portion of a namespace package, - or None otherwise. The optional 'path' argument is ignored -- it's - there for compatibility with the importer protocol. - - Deprecated since Python 3.10. Use find_spec() instead. - """ - + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: - """find_module(fullname, path=None) -> self or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, or None if it wasn't. - The optional 'path' argument is ignored -- it's there for compatibility - with the importer protocol. - - Deprecated since Python 3.10. Use find_spec() instead. - """ + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... else: - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: - """find_loader(fullname, path=None) -> self, str or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, a string containing the - full path name if it's possibly a portion of a namespace package, - or None otherwise. The optional 'path' argument is ignored -- it's - there for compatibility with the importer protocol. - """ - - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: - """find_module(fullname, path=None) -> self or None. - - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, or None if it wasn't. - The optional 'path' argument is ignored -- it's there for compatibility - with the importer protocol. - """ - - def get_code(self, fullname: str) -> CodeType: - """get_code(fullname) -> code object. + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... - Return the code object for the specified module. Raise ZipImportError - if the module couldn't be imported. - """ - - def get_data(self, pathname: str) -> bytes: - """get_data(pathname) -> string with file data. - - Return the data associated with 'pathname'. Raise OSError if - the file wasn't found. - """ - - def get_filename(self, fullname: str) -> str: - """get_filename(fullname) -> filename string. - - Return the filename for the specified module or raise ZipImportError - if it couldn't be imported. - """ + def get_code(self, fullname: str) -> CodeType: ... + def get_data(self, pathname: str) -> bytes: ... + def get_filename(self, fullname: str) -> str: ... if sys.version_info >= (3, 14): - def get_resource_reader(self, fullname: str) -> ZipReader: # undocumented - """Return the ResourceReader for a module in a zip file.""" + def get_resource_reader(self, fullname: str) -> ZipReader: ... 
# undocumented elif sys.version_info >= (3, 10): - def get_resource_reader(self, fullname: str) -> ZipReader | None: # undocumented - """Return the ResourceReader for a module in a zip file.""" + def get_resource_reader(self, fullname: str) -> ZipReader | None: ... # undocumented else: - def get_resource_reader(self, fullname: str) -> ResourceReader | None: # undocumented - """Return the ResourceReader for a package in a zip file. - - If 'fullname' is a package within the zip file, return the - 'ResourceReader' object for the package. Otherwise return None. - """ - - def get_source(self, fullname: str) -> str | None: - """get_source(fullname) -> source string. - - Return the source code for the specified module. Raise ZipImportError - if the module couldn't be found, return None if the archive does - contain the module, but has no source for it. - """ - - def is_package(self, fullname: str) -> bool: - """is_package(fullname) -> bool. + def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented - Return True if the module specified by fullname is a package. - Raise ZipImportError if the module couldn't be found. - """ + def get_source(self, fullname: str) -> str | None: ... + def is_package(self, fullname: str) -> bool: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.15. Use `exec_module()` instead.") - def load_module(self, fullname: str) -> ModuleType: - """load_module(fullname) -> module. - - Load the module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the imported - module, or raises ZipImportError if it could not be imported. - - Deprecated since Python 3.10. Use exec_module() instead. - """ - - def exec_module(self, module: ModuleType) -> None: - """Execute the module.""" - - def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation.""" - - def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: - """Create a ModuleSpec for the specified module. - - Returns None if the module cannot be found. - """ - - def invalidate_caches(self) -> None: - """Invalidates the cache of file data of the archive path.""" + def load_module(self, fullname: str) -> ModuleType: ... + def exec_module(self, module: ModuleType) -> None: ... + def create_module(self, spec: ModuleSpec) -> None: ... + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... else: - def load_module(self, fullname: str) -> ModuleType: - """load_module(fullname) -> module. - - Load the module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the imported - module, or raises ZipImportError if it wasn't found. - """ + def load_module(self, fullname: str) -> ModuleType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi index 97d70804a36fe..4e410fdd18ad9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi @@ -1,18 +1,3 @@ -"""The functions in this module allow compression and decompression using the -zlib library, which is based on GNU zip. - -adler32(string[, start]) -- Compute an Adler-32 checksum. -compress(data[, level]) -- Compress data, with compression level 0-9 or -1. -compressobj([level[, ...]]) -- Return a compressor object. 
-crc32(string[, start]) -- Compute a CRC-32 checksum. -decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string. -decompressobj([wbits[, zdict]]) -- Return a decompressor object. - -'wbits' is window buffer size and container format. -Compressor objects support compress() and flush() methods; decompressor -objects support decompress() and flush(). -""" - import sys from _typeshed import ReadableBuffer from typing import Any, Final, final, type_check_only @@ -73,91 +58,17 @@ class _Decompress: def flush(self, length: int = 16384, /) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(data: ReadableBuffer, value: int = 1, /) -> int: - """Compute an Adler-32 checksum of data. - - value - Starting value of the checksum. - - The returned checksum is an integer. - """ +def adler32(data: ReadableBuffer, value: int = 1, /) -> int: ... if sys.version_info >= (3, 11): - def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: - """Returns a bytes object containing compressed data. - - data - Binary data to be compressed. - level - Compression level, in 0-9 or -1. - wbits - The window buffer size and container format. - """ + def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: ... else: - def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: - """Returns a bytes object containing compressed data. - - data - Binary data to be compressed. - level - Compression level, in 0-9 or -1. - """ + def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: ... def compressobj( level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None -) -> _Compress: - """Return a compressor object. - - level - The compression level (an integer in the range 0-9 or -1; default is - currently equivalent to 6). Higher compression levels are slower, - but produce smaller results. - method - The compression algorithm. If given, this must be DEFLATED. - wbits - +9 to +15: The base-two logarithm of the window size. Include a zlib - container. - -9 to -15: Generate a raw stream. - +25 to +31: Include a gzip container. - memLevel - Controls the amount of memory used for internal compression state. - Valid values range from 1 to 9. Higher values result in higher memory - usage, faster compression, and smaller output. - strategy - Used to tune the compression algorithm. Possible values are - Z_DEFAULT_STRATEGY, Z_FILTERED, and Z_HUFFMAN_ONLY. - zdict - The predefined compression dictionary - a sequence of bytes - containing subsequences that are likely to occur in the input data. - """ - -def crc32(data: ReadableBuffer, value: int = 0, /) -> int: - """Compute a CRC-32 checksum of data. - - value - Starting value of the checksum. - - The returned checksum is an integer. - """ - -def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: - """Returns a bytes object containing the uncompressed data. - - data - Compressed data. - wbits - The window buffer size and container format. - bufsize - The initial output buffer size. - """ - -def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: - """Return a decompressor object. - - wbits - The window buffer size and container format. - zdict - The predefined compression dictionary. This must be the same - dictionary as used by the compressor that produced the input data. - """ +) -> _Compress: ... +def crc32(data: ReadableBuffer, value: int = 0, /) -> int: ... 
+def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: ... +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi index 9cae9b7de8f88..b7433f835f83d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi @@ -18,28 +18,18 @@ class ZoneInfo(tzinfo): def key(self) -> str: ... def __new__(cls, key: str) -> Self: ... @classmethod - def no_cache(cls, key: str) -> Self: - """Get a new instance of ZoneInfo, bypassing the cache.""" + def no_cache(cls, key: str) -> Self: ... if sys.version_info >= (3, 12): @classmethod - def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: - """Create a ZoneInfo file from a file object.""" + def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: ... else: @classmethod - def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: - """Create a ZoneInfo file from a file object.""" + def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: - """Clear the ZoneInfo cache.""" - - def tzname(self, dt: datetime | None, /) -> str | None: - """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime.""" - - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the UTC offset in a zone at the given datetime.""" - - def dst(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime.""" + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... + def tzname(self, dt: datetime | None, /) -> str | None: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... + def dst(self, dt: datetime | None, /) -> timedelta | None: ... def __dir__() -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi index 69ddef03f693a..e6d2d83caac18 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi @@ -11,5 +11,4 @@ def load_data( fobj: _IOBytes, ) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[str, ...], bytes | None]: ... -class ZoneInfoNotFoundError(KeyError): - """Exception raised when a ZoneInfo key is not found.""" +class ZoneInfoNotFoundError(KeyError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi index e4f17bb61f0e3..0ef78d03e5f4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi @@ -4,23 +4,10 @@ from collections.abc import Sequence # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. 
This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 -def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: - """Reset global TZPATH.""" - -def find_tzfile(key: str) -> str | None: - """Retrieve the path to a TZif file from a key.""" - -def available_timezones() -> set[str]: - """Returns a set containing all available time zones. - - .. caution:: - - This may attempt to open a large number of files, since the best way to - determine if a given file on the time zone search path is to open it - and check for the "magic string" at the beginning. - """ +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... +def find_tzfile(key: str) -> str | None: ... +def available_timezones() -> set[str]: ... TZPATH: tuple[str, ...] -class InvalidTZPathWarning(RuntimeWarning): - """Warning raised if an invalid path is specified in PYTHONTZPATH.""" +class InvalidTZPathWarning(RuntimeWarning): ... From b5fdf0f37022ea30d5d1ee6e315e0efeee0dfd22 Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:32:44 +0000 Subject: [PATCH 2/8] Sync Linux docstrings --- .../vendor/typeshed/stdlib/__future__.pyi | 61 +- .../vendor/typeshed/stdlib/_asyncio.pyi | 224 +- .../vendor/typeshed/stdlib/_bisect.pyi | 95 +- .../vendor/typeshed/stdlib/_blake2.pyi | 38 +- .../vendor/typeshed/stdlib/_bootlocale.pyi | 5 + .../vendor/typeshed/stdlib/_bz2.pyi | 57 +- .../vendor/typeshed/stdlib/_codecs.pyi | 53 +- .../typeshed/stdlib/_collections_abc.pyi | 36 +- .../vendor/typeshed/stdlib/_compression.pyi | 8 +- .../vendor/typeshed/stdlib/_contextvars.pyi | 67 +- .../vendor/typeshed/stdlib/_csv.pyi | 115 +- .../vendor/typeshed/stdlib/_ctypes.pyi | 112 +- .../vendor/typeshed/stdlib/_curses.pyi | 1111 ++++- .../vendor/typeshed/stdlib/_curses_panel.pyi | 68 +- .../vendor/typeshed/stdlib/_dbm.pyi | 24 +- .../vendor/typeshed/stdlib/_decimal.pyi | 35 +- .../typeshed/stdlib/_frozen_importlib.pyi | 171 +- .../stdlib/_frozen_importlib_external.pyi | 321 +- .../vendor/typeshed/stdlib/_gdbm.pyi | 36 +- .../vendor/typeshed/stdlib/_hashlib.pyi | 244 +- .../vendor/typeshed/stdlib/_heapq.pyi | 83 +- .../vendor/typeshed/stdlib/_imp.pyi | 77 +- .../typeshed/stdlib/_interpchannels.pyi | 198 +- .../vendor/typeshed/stdlib/_interpqueues.pyi | 84 +- .../vendor/typeshed/stdlib/_interpreters.pyi | 154 +- .../vendor/typeshed/stdlib/_io.pyi | 641 ++- .../vendor/typeshed/stdlib/_json.pyi | 71 +- .../vendor/typeshed/stdlib/_locale.pyi | 54 +- .../vendor/typeshed/stdlib/_lsprof.pyi | 54 +- .../vendor/typeshed/stdlib/_lzma.pyi | 106 +- .../vendor/typeshed/stdlib/_markupbase.pyi | 13 +- .../vendor/typeshed/stdlib/_operator.pyi | 244 +- .../vendor/typeshed/stdlib/_osx_support.pyi | 100 +- .../vendor/typeshed/stdlib/_pickle.pyi | 179 +- .../typeshed/stdlib/_posixsubprocess.pyi | 50 +- .../vendor/typeshed/stdlib/_py_abc.pyi | 26 +- .../vendor/typeshed/stdlib/_pydecimal.pyi | 2 + .../vendor/typeshed/stdlib/_queue.pyi | 54 +- .../vendor/typeshed/stdlib/_random.pyi | 27 +- .../vendor/typeshed/stdlib/_sitebuiltins.pyi | 14 + .../vendor/typeshed/stdlib/_socket.pyi | 525 ++- .../vendor/typeshed/stdlib/_sqlite3.pyi | 60 +- .../vendor/typeshed/stdlib/_ssl.pyi | 106 +- .../vendor/typeshed/stdlib/_stat.pyi | 142 +- .../vendor/typeshed/stdlib/_struct.pyi | 131 +- .../vendor/typeshed/stdlib/_thread.pyi | 314 +- .../typeshed/stdlib/_threading_local.pyi | 18 +- .../vendor/typeshed/stdlib/_tkinter.pyi | 48 +- .../vendor/typeshed/stdlib/_tracemalloc.pyi | 65 +- 
.../vendor/typeshed/stdlib/_warnings.pyi | 28 +- .../vendor/typeshed/stdlib/_weakref.pyi | 17 +- .../vendor/typeshed/stdlib/_weakrefset.pyi | 6 +- .../vendor/typeshed/stdlib/_zstd.pyi | 245 +- .../vendor/typeshed/stdlib/abc.pyi | 111 +- .../vendor/typeshed/stdlib/aifc.pyi | 137 + .../vendor/typeshed/stdlib/annotationlib.pyi | 120 +- .../vendor/typeshed/stdlib/argparse.pyi | 235 +- .../vendor/typeshed/stdlib/array.pyi | 220 +- .../vendor/typeshed/stdlib/ast.pyi | 988 ++++- .../vendor/typeshed/stdlib/asynchat.pyi | 33 +- .../typeshed/stdlib/asyncio/__init__.pyi | 2 + .../typeshed/stdlib/asyncio/base_events.pyi | 370 +- .../typeshed/stdlib/asyncio/base_futures.pyi | 16 +- .../stdlib/asyncio/base_subprocess.pyi | 6 +- .../typeshed/stdlib/asyncio/constants.pyi | 2 + .../typeshed/stdlib/asyncio/coroutines.pyi | 15 +- .../vendor/typeshed/stdlib/asyncio/events.pyi | 515 ++- .../typeshed/stdlib/asyncio/exceptions.pyi | 36 +- .../stdlib/asyncio/format_helpers.pyi | 21 +- .../typeshed/stdlib/asyncio/futures.pyi | 6 +- .../vendor/typeshed/stdlib/asyncio/graph.pyi | 51 +- .../vendor/typeshed/stdlib/asyncio/locks.pyi | 249 +- .../vendor/typeshed/stdlib/asyncio/log.pyi | 2 + .../vendor/typeshed/stdlib/asyncio/mixins.pyi | 2 + .../stdlib/asyncio/proactor_events.pyi | 19 +- .../typeshed/stdlib/asyncio/protocols.pyi | 180 +- .../vendor/typeshed/stdlib/asyncio/queues.pyi | 130 +- .../typeshed/stdlib/asyncio/runners.pyi | 91 +- .../stdlib/asyncio/selector_events.pyi | 17 +- .../typeshed/stdlib/asyncio/sslproto.pyi | 170 +- .../typeshed/stdlib/asyncio/staggered.pyi | 54 +- .../typeshed/stdlib/asyncio/streams.pyi | 247 +- .../typeshed/stdlib/asyncio/subprocess.pyi | 6 +- .../typeshed/stdlib/asyncio/taskgroups.pyi | 21 +- .../vendor/typeshed/stdlib/asyncio/tasks.pyi | 349 +- .../typeshed/stdlib/asyncio/threads.pyi | 13 +- .../typeshed/stdlib/asyncio/timeouts.pyi | 61 +- .../vendor/typeshed/stdlib/asyncio/tools.pyi | 27 +- .../typeshed/stdlib/asyncio/transports.pyi | 215 +- .../vendor/typeshed/stdlib/asyncio/trsock.pyi | 6 + .../typeshed/stdlib/asyncio/unix_events.pyi | 251 +- .../vendor/typeshed/stdlib/asyncore.pyi | 20 + .../vendor/typeshed/stdlib/atexit.pyi | 36 +- .../vendor/typeshed/stdlib/audioop.pyi | 104 +- .../vendor/typeshed/stdlib/base64.pyi | 196 +- .../vendor/typeshed/stdlib/bdb.pyi | 364 +- .../vendor/typeshed/stdlib/binascii.pyi | 111 +- .../vendor/typeshed/stdlib/binhex.pyi | 14 +- .../vendor/typeshed/stdlib/bisect.pyi | 2 + .../vendor/typeshed/stdlib/builtins.pyi | 3590 ++++++++++++++--- .../vendor/typeshed/stdlib/bz2.pyi | 145 +- .../vendor/typeshed/stdlib/cProfile.pyi | 30 +- .../vendor/typeshed/stdlib/calendar.pyi | 265 +- .../vendor/typeshed/stdlib/cgi.pyi | 224 +- .../vendor/typeshed/stdlib/cgitb.pyi | 53 +- .../vendor/typeshed/stdlib/chunk.pyi | 76 +- .../vendor/typeshed/stdlib/cmath.pyi | 114 +- .../vendor/typeshed/stdlib/cmd.pyi | 152 +- .../vendor/typeshed/stdlib/code.pyi | 225 +- .../vendor/typeshed/stdlib/codecs.pyi | 516 ++- .../vendor/typeshed/stdlib/codeop.pyi | 104 +- .../typeshed/stdlib/collections/__init__.pyi | 515 ++- .../typeshed/stdlib/collections/abc.pyi | 4 + .../vendor/typeshed/stdlib/colorsys.pyi | 17 + .../vendor/typeshed/stdlib/compileall.pyi | 126 +- .../stdlib/compression/_common/_streams.pyi | 8 +- .../typeshed/stdlib/compression/bz2.pyi | 5 + .../typeshed/stdlib/compression/gzip.pyi | 5 + .../typeshed/stdlib/compression/lzma.pyi | 9 + .../typeshed/stdlib/compression/zlib.pyi | 14 + .../stdlib/compression/zstd/__init__.pyi | 93 +- 
.../stdlib/compression/zstd/_zstdfile.pyi | 137 +- .../stdlib/concurrent/futures/__init__.pyi | 2 + .../stdlib/concurrent/futures/_base.pyi | 268 +- .../stdlib/concurrent/futures/interpreter.pyi | 19 +- .../stdlib/concurrent/futures/process.pyi | 169 +- .../stdlib/concurrent/futures/thread.pyi | 31 +- .../concurrent/interpreters/__init__.pyi | 89 +- .../concurrent/interpreters/_crossinterp.pyi | 18 +- .../concurrent/interpreters/_queues.pyi | 88 +- .../vendor/typeshed/stdlib/configparser.pyi | 384 +- .../vendor/typeshed/stdlib/contextlib.pyi | 261 +- .../vendor/typeshed/stdlib/copy.pyi | 69 +- .../vendor/typeshed/stdlib/copyreg.pyi | 13 +- .../vendor/typeshed/stdlib/crypt.pyi | 30 +- .../vendor/typeshed/stdlib/csv.pyi | 103 +- .../typeshed/stdlib/ctypes/__init__.pyi | 95 +- .../vendor/typeshed/stdlib/ctypes/_endian.pyi | 12 +- .../stdlib/ctypes/macholib/__init__.pyi | 7 + .../typeshed/stdlib/ctypes/macholib/dyld.pyi | 18 +- .../typeshed/stdlib/ctypes/macholib/dylib.pyi | 24 +- .../stdlib/ctypes/macholib/framework.pyi | 24 +- .../vendor/typeshed/stdlib/ctypes/util.pyi | 4 +- .../typeshed/stdlib/curses/__init__.pyi | 19 +- .../vendor/typeshed/stdlib/curses/ascii.pyi | 2 + .../vendor/typeshed/stdlib/curses/panel.pyi | 4 + .../vendor/typeshed/stdlib/curses/textpad.pyi | 42 +- .../vendor/typeshed/stdlib/dataclasses.pyi | 291 +- .../vendor/typeshed/stdlib/datetime.pyi | 356 +- .../vendor/typeshed/stdlib/dbm/__init__.pyi | 78 +- .../vendor/typeshed/stdlib/dbm/dumb.pyi | 52 +- .../vendor/typeshed/stdlib/dbm/gnu.pyi | 2 + .../vendor/typeshed/stdlib/dbm/ndbm.pyi | 2 + .../vendor/typeshed/stdlib/dbm/sqlite3.pyi | 15 +- .../vendor/typeshed/stdlib/decimal.pyi | 1011 ++++- .../vendor/typeshed/stdlib/difflib.pyi | 755 +++- .../vendor/typeshed/stdlib/dis.pyi | 252 +- .../typeshed/stdlib/distutils/__init__.pyi | 9 + .../stdlib/distutils/archive_util.pyi | 50 +- .../stdlib/distutils/bcppcompiler.pyi | 10 +- .../typeshed/stdlib/distutils/ccompiler.pyi | 341 +- .../vendor/typeshed/stdlib/distutils/cmd.pyi | 151 +- .../stdlib/distutils/command/__init__.pyi | 5 + .../stdlib/distutils/command/bdist.pyi | 9 +- .../stdlib/distutils/command/bdist_dumb.pyi | 6 + .../stdlib/distutils/command/bdist_rpm.pyi | 5 + .../distutils/command/bdist_wininst.pyi | 5 + .../stdlib/distutils/command/build.pyi | 4 + .../stdlib/distutils/command/build_clib.pyi | 17 +- .../stdlib/distutils/command/build_ext.pyi | 62 +- .../stdlib/distutils/command/build_py.pyi | 40 +- .../distutils/command/build_scripts.pyi | 11 +- .../stdlib/distutils/command/check.pyi | 34 +- .../stdlib/distutils/command/clean.pyi | 4 + .../stdlib/distutils/command/config.pyi | 81 +- .../stdlib/distutils/command/install.pyi | 89 +- .../stdlib/distutils/command/install_data.pyi | 5 + .../distutils/command/install_egg_info.pyi | 26 +- .../distutils/command/install_headers.pyi | 5 + .../stdlib/distutils/command/install_lib.pyi | 18 +- .../distutils/command/install_scripts.pyi | 5 + .../stdlib/distutils/command/register.pyi | 49 +- .../stdlib/distutils/command/sdist.pyi | 94 +- .../stdlib/distutils/command/upload.pyi | 6 + .../typeshed/stdlib/distutils/config.pyi | 26 +- .../vendor/typeshed/stdlib/distutils/core.pyi | 72 +- .../stdlib/distutils/cygwinccompiler.pyi | 48 +- .../typeshed/stdlib/distutils/dep_util.pyi | 34 +- .../typeshed/stdlib/distutils/dir_util.pyi | 54 +- .../vendor/typeshed/stdlib/distutils/dist.pyi | 180 +- .../typeshed/stdlib/distutils/errors.pyi | 105 +- .../typeshed/stdlib/distutils/extension.pyi | 68 + .../stdlib/distutils/fancy_getopt.pyi | 62 +- 
.../typeshed/stdlib/distutils/file_util.pyi | 44 +- .../typeshed/stdlib/distutils/filelist.pyi | 77 +- .../vendor/typeshed/stdlib/distutils/log.pyi | 2 + .../stdlib/distutils/msvccompiler.pyi | 10 +- .../typeshed/stdlib/distutils/spawn.pyi | 31 +- .../typeshed/stdlib/distutils/sysconfig.pyi | 85 +- .../typeshed/stdlib/distutils/text_file.pyi | 114 +- .../stdlib/distutils/unixccompiler.pyi | 14 + .../vendor/typeshed/stdlib/distutils/util.pyi | 146 +- .../typeshed/stdlib/distutils/version.pyi | 87 + .../vendor/typeshed/stdlib/doctest.pyi | 830 +++- .../vendor/typeshed/stdlib/email/__init__.pyi | 26 +- .../stdlib/email/_header_value_parser.pyi | 487 ++- .../typeshed/stdlib/email/_policybase.pyi | 301 +- .../typeshed/stdlib/email/base64mime.pyi | 51 +- .../vendor/typeshed/stdlib/email/charset.pyi | 143 +- .../vendor/typeshed/stdlib/email/encoders.pyi | 22 +- .../vendor/typeshed/stdlib/email/errors.pyi | 106 +- .../typeshed/stdlib/email/feedparser.pyi | 46 +- .../typeshed/stdlib/email/generator.pyi | 116 +- .../vendor/typeshed/stdlib/email/header.pyi | 122 +- .../typeshed/stdlib/email/headerregistry.pyi | 196 +- .../typeshed/stdlib/email/iterators.pyi | 27 +- .../vendor/typeshed/stdlib/email/message.pyi | 455 ++- .../stdlib/email/mime/application.pyi | 20 +- .../typeshed/stdlib/email/mime/audio.pyi | 26 +- .../typeshed/stdlib/email/mime/base.pyi | 12 +- .../typeshed/stdlib/email/mime/image.pyi | 25 +- .../typeshed/stdlib/email/mime/message.pyi | 15 +- .../typeshed/stdlib/email/mime/multipart.pyi | 24 +- .../stdlib/email/mime/nonmultipart.pyi | 6 +- .../typeshed/stdlib/email/mime/text.pyi | 16 +- .../vendor/typeshed/stdlib/email/parser.pyi | 77 +- .../vendor/typeshed/stdlib/email/policy.pyi | 206 +- .../typeshed/stdlib/email/quoprimime.pyi | 99 +- .../vendor/typeshed/stdlib/email/utils.pyi | 151 +- .../typeshed/stdlib/encodings/__init__.pyi | 41 +- .../typeshed/stdlib/encodings/aliases.pyi | 17 + .../typeshed/stdlib/encodings/ascii.pyi | 8 + .../stdlib/encodings/base64_codec.pyi | 6 + .../typeshed/stdlib/encodings/bz2_codec.pyi | 8 + .../typeshed/stdlib/encodings/charmap.pyi | 11 + .../typeshed/stdlib/encodings/cp037.pyi | 3 + .../typeshed/stdlib/encodings/cp1006.pyi | 3 + .../typeshed/stdlib/encodings/cp1026.pyi | 3 + .../typeshed/stdlib/encodings/cp1125.pyi | 3 + .../typeshed/stdlib/encodings/cp1140.pyi | 3 + .../typeshed/stdlib/encodings/cp1250.pyi | 3 + .../typeshed/stdlib/encodings/cp1251.pyi | 3 + .../typeshed/stdlib/encodings/cp1252.pyi | 3 + .../typeshed/stdlib/encodings/cp1253.pyi | 3 + .../typeshed/stdlib/encodings/cp1254.pyi | 3 + .../typeshed/stdlib/encodings/cp1255.pyi | 3 + .../typeshed/stdlib/encodings/cp1256.pyi | 3 + .../typeshed/stdlib/encodings/cp1257.pyi | 3 + .../typeshed/stdlib/encodings/cp1258.pyi | 3 + .../typeshed/stdlib/encodings/cp273.pyi | 3 + .../typeshed/stdlib/encodings/cp424.pyi | 3 + .../typeshed/stdlib/encodings/cp437.pyi | 3 + .../typeshed/stdlib/encodings/cp500.pyi | 3 + .../typeshed/stdlib/encodings/cp720.pyi | 4 + .../typeshed/stdlib/encodings/cp737.pyi | 3 + .../typeshed/stdlib/encodings/cp775.pyi | 3 + .../typeshed/stdlib/encodings/cp850.pyi | 3 + .../typeshed/stdlib/encodings/cp852.pyi | 3 + .../typeshed/stdlib/encodings/cp855.pyi | 3 + .../typeshed/stdlib/encodings/cp856.pyi | 3 + .../typeshed/stdlib/encodings/cp857.pyi | 3 + .../typeshed/stdlib/encodings/cp858.pyi | 3 + .../typeshed/stdlib/encodings/cp860.pyi | 3 + .../typeshed/stdlib/encodings/cp861.pyi | 3 + .../typeshed/stdlib/encodings/cp862.pyi | 3 + .../typeshed/stdlib/encodings/cp863.pyi | 3 + 
.../typeshed/stdlib/encodings/cp864.pyi | 3 + .../typeshed/stdlib/encodings/cp865.pyi | 3 + .../typeshed/stdlib/encodings/cp866.pyi | 3 + .../typeshed/stdlib/encodings/cp869.pyi | 3 + .../typeshed/stdlib/encodings/cp874.pyi | 3 + .../typeshed/stdlib/encodings/cp875.pyi | 3 + .../typeshed/stdlib/encodings/hex_codec.pyi | 6 + .../typeshed/stdlib/encodings/hp_roman8.pyi | 10 + .../typeshed/stdlib/encodings/iso8859_1.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_10.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_11.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_13.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_14.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_15.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_16.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_2.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_3.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_4.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_5.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_6.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_7.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_8.pyi | 3 + .../typeshed/stdlib/encodings/iso8859_9.pyi | 3 + .../typeshed/stdlib/encodings/koi8_r.pyi | 3 + .../typeshed/stdlib/encodings/koi8_t.pyi | 2 + .../typeshed/stdlib/encodings/koi8_u.pyi | 3 + .../typeshed/stdlib/encodings/kz1048.pyi | 3 + .../typeshed/stdlib/encodings/latin_1.pyi | 8 + .../typeshed/stdlib/encodings/mac_arabic.pyi | 3 + .../stdlib/encodings/mac_croatian.pyi | 3 + .../stdlib/encodings/mac_cyrillic.pyi | 3 + .../typeshed/stdlib/encodings/mac_farsi.pyi | 3 + .../typeshed/stdlib/encodings/mac_greek.pyi | 3 + .../typeshed/stdlib/encodings/mac_iceland.pyi | 3 + .../typeshed/stdlib/encodings/mac_latin2.pyi | 8 + .../typeshed/stdlib/encodings/mac_roman.pyi | 3 + .../stdlib/encodings/mac_romanian.pyi | 3 + .../typeshed/stdlib/encodings/mac_turkish.pyi | 3 + .../typeshed/stdlib/encodings/palmos.pyi | 5 + .../typeshed/stdlib/encodings/ptcp154.pyi | 8 + .../typeshed/stdlib/encodings/punycode.pyi | 40 +- .../stdlib/encodings/quopri_codec.pyi | 4 + .../stdlib/encodings/raw_unicode_escape.pyi | 8 + .../typeshed/stdlib/encodings/rot_13.pyi | 6 + .../typeshed/stdlib/encodings/tis_620.pyi | 3 + .../typeshed/stdlib/encodings/undefined.pyi | 11 + .../stdlib/encodings/unicode_escape.pyi | 8 + .../typeshed/stdlib/encodings/utf_16.pyi | 8 + .../typeshed/stdlib/encodings/utf_16_be.pyi | 8 + .../typeshed/stdlib/encodings/utf_16_le.pyi | 8 + .../typeshed/stdlib/encodings/utf_32.pyi | 3 + .../typeshed/stdlib/encodings/utf_32_be.pyi | 3 + .../typeshed/stdlib/encodings/utf_32_le.pyi | 3 + .../typeshed/stdlib/encodings/utf_7.pyi | 4 + .../typeshed/stdlib/encodings/utf_8.pyi | 8 + .../typeshed/stdlib/encodings/utf_8_sig.pyi | 9 + .../typeshed/stdlib/encodings/uu_codec.pyi | 8 + .../typeshed/stdlib/encodings/zlib_codec.pyi | 6 + .../typeshed/stdlib/ensurepip/__init__.pyi | 13 +- .../vendor/typeshed/stdlib/enum.pyi | 371 +- .../vendor/typeshed/stdlib/errno.pyi | 13 + .../vendor/typeshed/stdlib/faulthandler.pyi | 43 +- .../vendor/typeshed/stdlib/fcntl.pyi | 82 +- .../vendor/typeshed/stdlib/filecmp.pyi | 95 +- .../vendor/typeshed/stdlib/fileinput.pyi | 152 +- .../vendor/typeshed/stdlib/fnmatch.pyi | 48 +- .../vendor/typeshed/stdlib/formatter.pyi | 57 +- .../vendor/typeshed/stdlib/fractions.pyi | 283 +- .../vendor/typeshed/stdlib/ftplib.pyi | 354 +- .../vendor/typeshed/stdlib/functools.pyi | 250 +- .../ty_vendored/vendor/typeshed/stdlib/gc.pyi | 127 +- .../vendor/typeshed/stdlib/genericpath.pyi | 70 +- .../vendor/typeshed/stdlib/getopt.pyi | 59 +- 
.../vendor/typeshed/stdlib/getpass.pyi | 58 +- .../vendor/typeshed/stdlib/gettext.pyi | 16 +- .../vendor/typeshed/stdlib/glob.pyi | 95 +- .../vendor/typeshed/stdlib/graphlib.pyi | 92 +- .../vendor/typeshed/stdlib/grp.pyi | 56 +- .../vendor/typeshed/stdlib/gzip.pyi | 118 +- .../vendor/typeshed/stdlib/hashlib.pyi | 67 +- .../vendor/typeshed/stdlib/heapq.pyi | 65 +- .../vendor/typeshed/stdlib/hmac.pyi | 72 +- .../vendor/typeshed/stdlib/html/__init__.pyi | 20 +- .../vendor/typeshed/stdlib/html/entities.pyi | 2 + .../vendor/typeshed/stdlib/html/parser.pyi | 44 +- .../vendor/typeshed/stdlib/http/__init__.pyi | 25 + .../vendor/typeshed/stdlib/http/client.pyi | 247 +- .../vendor/typeshed/stdlib/http/cookiejar.pyi | 229 +- .../vendor/typeshed/stdlib/http/cookies.pyi | 154 +- .../vendor/typeshed/stdlib/http/server.pyi | 433 +- .../vendor/typeshed/stdlib/imaplib.pyi | 504 ++- .../vendor/typeshed/stdlib/imghdr.pyi | 6 +- .../vendor/typeshed/stdlib/imp.pyi | 117 +- .../typeshed/stdlib/importlib/__init__.pyi | 32 +- .../vendor/typeshed/stdlib/importlib/_abc.pyi | 36 +- .../typeshed/stdlib/importlib/_bootstrap.pyi | 8 + .../stdlib/importlib/_bootstrap_external.pyi | 8 + .../vendor/typeshed/stdlib/importlib/abc.pyi | 374 +- .../typeshed/stdlib/importlib/machinery.pyi | 6 +- .../stdlib/importlib/metadata/__init__.pyi | 531 ++- .../stdlib/importlib/metadata/_meta.pyi | 22 +- .../typeshed/stdlib/importlib/readers.pyi | 32 +- .../stdlib/importlib/resources/__init__.pyi | 70 +- .../stdlib/importlib/resources/_common.pyi | 46 +- .../importlib/resources/_functional.pyi | 35 +- .../stdlib/importlib/resources/abc.pyi | 95 +- .../stdlib/importlib/resources/simple.pyi | 39 +- .../typeshed/stdlib/importlib/simple.pyi | 6 + .../vendor/typeshed/stdlib/importlib/util.pyi | 71 +- .../vendor/typeshed/stdlib/inspect.pyi | 821 +++- .../ty_vendored/vendor/typeshed/stdlib/io.pyi | 113 +- .../vendor/typeshed/stdlib/ipaddress.pyi | 823 +++- .../vendor/typeshed/stdlib/itertools.pyi | 309 +- .../vendor/typeshed/stdlib/json/__init__.pyi | 235 +- .../vendor/typeshed/stdlib/json/decoder.pyi | 89 +- .../vendor/typeshed/stdlib/json/encoder.pyi | 133 +- .../vendor/typeshed/stdlib/json/scanner.pyi | 2 + .../vendor/typeshed/stdlib/json/tool.pyi | 5 + .../vendor/typeshed/stdlib/keyword.pyi | 19 +- .../typeshed/stdlib/lib2to3/btm_matcher.pyi | 44 +- .../typeshed/stdlib/lib2to3/fixer_base.pyi | 96 +- .../stdlib/lib2to3/fixes/fix_apply.pyi | 4 + .../stdlib/lib2to3/fixes/fix_asserts.pyi | 2 + .../stdlib/lib2to3/fixes/fix_basestring.pyi | 2 + .../stdlib/lib2to3/fixes/fix_buffer.pyi | 2 + .../stdlib/lib2to3/fixes/fix_dict.pyi | 25 + .../stdlib/lib2to3/fixes/fix_except.pyi | 21 + .../stdlib/lib2to3/fixes/fix_exec.pyi | 7 + .../stdlib/lib2to3/fixes/fix_execfile.pyi | 5 + .../stdlib/lib2to3/fixes/fix_exitfunc.pyi | 3 + .../stdlib/lib2to3/fixes/fix_filter.pyi | 11 + .../stdlib/lib2to3/fixes/fix_funcattrs.pyi | 2 + .../stdlib/lib2to3/fixes/fix_future.pyi | 4 + .../stdlib/lib2to3/fixes/fix_getcwdu.pyi | 3 + .../stdlib/lib2to3/fixes/fix_has_key.pyi | 27 + .../stdlib/lib2to3/fixes/fix_idioms.pyi | 27 + .../stdlib/lib2to3/fixes/fix_import.pyi | 16 +- .../stdlib/lib2to3/fixes/fix_imports.pyi | 2 + .../stdlib/lib2to3/fixes/fix_imports2.pyi | 3 + .../stdlib/lib2to3/fixes/fix_input.pyi | 2 + .../stdlib/lib2to3/fixes/fix_intern.pyi | 4 + .../stdlib/lib2to3/fixes/fix_isinstance.pyi | 7 + .../stdlib/lib2to3/fixes/fix_itertools.pyi | 8 + .../lib2to3/fixes/fix_itertools_imports.pyi | 2 + .../stdlib/lib2to3/fixes/fix_long.pyi | 2 + 
.../typeshed/stdlib/lib2to3/fixes/fix_map.pyi | 17 + .../stdlib/lib2to3/fixes/fix_metaclass.pyi | 40 +- .../stdlib/lib2to3/fixes/fix_methodattrs.pyi | 2 + .../typeshed/stdlib/lib2to3/fixes/fix_ne.pyi | 2 + .../stdlib/lib2to3/fixes/fix_next.pyi | 2 + .../stdlib/lib2to3/fixes/fix_nonzero.pyi | 2 + .../stdlib/lib2to3/fixes/fix_numliterals.pyi | 2 + .../stdlib/lib2to3/fixes/fix_operator.pyi | 10 + .../stdlib/lib2to3/fixes/fix_paren.pyi | 4 + .../stdlib/lib2to3/fixes/fix_print.pyi | 11 + .../stdlib/lib2to3/fixes/fix_raise.pyi | 22 + .../stdlib/lib2to3/fixes/fix_raw_input.pyi | 2 + .../stdlib/lib2to3/fixes/fix_reduce.pyi | 5 + .../stdlib/lib2to3/fixes/fix_reload.pyi | 4 + .../stdlib/lib2to3/fixes/fix_renames.pyi | 5 + .../stdlib/lib2to3/fixes/fix_repr.pyi | 2 + .../stdlib/lib2to3/fixes/fix_set_literal.pyi | 3 + .../lib2to3/fixes/fix_standarderror.pyi | 2 + .../stdlib/lib2to3/fixes/fix_sys_exc.pyi | 6 + .../stdlib/lib2to3/fixes/fix_throw.pyi | 8 + .../stdlib/lib2to3/fixes/fix_tuple_params.pyi | 18 + .../stdlib/lib2to3/fixes/fix_types.pyi | 17 + .../stdlib/lib2to3/fixes/fix_unicode.pyi | 9 + .../stdlib/lib2to3/fixes/fix_urllib.pyi | 20 +- .../stdlib/lib2to3/fixes/fix_ws_comma.pyi | 6 + .../stdlib/lib2to3/fixes/fix_xrange.pyi | 2 + .../stdlib/lib2to3/fixes/fix_xreadlines.pyi | 4 + .../typeshed/stdlib/lib2to3/fixes/fix_zip.pyi | 8 + .../vendor/typeshed/stdlib/lib2to3/main.pyi | 46 +- .../stdlib/lib2to3/pgen2/__init__.pyi | 2 + .../typeshed/stdlib/lib2to3/pgen2/driver.pyi | 29 +- .../typeshed/stdlib/lib2to3/pgen2/grammar.pyi | 79 +- .../stdlib/lib2to3/pgen2/literals.pyi | 2 + .../typeshed/stdlib/lib2to3/pgen2/parse.pyi | 102 +- .../typeshed/stdlib/lib2to3/pgen2/token.pyi | 2 + .../stdlib/lib2to3/pgen2/tokenize.pyi | 74 +- .../vendor/typeshed/stdlib/lib2to3/pygram.pyi | 9 +- .../vendor/typeshed/stdlib/lib2to3/pytree.pyi | 316 +- .../typeshed/stdlib/lib2to3/refactor.pyi | 164 +- .../vendor/typeshed/stdlib/linecache.pyi | 45 +- .../vendor/typeshed/stdlib/locale.pyi | 132 +- .../typeshed/stdlib/logging/__init__.pyi | 1014 ++++- .../vendor/typeshed/stdlib/logging/config.pyi | 141 +- .../typeshed/stdlib/logging/handlers.pyi | 518 ++- .../vendor/typeshed/stdlib/lzma.pyi | 159 +- .../vendor/typeshed/stdlib/mailbox.pyi | 611 ++- .../vendor/typeshed/stdlib/mailcap.pyi | 23 +- .../vendor/typeshed/stdlib/marshal.pyi | 164 +- .../vendor/typeshed/stdlib/math.pyi | 346 +- .../vendor/typeshed/stdlib/mimetypes.pyi | 189 +- .../vendor/typeshed/stdlib/mmap.pyi | 46 +- .../vendor/typeshed/stdlib/modulefinder.pyi | 23 +- .../stdlib/multiprocessing/connection.pyi | 88 +- .../stdlib/multiprocessing/context.pyi | 129 +- .../stdlib/multiprocessing/forkserver.pyi | 36 +- .../typeshed/stdlib/multiprocessing/heap.pyi | 3 + .../stdlib/multiprocessing/managers.pyi | 160 +- .../typeshed/stdlib/multiprocessing/pool.pyi | 53 +- .../stdlib/multiprocessing/process.pyi | 79 +- .../stdlib/multiprocessing/queues.pyi | 12 +- .../stdlib/multiprocessing/reduction.pyi | 34 +- .../multiprocessing/resource_sharer.pyi | 10 +- .../multiprocessing/resource_tracker.pyi | 19 +- .../stdlib/multiprocessing/shared_memory.pyi | 79 +- .../stdlib/multiprocessing/sharedctypes.pyi | 20 +- .../typeshed/stdlib/multiprocessing/spawn.pyi | 35 +- .../typeshed/stdlib/multiprocessing/util.pyi | 35 +- .../vendor/typeshed/stdlib/netrc.pyi | 8 +- .../vendor/typeshed/stdlib/nntplib.pyi | 302 +- .../vendor/typeshed/stdlib/ntpath.pyi | 9 +- .../vendor/typeshed/stdlib/nturl2path.pyi | 25 +- .../vendor/typeshed/stdlib/numbers.pyi | 377 +- 
.../vendor/typeshed/stdlib/opcode.pyi | 8 +- .../vendor/typeshed/stdlib/operator.pyi | 35 +- .../vendor/typeshed/stdlib/optparse.pyi | 324 +- .../vendor/typeshed/stdlib/os/__init__.pyi | 1926 +++++++-- .../vendor/typeshed/stdlib/os/path.pyi | 11 + .../vendor/typeshed/stdlib/parser.pyi | 60 +- .../typeshed/stdlib/pathlib/__init__.pyi | 598 ++- .../vendor/typeshed/stdlib/pathlib/types.pyi | 6 + .../vendor/typeshed/stdlib/pdb.pyi | 874 +++- .../vendor/typeshed/stdlib/pickle.pyi | 177 +- .../vendor/typeshed/stdlib/pickletools.pyi | 371 +- .../vendor/typeshed/stdlib/pipes.pyi | 89 +- .../vendor/typeshed/stdlib/pkgutil.pyi | 198 +- .../vendor/typeshed/stdlib/platform.pyi | 264 +- .../vendor/typeshed/stdlib/plistlib.pyi | 96 +- .../vendor/typeshed/stdlib/poplib.pyi | 183 +- .../vendor/typeshed/stdlib/posix.pyi | 5 + .../vendor/typeshed/stdlib/posixpath.pyi | 105 +- .../vendor/typeshed/stdlib/pprint.pyi | 120 +- .../vendor/typeshed/stdlib/profile.pyi | 58 +- .../vendor/typeshed/stdlib/pstats.pyi | 56 +- .../vendor/typeshed/stdlib/pty.pyi | 42 +- .../vendor/typeshed/stdlib/pwd.pyi | 61 +- .../vendor/typeshed/stdlib/py_compile.pyi | 89 +- .../vendor/typeshed/stdlib/pyclbr.pyi | 62 +- .../vendor/typeshed/stdlib/pydoc.pyi | 433 +- .../typeshed/stdlib/pyexpat/__init__.pyi | 66 +- .../vendor/typeshed/stdlib/pyexpat/errors.pyi | 2 + .../vendor/typeshed/stdlib/pyexpat/model.pyi | 2 + .../vendor/typeshed/stdlib/queue.pyi | 133 +- .../vendor/typeshed/stdlib/quopri.pyi | 18 +- .../vendor/typeshed/stdlib/random.pyi | 381 +- .../ty_vendored/vendor/typeshed/stdlib/re.pyi | 331 +- .../vendor/typeshed/stdlib/readline.pyi | 137 +- .../vendor/typeshed/stdlib/reprlib.pyi | 6 +- .../vendor/typeshed/stdlib/resource.pyi | 75 +- .../vendor/typeshed/stdlib/rlcompleter.pyi | 74 +- .../vendor/typeshed/stdlib/runpy.pyi | 47 +- .../vendor/typeshed/stdlib/sched.pyi | 91 +- .../vendor/typeshed/stdlib/secrets.pyi | 53 +- .../vendor/typeshed/stdlib/select.pyi | 97 +- .../vendor/typeshed/stdlib/selectors.pyi | 124 +- .../vendor/typeshed/stdlib/shelve.pyi | 104 +- .../vendor/typeshed/stdlib/shlex.pyi | 44 +- .../vendor/typeshed/stdlib/shutil.pyi | 302 +- .../vendor/typeshed/stdlib/signal.pyi | 195 +- .../vendor/typeshed/stdlib/site.pyi | 182 +- .../vendor/typeshed/stdlib/smtpd.pyi | 76 +- .../vendor/typeshed/stdlib/smtplib.pyi | 483 ++- .../vendor/typeshed/stdlib/sndhdr.pyi | 38 +- .../vendor/typeshed/stdlib/socket.pyi | 246 +- .../vendor/typeshed/stdlib/socketserver.pyi | 382 +- .../vendor/typeshed/stdlib/spwd.pyi | 72 +- .../typeshed/stdlib/sqlite3/__init__.pyi | 383 +- .../vendor/typeshed/stdlib/sre_compile.pyi | 2 + .../vendor/typeshed/stdlib/sre_constants.pyi | 2 + .../vendor/typeshed/stdlib/sre_parse.pyi | 2 + .../vendor/typeshed/stdlib/ssl.pyi | 471 ++- .../vendor/typeshed/stdlib/stat.pyi | 4 + .../vendor/typeshed/stdlib/statistics.pyi | 859 +++- .../typeshed/stdlib/string/__init__.pyi | 29 +- .../typeshed/stdlib/string/templatelib.pyi | 30 +- .../vendor/typeshed/stdlib/stringprep.pyi | 5 + .../vendor/typeshed/stdlib/struct.pyi | 29 + .../vendor/typeshed/stdlib/subprocess.pyi | 526 ++- .../vendor/typeshed/stdlib/sunau.pyi | 106 + .../vendor/typeshed/stdlib/symbol.pyi | 2 + .../vendor/typeshed/stdlib/symtable.pyi | 191 +- .../vendor/typeshed/stdlib/sys/__init__.pyi | 639 ++- .../vendor/typeshed/stdlib/sysconfig.pyi | 85 +- .../vendor/typeshed/stdlib/syslog.pyi | 25 +- .../vendor/typeshed/stdlib/tabnanny.pyi | 25 +- .../vendor/typeshed/stdlib/tarfile.pyi | 451 ++- .../vendor/typeshed/stdlib/telnetlib.pyi | 272 +- 
.../vendor/typeshed/stdlib/tempfile.pyi | 205 +- .../vendor/typeshed/stdlib/termios.pyi | 71 +- .../vendor/typeshed/stdlib/textwrap.pyi | 181 +- .../vendor/typeshed/stdlib/threading.pyi | 600 ++- .../vendor/typeshed/stdlib/time.pyi | 302 +- .../vendor/typeshed/stdlib/timeit.pyi | 154 +- .../typeshed/stdlib/tkinter/__init__.pyi | 3548 +++++++++++++--- .../typeshed/stdlib/tkinter/colorchooser.pyi | 21 +- .../vendor/typeshed/stdlib/tkinter/dnd.pyi | 100 + .../typeshed/stdlib/tkinter/filedialog.pyi | 82 +- .../vendor/typeshed/stdlib/tkinter/font.pyi | 63 +- .../typeshed/stdlib/tkinter/messagebox.pyi | 34 +- .../typeshed/stdlib/tkinter/scrolledtext.pyi | 12 + .../typeshed/stdlib/tkinter/simpledialog.pyi | 92 +- .../vendor/typeshed/stdlib/tkinter/tix.pyi | 434 +- .../vendor/typeshed/stdlib/tkinter/ttk.pyi | 1147 +++++- .../vendor/typeshed/stdlib/token.pyi | 2 + .../vendor/typeshed/stdlib/tokenize.pyi | 93 +- .../vendor/typeshed/stdlib/tomllib.pyi | 21 +- .../vendor/typeshed/stdlib/trace.pyi | 96 +- .../vendor/typeshed/stdlib/traceback.pyi | 375 +- .../vendor/typeshed/stdlib/tracemalloc.pyi | 112 +- .../vendor/typeshed/stdlib/tty.pyi | 18 +- .../vendor/typeshed/stdlib/turtle.pyi | 3504 +++++++++++++++- .../vendor/typeshed/stdlib/types.pyi | 437 +- .../vendor/typeshed/stdlib/typing.pyi | 1049 ++++- .../typeshed/stdlib/typing_extensions.pyi | 689 +++- .../vendor/typeshed/stdlib/unicodedata.pyi | 174 +- .../typeshed/stdlib/unittest/__init__.pyi | 45 + .../vendor/typeshed/stdlib/unittest/_log.pyi | 4 + .../typeshed/stdlib/unittest/async_case.pyi | 7 +- .../vendor/typeshed/stdlib/unittest/case.pyi | 495 ++- .../typeshed/stdlib/unittest/loader.pyi | 66 +- .../vendor/typeshed/stdlib/unittest/main.pyi | 5 + .../vendor/typeshed/stdlib/unittest/mock.pyi | 517 ++- .../typeshed/stdlib/unittest/result.pyi | 83 +- .../typeshed/stdlib/unittest/runner.pyi | 36 +- .../vendor/typeshed/stdlib/unittest/suite.pyi | 16 +- .../vendor/typeshed/stdlib/unittest/util.pyi | 31 +- .../vendor/typeshed/stdlib/urllib/error.pyi | 16 + .../vendor/typeshed/stdlib/urllib/parse.pyi | 328 +- .../vendor/typeshed/stdlib/urllib/request.pyi | 401 +- .../typeshed/stdlib/urllib/response.pyi | 15 + .../typeshed/stdlib/urllib/robotparser.pyi | 50 +- .../ty_vendored/vendor/typeshed/stdlib/uu.pyi | 13 +- .../vendor/typeshed/stdlib/uuid.pyi | 215 +- .../vendor/typeshed/stdlib/venv/__init__.pyi | 140 +- .../vendor/typeshed/stdlib/warnings.pyi | 114 +- .../vendor/typeshed/stdlib/wave.pyi | 127 + .../vendor/typeshed/stdlib/weakref.pyi | 104 +- .../vendor/typeshed/stdlib/webbrowser.pyi | 70 +- .../typeshed/stdlib/wsgiref/__init__.pyi | 25 + .../typeshed/stdlib/wsgiref/handlers.pyi | 195 +- .../typeshed/stdlib/wsgiref/headers.pyi | 100 +- .../typeshed/stdlib/wsgiref/simple_server.pyi | 17 +- .../vendor/typeshed/stdlib/wsgiref/types.pyi | 10 + .../vendor/typeshed/stdlib/wsgiref/util.pyi | 47 +- .../typeshed/stdlib/wsgiref/validate.pyi | 122 +- .../vendor/typeshed/stdlib/xdrlib.pyi | 18 + .../vendor/typeshed/stdlib/xml/__init__.pyi | 17 + .../typeshed/stdlib/xml/dom/NodeFilter.pyi | 3 + .../typeshed/stdlib/xml/dom/__init__.pyi | 22 + .../vendor/typeshed/stdlib/xml/dom/domreg.pyi | 28 +- .../typeshed/stdlib/xml/dom/expatbuilder.pyi | 114 +- .../typeshed/stdlib/xml/dom/minicompat.pyi | 13 +- .../typeshed/stdlib/xml/dom/minidom.pyi | 263 +- .../typeshed/stdlib/xml/dom/pulldom.pyi | 8 +- .../typeshed/stdlib/xml/dom/xmlbuilder.pyi | 12 + .../typeshed/stdlib/xml/etree/ElementTree.pyi | 368 +- .../typeshed/stdlib/xml/parsers/__init__.pyi | 8 + 
.../stdlib/xml/parsers/expat/__init__.pyi | 2 + .../stdlib/xml/parsers/expat/errors.pyi | 2 + .../stdlib/xml/parsers/expat/model.pyi | 2 + .../typeshed/stdlib/xml/sax/__init__.pyi | 29 +- .../typeshed/stdlib/xml/sax/_exceptions.pyi | 90 +- .../typeshed/stdlib/xml/sax/expatreader.pyi | 15 +- .../typeshed/stdlib/xml/sax/handler.pyi | 270 +- .../typeshed/stdlib/xml/sax/saxutils.pyi | 47 +- .../typeshed/stdlib/xml/sax/xmlreader.pyi | 242 +- .../vendor/typeshed/stdlib/xmlrpc/client.pyi | 175 +- .../vendor/typeshed/stdlib/xmlrpc/server.pyi | 352 +- .../vendor/typeshed/stdlib/xxlimited.pyi | 18 +- .../vendor/typeshed/stdlib/zipapp.pyi | 20 +- .../typeshed/stdlib/zipfile/__init__.pyi | 317 +- .../stdlib/zipfile/_path/__init__.pyi | 168 +- .../typeshed/stdlib/zipfile/_path/glob.pyi | 93 +- .../vendor/typeshed/stdlib/zipimport.pyi | 157 +- .../vendor/typeshed/stdlib/zlib.pyi | 99 +- .../typeshed/stdlib/zoneinfo/__init__.pyi | 28 +- .../typeshed/stdlib/zoneinfo/_common.pyi | 4 +- .../typeshed/stdlib/zoneinfo/_tzpath.pyi | 22 +- 648 files changed, 70734 insertions(+), 8362 deletions(-) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi index a90cf1eddab76..27d0e6e145d4b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi @@ -1,11 +1,68 @@ +"""Record of phased-in incompatible language changes. + +Each line is of the form: + + FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease "," + CompilerFlag ")" + +where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples +of the same form as sys.version_info: + + (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int + PY_MINOR_VERSION, # the 1; an int + PY_MICRO_VERSION, # the 0; an int + PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string + PY_RELEASE_SERIAL # the 3; an int + ) + +OptionalRelease records the first release in which + + from __future__ import FeatureName + +was accepted. + +In the case of MandatoryReleases that have not yet occurred, +MandatoryRelease predicts the release in which the feature will become part +of the language. + +Else MandatoryRelease records when the feature became part of the language; +in releases at or after that, modules no longer need + + from __future__ import FeatureName + +to use the feature in question, but may continue to use such imports. + +MandatoryRelease may also be None, meaning that a planned feature got +dropped or that the release version is undetermined. + +Instances of class _Feature have two corresponding methods, +.getOptionalRelease() and .getMandatoryRelease(). + +CompilerFlag is the (bitfield) flag that should be passed in the fourth +argument to the builtin function compile() to enable the feature in +dynamically compiled code. This flag is stored in the .compiler_flag +attribute on _Future instances. These values must match the appropriate +#defines of CO_xxx flags in Include/cpython/compile.h. + +No feature line is ever to be deleted from this file. +""" from typing_extensions import TypeAlias _VersionInfo: TypeAlias = tuple[int, int, int, str, int] class _Feature: def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ... - def getOptionalRelease(self) -> _VersionInfo: ... - def getMandatoryRelease(self) -> _VersionInfo | None: ... + def getOptionalRelease(self) -> _VersionInfo: + """Return first release in which this feature was recognized. 
+ +This is a 5-tuple, of the same form as sys.version_info. +""" + def getMandatoryRelease(self) -> _VersionInfo | None: + """Return release in which this feature will become mandatory. + +This is a 5-tuple, of the same form as sys.version_info, or, if +the feature was dropped, or the release date is undetermined, is None. +""" compiler_flag: int absolute_import: _Feature diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi index d663f5d935554..aff8cf899ba4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi @@ -1,3 +1,5 @@ +"""Accelerator module for asyncio +""" import sys from asyncio.events import AbstractEventLoop from collections.abc import Awaitable, Callable, Coroutine, Generator @@ -12,6 +14,19 @@ _TaskYieldType: TypeAlias = Future[object] | None @disjoint_base class Future(Awaitable[_T]): + """This class is *almost* compatible with concurrent.futures.Future. + + Differences: + + - result() and exception() do not take a timeout argument and + raise an exception when the future isn't done yet. + + - Callbacks registered with add_done_callback() are always called + via the event loop's call_soon_threadsafe(). + + - This class is not compatible with the wait() and as_completed() + methods in the concurrent.futures package. +""" _state: str @property def _exception(self) -> BaseException | None: ... @@ -22,24 +37,80 @@ class Future(Awaitable[_T]): def _log_traceback(self, val: Literal[False]) -> None: ... _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def __del__(self) -> None: ... - def get_loop(self) -> AbstractEventLoop: ... + def __del__(self) -> None: + """Called when the instance is about to be destroyed. +""" + def get_loop(self) -> AbstractEventLoop: + """Return the event loop the Future is bound to. +""" @property def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... - def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: ... - def cancel(self, msg: Any | None = None) -> bool: ... - def cancelled(self) -> bool: ... - def done(self) -> bool: ... - def result(self) -> _T: ... - def exception(self) -> BaseException | None: ... - def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: ... - def set_result(self, result: _T, /) -> None: ... - def set_exception(self, exception: type | BaseException, /) -> None: ... - def __iter__(self) -> Generator[Any, None, _T]: ... - def __await__(self) -> Generator[Any, None, _T]: ... + def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: + """Add a callback to be run when the future becomes done. + +The callback is called with a single argument - the future object. If +the future is already done when this is called, the callback is +scheduled with call_soon. +""" + def cancel(self, msg: Any | None = None) -> bool: + """Cancel the future and schedule callbacks. + +If the future is already done or cancelled, return False. Otherwise, +change the future's state to cancelled, schedule the callbacks and +return True. +""" + def cancelled(self) -> bool: + """Return True if the future was cancelled. +""" + def done(self) -> bool: + """Return True if the future is done. + +Done means either that a result / exception are available, or that the +future was cancelled. 
+""" + def result(self) -> _T: + """Return the result this future represents. + +If the future has been cancelled, raises CancelledError. If the +future's result isn't yet available, raises InvalidStateError. If +the future is done and has an exception set, this exception is raised. +""" + def exception(self) -> BaseException | None: + """Return the exception that was set on this future. + +The exception (or None if no exception was set) is returned only if +the future is done. If the future has been cancelled, raises +CancelledError. If the future isn't done yet, raises +InvalidStateError. +""" + def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: + """Remove all instances of a callback from the "call when done" list. + +Returns the number of callbacks removed. +""" + def set_result(self, result: _T, /) -> None: + """Mark the future done and set its result. + +If the future is already done when this method is called, raises +InvalidStateError. +""" + def set_exception(self, exception: type | BaseException, /) -> None: + """Mark the future done and set an exception. + +If the future is already done when this method is called, raises +InvalidStateError. +""" + def __iter__(self) -> Generator[Any, None, _T]: + """Implement iter(self). +""" + def __await__(self) -> Generator[Any, None, _T]: + """Return an iterator to be used in await expression. +""" @property def _loop(self) -> AbstractEventLoop: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] @@ -52,6 +123,8 @@ else: # and `asyncio.Task.set_result()` always raises. @disjoint_base class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] + """A coroutine wrapped in a Future. +""" if sys.version_info >= (3, 12): def __init__( self, @@ -86,27 +159,118 @@ class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportIn if sys.version_info >= (3, 12): def get_context(self) -> Context: ... - def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... - def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: + """Return the list of stack frames for this task's coroutine. + +If the coroutine is not done, this returns the stack where it is +suspended. If the coroutine has completed successfully or was +cancelled, this returns an empty list. If the coroutine was +terminated by an exception, this returns the list of traceback +frames. + +The frames are always ordered from oldest to newest. + +The optional limit gives the maximum number of frames to +return; by default all available frames are returned. Its +meaning differs depending on whether a stack or a traceback is +returned: the newest frames of a stack are returned, but the +oldest frames of a traceback are returned. (This matches the +behavior of the traceback module.) + +For reasons beyond our control, only one stack frame is +returned for a suspended coroutine. +""" + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: + """Print the stack or traceback for this task's coroutine. + +This produces output similar to that of the traceback module, +for the frames retrieved by get_stack(). The limit argument +is passed to get_stack(). 
The file argument is an I/O stream +to which the output is written; by default output is written +to sys.stderr. +""" if sys.version_info >= (3, 11): - def cancelling(self) -> int: ... - def uncancel(self) -> int: ... + def cancelling(self) -> int: + """Return the count of the task's cancellation requests. + +This count is incremented when .cancel() is called +and may be decremented using .uncancel(). +""" + def uncancel(self) -> int: + """Decrement the task's count of cancellation requests. + +This should be used by tasks that catch CancelledError +and wish to continue indefinitely until they are cancelled again. + +Returns the remaining number of cancellation requests. +""" + + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" + +def get_event_loop() -> AbstractEventLoop: + """Return an asyncio event loop. + +When called from a coroutine or a callback (e.g. scheduled with +call_soon or similar API), this function will always return the +running event loop. + +If there is no running event loop set, the function will return +the result of `get_event_loop_policy().get_event_loop()` call. +""" +def get_running_loop() -> AbstractEventLoop: + """Return the running event loop. Raise a RuntimeError if there is none. + +This function is thread-specific. +""" +def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: + """Set the running event loop. + +This is a low-level function intended to be used by event loops. +This function is thread-specific. +""" +def _get_running_loop() -> AbstractEventLoop: + """Return the running event loop or None. + +This is a low-level function intended to be used by event loops. +This function is thread-specific. +""" +def _register_task(task: Task[Any]) -> None: + """Register a new task in asyncio as executed by loop. + +Returns None. +""" +def _unregister_task(task: Task[Any]) -> None: + """Unregister a task. + +Returns None. +""" +def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: + """Enter into task execution or resume suspended task. + +Task belongs to loop. + +Returns None. +""" +def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: + """Leave task execution or suspend a task. - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... +Task belongs to loop. -def get_event_loop() -> AbstractEventLoop: ... -def get_running_loop() -> AbstractEventLoop: ... -def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: ... -def _get_running_loop() -> AbstractEventLoop: ... -def _register_task(task: Task[Any]) -> None: ... -def _unregister_task(task: Task[Any]) -> None: ... -def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... -def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +Returns None. +""" if sys.version_info >= (3, 12): - def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: + """Return a currently executed task. +""" if sys.version_info >= (3, 14): def future_discard_from_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... - def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... - def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: + """Record that `fut` is awaited on by `waiter`. 
+""" + def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: + """Return a set of all tasks for the loop. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi index 58488e3d15afe..04b301296cbd4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi @@ -1,3 +1,10 @@ +"""Bisection algorithms. + +This module provides support for maintaining a list in sorted order without +having to sort the list after each insertion. For long lists of items with +expensive comparison operations, this can be an improvement over the more +common approach. +""" import sys from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT from collections.abc import Callable, MutableSequence @@ -14,7 +21,18 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> int: ... + ) -> int: + """Return the index where to insert item x in list a, assuming a is sorted. + +The return value i is such that all e in a[:i] have e < x, and all e in +a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will +insert just before the leftmost x already there. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. + +A custom key function can be supplied to customize the sort order. +""" @overload def bisect_left( a: SupportsLenAndGetItem[_T], @@ -32,7 +50,18 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> int: ... + ) -> int: + """Return the index where to insert item x in list a, assuming a is sorted. + +The return value i is such that all e in a[:i] have e <= x, and all e in +a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will +insert just after the rightmost x already there. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. + +A custom key function can be supplied to customize the sort order. +""" @overload def bisect_right( a: SupportsLenAndGetItem[_T], @@ -50,7 +79,16 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> None: ... + ) -> None: + """Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the left of the leftmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. + +A custom key function can be supplied to customize the sort order. +""" @overload def insort_left( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -63,7 +101,16 @@ if sys.version_info >= (3, 10): hi: int | None = None, *, key: None = None, - ) -> None: ... + ) -> None: + """Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the right of the rightmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. + +A custom key function can be supplied to customize the sort order. +""" @overload def insort_right( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -72,13 +119,45 @@ if sys.version_info >= (3, 10): else: def bisect_left( a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> int: ... + ) -> int: + """Return the index where to insert item x in list a, assuming a is sorted. 
+ +The return value i is such that all e in a[:i] have e < x, and all e in +a[i:] have e >= x. So if x already appears in the list, i points just +before the leftmost x already there. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +""" def bisect_right( a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> int: ... + ) -> int: + """Return the index where to insert item x in list a, assuming a is sorted. + +The return value i is such that all e in a[:i] have e <= x, and all e in +a[i:] have e > x. So if x already appears in the list, i points just +beyond the rightmost x already there + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +""" def insort_left( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> None: ... + ) -> None: + """Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the left of the leftmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +""" def insort_right( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None - ) -> None: ... + ) -> None: + """Insert item x in list a, and keep it sorted assuming a is sorted. + +If x is already in a, insert it to the right of the rightmost x. + +Optional args lo (default 0) and hi (default len(a)) bound the +slice of a to be searched. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi index a6c3869fb8513..3dd4a55422b20 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi @@ -1,3 +1,5 @@ +"""_blake2b provides BLAKE2b for hashlib +""" import sys from _typeshed import ReadableBuffer from typing import ClassVar, Final, final @@ -14,6 +16,8 @@ BLAKE2S_SALT_SIZE: Final = 8 @final class blake2b: + """Return a new BLAKE2b hash object. +""" MAX_DIGEST_SIZE: ClassVar[int] = 64 MAX_KEY_SIZE: ClassVar[int] = 64 PERSON_SIZE: ClassVar[int] = 16 @@ -60,13 +64,23 @@ class blake2b: usedforsecurity: bool = True, ) -> Self: ... - def copy(self) -> Self: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def update(self, data: ReadableBuffer, /) -> None: ... + def copy(self) -> Self: + """Return a copy of the hash object. +""" + def digest(self) -> bytes: + """Return the digest value as a bytes object. +""" + def hexdigest(self) -> str: + """Return the digest value as a string of hexadecimal digits. +""" + def update(self, data: ReadableBuffer, /) -> None: + """Update this hash object's state with the provided bytes-like object. +""" @final class blake2s: + """Return a new BLAKE2s hash object. +""" MAX_DIGEST_SIZE: ClassVar[int] = 32 MAX_KEY_SIZE: ClassVar[int] = 32 PERSON_SIZE: ClassVar[int] = 8 @@ -113,7 +127,15 @@ class blake2s: usedforsecurity: bool = True, ) -> Self: ... - def copy(self) -> Self: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def update(self, data: ReadableBuffer, /) -> None: ... + def copy(self) -> Self: + """Return a copy of the hash object. +""" + def digest(self) -> bytes: + """Return the digest value as a bytes object. +""" + def hexdigest(self) -> str: + """Return the digest value as a string of hexadecimal digits. 
+""" + def update(self, data: ReadableBuffer, /) -> None: + """Update this hash object's state with the provided bytes-like object. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi index 233d4934f3c6d..d5e032c352a8a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi @@ -1 +1,6 @@ +"""A minimal subset of the locale module used at interpreter startup +(imported by the _io module), in order to reduce startup time. + +Don't import directly from third-party code; use the `locale` module instead! +""" def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi index fdad932ca22e6..5aca352012c78 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi @@ -5,20 +5,65 @@ from typing_extensions import Self @final class BZ2Compressor: + """Create a compressor object for compressing data incrementally. + + compresslevel + Compression level, as a number between 1 and 9. + +For one-shot compression, use the compress() function instead. +""" if sys.version_info >= (3, 12): def __new__(cls, compresslevel: int = 9, /) -> Self: ... else: def __init__(self, compresslevel: int = 9, /) -> None: ... - def compress(self, data: ReadableBuffer, /) -> bytes: ... - def flush(self) -> bytes: ... + def compress(self, data: ReadableBuffer, /) -> bytes: + """Provide data to the compressor object. + +Returns a chunk of compressed data if possible, or b'' otherwise. + +When you have finished providing data to the compressor, call the +flush() method to finish the compression process. +""" + def flush(self) -> bytes: + """Finish the compression process. + +Returns the compressed data left in internal buffers. + +The compressor object may not be used after this method is called. +""" @final class BZ2Decompressor: - def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + """Create a decompressor object for decompressing data incrementally. + +For one-shot decompression, use the decompress() function instead. +""" + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: + """Decompress *data*, returning uncompressed data as bytes. + +If *max_length* is nonnegative, returns at most *max_length* bytes of +decompressed data. If this limit is reached and further output can be +produced, *self.needs_input* will be set to ``False``. In this case, the next +call to *decompress()* may provide *data* as b'' to obtain more of the output. + +If all of the input data was decompressed and returned (either because this +was less than *max_length* bytes, or because *max_length* was negative), +*self.needs_input* will be set to True. + +Attempting to decompress data after the end of stream is reached raises an +EOFError. Any data found after the end of the stream is ignored and saved in +the unused_data attribute. +""" @property - def eof(self) -> bool: ... + def eof(self) -> bool: + """True if the end-of-stream marker has been reached. +""" @property - def needs_input(self) -> bool: ... + def needs_input(self) -> bool: + """True if more input is needed before more decompressed data can be produced. +""" @property - def unused_data(self) -> bytes: ... + def unused_data(self) -> bytes: + """Data found after the end of the compressed stream. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi index 89f97edb9ba81..a64ade83f139d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi @@ -16,13 +16,34 @@ _CharMap: TypeAlias = dict[int, int] | _EncodingMap _Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] _SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] -def register(search_function: _SearchFunction, /) -> None: ... +def register(search_function: _SearchFunction, /) -> None: + """Register a codec search function. + +Search functions are expected to take one argument, the encoding name in +all lower case letters, and either return None, or a tuple of functions +(encoder, decoder, stream_reader, stream_writer) (or a CodecInfo object). +""" if sys.version_info >= (3, 10): - def unregister(search_function: _SearchFunction, /) -> None: ... + def unregister(search_function: _SearchFunction, /) -> None: + """Unregister a codec search function and clear the registry's cache. + +If the search function is not registered, do nothing. +""" + +def register_error(errors: str, handler: _Handler, /) -> None: + """Register the specified error handler under the name errors. -def register_error(errors: str, handler: _Handler, /) -> None: ... -def lookup_error(name: str, /) -> _Handler: ... +handler must be a callable object, that will be called with an exception +instance containing information about the location of the encoding/decoding +error and must return a (replacement, new position) tuple. +""" +def lookup_error(name: str, /) -> _Handler: + """lookup_error(errors) -> handler + +Return the error handler for the specified error handling name or raise a +LookupError, if no handler exists under this name. +""" # The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 # https://docs.python.org/3/library/codecs.html#binary-transforms @@ -48,13 +69,29 @@ _BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: + """Encodes obj using the codec registered for encoding. + +The default encoding is 'utf-8'. errors may be given to set a +different error handling scheme. Default is 'strict' meaning that encoding +errors raise a ValueError. Other possible values are 'ignore', 'replace' +and 'backslashreplace' as well as any other name registered with +codecs.register_error that can handle ValueErrors. +""" @overload def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap] @overload def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @overload -def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[overload-overlap] +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: # type: ignore[overload-overlap] + """Decodes obj using the codec registered for encoding. + +Default encoding is 'utf-8'. errors may be given to set a +different error handling scheme. Default is 'strict' meaning that encoding +errors raise a ValueError. 
Other possible values are 'ignore', 'replace' +and 'backslashreplace' as well as any other name registered with +codecs.register_error that can handle ValueErrors. +""" @overload def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... @@ -71,7 +108,9 @@ def decode( def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... @overload def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... -def lookup(encoding: str, /) -> codecs.CodecInfo: ... +def lookup(encoding: str, /) -> codecs.CodecInfo: + """Looks up a codec tuple in the Python codec registry and returns a CodecInfo object. +""" def charmap_build(map: str, /) -> _CharMap: ... def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi index 319577c9284bc..0792b4ef5502c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi @@ -1,3 +1,7 @@ +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" import sys from abc import abstractmethod from types import MappingProxyType @@ -71,31 +75,47 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. @final class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[_KT_co]: ... + def __reversed__(self) -> Iterator[_KT_co]: + """Return a reverse iterator over the dict keys. +""" __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): - def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ... + def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: + """Return True if the view and the given iterable have a null intersection. +""" if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: + """dictionary that this view refers to +""" @final class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented - def __reversed__(self) -> Iterator[_VT_co]: ... + def __reversed__(self) -> Iterator[_VT_co]: + """Return a reverse iterator over the dict values. +""" if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: + """dictionary that this view refers to +""" @final class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: + """Return a reverse iterator over the dict items. +""" __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): - def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ... + def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: + """Return True if the view and the given iterable have a null intersection. +""" if sys.version_info >= (3, 10): @property - def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... 
+ def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: + """dictionary that this view refers to +""" if sys.version_info >= (3, 12): @runtime_checkable diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi index aa67df2ab4787..327135f4148c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi @@ -1,3 +1,5 @@ +"""Internal classes used by the gzip, lzma and bz2 modules +""" # _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) from _typeshed import Incomplete, WriteableBuffer @@ -13,9 +15,13 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... -class BaseStream(BufferedIOBase): ... +class BaseStream(BufferedIOBase): + """Mode-checking helper functions. +""" class DecompressReader(RawIOBase): + """Adapts the decompressor API to a RawIOBase reader API +""" def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi index 0ddeca7882cd1..0abc254f3774a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi @@ -1,3 +1,5 @@ +"""Context Variables +""" import sys from collections.abc import Callable, Iterator, Mapping from types import GenericAlias, TracebackType @@ -18,14 +20,36 @@ class ContextVar(Generic[_T]): @property def name(self) -> str: ... @overload - def get(self) -> _T: ... + def get(self) -> _T: + """Return a value for the context variable for the current context. + +If there is no value for the variable in the current context, the method will: + * return the value of the default argument of the method, if provided; or + * return the default value for the context variable, if it was created + with one; or + * raise a LookupError. +""" @overload def get(self, default: _T, /) -> _T: ... @overload def get(self, default: _D, /) -> _D | _T: ... - def set(self, value: _T, /) -> Token[_T]: ... - def reset(self, token: Token[_T], /) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def set(self, value: _T, /) -> Token[_T]: + """Call to set a new value for the context variable in the current context. + +The required value argument is the new value for the context variable. + +Returns a Token object that can be used to restore the variable to its previous +value via the `ContextVar.reset()` method. +""" + def reset(self, token: Token[_T], /) -> None: + """Reset the context variable. + +The variable is reset to the value it had before the `ContextVar.set()` that +created the token was used. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @final class Token(Generic[_T]): @@ -35,12 +59,18 @@ class Token(Generic[_T]): def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express MISSING: ClassVar[object] __hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" if sys.version_info >= (3, 14): - def __enter__(self) -> Self: ... + def __enter__(self) -> Self: + """Enter into Token context manager. +""" def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> None: ... 
+ ) -> None: + """Exit from Token context manager, restore the linked ContextVar. +""" def copy_context() -> Context: ... @@ -50,15 +80,28 @@ def copy_context() -> Context: ... class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... @overload - def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: ... + def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: + """Return the value for `key` if `key` has the value in the context object. + +If `key` does not exist, return `default`. If `default` is not given, +return None. +""" @overload def get(self, key: ContextVar[_T], default: _T, /) -> _T: ... @overload def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ... def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... - def copy(self) -> Context: ... + def copy(self) -> Context: + """Return a shallow copy of the context object. +""" __hash__: ClassVar[None] # type: ignore[assignment] - def __getitem__(self, key: ContextVar[_T], /) -> _T: ... - def __iter__(self) -> Iterator[ContextVar[Any]]: ... - def __len__(self) -> int: ... + def __getitem__(self, key: ContextVar[_T], /) -> _T: + """Return self[key]. +""" + def __iter__(self) -> Iterator[ContextVar[Any]]: + """Implement iter(self). +""" + def __len__(self) -> int: + """Return len(self). +""" def __eq__(self, value: object, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi index ea90766afee66..118bd5669477f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi @@ -1,3 +1,5 @@ +"""CSV parsing and writing. +""" import csv import sys from _typeshed import SupportsWrite @@ -26,6 +28,10 @@ _DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Diale @disjoint_base class Dialect: + """CSV dialect + +The Dialect type records CSV parsing and generation options. +""" delimiter: str quotechar: str | None escapechar: str | None @@ -51,23 +57,57 @@ if sys.version_info >= (3, 10): # This class calls itself _csv.reader. @disjoint_base class Reader: + """CSV reader + +Reader objects are responsible for reading and parsing tabular data +in CSV format. +""" @property def dialect(self) -> Dialect: ... line_num: int - def __iter__(self) -> Self: ... - def __next__(self) -> list[str]: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> list[str]: + """Implement next(self). +""" # This class calls itself _csv.writer. @disjoint_base class Writer: + """CSV writer + +Writer objects are responsible for generating tabular data +in CSV format from sequence input. +""" @property def dialect(self) -> Dialect: ... if sys.version_info >= (3, 13): - def writerow(self, row: Iterable[Any], /) -> Any: ... - def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: ... + def writerow(self, row: Iterable[Any], /) -> Any: + """writerow(iterable) + +Construct and write a CSV record from an iterable of fields. Non-string +elements will be converted to string. +""" + def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: + """writerows(iterable of iterables) + +Construct and write a series of iterables to a csv file. Non-string +elements will be converted to string. +""" else: - def writerow(self, row: Iterable[Any]) -> Any: ... - def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... 
+ def writerow(self, row: Iterable[Any]) -> Any: + """writerow(iterable) + +Construct and write a CSV record from an iterable of fields. Non-string +elements will be converted to string. +""" + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: + """writerows(iterable of iterables) + +Construct and write a series of iterables to a csv file. Non-string +elements will be converted to string. +""" # For the return types below. # These aliases can be removed when typeshed drops support for 3.9. @@ -104,7 +144,20 @@ def writer( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> _writer: ... +) -> _writer: + """ csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + for row in sequence: + csv_writer.writerow(row) + + [or] + + csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + csv_writer.writerows(rows) + +The "fileobj" argument can be any object that supports the file API. +""" def reader( iterable: Iterable[str], /, @@ -118,7 +171,21 @@ def reader( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> _reader: ... +) -> _reader: + """ csv_reader = reader(iterable [, dialect='excel'] + [optional keyword args]) + for row in csv_reader: + process(row) + +The "iterable" argument can be any object that returns a line +of input for each iteration, such as a file object or a list. The +optional "dialect" parameter is discussed below. The function +also accepts optional keyword arguments which override settings +provided by the dialect. + +The returned object is an iterator. Each iteration returns a row +of the CSV file (which can span multiple input lines). +""" def register_dialect( name: str, /, @@ -132,8 +199,30 @@ def register_dialect( lineterminator: str = "\r\n", quoting: _QuotingType = 0, strict: bool = False, -) -> None: ... -def unregister_dialect(name: str) -> None: ... -def get_dialect(name: str) -> Dialect: ... -def list_dialects() -> list[str]: ... -def field_size_limit(new_limit: int = ...) -> int: ... +) -> None: + """Create a mapping from a string name to a dialect class. + dialect = csv.register_dialect(name[, dialect[, **fmtparams]]) +""" +def unregister_dialect(name: str) -> None: + """Delete the name/dialect mapping associated with a string name. + + csv.unregister_dialect(name) +""" +def get_dialect(name: str) -> Dialect: + """Return the dialect instance associated with name. + + dialect = csv.get_dialect(name) +""" +def list_dialects() -> list[str]: + """Return a list of all known dialect names. + + names = csv.list_dialects() +""" +def field_size_limit(new_limit: int = ...) -> int: + """Sets an upper limit on parsed fields. + + csv.field_size_limit([limit]) + +Returns old limit. If limit is not given, no new limit is set and +the old limit is returned +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi index c87cf5e326caa..af9e7512b1cc9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi @@ -1,3 +1,5 @@ +"""Create and manipulate C compatible data types in Python. +""" import _typeshed import sys from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -47,10 +49,16 @@ if sys.platform == "win32": def FreeLibrary(handle: int, /) -> None: ... else: - def dlclose(handle: int, /) -> None: ... 
+ def dlclose(handle: int, /) -> None: + """dlclose a library +""" # The default for flag is RTLD_GLOBAL|RTLD_LOCAL, which is platform dependent. - def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: ... - def dlsym(handle: int, name: str, /) -> int: ... + def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: + """dlopen(name, flag={RTLD_GLOBAL|RTLD_LOCAL}) open a shared library +""" + def dlsym(handle: int, name: str, /) -> int: + """find symbol in shared library +""" if sys.version_info >= (3, 13): # This class is not exposed. It calls itself _ctypes.CType_Type. @@ -97,6 +105,8 @@ class _PyCSimpleType(_CTypeBaseType): def __rmul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _SimpleCData(_CData, Generic[_T], metaclass=_PyCSimpleType): + """XXX to be provided +""" value: _T # The TypeVar can be unsolved here, # but we can't use overloads without creating many, many mypy false-positive errors @@ -124,6 +134,8 @@ class _PyCPointerType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): + """XXX to be provided +""" _type_: type[_CT] contents: _CT @overload @@ -131,17 +143,35 @@ class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): @overload def __init__(self, arg: _CT) -> None: ... @overload - def __getitem__(self, key: int, /) -> Any: ... + def __getitem__(self, key: int, /) -> Any: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> list[Any]: ... - def __setitem__(self, key: int, value: Any, /) -> None: ... + def __setitem__(self, key: int, value: Any, /) -> None: + """Set self[key] to value. +""" if sys.version_info < (3, 14): @overload - def POINTER(type: None, /) -> type[c_void_p]: ... + def POINTER(type: None, /) -> type[c_void_p]: + """Create and return a new ctypes pointer type. + + type + A ctypes type. + +Pointer types are cached and reused internally, +so calling this function repeatedly is cheap. +""" @overload def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... - def pointer(obj: _CT, /) -> _Pointer[_CT]: ... + def pointer(obj: _CT, /) -> _Pointer[_CT]: + """Create a new pointer instance, pointing to 'obj'. + +The returned object is of the type POINTER(type(obj)). Note that if you +just want to pass a pointer to an object to a foreign function call, you +should use byref(obj) which is much faster. +""" # This class is not exposed. It calls itself _ctypes.CArgObject. @final @@ -149,10 +179,16 @@ if sys.version_info < (3, 14): class _CArgObject: ... if sys.version_info >= (3, 14): - def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: ... + def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: + """Return a pointer lookalike to a C instance, only usable as function argument. +""" else: - def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: ... 
+ def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: + """byref(C instance[, offset=0]) -> byref-object +Return a pointer lookalike to a C instance, only usable +as function argument +""" _ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType] _PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any] @@ -171,6 +207,8 @@ class _PyCFuncPtrType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): + """Function Pointer +""" restype: type[_CDataType] | Callable[[int], Any] | None argtypes: Sequence[type[_CDataType]] errcheck: _ECT @@ -190,7 +228,9 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): cls, vtbl_index: int, name: str, paramflags: tuple[_PF, ...] | None = ..., iid: _CData | _CDataType | None = ..., / ) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" _GetT = TypeVar("_GetT") _SetT = TypeVar("_SetT") @@ -232,6 +272,8 @@ class _UnionType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Union(_CData, metaclass=_UnionType): + """Union base class +""" _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -260,6 +302,8 @@ class _PyCStructType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Structure(_CData, metaclass=_PyCStructType): + """Structure base class +""" _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -284,6 +328,14 @@ class _PyCArrayType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): + """Abstract base class for arrays. + +The recommended way to create concrete array types is by multiplying any +ctypes data type with a non-negative integer. Alternatively, you can subclass +this type and define _length_ and _type_ class variables. Array elements can +be read and written using standard subscript and slice accesses for slice +reads, the resulting object is not itself an Array. +""" @property @abstractmethod def _length_(self) -> int: ... @@ -314,28 +366,48 @@ class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): # the array element type would belong are annotated with Any instead. def __init__(self, *args: Any) -> None: ... @overload - def __getitem__(self, key: int, /) -> Any: ... + def __getitem__(self, key: int, /) -> Any: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> list[Any]: ... @overload - def __setitem__(self, key: int, value: Any, /) -> None: ... + def __setitem__(self, key: int, value: Any, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ... def __iter__(self) -> Iterator[Any]: ... # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. 
- def __len__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - -def addressof(obj: _CData | _CDataType, /) -> int: ... -def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... + def __len__(self) -> int: + """Return len(self). +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" + +def addressof(obj: _CData | _CDataType, /) -> int: + """Return the address of the C instance internal buffer +""" +def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: + """alignment(C type) -> integer +alignment(C instance) -> integer +Return the alignment requirements of a C instance +""" def get_errno() -> int: ... -def resize(obj: _CData | _CDataType, size: int, /) -> None: ... +def resize(obj: _CData | _CDataType, size: int, /) -> None: + """Resize the memory buffer of a ctypes instance +""" def set_errno(value: int, /) -> int: ... -def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: ... +def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: + """Return the size in bytes of a C instance. +""" def PyObj_FromPtr(address: int, /) -> Any: ... def Py_DECREF(o: _T, /) -> _T: ... def Py_INCREF(o: _T, /) -> _T: ... -def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: ... +def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: + """Return buffer interface information +""" def call_cdeclfunction(address: int, arguments: tuple[Any, ...], /) -> Any: ... def call_function(address: int, arguments: tuple[Any, ...], /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi index d4e4d48f4e20f..398da932f4038 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi @@ -276,82 +276,482 @@ REPORT_MOUSE_POSITION: Final[int] _C_API: Any version: Final[bytes] -def baudrate() -> int: ... -def beep() -> None: ... -def can_change_color() -> bool: ... -def cbreak(flag: bool = True, /) -> None: ... -def color_content(color_number: int, /) -> tuple[int, int, int]: ... -def color_pair(pair_number: int, /) -> int: ... -def curs_set(visibility: int, /) -> int: ... -def def_prog_mode() -> None: ... -def def_shell_mode() -> None: ... -def delay_output(ms: int, /) -> None: ... -def doupdate() -> None: ... -def echo(flag: bool = True, /) -> None: ... -def endwin() -> None: ... -def erasechar() -> bytes: ... +def baudrate() -> int: + """Return the output speed of the terminal in bits per second. +""" +def beep() -> None: + """Emit a short attention sound. +""" +def can_change_color() -> bool: + """Return True if the programmer can change the colors displayed by the terminal. +""" +def cbreak(flag: bool = True, /) -> None: + """Enter cbreak mode. + + flag + If false, the effect is the same as calling nocbreak(). + +In cbreak mode (sometimes called "rare" mode) normal tty line buffering is +turned off and characters are available to be read one by one. However, +unlike raw mode, special characters (interrupt, quit, suspend, and flow +control) retain their effects on the tty driver and calling program. +Calling first raw() then cbreak() leaves the terminal in cbreak mode. 
+""" +def color_content(color_number: int, /) -> tuple[int, int, int]: + """Return the red, green, and blue (RGB) components of the specified color. + + color_number + The number of the color (0 - (COLORS-1)). + +A 3-tuple is returned, containing the R, G, B values for the given color, +which will be between 0 (no component) and 1000 (maximum amount of component). +""" +def color_pair(pair_number: int, /) -> int: + """Return the attribute value for displaying text in the specified color. + + pair_number + The number of the color pair. + +This attribute value can be combined with A_STANDOUT, A_REVERSE, and the +other A_* attributes. pair_number() is the counterpart to this function. +""" +def curs_set(visibility: int, /) -> int: + """Set the cursor state. + + visibility + 0 for invisible, 1 for normal visible, or 2 for very visible. + +If the terminal supports the visibility requested, the previous cursor +state is returned; otherwise, an exception is raised. On many terminals, +the "visible" mode is an underline cursor and the "very visible" mode is +a block cursor. +""" +def def_prog_mode() -> None: + """Save the current terminal mode as the "program" mode. + +The "program" mode is the mode when the running program is using curses. + +Subsequent calls to reset_prog_mode() will restore this mode. +""" +def def_shell_mode() -> None: + """Save the current terminal mode as the "shell" mode. + +The "shell" mode is the mode when the running program is not using curses. + +Subsequent calls to reset_shell_mode() will restore this mode. +""" +def delay_output(ms: int, /) -> None: + """Insert a pause in output. + + ms + Duration in milliseconds. +""" +def doupdate() -> None: + """Update the physical screen to match the virtual screen. +""" +def echo(flag: bool = True, /) -> None: + """Enter echo mode. + + flag + If false, the effect is the same as calling noecho(). + +In echo mode, each character input is echoed to the screen as it is entered. +""" +def endwin() -> None: + """De-initialize the library, and return terminal to normal status. +""" +def erasechar() -> bytes: + """Return the user's current erase character. +""" def filter() -> None: ... -def flash() -> None: ... -def flushinp() -> None: ... -def get_escdelay() -> int: ... -def get_tabsize() -> int: ... -def getmouse() -> tuple[int, int, int, int, int]: ... -def getsyx() -> tuple[int, int]: ... -def getwin(file: SupportsRead[bytes], /) -> window: ... -def halfdelay(tenths: int, /) -> None: ... -def has_colors() -> bool: ... +def flash() -> None: + """Flash the screen. + +That is, change it to reverse-video and then change it back in a short interval. +""" +def flushinp() -> None: + """Flush all input buffers. + +This throws away any typeahead that has been typed by the user and has not +yet been processed by the program. +""" +def get_escdelay() -> int: + """Gets the curses ESCDELAY setting. + +Gets the number of milliseconds to wait after reading an escape character, +to distinguish between an individual escape character entered on the +keyboard from escape sequences sent by cursor and function keys. +""" +def get_tabsize() -> int: + """Gets the curses TABSIZE setting. + +Gets the number of columns used by the curses library when converting a tab +character to spaces as it adds the tab to a window. +""" +def getmouse() -> tuple[int, int, int, int, int]: + """Retrieve the queued mouse event. + +After getch() returns KEY_MOUSE to signal a mouse event, this function +returns a 5-tuple (id, x, y, z, bstate). 
+""" +def getsyx() -> tuple[int, int]: + """Return the current coordinates of the virtual screen cursor. + +Return a (y, x) tuple. If leaveok is currently true, return (-1, -1). +""" +def getwin(file: SupportsRead[bytes], /) -> window: + """Read window related data stored in the file by an earlier putwin() call. + +The routine then creates and initializes a new window using that data, +returning the new window object. +""" +def halfdelay(tenths: int, /) -> None: + """Enter half-delay mode. + + tenths + Maximal blocking delay in tenths of seconds (1 - 255). + +Use nocbreak() to leave half-delay mode. +""" +def has_colors() -> bool: + """Return True if the terminal can display colors; otherwise, return False. +""" if sys.version_info >= (3, 10): - def has_extended_color_support() -> bool: ... + def has_extended_color_support() -> bool: + """Return True if the module supports extended colors; otherwise, return False. + +Extended color support allows more than 256 color-pairs for terminals +that support more than 16 colors (e.g. xterm-256color). +""" if sys.version_info >= (3, 14): - def assume_default_colors(fg: int, bg: int, /) -> None: ... - -def has_ic() -> bool: ... -def has_il() -> bool: ... -def has_key(key: int, /) -> bool: ... -def init_color(color_number: int, r: int, g: int, b: int, /) -> None: ... -def init_pair(pair_number: int, fg: int, bg: int, /) -> None: ... -def initscr() -> window: ... + def assume_default_colors(fg: int, bg: int, /) -> None: + """Allow use of default values for colors on terminals supporting this feature. + +Assign terminal default foreground/background colors to color number -1. +Change the definition of the color-pair 0 to (fg, bg). + +Use this to support transparency in your application. +""" + +def has_ic() -> bool: + """Return True if the terminal has insert- and delete-character capabilities. +""" +def has_il() -> bool: + """Return True if the terminal has insert- and delete-line capabilities. +""" +def has_key(key: int, /) -> bool: + """Return True if the current terminal type recognizes a key with that value. + + key + Key number. +""" +def init_color(color_number: int, r: int, g: int, b: int, /) -> None: + """Change the definition of a color. + + color_number + The number of the color to be changed (0 - (COLORS-1)). + r + Red component (0 - 1000). + g + Green component (0 - 1000). + b + Blue component (0 - 1000). + +When init_color() is used, all occurrences of that color on the screen +immediately change to the new definition. This function is a no-op on +most terminals; it is active only if can_change_color() returns true. +""" +def init_pair(pair_number: int, fg: int, bg: int, /) -> None: + """Change the definition of a color-pair. + + pair_number + The number of the color-pair to be changed (1 - (COLOR_PAIRS-1)). + fg + Foreground color number (-1 - (COLORS-1)). + bg + Background color number (-1 - (COLORS-1)). + +If the color-pair was previously initialized, the screen is refreshed and +all occurrences of that color-pair are changed to the new definition. +""" +def initscr() -> window: + """Initialize the library. + +Return a WindowObject which represents the whole screen. +""" def intrflush(flag: bool, /) -> None: ... -def is_term_resized(nlines: int, ncols: int, /) -> bool: ... -def isendwin() -> bool: ... -def keyname(key: int, /) -> bytes: ... -def killchar() -> bytes: ... -def longname() -> bytes: ... -def meta(yes: bool, /) -> None: ... -def mouseinterval(interval: int, /) -> None: ... 
-def mousemask(newmask: int, /) -> tuple[int, int]: ... -def napms(ms: int, /) -> int: ... -def newpad(nlines: int, ncols: int, /) -> window: ... -def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: ... -def nl(flag: bool = True, /) -> None: ... -def nocbreak() -> None: ... -def noecho() -> None: ... -def nonl() -> None: ... -def noqiflush() -> None: ... -def noraw() -> None: ... -def pair_content(pair_number: int, /) -> tuple[int, int]: ... -def pair_number(attr: int, /) -> int: ... -def putp(string: ReadOnlyBuffer, /) -> None: ... -def qiflush(flag: bool = True, /) -> None: ... -def raw(flag: bool = True, /) -> None: ... -def reset_prog_mode() -> None: ... -def reset_shell_mode() -> None: ... -def resetty() -> None: ... -def resize_term(nlines: int, ncols: int, /) -> None: ... -def resizeterm(nlines: int, ncols: int, /) -> None: ... -def savetty() -> None: ... -def set_escdelay(ms: int, /) -> None: ... -def set_tabsize(size: int, /) -> None: ... -def setsyx(y: int, x: int, /) -> None: ... -def setupterm(term: str | None = None, fd: int = -1) -> None: ... -def start_color() -> None: ... -def termattrs() -> int: ... -def termname() -> bytes: ... -def tigetflag(capname: str, /) -> int: ... -def tigetnum(capname: str, /) -> int: ... -def tigetstr(capname: str, /) -> bytes | None: ... +def is_term_resized(nlines: int, ncols: int, /) -> bool: + """Return True if resize_term() would modify the window structure, False otherwise. + + nlines + Height. + ncols + Width. +""" +def isendwin() -> bool: + """Return True if endwin() has been called. +""" +def keyname(key: int, /) -> bytes: + """Return the name of specified key. + + key + Key number. +""" +def killchar() -> bytes: + """Return the user's current line kill character. +""" +def longname() -> bytes: + """Return the terminfo long name field describing the current terminal. + +The maximum length of a verbose description is 128 characters. It is defined +only after the call to initscr(). +""" +def meta(yes: bool, /) -> None: + """Enable/disable meta keys. + +If yes is True, allow 8-bit characters to be input. If yes is False, +allow only 7-bit characters. +""" +def mouseinterval(interval: int, /) -> None: + """Set and retrieve the maximum time between press and release in a click. + + interval + Time in milliseconds. + +Set the maximum time that can elapse between press and release events in +order for them to be recognized as a click, and return the previous interval +value. +""" +def mousemask(newmask: int, /) -> tuple[int, int]: + """Set the mouse events to be reported, and return a tuple (availmask, oldmask). + +Return a tuple (availmask, oldmask). availmask indicates which of the +specified mouse events can be reported; on complete failure it returns 0. +oldmask is the previous value of the given window's mouse event mask. +If this function is never called, no mouse events are ever reported. +""" +def napms(ms: int, /) -> int: + """Sleep for specified time. + + ms + Duration in milliseconds. +""" +def newpad(nlines: int, ncols: int, /) -> window: + """Create and return a pointer to a new pad data structure. + + nlines + Height. + ncols + Width. +""" +def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: + """newwin(nlines, ncols, [begin_y=0, begin_x=0]) +Return a new window. + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. 
+ +By default, the window will extend from the specified position to the lower +right corner of the screen. +""" +def nl(flag: bool = True, /) -> None: + """Enter newline mode. + + flag + If false, the effect is the same as calling nonl(). + +This mode translates the return key into newline on input, and translates +newline into return and line-feed on output. Newline mode is initially on. +""" +def nocbreak() -> None: + """Leave cbreak mode. + +Return to normal "cooked" mode with line buffering. +""" +def noecho() -> None: + """Leave echo mode. + +Echoing of input characters is turned off. +""" +def nonl() -> None: + """Leave newline mode. + +Disable translation of return into newline on input, and disable low-level +translation of newline into newline/return on output. +""" +def noqiflush() -> None: + """Disable queue flushing. + +When queue flushing is disabled, normal flush of input and output queues +associated with the INTR, QUIT and SUSP characters will not be done. +""" +def noraw() -> None: + """Leave raw mode. + +Return to normal "cooked" mode with line buffering. +""" +def pair_content(pair_number: int, /) -> tuple[int, int]: + """Return a tuple (fg, bg) containing the colors for the requested color pair. + + pair_number + The number of the color pair (0 - (COLOR_PAIRS-1)). +""" +def pair_number(attr: int, /) -> int: + """Return the number of the color-pair set by the specified attribute value. + +color_pair() is the counterpart to this function. +""" +def putp(string: ReadOnlyBuffer, /) -> None: + """Emit the value of a specified terminfo capability for the current terminal. + +Note that the output of putp() always goes to standard output. +""" +def qiflush(flag: bool = True, /) -> None: + """Enable queue flushing. + + flag + If false, the effect is the same as calling noqiflush(). + +If queue flushing is enabled, all output in the display driver queue +will be flushed when the INTR, QUIT and SUSP characters are read. +""" +def raw(flag: bool = True, /) -> None: + """Enter raw mode. + + flag + If false, the effect is the same as calling noraw(). + +In raw mode, normal line buffering and processing of interrupt, quit, +suspend, and flow control keys are turned off; characters are presented to +curses input functions one by one. +""" +def reset_prog_mode() -> None: + """Restore the terminal to "program" mode, as previously saved by def_prog_mode(). +""" +def reset_shell_mode() -> None: + """Restore the terminal to "shell" mode, as previously saved by def_shell_mode(). +""" +def resetty() -> None: + """Restore terminal mode. +""" +def resize_term(nlines: int, ncols: int, /) -> None: + """Backend function used by resizeterm(), performing most of the work. + + nlines + Height. + ncols + Width. + +When resizing the windows, resize_term() blank-fills the areas that are +extended. The calling application should fill in these areas with appropriate +data. The resize_term() function attempts to resize all windows. However, +due to the calling convention of pads, it is not possible to resize these +without additional interaction with the application. +""" +def resizeterm(nlines: int, ncols: int, /) -> None: + """Resize the standard and current windows to the specified dimensions. + + nlines + Height. + ncols + Width. + +Adjusts other bookkeeping data used by the curses library that record the +window dimensions (in particular the SIGWINCH handler). +""" +def savetty() -> None: + """Save terminal mode. +""" +def set_escdelay(ms: int, /) -> None: + """Sets the curses ESCDELAY setting. 
+ + ms + length of the delay in milliseconds. + +Sets the number of milliseconds to wait after reading an escape character, +to distinguish between an individual escape character entered on the +keyboard from escape sequences sent by cursor and function keys. +""" +def set_tabsize(size: int, /) -> None: + """Sets the curses TABSIZE setting. + + size + rendered cell width of a tab character. + +Sets the number of columns used by the curses library when converting a tab +character to spaces as it adds the tab to a window. +""" +def setsyx(y: int, x: int, /) -> None: + """Set the virtual screen cursor. + + y + Y-coordinate. + x + X-coordinate. + +If y and x are both -1, then leaveok is set. +""" +def setupterm(term: str | None = None, fd: int = -1) -> None: + """Initialize the terminal. + + term + Terminal name. + If omitted, the value of the TERM environment variable will be used. + fd + File descriptor to which any initialization sequences will be sent. + If not supplied, the file descriptor for sys.stdout will be used. +""" +def start_color() -> None: + """Initializes eight basic colors and global variables COLORS and COLOR_PAIRS. + +Must be called if the programmer wants to use colors, and before any other +color manipulation routine is called. It is good practice to call this +routine right after initscr(). + +It also restores the colors on the terminal to the values they had when the +terminal was just turned on. +""" +def termattrs() -> int: + """Return a logical OR of all video attributes supported by the terminal. +""" +def termname() -> bytes: + """Return the value of the environment variable TERM, truncated to 14 characters. +""" +def tigetflag(capname: str, /) -> int: + """Return the value of the Boolean capability. + + capname + The terminfo capability name. + +The value -1 is returned if capname is not a Boolean capability, or 0 if +it is canceled or absent from the terminal description. +""" +def tigetnum(capname: str, /) -> int: + """Return the value of the numeric capability. + + capname + The terminfo capability name. + +The value -2 is returned if capname is not a numeric capability, or -1 if +it is canceled or absent from the terminal description. +""" +def tigetstr(capname: str, /) -> bytes | None: + """Return the value of the string capability. + + capname + The terminfo capability name. + +None is returned if capname is not a string capability, or is canceled or +absent from the terminal description. +""" def tparm( str: ReadOnlyBuffer, i1: int = 0, @@ -364,15 +764,53 @@ def tparm( i8: int = 0, i9: int = 0, /, -) -> bytes: ... -def typeahead(fd: int, /) -> None: ... -def unctrl(ch: _ChType, /) -> bytes: ... -def unget_wch(ch: int | str, /) -> None: ... -def ungetch(ch: _ChType, /) -> None: ... -def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ... +) -> bytes: + """Instantiate the specified byte string with the supplied parameters. + + str + Parameterized byte string obtained from the terminfo database. +""" +def typeahead(fd: int, /) -> None: + """Specify that the file descriptor fd be used for typeahead checking. + + fd + File descriptor. + +If fd is -1, then no typeahead checking is done. +""" +def unctrl(ch: _ChType, /) -> bytes: + """Return a string which is a printable representation of the character ch. + +Control characters are displayed as a caret followed by the character, +for example as ^C. Printing characters are left as they are. +""" +def unget_wch(ch: int | str, /) -> None: + """Push ch so the next get_wch() will return it. 
+""" +def ungetch(ch: _ChType, /) -> None: + """Push ch so the next getch() will return it. +""" +def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: + """Push a KEY_MOUSE event onto the input queue. + +The following getmouse() will return the given state data. +""" def update_lines_cols() -> None: ... -def use_default_colors() -> None: ... -def use_env(flag: bool, /) -> None: ... +def use_default_colors() -> None: + """Equivalent to assume_default_colors(-1, -1). +""" +def use_env(flag: bool, /) -> None: + """Use environment variables LINES and COLUMNS. + +If used, this function should be called before initscr() or newterm() are +called. + +When flag is False, the values of lines and columns specified in the terminfo +database will be used, even if environment variables LINES and COLUMNS (used +by default) are set, or if curses is running in a window (in which case +default behavior would be to use the window size if LINES and COLUMNS are +not set). +""" class error(Exception): ... @@ -380,22 +818,95 @@ class error(Exception): ... class window: # undocumented encoding: str @overload - def addch(self, ch: _ChType, attr: int = ...) -> None: ... + def addch(self, ch: _ChType, attr: int = ...) -> None: + """addch([y, x,] ch, [attr=_curses.A_NORMAL]) +Paint the character. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to add. + attr + Attributes for the character. + +Paint character ch at (y, x) with attributes attr, +overwriting any character previously painted at that location. +By default, the character position and attributes are the +current settings for the window object. +""" @overload def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... @overload - def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... + def addnstr(self, str: str, n: int, attr: int = ...) -> None: + """addnstr([y, x,] str, n, [attr]) +Paint at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + n + Maximal number of characters. + attr + Attributes for characters. + +Paint at most n characters of the string str at (y, x) with +attributes attr, overwriting anything previously on the display. +By default, the character position and attributes are the +current settings for the window object. +""" @overload def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload - def addstr(self, str: str, attr: int = ...) -> None: ... + def addstr(self, str: str, attr: int = ...) -> None: + """addstr([y, x,] str, [attr]) +Paint the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + attr + Attributes for characters. + +Paint the string str at (y, x) with attributes attr, +overwriting anything previously on the display. +By default, the character position and attributes are the +current settings for the window object. +""" @overload def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... - def attroff(self, attr: int, /) -> None: ... - def attron(self, attr: int, /) -> None: ... - def attrset(self, attr: int, /) -> None: ... - def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: ... - def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: ... + def attroff(self, attr: int, /) -> None: + """Remove attribute attr from the "background" set. +""" + def attron(self, attr: int, /) -> None: + """Add attribute attr from the "background" set. +""" + def attrset(self, attr: int, /) -> None: + """Set the "background" set of attributes. 
+""" + def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: + """Set the background property of the window. + + ch + Background character. + attr + Background attributes. +""" + def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: + """Set the window's background. + + ch + Background character. + attr + Background attributes. +""" def border( self, ls: _ChType = ..., @@ -406,13 +917,67 @@ class window: # undocumented tr: _ChType = ..., bl: _ChType = ..., br: _ChType = ..., - ) -> None: ... + ) -> None: + """Draw a border around the edges of the window. + + ls + Left side. + rs + Right side. + ts + Top side. + bs + Bottom side. + tl + Upper-left corner. + tr + Upper-right corner. + bl + Bottom-left corner. + br + Bottom-right corner. + +Each parameter specifies the character to use for a specific part of the +border. The characters can be specified as integers or as one-character +strings. A 0 value for any parameter will cause the default character to be +used for that parameter. +""" @overload - def box(self) -> None: ... + def box(self) -> None: + """box([verch=0, horch=0]) +Draw a border around the edges of the window. + + verch + Left and right side. + horch + Top and bottom side. + +Similar to border(), but both ls and rs are verch and both ts and bs are +horch. The default corner characters are always used by this function. +""" @overload def box(self, vertch: _ChType = 0, horch: _ChType = 0) -> None: ... @overload - def chgat(self, attr: int) -> None: ... + def chgat(self, attr: int) -> None: + """chgat([y, x,] [n=-1,] attr) +Set the attributes of characters. + + y + Y-coordinate. + x + X-coordinate. + n + Number of characters. + attr + Attributes for characters. + +Set the attributes of num characters at the current cursor position, or at +position (y, x) if supplied. If no value of num is given or num = -1, the +attribute will be set on all the characters to the end of the line. This +function does not move the cursor. The changed line will be touched using +the touchline() method so that the contents will be redisplayed by the next +window refresh. +""" @overload def chgat(self, num: int, attr: int) -> None: ... @overload @@ -425,35 +990,120 @@ class window: # undocumented def clrtoeol(self) -> None: ... def cursyncup(self) -> None: ... @overload - def delch(self) -> None: ... + def delch(self) -> None: + """delch([y, x]) +Delete any character at (y, x). + + y + Y-coordinate. + x + X-coordinate. +""" @overload def delch(self, y: int, x: int) -> None: ... def deleteln(self) -> None: ... @overload - def derwin(self, begin_y: int, begin_x: int) -> window: ... + def derwin(self, begin_y: int, begin_x: int) -> window: + """derwin([nlines=0, ncols=0,] begin_y, begin_x) +Create a sub-window (window-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + +derwin() is the same as calling subwin(), except that begin_y and begin_x +are relative to the origin of the window, rather than relative to the entire +screen. +""" @overload def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... - def echochar(self, ch: _ChType, attr: int = 0, /) -> None: ... - def enclose(self, y: int, x: int, /) -> bool: ... + def echochar(self, ch: _ChType, attr: int = 0, /) -> None: + """Add character ch with attribute attr, and refresh. + + ch + Character to add. + attr + Attributes for the character. 
+""" + def enclose(self, y: int, x: int, /) -> bool: + """Return True if the screen-relative coordinates are enclosed by the window. + + y + Y-coordinate. + x + X-coordinate. +""" def erase(self) -> None: ... def getbegyx(self) -> tuple[int, int]: ... - def getbkgd(self) -> tuple[int, int]: ... + def getbkgd(self) -> tuple[int, int]: + """Return the window's current background character/attribute pair. +""" @overload - def getch(self) -> int: ... + def getch(self) -> int: + """getch([y, x]) +Get a character code from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + +The integer returned does not have to be in ASCII range: function keys, +keypad keys and so on return numbers higher than 256. In no-delay mode, -1 +is returned if there is no input, else getch() waits until a key is pressed. +""" @overload def getch(self, y: int, x: int) -> int: ... @overload - def get_wch(self) -> int | str: ... + def get_wch(self) -> int | str: + """get_wch([y, x]) +Get a wide character from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + +Return a character for most keys, or an integer for function keys, +keypad keys, and other special keys. +""" @overload def get_wch(self, y: int, x: int) -> int | str: ... @overload - def getkey(self) -> str: ... + def getkey(self) -> str: + """getkey([y, x]) +Get a character (string) from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. + +Returning a string instead of an integer, as getch() does. Function keys, +keypad keys and other special keys return a multibyte string containing the +key name. In no-delay mode, an exception is raised if there is no input. +""" @overload def getkey(self, y: int, x: int) -> str: ... def getmaxyx(self) -> tuple[int, int]: ... def getparyx(self) -> tuple[int, int]: ... @overload - def getstr(self) -> bytes: ... + def getstr(self) -> bytes: + """getstr([[y, x,] n=2047]) +Read a string from the user, with primitive line editing capacity. + + y + Y-coordinate. + x + X-coordinate. + n + Maximal number of characters. +""" @overload def getstr(self, n: int) -> bytes: ... @overload @@ -462,35 +1112,135 @@ class window: # undocumented def getstr(self, y: int, x: int, n: int) -> bytes: ... def getyx(self) -> tuple[int, int]: ... @overload - def hline(self, ch: _ChType, n: int) -> None: ... + def hline(self, ch: _ChType, n: int) -> None: + """hline([y, x,] ch, n, [attr=_curses.A_NORMAL]) +Display a horizontal line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the characters. +""" @overload def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... def idcok(self, flag: bool) -> None: ... def idlok(self, yes: bool) -> None: ... def immedok(self, flag: bool) -> None: ... @overload - def inch(self) -> int: ... + def inch(self) -> int: + """inch([y, x]) +Return the character at the given position in the window. + + y + Y-coordinate. + x + X-coordinate. + +The bottom 8 bits are the character proper, and upper bits are the attributes. +""" @overload def inch(self, y: int, x: int) -> int: ... @overload - def insch(self, ch: _ChType, attr: int = ...) -> None: ... + def insch(self, ch: _ChType, attr: int = ...) -> None: + """insch([y, x,] ch, [attr=_curses.A_NORMAL]) +Insert a character before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to insert. + attr + Attributes for the character. 
+ +All characters to the right of the cursor are shifted one position right, with +the rightmost characters on the line being lost. +""" @overload def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... def insdelln(self, nlines: int) -> None: ... def insertln(self) -> None: ... @overload - def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... + def insnstr(self, str: str, n: int, attr: int = ...) -> None: + """insnstr([y, x,] str, n, [attr]) +Insert at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + n + Maximal number of characters. + attr + Attributes for characters. + +Insert a character string (as many characters as will fit on the line) +before the character under the cursor, up to n characters. If n is zero +or negative, the entire string is inserted. All characters to the right +of the cursor are shifted right, with the rightmost characters on the line +being lost. The cursor position does not change (after moving to y, x, if +specified). +""" @overload def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload - def insstr(self, str: str, attr: int = ...) -> None: ... + def insstr(self, str: str, attr: int = ...) -> None: + """insstr([y, x,] str, [attr]) +Insert the string before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + attr + Attributes for characters. + +Insert a character string (as many characters as will fit on the line) +before the character under the cursor. All characters to the right of +the cursor are shifted right, with the rightmost characters on the line +being lost. The cursor position does not change (after moving to y, x, +if specified). +""" @overload def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... @overload - def instr(self, n: int = 2047) -> bytes: ... + def instr(self, n: int = 2047) -> bytes: + """instr([y, x,] n=2047) +Return a string of characters, extracted from the window. + + y + Y-coordinate. + x + X-coordinate. + n + Maximal number of characters. + +Return a string of characters, extracted from the window starting at the +current cursor position, or at y, x if specified. Attributes are stripped +from the characters. If n is specified, instr() returns a string at most +n characters long (exclusive of the trailing NUL). +""" @overload def instr(self, y: int, x: int, n: int = 2047) -> bytes: ... - def is_linetouched(self, line: int, /) -> bool: ... + def is_linetouched(self, line: int, /) -> bool: + """Return True if the specified line was modified, otherwise return False. + + line + Line number. + +Raise a curses.error exception if line is not valid for the given window. +""" def is_wintouched(self) -> bool: ... def keypad(self, yes: bool, /) -> None: ... def leaveok(self, yes: bool) -> None: ... @@ -500,51 +1250,178 @@ class window: # undocumented def nodelay(self, yes: bool) -> None: ... def notimeout(self, yes: bool) -> None: ... @overload - def noutrefresh(self) -> None: ... + def noutrefresh(self) -> None: + """noutrefresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) +Mark for refresh but wait. + +This function updates the data structure representing the desired state of the +window, but does not force an update of the physical screen. To accomplish +that, call doupdate(). +""" @overload def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... 
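The `noutrefresh()` docstring added above describes the virtual-screen batching pattern. A minimal illustrative sketch of that pattern (not part of the stubs; assumes a working terminal and uses only documented `curses` calls):

```python
import curses

def draw(stdscr):
    win1 = curses.newwin(3, 20, 0, 0)   # nlines, ncols, begin_y, begin_x
    win2 = curses.newwin(3, 20, 4, 0)
    win1.addstr(1, 1, "first window")
    win2.addstr(1, 1, "second window")
    # noutrefresh() only updates the virtual screen; a single doupdate()
    # then repaints the physical screen once for both windows.
    win1.noutrefresh()
    win2.noutrefresh()
    curses.doupdate()
    stdscr.getch()               # wait for a keypress before exiting

curses.wrapper(draw)
```

Batching several `noutrefresh()` calls before one `doupdate()` avoids the flicker of refreshing each window separately.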
@overload - def overlay(self, destwin: window) -> None: ... + def overlay(self, destwin: window) -> None: + """overlay(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol]) +Overlay the window on top of destwin. + +The windows need not be the same size, only the overlapping region is copied. +This copy is non-destructive, which means that the current background +character does not overwrite the old contents of destwin. + +To get fine-grained control over the copied region, the second form of +overlay() can be used. sminrow and smincol are the upper-left coordinates +of the source window, and the other variables mark a rectangle in the +destination window. +""" @overload def overlay( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int ) -> None: ... @overload - def overwrite(self, destwin: window) -> None: ... + def overwrite(self, destwin: window) -> None: + """overwrite(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, + dmaxcol]) +Overwrite the window on top of destwin. + +The windows need not be the same size, in which case only the overlapping +region is copied. This copy is destructive, which means that the current +background character overwrites the old contents of destwin. + +To get fine-grained control over the copied region, the second form of +overwrite() can be used. sminrow and smincol are the upper-left coordinates +of the source window, the other variables mark a rectangle in the destination +window. +""" @overload def overwrite( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int ) -> None: ... - def putwin(self, file: SupportsWrite[bytes], /) -> None: ... - def redrawln(self, beg: int, num: int, /) -> None: ... + def putwin(self, file: SupportsWrite[bytes], /) -> None: + """Write all data associated with the window into the provided file object. + +This information can be later retrieved using the getwin() function. +""" + def redrawln(self, beg: int, num: int, /) -> None: + """Mark the specified lines corrupted. + + beg + Starting line number. + num + The number of lines. + +They should be completely redrawn on the next refresh() call. +""" def redrawwin(self) -> None: ... @overload - def refresh(self) -> None: ... + def refresh(self) -> None: + """refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) +Update the display immediately. + +Synchronize actual screen with previous drawing/deleting methods. +The 6 optional arguments can only be specified when the window is a pad +created with newpad(). The additional parameters are needed to indicate +what part of the pad and screen are involved. pminrow and pmincol specify +the upper left-hand corner of the rectangle to be displayed in the pad. +sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to +be displayed on the screen. The lower right-hand corner of the rectangle to +be displayed in the pad is calculated from the screen coordinates, since the +rectangles must be the same size. Both rectangles must be entirely contained +within their respective structures. Negative values of pminrow, pmincol, +sminrow, or smincol are treated as if they were zero. +""" @overload def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... def resize(self, nlines: int, ncols: int) -> None: ... - def scroll(self, lines: int = 1) -> None: ... 
+ def scroll(self, lines: int = 1) -> None: + """scroll([lines=1]) +Scroll the screen or scrolling region. + + lines + Number of lines to scroll. + +Scroll upward if the argument is positive and downward if it is negative. +""" def scrollok(self, flag: bool) -> None: ... - def setscrreg(self, top: int, bottom: int, /) -> None: ... + def setscrreg(self, top: int, bottom: int, /) -> None: + """Define a software scrolling region. + + top + First line number. + bottom + Last line number. + +All scrolling actions will take place in this region. +""" def standend(self) -> None: ... def standout(self) -> None: ... @overload - def subpad(self, begin_y: int, begin_x: int) -> window: ... + def subpad(self, begin_y: int, begin_x: int) -> window: + """subwin([nlines=0, ncols=0,] begin_y, begin_x) +Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + +By default, the sub-window will extend from the specified position to the +lower right corner of the window. +""" @overload def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... @overload - def subwin(self, begin_y: int, begin_x: int) -> window: ... + def subwin(self, begin_y: int, begin_x: int) -> window: + """subwin([nlines=0, ncols=0,] begin_y, begin_x) +Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + +By default, the sub-window will extend from the specified position to the +lower right corner of the window. +""" @overload def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... def syncdown(self) -> None: ... def syncok(self, flag: bool) -> None: ... def syncup(self) -> None: ... def timeout(self, delay: int) -> None: ... - def touchline(self, start: int, count: int, changed: bool = True) -> None: ... + def touchline(self, start: int, count: int, changed: bool = True) -> None: + """touchline(start, count, [changed=True]) +Pretend count lines have been changed, starting with line start. + +If changed is supplied, it specifies whether the affected lines are marked +as having been changed (changed=True) or unchanged (changed=False). +""" def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload - def vline(self, ch: _ChType, n: int) -> None: ... + def vline(self, ch: _ChType, n: int) -> None: + """vline([y, x,] ch, n, [attr=_curses.A_NORMAL]) +Display a vertical line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the character. +""" @overload def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi index a552a151ddf14..fb695b9fff475 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi @@ -8,20 +8,56 @@ class error(Exception): ... @final class panel: - def above(self) -> panel: ... - def below(self) -> panel: ... - def bottom(self) -> None: ... - def hidden(self) -> bool: ... - def hide(self) -> None: ... - def move(self, y: int, x: int, /) -> None: ... - def replace(self, win: window, /) -> None: ... - def set_userptr(self, obj: object, /) -> None: ... - def show(self) -> None: ... - def top(self) -> None: ... - def userptr(self) -> object: ... 
- def window(self) -> window: ... + def above(self) -> panel: + """Return the panel above the current panel. +""" + def below(self) -> panel: + """Return the panel below the current panel. +""" + def bottom(self) -> None: + """Push the panel to the bottom of the stack. +""" + def hidden(self) -> bool: + """Return True if the panel is hidden (not visible), False otherwise. +""" + def hide(self) -> None: + """Hide the panel. -def bottom_panel() -> panel: ... -def new_panel(win: window, /) -> panel: ... -def top_panel() -> panel: ... -def update_panels() -> panel: ... +This does not delete the object, it just makes the window on screen invisible. +""" + def move(self, y: int, x: int, /) -> None: + """Move the panel to the screen coordinates (y, x). +""" + def replace(self, win: window, /) -> None: + """Change the window associated with the panel to the window win. +""" + def set_userptr(self, obj: object, /) -> None: + """Set the panel's user pointer to obj. +""" + def show(self) -> None: + """Display the panel (which might have been hidden). +""" + def top(self) -> None: + """Push panel to the top of the stack. +""" + def userptr(self) -> object: + """Return the user pointer for the panel. +""" + def window(self) -> window: + """Return the window object associated with the panel. +""" + +def bottom_panel() -> panel: + """Return the bottom panel in the panel stack. +""" +def new_panel(win: window, /) -> panel: + """Return a panel object, associating it with the given window win. +""" +def top_panel() -> panel: + """Return the top panel in the panel stack. +""" +def update_panels() -> panel: + """Updates the virtual screen after changes in the panel stack. + +This does not call curses.doupdate(), so you'll have to do this yourself. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi index 222c3ffcb246b..828fdaaa96bb8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi @@ -39,6 +39,26 @@ if sys.platform != "win32": __init__: None # type: ignore[assignment] if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: + """Return a database object. + + filename + The filename to open. + flags + How to open the file. "r" for reading, "w" for writing, etc. + mode + If creating a new file, the mode bits for the new file + (e.g. os.O_RDWR). +""" else: - def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ... + def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: + """Return a database object. + + filename + The filename to open. + flags + How to open the file. "r" for reading, "w" for writing, etc. + mode + If creating a new file, the mode bits for the new file + (e.g. os.O_RDWR). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi index 3cfe8944dfaf4..e51c433f15889 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi @@ -1,3 +1,5 @@ +"""C decimal arithmetic module +""" import sys from decimal import ( Clamped as Clamped, @@ -44,8 +46,14 @@ MIN_ETINY: Final[int] if sys.version_info >= (3, 14): IEEE_CONTEXT_MAX_BITS: Final[int] -def setcontext(context: Context, /) -> None: ... -def getcontext() -> Context: ... 
+def setcontext(context: Context, /) -> None: + """Set a new default context. + +""" +def getcontext() -> Context: + """Get the current default context. + +""" if sys.version_info >= (3, 11): def localcontext( @@ -59,13 +67,30 @@ if sys.version_info >= (3, 11): clamp: int | None = None, traps: dict[_TrapType, bool] | None = None, flags: dict[_TrapType, bool] | None = None, - ) -> _ContextManager: ... + ) -> _ContextManager: + """Return a context manager that will set the default context to a copy of ctx +on entry to the with-statement and restore the previous default context when +exiting the with-statement. If no context is specified, a copy of the current +default context is used. + +""" else: - def localcontext(ctx: Context | None = None) -> _ContextManager: ... + def localcontext(ctx: Context | None = None) -> _ContextManager: + """Return a context manager that will set the default context to a copy of ctx +on entry to the with-statement and restore the previous default context when +exiting the with-statement. If no context is specified, a copy of the current +default context is used. + +""" if sys.version_info >= (3, 14): - def IEEEContext(bits: int, /) -> Context: ... + def IEEEContext(bits: int, /) -> Context: + """Return a context object initialized to the proper values for one of the +IEEE interchange formats. The argument must be a multiple of 32 and less +than IEEE_CONTEXT_MAX_BITS. + +""" DefaultContext: Context BasicContext: Context diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi index 58db64a016f34..2c8f4b7d146f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi @@ -1,3 +1,11 @@ +"""Core implementation of import. + +This module is NOT meant to be directly imported! It has been designed such +that it can be bootstrapped into Python as the implementation of import. As +such it requires the injection of specific modules and attributes in order to +work. One should use importlib as the public-facing version of this module. + +""" import importlib.abc import importlib.machinery import sys @@ -15,16 +23,65 @@ def __import__( locals: Mapping[str, object] | None = None, fromlist: Sequence[str] | None = (), level: int = 0, -) -> ModuleType: ... +) -> ModuleType: + """Import a module. + +The 'globals' argument is used to infer where the import is occurring from +to handle relative imports. The 'locals' argument is ignored. The +'fromlist' argument specifies what should exist as attributes on the module +being imported (e.g. ``from module import ``). The 'level' +argument represents the package location to import from in a relative +import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). + +""" def spec_from_loader( name: str, loader: LoaderProtocol | None, *, origin: str | None = None, is_package: bool | None = None -) -> importlib.machinery.ModuleSpec | None: ... -def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... +) -> importlib.machinery.ModuleSpec | None: + """Return a module spec based on various loader methods. +""" +def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: + """Create a module based on the provided spec. +""" def _init_module_attrs( spec: importlib.machinery.ModuleSpec, module: types.ModuleType, *, override: bool = False ) -> types.ModuleType: ... 
class ModuleSpec: + """The specification for a module, used for loading. + +A module's spec is the source for information about the module. For +data associated with the module, including source, use the spec's +loader. + +`name` is the absolute name of the module. `loader` is the loader +to use when loading the module. `parent` is the name of the +package the module is in. The parent is derived from the name. + +`is_package` determines if the module is considered a package or +not. On modules this is reflected by the `__path__` attribute. + +`origin` is the specific location used by the loader from which to +load the module, if that information is available. When filename is +set, origin will match. + +`has_location` indicates that a spec's "origin" reflects a location. +When this is True, `__file__` attribute of the module is set. + +`cached` is the location of the cached bytecode file, if any. It +corresponds to the `__cached__` attribute. + +`submodule_search_locations` is the sequence of path entries to +search when importing submodules. If set, is_package should be +True--and False otherwise. + +Packages are simply modules that (may) have submodules. If a spec +has a non-None value in `submodule_search_locations`, the import +system will consider modules loaded from the spec as packages. + +Only finders (see importlib.abc.MetaPathFinder and +importlib.abc.PathEntryFinder) should modify ModuleSpec instances. + +""" def __init__( self, name: str, @@ -41,17 +98,32 @@ class ModuleSpec: loader_state: Any cached: str | None @property - def parent(self) -> str | None: ... + def parent(self) -> str | None: + """The name of the module's parent. +""" has_location: bool def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + """Meta path import for built-in modules. + +All methods are either class or static methods to avoid the need to +instantiate the class. + +""" # MetaPathFinder if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: + """Find the built-in module. + + If 'path' is ever specified then the search is considered a failure. + + This method is deprecated. Use find_spec() instead. + + """ @classmethod def find_spec( @@ -59,13 +131,24 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) ) -> ModuleSpec | None: ... # InspectLoader @classmethod - def is_package(cls, fullname: str) -> bool: ... + def is_package(cls, fullname: str) -> bool: + """Return False as built-in modules are never packages. +""" @classmethod - def load_module(cls, fullname: str) -> types.ModuleType: ... + def load_module(cls, fullname: str) -> types.ModuleType: + """Load the specified module into sys.modules and return it. + +This method is deprecated. Use loader.exec_module() instead. + +""" @classmethod - def get_code(cls, fullname: str) -> None: ... + def get_code(cls, fullname: str) -> None: + """Return None as built-in modules do not have code objects. +""" @classmethod - def get_source(cls, fullname: str) -> None: ... + def get_source(cls, fullname: str) -> None: + """Return None as built-in modules do not have source code. 
+""" # Loader if sys.version_info < (3, 12): @staticmethod @@ -73,24 +156,48 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: ... + def module_repr(module: types.ModuleType) -> str: + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ if sys.version_info >= (3, 10): @staticmethod - def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + def create_module(spec: ModuleSpec) -> types.ModuleType | None: + """Create a built-in module +""" @staticmethod - def exec_module(module: types.ModuleType) -> None: ... + def exec_module(module: types.ModuleType) -> None: + """Exec a built-in module +""" else: @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: + """Create a built-in module +""" @classmethod - def exec_module(cls, module: types.ModuleType) -> None: ... + def exec_module(cls, module: types.ModuleType) -> None: + """Exec a built-in module +""" class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + """Meta path import for frozen modules. + +All methods are either class or static methods to avoid the need to +instantiate the class. + +""" # MetaPathFinder if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: + """Find a frozen module. + + This method is deprecated. Use find_spec() instead. + + """ @classmethod def find_spec( @@ -98,13 +205,24 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): ) -> ModuleSpec | None: ... # InspectLoader @classmethod - def is_package(cls, fullname: str) -> bool: ... + def is_package(cls, fullname: str) -> bool: + """Return True if the frozen module is a package. +""" @classmethod - def load_module(cls, fullname: str) -> types.ModuleType: ... + def load_module(cls, fullname: str) -> types.ModuleType: + """Load a frozen module. + +This method is deprecated. Use exec_module() instead. + +""" @classmethod - def get_code(cls, fullname: str) -> None: ... + def get_code(cls, fullname: str) -> None: + """Return the code object for the frozen module. +""" @classmethod - def get_source(cls, fullname: str) -> None: ... + def get_source(cls, fullname: str) -> None: + """Return None as frozen modules do not have source code. +""" # Loader if sys.version_info < (3, 12): @staticmethod @@ -112,13 +230,22 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(m: types.ModuleType) -> str: ... + def module_repr(m: types.ModuleType) -> str: + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ if sys.version_info >= (3, 10): @staticmethod - def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... 
+ def create_module(spec: ModuleSpec) -> types.ModuleType | None: + """Set __file__, if able. +""" else: @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: + """Use default semantics for module creation. +""" @staticmethod def exec_module(module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi index 4778be3af1f39..b157f74dcf1a6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi @@ -1,3 +1,11 @@ +"""Core implementation of path-based import. + +This module is NOT meant to be directly imported! It has been designed such +that it can be bootstrapped into Python as the implementation of import. As +such it requires the injection of specific modules and attributes in order to +work. One should use importlib as the public-facing version of this module. + +""" import _ast import _io import importlib.abc @@ -26,25 +34,71 @@ else: MAGIC_NUMBER: Final[bytes] -def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... -def source_from_cache(path: StrPath) -> str: ... -def decode_source(source_bytes: ReadableBuffer) -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: + """Given the path to a .py file, return the path to its .pyc file. + +The .py file does not need to exist; this simply returns the path to the +.pyc file calculated as if the .py file were imported. + +The 'optimization' parameter controls the presumed optimization level of +the bytecode file. If 'optimization' is not None, the string representation +of the argument is taken and verified to be alphanumeric (else ValueError +is raised). + +The debug_override parameter is deprecated. If debug_override is not None, +a True value is the same as setting 'optimization' to the empty string +while a False value is equivalent to setting 'optimization' to '1'. + +If sys.implementation.cache_tag is None then NotImplementedError is raised. + +""" +def source_from_cache(path: StrPath) -> str: + """Given the path to a .pyc. file, return the path to its .py file. + +The .pyc file does not need to exist; this simply returns the path to +the .py file calculated to correspond to the .pyc file. If path does +not conform to PEP 3147/488 format, ValueError will be raised. If +sys.implementation.cache_tag is None then NotImplementedError is raised. + +""" +def decode_source(source_bytes: ReadableBuffer) -> str: + """Decode bytes representing source code and return the string. + +Universal newline support is used in the decoding. +""" def spec_from_file_location( name: str, location: StrOrBytesPath | None = None, *, loader: LoaderProtocol | None = None, submodule_search_locations: list[str] | None = ..., -) -> importlib.machinery.ModuleSpec | None: ... +) -> importlib.machinery.ModuleSpec | None: + """Return a module spec based on a file location. + +To indicate that the module is a package, set +submodule_search_locations to a list of directory paths. An +empty list is sufficient, though its not otherwise useful to the +import system. + +The loader must take a spec as its only __init__() arg. + +""" @deprecated( "Deprecated since Python 3.6. 
Use site configuration instead. " "Future versions of Python may not enable this finder by default." ) class WindowsRegistryFinder(importlib.abc.MetaPathFinder): + """Meta path finder for modules declared in the Windows registry. +""" if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: + """Find module named in the registry. + + This method is deprecated. Use find_spec() instead. + + """ @classmethod def find_spec( @@ -52,27 +106,61 @@ class WindowsRegistryFinder(importlib.abc.MetaPathFinder): ) -> ModuleSpec | None: ... class PathFinder(importlib.abc.MetaPathFinder): + """Meta path finder for sys.path and package __path__ attributes. +""" if sys.version_info >= (3, 10): @staticmethod - def invalidate_caches() -> None: ... + def invalidate_caches() -> None: + """Call the invalidate_caches() method on all path entry finders +stored in sys.path_importer_cache (where implemented). +""" else: @classmethod - def invalidate_caches(cls) -> None: ... + def invalidate_caches(cls) -> None: + """Call the invalidate_caches() method on all path entry finders + stored in sys.path_importer_caches (where implemented). +""" if sys.version_info >= (3, 10): @staticmethod - def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: + """ +Find distributions. + +Return an iterable of all Distribution instances capable of +loading the metadata for packages matching ``context.name`` +(or all names if ``None`` indicated) along the paths in the list +of directories ``context.path``. +""" else: @classmethod - def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ @classmethod def find_spec( cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None - ) -> ModuleSpec | None: ... + ) -> ModuleSpec | None: + """Try to find a spec for 'fullname' on sys.path or 'path'. + +The search is based on sys.path_hooks and sys.path_importer_cache. +""" if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: + """find the module on sys.path or 'path' based on sys.path_hooks and + sys.path_importer_cache. + + This method is deprecated. Use find_spec() instead. + + """ SOURCE_SUFFIXES: Final[list[str]] DEBUG_BYTECODE_SUFFIXES: Final = [".pyc"] @@ -81,36 +169,113 @@ BYTECODE_SUFFIXES: Final = [".pyc"] EXTENSION_SUFFIXES: Final[list[str]] class FileFinder(importlib.abc.PathEntryFinder): + """File-based finder. 
+ +Interactions with the file system are cached for performance, being +refreshed when the directory the finder is handling has been modified. + +""" path: str - def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: ... + def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: + """Initialize with the path to search on and a variable number of +2-tuples containing the loader and the file suffixes the loader +recognizes. +""" @classmethod def path_hook( cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] - ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... + ) -> Callable[[str], importlib.abc.PathEntryFinder]: + """A class method which returns a closure to use on sys.path_hook +which will return an instance using the specified loaders and the path +called on the closure. + +If the path called on the closure is not a directory, ImportError is +raised. + +""" class _LoaderBasics: - def is_package(self, fullname: str) -> bool: ... - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... - def exec_module(self, module: types.ModuleType) -> None: ... - def load_module(self, fullname: str) -> types.ModuleType: ... + """Base class of common code needed by both SourceLoader and +SourcelessFileLoader. +""" + def is_package(self, fullname: str) -> bool: + """Concrete implementation of InspectLoader.is_package by checking if +the path returned by get_filename has a filename of '__init__.py'. +""" + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: + """Use default semantics for module creation. +""" + def exec_module(self, module: types.ModuleType) -> None: + """Execute the module. +""" + def load_module(self, fullname: str) -> types.ModuleType: + """This method is deprecated. +""" class SourceLoader(_LoaderBasics): - def path_mtime(self, path: str) -> float: ... - def set_data(self, path: str, data: bytes) -> None: ... - def get_source(self, fullname: str) -> str | None: ... - def path_stats(self, path: str) -> Mapping[str, Any]: ... + def path_mtime(self, path: str) -> float: + """Optional method that returns the modification time (an int) for the +specified path (a str). + +Raises OSError when the path cannot be handled. +""" + def set_data(self, path: str, data: bytes) -> None: + """Optional method which writes data (bytes) to a file path (a str). + +Implementing this method allows for the writing of bytecode files. +""" + def get_source(self, fullname: str) -> str | None: + """Concrete implementation of InspectLoader.get_source. +""" + def path_stats(self, path: str) -> Mapping[str, Any]: + """Optional method returning a metadata dict for the specified +path (a str). + +Possible keys: +- 'mtime' (mandatory) is the numeric timestamp of last source + code modification; +- 'size' (optional) is the size in bytes of the source code. + +Implementing this method allows the loader to read bytecode files. +Raises OSError when the path cannot be handled. +""" def source_to_code( self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath - ) -> types.CodeType: ... - def get_code(self, fullname: str) -> types.CodeType | None: ... + ) -> types.CodeType: + """Return the code object compiled from source. + +The 'data' argument can be any object type that compile() supports. +""" + def get_code(self, fullname: str) -> types.CodeType | None: + """Concrete implementation of InspectLoader.get_code. 
+ +Reading of bytecode requires path_stats to be implemented. To write +bytecode, set_data must also be implemented. + +""" class FileLoader: + """Base file loader class which implements the loader protocol methods that +require file system usage. +""" name: str path: str - def __init__(self, fullname: str, path: str) -> None: ... - def get_data(self, path: str) -> bytes: ... - def get_filename(self, fullname: str | None = None) -> str: ... - def load_module(self, fullname: str | None = None) -> types.ModuleType: ... + def __init__(self, fullname: str, path: str) -> None: + """Cache the module name and the path to the file found by the +finder. +""" + def get_data(self, path: str) -> bytes: + """Return the data from path as raw bytes. +""" + def get_filename(self, fullname: str | None = None) -> str: + """Return the path to the source file as found by the finder. +""" + def load_module(self, fullname: str | None = None) -> types.ModuleType: + """Load a module from a file. + +This method is deprecated. Use exec_module() instead. + +""" if sys.version_info >= (3, 10): def get_resource_reader(self, name: str | None = None) -> importlib.readers.FileReader: ... else: @@ -121,27 +286,56 @@ class FileLoader: def contents(self) -> Iterator[str]: ... class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.SourceLoader, SourceLoader): # type: ignore[misc] # incompatible method arguments in base classes - def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... - def path_stats(self, path: str) -> Mapping[str, Any]: ... + """Concrete implementation of SourceLoader using the file system. +""" + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: + """Write bytes data to a file. +""" + def path_stats(self, path: str) -> Mapping[str, Any]: + """Return the metadata for the path. +""" def source_to_code( # type: ignore[override] # incompatible with InspectLoader.source_to_code self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath, *, _optimize: int = -1, - ) -> types.CodeType: ... + ) -> types.CodeType: + """Return the code object compiled from source. + +The 'data' argument can be any object type that compile() supports. +""" class SourcelessFileLoader(importlib.abc.FileLoader, FileLoader, _LoaderBasics): + """Loader which handles sourceless file imports. +""" def get_code(self, fullname: str) -> types.CodeType | None: ... - def get_source(self, fullname: str) -> None: ... + def get_source(self, fullname: str) -> None: + """Return None as there is no source code. +""" class ExtensionFileLoader(FileLoader, _LoaderBasics, importlib.abc.ExecutionLoader): + """Loader for extension modules. + +The constructor is designed to work with FileFinder. + +""" def __init__(self, name: str, path: str) -> None: ... - def get_filename(self, fullname: str | None = None) -> str: ... - def get_source(self, fullname: str) -> None: ... - def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... - def exec_module(self, module: types.ModuleType) -> None: ... - def get_code(self, fullname: str) -> None: ... + def get_filename(self, fullname: str | None = None) -> str: + """Return the path to the source file as found by the finder. +""" + def get_source(self, fullname: str) -> None: + """Return None as extension modules have no source code. 
+""" + def create_module(self, spec: ModuleSpec) -> types.ModuleType: + """Create an uninitialized extension module +""" + def exec_module(self, module: types.ModuleType) -> None: + """Initialize an extension module +""" + def get_code(self, fullname: str) -> None: + """Return None as an extension module cannot create a code object. +""" def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -153,10 +347,17 @@ if sys.version_info >= (3, 11): def is_package(self, fullname: str) -> Literal[True]: ... def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... - def create_module(self, spec: ModuleSpec) -> None: ... + def create_module(self, spec: ModuleSpec) -> None: + """Use default semantics for module creation. +""" def exec_module(self, module: types.ModuleType) -> None: ... @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") - def load_module(self, fullname: str) -> types.ModuleType: ... + def load_module(self, fullname: str) -> types.ModuleType: + """Load a namespace module. + +This method is deprecated. Use exec_module() instead. + +""" def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... if sys.version_info < (3, 12): @staticmethod @@ -164,7 +365,12 @@ if sys.version_info >= (3, 11): "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: ... + def module_repr(module: types.ModuleType) -> str: + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ _NamespaceLoader = NamespaceLoader else: @@ -175,26 +381,51 @@ else: def is_package(self, fullname: str) -> Literal[True]: ... def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... - def create_module(self, spec: ModuleSpec) -> None: ... + def create_module(self, spec: ModuleSpec) -> None: + """Use default semantics for module creation. +""" def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") - def load_module(self, fullname: str) -> types.ModuleType: ... + def load_module(self, fullname: str) -> types.ModuleType: + """Load a namespace module. + + This method is deprecated. Use exec_module() instead. + + """ @staticmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(module: types.ModuleType) -> str: ... + def module_repr(module: types.ModuleType) -> str: + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... else: - def load_module(self, fullname: str) -> types.ModuleType: ... + def load_module(self, fullname: str) -> types.ModuleType: + """Load a namespace module. + + This method is deprecated. Use exec_module() instead. + + """ @classmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(cls, module: types.ModuleType) -> str: ... 
+ def module_repr(cls, module: types.ModuleType) -> str: + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ if sys.version_info >= (3, 13): - class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): ... + class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): + """A loader for modules that have been packaged as frameworks for +compatibility with Apple's iOS App Store policies. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi index 2cb5fba29dfa1..a05b39aaa0b63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi @@ -1,3 +1,14 @@ +"""This module provides an interface to the GNU DBM (GDBM) library. + +This module is quite similar to the dbm module, but uses GDBM instead to +provide some additional functionality. Please note that the file formats +created by GDBM and dbm are incompatible. + +GDBM objects behave like mappings (dictionaries), except that keys and +values are always immutable bytes-like objects or strings. Printing +a GDBM object doesn't print the keys and values, and the items() and +values() methods are not supported. +""" import sys from _typeshed import ReadOnlyBuffer, StrOrBytesPath from types import TracebackType @@ -43,6 +54,29 @@ if sys.platform != "win32": __init__: None # type: ignore[assignment] if sys.version_info >= (3, 11): - def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... + def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: + """Open a dbm database and return a dbm object. + +The filename argument is the name of the database file. + +The optional flags argument can be 'r' (to open an existing database +for reading only -- default), 'w' (to open an existing database for +reading and writing), 'c' (which creates the database if it doesn't +exist), or 'n' (which always creates a new empty database). + +Some versions of gdbm support additional flags which must be +appended to one of the flags described above. The module constant +'open_flags' is a string of valid additional flags. The 'f' flag +opens the database in fast mode; altered data will not automatically +be written to the disk after every change. This results in faster +writes to the database, but may result in an inconsistent database +if the program crashes while the database is still open. Use the +sync() method to force any unwritten data to be written to the disk. +The 's' flag causes all database operations to be synchronized to +disk. The 'u' flag disables locking of the database file. + +The optional mode argument is the Unix mode of the file, used only +when the database has to be created. It defaults to octal 0o666. +""" else: def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi index 03c1eef3be3ff..45ef7d3e00087 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi @@ -1,3 +1,5 @@ +"""OpenSSL interface for hashlib module +""" import sys from _typeshed import ReadableBuffer from collections.abc import Callable @@ -24,104 +26,260 @@ class _HashObject(Protocol): @disjoint_base class HASH: + """A hash is an object used to calculate a checksum of a string of information. + +Methods: + +update() -- updates the current digest with an additional string +digest() -- return the current digest value +hexdigest() -- return the current digest as a string of hexadecimal digits +copy() -- return a copy of the current hash object + +Attributes: + +name -- the hash algorithm being used by this object +digest_size -- number of bytes in this hashes output +""" @property def digest_size(self) -> int: ... @property def block_size(self) -> int: ... @property def name(self) -> str: ... - def copy(self) -> Self: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def update(self, obj: ReadableBuffer, /) -> None: ... + def copy(self) -> Self: + """Return a copy of the hash object. +""" + def digest(self) -> bytes: + """Return the digest value as a bytes object. +""" + def hexdigest(self) -> str: + """Return the digest value as a string of hexadecimal digits. +""" + def update(self, obj: ReadableBuffer, /) -> None: + """Update this hash object's state with the provided string. +""" if sys.version_info >= (3, 10): class UnsupportedDigestmodError(ValueError): ... class HASHXOF(HASH): - def digest(self, length: int) -> bytes: ... # type: ignore[override] - def hexdigest(self, length: int) -> str: ... # type: ignore[override] + """A hash is an object used to calculate a checksum of a string of information. + +Methods: + +update() -- updates the current digest with an additional string +digest(length) -- return the current digest value +hexdigest(length) -- return the current digest as a string of hexadecimal digits +copy() -- return a copy of the current hash object + +Attributes: + +name -- the hash algorithm being used by this object +digest_size -- number of bytes in this hashes output +""" + def digest(self, length: int) -> bytes: # type: ignore[override] + """Return the digest value as a bytes object. +""" + def hexdigest(self, length: int) -> str: # type: ignore[override] + """Return the digest value as a string of hexadecimal digits. +""" @final class HMAC: + """The object used to calculate HMAC of a message. + +Methods: + +update() -- updates the current digest with an additional string +digest() -- return the current digest value +hexdigest() -- return the current digest as a string of hexadecimal digits +copy() -- return a copy of the current hash object + +Attributes: + +name -- the name, including the hash algorithm used by this object +digest_size -- number of bytes in digest() output +""" @property def digest_size(self) -> int: ... @property def block_size(self) -> int: ... @property def name(self) -> str: ... - def copy(self) -> Self: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def update(self, msg: ReadableBuffer) -> None: ... + def copy(self) -> Self: + """Return a copy ("clone") of the HMAC object. +""" + def digest(self) -> bytes: + """Return the digest of the bytes passed to the update() method so far. 
+""" + def hexdigest(self) -> str: + """Return hexadecimal digest of the bytes passed to the update() method so far. + +This may be used to exchange the value safely in email or other non-binary +environments. +""" + def update(self, msg: ReadableBuffer) -> None: + """Update the HMAC object with msg. +""" @overload -def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: ... +def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: + """Return 'a == b'. + +This function uses an approach designed to prevent +timing analysis, making it appropriate for cryptography. + +a and b must both be of the same type: either str (ASCII only), +or any bytes-like object. + +Note: If a and b are of different lengths, or if an error occurs, +a timing attack could theoretically reveal information about the +types and lengths of a and b--but not their values. +""" @overload def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... -def get_fips_mode() -> int: ... -def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: ... +def get_fips_mode() -> int: + """Determine the OpenSSL FIPS mode of operation. + +For OpenSSL 3.0.0 and newer it returns the state of the default provider +in the default OSSL context. It's not quite the same as FIPS_mode() but good +enough for unittests. + +Effectively any non-zero return value indicates FIPS mode; +values other than 1 may have additional significance. +""" +def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: + """Return a new hmac object. +""" if sys.version_info >= (3, 13): def new( name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Return a new hash object using the named algorithm. + +An optional string argument may be provided and will be +automatically hashed. + +The MD5 and SHA1 algorithms are always supported. +""" def openssl_md5( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a md5 hash object; optionally initialized with a string +""" def openssl_sha1( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha1 hash object; optionally initialized with a string +""" def openssl_sha224( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha224 hash object; optionally initialized with a string +""" def openssl_sha256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha256 hash object; optionally initialized with a string +""" def openssl_sha384( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha384 hash object; optionally initialized with a string +""" def openssl_sha512( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha512 hash object; optionally initialized with a string +""" def openssl_sha3_224( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... 
+ ) -> HASH: + """Returns a sha3-224 hash object; optionally initialized with a string +""" def openssl_sha3_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha3-256 hash object; optionally initialized with a string +""" def openssl_sha3_384( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha3-384 hash object; optionally initialized with a string +""" def openssl_sha3_512( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: ... + ) -> HASH: + """Returns a sha3-512 hash object; optionally initialized with a string +""" def openssl_shake_128( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASHXOF: ... + ) -> HASHXOF: + """Returns a shake-128 variable hash object; optionally initialized with a string +""" def openssl_shake_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASHXOF: ... + ) -> HASHXOF: + """Returns a shake-256 variable hash object; optionally initialized with a string +""" else: - def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: ... - def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... - def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: ... - -def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: ... + def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Return a new hash object using the named algorithm. + +An optional string argument may be provided and will be +automatically hashed. + +The MD5 and SHA1 algorithms are always supported. 
+""" + def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a md5 hash object; optionally initialized with a string +""" + def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha1 hash object; optionally initialized with a string +""" + def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha224 hash object; optionally initialized with a string +""" + def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha256 hash object; optionally initialized with a string +""" + def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha384 hash object; optionally initialized with a string +""" + def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha512 hash object; optionally initialized with a string +""" + def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha3-224 hash object; optionally initialized with a string +""" + def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha3-256 hash object; optionally initialized with a string +""" + def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha3-384 hash object; optionally initialized with a string +""" + def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: + """Returns a sha3-512 hash object; optionally initialized with a string +""" + def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: + """Returns a shake-128 variable hash object; optionally initialized with a string +""" + def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: + """Returns a shake-256 variable hash object; optionally initialized with a string +""" + +def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: + """Single-shot HMAC. +""" def pbkdf2_hmac( hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None -) -> bytes: ... +) -> bytes: + """Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as pseudorandom function. +""" def scrypt( password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64 -) -> bytes: ... +) -> bytes: + """scrypt password-based key derivation function. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi index 4d7d6aba32418..618451d739ec2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi @@ -1,18 +1,81 @@ +"""Heap queue algorithm (a.k.a. priority queue). + +Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for +all k, counting elements from 0. For the sake of comparison, +non-existing elements are considered to be infinite. The interesting +property of a heap is that a[0] is always its smallest element. 
+ +Usage: + +heap = [] # creates an empty heap +heappush(heap, item) # pushes a new item on the heap +item = heappop(heap) # pops the smallest item from the heap +item = heap[0] # smallest item on the heap without popping it +heapify(x) # transforms list into a heap, in-place, in linear time +item = heapreplace(heap, item) # pops and returns smallest item, and adds + # new item; the heap size is unchanged + +Our API differs from textbook heap algorithms as follows: + +- We use 0-based indexing. This makes the relationship between the + index for a node and the indexes for its children slightly less + obvious, but is more suitable since Python uses 0-based indexing. + +- Our heappop() method returns the smallest item, not the largest. + +These two make it possible to view the heap as a regular Python list +without surprises: heap[0] is the smallest item, and heap.sort() +maintains the heap invariant! +""" import sys from _typeshed import SupportsRichComparisonT as _T # All type variable use in this module requires comparability. from typing import Final __about__: Final[str] -def heapify(heap: list[_T], /) -> None: ... -def heappop(heap: list[_T], /) -> _T: ... -def heappush(heap: list[_T], item: _T, /) -> None: ... -def heappushpop(heap: list[_T], item: _T, /) -> _T: ... -def heapreplace(heap: list[_T], item: _T, /) -> _T: ... +def heapify(heap: list[_T], /) -> None: + """Transform list into a heap, in-place, in O(len(heap)) time. +""" +def heappop(heap: list[_T], /) -> _T: + """Pop the smallest item off the heap, maintaining the heap invariant. +""" +def heappush(heap: list[_T], item: _T, /) -> None: + """Push item onto heap, maintaining the heap invariant. +""" +def heappushpop(heap: list[_T], item: _T, /) -> _T: + """Push item on the heap, then pop and return the smallest item from the heap. + +The combined action runs more efficiently than heappush() followed by +a separate call to heappop(). +""" +def heapreplace(heap: list[_T], item: _T, /) -> _T: + """Pop and return the current smallest value, and add the new item. + +This is more efficient than heappop() followed by heappush(), and can be +more appropriate when using a fixed-size heap. Note that the value +returned may be larger than item! That constrains reasonable uses of +this routine unless written as part of a conditional replacement: + + if item > heap[0]: + item = heapreplace(heap, item) +""" if sys.version_info >= (3, 14): - def heapify_max(heap: list[_T], /) -> None: ... - def heappop_max(heap: list[_T], /) -> _T: ... - def heappush_max(heap: list[_T], item: _T, /) -> None: ... - def heappushpop_max(heap: list[_T], item: _T, /) -> _T: ... - def heapreplace_max(heap: list[_T], item: _T, /) -> _T: ... + def heapify_max(heap: list[_T], /) -> None: + """Maxheap variant of heapify. +""" + def heappop_max(heap: list[_T], /) -> _T: + """Maxheap variant of heappop. +""" + def heappush_max(heap: list[_T], item: _T, /) -> None: + """Push item onto max heap, maintaining the heap invariant. +""" + def heappushpop_max(heap: list[_T], item: _T, /) -> _T: + """Maxheap variant of heappushpop. + +The combined action runs more efficiently than heappush_max() followed by +a separate call to heappop_max(). +""" + def heapreplace_max(heap: list[_T], item: _T, /) -> _T: + """Maxheap variant of heapreplace. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi index c12c26d08ba2a..58ee36affd6ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi @@ -1,3 +1,5 @@ +"""(Extremely) low-level import machinery bits as used by importlib. +""" import sys import types from _typeshed import ReadableBuffer @@ -9,22 +11,67 @@ if sys.version_info >= (3, 14): pyc_magic_number_token: int def source_hash(key: int, source: ReadableBuffer) -> bytes: ... -def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: ... -def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: ... -def acquire_lock() -> None: ... -def exec_builtin(mod: types.ModuleType, /) -> int: ... -def exec_dynamic(mod: types.ModuleType, /) -> int: ... -def extension_suffixes() -> list[str]: ... -def init_frozen(name: str, /) -> types.ModuleType: ... -def is_builtin(name: str, /) -> int: ... -def is_frozen(name: str, /) -> bool: ... -def is_frozen_package(name: str, /) -> bool: ... -def lock_held() -> bool: ... -def release_lock() -> None: ... +def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: + """Create an extension module. +""" +def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: + """Create an extension module. +""" +def acquire_lock() -> None: + """Acquires the interpreter's import lock for the current thread. + +This lock should be used by import hooks to ensure thread-safety when importing +modules. On platforms without threads, this function does nothing. +""" +def exec_builtin(mod: types.ModuleType, /) -> int: + """Initialize a built-in module. +""" +def exec_dynamic(mod: types.ModuleType, /) -> int: + """Initialize an extension module. +""" +def extension_suffixes() -> list[str]: + """Returns the list of file suffixes used to identify extension modules. +""" +def init_frozen(name: str, /) -> types.ModuleType: + """Initializes a frozen module. +""" +def is_builtin(name: str, /) -> int: + """Returns True if the module name corresponds to a built-in module. +""" +def is_frozen(name: str, /) -> bool: + """Returns True if the module name corresponds to a frozen module. +""" +def is_frozen_package(name: str, /) -> bool: + """Returns True if the module name is of a frozen package. +""" +def lock_held() -> bool: + """Return True if the import lock is currently held, else False. + +On platforms without threads, return False. +""" +def release_lock() -> None: + """Release the interpreter's import lock. + +On platforms without threads, this function does nothing. +""" if sys.version_info >= (3, 11): - def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... - def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: ... + def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: + """Return info about the corresponding frozen module (if there is one) or None. + +The returned info (a 2-tuple): + + * data the raw marshalled bytes + * is_package whether or not it is a package + * origname the originally frozen module's name, or None if not + a stdlib module (this will usually be the same as + the module's current name) +""" + def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: + """Create a code object for a frozen module. 
+""" else: - def get_frozen_object(name: str, /) -> types.CodeType: ... + def get_frozen_object(name: str, /) -> types.CodeType: + """Create a code object for a frozen module. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi index a631a6f16616b..ad78f8f718b4f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi @@ -1,3 +1,6 @@ +"""This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. +""" from _typeshed import structseq from typing import Any, Final, Literal, SupportsIndex, final from typing_extensions import Buffer, Self @@ -11,24 +14,40 @@ class ChannelNotFoundError(ChannelError): ... # Mark as final, since instantiating ChannelID is not supported. @final class ChannelID: + """A channel ID identifies a channel and may be used as an int. +""" @property - def end(self) -> Literal["send", "recv", "both"]: ... + def end(self) -> Literal["send", "recv", "both"]: + """'send', 'recv', or 'both' +""" @property - def send(self) -> Self: ... + def send(self) -> Self: + """the 'send' end of the channel +""" @property - def recv(self) -> Self: ... + def recv(self) -> Self: + """the 'recv' end of the channel +""" def __eq__(self, other: object, /) -> bool: ... def __ge__(self, other: ChannelID, /) -> bool: ... def __gt__(self, other: ChannelID, /) -> bool: ... def __hash__(self) -> int: ... - def __index__(self) -> int: ... - def __int__(self) -> int: ... + def __index__(self) -> int: + """Return self converted to an integer, if self is suitable for use as an index into a list. +""" + def __int__(self) -> int: + """int(self) +""" def __le__(self, other: ChannelID, /) -> bool: ... def __lt__(self, other: ChannelID, /) -> bool: ... def __ne__(self, other: object, /) -> bool: ... @final class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]): + """ChannelInfo + +A named tuple of a channel's state. +""" __match_args__: Final = ( "open", "closing", @@ -40,47 +59,160 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in "num_interp_recv_released", ) @property - def open(self) -> bool: ... + def open(self) -> bool: + """both ends are open +""" @property - def closing(self) -> bool: ... + def closing(self) -> bool: + """send is closed, recv is non-empty +""" @property - def closed(self) -> bool: ... + def closed(self) -> bool: + """both ends are closed +""" @property - def count(self) -> int: ... # type: ignore[override] + def count(self) -> int: # type: ignore[override] + """queued objects +""" @property - def num_interp_send(self) -> int: ... + def num_interp_send(self) -> int: + """interpreters bound to the send end +""" @property - def num_interp_send_released(self) -> int: ... + def num_interp_send_released(self) -> int: + """interpreters bound to the send end and released +""" @property - def num_interp_recv(self) -> int: ... + def num_interp_recv(self) -> int: + """interpreters bound to the send end +""" @property - def num_interp_recv_released(self) -> int: ... + def num_interp_recv_released(self) -> int: + """interpreters bound to the send end and released +""" @property - def num_interp_both(self) -> int: ... + def num_interp_both(self) -> int: + """interpreters bound to both ends +""" @property - def num_interp_both_recv_released(self) -> int: ... 
+ def num_interp_both_recv_released(self) -> int: + """interpreters bound to both ends and released_from_the recv end +""" @property - def num_interp_both_send_released(self) -> int: ... + def num_interp_both_send_released(self) -> int: + """interpreters bound to both ends and released_from_the send end +""" @property - def num_interp_both_released(self) -> int: ... + def num_interp_both_released(self) -> int: + """interpreters bound to both ends and released_from_both +""" @property - def recv_associated(self) -> bool: ... + def recv_associated(self) -> bool: + """current interpreter is bound to the recv end +""" @property - def recv_released(self) -> bool: ... + def recv_released(self) -> bool: + """current interpreter *was* bound to the recv end +""" @property - def send_associated(self) -> bool: ... + def send_associated(self) -> bool: + """current interpreter is bound to the send end +""" @property - def send_released(self) -> bool: ... + def send_released(self) -> bool: + """current interpreter *was* bound to the send end +""" + +def create(unboundop: Literal[1, 2, 3]) -> ChannelID: + """channel_create(unboundop) -> cid + +Create a new cross-interpreter channel and return a unique generated ID. +""" +def destroy(cid: SupportsIndex) -> None: + """channel_destroy(cid) + +Close and finalize the channel. Afterward attempts to use the channel +will behave as though it never existed. +""" +def list_all() -> list[ChannelID]: + """channel_list_all() -> [cid] + +Return the list of all IDs for active channels. +""" +def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: + """channel_list_interpreters(cid, *, send) -> [id] + +Return the list of all interpreter IDs associated with an end of the channel. + +The 'send' argument should be a boolean indicating whether to use the send or +receive end. +""" +def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: + """channel_send(cid, obj, *, blocking=True, timeout=None) + +Add the object's data to the channel's queue. +By default this waits for the object to be received. +""" +def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: + """channel_send_buffer(cid, obj, *, blocking=True, timeout=None) + +Add the object's buffer to the channel's queue. +By default this waits for the object to be received. +""" +def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: + """channel_recv(cid, [default]) -> (obj, unboundop) + +Return a new object from the data at the front of the channel's queue. + +If there is nothing to receive then raise ChannelEmptyError, unless +a default value is provided. In that case return it. +""" +def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: + """channel_close(cid, *, send=None, recv=None, force=False) + +Close the channel for all interpreters. + +If the channel is empty then the keyword args are ignored and both +ends are immediately closed. Otherwise, if 'force' is True then +all queued items are released and both ends are immediately +closed. 
+ +If the channel is not empty *and* 'force' is False then following +happens: + + * recv is True (regardless of send): + - raise ChannelNotEmptyError + * recv is None and send is None: + - raise ChannelNotEmptyError + * send is True and recv is not True: + - fully close the 'send' end + - close the 'recv' end to interpreters not already receiving + - fully close it once empty + +Closing an already closed channel results in a ChannelClosedError. + +Once the channel's ID has no more ref counts in any interpreter +the channel will be destroyed. +""" +def get_count(cid: SupportsIndex) -> int: + """get_count(cid) + +Return the number of items in the channel. +""" +def get_info(cid: SupportsIndex) -> ChannelInfo: + """get_info(cid) + +Return details about the channel. +""" +def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: + """get_channel_defaults(cid) + +Return the channel's default values, set when it was created. +""" +def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: + """channel_release(cid, *, send=None, recv=None, force=True) -def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ... -def destroy(cid: SupportsIndex) -> None: ... -def list_all() -> list[ChannelID]: ... -def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ... -def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ... -def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ... -def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ... -def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ... -def get_count(cid: SupportsIndex) -> int: ... -def get_info(cid: SupportsIndex) -> ChannelInfo: ... -def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ... -def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ... +Close the channel for the current interpreter. 'send' and 'recv' +(bool) may be used to indicate the ends to close. By default both +ends are closed. Closing an already closed end is a noop. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi index c9323b106f3dc..42271acfb4aa8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi @@ -1,19 +1,79 @@ +"""This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. +""" from typing import Any, Literal, SupportsIndex from typing_extensions import TypeAlias _UnboundOp: TypeAlias = Literal[1, 2, 3] -class QueueError(RuntimeError): ... +class QueueError(RuntimeError): + """Indicates that a queue-related error happened. +""" class QueueNotFoundError(QueueError): ... -def bind(qid: SupportsIndex) -> None: ... -def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: ... -def destroy(qid: SupportsIndex) -> None: ... -def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: ... -def get_count(qid: SupportsIndex) -> int: ... -def get_maxsize(qid: SupportsIndex) -> int: ... -def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: ... -def is_full(qid: SupportsIndex) -> bool: ... -def list_all() -> list[tuple[int, int, _UnboundOp]]: ... 
-def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: ... -def release(qid: SupportsIndex) -> None: ... +def bind(qid: SupportsIndex) -> None: + """bind(qid) + +Take a reference to the identified queue. +The queue is not destroyed until there are no references left. +""" +def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: + """create(maxsize, unboundop, fallback) -> qid + +Create a new cross-interpreter queue and return its unique generated ID. +It is a new reference as though bind() had been called on the queue. + +The caller is responsible for calling destroy() for the new queue +before the runtime is finalized. +""" +def destroy(qid: SupportsIndex) -> None: + """destroy(qid) + +Clear and destroy the queue. Afterward attempts to use the queue +will behave as though it never existed. +""" +def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: + """get(qid) -> (obj, unboundop) + +Return a new object from the data at the front of the queue. +The unbound op is also returned. + +If there is nothing to receive then raise QueueEmpty. +""" +def get_count(qid: SupportsIndex) -> int: + """get_count(qid) + +Return the number of items in the queue. +""" +def get_maxsize(qid: SupportsIndex) -> int: + """get_maxsize(qid) + +Return the maximum number of items in the queue. +""" +def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: + """get_queue_defaults(qid) + +Return the queue's default values, set when it was created. +""" +def is_full(qid: SupportsIndex) -> bool: + """is_full(qid) + +Return true if the queue has a maxsize and has reached it. +""" +def list_all() -> list[tuple[int, int, _UnboundOp]]: + """list_all() -> [(qid, unboundop, fallback)] + +Return the list of IDs for all queues. +Each corresponding default unbound op and fallback is also included. +""" +def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: + """put(qid, obj) + +Add the object's data to the queue. +""" +def release(qid: SupportsIndex) -> None: + """release(qid) + +Release a reference to the queue. +The queue is destroyed once there are no references left. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi index 8e097efad618a..88e6315a8f742 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi @@ -1,3 +1,6 @@ +"""This module provides primitive operations to manage Python interpreters. +The 'interpreters' module provides a more convenient interface. +""" import types from collections.abc import Callable from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload @@ -8,26 +11,102 @@ _R = TypeVar("_R") _Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""] _SharedDict: TypeAlias = dict[str, Any] # many objects can be shared -class InterpreterError(Exception): ... -class InterpreterNotFoundError(InterpreterError): ... +class InterpreterError(Exception): + """A cross-interpreter operation failed +""" +class InterpreterNotFoundError(InterpreterError): + """An interpreter was not found +""" class NotShareableError(ValueError): ... @disjoint_base class CrossInterpreterBufferView: - def __buffer__(self, flags: int, /) -> memoryview: ... - -def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: ... 
-def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: ... -def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: ... -def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: ... -def get_current() -> tuple[int, _Whence]: ... -def get_main() -> tuple[int, _Whence]: ... -def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: ... -def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: ... -def whence(id: SupportsIndex) -> _Whence: ... + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + +def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: + """new_config(name='isolated', /, **overrides) -> type.SimpleNamespace + +Return a representation of a new PyInterpreterConfig. + +The name determines the initial values of the config. Supported named +configs are: default, isolated, legacy, and empty. + +Any keyword arguments are set on the corresponding config fields, +overriding the initial values. +""" +def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: + """create([config], *, reqrefs=False) -> ID + +Create a new interpreter and return a unique generated ID. + +The caller is responsible for destroying the interpreter before exiting, +typically by using _interpreters.destroy(). This can be managed +automatically by passing "reqrefs=True" and then using _incref() and +_decref() appropriately. + +"config" must be a valid interpreter config or the name of a +predefined config ("isolated" or "legacy"). The default +is "isolated". +""" +def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: + """destroy(id, *, restrict=False) + +Destroy the identified interpreter. + +Attempting to destroy the current interpreter raises InterpreterError. +So does an unrecognized ID. +""" +def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: + """list_all() -> [(ID, whence)] + +Return a list containing the ID of every existing interpreter. +""" +def get_current() -> tuple[int, _Whence]: + """get_current() -> (ID, whence) + +Return the ID of current interpreter. +""" +def get_main() -> tuple[int, _Whence]: + """get_main() -> (ID, whence) + +Return the ID of main interpreter. +""" +def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: + """is_running(id, *, restrict=False) -> bool + +Return whether or not the identified interpreter is running. +""" +def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: + """get_config(id, *, restrict=False) -> types.SimpleNamespace + +Return a representation of the config used to initialize the interpreter. +""" +def whence(id: SupportsIndex) -> _Whence: + """whence(id) -> int + +Return an identifier for where the interpreter was created. +""" def exec( id: SupportsIndex, code: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None | types.SimpleNamespace: ... +) -> None | types.SimpleNamespace: + """exec(id, code, shared=None, *, restrict=False) + +Execute the provided code in the identified interpreter. +This is equivalent to running the builtin exec() under the target +interpreter, using the __dict__ of its __main__ module as both +globals and locals. + +"code" may be a string containing the text of a Python script. 
+ +Functions (and code objects) are also supported, with some restrictions. +The code/function must not take any arguments or be a closure +(i.e. have cell vars). Methods and other callables are not supported. + +If a function is provided, its code object is used and all its state +is ignored, including its __globals__ dict. +""" def call( id: SupportsIndex, callable: Callable[..., _R], @@ -36,19 +115,54 @@ def call( *, preserve_exc: bool = False, restrict: bool = False, -) -> tuple[_R, types.SimpleNamespace]: ... +) -> tuple[_R, types.SimpleNamespace]: + """call(id, callable, args=None, kwargs=None, *, restrict=False) + +Call the provided object in the identified interpreter. +Pass the given args and kwargs, if possible. +""" def run_string( id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None: ... +) -> None: + """run_string(id, script, shared=None, *, restrict=False) + +Execute the provided string in the identified interpreter. + +(See _interpreters.exec(). +""" def run_func( id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False -) -> None: ... -def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: ... +) -> None: + """run_func(id, func, shared=None, *, restrict=False) + +Execute the body of the provided function in the identified interpreter. +Code objects are also supported. In both cases, closures and args +are not supported. Methods and other callables are not supported either. + +(See _interpreters.exec(). +""" +def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: + """set___main___attrs(id, ns, *, restrict=False) + +Bind the given attributes in the interpreter's __main__ module. +""" def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ... def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ... -def is_shareable(obj: object) -> bool: ... +def is_shareable(obj: object) -> bool: + """is_shareable(obj) -> bool + +Return True if the object's data may be shared between interpreters and +False otherwise. +""" @overload -def capture_exception(exc: BaseException) -> types.SimpleNamespace: ... +def capture_exception(exc: BaseException) -> types.SimpleNamespace: + """capture_exception(exc=None) -> types.SimpleNamespace + +Return a snapshot of an exception. If "exc" is None +then the current exception, if any, is used (but not cleared). + +The returned snapshot is the same as what _interpreters.exec() returns. +""" @overload def capture_exception(exc: None = None) -> types.SimpleNamespace | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi index 2d2a60e4dddf1..2bca099f80b49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi @@ -1,3 +1,36 @@ +"""The io module provides the Python interfaces to stream handling. The +builtin open function is defined in this module. + +At the top of the I/O hierarchy is the abstract base class IOBase. It +defines the basic interface to a stream. Note, however, that there is no +separation between reading and writing to streams; implementations are +allowed to raise an OSError if they do not support a given operation. + +Extending IOBase is RawIOBase which deals simply with the reading and +writing of raw bytes to a stream. 
FileIO subclasses RawIOBase to provide +an interface to OS files. + +BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its +subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer +streams that are readable, writable, and both respectively. +BufferedRandom provides a buffered interface to random access +streams. BytesIO is a simple stream of in-memory bytes. + +Another IOBase subclass, TextIOBase, deals with the encoding and decoding +of streams into text. TextIOWrapper, which extends it, is a buffered text +interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO +is an in-memory stream for text. + +Argument names are not part of the specification, and only the arguments +of open() are intended to be used as keyword arguments. + +data: + +DEFAULT_BUFFER_SIZE + + An int containing the default buffer size used by the module's buffered + I/O classes. +""" import builtins import codecs import sys @@ -18,69 +51,290 @@ else: open = builtins.open -def open_code(path: str) -> IO[bytes]: ... +def open_code(path: str) -> IO[bytes]: + """Opens the provided file with the intent to import the contents. + +This may perform extra validation beyond open(), but is otherwise interchangeable +with calling open(path, 'rb'). +""" BlockingIOError = builtins.BlockingIOError if sys.version_info >= (3, 12): @disjoint_base class _IOBase: - def __iter__(self) -> Iterator[bytes]: ... - def __next__(self) -> bytes: ... + """The abstract base class for all I/O classes. + +This class provides dummy implementations for many methods that +derived classes can override selectively; the default implementations +represent a file that cannot be read, written or seeked. + +Even though IOBase does not declare read, readinto, or write because +their signatures will vary, implementations and clients should +consider those methods part of the interface. Also, implementations +may raise UnsupportedOperation when operations they do not support are +called. + +The basic type used for binary data read from or written to a file is +bytes. Other bytes-like objects are accepted as method arguments too. +In some cases (such as readinto), a writable object is required. Text +I/O classes work with str data. + +Note that calling any method (except additional calls to close(), +which are ignored) on a closed stream should raise a ValueError. + +IOBase (and its subclasses) support the iterator protocol, meaning +that an IOBase object can be iterated over yielding the lines in a +stream. + +IOBase also supports the :keyword:`with` statement. In this example, +fp is closed after the suite of the with statement is complete: + +with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') +""" + def __iter__(self) -> Iterator[bytes]: + """Implement iter(self). +""" + def __next__(self) -> bytes: + """Implement next(self). +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def close(self) -> None: ... - def fileno(self) -> int: ... - def flush(self) -> None: ... - def isatty(self) -> bool: ... - def readable(self) -> bool: ... + def close(self) -> None: + """Flush and close the IO object. + +This method has no effect if the file is already closed. +""" + def fileno(self) -> int: + """Return underlying file descriptor if one exists. + +Raise OSError if the IO object does not use a file descriptor. +""" + def flush(self) -> None: + """Flush write buffers, if applicable. 
+ +This is not implemented for read-only and non-blocking streams. +""" + def isatty(self) -> bool: + """Return whether this is an 'interactive' stream. + +Return False if it can't be determined. +""" + def readable(self) -> bool: + """Return whether object was opened for reading. + +If False, read() will raise OSError. +""" read: Callable[..., Any] - def readlines(self, hint: int = -1, /) -> list[bytes]: ... - def seek(self, offset: int, whence: int = 0, /) -> int: ... - def seekable(self) -> bool: ... - def tell(self) -> int: ... - def truncate(self, size: int | None = None, /) -> int: ... - def writable(self) -> bool: ... + def readlines(self, hint: int = -1, /) -> list[bytes]: + """Return a list of lines from the stream. + +hint can be specified to control the number of lines read: no more +lines will be read if the total size (in bytes/characters) of all +lines so far exceeds hint. +""" + def seek(self, offset: int, whence: int = 0, /) -> int: + """Change the stream position to the given byte offset. + + offset + The stream position, relative to 'whence'. + whence + The relative position to seek from. + +The offset is interpreted relative to the position indicated by whence. +Values for whence are: + +* os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive +* os.SEEK_CUR or 1 -- current stream position; offset may be negative +* os.SEEK_END or 2 -- end of stream; offset is usually negative + +Return the new absolute position. +""" + def seekable(self) -> bool: + """Return whether object supports random access. + +If False, seek(), tell() and truncate() will raise OSError. +This method may need to do a test seek(). +""" + def tell(self) -> int: + """Return current stream position. +""" + def truncate(self, size: int | None = None, /) -> int: + """Truncate file to size bytes. + +File pointer is left unchanged. Size defaults to the current IO position +as reported by tell(). Return the new size. +""" + def writable(self) -> bool: + """Return whether object was opened for writing. + +If False, write() will raise OSError. +""" write: Callable[..., Any] - def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... - def readline(self, size: int | None = -1, /) -> bytes: ... - def __del__(self) -> None: ... + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: + """Write a list of lines to stream. + +Line separators are not added, so it is usual for each of the +lines provided to have a line separator at the end. +""" + def readline(self, size: int | None = -1, /) -> bytes: + """Read and return a line from the stream. + +If size is specified, at most size bytes will be read. + +The line terminator is always b'\\n' for binary files; for text +files, the newlines argument to open can be used to select the line +terminator(s) recognized. +""" + def __del__(self) -> None: + """Called when the instance is about to be destroyed. +""" @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented else: class _IOBase: - def __iter__(self) -> Iterator[bytes]: ... - def __next__(self) -> bytes: ... + """The abstract base class for all I/O classes. + +This class provides dummy implementations for many methods that +derived classes can override selectively; the default implementations +represent a file that cannot be read, written or seeked. + +Even though IOBase does not declare read, readinto, or write because +their signatures will vary, implementations and clients should +consider those methods part of the interface. 
Also, implementations +may raise UnsupportedOperation when operations they do not support are +called. + +The basic type used for binary data read from or written to a file is +bytes. Other bytes-like objects are accepted as method arguments too. +In some cases (such as readinto), a writable object is required. Text +I/O classes work with str data. + +Note that calling any method (except additional calls to close(), +which are ignored) on a closed stream should raise a ValueError. + +IOBase (and its subclasses) support the iterator protocol, meaning +that an IOBase object can be iterated over yielding the lines in a +stream. + +IOBase also supports the :keyword:`with` statement. In this example, +fp is closed after the suite of the with statement is complete: + +with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') +""" + def __iter__(self) -> Iterator[bytes]: + """Implement iter(self). +""" + def __next__(self) -> bytes: + """Implement next(self). +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def close(self) -> None: ... - def fileno(self) -> int: ... - def flush(self) -> None: ... - def isatty(self) -> bool: ... - def readable(self) -> bool: ... + def close(self) -> None: + """Flush and close the IO object. + +This method has no effect if the file is already closed. +""" + def fileno(self) -> int: + """Returns underlying file descriptor if one exists. + +OSError is raised if the IO object does not use a file descriptor. +""" + def flush(self) -> None: + """Flush write buffers, if applicable. + +This is not implemented for read-only and non-blocking streams. +""" + def isatty(self) -> bool: + """Return whether this is an 'interactive' stream. + +Return False if it can't be determined. +""" + def readable(self) -> bool: + """Return whether object was opened for reading. + +If False, read() will raise OSError. +""" read: Callable[..., Any] - def readlines(self, hint: int = -1, /) -> list[bytes]: ... - def seek(self, offset: int, whence: int = 0, /) -> int: ... - def seekable(self) -> bool: ... - def tell(self) -> int: ... - def truncate(self, size: int | None = None, /) -> int: ... - def writable(self) -> bool: ... + def readlines(self, hint: int = -1, /) -> list[bytes]: + """Return a list of lines from the stream. + +hint can be specified to control the number of lines read: no more +lines will be read if the total size (in bytes/characters) of all +lines so far exceeds hint. +""" + def seek(self, offset: int, whence: int = 0, /) -> int: + """Change the stream position to the given byte offset. + + offset + The stream position, relative to 'whence'. + whence + The relative position to seek from. + +The offset is interpreted relative to the position indicated by whence. +Values for whence are: + +* os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive +* os.SEEK_CUR or 1 -- current stream position; offset may be negative +* os.SEEK_END or 2 -- end of stream; offset is usually negative + +Return the new absolute position. +""" + def seekable(self) -> bool: + """Return whether object supports random access. + +If False, seek(), tell() and truncate() will raise OSError. +This method may need to do a test seek(). +""" + def tell(self) -> int: + """Return current stream position. +""" + def truncate(self, size: int | None = None, /) -> int: + """Truncate file to size bytes. + +File pointer is left unchanged. 
Size defaults to the current IO +position as reported by tell(). Returns the new size. +""" + def writable(self) -> bool: + """Return whether object was opened for writing. + +If False, write() will raise OSError. +""" write: Callable[..., Any] - def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ... - def readline(self, size: int | None = -1, /) -> bytes: ... + def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: + """Write a list of lines to stream. + +Line separators are not added, so it is usual for each of the +lines provided to have a line separator at the end. +""" + def readline(self, size: int | None = -1, /) -> bytes: + """Read and return a line from the stream. + +If size is specified, at most size bytes will be read. + +The line terminator is always b'\\n' for binary files; for text +files, the newlines argument to open can be used to select the line +terminator(s) recognized. +""" def __del__(self) -> None: ... @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented class _RawIOBase(_IOBase): - def readall(self) -> bytes: ... + """Base class for raw binary I/O. +""" + def readall(self) -> bytes: + """Read until EOF, using multiple read() call. +""" # The following methods can return None if the file is in non-blocking mode # and no data is available. def readinto(self, buffer: WriteableBuffer, /) -> int | MaybeNone: ... @@ -88,15 +342,78 @@ class _RawIOBase(_IOBase): def read(self, size: int = -1, /) -> bytes | MaybeNone: ... class _BufferedIOBase(_IOBase): - def detach(self) -> RawIOBase: ... + """Base class for buffered IO objects. + +The main difference with RawIOBase is that the read() method +supports omitting the size argument, and does not have a default +implementation that defers to readinto(). + +In addition, read(), readinto() and write() may raise +BlockingIOError if the underlying raw stream is in non-blocking +mode and not ready; unlike their raw counterparts, they will never +return None. + +A typical implementation should not inherit from a RawIOBase +implementation, but wrap one. +""" + def detach(self) -> RawIOBase: + """Disconnect this buffer from its underlying raw stream and return it. + +After the raw stream has been detached, the buffer is in an unusable +state. +""" def readinto(self, buffer: WriteableBuffer, /) -> int: ... - def write(self, buffer: ReadableBuffer, /) -> int: ... + def write(self, buffer: ReadableBuffer, /) -> int: + """Write buffer b to the IO stream. + +Return the number of bytes written, which is always +the length of b in bytes. + +Raise BlockingIOError if the buffer is full and the +underlying raw stream cannot accept more data at the moment. +""" def readinto1(self, buffer: WriteableBuffer, /) -> int: ... - def read(self, size: int | None = -1, /) -> bytes: ... - def read1(self, size: int = -1, /) -> bytes: ... + def read(self, size: int | None = -1, /) -> bytes: + """Read and return up to n bytes. + +If the size argument is omitted, None, or negative, read and +return all data until EOF. + +If the size argument is positive, and the underlying raw stream is +not 'interactive', multiple raw reads may be issued to satisfy +the byte count (unless EOF is reached first). +However, for interactive raw streams (as well as sockets and pipes), +at most one raw read will be issued, and a short result does not +imply that EOF is imminent. + +Return an empty bytes object on EOF. 
+ +Return None if the underlying raw stream was open in non-blocking +mode and no data is available at the moment. +""" + def read1(self, size: int = -1, /) -> bytes: + """Read and return up to size bytes, with at most one read() call to the underlying raw stream. + +Return an empty bytes object on EOF. +A short result does not imply that EOF is imminent. +""" @disjoint_base class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes + """Open a file. + +The mode can be 'r' (default), 'w', 'x' or 'a' for reading, +writing, exclusive creation or appending. The file will be created if it +doesn't exist when opened for writing or appending; it will be truncated +when opened for writing. A FileExistsError will be raised if it already +exists when opened for creating. Opening a file for creating implies +writing so this mode behaves in a similar way to 'w'.Add a '+' to the mode +to allow simultaneous reading and writing. A custom opener can be used by +passing a callable as *opener*. The underlying file descriptor for the file +object is then obtained by calling opener with (*name*, *flags*). +*opener* must return an open file descriptor (passing os.open as *opener* +results in functionality similar to passing None). +""" mode: str # The type of "name" equals the argument passed in to the constructor, # but that can make FileIO incompatible with other I/O types that assume @@ -106,22 +423,70 @@ class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompat self, file: FileDescriptorOrPath, mode: str = "r", closefd: bool = True, opener: _Opener | None = None ) -> None: ... @property - def closefd(self) -> bool: ... - def seek(self, pos: int, whence: int = 0, /) -> int: ... - def read(self, size: int | None = -1, /) -> bytes | MaybeNone: ... + def closefd(self) -> bool: + """True if the file descriptor will be closed by close(). +""" + def seek(self, pos: int, whence: int = 0, /) -> int: + """Move to new file position and return the file position. + +Argument offset is a byte count. Optional argument whence defaults to +SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values +are SEEK_CUR or 1 (move relative to current position, positive or negative), +and SEEK_END or 2 (move relative to end of file, usually negative, although +many platforms allow seeking beyond the end of a file). + +Note that not all file objects are seekable. +""" + def read(self, size: int | None = -1, /) -> bytes | MaybeNone: + """Read at most size bytes, returned as bytes. + +If size is less than 0, read all bytes in the file making multiple read calls. +See ``FileIO.readall``. + +Attempts to make only one system call, retrying only per PEP 475 (EINTR). This +means less data may be returned than requested. + +In non-blocking mode, returns None if no data is available. Return an empty +bytes object at EOF. +""" @disjoint_base class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes + """Buffered I/O implementation using an in-memory bytes buffer. +""" def __init__(self, initial_bytes: ReadableBuffer = b"") -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def getvalue(self) -> bytes: ... - def getbuffer(self) -> memoryview: ... - def read1(self, size: int | None = -1, /) -> bytes: ... 
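# --- Editor's illustrative aside (not part of the typeshed patch above) ---
# A small sketch of the BytesIO behavior described by the docstrings being added
# here (read1, seek, getbuffer, getvalue); uses only the public io module.
import io

buf = io.BytesIO(b"spam and eggs")
print(buf.read1(4))      # b'spam' -- at most one underlying read
buf.seek(0, 0)           # whence 0: absolute position from the start of the stream
view = buf.getbuffer()   # read-write memoryview over the internal buffer
view[0:4] = b"SPAM"
del view                 # release the export before the buffer is resized or closed
print(buf.getvalue())    # b'SPAM and eggs'
# --- end editor's aside ---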
- def readlines(self, size: int | None = None, /) -> list[bytes]: ... - def seek(self, pos: int, whence: int = 0, /) -> int: ... + def getvalue(self) -> bytes: + """Retrieve the entire contents of the BytesIO object. +""" + def getbuffer(self) -> memoryview: + """Get a read-write view over the contents of the BytesIO object. +""" + def read1(self, size: int | None = -1, /) -> bytes: + """Read at most size bytes, returned as a bytes object. + +If the size argument is negative or omitted, read until EOF is reached. +Return an empty bytes object at EOF. +""" + def readlines(self, size: int | None = None, /) -> list[bytes]: + """List of bytes objects, each a line from the file. + +Call readline() repeatedly and return a list of the lines so read. +The optional size argument, if given, is an approximate bound on the +total number of bytes in the lines returned. +""" + def seek(self, pos: int, whence: int = 0, /) -> int: + """Change stream position. + +Seek to byte offset pos relative to position indicated by whence: + 0 Start of stream (the default). pos should be >= 0; + 1 Current position - pos may be negative; + 2 End of stream - pos usually negative. +Returns the new absolute position. +""" @type_check_only class _BufferedReaderStream(Protocol): @@ -153,6 +518,8 @@ _BufferedReaderStreamT = TypeVar("_BufferedReaderStreamT", bound=_BufferedReader @disjoint_base class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_BufferedReaderStreamT]): # type: ignore[misc] # incompatible definitions of methods in the base classes + """Create a new buffered reader using the given readable raw IO object. +""" raw: _BufferedReaderStreamT if sys.version_info >= (3, 14): def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 131072) -> None: ... @@ -165,6 +532,12 @@ class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_Buffere @disjoint_base class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes + """A buffer for a writeable sequential RawIO object. + +The constructor creates a BufferedWriter for the given writeable raw +stream. If the buffer_size is not given, it defaults to +DEFAULT_BUFFER_SIZE. +""" raw: RawIOBase if sys.version_info >= (3, 14): def __init__(self, raw: RawIOBase, buffer_size: int = 131072) -> None: ... @@ -177,6 +550,12 @@ class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore @disjoint_base class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes + """A buffered interface to random access streams. + +The constructor creates a reader and writer for a seekable stream, +raw, given in the first argument. If the buffer_size is omitted it +defaults to DEFAULT_BUFFER_SIZE. +""" mode: str name: Any raw: RawIOBase @@ -191,6 +570,16 @@ class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore @disjoint_base class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStreamT]): + """A buffered reader and writer object together. + +A buffered reader object and buffered writer object put together to +form a sequential IO object that can read and write. This is typically +used with a socket or two-way pipe. + +reader and writer are RawIOBase objects that are readable and +writeable respectively. If the buffer_size is omitted it defaults to +DEFAULT_BUFFER_SIZE. 
+""" if sys.version_info >= (3, 14): def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 131072, /) -> None: ... else: @@ -199,17 +588,57 @@ class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStr def peek(self, size: int = 0, /) -> bytes: ... class _TextIOBase(_IOBase): + """Base class for text I/O. + +This class provides a character and line based interface to stream +I/O. There is no readinto method because Python's character strings +are immutable. +""" encoding: str errors: str | None newlines: str | tuple[str, ...] | None - def __iter__(self) -> Iterator[str]: ... # type: ignore[override] - def __next__(self) -> str: ... # type: ignore[override] - def detach(self) -> BinaryIO: ... - def write(self, s: str, /) -> int: ... - def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override] - def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] - def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override] - def read(self, size: int | None = -1, /) -> str: ... + def __iter__(self) -> Iterator[str]: # type: ignore[override] + """Implement iter(self). +""" + def __next__(self) -> str: # type: ignore[override] + """Implement next(self). +""" + def detach(self) -> BinaryIO: + """Separate the underlying buffer from the TextIOBase and return it. + +After the underlying buffer has been detached, the TextIO is in an unusable state. +""" + def write(self, s: str, /) -> int: + """Write string s to stream. + +Return the number of characters written +(which is always equal to the length of the string). +""" + def writelines(self, lines: Iterable[str], /) -> None: # type: ignore[override] + """Write a list of lines to stream. + +Line separators are not added, so it is usual for each of the +lines provided to have a line separator at the end. +""" + def readline(self, size: int = -1, /) -> str: # type: ignore[override] + """Read until newline or EOF. + +Return an empty string if EOF is hit immediately. +If size is specified, at most size characters will be read. +""" + def readlines(self, hint: int = -1, /) -> list[str]: # type: ignore[override] + """Return a list of lines from the stream. + +hint can be specified to control the number of lines read: no more +lines will be read if the total size (in bytes/characters) of all +lines so far exceeds hint. +""" + def read(self, size: int | None = -1, /) -> str: + """Read at most size characters from stream. + +Read from underlying buffer until we have size characters or we hit EOF. +If size is negative or omitted, read until EOF. +""" @type_check_only class _WrappedBuffer(Protocol): @@ -238,6 +667,35 @@ _BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffe @disjoint_base class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes + """Character and line based layer over a BufferedIOBase object, buffer. + +encoding gives the name of the encoding that the stream will be +decoded or encoded with. It defaults to locale.getencoding(). + +errors determines the strictness of encoding and decoding (see +help(codecs.Codec) or the documentation for codecs.register) and +defaults to "strict". + +newline controls how line endings are handled. It can be None, '', +'\\n', '\\r', and '\\r\\n'. It works as follows: + +* On input, if newline is None, universal newlines mode is + enabled. 
Lines in the input can end in '\\n', '\\r', or '\\r\\n', and + these are translated into '\\n' before being returned to the + caller. If it is '', universal newline mode is enabled, but line + endings are returned to the caller untranslated. If it has any of + the other legal values, input lines are only terminated by the given + string, and the line ending is returned to the caller untranslated. + +* On output, if newline is None, any '\\n' characters written are + translated to the system default line separator, os.linesep. If + newline is '' or '\\n', no translation takes place. If newline is any + of the other legal values, any '\\n' characters written are translated + to the given string. + +If line_buffering is True, a call to flush is implied when a call to +write contains a newline character. +""" def __init__( self, buffer: _BufferT_co, @@ -262,30 +720,83 @@ class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # t newline: str | None = None, line_buffering: bool | None = None, write_through: bool | None = None, - ) -> None: ... + ) -> None: + """Reconfigure the text stream with new parameters. + +This also does an implicit stream flush. +""" def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] # Equals the "buffer" argument passed in to the constructor. def detach(self) -> _BufferT_co: ... # type: ignore[override] # TextIOWrapper's version of seek only supports a limited subset of # operations. - def seek(self, cookie: int, whence: int = 0, /) -> int: ... + def seek(self, cookie: int, whence: int = 0, /) -> int: + """Set the stream position, and return the new stream position. + + cookie + Zero or an opaque number returned by tell(). + whence + The relative position to seek from. + +Four operations are supported, given by the following argument +combinations: + +- seek(0, SEEK_SET): Rewind to the start of the stream. +- seek(cookie, SEEK_SET): Restore a previous position; + 'cookie' must be a number returned by tell(). +- seek(0, SEEK_END): Fast-forward to the end of the stream. +- seek(0, SEEK_CUR): Leave the current stream position unchanged. + +Any other argument combinations are invalid, +and may raise exceptions. +""" def truncate(self, pos: int | None = None, /) -> int: ... @disjoint_base class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes + """Text I/O implementation using an in-memory buffer. + +The initial_value argument sets the value of object. The newline +argument is like the one of TextIOWrapper's constructor. +""" def __init__(self, initial_value: str | None = "", newline: str | None = "\n") -> None: ... # StringIO does not contain a "name" field. This workaround is necessary # to allow StringIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def getvalue(self) -> str: ... + def getvalue(self) -> str: + """Retrieve the entire contents of the object. +""" @property def line_buffering(self) -> bool: ... - def seek(self, pos: int, whence: int = 0, /) -> int: ... - def truncate(self, pos: int | None = None, /) -> int: ... + def seek(self, pos: int, whence: int = 0, /) -> int: + """Change stream position. + +Seek to character offset pos relative to position indicated by whence: + 0 Start of stream (the default). pos should be >= 0; + 1 Current position - pos must be 0; + 2 End of stream - pos must be 0. +Returns the new absolute position. 
+""" + def truncate(self, pos: int | None = None, /) -> int: + """Truncate size to pos. + +The pos argument defaults to the current file position, as +returned by tell(). The current file position is unchanged. +Returns the new absolute position. +""" @disjoint_base class IncrementalNewlineDecoder: + """Codec used when reading a file in universal newlines mode. + +It wraps another incremental decoder, translating \\r\\n and \\r into \\n. +It also records the types of newlines encountered. When used with +translate=False, it ensures that the newline sequence is returned in +one piece. When used with decoder=None, it expects unicode strings as +decode input and translates newlines without first invoking an external +decoder. +""" def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = "strict") -> None: ... def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... @property @@ -296,6 +807,18 @@ class IncrementalNewlineDecoder: if sys.version_info >= (3, 10): @overload - def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: ... + def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: + """A helper function to choose the text encoding. + +When encoding is not None, this function returns it. +Otherwise, this function returns the default text encoding +(i.e. "locale" or "utf-8" depends on UTF-8 mode). + +This function emits an EncodingWarning if encoding is None and +sys.flags.warn_default_encoding is true. + +This can be used in APIs with an encoding=None parameter. +However, please consider using encoding="utf-8" for new APIs. +""" @overload def text_encoding(encoding: _T, stacklevel: int = 2, /) -> _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi index 4a77e5be594ab..628f898923d3a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi @@ -1,25 +1,45 @@ +"""json speedups +""" from collections.abc import Callable from typing import Any, final from typing_extensions import Self @final class make_encoder: + """Encoder(markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan) +""" @property - def sort_keys(self) -> bool: ... + def sort_keys(self) -> bool: + """sort_keys +""" @property - def skipkeys(self) -> bool: ... + def skipkeys(self) -> bool: + """skipkeys +""" @property - def key_separator(self) -> str: ... + def key_separator(self) -> str: + """key_separator +""" @property - def indent(self) -> str | None: ... + def indent(self) -> str | None: + """indent +""" @property - def markers(self) -> dict[int, Any] | None: ... + def markers(self) -> dict[int, Any] | None: + """markers +""" @property - def default(self) -> Callable[[Any], Any]: ... + def default(self) -> Callable[[Any], Any]: + """default +""" @property - def encoder(self) -> Callable[[str], str]: ... + def encoder(self) -> Callable[[str], str]: + """encoder +""" @property - def item_separator(self) -> str: ... + def item_separator(self) -> str: + """item_separator +""" def __new__( cls, markers: dict[int, Any] | None, @@ -32,10 +52,14 @@ class make_encoder: skipkeys: bool, allow_nan: bool, ) -> Self: ... - def __call__(self, obj: object, _current_indent_level: int) -> Any: ... + def __call__(self, obj: object, _current_indent_level: int) -> Any: + """Call self as a function. 
+""" @final class make_scanner: + """JSON scanner object +""" object_hook: Any object_pairs_hook: Any parse_int: Any @@ -44,8 +68,29 @@ class make_scanner: strict: bool # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __new__(cls, context: make_scanner) -> Self: ... - def __call__(self, string: str, index: int) -> tuple[Any, int]: ... + def __call__(self, string: str, index: int) -> tuple[Any, int]: + """Call self as a function. +""" -def encode_basestring(s: str, /) -> str: ... -def encode_basestring_ascii(s: str, /) -> str: ... -def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: ... +def encode_basestring(s: str, /) -> str: + """encode_basestring(string) -> string + +Return a JSON representation of a Python string +""" +def encode_basestring_ascii(s: str, /) -> str: + """encode_basestring_ascii(string) -> string + +Return an ASCII-only JSON representation of a Python string +""" +def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: + """scanstring(string, end, strict=True) -> (string, end) + +Scan the string s for a JSON string. End is the index of the +character in s after the quote that started the JSON string. +Unescapes all valid JSON string escape sequences and raises ValueError +on attempt to decode an invalid string. If strict is False then literal +control characters are allowed in the string. + +Returns a tuple of the decoded string and the index of the character in s +after the end quote. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi index ccce7a0d9d70f..f140761483a01 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi @@ -1,3 +1,5 @@ +"""Support for POSIX locales. +""" import sys from _typeshed import StrPath from typing import Final, Literal, TypedDict, type_check_only @@ -31,14 +33,24 @@ LC_NUMERIC: Final[int] LC_ALL: Final[int] CHAR_MAX: Final = 127 -def setlocale(category: int, locale: str | None = None, /) -> str: ... -def localeconv() -> _LocaleConv: ... +def setlocale(category: int, locale: str | None = None, /) -> str: + """Activates/queries locale processing. +""" +def localeconv() -> _LocaleConv: + """Returns numeric and monetary locale-specific parameters. +""" if sys.version_info >= (3, 11): - def getencoding() -> str: ... + def getencoding() -> str: + """Get the current locale encoding. +""" -def strcoll(os1: str, os2: str, /) -> int: ... -def strxfrm(string: str, /) -> str: ... +def strcoll(os1: str, os2: str, /) -> int: + """Compares two strings according to the locale. +""" +def strxfrm(string: str, /) -> str: + """Return a string that can be used as a key for locale-aware comparisons. +""" # native gettext functions # https://docs.python.org/3/library/locale.html#access-to-message-catalogs @@ -108,14 +120,32 @@ if sys.platform != "win32": CRNCYSTR: Final[int] ALT_DIGITS: Final[int] - def nl_langinfo(key: int, /) -> str: ... + def nl_langinfo(key: int, /) -> str: + """Return the value for the locale information associated with key. +""" # This is dependent on `libintl.h` which is a part of `gettext` # system dependency. These functions might be missing. # But, we always say that they are present. - def gettext(msg: str, /) -> str: ... - def dgettext(domain: str | None, msg: str, /) -> str: ... - def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ... - def textdomain(domain: str | None, /) -> str: ... 
- def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ... - def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ... + def gettext(msg: str, /) -> str: + """gettext(msg) -> string + +Return translation of msg. +""" + def dgettext(domain: str | None, msg: str, /) -> str: + """dgettext(domain, msg) -> string + +Return translation of msg in domain. +""" + def dcgettext(domain: str | None, msg: str, category: int, /) -> str: + """Return translation of msg in domain and category. +""" + def textdomain(domain: str | None, /) -> str: + """Set the C library's textdmain to domain, returning the new domain. +""" + def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: + """Bind the C library's domain to dir. +""" + def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: + """Bind the C library's domain to codeset. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi index 4f6d98b8ffb61..b2875fd9a5f22 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi @@ -1,3 +1,5 @@ +"""Fast profiler +""" import sys from _typeshed import structseq from collections.abc import Callable @@ -7,13 +9,57 @@ from typing_extensions import disjoint_base @disjoint_base class Profiler: + """Build a profiler object using the specified timer function. + +The default timer is a fast built-in one based on real time. +For custom timer functions returning integers, 'timeunit' can +be a float specifying a scale (that is, how long each integer unit +is, in seconds). +""" def __init__( self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True ) -> None: ... - def getstats(self) -> list[profiler_entry]: ... - def enable(self, subcalls: bool = True, builtins: bool = True) -> None: ... - def disable(self) -> None: ... - def clear(self) -> None: ... + def getstats(self) -> list[profiler_entry]: + """list of profiler_entry objects. + +getstats() -> list of profiler_entry objects + +Return all information collected by the profiler. +Each profiler_entry is a tuple-like object with the +following attributes: + + code code object + callcount how many times this was called + reccallcount how many times called recursively + totaltime total time in this entry + inlinetime inline time in this entry (not in subcalls) + calls details of the calls + +The calls attribute is either None or a list of +profiler_subentry objects: + + code called code object + callcount how many times this is called + reccallcount how many times this is called recursively + totaltime total time spent in this call + inlinetime inline time (not in further subcalls) +""" + def enable(self, subcalls: bool = True, builtins: bool = True) -> None: + """Start collecting profiling information. + + subcalls + If True, also records for each function + statistics separated according to its current caller. + builtins + If True, records the time spent in + built-in functions separately from their caller. +""" + def disable(self) -> None: + """Stop collecting profiling information. +""" + def clear(self) -> None: + """Clear all profiling information collected so far. 
+""" @final class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi index b38dce9fadedf..1eaea87686c57 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi @@ -37,23 +37,92 @@ PRESET_EXTREME: Final[int] # v big number @final class LZMADecompressor: + """Create a decompressor object for decompressing data incrementally. + + format + Specifies the container format of the input stream. If this is + FORMAT_AUTO (the default), the decompressor will automatically detect + whether the input is FORMAT_XZ or FORMAT_ALONE. Streams created with + FORMAT_RAW cannot be autodetected. + memlimit + Limit the amount of memory used by the decompressor. This will cause + decompression to fail if the input cannot be decompressed within the + given limit. + filters + A custom filter chain. This argument is required for FORMAT_RAW, and + not accepted with any other format. When provided, this should be a + sequence of dicts, each indicating the ID and options for a single + filter. + +For one-shot decompression, use the decompress() function instead. +""" if sys.version_info >= (3, 12): def __new__(cls, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> Self: ... else: def __init__(self, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> None: ... - def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: + """Decompress *data*, returning uncompressed data as bytes. + +If *max_length* is nonnegative, returns at most *max_length* bytes of +decompressed data. If this limit is reached and further output can be +produced, *self.needs_input* will be set to ``False``. In this case, the next +call to *decompress()* may provide *data* as b'' to obtain more of the output. + +If all of the input data was decompressed and returned (either because this +was less than *max_length* bytes, or because *max_length* was negative), +*self.needs_input* will be set to True. + +Attempting to decompress data after the end of stream is reached raises an +EOFError. Any data found after the end of the stream is ignored and saved in +the unused_data attribute. +""" @property - def check(self) -> int: ... + def check(self) -> int: + """ID of the integrity check used by the input stream. +""" @property - def eof(self) -> bool: ... + def eof(self) -> bool: + """True if the end-of-stream marker has been reached. +""" @property - def unused_data(self) -> bytes: ... + def unused_data(self) -> bytes: + """Data found after the end of the compressed stream. +""" @property - def needs_input(self) -> bool: ... + def needs_input(self) -> bool: + """True if more input is needed before more decompressed data can be produced. +""" @final class LZMACompressor: + """LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None) + +Create a compressor object for compressing data incrementally. + +format specifies the container format to use for the output. This can +be FORMAT_XZ (default), FORMAT_ALONE, or FORMAT_RAW. + +check specifies the integrity check to use. For FORMAT_XZ, the default +is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not support integrity +checks; for these formats, check must be omitted, or be CHECK_NONE. 
+ +The settings used by the compressor can be specified either as a +preset compression level (with the 'preset' argument), or in detail +as a custom filter chain (with the 'filters' argument). For FORMAT_XZ +and FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset +level. For FORMAT_RAW, the caller must always specify a filter chain; +the raw compressor does not support preset compression levels. + +preset (if provided) should be an integer in the range 0-9, optionally +OR-ed with the constant PRESET_EXTREME. + +filters (if provided) should be a sequence of dicts. Each dict should +have an entry for "id" indicating the ID of the filter, plus +additional entries for options to the filter. + +For one-shot compression, use the compress() function instead. +""" if sys.version_info >= (3, 12): def __new__( cls, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None @@ -63,9 +132,28 @@ class LZMACompressor: self, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None ) -> None: ... - def compress(self, data: ReadableBuffer, /) -> bytes: ... - def flush(self) -> bytes: ... + def compress(self, data: ReadableBuffer, /) -> bytes: + """Provide data to the compressor object. + +Returns a chunk of compressed data if possible, or b'' otherwise. + +When you have finished providing data to the compressor, call the +flush() method to finish the compression process. +""" + def flush(self) -> bytes: + """Finish the compression process. + +Returns the compressed data left in internal buffers. + +The compressor object may not be used after this method is called. +""" + +class LZMAError(Exception): + """Call to liblzma failed. +""" -class LZMAError(Exception): ... +def is_check_supported(check_id: int, /) -> bool: + """Test whether the given integrity check is supported. -def is_check_supported(check_id: int, /) -> bool: ... +Always returns True for CHECK_NONE and CHECK_CRC32. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi index 597bd09b700b0..6348d2d98d298 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi @@ -1,9 +1,20 @@ +"""Shared support for scanning document type declarations in HTML and XHTML. + +This module is used as a foundation for the html.parser module. It has no +documented public API and should not be used directly. + +""" import sys from typing import Any class ParserBase: + """Parser base class which provides some common support methods used +by the SGML/HTML and XHTML parsers. +""" def reset(self) -> None: ... - def getpos(self) -> tuple[int, int]: ... + def getpos(self) -> tuple[int, int]: + """Return current line number and offset. +""" def unknown_decl(self, data: str) -> None: ... def parse_comment(self, i: int, report: bool = True) -> int: ... # undocumented def parse_declaration(self, i: int) -> int: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi index cb1c1bcfc4aab..893772a615317 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi @@ -1,3 +1,11 @@ +"""Operator interface. + +This module exports a set of functions implemented in C corresponding +to the intrinsic operators of Python. For example, operator.add(x, y) +is equivalent to the expression x+y. 
The function names are those +used for special methods; variants without leading and trailing +'__' are also provided for convenience. +""" import sys from _typeshed import SupportsGetItem from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence @@ -47,76 +55,202 @@ class _SupportsPos(Protocol[_T_co]): def __pos__(self) -> _T_co: ... # All four comparison functions must have the same signature, or we get false-positive errors -def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... -def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... -def eq(a: object, b: object, /) -> Any: ... -def ne(a: object, b: object, /) -> Any: ... -def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... -def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: ... -def not_(a: object, /) -> bool: ... -def truth(a: object, /) -> bool: ... -def is_(a: object, b: object, /) -> bool: ... -def is_not(a: object, b: object, /) -> bool: ... -def abs(a: SupportsAbs[_T], /) -> _T: ... -def add(a: Any, b: Any, /) -> Any: ... -def and_(a: Any, b: Any, /) -> Any: ... -def floordiv(a: Any, b: Any, /) -> Any: ... -def index(a: SupportsIndex, /) -> int: ... -def inv(a: _SupportsInversion[_T_co], /) -> _T_co: ... -def invert(a: _SupportsInversion[_T_co], /) -> _T_co: ... -def lshift(a: Any, b: Any, /) -> Any: ... -def mod(a: Any, b: Any, /) -> Any: ... -def mul(a: Any, b: Any, /) -> Any: ... -def matmul(a: Any, b: Any, /) -> Any: ... -def neg(a: _SupportsNeg[_T_co], /) -> _T_co: ... -def or_(a: Any, b: Any, /) -> Any: ... -def pos(a: _SupportsPos[_T_co], /) -> _T_co: ... -def pow(a: Any, b: Any, /) -> Any: ... -def rshift(a: Any, b: Any, /) -> Any: ... -def sub(a: Any, b: Any, /) -> Any: ... -def truediv(a: Any, b: Any, /) -> Any: ... -def xor(a: Any, b: Any, /) -> Any: ... -def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: ... -def contains(a: Container[object], b: object, /) -> bool: ... -def countOf(a: Iterable[object], b: object, /) -> int: ... +def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: + """Same as a < b. +""" +def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: + """Same as a <= b. +""" +def eq(a: object, b: object, /) -> Any: + """Same as a == b. +""" +def ne(a: object, b: object, /) -> Any: + """Same as a != b. +""" +def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: + """Same as a >= b. +""" +def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: + """Same as a > b. +""" +def not_(a: object, /) -> bool: + """Same as not a. +""" +def truth(a: object, /) -> bool: + """Return True if a is true, False otherwise. +""" +def is_(a: object, b: object, /) -> bool: + """Same as a is b. +""" +def is_not(a: object, b: object, /) -> bool: + """Same as a is not b. +""" +def abs(a: SupportsAbs[_T], /) -> _T: + """Same as abs(a). +""" +def add(a: Any, b: Any, /) -> Any: + """Same as a + b. +""" +def and_(a: Any, b: Any, /) -> Any: + """Same as a & b. +""" +def floordiv(a: Any, b: Any, /) -> Any: + """Same as a // b. +""" +def index(a: SupportsIndex, /) -> int: + """Same as a.__index__() +""" +def inv(a: _SupportsInversion[_T_co], /) -> _T_co: + """Same as ~a. +""" +def invert(a: _SupportsInversion[_T_co], /) -> _T_co: + """Same as ~a. +""" +def lshift(a: Any, b: Any, /) -> Any: + """Same as a << b. +""" +def mod(a: Any, b: Any, /) -> Any: + """Same as a % b. +""" +def mul(a: Any, b: Any, /) -> Any: + """Same as a * b. 
+""" +def matmul(a: Any, b: Any, /) -> Any: + """Same as a @ b. +""" +def neg(a: _SupportsNeg[_T_co], /) -> _T_co: + """Same as -a. +""" +def or_(a: Any, b: Any, /) -> Any: + """Same as a | b. +""" +def pos(a: _SupportsPos[_T_co], /) -> _T_co: + """Same as +a. +""" +def pow(a: Any, b: Any, /) -> Any: + """Same as a ** b. +""" +def rshift(a: Any, b: Any, /) -> Any: + """Same as a >> b. +""" +def sub(a: Any, b: Any, /) -> Any: + """Same as a - b. +""" +def truediv(a: Any, b: Any, /) -> Any: + """Same as a / b. +""" +def xor(a: Any, b: Any, /) -> Any: + """Same as a ^ b. +""" +def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: + """Same as a + b, for a and b sequences. +""" +def contains(a: Container[object], b: object, /) -> bool: + """Same as b in a (note reversed operands). +""" +def countOf(a: Iterable[object], b: object, /) -> int: + """Return the number of items in a which are, or which equal, b. +""" @overload -def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: ... +def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: + """Same as del a[b]. +""" @overload def delitem(a: MutableSequence[Any], b: slice, /) -> None: ... @overload def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ... @overload -def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: ... +def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: + """Same as a[b]. +""" @overload def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ... -def indexOf(a: Iterable[_T], b: _T, /) -> int: ... +def indexOf(a: Iterable[_T], b: _T, /) -> int: + """Return the first index of b in a. +""" @overload -def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: ... +def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: + """Same as a[b] = c. +""" @overload def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ... @overload def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ... -def length_hint(obj: object, default: int = 0, /) -> int: ... -def iadd(a: Any, b: Any, /) -> Any: ... -def iand(a: Any, b: Any, /) -> Any: ... -def iconcat(a: Any, b: Any, /) -> Any: ... -def ifloordiv(a: Any, b: Any, /) -> Any: ... -def ilshift(a: Any, b: Any, /) -> Any: ... -def imod(a: Any, b: Any, /) -> Any: ... -def imul(a: Any, b: Any, /) -> Any: ... -def imatmul(a: Any, b: Any, /) -> Any: ... -def ior(a: Any, b: Any, /) -> Any: ... -def ipow(a: Any, b: Any, /) -> Any: ... -def irshift(a: Any, b: Any, /) -> Any: ... -def isub(a: Any, b: Any, /) -> Any: ... -def itruediv(a: Any, b: Any, /) -> Any: ... -def ixor(a: Any, b: Any, /) -> Any: ... +def length_hint(obj: object, default: int = 0, /) -> int: + """Return an estimate of the number of items in obj. + +This is useful for presizing containers when building from an iterable. + +If the object supports len(), the result will be exact. +Otherwise, it may over- or under-estimate by an arbitrary amount. +The result will be an integer >= 0. +""" +def iadd(a: Any, b: Any, /) -> Any: + """Same as a += b. +""" +def iand(a: Any, b: Any, /) -> Any: + """Same as a &= b. +""" +def iconcat(a: Any, b: Any, /) -> Any: + """Same as a += b, for a and b sequences. +""" +def ifloordiv(a: Any, b: Any, /) -> Any: + """Same as a //= b. +""" +def ilshift(a: Any, b: Any, /) -> Any: + """Same as a <<= b. +""" +def imod(a: Any, b: Any, /) -> Any: + """Same as a %= b. +""" +def imul(a: Any, b: Any, /) -> Any: + """Same as a *= b. +""" +def imatmul(a: Any, b: Any, /) -> Any: + """Same as a @= b. 
+""" +def ior(a: Any, b: Any, /) -> Any: + """Same as a |= b. +""" +def ipow(a: Any, b: Any, /) -> Any: + """Same as a **= b. +""" +def irshift(a: Any, b: Any, /) -> Any: + """Same as a >>= b. +""" +def isub(a: Any, b: Any, /) -> Any: + """Same as a -= b. +""" +def itruediv(a: Any, b: Any, /) -> Any: + """Same as a /= b. +""" +def ixor(a: Any, b: Any, /) -> Any: + """Same as a ^= b. +""" if sys.version_info >= (3, 11): - def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: + """Same as obj(*args, **kwargs). +""" + +def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: + """Return 'a == b'. + +This function uses an approach designed to prevent +timing analysis, making it appropriate for cryptography. + +a and b must both be of the same type: either str (ASCII only), +or any bytes-like object. -def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... +Note: If a and b are of different lengths, or if an error occurs, +a timing attack could theoretically reveal information about the +types and lengths of a and b--but not their values. +""" if sys.version_info >= (3, 14): - def is_none(a: object, /) -> TypeIs[None]: ... - def is_not_none(a: _T | None, /) -> TypeIs[_T]: ... + def is_none(a: object, /) -> TypeIs[None]: + """Same as a is None. +""" + def is_not_none(a: _T | None, /) -> TypeIs[_T]: + """Same as a is not None. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi index fb00e6986dd06..caff65eead678 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi @@ -1,3 +1,5 @@ +"""Shared OS X support functions. +""" from collections.abc import Iterable, Sequence from typing import Final, TypeVar @@ -11,24 +13,90 @@ _UNIVERSAL_CONFIG_VARS: Final[tuple[str, ...]] # undocumented _COMPILER_CONFIG_VARS: Final[tuple[str, ...]] # undocumented _INITPRE: Final[str] # undocumented -def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented -def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented -def _find_build_tool(toolname: str) -> str: ... # undocumented +def _find_executable(executable: str, path: str | None = None) -> str | None: # undocumented + """Tries to find 'executable' in the directories listed in 'path'. + +A string listing directories separated by 'os.pathsep'; defaults to +os.environ['PATH']. Returns the complete filename or None if not found. +""" +def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: # undocumented + """Output from successful command execution or None +""" +def _find_build_tool(toolname: str) -> str: # undocumented + """Find a build tool on current path or using xcrun +""" _SYSTEM_VERSION: Final[str | None] # undocumented -def _get_system_version() -> str: ... # undocumented -def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented -def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ... # undocumented -def _supports_universal_builds() -> bool: ... # undocumented -def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented -def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ... 
# undocumented -def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented -def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented -def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented -def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ... -def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ... -def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... +def _get_system_version() -> str: # undocumented + """Return the OS X system version as a string +""" +def _remove_original_values(_config_vars: dict[str, str]) -> None: # undocumented + """Remove original unmodified values for testing +""" +def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: # undocumented + """Save modified and original unmodified value of configuration var +""" +def _supports_universal_builds() -> bool: # undocumented + """Returns True if universal builds are supported on this system +""" +def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented + """Find appropriate C compiler for extension module builds +""" +def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented + """Remove all universal build arguments from config vars +""" +def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented + """Remove any unsupported archs from config vars +""" +def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented + """Allow override of all archs with ARCHFLAGS env var +""" +def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented + """Remove references to any SDKs not available +""" +def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: + """ +This function will strip '-isysroot PATH' and '-arch ARCH' from the +compile flags if the user has specified one them in extra_compile_flags. + +This is needed because '-arch ARCH' adds another architecture to the +build, without a way to remove an architecture. Furthermore GCC will +barf if multiple '-isysroot' arguments are present. +""" +def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: + """Customize Python build configuration variables. + +Called internally from sysconfig with a mutable mapping +containing name/value pairs parsed from the configured +makefile used to build this interpreter. Returns +the mapping updated as needed to reflect the environment +in which the interpreter is running; in the case of +a Python from a binary installer, the installed +environment may be very different from the build +environment, i.e. different OS levels, different +built tools, different available CPU architectures. + +This customization is performed whenever +distutils.sysconfig.get_config_vars() is first +called. It may be used in environments where no +compilers are present, i.e. when installing pure +Python dists. Customization of compiler paths +and detection of unavailable archs is deferred +until the first extension module build is +requested (in distutils.sysconfig.customize_compiler). + +Currently called from distutils.sysconfig +""" +def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: + """Customize compiler path and configuration variables. 
+ +This customization is performed when the first +extension module build is requested +in distutils.sysconfig.customize_compiler. +""" def get_platform_osx( _config_vars: dict[str, str], osname: _T, release: _K, machine: _V -) -> tuple[str | _T, str | _K, str | _V]: ... +) -> tuple[str | _T, str | _K, str | _V]: + """Filter values for get_platform() +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi index 544f787172d6f..ef86bcf806fa4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi @@ -1,3 +1,5 @@ +"""Optimized C implementation for the Python pickle module. +""" from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from pickle import PickleBuffer as PickleBuffer @@ -26,10 +28,56 @@ def dump( *, fix_imports: bool = True, buffer_callback: _BufferCallback = None, -) -> None: ... +) -> None: + """Write a pickled representation of obj to the open file object file. + +This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may +be more efficient. + +The optional *protocol* argument tells the pickler to use the given +protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default +protocol is 5. It was introduced in Python 3.8, and is incompatible +with previous versions. + +Specifying a negative protocol version selects the highest protocol +version supported. The higher the protocol used, the more recent the +version of Python needed to read the pickle produced. + +The *file* argument must have a write() method that accepts a single +bytes argument. It can thus be a file object opened for binary +writing, an io.BytesIO instance, or any other custom object that meets +this interface. + +If *fix_imports* is True and protocol is less than 3, pickle will try +to map the new Python 3 names to the old module names used in Python +2, so that the pickle data stream is readable with Python 2. + +If *buffer_callback* is None (the default), buffer views are serialized +into *file* as part of the pickle stream. It is an error if +*buffer_callback* is not None and *protocol* is None or smaller than 5. +""" def dumps( obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None -) -> bytes: ... +) -> bytes: + """Return the pickled representation of the object as a bytes object. + +The optional *protocol* argument tells the pickler to use the given +protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default +protocol is 5. It was introduced in Python 3.8, and is incompatible +with previous versions. + +Specifying a negative protocol version selects the highest protocol +version supported. The higher the protocol used, the more recent the +version of Python needed to read the pickle produced. + +If *fix_imports* is True and *protocol* is less than 3, pickle will +try to map the new Python 3 names to the old module names used in +Python 2, so that the pickle data stream is readable with Python 2. + +If *buffer_callback* is None (the default), buffer views are serialized +into *file* as part of the pickle stream. It is an error if +*buffer_callback* is not None and *protocol* is None or smaller than 5. +""" def load( file: _ReadableFileobj, *, @@ -37,7 +85,31 @@ def load( encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = (), -) -> Any: ... 
+) -> Any: + """Read and return an object from the pickle data stored in a file. + +This is equivalent to ``Unpickler(file).load()``, but may be more +efficient. + +The protocol version of the pickle is detected automatically, so no +protocol argument is needed. Bytes past the pickled object's +representation are ignored. + +The argument *file* must have two methods, a read() method that takes +an integer argument, and a readline() method that requires no +arguments. Both methods should return bytes. Thus *file* can be a +binary file object opened for reading, an io.BytesIO object, or any +other custom object that meets this interface. + +Optional keyword arguments are *fix_imports*, *encoding* and *errors*, +which are used to control compatibility support for pickle stream +generated by Python 2. If *fix_imports* is True, pickle will try to +map the old Python 2 names to the new names used in Python 3. The +*encoding* and *errors* tell pickle how to decode 8-bit string +instances pickled by Python 2; these default to 'ASCII' and 'strict', +respectively. The *encoding* can be 'bytes' to read these 8-bit +string instances as bytes objects. +""" def loads( data: ReadableBuffer, /, @@ -46,7 +118,22 @@ def loads( encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = (), -) -> Any: ... +) -> Any: + """Read and return an object from the given pickle data. + +The protocol version of the pickle is detected automatically, so no +protocol argument is needed. Bytes past the pickled object's +representation are ignored. + +Optional keyword arguments are *fix_imports*, *encoding* and *errors*, +which are used to control compatibility support for pickle stream +generated by Python 2. If *fix_imports* is True, pickle will try to +map the old Python 2 names to the new names used in Python 3. The +*encoding* and *errors* tell pickle how to decode 8-bit string +instances pickled by Python 2; these default to 'ASCII' and 'strict', +respectively. The *encoding* can be 'bytes' to read these 8-bit +string instances as bytes objects. +""" class PickleError(Exception): ... class PicklingError(PickleError): ... @@ -59,6 +146,37 @@ class PicklerMemoProxy: @disjoint_base class Pickler: + """This takes a binary file for writing a pickle data stream. + +The optional *protocol* argument tells the pickler to use the given +protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default +protocol is 5. It was introduced in Python 3.8, and is incompatible +with previous versions. + +Specifying a negative protocol version selects the highest protocol +version supported. The higher the protocol used, the more recent the +version of Python needed to read the pickle produced. + +The *file* argument must have a write() method that accepts a single +bytes argument. It can thus be a file object opened for binary +writing, an io.BytesIO instance, or any other custom object that meets +this interface. + +If *fix_imports* is True and protocol is less than 3, pickle will try +to map the new Python 3 names to the old module names used in Python +2, so that the pickle data stream is readable with Python 2. + +If *buffer_callback* is None (the default), buffer views are +serialized into *file* as part of the pickle stream. + +If *buffer_callback* is not None, then it can be called any number +of times with a buffer view. If the callback returns a false value +(such as None), the given buffer is out-of-band; otherwise the +buffer is serialized in-band, i.e. inside the pickle stream. 
+ +It is an error if *buffer_callback* is not None and *protocol* +is None or smaller than 5. +""" fast: bool dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] reducer_override: Callable[[Any], Any] @@ -74,8 +192,17 @@ class Pickler: def memo(self) -> PicklerMemoProxy: ... @memo.setter def memo(self, value: PicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... - def dump(self, obj: Any, /) -> None: ... - def clear_memo(self) -> None: ... + def dump(self, obj: Any, /) -> None: + """Write a pickled representation of the given object to the open file. +""" + def clear_memo(self) -> None: + """Clears the pickler's "memo". + +The memo is the data structure that remembers which objects the +pickler has already seen, so that shared or recursive objects are +pickled by reference and not by value. This method is useful when +re-using picklers. +""" # this method has no default implementation for Python < 3.13 def persistent_id(self, obj: Any, /) -> Any: ... @@ -87,6 +214,27 @@ class UnpicklerMemoProxy: @disjoint_base class Unpickler: + """This takes a binary file for reading a pickle data stream. + +The protocol version of the pickle is detected automatically, so no +protocol argument is needed. Bytes past the pickled object's +representation are ignored. + +The argument *file* must have two methods, a read() method that takes +an integer argument, and a readline() method that requires no +arguments. Both methods should return bytes. Thus *file* can be a +binary file object opened for reading, an io.BytesIO object, or any +other custom object that meets this interface. + +Optional keyword arguments are *fix_imports*, *encoding* and *errors*, +which are used to control compatibility support for pickle stream +generated by Python 2. If *fix_imports* is True, pickle will try to +map the old Python 2 names to the new names used in Python 3. The +*encoding* and *errors* tell pickle how to decode 8-bit string +instances pickled by Python 2; these default to 'ASCII' and 'strict', +respectively. The *encoding* can be 'bytes' to read these 8-bit +string instances as bytes objects. +""" def __init__( self, file: _ReadableFileobj, @@ -100,8 +248,23 @@ class Unpickler: def memo(self) -> UnpicklerMemoProxy: ... @memo.setter def memo(self, value: UnpicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... - def load(self) -> Any: ... - def find_class(self, module_name: str, global_name: str, /) -> Any: ... + def load(self) -> Any: + """Load a pickle. + +Read a pickled object representation from the open file object given +in the constructor, and return the reconstituted object hierarchy +specified therein. +""" + def find_class(self, module_name: str, global_name: str, /) -> Any: + """Return an object from a specified module. + +If necessary, the module will be imported. Subclasses may override +this method (e.g. to restrict unpickling of arbitrary classes and +functions). + +This method is called whenever a class or a function object is +needed. Both arguments passed are str objects. +""" # this method has no default implementation for Python < 3.13 def persistent_load(self, pid: Any, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi index dd74e316e8990..ba2509ac9aad4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi @@ -1,3 +1,5 @@ +"""A POSIX helper for the subprocess module. 
+""" import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -29,7 +31,29 @@ if sys.platform != "win32": child_umask: int, preexec_fn: Callable[[], None], /, - ) -> int: ... + ) -> int: + """Spawn a fresh new child process. + +Fork a child process, close parent file descriptors as appropriate in the +child and duplicate the few that are needed before calling exec() in the +child process. + +If close_fds is True, close file descriptors 3 and higher, except those listed +in the sorted tuple pass_fds. + +The preexec_fn, if supplied, will be called immediately before closing file +descriptors and exec. + +WARNING: preexec_fn is NOT SAFE if your application uses threads. + It may trigger infrequent, difficult to debug deadlocks. + +If an error occurs in the child process before the exec, it is +serialized and written to the errpipe_write fd per subprocess.py. + +Returns: the child process's PID. + +Raises: Only on an error in the parent process. +""" else: def fork_exec( args: Sequence[StrOrBytesPath] | None, @@ -56,4 +80,26 @@ if sys.platform != "win32": preexec_fn: Callable[[], None], allow_vfork: bool, /, - ) -> int: ... + ) -> int: + """Spawn a fresh new child process. + +Fork a child process, close parent file descriptors as appropriate in the +child and duplicate the few that are needed before calling exec() in the +child process. + +If close_fds is True, close file descriptors 3 and higher, except those listed +in the sorted tuple pass_fds. + +The preexec_fn, if supplied, will be called immediately before closing file +descriptors and exec. + +WARNING: preexec_fn is NOT SAFE if your application uses threads. + It may trigger infrequent, difficult to debug deadlocks. + +If an error occurs in the child process before the exec, it is +serialized and written to the errpipe_write fd per subprocess.py. + +Returns: the child process's PID. + +Raises: Only on an error in the parent process. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi index 1260717489e41..471c434effb42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi @@ -5,10 +5,32 @@ _T = TypeVar("_T") _CacheToken = NewType("_CacheToken", int) -def get_cache_token() -> _CacheToken: ... +def get_cache_token() -> _CacheToken: + """Returns the current ABC cache token. + +The token is an opaque object (supporting equality testing) identifying the +current version of the ABC cache for virtual subclasses. The token changes +with every call to ``register()`` on any ABC. +""" class ABCMeta(type): + """Metaclass for defining Abstract Base Classes (ABCs). + +Use this metaclass to create an ABC. An ABC can be subclassed +directly, and then acts as a mix-in class. You can also register +unrelated concrete classes (even built-in classes) and unrelated +ABCs as 'virtual subclasses' -- these and their descendants will +be considered subclasses of the registering ABC by the built-in +issubclass() function, but the registering ABC won't show up in +their MRO (Method Resolution Order) nor will method +implementations defined by the registering ABC be callable (not +even via super()). +""" def __new__( mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], / ) -> _typeshed.Self: ... - def register(cls, subclass: type[_T]) -> type[_T]: ... 
+ def register(cls, subclass: type[_T]) -> type[_T]: + """Register a virtual subclass of an ABC. + +Returns the subclass, to allow usage as a class decorator. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi index a6723f749da6d..77408ac26a399 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi @@ -1,3 +1,5 @@ +"""Python decimal arithmetic module +""" # This is a slight lie, the implementations aren't exactly identical # However, in all likelihood, the differences are inconsequential import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi index edd484a9a71a4..1bf712af28979 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi @@ -1,18 +1,56 @@ +"""C implementation of the Python queue module. +This module is an implementation detail, please do not use it directly. +""" from types import GenericAlias from typing import Any, Generic, TypeVar from typing_extensions import disjoint_base _T = TypeVar("_T") -class Empty(Exception): ... +class Empty(Exception): + """Exception raised by Queue.get(block=0)/get_nowait(). +""" @disjoint_base class SimpleQueue(Generic[_T]): + """Simple, unbounded, reentrant FIFO queue. +""" def __init__(self) -> None: ... - def empty(self) -> bool: ... - def get(self, block: bool = True, timeout: float | None = None) -> _T: ... - def get_nowait(self) -> _T: ... - def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... - def put_nowait(self, item: _T) -> None: ... - def qsize(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def empty(self) -> bool: + """Return True if the queue is empty, False otherwise (not reliable!). +""" + def get(self, block: bool = True, timeout: float | None = None) -> _T: + """Remove and return an item from the queue. + +If optional args 'block' is true and 'timeout' is None (the default), +block if necessary until an item is available. If 'timeout' is +a non-negative number, it blocks at most 'timeout' seconds and raises +the Empty exception if no item was available within that time. +Otherwise ('block' is false), return an item if one is immediately +available, else raise the Empty exception ('timeout' is ignored +in that case). +""" + def get_nowait(self) -> _T: + """Remove and return an item from the queue without blocking. + +Only get an item if one is immediately available. Otherwise +raise the Empty exception. +""" + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: + """Put the item on the queue. + +The optional 'block' and 'timeout' arguments are ignored, as this method +never blocks. They are provided for compatibility with the Queue class. +""" + def put_nowait(self, item: _T) -> None: + """Put an item into the queue without blocking. + +This is exactly equivalent to `put(item)` and is only provided +for compatibility with the Queue class. +""" + def qsize(self) -> int: + """Return the approximate size of the queue (not reliable!). 
+""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi index ac00fdfb7272b..41e15341277c0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi @@ -1,3 +1,5 @@ +"""Module implements the Mersenne Twister random number generator. +""" import sys from typing_extensions import Self, TypeAlias, disjoint_base @@ -6,13 +8,28 @@ _State: TypeAlias = tuple[int, ...] @disjoint_base class Random: + """Random() -> create a random number generator with its own internal state. +""" if sys.version_info >= (3, 10): def __init__(self, seed: object = ..., /) -> None: ... else: def __new__(self, seed: object = ..., /) -> Self: ... - def seed(self, n: object = None, /) -> None: ... - def getstate(self) -> _State: ... - def setstate(self, state: _State, /) -> None: ... - def random(self) -> float: ... - def getrandbits(self, k: int, /) -> int: ... + def seed(self, n: object = None, /) -> None: + """seed([n]) -> None. + +Defaults to use urandom and falls back to a combination +of the current time and the process identifier. +""" + def getstate(self) -> _State: + """getstate() -> tuple containing the current state. +""" + def setstate(self, state: _State, /) -> None: + """setstate(state) -> None. Restores generator state. +""" + def random(self) -> float: + """random() -> x in the interval [0, 1). +""" + def getrandbits(self, k: int, /) -> int: + """getrandbits(k) -> x. Generates an int with k random bits. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi index eb6c811294216..abd394f971041 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi @@ -1,3 +1,6 @@ +""" +The objects used by the site module to add custom builtins. +""" import sys from collections.abc import Iterable from typing import ClassVar, Literal, NoReturn @@ -9,9 +12,20 @@ class Quitter: def __call__(self, code: sys._ExitCode = None) -> NoReturn: ... class _Printer: + """interactive prompt objects for printing the license text, a list of +contributors and the copyright notice. +""" MAXLINES: ClassVar[Literal[23]] def __init__(self, name: str, data: str, files: Iterable[str] = (), dirs: Iterable[str] = ()) -> None: ... def __call__(self) -> None: ... class _Helper: + """Define the builtin 'help'. + +This is a wrapper around pydoc.help that provides a helpful message +when 'help' is typed at the Python interactive prompt. + +Calling help() at the Python prompt starts an interactive help session. +Calling help(thing) prints help for the python object 'thing'. +""" def __call__(self, request: object = ...) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi index cdad886b3415e..5fa8cbfd93421 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi @@ -1,3 +1,7 @@ +"""Implementation module for socket operations. + +See the socket module for documentation. 
+""" import sys from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterable @@ -733,15 +737,68 @@ if sys.platform != "win32" and sys.platform != "darwin": @disjoint_base class socket: + """socket(family=AF_INET, type=SOCK_STREAM, proto=0) -> socket object +socket(family=-1, type=-1, proto=-1, fileno=None) -> socket object + +Open a socket of the given type. The family argument specifies the +address family; it defaults to AF_INET. The type argument specifies +whether this is a stream (SOCK_STREAM, this is the default) +or datagram (SOCK_DGRAM) socket. The protocol argument defaults to 0, +specifying the default protocol. Keyword arguments are accepted. +The socket is created as non-inheritable. + +When a fileno is passed in, family, type and proto are auto-detected, +unless they are explicitly set. + +A socket object represents one endpoint of a network connection. + +Methods of socket objects (keyword arguments not allowed): + +_accept() -- accept connection, returning new socket fd and client address +bind(addr) -- bind the socket to a local address +close() -- close the socket +connect(addr) -- connect the socket to a remote address +connect_ex(addr) -- connect, return an error code instead of an exception +dup() -- return a new socket fd duplicated from fileno() +fileno() -- return underlying file descriptor +getpeername() -- return remote address [*] +getsockname() -- return local address +getsockopt(level, optname[, buflen]) -- get socket options +gettimeout() -- return timeout or None +listen([n]) -- start listening for incoming connections +recv(buflen[, flags]) -- receive data +recv_into(buffer[, nbytes[, flags]]) -- receive data (into a buffer) +recvfrom(buflen[, flags]) -- receive data and sender's address +recvfrom_into(buffer[, nbytes, [, flags]) + -- receive data and sender's address (into a buffer) +sendall(data[, flags]) -- send all data +send(data[, flags]) -- send data, may not send all of it +sendto(data[, flags], addr) -- send data to a given address +setblocking(bool) -- set or clear the blocking I/O flag +getblocking() -- return True if socket is blocking, False if non-blocking +setsockopt(level, optname, value[, optlen]) -- set socket options +settimeout(None | float) -- set or clear the timeout +shutdown(how) -- shut down traffic in one or both directions + + [*] not available on all platforms! +""" @property - def family(self) -> int: ... + def family(self) -> int: + """the socket family +""" @property - def type(self) -> int: ... + def type(self) -> int: + """the socket type +""" @property - def proto(self) -> int: ... + def proto(self) -> int: + """the socket protocol +""" # F811: "Redefinition of unused `timeout`" @property - def timeout(self) -> float | None: ... # noqa: F811 + def timeout(self) -> float | None: # noqa: F811 + """the socket timeout +""" if sys.platform == "win32": def __init__( self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = None @@ -749,38 +806,205 @@ class socket: else: def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | None = None) -> None: ... - def bind(self, address: _Address, /) -> None: ... - def close(self) -> None: ... - def connect(self, address: _Address, /) -> None: ... - def connect_ex(self, address: _Address, /) -> int: ... - def detach(self) -> int: ... - def fileno(self) -> int: ... - def getpeername(self) -> _RetAddress: ... - def getsockname(self) -> _RetAddress: ... 
+ def bind(self, address: _Address, /) -> None: + """bind(address) + +Bind the socket to a local address. For IP sockets, the address is a +pair (host, port); the host must refer to the local host. For raw packet +sockets the address is a tuple (ifname, proto [,pkttype [,hatype [,addr]]]) +""" + def close(self) -> None: + """close() + +Close the socket. It cannot be used after this call. +""" + def connect(self, address: _Address, /) -> None: + """connect(address) + +Connect the socket to a remote address. For IP sockets, the address +is a pair (host, port). +""" + def connect_ex(self, address: _Address, /) -> int: + """connect_ex(address) -> errno + +This is like connect(address), but returns an error code (the errno value) +instead of raising an exception when an error occurs. +""" + def detach(self) -> int: + """detach() + +Close the socket object without closing the underlying file descriptor. +The object cannot be used after this call, but the file descriptor +can be reused for other purposes. The file descriptor is returned. +""" + def fileno(self) -> int: + """fileno() -> integer + +Return the integer file descriptor of the socket. +""" + def getpeername(self) -> _RetAddress: + """getpeername() -> address info + +Return the address of the remote endpoint. For IP sockets, the address +info is a pair (hostaddr, port). +""" + def getsockname(self) -> _RetAddress: + """getsockname() -> address info + +Return the address of the local endpoint. The format depends on the +address family. For IPv4 sockets, the address info is a pair +(hostaddr, port). For IPv6 sockets, the address info is a 4-tuple +(hostaddr, port, flowinfo, scope_id). +""" @overload - def getsockopt(self, level: int, optname: int, /) -> int: ... + def getsockopt(self, level: int, optname: int, /) -> int: + """getsockopt(level, option[, buffersize]) -> value + +Get a socket option. See the Unix manual for level and option. +If a nonzero buffersize argument is given, the return value is a +string of that length; otherwise it is an integer. +""" @overload def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes: ... - def getblocking(self) -> bool: ... - def gettimeout(self) -> float | None: ... + def getblocking(self) -> bool: + """getblocking() + +Returns True if socket is in blocking mode, or False if it +is in non-blocking mode. +""" + def gettimeout(self) -> float | None: + """gettimeout() -> timeout + +Returns the timeout in seconds (float) associated with socket +operations. A timeout of None indicates that timeouts on socket +operations are disabled. +""" if sys.platform == "win32": def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: ... - def listen(self, backlog: int = ..., /) -> None: ... - def recv(self, bufsize: int, flags: int = 0, /) -> bytes: ... - def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: ... + def listen(self, backlog: int = ..., /) -> None: + """listen([backlog]) + +Enable a server to accept connections. If backlog is specified, it must be +at least 0 (if it is lower, it is set to 0); it specifies the number of +unaccepted connections that the system will allow before refusing new +connections. If not specified, a default reasonable value is chosen. +""" + def recv(self, bufsize: int, flags: int = 0, /) -> bytes: + """recv(buffersize[, flags]) -> data + +Receive up to buffersize bytes from the socket. For the optional flags +argument, see the Unix manual. 
When no data is available, block until +at least one byte is available or until the remote end is closed. When +the remote end is closed and all data is read, return the empty string. +""" + def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: + """recvfrom(buffersize[, flags]) -> (data, address info) + +Like recv(buffersize, flags) but also return the sender's address info. +""" if sys.platform != "win32": - def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: + """recvmsg(bufsize[, ancbufsize[, flags]]) -> (data, ancdata, msg_flags, address) + +Receive normal data (up to bufsize bytes) and ancillary data from the +socket. The ancbufsize argument sets the size in bytes of the +internal buffer used to receive the ancillary data; it defaults to 0, +meaning that no ancillary data will be received. Appropriate buffer +sizes for ancillary data can be calculated using CMSG_SPACE() or +CMSG_LEN(), and items which do not fit into the buffer might be +truncated or discarded. The flags argument defaults to 0 and has the +same meaning as for recv(). + +The return value is a 4-tuple: (data, ancdata, msg_flags, address). +The data item is a bytes object holding the non-ancillary data +received. The ancdata item is a list of zero or more tuples +(cmsg_level, cmsg_type, cmsg_data) representing the ancillary data +(control messages) received: cmsg_level and cmsg_type are integers +specifying the protocol level and protocol-specific type respectively, +and cmsg_data is a bytes object holding the associated data. The +msg_flags item is the bitwise OR of various flags indicating +conditions on the received message; see your system documentation for +details. If the receiving socket is unconnected, address is the +address of the sending socket, if available; otherwise, its value is +unspecified. + +If recvmsg() raises an exception after the system call returns, it +will first attempt to close any file descriptors received via the +SCM_RIGHTS mechanism. +""" def recvmsg_into( self, buffers: Iterable[WriteableBuffer], ancbufsize: int = 0, flags: int = 0, / - ) -> tuple[int, list[_CMSG], int, Any]: ... - - def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: ... - def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: ... - def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: ... - def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: ... + ) -> tuple[int, list[_CMSG], int, Any]: + """recvmsg_into(buffers[, ancbufsize[, flags]]) -> (nbytes, ancdata, msg_flags, address) + +Receive normal data and ancillary data from the socket, scattering the +non-ancillary data into a series of buffers. The buffers argument +must be an iterable of objects that export writable buffers +(e.g. bytearray objects); these will be filled with successive chunks +of the non-ancillary data until it has all been written or there are +no more buffers. The ancbufsize argument sets the size in bytes of +the internal buffer used to receive the ancillary data; it defaults to +0, meaning that no ancillary data will be received. Appropriate +buffer sizes for ancillary data can be calculated using CMSG_SPACE() +or CMSG_LEN(), and items which do not fit into the buffer might be +truncated or discarded. 
The flags argument defaults to 0 and has the +same meaning as for recv(). + +The return value is a 4-tuple: (nbytes, ancdata, msg_flags, address). +The nbytes item is the total number of bytes of non-ancillary data +written into the buffers. The ancdata item is a list of zero or more +tuples (cmsg_level, cmsg_type, cmsg_data) representing the ancillary +data (control messages) received: cmsg_level and cmsg_type are +integers specifying the protocol level and protocol-specific type +respectively, and cmsg_data is a bytes object holding the associated +data. The msg_flags item is the bitwise OR of various flags +indicating conditions on the received message; see your system +documentation for details. If the receiving socket is unconnected, +address is the address of the sending socket, if available; otherwise, +its value is unspecified. + +If recvmsg_into() raises an exception after the system call returns, +it will first attempt to close any file descriptors received via the +SCM_RIGHTS mechanism. +""" + + def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: + """recvfrom_into(buffer[, nbytes[, flags]]) -> (nbytes, address info) + +Like recv_into(buffer[, nbytes[, flags]]) but also return the sender's address info. +""" + def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: + """recv_into(buffer, [nbytes[, flags]]) -> nbytes_read + +A version of recv() that stores its data into a buffer rather than creating +a new string. Receive up to buffersize bytes from the socket. If buffersize +is not specified (or 0), receive up to the size available in the given buffer. + +See recv() for documentation about the flags. +""" + def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: + """send(data[, flags]) -> count + +Send a data string to the socket. For the optional flags +argument, see the Unix manual. Return the number of bytes +sent; this may be less than len(data) if the network is busy. +""" + def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: + """sendall(data[, flags]) + +Send a data string to the socket. For the optional flags +argument, see the Unix manual. This calls send() repeatedly +until all data is sent. If an error occurs, it's impossible +to tell how much data has been sent. +""" @overload - def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: ... + def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: + """sendto(data[, flags], address) -> count + +Like send(data, flags) but allows specifying the destination address. +For IP sockets, the address is a pair (hostaddr, port). +""" @overload def sendto(self, data: ReadableBuffer, flags: int, address: _Address, /) -> int: ... if sys.platform != "win32": @@ -791,68 +1015,243 @@ class socket: flags: int = 0, address: _Address | None = None, /, - ) -> int: ... + ) -> int: + """sendmsg(buffers[, ancdata[, flags[, address]]]) -> count + +Send normal and ancillary data to the socket, gathering the +non-ancillary data from a series of buffers and concatenating it into +a single message. The buffers argument specifies the non-ancillary +data as an iterable of bytes-like objects (e.g. bytes objects). 
+The ancdata argument specifies the ancillary data (control messages) +as an iterable of zero or more tuples (cmsg_level, cmsg_type, +cmsg_data), where cmsg_level and cmsg_type are integers specifying the +protocol level and protocol-specific type respectively, and cmsg_data +is a bytes-like object holding the associated data. The flags +argument defaults to 0 and has the same meaning as for send(). If +address is supplied and not None, it sets a destination address for +the message. The return value is the number of bytes of non-ancillary +data sent. +""" if sys.platform == "linux": def sendmsg_afalg( self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0 - ) -> int: ... - - def setblocking(self, flag: bool, /) -> None: ... - def settimeout(self, value: float | None, /) -> None: ... + ) -> int: + """sendmsg_afalg([msg], *, op[, iv[, assoclen[, flags=MSG_MORE]]]) + +Set operation mode, IV and length of associated data for an AF_ALG +operation socket. +""" + + def setblocking(self, flag: bool, /) -> None: + """setblocking(flag) + +Set the socket to blocking (flag is true) or non-blocking (false). +setblocking(True) is equivalent to settimeout(None); +setblocking(False) is equivalent to settimeout(0.0). +""" + def settimeout(self, value: float | None, /) -> None: + """settimeout(timeout) + +Set a timeout on socket operations. 'timeout' can be a float, +giving in seconds, or None. Setting a timeout of None disables +the timeout feature and is equivalent to setblocking(1). +Setting a timeout of zero is the same as setblocking(0). +""" @overload - def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: ... + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: + """setsockopt(level, option, value: int) +setsockopt(level, option, value: buffer) +setsockopt(level, option, None, optlen: int) + +Set a socket option. See the Unix manual for level and option. +The value argument can either be an integer, a string buffer, or +None, optlen. +""" @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None: ... if sys.platform == "win32": def share(self, process_id: int, /) -> bytes: ... - def shutdown(self, how: int, /) -> None: ... + def shutdown(self, how: int, /) -> None: + """shutdown(flag) + +Shut down the reading side of the socket (flag == SHUT_RD), the writing side +of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR). +""" SocketType = socket # ===== Functions ===== -def close(fd: SupportsIndex, /) -> None: ... -def dup(fd: SupportsIndex, /) -> int: ... +def close(fd: SupportsIndex, /) -> None: + """close(integer) -> None + +Close an integer socket file descriptor. This is like os.close(), but for +sockets; on some platforms os.close() won't work for socket file descriptors. +""" +def dup(fd: SupportsIndex, /) -> int: + """dup(integer) -> integer + +Duplicate an integer socket file descriptor. This is like os.dup(), but for +sockets; on some platforms os.dup() won't work for socket file descriptors. +""" # the 5th tuple item is an address def getaddrinfo( host: bytes | str | None, port: bytes | str | int | None, family: int = ..., type: int = 0, proto: int = 0, flags: int = 0 -) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... -def gethostbyname(hostname: str, /) -> str: ... -def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: ... 
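The setsockopt()/settimeout()/getsockopt() docstrings above cover both the integer and buffer forms of socket options; a minimal sketch of the integer form through the public socket module, with the option and timeout values chosen purely for illustration.

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)   # integer-valued option
s.settimeout(2.5)                                         # seconds; None restores blocking mode
print(s.gettimeout())                                     # -> 2.5
print(bool(s.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)))  # -> True
s.close()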
-def gethostname() -> str: ... -def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: ... -def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: ... -def getprotobyname(protocolname: str, /) -> int: ... -def getservbyname(servicename: str, protocolname: str = ..., /) -> int: ... -def getservbyport(port: int, protocolname: str = ..., /) -> str: ... -def ntohl(x: int, /) -> int: ... # param & ret val are 32-bit ints -def ntohs(x: int, /) -> int: ... # param & ret val are 16-bit ints -def htonl(x: int, /) -> int: ... # param & ret val are 32-bit ints -def htons(x: int, /) -> int: ... # param & ret val are 16-bit ints -def inet_aton(ip_addr: str, /) -> bytes: ... # ret val 4 bytes in length -def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ... -def inet_pton(address_family: int, ip_string: str, /) -> bytes: ... -def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ... -def getdefaulttimeout() -> float | None: ... +) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: + """getaddrinfo(host, port [, family, type, proto, flags]) + -> list of (family, type, proto, canonname, sockaddr) + +Resolve host and port into addrinfo struct. +""" +def gethostbyname(hostname: str, /) -> str: + """gethostbyname(host) -> address + +Return the IP address (a string of the form '255.255.255.255') for a host. +""" +def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: + """gethostbyname_ex(host) -> (name, aliaslist, addresslist) + +Return the true host name, a list of aliases, and a list of IP addresses, +for a host. The host argument is a string giving a host name or IP number. +""" +def gethostname() -> str: + """gethostname() -> string + +Return the current host name. +""" +def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: + """gethostbyaddr(host) -> (name, aliaslist, addresslist) + +Return the true host name, a list of aliases, and a list of IP addresses, +for a host. The host argument is a string giving a host name or IP number. +""" +def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: + """getnameinfo(sockaddr, flags) --> (host, port) + +Get host and port for a sockaddr. +""" +def getprotobyname(protocolname: str, /) -> int: + """getprotobyname(name) -> integer + +Return the protocol number for the named protocol. (Rarely used.) +""" +def getservbyname(servicename: str, protocolname: str = ..., /) -> int: + """getservbyname(servicename[, protocolname]) -> integer + +Return a port number from a service name and protocol name. +The optional protocol name, if given, should be 'tcp' or 'udp', +otherwise any protocol will match. +""" +def getservbyport(port: int, protocolname: str = ..., /) -> str: + """getservbyport(port[, protocolname]) -> string + +Return the service name from a port number and protocol name. +The optional protocol name, if given, should be 'tcp' or 'udp', +otherwise any protocol will match. +""" +def ntohl(x: int, /) -> int: # param & ret val are 32-bit ints + """Convert a 32-bit unsigned integer from network to host byte order. +""" +def ntohs(x: int, /) -> int: # param & ret val are 16-bit ints + """Convert a 16-bit unsigned integer from network to host byte order. +""" +def htonl(x: int, /) -> int: # param & ret val are 32-bit ints + """Convert a 32-bit unsigned integer from host to network byte order. 
+""" +def htons(x: int, /) -> int: # param & ret val are 16-bit ints + """Convert a 16-bit unsigned integer from host to network byte order. +""" +def inet_aton(ip_addr: str, /) -> bytes: # ret val 4 bytes in length + """Convert an IP address in string format (123.45.67.89) to the 32-bit packed binary format used in low-level network functions. +""" +def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: + """Convert an IP address from 32-bit packed binary format to string format. +""" +def inet_pton(address_family: int, ip_string: str, /) -> bytes: + """inet_pton(af, ip) -> packed IP address string + +Convert an IP address from string format to a packed string suitable +for use with low-level network functions. +""" +def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: + """inet_ntop(af, packed_ip) -> string formatted IP address + +Convert a packed IP address of the given family to string format. +""" +def getdefaulttimeout() -> float | None: + """getdefaulttimeout() -> timeout + +Returns the default timeout in seconds (float) for new socket objects. +A value of None indicates that new socket objects have no timeout. +When the socket module is first imported, the default is None. +""" # F811: "Redefinition of unused `timeout`" -def setdefaulttimeout(timeout: float | None, /) -> None: ... # noqa: F811 +def setdefaulttimeout(timeout: float | None, /) -> None: # noqa: F811 + """setdefaulttimeout(timeout) -if sys.platform != "win32": - def sethostname(name: str, /) -> None: ... - def CMSG_LEN(length: int, /) -> int: ... - def CMSG_SPACE(length: int, /) -> int: ... - def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: ... +Set the default timeout in seconds (float) for new socket objects. +A value of None indicates that new socket objects have no timeout. +When the socket module is first imported, the default is None. +""" -def if_nameindex() -> list[tuple[int, str]]: ... -def if_nametoindex(oname: str, /) -> int: ... +if sys.platform != "win32": + def sethostname(name: str, /) -> None: + """sethostname(name) + +Sets the hostname to name. +""" + def CMSG_LEN(length: int, /) -> int: + """CMSG_LEN(length) -> control message length + +Return the total length, without trailing padding, of an ancillary +data item with associated data of the given length. This value can +often be used as the buffer size for recvmsg() to receive a single +item of ancillary data, but RFC 3542 requires portable applications to +use CMSG_SPACE() and thus include space for padding, even when the +item will be the last in the buffer. Raises OverflowError if length +is outside the permissible range of values. +""" + def CMSG_SPACE(length: int, /) -> int: + """CMSG_SPACE(length) -> buffer size + +Return the buffer size needed for recvmsg() to receive an ancillary +data item with associated data of the given length, along with any +trailing padding. The buffer space needed to receive multiple items +is the sum of the CMSG_SPACE() values for their associated data +lengths. Raises OverflowError if length is outside the permissible +range of values. +""" + def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: + """socketpair([family[, type [, proto]]]) -> (socket object, socket object) + +Create a pair of socket objects from the sockets returned by the platform +socketpair() function. +The arguments are the same as for socket() except the default family is +AF_UNIX if defined on the platform; otherwise, the default is AF_INET. 
+""" + +def if_nameindex() -> list[tuple[int, str]]: + """if_nameindex() + +Returns a list of network interface information (index, name) tuples. +""" +def if_nametoindex(oname: str, /) -> int: + """Returns the interface index corresponding to the interface name if_name. +""" if sys.version_info >= (3, 14): - def if_indextoname(if_index: int, /) -> str: ... + def if_indextoname(if_index: int, /) -> str: + """Returns the interface name corresponding to the interface index if_index. +""" else: - def if_indextoname(index: int, /) -> str: ... + def if_indextoname(index: int, /) -> str: + """if_indextoname(if_index) + +Returns the interface name corresponding to the interface index if_index. +""" CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi index 50006dcf4032d..67b46423aa359 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi @@ -215,10 +215,14 @@ if sys.version_info >= (3, 11): # Can take or return anything depending on what's in the registry. @overload -def adapt(obj: Any, proto: Any, /) -> Any: ... +def adapt(obj: Any, proto: Any, /) -> Any: + """Adapt given object to given protocol. +""" @overload def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: ... -def complete_statement(statement: str) -> bool: ... +def complete_statement(statement: str) -> bool: + """Checks if a string contains a complete SQL statement. +""" if sys.version_info >= (3, 12): @overload @@ -232,7 +236,17 @@ if sys.version_info >= (3, 12): uri: bool = False, *, autocommit: bool = ..., - ) -> Connection: ... + ) -> Connection: + """Open a connection to the SQLite database file 'database'. + +You can use ":memory:" to open a database connection to a database that +resides in RAM instead of on disk. + +Note: Passing more than 1 positional argument to _sqlite3.connect() is +deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', +'check_same_thread', 'factory', 'cached_statements' and 'uri' will +become keyword-only parameters in Python 3.15. +""" @overload def connect( database: StrOrBytesPath, @@ -270,7 +284,12 @@ else: check_same_thread: bool = True, cached_statements: int = 128, uri: bool = False, - ) -> Connection: ... + ) -> Connection: + """Opens a connection to the SQLite database file database. + +You can use ":memory:" to open a database connection to a database that resides +in RAM instead of on disk. +""" @overload def connect( database: StrOrBytesPath, @@ -295,19 +314,40 @@ else: uri: bool = False, ) -> _ConnectionT: ... -def enable_callback_tracebacks(enable: bool, /) -> None: ... +def enable_callback_tracebacks(enable: bool, /) -> None: + """Enable or disable callback functions throwing errors to stderr. +""" if sys.version_info < (3, 12): # takes a pos-or-keyword argument because there is a C wrapper - def enable_shared_cache(do_enable: int) -> None: ... + def enable_shared_cache(do_enable: int) -> None: + """Enable or disable shared cache mode for the calling thread. + +This method is deprecated and will be removed in Python 3.12. +Shared cache is strongly discouraged by the SQLite 3 documentation. +If shared cache must be used, open the database in URI mode using +the cache=shared query parameter. +""" if sys.version_info >= (3, 10): - def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: ... - def register_converter(typename: str, converter: _Converter, /) -> None: ... 
+ def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: + """Register a function to adapt Python objects to SQLite values. +""" + def register_converter(typename: str, converter: _Converter, /) -> None: + """Register a function to convert SQLite values to Python objects. +""" else: - def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: ... - def register_converter(name: str, converter: _Converter, /) -> None: ... + def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: + """register_adapter(type, callable) + +Registers an adapter with sqlite3's adapter registry. +""" + def register_converter(name: str, converter: _Converter, /) -> None: + """register_converter(typename, callable) + +Registers a converter with sqlite3. +""" if sys.version_info < (3, 10): OptimizedUnicode = str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi index 73a43f29c8c5f..00d3d6e2fbb56 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi @@ -1,3 +1,6 @@ +"""Implementation module for SSL socket operations. See the socket module +for documentation. +""" import sys from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable @@ -47,26 +50,54 @@ class _CertInfo(TypedDict): caIssuers: NotRequired[tuple[str, ...] | None] crlDistributionPoints: NotRequired[tuple[str, ...] | None] -def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: ... -def RAND_bytes(n: int, /) -> bytes: ... +def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: + """Mix string into the OpenSSL PRNG state. + +entropy (a float) is a lower bound on the entropy contained in +string. See RFC 4086. +""" +def RAND_bytes(n: int, /) -> bytes: + """Generate n cryptographically strong pseudo-random bytes. +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.6; removed in Python 3.12. Use `ssl.RAND_bytes()` instead.") - def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: ... + def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: + """Generate n pseudo-random bytes. + +Return a pair (bytes, is_cryptographic). is_cryptographic is True +if the bytes generated are cryptographically strong. +""" if sys.version_info < (3, 10): def RAND_egd(path: str) -> None: ... -def RAND_status() -> bool: ... -def get_default_verify_paths() -> tuple[str, str, str, str]: ... +def RAND_status() -> bool: + """Returns True if the OpenSSL PRNG has been seeded with enough data and False if not. + +It is necessary to seed the PRNG with RAND_add() on some platforms before +using the ssl() function. +""" +def get_default_verify_paths() -> tuple[str, str, str, str]: + """Return search paths and environment vars that are used by SSLContext's set_default_verify_paths() to load default CAs. + +The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. +""" if sys.platform == "win32": _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] def enum_certificates(store_name: str) -> _EnumRetType: ... def enum_crls(store_name: str) -> _EnumRetType: ... -def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: ... -def nid2obj(nid: int, /) -> tuple[int, str, str, str]: ... +def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: + """Lookup NID, short name, long name and OID of an ASN1_OBJECT. + +By default objects are looked up by OID. 
With name=True short and +long name are also matched. +""" +def nid2obj(nid: int, /) -> tuple[int, str, str, str]: + """Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID. +""" @disjoint_base class _SSLContext: check_hostname: bool @@ -83,9 +114,25 @@ class _SSLContext: verify_flags: int verify_mode: int def __new__(cls, protocol: int, /) -> Self: ... - def cert_store_stats(self) -> dict[str, int]: ... + def cert_store_stats(self) -> dict[str, int]: + """Returns quantities of loaded X.509 certificates. + +X.509 certificates with a CA extension and certificate revocation lists +inside the context's cert store. + +NOTE: Certificates in a capath directory aren't loaded unless they have +been used at least once. +""" @overload - def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: + """Returns a list of dicts with information of loaded CA certs. + +If the optional argument is True, returns a DER-encoded copy of the CA +certificate. + +NOTE: Certificates in a capath directory aren't loaded unless they have +been used at least once. +""" @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -116,23 +163,48 @@ class MemoryBIO: eof: bool pending: int def __new__(self) -> Self: ... - def read(self, size: int = -1, /) -> bytes: ... - def write(self, b: ReadableBuffer, /) -> int: ... - def write_eof(self) -> None: ... + def read(self, size: int = -1, /) -> bytes: + """Read up to size bytes from the memory BIO. + +If size is not specified, read the entire buffer. +If the return value is an empty bytes instance, this means either +EOF or that no data is available. Use the "eof" property to +distinguish between the two. +""" + def write(self, b: ReadableBuffer, /) -> int: + """Writes the bytes b into the memory BIO. + +Returns the number of bytes written. +""" + def write_eof(self) -> None: + """Write an EOF marker to the memory BIO. + +When all data has been read, the "eof" property will be True. +""" @final class SSLSession: __hash__: ClassVar[None] # type: ignore[assignment] @property - def has_ticket(self) -> bool: ... + def has_ticket(self) -> bool: + """Does the session contain a ticket? +""" @property - def id(self) -> bytes: ... + def id(self) -> bytes: + """Session ID. +""" @property - def ticket_lifetime_hint(self) -> int: ... + def ticket_lifetime_hint(self) -> int: + """Ticket life time hint. +""" @property - def time(self) -> int: ... + def time(self) -> int: + """Session creation time (seconds since epoch). +""" @property - def timeout(self) -> int: ... + def timeout(self) -> int: + """Session timeout (delta in seconds). +""" # _ssl.Certificate is weird: it can't be instantiated or subclassed. 
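The MemoryBIO docstrings above describe its read/write/EOF behaviour; a minimal sketch through the public ssl module (the payload is arbitrary).

import ssl

bio = ssl.MemoryBIO()               # public name for _ssl.MemoryBIO
assert bio.pending == 0
n = bio.write(b"handshake bytes")   # returns the number of bytes written
assert n == 15 and bio.pending == 15
bio.write_eof()
assert bio.eof is False             # data is still pending
assert bio.read() == b"handshake bytes"
assert bio.eof is True              # EOF marker written and buffer drained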
# Instances can only be created via methods of the private _ssl._SSLSocket class, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi index 7129a282b5747..2688e78a9c8cc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi @@ -1,3 +1,73 @@ +"""S_IFMT_: file type bits +S_IFDIR: directory +S_IFCHR: character device +S_IFBLK: block device +S_IFREG: regular file +S_IFIFO: fifo (named pipe) +S_IFLNK: symbolic link +S_IFSOCK: socket file +S_IFDOOR: door +S_IFPORT: event port +S_IFWHT: whiteout + +S_ISUID: set UID bit +S_ISGID: set GID bit +S_ENFMT: file locking enforcement +S_ISVTX: sticky bit +S_IREAD: Unix V7 synonym for S_IRUSR +S_IWRITE: Unix V7 synonym for S_IWUSR +S_IEXEC: Unix V7 synonym for S_IXUSR +S_IRWXU: mask for owner permissions +S_IRUSR: read by owner +S_IWUSR: write by owner +S_IXUSR: execute by owner +S_IRWXG: mask for group permissions +S_IRGRP: read by group +S_IWGRP: write by group +S_IXGRP: execute by group +S_IRWXO: mask for others (not in group) permissions +S_IROTH: read by others +S_IWOTH: write by others +S_IXOTH: execute by others + +UF_SETTABLE: mask of owner changeable flags +UF_NODUMP: do not dump file +UF_IMMUTABLE: file may not be changed +UF_APPEND: file may only be appended to +UF_OPAQUE: directory is opaque when viewed through a union stack +UF_NOUNLINK: file may not be renamed or deleted +UF_COMPRESSED: macOS: file is hfs-compressed +UF_TRACKED: used for dealing with document IDs +UF_DATAVAULT: entitlement required for reading and writing +UF_HIDDEN: macOS: file should not be displayed +SF_SETTABLE: mask of super user changeable flags +SF_ARCHIVED: file may be archived +SF_IMMUTABLE: file may not be changed +SF_APPEND: file may only be appended to +SF_RESTRICTED: entitlement required for writing +SF_NOUNLINK: file may not be renamed or deleted +SF_SNAPSHOT: file is a snapshot file +SF_FIRMLINK: file is a firmlink +SF_DATALESS: file is a dataless object + +On macOS: +SF_SUPPORTED: mask of super user supported flags +SF_SYNTHETIC: mask of read-only synthetic flags + +ST_MODE +ST_INO +ST_DEV +ST_NLINK +ST_UID +ST_GID +ST_SIZE +ST_ATIME +ST_MTIME +ST_CTIME + +FILE_ATTRIBUTE_*: Windows file attribute constants + (only present on Windows) +""" import sys from typing import Final @@ -64,19 +134,65 @@ UF_NODUMP: Final = 0x00000001 UF_NOUNLINK: Final = 0x00000010 UF_OPAQUE: Final = 0x00000008 -def S_IMODE(mode: int, /) -> int: ... -def S_IFMT(mode: int, /) -> int: ... -def S_ISBLK(mode: int, /) -> bool: ... -def S_ISCHR(mode: int, /) -> bool: ... -def S_ISDIR(mode: int, /) -> bool: ... -def S_ISDOOR(mode: int, /) -> bool: ... -def S_ISFIFO(mode: int, /) -> bool: ... -def S_ISLNK(mode: int, /) -> bool: ... -def S_ISPORT(mode: int, /) -> bool: ... -def S_ISREG(mode: int, /) -> bool: ... -def S_ISSOCK(mode: int, /) -> bool: ... -def S_ISWHT(mode: int, /) -> bool: ... -def filemode(mode: int, /) -> str: ... +def S_IMODE(mode: int, /) -> int: + """Return the portion of the file's mode that can be set by os.chmod(). +""" +def S_IFMT(mode: int, /) -> int: + """Return the portion of the file's mode that describes the file type. +""" +def S_ISBLK(mode: int, /) -> bool: + """S_ISBLK(mode) -> bool + +Return True if mode is from a block special device file. +""" +def S_ISCHR(mode: int, /) -> bool: + """S_ISCHR(mode) -> bool + +Return True if mode is from a character special device file. 
+""" +def S_ISDIR(mode: int, /) -> bool: + """S_ISDIR(mode) -> bool + +Return True if mode is from a directory. +""" +def S_ISDOOR(mode: int, /) -> bool: + """S_ISDOOR(mode) -> bool + +Return True if mode is from a door. +""" +def S_ISFIFO(mode: int, /) -> bool: + """S_ISFIFO(mode) -> bool + +Return True if mode is from a FIFO (named pipe). +""" +def S_ISLNK(mode: int, /) -> bool: + """S_ISLNK(mode) -> bool + +Return True if mode is from a symbolic link. +""" +def S_ISPORT(mode: int, /) -> bool: + """S_ISPORT(mode) -> bool + +Return True if mode is from an event port. +""" +def S_ISREG(mode: int, /) -> bool: + """S_ISREG(mode) -> bool + +Return True if mode is from a regular file. +""" +def S_ISSOCK(mode: int, /) -> bool: + """S_ISSOCK(mode) -> bool + +Return True if mode is from a socket. +""" +def S_ISWHT(mode: int, /) -> bool: + """S_ISWHT(mode) -> bool + +Return True if mode is from a whiteout. +""" +def filemode(mode: int, /) -> str: + """Convert a file's mode to a string of the form '-rwxrwxrwx' +""" if sys.platform == "win32": IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi index a8fac2aea1b00..8b18d34a85c49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi @@ -1,23 +1,128 @@ +"""Functions to convert between Python values and C structs. +Python bytes objects are used to hold the data representing the C struct +and also as format strings (explained below) to describe the layout of data +in the C struct. + +The optional first format char indicates byte order, size and alignment: + @: native order, size & alignment (default) + =: native order, std. size & alignment + <: little-endian, std. size & alignment + >: big-endian, std. size & alignment + !: same as > + +The remaining chars indicate types of args and must match exactly; +these can be preceded by a decimal repeat count: + x: pad byte (no data); c:char; b:signed byte; B:unsigned byte; + ?: _Bool (requires C99; if not available, char is used instead) + h:short; H:unsigned short; i:int; I:unsigned int; + l:long; L:unsigned long; f:float; d:double; e:half-float. +Special cases (preceding decimal count indicates length): + s:string (array of char); p: pascal string (with count byte). +Special cases (only available in native format): + n:ssize_t; N:size_t; + P:an integer type that is wide enough to hold a pointer. +Special case (not in native mode unless 'long long' in platform C): + q:long long; Q:unsigned long long +Whitespace between formats is ignored. + +The variable struct.error is an exception raised on errors. +""" from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterator from typing import Any from typing_extensions import disjoint_base -def pack(fmt: str | bytes, /, *v: Any) -> bytes: ... -def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: ... -def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... -def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... -def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... -def calcsize(format: str | bytes, /) -> int: ... +def pack(fmt: str | bytes, /, *v: Any) -> bytes: + """pack(format, v1, v2, ...) -> bytes + +Return a bytes object containing the values v1, v2, ... packed according +to the format string. 
See help(struct) for more on format strings. +""" +def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: + """pack_into(format, buffer, offset, v1, v2, ...) + +Pack the values v1, v2, ... according to the format string and write +the packed bytes into the writable buffer buf starting at offset. Note +that the offset is a required argument. See help(struct) for more +on format strings. +""" +def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: + """Return a tuple containing values unpacked according to the format string. + +The buffer's size in bytes must be calcsize(format). + +See help(struct) for more on format strings. +""" +def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: + """Return a tuple containing values unpacked according to the format string. + +The buffer's size, minus offset, must be at least calcsize(format). + +See help(struct) for more on format strings. +""" +def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: + """Return an iterator yielding tuples unpacked from the given bytes. + +The bytes are unpacked according to the format string, like +a repeated invocation of unpack_from(). + +Requires that the bytes length be a multiple of the format struct size. +""" +def calcsize(format: str | bytes, /) -> int: + """Return size in bytes of the struct described by the format string. +""" @disjoint_base class Struct: + """Struct(fmt) --> compiled struct object + +""" @property - def format(self) -> str: ... + def format(self) -> str: + """struct format string +""" @property - def size(self) -> int: ... + def size(self) -> int: + """struct size in bytes +""" def __init__(self, format: str | bytes) -> None: ... - def pack(self, *v: Any) -> bytes: ... - def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... - def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ... - def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... - def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ... + def pack(self, *v: Any) -> bytes: + """S.pack(v1, v2, ...) -> bytes + +Return a bytes object containing values v1, v2, ... packed according +to the format string S.format. See help(struct) for more on format +strings. +""" + def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: + """S.pack_into(buffer, offset, v1, v2, ...) + +Pack the values v1, v2, ... according to the format string S.format +and write the packed bytes into the writable buffer buf starting at +offset. Note that the offset is a required argument. See +help(struct) for more on format strings. +""" + def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: + """Return a tuple containing unpacked values. + +Unpack according to the format string Struct.format. The buffer's size +in bytes must be Struct.size. + +See help(struct) for more on format strings. +""" + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: + """Return a tuple containing unpacked values. + +Values are unpacked according to the format string Struct.format. + +The buffer's size in bytes, starting at position offset, must be +at least Struct.size. + +See help(struct) for more on format strings. +""" + def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: + """Return an iterator yielding tuples. 
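A minimal sketch of the byte-order prefixes and format characters described in the module docstring above, via the public struct wrapper; the formats and values are illustrative.

import struct

packed = struct.pack("<H4s", 1025, b"abcd")     # little-endian unsigned short + 4-byte string
assert packed == b"\x01\x04abcd"
assert struct.calcsize("<H4s") == 6
print(struct.unpack("<H4s", packed))            # -> (1025, b'abcd')

s = struct.Struct(">2i")                        # reusable big-endian pair of ints
for pair in s.iter_unpack(s.pack(1, 2) + s.pack(3, 4)):
    print(pair)                                 # -> (1, 2) then (3, 4)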
+ +Tuples are unpacked from the given bytes source, like a repeated +invocation of unpack_from(). + +Requires that the bytes length be a multiple of the struct size. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi index 6969ae48cae79..32b186fbf488b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi @@ -1,3 +1,6 @@ +"""This module provides primitive operations to write multi-threaded programs. +The 'threading' module provides a more convenient interface. +""" import signal import sys from _typeshed import structseq @@ -11,15 +14,51 @@ _Ts = TypeVarTuple("_Ts") error = RuntimeError -def _count() -> int: ... +def _count() -> int: + """Return the number of currently running Python threads, excluding +the main thread. The returned number comprises all threads created +through `start_new_thread()` as well as `threading.Thread`, and not +yet finished. + +This function is meant for internal and specialized purposes only. +In most applications `threading.enumerate()` should be used instead. +""" @final class RLock: - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release(self) -> None: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: + """Lock the lock. `blocking` indicates whether we should wait +for the lock to be available or not. If `blocking` is False +and another thread holds the lock, the method will return False +immediately. If `blocking` is True and another thread holds +the lock, the method will wait for the lock to be released, +take it and then return True. +(note: the blocking operation is interruptible.) + +In all other cases, the method will return True immediately. +Precisely, if the current thread already holds the lock, its +internal counter is simply incremented. If nobody holds the lock, +the lock is taken and its internal counter initialized to 1. +""" + def release(self) -> None: + """Release the lock, allowing another thread that is blocked waiting for +the lock to acquire the lock. The lock must be in the locked state, +and must be locked by the same thread that unlocks it; otherwise a +`RuntimeError` is raised. + +Do note that if the lock was acquire()d several times in a row by the +current thread, release() needs to be called as many times for the lock +to be available for other threads. +""" __enter__ = acquire - def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: + """Release the lock. +""" if sys.version_info >= (3, 14): - def locked(self) -> bool: ... + def locked(self) -> bool: + """locked() + +Return a boolean indicating whether this object is locked right now. +""" if sys.version_info >= (3, 13): @final @@ -32,86 +71,283 @@ if sys.version_info >= (3, 13): def start_joinable_thread( function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True - ) -> _ThreadHandle: ... + ) -> _ThreadHandle: + """*For internal use only*: start a new thread. + +Like start_new_thread(), this starts a new thread calling the given function. +Unlike start_new_thread(), this returns a handle object with methods to join +or detach the given thread. +This function is not for third-party code, please use the +`threading` module instead. 
During finalization the runtime will not wait for +the thread to exit if daemon is True. If handle is provided it must be a +newly created thread._ThreadHandle instance. +""" @final class lock: - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release(self) -> None: ... - def locked(self) -> bool: ... - def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release_lock(self) -> None: ... - def locked_lock(self) -> bool: ... - def __enter__(self) -> bool: ... + """A lock object is a synchronization primitive. To create a lock, +call threading.Lock(). Methods are: + +acquire() -- lock the lock, possibly blocking until it can be obtained +release() -- unlock of the lock +locked() -- test whether the lock is currently locked + +A lock is not owned by the thread that locked it; another thread may +unlock it. A thread attempting to lock a lock that it has already locked +will block until another thread unlocks it. Deadlocks may ensue. +""" + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: + """Lock the lock. Without argument, this blocks if the lock is already +locked (even by the same thread), waiting for another thread to release +the lock, and return True once the lock is acquired. +With an argument, this will only block if the argument is true, +and the return value reflects whether the lock is acquired. +The blocking operation is interruptible. +""" + def release(self) -> None: + """Release the lock, allowing another thread that is blocked waiting for +the lock to acquire the lock. The lock must be in the locked state, +but it needn't be locked by the same thread that unlocks it. +""" + def locked(self) -> bool: + """Return whether the lock is in the locked state. +""" + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: + """An obsolete synonym of acquire(). +""" + def release_lock(self) -> None: + """An obsolete synonym of release(). +""" + def locked_lock(self) -> bool: + """An obsolete synonym of locked(). +""" + def __enter__(self) -> bool: + """Lock the lock. +""" def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> None: ... + ) -> None: + """Release the lock. +""" LockType = lock else: @final class LockType: - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release(self) -> None: ... - def locked(self) -> bool: ... - def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release_lock(self) -> None: ... - def locked_lock(self) -> bool: ... - def __enter__(self) -> bool: ... + """A lock object is a synchronization primitive. To create a lock, +call threading.Lock(). Methods are: + +acquire() -- lock the lock, possibly blocking until it can be obtained +release() -- unlock of the lock +locked() -- test whether the lock is currently locked + +A lock is not owned by the thread that locked it; another thread may +unlock it. A thread attempting to lock a lock that it has already locked +will block until another thread unlocks it. Deadlocks may ensue. +""" + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: + """acquire(blocking=True, timeout=-1) -> bool +(acquire_lock() is an obsolete synonym) + +Lock the lock. Without argument, this blocks if the lock is already +locked (even by the same thread), waiting for another thread to release +the lock, and return True once the lock is acquired. 
+With an argument, this will only block if the argument is true, +and the return value reflects whether the lock is acquired. +The blocking operation is interruptible. +""" + def release(self) -> None: + """release() +(release_lock() is an obsolete synonym) + +Release the lock, allowing another thread that is blocked waiting for +the lock to acquire the lock. The lock must be in the locked state, +but it needn't be locked by the same thread that unlocks it. +""" + def locked(self) -> bool: + """locked() -> bool +(locked_lock() is an obsolete synonym) + +Return whether the lock is in the locked state. +""" + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: + """acquire(blocking=True, timeout=-1) -> bool +(acquire_lock() is an obsolete synonym) + +Lock the lock. Without argument, this blocks if the lock is already +locked (even by the same thread), waiting for another thread to release +the lock, and return True once the lock is acquired. +With an argument, this will only block if the argument is true, +and the return value reflects whether the lock is acquired. +The blocking operation is interruptible. +""" + def release_lock(self) -> None: + """release() +(release_lock() is an obsolete synonym) + +Release the lock, allowing another thread that is blocked waiting for +the lock to acquire the lock. The lock must be in the locked state, +but it needn't be locked by the same thread that unlocks it. +""" + def locked_lock(self) -> bool: + """locked() -> bool +(locked_lock() is an obsolete synonym) + +Return whether the lock is in the locked state. +""" + def __enter__(self) -> bool: + """acquire(blocking=True, timeout=-1) -> bool +(acquire_lock() is an obsolete synonym) + +Lock the lock. Without argument, this blocks if the lock is already +locked (even by the same thread), waiting for another thread to release +the lock, and return True once the lock is acquired. +With an argument, this will only block if the argument is true, +and the return value reflects whether the lock is acquired. +The blocking operation is interruptible. +""" def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None - ) -> None: ... + ) -> None: + """release() +(release_lock() is an obsolete synonym) + +Release the lock, allowing another thread that is blocked waiting for +the lock to acquire the lock. The lock must be in the locked state, +but it needn't be locked by the same thread that unlocks it. +""" @overload -def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... +def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: + """Start a new thread and return its identifier. + +The thread will call the function with positional arguments from the +tuple args and keyword arguments taken from the optional dictionary +kwargs. The thread exits when the function returns; the return value +is ignored. The thread will also exit when the function raises an +unhandled exception; a stack trace will be printed unless the exception +is SystemExit. +""" @overload def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... # Obsolete synonym for start_new_thread() @overload -def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ... 
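The lock and start_new_thread() docstrings above describe the low-level primitives; a minimal sketch of using a lock as a completion signal (the threading module remains the recommended interface, and the worker function here is only an example).

import _thread

done = _thread.allocate_lock()
done.acquire()                          # hold the lock until the worker releases it

def worker(msg):
    print("worker says:", msg)
    done.release()                      # release() may be called from another thread

_thread.start_new_thread(worker, ("hello",))
done.acquire()                          # blocks until worker() has run
print("main thread resumes")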
+def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: + """An obsolete synonym of start_new_thread(). +""" @overload def start_new(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... if sys.version_info >= (3, 10): - def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: ... + def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: + """Simulate the arrival of the given signal in the main thread, +where the corresponding signal handler will be executed. +If *signum* is omitted, SIGINT is assumed. +A subthread can use this function to interrupt the main thread. + +Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. +""" else: - def interrupt_main() -> None: ... + def interrupt_main() -> None: + """interrupt_main() + +Raise a KeyboardInterrupt in the main thread. +A subthread can use this function to interrupt the main thread. +""" -def exit() -> NoReturn: ... -def exit_thread() -> NoReturn: ... # Obsolete synonym for exit() -def allocate_lock() -> LockType: ... -def allocate() -> LockType: ... # Obsolete synonym for allocate_lock() -def get_ident() -> int: ... -def stack_size(size: int = 0, /) -> int: ... +def exit() -> NoReturn: + """This is synonymous to ``raise SystemExit''. It will cause the current +thread to exit silently unless the exception is caught. +""" +def exit_thread() -> NoReturn: # Obsolete synonym for exit() + """An obsolete synonym of exit(). +""" +def allocate_lock() -> LockType: + """Create a new lock object. See help(type(threading.Lock())) for +information about locks. +""" +def allocate() -> LockType: # Obsolete synonym for allocate_lock() + """An obsolete synonym of allocate_lock(). +""" +def get_ident() -> int: + """Return a non-zero integer that uniquely identifies the current thread +amongst other threads that exist simultaneously. +This may be used to identify per-thread resources. +Even though on some platforms threads identities may appear to be +allocated consecutive numbers starting at 1, this behavior should not +be relied upon, and the number should be seen purely as a magic cookie. +A thread's identity may be reused for another thread after it exits. +""" +def stack_size(size: int = 0, /) -> int: + """Return the thread stack size used when creating new threads. The +optional size argument specifies the stack size (in bytes) to be used +for subsequently created threads, and must be 0 (use platform or +configured default) or a positive integer value of at least 32,768 (32k). +If changing the thread stack size is unsupported, a ThreadError +exception is raised. If the specified size is invalid, a ValueError +exception is raised, and the stack size is unmodified. 32k bytes + currently the minimum supported stack size value to guarantee +sufficient stack space for the interpreter itself. + +Note that some platforms may have particular restrictions on values for +the stack size, such as requiring a minimum stack size larger than 32 KiB or +requiring allocation in multiples of the system memory page size +- platform documentation should be referred to for more information +(4 KiB pages are common; using multiples of 4096 for the stack size is +the suggested approach in the absence of more specific information). +""" TIMEOUT_MAX: Final[float] -def get_native_id() -> int: ... 
# only available on some platforms +def get_native_id() -> int: # only available on some platforms + """Return a non-negative integer identifying the thread as reported +by the OS (kernel). This may be used to uniquely identify a +particular thread within a system. +""" @final class _ExceptHookArgs(structseq[Any], tuple[type[BaseException], BaseException | None, TracebackType | None, Thread | None]): + """ExceptHookArgs + +Type used to pass arguments to threading.excepthook. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("exc_type", "exc_value", "exc_traceback", "thread") @property - def exc_type(self) -> type[BaseException]: ... + def exc_type(self) -> type[BaseException]: + """Exception type +""" @property - def exc_value(self) -> BaseException | None: ... + def exc_value(self) -> BaseException | None: + """Exception value +""" @property - def exc_traceback(self) -> TracebackType | None: ... + def exc_traceback(self) -> TracebackType | None: + """Exception traceback +""" @property - def thread(self) -> Thread | None: ... + def thread(self) -> Thread | None: + """Thread +""" _excepthook: Callable[[_ExceptHookArgs], Any] if sys.version_info >= (3, 12): - def daemon_threads_allowed() -> bool: ... + def daemon_threads_allowed() -> bool: + """Return True if daemon threads are allowed in the current interpreter, +and False otherwise. +""" if sys.version_info >= (3, 14): - def set_name(name: str) -> None: ... + def set_name(name: str) -> None: + """Set the name of the current thread. +""" @disjoint_base class _local: + """Thread-local data +""" def __getattribute__(self, name: str, /) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... def __delattr__(self, name: str, /) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi index 5f6acaf840aa1..bae09fa7e458b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi @@ -1,3 +1,10 @@ +"""Thread-local objects. + +(Note that this module provides a Python version of the threading.local + class. Depending on the version of Python you're using, there may be a + faster one available. You should always import the `local` class from + `threading`.) +""" from threading import RLock from typing import Any from typing_extensions import Self, TypeAlias @@ -7,14 +14,21 @@ __all__ = ["local"] _LocalDict: TypeAlias = dict[Any, Any] class _localimpl: + """A class managing thread-local dicts +""" __slots__ = ("key", "dicts", "localargs", "locallock", "__weakref__") key: str dicts: dict[int, tuple[ReferenceType[Any], _LocalDict]] # Keep localargs in sync with the *args, **kwargs annotation on local.__new__ localargs: tuple[list[Any], dict[str, Any]] locallock: RLock - def get_dict(self) -> _LocalDict: ... - def create_dict(self) -> _LocalDict: ... + def get_dict(self) -> _LocalDict: + """Return the dict for the current thread. Raises KeyError if none +defined. +""" + def create_dict(self) -> _LocalDict: + """Create a new dict for the current thread, and return it. 
+""" class local: __slots__ = ("_local__impl", "__dict__") diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi index a3868f467c6ca..3e14cb7f3823b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi @@ -19,9 +19,13 @@ from typing_extensions import TypeAlias, deprecated @final class Tcl_Obj: @property - def string(self) -> str: ... + def string(self) -> str: + """the string representation of this object, either as str or bytes +""" @property - def typename(self) -> str: ... + def typename(self) -> str: + """name of the Tcl type +""" __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, value, /): ... def __ge__(self, value, /): ... @@ -92,8 +96,12 @@ class TkappType: def wantobjects(self, *args, **kwargs): ... def willdispatch(self) -> None: ... if sys.version_info >= (3, 12): - def gettrace(self, /) -> _TkinterTraceFunc | None: ... - def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ... + def gettrace(self, /) -> _TkinterTraceFunc | None: + """Get the tracing function. +""" + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: + """Set the tracing function. +""" # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS ALL_EVENTS: Final = -3 @@ -125,7 +133,16 @@ if sys.version_info >= (3, 13): sync: bool = False, use: str | None = None, /, - ): ... + ): + """ + + wantTk + if false, then Tk_Init() doesn't get called + sync + if true, then pass -sync to wish + use + if not None, then pass -use to wish +""" else: def create( @@ -138,7 +155,22 @@ else: sync: bool = False, use: str | None = None, /, - ): ... + ): + """ + + wantTk + if false, then Tk_Init() doesn't get called + sync + if true, then pass -sync to wish + use + if not None, then pass -use to wish +""" + +def getbusywaitinterval() -> int: + """Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. +""" +def setbusywaitinterval(new_val: int, /) -> None: + """Set the busy-wait interval in milliseconds between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. -def getbusywaitinterval() -> int: ... -def setbusywaitinterval(new_val: int, /) -> None: ... +It should be set to a divisor of the maximum time between frames in an animation. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi index e9720f46692ce..fbbdacb49e8f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi @@ -1,13 +1,58 @@ +"""Debug module to trace memory blocks allocated by Python. +""" from collections.abc import Sequence from tracemalloc import _FrameTuple, _TraceTuple -def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: ... -def _get_traces() -> Sequence[_TraceTuple]: ... -def clear_traces() -> None: ... -def get_traceback_limit() -> int: ... -def get_traced_memory() -> tuple[int, int]: ... -def get_tracemalloc_memory() -> int: ... -def is_tracing() -> bool: ... -def reset_peak() -> None: ... -def start(nframe: int = 1, /) -> None: ... -def stop() -> None: ... +def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: + """Get the traceback where the Python object obj was allocated. + +Return a tuple of (filename: str, lineno: int) tuples. 
+Return None if the tracemalloc module is disabled or did not +trace the allocation of the object. +""" +def _get_traces() -> Sequence[_TraceTuple]: + """Get traces of all memory blocks allocated by Python. + +Return a list of (size: int, traceback: tuple) tuples. +traceback is a tuple of (filename: str, lineno: int) tuples. + +Return an empty list if the tracemalloc module is disabled. +""" +def clear_traces() -> None: + """Clear traces of memory blocks allocated by Python. +""" +def get_traceback_limit() -> int: + """Get the maximum number of frames stored in the traceback of a trace. + +By default, a trace of an allocated memory block only stores +the most recent frame: the limit is 1. +""" +def get_traced_memory() -> tuple[int, int]: + """Get the current size and peak size of memory blocks traced by tracemalloc. + +Returns a tuple: (current: int, peak: int). +""" +def get_tracemalloc_memory() -> int: + """Get the memory usage in bytes of the tracemalloc module. + +This memory is used internally to trace memory allocations. +""" +def is_tracing() -> bool: + """Return True if the tracemalloc module is tracing Python memory allocations. +""" +def reset_peak() -> None: + """Set the peak size of memory blocks traced by tracemalloc to the current size. + +Do nothing if the tracemalloc module is not tracing memory allocations. +""" +def start(nframe: int = 1, /) -> None: + """Start tracing Python memory allocations. + +Also set the maximum number of frames stored in the traceback of a +trace to nframe. +""" +def stop() -> None: + """Stop tracing Python memory allocations. + +Also clear traces of memory blocks allocated by Python. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi index 2dbc7b8552813..5418a5aa0928c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi @@ -1,3 +1,6 @@ +"""_warnings provides basic warning filtering support. +It is a helper module to speed up interpreter start-up. +""" import sys from typing import Any, overload @@ -14,7 +17,22 @@ if sys.version_info >= (3, 12): source: Any | None = None, *, skip_file_prefixes: tuple[str, ...] = (), - ) -> None: ... + ) -> None: + """Issue a warning, or maybe ignore it or raise an exception. + + message + Text of the warning message. + category + The Warning category subclass. Defaults to UserWarning. + stacklevel + How far up the call stack to make this warning appear. A value of 2 for + example attributes the warning to the caller of the code calling warn(). + source + If supplied, the destroyed object which emitted a ResourceWarning + skip_file_prefixes + An optional tuple of module filename prefixes indicating frames to skip + during stacklevel computations for stack frame attribution. +""" @overload def warn( message: Warning, @@ -27,7 +45,9 @@ if sys.version_info >= (3, 12): else: @overload - def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... + def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: + """Issue a warning, or maybe ignore it or raise an exception. +""" @overload def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... 
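The 3.12+ `warn()` signature above documents `stacklevel` (and, on 3.12+, `skip_file_prefixes`). A minimal usage sketch through the public `warnings` wrapper that this `_warnings` helper backs; the `shout()` helper is illustrative, not part of the stub:

    import warnings

    def shout(text: str) -> str:
        # stacklevel=2 attributes the warning to the caller of shout(),
        # as the warn() docstring above describes.
        warnings.warn("shout() is deprecated; call str.upper() directly",
                      DeprecationWarning, stacklevel=2)
        return text.upper()

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        shout("hello")
    # The recorded warning carries the category and message passed to warn().
    assert caught[0].category is DeprecationWarning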
@@ -41,7 +61,9 @@ def warn_explicit( registry: dict[str | tuple[str, type[Warning], int], int] | None = None, module_globals: dict[str, Any] | None = None, source: Any | None = None, -) -> None: ... +) -> None: + """Issue a warning, or maybe ignore it or raise an exception. +""" @overload def warn_explicit( message: Warning, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi index a744340afaabd..a2e8785813fc7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi @@ -1,3 +1,5 @@ +"""Weak-reference support module. +""" from collections.abc import Callable from typing import Any, TypeVar, overload from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType, ref as ref @@ -5,11 +7,20 @@ from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyTy _C = TypeVar("_C", bound=Callable[..., Any]) _T = TypeVar("_T") -def getweakrefcount(object: Any, /) -> int: ... -def getweakrefs(object: Any, /) -> list[Any]: ... +def getweakrefcount(object: Any, /) -> int: + """Return the number of weak references to 'object'. +""" +def getweakrefs(object: Any, /) -> list[Any]: + """Return a list of all weak reference objects pointing to 'object'. +""" # Return CallableProxyType if object is callable, ProxyType otherwise @overload -def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: ... +def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: + """Create a proxy object that weakly references 'object'. + +'callback', if given, is called with a reference to the +proxy when 'object' is about to be finalized. +""" @overload def proxy(object: _T, callback: Callable[[_T], Any] | None = None, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi index dad1ed7a4fb5c..3341561681ffb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi @@ -45,4 +45,8 @@ class WeakSet(MutableSet[_T]): def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi index f5e98ef88bb9f..2f544bab641f4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi @@ -1,3 +1,5 @@ +"""Implementation module for Zstandard compression. +""" from _typeshed import ReadableBuffer from collections.abc import Mapping from compression.zstd import CompressionParameter, DecompressionParameter @@ -42,6 +44,18 @@ _ZstdCompressorFlushFrame: TypeAlias = Literal[2] @final class ZstdCompressor: + """Create a compressor object for compressing data incrementally. + + level + The compression level to use. Defaults to COMPRESSION_LEVEL_DEFAULT. + options + A dict object that contains advanced compression parameters. 
+ zstd_dict + A ZstdDict object, a pre-trained Zstandard dictionary. + +Thread-safe at method level. For one-shot compression, use the compress() +function instead. +""" CONTINUE: Final = 0 FLUSH_BLOCK: Final = 1 FLUSH_FRAME: Final = 2 @@ -50,48 +64,237 @@ class ZstdCompressor: ) -> Self: ... def compress( self, /, data: ReadableBuffer, mode: _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 0 - ) -> bytes: ... - def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: ... - def set_pledged_input_size(self, size: int | None, /) -> None: ... + ) -> bytes: + """Provide data to the compressor object. + + mode + Can be these 3 values ZstdCompressor.CONTINUE, + ZstdCompressor.FLUSH_BLOCK, ZstdCompressor.FLUSH_FRAME + +Return a chunk of compressed data if possible, or b'' otherwise. When you have +finished providing data to the compressor, call the flush() method to finish +the compression process. +""" + def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: + """Finish the compression process. + + mode + Can be these 2 values ZstdCompressor.FLUSH_FRAME, + ZstdCompressor.FLUSH_BLOCK + +Flush any remaining data left in internal buffers. Since Zstandard data +consists of one or more independent frames, the compressor object can still +be used after this method is called. +""" + def set_pledged_input_size(self, size: int | None, /) -> None: + """Set the uncompressed content size to be written into the frame header. + + size + The size of the uncompressed data to be provided to the compressor. + +This method can be used to ensure the header of the frame about to be written +includes the size of the data, unless the CompressionParameter.content_size_flag +is set to False. If last_mode != FLUSH_FRAME, then a RuntimeError is raised. + +It is important to ensure that the pledged data size matches the actual data +size. If they do not match the compressed output data may be corrupted and the +final chunk written may be lost. +""" @property - def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: ... + def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: + """The last mode used to this compressor object, its value can be .CONTINUE, +.FLUSH_BLOCK, .FLUSH_FRAME. Initialized to .FLUSH_FRAME. + +It can be used to get the current state of a compressor, such as, data +flushed, or a frame ended. +""" @final class ZstdDecompressor: + """Create a decompressor object for decompressing data incrementally. + + zstd_dict + A ZstdDict object, a pre-trained Zstandard dictionary. + options + A dict object that contains advanced decompression parameters. + +Thread-safe at method level. For one-shot decompression, use the decompress() +function instead. +""" def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> Self: ... - def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: + """Decompress *data*, returning uncompressed bytes if possible, or b'' otherwise. + + data + A bytes-like object, Zstandard data to be decompressed. + max_length + Maximum size of returned data. When it is negative, the size of + output buffer is unlimited. When it is nonnegative, returns at + most max_length bytes of decompressed data. 
+ +If *max_length* is nonnegative, returns at most *max_length* bytes of +decompressed data. If this limit is reached and further output can be +produced, *self.needs_input* will be set to ``False``. In this case, the next +call to *decompress()* may provide *data* as b'' to obtain more of the output. + +If all of the input data was decompressed and returned (either because this +was less than *max_length* bytes, or because *max_length* was negative), +*self.needs_input* will be set to True. + +Attempting to decompress data after the end of a frame is reached raises an +EOFError. Any data found after the end of the frame is ignored and saved in +the self.unused_data attribute. +""" @property - def eof(self) -> bool: ... + def eof(self) -> bool: + """True means the end of the first frame has been reached. If decompress data +after that, an EOFError exception will be raised. +""" @property - def needs_input(self) -> bool: ... + def needs_input(self) -> bool: + """If the max_length output limit in .decompress() method has been reached, +and the decompressor has (or may has) unconsumed input data, it will be set +to False. In this case, passing b'' to the .decompress() method may output +further data. +""" @property - def unused_data(self) -> bytes: ... + def unused_data(self) -> bytes: + """A bytes object of un-consumed input data. + +When ZstdDecompressor object stops after a frame is +decompressed, unused input data after the frame. Otherwise this will be b''. +""" @final class ZstdDict: + """Represents a Zstandard dictionary. + + dict_content + The content of a Zstandard dictionary as a bytes-like object. + is_raw + If true, perform no checks on *dict_content*, useful for some + advanced cases. Otherwise, check that the content represents + a Zstandard dictionary created by the zstd library or CLI. + +The dictionary can be used for compression or decompression, and can be shared +by multiple ZstdCompressor or ZstdDecompressor objects. +""" def __new__(cls, dict_content: bytes, /, *, is_raw: bool = False) -> Self: ... - def __len__(self, /) -> int: ... + def __len__(self, /) -> int: + """Return len(self). +""" @property - def as_digested_dict(self) -> tuple[Self, int]: ... + def as_digested_dict(self) -> tuple[Self, int]: + """Load as a digested dictionary to compressor. + +Pass this attribute as zstd_dict argument: +compress(dat, zstd_dict=zd.as_digested_dict) + +1. Some advanced compression parameters of compressor may be overridden + by parameters of digested dictionary. +2. ZstdDict has a digested dictionaries cache for each compression level. + It's faster when loading again a digested dictionary with the same + compression level. +3. No need to use this for decompression. +""" @property - def as_prefix(self) -> tuple[Self, int]: ... + def as_prefix(self) -> tuple[Self, int]: + """Load as a prefix to compressor/decompressor. + +Pass this attribute as zstd_dict argument: +compress(dat, zstd_dict=zd.as_prefix) + +1. Prefix is compatible with long distance matching, while dictionary is not. +2. It only works for the first frame, then the compressor/decompressor will + return to no prefix state. +3. When decompressing, must use the same prefix as when compressing. +""" @property - def as_undigested_dict(self) -> tuple[Self, int]: ... + def as_undigested_dict(self) -> tuple[Self, int]: + """Load as an undigested dictionary to compressor. + +Pass this attribute as zstd_dict argument: +compress(dat, zstd_dict=zd.as_undigested_dict) + +1. 
The advanced compression parameters of compressor will not be overridden. +2. Loading an undigested dictionary is costly. If load an undigested dictionary + multiple times, consider reusing a compressor object. +3. No need to use this for decompression. +""" @property - def dict_content(self) -> bytes: ... + def dict_content(self) -> bytes: + """The content of a Zstandard dictionary, as a bytes object. +""" @property - def dict_id(self) -> int: ... + def dict_id(self) -> int: + """The Zstandard dictionary, an int between 0 and 2**32. -class ZstdError(Exception): ... +A non-zero value represents an ordinary Zstandard dictionary, +conforming to the standardised format. + +A value of zero indicates a 'raw content' dictionary, +without any restrictions on format or content. +""" + +class ZstdError(Exception): + """An error occurred in the zstd library. +""" def finalize_dict( custom_dict_bytes: bytes, samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, compression_level: int, / -) -> bytes: ... -def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: ... -def get_frame_size(frame_buffer: ReadableBuffer) -> int: ... -def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: ... -def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: ... -def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: ... +) -> bytes: + """Finalize a Zstandard dictionary. + + custom_dict_bytes + Custom dictionary content. + samples_bytes + Concatenation of samples. + samples_sizes + Tuple of samples' sizes. + dict_size + The size of the dictionary. + compression_level + Optimize for a specific Zstandard compression level, 0 means default. +""" +def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: + """Get Zstandard frame infomation from a frame header. + + frame_buffer + A bytes-like object, containing the header of a Zstandard frame. +""" +def get_frame_size(frame_buffer: ReadableBuffer) -> int: + """Get the size of a Zstandard frame, including the header and optional checksum. + + frame_buffer + A bytes-like object, it should start from the beginning of a frame, + and contains at least one complete frame. +""" +def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: + """Get CompressionParameter/DecompressionParameter bounds. + + parameter + The parameter to get bounds. + is_compress + True for CompressionParameter, False for DecompressionParameter. +""" +def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: + """Set CompressionParameter and DecompressionParameter types for validity check. + + c_parameter_type + CompressionParameter IntEnum type object + d_parameter_type + DecompressionParameter IntEnum type object +""" +def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: + """Train a Zstandard dictionary on sample data. + + samples_bytes + Concatenation of samples. + samples_sizes + Tuple of samples' sizes. + dict_size + The size of the dictionary. 
+""" zstd_version: Final[str] zstd_version_number: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi index c8cd549e30eca..bbc3d793d9683 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi @@ -1,3 +1,5 @@ +"""Abstract Base Classes (ABCs) according to PEP 3119. +""" import _typeshed import sys from _typeshed import SupportsWrite @@ -12,6 +14,18 @@ _P = ParamSpec("_P") # These definitions have special processing in mypy class ABCMeta(type): + """Metaclass for defining Abstract Base Classes (ABCs). + +Use this metaclass to create an ABC. An ABC can be subclassed +directly, and then acts as a mix-in class. You can also register +unrelated concrete classes (even built-in classes) and unrelated +ABCs as 'virtual subclasses' -- these and their descendants will +be considered subclasses of the registering ABC by the built-in +issubclass() function, but the registering ABC won't show up in +their MRO (Method Resolution Order) nor will method +implementations defined by the registering ABC be callable (not +even via super()). +""" __abstractmethods__: frozenset[str] if sys.version_info >= (3, 11): def __new__( @@ -22,30 +36,113 @@ class ABCMeta(type): mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any ) -> _typeshed.Self: ... - def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... - def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... - def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... - def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: + """Override for isinstance(instance, cls). +""" + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: + """Override for issubclass(subclass, cls). +""" + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: + """Debug helper to print the ABC registry. +""" + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: + """Register a virtual subclass of an ABC. + +Returns the subclass, to allow usage as a class decorator. +""" + +def abstractmethod(funcobj: _FuncT) -> _FuncT: + """A decorator indicating abstract methods. + +Requires that the metaclass is ABCMeta or derived from it. A +class that has a metaclass derived from ABCMeta cannot be +instantiated unless all of its abstract methods are overridden. +The abstract methods can be called using any of the normal +'super' call mechanisms. abstractmethod() may be used to declare +abstract methods for properties and descriptors. + +Usage: -def abstractmethod(funcobj: _FuncT) -> _FuncT: ... + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, arg1, arg2, argN): + ... +""" @deprecated("Deprecated since Python 3.3. Use `@classmethod` stacked on top of `@abstractmethod` instead.") class abstractclassmethod(classmethod[_T, _P, _R_co]): + """A decorator indicating abstract classmethods. + +Deprecated, use 'classmethod' with 'abstractmethod' instead: + + class C(ABC): + @classmethod + @abstractmethod + def my_abstract_classmethod(cls, ...): + ... + +""" __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... @deprecated("Deprecated since Python 3.3. 
Use `@staticmethod` stacked on top of `@abstractmethod` instead.") class abstractstaticmethod(staticmethod[_P, _R_co]): + """A decorator indicating abstract staticmethods. + +Deprecated, use 'staticmethod' with 'abstractmethod' instead: + + class C(ABC): + @staticmethod + @abstractmethod + def my_abstract_staticmethod(...): + ... + +""" __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[_P, _R_co]) -> None: ... @deprecated("Deprecated since Python 3.3. Use `@property` stacked on top of `@abstractmethod` instead.") class abstractproperty(property): + """A decorator indicating abstract properties. + +Deprecated, use 'property' with 'abstractmethod' instead: + + class C(ABC): + @property + @abstractmethod + def my_abstract_property(self): + ... + +""" __isabstractmethod__: Literal[True] class ABC(metaclass=ABCMeta): + """Helper class that provides a standard way to create an ABC using +inheritance. +""" __slots__ = () -def get_cache_token() -> object: ... +def get_cache_token() -> object: + """Returns the current ABC cache token. + +The token is an opaque object (supporting equality testing) identifying the +current version of the ABC cache for virtual subclasses. The token changes +with every call to register() on any ABC. +""" if sys.version_info >= (3, 10): - def update_abstractmethods(cls: type[_T]) -> type[_T]: ... + def update_abstractmethods(cls: type[_T]) -> type[_T]: + """Recalculate the set of abstract methods of an abstract class. + +If a class has had one of its abstract methods implemented after the +class was created, the method will not be considered implemented until +this function is called. Alternatively, if a new abstract method has been +added to the class, it will only be considered an abstract method of the +class after this function is called. + +This function should be called before any use is made of the class, +usually in class decorators that add methods to the subject class. + +Returns cls, to allow usage as a class decorator. + +If cls is not an instance of ABCMeta, does nothing. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi index bfe12c6af2b0b..b163547755f2f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi @@ -1,3 +1,138 @@ +"""Stuff to parse AIFF-C and AIFF files. + +Unless explicitly stated otherwise, the description below is true +both for AIFF-C files and AIFF files. + +An AIFF-C file has the following structure. + + +-----------------+ + | FORM | + +-----------------+ + | <size> | + +----+------------+ + | | AIFC | + | +------------+ + | | <chunks> | + | | . | + | | . | + | | . | + +----+------------+ + +An AIFF file has the string "AIFF" instead of "AIFC". + +A chunk consists of an identifier (4 bytes) followed by a size (4 bytes, +big endian order), followed by the data. The size field does not include +the size of the 8 byte header. + +The following chunk types are recognized. + + FVER + <version number of AIFF-C defining document> (AIFF-C only). + MARK + <# of markers> (2 bytes) + list of markers: + <marker ID> (2 bytes, must be > 0) + <position> (4 bytes) + <marker name> ("pstring") + COMM + <# of channels> (2 bytes) + <# of sound frames> (4 bytes) + <size of the samples> (2 bytes) + <sampling frequency> (10 bytes, IEEE 80-bit extended + floating point) + in AIFF-C files only: + <compression type> (4 bytes) + <human-readable version of compression type> ("pstring") + SSND + <offset> (4 bytes, not used by this program) + <blocksize> (4 bytes, not used by this program) + <sound data> + +A pstring consists of 1 byte length, a string of characters, and 0 or 1 +byte pad to make the total length even. + +Usage.
+ +Reading AIFF files: + f = aifc.open(file, 'r') +where file is either the name of a file or an open file pointer. +The open file pointer must have methods read(), seek(), and close(). +In some types of audio files, if the setpos() method is not used, +the seek() method is not necessary. + +This returns an instance of a class with the following public methods: + getnchannels() -- returns number of audio channels (1 for + mono, 2 for stereo) + getsampwidth() -- returns sample width in bytes + getframerate() -- returns sampling frequency + getnframes() -- returns number of audio frames + getcomptype() -- returns compression type ('NONE' for AIFF files) + getcompname() -- returns human-readable version of + compression type ('not compressed' for AIFF files) + getparams() -- returns a namedtuple consisting of all of the + above in the above order + getmarkers() -- get the list of marks in the audio file or None + if there are no marks + getmark(id) -- get mark with the specified id (raises an error + if the mark does not exist) + readframes(n) -- returns at most n frames of audio + rewind() -- rewind to the beginning of the audio stream + setpos(pos) -- seek to the specified position + tell() -- return the current position + close() -- close the instance (make it unusable) +The position returned by tell(), the position given to setpos() and +the position of marks are all compatible and have nothing to do with +the actual position in the file. +The close() method is called automatically when the class instance +is destroyed. + +Writing AIFF files: + f = aifc.open(file, 'w') +where file is either the name of a file or an open file pointer. +The open file pointer must have methods write(), tell(), seek(), and +close(). + +This returns an instance of a class with the following public methods: + aiff() -- create an AIFF file (AIFF-C default) + aifc() -- create an AIFF-C file + setnchannels(n) -- set the number of channels + setsampwidth(n) -- set the sample width + setframerate(n) -- set the frame rate + setnframes(n) -- set the number of frames + setcomptype(type, name) + -- set the compression type and the + human-readable compression type + setparams(tuple) + -- set all parameters at once + setmark(id, pos, name) + -- add specified mark to the list of marks + tell() -- return current position in output file (useful + in combination with setmark()) + writeframesraw(data) + -- write audio frames without pathing up the + file header + writeframes(data) + -- write audio frames and patch up the file header + close() -- patch up the file header and close the + output file +You should set the parameters before the first writeframesraw or +writeframes. The total number of frames does not need to be set, +but when it is set to the correct value, the header does not have to +be patched up. +It is best to first set all parameters, perhaps possibly the +compression type, and then write audio frames using writeframesraw. +When all frames have been written, either call writeframes(b'') or +close() to patch up the sizes in the header. +Marks can be added anytime. If there are any marks, you must call +close() after all frames have been written. +The close() method is called automatically when the class instance +is destroyed. + +When a file is opened with the extension '.aiff', an AIFF file is +written, otherwise an AIFF-C file is written. This default can be +changed by calling aiff() or aifc() before the first writeframes or +writeframesraw. 
+""" from types import TracebackType from typing import IO, Any, Literal, NamedTuple, overload from typing_extensions import Self, TypeAlias @@ -7,6 +142,8 @@ __all__ = ["Error", "open"] class Error(Exception): ... class _aifc_params(NamedTuple): + """_aifc_params(nchannels, sampwidth, framerate, nframes, comptype, compname) +""" nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi index 3679dc29daaa0..0254954591fdd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi @@ -1,3 +1,5 @@ +"""Helpers for introspecting and wrapping annotations. +""" import sys from typing import Literal @@ -28,6 +30,17 @@ if sys.version_info >= (3, 14): @final class ForwardRef: + """Wrapper that holds a forward reference. + +Constructor arguments: +* arg: a string representing the code to be evaluated. +* module: the module where the forward reference was created. + Must be a string, not a module object. +* owner: The owning object (module, class, or function). +* is_argument: Does nothing, retained for compatibility. +* is_class: True if the forward reference was created in class scope. + +""" __slots__ = ( "__forward_is_argument__", "__forward_is_class__", @@ -57,7 +70,11 @@ if sys.version_info >= (3, 14): type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] | None = None, owner: object = None, format: Literal[Format.STRING], - ) -> str: ... + ) -> str: + """Evaluate the forward reference and return the value. + +If the forward reference cannot be evaluated, raise an exception. +""" @overload def evaluate( self, @@ -97,7 +114,11 @@ if sys.version_info >= (3, 14): def __ror__(self, other: Any) -> types.UnionType: ... @overload - def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: ... + def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: + """Call an evaluate function. Evaluate functions are normally generated for +the value of type aliases and the bounds, constraints, and defaults of +type parameter objects. +""" @overload def call_evaluate_function( evaluate: EvaluateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None @@ -107,14 +128,38 @@ if sys.version_info >= (3, 14): @overload def call_annotate_function( annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None - ) -> dict[str, str]: ... + ) -> dict[str, str]: + """Call an __annotate__ function. __annotate__ functions are normally +generated by the compiler to defer the evaluation of annotations. They +can be called with any of the format arguments in the Format enum, but +compiler-generated __annotate__ functions only support the VALUE format. +This function provides additional functionality to call __annotate__ +functions with the FORWARDREF and STRING formats. + +*annotate* must be an __annotate__ function, which takes a single argument +and returns a dict of annotations. + +*format* must be a member of the Format enum or one of the corresponding +integer values. + +*owner* can be the object that owns the annotations (i.e., the module, +class, or function that the __annotate__ function derives from). With the +FORWARDREF format, it is used to provide better evaluation capabilities +on the generated ForwardRef objects. 
+ +""" @overload def call_annotate_function( annotate: AnnotateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None ) -> dict[str, AnnotationForm | ForwardRef]: ... @overload def call_annotate_function(annotate: AnnotateFunc, format: Format, *, owner: object = None) -> dict[str, AnnotationForm]: ... - def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: ... + def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: + """Retrieve the annotate function from a class namespace dictionary. + +Return None if the namespace does not contain an annotate function. +This is useful in metaclass ``__new__`` methods to retrieve the annotate function. +""" @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -123,7 +168,57 @@ if sys.version_info >= (3, 14): locals: Mapping[str, object] | None = None, eval_str: bool = False, format: Literal[Format.STRING], - ) -> dict[str, str]: ... + ) -> dict[str, str]: + """Compute the annotations dict for an object. + +obj may be a callable, class, module, or other object with +__annotate__ or __annotations__ attributes. +Passing any other object raises TypeError. + +The *format* parameter controls the format in which annotations are returned, +and must be a member of the Format enum or its integer equivalent. +For the VALUE format, the __annotations__ is tried first; if it +does not exist, the __annotate__ function is called. The +FORWARDREF format uses __annotations__ if it exists and can be +evaluated, and otherwise falls back to calling the __annotate__ function. +The SOURCE format tries __annotate__ first, and falls back to +using __annotations__, stringified using annotations_to_string(). + +This function handles several details for you: + + * If eval_str is true, values of type str will + be un-stringized using eval(). This is intended + for use with stringized annotations + ("from __future__ import annotations"). + * If obj doesn't have an annotations dict, returns an + empty dict. (Functions and methods always have an + annotations dict; classes, modules, and other types of + callables may not.) + * Ignores inherited annotations on classes. If a class + doesn't have its own annotations dict, returns an empty dict. + * All accesses to object members and dict values are done + using getattr() and dict.get() for safety. + * Always, always, always returns a freshly-created dict. + +eval_str controls whether or not values of type str are replaced +with the result of calling eval() on those values: + + * If eval_str is true, eval() is called on values of type str. + * If eval_str is false (the default), values of type str are unchanged. + +globals and locals are passed in to eval(); see the documentation +for eval() for more information. If either globals or locals is +None, this function may replace that value with a context-specific +default, contingent on type(obj): + + * If obj is a module, globals defaults to obj.__dict__. + * If obj is a class, globals defaults to + sys.modules[obj.__module__].__dict__ and locals + defaults to the obj class namespace. + * If obj is a callable, globals defaults to obj.__globals__, + although if obj is a wrapped function (using + functools.update_wrapper()) it is first unwrapped. +""" @overload def get_annotations( obj: Any, @@ -142,5 +237,16 @@ if sys.version_info >= (3, 14): eval_str: bool = False, format: Format = Format.VALUE, # noqa: Y011 ) -> dict[str, AnnotationForm]: ... 
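A brief sketch of the `Format` values that the `get_annotations()` docstring above describes; it assumes Python 3.14 deferred-evaluation semantics, and the `Node` class and `Undefined` name are illustrative only:

    from annotationlib import Format, get_annotations

    class Node:
        value: int
        next: Node        # self-reference; not evaluated at class-creation time
        extra: Undefined  # a name that is never defined anywhere

    # FORWARDREF evaluates what it can and wraps unresolved names in ForwardRef.
    print(get_annotations(Node, format=Format.FORWARDREF))
    # STRING returns source-like strings without evaluating anything.
    print(get_annotations(Node, format=Format.STRING))
    # Format.VALUE (the default) would raise NameError here, because 'Undefined'
    # cannot be resolved.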
- def type_repr(value: object) -> str: ... - def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: ... + def type_repr(value: object) -> str: + """Convert a Python value to a format suitable for use with the STRING format. + +This is intended as a helper for tools that support the STRING format but do +not have access to the code that originally produced the annotations. It uses +repr() for most objects. + +""" + def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: + """Convert an annotation dict containing values to approximately the STRING format. + +Always returns a fresh a dictionary. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi index bce20e09250c9..9132c0a066260 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi @@ -1,3 +1,65 @@ +"""Command-line parsing library + +This module is an optparse-inspired command-line parsing library that: + + - handles both optional and positional arguments + - produces highly informative usage messages + - supports parsers that dispatch to sub-parsers + +The following is a simple usage example that sums integers from the +command-line and writes the result to a file:: + + parser = argparse.ArgumentParser( + description='sum the integers at the command line') + parser.add_argument( + 'integers', metavar='int', nargs='+', type=int, + help='an integer to be summed') + parser.add_argument( + '--log', + help='the file where the sum should be written') + args = parser.parse_args() + with (open(args.log, 'w') if args.log is not None + else contextlib.nullcontext(sys.stdout)) as log: + log.write('%s' % sum(args.integers)) + +The module contains the following public classes: + + - ArgumentParser -- The main entry point for command-line parsing. As the + example above shows, the add_argument() method is used to populate + the parser with actions for optional and positional arguments. Then + the parse_args() method is invoked to convert the args at the + command-line into an object with attributes. + + - ArgumentError -- The exception raised by ArgumentParser objects when + there are errors with the parser's actions. Errors raised while + parsing the command-line are caught by ArgumentParser and emitted + as command-line messages. + + - FileType -- A factory for defining types of files to be created. As the + example above shows, instances of FileType are typically passed as + the type= argument of add_argument() calls. Deprecated since + Python 3.14. + + - Action -- The base class for parser actions. Typically actions are + selected by passing strings like 'store_true' or 'append_const' to + the action= argument of add_argument(). However, for greater + customization of ArgumentParser actions, subclasses of Action may + be defined and passed as the action= argument. + + - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, + ArgumentDefaultsHelpFormatter -- Formatter classes which + may be passed as the formatter_class= argument to the + ArgumentParser constructor. HelpFormatter is the default, + RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser + not to change the formatting for help text, and + ArgumentDefaultsHelpFormatter adds information about argument defaults + to the help. + +All other classes in this module are considered implementation details. 
+(Also note that HelpFormatter and RawDescriptionHelpFormatter are only +considered public as object names -- the API of the formatter objects is +still considered an implementation detail.) +""" import sys from _typeshed import SupportsWrite, sentinel from collections.abc import Callable, Generator, Iterable, Sequence @@ -41,12 +103,24 @@ ZERO_OR_MORE: Final = "*" _UNRECOGNIZED_ARGS_ATTR: Final = "_unrecognized_args" # undocumented class ArgumentError(Exception): + """An error from creating or using an argument (optional or positional). + +The string value of this exception is the message, augmented with +information about the argument that caused it. +""" argument_name: str | None message: str def __init__(self, argument: Action | None, message: str) -> None: ... # undocumented class _AttributeHolder: + """Abstract base class that provides __repr__. + +The __repr__ method returns a string in the format:: + ClassName(attr=name, attr=name, ...) +The attributes are determined either by a class-level attribute, +'_kwarg_names', or by inspecting the instance __dict__. +""" def _get_kwargs(self) -> list[tuple[str, Any]]: ... def _get_args(self) -> list[Any]: ... @@ -90,7 +164,11 @@ class _ActionsContainer: dest: str | None = ..., version: str = ..., **kwargs: Any, - ) -> Action: ... + ) -> Action: + """ +add_argument(dest, ..., name=value, ...) +add_argument(option_string, option_string, ..., name=value, ...) +""" def add_argument_group( self, title: str | None = None, @@ -117,6 +195,29 @@ class _FormatterClass(Protocol): def __call__(self, *, prog: str) -> HelpFormatter: ... class ArgumentParser(_AttributeHolder, _ActionsContainer): + """Object for parsing command line strings into Python objects. + +Keyword Arguments: + - prog -- The name of the program (default: + ``os.path.basename(sys.argv[0])``) + - usage -- A usage message (default: auto-generated from arguments) + - description -- A description of what the program does + - epilog -- Text following the argument descriptions + - parents -- Parsers whose arguments should be copied into this one + - formatter_class -- HelpFormatter class for printing help messages + - prefix_chars -- Characters that prefix optional arguments + - fromfile_prefix_chars -- Characters that prefix files containing + additional arguments + - argument_default -- The default value for all arguments + - conflict_handler -- String indicating how to handle conflicts + - add_help -- Add a -h/-help option + - allow_abbrev -- Allow long options to be abbreviated unambiguously + - exit_on_error -- Determines whether or not ArgumentParser exits with + error info when an error occurs + - suggest_on_error - Enables suggestions for mistyped argument choices + and subparser names (default: ``False``) + - color - Allow color output in help messages (default: ``False``) +""" prog: str usage: str | None epilog: str | None @@ -221,7 +322,15 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def parse_known_args(self, *, namespace: _N) -> tuple[_N, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... - def error(self, message: str) -> NoReturn: ... + def error(self, message: str) -> NoReturn: + """error(message: string) + +Prints a usage message incorporating the message to stderr and +exits. + +If you override this in a subclass, it should not return -- it +should either exit or raise an exception. 
+""" @overload def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... @overload @@ -259,6 +368,11 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def _print_message(self, message: str, file: SupportsWrite[str] | None = None) -> None: ... class HelpFormatter: + """Formatter for generating usage messages and argument help strings. + +Only the name of this class is considered a public API. All the methods +provided by the class are considered an implementation detail. +""" # undocumented _prog: str _indent_increment: int @@ -319,12 +433,82 @@ class HelpFormatter: def _get_default_metavar_for_optional(self, action: Action) -> str: ... def _get_default_metavar_for_positional(self, action: Action) -> str: ... -class RawDescriptionHelpFormatter(HelpFormatter): ... -class RawTextHelpFormatter(RawDescriptionHelpFormatter): ... -class ArgumentDefaultsHelpFormatter(HelpFormatter): ... -class MetavarTypeHelpFormatter(HelpFormatter): ... +class RawDescriptionHelpFormatter(HelpFormatter): + """Help message formatter which retains any formatting in descriptions. + +Only the name of this class is considered a public API. All the methods +provided by the class are considered an implementation detail. +""" +class RawTextHelpFormatter(RawDescriptionHelpFormatter): + """Help message formatter which retains formatting of all help text. + +Only the name of this class is considered a public API. All the methods +provided by the class are considered an implementation detail. +""" +class ArgumentDefaultsHelpFormatter(HelpFormatter): + """Help message formatter which adds default values to argument help. + +Only the name of this class is considered a public API. All the methods +provided by the class are considered an implementation detail. +""" +class MetavarTypeHelpFormatter(HelpFormatter): + """Help message formatter which uses the argument 'type' as the default +metavar value (instead of the argument 'dest') + +Only the name of this class is considered a public API. All the methods +provided by the class are considered an implementation detail. +""" class Action(_AttributeHolder): + """Information about how to convert command line strings to Python objects. + +Action objects are used by an ArgumentParser to represent the information +needed to parse a single argument from one or more strings from the +command line. The keyword arguments to the Action constructor are also +all attributes of Action instances. + +Keyword Arguments: + + - option_strings -- A list of command-line option strings which + should be associated with this action. + + - dest -- The name of the attribute to hold the created object(s) + + - nargs -- The number of command-line arguments that should be + consumed. By default, one argument will be consumed and a single + value will be produced. Other values include: + - N (an integer) consumes N arguments (and produces a list) + - '?' consumes zero or one arguments + - '*' consumes zero or more arguments (and produces a list) + - '+' consumes one or more arguments (and produces a list) + Note that the difference between the default and nargs=1 is that + with the default, a single value will be produced, while with + nargs=1, a list containing a single value will be produced. + + - const -- The value to be produced if the option is specified and the + option uses an action that takes no values. + + - default -- The value to be produced if the option is not specified. 
+ + - type -- A callable that accepts a single string argument, and + returns the converted value. The standard Python types str, int, + float, and complex are useful examples of such callables. If None, + str is used. + + - choices -- A container of values that should be allowed. If not None, + after a command-line argument has been converted to the appropriate + type, an exception will be raised if it is not a member of this + collection. + + - required -- True if the action must always be specified at the + command line. This is only meaningful for optional command-line + arguments. + + - help -- The help string describing the argument. + + - metavar -- The name to be used for the option's argument with the + help string. If None, the 'dest' value will be used as the name. +""" option_strings: Sequence[str] dest: str nargs: int | str | None @@ -460,6 +644,11 @@ else: ) -> None: ... class Namespace(_AttributeHolder): + """Simple object for storing attributes. + +Implements equality by attribute names and values, and provides a simple +string representation. +""" def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... @@ -470,6 +659,21 @@ class Namespace(_AttributeHolder): if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14. Open files after parsing arguments instead.") class FileType: + """Deprecated factory for creating file object types + +Instances of FileType are typically passed as type= arguments to the +ArgumentParser add_argument() method. + +Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. +""" # undocumented _mode: str _bufsize: int @@ -482,6 +686,21 @@ if sys.version_info >= (3, 14): else: class FileType: + """Factory for creating file object types + +Instances of FileType are typically passed as type= arguments to the +ArgumentParser add_argument() method. + +Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. +""" # undocumented _mode: str _bufsize: int @@ -821,7 +1040,9 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): def _get_subactions(self) -> list[Action]: ... # undocumented -class ArgumentTypeError(Exception): ... +class ArgumentTypeError(Exception): + """An error from trying to convert a command line string to a type. +""" # undocumented def _get_action_name(argument: Action | None) -> str | None: ... 
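The `Action` docstring above lists the constructor keywords; the sketch below shows where `dest`, `values`, and `option_string` surface when subclassing. The `StoreUpper` class and the `--name` option are illustrative, not part of the stub:

    import argparse

    class StoreUpper(argparse.Action):
        """Store the supplied value upper-cased."""

        def __call__(self, parser, namespace, values, option_string=None):
            # `values` has already gone through type conversion; the result is
            # attached to the Namespace under self.dest, as described above.
            setattr(namespace, self.dest, values.upper())

    parser = argparse.ArgumentParser(prog="demo")
    parser.add_argument("--name", action=StoreUpper, help="name to store upper-cased")
    args = parser.parse_args(["--name", "ada"])
    assert args.name == "ADA"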
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi index a6b0344a1e2ea..299bbe7535924 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi @@ -1,3 +1,8 @@ +"""This module defines an object type which can efficiently represent +an array of basic values: characters, integers, floating-point +numbers. Arrays are sequence types and behave very much like lists, +except that the type of objects stored in them is constrained. +""" import sys from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Iterable, MutableSequence @@ -19,10 +24,71 @@ typecodes: str @disjoint_base class array(MutableSequence[_T]): + """array(typecode [, initializer]) -> array + +Return a new array whose items are restricted by typecode, and +initialized from the optional initializer value, which must be a list, +string or iterable over elements of the appropriate type. + +Arrays represent basic values and behave very much like lists, except +the type of objects stored in them is constrained. The type is specified +at object creation time by using a type code, which is a single character. +The following type codes are defined: + + Type code C Type Minimum size in bytes + 'b' signed integer 1 + 'B' unsigned integer 1 + 'u' Unicode character 2 (see note) + 'h' signed integer 2 + 'H' unsigned integer 2 + 'i' signed integer 2 + 'I' unsigned integer 2 + 'l' signed integer 4 + 'L' unsigned integer 4 + 'q' signed integer 8 (see note) + 'Q' unsigned integer 8 (see note) + 'f' floating-point 4 + 'd' floating-point 8 + +NOTE: The 'u' typecode corresponds to Python's unicode character. On +narrow builds this is 2-bytes on wide builds this is 4-bytes. + +NOTE: The 'q' and 'Q' type codes are only available if the platform +C compiler used to build Python supports 'long long', or, on Windows, +'__int64'. + +Methods: + +append() -- append a new item to the end of the array +buffer_info() -- return information giving the current memory info +byteswap() -- byteswap all the items of the array +count() -- return number of occurrences of an object +extend() -- extend array by appending multiple elements from an iterable +fromfile() -- read items from a file object +fromlist() -- append items from the list +frombytes() -- append items from the string +index() -- return index of first occurrence of an object +insert() -- insert a new item into the array at a provided position +pop() -- remove and return item (default last) +remove() -- remove first occurrence of an object +reverse() -- reverse the order of the items in the array +tofile() -- write all items to a file object +tolist() -- return the array converted to an ordinary list +tobytes() -- return the array converted to a string + +Attributes: + +typecode -- the typecode character used to create the array +itemsize -- the length in bytes of one array item +""" @property - def typecode(self) -> _TypeCode: ... + def typecode(self) -> _TypeCode: + """the typecode character used to create the array +""" @property - def itemsize(self) -> int: ... + def itemsize(self) -> int: + """the size, in bytes, of one array item +""" @overload def __new__( cls: type[array[int]], typecode: _IntTypeCode, initializer: bytes | bytearray | Iterable[int] = ..., / @@ -52,55 +118,139 @@ class array(MutableSequence[_T]): def __new__(cls, typecode: str, initializer: Iterable[_T], /) -> Self: ... 
@overload def __new__(cls, typecode: str, initializer: bytes | bytearray = ..., /) -> Self: ... - def append(self, v: _T, /) -> None: ... - def buffer_info(self) -> tuple[int, int]: ... - def byteswap(self) -> None: ... - def count(self, v: _T, /) -> int: ... - def extend(self, bb: Iterable[_T], /) -> None: ... - def frombytes(self, buffer: ReadableBuffer, /) -> None: ... - def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: ... - def fromlist(self, list: list[_T], /) -> None: ... - def fromunicode(self, ustr: str, /) -> None: ... + def append(self, v: _T, /) -> None: + """Append new value v to the end of the array. +""" + def buffer_info(self) -> tuple[int, int]: + """Return a tuple (address, length) giving the current memory address and the length in items of the buffer used to hold array's contents. + +The length should be multiplied by the itemsize attribute to calculate +the buffer length in bytes. +""" + def byteswap(self) -> None: + """Byteswap all items of the array. + +If the items in the array are not 1, 2, 4, or 8 bytes in size, RuntimeError is +raised. +""" + def count(self, v: _T, /) -> int: + """Return number of occurrences of v in the array. +""" + def extend(self, bb: Iterable[_T], /) -> None: + """Append items to the end of the array. +""" + def frombytes(self, buffer: ReadableBuffer, /) -> None: + """Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method. +""" + def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: + """Read n objects from the file object f and append them to the end of the array. +""" + def fromlist(self, list: list[_T], /) -> None: + """Append items to array from list. +""" + def fromunicode(self, ustr: str, /) -> None: + """Extends this array with data from the unicode string ustr. + +The array must be a unicode type array; otherwise a ValueError is raised. +Use array.frombytes(ustr.encode(...)) to append Unicode data to an array of +some other type. +""" if sys.version_info >= (3, 10): - def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: ... + def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: + """Return index of first occurrence of v in the array. + +Raise ValueError if the value is not present. +""" else: - def index(self, v: _T, /) -> int: ... # type: ignore[override] + def index(self, v: _T, /) -> int: # type: ignore[override] + """Return index of first occurrence of v in the array. +""" + + def insert(self, i: int, v: _T, /) -> None: + """Insert a new item v into the array before position i. +""" + def pop(self, i: int = -1, /) -> _T: + """Return the i-th element and delete it from the array. + +i defaults to -1. +""" + def remove(self, v: _T, /) -> None: + """Remove the first occurrence of v in the array. +""" + def tobytes(self) -> bytes: + """Convert the array to an array of machine values and return the bytes representation. +""" + def tofile(self, f: SupportsWrite[bytes], /) -> None: + """Write all items (as machine values) to the file object f. +""" + def tolist(self) -> list[_T]: + """Convert array to an ordinary list with the same items. +""" + def tounicode(self) -> str: + """Extends this array with data from the unicode string ustr. - def insert(self, i: int, v: _T, /) -> None: ... - def pop(self, i: int = -1, /) -> _T: ... - def remove(self, v: _T, /) -> None: ... - def tobytes(self) -> bytes: ... - def tofile(self, f: SupportsWrite[bytes], /) -> None: ... 
- def tolist(self) -> list[_T]: ... - def tounicode(self) -> str: ... +Convert the array to a unicode string. The array must be a unicode type array; +otherwise a ValueError is raised. Use array.tobytes().decode() to obtain a +unicode string from an array of some other type. +""" __hash__: ClassVar[None] # type: ignore[assignment] - def __contains__(self, value: object, /) -> bool: ... - def __len__(self) -> int: ... + def __contains__(self, value: object, /) -> bool: + """Return bool(key in self). +""" + def __len__(self) -> int: + """Return len(self). +""" @overload - def __getitem__(self, key: SupportsIndex, /) -> _T: ... + def __getitem__(self, key: SupportsIndex, /) -> _T: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> array[_T]: ... @overload # type: ignore[override] - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: slice, value: array[_T], /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... - def __add__(self, value: array[_T], /) -> array[_T]: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: + """Delete self[key]. +""" + def __add__(self, value: array[_T], /) -> array[_T]: + """Return self+value. +""" def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: array[_T], /) -> bool: ... def __gt__(self, value: array[_T], /) -> bool: ... - def __iadd__(self, value: array[_T], /) -> Self: ... # type: ignore[override] - def __imul__(self, value: int, /) -> Self: ... + def __iadd__(self, value: array[_T], /) -> Self: # type: ignore[override] + """Implement self+=value. +""" + def __imul__(self, value: int, /) -> Self: + """Implement self*=value. +""" def __le__(self, value: array[_T], /) -> bool: ... def __lt__(self, value: array[_T], /) -> bool: ... - def __mul__(self, value: int, /) -> array[_T]: ... - def __rmul__(self, value: int, /) -> array[_T]: ... - def __copy__(self) -> array[_T]: ... - def __deepcopy__(self, unused: Any, /) -> array[_T]: ... - def __buffer__(self, flags: int, /) -> memoryview: ... - def __release_buffer__(self, buffer: memoryview, /) -> None: ... + def __mul__(self, value: int, /) -> array[_T]: + """Return self*value. +""" + def __rmul__(self, value: int, /) -> array[_T]: + """Return value*self. +""" + def __copy__(self) -> array[_T]: + """Return a copy of the array. +""" + def __deepcopy__(self, unused: Any, /) -> array[_T]: + """Return a copy of the array. +""" + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object. +""" if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" ArrayType = array diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi index e66e609ee6645..5cf48716038b4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi @@ -1,3 +1,25 @@ +""" +The `ast` module helps Python applications to process trees of the Python +abstract syntax grammar. 
The abstract syntax itself might change with +each Python release; this module helps to find out programmatically what +the current grammar looks like and allows modifications of it. + +An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as +a flag to the `compile()` builtin function or by using the `parse()` +function from this module. The result will be a tree of objects whose +classes all inherit from `ast.AST`. + +A modified abstract syntax tree can be compiled into a Python code object +using the built-in `compile()` function. + +Additionally various helper functions are provided that make working with +the trees simpler. The main intention of the helper functions and this +module in general is to provide an easy to use interface for libraries +that work tightly with the python syntax (template engines for example). + +:copyright: Copyright 2008 by Armin Ronacher. +:license: Python License. +""" import ast import builtins import os @@ -40,7 +62,9 @@ if sys.version_info >= (3, 12): _field_types: ClassVar[dict[str, Any]] if sys.version_info >= (3, 14): - def __replace__(self) -> Self: ... + def __replace__(self) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" else: class AST: @@ -49,9 +73,16 @@ else: _attributes: ClassVar[tuple[str, ...]] _fields: ClassVar[tuple[str, ...]] -class mod(AST): ... +class mod(AST): + """mod = Module(stmt* body, type_ignore* type_ignores) + | Interactive(stmt* body) + | Expression(expr body) + | FunctionType(expr* argtypes, expr returns) +""" class Module(mod): + """Module(stmt* body, type_ignore* type_ignores) +""" if sys.version_info >= (3, 10): __match_args__ = ("body", "type_ignores") body: list[stmt] @@ -62,9 +93,13 @@ class Module(mod): def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ... + def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Interactive(mod): + """Interactive(stmt* body) +""" if sys.version_info >= (3, 10): __match_args__ = ("body",) body: list[stmt] @@ -74,18 +109,26 @@ class Interactive(mod): def __init__(self, body: list[stmt]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: list[stmt] = ...) -> Self: ... + def __replace__(self, *, body: list[stmt] = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Expression(mod): + """Expression(expr body) +""" if sys.version_info >= (3, 10): __match_args__ = ("body",) body: expr def __init__(self, body: expr) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, body: expr = ...) -> Self: ... + def __replace__(self, *, body: expr = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class FunctionType(mod): + """FunctionType(expr* argtypes, expr returns) +""" if sys.version_info >= (3, 10): __match_args__ = ("argtypes", "returns") argtypes: list[expr] @@ -99,9 +142,40 @@ class FunctionType(mod): def __init__(self, argtypes: list[expr], returns: expr) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ... + def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) 
-> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class stmt(AST): + """stmt = FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) + | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) + | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) + | Return(expr? value) + | Delete(expr* targets) + | Assign(expr* targets, expr value, string? type_comment) + | TypeAlias(expr name, type_param* type_params, expr value) + | AugAssign(expr target, operator op, expr value) + | AnnAssign(expr target, expr annotation, expr? value, int simple) + | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | While(expr test, stmt* body, stmt* orelse) + | If(expr test, stmt* body, stmt* orelse) + | With(withitem* items, stmt* body, string? type_comment) + | AsyncWith(withitem* items, stmt* body, string? type_comment) + | Match(expr subject, match_case* cases) + | Raise(expr? exc, expr? cause) + | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | Assert(expr test, expr? msg) + | Import(alias* names) + | ImportFrom(identifier? module, alias* names, int? level) + | Global(identifier* names) + | Nonlocal(identifier* names) + | Expr(expr value) + | Pass + | Break + | Continue +""" lineno: int col_offset: int end_lineno: int | None @@ -109,9 +183,13 @@ class stmt(AST): def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class FunctionDef(stmt): + """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) +""" if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -186,9 +264,13 @@ class FunctionDef(stmt): type_comment: str | None = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class AsyncFunctionDef(stmt): + """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) +""" if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -263,9 +345,13 @@ class AsyncFunctionDef(stmt): type_comment: str | None = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. 
+""" class ClassDef(stmt): + """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) +""" if sys.version_info >= (3, 12): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") elif sys.version_info >= (3, 10): @@ -321,18 +407,26 @@ class ClassDef(stmt): decorator_list: list[expr] = ..., type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Return(stmt): + """Return(expr? value) +""" if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Delete(stmt): + """Delete(expr* targets) +""" if sys.version_info >= (3, 10): __match_args__ = ("targets",) targets: list[expr] @@ -342,9 +436,13 @@ class Delete(stmt): def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Assign(stmt): + """Assign(expr* targets, expr value, string? type_comment) +""" if sys.version_info >= (3, 10): __match_args__ = ("targets", "value", "type_comment") targets: list[expr] @@ -367,10 +465,14 @@ class Assign(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 12): class TypeAlias(stmt): + """TypeAlias(expr name, type_param* type_params, expr value) +""" __match_args__ = ("name", "type_params", "value") name: Name type_params: list[type_param] @@ -397,9 +499,13 @@ if sys.version_info >= (3, 12): type_params: list[type_param] = ..., value: expr = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class AugAssign(stmt): + """AugAssign(expr target, operator op, expr value) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") target: Name | Attribute | Subscript @@ -417,9 +523,13 @@ class AugAssign(stmt): op: operator = ..., value: expr = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class AnnAssign(stmt): + """AnnAssign(expr target, expr annotation, expr? value, int simple) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") target: Name | Attribute | Subscript @@ -455,9 +565,13 @@ class AnnAssign(stmt): value: expr | None = ..., simple: int = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. 
+""" class For(stmt): + """For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -496,9 +610,13 @@ class For(stmt): orelse: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class AsyncFor(stmt): + """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -537,9 +655,13 @@ class AsyncFor(stmt): orelse: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class While(stmt): + """While(expr test, stmt* body, stmt* orelse) +""" if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -555,9 +677,13 @@ class While(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class If(stmt): + """If(expr test, stmt* body, stmt* orelse) +""" if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -573,9 +699,13 @@ class If(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class With(stmt): + """With(withitem* items, stmt* body, string? type_comment) +""" if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -602,9 +732,13 @@ class With(stmt): body: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class AsyncWith(stmt): + """AsyncWith(withitem* items, stmt* body, string? type_comment) +""" if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -631,9 +765,13 @@ class AsyncWith(stmt): body: list[stmt] = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Raise(stmt): + """Raise(expr? exc, expr? cause) +""" if sys.version_info >= (3, 10): __match_args__ = ("exc", "cause") exc: expr | None @@ -641,9 +779,13 @@ class Raise(stmt): def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. 
+""" class Try(stmt): + """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) +""" if sys.version_info >= (3, 10): __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] @@ -678,10 +820,14 @@ class Try(stmt): orelse: list[stmt] = ..., finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 11): class TryStar(stmt): + """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) +""" __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] handlers: list[ExceptHandler] @@ -715,9 +861,13 @@ if sys.version_info >= (3, 11): orelse: list[stmt] = ..., finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Assert(stmt): + """Assert(expr test, expr? msg) +""" if sys.version_info >= (3, 10): __match_args__ = ("test", "msg") test: expr @@ -725,9 +875,13 @@ class Assert(stmt): def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Import(stmt): + """Import(alias* names) +""" if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[alias] @@ -737,9 +891,13 @@ class Import(stmt): def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class ImportFrom(stmt): + """ImportFrom(identifier? module, alias* names, int? level) +""" if sys.version_info >= (3, 10): __match_args__ = ("module", "names", "level") module: str | None @@ -763,9 +921,13 @@ class ImportFrom(stmt): if sys.version_info >= (3, 14): def __replace__( self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Global(stmt): + """Global(identifier* names) +""" if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -775,9 +937,13 @@ class Global(stmt): def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Nonlocal(stmt): + """Nonlocal(identifier* names) +""" if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -787,22 +953,64 @@ class Nonlocal(stmt): def __init__(self, names: list[str], **kwargs: Unpack[_Attributes]) -> None: ... 
if sys.version_info >= (3, 14): - def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Expr(stmt): + """Expr(expr value) +""" if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" -class Pass(stmt): ... -class Break(stmt): ... -class Continue(stmt): ... +class Pass(stmt): + """Pass +""" +class Break(stmt): + """Break +""" +class Continue(stmt): + """Continue +""" class expr(AST): + """expr = BoolOp(boolop op, expr* values) + | NamedExpr(expr target, expr value) + | BinOp(expr left, operator op, expr right) + | UnaryOp(unaryop op, expr operand) + | Lambda(arguments args, expr body) + | IfExp(expr test, expr body, expr orelse) + | Dict(expr?* keys, expr* values) + | Set(expr* elts) + | ListComp(expr elt, comprehension* generators) + | SetComp(expr elt, comprehension* generators) + | DictComp(expr key, expr value, comprehension* generators) + | GeneratorExp(expr elt, comprehension* generators) + | Await(expr value) + | Yield(expr? value) + | YieldFrom(expr value) + | Compare(expr left, cmpop* ops, expr* comparators) + | Call(expr func, expr* args, keyword* keywords) + | FormattedValue(expr value, int conversion, expr? format_spec) + | Interpolation(expr value, constant str, int conversion, expr? format_spec) + | JoinedStr(expr* values) + | TemplateStr(expr* values) + | Constant(constant value, string? kind) + | Attribute(expr value, identifier attr, expr_context ctx) + | Subscript(expr value, expr slice, expr_context ctx) + | Starred(expr value, expr_context ctx) + | Name(identifier id, expr_context ctx) + | List(expr* elts, expr_context ctx) + | Tuple(expr* elts, expr_context ctx) + | Slice(expr? lower, expr? upper, expr? step) +""" lineno: int col_offset: int end_lineno: int | None @@ -810,9 +1018,13 @@ class expr(AST): def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class BoolOp(expr): + """BoolOp(boolop op, expr* values) +""" if sys.version_info >= (3, 10): __match_args__ = ("op", "values") op: boolop @@ -823,9 +1035,13 @@ class BoolOp(expr): def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. 
+""" class NamedExpr(expr): + """NamedExpr(expr target, expr value) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "value") target: Name @@ -833,9 +1049,13 @@ class NamedExpr(expr): def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class BinOp(expr): + """BinOp(expr left, operator op, expr right) +""" if sys.version_info >= (3, 10): __match_args__ = ("left", "op", "right") left: expr @@ -846,9 +1066,13 @@ class BinOp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class UnaryOp(expr): + """UnaryOp(unaryop op, expr operand) +""" if sys.version_info >= (3, 10): __match_args__ = ("op", "operand") op: unaryop @@ -856,9 +1080,13 @@ class UnaryOp(expr): def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Lambda(expr): + """Lambda(arguments args, expr body) +""" if sys.version_info >= (3, 10): __match_args__ = ("args", "body") args: arguments @@ -866,9 +1094,13 @@ class Lambda(expr): def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class IfExp(expr): + """IfExp(expr test, expr body, expr orelse) +""" if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -879,9 +1111,13 @@ class IfExp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Dict(expr): + """Dict(expr?* keys, expr* values) +""" if sys.version_info >= (3, 10): __match_args__ = ("keys", "values") keys: list[expr | None] @@ -894,9 +1130,13 @@ class Dict(expr): if sys.version_info >= (3, 14): def __replace__( self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Set(expr): + """Set(expr* elts) +""" if sys.version_info >= (3, 10): __match_args__ = ("elts",) elts: list[expr] @@ -906,9 +1146,13 @@ class Set(expr): def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... 
if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class ListComp(expr): + """ListComp(expr elt, comprehension* generators) +""" if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -921,9 +1165,13 @@ class ListComp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class SetComp(expr): + """SetComp(expr elt, comprehension* generators) +""" if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -936,9 +1184,13 @@ class SetComp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class DictComp(expr): + """DictComp(expr key, expr value, comprehension* generators) +""" if sys.version_info >= (3, 10): __match_args__ = ("key", "value", "generators") key: expr @@ -954,9 +1206,13 @@ class DictComp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class GeneratorExp(expr): + """GeneratorExp(expr elt, comprehension* generators) +""" if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -969,36 +1225,52 @@ class GeneratorExp(expr): if sys.version_info >= (3, 14): def __replace__( self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Await(expr): + """Await(expr value) +""" if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Yield(expr): + """Yield(expr? value) +""" if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class YieldFrom(expr): + """YieldFrom(expr value) +""" if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... 
+ def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Compare(expr): + """Compare(expr left, cmpop* ops, expr* comparators) +""" if sys.version_info >= (3, 10): __match_args__ = ("left", "ops", "comparators") left: expr @@ -1014,9 +1286,13 @@ class Compare(expr): if sys.version_info >= (3, 14): def __replace__( self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Call(expr): + """Call(expr func, expr* args, keyword* keywords) +""" if sys.version_info >= (3, 10): __match_args__ = ("func", "args", "keywords") func: expr @@ -1032,9 +1308,13 @@ class Call(expr): if sys.version_info >= (3, 14): def __replace__( self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class FormattedValue(expr): + """FormattedValue(expr value, int conversion, expr? format_spec) +""" if sys.version_info >= (3, 10): __match_args__ = ("value", "conversion", "format_spec") value: expr @@ -1045,9 +1325,13 @@ class FormattedValue(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class JoinedStr(expr): + """JoinedStr(expr* values) +""" if sys.version_info >= (3, 10): __match_args__ = ("values",) values: list[expr] @@ -1057,16 +1341,24 @@ class JoinedStr(expr): def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 14): class TemplateStr(expr): + """TemplateStr(expr* values) +""" __match_args__ = ("values",) values: list[expr] def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... - def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Interpolation(expr): + """Interpolation(expr value, constant str, int conversion, expr? format_spec) +""" __match_args__ = ("value", "str", "conversion", "format_spec") value: expr str: builtins.str @@ -1088,7 +1380,9 @@ if sys.version_info >= (3, 14): conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 10): from types import EllipsisType @@ -1099,6 +1393,8 @@ else: _ConstantValue: typing_extensions.TypeAlias = str | bytes | bool | int | float | complex | None | ellipsis # noqa: F821 class Constant(expr): + """Constant(constant value, string? 
kind) +""" if sys.version_info >= (3, 10): __match_args__ = ("value", "kind") value: _ConstantValue @@ -1107,13 +1403,17 @@ class Constant(expr): # Aliases for value, for backwards compatibility @property @deprecated("Removed in Python 3.14. Use `value` instead.") - def n(self) -> _ConstantValue: ... + def n(self) -> _ConstantValue: + """Deprecated. Use value instead. +""" @n.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def n(self, value: _ConstantValue) -> None: ... @property @deprecated("Removed in Python 3.14. Use `value` instead.") - def s(self) -> _ConstantValue: ... + def s(self) -> _ConstantValue: + """Deprecated. Use value instead. +""" @s.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def s(self, value: _ConstantValue) -> None: ... @@ -1121,9 +1421,13 @@ class Constant(expr): def __init__(self, value: _ConstantValue, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Attribute(expr): + """Attribute(expr value, identifier attr, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("value", "attr", "ctx") value: expr @@ -1134,9 +1438,13 @@ class Attribute(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., attr: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Subscript(expr): + """Subscript(expr value, expr slice, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("value", "slice", "ctx") value: expr @@ -1147,9 +1455,13 @@ class Subscript(expr): if sys.version_info >= (3, 14): def __replace__( self, *, value: expr = ..., slice: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Starred(expr): + """Starred(expr value, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("value", "ctx") value: expr @@ -1157,9 +1469,13 @@ class Starred(expr): def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Name(expr): + """Name(identifier id, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("id", "ctx") id: str @@ -1167,9 +1483,13 @@ class Name(expr): def __init__(self, id: str, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. 
+""" class List(expr): + """List(expr* elts, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1180,9 +1500,13 @@ class List(expr): def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Tuple(expr): + """Tuple(expr* elts, expr_context ctx) +""" if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1194,12 +1518,18 @@ class Tuple(expr): def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" @deprecated("Deprecated since Python 3.9.") -class slice(AST): ... +class slice(AST): + """Deprecated AST node class. +""" class Slice(expr): + """Slice(expr? lower, expr? upper, expr? step) +""" if sys.version_info >= (3, 10): __match_args__ = ("lower", "upper", "step") lower: expr | None @@ -1212,68 +1542,158 @@ class Slice(expr): if sys.version_info >= (3, 14): def __replace__( self, *, lower: expr | None = ..., upper: expr | None = ..., step: expr | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" @deprecated("Deprecated since Python 3.9. Use `ast.Tuple` instead.") class ExtSlice(slice): + """Deprecated AST node class. Use ast.Tuple instead. +""" def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_Attributes]) -> Tuple: ... # type: ignore[misc] @deprecated("Deprecated since Python 3.9. Use the index value directly instead.") class Index(slice): + """Deprecated AST node class. Use the index value directly instead. +""" def __new__(cls, value: expr, **kwargs: Unpack[_Attributes]) -> expr: ... # type: ignore[misc] -class expr_context(AST): ... +class expr_context(AST): + """expr_context = Load | Store | Del +""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugLoad(expr_context): ... +class AugLoad(expr_context): + """Deprecated AST node class. Unused in Python 3. +""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class AugStore(expr_context): ... +class AugStore(expr_context): + """Deprecated AST node class. Unused in Python 3. +""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Param(expr_context): ... +class Param(expr_context): + """Deprecated AST node class. Unused in Python 3. +""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") -class Suite(mod): ... - -class Load(expr_context): ... -class Store(expr_context): ... -class Del(expr_context): ... -class boolop(AST): ... -class And(boolop): ... -class Or(boolop): ... -class operator(AST): ... -class Add(operator): ... -class Sub(operator): ... -class Mult(operator): ... -class MatMult(operator): ... -class Div(operator): ... -class Mod(operator): ... 
-class Pow(operator): ... -class LShift(operator): ... -class RShift(operator): ... -class BitOr(operator): ... -class BitXor(operator): ... -class BitAnd(operator): ... -class FloorDiv(operator): ... -class unaryop(AST): ... -class Invert(unaryop): ... -class Not(unaryop): ... -class UAdd(unaryop): ... -class USub(unaryop): ... -class cmpop(AST): ... -class Eq(cmpop): ... -class NotEq(cmpop): ... -class Lt(cmpop): ... -class LtE(cmpop): ... -class Gt(cmpop): ... -class GtE(cmpop): ... -class Is(cmpop): ... -class IsNot(cmpop): ... -class In(cmpop): ... -class NotIn(cmpop): ... +class Suite(mod): + """Deprecated AST node class. Unused in Python 3. +""" + +class Load(expr_context): + """Load +""" +class Store(expr_context): + """Store +""" +class Del(expr_context): + """Del +""" +class boolop(AST): + """boolop = And | Or +""" +class And(boolop): + """And +""" +class Or(boolop): + """Or +""" +class operator(AST): + """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv +""" +class Add(operator): + """Add +""" +class Sub(operator): + """Sub +""" +class Mult(operator): + """Mult +""" +class MatMult(operator): + """MatMult +""" +class Div(operator): + """Div +""" +class Mod(operator): + """Mod +""" +class Pow(operator): + """Pow +""" +class LShift(operator): + """LShift +""" +class RShift(operator): + """RShift +""" +class BitOr(operator): + """BitOr +""" +class BitXor(operator): + """BitXor +""" +class BitAnd(operator): + """BitAnd +""" +class FloorDiv(operator): + """FloorDiv +""" +class unaryop(AST): + """unaryop = Invert | Not | UAdd | USub +""" +class Invert(unaryop): + """Invert +""" +class Not(unaryop): + """Not +""" +class UAdd(unaryop): + """UAdd +""" +class USub(unaryop): + """USub +""" +class cmpop(AST): + """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn +""" +class Eq(cmpop): + """Eq +""" +class NotEq(cmpop): + """NotEq +""" +class Lt(cmpop): + """Lt +""" +class LtE(cmpop): + """LtE +""" +class Gt(cmpop): + """Gt +""" +class GtE(cmpop): + """GtE +""" +class Is(cmpop): + """Is +""" +class IsNot(cmpop): + """IsNot +""" +class In(cmpop): + """In +""" +class NotIn(cmpop): + """NotIn +""" class comprehension(AST): + """comprehension(expr target, expr iter, expr* ifs, int is_async) +""" if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "ifs", "is_async") target: expr @@ -1289,9 +1709,13 @@ class comprehension(AST): def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ... + def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class excepthandler(AST): + """excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body) +""" lineno: int col_offset: int end_lineno: int | None @@ -1301,9 +1725,13 @@ class excepthandler(AST): if sys.version_info >= (3, 14): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ... - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class ExceptHandler(excepthandler): + """ExceptHandler(expr? type, identifier? 
name, stmt* body) +""" if sys.version_info >= (3, 10): __match_args__ = ("type", "name", "body") type: expr | None @@ -1324,9 +1752,13 @@ class ExceptHandler(excepthandler): if sys.version_info >= (3, 14): def __replace__( self, *, type: expr | None = ..., name: str | None = ..., body: list[stmt] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class arguments(AST): + """arguments(arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults) +""" if sys.version_info >= (3, 10): __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") posonlyargs: list[arg] @@ -1395,9 +1827,13 @@ class arguments(AST): kw_defaults: list[expr | None] = ..., kwarg: arg | None = ..., defaults: list[expr] = ..., - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class arg(AST): + """arg(identifier arg, expr? annotation, string? type_comment) +""" lineno: int col_offset: int end_lineno: int | None @@ -1414,9 +1850,13 @@ class arg(AST): if sys.version_info >= (3, 14): def __replace__( self, *, arg: str = ..., annotation: expr | None = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class keyword(AST): + """keyword(identifier? arg, expr value) +""" lineno: int col_offset: int end_lineno: int | None @@ -1431,9 +1871,13 @@ class keyword(AST): def __init__(self, arg: str | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class alias(AST): + """alias(identifier name, identifier? asname) +""" name: str asname: str | None if sys.version_info >= (3, 10): @@ -1449,9 +1893,13 @@ class alias(AST): def __init__(self, name: str, asname: str | None = None) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ... + def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class withitem(AST): + """withitem(expr context_expr, expr? optional_vars) +""" if sys.version_info >= (3, 10): __match_args__ = ("context_expr", "optional_vars") context_expr: expr @@ -1459,10 +1907,21 @@ class withitem(AST): def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ... + def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 10): class pattern(AST): + """pattern = MatchValue(expr value) + | MatchSingleton(constant value) + | MatchSequence(pattern* patterns) + | MatchMapping(expr* keys, pattern* patterns, identifier? 
rest) + | MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) + | MatchStar(identifier? name) + | MatchAs(pattern? pattern, identifier? name) + | MatchOr(pattern* patterns) +""" lineno: int col_offset: int end_lineno: int @@ -1472,9 +1931,13 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ... - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class match_case(AST): + """match_case(pattern pattern, expr? guard, stmt* body) +""" __match_args__ = ("pattern", "guard", "body") pattern: ast.pattern guard: expr | None @@ -1488,9 +1951,13 @@ if sys.version_info >= (3, 10): def __init__(self, pattern: ast.pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ... + def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class Match(stmt): + """Match(expr subject, match_case* cases) +""" __match_args__ = ("subject", "cases") subject: expr cases: list[match_case] @@ -1502,25 +1969,37 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__( self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchValue(pattern): + """MatchValue(expr value) +""" __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchSingleton(pattern): + """MatchSingleton(constant value) +""" __match_args__ = ("value",) value: bool | None def __init__(self, value: bool | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchSequence(pattern): + """MatchSequence(pattern* patterns) +""" __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -1529,9 +2008,13 @@ if sys.version_info >= (3, 10): def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchMapping(pattern): + """MatchMapping(expr* keys, pattern* patterns, identifier? 
rest) +""" __match_args__ = ("keys", "patterns", "rest") keys: list[expr] patterns: list[pattern] @@ -1557,9 +2040,13 @@ if sys.version_info >= (3, 10): patterns: list[pattern] = ..., rest: str | None = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchClass(pattern): + """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) +""" __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") cls: expr patterns: list[pattern] @@ -1593,17 +2080,25 @@ if sys.version_info >= (3, 10): kwd_attrs: list[str] = ..., kwd_patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchStar(pattern): + """MatchStar(identifier? name) +""" __match_args__ = ("name",) name: str | None def __init__(self, name: str | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchAs(pattern): + """MatchAs(pattern? pattern, identifier? name) +""" __match_args__ = ("pattern", "name") pattern: ast.pattern | None name: str | None @@ -1614,9 +2109,13 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__( self, *, pattern: ast.pattern | None = ..., name: str | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class MatchOr(pattern): + """MatchOr(pattern* patterns) +""" __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -1625,11 +2124,17 @@ if sys.version_info >= (3, 10): def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" -class type_ignore(AST): ... +class type_ignore(AST): + """type_ignore = TypeIgnore(int lineno, string tag) +""" class TypeIgnore(type_ignore): + """TypeIgnore(int lineno, string tag) +""" if sys.version_info >= (3, 10): __match_args__ = ("lineno", "tag") lineno: int @@ -1637,10 +2142,16 @@ class TypeIgnore(type_ignore): def __init__(self, lineno: int, tag: str) -> None: ... if sys.version_info >= (3, 14): - def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ... + def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 12): class type_param(AST): + """type_param = TypeVar(identifier name, expr? bound, expr? default_value) + | ParamSpec(identifier name, expr? default_value) + | TypeVarTuple(identifier name, expr? default_value) +""" lineno: int col_offset: int end_lineno: int @@ -1648,9 +2159,13 @@ if sys.version_info >= (3, 12): def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ... 
if sys.version_info >= (3, 14): - def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ... + def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class TypeVar(type_param): + """TypeVar(identifier name, expr? bound, expr? default_value) +""" if sys.version_info >= (3, 13): __match_args__ = ("name", "bound", "default_value") else: @@ -1673,9 +2188,13 @@ if sys.version_info >= (3, 12): bound: expr | None = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]], - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class ParamSpec(type_param): + """ParamSpec(identifier name, expr? default_value) +""" if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -1690,9 +2209,13 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" class TypeVarTuple(type_param): + """TypeVarTuple(identifier name, expr? default_value) +""" if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -1707,7 +2230,9 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] - ) -> Self: ... + ) -> Self: + """Return a copy of the AST node with new values for the specified fields. +""" if sys.version_info >= (3, 14): @type_check_only @@ -1721,22 +2246,32 @@ else: if sys.version_info < (3, 14): @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Num(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead +""" def __new__(cls, n: complex, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Str(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead +""" def __new__(cls, s: str, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Bytes(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead +""" def __new__(cls, s: bytes, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class NameConstant(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead +""" def __new__(cls, value: _ConstantValue, kind: str | None, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Ellipsis(Constant, metaclass=_ABC): + """Deprecated AST node class. Use ast.Constant instead +""" def __new__(cls, **kwargs: Unpack[_Attributes]) -> Constant: ... 
# type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] # everything below here is defined in ast.py @@ -1753,7 +2288,12 @@ if sys.version_info >= (3, 13): type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, optimize: Literal[-1, 0, 1, 2] = -1, - ) -> _T: ... + ) -> _T: + """ +Parse the source into an AST node. +Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). +Pass type_comments=True to get back type comments where the syntax allows. +""" @overload def parse( source: str | ReadableBuffer, @@ -1841,7 +2381,12 @@ else: *, type_comments: bool = False, feature_version: None | int | tuple[int, int] = None, - ) -> _T: ... + ) -> _T: + """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ @overload def parse( source: str | ReadableBuffer, @@ -1912,7 +2457,15 @@ else: feature_version: None | int | tuple[int, int] = None, ) -> mod: ... -def literal_eval(node_or_string: str | AST) -> Any: ... +def literal_eval(node_or_string: str | AST) -> Any: + """ +Evaluate an expression node or a string containing only a Python +expression. The string or node provided may only consist of the following +Python literal structures: strings, bytes, numbers, tuples, lists, dicts, +sets, booleans, and None. + +Caution: A complex expression can overflow the C stack and cause a crash. +""" if sys.version_info >= (3, 13): def dump( @@ -1922,30 +2475,129 @@ if sys.version_info >= (3, 13): *, indent: int | str | None = None, show_empty: bool = False, - ) -> str: ... + ) -> str: + """ +Return a formatted dump of the tree in node. This is mainly useful for +debugging purposes. If annotate_fields is true (by default), +the returned string will show the names and the values for fields. +If annotate_fields is false, the result string will be more compact by +omitting unambiguous field names. Attributes such as line +numbers and column offsets are not dumped by default. If this is wanted, +include_attributes can be set to true. If indent is a non-negative +integer or string, then the tree will be pretty-printed with that indent +level. None (the default) selects the single line representation. +If show_empty is False, then empty lists and fields that are None +will be omitted from the output for better readability. +""" else: def dump( node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None - ) -> str: ... - -def copy_location(new_node: _T, old_node: AST) -> _T: ... -def fix_missing_locations(node: _T) -> _T: ... -def increment_lineno(node: _T, n: int = 1) -> _T: ... -def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... -def iter_child_nodes(node: AST) -> Iterator[AST]: ... -def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... -def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... -def walk(node: AST) -> Iterator[AST]: ... + ) -> str: + """ + Return a formatted dump of the tree in node. This is mainly useful for + debugging purposes. If annotate_fields is true (by default), + the returned string will show the names and the values for fields. + If annotate_fields is false, the result string will be more compact by + omitting unambiguous field names. Attributes such as line + numbers and column offsets are not dumped by default. 
If this is wanted, + include_attributes can be set to true. If indent is a non-negative + integer or string, then the tree will be pretty-printed with that indent + level. None (the default) selects the single line representation. + """ + +def copy_location(new_node: _T, old_node: AST) -> _T: + """ +Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` +attributes) from *old_node* to *new_node* if possible, and return *new_node*. +""" +def fix_missing_locations(node: _T) -> _T: + """ +When you compile a node tree with compile(), the compiler expects lineno and +col_offset attributes for every node that supports them. This is rather +tedious to fill in for generated nodes, so this helper adds these attributes +recursively where not already set, by setting them to the values of the +parent node. It works recursively starting at *node*. +""" +def increment_lineno(node: _T, n: int = 1) -> _T: + """ +Increment the line number and end line number of each node in the tree +starting at *node* by *n*. This is useful to "move code" to a different +location in a file. +""" +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: + """ +Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` +that is present on *node*. +""" +def iter_child_nodes(node: AST) -> Iterator[AST]: + """ +Yield all direct child nodes of *node*, that is, all fields that are nodes +and all items of fields that are lists of nodes. +""" +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: + """ +Return the docstring for the given node or None if no docstring can +be found. If the node provided does not have docstrings a TypeError +will be raised. + +If *clean* is `True`, all tabs are expanded to spaces and any whitespace +that can be uniformly removed from the second line onwards is removed. +""" +def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: + """Get source code segment of the *source* that generated *node*. + +If some location information (`lineno`, `end_lineno`, `col_offset`, +or `end_col_offset`) is missing, return None. + +If *padded* is `True`, the first line of a multi-line statement will +be padded with spaces to match its original position. +""" +def walk(node: AST) -> Iterator[AST]: + """ +Recursively yield all descendant nodes in the tree starting at *node* +(including *node* itself), in no specified order. This is useful if you +only want to modify nodes in place and don't care about the context. +""" if sys.version_info >= (3, 14): - def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: ... + def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: + """Recursively compares two ASTs. + +compare_attributes affects whether AST attributes are considered +in the comparison. If compare_attributes is False (default), then +attributes are ignored. Otherwise they must all be equal. This +option is useful to check whether the ASTs are structurally equal but +might differ in whitespace or similar details. +""" class NodeVisitor: + """ +A node visitor base class that walks the abstract syntax tree and calls a +visitor function for every node found. This function may return a value +which is forwarded by the `visit` method. + +This class is meant to be subclassed, with the subclass adding visitor +methods. + +Per default the visitor functions for the nodes are ``'visit_'`` + +class name of the node. 
So a `TryFinally` node visit function would +be `visit_TryFinally`. This behavior can be changed by overriding +the `visit` method. If no visitor function exists for a node +(return value `None`) the `generic_visit` visitor is used instead. + +Don't use the `NodeVisitor` if you want to apply changes to nodes during +traversing. For this a special visitor exists (`NodeTransformer`) that +allows modifications. +""" # All visit methods below can be overwritten by subclasses and return an # arbitrary value, which is passed to the caller. - def visit(self, node: AST) -> Any: ... - def generic_visit(self, node: AST) -> Any: ... + def visit(self, node: AST) -> Any: + """Visit a node. +""" + def generic_visit(self, node: AST) -> Any: + """Called if no explicit visitor function exists for a node. +""" # The following visit methods are not defined on NodeVisitor, but can # be implemented by subclasses and are called during a visit if defined. def visit_Module(self, node: Module) -> Any: ... @@ -2085,6 +2737,40 @@ class NodeVisitor: def visit_Ellipsis(self, node: Ellipsis) -> Any: ... # type: ignore[deprecated] class NodeTransformer(NodeVisitor): + """ +A :class:`NodeVisitor` subclass that walks the abstract syntax tree and +allows modification of nodes. + +The `NodeTransformer` will walk the AST and use the return value of the +visitor methods to replace or remove the old node. If the return value of +the visitor method is ``None``, the node will be removed from its location, +otherwise it is replaced with the return value. The return value may be the +original node in which case no replacement takes place. + +Here is an example transformer that rewrites all occurrences of name lookups +(``foo``) to ``data['foo']``:: + + class RewriteName(NodeTransformer): + + def visit_Name(self, node): + return Subscript( + value=Name(id='data', ctx=Load()), + slice=Constant(value=node.id), + ctx=node.ctx + ) + +Keep in mind that if the node you're operating on has child nodes you must +either transform the child nodes yourself or call the :meth:`generic_visit` +method for the node first. + +For nodes that were part of a collection of statements (that applies to all +statement nodes), the visitor may also return a list of nodes rather than +just a single node. + +Usually you use the transformer like this:: + + node = YourTransformer().visit(node) +""" def generic_visit(self, node: AST) -> AST: ... # TODO: Override the visit_* methods with better return types. # The usual return type is AST | None, but Iterable[AST] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi index 79a70d1c1ec8d..e012aa2771ebc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi @@ -1,3 +1,23 @@ +"""A class supporting chat-style (command/response) protocols. + +This class adds support for 'chat' style protocols - where one side +sends a 'command', and the other sends a response (examples would be +the common internet protocols - smtp, nntp, ftp, etc..). + +The handle_read() method looks at the input stream for the current +'terminator' (usually '\\r\\n' for single-line responses, '\\r\\n.\\r\\n' +for multi-line output), calling self.found_terminator() on its +receipt. + +for example: +Say you build an async nntp client using this class. At the start +of the connection, you'll have self.terminator set to '\\r\\n', in +order to process the single-line greeting. 
Just before issuing a +'LIST' command you'll set it to '\\r\\n.\\r\\n'. The output of the LIST +command will be accumulated (using your own 'collect_incoming_data' +method) up to the terminator, and then control will be returned to +you - by calling your self.found_terminator() method. +""" import asyncore from abc import abstractmethod @@ -6,16 +26,25 @@ class simple_producer: def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): + """This is an abstract class. You must derive from this class, and add + the two methods collect_incoming_data() and found_terminator() +""" ac_in_buffer_size: int ac_out_buffer_size: int @abstractmethod def collect_incoming_data(self, data: bytes) -> None: ... @abstractmethod def found_terminator(self) -> None: ... - def set_terminator(self, term: bytes | int | None) -> None: ... + def set_terminator(self, term: bytes | int | None) -> None: + """Set the input delimiter. + + Can be a fixed string of any length, an integer, or None. + """ def get_terminator(self) -> bytes | int | None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... - def close_when_done(self) -> None: ... + def close_when_done(self) -> None: + """automatically close this channel once the outgoing queue is empty +""" def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi index 23cf57aaac335..cb39de1059ff1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi @@ -1,3 +1,5 @@ +"""The asyncio package, tracking PEP 3156. +""" # This condition is so big, it's clearer to keep to platform condition in two blocks # Can't NOQA on a specific line: https://github.com/plinss/flake8-noqa/issues/22 import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi index 1f493210d6655..34af33ed6366f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi @@ -1,3 +1,17 @@ +"""Base implementation of event loop. + +The event loop can be broken up into a multiplexer (the part +responsible for notifying us of I/O events) and the event loop proper, +which wraps a multiplexer with functionality for scheduling callbacks, +immediately or at a given time in the future. + +Whenever a public API takes a callback, subsequent positional +arguments will be passed to the callback if/when it is called. This +avoids the proliferation of trivial lambdas implementing closures. +Keyword arguments for the callback are not supported; this is a +conscious design decision, leaving the door open for keyword arguments +to modify the meaning of the API call itself. +""" import ssl import sys from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer @@ -59,41 +73,145 @@ class Server(AbstractServer): @property def sockets(self) -> tuple[socket, ...]: ... def close(self) -> None: ... - async def wait_closed(self) -> None: ... + async def wait_closed(self) -> None: + """Wait until server is closed and all connections are dropped. + +- If the server is not closed, wait. +- If it is closed, but there are still active connections, wait. 
+ +Anyone waiting here will be unblocked once both conditions +(server is closed and all connections have been dropped) +have become true, in either order. + +Historical note: In 3.11 and before, this was broken, returning +immediately if the server was already closed, even if there +were still active connections. An attempted fix in 3.12.0 was +still broken, returning immediately if the server was still +open and there were no active connections. Hopefully in 3.12.1 +we have it right. +""" class BaseEventLoop(AbstractEventLoop): - def run_forever(self) -> None: ... - def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... - def stop(self) -> None: ... - def is_running(self) -> bool: ... - def is_closed(self) -> bool: ... - def close(self) -> None: ... - async def shutdown_asyncgens(self) -> None: ... + def run_forever(self) -> None: + """Run until stop() is called. +""" + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: + """Run until the Future is done. + +If the argument is a coroutine, it is wrapped in a Task. + +WARNING: It would be disastrous to call run_until_complete() +with the same coroutine twice -- it would wrap it in two +different Tasks and that can't be good. + +Return the Future's result, or raise its exception. +""" + def stop(self) -> None: + """Stop running the event loop. + +Every callback already scheduled will still run. This simply informs +run_forever to stop looping after a complete iteration. +""" + def is_running(self) -> bool: + """Returns True if the event loop is running. +""" + def is_closed(self) -> bool: + """Returns True if the event loop was closed. +""" + def close(self) -> None: + """Close the event loop. + +This clears the queues and shuts down the executor, +but does not wait for the executor to finish. + +The event loop must not be running. +""" + async def shutdown_asyncgens(self) -> None: + """Shutdown all active asynchronous generators. +""" # Methods scheduling callbacks. All these return Handles. def call_soon( self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: ... + ) -> Handle: + """Arrange for a callback to be called as soon as possible. + +This operates as a FIFO queue: callbacks are called in the +order in which they are registered. Each callback will be +called exactly once. + +Any positional arguments after the callback will be passed to +the callback when it is called. +""" def call_later( self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: ... + ) -> TimerHandle: + """Arrange for a callback to be called at a given time. + +Return a Handle: an opaque object with a cancel() method that +can be used to cancel the call. + +The delay can be an int or float, expressed in seconds. It is +always relative to the current time. + +Each callback will be called exactly once. If two callbacks +are scheduled for exactly the same time, it is undefined which +will be called first. + +Any positional arguments after the callback will be passed to +the callback when it is called. +""" def call_at( self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> TimerHandle: ... - def time(self) -> float: ... + ) -> TimerHandle: + """Like call_later(), but uses an absolute time. + +Absolute time corresponds to the event loop's time() method. +""" + def time(self) -> float: + """Return the time according to the event loop's clock. 
+ +This is a float expressed in seconds since an epoch, but the +epoch, precision, accuracy and drift are unspecified and may +differ per event loop. +""" # Future methods - def create_future(self) -> Future[Any]: ... + def create_future(self) -> Future[Any]: + """Create a Future object attached to the loop. +""" # Tasks methods if sys.version_info >= (3, 11): - def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: ... + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: + """Schedule or begin executing a coroutine object. + +Return a task object. +""" else: - def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ... + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: + """Schedule a coroutine object. + + Return a task object. + """ + + def set_task_factory(self, factory: _TaskFactory | None) -> None: + """Set a task factory that will be used by loop.create_task(). - def set_task_factory(self, factory: _TaskFactory | None) -> None: ... - def get_task_factory(self) -> _TaskFactory | None: ... +If factory is None the default task factory will be set. + +If factory is a callable, it should have a signature matching +'(loop, coro, **kwargs)', where 'loop' will be a reference to the active +event loop, 'coro' will be a coroutine object, and **kwargs will be +arbitrary keyword arguments that should be passed on to Task. +The callable must return a Task. +""" + def get_task_factory(self) -> _TaskFactory | None: + """Return a task factory, or None if the default one is in use. +""" # Methods for interacting with threads def call_soon_threadsafe( self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: ... + ) -> Handle: + """Like call_soon(), but thread-safe. +""" def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... def set_default_executor(self, executor: ThreadPoolExecutor) -> None: ... # type: ignore[override] # Network I/O methods returning Futures. @@ -128,7 +246,18 @@ class BaseEventLoop(AbstractEventLoop): happy_eyeballs_delay: float | None = None, interleave: int | None = None, all_errors: bool = False, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Connect to a TCP server. + +Create a streaming transport connection to a given internet host and +port: socket family AF_INET or socket.AF_INET6 depending on host (or +family if specified), socket type SOCK_STREAM. protocol_factory must be +a callable returning a protocol instance. + +This method is a coroutine which will try to establish the connection +in the background. When successful, the coroutine returns a +(transport, protocol) pair. +""" @overload async def create_connection( self, @@ -168,7 +297,18 @@ class BaseEventLoop(AbstractEventLoop): ssl_shutdown_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Connect to a TCP server. + + Create a streaming transport connection to a given internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. 
+ + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + (transport, protocol) pair. + """ @overload async def create_connection( self, @@ -206,7 +346,18 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, happy_eyeballs_delay: float | None = None, interleave: int | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Connect to a TCP server. + + Create a streaming transport connection to a given internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. + + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + (transport, protocol) pair. + """ @overload async def create_connection( self, @@ -246,7 +397,22 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """Create a TCP server. + +The host parameter can be a string, in that case the TCP server is +bound to host and port. + +The host parameter can also be a sequence of strings and in that case +the TCP server is bound to all hosts of the sequence. If a host +appears multiple times (possibly indirectly e.g. when hostnames +resolve to the same IP address), the server is only bound once to that +host. + +Return a Server object which can be used to stop the service. + +This method is a coroutine. +""" @overload async def create_server( self, @@ -284,7 +450,22 @@ class BaseEventLoop(AbstractEventLoop): ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """Create a TCP server. + + The host parameter can be a string, in that case the TCP server is + bound to host and port. + + The host parameter can also be a sequence of strings and in that case + the TCP server is bound to all hosts of the sequence. If a host + appears multiple times (possibly indirectly e.g. when hostnames + resolve to the same IP address), the server is only bound once to that + host. + + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ @overload async def create_server( self, @@ -320,7 +501,22 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """Create a TCP server. + + The host parameter can be a string, in that case the TCP server is + bound to host and port. + + The host parameter can also be a sequence of strings and in that case + the TCP server is bound to all hosts of the sequence. If a host + appears multiple times (possibly indirectly e.g. when hostnames + resolve to the same IP address), the server is only bound once to that + host. + + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ @overload async def create_server( self, @@ -350,7 +546,12 @@ class BaseEventLoop(AbstractEventLoop): server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: ... + ) -> Transport | None: + """Upgrade transport to TLS. 
+ +Return a new transport that *protocol* should start using +immediately. +""" async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -370,7 +571,12 @@ class BaseEventLoop(AbstractEventLoop): server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, - ) -> Transport | None: ... + ) -> Transport | None: + """Upgrade transport to TLS. + + Return a new transport that *protocol* should start using + immediately. + """ async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -378,14 +584,44 @@ class BaseEventLoop(AbstractEventLoop): *, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Handle an accepted connection. + + This is used by servers that accept connections outside of + asyncio but that use asyncio to handle connections. + + This method is a coroutine. When completed, the coroutine + returns a (transport, protocol) pair. + """ async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True ) -> int: ... async def sendfile( self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True - ) -> int: ... + ) -> int: + """Send a file to transport. + +Return the total number of bytes which were sent. + +The method uses high-performance os.sendfile if available. + +file must be a regular file object opened in binary mode. + +offset tells from where to start reading the file. If specified, +count is the total number of bytes to transmit as opposed to +sending the file until EOF is reached. File position is updated on +return or also in case of error in which case file.tell() +can be used to figure out the number of bytes +which were sent. + +fallback set to True makes asyncio to manually read and send +the file when the platform does not support the sendfile syscall +(e.g. Windows or SSL socket on Unix). + +Raise SendfileNotAvailableError if the system does not support +sendfile syscall and fallback is False. +""" if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, @@ -399,7 +635,9 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: + """Create datagram connection. +""" else: async def create_datagram_endpoint( self, @@ -414,7 +652,9 @@ class BaseEventLoop(AbstractEventLoop): reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: + """Create datagram connection. +""" # Pipes and subprocesses. async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -473,16 +713,74 @@ class BaseEventLoop(AbstractEventLoop): def add_signal_handler(self, sig: int, callback: Callable[[Unpack[_Ts]], Any], *args: Unpack[_Ts]) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... # Error handlers. - def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... - def get_exception_handler(self) -> _ExceptionHandler | None: ... - def default_exception_handler(self, context: _Context) -> None: ... 
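# --- Editorial aside (illustrative sketch; not part of the vendored stubs or the upstream diff) ---
# The exception-handler hooks documented in the replacement lines just below take a
# (loop, context) callable; a minimal, hedged usage sketch:
import asyncio

def handler(loop: asyncio.AbstractEventLoop, context: dict) -> None:
    print("caught:", context["message"])
    loop.default_exception_handler(context)  # defer to the default logging behaviour

async def main() -> None:
    loop = asyncio.get_running_loop()
    loop.set_exception_handler(handler)
    loop.call_exception_handler({"message": "demo"})

asyncio.run(main())
# --- End editorial aside ---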
- def call_exception_handler(self, context: _Context) -> None: ... + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: + """Set handler as the new event loop exception handler. + +If handler is None, the default exception handler will +be set. + +If handler is a callable object, it should have a +signature matching '(loop, context)', where 'loop' +will be a reference to the active event loop, 'context' +will be a dict object (see `call_exception_handler()` +documentation for details about context). +""" + def get_exception_handler(self) -> _ExceptionHandler | None: + """Return an exception handler, or None if the default one is in use. + """ + def default_exception_handler(self, context: _Context) -> None: + """Default exception handler. + +This is called when an exception occurs and no exception +handler is set, and can be called by a custom exception +handler that wants to defer to the default behavior. + +This default handler logs the error message and other +context-dependent information. In debug mode, a truncated +stack trace is also appended showing where the given object +(e.g. a handle or future or task) was created, if any. + +The context parameter has the same meaning as in +`call_exception_handler()`. +""" + def call_exception_handler(self, context: _Context) -> None: + """Call the current event loop's exception handler. + +The context argument is a dict containing the following keys: + +- 'message': Error message; +- 'exception' (optional): Exception object; +- 'future' (optional): Future instance; +- 'task' (optional): Task instance; +- 'handle' (optional): Handle instance; +- 'protocol' (optional): Protocol instance; +- 'transport' (optional): Transport instance; +- 'socket' (optional): Socket instance; +- 'source_traceback' (optional): Traceback of the source; +- 'handle_traceback' (optional): Traceback of the handle; +- 'asyncgen' (optional): Asynchronous generator that caused + the exception. + +New keys maybe introduced in the future. + +Note: do not overload this method in an event loop subclass. +For custom exception handling, use the +`set_exception_handler()` method. +""" # Debug flag management. def get_debug(self) -> bool: ... def set_debug(self, enabled: bool) -> None: ... if sys.version_info >= (3, 12): - async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... + async def shutdown_default_executor(self, timeout: float | None = None) -> None: + """Schedule the shutdown of the default executor. + +The timeout parameter specifies the amount of time the executor will +be given to finish joining. The default value is None, which means +that the executor will be given an unlimited amount of time. +""" else: - async def shutdown_default_executor(self) -> None: ... + async def shutdown_default_executor(self) -> None: + """Schedule the shutdown of the default executor. +""" def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi index 2cd0f2e3a7e4a..200e26c30b1c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi @@ -12,6 +12,16 @@ _PENDING: Final = "PENDING" # undocumented _CANCELLED: Final = "CANCELLED" # undocumented _FINISHED: Final = "FINISHED" # undocumented -def isfuture(obj: object) -> TypeIs[Future[Any]]: ... 
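# --- Editorial aside (illustrative sketch; not part of the vendored stubs or the upstream diff) ---
# isfuture(), whose new docstring appears just below, accepts Futures, Tasks, and
# Future-like duck types; a minimal usage sketch:
import asyncio

async def main() -> None:
    fut = asyncio.get_running_loop().create_future()
    task = asyncio.create_task(asyncio.sleep(0))
    print(asyncio.isfuture(fut), asyncio.isfuture(task), asyncio.isfuture(42))  # True True False
    fut.set_result(None)
    await task

asyncio.run(main())
# --- End editorial aside ---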
-def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented -def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... # undocumented +def isfuture(obj: object) -> TypeIs[Future[Any]]: + """Check for a Future. + +This returns True when obj is a Future instance or is advertising +itself as duck-type compatible by setting _asyncio_future_blocking. +See comment in Future for more details. +""" +def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: # undocumented + """helper function for Future.__repr__ +""" +def _future_repr_info(future: futures.Future[Any]) -> list[str]: # undocumented + """helper function for Future.__repr__ +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi index a5fe24e8768b7..ec7294f2ace33 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -52,7 +52,11 @@ class BaseSubprocessTransport(transports.SubprocessTransport): def _pipe_connection_lost(self, fd: int, exc: BaseException | None) -> None: ... # undocumented def _pipe_data_received(self, fd: int, data: bytes) -> None: ... # undocumented def _process_exited(self, returncode: int) -> None: ... # undocumented - async def _wait(self) -> int: ... # undocumented + async def _wait(self) -> int: # undocumented + """Wait until the process exit and return the process return code. + +This method is a coroutine. +""" def _try_finish(self) -> None: ... # undocumented def _call_connection_lost(self, exc: BaseException | None) -> None: ... # undocumented def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi index 5c6456b0e9c04..e9593b12fea2d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi @@ -15,6 +15,8 @@ if sys.version_info >= (3, 12): THREAD_JOIN_TIMEOUT: Final = 300 class _SendfileMode(enum.Enum): + """An enumeration. +""" UNSUPPORTED = 1 TRY_NATIVE = 2 FALLBACK = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi index 59212f4ec398b..91d6fc04020e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi @@ -15,14 +15,23 @@ _P = ParamSpec("_P") if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.8; removed in Python 3.11. Use `async def` instead.") - def coroutine(func: _FunctionT) -> _FunctionT: ... + def coroutine(func: _FunctionT) -> _FunctionT: + """Decorator to mark coroutines. + + If the coroutine is not yielded from before it is destroyed, + an error message is logged. + """ @overload -def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: + """Return True if func is a decorated coroutine function. +""" @overload def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... @overload def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... 
@overload def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... -def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: ... +def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: + """Return True if obj is a coroutine object. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi index 5dc698bc5e15c..6fc52e657cd68 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi @@ -1,3 +1,5 @@ +"""Event loop and event loop policy. +""" import ssl import sys from _asyncio import ( @@ -73,6 +75,8 @@ class _TaskFactory(Protocol): def __call__(self, loop: AbstractEventLoop, factory: _CoroutineLike[_T], /) -> Future[_T]: ... class Handle: + """Object returned by callback registration methods. +""" __slots__ = ("_callback", "_args", "_cancelled", "_loop", "_source_traceback", "_repr", "__weakref__", "_context") _cancelled: bool _args: Sequence[Any] @@ -86,6 +90,8 @@ class Handle: def get_context(self) -> Context: ... class TimerHandle(Handle): + """Object returned by timed callback registration methods. +""" __slots__ = ["_scheduled", "_when"] def __init__( self, @@ -96,7 +102,12 @@ class TimerHandle(Handle): context: Context | None = None, ) -> None: ... def __hash__(self) -> int: ... - def when(self) -> float: ... + def when(self) -> float: + """Return a scheduled callback time. + +The time is an absolute timestamp, using the same time +reference as loop.time(). +""" def __lt__(self, other: TimerHandle) -> bool: ... def __le__(self, other: TimerHandle) -> bool: ... def __gt__(self, other: TimerHandle) -> bool: ... @@ -104,43 +115,93 @@ class TimerHandle(Handle): def __eq__(self, other: object) -> bool: ... class AbstractServer: + """Abstract server returned by create_server(). +""" @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + """Stop serving. This leaves existing connections open. +""" if sys.version_info >= (3, 13): @abstractmethod - def close_clients(self) -> None: ... + def close_clients(self) -> None: + """Close all active connections. +""" @abstractmethod - def abort_clients(self) -> None: ... + def abort_clients(self) -> None: + """Close all active connections immediately. +""" async def __aenter__(self) -> Self: ... async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod - def get_loop(self) -> AbstractEventLoop: ... + def get_loop(self) -> AbstractEventLoop: + """Get the event loop the Server object is attached to. +""" @abstractmethod - def is_serving(self) -> bool: ... + def is_serving(self) -> bool: + """Return True if the server is accepting connections. +""" @abstractmethod - async def start_serving(self) -> None: ... + async def start_serving(self) -> None: + """Start accepting connections. + +This method is idempotent, so it can be called when +the server is already being serving. +""" @abstractmethod - async def serve_forever(self) -> None: ... + async def serve_forever(self) -> None: + """Start accepting connections until the coroutine is cancelled. + +The server is closed when the coroutine is cancelled. +""" @abstractmethod - async def wait_closed(self) -> None: ... + async def wait_closed(self) -> None: + """Coroutine to wait until service is closed. +""" class AbstractEventLoop: + """Abstract event loop. +""" slow_callback_duration: float @abstractmethod - def run_forever(self) -> None: ... 
+ def run_forever(self) -> None: + """Run the event loop until stop() is called. +""" @abstractmethod - def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: + """Run the event loop until a Future is done. + +Return the Future's result, or raise its exception. +""" @abstractmethod - def stop(self) -> None: ... + def stop(self) -> None: + """Stop the event loop as soon as reasonable. + +Exactly how soon that is may depend on the implementation, but +no more I/O callbacks should be scheduled. +""" @abstractmethod - def is_running(self) -> bool: ... + def is_running(self) -> bool: + """Return whether the event loop is currently running. +""" @abstractmethod - def is_closed(self) -> bool: ... + def is_closed(self) -> bool: + """Returns True if the event loop was closed. +""" @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + """Close the loop. + +The loop should not be running. + +This is idempotent and irreversible. + +No other methods should be called after this one. +""" @abstractmethod - async def shutdown_asyncgens(self) -> None: ... + async def shutdown_asyncgens(self) -> None: + """Shutdown all active asynchronous generators. +""" # Methods scheduling callbacks. All these return Handles. # "context" added in 3.9.10/3.10.2 for call_* @abstractmethod @@ -300,7 +361,58 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """A coroutine which creates a TCP server bound to host and port. + +The return value is a Server object which can be used to stop +the service. + +If host is an empty string or None all interfaces are assumed +and a list of multiple sockets will be returned (most likely +one for IPv4 and another one for IPv6). The host parameter can also be +a sequence (e.g. list) of hosts to bind to. + +family can be set to either AF_INET or AF_INET6 to force the +socket to use IPv4 or IPv6. If not set it will be determined +from host (defaults to AF_UNSPEC). + +flags is a bitmask for getaddrinfo(). + +sock can optionally be specified in order to use a preexisting +socket object. + +backlog is the maximum number of queued connections passed to +listen() (defaults to 100). + +ssl can be set to an SSLContext to enable SSL over the +accepted connections. + +reuse_address tells the kernel to reuse a local socket in +TIME_WAIT state, without waiting for its natural timeout to +expire. If not specified will automatically be set to True on +UNIX. + +reuse_port tells the kernel to allow this endpoint to be bound to +the same port as other existing endpoints are bound to, so long as +they all set this flag when being created. This option is not +supported on Windows. + +keep_alive set to True keeps connections active by enabling the +periodic transmission of messages. + +ssl_handshake_timeout is the time in seconds that an SSL server +will wait for completion of the SSL handshake before aborting the +connection. Default is 60s. + +ssl_shutdown_timeout is the time in seconds that an SSL server +will wait for completion of the SSL shutdown procedure +before aborting the connection. Default is 30s. + +start_serving set to True (default) causes the created server +to start accepting connections immediately. When set to False, +the user should await Server.start_serving() or Server.serve_forever() +to make the server to start accepting connections. 
+""" @overload @abstractmethod async def create_server( @@ -340,7 +452,55 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """A coroutine which creates a TCP server bound to host and port. + + The return value is a Server object which can be used to stop + the service. + + If host is an empty string or None all interfaces are assumed + and a list of multiple sockets will be returned (most likely + one for IPv4 and another one for IPv6). The host parameter can also be + a sequence (e.g. list) of hosts to bind to. + + family can be set to either AF_INET or AF_INET6 to force the + socket to use IPv4 or IPv6. If not set it will be determined + from host (defaults to AF_UNSPEC). + + flags is a bitmask for getaddrinfo(). + + sock can optionally be specified in order to use a preexisting + socket object. + + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). + + ssl can be set to an SSLContext to enable SSL over the + accepted connections. + + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified will automatically be set to True on + UNIX. + + reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows. + + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for completion of the SSL handshake before aborting the + connection. Default is 60s. + + ssl_shutdown_timeout is the time in seconds that an SSL server + will wait for completion of the SSL shutdown procedure + before aborting the connection. Default is 30s. + + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ @overload @abstractmethod async def create_server( @@ -378,7 +538,51 @@ class AbstractEventLoop: reuse_port: bool | None = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """A coroutine which creates a TCP server bound to host and port. + + The return value is a Server object which can be used to stop + the service. + + If host is an empty string or None all interfaces are assumed + and a list of multiple sockets will be returned (most likely + one for IPv4 and another one for IPv6). The host parameter can also be + a sequence (e.g. list) of hosts to bind to. + + family can be set to either AF_INET or AF_INET6 to force the + socket to use IPv4 or IPv6. If not set it will be determined + from host (defaults to AF_UNSPEC). + + flags is a bitmask for getaddrinfo(). + + sock can optionally be specified in order to use a preexisting + socket object. + + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). + + ssl can be set to an SSLContext to enable SSL over the + accepted connections. + + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified will automatically be set to True on + UNIX. 
+ + reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows. + + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for completion of the SSL handshake before aborting the + connection. Default is 60s. + + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ @overload @abstractmethod async def create_server( @@ -410,7 +614,12 @@ class AbstractEventLoop: server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> Transport | None: ... + ) -> Transport | None: + """Upgrade a transport to TLS. + +Return a new transport that *protocol* should start using +immediately. +""" async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -422,7 +631,35 @@ class AbstractEventLoop: ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """A coroutine which creates a UNIX Domain Socket server. + +The return value is a Server object, which can be used to stop +the service. + +path is a str, representing a file system path to bind the +server socket to. + +sock can optionally be specified in order to use a preexisting +socket object. + +backlog is the maximum number of queued connections passed to +listen() (defaults to 100). + +ssl can be set to an SSLContext to enable SSL over the +accepted connections. + +ssl_handshake_timeout is the time in seconds that an SSL server +will wait for the SSL handshake to complete (defaults to 60s). + +ssl_shutdown_timeout is the time in seconds that an SSL server +will wait for the SSL shutdown to finish (defaults to 30s). + +start_serving set to True (default) causes the created server +to start accepting connections immediately. When set to False, +the user should await Server.start_serving() or Server.serve_forever() +to make the server to start accepting connections. +""" else: @abstractmethod async def start_tls( @@ -434,7 +671,12 @@ class AbstractEventLoop: server_side: bool = False, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, - ) -> Transport | None: ... + ) -> Transport | None: + """Upgrade a transport to TLS. + + Return a new transport that *protocol* should start using + immediately. + """ async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -445,7 +687,32 @@ class AbstractEventLoop: ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, start_serving: bool = True, - ) -> Server: ... + ) -> Server: + """A coroutine which creates a UNIX Domain Socket server. + + The return value is a Server object, which can be used to stop + the service. + + path is a str, representing a file system path to bind the + server socket to. + + sock can optionally be specified in order to use a preexisting + socket object. + + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). + + ssl can be set to an SSLContext to enable SSL over the + accepted connections. + + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for the SSL handshake to complete (defaults to 60s). 
+ + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ if sys.version_info >= (3, 11): async def connect_accepted_socket( @@ -456,7 +723,15 @@ class AbstractEventLoop: ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Handle an accepted connection. + +This is used by servers that accept connections outside of +asyncio, but use asyncio to handle connections. + +This method is a coroutine. When completed, the coroutine +returns a (transport, protocol) pair. +""" elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, @@ -465,7 +740,15 @@ class AbstractEventLoop: *, ssl: _SSLContext = None, ssl_handshake_timeout: float | None = None, - ) -> tuple[Transport, _ProtocolT]: ... + ) -> tuple[Transport, _ProtocolT]: + """Handle an accepted connection. + + This is used by servers that accept connections outside of + asyncio, but use asyncio to handle connections. + + This method is a coroutine. When completed, the coroutine + returns a (transport, protocol) pair. + """ if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -497,7 +780,11 @@ class AbstractEventLoop: @abstractmethod async def sendfile( self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True - ) -> int: ... + ) -> int: + """Send a file through a transport. + +Return an amount of sent bytes. +""" @abstractmethod async def create_datagram_endpoint( self, @@ -512,16 +799,58 @@ class AbstractEventLoop: reuse_port: bool | None = None, allow_broadcast: bool | None = None, sock: socket | None = None, - ) -> tuple[DatagramTransport, _ProtocolT]: ... + ) -> tuple[DatagramTransport, _ProtocolT]: + """A coroutine which creates a datagram endpoint. + +This method will try to establish the endpoint in the background. +When successful, the coroutine returns a (transport, protocol) pair. + +protocol_factory must be a callable returning a protocol instance. + +socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on +host (or family if specified), socket type SOCK_DGRAM. + +reuse_address tells the kernel to reuse a local socket in +TIME_WAIT state, without waiting for its natural timeout to +expire. If not specified it will automatically be set to True on +UNIX. + +reuse_port tells the kernel to allow this endpoint to be bound to +the same port as other existing endpoints are bound to, so long as +they all set this flag when being created. This option is not +supported on Windows and some UNIX's. If the +:py:data:`~socket.SO_REUSEPORT` constant is not defined then this +capability is unsupported. + +allow_broadcast tells the kernel to allow this endpoint to send +messages to the broadcast address. + +sock can optionally be specified in order to use a preexisting +socket object. +""" # Pipes and subprocesses. @abstractmethod async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any - ) -> tuple[ReadTransport, _ProtocolT]: ... + ) -> tuple[ReadTransport, _ProtocolT]: + """Register read pipe in event loop. Set the pipe to non-blocking mode. + +protocol_factory should instantiate object with Protocol interface. +pipe is a file-like object. 
+Return pair (transport, protocol), where transport supports the +ReadTransport interface. +""" @abstractmethod async def connect_write_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any - ) -> tuple[WriteTransport, _ProtocolT]: ... + ) -> tuple[WriteTransport, _ProtocolT]: + """Register write pipe in event loop. + +protocol_factory should instantiate object with BaseProtocol interface. +Pipe is file-like object already switched to nonblocking. +Return pair (transport, protocol), where transport support +WriteTransport interface. +""" @abstractmethod async def subprocess_shell( self, @@ -600,16 +929,34 @@ class AbstractEventLoop: @abstractmethod def set_debug(self, enabled: bool) -> None: ... @abstractmethod - async def shutdown_default_executor(self) -> None: ... + async def shutdown_default_executor(self) -> None: + """Schedule the shutdown of the default executor. +""" if sys.version_info >= (3, 14): class _AbstractEventLoopPolicy: + """Abstract policy for accessing the event loop. +""" @abstractmethod - def get_event_loop(self) -> AbstractEventLoop: ... + def get_event_loop(self) -> AbstractEventLoop: + """Get the event loop for the current context. + +Returns an event loop object implementing the AbstractEventLoop interface, +or raises an exception in case no event loop has been set for the +current context and the current policy does not specify to create one. + +It should never return None. +""" @abstractmethod - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: + """Set the event loop for the current context to loop. +""" @abstractmethod - def new_event_loop(self) -> AbstractEventLoop: ... + def new_event_loop(self) -> AbstractEventLoop: + """Create and return a new event loop object according to this +policy's rules. If there's need to set this loop as the event loop for +the current context, set_event_loop must be called explicitly. +""" else: @type_check_only @@ -638,38 +985,108 @@ else: if sys.version_info >= (3, 14): class _BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): - def get_event_loop(self) -> AbstractEventLoop: ... - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... - def new_event_loop(self) -> AbstractEventLoop: ... + """Default policy implementation for accessing the event loop. + +In this policy, each thread has its own event loop. However, we +only automatically create an event loop by default for the main +thread; other threads by default have no event loop. + +Other policies may have different rules (e.g. a single global +event loop, or automatically creating an event loop per thread, or +using some other notion of context to which an event loop is +associated). +""" + def get_event_loop(self) -> AbstractEventLoop: + """Get the event loop for the current context. + +Returns an instance of EventLoop or raises an exception. +""" + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: + """Set the event loop. +""" + def new_event_loop(self) -> AbstractEventLoop: + """Create a new event loop. + +You must call set_event_loop() to make this the current event +loop. +""" else: class BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): - def get_event_loop(self) -> AbstractEventLoop: ... - def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... - def new_event_loop(self) -> AbstractEventLoop: ... + """Default policy implementation for accessing the event loop. 
+ +In this policy, each thread has its own event loop. However, we +only automatically create an event loop by default for the main +thread; other threads by default have no event loop. + +Other policies may have different rules (e.g. a single global +event loop, or automatically creating an event loop per thread, or +using some other notion of context to which an event loop is +associated). +""" + def get_event_loop(self) -> AbstractEventLoop: + """Get the event loop for the current context. + +Returns an instance of EventLoop or raises an exception. +""" + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: + """Set the event loop. +""" + def new_event_loop(self) -> AbstractEventLoop: + """Create a new event loop. + +You must call set_event_loop() to make this the current event +loop. +""" if sys.version_info >= (3, 14): - def _get_event_loop_policy() -> _AbstractEventLoopPolicy: ... - def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... + def _get_event_loop_policy() -> _AbstractEventLoopPolicy: + """Get the current event loop policy. +""" + def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: + """Set the current event loop policy. + +If policy is None, the default policy is restored. +""" @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... else: - def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... - def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: ... + def get_event_loop_policy() -> _AbstractEventLoopPolicy: + """Get the current event loop policy. +""" + def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: + """Set the current event loop policy. + +If policy is None, the default policy is restored. +""" -def set_event_loop(loop: AbstractEventLoop | None) -> None: ... -def new_event_loop() -> AbstractEventLoop: ... +def set_event_loop(loop: AbstractEventLoop | None) -> None: + """Equivalent to calling get_event_loop_policy().set_event_loop(loop). +""" +def new_event_loop() -> AbstractEventLoop: + """Equivalent to calling get_event_loop_policy().new_event_loop(). +""" if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def get_child_watcher() -> AbstractChildWatcher: ... + def get_child_watcher() -> AbstractChildWatcher: + """Equivalent to calling get_event_loop_policy().get_child_watcher(). +""" @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... + def set_child_watcher(watcher: AbstractChildWatcher) -> None: + """Equivalent to calling +get_event_loop_policy().set_child_watcher(watcher). +""" else: - def get_child_watcher() -> AbstractChildWatcher: ... - def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... + def get_child_watcher() -> AbstractChildWatcher: + """Equivalent to calling get_event_loop_policy().get_child_watcher(). +""" + def set_child_watcher(watcher: AbstractChildWatcher) -> None: + """Equivalent to calling + get_event_loop_policy().set_child_watcher(watcher). 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi index 759838f45de47..98a69a67a1346 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi @@ -1,3 +1,5 @@ +"""asyncio exceptions. +""" import sys # Keep asyncio.__all__ updated with any changes to __all__ here @@ -21,24 +23,48 @@ else: "SendfileNotAvailableError", ) -class CancelledError(BaseException): ... +class CancelledError(BaseException): + """The Future or Task was cancelled. +""" if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Exception): ... + class TimeoutError(Exception): + """The operation exceeded the given deadline. +""" -class InvalidStateError(Exception): ... -class SendfileNotAvailableError(RuntimeError): ... +class InvalidStateError(Exception): + """The operation is not allowed in this state. +""" +class SendfileNotAvailableError(RuntimeError): + """Sendfile syscall is not available. + +Raised if OS does not support sendfile syscall for given socket or +file type. +""" class IncompleteReadError(EOFError): + """ +Incomplete read error. Attributes: + +- partial: read bytes string before the end of stream was reached +- expected: total number of expected bytes (or None if unknown) +""" expected: int | None partial: bytes def __init__(self, partial: bytes, expected: int | None) -> None: ... class LimitOverrunError(Exception): + """Reached the buffer limit while looking for a separator. + +Attributes: +- consumed: total number of to be consumed bytes. +""" consumed: int def __init__(self, message: str, consumed: int) -> None: ... if sys.version_info >= (3, 11): - class BrokenBarrierError(RuntimeError): ... + class BrokenBarrierError(RuntimeError): + """Barrier is broken by barrier.abort() call. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi index 597eb9e56e1a1..fa3e8825ff875 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi @@ -19,14 +19,29 @@ def _get_function_source(func: object) -> tuple[str, int] | None: ... if sys.version_info >= (3, 13): def _format_callback_source(func: object, args: Iterable[Any], *, debug: bool = False) -> str: ... - def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: + """Format function arguments and keyword arguments. + +Special case for a single parameter: ('hello',) is formatted as ('hello'). + +Note that this function only returns argument details when +debug=True is specified, as arguments may contain sensitive +information. +""" def _format_callback( func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "" ) -> str: ... else: def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... - def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: + """Format function arguments and keyword arguments. + + Special case for a single parameter: ('hello',) is formatted as ('hello'). 
+ """ def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... -def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: + """Replacement for traceback.extract_stack() that only does the +necessary work for asyncio debug mode. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi index c907c7036b040..435efbd5c4f4e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi @@ -1,3 +1,5 @@ +"""A Future class similar to the one in PEP 3148. +""" import sys from _asyncio import Future as Future from concurrent.futures._base import Future as _ConcurrentFuture @@ -16,4 +18,6 @@ else: _T = TypeVar("_T") -def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: + """Wrap concurrent.futures.Future object. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi index 18a8a6457d757..21243d6bba928 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi @@ -1,3 +1,5 @@ +"""Introspection utils for tasks call graphs. +""" import sys from _typeshed import SupportsWrite from asyncio import Future @@ -10,19 +12,62 @@ if sys.version_info >= (3, 14): @dataclass(frozen=True, slots=True) class FrameCallGraphEntry: + """FrameCallGraphEntry(frame: frame) +""" frame: FrameType @dataclass(frozen=True, slots=True) class FutureCallGraph: + """FutureCallGraph(future: _asyncio.Future, call_stack: tuple['FrameCallGraphEntry', ...], awaited_by: tuple['FutureCallGraph', ...]) +""" future: Future[Any] call_stack: tuple[FrameCallGraphEntry, ...] awaited_by: tuple[FutureCallGraph, ...] @overload - def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... + def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: + """Capture the async call graph for the current task or the provided Future. + +The graph is represented with three data structures: + +* FutureCallGraph(future, call_stack, awaited_by) + + Where 'future' is an instance of asyncio.Future or asyncio.Task. + + 'call_stack' is a tuple of FrameGraphEntry objects. + + 'awaited_by' is a tuple of FutureCallGraph objects. + +* FrameCallGraphEntry(frame) + + Where 'frame' is a frame object of a regular Python function + in the call stack. + +Receives an optional 'future' argument. If not passed, +the current task will be used. If there's no current task, the function +returns None. + +If "capture_call_graph()" is introspecting *the current task*, the +optional keyword-only 'depth' argument can be used to skip the specified +number of frames from top of the stack. + +If the optional keyword-only 'limit' argument is provided, each call stack +in the resulting graph is truncated to include at most ``abs(limit)`` +entries. If 'limit' is positive, the entries left are the closest to +the invocation point. 
If 'limit' is negative, the topmost entries are +left. If 'limit' is omitted or None, all entries are present. +If 'limit' is 0, the call stack is not captured at all, only +"awaited by" information is present. +""" @overload def capture_call_graph(future: Future[Any], /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... - def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: ... + def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: + """Return the async call graph as a string for `future`. + +If `future` is not provided, format the call graph for the current task. +""" def print_call_graph( future: Future[Any] | None = None, /, *, file: SupportsWrite[str] | None = None, depth: int = 1, limit: int | None = None - ) -> None: ... + ) -> None: + """Print the async call graph for the current task or the provided Future. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi index 17390b0c5a0ee..b7fdd987a3f12 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi @@ -1,3 +1,5 @@ +"""Synchronization primitives. +""" import enum import sys from _typeshed import Unused @@ -30,29 +32,127 @@ class _ContextManagerMixin: ) -> None: ... class Lock(_ContextManagerMixin, _LoopBoundMixin): + """Primitive lock objects. + +A primitive lock is a synchronization primitive that is not owned +by a particular task when locked. A primitive lock is in one +of two states, 'locked' or 'unlocked'. + +It is created in the unlocked state. It has two basic methods, +acquire() and release(). When the state is unlocked, acquire() +changes the state to locked and returns immediately. When the +state is locked, acquire() blocks until a call to release() in +another task changes it to unlocked, then the acquire() call +resets it to locked and returns. The release() method should only +be called in the locked state; it changes the state to unlocked +and returns immediately. If an attempt is made to release an +unlocked lock, a RuntimeError will be raised. + +When more than one task is blocked in acquire() waiting for +the state to turn to unlocked, only one task proceeds when a +release() call resets the state to unlocked; successive release() +calls will unblock tasks in FIFO order. + +Locks also support the asynchronous context management protocol. +'async with lock' statement should be used. + +Usage: + + lock = Lock() + ... + await lock.acquire() + try: + ... + finally: + lock.release() + +Context manager usage: + + lock = Lock() + ... + async with lock: + ... + +Lock objects can be tested for locking state: + + if not lock.locked(): + await lock.acquire() + else: + # lock is acquired + ... + +""" _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def locked(self) -> bool: ... - async def acquire(self) -> Literal[True]: ... - def release(self) -> None: ... + def locked(self) -> bool: + """Return True if lock is acquired. +""" + async def acquire(self) -> Literal[True]: + """Acquire a lock. + +This method blocks until the lock is unlocked, then sets it to +locked and returns True. +""" + def release(self) -> None: + """Release a lock. 
+ +When the lock is locked, reset it to unlocked, and return. +If any other tasks are blocked waiting for the lock to become +unlocked, allow exactly one of them to proceed. + +When invoked on an unlocked lock, a RuntimeError is raised. + +There is no return value. +""" class Event(_LoopBoundMixin): + """Asynchronous equivalent to threading.Event. + +Class implementing event objects. An event manages a flag that can be set +to true with the set() method and reset to false with the clear() method. +The wait() method blocks until the flag is true. The flag is initially +false. +""" _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... - def is_set(self) -> bool: ... - def set(self) -> None: ... - def clear(self) -> None: ... - async def wait(self) -> Literal[True]: ... + def is_set(self) -> bool: + """Return True if and only if the internal flag is true. +""" + def set(self) -> None: + """Set the internal flag to true. All tasks waiting for it to +become true are awakened. Tasks that call wait() once the flag is +true will not block at all. +""" + def clear(self) -> None: + """Reset the internal flag to false. Subsequently, tasks calling +wait() will block until set() is called to set the internal flag +to true again. +""" + async def wait(self) -> Literal[True]: + """Block until the internal flag is true. + +If the internal flag is true on entry, return True +immediately. Otherwise, block until another task calls +set() to set the flag to true, then return True. +""" class Condition(_ContextManagerMixin, _LoopBoundMixin): + """Asynchronous equivalent to threading.Condition. + +This class implements condition variable objects. A condition variable +allows one or more tasks to wait until they are notified by another +task. + +A new Lock object is created and used as the underlying lock. +""" _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self, lock: Lock | None = None) -> None: ... @@ -62,12 +162,62 @@ class Condition(_ContextManagerMixin, _LoopBoundMixin): def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... - async def wait(self) -> Literal[True]: ... - async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... - def notify(self, n: int = 1) -> None: ... - def notify_all(self) -> None: ... + async def wait(self) -> Literal[True]: + """Wait until notified. + +If the calling task has not acquired the lock when this +method is called, a RuntimeError is raised. + +This method releases the underlying lock, and then blocks +until it is awakened by a notify() or notify_all() call for +the same condition variable in another task. Once +awakened, it re-acquires the lock and returns True. + +This method may return spuriously, +which is why the caller should always +re-check the state and be prepared to wait() again. +""" + async def wait_for(self, predicate: Callable[[], _T]) -> _T: + """Wait until a predicate becomes true. + +The predicate should be a callable whose result will be +interpreted as a boolean value. The method will repeatedly +wait() until it evaluates to true. The final predicate value is +the return value. +""" + def notify(self, n: int = 1) -> None: + """By default, wake up one task waiting on this condition, if any. +If the calling task has not acquired the lock when this method +is called, a RuntimeError is raised. 
+ +This method wakes up n of the tasks waiting for the condition + variable; if fewer than n are waiting, they are all awoken. + +Note: an awakened task does not actually return from its +wait() call until it can reacquire the lock. Since notify() does +not release the lock, its caller should. +""" + def notify_all(self) -> None: + """Wake up all tasks waiting on this condition. This method acts +like notify(), but wakes up all waiting tasks instead of one. If the +calling task has not acquired the lock when this method is called, +a RuntimeError is raised. +""" class Semaphore(_ContextManagerMixin, _LoopBoundMixin): + """A Semaphore implementation. + +A semaphore manages an internal counter which is decremented by each +acquire() call and incremented by each release() call. The counter +can never go below zero; when acquire() finds that it is zero, it blocks, +waiting until some other thread calls release(). + +Semaphores also support the context management protocol. + +The optional argument gives the initial value for the internal +counter; it defaults to 1. If the value given is less than 0, +ValueError is raised. +""" _value: int _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): @@ -75,12 +225,34 @@ class Semaphore(_ContextManagerMixin, _LoopBoundMixin): else: def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... - def locked(self) -> bool: ... - async def acquire(self) -> Literal[True]: ... - def release(self) -> None: ... - def _wake_up_next(self) -> None: ... + def locked(self) -> bool: + """Returns True if semaphore cannot be acquired immediately. +""" + async def acquire(self) -> Literal[True]: + """Acquire a semaphore. + +If the internal counter is larger than zero on entry, +decrement it by one and return True immediately. If it is +zero on entry, block, waiting until some other task has +called release() to make it larger than 0, and then return +True. +""" + def release(self) -> None: + """Release a semaphore, incrementing the internal counter by one. + +When it was zero on entry and another task is waiting for it to +become larger than zero again, wake up that task. +""" + def _wake_up_next(self) -> None: + """Wake up the first waiter that isn't done. +""" -class BoundedSemaphore(Semaphore): ... +class BoundedSemaphore(Semaphore): + """A bounded semaphore implementation. + +This raises ValueError in release() if it would increase the value +above the initial value. +""" if sys.version_info >= (3, 11): class _BarrierState(enum.Enum): # undocumented @@ -90,15 +262,46 @@ if sys.version_info >= (3, 11): BROKEN = "broken" class Barrier(_LoopBoundMixin): - def __init__(self, parties: int) -> None: ... + """Asyncio equivalent to threading.Barrier + +Implements a Barrier primitive. +Useful for synchronizing a fixed number of tasks at known synchronization +points. Tasks block on 'wait()' and are simultaneously awoken once they +have all made their call. +""" + def __init__(self, parties: int) -> None: + """Create a barrier, initialised to 'parties' tasks. +""" async def __aenter__(self) -> Self: ... async def __aexit__(self, *args: Unused) -> None: ... - async def wait(self) -> int: ... - async def abort(self) -> None: ... - async def reset(self) -> None: ... + async def wait(self) -> int: + """Wait for the barrier. + +When the specified number of tasks have started waiting, they are all +simultaneously awoken. +Returns an unique and individual index number from 0 to 'parties-1'. 
+""" + async def abort(self) -> None: + """Place the barrier into a 'broken' state. + +Useful in case of error. Any currently waiting tasks and tasks +attempting to 'wait()' will have BrokenBarrierError raised. +""" + async def reset(self) -> None: + """Reset the barrier to the initial state. + +Any tasks currently waiting will get the BrokenBarrier exception +raised. +""" @property - def parties(self) -> int: ... + def parties(self) -> int: + """Return the number of tasks required to trip the barrier. +""" @property - def n_waiting(self) -> int: ... + def n_waiting(self) -> int: + """Return the number of tasks currently waiting at the barrier. +""" @property - def broken(self) -> bool: ... + def broken(self) -> bool: + """Return True if the barrier is in a broken state. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi index e1de0b3bb845e..29b24be2208d8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi @@ -1,3 +1,5 @@ +"""Logging configuration. +""" import logging logger: logging.Logger diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi index 6ebcf543e6b94..c6a7430bd5ef5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi @@ -1,3 +1,5 @@ +"""Event loop mixins. +""" import sys import threading from typing_extensions import Never diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi index 909d671df289d..e434594f39ed9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi @@ -1,3 +1,8 @@ +"""Event loop using a proactor and related classes. + +A proactor is a "notify-on-completion" multiplexer. Currently a +proactor is only implemented on Windows with IOCP. +""" import sys from collections.abc import Mapping from socket import socket @@ -8,6 +13,8 @@ from . import base_events, constants, events, futures, streams, transports __all__ = ("BaseProactorEventLoop",) class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): + """Base class for pipe and socket transports. +""" def __init__( self, loop: events.AbstractEventLoop, @@ -20,6 +27,8 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr def __del__(self) -> None: ... class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): + """Transport for read pipes. +""" if sys.version_info >= (3, 10): def __init__( self, @@ -42,11 +51,17 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran server: events.AbstractServer | None = None, ) -> None: ... -class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... +class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): + """Transport for write pipes. +""" class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... -class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... 
+class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): + """Transport for duplex pipes. +""" class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): + """Transport for connected sockets. +""" _sendfile_compatible: ClassVar[constants._SendfileMode] def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi index 2c52ad4be4102..e989001725373 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi @@ -1,3 +1,5 @@ +"""Abstract Protocol base classes. +""" from _typeshed import ReadableBuffer from asyncio import transports from typing import Any @@ -6,36 +8,184 @@ from typing import Any __all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") class BaseProtocol: + """Common base class for protocol interfaces. + +Usually user implements protocols that derived from BaseProtocol +like Protocol or ProcessProtocol. + +The only case when BaseProtocol should be implemented directly is +write-only transport like write pipe +""" __slots__ = () - def connection_made(self, transport: transports.BaseTransport) -> None: ... - def connection_lost(self, exc: Exception | None) -> None: ... - def pause_writing(self) -> None: ... - def resume_writing(self) -> None: ... + def connection_made(self, transport: transports.BaseTransport) -> None: + """Called when a connection is made. + +The argument is the transport representing the pipe connection. +To receive data, wait for data_received() calls. +When the connection is closed, connection_lost() is called. +""" + def connection_lost(self, exc: Exception | None) -> None: + """Called when the connection is lost or closed. + +The argument is an exception object or None (the latter +meaning a regular EOF is received or the connection was +aborted or closed). +""" + def pause_writing(self) -> None: + """Called when the transport's buffer goes over the high-water mark. + +Pause and resume calls are paired -- pause_writing() is called +once when the buffer goes strictly over the high-water mark +(even if subsequent writes increases the buffer size even +more), and eventually resume_writing() is called once when the +buffer size reaches the low-water mark. + +Note that if the buffer size equals the high-water mark, +pause_writing() is not called -- it must go strictly over. +Conversely, resume_writing() is called when the buffer size is +equal or lower than the low-water mark. These end conditions +are important to ensure that things go as expected when either +mark is zero. + +NOTE: This is the only Protocol callback that is not called +through EventLoop.call_soon() -- if it were, it would have no +effect when it's most needed (when the app keeps writing +without yielding until pause_writing() is called). +""" + def resume_writing(self) -> None: + """Called when the transport's buffer drains below the low-water mark. + +See pause_writing() for details. +""" class Protocol(BaseProtocol): + """Interface for stream protocol. + +The user should implement this interface. They can inherit from +this class but don't need to. The implementations here do +nothing (they don't raise exceptions). 
+ +When the user wants to requests a transport, they pass a protocol +factory to a utility function (e.g., EventLoop.create_connection()). + +When the connection is made successfully, connection_made() is +called with a suitable transport object. Then data_received() +will be called 0 or more times with data (bytes) received from the +transport; finally, connection_lost() will be called exactly once +with either an exception object or None as an argument. + +State machine of calls: + + start -> CM [-> DR*] [-> ER?] -> CL -> end + +* CM: connection_made() +* DR: data_received() +* ER: eof_received() +* CL: connection_lost() +""" # Need annotation or mypy will complain about 'Cannot determine type of "__slots__" in base class' __slots__: tuple[()] = () - def data_received(self, data: bytes) -> None: ... - def eof_received(self) -> bool | None: ... + def data_received(self, data: bytes) -> None: + """Called when some data is received. + +The argument is a bytes object. +""" + def eof_received(self) -> bool | None: + """Called when the other end calls write_eof() or equivalent. + +If this returns a false value (including None), the transport +will close itself. If it returns a true value, closing the +transport is up to the protocol. +""" class BufferedProtocol(BaseProtocol): + """Interface for stream protocol with manual buffer control. + +Event methods, such as `create_server` and `create_connection`, +accept factories that return protocols that implement this interface. + +The idea of BufferedProtocol is that it allows to manually allocate +and control the receive buffer. Event loops can then use the buffer +provided by the protocol to avoid unnecessary data copies. This +can result in noticeable performance improvement for protocols that +receive big amounts of data. Sophisticated protocols can allocate +the buffer only once at creation time. + +State machine of calls: + + start -> CM [-> GB [-> BU?]]* [-> ER?] -> CL -> end + +* CM: connection_made() +* GB: get_buffer() +* BU: buffer_updated() +* ER: eof_received() +* CL: connection_lost() +""" __slots__ = () - def get_buffer(self, sizehint: int) -> ReadableBuffer: ... - def buffer_updated(self, nbytes: int) -> None: ... - def eof_received(self) -> bool | None: ... + def get_buffer(self, sizehint: int) -> ReadableBuffer: + """Called to allocate a new receive buffer. + +*sizehint* is a recommended minimal size for the returned +buffer. When set to -1, the buffer size can be arbitrary. + +Must return an object that implements the +:ref:`buffer protocol `. +It is an error to return a zero-sized buffer. +""" + def buffer_updated(self, nbytes: int) -> None: + """Called when the buffer was updated with the received data. + +*nbytes* is the total number of bytes that were written to +the buffer. +""" + def eof_received(self) -> bool | None: + """Called when the other end calls write_eof() or equivalent. + +If this returns a false value (including None), the transport +will close itself. If it returns a true value, closing the +transport is up to the protocol. +""" class DatagramProtocol(BaseProtocol): + """Interface for datagram protocol. +""" __slots__ = () - def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] + def connection_made(self, transport: transports.DatagramTransport) -> None: # type: ignore[override] + """Called when a connection is made. + +The argument is the transport representing the pipe connection. +To receive data, wait for data_received() calls. 
+When the connection is closed, connection_lost() is called. +""" # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. # See https://github.com/python/typing/issues/566 - def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... - def error_received(self, exc: Exception) -> None: ... + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: + """Called when some datagram is received. +""" + def error_received(self, exc: Exception) -> None: + """Called when a send or receive operation raises an OSError. + +(Other than BlockingIOError or InterruptedError.) +""" class SubprocessProtocol(BaseProtocol): + """Interface for protocol for subprocess calls. +""" __slots__: tuple[()] = () - def pipe_data_received(self, fd: int, data: bytes) -> None: ... - def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... - def process_exited(self) -> None: ... + def pipe_data_received(self, fd: int, data: bytes) -> None: + """Called when the subprocess writes data into stdout/stderr pipe. + +fd is int file descriptor. +data is bytes object. +""" + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: + """Called when a file descriptor associated with the child process is +closed. + +fd is the int file descriptor that was closed. +""" + def process_exited(self) -> None: + """Called when subprocess has exited. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi index 2fa2226d0e6ae..028d1b7b7e649 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi @@ -9,8 +9,12 @@ if sys.version_info >= (3, 10): else: _LoopBoundMixin = object -class QueueEmpty(Exception): ... -class QueueFull(Exception): ... +class QueueEmpty(Exception): + """Raised when Queue.get_nowait() is called on an empty Queue. +""" +class QueueFull(Exception): + """Raised when the Queue.put_nowait() method is called on a full Queue. +""" # Keep asyncio.__all__ updated with any changes to __all__ here if sys.version_info >= (3, 13): @@ -22,11 +26,23 @@ else: _T = TypeVar("_T") if sys.version_info >= (3, 13): - class QueueShutDown(Exception): ... + class QueueShutDown(Exception): + """Raised when putting on to or getting from a shut-down Queue. +""" # If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy. # We can remove the noqa pragma when dropping 3.9 support. class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 + """A queue, useful for coordinating producer and consumer coroutines. + +If maxsize is less than or equal to zero, the queue size is infinite. If it +is an integer greater than 0, then "await put()" will block when the +queue reaches maxsize, until an item is removed by get(). + +Unlike the standard library Queue, you can reliably know this Queue's size +with qsize(), since your single-threaded asyncio application won't be +interrupted between calling qsize() and doing an operation on the Queue. +""" if sys.version_info >= (3, 10): def __init__(self, maxsize: int = 0) -> None: ... else: @@ -36,20 +52,100 @@ class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... def _format(self) -> str: ... 
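The BaseProtocol/Protocol docstrings added above spell out the callback state machine (connection_made -> data_received* -> eof_received? -> connection_lost) in prose. A small illustrative client protocol; the class name and the 127.0.0.1:8888 endpoint are placeholders invented for the example, not taken from the stubs:

    import asyncio

    class EchoClientProtocol(asyncio.Protocol):
        def __init__(self, message: bytes, done: "asyncio.Future[None]") -> None:
            self.message = message
            self.done = done

        def connection_made(self, transport) -> None:
            # For create_connection() the transport is a stream Transport.
            assert isinstance(transport, asyncio.Transport)
            self.transport = transport
            transport.write(self.message)

        def data_received(self, data: bytes) -> None:
            print("received:", data)
            self.transport.close()  # triggers connection_lost()

        def connection_lost(self, exc) -> None:
            if not self.done.done():
                self.done.set_result(None)

    async def main() -> None:
        loop = asyncio.get_running_loop()
        done: "asyncio.Future[None]" = loop.create_future()
        transport, _ = await loop.create_connection(
            lambda: EchoClientProtocol(b"hello\n", done), "127.0.0.1", 8888
        )
        try:
            await done
        finally:
            transport.close()

    asyncio.run(main())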
- def qsize(self) -> int: ... + def qsize(self) -> int: + """Number of items in the queue. +""" @property - def maxsize(self) -> int: ... - def empty(self) -> bool: ... - def full(self) -> bool: ... - async def put(self, item: _T) -> None: ... - def put_nowait(self, item: _T) -> None: ... - async def get(self) -> _T: ... - def get_nowait(self) -> _T: ... - async def join(self) -> None: ... - def task_done(self) -> None: ... - def __class_getitem__(cls, type: Any, /) -> GenericAlias: ... + def maxsize(self) -> int: + """Number of items allowed in the queue. +""" + def empty(self) -> bool: + """Return True if the queue is empty, False otherwise. +""" + def full(self) -> bool: + """Return True if there are maxsize items in the queue. + +Note: if the Queue was initialized with maxsize=0 (the default), +then full() is never True. +""" + async def put(self, item: _T) -> None: + """Put an item into the queue. + +Put an item into the queue. If the queue is full, wait until a free +slot is available before adding item. + +Raises QueueShutDown if the queue has been shut down. +""" + def put_nowait(self, item: _T) -> None: + """Put an item into the queue without blocking. + +If no free slot is immediately available, raise QueueFull. + +Raises QueueShutDown if the queue has been shut down. +""" + async def get(self) -> _T: + """Remove and return an item from the queue. + +If queue is empty, wait until an item is available. + +Raises QueueShutDown if the queue has been shut down and is empty, or +if the queue has been shut down immediately. +""" + def get_nowait(self) -> _T: + """Remove and return an item from the queue. + +Return an item if one is immediately available, else raise QueueEmpty. + +Raises QueueShutDown if the queue has been shut down and is empty, or +if the queue has been shut down immediately. +""" + async def join(self) -> None: + """Block until all items in the queue have been gotten and processed. + +The count of unfinished tasks goes up whenever an item is added to the +queue. The count goes down whenever a consumer calls task_done() to +indicate that the item was retrieved and all work on it is complete. +When the count of unfinished tasks drops to zero, join() unblocks. +""" + def task_done(self) -> None: + """Indicate that a formerly enqueued task is complete. + +Used by queue consumers. For each get() used to fetch a task, +a subsequent call to task_done() tells the queue that the processing +on the task is complete. + +If a join() is currently blocking, it will resume when all items have +been processed (meaning that a task_done() call was received for every +item that had been put() into the queue). + +shutdown(immediate=True) calls task_done() for each remaining item in +the queue. + +Raises ValueError if called more times than there were items placed in +the queue. +""" + def __class_getitem__(cls, type: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" if sys.version_info >= (3, 13): - def shutdown(self, immediate: bool = False) -> None: ... + def shutdown(self, immediate: bool = False) -> None: + """Shut-down the queue, making queue gets and puts raise QueueShutDown. + +By default, gets will only raise once the queue is empty. Set +'immediate' to True to make gets raise immediately instead. + +All blocked callers of put() and get() will be unblocked. If +'immediate', a task is marked as done for each item remaining in +the queue, which may unblock callers of join(). 
+""" + +class PriorityQueue(Queue[SupportsRichComparisonT]): + """A subclass of Queue; retrieves entries in priority order (lowest first). -class PriorityQueue(Queue[SupportsRichComparisonT]): ... -class LifoQueue(Queue[_T]): ... +Entries are typically tuples of the form: (priority number, data). +""" +class LifoQueue(Queue[_T]): + """A subclass of Queue that retrieves most recently added entries first. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi index 919e6521f8a15..54589f26b42f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi @@ -17,17 +17,98 @@ _T = TypeVar("_T") if sys.version_info >= (3, 11): @final class Runner: + """A context manager that controls event loop life cycle. + +The context manager always creates a new event loop, +allows to run async functions inside it, +and properly finalizes the loop at the context manager exit. + +If debug is True, the event loop will be run in debug mode. +If loop_factory is passed, it is used for new event loop creation. + +asyncio.run(main(), debug=True) + +is a shortcut for + +with asyncio.Runner(debug=True) as runner: + runner.run(main()) + +The run() method can be called multiple times within the runner's context. + +This can be useful for interactive console (e.g. IPython), +unittest runners, console tools, -- everywhere when async code +is called from existing sync framework and where the preferred single +asyncio.run() call doesn't work. + +""" def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... - def close(self) -> None: ... - def get_loop(self) -> AbstractEventLoop: ... - def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... + def close(self) -> None: + """Shutdown and close event loop. +""" + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop. +""" + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: + """Run code in the embedded event loop. +""" if sys.version_info >= (3, 12): def run( main: Coroutine[Any, Any, _T], *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None - ) -> _T: ... + ) -> _T: + """Execute the coroutine and return the result. + +This function runs the passed coroutine, taking care of +managing the asyncio event loop, finalizing asynchronous +generators and closing the default executor. + +This function cannot be called when another asyncio event loop is +running in the same thread. + +If debug is True, the event loop will be run in debug mode. +If loop_factory is passed, it is used for new event loop creation. + +This function always creates a new event loop and closes it at the end. +It should be used as a main entry point for asyncio programs, and should +ideally only be called once. + +The executor is given a timeout duration of 5 minutes to shutdown. +If the executor hasn't finished within that duration, a warning is +emitted and the executor is closed. + +Example: + + async def main(): + await asyncio.sleep(1) + print('hello') + + asyncio.run(main()) +""" else: - def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... 
+ def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: + """Execute the coroutine and return the result. + + This function runs the passed coroutine, taking care of + managing the asyncio event loop and finalizing asynchronous + generators. + + This function cannot be called when another asyncio event loop is + running in the same thread. + + If debug is True, the event loop will be run in debug mode. + + This function always creates a new event loop and closes it at the end. + It should be used as a main entry point for asyncio programs, and should + ideally only be called once. + + Example: + + async def main(): + await asyncio.sleep(1) + print('hello') + + asyncio.run(main()) + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi index 18c5df033e2f6..c67a9942360c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi @@ -1,3 +1,8 @@ +"""Event loop using a selector and related classes. + +A selector is a "notify-when-ready" multiplexer. For a subclass which +also includes support for signal handling, see the unix_events sub-module. +""" import selectors from socket import socket @@ -6,5 +11,15 @@ from . import base_events __all__ = ("BaseSelectorEventLoop",) class BaseSelectorEventLoop(base_events.BaseEventLoop): + """Selector event loop. + +See events.EventLoop for API specification. +""" def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... - async def sock_recv(self, sock: socket, n: int) -> bytes: ... + async def sock_recv(self, sock: socket, n: int) -> bytes: + """Receive data from the socket. + +The return value is a bytes object representing the data received. +The maximum amount of data to be received at once is specified by +nbytes. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi index ab102f124c2e8..ec38f27e25617 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi @@ -36,6 +36,22 @@ else: if sys.version_info < (3, 11): class _SSLPipe: + """An SSL "Pipe". + + An SSL pipe allows you to communicate with an SSL/TLS protocol instance + through memory buffers. It can be used to implement a security layer for an + existing connection where you don't have access to the connection's file + descriptor, or for some reason you don't want to use it. + + An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, + data is passed through untransformed. In wrapped mode, application level + data is encrypted to SSL record level data and vice versa. The SSL record + level is the lowest level in the SSL protocol suite and is what travels + as-is over the wire. + + An SslPipe initially is in "unwrapped" mode. To start SSL, call + do_handshake(). To shutdown SSL again, call unwrap(). + """ max_size: ClassVar[int] _context: ssl.SSLContext @@ -48,20 +64,93 @@ if sys.version_info < (3, 11): _need_ssldata: bool _handshake_cb: Callable[[BaseException | None], None] | None _shutdown_cb: Callable[[], None] | None - def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... 
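The Runner and run() docstrings added above explain the event loop lifecycle in prose; a short sketch contrasting the two entry points (illustrative; the coroutine is made up for the example):

    import asyncio

    async def job(delay: float) -> str:
        await asyncio.sleep(delay)
        return f"slept {delay}s"

    # asyncio.run(): creates a fresh event loop, runs one coroutine, closes the loop.
    print(asyncio.run(job(0.1)))

    # asyncio.Runner (Python 3.11+): keep one loop alive across several run() calls.
    with asyncio.Runner(debug=True) as runner:
        print(runner.run(job(0.1)))
        print(runner.run(job(0.2)))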
+ def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: + """ + The *context* argument specifies the ssl.SSLContext to use. + + The *server_side* argument indicates whether this is a server side or + client side transport. + + The optional *server_hostname* argument can be used to specify the + hostname you are connecting to. You may only specify this parameter if + the _ssl module supports Server Name Indication (SNI). + """ @property - def context(self) -> ssl.SSLContext: ... + def context(self) -> ssl.SSLContext: + """The SSL context passed to the constructor. +""" @property - def ssl_object(self) -> ssl.SSLObject | None: ... + def ssl_object(self) -> ssl.SSLObject | None: + """The internal ssl.SSLObject instance. + + Return None if the pipe is not wrapped. + """ @property - def need_ssldata(self) -> bool: ... + def need_ssldata(self) -> bool: + """Whether more record level data is needed to complete a handshake + that is currently in progress. +""" @property - def wrapped(self) -> bool: ... - def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... - def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... - def feed_eof(self) -> None: ... - def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... - def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... + def wrapped(self) -> bool: + """ + Whether a security layer is currently in effect. + + Return False during handshake. + """ + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: + """Start the SSL handshake. + + Return a list of ssldata. A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the handshake is complete. The callback will be + called with None if successful, else an exception instance. + """ + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: + """Start the SSL shutdown sequence. + + Return a list of ssldata. A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the shutdown is complete. The callback will be + called without arguments. + """ + def feed_eof(self) -> None: + """Send a potentially "ragged" EOF. + + This method will raise an SSL_ERROR_EOF exception if the EOF is + unexpected. + """ + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: + """Feed SSL record level data into the pipe. + + The data must be a bytes instance. It is OK to send an empty bytes + instance. This can be used to get ssldata for a handshake initiated by + this endpoint. + + Return a (ssldata, appdata) tuple. The ssldata element is a list of + buffers containing SSL data that needs to be sent to the remote SSL. + + The appdata element is a list of buffers containing plaintext data that + needs to be forwarded to the application. The appdata list may contain + an empty buffer indicating an SSL "close_notify" alert. This alert must + be acknowledged by calling shutdown(). + """ + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: + """Feed plaintext data into the pipe. + + Return an (ssldata, offset) tuple. 
The ssldata element is a list of + buffers containing record level data that needs to be sent to the + remote SSL instance. The offset is the number of plaintext bytes that + were processed, which may be less than the length of data. + + NOTE: In case of short writes, this call MUST be retried with the SAME + buffer passed into the *data* argument (i.e. the id() must be the + same). This is an OpenSSL requirement. A further particularity is that + a short write will always have offset == 0, because the _ssl module + does not enable partial writes. And even though the offset is zero, + there will still be encrypted data in ssldata. + """ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _sendfile_compatible: ClassVar[constants._SendfileMode] @@ -73,16 +162,45 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _ssl_protocol: SSLProtocol _closed: bool def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... - def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: + """Get optional transport information. +""" @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape - def can_write_eof(self) -> Literal[False]: ... + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape + """Write some data bytes to the transport. + +This does not block; it buffers the data and arranges for it +to be sent out asynchronously. +""" + def can_write_eof(self) -> Literal[False]: + """Return True if this transport supports write_eof(), False if not. +""" if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... def get_read_buffer_limits(self) -> tuple[int, int]: ... - def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... - def get_read_buffer_size(self) -> int: ... + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: + """Set the high- and low-water limits for read flow control. + +These two values control when to call the upstream transport's +pause_reading() and resume_reading() methods. If specified, +the low-water limit must be less than or equal to the +high-water limit. Neither value can be negative. + +The defaults are implementation-specific. If only the +high-water limit is given, the low-water limit defaults to an +implementation-specific value less than or equal to the +high-water limit. Setting high to zero forces low to zero as +well, and causes pause_reading() to be called whenever the +buffer becomes non-empty. Setting low to zero causes +resume_reading() to be called only once the buffer is empty. +Use of zero for either limit is generally sub-optimal as it +reduces opportunities for doing I/O and computation +concurrently. +""" + def get_read_buffer_size(self) -> int: + """Return the current size of the read buffer. +""" def __del__(self) -> None: ... @@ -92,6 +210,11 @@ else: _SSLProtocolBase: TypeAlias = protocols.Protocol class SSLProtocol(_SSLProtocolBase): + """SSL protocol. + + Implementation of SSL on top of a socket using incoming and outgoing + buffers which are ssl.MemoryBIO objects. 
+ """ _server_side: bool _server_hostname: str | None _sslcontext: ssl.SSLContext @@ -143,8 +266,21 @@ class SSLProtocol(_SSLProtocolBase): def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... - def connection_lost(self, exc: BaseException | None) -> None: ... - def eof_received(self) -> None: ... + def connection_lost(self, exc: BaseException | None) -> None: + """Called when the low-level connection is lost or closed. + +The argument is an exception object or None (the latter +meaning a regular EOF is received or the connection was +aborted or closed). +""" + def eof_received(self) -> None: + """Called when the other end of the low-level stream +is half-closed. + +If this returns a false value (including None), the transport +will close itself. If it returns a true value, closing the +transport is up to the protocol. +""" def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... def _start_shutdown(self) -> None: ... if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi index 3324777f41686..beb5df707b4e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi @@ -1,3 +1,5 @@ +"""Support for running coroutines in parallel with staggered start times. +""" from collections.abc import Awaitable, Callable, Iterable from typing import Any @@ -7,4 +9,54 @@ __all__ = ("staggered_race",) async def staggered_race( coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None -) -> tuple[Any, int | None, list[Exception | None]]: ... +) -> tuple[Any, int | None, list[Exception | None]]: + """Run coroutines with staggered start times and take the first to finish. + +This method takes an iterable of coroutine functions. The first one is +started immediately. From then on, whenever the immediately preceding one +fails (raises an exception), or when *delay* seconds has passed, the next +coroutine is started. This continues until one of the coroutines complete +successfully, in which case all others are cancelled, or until all +coroutines fail. + +The coroutines provided should be well-behaved in the following way: + +* They should only ``return`` if completed successfully. + +* They should always raise an exception if they did not complete + successfully. In particular, if they handle cancellation, they should + probably reraise, like this:: + + try: + # do work + except asyncio.CancelledError: + # undo partially completed work + raise + +Args: + coro_fns: an iterable of coroutine functions, i.e. callables that + return a coroutine object when called. Use ``functools.partial`` or + lambdas to pass arguments. + + delay: amount of time, in seconds, between starting coroutines. If + ``None``, the coroutines will run sequentially. + + loop: the event loop to use. + +Returns: + tuple *(winner_result, winner_index, exceptions)* where + + - *winner_result*: the result of the winning coroutine, or ``None`` + if no coroutines won. + + - *winner_index*: the index of the winning coroutine in + ``coro_fns``, or ``None`` if no coroutines won. If the winning + coroutine may return None on success, *winner_index* can be used + to definitively determine whether any coroutine won. 
+ + - *exceptions*: list of exceptions returned by the coroutines. + ``len(exceptions)`` is equal to the number of coroutines actually + started, and the order is the same as in ``coro_fns``. The winning + coroutine's entry is ``None``. + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi index 33cffb11ed780..02cb9a9b7348a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi @@ -36,7 +36,24 @@ if sys.version_info >= (3, 10): limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> tuple[StreamReader, StreamWriter]: ... + ) -> tuple[StreamReader, StreamWriter]: + """A wrapper for create_connection() returning a (reader, writer) pair. + +The reader returned is a StreamReader instance; the writer is a +StreamWriter instance. + +The arguments are all the usual arguments to create_connection() +except protocol_factory; most common are positional host and port, +with various optional keyword arguments following. + +Additional optional keyword arguments are loop (to set the event loop +instance to use) and limit (to set the buffer limit passed to the +StreamReader). + +(If you want to customize the StreamReader and/or +StreamReaderProtocol classes, just copy the code -- there's +really nothing special here except some convenience.) +""" async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | Sequence[str] | None = None, @@ -45,7 +62,27 @@ if sys.version_info >= (3, 10): limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> Server: ... + ) -> Server: + """Start a socket server, call back for each client connected. + +The first parameter, `client_connected_cb`, takes two parameters: +client_reader, client_writer. client_reader is a StreamReader +object, while client_writer is a StreamWriter object. This +parameter can either be a plain callback function or a coroutine; +if it is a coroutine, it will be automatically converted into a +Task. + +The rest of the arguments are all the usual arguments to +loop.create_server() except protocol_factory; most common are +positional host and port, with various optional keyword arguments +following. The return value is the same as loop.create_server(). + +Additional optional keyword argument is limit (to set the buffer +limit passed to the StreamReader). + +The return value is the same as loop.create_server(), i.e. a +Server object which can be used to stop the service. +""" else: async def open_connection( @@ -56,7 +93,24 @@ else: limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> tuple[StreamReader, StreamWriter]: ... + ) -> tuple[StreamReader, StreamWriter]: + """A wrapper for create_connection() returning a (reader, writer) pair. + + The reader returned is a StreamReader instance; the writer is a + StreamWriter instance. + + The arguments are all the usual arguments to create_connection() + except protocol_factory; most common are positional host and port, + with various optional keyword arguments following. + + Additional optional keyword arguments are loop (to set the event loop + instance to use) and limit (to set the buffer limit passed to the + StreamReader). + + (If you want to customize the StreamReader and/or + StreamReaderProtocol classes, just copy the code -- there's + really nothing special here except some convenience.) 
+ """ async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | None = None, @@ -66,20 +120,47 @@ else: limit: int = 65536, ssl_handshake_timeout: float | None = None, **kwds: Any, - ) -> Server: ... + ) -> Server: + """Start a socket server, call back for each client connected. + + The first parameter, `client_connected_cb`, takes two parameters: + client_reader, client_writer. client_reader is a StreamReader + object, while client_writer is a StreamWriter object. This + parameter can either be a plain callback function or a coroutine; + if it is a coroutine, it will be automatically converted into a + Task. + + The rest of the arguments are all the usual arguments to + loop.create_server() except protocol_factory; most common are + positional host and port, with various optional keyword arguments + following. The return value is the same as loop.create_server(). + + Additional optional keyword arguments are loop (to set the event loop + instance to use) and limit (to set the buffer limit passed to the + StreamReader). + + The return value is the same as loop.create_server(), i.e. a + Server object which can be used to stop the service. + """ if sys.platform != "win32": if sys.version_info >= (3, 10): async def open_unix_connection( path: StrPath | None = None, *, limit: int = 65536, **kwds: Any - ) -> tuple[StreamReader, StreamWriter]: ... + ) -> tuple[StreamReader, StreamWriter]: + """Similar to `open_connection` but works with UNIX Domain Sockets. +""" async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any - ) -> Server: ... + ) -> Server: + """Similar to `start_server` but works with UNIX Domain Sockets. +""" else: async def open_unix_connection( path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any - ) -> tuple[StreamReader, StreamWriter]: ... + ) -> tuple[StreamReader, StreamWriter]: + """Similar to `open_connection` but works with UNIX Domain Sockets. +""" async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, @@ -87,12 +168,29 @@ if sys.platform != "win32": loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any, - ) -> Server: ... + ) -> Server: + """Similar to `start_server` but works with UNIX Domain Sockets. +""" class FlowControlMixin(protocols.Protocol): + """Reusable flow control logic for StreamWriter.drain(). + +This implements the protocol methods pause_writing(), +resume_writing() and connection_lost(). If the subclass overrides +these it must call the super methods. + +StreamWriter.drain() must wait for _drain_helper() coroutine. +""" def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): + """Helper class to adapt between Protocol and StreamReader. + +(This is a helper class instead of making StreamReader itself a +Protocol subclass, because the StreamReader has other potential +uses, and to prevent the user of the StreamReader to accidentally +call inappropriate methods of the protocol.) +""" def __init__( self, stream_reader: StreamReader, @@ -102,6 +200,14 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __del__(self) -> None: ... class StreamWriter: + """Wraps a Transport. + +This exposes write(), writelines(), [can_]write_eof(), +get_extra_info() and close(). 
It adds drain() which returns an +optional Future on which you can wait for flow control. It also +adds a transport property which references the Transport +directly. +""" def __init__( self, transport: transports.WriteTransport, @@ -119,7 +225,14 @@ class StreamWriter: def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... def get_extra_info(self, name: str, default: Any = None) -> Any: ... - async def drain(self) -> None: ... + async def drain(self) -> None: + """Flush the write buffer. + +The intended use is to write + + w.write(data) + await w.drain() +""" if sys.version_info >= (3, 12): async def start_tls( self, @@ -128,11 +241,15 @@ class StreamWriter: server_hostname: str | None = None, ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, - ) -> None: ... + ) -> None: + """Upgrade an existing stream-based connection to TLS. +""" elif sys.version_info >= (3, 11): async def start_tls( self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None - ) -> None: ... + ) -> None: + """Upgrade an existing stream-based connection to TLS. +""" if sys.version_info >= (3, 13): def __del__(self, warnings: ModuleType = ...) -> None: ... @@ -145,15 +262,111 @@ class StreamReader: def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... - def at_eof(self) -> bool: ... + def at_eof(self) -> bool: + """Return True if the buffer is empty and 'feed_eof' was called. +""" def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... - async def readline(self) -> bytes: ... + async def readline(self) -> bytes: + """Read chunk of data from the stream until newline (b' +') is found. + + On success, return chunk that ends with newline. If only partial + line can be read due to EOF, return incomplete line without + terminating newline. When EOF was reached while no bytes read, empty + bytes object is returned. + + If limit is reached, ValueError will be raised. In that case, if + newline was found, complete line including newline will be removed + from internal buffer. Else, internal buffer will be cleared. Limit is + compared against part of the line without newline. + + If stream was paused, this function will automatically resume it if + needed. + """ if sys.version_info >= (3, 13): - async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: ... + async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: + """Read data from the stream until ``separator`` is found. + +On success, the data and separator will be removed from the +internal buffer (consumed). Returned data will include the +separator at the end. + +Configured stream limit is used to check result. Limit sets the +maximal length of data that can be returned, not counting the +separator. + +If an EOF occurs and the complete separator is still not found, +an IncompleteReadError exception will be raised, and the internal +buffer will be reset. The IncompleteReadError.partial attribute +may contain the separator partially. + +If the data cannot be read because of over limit, a +LimitOverrunError exception will be raised, and the data +will be left in the internal buffer, so it can be read again. + +The ``separator`` may also be a tuple of separators. In this +case the return value will be the shortest possible that has any +separator as the suffix. 
For the purposes of LimitOverrunError, +the shortest possible separator is considered to be the one that +matched. +""" else: - async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: ... + async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: + """Read data from the stream until ``separator`` is found. + + On success, the data and separator will be removed from the + internal buffer (consumed). Returned data will include the + separator at the end. + + Configured stream limit is used to check result. Limit sets the + maximal length of data that can be returned, not counting the + separator. + + If an EOF occurs and the complete separator is still not found, + an IncompleteReadError exception will be raised, and the internal + buffer will be reset. The IncompleteReadError.partial attribute + may contain the separator partially. + + If the data cannot be read because of over limit, a + LimitOverrunError exception will be raised, and the data + will be left in the internal buffer, so it can be read again. + """ + + async def read(self, n: int = -1) -> bytes: + """Read up to `n` bytes from the stream. + +If `n` is not provided or set to -1, +read until EOF, then return all read bytes. +If EOF was received and the internal buffer is empty, +return an empty bytes object. + +If `n` is 0, return an empty bytes object immediately. + +If `n` is positive, return at most `n` available bytes +as soon as at least 1 byte is available in the internal buffer. +If EOF is received before any byte is read, return an empty +bytes object. + +Returned value is not limited with limit, configured at stream +creation. + +If stream was paused, this function will automatically resume it if +needed. +""" + async def readexactly(self, n: int) -> bytes: + """Read exactly `n` bytes. + +Raise an IncompleteReadError if EOF is reached before `n` bytes can be +read. The IncompleteReadError.partial attribute of the exception will +contain the partial read bytes. + +if n is zero, return empty bytes object. + +Returned value is not limited with limit, configured at stream +creation. - async def read(self, n: int = -1) -> bytes: ... - async def readexactly(self, n: int) -> bytes: ... +If stream was paused, this function will automatically resume it if +needed. +""" def __aiter__(self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi index ceee2b5b90a09..4b1dc0e066f58 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi @@ -13,6 +13,8 @@ STDOUT: int DEVNULL: int class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): + """Like StreamReaderProtocol, but for a subprocess. +""" stdin: streams.StreamWriter | None stdout: streams.StreamReader | None stderr: streams.StreamReader | None @@ -29,7 +31,9 @@ class Process: ) -> None: ... @property def returncode(self) -> int | None: ... - async def wait(self) -> int: ... + async def wait(self) -> int: + """Wait until the process exit and return the process return code. +""" def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... 
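To make the reader/writer surface documented above concrete, here is a minimal sketch of a client and a per-connection callback built on open_connection()/start_server(), drain(), readline() and readexactly(). The host, port and the tiny HELLO exchange are invented for illustration and are not part of the stubs.

# Minimal sketch of the StreamReader/StreamWriter API described above.
# Host, port and the exchanged payload are placeholder assumptions.
import asyncio

async def fetch_banner(host: str = "127.0.0.1", port: int = 8888) -> bytes:
    reader, writer = await asyncio.open_connection(host, port)
    writer.write(b"HELLO\r\n")
    await writer.drain()                 # wait for flow control, as drain() documents
    line = await reader.readline()       # read until b"\n", or a partial line at EOF
    writer.close()
    await writer.wait_closed()
    return line

async def handle_client(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    data = await reader.readexactly(5)   # raises IncompleteReadError on early EOF
    writer.write(data)
    await writer.drain()
    writer.close()

async def main() -> None:
    server = await asyncio.start_server(handle_client, "127.0.0.1", 8888)
    async with server:
        print(await fetch_banner())      # the server closes, so readline() returns the partial line

asyncio.run(main())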
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi index 30b7c9129f6f9..3076e17d865ad 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi @@ -17,10 +17,29 @@ else: _T = TypeVar("_T") class TaskGroup: + """Asynchronous context manager for managing groups of tasks. + +Example use: + + async with asyncio.TaskGroup() as group: + task1 = group.create_task(some_coroutine(...)) + task2 = group.create_task(other_coroutine(...)) + print("Both tasks have completed now.") + +All tasks are awaited when the context manager exits. + +Any exceptions other than `asyncio.CancelledError` raised within +a task will cancel all remaining tasks and wait for them to exit. +The exceptions are then combined and raised as an `ExceptionGroup`. +""" _loop: AbstractEventLoop | None _tasks: set[Task[Any]] async def __aenter__(self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... - def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: + """Create a new task in this group and return it. + +Similar to `asyncio.create_task`. +""" def _on_task_done(self, task: Task[object]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi index 1442f7400a9c9..8b2b6eb0ae56b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi @@ -1,3 +1,5 @@ +"""Support for tasks, coroutines and the scheduler. +""" import concurrent.futures import sys from _asyncio import ( @@ -90,18 +92,101 @@ if sys.version_info >= (3, 13): @type_check_only class _SyncAndAsyncIterator(Iterator[_T_co], AsyncIterator[_T_co], Protocol[_T_co]): ... - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: ... + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: + """Create an iterator of awaitables or their results in completion order. + +Run the supplied awaitables concurrently. The returned object can be +iterated to obtain the results of the awaitables as they finish. + +The object returned can be iterated as an asynchronous iterator or a plain +iterator. When asynchronous iteration is used, the originally-supplied +awaitables are yielded if they are tasks or futures. This makes it easy to +correlate previously-scheduled tasks with their results: + + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] + + async for earliest_connect in as_completed(tasks): + # earliest_connect is done. The result can be obtained by + # awaiting it or calling earliest_connect.result() + reader, writer = await earliest_connect + + if earliest_connect is ipv6_connect: + print("IPv6 connection established.") + else: + print("IPv4 connection established.") + +During asynchronous iteration, implicitly-created tasks will be yielded for +supplied awaitables that aren't tasks or futures. 
+ +When used as a plain iterator, each iteration yields a new coroutine that +returns the result or raises the exception of the next completed awaitable. +This pattern is compatible with Python versions older than 3.13: + + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] + + for next_connect in as_completed(tasks): + # next_connect is not one of the original task objects. It must be + # awaited to obtain the result value or raise the exception of the + # awaitable that finishes next. + reader, writer = await next_connect + +A TimeoutError is raised if the timeout occurs before all awaitables are +done. This is raised by the async for loop during asynchronous iteration or +by the coroutines yielded during plain iteration. +""" elif sys.version_info >= (3, 10): - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: + """Return an iterator whose values are coroutines. + + When waiting for the yielded coroutines you'll get the results (or + exceptions!) of the original Futures (or coroutines), in the order + in which and as soon as they complete. + + This differs from PEP 3148; the proper way to use this is: + + for f in as_completed(fs): + result = await f # The 'await' may raise. + # Use result. + + If a timeout is specified, the 'await' will raise + TimeoutError when the timeout occurs before all Futures are done. + + Note: The futures 'f' are not necessarily members of fs. + """ else: def as_completed( fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None - ) -> Iterator[Future[_T]]: ... + ) -> Iterator[Future[_T]]: + """Return an iterator whose values are coroutines. + + When waiting for the yielded coroutines you'll get the results (or + exceptions!) of the original Futures (or coroutines), in the order + in which and as soon as they complete. + + This differs from PEP 3148; the proper way to use this is: + + for f in as_completed(fs): + result = await f # The 'await' may raise. + # Use result. + + If a timeout is specified, the 'await' will raise + TimeoutError when the timeout occurs before all Futures are done. + + Note: The futures 'f' are not necessarily members of fs. + """ @overload -def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[overload-overlap] +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: # type: ignore[overload-overlap] + """Wrap a coroutine or an awaitable in a future. + +If the argument is a Future, it is returned directly. +""" @overload def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... @@ -113,7 +198,36 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No # N.B. Having overlapping overloads is the only way to get acceptable type inference in all edge cases. if sys.version_info >= (3, 10): @overload - def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... 
# type: ignore[overload-overlap] + def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: # type: ignore[overload-overlap] + """Return a future aggregating results from the given coroutines/futures. + +Coroutines will be wrapped in a future and scheduled in the event +loop. They will not necessarily be scheduled in the same order as +passed in. + +All futures must share the same event loop. If all the tasks are +done successfully, the returned future's result is the list of +results (in the order of the original sequence, not necessarily +the order of results arrival). If *return_exceptions* is True, +exceptions in the tasks are treated the same as successful +results, and gathered in the result list; otherwise, the first +raised exception will be immediately propagated to the returned +future. + +Cancellation: if the outer Future is cancelled, all children (that +have not completed yet) are also cancelled. If any child is +cancelled, this is treated as if it raised CancelledError -- +the outer Future is *not* cancelled in this case. (This is to +prevent the cancellation of one child to cause other children to +be cancelled.) + +If *return_exceptions* is False, cancelling gather() after it +has been marked done won't cancel any submitted awaitables. +For instance, gather can be marked done after propagating an +exception to the caller, therefore, calling ``gather.cancel()`` +after catching an exception (raised by one of the awaitables) from +gather won't cancel any other awaitables. +""" @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: Literal[False] = False @@ -228,7 +342,36 @@ else: @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], /, *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False - ) -> Future[tuple[_T1]]: ... + ) -> Future[tuple[_T1]]: + """Return a future aggregating results from the given coroutines/futures. + + Coroutines will be wrapped in a future and scheduled in the event + loop. They will not necessarily be scheduled in the same order as + passed in. + + All futures must share the same event loop. If all the tasks are + done successfully, the returned future's result is the list of + results (in the order of the original sequence, not necessarily + the order of results arrival). If *return_exceptions* is True, + exceptions in the tasks are treated the same as successful + results, and gathered in the result list; otherwise, the first + raised exception will be immediately propagated to the returned + future. + + Cancellation: if the outer Future is cancelled, all children (that + have not completed yet) are also cancelled. If any child is + cancelled, this is treated as if it raised CancelledError -- + the outer Future is *not* cancelled in this case. (This is to + prevent the cancellation of one child to cause other children to + be cancelled.) + + If *return_exceptions* is False, cancelling gather() after it + has been marked done won't cancel any submitted awaitables. + For instance, gather can be marked done after propagating an + exception to the caller, therefore, calling ``gather.cancel()`` + after catching an exception (raised by one of the awaitables) from + gather won't cancel any other awaitables. 
+ """ @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], @@ -350,29 +493,134 @@ else: ) -> Future[list[_T | BaseException]]: ... # unlike some asyncio apis, This does strict runtime checking of actually being a coroutine, not of any future-like. -def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... +def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: + """Submit a coroutine object to a given event loop. + +Return a concurrent.futures.Future to access the result. +""" if sys.version_info >= (3, 10): - def shield(arg: _FutureLike[_T]) -> Future[_T]: ... + def shield(arg: _FutureLike[_T]) -> Future[_T]: + """Wait for a future, shielding it from cancellation. + +The statement + + task = asyncio.create_task(something()) + res = await shield(task) + +is exactly equivalent to the statement + + res = await something() + +*except* that if the coroutine containing it is cancelled, the +task running in something() is not cancelled. From the POV of +something(), the cancellation did not happen. But its caller is +still cancelled, so the yield-from expression still raises +CancelledError. Note: If something() is cancelled by other means +this will still cancel shield(). + +If you want to completely ignore cancellation (not recommended) +you can combine shield() with a try/except clause, as follows: + + task = asyncio.create_task(something()) + try: + res = await shield(task) + except CancelledError: + res = None + +Save a reference to tasks passed to this function, to avoid +a task disappearing mid-execution. The event loop only keeps +weak references to tasks. A task that isn't referenced elsewhere +may get garbage collected at any time, even before it's done. +""" @overload - async def sleep(delay: float) -> None: ... + async def sleep(delay: float) -> None: + """Coroutine that completes after a given time (in seconds). +""" @overload async def sleep(delay: float, result: _T) -> _T: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: + """Wait for the single Future or coroutine to complete, with timeout. + +Coroutine will be wrapped in Task. + +Returns result of the Future or coroutine. When a timeout occurs, +it cancels the task and raises TimeoutError. To avoid the task +cancellation, wrap it in shield(). + +If the wait is cancelled, the task is also cancelled. + +If the task suppresses the cancellation and returns a value instead, +that value is returned. + +This function is a coroutine. +""" else: - def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: + """Wait for a future, shielding it from cancellation. + + The statement + + res = await shield(something()) + + is exactly equivalent to the statement + + res = await something() + + *except* that if the coroutine containing it is cancelled, the + task running in something() is not cancelled. From the POV of + something(), the cancellation did not happen. But its caller is + still cancelled, so the yield-from expression still raises + CancelledError. Note: If something() is cancelled by other means + this will still cancel shield(). 
+ + If you want to completely ignore cancellation (not recommended) + you can combine shield() with a try/except clause, as follows: + + try: + res = await shield(something()) + except CancelledError: + res = None + """ @overload - async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: + """Coroutine that completes after a given time (in seconds). +""" @overload async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: + """Wait for the single Future or coroutine to complete, with timeout. + + Coroutine will be wrapped in Task. + + Returns result of the Future or coroutine. When a timeout occurs, + it cancels the task and raises TimeoutError. To avoid the task + cancellation, wrap it in shield(). + + If the wait is cancelled, the task is also cancelled. + + This function is a coroutine. + """ if sys.version_info >= (3, 11): @overload async def wait( fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[_FT], set[_FT]]: ... + ) -> tuple[set[_FT], set[_FT]]: + """Wait for the Futures or Tasks given by fs to complete. + +The fs iterable must not be empty. + +Returns two sets of Future: (done, pending). + +Usage: + + done, pending = await asyncio.wait(fs) + +Note: This does not raise TimeoutError! Futures that aren't done +when the timeout occurs are returned in the second set. +""" @overload async def wait( fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -382,7 +630,22 @@ elif sys.version_info >= (3, 10): @overload async def wait( # type: ignore[overload-overlap] fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" - ) -> tuple[set[_FT], set[_FT]]: ... + ) -> tuple[set[_FT], set[_FT]]: + """Wait for the Futures and coroutines given by fs to complete. + + The fs iterable must not be empty. + + Coroutines will be wrapped in Tasks. + + Returns two sets of Future: (done, pending). + + Usage: + + done, pending = await asyncio.wait(fs) + + Note: This does not raise TimeoutError! Futures that aren't done + when the timeout occurs are returned in the second set. + """ @overload async def wait( fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -396,7 +659,22 @@ else: loop: AbstractEventLoop | None = None, timeout: float | None = None, return_when: str = "ALL_COMPLETED", - ) -> tuple[set[_FT], set[_FT]]: ... + ) -> tuple[set[_FT], set[_FT]]: + """Wait for the Futures and coroutines given by fs to complete. + + The fs iterable must not be empty. + + Coroutines will be wrapped in Tasks. + + Returns two sets of Future: (done, pending). + + Usage: + + done, pending = await asyncio.wait(fs) + + Note: This does not raise TimeoutError! Futures that aren't done + when the timeout occurs are returned in the second set. + """ @overload async def wait( fs: Iterable[Awaitable[_T]], @@ -411,18 +689,30 @@ if sys.version_info >= (3, 12): else: _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] -def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... 
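A short, hedged sketch of how the coordination helpers documented above fit together: gather() with return_exceptions, wait_for() with a timeout, shield() with a kept task reference, and wait() returning (done, pending). The work() coroutine and the delays are placeholders, not anything taken from the stubs.

# Illustrative sketch of gather()/wait_for()/shield()/wait() as documented above.
import asyncio

async def work(n: int) -> int:
    await asyncio.sleep(0.1 * n)
    return n * n

async def main() -> None:
    # gather: results come back in argument order; with return_exceptions=True
    # exceptions are collected into the result list instead of being raised.
    results = await asyncio.gather(work(1), work(2), work(3), return_exceptions=True)
    print(results)  # [1, 4, 9]

    # wait_for: cancels the task and raises TimeoutError when the timeout expires.
    try:
        await asyncio.wait_for(work(50), timeout=0.2)
    except asyncio.TimeoutError:
        print("timed out")

    # shield: keep a reference to the task (the loop only holds weak references)
    # and protect it from cancellation of the outer await.
    task = asyncio.create_task(work(1))
    result = await asyncio.shield(task)

    # wait: returns (done, pending) sets and does not raise on timeout.
    done, pending = await asyncio.wait({asyncio.create_task(work(2))}, timeout=1.0)
    print(len(done), len(pending), result)

asyncio.run(main())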
+def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: + """Return a set of all tasks for the loop. +""" if sys.version_info >= (3, 11): - def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: + """Schedule the execution of a coroutine object in a spawn task. + +Return a Task object. +""" else: - def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: + """Schedule the execution of a coroutine object in a spawn task. + + Return a Task object. + """ if sys.version_info >= (3, 12): from _asyncio import current_task as current_task else: - def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: + """Return a currently executed task. +""" if sys.version_info >= (3, 14): def eager_task_factory( @@ -472,4 +762,21 @@ if sys.version_info >= (3, 12): def create_eager_task_factory( custom_task_constructor: _CustomTaskConstructor[_TaskT_co], - ) -> _EagerTaskFactoryType[_TaskT_co]: ... + ) -> _EagerTaskFactoryType[_TaskT_co]: + """Create a function suitable for use as a task factory on an event-loop. + +Example usage: + + loop.set_task_factory( + asyncio.create_eager_task_factory(my_task_constructor)) + +Now, tasks created will be started immediately (rather than being first +scheduled to an event loop). The constructor argument can be any callable +that returns a Task-compatible object and has a signature compatible +with `Task.__init__`; it must have the `eager_start` keyword argument. + +Most applications will use `Task` for `custom_task_constructor` and in +this case there's no need to call `create_eager_task_factory()` +directly. Instead the global `eager_task_factory` instance can be +used. E.g. `loop.set_task_factory(asyncio.eager_task_factory)`. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi index 00aae2ea814cb..5f0ce23352905 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi @@ -1,3 +1,5 @@ +"""High-level support for working with threads in asyncio +""" from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -7,4 +9,13 @@ __all__ = ("to_thread",) _P = ParamSpec("_P") _R = TypeVar("_R") -async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... +async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: + """Asynchronously run function *func* in a separate thread. + +Any *args and **kwargs supplied for this function are directly passed +to *func*. Also, the current :class:`contextvars.Context` is propagated, +allowing context variables from the main thread to be accessed in the +separate thread. + +Return a coroutine that can be awaited to get the eventual result of *func*. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi index 668cccbfe8b18..826010a8b18c7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi @@ -7,14 +7,63 @@ __all__ = ("Timeout", "timeout", "timeout_at") @final class Timeout: - def __init__(self, when: float | None) -> None: ... - def when(self) -> float | None: ... - def reschedule(self, when: float | None) -> None: ... - def expired(self) -> bool: ... + """Asynchronous context manager for cancelling overdue coroutines. + +Use `timeout()` or `timeout_at()` rather than instantiating this class directly. +""" + def __init__(self, when: float | None) -> None: + """Schedule a timeout that will trigger at a given loop time. + +- If `when` is `None`, the timeout will never trigger. +- If `when < loop.time()`, the timeout will trigger on the next + iteration of the event loop. +""" + def when(self) -> float | None: + """Return the current deadline. +""" + def reschedule(self, when: float | None) -> None: + """Reschedule the timeout. +""" + def expired(self) -> bool: + """Is timeout expired during execution? +""" async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... -def timeout(delay: float | None) -> Timeout: ... -def timeout_at(when: float | None) -> Timeout: ... +def timeout(delay: float | None) -> Timeout: + """Timeout async context manager. + +Useful in cases when you want to apply timeout logic around block +of code or in cases when asyncio.wait_for is not suitable. For example: + +>>> async with asyncio.timeout(10): # 10 seconds timeout +... await long_running_task() + + +delay - value in seconds or None to disable timeout logic + +long_running_task() is interrupted by raising asyncio.CancelledError, +the top-most affected timeout() context manager converts CancelledError +into TimeoutError. +""" +def timeout_at(when: float | None) -> Timeout: + """Schedule the timeout at absolute time. + +Like timeout() but argument gives absolute time in the same clock system +as loop.time(). + +Please note: it is not POSIX time but a time with +undefined starting base, e.g. the time of the system power on. + +>>> async with asyncio.timeout_at(loop.time() + 10): +... await long_running_task() + + +when - a deadline when timeout occurs or None to disable timeout logic + +long_running_task() is interrupted by raising asyncio.CancelledError, +the top-most affected timeout() context manager converts CancelledError +into TimeoutError. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi index bc8b809b9c055..223abc068ce75 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi @@ -1,3 +1,5 @@ +"""Tools to analyze tasks running in asyncio programs. +""" import sys from collections.abc import Iterable from enum import Enum @@ -31,16 +33,33 @@ class NodeType(Enum): TASK = 2 class CycleFoundException(Exception): + """Raised when there is a cycle when drawing the call tree. +""" cycles: list[list[int]] id2name: dict[int, str] def __init__(self, cycles: list[list[int]], id2name: dict[int, str]) -> None: ... def get_all_awaited_by(pid: SupportsIndex) -> list[_AwaitedInfo]: ... 
-def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: ... +def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: + """ +Build a list of strings for pretty-print an async call tree. + +The call tree is produced by `get_all_async_stacks()`, prefixing tasks +with `task_emoji` and coroutine frames with `cor_emoji`. +""" def build_task_table(result: Iterable[_AwaitedInfo]) -> list[list[int | str]]: ... if sys.version_info >= (3, 14): - def exit_with_permission_help_text() -> None: ... + def exit_with_permission_help_text() -> None: + """ +Prints a message pointing to platform-specific permission help text and exits the program. +This function is called when a PermissionError is encountered while trying +to attach to a process. +""" -def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: ... -def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: ... +def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: + """Build and print a table of all pending tasks under `pid`. +""" +def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: + """Build and print a tree of all pending tasks under `pid`. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi index cc870d5e0b9ad..464911c5fe3d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi @@ -1,3 +1,5 @@ +"""Abstract Transport class. +""" from asyncio.events import AbstractEventLoop from asyncio.protocols import BaseProtocol from collections.abc import Iterable, Mapping @@ -8,50 +10,215 @@ from typing import Any __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") class BaseTransport: + """Base class for transports. +""" __slots__ = ("_extra",) def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... - def get_extra_info(self, name: str, default: Any = None) -> Any: ... - def is_closing(self) -> bool: ... - def close(self) -> None: ... - def set_protocol(self, protocol: BaseProtocol) -> None: ... - def get_protocol(self) -> BaseProtocol: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Get optional transport information. +""" + def is_closing(self) -> bool: + """Return True if the transport is closing or closed. +""" + def close(self) -> None: + """Close the transport. + +Buffered data will be flushed asynchronously. No more data +will be received. After all buffered data is flushed, the +protocol's connection_lost() method will (eventually) be +called with None as its argument. +""" + def set_protocol(self, protocol: BaseProtocol) -> None: + """Set a new protocol. +""" + def get_protocol(self) -> BaseProtocol: + """Return the current protocol. +""" class ReadTransport(BaseTransport): + """Interface for read-only transports. +""" __slots__ = () - def is_reading(self) -> bool: ... - def pause_reading(self) -> None: ... - def resume_reading(self) -> None: ... + def is_reading(self) -> bool: + """Return True if the transport is receiving. +""" + def pause_reading(self) -> None: + """Pause the receiving end. + +No data will be passed to the protocol's data_received() +method until resume_reading() is called. +""" + def resume_reading(self) -> None: + """Resume the receiving end. 
+ +Data received will once again be passed to the protocol's +data_received() method. +""" class WriteTransport(BaseTransport): + """Interface for write-only transports. +""" __slots__ = () - def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... - def get_write_buffer_size(self) -> int: ... - def get_write_buffer_limits(self) -> tuple[int, int]: ... - def write(self, data: bytes | bytearray | memoryview[Any]) -> None: ... # any memoryview format or shape + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: + """Set the high- and low-water limits for write flow control. + +These two values control when to call the protocol's +pause_writing() and resume_writing() methods. If specified, +the low-water limit must be less than or equal to the +high-water limit. Neither value can be negative. + +The defaults are implementation-specific. If only the +high-water limit is given, the low-water limit defaults to an +implementation-specific value less than or equal to the +high-water limit. Setting high to zero forces low to zero as +well, and causes pause_writing() to be called whenever the +buffer becomes non-empty. Setting low to zero causes +resume_writing() to be called only once the buffer is empty. +Use of zero for either limit is generally sub-optimal as it +reduces opportunities for doing I/O and computation +concurrently. +""" + def get_write_buffer_size(self) -> int: + """Return the current size of the write buffer. +""" + def get_write_buffer_limits(self) -> tuple[int, int]: + """Get the high and low watermarks for write flow control. +Return a tuple (low, high) where low and high are +positive number of bytes. +""" + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape + """Write some data bytes to the transport. + +This does not block; it buffers the data and arranges for it +to be sent out asynchronously. +""" def writelines( self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]] - ) -> None: ... # any memoryview format or shape - def write_eof(self) -> None: ... - def can_write_eof(self) -> bool: ... - def abort(self) -> None: ... + ) -> None: # any memoryview format or shape + """Write a list (or any iterable) of data bytes to the transport. + +The default implementation concatenates the arguments and +calls write() on the result. +""" + def write_eof(self) -> None: + """Close the write end after flushing buffered data. + +(This is like typing ^D into a UNIX program reading from stdin.) + +Data may still be received. +""" + def can_write_eof(self) -> bool: + """Return True if this transport supports write_eof(), False if not. +""" + def abort(self) -> None: + """Close the transport immediately. + +Buffered data will be lost. No more data will be received. +The protocol's connection_lost() method will (eventually) be +called with None as its argument. +""" class Transport(ReadTransport, WriteTransport): + """Interface representing a bidirectional transport. + +There may be several implementations, but typically, the user does +not implement new transports; rather, the platform provides some +useful transports that are implemented using the platform's best +practices. + +The user never instantiates a transport directly; they call a +utility function, passing it a protocol factory and other +information necessary to create the transport and protocol. (E.g. +EventLoop.create_connection() or EventLoop.create_server().) 
+ +The utility function will asynchronously create a transport and a +protocol and hook them up by calling the protocol's +connection_made() method, passing it the transport. + +The implementation here raises NotImplemented for every method +except writelines(), which calls write() in a loop. +""" __slots__ = () class DatagramTransport(BaseTransport): + """Interface for datagram (UDP) transports. +""" __slots__ = () - def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... - def abort(self) -> None: ... + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: + """Send data to the transport. + +This does not block; it buffers the data and arranges for it +to be sent out asynchronously. +addr is target socket address. +If addr is None use target address pointed on transport creation. +If data is an empty bytes object a zero-length datagram will be +sent. +""" + def abort(self) -> None: + """Close the transport immediately. + +Buffered data will be lost. No more data will be received. +The protocol's connection_lost() method will (eventually) be +called with None as its argument. +""" class SubprocessTransport(BaseTransport): __slots__ = () - def get_pid(self) -> int: ... - def get_returncode(self) -> int | None: ... - def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... - def send_signal(self, signal: int) -> None: ... - def terminate(self) -> None: ... - def kill(self) -> None: ... + def get_pid(self) -> int: + """Get subprocess id. +""" + def get_returncode(self) -> int | None: + """Get subprocess returncode. + +See also +http://docs.python.org/3/library/subprocess#subprocess.Popen.returncode +""" + def get_pipe_transport(self, fd: int) -> BaseTransport | None: + """Get transport for pipe with number fd. +""" + def send_signal(self, signal: int) -> None: + """Send signal to subprocess. + +See also: +docs.python.org/3/library/subprocess#subprocess.Popen.send_signal +""" + def terminate(self) -> None: + """Stop the subprocess. + +Alias for close() method. + +On Posix OSs the method sends SIGTERM to the subprocess. +On Windows the Win32 API function TerminateProcess() + is called to stop the subprocess. + +See also: +http://docs.python.org/3/library/subprocess#subprocess.Popen.terminate +""" + def kill(self) -> None: + """Kill the subprocess. + +On Posix OSs the function sends SIGKILL to the subprocess. +On Windows kill() is an alias for terminate(). + +See also: +http://docs.python.org/3/library/subprocess#subprocess.Popen.kill +""" class _FlowControlMixin(Transport): + """All the logic for (write) flow control in a mix-in base class. + +The subclass must implement get_write_buffer_size(). It must call +_maybe_pause_protocol() whenever the write buffer size increases, +and _maybe_resume_protocol() whenever it decreases. It may also +override set_write_buffer_limits() (e.g. to specify different +defaults). + +The subclass constructor must call super().__init__(extra). This +will call set_write_buffer_limits(). + +The user may call set_write_buffer_limits() and +get_write_buffer_size(), and their protocol's pause_writing() and +resume_writing() may be called. +""" __slots__ = ("_loop", "_protocol_paused", "_high_water", "_low_water") def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... 
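The transport docstrings above describe write flow control through the high/low watermarks and the pause_writing()/resume_writing() callbacks. Below is a hedged sketch of a protocol that honours those callbacks; the echo behaviour, the address and the watermark values are invented for the example, not taken from the stubs.

# Hedged sketch of the transport/protocol surface documented above: a protocol
# reacting to the write-buffer watermarks via pause_writing()/resume_writing().
import asyncio

class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.Transport) -> None:
        self.transport = transport
        self.can_write = True
        peer = transport.get_extra_info("peername")            # optional transport info
        transport.set_write_buffer_limits(high=64 * 1024, low=16 * 1024)
        print("connection from", peer)

    def data_received(self, data: bytes) -> None:
        if self.can_write:
            self.transport.write(data)                         # buffered, sent asynchronously

    def pause_writing(self) -> None:
        # Called when the write buffer crosses the high-water mark.
        self.can_write = False

    def resume_writing(self) -> None:
        # Called when the write buffer drains below the low-water mark.
        self.can_write = True

    def connection_lost(self, exc: Exception | None) -> None:
        print("connection closed", exc)

async def main() -> None:
    loop = asyncio.get_running_loop()
    server = await loop.create_server(EchoProtocol, "127.0.0.1", 8888)
    async with server:
        await server.serve_forever()

# asyncio.run(main())  # commented out: this runs an echo server until cancelled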
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi index 492f1e42adf20..e610f7271c3cb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi @@ -14,6 +14,12 @@ _WriteBuffer: TypeAlias = bytearray | memoryview _CMSG: TypeAlias = tuple[int, int, bytes] class TransportSocket: + """A socket-like wrapper for exposing real transport sockets. + +These objects can be safely returned by APIs like +`transport.get_extra_info('socket')`. All potentially disruptive +operations (like "socket.close()") are banned. +""" __slots__ = ("_sock",) def __init__(self, sock: socket.socket) -> None: ... @property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi index 9071ee9a2fa7e..4f13579af61dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi @@ -1,3 +1,5 @@ +"""Selector event loop for Unix with signal handling. +""" import sys import types from _typeshed import StrPath @@ -50,45 +52,159 @@ if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class AbstractChildWatcher: + """Abstract base class for monitoring child processes. + +Objects derived from this class monitor a collection of subprocesses and +report their termination or interruption by a signal. + +New callbacks are registered with .add_child_handler(). Starting a new +process must be done within a 'with' block to allow the watcher to suspend +its activity until the new process if fully registered (this is needed to +prevent a race condition in some implementations). + +Example: + with watcher: + proc = subprocess.Popen("sleep 1") + watcher.add_child_handler(proc.pid, callback) + +Notes: + Implementations of this class must be thread-safe. + + Since child watcher objects may catch the SIGCHLD signal and call + waitpid(-1), there should be only one active object per process. +""" @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... + ) -> None: + """Register a new child handler. + +Arrange for callback(pid, returncode, *args) to be called when +process 'pid' terminates. Specifying another callback for the same +process replaces the previous handler. + +Note: callback() must be thread-safe. +""" @abstractmethod - def remove_child_handler(self, pid: int) -> bool: ... + def remove_child_handler(self, pid: int) -> bool: + """Removes the handler for process 'pid'. + +The function returns True if the handler was successfully removed, +False if there was nothing to remove. +""" @abstractmethod - def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: + """Attach the watcher to an event loop. + +If the watcher was previously attached to an event loop, then it is +first detached before attaching to the new loop. + +Note: loop may be None. +""" @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + """Close the watcher. + +This must be called to make sure that any underlying resource is freed. +""" @abstractmethod - def __enter__(self) -> Self: ... 
+ def __enter__(self) -> Self: + """Enter the watcher's context and allow starting new processes + +This function must return self +""" @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: ... + ) -> None: + """Exit the watcher's context +""" @abstractmethod - def is_active(self) -> bool: ... + def is_active(self) -> bool: + """Return ``True`` if the watcher is active and is used by the event loop. + +Return True if the watcher is installed and ready to handle process exit +notifications. + +""" else: class AbstractChildWatcher: + """Abstract base class for monitoring child processes. + + Objects derived from this class monitor a collection of subprocesses and + report their termination or interruption by a signal. + + New callbacks are registered with .add_child_handler(). Starting a new + process must be done within a 'with' block to allow the watcher to suspend + its activity until the new process if fully registered (this is needed to + prevent a race condition in some implementations). + + Example: + with watcher: + proc = subprocess.Popen("sleep 1") + watcher.add_child_handler(proc.pid, callback) + + Notes: + Implementations of this class must be thread-safe. + + Since child watcher objects may catch the SIGCHLD signal and call + waitpid(-1), there should be only one active object per process. + """ @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] - ) -> None: ... + ) -> None: + """Register a new child handler. + + Arrange for callback(pid, returncode, *args) to be called when + process 'pid' terminates. Specifying another callback for the same + process replaces the previous handler. + + Note: callback() must be thread-safe. + """ @abstractmethod - def remove_child_handler(self, pid: int) -> bool: ... + def remove_child_handler(self, pid: int) -> bool: + """Removes the handler for process 'pid'. + + The function returns True if the handler was successfully removed, + False if there was nothing to remove. +""" @abstractmethod - def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... + def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: + """Attach the watcher to an event loop. + + If the watcher was previously attached to an event loop, then it is + first detached before attaching to the new loop. + + Note: loop may be None. + """ @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + """Close the watcher. + + This must be called to make sure that any underlying resource is freed. + """ @abstractmethod - def __enter__(self) -> Self: ... + def __enter__(self) -> Self: + """Enter the watcher's context and allow starting new processes + + This function must return self +""" @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None - ) -> None: ... + ) -> None: + """Exit the watcher's context +""" @abstractmethod - def is_active(self) -> bool: ... + def is_active(self) -> bool: + """Return ``True`` if the watcher is active and is used by the event loop. + + Return True if the watcher is installed and ready to handle process exit + notifications. 
+ + """ if sys.platform != "win32": if sys.version_info < (3, 14): @@ -102,6 +218,15 @@ if sys.platform != "win32": @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class SafeChildWatcher(BaseChildWatcher): + """'Safe' child watcher implementation. + +This implementation avoids disrupting other code spawning processes by +polling explicitly each process in the SIGCHLD handler instead of calling +os.waitpid(-1). + +This is a safe solution but it has a significant overhead when handling a +big number of children (O(n) each time SIGCHLD is raised) +""" def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -113,6 +238,15 @@ if sys.platform != "win32": @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class FastChildWatcher(BaseChildWatcher): + """'Fast' child watcher implementation. + +This implementation reaps every terminated processes by calling +os.waitpid(-1) directly, possibly breaking other code spawning processes +and waiting for their termination. + +There is no noticeable overhead when handling a big number of children +(O(1) each time a child terminates). +""" def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -131,6 +265,15 @@ if sys.platform != "win32": def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... class SafeChildWatcher(BaseChildWatcher): + """'Safe' child watcher implementation. + + This implementation avoids disrupting other code spawning processes by + polling explicitly each process in the SIGCHLD handler instead of calling + os.waitpid(-1). + + This is a safe solution but it has a significant overhead when handling a + big number of children (O(n) each time SIGCHLD is raised) + """ def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -141,6 +284,15 @@ if sys.platform != "win32": def remove_child_handler(self, pid: int) -> bool: ... class FastChildWatcher(BaseChildWatcher): + """'Fast' child watcher implementation. + + This implementation reaps every terminated processes by calling + os.waitpid(-1) directly, possibly breaking other code spawning processes + and waiting for their termination. + + There is no noticeable overhead when handling a big number of children + (O(1) each time a child terminates). + """ def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -151,6 +303,10 @@ if sys.platform != "win32": def remove_child_handler(self, pid: int) -> bool: ... class _UnixSelectorEventLoop(BaseSelectorEventLoop): + """Unix event loop. + +Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. +""" if sys.version_info >= (3, 13): async def create_unix_server( self, @@ -167,17 +323,33 @@ if sys.platform != "win32": ) -> Server: ... if sys.version_info >= (3, 14): - class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): ... + class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): + """UNIX event loop policy +""" else: class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + """UNIX event loop policy with a watcher for child processes. +""" if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def get_child_watcher(self) -> AbstractChildWatcher: ... 
+ def get_child_watcher(self) -> AbstractChildWatcher: + """Get the watcher for child processes. + +If not yet set, a ThreadedChildWatcher object is automatically created. +""" @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: + """Set the watcher for child processes. +""" else: - def get_child_watcher(self) -> AbstractChildWatcher: ... - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... + def get_child_watcher(self) -> AbstractChildWatcher: + """Get the watcher for child processes. + + If not yet set, a ThreadedChildWatcher object is automatically created. + """ + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: + """Set the watcher for child processes. +""" SelectorEventLoop = _UnixSelectorEventLoop @@ -193,6 +365,16 @@ if sys.platform != "win32": if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") class MultiLoopChildWatcher(AbstractChildWatcher): + """A watcher that doesn't require running loop in the main thread. + +This implementation registers a SIGCHLD signal handler on +instantiation (which may conflict with other code that +install own handler for this signal). + +The solution is safe but it has a significant overhead when +handling a big number of processes (*O(n)* each time a +SIGCHLD is received). +""" def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -207,6 +389,16 @@ if sys.platform != "win32": else: class MultiLoopChildWatcher(AbstractChildWatcher): + """A watcher that doesn't require running loop in the main thread. + + This implementation registers a SIGCHLD signal handler on + instantiation (which may conflict with other code that + install own handler for this signal). + + The solution is safe but it has a significant overhead when + handling a big number of processes (*O(n)* each time a + SIGCHLD is received). + """ def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -221,6 +413,17 @@ if sys.platform != "win32": if sys.version_info < (3, 14): class ThreadedChildWatcher(AbstractChildWatcher): + """Threaded child watcher implementation. + +The watcher uses a thread per process +for waiting for the process finish. + +It doesn't require subscription on POSIX signal +but a thread creation is not free. + +The watcher has O(1) complexity, its performance doesn't depend +on amount of spawn processes. +""" def is_active(self) -> Literal[True]: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -235,6 +438,16 @@ if sys.platform != "win32": def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: ... class PidfdChildWatcher(AbstractChildWatcher): + """Child watcher implementation using Linux's pid file descriptors. + +This child watcher polls process file descriptors (pidfds) to await child +process termination. In some respects, PidfdChildWatcher is a "Goldilocks" +child watcher implementation. It doesn't require signals or threads, doesn't +interfere with any processes launched outside the event loop, and scales +linearly with the number of subprocesses launched by the event loop. The +main disadvantage is that pidfds are specific to Linux, and only work on +recent (5.3+) kernels. +""" def __enter__(self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi index 36d1862fdda78..a5f931fdca357 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi @@ -1,3 +1,23 @@ +"""Basic infrastructure for asynchronous socket service clients and servers. + +There are only two ways to have a program on a single processor do "more +than one thing at a time". Multi-threaded programming is the simplest and +most popular way to do it, but there is another very different technique, +that lets you have nearly all the advantages of multi-threading, without +actually using multiple threads. it's really only practical if your program +is largely I/O bound. If your program is CPU bound, then pre-emptive +scheduled threads are probably what you really need. Network servers are +rarely CPU-bound, however. + +If your operating system supports the select() system call in its I/O +library (and nearly all do), then you can use it to juggle multiple +communication channels at once; doing other work while your I/O is taking +place in the "background." Although this strategy can seem strange and +complex, especially at first, it is in many ways easier to understand and +control than multi-threaded programming. The module documented here solves +many of the difficult problems for you, making the task of building +sophisticated high-performance network servers and clients a snap. +""" import sys from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi index 7f7b05ccc0a39..61e9320810a4c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi @@ -1,3 +1,8 @@ +"""allow programmer to define multiple exit functions to be executed +upon normal program termination. + +Two public functions, register and unregister, are defined. +""" from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -5,8 +10,29 @@ from typing_extensions import ParamSpec _T = TypeVar("_T") _P = ParamSpec("_P") -def _clear() -> None: ... -def _ncallbacks() -> int: ... -def _run_exitfuncs() -> None: ... -def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... -def unregister(func: Callable[..., object], /) -> None: ... +def _clear() -> None: + """Clear the list of previously registered exit functions. +""" +def _ncallbacks() -> int: + """Return the number of registered exit functions. +""" +def _run_exitfuncs() -> None: + """Run all registered exit functions. + +If a callback raises an exception, it is logged with sys.unraisablehook. +""" +def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: + """Register a function to be executed upon normal program termination + + func - function to be called at exit + args - optional arguments to pass to func + kwargs - optional keyword arguments to pass to func + + func is returned to facilitate usage as a decorator. 
+""" +def unregister(func: Callable[..., object], /) -> None: + """Unregister an exit function which was previously registered using +atexit.register + + func - function to be unregistered +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi index f3ce78ccb7fae..08234a5fc3584 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi @@ -5,26 +5,66 @@ _RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] class error(Exception): ... -def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: ... -def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... -def alaw2lin(fragment: Buffer, width: int, /) -> bytes: ... -def avg(fragment: Buffer, width: int, /) -> int: ... -def avgpp(fragment: Buffer, width: int, /) -> int: ... -def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: ... -def byteswap(fragment: Buffer, width: int, /) -> bytes: ... -def cross(fragment: Buffer, width: int, /) -> int: ... -def findfactor(fragment: Buffer, reference: Buffer, /) -> float: ... -def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: ... -def findmax(fragment: Buffer, length: int, /) -> int: ... -def getsample(fragment: Buffer, width: int, index: int, /) -> int: ... -def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: ... -def lin2alaw(fragment: Buffer, width: int, /) -> bytes: ... -def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: ... -def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: ... -def max(fragment: Buffer, width: int, /) -> int: ... -def maxpp(fragment: Buffer, width: int, /) -> int: ... -def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: ... -def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: ... +def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: + """Return a fragment which is the addition of the two samples passed as parameters. +""" +def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: + """Decode an Intel/DVI ADPCM coded fragment to a linear fragment. +""" +def alaw2lin(fragment: Buffer, width: int, /) -> bytes: + """Convert sound fragments in a-LAW encoding to linearly encoded sound fragments. +""" +def avg(fragment: Buffer, width: int, /) -> int: + """Return the average over all samples in the fragment. +""" +def avgpp(fragment: Buffer, width: int, /) -> int: + """Return the average peak-peak value over all samples in the fragment. +""" +def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: + """Return a fragment that is the original fragment with a bias added to each sample. +""" +def byteswap(fragment: Buffer, width: int, /) -> bytes: + """Convert big-endian samples to little-endian and vice versa. +""" +def cross(fragment: Buffer, width: int, /) -> int: + """Return the number of zero crossings in the fragment passed as an argument. +""" +def findfactor(fragment: Buffer, reference: Buffer, /) -> float: + """Return a factor F such that rms(add(fragment, mul(reference, -F))) is minimal. +""" +def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: + """Try to match reference as well as possible to a portion of fragment. 
+""" +def findmax(fragment: Buffer, length: int, /) -> int: + """Search fragment for a slice of specified number of samples with maximum energy. +""" +def getsample(fragment: Buffer, width: int, index: int, /) -> int: + """Return the value of sample index from the fragment. +""" +def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: + """Convert samples to 4 bit Intel/DVI ADPCM encoding. +""" +def lin2alaw(fragment: Buffer, width: int, /) -> bytes: + """Convert samples in the audio fragment to a-LAW encoding. +""" +def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: + """Convert samples between 1-, 2-, 3- and 4-byte formats. +""" +def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: + """Convert samples in the audio fragment to u-LAW encoding. +""" +def max(fragment: Buffer, width: int, /) -> int: + """Return the maximum of the absolute value of all samples in a fragment. +""" +def maxpp(fragment: Buffer, width: int, /) -> int: + """Return the maximum peak-peak value in the sound fragment. +""" +def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: + """Return the minimum and maximum values of all samples in the sound fragment. +""" +def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: + """Return a fragment that has all samples in the original fragment multiplied by the floating-point value factor. +""" def ratecv( fragment: Buffer, width: int, @@ -35,9 +75,21 @@ def ratecv( weightA: int = 1, weightB: int = 0, /, -) -> tuple[bytes, _RatecvState]: ... -def reverse(fragment: Buffer, width: int, /) -> bytes: ... -def rms(fragment: Buffer, width: int, /) -> int: ... -def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... -def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: ... -def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: ... +) -> tuple[bytes, _RatecvState]: + """Convert the frame rate of the input fragment. +""" +def reverse(fragment: Buffer, width: int, /) -> bytes: + """Reverse the samples in a fragment and returns the modified fragment. +""" +def rms(fragment: Buffer, width: int, /) -> int: + """Return the root-mean-square of the fragment, i.e. sqrt(sum(S_i^2)/n). +""" +def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: + """Convert a stereo fragment to a mono fragment. +""" +def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: + """Generate a stereo fragment from a mono fragment. +""" +def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: + """Convert sound fragments in u-LAW encoding to linearly encoded sound fragments. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi index 279d74a94ebe2..96fa7dd7430ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi @@ -1,3 +1,5 @@ +"""Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings +""" import sys from _typeshed import ReadableBuffer from typing import IO @@ -28,34 +30,184 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 13): __all__ += ["z85decode", "z85encode"] -def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... -def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: ... 
-def standard_b64encode(s: ReadableBuffer) -> bytes: ... -def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... -def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... -def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... -def b32encode(s: ReadableBuffer) -> bytes: ... -def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: ... -def b16encode(s: ReadableBuffer) -> bytes: ... -def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: + """Encode the bytes-like object s using Base64 and return a bytes object. + +Optional altchars should be a byte string of length 2 which specifies an +alternative alphabet for the '+' and '/' characters. This allows an +application to e.g. generate url or filesystem safe Base64 strings. +""" +def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: + """Decode the Base64 encoded bytes-like object or ASCII string s. + +Optional altchars must be a bytes-like object or ASCII string of length 2 +which specifies the alternative alphabet used instead of the '+' and '/' +characters. + +The result is returned as a bytes object. A binascii.Error is raised if +s is incorrectly padded. + +If validate is False (the default), characters that are neither in the +normal base-64 alphabet nor the alternative alphabet are discarded prior +to the padding check. If validate is True, these non-alphabet characters +in the input result in a binascii.Error. +For more information about the strict base64 check, see: + +https://docs.python.org/3.11/library/binascii.html#binascii.a2b_base64 +""" +def standard_b64encode(s: ReadableBuffer) -> bytes: + """Encode bytes-like object s using the standard Base64 alphabet. + +The result is returned as a bytes object. +""" +def standard_b64decode(s: str | ReadableBuffer) -> bytes: + """Decode bytes encoded with the standard Base64 alphabet. + +Argument s is a bytes-like object or ASCII string to decode. The result +is returned as a bytes object. A binascii.Error is raised if the input +is incorrectly padded. Characters that are not in the standard alphabet +are discarded prior to the padding check. +""" +def urlsafe_b64encode(s: ReadableBuffer) -> bytes: + """Encode bytes using the URL- and filesystem-safe Base64 alphabet. + +Argument s is a bytes-like object to encode. The result is returned as a +bytes object. The alphabet uses '-' instead of '+' and '_' instead of +'/'. +""" +def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: + """Decode bytes using the URL- and filesystem-safe Base64 alphabet. + +Argument s is a bytes-like object or ASCII string to decode. The result +is returned as a bytes object. A binascii.Error is raised if the input +is incorrectly padded. Characters that are not in the URL-safe base-64 +alphabet, and are not a plus '+' or slash '/', are discarded prior to the +padding check. + +The alphabet uses '-' instead of '+' and '_' instead of '/'. +""" +def b32encode(s: ReadableBuffer) -> bytes: + """ +Encode the bytes-like objects using base32 and return a bytes object. +""" +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: + """ +Decode the base32 encoded bytes-like object or ASCII string s. + +Optional casefold is a flag specifying whether a lowercase alphabet is +acceptable as input. 
For security purposes, the default is False. + +RFC 3548 allows for optional mapping of the digit 0 (zero) to the +letter O (oh), and for optional mapping of the digit 1 (one) to +either the letter I (eye) or letter L (el). The optional argument +map01 when not None, specifies which letter the digit 1 should be +mapped to (when map01 is not None, the digit 0 is always mapped to +the letter O). For security purposes the default is None, so that +0 and 1 are not allowed in the input. + +The result is returned as a bytes object. A binascii.Error is raised if +the input is incorrectly padded or if there are non-alphabet +characters present in the input. +""" +def b16encode(s: ReadableBuffer) -> bytes: + """Encode the bytes-like object s using Base16 and return a bytes object. + """ +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: + """Decode the Base16 encoded bytes-like object or ASCII string s. + +Optional casefold is a flag specifying whether a lowercase alphabet is +acceptable as input. For security purposes, the default is False. + +The result is returned as a bytes object. A binascii.Error is raised if +s is incorrectly padded or if there are non-alphabet characters present +in the input. +""" if sys.version_info >= (3, 10): - def b32hexencode(s: ReadableBuffer) -> bytes: ... - def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... + def b32hexencode(s: ReadableBuffer) -> bytes: + """ +Encode the bytes-like objects using base32hex and return a bytes object. +""" + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: + """ +Decode the base32hex encoded bytes-like object or ASCII string s. + +Optional casefold is a flag specifying whether a lowercase alphabet is +acceptable as input. For security purposes, the default is False. + +The result is returned as a bytes object. A binascii.Error is raised if +the input is incorrectly padded or if there are non-alphabet +characters present in the input. +""" def a85encode( b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False -) -> bytes: ... +) -> bytes: + """Encode bytes-like object b using Ascii85 and return a bytes object. + +foldspaces is an optional flag that uses the special short sequence 'y' +instead of 4 consecutive spaces (ASCII 0x20) as supported by 'btoa'. This +feature is not supported by the "standard" Adobe encoding. + +wrapcol controls whether the output should have newline (b'\\n') characters +added to it. If this is non-zero, each output line will be at most this +many characters long, excluding the trailing newline. + +pad controls whether the input is padded to a multiple of 4 before +encoding. Note that the btoa implementation always pads. + +adobe controls whether the encoded byte sequence is framed with <~ and ~>, +which is used by the Adobe implementation. +""" def a85decode( b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" -) -> bytes: ... -def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... -def b85decode(b: str | ReadableBuffer) -> bytes: ... -def decode(input: IO[bytes], output: IO[bytes]) -> None: ... -def encode(input: IO[bytes], output: IO[bytes]) -> None: ... -def encodebytes(s: ReadableBuffer) -> bytes: ... -def decodebytes(s: ReadableBuffer) -> bytes: ... +) -> bytes: + """Decode the Ascii85 encoded bytes-like object or ASCII string b. 
+ +foldspaces is a flag that specifies whether the 'y' short sequence should be +accepted as shorthand for 4 consecutive spaces (ASCII 0x20). This feature is +not supported by the "standard" Adobe encoding. + +adobe controls whether the input sequence is in Adobe Ascii85 format (i.e. +is framed with <~ and ~>). + +ignorechars should be a byte string containing characters to ignore from the +input. This should only contain whitespace characters, and by default +contains all whitespace characters in ASCII. + +The result is returned as a bytes object. +""" +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: + """Encode bytes-like object b in base85 format and return a bytes object. + +If pad is true, the input is padded with b'\\0' so its length is a multiple of +4 bytes before encoding. +""" +def b85decode(b: str | ReadableBuffer) -> bytes: + """Decode the base85-encoded bytes-like object or ASCII string b + +The result is returned as a bytes object. +""" +def decode(input: IO[bytes], output: IO[bytes]) -> None: + """Decode a file; input and output are binary files. +""" +def encode(input: IO[bytes], output: IO[bytes]) -> None: + """Encode a file; input and output are binary files. +""" +def encodebytes(s: ReadableBuffer) -> bytes: + """Encode a bytestring into a bytes object containing multiple lines +of base-64 data. +""" +def decodebytes(s: ReadableBuffer) -> bytes: + """Decode a bytestring of base-64 data into a bytes object. +""" if sys.version_info >= (3, 13): - def z85encode(s: ReadableBuffer) -> bytes: ... - def z85decode(s: str | ReadableBuffer) -> bytes: ... + def z85encode(s: ReadableBuffer) -> bytes: + """Encode bytes-like object b in z85 format and return a bytes object. +""" + def z85decode(s: str | ReadableBuffer) -> bytes: + """Decode the z85-encoded bytes-like object or ASCII string b + +The result is returned as a bytes object. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi index b6be2210ffe2e..ecf5505d6718d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi @@ -1,3 +1,5 @@ +"""Debugger basics +""" import sys from _typeshed import ExcInfo, TraceFunction, Unused from collections.abc import Callable, Iterable, Iterator, Mapping @@ -17,9 +19,23 @@ _Backend: TypeAlias = Literal["settrace", "monitoring"] # so we don't include the value of this constant in the stubs. GENERATOR_AND_COROUTINE_FLAGS: Final[int] -class BdbQuit(Exception): ... +class BdbQuit(Exception): + """Exception to give up completely. +""" class Bdb: + """Generic Python debugger base class. + +This class takes care of details of the trace facility; +a derived class should implement user interaction. +The standard debugger class (pdb.Pdb) is an example. + +The optional skip argument must be an iterable of glob-style +module name patterns. The debugger will not step into frames +that originate in a module that matches one of these patterns. +Whether a frame is considered to originate in a certain module +is determined by the __name__ in the frame globals. +""" skip: set[str] | None breaks: dict[str, list[int]] fncache: dict[str, str] @@ -35,69 +51,267 @@ class Bdb: else: def __init__(self, skip: Iterable[str] | None = None) -> None: ... - def canonic(self, filename: str) -> str: ... - def reset(self) -> None: ... + def canonic(self, filename: str) -> str: + """Return canonical form of filename. 
+ +For real filenames, the canonical form is a case-normalized (on +case insensitive filesystems) absolute path. 'Filenames' with +angle brackets, such as "", generated in interactive +mode, are returned unchanged. +""" + def reset(self) -> None: + """Set values of attributes as ready to start debugging. +""" if sys.version_info >= (3, 12): @contextmanager def set_enterframe(self, frame: FrameType) -> Iterator[None]: ... - def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... - def dispatch_line(self, frame: FrameType) -> TraceFunction: ... - def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: ... - def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: ... - def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: ... + def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: + """Dispatch a trace function for debugged frames based on the event. + +This function is installed as the trace function for debugged +frames. Its return value is the new trace function, which is +usually itself. The default implementation decides how to +dispatch a frame, depending on the type of event (passed in as a +string) that is about to be executed. + +The event can be one of the following: + line: A new line of code is going to be executed. + call: A function is about to be called or another code block + is entered. + return: A function or other code block is about to return. + exception: An exception has occurred. + c_call: A C function is about to be called. + c_return: A C function has returned. + c_exception: A C function has raised an exception. + +For the Python events, specialized functions (see the dispatch_*() +methods) are called. For the C events, no action is taken. + +The arg parameter depends on the previous event. +""" + def dispatch_line(self, frame: FrameType) -> TraceFunction: + """Invoke user function and return trace function for line event. + +If the debugger stops on the current line, invoke +self.user_line(). Raise BdbQuit if self.quitting is set. +Return self.trace_dispatch to continue tracing in this scope. +""" + def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: + """Invoke user function and return trace function for call event. + +If the debugger stops on this function call, invoke +self.user_call(). Raise BdbQuit if self.quitting is set. +Return self.trace_dispatch to continue tracing in this scope. +""" + def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: + """Invoke user function and return trace function for return event. + +If the debugger stops on this function return, invoke +self.user_return(). Raise BdbQuit if self.quitting is set. +Return self.trace_dispatch to continue tracing in this scope. +""" + def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: + """Invoke user function and return trace function for exception event. + +If the debugger stops on this exception, invoke +self.user_exception(). Raise BdbQuit if self.quitting is set. +Return self.trace_dispatch to continue tracing in this scope. +""" if sys.version_info >= (3, 13): - def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: ... - - def is_skipped_module(self, module_name: str) -> bool: ... - def stop_here(self, frame: FrameType) -> bool: ... - def break_here(self, frame: FrameType) -> bool: ... - def do_clear(self, arg: Any) -> bool | None: ... 
- def break_anywhere(self, frame: FrameType) -> bool: ... - def user_call(self, frame: FrameType, argument_list: None) -> None: ... - def user_line(self, frame: FrameType) -> None: ... - def user_return(self, frame: FrameType, return_value: Any) -> None: ... - def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... - def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... + def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: + """Invoke user function and return trace function for opcode event. +If the debugger stops on the current opcode, invoke +self.user_opcode(). Raise BdbQuit if self.quitting is set. +Return self.trace_dispatch to continue tracing in this scope. + +Opcode event will always trigger the user callback. For now the only +opcode event is from an inline set_trace() and we want to stop there +unconditionally. +""" + + def is_skipped_module(self, module_name: str) -> bool: + """Return True if module_name matches any skip pattern. +""" + def stop_here(self, frame: FrameType) -> bool: + """Return True if frame is below the starting frame in the stack. +""" + def break_here(self, frame: FrameType) -> bool: + """Return True if there is an effective breakpoint for this line. + +Check for line or function breakpoint and if in effect. +Delete temporary breakpoints if effective() says to. +""" + def do_clear(self, arg: Any) -> bool | None: + """Remove temporary breakpoint. + +Must implement in derived classes or get NotImplementedError. +""" + def break_anywhere(self, frame: FrameType) -> bool: + """Return True if there is any breakpoint in that frame + """ + def user_call(self, frame: FrameType, argument_list: None) -> None: + """Called if we might stop in a function. +""" + def user_line(self, frame: FrameType) -> None: + """Called when we stop or break at a line. +""" + def user_return(self, frame: FrameType, return_value: Any) -> None: + """Called when a return trap is set here. +""" + def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: + """Called when we stop on an exception. +""" + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: + """Stop when the line with the lineno greater than the current one is +reached or when returning from current frame. +""" if sys.version_info >= (3, 13): - def user_opcode(self, frame: FrameType) -> None: ... # undocumented + def user_opcode(self, frame: FrameType) -> None: # undocumented + """Called when we are about to execute an opcode. +""" - def set_step(self) -> None: ... + def set_step(self) -> None: + """Stop after one line of code. +""" if sys.version_info >= (3, 13): - def set_stepinstr(self) -> None: ... # undocumented + def set_stepinstr(self) -> None: # undocumented + """Stop before the next instruction. +""" + + def set_next(self, frame: FrameType) -> None: + """Stop on the next line in or below the given frame. +""" + def set_return(self, frame: FrameType) -> None: + """Stop when returning from the given frame. +""" + def set_trace(self, frame: FrameType | None = None) -> None: + """Start debugging from frame. + +If frame is not specified, debugging starts from caller's frame. +""" + def set_continue(self) -> None: + """Stop only at breakpoints or when finished. - def set_next(self, frame: FrameType) -> None: ... - def set_return(self, frame: FrameType) -> None: ... - def set_trace(self, frame: FrameType | None = None) -> None: ... - def set_continue(self) -> None: ... 
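A minimal sketch of the hook-based design the Bdb docstrings above describe: a subclass overrides one of the user_*() callbacks and inherits the dispatch machinery (the subclass name here is illustrative):

    import bdb

    class LineTracer(bdb.Bdb):
        # dispatch_line() invokes this whenever stop_here() or break_here()
        # is true; the base implementation is a no-op.
        def user_line(self, frame):
            print(self.canonic(frame.f_code.co_filename), frame.f_lineno)

    # run() debugs a statement via exec(); with the default stop state the
    # tracer stops on (and prints) every line of the compiled string.
    LineTracer().run("x = 1\ny = x + 1\n")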
- def set_quit(self) -> None: ... +If there are no breakpoints, set the system trace function to None. +""" + def set_quit(self) -> None: + """Set quitting attribute to True. + +Raises BdbQuit exception in the next call to a dispatch_*() method. +""" def set_break( self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None - ) -> str | None: ... - def clear_break(self, filename: str, lineno: int) -> str | None: ... - def clear_bpbynumber(self, arg: SupportsInt) -> str | None: ... - def clear_all_file_breaks(self, filename: str) -> str | None: ... - def clear_all_breaks(self) -> str | None: ... - def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: ... - def get_break(self, filename: str, lineno: int) -> bool: ... - def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: ... - def get_file_breaks(self, filename: str) -> list[int]: ... - def get_all_breaks(self) -> dict[str, list[int]]: ... - def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... - def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: ... + ) -> str | None: + """Set a new breakpoint for filename:lineno. + +If lineno doesn't exist for the filename, return an error message. +The filename should be in canonical form. +""" + def clear_break(self, filename: str, lineno: int) -> str | None: + """Delete breakpoints for filename:lineno. + +If no breakpoints were set, return an error message. +""" + def clear_bpbynumber(self, arg: SupportsInt) -> str | None: + """Delete a breakpoint by its index in Breakpoint.bpbynumber. + +If arg is invalid, return an error message. +""" + def clear_all_file_breaks(self, filename: str) -> str | None: + """Delete all breakpoints in filename. + +If none were set, return an error message. +""" + def clear_all_breaks(self) -> str | None: + """Delete all existing breakpoints. + +If none were set, return an error message. +""" + def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: + """Return a breakpoint by its index in Breakpoint.bybpnumber. + +For invalid arg values or if the breakpoint doesn't exist, +raise a ValueError. +""" + def get_break(self, filename: str, lineno: int) -> bool: + """Return True if there is a breakpoint for filename:lineno. +""" + def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: + """Return all breakpoints for filename:lineno. + +If no breakpoints are set, return an empty list. +""" + def get_file_breaks(self, filename: str) -> list[int]: + """Return all lines with breakpoints for filename. + +If no breakpoints are set, return an empty list. +""" + def get_all_breaks(self) -> dict[str, list[int]]: + """Return all breakpoints that are set. +""" + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: + """Return a list of (frame, lineno) in a stack trace and a size. + +List starts with original calling frame, if there is one. +Size may be number of frames above or below f. +""" + def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: + """Return a string with information about a stack entry. + +The stack entry frame_lineno is a (frame, lineno) tuple. The +return string contains the canonical filename, the function name +or '', the input arguments, the return value, and the +line of code (if it exists). 
+ +""" def run( self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None - ) -> None: ... - def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... - def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... - def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... + ) -> None: + """Debug a statement executed via the exec() function. + +globals defaults to __main__.dict; locals defaults to globals. +""" + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: + """Debug an expression executed via the eval() function. + +globals defaults to __main__.dict; locals defaults to globals. +""" + def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: + """For backwards-compatibility. Defers to run(). +""" + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: + """Debug a single function call. + +Return the result of the function call. +""" if sys.version_info >= (3, 14): def start_trace(self) -> None: ... def stop_trace(self) -> None: ... - def disable_current_event(self) -> None: ... - def restart_events(self) -> None: ... + def disable_current_event(self) -> None: + """Disable the current event. +""" + def restart_events(self) -> None: + """Restart all events. +""" class Breakpoint: + """Breakpoint class. + +Implements temporary breakpoints, ignore counts, disabling and +(re)-enabling, and conditionals. + +Breakpoints are indexed by number through bpbynumber and by +the (file, line) tuple using bplist. The former points to a +single instance of class Breakpoint. The latter points to a +list of such instances since there may be more than one +breakpoint per line. + +When creating a breakpoint, its associated filename should be +in canonical form. If funcname is defined, a breakpoint hit will be +counted when the first line of that function is executed. A +conditional breakpoint always counts a hit. +""" next: int bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] @@ -119,12 +333,54 @@ class Breakpoint: @staticmethod def clearBreakpoints() -> None: ... - def deleteMe(self) -> None: ... - def enable(self) -> None: ... - def disable(self) -> None: ... - def bpprint(self, out: IO[str] | None = None) -> None: ... - def bpformat(self) -> str: ... + def deleteMe(self) -> None: + """Delete the breakpoint from the list associated to a file:line. + +If it is the last breakpoint in that position, it also deletes +the entry for the file:line. +""" + def enable(self) -> None: + """Mark the breakpoint as enabled. +""" + def disable(self) -> None: + """Mark the breakpoint as disabled. +""" + def bpprint(self, out: IO[str] | None = None) -> None: + """Print the output of bpformat(). + +The optional out argument directs where the output is sent +and defaults to standard output. +""" + def bpformat(self) -> str: + """Return a string with information about the breakpoint. + +The information includes the breakpoint number, temporary +status, file:line position, break condition, number of times to +ignore, and number of times hit. + +""" + +def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: + """Return True if break should happen here. 
+ +Whether a break should happen depends on the way that b (the breakpoint) +was set. If it was set via line number, check if b.line is the same as +the one in the frame. If it was set via function name, check if this is +the right function and if it is on the first executable line. +""" +def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: + """Return (active breakpoint, delete temporary flag) or (None, None) as +breakpoint to act upon. + +The "active breakpoint" is the first entry in bplist[line, file] (which +must exist) that is enabled, for which checkfuncname is True, and that +has neither a False condition nor a positive ignore count. The flag, +meaning that a temporary breakpoint should be deleted, is False only +when the condiion cannot be evaluated (in which case, ignore count is +ignored). -def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... -def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: ... -def set_trace() -> None: ... +If no such entry exists, then (None, None) is returned. +""" +def set_trace() -> None: + """Start debugging with a Bdb instance from the caller's frame. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi index 5606d5cdf74d9..3176d98aeac3c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi @@ -1,3 +1,5 @@ +"""Conversion between binary data and ASCII +""" import sys from _typeshed import ReadableBuffer from typing_extensions import TypeAlias, deprecated @@ -6,35 +8,108 @@ from typing_extensions import TypeAlias, deprecated # or ASCII-only strings. _AsciiBuffer: TypeAlias = str | ReadableBuffer -def a2b_uu(data: _AsciiBuffer, /) -> bytes: ... -def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: ... +def a2b_uu(data: _AsciiBuffer, /) -> bytes: + """Decode a line of uuencoded data. +""" +def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: + """Uuencode line of data. +""" if sys.version_info >= (3, 11): - def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: ... + def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: + """Decode a line of base64 data. + + strict_mode + When set to True, bytes that are not part of the base64 standard are not allowed. + The same applies to excess data after padding (= / ==). +""" else: - def a2b_base64(data: _AsciiBuffer, /) -> bytes: ... + def a2b_base64(data: _AsciiBuffer, /) -> bytes: + """Decode a line of base64 data. +""" + +def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: + """Base64-code line of data. +""" +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: + """Decode a string of qp-encoded data. +""" +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: + """Encode a string using quoted-printable encoding. -def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: ... -def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... -def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... +On encoding, when istext is set, newlines are not encoded, and white +space at end of lines is. When istext is not set, \\r and \\n (CR/LF) +are both encoded. When quotetabs is set, space and tabs are encoded. 
+""" if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def a2b_hqx(data: _AsciiBuffer, /) -> bytes: ... + def a2b_hqx(data: _AsciiBuffer, /) -> bytes: + """Decode .hqx coding. +""" @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def rledecode_hqx(data: ReadableBuffer, /) -> bytes: ... + def rledecode_hqx(data: ReadableBuffer, /) -> bytes: + """Decode hexbin RLE-coded string. +""" @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def rlecode_hqx(data: ReadableBuffer, /) -> bytes: ... + def rlecode_hqx(data: ReadableBuffer, /) -> bytes: + """Binhex RLE-code binary data. +""" @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") - def b2a_hqx(data: ReadableBuffer, /) -> bytes: ... - -def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: ... -def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: ... -def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... -def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: ... -def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: ... -def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: ... + def b2a_hqx(data: ReadableBuffer, /) -> bytes: + """Encode .hqx data. +""" + +def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: + """Compute CRC-CCITT incrementally. +""" +def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: + """Compute CRC-32 incrementally. +""" +def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: + """Hexadecimal representation of binary data. + + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + +The return value is a bytes object. This function is also +available as "hexlify()". + +Example: +>>> binascii.b2a_hex(b'\\xb9\\x01\\xef') +b'b901ef' +>>> binascii.hexlify(b'\\xb9\\x01\\xef', ':') +b'b9:01:ef' +>>> binascii.b2a_hex(b'\\xb9\\x01\\xef', b'_', 2) +b'b9_01ef' +""" +def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: + """Hexadecimal representation of binary data. + + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + +The return value is a bytes object. This function is also +available as "b2a_hex()". +""" +def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: + """Binary data of hexadecimal representation. + +hexstr must contain an even number of hex digits (upper or lower case). +This function is also available as "unhexlify()". +""" +def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: + """Binary data of hexadecimal representation. + +hexstr must contain an even number of hex digits (upper or lower case). +""" class Error(ValueError): ... class Incomplete(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi index bdead928468f4..560a3d75c3aed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi @@ -1,3 +1,9 @@ +"""Macintosh binhex compression/decompression. 
+ +easy interface: +binhex(inputfilename, outputfilename) +hexbin(inputfilename, outputfilename) +""" from _typeshed import SizedBuffer from typing import IO, Any, Final from typing_extensions import TypeAlias @@ -33,7 +39,9 @@ class BinHex: def write_rsrc(self, data: SizedBuffer) -> None: ... def close(self) -> None: ... -def binhex(inp: str, out: str) -> None: ... +def binhex(inp: str, out: str) -> None: + """binhex(infilename, outfilename): create binhex-encoded copy of a file +""" class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... @@ -42,4 +50,6 @@ class HexBin: def read_rsrc(self, *n: int) -> bytes: ... def close(self) -> None: ... -def hexbin(inp: str, out: str) -> None: ... +def hexbin(inp: str, out: str) -> None: + """hexbin(infilename, outfilename) - Decode binhexed file +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi index 60dfc48d69bd7..774c6cb0c22a5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi @@ -1,3 +1,5 @@ +"""Bisection algorithms. +""" from _bisect import * bisect = bisect_right diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi index 969d1687611c2..7d8535f9107f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi @@ -1,3 +1,14 @@ +"""Built-in functions, types, exceptions, and other objects. + +This module provides direct access to all 'built-in' +identifiers of Python; for example, builtins.len is +the full name for the built-in function len(). + +This module is not normally accessed explicitly by most +applications, but can be useful in modules that provide +objects with the same name as a built-in value, but in +which the built-in of that name is also needed. +""" import _ast import _sitebuiltins import _typeshed @@ -106,6 +117,11 @@ _StepT_co = TypeVar("_StepT_co", covariant=True, default=_StartT_co | _StopT_co) @disjoint_base class object: + """The base class of the class hierarchy. + +When called, it accepts no arguments and returns a new featureless +instance that has no instance attributes and cannot be given any. +""" __doc__: str | None __dict__: dict[str, Any] __module__: str @@ -142,13 +158,32 @@ class object: @disjoint_base class staticmethod(Generic[_P, _R_co]): + """Convert a function to be a static method. + +A static method does not receive an implicit first argument. +To declare a static method, use this idiom: + + class C: + @staticmethod + def f(arg1, arg2, argN): + ... + +It can be called either on the class (e.g. C.f()) or on an instance +(e.g. C().f()). Both the class and the instance are ignored, and +neither is passed implicitly as the first argument to the method. + +Static methods in Python are similar to those found in Java or C++. +For a more advanced concept, see the classmethod builtin. +""" @property def __func__(self) -> Callable[_P, _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... def __init__(self, f: Callable[_P, _R_co], /) -> None: ... @overload - def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: ... + def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: + """Return an attribute of instance, which is of type owner. +""" @overload def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... 
if sys.version_info >= (3, 10): @@ -156,20 +191,43 @@ class staticmethod(Generic[_P, _R_co]): __qualname__: str @property def __wrapped__(self) -> Callable[_P, _R_co]: ... - def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: + """Call self as a function. +""" if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... __annotate__: AnnotateFunc | None @disjoint_base class classmethod(Generic[_T, _P, _R_co]): + """Convert a function to be a class method. + +A class method receives the class as implicit first argument, +just like an instance method receives the instance. +To declare a class method, use this idiom: + + class C: + @classmethod + def f(cls, arg1, arg2, argN): + ... + +It can be called either on the class (e.g. C.f()) or on an instance +(e.g. C().f()). The instance is ignored except for its class. +If a class method is called for a derived class, the derived class +object is passed as the implied first argument. + +Class methods are different than C++ or Java static methods. +If you want those, see the staticmethod builtin. +""" @property def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... def __init__(self, f: Callable[Concatenate[type[_T], _P], _R_co], /) -> None: ... @overload - def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... + def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: + """Return an attribute of instance, which is of type owner. +""" @overload def __get__(self, instance: None, owner: type[_T], /) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): @@ -183,6 +241,9 @@ class classmethod(Generic[_T, _P, _R_co]): @disjoint_base class type: + """type(object) -> the object's type +type(name, bases, dict, **kwds) -> a new type +""" # object.__base__ is None. Otherwise, it would be a type. @property def __base__(self) -> type | None: ... @@ -216,20 +277,36 @@ class type: def __new__( cls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any ) -> _typeshed.Self: ... - def __call__(self, *args: Any, **kwds: Any) -> Any: ... - def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... + def __call__(self, *args: Any, **kwds: Any) -> Any: + """Call self as a function. +""" + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: + """Return a list of immediate subclasses. +""" # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. - def mro(self) -> list[type]: ... - def __instancecheck__(self, instance: Any, /) -> bool: ... - def __subclasscheck__(self, subclass: type, /) -> bool: ... + def mro(self) -> list[type]: + """Return a type's method resolution order. +""" + def __instancecheck__(self, instance: Any, /) -> bool: + """Check if an object is an instance. +""" + def __subclasscheck__(self, subclass: type, /) -> bool: + """Check if a class is a subclass. +""" @classmethod - def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: ... 
+ def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: + """Create the namespace for the class statement +""" if sys.version_info >= (3, 10): # `int | str` produces an instance of `UnionType`, but `int | int` produces an instance of `type`, # and `abc.ABC | abc.ABC` produces an instance of `abc.ABCMeta`. - def __or__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... - def __ror__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: ... + def __or__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: + """Return self|value. +""" + def __ror__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: + """Return value|self. +""" if sys.version_info >= (3, 12): __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __annotations__: dict[str, AnnotationForm] @@ -238,6 +315,20 @@ class type: @disjoint_base class super: + """super() -> same as super(__class__, ) +super(type) -> unbound super object +super(type, obj) -> bound super object; requires isinstance(obj, type) +super(type, type2) -> bound super object; requires issubclass(type2, type) +Typical use to call a cooperative superclass method: +class C(B): + def meth(self, arg): + super().meth(arg) +This works for class methods too: +class C(B): + @classmethod + def cmeth(cls, arg): + super().cmeth(arg) +""" @overload def __init__(self, t: Any, obj: Any, /) -> None: ... @overload @@ -251,28 +342,97 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 @disjoint_base class int: + """int([x]) -> integer +int(x, base=10) -> integer + +Convert a number or string to an integer, or return 0 if no arguments +are given. If x is a number, return x.__int__(). For floating-point +numbers, this truncates towards zero. + +If x is not a number or if base is given, then x must be a string, +bytes, or bytearray instance representing an integer literal in the +given base. The literal can be preceded by '+' or '-' and be surrounded +by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. +Base 0 means to interpret the base from the string as an integer literal. +>>> int('0b100', base=0) +4 +""" @overload def __new__(cls, x: ConvertibleToInt = 0, /) -> Self: ... @overload def __new__(cls, x: str | bytes | bytearray, /, base: SupportsIndex) -> Self: ... - def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... + def as_integer_ratio(self) -> tuple[int, Literal[1]]: + """Return a pair of integers, whose ratio is equal to the original int. + +The ratio is in lowest terms and has a positive denominator. + +>>> (10).as_integer_ratio() +(10, 1) +>>> (-10).as_integer_ratio() +(-10, 1) +>>> (0).as_integer_ratio() +(0, 1) +""" @property - def real(self) -> int: ... + def real(self) -> int: + """the real part of a complex number +""" @property - def imag(self) -> Literal[0]: ... + def imag(self) -> Literal[0]: + """the imaginary part of a complex number +""" @property - def numerator(self) -> int: ... + def numerator(self) -> int: + """the numerator of a rational number in lowest terms +""" @property - def denominator(self) -> Literal[1]: ... - def conjugate(self) -> int: ... - def bit_length(self) -> int: ... + def denominator(self) -> Literal[1]: + """the denominator of a rational number in lowest terms +""" + def conjugate(self) -> int: + """Returns self, the complex conjugate of any int. 
+""" + def bit_length(self) -> int: + """Number of bits necessary to represent self in binary. + +>>> bin(37) +'0b100101' +>>> (37).bit_length() +6 +""" if sys.version_info >= (3, 10): - def bit_count(self) -> int: ... + def bit_count(self) -> int: + """Number of ones in the binary representation of the absolute value of self. + +Also known as the population count. + +>>> bin(13) +'0b1101' +>>> (13).bit_count() +3 +""" if sys.version_info >= (3, 11): def to_bytes( self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False - ) -> bytes: ... + ) -> bytes: + """Return an array of bytes representing an integer. + + length + Length of bytes object to use. An OverflowError is raised if the + integer is not representable with the given number of bytes. Default + is length 1. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + sys.byteorder as the byte order value. Default is to use 'big'. + signed + Determines whether two's complement is used to represent the integer. + If signed is False and a negative integer is given, an OverflowError + is raised. +""" @classmethod def from_bytes( cls, @@ -280,9 +440,41 @@ class int: byteorder: Literal["little", "big"] = "big", *, signed: bool = False, - ) -> Self: ... + ) -> Self: + """Return the integer represented by the given array of bytes. + + bytes + Holds the array of bytes to convert. The argument must either + support the buffer protocol or be an iterable object producing bytes. + Bytes and bytearray are examples of built-in objects that support the + buffer protocol. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + sys.byteorder as the byte order value. Default is to use 'big'. + signed + Indicates whether two's complement is used to represent the integer. +""" else: - def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: + """Return an array of bytes representing an integer. + + length + Length of bytes object to use. An OverflowError is raised if the + integer is not representable with the given number of bytes. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + `sys.byteorder' as the byte order value. + signed + Determines whether two's complement is used to represent the integer. + If signed is False and a negative integer is given, an OverflowError + is raised. +""" @classmethod def from_bytes( cls, @@ -290,27 +482,75 @@ class int: byteorder: Literal["little", "big"], *, signed: bool = False, - ) -> Self: ... + ) -> Self: + """Return the integer represented by the given array of bytes. + + bytes + Holds the array of bytes to convert. 
The argument must either + support the buffer protocol or be an iterable object producing bytes. + Bytes and bytearray are examples of built-in objects that support the + buffer protocol. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + `sys.byteorder' as the byte order value. + signed + Indicates whether two's complement is used to represent the integer. +""" if sys.version_info >= (3, 12): - def is_integer(self) -> Literal[True]: ... - - def __add__(self, value: int, /) -> int: ... - def __sub__(self, value: int, /) -> int: ... - def __mul__(self, value: int, /) -> int: ... - def __floordiv__(self, value: int, /) -> int: ... - def __truediv__(self, value: int, /) -> float: ... - def __mod__(self, value: int, /) -> int: ... - def __divmod__(self, value: int, /) -> tuple[int, int]: ... - def __radd__(self, value: int, /) -> int: ... - def __rsub__(self, value: int, /) -> int: ... - def __rmul__(self, value: int, /) -> int: ... - def __rfloordiv__(self, value: int, /) -> int: ... - def __rtruediv__(self, value: int, /) -> float: ... - def __rmod__(self, value: int, /) -> int: ... - def __rdivmod__(self, value: int, /) -> tuple[int, int]: ... - @overload - def __pow__(self, x: Literal[0], /) -> Literal[1]: ... + def is_integer(self) -> Literal[True]: + """Returns True. Exists for duck type compatibility with float.is_integer. +""" + + def __add__(self, value: int, /) -> int: + """Return self+value. +""" + def __sub__(self, value: int, /) -> int: + """Return self-value. +""" + def __mul__(self, value: int, /) -> int: + """Return self*value. +""" + def __floordiv__(self, value: int, /) -> int: + """Return self//value. +""" + def __truediv__(self, value: int, /) -> float: + """Return self/value. +""" + def __mod__(self, value: int, /) -> int: + """Return self%value. +""" + def __divmod__(self, value: int, /) -> tuple[int, int]: + """Return divmod(self, value). +""" + def __radd__(self, value: int, /) -> int: + """Return value+self. +""" + def __rsub__(self, value: int, /) -> int: + """Return value-self. +""" + def __rmul__(self, value: int, /) -> int: + """Return value*self. +""" + def __rfloordiv__(self, value: int, /) -> int: + """Return value//self. +""" + def __rtruediv__(self, value: int, /) -> float: + """Return value/self. +""" + def __rmod__(self, value: int, /) -> int: + """Return value%self. +""" + def __rdivmod__(self, value: int, /) -> tuple[int, int]: + """Return divmod(value, self). +""" + @overload + def __pow__(self, x: Literal[0], /) -> Literal[1]: + """Return pow(self, value, mod). +""" @overload def __pow__(self, value: Literal[0], mod: None, /) -> Literal[1]: ... @overload @@ -323,27 +563,69 @@ class int: def __pow__(self, value: int, mod: None = None, /) -> Any: ... @overload def __pow__(self, value: int, mod: int, /) -> int: ... - def __rpow__(self, value: int, mod: int | None = None, /) -> Any: ... - def __and__(self, value: int, /) -> int: ... - def __or__(self, value: int, /) -> int: ... - def __xor__(self, value: int, /) -> int: ... - def __lshift__(self, value: int, /) -> int: ... - def __rshift__(self, value: int, /) -> int: ... - def __rand__(self, value: int, /) -> int: ... - def __ror__(self, value: int, /) -> int: ... - def __rxor__(self, value: int, /) -> int: ... - def __rlshift__(self, value: int, /) -> int: ... 
-    def __rrshift__(self, value: int, /) -> int: ...
-    def __neg__(self) -> int: ...
-    def __pos__(self) -> int: ...
-    def __invert__(self) -> int: ...
-    def __trunc__(self) -> int: ...
-    def __ceil__(self) -> int: ...
-    def __floor__(self) -> int: ...
+    def __rpow__(self, value: int, mod: int | None = None, /) -> Any:
+        """Return pow(value, self, mod).
+"""
+    def __and__(self, value: int, /) -> int:
+        """Return self&value.
+"""
+    def __or__(self, value: int, /) -> int:
+        """Return self|value.
+"""
+    def __xor__(self, value: int, /) -> int:
+        """Return self^value.
+"""
+    def __lshift__(self, value: int, /) -> int:
+        """Return self<<value.
+"""
+    def __rshift__(self, value: int, /) -> int:
+        """Return self>>value.
+"""
+    def __rand__(self, value: int, /) -> int:
+        """Return value&self.
+"""
+    def __ror__(self, value: int, /) -> int:
+        """Return value|self.
+"""
+    def __rxor__(self, value: int, /) -> int:
+        """Return value^self.
+"""
+    def __rlshift__(self, value: int, /) -> int:
+        """Return value<<self.
+"""
+    def __rrshift__(self, value: int, /) -> int:
+        """Return value>>self.
+"""
+    def __neg__(self) -> int:
+        """-self
+"""
+    def __pos__(self) -> int:
+        """+self
+"""
+    def __invert__(self) -> int:
+        """~self
+"""
+    def __trunc__(self) -> int:
+        """Truncating an Integral returns itself.
+"""
+    def __ceil__(self) -> int:
+        """Ceiling of an Integral returns itself.
+"""
+    def __floor__(self) -> int:
+        """Flooring an Integral returns itself.
+"""
     if sys.version_info >= (3, 14):
-        def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: ...
+        def __round__(self, ndigits: SupportsIndex | None = None, /) -> int:
+            """Rounding an Integral returns itself.
+
+Rounding with an ndigits argument also returns an integer.
+"""
     else:
-        def __round__(self, ndigits: SupportsIndex = ..., /) -> int: ...
+        def __round__(self, ndigits: SupportsIndex = ..., /) -> int:
+            """Rounding an Integral returns itself.
+
+Rounding with an ndigits argument also returns an integer.
+"""
     def __getnewargs__(self) -> tuple[int]: ...
     def __eq__(self, value: object, /) -> bool: ...
@@ -352,60 +634,150 @@ class int:
     def __le__(self, value: int, /) -> bool: ...
     def __gt__(self, value: int, /) -> bool: ...
     def __ge__(self, value: int, /) -> bool: ...
-    def __float__(self) -> float: ...
-    def __int__(self) -> int: ...
-    def __abs__(self) -> int: ...
+    def __float__(self) -> float:
+        """float(self)
+"""
+    def __int__(self) -> int:
+        """int(self)
+"""
+    def __abs__(self) -> int:
+        """abs(self)
+"""
     def __hash__(self) -> int: ...
-    def __bool__(self) -> bool: ...
-    def __index__(self) -> int: ...
-    def __format__(self, format_spec: str, /) -> str: ...
+    def __bool__(self) -> bool:
+        """True if self else False
+"""
+    def __index__(self) -> int:
+        """Return self converted to an integer, if self is suitable for use as an index into a list.
+"""
+    def __format__(self, format_spec: str, /) -> str:
+        """Convert to a string according to format_spec.
+"""

 @disjoint_base
 class float:
+    """Convert a string or number to a floating-point number, if possible.
+"""
     def __new__(cls, x: ConvertibleToFloat = 0, /) -> Self: ...
-    def as_integer_ratio(self) -> tuple[int, int]: ...
-    def hex(self) -> str: ...
-    def is_integer(self) -> bool: ...
+    def as_integer_ratio(self) -> tuple[int, int]:
+        """Return a pair of integers, whose ratio is exactly equal to the original float.
+
+The ratio is in lowest terms and has a positive denominator. Raise
+OverflowError on infinities and a ValueError on NaNs.
+ +>>> (10.0).as_integer_ratio() +(10, 1) +>>> (0.0).as_integer_ratio() +(0, 1) +>>> (-.25).as_integer_ratio() +(-1, 4) +""" + def hex(self) -> str: + """Return a hexadecimal representation of a floating-point number. + +>>> (-0.1).hex() +'-0x1.999999999999ap-4' +>>> 3.14159.hex() +'0x1.921f9f01b866ep+1' +""" + def is_integer(self) -> bool: + """Return True if the float is an integer. +""" @classmethod - def fromhex(cls, string: str, /) -> Self: ... + def fromhex(cls, string: str, /) -> Self: + """Create a floating-point number from a hexadecimal string. + +>>> float.fromhex('0x1.ffffp10') +2047.984375 +>>> float.fromhex('-0x1p-1074') +-5e-324 +""" @property - def real(self) -> float: ... + def real(self) -> float: + """the real part of a complex number +""" @property - def imag(self) -> float: ... - def conjugate(self) -> float: ... - def __add__(self, value: float, /) -> float: ... - def __sub__(self, value: float, /) -> float: ... - def __mul__(self, value: float, /) -> float: ... - def __floordiv__(self, value: float, /) -> float: ... - def __truediv__(self, value: float, /) -> float: ... - def __mod__(self, value: float, /) -> float: ... - def __divmod__(self, value: float, /) -> tuple[float, float]: ... - @overload - def __pow__(self, value: int, mod: None = None, /) -> float: ... + def imag(self) -> float: + """the imaginary part of a complex number +""" + def conjugate(self) -> float: + """Return self, the complex conjugate of any float. +""" + def __add__(self, value: float, /) -> float: + """Return self+value. +""" + def __sub__(self, value: float, /) -> float: + """Return self-value. +""" + def __mul__(self, value: float, /) -> float: + """Return self*value. +""" + def __floordiv__(self, value: float, /) -> float: + """Return self//value. +""" + def __truediv__(self, value: float, /) -> float: + """Return self/value. +""" + def __mod__(self, value: float, /) -> float: + """Return self%value. +""" + def __divmod__(self, value: float, /) -> tuple[float, float]: + """Return divmod(self, value). +""" + @overload + def __pow__(self, value: int, mod: None = None, /) -> float: + """Return pow(self, value, mod). +""" # positive __value -> float; negative __value -> complex # return type must be Any as `float | complex` causes too many false-positive errors @overload def __pow__(self, value: float, mod: None = None, /) -> Any: ... - def __radd__(self, value: float, /) -> float: ... - def __rsub__(self, value: float, /) -> float: ... - def __rmul__(self, value: float, /) -> float: ... - def __rfloordiv__(self, value: float, /) -> float: ... - def __rtruediv__(self, value: float, /) -> float: ... - def __rmod__(self, value: float, /) -> float: ... - def __rdivmod__(self, value: float, /) -> tuple[float, float]: ... - @overload - def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: ... + def __radd__(self, value: float, /) -> float: + """Return value+self. +""" + def __rsub__(self, value: float, /) -> float: + """Return value-self. +""" + def __rmul__(self, value: float, /) -> float: + """Return value*self. +""" + def __rfloordiv__(self, value: float, /) -> float: + """Return value//self. +""" + def __rtruediv__(self, value: float, /) -> float: + """Return value/self. +""" + def __rmod__(self, value: float, /) -> float: + """Return value%self. +""" + def __rdivmod__(self, value: float, /) -> tuple[float, float]: + """Return divmod(value, self). 
+""" + @overload + def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: + """Return pow(value, self, mod). +""" @overload def __rpow__(self, value: _NegativeInteger, mod: None = None, /) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @overload def __rpow__(self, value: float, mod: None = None, /) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... - def __trunc__(self) -> int: ... - def __ceil__(self) -> int: ... - def __floor__(self) -> int: ... - @overload - def __round__(self, ndigits: None = None, /) -> int: ... + def __trunc__(self) -> int: + """Return the Integral closest to x between 0 and x. +""" + def __ceil__(self) -> int: + """Return the ceiling as an Integral. +""" + def __floor__(self) -> int: + """Return the floor as an Integral. +""" + @overload + def __round__(self, ndigits: None = None, /) -> int: + """Return the Integral closest to x, rounding half toward even. + +When an argument is passed, work like built-in round(x, ndigits). +""" @overload def __round__(self, ndigits: SupportsIndex, /) -> float: ... def __eq__(self, value: object, /) -> bool: ... @@ -414,20 +786,43 @@ class float: def __le__(self, value: float, /) -> bool: ... def __gt__(self, value: float, /) -> bool: ... def __ge__(self, value: float, /) -> bool: ... - def __neg__(self) -> float: ... - def __pos__(self) -> float: ... - def __int__(self) -> int: ... - def __float__(self) -> float: ... - def __abs__(self) -> float: ... + def __neg__(self) -> float: + """-self +""" + def __pos__(self) -> float: + """+self +""" + def __int__(self) -> int: + """int(self) +""" + def __float__(self) -> float: + """float(self) +""" + def __abs__(self) -> float: + """abs(self) +""" def __hash__(self) -> int: ... - def __bool__(self) -> bool: ... - def __format__(self, format_spec: str, /) -> str: ... + def __bool__(self) -> bool: + """True if self else False +""" + def __format__(self, format_spec: str, /) -> str: + """Formats the float according to format_spec. +""" if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: ... + def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: + """Convert real number to a floating-point number. +""" @disjoint_base class complex: + """Create a complex number from a string or numbers. + +If a string is given, parse it as a complex number. +If a single number is given, convert it to a complex number. +If the 'real' or 'imag' arguments are given, create a complex number +with the specified real and imaginary components. +""" # Python doesn't currently accept SupportsComplex for the second argument @overload def __new__( @@ -438,33 +833,73 @@ class complex: @overload def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... @property - def real(self) -> float: ... + def real(self) -> float: + """the real part of a complex number +""" @property - def imag(self) -> float: ... - def conjugate(self) -> complex: ... - def __add__(self, value: complex, /) -> complex: ... - def __sub__(self, value: complex, /) -> complex: ... - def __mul__(self, value: complex, /) -> complex: ... - def __pow__(self, value: complex, mod: None = None, /) -> complex: ... - def __truediv__(self, value: complex, /) -> complex: ... - def __radd__(self, value: complex, /) -> complex: ... - def __rsub__(self, value: complex, /) -> complex: ... - def __rmul__(self, value: complex, /) -> complex: ... 
- def __rpow__(self, value: complex, mod: None = None, /) -> complex: ... - def __rtruediv__(self, value: complex, /) -> complex: ... + def imag(self) -> float: + """the imaginary part of a complex number +""" + def conjugate(self) -> complex: + """Return the complex conjugate of its argument. (3-4j).conjugate() == 3+4j. +""" + def __add__(self, value: complex, /) -> complex: + """Return self+value. +""" + def __sub__(self, value: complex, /) -> complex: + """Return self-value. +""" + def __mul__(self, value: complex, /) -> complex: + """Return self*value. +""" + def __pow__(self, value: complex, mod: None = None, /) -> complex: + """Return pow(self, value, mod). +""" + def __truediv__(self, value: complex, /) -> complex: + """Return self/value. +""" + def __radd__(self, value: complex, /) -> complex: + """Return value+self. +""" + def __rsub__(self, value: complex, /) -> complex: + """Return value-self. +""" + def __rmul__(self, value: complex, /) -> complex: + """Return value*self. +""" + def __rpow__(self, value: complex, mod: None = None, /) -> complex: + """Return pow(value, self, mod). +""" + def __rtruediv__(self, value: complex, /) -> complex: + """Return value/self. +""" def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... - def __neg__(self) -> complex: ... - def __pos__(self) -> complex: ... - def __abs__(self) -> float: ... + def __neg__(self) -> complex: + """-self +""" + def __pos__(self) -> complex: + """+self +""" + def __abs__(self) -> float: + """abs(self) +""" def __hash__(self) -> int: ... - def __bool__(self) -> bool: ... - def __format__(self, format_spec: str, /) -> str: ... + def __bool__(self) -> bool: + """True if self else False +""" + def __format__(self, format_spec: str, /) -> str: + """Convert to a string according to format_spec. +""" if sys.version_info >= (3, 11): - def __complex__(self) -> complex: ... + def __complex__(self) -> complex: + """Convert this value to exact type complex. +""" if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: ... + def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: + """Convert number to a complex floating-point number. +""" @type_check_only class _FormatMapMapping(Protocol): @@ -476,146 +911,436 @@ class _TranslateTable(Protocol): @disjoint_base class str(Sequence[str]): + """str(object='') -> str +str(bytes_or_buffer[, encoding[, errors]]) -> str + +Create a new string object from the given object. If encoding or +errors is specified, then the object must expose a data buffer +that will be decoded using the given encoding and error handler. +Otherwise, returns the result of object.__str__() (if defined) +or repr(object). +encoding defaults to 'utf-8'. +errors defaults to 'strict'. +""" @overload def __new__(cls, object: object = "") -> Self: ... @overload def __new__(cls, object: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> Self: ... @overload - def capitalize(self: LiteralString) -> LiteralString: ... + def capitalize(self: LiteralString) -> LiteralString: + """Return a capitalized version of the string. + +More specifically, make the first character have upper case and the rest lower +case. +""" @overload def capitalize(self) -> str: ... # type: ignore[misc] @overload - def casefold(self: LiteralString) -> LiteralString: ... 
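The two str constructor forms and the capitalize/casefold behaviour documented above, shown as a small illustrative snippet (runtime behaviour of the builtins, not something this .pyi diff executes):

assert str(3.5) == "3.5"                       # str(object) uses object.__str__()
assert str(b"caf\xc3\xa9", "utf-8") == "café"  # str(buffer, encoding) decodes the buffer
assert "hello WORLD".capitalize() == "Hello world"
assert "Straße".casefold() == "strasse"        # casefold folds more aggressively than lower()
assert "Straße".lower() == "straße"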
+ def casefold(self: LiteralString) -> LiteralString: + """Return a version of the string suitable for caseless comparisons. +""" @overload def casefold(self) -> str: ... # type: ignore[misc] @overload - def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... + def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: + """Return a centered string of length width. + +Padding is done using the specified fill character (default is a space). +""" @overload def center(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] - def count(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... - def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... + def count(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: + """Return the number of non-overlapping occurrences of substring sub in string S[start:end]. + +Optional arguments start and end are interpreted as in slice notation. +""" + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: + """Encode the string using the codec registered for encoding. + + encoding + The encoding in which to encode the string. + errors + The error handling scheme to use for encoding errors. + The default is 'strict' meaning that encoding errors raise a + UnicodeEncodeError. Other possible values are 'ignore', 'replace' and + 'xmlcharrefreplace' as well as any other name registered with + codecs.register_error that can handle UnicodeEncodeErrors. +""" def endswith( self, suffix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> bool: ... + ) -> bool: + """Return True if the string ends with the specified suffix, False otherwise. + + suffix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. +""" @overload - def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: + """Return a copy where all tab characters are expanded using spaces. + +If tabsize is not given, a tab size of 8 characters is assumed. +""" @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] - def find(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def find(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: + """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. + +Optional arguments start and end are interpreted as in slice notation. +Return -1 on failure. +""" @overload - def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: + """Return a formatted version of the string, using substitutions from args and kwargs. +The substitutions are identified by braces ('{' and '}'). +""" @overload def format(self, *args: object, **kwargs: object) -> str: ... - def format_map(self, mapping: _FormatMapMapping, /) -> str: ... - def index(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... 
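To make the find/index distinction and format_map concrete, a short illustrative example of typical usage (not part of the patch):

s = "spam and eggs"
assert s.find("eggs") == 9
assert s.find("bacon") == -1          # find returns -1 on failure
try:
    s.index("bacon")                  # index raises ValueError instead
except ValueError:
    pass
assert "{dish} x{n}".format_map({"dish": "spam", "n": 2}) == "spam x2"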
- def isalnum(self) -> bool: ... - def isalpha(self) -> bool: ... - def isascii(self) -> bool: ... - def isdecimal(self) -> bool: ... - def isdigit(self) -> bool: ... - def isidentifier(self) -> bool: ... - def islower(self) -> bool: ... - def isnumeric(self) -> bool: ... - def isprintable(self) -> bool: ... - def isspace(self) -> bool: ... - def istitle(self) -> bool: ... - def isupper(self) -> bool: ... - @overload - def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString: ... + def format_map(self, mapping: _FormatMapMapping, /) -> str: + """Return a formatted version of the string, using substitutions from mapping. +The substitutions are identified by braces ('{' and '}'). +""" + def index(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: + """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. + +Optional arguments start and end are interpreted as in slice notation. +Raises ValueError when the substring is not found. +""" + def isalnum(self) -> bool: + """Return True if the string is an alpha-numeric string, False otherwise. + +A string is alpha-numeric if all characters in the string are alpha-numeric and +there is at least one character in the string. +""" + def isalpha(self) -> bool: + """Return True if the string is an alphabetic string, False otherwise. + +A string is alphabetic if all characters in the string are alphabetic and there +is at least one character in the string. +""" + def isascii(self) -> bool: + """Return True if all characters in the string are ASCII, False otherwise. + +ASCII characters have code points in the range U+0000-U+007F. +Empty string is ASCII too. +""" + def isdecimal(self) -> bool: + """Return True if the string is a decimal string, False otherwise. + +A string is a decimal string if all characters in the string are decimal and +there is at least one character in the string. +""" + def isdigit(self) -> bool: + """Return True if the string is a digit string, False otherwise. + +A string is a digit string if all characters in the string are digits and there +is at least one character in the string. +""" + def isidentifier(self) -> bool: + """Return True if the string is a valid Python identifier, False otherwise. + +Call keyword.iskeyword(s) to test whether string s is a reserved identifier, +such as "def" or "class". +""" + def islower(self) -> bool: + """Return True if the string is a lowercase string, False otherwise. + +A string is lowercase if all cased characters in the string are lowercase and +there is at least one cased character in the string. +""" + def isnumeric(self) -> bool: + """Return True if the string is a numeric string, False otherwise. + +A string is numeric if all characters in the string are numeric and there is at +least one character in the string. +""" + def isprintable(self) -> bool: + """Return True if all characters in the string are printable, False otherwise. + +A character is printable if repr() may use it in its output. +""" + def isspace(self) -> bool: + """Return True if the string is a whitespace string, False otherwise. + +A string is whitespace if all characters in the string are whitespace and there +is at least one character in the string. +""" + def istitle(self) -> bool: + """Return True if the string is a title-cased string, False otherwise. + +In a title-cased string, upper- and title-case characters may only +follow uncased characters and lowercase characters only cased ones. 
+""" + def isupper(self) -> bool: + """Return True if the string is an uppercase string, False otherwise. + +A string is uppercase if all cased characters in the string are uppercase and +there is at least one cased character in the string. +""" + @overload + def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString: + """Concatenate any number of strings. + +The string whose method is called is inserted in between each given string. +The result is returned as a new string. + +Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs' +""" @overload def join(self, iterable: Iterable[str], /) -> str: ... # type: ignore[misc] @overload - def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... + def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: + """Return a left-justified string of length width. + +Padding is done using the specified fill character (default is a space). +""" @overload def ljust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload - def lower(self: LiteralString) -> LiteralString: ... + def lower(self: LiteralString) -> LiteralString: + """Return a copy of the string converted to lowercase. +""" @overload def lower(self) -> str: ... # type: ignore[misc] @overload - def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... + def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: + """Return a copy of the string with leading whitespace removed. + +If chars is given and not None, remove characters in chars instead. +""" @overload def lstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload - def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: ... + def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: + """Partition the string into three parts using the given separator. + +This will search for the separator in the string. If the separator is found, +returns a 3-tuple containing the part before the separator, the separator +itself, and the part after it. + +If the separator is not found, returns a 3-tuple containing the original string +and two empty strings. +""" @overload def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] if sys.version_info >= (3, 13): @overload def replace( self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1 - ) -> LiteralString: ... + ) -> LiteralString: + """Return a copy with all occurrences of substring old replaced by new. + + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + +If the optional argument count is given, only the first count occurrences are +replaced. +""" @overload def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] else: @overload def replace( self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / - ) -> LiteralString: ... + ) -> LiteralString: + """Return a copy with all occurrences of substring old replaced by new. + + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + +If the optional argument count is given, only the first count occurrences are +replaced. 
+""" @overload def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] @overload - def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: ... + def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: + """Return a str with the given prefix string removed if present. + +If the string starts with the prefix string, return string[len(prefix):]. +Otherwise, return a copy of the original string. +""" @overload def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] @overload - def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: ... + def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: + """Return a str with the given suffix string removed if present. + +If the string ends with the suffix string and that suffix is not empty, +return string[:-len(suffix)]. Otherwise, return a copy of the original +string. +""" @overload def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] - def rfind(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... - def rindex(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: ... + def rfind(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: + """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. + +Optional arguments start and end are interpreted as in slice notation. +Return -1 on failure. +""" + def rindex(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: + """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. + +Optional arguments start and end are interpreted as in slice notation. +Raises ValueError when the substring is not found. +""" @overload - def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: ... + def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: + """Return a right-justified string of length width. + +Padding is done using the specified fill character (default is a space). +""" @overload def rjust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload - def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: ... + def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: + """Partition the string into three parts using the given separator. + +This will search for the separator in the string, starting at the end. If +the separator is found, returns a 3-tuple containing the part before the +separator, the separator itself, and the part after it. + +If the separator is not found, returns a 3-tuple containing two empty strings +and the original string. +""" @overload def rpartition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] @overload - def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: + """Return a list of the substrings in the string, using sep as the separator string. 
+ + sep + The separator used to split the string. + + When set to None (the default value), will split on any whitespace + character (including \\n \\r \\t \\f and spaces) and will discard + empty strings from the result. + maxsplit + Maximum number of splits. + -1 (the default value) means no limit. + +Splitting starts at the end of the string and works to the front. +""" @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload - def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... + def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: + """Return a copy of the string with trailing whitespace removed. + +If chars is given and not None, remove characters in chars instead. +""" @overload def rstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload - def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: + """Return a list of the substrings in the string, using sep as the separator string. + + sep + The separator used to split the string. + + When set to None (the default value), will split on any whitespace + character (including \\n \\r \\t \\f and spaces) and will discard + empty strings from the result. + maxsplit + Maximum number of splits. + -1 (the default value) means no limit. + +Splitting starts at the front of the string and works to the end. + +Note, str.split() is mainly useful for data that has been intentionally +delimited. With natural text that includes punctuation, consider using +the regular expression module. +""" @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload - def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: + """Return a list of the lines in the string, breaking at line boundaries. + +Line breaks are not included in the resulting list unless keepends is given and +true. +""" @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> bool: ... + ) -> bool: + """Return True if the string starts with the specified prefix, False otherwise. + + prefix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. +""" @overload - def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: ... + def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: + """Return a copy of the string with leading and trailing whitespace removed. + +If chars is given and not None, remove characters in chars instead. +""" @overload def strip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload - def swapcase(self: LiteralString) -> LiteralString: ... + def swapcase(self: LiteralString) -> LiteralString: + """Convert uppercase characters to lowercase and lowercase characters to uppercase. +""" @overload def swapcase(self) -> str: ... 
# type: ignore[misc] @overload - def title(self: LiteralString) -> LiteralString: ... + def title(self: LiteralString) -> LiteralString: + """Return a version of the string where each word is titlecased. + +More specifically, words start with uppercased characters and all remaining +cased characters have lower case. +""" @overload def title(self) -> str: ... # type: ignore[misc] - def translate(self, table: _TranslateTable, /) -> str: ... + def translate(self, table: _TranslateTable, /) -> str: + """Replace each character in the string using the given translation table. + + table + Translation table, which must be a mapping of Unicode ordinals to + Unicode ordinals, strings, or None. + +The table must implement lookup/indexing via __getitem__, for instance a +dictionary or list. If this operation raises LookupError, the character is +left untouched. Characters mapped to None are deleted. +""" @overload - def upper(self: LiteralString) -> LiteralString: ... + def upper(self: LiteralString) -> LiteralString: + """Return a copy of the string converted to uppercase. +""" @overload def upper(self) -> str: ... # type: ignore[misc] @overload - def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString: ... + def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString: + """Pad a numeric string with zeros on the left, to fill a field of the given width. + +The string is never truncated. +""" @overload def zfill(self, width: SupportsIndex, /) -> str: ... # type: ignore[misc] @staticmethod @overload - def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: ... + def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: + """Return a translation table usable for str.translate(). + +If there is only one argument, it must be a dictionary mapping Unicode +ordinals (integers) or characters to Unicode ordinals, strings or None. +Character keys will be then converted to ordinals. +If there are two arguments, they must be strings of equal length, and +in the resulting dictionary, each character in x will be mapped to the +character at the same position in y. If there is a third argument, it +must be a string, whose characters will be mapped to None in the result. +""" @staticmethod @overload def maketrans(x: str, y: str, /) -> dict[int, int]: ... @@ -623,129 +1348,456 @@ class str(Sequence[str]): @overload def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]: ... @overload - def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString: ... + def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString: + """Return self+value. +""" @overload def __add__(self, value: str, /) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ - def __contains__(self, key: str, /) -> bool: ... # type: ignore[override] + def __contains__(self, key: str, /) -> bool: # type: ignore[override] + """Return bool(key in self). +""" def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: str, /) -> bool: ... @overload - def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: ... + def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: + """Return self[key]. +""" @overload def __getitem__(self, key: SupportsIndex | slice, /) -> str: ... # type: ignore[misc] def __gt__(self, value: str, /) -> bool: ... def __hash__(self) -> int: ... 
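The maketrans/translate pairing and zfill sign handling described above, as an illustrative usage snippet (standard builtin behaviour, nothing added by this patch):

table = str.maketrans("abc", "xyz", "!")    # equal-length from/to strings, plus characters to delete
assert "a!b!c".translate(table) == "xyz"
assert "naïve".translate(str.maketrans({"ï": "i"})) == "naive"   # one-argument mapping form
assert "-42".zfill(6) == "-00042"           # zfill keeps the sign ahead of the padding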
@overload - def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + def __iter__(self: LiteralString) -> Iterator[LiteralString]: + """Implement iter(self). +""" @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, value: str, /) -> bool: ... - def __len__(self) -> int: ... + def __len__(self) -> int: + """Return len(self). +""" def __lt__(self, value: str, /) -> bool: ... @overload - def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString: ... + def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString: + """Return self%value. +""" @overload def __mod__(self, value: Any, /) -> str: ... @overload - def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: ... + def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: + """Return self*value. +""" @overload def __mul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __ne__(self, value: object, /) -> bool: ... @overload - def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: ... + def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: + """Return value*self. +""" @overload def __rmul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... - def __format__(self, format_spec: str, /) -> str: ... + def __format__(self, format_spec: str, /) -> str: + """Return a formatted version of the string as described by format_spec. +""" @disjoint_base class bytes(Sequence[int]): + """bytes(iterable_of_ints) -> bytes +bytes(string, encoding[, errors]) -> bytes +bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer +bytes(int) -> bytes object of size given by the parameter initialized with null bytes +bytes() -> empty bytes object + +Construct an immutable array of bytes from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - any object implementing the buffer API. + - an integer +""" @overload def __new__(cls, o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, /) -> Self: ... @overload def __new__(cls, string: str, /, encoding: str, errors: str = "strict") -> Self: ... @overload def __new__(cls) -> Self: ... - def capitalize(self) -> bytes: ... - def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: ... + def capitalize(self) -> bytes: + """B.capitalize() -> copy of B + +Return a copy of B with only its first character capitalized (ASCII) +and the rest lower-cased. +""" + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: + """Return a centered string of length width. + +Padding is done using the specified fill character. +""" def count( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + ) -> int: + """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. +""" + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Decode the bytes using the codec registered for encoding. + + encoding + The encoding with which to decode the bytes. 
+ errors + The error handling scheme to use for the handling of decoding errors. + The default is 'strict' meaning that decoding errors raise a + UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that + can handle UnicodeDecodeErrors. +""" def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /, - ) -> bool: ... - def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... + ) -> bool: + """Return True if the bytes ends with the specified suffix, False otherwise. + + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. +""" + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: + """Return a copy where all tab characters are expanded using spaces. + +If tabsize is not given, a tab size of 8 characters is assumed. +""" def find( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + ) -> int: + """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Return -1 on failure. +""" + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: + """Create a string of hexadecimal numbers from a bytes object. + + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + +Example: +>>> value = b'\\xb9\\x01\\xef' +>>> value.hex() +'b901ef' +>>> value.hex(':') +'b9:01:ef' +>>> value.hex(':', 2) +'b9:01ef' +>>> value.hex(':', -2) +'b901:ef' +""" def index( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def isalnum(self) -> bool: ... - def isalpha(self) -> bool: ... - def isascii(self) -> bool: ... - def isdigit(self) -> bool: ... - def islower(self) -> bool: ... - def isspace(self) -> bool: ... - def istitle(self) -> bool: ... - def isupper(self) -> bool: ... - def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: ... - def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... - def lower(self) -> bytes: ... - def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... - def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... - def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: ... - def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: ... - def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: ... + ) -> int: + """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Raise ValueError if the subsection is not found. +""" + def isalnum(self) -> bool: + """B.isalnum() -> bool + +Return True if all characters in B are alphanumeric +and there is at least one character in B, False otherwise. 
+""" + def isalpha(self) -> bool: + """B.isalpha() -> bool + +Return True if all characters in B are alphabetic +and there is at least one character in B, False otherwise. +""" + def isascii(self) -> bool: + """B.isascii() -> bool + +Return True if B is empty or all characters in B are ASCII, +False otherwise. +""" + def isdigit(self) -> bool: + """B.isdigit() -> bool + +Return True if all characters in B are digits +and there is at least one character in B, False otherwise. +""" + def islower(self) -> bool: + """B.islower() -> bool + +Return True if all cased characters in B are lowercase and there is +at least one cased character in B, False otherwise. +""" + def isspace(self) -> bool: + """B.isspace() -> bool + +Return True if all characters in B are whitespace +and there is at least one character in B, False otherwise. +""" + def istitle(self) -> bool: + """B.istitle() -> bool + +Return True if B is a titlecased string and there is at least one +character in B, i.e. uppercase characters may only follow uncased +characters and lowercase characters only cased ones. Return False +otherwise. +""" + def isupper(self) -> bool: + """B.isupper() -> bool + +Return True if all cased characters in B are uppercase and there is +at least one cased character in B, False otherwise. +""" + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: + """Concatenate any number of bytes objects. + +The bytes whose method is called is inserted in between each pair. + +The result is returned as a new bytes object. + +Example: b'.'.join([b'ab', b'pq', b'rs']) -> b'ab.pq.rs'. +""" + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: + """Return a left-justified string of length width. + +Padding is done using the specified fill character. +""" + def lower(self) -> bytes: + """B.lower() -> copy of B + +Return a copy of B with all ASCII characters converted to lowercase. +""" + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: + """Strip leading bytes contained in the argument. + +If the argument is omitted or None, strip leading ASCII whitespace. +""" + def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: + """Partition the bytes into three parts using the given separator. + +This will search for the separator sep in the bytes. If the separator is found, +returns a 3-tuple containing the part before the separator, the separator +itself, and the part after it. + +If the separator is not found, returns a 3-tuple containing the original bytes +object and two empty bytes objects. +""" + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: + """Return a copy with all occurrences of substring old replaced by new. + + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + +If the optional argument count is given, only the first count occurrences are +replaced. +""" + def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: + """Return a bytes object with the given prefix string removed if present. + +If the bytes starts with the prefix string, return bytes[len(prefix):]. +Otherwise, return a copy of the original bytes. +""" + def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: + """Return a bytes object with the given suffix string removed if present. + +If the bytes ends with the suffix string and that suffix is not empty, +return bytes[:-len(prefix)]. Otherwise, return a copy of the original +bytes. 
+""" def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... + ) -> int: + """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Return -1 on failure. +""" def rindex( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: ... - def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: ... - def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... - def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... - def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... - def splitlines(self, keepends: bool = False) -> list[bytes]: ... + ) -> int: + """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Raise ValueError if the subsection is not found. +""" + def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: + """Return a right-justified string of length width. + +Padding is done using the specified fill character. +""" + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: + """Partition the bytes into three parts using the given separator. + +This will search for the separator sep in the bytes, starting at the end. If +the separator is found, returns a 3-tuple containing the part before the +separator, the separator itself, and the part after it. + +If the separator is not found, returns a 3-tuple containing two empty bytes +objects and the original bytes object. +""" + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: + """Return a list of the sections in the bytes, using sep as the delimiter. + + sep + The delimiter according which to split the bytes. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + +Splitting is done starting at the end of the bytes and working to the front. +""" + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: + """Strip trailing bytes contained in the argument. + +If the argument is omitted or None, strip trailing ASCII whitespace. +""" + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: + """Return a list of the sections in the bytes, using sep as the delimiter. + + sep + The delimiter according which to split the bytes. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. +""" + def splitlines(self, keepends: bool = False) -> list[bytes]: + """Return a list of the lines in the bytes, breaking at line boundaries. + +Line breaks are not included in the resulting list unless keepends is given and +true. 
+""" def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /, - ) -> bool: ... - def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: ... - def swapcase(self) -> bytes: ... - def title(self) -> bytes: ... - def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: ... - def upper(self) -> bytes: ... - def zfill(self, width: SupportsIndex, /) -> bytes: ... + ) -> bool: + """Return True if the bytes starts with the specified prefix, False otherwise. + + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. +""" + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: + """Strip leading and trailing bytes contained in the argument. + +If the argument is omitted or None, strip leading and trailing ASCII whitespace. +""" + def swapcase(self) -> bytes: + """B.swapcase() -> copy of B + +Return a copy of B with uppercase ASCII characters converted +to lowercase ASCII and vice versa. +""" + def title(self) -> bytes: + """B.title() -> copy of B + +Return a titlecased version of B, i.e. ASCII words start with uppercase +characters, all remaining cased characters have lowercase. +""" + def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: + """Return a copy with each character mapped by the given translation table. + + table + Translation table, which must be a bytes object of length 256. + +All characters occurring in the optional argument delete are removed. +The remaining characters are mapped through the given translation table. +""" + def upper(self) -> bytes: + """B.upper() -> copy of B + +Return a copy of B with all ASCII characters converted to uppercase. +""" + def zfill(self, width: SupportsIndex, /) -> bytes: + """Pad a numeric string with zeros on the left, to fill a field of the given width. + +The original string is never truncated. +""" @classmethod - def fromhex(cls, string: str, /) -> Self: ... + def fromhex(cls, string: str, /) -> Self: + """Create a bytes object from a string of hexadecimal numbers. + +Spaces between two numbers are accepted. +Example: bytes.fromhex('B9 01EF') -> b'\\\\xb9\\\\x01\\\\xef'. +""" @staticmethod - def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[int]: ... + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: + """Return a translation table usable for the bytes or bytearray translate method. + +The returned table will be one where each byte in frm is mapped to the byte at +the same position in to. + +The bytes objects frm and to must be of the same length. +""" + def __len__(self) -> int: + """Return len(self). +""" + def __iter__(self) -> Iterator[int]: + """Implement iter(self). +""" def __hash__(self) -> int: ... @overload - def __getitem__(self, key: SupportsIndex, /) -> int: ... + def __getitem__(self, key: SupportsIndex, /) -> int: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> bytes: ... - def __add__(self, value: ReadableBuffer, /) -> bytes: ... - def __mul__(self, value: SupportsIndex, /) -> bytes: ... - def __rmul__(self, value: SupportsIndex, /) -> bytes: ... - def __mod__(self, value: Any, /) -> bytes: ... + def __add__(self, value: ReadableBuffer, /) -> bytes: + """Return self+value. 
+""" + def __mul__(self, value: SupportsIndex, /) -> bytes: + """Return self*value. +""" + def __rmul__(self, value: SupportsIndex, /) -> bytes: + """Return value*self. +""" + def __mod__(self, value: Any, /) -> bytes: + """Return self%value. +""" # Incompatible with Sequence.__contains__ - def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] + """Return bool(key in self). +""" def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: bytes, /) -> bool: ... @@ -754,122 +1806,489 @@ class bytes(Sequence[int]): def __ge__(self, value: bytes, /) -> bool: ... def __getnewargs__(self) -> tuple[bytes]: ... if sys.version_info >= (3, 11): - def __bytes__(self) -> bytes: ... + def __bytes__(self) -> bytes: + """Convert this value to exact type bytes. +""" - def __buffer__(self, flags: int, /) -> memoryview: ... + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" @disjoint_base class bytearray(MutableSequence[int]): + """bytearray(iterable_of_ints) -> bytearray +bytearray(string, encoding[, errors]) -> bytearray +bytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer +bytearray(int) -> bytes array of size given by the parameter initialized with null bytes +bytearray() -> empty bytes array + +Construct a mutable bytearray object from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - a bytes or a buffer object + - any object implementing the buffer API. + - an integer +""" @overload def __init__(self) -> None: ... @overload def __init__(self, ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer, /) -> None: ... @overload def __init__(self, string: str, /, encoding: str, errors: str = "strict") -> None: ... - def append(self, item: SupportsIndex, /) -> None: ... - def capitalize(self) -> bytearray: ... - def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: ... + def append(self, item: SupportsIndex, /) -> None: + """Append a single item to the end of the bytearray. + + item + The item to be appended. +""" + def capitalize(self) -> bytearray: + """B.capitalize() -> copy of B + +Return a copy of B with only its first character capitalized (ASCII) +and the rest lower-cased. +""" + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: + """Return a centered string of length width. + +Padding is done using the specified fill character. +""" def count( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def copy(self) -> bytearray: ... - def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + ) -> int: + """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. +""" + def copy(self) -> bytearray: + """Return a copy of B. +""" + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Decode the bytearray using the codec registered for encoding. + + encoding + The encoding with which to decode the bytearray. + errors + The error handling scheme to use for the handling of decoding errors. 
+ The default is 'strict' meaning that decoding errors raise a + UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that + can handle UnicodeDecodeErrors. +""" def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /, - ) -> bool: ... - def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... - def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: ... + ) -> bool: + """Return True if the bytearray ends with the specified suffix, False otherwise. + + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. +""" + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: + """Return a copy where all tab characters are expanded using spaces. + +If tabsize is not given, a tab size of 8 characters is assumed. +""" + def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: + """Append all the items from the iterator or sequence to the end of the bytearray. + + iterable_of_ints + The iterable of items to append. +""" def find( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... + ) -> int: + """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Return -1 on failure. +""" + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: + """Create a string of hexadecimal numbers from a bytearray object. + + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + +Example: +>>> value = bytearray([0xb9, 0x01, 0xef]) +>>> value.hex() +'b901ef' +>>> value.hex(':') +'b9:01:ef' +>>> value.hex(':', 2) +'b9:01ef' +>>> value.hex(':', -2) +'b901:ef' +""" def index( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: ... - def isalnum(self) -> bool: ... - def isalpha(self) -> bool: ... - def isascii(self) -> bool: ... - def isdigit(self) -> bool: ... - def islower(self) -> bool: ... - def isspace(self) -> bool: ... - def istitle(self) -> bool: ... - def isupper(self) -> bool: ... - def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: ... - def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... - def lower(self) -> bytearray: ... - def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... - def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... - def pop(self, index: int = -1, /) -> int: ... - def remove(self, value: int, /) -> None: ... - def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: ... - def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: ... - def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: ... 
+ ) -> int: + """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Raise ValueError if the subsection is not found. +""" + def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: + """Insert a single item into the bytearray before the given index. + + index + The index where the value is to be inserted. + item + The item to be inserted. +""" + def isalnum(self) -> bool: + """B.isalnum() -> bool + +Return True if all characters in B are alphanumeric +and there is at least one character in B, False otherwise. +""" + def isalpha(self) -> bool: + """B.isalpha() -> bool + +Return True if all characters in B are alphabetic +and there is at least one character in B, False otherwise. +""" + def isascii(self) -> bool: + """B.isascii() -> bool + +Return True if B is empty or all characters in B are ASCII, +False otherwise. +""" + def isdigit(self) -> bool: + """B.isdigit() -> bool + +Return True if all characters in B are digits +and there is at least one character in B, False otherwise. +""" + def islower(self) -> bool: + """B.islower() -> bool + +Return True if all cased characters in B are lowercase and there is +at least one cased character in B, False otherwise. +""" + def isspace(self) -> bool: + """B.isspace() -> bool + +Return True if all characters in B are whitespace +and there is at least one character in B, False otherwise. +""" + def istitle(self) -> bool: + """B.istitle() -> bool + +Return True if B is a titlecased string and there is at least one +character in B, i.e. uppercase characters may only follow uncased +characters and lowercase characters only cased ones. Return False +otherwise. +""" + def isupper(self) -> bool: + """B.isupper() -> bool + +Return True if all cased characters in B are uppercase and there is +at least one cased character in B, False otherwise. +""" + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: + """Concatenate any number of bytes/bytearray objects. + +The bytearray whose method is called is inserted in between each pair. + +The result is returned as a new bytearray object. +""" + def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: + """Return a left-justified string of length width. + +Padding is done using the specified fill character. +""" + def lower(self) -> bytearray: + """B.lower() -> copy of B + +Return a copy of B with all ASCII characters converted to lowercase. +""" + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: + """Strip leading bytes contained in the argument. + +If the argument is omitted or None, strip leading ASCII whitespace. +""" + def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: + """Partition the bytearray into three parts using the given separator. + +This will search for the separator sep in the bytearray. If the separator is +found, returns a 3-tuple containing the part before the separator, the +separator itself, and the part after it as new bytearray objects. + +If the separator is not found, returns a 3-tuple containing the copy of the +original bytearray object and two empty bytearray objects. +""" + def pop(self, index: int = -1, /) -> int: + """Remove and return a single item from B. + + index + The index from where to remove the item. + -1 (the default value) means remove the last item. 
+ +If no index argument is given, will pop the last item. +""" + def remove(self, value: int, /) -> None: + """Remove the first occurrence of a value in the bytearray. + + value + The value to remove. +""" + def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: + """Return a bytearray with the given prefix string removed if present. + +If the bytearray starts with the prefix string, return +bytearray[len(prefix):]. Otherwise, return a copy of the original +bytearray. +""" + def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: + """Return a bytearray with the given suffix string removed if present. + +If the bytearray ends with the suffix string and that suffix is not +empty, return bytearray[:-len(suffix)]. Otherwise, return a copy of +the original bytearray. +""" + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: + """Return a copy with all occurrences of substring old replaced by new. + + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + +If the optional argument count is given, only the first count occurrences are +replaced. +""" def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... + ) -> int: + """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Return -1 on failure. +""" def rindex( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / - ) -> int: ... - def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: ... - def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: ... - def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... - def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... - def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... - def splitlines(self, keepends: bool = False) -> list[bytearray]: ... + ) -> int: + """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. + + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + +Raise ValueError if the subsection is not found. +""" + def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: + """Return a right-justified string of length width. + +Padding is done using the specified fill character. +""" + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: + """Partition the bytearray into three parts using the given separator. + +This will search for the separator sep in the bytearray, starting at the end. +If the separator is found, returns a 3-tuple containing the part before the +separator, the separator itself, and the part after it as new bytearray +objects. + +If the separator is not found, returns a 3-tuple containing two empty bytearray +objects and the copy of the original bytearray object. 
+""" + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: + """Return a list of the sections in the bytearray, using sep as the delimiter. + + sep + The delimiter according which to split the bytearray. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + +Splitting is done starting at the end of the bytearray and working to the front. +""" + def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: + """Strip trailing bytes contained in the argument. + +If the argument is omitted or None, strip trailing ASCII whitespace. +""" + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: + """Return a list of the sections in the bytearray, using sep as the delimiter. + + sep + The delimiter according which to split the bytearray. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. +""" + def splitlines(self, keepends: bool = False) -> list[bytearray]: + """Return a list of the lines in the bytearray, breaking at line boundaries. + +Line breaks are not included in the resulting list unless keepends is given and +true. +""" def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /, - ) -> bool: ... - def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: ... - def swapcase(self) -> bytearray: ... - def title(self) -> bytearray: ... - def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: ... - def upper(self) -> bytearray: ... - def zfill(self, width: SupportsIndex, /) -> bytearray: ... + ) -> bool: + """Return True if the bytearray starts with the specified prefix, False otherwise. + + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. +""" + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: + """Strip leading and trailing bytes contained in the argument. + +If the argument is omitted or None, strip leading and trailing ASCII whitespace. +""" + def swapcase(self) -> bytearray: + """B.swapcase() -> copy of B + +Return a copy of B with uppercase ASCII characters converted +to lowercase ASCII and vice versa. +""" + def title(self) -> bytearray: + """B.title() -> copy of B + +Return a titlecased version of B, i.e. ASCII words start with uppercase +characters, all remaining cased characters have lowercase. +""" + def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: + """Return a copy with each character mapped by the given translation table. + + table + Translation table, which must be a bytes object of length 256. + +All characters occurring in the optional argument delete are removed. +The remaining characters are mapped through the given translation table. +""" + def upper(self) -> bytearray: + """B.upper() -> copy of B + +Return a copy of B with all ASCII characters converted to uppercase. +""" + def zfill(self, width: SupportsIndex, /) -> bytearray: + """Pad a numeric string with zeros on the left, to fill a field of the given width. 
+ +The original string is never truncated. +""" @classmethod - def fromhex(cls, string: str, /) -> Self: ... + def fromhex(cls, string: str, /) -> Self: + """Create a bytearray object from a string of hexadecimal numbers. + +Spaces between two numbers are accepted. +Example: bytearray.fromhex('B9 01EF') -> bytearray(b'\\\\xb9\\\\x01\\\\xef') +""" @staticmethod - def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[int]: ... + def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: + """Return a translation table usable for the bytes or bytearray translate method. + +The returned table will be one where each byte in frm is mapped to the byte at +the same position in to. + +The bytes objects frm and to must be of the same length. +""" + def __len__(self) -> int: + """Return len(self). +""" + def __iter__(self) -> Iterator[int]: + """Implement iter(self). +""" __hash__: ClassVar[None] # type: ignore[assignment] @overload - def __getitem__(self, key: SupportsIndex, /) -> int: ... + def __getitem__(self, key: SupportsIndex, /) -> int: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> bytearray: ... @overload - def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: ... + def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: slice, value: Iterable[SupportsIndex] | bytes, /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... - def __add__(self, value: ReadableBuffer, /) -> bytearray: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: + """Delete self[key]. +""" + def __add__(self, value: ReadableBuffer, /) -> bytearray: + """Return self+value. +""" # The superclass wants us to accept Iterable[int], but that fails at runtime. - def __iadd__(self, value: ReadableBuffer, /) -> Self: ... # type: ignore[override] - def __mul__(self, value: SupportsIndex, /) -> bytearray: ... - def __rmul__(self, value: SupportsIndex, /) -> bytearray: ... - def __imul__(self, value: SupportsIndex, /) -> Self: ... - def __mod__(self, value: Any, /) -> bytes: ... + def __iadd__(self, value: ReadableBuffer, /) -> Self: # type: ignore[override] + """Implement self+=value. +""" + def __mul__(self, value: SupportsIndex, /) -> bytearray: + """Return self*value. +""" + def __rmul__(self, value: SupportsIndex, /) -> bytearray: + """Return value*self. +""" + def __imul__(self, value: SupportsIndex, /) -> Self: + """Implement self*=value. +""" + def __mod__(self, value: Any, /) -> bytes: + """Return self%value. +""" # Incompatible with Sequence.__contains__ - def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: ... # type: ignore[override] + def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] + """Return bool(key in self). +""" def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: ReadableBuffer, /) -> bool: ... def __le__(self, value: ReadableBuffer, /) -> bool: ... def __gt__(self, value: ReadableBuffer, /) -> bool: ... def __ge__(self, value: ReadableBuffer, /) -> bool: ... - def __alloc__(self) -> int: ... - def __buffer__(self, flags: int, /) -> memoryview: ... - def __release_buffer__(self, buffer: memoryview, /) -> None: ... 
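A brief illustration of the maketrans/translate pairing and of fromhex, as described in the docstrings above (values invented for this sketch):

>>> table = bytes.maketrans(b"abc", b"xyz")       # frm and to must have equal length
>>> bytearray(b"aabbcc").translate(table, b"b")   # drop b"b", then map a->x and c->z
bytearray(b'xxzz')
>>> bytearray.fromhex("B9 01EF")                  # spaces between byte pairs are accepted
bytearray(b'\xb9\x01\xef')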
+ def __alloc__(self) -> int: + """B.__alloc__() -> int + +Return the number of bytes actually allocated. +""" + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object. +""" if sys.version_info >= (3, 14): - def resize(self, size: int, /) -> None: ... + def resize(self, size: int, /) -> None: + """Resize the internal buffer of bytearray to len. + + size + New size to resize to.. +""" _IntegerFormats: TypeAlias = Literal[ "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P" @@ -877,30 +2296,61 @@ _IntegerFormats: TypeAlias = Literal[ @final class memoryview(Sequence[_I]): + """Create a new memoryview object which references the given object. +""" @property - def format(self) -> str: ... + def format(self) -> str: + """A string containing the format (in struct module style) + for each element in the view. +""" @property - def itemsize(self) -> int: ... + def itemsize(self) -> int: + """The size in bytes of each element of the memoryview. +""" @property - def shape(self) -> tuple[int, ...] | None: ... + def shape(self) -> tuple[int, ...] | None: + """A tuple of ndim integers giving the shape of the memory + as an N-dimensional array. +""" @property - def strides(self) -> tuple[int, ...] | None: ... + def strides(self) -> tuple[int, ...] | None: + """A tuple of ndim integers giving the size in bytes to access + each element for each dimension of the array. +""" @property - def suboffsets(self) -> tuple[int, ...] | None: ... + def suboffsets(self) -> tuple[int, ...] | None: + """A tuple of integers used internally for PIL-style arrays. +""" @property - def readonly(self) -> bool: ... + def readonly(self) -> bool: + """A bool indicating whether the memory is read only. +""" @property - def ndim(self) -> int: ... + def ndim(self) -> int: + """An integer indicating how many dimensions of a multi-dimensional + array the memory represents. +""" @property - def obj(self) -> ReadableBuffer: ... + def obj(self) -> ReadableBuffer: + """The underlying object of the memoryview. +""" @property - def c_contiguous(self) -> bool: ... + def c_contiguous(self) -> bool: + """A bool indicating whether the memory is C contiguous. +""" @property - def f_contiguous(self) -> bool: ... + def f_contiguous(self) -> bool: + """A bool indicating whether the memory is Fortran contiguous. +""" @property - def contiguous(self) -> bool: ... + def contiguous(self) -> bool: + """A bool indicating whether the memory is contiguous. +""" @property - def nbytes(self) -> int: ... + def nbytes(self) -> int: + """The amount of space in bytes that the array would use in + a contiguous representation. +""" def __new__(cls, obj: ReadableBuffer) -> Self: ... def __enter__(self) -> Self: ... def __exit__( @@ -909,9 +2359,13 @@ class memoryview(Sequence[_I]): exc_val: BaseException | None, exc_tb: TracebackType | None, /, - ) -> None: ... + ) -> None: + """Release the underlying buffer exposed by the memoryview object. +""" @overload - def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: ... + def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: + """Cast a memoryview to a new format or shape. 
+""" @overload def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: ... @overload @@ -919,72 +2373,148 @@ class memoryview(Sequence[_I]): @overload def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... @overload - def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: ... + def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> memoryview[_I]: ... def __contains__(self, x: object, /) -> bool: ... - def __iter__(self) -> Iterator[_I]: ... - def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_I]: + """Implement iter(self). +""" + def __len__(self) -> int: + """Return len(self). +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload - def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... + def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], value: _I, /) -> None: ... if sys.version_info >= (3, 10): - def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: + """Return the data in the buffer as a byte string. + +Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the +original array is converted to C or Fortran order. For contiguous views, +'A' returns an exact copy of the physical memory. In particular, in-memory +Fortran order is preserved. For non-contiguous views, the data is converted +to C first. order=None is the same as order='C'. +""" else: - def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... - - def tolist(self) -> list[int]: ... - def toreadonly(self) -> memoryview: ... - def release(self) -> None: ... - def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: ... - def __buffer__(self, flags: int, /) -> memoryview: ... - def __release_buffer__(self, buffer: memoryview, /) -> None: ... + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: + """Return the data in the buffer as a byte string. Order can be {'C', 'F', 'A'}. +When order is 'C' or 'F', the data of the original array is converted to C or +Fortran order. For contiguous views, 'A' returns an exact copy of the physical +memory. In particular, in-memory Fortran order is preserved. For non-contiguous +views, the data is converted to C first. order=None is the same as order='C'. +""" + + def tolist(self) -> list[int]: + """Return the data in the buffer as a list of elements. +""" + def toreadonly(self) -> memoryview: + """Return a readonly version of the memoryview. +""" + def release(self) -> None: + """Release the underlying buffer exposed by the memoryview object. +""" + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: + """Return the data in the buffer as a str of hexadecimal numbers. + + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. 
+ +Example: +>>> value = memoryview(b'\\xb9\\x01\\xef') +>>> value.hex() +'b901ef' +>>> value.hex(':') +'b9:01:ef' +>>> value.hex(':', 2) +'b9:01ef' +>>> value.hex(':', -2) +'b901:ef' +""" + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object. +""" # These are inherited from the Sequence ABC, but don't actually exist on memoryview. # See https://github.com/python/cpython/issues/125420 index: ClassVar[None] # type: ignore[assignment] count: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @final class bool(int): + """Returns True when the argument is true, False otherwise. +The builtins True and False are the only two instances of the class bool. +The class bool is a subclass of the class int, and cannot be subclassed. +""" def __new__(cls, o: object = False, /) -> Self: ... # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload - def __and__(self, value: bool, /) -> bool: ... + def __and__(self, value: bool, /) -> bool: + """Return self&value. +""" @overload def __and__(self, value: int, /) -> int: ... @overload - def __or__(self, value: bool, /) -> bool: ... + def __or__(self, value: bool, /) -> bool: + """Return self|value. +""" @overload def __or__(self, value: int, /) -> int: ... @overload - def __xor__(self, value: bool, /) -> bool: ... + def __xor__(self, value: bool, /) -> bool: + """Return self^value. +""" @overload def __xor__(self, value: int, /) -> int: ... @overload - def __rand__(self, value: bool, /) -> bool: ... + def __rand__(self, value: bool, /) -> bool: + """Return value&self. +""" @overload def __rand__(self, value: int, /) -> int: ... @overload - def __ror__(self, value: bool, /) -> bool: ... + def __ror__(self, value: bool, /) -> bool: + """Return value|self. +""" @overload def __ror__(self, value: int, /) -> int: ... @overload - def __rxor__(self, value: bool, /) -> bool: ... + def __rxor__(self, value: bool, /) -> bool: + """Return value^self. +""" @overload def __rxor__(self, value: int, /) -> int: ... def __getnewargs__(self) -> tuple[int]: ... @deprecated("Will throw an error in Python 3.16. Use `not` for logical negation of bools instead.") - def __invert__(self) -> int: ... + def __invert__(self) -> int: + """~self +""" @final class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): + """slice(stop) +slice(start, stop[, step]) + +Create a slice object. This is used for extended slicing (e.g. a[0:10:2]). +""" @property def start(self) -> _StartT_co: ... @property @@ -1021,18 +2551,40 @@ class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): else: __hash__: ClassVar[None] # type: ignore[assignment] - def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: ... + def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: + """S.indices(len) -> (start, stop, stride) + +Assuming a sequence of length len, calculate the start and stop +indices, and the stride length of the extended slice described by +S. Out of bounds indices are clipped in a manner consistent with the +handling of normal slices. 
+""" @disjoint_base class tuple(Sequence[_T_co]): + """Built-in immutable sequence. + +If no argument is given, the constructor returns an empty tuple. +If iterable is specified the tuple is initialized from iterable's items. + +If the argument is a tuple, the return value is the same object. +""" def __new__(cls, iterable: Iterable[_T_co] = (), /) -> Self: ... - def __len__(self) -> int: ... - def __contains__(self, key: object, /) -> bool: ... + def __len__(self) -> int: + """Return len(self). +""" + def __contains__(self, key: object, /) -> bool: + """Return bool(key in self). +""" @overload - def __getitem__(self, key: SupportsIndex, /) -> _T_co: ... + def __getitem__(self, key: SupportsIndex, /) -> _T_co: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> tuple[_T_co, ...]: ... - def __iter__(self) -> Iterator[_T_co]: ... + def __iter__(self) -> Iterator[_T_co]: + """Implement iter(self). +""" def __lt__(self, value: tuple[_T_co, ...], /) -> bool: ... def __le__(self, value: tuple[_T_co, ...], /) -> bool: ... def __gt__(self, value: tuple[_T_co, ...], /) -> bool: ... @@ -1040,14 +2592,28 @@ class tuple(Sequence[_T_co]): def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload - def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: ... + def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: + """Return self+value. +""" @overload def __add__(self, value: tuple[_T, ...], /) -> tuple[_T_co | _T, ...]: ... - def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... - def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: ... - def count(self, value: Any, /) -> int: ... - def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: + """Return self*value. +""" + def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: + """Return value*self. +""" + def count(self, value: Any, /) -> int: + """Return number of occurrences of value. +""" + def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: + """Return first index of value. + +Raises ValueError if the value is not present. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" # Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: # https://github.com/python/typeshed/issues/7580 @@ -1102,61 +2668,134 @@ class function: @disjoint_base class list(MutableSequence[_T]): + """Built-in mutable sequence. + +If no argument is given, the constructor creates a new empty list. +The argument must be an iterable if specified. +""" @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def copy(self) -> list[_T]: ... - def append(self, object: _T, /) -> None: ... - def extend(self, iterable: Iterable[_T], /) -> None: ... - def pop(self, index: SupportsIndex = -1, /) -> _T: ... + def copy(self) -> list[_T]: + """Return a shallow copy of the list. +""" + def append(self, object: _T, /) -> None: + """Append object to the end of the list. +""" + def extend(self, iterable: Iterable[_T], /) -> None: + """Extend list by appending elements from the iterable. +""" + def pop(self, index: SupportsIndex = -1, /) -> _T: + """Remove and return item at index (default last). 
+ +Raises IndexError if list is empty or index is out of range. +""" # Signature of `list.index` should be kept in line with `collections.UserList.index()` # and multiprocessing.managers.ListProxy.index() - def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: ... - def count(self, value: _T, /) -> int: ... - def insert(self, index: SupportsIndex, object: _T, /) -> None: ... - def remove(self, value: _T, /) -> None: ... + def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: + """Return first index of value. + +Raises ValueError if the value is not present. +""" + def count(self, value: _T, /) -> int: + """Return number of occurrences of value. +""" + def insert(self, index: SupportsIndex, object: _T, /) -> None: + """Insert object before index. +""" + def remove(self, value: _T, /) -> None: + """Remove first occurrence of value. + +Raises ValueError if the value is not present. +""" # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` # and multiprocessing.managers.ListProxy.sort() # # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: + """Sort the list in ascending order and return None. + +The sort is in-place (i.e. the list itself is modified) and stable (i.e. the +order of two equal elements is maintained). + +If a key function is given, apply it once to each list item and sort them, +ascending or descending, according to their function values. + +The reverse flag can be set to sort in descending order. +""" @overload def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[_T]: ... + def __len__(self) -> int: + """Return len(self). +""" + def __iter__(self) -> Iterator[_T]: + """Implement iter(self). +""" __hash__: ClassVar[None] # type: ignore[assignment] @overload - def __getitem__(self, i: SupportsIndex, /) -> _T: ... + def __getitem__(self, i: SupportsIndex, /) -> _T: + """Return self[index]. +""" @overload def __getitem__(self, s: slice, /) -> list[_T]: ... @overload - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: slice, value: Iterable[_T], /) -> None: ... - def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: + """Delete self[key]. +""" # Overloading looks unnecessary, but is needed to work around complex mypy problems @overload - def __add__(self, value: list[_T], /) -> list[_T]: ... + def __add__(self, value: list[_T], /) -> list[_T]: + """Return self+value. +""" @overload def __add__(self, value: list[_S], /) -> list[_S | _T]: ... - def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[misc] - def __mul__(self, value: SupportsIndex, /) -> list[_T]: ... - def __rmul__(self, value: SupportsIndex, /) -> list[_T]: ... - def __imul__(self, value: SupportsIndex, /) -> Self: ... - def __contains__(self, key: object, /) -> bool: ... - def __reversed__(self) -> Iterator[_T]: ... 
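The list.sort docstring above describes the keyword-only key and reverse parameters; a minimal illustrative example (not part of the patch):

>>> words = ["pear", "fig", "banana"]
>>> words.sort(key=len)                 # in-place and stable; returns None
>>> words
['fig', 'pear', 'banana']
>>> words.sort(key=len, reverse=True)   # same key, descending order
>>> words
['banana', 'pear', 'fig']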
+ def __iadd__(self, value: Iterable[_T], /) -> Self: # type: ignore[misc] + """Implement self+=value. +""" + def __mul__(self, value: SupportsIndex, /) -> list[_T]: + """Return self*value. +""" + def __rmul__(self, value: SupportsIndex, /) -> list[_T]: + """Return value*self. +""" + def __imul__(self, value: SupportsIndex, /) -> Self: + """Implement self*=value. +""" + def __contains__(self, key: object, /) -> bool: + """Return bool(key in self). +""" + def __reversed__(self) -> Iterator[_T]: + """Return a reverse iterator over the list. +""" def __gt__(self, value: list[_T], /) -> bool: ... def __ge__(self, value: list[_T], /) -> bool: ... def __lt__(self, value: list[_T], /) -> bool: ... def __le__(self, value: list[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @disjoint_base class dict(MutableMapping[_KT, _VT]): + """dict() -> new empty dictionary +dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs +dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v +dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. For example: dict(one=1, two=2) +""" # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics # Also multiprocessing.managers.SyncManager.dict() @overload @@ -1188,134 +2827,294 @@ class dict(MutableMapping[_KT, _VT]): @overload def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... - def copy(self) -> dict[_KT, _VT]: ... - def keys(self) -> dict_keys[_KT, _VT]: ... - def values(self) -> dict_values[_KT, _VT]: ... - def items(self) -> dict_items[_KT, _VT]: ... + def copy(self) -> dict[_KT, _VT]: + """Return a shallow copy of the dict. +""" + def keys(self) -> dict_keys[_KT, _VT]: + """Return a set-like object providing a view on the dict's keys. +""" + def values(self) -> dict_values[_KT, _VT]: + """Return an object providing a view on the dict's values. +""" + def items(self) -> dict_items[_KT, _VT]: + """Return a set-like object providing a view on the dict's items. +""" # Signature of `dict.fromkeys` should be kept identical to # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: + """Create a new dictionary with keys from iterable and values set to value. +""" @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]: ... # Positional-only in dict, but not in MutableMapping @overload # type: ignore[override] - def get(self, key: _KT, default: None = None, /) -> _VT | None: ... + def get(self, key: _KT, default: None = None, /) -> _VT | None: + """Return the value for key if key is in the dictionary, else default. +""" @overload def get(self, key: _KT, default: _VT, /) -> _VT: ... @overload def get(self, key: _KT, default: _T, /) -> _VT | _T: ... @overload - def pop(self, key: _KT, /) -> _VT: ... 
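A short sketch of dict.fromkeys and the defaulting behaviour of dict.get described above (illustrative values only):

>>> counts = dict.fromkeys(["a", "b"], 0)   # every key gets the same initial value
>>> counts
{'a': 0, 'b': 0}
>>> counts.get("c", -1)                     # default is returned for a missing key
-1
>>> counts.get("c") is None                 # without a default, None is returned
True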
+ def pop(self, key: _KT, /) -> _VT: + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + +If the key is not found, return the default if given; otherwise, +raise a KeyError. +""" @overload def pop(self, key: _KT, default: _VT, /) -> _VT: ... @overload def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... - def __len__(self) -> int: ... - def __getitem__(self, key: _KT, /) -> _VT: ... - def __setitem__(self, key: _KT, value: _VT, /) -> None: ... - def __delitem__(self, key: _KT, /) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: + """Return len(self). +""" + def __getitem__(self, key: _KT, /) -> _VT: + """Return self[key]. +""" + def __setitem__(self, key: _KT, value: _VT, /) -> None: + """Set self[key] to value. +""" + def __delitem__(self, key: _KT, /) -> None: + """Delete self[key]. +""" + def __iter__(self) -> Iterator[_KT]: + """Implement iter(self). +""" def __eq__(self, value: object, /) -> bool: ... - def __reversed__(self) -> Iterator[_KT]: ... + def __reversed__(self) -> Iterator[_KT]: + """Return a reverse iterator over the dict keys. +""" __hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @overload - def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: + """Return self|value. +""" @overload def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: ... + def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: + """Return value|self. +""" @overload def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: ... + def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: + """Return self|=value. +""" @overload def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... @disjoint_base class set(MutableSet[_T]): + """Build an unordered collection of unique elements. +""" @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def add(self, element: _T, /) -> None: ... - def copy(self) -> set[_T]: ... - def difference(self, *s: Iterable[Any]) -> set[_T]: ... - def difference_update(self, *s: Iterable[Any]) -> None: ... - def discard(self, element: _T, /) -> None: ... - def intersection(self, *s: Iterable[Any]) -> set[_T]: ... - def intersection_update(self, *s: Iterable[Any]) -> None: ... - def isdisjoint(self, s: Iterable[Any], /) -> bool: ... - def issubset(self, s: Iterable[Any], /) -> bool: ... - def issuperset(self, s: Iterable[Any], /) -> bool: ... - def remove(self, element: _T, /) -> None: ... - def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: ... - def symmetric_difference_update(self, s: Iterable[_T], /) -> None: ... - def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... - def update(self, *s: Iterable[_T]) -> None: ... - def __len__(self) -> int: ... - def __contains__(self, o: object, /) -> bool: ... - def __iter__(self) -> Iterator[_T]: ... - def __and__(self, value: AbstractSet[object], /) -> set[_T]: ... - def __iand__(self, value: AbstractSet[object], /) -> Self: ... 
- def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... - def __ior__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] - def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: ... - def __isub__(self, value: AbstractSet[object], /) -> Self: ... - def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: ... - def __ixor__(self, value: AbstractSet[_T], /) -> Self: ... # type: ignore[override,misc] + def add(self, element: _T, /) -> None: + """Add an element to a set. + +This has no effect if the element is already present. +""" + def copy(self) -> set[_T]: + """Return a shallow copy of a set. +""" + def difference(self, *s: Iterable[Any]) -> set[_T]: + """Return a new set with elements in the set that are not in the others. +""" + def difference_update(self, *s: Iterable[Any]) -> None: + """Update the set, removing elements found in others. +""" + def discard(self, element: _T, /) -> None: + """Remove an element from a set if it is a member. + +Unlike set.remove(), the discard() method does not raise +an exception when an element is missing from the set. +""" + def intersection(self, *s: Iterable[Any]) -> set[_T]: + """Return a new set with elements common to the set and all others. +""" + def intersection_update(self, *s: Iterable[Any]) -> None: + """Update the set, keeping only elements found in it and all others. +""" + def isdisjoint(self, s: Iterable[Any], /) -> bool: + """Return True if two sets have a null intersection. +""" + def issubset(self, s: Iterable[Any], /) -> bool: + """Report whether another set contains this set. +""" + def issuperset(self, s: Iterable[Any], /) -> bool: + """Report whether this set contains another set. +""" + def remove(self, element: _T, /) -> None: + """Remove an element from a set; it must be a member. + +If the element is not a member, raise a KeyError. +""" + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: + """Return a new set with elements in either the set or other but not both. +""" + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: + """Update the set, keeping only elements found in either set, but not in both. +""" + def union(self, *s: Iterable[_S]) -> set[_T | _S]: + """Return a new set with elements from the set and all others. +""" + def update(self, *s: Iterable[_T]) -> None: + """Update the set, adding elements from all others. +""" + def __len__(self) -> int: + """Return len(self). +""" + def __contains__(self, o: object, /) -> bool: + """x.__contains__(y) <==> y in x. +""" + def __iter__(self) -> Iterator[_T]: + """Implement iter(self). +""" + def __and__(self, value: AbstractSet[object], /) -> set[_T]: + """Return self&value. +""" + def __iand__(self, value: AbstractSet[object], /) -> Self: + """Return self&=value. +""" + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: + """Return self|value. +""" + def __ior__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] + """Return self|=value. +""" + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: + """Return self-value. +""" + def __isub__(self, value: AbstractSet[object], /) -> Self: + """Return self-=value. +""" + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: + """Return self^value. +""" + def __ixor__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] + """Return self^=value. +""" def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... 
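To make the set-algebra docstrings above concrete, a small doctest-style example (values invented for illustration):

>>> a, b = {1, 2, 3}, {3, 4}
>>> sorted(a - b)                       # __sub__ / difference(): items of a not in b
[1, 2]
>>> sorted(a.symmetric_difference(b))   # in either set but not both
[1, 2, 4]
>>> a.isdisjoint({5, 6})                # no elements in common
True
>>> a |= b                              # __ior__ / update(): in-place union
>>> sorted(a)
[1, 2, 3, 4]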
def __ge__(self, value: AbstractSet[object], /) -> bool: ... def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @disjoint_base class frozenset(AbstractSet[_T_co]): + """Build an immutable unordered collection of unique elements. +""" @overload def __new__(cls) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T_co], /) -> Self: ... - def copy(self) -> frozenset[_T_co]: ... - def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... - def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... - def isdisjoint(self, s: Iterable[_T_co], /) -> bool: ... - def issubset(self, s: Iterable[object], /) -> bool: ... - def issuperset(self, s: Iterable[object], /) -> bool: ... - def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: ... - def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... - def __len__(self) -> int: ... - def __contains__(self, o: object, /) -> bool: ... - def __iter__(self) -> Iterator[_T_co]: ... - def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... - def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... - def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: ... - def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: ... + def copy(self) -> frozenset[_T_co]: + """Return a shallow copy of a set. +""" + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: + """Return a new set with elements in the set that are not in the others. +""" + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: + """Return a new set with elements common to the set and all others. +""" + def isdisjoint(self, s: Iterable[_T_co], /) -> bool: + """Return True if two sets have a null intersection. +""" + def issubset(self, s: Iterable[object], /) -> bool: + """Report whether another set contains this set. +""" + def issuperset(self, s: Iterable[object], /) -> bool: + """Report whether this set contains another set. +""" + def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: + """Return a new set with elements in either the set or other but not both. +""" + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: + """Return a new set with elements from the set and all others. +""" + def __len__(self) -> int: + """Return len(self). +""" + def __contains__(self, o: object, /) -> bool: + """x.__contains__(y) <==> y in x. +""" + def __iter__(self) -> Iterator[_T_co]: + """Implement iter(self). +""" + def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: + """Return self&value. +""" + def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: + """Return self|value. +""" + def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: + """Return self-value. +""" + def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: + """Return self^value. +""" def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... def __ge__(self, value: AbstractSet[object], /) -> bool: ... def __gt__(self, value: AbstractSet[object], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @disjoint_base class enumerate(Generic[_T]): + """Return an enumerate object. + + iterable + an object supporting iteration + +The enumerate object yields pairs containing a count (from start, which +defaults to zero) and a value yielded by the iterable argument. + +enumerate is useful for obtaining an indexed list: + (0, seq[0]), (1, seq[1]), (2, seq[2]), ... +""" def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> tuple[int, _T]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> tuple[int, _T]: + """Implement next(self). +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @final class range(Sequence[int]): + """range(stop) -> range object +range(start, stop[, step]) -> range object + +Return an object that produces a sequence of integers from start (inclusive) +to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1. +start defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3. +These are exactly the valid indices for a list of 4 elements. +When step is given, it specifies the increment (or decrement). +""" @property def start(self) -> int: ... @property @@ -1326,21 +3125,69 @@ class range(Sequence[int]): def __new__(cls, stop: SupportsIndex, /) -> Self: ... @overload def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = 1, /) -> Self: ... - def count(self, value: int, /) -> int: ... - def index(self, value: int, /) -> int: ... # type: ignore[override] - def __len__(self) -> int: ... + def count(self, value: int, /) -> int: + """rangeobject.count(value) -> integer -- return number of occurrences of value +""" + def index(self, value: int, /) -> int: # type: ignore[override] + """rangeobject.index(value) -> integer -- return index of value. +Raise ValueError if the value is not present. +""" + def __len__(self) -> int: + """Return len(self). +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __contains__(self, key: object, /) -> bool: ... - def __iter__(self) -> Iterator[int]: ... + def __contains__(self, key: object, /) -> bool: + """Return bool(key in self). +""" + def __iter__(self) -> Iterator[int]: + """Implement iter(self). +""" @overload - def __getitem__(self, key: SupportsIndex, /) -> int: ... + def __getitem__(self, key: SupportsIndex, /) -> int: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> range: ... - def __reversed__(self) -> Iterator[int]: ... + def __reversed__(self) -> Iterator[int]: + """Return a reverse iterator. +""" @disjoint_base class property: + """Property attribute. + + fget + function to be used for getting an attribute value + fset + function to be used for setting an attribute value + fdel + function to be used for del'ing an attribute + doc + docstring + +Typical use is to define a managed attribute x: + +class C(object): + def getx(self): return self._x + def setx(self, value): self._x = value + def delx(self): del self._x + x = property(getx, setx, delx, "I'm the 'x' property.") + +Decorators make defining new properties or modifying existing ones easy: + +class C(object): + @property + def x(self): + "I am the 'x' property." 
+ return self._x + @x.setter + def x(self, value): + self._x = value + @x.deleter + def x(self): + del self._x +""" fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], None] | None fdel: Callable[[Any], None] | None @@ -1355,15 +3202,27 @@ class property: fdel: Callable[[Any], None] | None = None, doc: str | None = None, ) -> None: ... - def getter(self, fget: Callable[[Any], Any], /) -> property: ... - def setter(self, fset: Callable[[Any, Any], None], /) -> property: ... - def deleter(self, fdel: Callable[[Any], None], /) -> property: ... - @overload - def __get__(self, instance: None, owner: type, /) -> Self: ... + def getter(self, fget: Callable[[Any], Any], /) -> property: + """Descriptor to obtain a copy of the property with a different getter. +""" + def setter(self, fset: Callable[[Any, Any], None], /) -> property: + """Descriptor to obtain a copy of the property with a different setter. +""" + def deleter(self, fdel: Callable[[Any], None], /) -> property: + """Descriptor to obtain a copy of the property with a different deleter. +""" + @overload + def __get__(self, instance: None, owner: type, /) -> Self: + """Return an attribute of instance, which is of type owner. +""" @overload def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... - def __set__(self, instance: Any, value: Any, /) -> None: ... - def __delete__(self, instance: Any, /) -> None: ... + def __set__(self, instance: Any, value: Any, /) -> None: + """Set an attribute of instance to value. +""" + def __delete__(self, instance: Any, /) -> None: + """Delete an attribute of instance. +""" @final @type_check_only @@ -1372,17 +3231,53 @@ class _NotImplementedType(Any): NotImplemented: _NotImplementedType -def abs(x: SupportsAbs[_T], /) -> _T: ... -def all(iterable: Iterable[object], /) -> bool: ... -def any(iterable: Iterable[object], /) -> bool: ... -def ascii(obj: object, /) -> str: ... -def bin(number: int | SupportsIndex, /) -> str: ... -def breakpoint(*args: Any, **kws: Any) -> None: ... -def callable(obj: object, /) -> TypeIs[Callable[..., object]]: ... -def chr(i: int | SupportsIndex, /) -> str: ... +def abs(x: SupportsAbs[_T], /) -> _T: + """Return the absolute value of the argument. +""" +def all(iterable: Iterable[object], /) -> bool: + """Return True if bool(x) is True for all values x in the iterable. + +If the iterable is empty, return True. +""" +def any(iterable: Iterable[object], /) -> bool: + """Return True if bool(x) is True for any x in the iterable. + +If the iterable is empty, return False. +""" +def ascii(obj: object, /) -> str: + """Return an ASCII-only representation of an object. + +As repr(), return a string containing a printable representation of an +object, but escape the non-ASCII characters in the string returned by +repr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar +to that returned by repr() in Python 2. +""" +def bin(number: int | SupportsIndex, /) -> str: + """Return the binary representation of an integer. + + >>> bin(2796202) + '0b1010101010101010101010' +""" +def breakpoint(*args: Any, **kws: Any) -> None: + """Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept +whatever arguments are passed. + +By default, this drops you into the pdb debugger. +""" +def callable(obj: object, /) -> TypeIs[Callable[..., object]]: + """Return whether the object is callable (i.e., some kind of function). + +Note that classes are callable, as are instances of classes with a +__call__() method. 
+""" +def chr(i: int | SupportsIndex, /) -> str: + """Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff. +""" if sys.version_info >= (3, 10): - def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: ... + def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: + """Return an AsyncIterator for an AsyncIterable object. +""" @type_check_only class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): def __anext__(self) -> _AwaitableT_co: ... @@ -1391,7 +3286,12 @@ if sys.version_info >= (3, 10): # `anext` is not, in fact, an async function. When default is not provided # `anext` is just a passthrough for `obj.__anext__` # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 - def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: ... + def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: + """Return the next item from the async iterator. + +If default is given and the async iterator is exhausted, +it is returned instead of raising StopAsyncIteration. +""" @overload async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT: ... @@ -1408,7 +3308,20 @@ def compile( optimize: int = -1, *, _feature_version: int = -1, -) -> CodeType: ... +) -> CodeType: + """Compile source into a code object that can be executed by exec() or eval(). + +The source code may represent a Python module, statement or expression. +The filename will be used for run-time error messages. +The mode must be 'exec' to compile a module, 'single' to compile a +single (interactive) statement, or 'eval' to compile an expression. +The flags argument, if present, controls which future statements influence +the compilation of the code. +The dont_inherit argument, if true, stops the compilation inheriting +the effects of any future statements in effect in the code calling +compile; if absent or false these statements do influence the compilation, +in addition to any features explicitly specified. +""" @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, @@ -1445,10 +3358,29 @@ def compile( copyright: _sitebuiltins._Printer credits: _sitebuiltins._Printer -def delattr(obj: object, name: str, /) -> None: ... -def dir(o: object = ..., /) -> list[str]: ... +def delattr(obj: object, name: str, /) -> None: + """Deletes the named attribute from the given object. + +delattr(x, 'y') is equivalent to ``del x.y`` +""" +def dir(o: object = ..., /) -> list[str]: + """dir([object]) -> list of strings + +If called without an argument, return the names in the current scope. +Else, return an alphabetized list of names comprising (some of) the attributes +of the given object, and of attributes reachable from it. +If the object supplies a method named __dir__, it will be used; otherwise +the default dir() logic is used and returns: + for a module object: the module's attributes. + for a class object: its attributes, and recursively the attributes + of its bases. + for any other object: its attributes, its class's attributes, and + recursively the attributes of its class's base classes. +""" @overload -def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: ... +def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: + """Return the tuple (x//y, x%y). Invariant: div*y + mod == x. 
+""" @overload def divmod(x: _T_contra, y: SupportsRDivMod[_T_contra, _T_co], /) -> _T_co: ... @@ -1460,7 +3392,15 @@ if sys.version_info >= (3, 13): /, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, - ) -> Any: ... + ) -> Any: + """Evaluate the given source in the context of globals and locals. + +The source may be a string representing a Python expression +or a code object as returned by compile(). +The globals must be a dictionary and locals can be any mapping, +defaulting to the current globals and locals. +If only globals is given, locals defaults to it. +""" else: def eval( @@ -1468,7 +3408,15 @@ else: globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, /, - ) -> Any: ... + ) -> Any: + """Evaluate the given source in the context of globals and locals. + +The source may be a string representing a Python expression +or a code object as returned by compile(). +The globals must be a dictionary and locals can be any mapping, +defaulting to the current globals and locals. +If only globals is given, locals defaults to it. +""" # Comment above regarding `eval` applies to `exec` as well if sys.version_info >= (3, 13): @@ -1479,7 +3427,17 @@ if sys.version_info >= (3, 13): locals: Mapping[str, object] | None = None, *, closure: tuple[CellType, ...] | None = None, - ) -> None: ... + ) -> None: + """Execute the given source in the context of globals and locals. + +The source may be a string representing one or more Python statements +or a code object as returned by compile(). +The globals must be a dictionary and locals can be any mapping, +defaulting to the current globals and locals. +If only globals is given, locals defaults to it. +The closure must be a tuple of cellvars, and can only be used +when source is a code object requiring exactly that many cellvars. +""" elif sys.version_info >= (3, 11): def exec( @@ -1489,7 +3447,17 @@ elif sys.version_info >= (3, 11): /, *, closure: tuple[CellType, ...] | None = None, - ) -> None: ... + ) -> None: + """Execute the given source in the context of globals and locals. + +The source may be a string representing one or more Python statements +or a code object as returned by compile(). +The globals must be a dictionary and locals can be any mapping, +defaulting to the current globals and locals. +If only globals is given, locals defaults to it. +The closure must be a tuple of cellvars, and can only be used +when source is a code object requiring exactly that many cellvars. +""" else: def exec( @@ -1497,12 +3465,23 @@ else: globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, /, - ) -> None: ... + ) -> None: + """Execute the given source in the context of globals and locals. + +The source may be a string representing one or more Python statements +or a code object as returned by compile(). +The globals must be a dictionary and locals can be any mapping, +defaulting to the current globals and locals. +If only globals is given, locals defaults to it. +""" exit: _sitebuiltins.Quitter @disjoint_base class filter(Generic[_T]): + """Return an iterator yielding those items of iterable for which function(item) +is true. If function is None, return the items that are true. +""" @overload def __new__(cls, function: None, iterable: Iterable[_T | None], /) -> Self: ... @overload @@ -1511,12 +3490,31 @@ class filter(Generic[_T]): def __new__(cls, function: Callable[[_S], TypeIs[_T]], iterable: Iterable[_S], /) -> Self: ... 
@overload def __new__(cls, function: Callable[[_T], Any], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... - -def format(value: object, format_spec: str = "", /) -> str: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" + +def format(value: object, format_spec: str = "", /) -> str: + """Return type(value).__format__(value, format_spec) + +Many built-in types implement format_spec according to the +Format Specification Mini-language. See help('FORMATTING'). + +If type(value) does not supply a method named __format__ +and format_spec is empty, then str(value) is returned. +See also help('SPECIALMETHODS'). +""" @overload -def getattr(o: object, name: str, /) -> Any: ... +def getattr(o: object, name: str, /) -> Any: + """getattr(object, name[, default]) -> value + +Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. +When a default argument is given, it is returned when the attribute doesn't +exist; without it, an exception is raised in that case. +""" # While technically covered by the last overload, spelling out the types for None, bool # and basic containers help mypy out in some tricky situations involving type context @@ -1531,21 +3529,60 @@ def getattr(o: object, name: str, default: list[Any], /) -> Any | list[Any]: ... def getattr(o: object, name: str, default: dict[Any, Any], /) -> Any | dict[Any, Any]: ... @overload def getattr(o: object, name: str, default: _T, /) -> Any | _T: ... -def globals() -> dict[str, Any]: ... -def hasattr(obj: object, name: str, /) -> bool: ... -def hash(obj: object, /) -> int: ... +def globals() -> dict[str, Any]: + """Return the dictionary containing the current scope's global variables. + +NOTE: Updates to this dictionary *will* affect name lookups in the current +global scope and vice-versa. +""" +def hasattr(obj: object, name: str, /) -> bool: + """Return whether the object has an attribute with the given name. + +This is done by calling getattr(obj, name) and catching AttributeError. +""" +def hash(obj: object, /) -> int: + """Return the hash value for the given object. + +Two objects that compare equal must also have the same hash value, but the +reverse is not necessarily true. +""" help: _sitebuiltins._Helper -def hex(number: int | SupportsIndex, /) -> str: ... -def id(obj: object, /) -> int: ... -def input(prompt: object = "", /) -> str: ... +def hex(number: int | SupportsIndex, /) -> str: + """Return the hexadecimal representation of an integer. + + >>> hex(12648430) + '0xc0ffee' +""" +def id(obj: object, /) -> int: + """Return the identity of an object. + +This is guaranteed to be unique among simultaneously existing objects. +(CPython uses the object's memory address.) +""" +def input(prompt: object = "", /) -> str: + """Read a string from standard input. The trailing newline is stripped. + +The prompt string, if given, is printed to standard output without a +trailing newline before reading input. + +If the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError. +On *nix systems, readline is used if available. +""" @type_check_only class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, i: int, /) -> _T_co: ... @overload -def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: ... 
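A quick illustration of the getattr default described in the overloads above and of hasattr (module and attribute names chosen only for demonstration):

>>> import math
>>> getattr(math, "tau")                  # equivalent to math.tau
6.283185307179586
>>> getattr(math, "no_such_name", 0.0)    # the default suppresses AttributeError
0.0
>>> hasattr(math, "no_such_name")
False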
+def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: + """iter(iterable) -> iterator +iter(callable, sentinel) -> iterator + +Get an iterator from an object. In the first form, the argument must +supply its own iterator, or be a sequence. +In the second form, the callable is called until it returns the sentinel. +""" @overload def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ... @overload @@ -1558,15 +3595,41 @@ if sys.version_info >= (3, 10): else: _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] -def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: ... -def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: ... -def len(obj: Sized, /) -> int: ... +def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: + """Return whether an object is an instance of a class or of a subclass thereof. + +A tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to +check against. This is equivalent to ``isinstance(x, A) or isinstance(x, B) +or ...`` etc. +""" +def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: + """Return whether 'cls' is derived from another class or is the same class. + +A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to +check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B) +or ...``. +""" +def len(obj: Sized, /) -> int: + """Return the number of items in a container. +""" license: _sitebuiltins._Printer -def locals() -> dict[str, Any]: ... +def locals() -> dict[str, Any]: + """Return a dictionary containing the current scope's local variables. + +NOTE: Whether or not updates to this dictionary will affect name lookups in +the local scope and vice-versa is *implementation dependent* and not +covered by any backwards compatibility guarantees. +""" @disjoint_base class map(Generic[_S]): + """Make an iterator that computes the function using arguments from +each of the iterables. Stops when the shortest iterable is exhausted. + +If strict is true and one of the arguments is exhausted before the others, +raise a ValueError. +""" # 3.14 adds `strict` argument. if sys.version_info >= (3, 14): @overload @@ -1669,13 +3732,25 @@ class map(Generic[_S]): *iterables: Iterable[Any], ) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _S: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _S: + """Implement next(self). +""" @overload def max( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None -) -> SupportsRichComparisonT: ... +) -> SupportsRichComparisonT: + """max(iterable, *[, default=obj, key=func]) -> value +max(arg1, arg2, *args, *[, key=func]) -> value + +With a single iterable argument, return its biggest item. The +default keyword-only argument specifies an object to return if +the provided iterable is empty. +With two or more positional arguments, return the largest argument. +""" @overload def max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -1689,7 +3764,15 @@ def max(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparis @overload def min( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None -) -> SupportsRichComparisonT: ... 
+) -> SupportsRichComparisonT: + """min(iterable, *[, default=obj, key=func]) -> value +min(arg1, arg2, *args, *[, key=func]) -> value + +With a single iterable argument, return its smallest item. The +default keyword-only argument specifies an object to return if +the provided iterable is empty. +With two or more positional arguments, return the smallest argument. +""" @overload def min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -1701,10 +3784,20 @@ def min(iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, def @overload def min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload -def next(i: SupportsNext[_T], /) -> _T: ... +def next(i: SupportsNext[_T], /) -> _T: + """next(iterator[, default]) + +Return the next item from the iterator. If default is given and the iterator +is exhausted, it is returned instead of raising StopIteration. +""" @overload def next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT: ... -def oct(number: int | SupportsIndex, /) -> str: ... +def oct(number: int | SupportsIndex, /) -> str: + """Return the octal representation of an integer. + + >>> oct(342391) + '0o1234567' +""" _Opener: TypeAlias = Callable[[str, int], int] @@ -1719,7 +3812,121 @@ def open( newline: str | None = None, closefd: bool = True, opener: _Opener | None = None, -) -> TextIOWrapper: ... +) -> TextIOWrapper: + """Open file and return a stream. Raise OSError upon failure. + +file is either a text or byte string giving the name (and the path +if the file isn't in the current working directory) of the file to +be opened or an integer file descriptor of the file to be +wrapped. (If a file descriptor is given, it is closed when the +returned I/O object is closed, unless closefd is set to False.) + +mode is an optional string that specifies the mode in which the file +is opened. It defaults to 'r' which means open for reading in text +mode. Other common values are 'w' for writing (truncating the file if +it already exists), 'x' for creating and writing to a new file, and +'a' for appending (which on some Unix systems, means that all writes +append to the end of the file regardless of the current seek position). +In text mode, if encoding is not specified the encoding used is platform +dependent: locale.getencoding() is called to get the current locale encoding. +(For reading and writing raw bytes use binary mode and leave encoding +unspecified.) The available modes are: + +========= =============================================================== +Character Meaning +--------- --------------------------------------------------------------- +'r' open for reading (default) +'w' open for writing, truncating the file first +'x' create a new file and open it for writing +'a' open for writing, appending to the end of the file if it exists +'b' binary mode +'t' text mode (default) +'+' open a disk file for updating (reading and writing) +========= =============================================================== + +The default mode is 'rt' (open for reading text). For binary random +access, the mode 'w+b' opens and truncates the file to 0 bytes, while +'r+b' opens the file without truncation. The 'x' mode implies 'w' and +raises an `FileExistsError` if the file already exists. + +Python distinguishes between files opened in binary and text modes, +even when the underlying operating system doesn't. 
Files opened in +binary mode (appending 'b' to the mode argument) return contents as +bytes objects without any decoding. In text mode (the default, or when +'t' is appended to the mode argument), the contents of the file are +returned as strings, the bytes having been first decoded using a +platform-dependent encoding or using the specified encoding if given. + +buffering is an optional integer used to set the buffering policy. +Pass 0 to switch buffering off (only allowed in binary mode), 1 to select +line buffering (only usable in text mode), and an integer > 1 to indicate +the size of a fixed-size chunk buffer. When no buffering argument is +given, the default buffering policy works as follows: + +* Binary files are buffered in fixed-size chunks; the size of the buffer + is max(min(blocksize, 8 MiB), DEFAULT_BUFFER_SIZE) + when the device block size is available. + On most systems, the buffer will typically be 128 kilobytes long. + +* "Interactive" text files (files for which isatty() returns True) + use line buffering. Other text files use the policy described above + for binary files. + +encoding is the name of the encoding used to decode or encode the +file. This should only be used in text mode. The default encoding is +platform dependent, but any encoding supported by Python can be +passed. See the codecs module for the list of supported encodings. + +errors is an optional string that specifies how encoding errors are to +be handled---this argument should not be used in binary mode. Pass +'strict' to raise a ValueError exception if there is an encoding error +(the default of None has the same effect), or pass 'ignore' to ignore +errors. (Note that ignoring encoding errors can lead to data loss.) +See the documentation for codecs.register or run 'help(codecs.Codec)' +for a list of the permitted encoding error strings. + +newline controls how universal newlines works (it only applies to text +mode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as +follows: + +* On input, if newline is None, universal newlines mode is + enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and + these are translated into '\\n' before being returned to the + caller. If it is '', universal newline mode is enabled, but line + endings are returned to the caller untranslated. If it has any of + the other legal values, input lines are only terminated by the given + string, and the line ending is returned to the caller untranslated. + +* On output, if newline is None, any '\\n' characters written are + translated to the system default line separator, os.linesep. If + newline is '' or '\\n', no translation takes place. If newline is any + of the other legal values, any '\\n' characters written are translated + to the given string. + +If closefd is False, the underlying file descriptor will be kept open +when the file is closed. This does not work when a file name is given +and must be True in that case. + +A custom opener can be used by passing a callable as *opener*. The +underlying file descriptor for the file object is then obtained by +calling *opener* with (*file*, *flags*). *opener* must return an open +file descriptor (passing os.open as *opener* results in functionality +similar to passing None). + +open() returns a file object whose type depends on the mode, and +through which the standard file operations such as reading and writing +are performed. When open() is used to open a file in a text mode ('w', +'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. 
When used to open +a file in a binary mode, the returned class varies: in read binary +mode, it returns a BufferedReader; in write binary and append binary +modes, it returns a BufferedWriter, and in read/write mode, it returns +a BufferedRandom. + +It is also possible to use a string or bytearray as a file for both +reading and writing. For strings StringIO can be used like a file +opened in a text mode, and for bytes a BytesIO can be used like a file +opened in a binary mode. +""" # Unbuffered binary mode: returns a FileIO @overload @@ -1794,7 +4001,15 @@ def open( closefd: bool = True, opener: _Opener | None = None, ) -> IO[Any]: ... -def ord(c: str | bytes | bytearray, /) -> int: ... +def ord(c: str | bytes | bytearray, /) -> int: + """Return the ordinal value of a character. + +If the argument is a one-character string, return the Unicode code +point of that character. + +If the argument is a bytes or bytearray object of length 1, return its +single byte value. +""" @type_check_only class _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ... @@ -1805,7 +4020,18 @@ def print( end: str | None = "\n", file: SupportsWrite[str] | None = None, flush: Literal[False] = False, -) -> None: ... +) -> None: + """Prints the values to a stream, or to sys.stdout by default. + + sep + string inserted between values, default a space. + end + string appended after the last value, default a newline. + file + a file-like object (stream); defaults to the current sys.stdout. + flush + whether to forcibly flush the stream. +""" @overload def print( *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool @@ -1833,7 +4059,12 @@ _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs a # TODO: `pow(int, int, Literal[0])` fails at runtime, # but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). @overload -def pow(base: int, exp: int, mod: int) -> int: ... +def pow(base: int, exp: int, mod: int) -> int: + """Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments + +Some types, such as ints, are able to use a more efficient algorithm when +invoked using the three argument form. +""" @overload def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... @overload @@ -1874,15 +4105,27 @@ quit: _sitebuiltins.Quitter @disjoint_base class reversed(Generic[_T]): + """Return a reverse iterator over the values of the given sequence. +""" @overload def __new__(cls, sequence: Reversible[_T], /) -> Iterator[_T]: ... # type: ignore[misc] @overload def __new__(cls, sequence: SupportsLenAndGetItem[_T], /) -> Iterator[_T]: ... # type: ignore[misc] - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... - def __length_hint__(self) -> int: ... - -def repr(obj: object, /) -> str: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" + def __length_hint__(self) -> int: + """Private method returning an estimate of len(list(it)). +""" + +def repr(obj: object, /) -> str: + """Return the canonical string representation of the object. + +For many object types, including most builtins, eval(repr(obj)) == obj. +""" # See https://github.com/python/typeshed/pull/9141 # and https://github.com/python/typeshed/pull/9151 @@ -1897,17 +4140,31 @@ class _SupportsRound2(Protocol[_T_co]): def __round__(self, ndigits: int, /) -> _T_co: ... 
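Returning to the eval()/exec() docstrings earlier in this hunk: a minimal usage sketch of the globals/locals handling they describe. The namespaces and expressions below are invented for illustration, and the 3.11+ closure= parameter is only mentioned, not exercised.

# Sketch only: eval() takes an expression, exec() takes statements; globals must be
# a real dict, locals may be any mapping, and locals defaults to globals if omitted.
g: dict[str, object] = {"x": 2}
l: dict[str, object] = {}

value = eval("x * 21", g, l)     # name lookup falls back from locals to globals -> 42
exec("y = x + 1", g, l)          # the assignment lands in the locals mapping
assert value == 42 and l["y"] == 3

exec("z = 1", g)                 # with only globals given, locals defaults to it
assert g["z"] == 1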
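A small sketch of the mode-to-return-type behaviour the open() docstring above spells out; the temporary path is purely illustrative.

# Sketch only: 'w'/'r' text modes give a TextIOWrapper, 'rb' a BufferedReader,
# and buffering=0 (binary only) the raw FileIO, as described above.
import io
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.txt")

with open(path, "w", encoding="utf-8", newline="\n") as f:
    assert isinstance(f, io.TextIOWrapper)
    f.write("first line\nsecond line\n")

with open(path, "rb") as fb:                 # buffered binary read
    assert isinstance(fb, io.BufferedReader)
    raw_bytes = fb.read()

with open(path, "rb", buffering=0) as raw:   # unbuffered binary read
    assert isinstance(raw, io.FileIO)

assert raw_bytes == b"first line\nsecond line\n"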
@overload -def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... +def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: + """Round a number to a given precision in decimal digits. + +The return value is an integer if ndigits is omitted or None. Otherwise +the return value has the same type as the number. ndigits may be negative. +""" @overload def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... # See https://github.com/python/typeshed/pull/6292#discussion_r748875189 # for why arg 3 of `setattr` should be annotated with `Any` and not `object` -def setattr(obj: object, name: str, value: Any, /) -> None: ... +def setattr(obj: object, name: str, value: Any, /) -> None: + """Sets the named attribute on the given object to the specified value. + +setattr(x, 'y', v) is equivalent to ``x.y = v`` +""" @overload def sorted( iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, reverse: bool = False -) -> list[SupportsRichComparisonT]: ... +) -> list[SupportsRichComparisonT]: + """Return a new list containing all items from the iterable in ascending order. + +A custom key function can be supplied to customize the sort order, and the +reverse flag can be set to request the result in descending order. +""" @overload def sorted(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... @@ -1924,7 +4181,13 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # without creating many false-positive errors (see #7578). # Instead, we special-case the most common examples of this: bool and literal integers. @overload -def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ... +def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: + """Return the sum of a 'start' value (default: 0) plus an iterable of numbers + +When the iterable is empty, return the start value. +This function is intended specifically for use with numeric values and may +reject non-numeric types. +""" @overload def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ... @overload @@ -1933,11 +4196,27 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A # The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) @overload -def vars(object: type, /) -> types.MappingProxyType[str, Any]: ... +def vars(object: type, /) -> types.MappingProxyType[str, Any]: + """vars([object]) -> dictionary + +Without arguments, equivalent to locals(). +With an argument, equivalent to object.__dict__. +""" @overload def vars(object: Any = ..., /) -> dict[str, Any]: ... @disjoint_base class zip(Generic[_T_co]): + """The zip object yields n-length tuples, where n is the number of iterables +passed as positional arguments to zip(). The i-th element in every tuple +comes from the i-th iterable argument to zip(). This continues until the +shortest argument is exhausted. + +If strict is true and one of the arguments is exhausted before the others, +raise a ValueError. + + >>> list(zip('abcdefg', range(3), range(4))) + [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] +""" if sys.version_info >= (3, 10): @overload def __new__(cls, *, strict: bool = False) -> zip[Any]: ... @@ -2015,8 +4294,12 @@ class zip(Generic[_T_co]): *iterables: Iterable[Any], ) -> zip[tuple[Any, ...]]: ... 
- def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` # Return type of `__import__` should be kept the same as return type of `importlib.import_module` @@ -2026,8 +4309,28 @@ def __import__( locals: Mapping[str, object] | None = None, fromlist: Sequence[str] | None = (), level: int = 0, -) -> types.ModuleType: ... -def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... +) -> types.ModuleType: + """Import a module. + +Because this function is meant for use by the Python +interpreter and not for general use, it is better to use +importlib.import_module() to programmatically import a module. + +The globals argument is only used to determine the context; +they are not modified. The locals argument is unused. The fromlist +should be a list of names to emulate ``from name import ...``, or an +empty list to emulate ``import name``. +When importing a module from a package, note that __import__('A.B', ...) +returns package A when fromlist is empty, but its submodule B when +fromlist is not empty. The level argument is used to determine whether to +perform absolute or relative imports: 0 is absolute, while a positive number +is the number of parent directories to search relative to the current module. +""" +def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: + """__build_class__(func, name, /, *bases, [metaclass], **kwds) -> class + +Internal helper function used by the class statement. +""" if sys.version_info >= (3, 10): from types import EllipsisType @@ -2049,6 +4352,8 @@ else: @disjoint_base class BaseException: + """Common base class for all exceptions +""" args: tuple[Any, ...] __cause__: BaseException | None __context__: BaseException | None @@ -2057,27 +4362,43 @@ class BaseException: def __init__(self, *args: object) -> None: ... def __new__(cls, *args: Any, **kwds: Any) -> Self: ... def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... - def with_traceback(self, tb: TracebackType | None, /) -> Self: ... + def with_traceback(self, tb: TracebackType | None, /) -> Self: + """Set self.__traceback__ to tb and return self. +""" if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] - def add_note(self, note: str, /) -> None: ... + def add_note(self, note: str, /) -> None: + """Add a note to the exception +""" -class GeneratorExit(BaseException): ... -class KeyboardInterrupt(BaseException): ... +class GeneratorExit(BaseException): + """Request that a generator exit. +""" +class KeyboardInterrupt(BaseException): + """Program interrupted by user. +""" @disjoint_base class SystemExit(BaseException): + """Request to exit from the interpreter. +""" code: sys._ExitCode -class Exception(BaseException): ... +class Exception(BaseException): + """Common base class for all non-exit exceptions. +""" @disjoint_base class StopIteration(Exception): + """Signal the end from iterator.__next__(). +""" value: Any @disjoint_base class OSError(Exception): + """Base class for I/O related errors. 
+""" errno: int | None strerror: str | None # filename, filename2 are actually str | bytes | None @@ -2091,24 +4412,38 @@ IOError = OSError if sys.platform == "win32": WindowsError = OSError -class ArithmeticError(Exception): ... -class AssertionError(Exception): ... +class ArithmeticError(Exception): + """Base class for arithmetic errors. +""" +class AssertionError(Exception): + """Assertion failed. +""" if sys.version_info >= (3, 10): @disjoint_base class AttributeError(Exception): + """Attribute not found. +""" def __init__(self, *args: object, name: str | None = None, obj: object = None) -> None: ... name: str | None obj: object else: - class AttributeError(Exception): ... + class AttributeError(Exception): + """Attribute not found. +""" -class BufferError(Exception): ... -class EOFError(Exception): ... +class BufferError(Exception): + """Buffer error. +""" +class EOFError(Exception): + """Read beyond end of file. +""" @disjoint_base class ImportError(Exception): + """Import can't find module, or can't find name in module. +""" def __init__(self, *args: object, name: str | None = None, path: str | None = None) -> None: ... name: str | None path: str | None @@ -2116,24 +4451,40 @@ class ImportError(Exception): if sys.version_info >= (3, 12): name_from: str | None # undocumented -class LookupError(Exception): ... -class MemoryError(Exception): ... +class LookupError(Exception): + """Base class for lookup errors. +""" +class MemoryError(Exception): + """Out of memory. +""" if sys.version_info >= (3, 10): @disjoint_base class NameError(Exception): + """Name not found globally. +""" def __init__(self, *args: object, name: str | None = None) -> None: ... name: str | None else: - class NameError(Exception): ... - -class ReferenceError(Exception): ... -class RuntimeError(Exception): ... -class StopAsyncIteration(Exception): ... + class NameError(Exception): + """Name not found globally. +""" + +class ReferenceError(Exception): + """Weak ref proxy used after referent went away. +""" +class RuntimeError(Exception): + """Unspecified run-time error. +""" +class StopAsyncIteration(Exception): + """Signal the end from iterator.__anext__(). +""" @disjoint_base class SyntaxError(Exception): + """Invalid syntax. +""" msg: str filename: str | None lineno: int | None @@ -2162,42 +4513,107 @@ class SyntaxError(Exception): # If you provide more than two arguments, it still creates the SyntaxError, but # the arguments from the info tuple are not parsed. This form is omitted. -class SystemError(Exception): ... -class TypeError(Exception): ... -class ValueError(Exception): ... -class FloatingPointError(ArithmeticError): ... -class OverflowError(ArithmeticError): ... -class ZeroDivisionError(ArithmeticError): ... -class ModuleNotFoundError(ImportError): ... -class IndexError(LookupError): ... -class KeyError(LookupError): ... -class UnboundLocalError(NameError): ... +class SystemError(Exception): + """Internal error in the Python interpreter. + +Please report this to the Python maintainer, along with the traceback, +the Python version, and the hardware/OS platform and version. +""" +class TypeError(Exception): + """Inappropriate argument type. +""" +class ValueError(Exception): + """Inappropriate argument value (of correct type). +""" +class FloatingPointError(ArithmeticError): + """Floating-point operation failed. +""" +class OverflowError(ArithmeticError): + """Result too large to be represented. +""" +class ZeroDivisionError(ArithmeticError): + """Second argument to a division or modulo operation was zero. 
+""" +class ModuleNotFoundError(ImportError): + """Module not found. +""" +class IndexError(LookupError): + """Sequence index out of range. +""" +class KeyError(LookupError): + """Mapping key not found. +""" +class UnboundLocalError(NameError): + """Local name referenced but not bound to a value. +""" class BlockingIOError(OSError): + """I/O operation would block. +""" characters_written: int -class ChildProcessError(OSError): ... -class ConnectionError(OSError): ... -class BrokenPipeError(ConnectionError): ... -class ConnectionAbortedError(ConnectionError): ... -class ConnectionRefusedError(ConnectionError): ... -class ConnectionResetError(ConnectionError): ... -class FileExistsError(OSError): ... -class FileNotFoundError(OSError): ... -class InterruptedError(OSError): ... -class IsADirectoryError(OSError): ... -class NotADirectoryError(OSError): ... -class PermissionError(OSError): ... -class ProcessLookupError(OSError): ... -class TimeoutError(OSError): ... -class NotImplementedError(RuntimeError): ... -class RecursionError(RuntimeError): ... -class IndentationError(SyntaxError): ... -class TabError(IndentationError): ... -class UnicodeError(ValueError): ... +class ChildProcessError(OSError): + """Child process error. +""" +class ConnectionError(OSError): + """Connection error. +""" +class BrokenPipeError(ConnectionError): + """Broken pipe. +""" +class ConnectionAbortedError(ConnectionError): + """Connection aborted. +""" +class ConnectionRefusedError(ConnectionError): + """Connection refused. +""" +class ConnectionResetError(ConnectionError): + """Connection reset. +""" +class FileExistsError(OSError): + """File already exists. +""" +class FileNotFoundError(OSError): + """File not found. +""" +class InterruptedError(OSError): + """Interrupted by signal. +""" +class IsADirectoryError(OSError): + """Operation doesn't work on directories. +""" +class NotADirectoryError(OSError): + """Operation only works on directories. +""" +class PermissionError(OSError): + """Not enough permissions. +""" +class ProcessLookupError(OSError): + """Process not found. +""" +class TimeoutError(OSError): + """Timeout expired. +""" +class NotImplementedError(RuntimeError): + """Method or function hasn't been implemented yet. +""" +class RecursionError(RuntimeError): + """Recursion limit exceeded. +""" +class IndentationError(SyntaxError): + """Improper indentation. +""" +class TabError(IndentationError): + """Improper mixture of spaces and tabs. +""" +class UnicodeError(ValueError): + """Unicode related error. +""" @disjoint_base class UnicodeDecodeError(UnicodeError): + """Unicode decoding error. +""" encoding: str object: bytes start: int @@ -2207,6 +4623,8 @@ class UnicodeDecodeError(UnicodeError): @disjoint_base class UnicodeEncodeError(UnicodeError): + """Unicode encoding error. +""" encoding: str object: str start: int @@ -2216,6 +4634,8 @@ class UnicodeEncodeError(UnicodeError): @disjoint_base class UnicodeTranslateError(UnicodeError): + """Unicode translation error. +""" encoding: None object: str start: int @@ -2223,20 +4643,48 @@ class UnicodeTranslateError(UnicodeError): reason: str def __init__(self, object: str, start: int, end: int, reason: str, /) -> None: ... -class Warning(Exception): ... -class UserWarning(Warning): ... -class DeprecationWarning(Warning): ... -class SyntaxWarning(Warning): ... -class RuntimeWarning(Warning): ... -class FutureWarning(Warning): ... -class PendingDeprecationWarning(Warning): ... -class ImportWarning(Warning): ... -class UnicodeWarning(Warning): ... 
-class BytesWarning(Warning): ... -class ResourceWarning(Warning): ... +class Warning(Exception): + """Base class for warning categories. +""" +class UserWarning(Warning): + """Base class for warnings generated by user code. +""" +class DeprecationWarning(Warning): + """Base class for warnings about deprecated features. +""" +class SyntaxWarning(Warning): + """Base class for warnings about dubious syntax. +""" +class RuntimeWarning(Warning): + """Base class for warnings about dubious runtime behavior. +""" +class FutureWarning(Warning): + """Base class for warnings about constructs that will change semantically +in the future. +""" +class PendingDeprecationWarning(Warning): + """Base class for warnings about features which will be deprecated +in the future. +""" +class ImportWarning(Warning): + """Base class for warnings about probable mistakes in module imports +""" +class UnicodeWarning(Warning): + """Base class for warnings about Unicode related problems, mostly +related to conversion problems. +""" +class BytesWarning(Warning): + """Base class for warnings about bytes and buffer related problems, mostly +related to conversion from str or comparing to str. +""" +class ResourceWarning(Warning): + """Base class for warnings about resource usage. +""" if sys.version_info >= (3, 10): - class EncodingWarning(Warning): ... + class EncodingWarning(Warning): + """Base class for warnings about encodings. +""" if sys.version_info >= (3, 11): _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException) @@ -2247,12 +4695,18 @@ if sys.version_info >= (3, 11): # See `check_exception_group.py` for use-cases and comments. @disjoint_base class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): + """A combination of multiple unrelated exceptions. +""" def __new__(cls, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> None: ... @property - def message(self) -> str: ... + def message(self) -> str: + """exception message +""" @property - def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... + def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: + """nested exceptions +""" @overload def subgroup( self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / @@ -2282,13 +4736,17 @@ if sys.version_info >= (3, 11): def derive(self, excs: Sequence[_ExceptionT], /) -> ExceptionGroup[_ExceptionT]: ... @overload def derive(self, excs: Sequence[_BaseExceptionT], /) -> BaseExceptionGroup[_BaseExceptionT]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): def __new__(cls, message: str, exceptions: Sequence[_ExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_ExceptionT_co], /) -> None: ... @property - def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... + def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: + """nested exceptions +""" # We accept a narrower type, but that's OK. @overload # type: ignore[override] def subgroup( @@ -2308,4 +4766,6 @@ if sys.version_info >= (3, 11): ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... 
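A brief sketch of the sorted()/sum()/zip() signatures annotated above (key=, start=, and the 3.10+ strict= flag); the sample data is arbitrary.

# Sketch only: key-based sorting, sum() with a start value, and strict zipping.
ordered = sorted(["pear", "fig", "apple"], key=len)    # ['fig', 'pear', 'apple']
total = sum([1, 2, 3], start=10)                       # 16
pairs = list(zip("abc", [1, 2, 3], strict=True))       # 3.10+: lengths must match
assert ordered[0] == "fig" and total == 16 and pairs[-1] == ("c", 3)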
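And for the 3.11+ BaseExceptionGroup/ExceptionGroup methods annotated just above, a minimal sketch of subgroup()/split() and BaseException.add_note(); the group contents are invented for the example.

# Sketch only: requires Python 3.11+ for ExceptionGroup and add_note().
import sys

if sys.version_info >= (3, 11):
    err = ValueError("bad row")
    err.add_note("row 17 of the illustrative input")   # stored on __notes__

    eg = ExceptionGroup("batch failed", [err, TypeError("bad type")])
    only_values = eg.subgroup(ValueError)               # keeps matching leaves only
    matched, rest = eg.split(TypeError)                 # (matching, non-matching) pair
    assert only_values is not None and matched is not None and rest is not None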
if sys.version_info >= (3, 13): - class PythonFinalizationError(RuntimeError): ... + class PythonFinalizationError(RuntimeError): + """Operation blocked during Python finalization. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi index 7bd829d040cb8..0d9b74be7bd07 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi @@ -1,3 +1,8 @@ +"""Interface to the libbzip2 compression library. + +This module provides a file interface, classes for incremental +(de)compression, and functions for one-shot (de)compression. +""" import sys from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -26,8 +31,18 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... -def decompress(data: ReadableBuffer) -> bytes: ... +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: + """Compress a block of data. + +compresslevel, if given, must be a number between 1 and 9. + +For incremental compression, use a BZ2Compressor object instead. +""" +def decompress(data: ReadableBuffer) -> bytes: + """Decompress a block of data. + +For incremental decompression, use a BZ2Decompressor object instead. +""" _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -42,7 +57,26 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> BZ2File: ... +) -> BZ2File: + """Open a bzip2-compressed file in binary or text mode. + +The filename argument can be an actual filename (a str, bytes, or +PathLike object), or an existing file object to read from or write +to. + +The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or +"ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode. +The default mode is "rb", and the default compresslevel is 9. + +For binary mode, this function is equivalent to the BZ2File +constructor: BZ2File(filename, mode, compresslevel). In this case, +the encoding, errors and newline arguments must not be provided. + +For text mode, a BZ2File object is created, and wrapped in an +io.TextIOWrapper instance with the specified encoding, error +handling behavior, and line ending(s). + +""" @overload def open( filename: _ReadableFileobj, @@ -99,21 +133,106 @@ def open( ) -> BZ2File | TextIOWrapper: ... class BZ2File(BaseStream, IO[bytes]): + """A file object providing transparent bzip2 (de)compression. + +A BZ2File can act as a wrapper for an existing file object, or refer +directly to a named file on disk. + +Note that BZ2File provides a *binary* file interface - data read is +returned as bytes, and data to be written should be given as bytes. +""" def __enter__(self) -> Self: ... @overload - def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: + """Open a bzip2-compressed file. + +If filename is a str, bytes, or PathLike object, it gives the +name of the file to be opened. Otherwise, it should be a file +object, which will be used to read or write the compressed data. 
+ +mode can be 'r' for reading (default), 'w' for (over)writing, +'x' for creating exclusively, or 'a' for appending. These can +equivalently be given as 'rb', 'wb', 'xb', and 'ab'. + +If mode is 'w', 'x' or 'a', compresslevel can be a number between 1 +and 9 specifying the level of compression: 1 produces the least +compression, and 9 (default) produces the most compression. + +If mode is 'r', the input file may be the concatenation of +multiple compressed streams. +""" @overload def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... @overload def __init__( self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 ) -> None: ... - def read(self, size: int | None = -1) -> bytes: ... - def read1(self, size: int = -1) -> bytes: ... - def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] - def readinto(self, b: WriteableBuffer) -> int: ... - def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... - def peek(self, n: int = 0) -> bytes: ... - def seek(self, offset: int, whence: int = 0) -> int: ... - def write(self, data: ReadableBuffer) -> int: ... - def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... + def read(self, size: int | None = -1) -> bytes: + """Read up to size uncompressed bytes from the file. + +If size is negative or omitted, read until EOF is reached. +Returns b'' if the file is already at EOF. +""" + def read1(self, size: int = -1) -> bytes: + """Read up to size uncompressed bytes, while trying to avoid +making multiple reads from the underlying stream. Reads up to a +buffer's worth of data if size is negative. + +Returns b'' if the file is at EOF. +""" + def readline(self, size: SupportsIndex = -1) -> bytes: # type: ignore[override] + """Read a line of uncompressed bytes from the file. + +The terminating newline (if present) is retained. If size is +non-negative, no more than size bytes will be read (in which +case the line may be incomplete). Returns b'' if already at EOF. +""" + def readinto(self, b: WriteableBuffer) -> int: + """Read bytes into b. + +Returns the number of bytes read (0 for EOF). +""" + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: + """Read a list of lines of uncompressed bytes from the file. + +size can be specified to control the number of lines read: no +further lines will be read once the total size of the lines read +so far equals or exceeds size. +""" + def peek(self, n: int = 0) -> bytes: + """Return buffered data without advancing the file position. + +Always returns at least one byte of data, unless at EOF. +The exact number of bytes returned is unspecified. +""" + def seek(self, offset: int, whence: int = 0) -> int: + """Change the file position. + +The new position is specified by offset, relative to the +position indicated by whence. Values for whence are: + + 0: start of stream (default); offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive + +Returns the new file position. + +Note that seeking is emulated, so depending on the parameters, +this operation may be extremely slow. +""" + def write(self, data: ReadableBuffer) -> int: + """Write a byte string to the file. + +Returns the number of uncompressed bytes written, which is +always the length of data in bytes. Note that due to buffering, +the file on disk may not reflect the data written until close() +is called. 
+""" + def writelines(self, seq: Iterable[ReadableBuffer]) -> None: + """Write a sequence of byte strings to the file. + +Returns the number of uncompressed bytes written. +seq can be any iterable yielding byte strings. + +Line separators are not added between the written byte strings. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi index e921584d43905..6364cacf8d47b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi @@ -1,3 +1,6 @@ +"""Python interface for the 'lsprof' profiler. +Compatible with the 'profile' module. +""" import _lsprof from _typeshed import StrOrBytesPath, Unused from collections.abc import Callable, Mapping @@ -7,16 +10,39 @@ from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: + """Run statement under profiler optionally saving results in filename + +This function takes a single argument that can be passed to the +"exec" statement, and an optional file name. In all cases this +routine attempts to "exec" its first argument and gather profiling +statistics from the execution. If no file name is present, then this +function automatically prints a simple profiling report, sorted by the +standard name string (file/line/function-name) that is presented in +each line. +""" def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 -) -> None: ... +) -> None: + """Run statement under profiler, supplying your own globals and locals, +optionally saving results in filename. + +statement and filename have the same semantics as profile.run +""" _T = TypeVar("_T") _P = ParamSpec("_P") _Label: TypeAlias = tuple[str, int, str] class Profile(_lsprof.Profiler): + """Profile(timer=None, timeunit=None, subcalls=True, builtins=True) + +Builds a profiler object using the specified timer function. +The default timer is a fast built-in one based on real time. +For custom timer functions returning integers, timeunit can +be a float specifying a scale (i.e. how long each integer unit +is, in seconds). +""" stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi index d00f0d5d2bce3..280cd18c82018 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi @@ -1,3 +1,10 @@ +"""Calendar printing functions + +Note when comparing these calendars to the ones printed by cal(1): By +default, these calendars have Monday as the first day of the week, and +Sunday as the last (the European convention). Use setfirstweekday() to +set the first day of the week (0=Monday, 6=Sunday). +""" import datetime import enum import sys @@ -62,52 +69,182 @@ class IllegalMonthError(ValueError): class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... -def isleap(year: int) -> bool: ... -def leapdays(y1: int, y2: int) -> int: ... -def weekday(year: int, month: int, day: int) -> int: ... 
-def monthrange(year: int, month: int) -> tuple[int, int]: ... +def isleap(year: int) -> bool: + """Return True for leap years, False for non-leap years. +""" +def leapdays(y1: int, y2: int) -> int: + """Return number of leap years in range [y1, y2). +Assume y1 <= y2. +""" +def weekday(year: int, month: int, day: int) -> int: + """Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31). +""" +def monthrange(year: int, month: int) -> tuple[int, int]: + """Return weekday of first day of month (0-6 ~ Mon-Sun) +and number of days (28-31) for year, month. +""" class Calendar: + """ +Base calendar class. This class doesn't do any formatting. It simply +provides data to subclasses. +""" firstweekday: int def __init__(self, firstweekday: int = 0) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... - def iterweekdays(self) -> Iterable[int]: ... - def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... - def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: ... - def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... - def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... - def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... - def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... - def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: ... - def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: ... - def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: ... - def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... - def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... + def iterweekdays(self) -> Iterable[int]: + """ +Return an iterator for one week of weekday numbers starting with the +configured first one. +""" + def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: + """ +Return an iterator for one month. The iterator will yield datetime.date +values and will always iterate through complete weeks, so it will yield +dates outside the specified month. +""" + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: + """ +Like itermonthdates(), but will yield (day number, weekday number) +tuples. For days outside the specified month the day number is 0. +""" + def itermonthdays(self, year: int, month: int) -> Iterable[int]: + """ +Like itermonthdates(), but will yield day numbers. For days outside +the specified month the day number is 0. +""" + def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: + """ +Return a matrix (list of lists) representing a month's calendar. +Each row represents a week; week entries are datetime.date values. +""" + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: + """ +Return a matrix representing a month's calendar. +Each row represents a week; week entries are +(day number, weekday number) tuples. Day numbers outside this month +are zero. +""" + def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: + """ +Return a matrix representing a month's calendar. +Each row represents a week; days outside this month are zero. 
+""" + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: + """ +Return the data for the specified year ready for formatting. The return +value is a list of month rows. Each month row contains up to width months. +Each month contains between 4 and 6 weeks and each week contains 1-7 +days. Days are datetime.date objects. +""" + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: + """ +Return the data for the specified year ready for formatting (similar to +yeardatescalendar()). Entries in the week lists are +(day number, weekday number) tuples. Day numbers outside this month are +zero. +""" + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: + """ +Return the data for the specified year ready for formatting (similar to +yeardatescalendar()). Entries in the week lists are day numbers. +Day numbers outside this month are zero. +""" + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: + """ +Like itermonthdates(), but will yield (year, month, day) tuples. Can be +used for dates outside of datetime.date range. +""" + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: + """ +Like itermonthdates(), but will yield (year, month, day, day_of_week) tuples. +Can be used for dates outside of datetime.date range. +""" class TextCalendar(Calendar): - def prweek(self, theweek: int, width: int) -> None: ... - def formatday(self, day: int, weekday: int, width: int) -> str: ... - def formatweek(self, theweek: int, width: int) -> str: ... - def formatweekday(self, day: int, width: int) -> str: ... - def formatweekheader(self, width: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... - def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... - def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... - def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... - def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... + """ +Subclass of Calendar that outputs a calendar as a simple plain text +similar to the UNIX program cal. +""" + def prweek(self, theweek: int, width: int) -> None: + """ +Print a single week (no newline). +""" + def formatday(self, day: int, weekday: int, width: int) -> str: + """ +Returns a formatted day. +""" + def formatweek(self, theweek: int, width: int) -> str: + """ +Returns a single week in a string (no newline). +""" + def formatweekday(self, day: int, width: int) -> str: + """ +Returns a formatted week day name. +""" + def formatweekheader(self, width: int) -> str: + """ +Return a header for a week. +""" + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: + """ +Return a formatted month name. +""" + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: + """ +Print a month's calendar. +""" + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: + """ +Return a month's calendar string (multi-line). +""" + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: + """ +Returns a year's calendar as a multi-line string. +""" + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: + """Print a year's calendar. 
+""" def firstweekday() -> int: ... -def monthcalendar(year: int, month: int) -> list[list[int]]: ... -def prweek(theweek: int, width: int) -> None: ... -def week(theweek: int, width: int) -> str: ... -def weekheader(width: int) -> str: ... -def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... -def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... -def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... -def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... +def monthcalendar(year: int, month: int) -> list[list[int]]: + """ +Return a matrix representing a month's calendar. +Each row represents a week; days outside this month are zero. +""" +def prweek(theweek: int, width: int) -> None: + """ +Print a single week (no newline). +""" +def week(theweek: int, width: int) -> str: + """ +Returns a single week in a string (no newline). +""" +def weekheader(width: int) -> str: + """ +Return a header for a week. +""" +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: + """ +Print a month's calendar. +""" +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: + """ +Return a month's calendar string (multi-line). +""" +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: + """ +Returns a year's calendar as a multi-line string. +""" +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: + """Print a year's calendar. +""" class HTMLCalendar(Calendar): + """ +This calendar returns complete HTML pages. +""" cssclasses: ClassVar[list[str]] cssclass_noday: ClassVar[str] cssclasses_weekday_head: ClassVar[list[str]] @@ -115,16 +252,40 @@ class HTMLCalendar(Calendar): cssclass_month: ClassVar[str] cssclass_year: ClassVar[str] cssclass_year_head: ClassVar[str] - def formatday(self, day: int, weekday: int) -> str: ... - def formatweek(self, theweek: int) -> str: ... - def formatweekday(self, day: int) -> str: ... - def formatweekheader(self) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... - def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... - def formatyear(self, theyear: int, width: int = 3) -> str: ... + def formatday(self, day: int, weekday: int) -> str: + """ +Return a day as a table cell. +""" + def formatweek(self, theweek: int) -> str: + """ +Return a complete week as a table row. +""" + def formatweekday(self, day: int) -> str: + """ +Return a weekday name as a table header. +""" + def formatweekheader(self) -> str: + """ +Return a header for a week as a table row. +""" + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: + """ +Return a month name as a table row. +""" + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: + """ +Return a formatted month as a table. +""" + def formatyear(self, theyear: int, width: int = 3) -> str: + """ +Return a formatted year as a table of tables. +""" def formatyearpage( self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None - ) -> bytes: ... + ) -> bytes: + """ +Return a formatted year as a complete HTML page. +""" class different_locale: def __init__(self, locale: _LocaleType) -> None: ... @@ -132,9 +293,17 @@ class different_locale: def __exit__(self, *args: Unused) -> None: ... 
class LocaleTextCalendar(TextCalendar): + """ +This class can be passed a locale name in the constructor and will return +month and weekday names in the specified locale. +""" def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... class LocaleHTMLCalendar(HTMLCalendar): + """ +This class can be passed a locale name in the constructor and will return +month and weekday names in the specified locale. +""" def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... def formatweekday(self, day: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... @@ -142,9 +311,15 @@ class LocaleHTMLCalendar(HTMLCalendar): c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... -def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... -def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... -def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: + """Prints multi-column formatting for year calendars +""" +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: + """Returns a string formatted from n strings, centered within n columns. +""" +def timegm(tuple: tuple[int, ...] | struct_time) -> int: + """Unrelated but handy function to calculate Unix timestamp from GMT. +""" # Data attributes day_name: Sequence[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi index 0f9d4343b6307..336fc90f7a14a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi @@ -1,3 +1,13 @@ +"""Support module for CGI (Common Gateway Interface) scripts. + +This module defines a number of utilities for use by CGI scripts +written in Python. + +The global variable maxlen can be set to an integer indicating the maximum size +of a POST request. POST requests larger than this size will result in a +ValueError being raised during parsing. The default value of this variable is 0, +meaning the request size is unlimited. +""" import os from _typeshed import SupportsContainsAndGetItem, SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type @@ -28,23 +38,78 @@ def parse( keep_blank_values: bool = ..., strict_parsing: bool = ..., separator: str = "&", -) -> dict[str, list[str]]: ... +) -> dict[str, list[str]]: + """Parse a query in the environment or from a file (default stdin) + + Arguments, all optional: + + fp : file pointer; default: sys.stdin.buffer + + environ : environment dictionary; default: os.environ + + keep_blank_values: flag indicating whether blank values in + percent-encoded forms should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + + separator: str. The symbol to use for separating the query arguments. + Defaults to &. + """ def parse_multipart( fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" -) -> dict[str, list[Any]]: ... +) -> dict[str, list[Any]]: + """Parse multipart input. 
+ + Arguments: + fp : input file + pdict: dictionary containing other parameters of content-type header + encoding, errors: request encoding and error handler, passed to + FieldStorage + + Returns a dictionary just like parse_qs(): keys are the field names, each + value is a list of values for that field. For non-file fields, the value + is a list of strings. + """ @type_check_only class _Environ(Protocol): def __getitem__(self, k: str, /) -> str: ... def keys(self) -> Iterable[str]: ... -def parse_header(line: str) -> tuple[str, dict[str, str]]: ... -def test(environ: _Environ = os.environ) -> None: ... -def print_environ(environ: _Environ = os.environ) -> None: ... -def print_form(form: dict[str, Any]) -> None: ... -def print_directory() -> None: ... -def print_environ_usage() -> None: ... +def parse_header(line: str) -> tuple[str, dict[str, str]]: + """Parse a Content-type like header. + + Return the main content-type and a dictionary of options. + + """ +def test(environ: _Environ = os.environ) -> None: + """Robust test CGI script, usable as main program. + + Write minimal HTTP headers and dump all information provided to + the script in HTML form. + + """ +def print_environ(environ: _Environ = os.environ) -> None: + """Dump the shell environment as HTML. +""" +def print_form(form: dict[str, Any]) -> None: + """Dump the contents of a form as HTML. +""" +def print_directory() -> None: + """Dump the current directory as HTML. +""" +def print_environ_usage() -> None: + """Dump a list of environment variables used by CGI as HTML. +""" class MiniFieldStorage: + """Like FieldStorage, for use when no file uploads are possible. +""" # The first five "Any" attributes here are always None, but mypy doesn't support that filename: Any list: Any @@ -56,9 +121,52 @@ class MiniFieldStorage: headers: dict[Any, Any] name: Any value: Any - def __init__(self, name: Any, value: Any) -> None: ... + def __init__(self, name: Any, value: Any) -> None: + """Constructor from field name and value. +""" class FieldStorage: + """Store a sequence of fields, reading multipart/form-data. + + This class provides naming, typing, files stored on disk, and + more. At the top level, it is accessible like a dictionary, whose + keys are the field names. (Note: None can occur as a field name.) + The items are either a Python list (if there's multiple values) or + another FieldStorage or MiniFieldStorage object. If it's a single + object, it has the following attributes: + + name: the field name, if specified; otherwise None + + filename: the filename, if specified; otherwise None; this is the + client side filename, *not* the file name on which it is + stored (that's a temporary file you don't deal with) + + value: the value as a *string*; for file uploads, this + transparently reads the file every time you request the value + and returns *bytes* + + file: the file(-like) object from which you can read the data *as + bytes* ; None if the data is stored a simple string + + type: the content-type, or None if not specified + + type_options: dictionary of options specified on the content-type + line + + disposition: content-disposition, or None if not specified + + disposition_options: dictionary of corresponding options + + headers: a dictionary(-like) object (sometimes email.message.Message or a + subclass thereof) containing *all* headers + + The class is subclassable, mostly for the purpose of overriding + the make_file() method, which is called internally to come up with + a file open for reading and writing. 
This makes it possible to + override the default choice of storing all files in a temporary + directory and unlinking them as soon as they have been opened. + + """ FieldStorageClass: _type | None keep_blank_values: int strict_parsing: int @@ -94,21 +202,101 @@ class FieldStorage: errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", - ) -> None: ... + ) -> None: + """Constructor. Read multipart/* until last part. + + Arguments, all optional: + + fp : file pointer; default: sys.stdin.buffer + (not used when the request method is GET) + Can be : + 1. a TextIOWrapper object + 2. an object whose read() and readline() methods return bytes + + headers : header dictionary-like object; default: + taken from environ as per CGI spec + + outerboundary : terminating multipart boundary + (for internal use only) + + environ : environment dictionary; default: os.environ + + keep_blank_values: flag indicating whether blank values in + percent-encoded forms should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + + limit : used internally to read parts of multipart/form-data forms, + to exit from the reading loop when reached. It is the difference + between the form content-length and the number of bytes already + read + + encoding, errors : the encoding and error handler used to decode the + binary stream to strings. Must be the same as the charset defined + for the page sending the form (content-type : meta http-equiv or + header) + + max_num_fields: int. If set, then __init__ throws a ValueError + if there are more than n fields read by parse_qsl(). + + """ def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... - def __getitem__(self, key: str) -> Any: ... - def getvalue(self, key: str, default: Any = None) -> Any: ... - def getfirst(self, key: str, default: Any = None) -> Any: ... - def getlist(self, key: str) -> _list[Any]: ... - def keys(self) -> _list[str]: ... - def __contains__(self, key: str) -> bool: ... - def __len__(self) -> int: ... + def __getitem__(self, key: str) -> Any: + """Dictionary style indexing. +""" + def getvalue(self, key: str, default: Any = None) -> Any: + """Dictionary style get() method, including 'value' lookup. +""" + def getfirst(self, key: str, default: Any = None) -> Any: + """ Return the first value received. +""" + def getlist(self, key: str) -> _list[Any]: + """ Return list of received values. +""" + def keys(self) -> _list[str]: + """Dictionary style keys() method. +""" + def __contains__(self, key: str) -> bool: + """Dictionary style __contains__ method. +""" + def __len__(self) -> int: + """Dictionary style len(x) support. +""" def __bool__(self) -> bool: ... def __del__(self) -> None: ... # Returns bytes or str IO depending on an internal flag - def make_file(self) -> IO[Any]: ... + def make_file(self) -> IO[Any]: + """Overridable: return a readable & writable file. 
+ + The file will be used as follows: + - data is written to it + - seek(0) + - data is read from it + + The file is opened in binary mode for files, in text mode + for other fields + + This version opens a temporary file for reading and writing, + and immediately deletes (unlinks) it. The trick (on Unix!) is + that the file can still be used, but it can't be opened by + another process, and it will automatically be deleted when it + is closed or when the current process terminates. + + If you want a more permanent file, you derive a class which + overrides this method. If you want a visible temporary file + that is nevertheless automatically deleted when the script + terminates, try defining a __del__ method in a derived class + which unlinks the temporary files you have created. + + """ def print_exception( type: type[BaseException] | None = None, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi index 5657258011598..9f02d6defa0f5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi @@ -1,3 +1,26 @@ +"""More comprehensive traceback formatting for Python scripts. + +To enable this module, do: + + import cgitb; cgitb.enable() + +at the top of your script. The optional arguments to enable() are: + + display - if true, tracebacks are displayed in the web browser + logdir - if set, tracebacks are written to files in this directory + context - number of lines of source code to show for each stack frame + format - 'text' or 'html' controls the output format + +By default, tracebacks are displayed but not saved, the context is 5 lines +and the output format is 'html' (for backwards compatibility with the +original use of this module) + +Alternatively, if you have caught an exception and want cgitb to display it +for you, call cgitb.handler(). The optional argument to handler() is a +3-item tuple (etype, evalue, etb) just like the value of sys.exc_info(). +The default handler displays output as HTML. + +""" from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType @@ -5,18 +28,30 @@ from typing import IO, Any, Final __UNDEF__: Final[object] # undocumented sentinel -def reset() -> str: ... # undocumented +def reset() -> str: # undocumented + """Return a string that resets the CGI and browser to a known state. +""" def small(text: str) -> str: ... # undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... # undocumented -def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: # undocumented + """Find the value for a given name in the given environment. +""" def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] -) -> list[tuple[str, str | None, Any]]: ... # undocumented -def html(einfo: OptExcInfo, context: int = 5) -> str: ... -def text(einfo: OptExcInfo, context: int = 5) -> str: ... +) -> list[tuple[str, str | None, Any]]: # undocumented + """Scan one logical line of Python and look up values of variables used. +""" +def html(einfo: OptExcInfo, context: int = 5) -> str: + """Return a nice HTML document describing a given traceback. +""" +def text(einfo: OptExcInfo, context: int = 5) -> str: + """Return a plain text document describing a given traceback. 
+""" class Hook: # undocumented + """A hook to replace sys.excepthook that shows tracebacks in HTML. +""" def __init__( self, display: int = 1, @@ -29,4 +64,10 @@ class Hook: # undocumented def handle(self, info: OptExcInfo | None = None) -> None: ... def handler(info: OptExcInfo | None = None) -> None: ... -def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: + """Install an exception handler that formats tracebacks as HTML. + + The optional argument 'display' can be set to 0 to suppress sending the + traceback to the browser, and 'logdir' can be set to a directory to cause + tracebacks to be written to files there. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi index 9788d35f680c1..2814433192b5c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi @@ -1,3 +1,52 @@ +"""Simple class to read IFF chunks. + +An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File +Format)) has the following structure: + ++----------------+ +| ID (4 bytes) | ++----------------+ +| size (4 bytes) | ++----------------+ +| data | +| ... | ++----------------+ + +The ID is a 4-byte string which identifies the type of chunk. + +The size field (a 32-bit value, encoded using big-endian byte order) +gives the size of the whole chunk, including the 8-byte header. + +Usually an IFF-type file consists of one or more chunks. The proposed +usage of the Chunk class defined here is to instantiate an instance at +the start of each chunk and read from the instance until it reaches +the end, after which a new instance can be instantiated. At the end +of the file, creating a new instance will fail with an EOFError +exception. + +Usage: +while True: + try: + chunk = Chunk(file) + except EOFError: + break + chunktype = chunk.getname() + while True: + data = chunk.read(nbytes) + if not data: + pass + # do something with data + +The interface is file-like. The implemented methods are: +read, close, seek, tell, isatty. +Extra methods are: skip() (called by close, skips to the end of the chunk), +getname() (returns the name (ID) of the chunk) + +The __init__ method has one required argument, a file-like object +(including a chunk instance), and one optional argument, a flag which +specifies whether or not chunks are aligned on 2-byte boundaries. The +default is 1, i.e. aligned. +""" from typing import IO class Chunk: @@ -10,11 +59,28 @@ class Chunk: offset: int seekable: bool def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... - def getname(self) -> bytes: ... - def getsize(self) -> int: ... + def getname(self) -> bytes: + """Return the name (ID) of the current chunk. +""" + def getsize(self) -> int: + """Return the size of the current chunk. +""" def close(self) -> None: ... def isatty(self) -> bool: ... - def seek(self, pos: int, whence: int = 0) -> None: ... + def seek(self, pos: int, whence: int = 0) -> None: + """Seek to specified position into the chunk. + Default position is 0 (start of chunk). + If the file is not seekable, this will result in an error. + """ def tell(self) -> int: ... - def read(self, size: int = -1) -> bytes: ... - def skip(self) -> None: ... 
+ def read(self, size: int = -1) -> bytes: + """Read at most size bytes from the chunk. + If size is omitted or negative, read until the end + of the chunk. + """ + def skip(self) -> None: + """Skip the rest of the chunk. + If you are not interested in the contents of the chunk, + this method should be called so that the file points to + the start of the next chunk. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi index a08addcf54389..4d9de751052a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi @@ -1,3 +1,6 @@ +"""This module provides access to mathematical functions for complex +numbers. +""" from typing import Final, SupportsComplex, SupportsFloat, SupportsIndex from typing_extensions import TypeAlias @@ -11,26 +14,91 @@ tau: Final[float] _C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex -def acos(z: _C, /) -> complex: ... -def acosh(z: _C, /) -> complex: ... -def asin(z: _C, /) -> complex: ... -def asinh(z: _C, /) -> complex: ... -def atan(z: _C, /) -> complex: ... -def atanh(z: _C, /) -> complex: ... -def cos(z: _C, /) -> complex: ... -def cosh(z: _C, /) -> complex: ... -def exp(z: _C, /) -> complex: ... -def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... -def isinf(z: _C, /) -> bool: ... -def isnan(z: _C, /) -> bool: ... -def log(x: _C, base: _C = ..., /) -> complex: ... -def log10(z: _C, /) -> complex: ... -def phase(z: _C, /) -> float: ... -def polar(z: _C, /) -> tuple[float, float]: ... -def rect(r: float, phi: float, /) -> complex: ... -def sin(z: _C, /) -> complex: ... -def sinh(z: _C, /) -> complex: ... -def sqrt(z: _C, /) -> complex: ... -def tan(z: _C, /) -> complex: ... -def tanh(z: _C, /) -> complex: ... -def isfinite(z: _C, /) -> bool: ... +def acos(z: _C, /) -> complex: + """Return the arc cosine of z. +""" +def acosh(z: _C, /) -> complex: + """Return the inverse hyperbolic cosine of z. +""" +def asin(z: _C, /) -> complex: + """Return the arc sine of z. +""" +def asinh(z: _C, /) -> complex: + """Return the inverse hyperbolic sine of z. +""" +def atan(z: _C, /) -> complex: + """Return the arc tangent of z. +""" +def atanh(z: _C, /) -> complex: + """Return the inverse hyperbolic tangent of z. +""" +def cos(z: _C, /) -> complex: + """Return the cosine of z. +""" +def cosh(z: _C, /) -> complex: + """Return the hyperbolic cosine of z. +""" +def exp(z: _C, /) -> complex: + """Return the exponential value e**z. +""" +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: + """Determine whether two complex numbers are close in value. + + rel_tol + maximum difference for being considered "close", relative to the + magnitude of the input values + abs_tol + maximum difference for being considered "close", regardless of the + magnitude of the input values + +Return True if a is close in value to b, and False otherwise. + +For the values to be considered close, the difference between them must be +smaller than at least one of the tolerances. + +-inf, inf and NaN behave similarly to the IEEE 754 Standard. That is, NaN is +not close to anything, even itself. inf and -inf are only close to themselves. +""" +def isinf(z: _C, /) -> bool: + """Checks if the real or imaginary part of z is infinite. +""" +def isnan(z: _C, /) -> bool: + """Checks if the real or imaginary part of z not a number (NaN). 
+""" +def log(x: _C, base: _C = ..., /) -> complex: + """log(z[, base]) -> the logarithm of z to the given base. + +If the base is not specified, returns the natural logarithm (base e) of z. +""" +def log10(z: _C, /) -> complex: + """Return the base-10 logarithm of z. +""" +def phase(z: _C, /) -> float: + """Return argument, also known as the phase angle, of a complex. +""" +def polar(z: _C, /) -> tuple[float, float]: + """Convert a complex from rectangular coordinates to polar coordinates. + +r is the distance from 0 and phi the phase angle. +""" +def rect(r: float, phi: float, /) -> complex: + """Convert from polar coordinates to rectangular coordinates. +""" +def sin(z: _C, /) -> complex: + """Return the sine of z. +""" +def sinh(z: _C, /) -> complex: + """Return the hyperbolic sine of z. +""" +def sqrt(z: _C, /) -> complex: + """Return the square root of z. +""" +def tan(z: _C, /) -> complex: + """Return the tangent of z. +""" +def tanh(z: _C, /) -> complex: + """Return the hyperbolic tangent of z. +""" +def isfinite(z: _C, /) -> bool: + """Return True if both the real and imaginary parts of z are finite, else False. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi index 6e84133572bf5..14b251712dcb1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi @@ -1,3 +1,46 @@ +"""A generic class to build line-oriented command interpreters. + +Interpreters constructed with this class obey the following conventions: + +1. End of file on input is processed as the command 'EOF'. +2. A command is parsed out of each line by collecting the prefix composed + of characters in the identchars member. +3. A command 'foo' is dispatched to a method 'do_foo()'; the do_ method + is passed a single argument consisting of the remainder of the line. +4. Typing an empty line repeats the last command. (Actually, it calls the + method 'emptyline', which may be overridden in a subclass.) +5. There is a predefined 'help' method. Given an argument 'topic', it + calls the command 'help_topic'. With no arguments, it lists all topics + with defined help_ functions, broken into up to three topics; documented + commands, miscellaneous help topics, and undocumented commands. +6. The command '?' is a synonym for 'help'. The command '!' is a synonym + for 'shell', if a do_shell method exists. +7. If completion is enabled, completing commands will be done automatically, + and completing of commands args is done by calling complete_foo() with + arguments text, line, begidx, endidx. text is string we are matching + against, all returned matches must begin with it. line is the current + input line (lstripped), begidx and endidx are the beginning and end + indexes of the text being matched, which could be used to provide + different completion depending upon which position the argument is in. + +The 'default' method may be overridden to intercept commands for which there +is no do_ method. + +The 'completedefault' method may be overridden to intercept completions for +commands that have no complete_ method. + +The data member 'self.ruler' sets the character used to draw separator lines +in the help messages. If empty, no ruler line is drawn. It defaults to "=". + +If the value of 'self.intro' is nonempty when the cmdloop method is called, +it is printed out on interpreter startup. This value may be overridden +via an optional argument to the cmdloop() method. 
+ +The data members 'self.doc_header', 'self.misc_header', and +'self.undoc_header' set the headers used for the help function's +listings of documented functions, miscellaneous topics, and undocumented +functions respectively. +""" from collections.abc import Callable from typing import IO, Any, Final from typing_extensions import LiteralString @@ -8,6 +51,17 @@ PROMPT: Final = "(Cmd) " IDENTCHARS: Final[LiteralString] # Too big to be `Literal` class Cmd: + """A simple framework for writing line-oriented command interpreters. + +These are often useful for test harnesses, administrative tools, and +prototypes that will later be wrapped in a more sophisticated interface. + +A Cmd instance or subclass instance is a line-oriented interpreter +framework. There is no good reason to instantiate Cmd itself; rather, +it's useful as a superclass of an interpreter class you define yourself +in order to inherit Cmd's methods and encapsulate action methods. + +""" prompt: str identchars: str ruler: str @@ -23,24 +77,94 @@ class Cmd: stdout: IO[str] cmdqueue: list[str] completekey: str - def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ... + def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: + """Instantiate a line-oriented interpreter framework. + +The optional argument 'completekey' is the readline name of a +completion key; it defaults to the Tab key. If completekey is +not None and the readline module is available, command completion +is done automatically. The optional arguments stdin and stdout +specify alternate input and output file objects; if not specified, +sys.stdin and sys.stdout are used. + +""" old_completer: Callable[[str, int], str | None] | None - def cmdloop(self, intro: Any | None = None) -> None: ... - def precmd(self, line: str) -> str: ... - def postcmd(self, stop: bool, line: str) -> bool: ... - def preloop(self) -> None: ... - def postloop(self) -> None: ... - def parseline(self, line: str) -> tuple[str | None, str | None, str]: ... - def onecmd(self, line: str) -> bool: ... - def emptyline(self) -> bool: ... - def default(self, line: str) -> None: ... - def completedefault(self, *ignored: Any) -> list[str]: ... + def cmdloop(self, intro: Any | None = None) -> None: + """Repeatedly issue a prompt, accept input, parse an initial prefix +off the received input, and dispatch to action methods, passing them +the remainder of the line as argument. + +""" + def precmd(self, line: str) -> str: + """Hook method executed just before the command line is +interpreted, but after the input prompt is generated and issued. + +""" + def postcmd(self, stop: bool, line: str) -> bool: + """Hook method executed just after a command dispatch is finished. +""" + def preloop(self) -> None: + """Hook method executed once when the cmdloop() method is called. +""" + def postloop(self) -> None: + """Hook method executed once when the cmdloop() method is about to +return. + +""" + def parseline(self, line: str) -> tuple[str | None, str | None, str]: + """Parse the line into a command name and a string containing +the arguments. Returns a tuple containing (command, args, line). +'command' and 'args' may be None if the line couldn't be parsed. +""" + def onecmd(self, line: str) -> bool: + """Interpret the argument as though it had been typed in response +to the prompt. 
+ +This may be overridden, but should not normally need to be; +see the precmd() and postcmd() methods for useful execution hooks. +The return value is a flag indicating whether interpretation of +commands by the interpreter should stop. + +""" + def emptyline(self) -> bool: + """Called when an empty line is entered in response to the prompt. + +If this method is not overridden, it repeats the last nonempty +command entered. + +""" + def default(self, line: str) -> None: + """Called on an input line when the command prefix is not recognized. + +If this method is not overridden, it prints an error message and +returns. + +""" + def completedefault(self, *ignored: Any) -> list[str]: + """Method called to complete an input line when no command-specific +complete_*() method is available. + +By default, it returns an empty list. + +""" def completenames(self, text: str, *ignored: Any) -> list[str]: ... completion_matches: list[str] | None - def complete(self, text: str, state: int) -> list[str] | None: ... + def complete(self, text: str, state: int) -> list[str] | None: + """Return the next possible completion for 'text'. + +If a command has not been entered, then complete against command list. +Otherwise try to call complete_ to get list of completions. +""" def get_names(self) -> list[str]: ... # Only the first element of args matters. def complete_help(self, *args: Any) -> list[str]: ... - def do_help(self, arg: str) -> bool | None: ... + def do_help(self, arg: str) -> bool | None: + """List available commands with "help" or detailed help with "help cmd". +""" def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... - def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ... + def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: + """Display a list of strings as a compact set of columns. + +Each column is only as wide as necessary. +Columns are separated by two spaces (one was not legible enough). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi index 0b13c8a5016d4..36af4c6e1b79d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi @@ -1,3 +1,6 @@ +"""Utilities needed to emulate Python's interactive interpreter. + +""" import sys from codeop import CommandCompiler, compile_command as compile_command from collections.abc import Callable @@ -7,34 +10,195 @@ from typing import Any __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] class InteractiveInterpreter: + """Base class for InteractiveConsole. + +This class deals with parsing and interpreter state (the user's +namespace); it doesn't deal with input buffering or prompting or +input file naming (the filename is always passed in explicitly). + +""" locals: dict[str, Any] # undocumented compile: CommandCompiler # undocumented - def __init__(self, locals: dict[str, Any] | None = None) -> None: ... - def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... - def runcode(self, code: CodeType) -> None: ... + def __init__(self, locals: dict[str, Any] | None = None) -> None: + """Constructor. + +The optional 'locals' argument specifies a mapping to use as the +namespace in which code will be executed; it defaults to a newly +created dictionary with key "__name__" set to "__console__" and +key "__doc__" set to None. 
+ +""" + def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: + """Compile and run some source in the interpreter. + +Arguments are as for compile_command(). + +One of several things can happen: + +1) The input is incorrect; compile_command() raised an +exception (SyntaxError or OverflowError). A syntax traceback +will be printed by calling the showsyntaxerror() method. + +2) The input is incomplete, and more input is required; +compile_command() returned None. Nothing happens. + +3) The input is complete; compile_command() returned a code +object. The code is executed by calling self.runcode() (which +also handles run-time exceptions, except for SystemExit). + +The return value is True in case 2, False in the other cases (unless +an exception is raised). The return value can be used to +decide whether to use sys.ps1 or sys.ps2 to prompt the next +line. + +""" + def runcode(self, code: CodeType) -> None: + """Execute a code object. + +When an exception occurs, self.showtraceback() is called to +display a traceback. All exceptions are caught except +SystemExit, which is reraised. + +A note about KeyboardInterrupt: this exception may occur +elsewhere in this code, and may not always be caught. The +caller should be prepared to deal with it. + +""" if sys.version_info >= (3, 13): - def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: ... + def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: + """Display the syntax error that just occurred. + +This doesn't display a stack trace because there isn't one. + +If a filename is given, it is stuffed in the exception instead +of what was there before (because Python's parser always uses +"" when reading from a string). + +The output is written by self.write(), below. + +""" else: - def showsyntaxerror(self, filename: str | None = None) -> None: ... + def showsyntaxerror(self, filename: str | None = None) -> None: + """Display the syntax error that just occurred. + + This doesn't display a stack trace because there isn't one. + + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "" when reading from a string). + + The output is written by self.write(), below. + + """ + + def showtraceback(self) -> None: + """Display the exception that just occurred. + +We remove the first stack item because it is our own code. + +The output is written by self.write(), below. + +""" + def write(self, data: str) -> None: + """Write a string. - def showtraceback(self) -> None: ... - def write(self, data: str) -> None: ... +The base implementation writes to sys.stderr; a subclass may +replace this with a different implementation. + +""" class InteractiveConsole(InteractiveInterpreter): + """Closely emulate the behavior of the interactive Python interpreter. + +This class builds on InteractiveInterpreter and adds prompting +using the familiar sys.ps1 and sys.ps2, and input buffering. + +""" buffer: list[str] # undocumented filename: str # undocumented if sys.version_info >= (3, 13): def __init__( self, locals: dict[str, Any] | None = None, filename: str = "", *, local_exit: bool = False - ) -> None: ... - def push(self, line: str, filename: str | None = None) -> bool: ... + ) -> None: + """Constructor. + +The optional locals argument will be passed to the +InteractiveInterpreter base class. 
+ +The optional filename argument should specify the (file)name +of the input stream; it will show up in tracebacks. + +""" + def push(self, line: str, filename: str | None = None) -> bool: + """Push a line to the interpreter. + +The line should not have a trailing newline; it may have +internal newlines. The line is appended to a buffer and the +interpreter's runsource() method is called with the +concatenated contents of the buffer as source. If this +indicates that the command was executed or invalid, the buffer +is reset; otherwise, the command is incomplete, and the buffer +is left as it was after the line was appended. The return +value is 1 if more input is required, 0 if the line was dealt +with in some way (this is the same as runsource()). + +""" else: - def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: ... - def push(self, line: str) -> bool: ... + def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: + """Constructor. + + The optional locals argument will be passed to the + InteractiveInterpreter base class. + + The optional filename argument should specify the (file)name + of the input stream; it will show up in tracebacks. + + """ + def push(self, line: str) -> bool: + """Push a line to the interpreter. + + The line should not have a trailing newline; it may have + internal newlines. The line is appended to a buffer and the + interpreter's runsource() method is called with the + concatenated contents of the buffer as source. If this + indicates that the command was executed or invalid, the buffer + is reset; otherwise, the command is incomplete, and the buffer + is left as it was after the line was appended. The return + value is 1 if more input is required, 0 if the line was dealt + with in some way (this is the same as runsource()). + + """ + + def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: + """Closely emulate the interactive Python console. - def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... - def resetbuffer(self) -> None: ... - def raw_input(self, prompt: str = "") -> str: ... +The optional banner argument specifies the banner to print +before the first interaction; by default it prints a banner +similar to the one printed by the real Python interpreter, +followed by the current class name in parentheses (so as not +to confuse this with the real interpreter -- since it's so +close!). + +The optional exitmsg argument specifies the exit message +printed when exiting. Pass the empty string to suppress +printing an exit message. If exitmsg is not given or None, +a default message is printed. + +""" + def resetbuffer(self) -> None: + """Reset the input buffer. +""" + def raw_input(self, prompt: str = "") -> str: + """Write a prompt and read a line. + +The returned line does not include the trailing newline. +When the user enters the EOF key sequence, EOFError is raised. + +The base implementation uses the built-in function +input(); a subclass may replace this with a different +implementation. + +""" if sys.version_info >= (3, 13): def interact( @@ -43,7 +207,22 @@ if sys.version_info >= (3, 13): local: dict[str, Any] | None = None, exitmsg: str | None = None, local_exit: bool = False, - ) -> None: ... + ) -> None: + """Closely emulate the interactive Python interpreter. + +This is a backwards compatible interface to the InteractiveConsole +class. 
When readfunc is not specified, it attempts to import the +readline module to enable GNU readline if it is available. + +Arguments (all optional, all default to None): + +banner -- passed to InteractiveConsole.interact() +readfunc -- if not None, replaces InteractiveConsole.raw_input() +local -- passed to InteractiveInterpreter.__init__() +exitmsg -- passed to InteractiveConsole.interact() +local_exit -- passed to InteractiveConsole.__init__() + +""" else: def interact( @@ -51,4 +230,18 @@ else: readfunc: Callable[[str], str] | None = None, local: dict[str, Any] | None = None, exitmsg: str | None = None, - ) -> None: ... + ) -> None: + """Closely emulate the interactive Python interpreter. + + This is a backwards compatible interface to the InteractiveConsole + class. When readfunc is not specified, it attempts to import the + readline module to enable GNU readline if it is available. + + Arguments (all optional, all default to None): + + banner -- passed to InteractiveConsole.interact() + readfunc -- if not None, replaces InteractiveConsole.raw_input() + local -- passed to InteractiveInterpreter.__init__() + exitmsg -- passed to InteractiveConsole.interact() + + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi index fa4d4fd4ba928..fc31827455d40 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi @@ -1,3 +1,11 @@ +"""codecs -- Python Codec Registry, API and helpers. + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import sys import types from _codecs import * @@ -125,6 +133,8 @@ class _BufferedIncrementalDecoder(Protocol): if sys.version_info >= (3, 12): class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + """Codec details when looking up the codec registry +""" _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -155,6 +165,8 @@ if sys.version_info >= (3, 12): else: @disjoint_base class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + """Codec details when looking up the codec registry +""" _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -182,21 +194,126 @@ else: _is_text_encoding: bool | None = None, ) -> Self: ... -def getencoder(encoding: str) -> _Encoder: ... -def getdecoder(encoding: str) -> _Decoder: ... -def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... +def getencoder(encoding: str) -> _Encoder: + """Lookup up the codec for the given encoding and return +its encoder function. + +Raises a LookupError in case the encoding cannot be found. + +""" +def getdecoder(encoding: str) -> _Decoder: + """Lookup up the codec for the given encoding and return +its decoder function. + +Raises a LookupError in case the encoding cannot be found. + +""" +def getincrementalencoder(encoding: str) -> _IncrementalEncoder: + """Lookup up the codec for the given encoding and return +its IncrementalEncoder class or factory function. + +Raises a LookupError in case the encoding cannot be found +or the codecs doesn't provide an incremental encoder. + +""" @overload -def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: ... +def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: + """Lookup up the codec for the given encoding and return +its IncrementalDecoder class or factory function. 
+ +Raises a LookupError in case the encoding cannot be found +or the codecs doesn't provide an incremental decoder. + +""" @overload def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... -def getreader(encoding: str) -> _StreamReader: ... -def getwriter(encoding: str) -> _StreamWriter: ... +def getreader(encoding: str) -> _StreamReader: + """Lookup up the codec for the given encoding and return +its StreamReader class or factory function. + +Raises a LookupError in case the encoding cannot be found. + +""" +def getwriter(encoding: str) -> _StreamWriter: + """Lookup up the codec for the given encoding and return +its StreamWriter class or factory function. + +Raises a LookupError in case the encoding cannot be found. + +""" def open( filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 -) -> StreamReaderWriter: ... -def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... -def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... -def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... +) -> StreamReaderWriter: + """Open an encoded file using the given mode and return +a wrapped version providing transparent encoding/decoding. + +Note: The wrapped version will only accept the object format +defined by the codecs, i.e. Unicode objects for most builtin +codecs. Output is also codec dependent and will usually be +Unicode as well. + +If encoding is not None, then the +underlying encoded files are always opened in binary mode. +The default file mode is 'r', meaning to open the file in read mode. + +encoding specifies the encoding which is to be used for the +file. + +errors may be given to define the error handling. It defaults +to 'strict' which causes ValueErrors to be raised in case an +encoding error occurs. + +buffering has the same meaning as for the builtin open() API. +It defaults to -1 which means that the default buffer size will +be used. + +The returned wrapped file object provides an extra attribute +.encoding which allows querying the used encoding. This +attribute is only available if an encoding was specified as +parameter. +""" +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: + """Return a wrapped version of file which provides transparent +encoding translation. + +Data written to the wrapped file is decoded according +to the given data_encoding and then encoded to the underlying +file using file_encoding. The intermediate data type +will usually be Unicode but depends on the specified codecs. + +Bytes read from the file are decoded using file_encoding and then +passed back to the caller encoded using data_encoding. + +If file_encoding is not given, it defaults to data_encoding. + +errors may be given to define the error handling. It defaults +to 'strict' which causes ValueErrors to be raised in case an +encoding error occurs. + +The returned wrapped file object provides two extra attributes +.data_encoding and .file_encoding which reflect the given +parameters of the same name. The attributes can be used for +introspection by Python programs. + +""" +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: + """ +Encoding iterator. 
+ +Encodes the input strings from the iterator using an IncrementalEncoder. + +errors and kwargs are passed through to the IncrementalEncoder +constructor. +""" +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: + """ +Decoding iterator. + +Decodes the input strings from the iterator using an IncrementalDecoder. + +errors and kwargs are passed through to the IncrementalDecoder +constructor. +""" BOM: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` BOM_BE: Final = b"\xfe\xff" @@ -209,40 +326,173 @@ BOM_UTF32: Final[Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"]] # depends o BOM_UTF32_BE: Final = b"\x00\x00\xfe\xff" BOM_UTF32_LE: Final = b"\xff\xfe\x00\x00" -def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... -def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... -def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... -def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... -def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... -def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: ... +def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'strict' error handling, which raises a UnicodeError on coding errors. +""" +def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'replace' error handling, which replaces malformed data with a replacement marker. +""" +def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'ignore' error handling, which ignores malformed data and continues. +""" +def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'xmlcharrefreplace' error handling, which replaces an unencodable character with the appropriate XML character reference. +""" +def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'backslashreplace' error handling, which replaces malformed data with a backslashed escape sequence. +""" +def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: + """Implements the 'namereplace' error handling, which replaces an unencodable character with a \\N{...} escape sequence. +""" class Codec: + """Defines the interface for stateless encoders/decoders. + +The .encode()/.decode() methods may use different error +handling schemes by providing the errors argument. These +string values are predefined: + + 'strict' - raise a ValueError error (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace' - replace with a suitable replacement character; + Python will use the official U+FFFD REPLACEMENT + CHARACTER for the builtin Unicode codecs on + decoding and '?' on encoding. + 'surrogateescape' - replace with private code points U+DCnn. + 'xmlcharrefreplace' - Replace with the appropriate XML + character reference (only for encoding). + 'backslashreplace' - Replace with backslashed escape sequences. + 'namereplace' - Replace with \\N{...} escape sequences + (only for encoding). + +The set of allowed values can be extended via register_error. + +""" # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. - def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... 
- def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: + """Encodes the object input and returns a tuple (output +object, length consumed). + +errors defines the error handling to apply. It defaults to +'strict' handling. + +The method may not store state in the Codec instance. Use +StreamWriter for codecs which have to keep state in order to +make encoding efficient. + +The encoder must be able to handle zero length input and +return an empty object of the output object type in this +situation. + +""" + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: + """Decodes the object input and returns a tuple (output +object, length consumed). + +input must be an object which provides the bf_getreadbuf +buffer slot. Python strings, buffer objects and memory +mapped files are examples of objects providing this slot. + +errors defines the error handling to apply. It defaults to +'strict' handling. + +The method may not store state in the Codec instance. Use +StreamReader for codecs which have to keep state in order to +make decoding efficient. + +The decoder must be able to handle zero length input and +return an empty object of the output object type in this +situation. + +""" class IncrementalEncoder: + """ +An IncrementalEncoder encodes an input in multiple steps. The input can +be passed piece by piece to the encode() method. The IncrementalEncoder +remembers the state of the encoding process between calls to encode(). +""" errors: str - def __init__(self, errors: str = "strict") -> None: ... + def __init__(self, errors: str = "strict") -> None: + """ +Creates an IncrementalEncoder instance. + +The IncrementalEncoder may use different error handling schemes by +providing the errors keyword argument. See the module docstring +for a list of possible values. +""" @abstractmethod - def encode(self, input: str, final: bool = False) -> bytes: ... - def reset(self) -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: + """ +Encodes input and returns the resulting object. +""" + def reset(self) -> None: + """ +Resets the encoder to the initial state. +""" # documentation says int but str is needed for the subclass. - def getstate(self) -> int | str: ... - def setstate(self, state: int | str) -> None: ... + def getstate(self) -> int | str: + """ +Return the current state of the encoder. +""" + def setstate(self, state: int | str) -> None: + """ +Set the current state of the encoder. state must have been +returned by getstate(). +""" class IncrementalDecoder: + """ +An IncrementalDecoder decodes an input in multiple steps. The input can +be passed piece by piece to the decode() method. The IncrementalDecoder +remembers the state of the decoding process between calls to decode(). +""" errors: str - def __init__(self, errors: str = "strict") -> None: ... + def __init__(self, errors: str = "strict") -> None: + """ +Create an IncrementalDecoder instance. + +The IncrementalDecoder may use different error handling schemes by +providing the errors keyword argument. See the module docstring +for a list of possible values. +""" @abstractmethod - def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... - def reset(self) -> None: ... - def getstate(self) -> tuple[bytes, int]: ... - def setstate(self, state: tuple[bytes, int]) -> None: ... 
+ def decode(self, input: ReadableBuffer, final: bool = False) -> str: + """ +Decode input and returns the resulting object. +""" + def reset(self) -> None: + """ +Reset the decoder to the initial state. +""" + def getstate(self) -> tuple[bytes, int]: + """ +Return the current state of the decoder. + +This must be a (buffered_input, additional_state_info) tuple. +buffered_input must be a bytes object containing bytes that +were passed to decode() that have not yet been converted. +additional_state_info must be a non-negative integer +representing the state of the decoder WITHOUT yet having +processed the contents of buffered_input. In the initial state +and after reset(), getstate() must return (b"", 0). +""" + def setstate(self, state: tuple[bytes, int]) -> None: + """ +Set the current state of the decoder. + +state must have been returned by getstate(). The effect of +setstate((b"", 0)) must be equivalent to reset(). +""" # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): + """ +This subclass of IncrementalEncoder can be used as the baseclass for an +incremental encoder if the encoder must keep some of the output in a +buffer between calls to encode(). +""" buffer: str def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -250,6 +500,11 @@ class BufferedIncrementalEncoder(IncrementalEncoder): def encode(self, input: str, final: bool = False) -> bytes: ... class BufferedIncrementalDecoder(IncrementalDecoder): + """ +This subclass of IncrementalDecoder can be used as the baseclass for an +incremental decoder if the decoder must be able to handle incomplete +byte sequences. +""" buffer: bytes def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -261,41 +516,170 @@ class BufferedIncrementalDecoder(IncrementalDecoder): class StreamWriter(Codec): stream: _WritableStream errors: str - def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... - def write(self, object: str) -> None: ... - def writelines(self, list: Iterable[str]) -> None: ... - def reset(self) -> None: ... + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: + """Creates a StreamWriter instance. + +stream must be a file-like object open for writing. + +The StreamWriter may use different error handling +schemes by providing the errors keyword argument. These +parameters are predefined: + + 'strict' - raise a ValueError (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace'- replace with a suitable replacement character + 'xmlcharrefreplace' - Replace with the appropriate XML + character reference. + 'backslashreplace' - Replace with backslashed escape + sequences. + 'namereplace' - Replace with \\N{...} escape sequences. + +The set of allowed parameter values can be extended via +register_error. +""" + def write(self, object: str) -> None: + """Writes the object's contents encoded to self.stream. + """ + def writelines(self, list: Iterable[str]) -> None: + """Writes the concatenated list of strings to the stream +using .write(). +""" + def reset(self) -> None: + """Resets the codec buffers used for keeping internal state. + +Calling this method should ensure that the data on the +output is put into a clean state, that allows appending +of new fresh data without having to rescan the whole +stream to recover state. + +""" def seek(self, offset: int, whence: int = 0) -> None: ... def __enter__(self) -> Self: ... 
def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: + """Inherit all other methods from the underlying stream. + """ class StreamReader(Codec): stream: _ReadableStream errors: str # This is set to str, but some subclasses set to bytes instead. charbuffertype: ClassVar[type] = ... - def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... - def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... - def readline(self, size: int | None = None, keepends: bool = True) -> str: ... - def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... - def reset(self) -> None: ... - def seek(self, offset: int, whence: int = 0) -> None: ... + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: + """Creates a StreamReader instance. + +stream must be a file-like object open for reading. + +The StreamReader may use different error handling +schemes by providing the errors keyword argument. These +parameters are predefined: + + 'strict' - raise a ValueError (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace'- replace with a suitable replacement character + 'backslashreplace' - Replace with backslashed escape sequences; + +The set of allowed parameter values can be extended via +register_error. +""" + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: + """Decodes data from the stream self.stream and returns the +resulting object. + +chars indicates the number of decoded code points or bytes to +return. read() will never return more data than requested, +but it might return less, if there is not enough available. + +size indicates the approximate maximum number of decoded +bytes or code points to read for decoding. The decoder +can modify this setting as appropriate. The default value +-1 indicates to read and decode as much as possible. size +is intended to prevent having to decode huge files in one +step. + +If firstline is true, and a UnicodeDecodeError happens +after the first line terminator in the input only the first line +will be returned, the rest of the input will be kept until the +next call to read(). + +The method should use a greedy read strategy, meaning that +it should read as much data as is allowed within the +definition of the encoding and the given size, e.g. if +optional encoding endings or state markers are available +on the stream, these should be read too. +""" + def readline(self, size: int | None = None, keepends: bool = True) -> str: + """Read one line from the input stream and return the +decoded data. + +size, if given, is passed as size argument to the +read() method. + +""" + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: + """Read all lines available on the input stream +and return them as a list. + +Line breaks are implemented using the codec's decoder +method and are included in the list entries. + +sizehint, if given, is ignored since there is no efficient +way of finding the true end-of-line. + +""" + def reset(self) -> None: + """Resets the codec buffers used for keeping internal state. + +Note that no stream repositioning should take place. +This method is primarily intended to be able to recover +from decoding errors. 
+ +""" + def seek(self, offset: int, whence: int = 0) -> None: + """Set the input stream's current position. + +Resets the codec buffers used for keeping state. +""" def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __iter__(self) -> Self: ... - def __next__(self) -> str: ... - def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: ... + def __next__(self) -> str: + """Return the next decoded line from the input stream. +""" + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: + """Inherit all other methods from the underlying stream. + """ # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): + """StreamReaderWriter instances allow wrapping streams which +work in both read and write modes. + +The design is such that one can use the factory functions +returned by the codec.lookup() function to construct the +instance. + +""" stream: _Stream - def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ... + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: + """Creates a StreamReaderWriter instance. + +stream must be a Stream-like object. + +Reader, Writer must be factory functions or classes +providing the StreamReader, StreamWriter interface resp. + +Error handling is done in the same way as defined for the +StreamWriter/Readers. + +""" def read(self, size: int = -1) -> str: ... def readline(self, size: int | None = None) -> str: ... def readlines(self, sizehint: int | None = None) -> list[str]: ... - def __next__(self) -> str: ... + def __next__(self) -> str: + """Return the next decoded line from the input stream. +""" def __iter__(self) -> Self: ... def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... @@ -303,7 +687,9 @@ class StreamReaderWriter(TextIO): def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __getattr__(self, name: str) -> Any: ... + def __getattr__(self, name: str) -> Any: + """Inherit all other methods from the underlying stream. + """ # These methods don't actually exist directly, but they are needed to satisfy the TextIO # interface. At runtime, they are delegated through __getattr__. def close(self) -> None: ... @@ -317,6 +703,20 @@ class StreamReaderWriter(TextIO): def writable(self) -> bool: ... class StreamRecoder(BinaryIO): + """StreamRecoder instances translate data from one encoding to another. + +They use the complete set of APIs returned by the +codecs.lookup() function to implement their task. + +Data written to the StreamRecoder is first decoded into an +intermediate format (depending on the "decode" codec) and then +written to the underlying stream using an instance of the provided +Writer class. + +In the other direction, data is read from the underlying stream using +a Reader instance and then encoded and returned to the caller. 
+ +""" data_encoding: str file_encoding: str def __init__( @@ -327,17 +727,39 @@ class StreamRecoder(BinaryIO): Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict", - ) -> None: ... + ) -> None: + """Creates a StreamRecoder instance which implements a two-way +conversion: encode and decode work on the frontend (the +data visible to .read() and .write()) while Reader and Writer +work on the backend (the data in stream). + +You can use these objects to do transparent +transcodings from e.g. latin-1 to utf-8 and back. + +stream must be a file-like object. + +encode and decode must adhere to the Codec interface; Reader and +Writer must be factory functions or classes providing the +StreamReader and StreamWriter interfaces resp. + +Error handling is done in the same way as defined for the +StreamWriter/Readers. + +""" def read(self, size: int = -1) -> bytes: ... def readline(self, size: int | None = None) -> bytes: ... def readlines(self, sizehint: int | None = None) -> list[bytes]: ... - def __next__(self) -> bytes: ... + def __next__(self) -> bytes: + """Return the next decoded line from the input stream. +""" def __iter__(self) -> Self: ... # Base class accepts more types than just bytes def write(self, data: bytes) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[bytes]) -> None: ... # type: ignore[override] def reset(self) -> None: ... - def __getattr__(self, name: str) -> Any: ... + def __getattr__(self, name: str) -> Any: + """Inherit all other methods from the underlying stream. + """ def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi index 8e311343eb89d..95dc4d4cefc85 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi @@ -1,15 +1,89 @@ +"""Utilities to compile possibly incomplete Python source code. + +This module provides two interfaces, broadly similar to the builtin +function compile(), which take program text, a filename and a 'mode' +and: + +- Return code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + +The two interfaces are: + +compile_command(source, filename, symbol): + + Compiles a single command in the manner described above. + +CommandCompiler(): + + Instances of this class have __call__ methods identical in + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force. + +The module also provides another class: + +Compile(): + + Instances of this class act like the built-in function compile, + but with 'memory' in the sense described above. +""" import sys from types import CodeType __all__ = ["compile_command", "Compile", "CommandCompiler"] if sys.version_info >= (3, 14): - def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: ... 
+ def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: + """Compile a command and determine whether it is incomplete. + +Arguments: + +source -- the source string; may contain \\n characters +filename -- optional filename from which source was read; default + "" +symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" + +Return value / exceptions raised: + +- Return a code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). +""" else: - def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... + def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: + """Compile a command and determine whether it is incomplete. + +Arguments: + +source -- the source string; may contain \\n characters +filename -- optional filename from which source was read; default + "" +symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" + +Return value / exceptions raised: + +- Return a code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). +""" class Compile: + """Instances of this class behave much like the built-in compile +function, but if one is used to compile text containing a future +statement, it "remembers" and compiles all subsequent program texts +with the statement in force. +""" flags: int if sys.version_info >= (3, 13): def __call__(self, source: str, filename: str, symbol: str, flags: int = 0) -> CodeType: ... @@ -17,5 +91,29 @@ class Compile: def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... class CommandCompiler: + """Instances of this class have __call__ methods identical in +signature to compile_command; the difference is that if the +instance compiles program text containing a __future__ statement, +the instance 'remembers' and compiles all subsequent program texts +with the statement in force. +""" compiler: Compile - def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... + def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: + """Compile a command and determine whether it is incomplete. + +Arguments: + +source -- the source string; may contain \\n characters +filename -- optional filename from which source was read; + default "" +symbol -- optional grammar start symbol; "single" (default) or + "eval" + +Return value / exceptions raised: + +- Return a code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). 
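A short sketch of the contract spelled out in the compile_command docstrings above: complete source yields a code object, incomplete source yields None, and malformed source raises.

import codeop

ready = codeop.compile_command("x = 1 + 2")   # complete statement -> code object
print(type(ready))                            # <class 'code'>

pending = codeop.compile_command("if x:")     # incomplete -> None, keep buffering input
print(pending)                                # None

try:
    codeop.compile_command("a b c")           # not valid Python in any continuation
except SyntaxError:
    print("syntax error")

CommandCompiler behaves the same way per call, but remembers any __future__ statements it has already compiled.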
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi index 8636e6cdbdc31..1da0fbd790fef 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi @@ -1,3 +1,18 @@ +"""This module implements specialized container datatypes providing +alternatives to Python's general purpose built-in containers, dict, +list, set, and tuple. + +* namedtuple factory function for creating tuple subclasses with named fields +* deque list-like container with fast appends and pops on either end +* ChainMap dict-like class for creating a single view of multiple mappings +* Counter dict subclass for counting hashable objects +* OrderedDict dict subclass that remembers the order entries were added +* defaultdict dict subclass that calls a factory function to supply missing values +* UserDict wrapper around dictionary objects for easier dict subclassing +* UserList wrapper around list objects for easier list subclassing +* UserString wrapper around string objects for easier string subclassing + +""" import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT @@ -40,7 +55,29 @@ def namedtuple( rename: bool = False, module: str | None = None, defaults: Iterable[Any] | None = None, -) -> type[tuple[Any, ...]]: ... +) -> type[tuple[Any, ...]]: + """Returns a new subclass of tuple with named fields. + +>>> Point = namedtuple('Point', ['x', 'y']) +>>> Point.__doc__ # docstring for the new class +'Point(x, y)' +>>> p = Point(11, y=22) # instantiate with positional args or keywords +>>> p[0] + p[1] # indexable like a plain tuple +33 +>>> x, y = p # unpack like a regular tuple +>>> x, y +(11, 22) +>>> p.x + p.y # fields also accessible by name +33 +>>> d = p._asdict() # convert to a dictionary +>>> d['x'] +11 +>>> Point(**d) # convert from a dictionary +Point(x=11, y=22) +>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields +Point(x=100, y=22) + +""" class UserDict(MutableMapping[_KT, _VT]): data: dict[_KT, _VT] @@ -113,6 +150,8 @@ class UserDict(MutableMapping[_KT, _VT]): def get(self, key: _KT, default: _T) -> _VT | _T: ... class UserList(MutableSequence[_T]): + """A more or less complete user-defined wrapper around list objects. +""" data: list[_T] @overload def __init__(self, initlist: None = None) -> None: ... @@ -233,60 +272,208 @@ class UserString(Sequence[UserString]): @disjoint_base class deque(MutableSequence[_T]): + """A list-like sequence optimized for data accesses near its endpoints. +""" @property - def maxlen(self) -> int | None: ... + def maxlen(self) -> int | None: + """maximum size of a deque or None if unbounded +""" @overload def __init__(self, *, maxlen: int | None = None) -> None: ... @overload def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... - def append(self, x: _T, /) -> None: ... - def appendleft(self, x: _T, /) -> None: ... - def copy(self) -> Self: ... - def count(self, x: _T, /) -> int: ... - def extend(self, iterable: Iterable[_T], /) -> None: ... - def extendleft(self, iterable: Iterable[_T], /) -> None: ... - def insert(self, i: int, x: _T, /) -> None: ... - def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ... - def pop(self) -> _T: ... # type: ignore[override] - def popleft(self) -> _T: ... 
- def remove(self, value: _T, /) -> None: ... - def rotate(self, n: int = 1, /) -> None: ... - def __copy__(self) -> Self: ... - def __len__(self) -> int: ... + def append(self, x: _T, /) -> None: + """Add an element to the right side of the deque. +""" + def appendleft(self, x: _T, /) -> None: + """Add an element to the left side of the deque. +""" + def copy(self) -> Self: + """Return a shallow copy of a deque. +""" + def count(self, x: _T, /) -> int: + """Return number of occurrences of value. +""" + def extend(self, iterable: Iterable[_T], /) -> None: + """Extend the right side of the deque with elements from the iterable. +""" + def extendleft(self, iterable: Iterable[_T], /) -> None: + """Extend the left side of the deque with elements from the iterable. +""" + def insert(self, i: int, x: _T, /) -> None: + """Insert value before index. +""" + def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: + """Return first index of value. + +Raises ValueError if the value is not present. +""" + def pop(self) -> _T: # type: ignore[override] + """Remove and return the rightmost element. +""" + def popleft(self) -> _T: + """Remove and return the leftmost element. +""" + def remove(self, value: _T, /) -> None: + """Remove first occurrence of value. +""" + def rotate(self, n: int = 1, /) -> None: + """Rotate the deque n steps to the right. If n is negative, rotates left. +""" + def __copy__(self) -> Self: + """Return a shallow copy of a deque. +""" + def __len__(self) -> int: + """Return len(self). +""" __hash__: ClassVar[None] # type: ignore[assignment] # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores - def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override] - def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override] - def __delitem__(self, key: SupportsIndex, /) -> None: ... # type: ignore[override] - def __contains__(self, key: object, /) -> bool: ... - def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... - def __iadd__(self, value: Iterable[_T], /) -> Self: ... - def __add__(self, value: Self, /) -> Self: ... - def __mul__(self, value: int, /) -> Self: ... - def __imul__(self, value: int, /) -> Self: ... + def __getitem__(self, key: SupportsIndex, /) -> _T: # type: ignore[override] + """Return self[key]. +""" + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: # type: ignore[override] + """Set self[key] to value. +""" + def __delitem__(self, key: SupportsIndex, /) -> None: # type: ignore[override] + """Delete self[key]. +""" + def __contains__(self, key: object, /) -> bool: + """Return bool(key in self). +""" + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: + """Return state information for pickling. +""" + def __iadd__(self, value: Iterable[_T], /) -> Self: + """Implement self+=value. +""" + def __add__(self, value: Self, /) -> Self: + """Return self+value. +""" + def __mul__(self, value: int, /) -> Self: + """Return self*value. +""" + def __imul__(self, value: int, /) -> Self: + """Implement self*=value. +""" def __lt__(self, value: deque[_T], /) -> bool: ... def __le__(self, value: deque[_T], /) -> bool: ... def __gt__(self, value: deque[_T], /) -> bool: ... def __ge__(self, value: deque[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
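A tiny sketch exercising the deque operations documented above: appends and pops at both ends, rotation, and the bounded maxlen behaviour.

from collections import deque

d = deque([1, 2, 3], maxlen=4)  # bounded: appending to a full deque discards from the far end
d.appendleft(0)                 # deque([0, 1, 2, 3], maxlen=4)
d.append(4)                     # 0 falls off the left: deque([1, 2, 3, 4], maxlen=4)
d.rotate(1)                     # rotate right: deque([4, 1, 2, 3], maxlen=4)
print(d.popleft(), d.pop())     # 4 3
print(d)                        # deque([1, 2], maxlen=4)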
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" class Counter(dict[_T, int], Generic[_T]): + """Dict subclass for counting hashable items. Sometimes called a bag +or multiset. Elements are stored as dictionary keys and their counts +are stored as dictionary values. + +>>> c = Counter('abcdeabcdabcaba') # count elements from a string + +>>> c.most_common(3) # three most common elements +[('a', 5), ('b', 4), ('c', 3)] +>>> sorted(c) # list all unique elements +['a', 'b', 'c', 'd', 'e'] +>>> ''.join(sorted(c.elements())) # list elements with repetitions +'aaaaabbbbcccdde' +>>> sum(c.values()) # total of all counts +15 + +>>> c['a'] # count of letter 'a' +5 +>>> for elem in 'shazam': # update counts from an iterable +... c[elem] += 1 # by adding 1 to each element's count +>>> c['a'] # now there are seven 'a' +7 +>>> del c['b'] # remove all 'b' +>>> c['b'] # now there are zero 'b' +0 + +>>> d = Counter('simsalabim') # make another counter +>>> c.update(d) # add in the second counter +>>> c['a'] # now there are nine 'a' +9 + +>>> c.clear() # empty the counter +>>> c +Counter() + +Note: If a count is set to zero or reduced to zero, it will remain +in the counter until the entry is deleted or the counter is cleared: + +>>> c = Counter('aaabbc') +>>> c['b'] -= 2 # reduce the count of 'b' by two +>>> c.most_common() # 'b' is still in, but its count is zero +[('a', 3), ('c', 1), ('b', 0)] + +""" @overload - def __init__(self, iterable: None = None, /) -> None: ... + def __init__(self, iterable: None = None, /) -> None: + """Create a new, empty Counter object. And if given, count elements +from an input iterable. Or, initialize the count from another mapping +of elements to their counts. + +>>> c = Counter() # a new, empty counter +>>> c = Counter('gallahad') # a new counter from an iterable +>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping +>>> c = Counter(a=4, b=2) # a new counter from keyword args + +""" @overload def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ... @overload def __init__(self, mapping: SupportsKeysAndGetItem[_T, int], /) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... - def copy(self) -> Self: ... - def elements(self) -> Iterator[_T]: ... - def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... + def copy(self) -> Self: + """Return a shallow copy. +""" + def elements(self) -> Iterator[_T]: + """Iterator over elements repeating each as many times as its count. + +>>> c = Counter('ABCABC') +>>> sorted(c.elements()) +['A', 'A', 'B', 'B', 'C', 'C'] + +Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + +>>> import math +>>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) +>>> math.prod(prime_factors.elements()) +1836 + +Note, if an element's count has been set to zero or is a negative +number, elements() will ignore it. + +""" + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: + """List the n most common elements and their counts from the most +common to the least. If n is None, then list all element counts. + +>>> Counter('abracadabra').most_common(3) +[('a', 5), ('b', 2), ('r', 2)] + +""" @classmethod def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] @overload - def subtract(self, iterable: None = None, /) -> None: ... + def subtract(self, iterable: None = None, /) -> None: + """Like dict.update() but subtracts counts instead of replacing them. 
+Counts can be reduced below zero. Both the inputs and outputs are +allowed to contain zero and negative counts. + +Source can be an iterable, a dictionary, or another Counter instance. + +>>> c = Counter('which') +>>> c.subtract('witch') # subtract elements from another iterable +>>> c.subtract(Counter('watch')) # subtract elements from another counter +>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch +0 +>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch +-1 + +""" @overload def subtract(self, mapping: Mapping[_T, int], /) -> None: ... @overload @@ -298,34 +485,126 @@ class Counter(dict[_T, int], Generic[_T]): # (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`, # the tuples would be added as keys, breaking type safety) @overload # type: ignore[override] - def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: ... + def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: + """Like dict.update() but add counts instead of replacing them. + +Source can be an iterable, a dictionary, or another Counter instance. + +>>> c = Counter('which') +>>> c.update('witch') # add elements from another iterable +>>> d = Counter('watch') +>>> c.update(d) # add elements from another counter +>>> c['h'] # four 'h' in which, witch, and watch +4 + +""" @overload def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ... @overload def update(self, iterable: None = None, /, **kwargs: int) -> None: ... - def __missing__(self, key: _T) -> int: ... - def __delitem__(self, elem: object) -> None: ... + def __missing__(self, key: _T) -> int: + """The count of elements not in the Counter is zero. +""" + def __delitem__(self, elem: object) -> None: + """Like dict.__delitem__() but does not raise KeyError for missing values. +""" if sys.version_info >= (3, 10): - def __eq__(self, other: object) -> bool: ... - def __ne__(self, other: object) -> bool: ... - - def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ... - def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... - def __and__(self, other: Counter[_T]) -> Counter[_T]: ... - def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override] - def __pos__(self) -> Counter[_T]: ... - def __neg__(self) -> Counter[_T]: ... + def __eq__(self, other: object) -> bool: + """True if all counts agree. Missing counts are treated as zero. +""" + def __ne__(self, other: object) -> bool: + """True if any counts disagree. Missing counts are treated as zero. +""" + + def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: + """Add counts from two counters. + +>>> Counter('abbb') + Counter('bcc') +Counter({'b': 4, 'c': 2, 'a': 1}) + +""" + def __sub__(self, other: Counter[_T]) -> Counter[_T]: + """Subtract count, but keep only results with positive counts. + +>>> Counter('abbbc') - Counter('bccd') +Counter({'b': 2, 'a': 1}) + +""" + def __and__(self, other: Counter[_T]) -> Counter[_T]: + """Intersection is the minimum of corresponding counts. + +>>> Counter('abbb') & Counter('bcc') +Counter({'b': 1}) + +""" + def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: # type: ignore[override] + """Union is the maximum of value in either of the input counters. + +>>> Counter('abbb') | Counter('bcc') +Counter({'b': 3, 'c': 2, 'a': 1}) + +""" + def __pos__(self) -> Counter[_T]: + """Adds an empty counter, effectively stripping negative and zero counts +""" + def __neg__(self) -> Counter[_T]: + """Subtracts from an empty counter. Strips positive and zero counts, +and flips the sign on negative counts. 
+ +""" # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. - def __iadd__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[misc] - def __isub__(self, other: SupportsItems[_T, int]) -> Self: ... - def __iand__(self, other: SupportsItems[_T, int]) -> Self: ... - def __ior__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[override,misc] + def __iadd__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[misc] + """Inplace add from another counter, keeping only positive counts. + +>>> c = Counter('abbb') +>>> c += Counter('bcc') +>>> c +Counter({'b': 4, 'c': 2, 'a': 1}) + +""" + def __isub__(self, other: SupportsItems[_T, int]) -> Self: + """Inplace subtract counter, but keep only results with positive counts. + +>>> c = Counter('abbbc') +>>> c -= Counter('bccd') +>>> c +Counter({'b': 2, 'a': 1}) + +""" + def __iand__(self, other: SupportsItems[_T, int]) -> Self: + """Inplace intersection is the minimum of corresponding counts. + +>>> c = Counter('abbb') +>>> c &= Counter('bcc') +>>> c +Counter({'b': 1}) + +""" + def __ior__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[override,misc] + """Inplace union is the maximum of value from either counter. + +>>> c = Counter('abbb') +>>> c |= Counter('bcc') +>>> c +Counter({'b': 3, 'c': 2, 'a': 1}) + +""" if sys.version_info >= (3, 10): - def total(self) -> int: ... - def __le__(self, other: Counter[Any]) -> bool: ... - def __lt__(self, other: Counter[Any]) -> bool: ... - def __ge__(self, other: Counter[Any]) -> bool: ... - def __gt__(self, other: Counter[Any]) -> bool: ... + def total(self) -> int: + """Sum of the counts +""" + def __le__(self, other: Counter[Any]) -> bool: + """True if all counts in self are a subset of those in other. +""" + def __lt__(self, other: Counter[Any]) -> bool: + """True if all counts in self are a proper subset of those in other. +""" + def __ge__(self, other: Counter[Any]) -> bool: + """True if all counts in self are a superset of those in other. +""" + def __gt__(self, other: Counter[Any]) -> bool: + """True if all counts in self are a proper superset of those in other. +""" # The pure-Python implementations of the "views" classes # These are exposed at runtime in `collections/__init__.py` @@ -359,10 +638,24 @@ class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyrig @disjoint_base class OrderedDict(dict[_KT, _VT]): - def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... - def move_to_end(self, key: _KT, last: bool = True) -> None: ... - def copy(self) -> Self: ... - def __reversed__(self) -> Iterator[_KT]: ... + """Dictionary that remembers insertion order +""" + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: + """Remove and return a (key, value) pair from the dictionary. + +Pairs are returned in LIFO order if last is true or FIFO order if false. +""" + def move_to_end(self, key: _KT, last: bool = True) -> None: + """Move an existing element to the end (or beginning if last is false). + +Raise KeyError if the element does not exist. +""" + def copy(self) -> Self: + """od.copy() -> a shallow copy of od +""" + def __reversed__(self) -> Iterator[_KT]: + """od.__reversed__() <==> reversed(od) +""" def keys(self) -> _odict_keys[_KT, _VT]: ... def items(self) -> _odict_items[_KT, _VT]: ... def values(self) -> _odict_values[_KT, _VT]: ... @@ -371,34 +664,57 @@ class OrderedDict(dict[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. 
@classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: + """Create a new ordered dictionary with keys from iterable and values set to value. +""" @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: + """Insert key with a value of default if key is not in the dictionary. + +Return the value for key if key is in the dictionary, else default. +""" @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... # Same as dict.pop, but accepts keyword arguments @overload - def pop(self, key: _KT) -> _VT: ... + def pop(self, key: _KT) -> _VT: + """od.pop(key[,default]) -> v, remove specified key and return the corresponding value. + +If the key is not found, return the default if given; otherwise, +raise a KeyError. +""" @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... def __eq__(self, value: object, /) -> bool: ... @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: ... + def __or__(self, value: dict[_KT, _VT], /) -> Self: + """Return self|value. +""" @overload def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + def __ror__(self, value: dict[_KT, _VT], /) -> Self: + """Return value|self. +""" @overload def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] @disjoint_base class defaultdict(dict[_KT, _VT]): + """defaultdict(default_factory=None, /, [...]) --> dict with default factory + +The default factory is called without arguments to produce +a new value when a key is not present, in __getitem__ only. +A defaultdict compares equal to a dict with the same items. +All remaining arguments are treated the same as if they were +passed to the dict constructor, including keyword arguments. +""" default_factory: Callable[[], _VT] | None @overload def __init__(self) -> None: ... @@ -433,24 +749,59 @@ class defaultdict(dict[_KT, _VT]): /, **kwargs: _VT, ) -> None: ... - def __missing__(self, key: _KT, /) -> _VT: ... - def __copy__(self) -> Self: ... - def copy(self) -> Self: ... - @overload - def __or__(self, value: dict[_KT, _VT], /) -> Self: ... + def __missing__(self, key: _KT, /) -> _VT: + """__missing__(key) # Called by __getitem__ for missing key; pseudo-code: + if self.default_factory is None: raise KeyError((key,)) + self[key] = value = self.default_factory() + return value +""" + def __copy__(self) -> Self: + """D.copy() -> a shallow copy of D. +""" + def copy(self) -> Self: + """D.copy() -> a shallow copy of D. +""" + @overload + def __or__(self, value: dict[_KT, _VT], /) -> Self: + """Return self|value. +""" @overload def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... @overload - def __ror__(self, value: dict[_KT, _VT], /) -> Self: ... + def __ror__(self, value: dict[_KT, _VT], /) -> Self: + """Return value|self. 
+""" @overload def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class ChainMap(MutableMapping[_KT, _VT]): + """A ChainMap groups multiple dicts (or other mappings) together +to create a single, updateable view. + +The underlying mappings are stored in a list. That list is public and can +be accessed or updated using the *maps* attribute. There is no other +state. + +Lookups search the underlying mappings successively until a key is found. +In contrast, writes, updates, and deletions only operate on the first +mapping. + +""" maps: list[MutableMapping[_KT, _VT]] - def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... - def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: + """Initialize a ChainMap by setting *maps* to the given mappings. +If no mappings are provided, a single empty dictionary is used. + +""" + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: + """New ChainMap with a new map followed by all previous maps. +If no map is provided, an empty dict is used. +Keyword arguments update the map or new empty dict. +""" @property - def parents(self) -> Self: ... + def parents(self) -> Self: + """New ChainMap from maps[1:]. +""" def __setitem__(self, key: _KT, value: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @@ -467,32 +818,44 @@ class ChainMap(MutableMapping[_KT, _VT]): def __bool__(self) -> bool: ... # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: + """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D +""" @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload - def pop(self, key: _KT) -> _VT: ... + def pop(self, key: _KT) -> _VT: + """Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0]. +""" @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... - def copy(self) -> Self: ... + def copy(self) -> Self: + """New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:] +""" __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, # so the signature should be kept in line with `dict.fromkeys`. if sys.version_info >= (3, 13): @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: + """Create a new ChainMap with keys from iterable and values set to value. +""" else: @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: + """Create a ChainMap with a single dict created from the iterable. +""" @classmethod @overload # Special-case None: the user probably wants to add non-None values later. - def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: + """Create a new ChainMap with keys from iterable and values set to value. 
+""" @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi index 3df2a1d9eb9b3..f2effbf1d31e5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi @@ -1,2 +1,6 @@ +"""Abstract Base Classes (ABCs) for collections, according to PEP 3119. + +Unit tests are in test_collections. +""" from _collections_abc import * from _collections_abc import __all__ as __all__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi index 4afcb5392b58e..56f0b7514db0e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi @@ -1,3 +1,20 @@ +"""Conversion functions between RGB and other color systems. + +This modules provides two functions for each color system ABC: + + rgb_to_abc(r, g, b) --> a, b, c + abc_to_rgb(a, b, c) --> r, g, b + +All inputs and outputs are triples of floats in the range [0.0...1.0] +(with the exception of I and Q, which covers a slightly larger range). +Inputs outside the valid range may cause exceptions or invalid outputs. + +Supported color systems: +RGB: Red, Green, Blue components +YIQ: Luminance, Chrominance (used by composite video signals) +HLS: Hue, Luminance, Saturation +HSV: Hue, Saturation, Value +""" from typing import Final __all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi index 8972d50a4a634..6d3cba13a3caa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi @@ -1,3 +1,15 @@ +"""Module/script to byte-compile all .py files to .pyc files. + +When called as a script with arguments, this compiles the directories +given as arguments recursively; the -l option prevents it from +recursing into directories. + +Without arguments, it compiles all modules on sys.path, without +recursing into subdirectories. (Even though it should do so for +packages -- for now, you'll have to deal with packages separately.) + +See module py_compile for details of the actual byte-compilation. +""" import sys from _typeshed import StrPath from py_compile import PycInvalidationMode @@ -26,7 +38,31 @@ if sys.version_info >= (3, 10): prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: ... + ) -> bool: + """Byte-compile all modules in the given directory tree. + +Arguments (only dir is required): + +dir: the directory to byte-compile +maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) +ddir: the directory that will be prepended to the path to the + file as it is compiled into each byte-code file. +force: if True, force compilation, even if timestamps are up-to-date +quiet: full output with False or 0, errors only with 1, + no output with 2 +legacy: if True, produce legacy pyc paths instead of PEP 3147 paths +optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. 
+workers: maximum number of parallel workers +invalidation_mode: how the up-to-dateness of the pyc will be checked +stripdir: part of path to left-strip from source file path +prependdir: path to prepend to beginning of original file path, applied + after stripdir +limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path +hardlink_dupes: hardlink duplicated pyc files +""" def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -41,7 +77,29 @@ if sys.version_info >= (3, 10): prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: ... + ) -> bool: + """Byte-compile one file. + +Arguments (only fullname is required): + +fullname: the file to byte-compile +ddir: if given, the directory name compiled in to the + byte-code file. +force: if True, force compilation, even if timestamps are up-to-date +quiet: full output with False or 0, errors only with 1, + no output with 2 +legacy: if True, produce legacy pyc paths instead of PEP 3147 paths +optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. +invalidation_mode: how the up-to-dateness of the pyc will be checked +stripdir: part of path to left-strip from source file path +prependdir: path to prepend to beginning of original file path, applied + after stripdir +limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path. +hardlink_dupes: hardlink duplicated pyc files +""" else: def compile_dir( @@ -60,7 +118,31 @@ else: prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: ... + ) -> bool: + """Byte-compile all modules in the given directory tree. + + Arguments (only dir is required): + + dir: the directory to byte-compile + maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) + ddir: the directory that will be prepended to the path to the + file as it is compiled into each byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. + workers: maximum number of parallel workers + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path + hardlink_dupes: hardlink duplicated pyc files + """ def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -75,7 +157,29 @@ else: prependdir: StrPath | None = None, limit_sl_dest: StrPath | None = None, hardlink_dupes: bool = False, - ) -> bool: ... + ) -> bool: + """Byte-compile one file. + + Arguments (only fullname is required): + + fullname: the file to byte-compile + ddir: if given, the directory name compiled in to the + byte-code file. 
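A brief sketch of driving the byte-compilation helpers documented above from Python code; the "src" paths are illustrative only.

import compileall

# Recursively byte-compile a source tree, printing errors only (quiet=1).
ok = compileall.compile_dir("src", quiet=1, optimize=0)
print("all files compiled" if ok else "some files failed to compile")

# Byte-compile a single file, forcing recompilation even if the .pyc is up to date.
compileall.compile_file("src/app.py", force=True, quiet=1)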
+ force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path. + hardlink_dupes: hardlink duplicated pyc files + """ def compile_path( skip_curdir: bool = ..., @@ -85,4 +189,16 @@ def compile_path( legacy: bool = False, optimize: int = -1, invalidation_mode: PycInvalidationMode | None = None, -) -> bool: ... +) -> bool: + """Byte-compile all module on sys.path. + +Arguments (all optional): + +skip_curdir: if true, skip current directory (default True) +maxlevels: max recursion level (default 0) +force: as for compile_dir() (default False) +quiet: as for compile_dir() (default 0) +legacy: as for compile_dir() (default False) +optimize: as for compile_dir() (default -1) +invalidation_mode: as for compiler_dir() +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi index b8463973ec671..c3c85d6d2f0a8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi @@ -1,3 +1,5 @@ +"""Internal classes used by compression modules +""" from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase @@ -11,9 +13,13 @@ class _Reader(Protocol): def seekable(self) -> bool: ... def seek(self, n: int, /) -> Any: ... -class BaseStream(BufferedIOBase): ... +class BaseStream(BufferedIOBase): + """Mode-checking helper functions. +""" class DecompressReader(RawIOBase): + """Adapts the decompressor API to a RawIOBase reader API +""" def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi index 9ddc39f27c286..c5653a0753f98 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi @@ -1 +1,6 @@ +"""Interface to the libbzip2 compression library. + +This module provides a file interface, classes for incremental +(de)compression, and functions for one-shot (de)compression. +""" from bz2 import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi index 9422a735c590e..fb1e556c29d47 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi @@ -1 +1,6 @@ +"""Functions that read and write gzipped files. + +The user of the file doesn't have to worry about the compression, +but random access is not allowed. 
+""" from gzip import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi index 936c3813db4f1..67bca904ab63f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi @@ -1 +1,10 @@ +"""Interface to the liblzma compression library. + +This module provides a class for reading and writing compressed files, +classes for incremental (de)compression, and convenience functions for +one-shot (de)compression. + +These classes and functions support both the XZ and legacy LZMA +container formats, as well as raw compressed data streams. +""" from lzma import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi index 78d176c03ee83..93f1f7ccf5192 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi @@ -1 +1,15 @@ +"""The functions in this module allow compression and decompression using the +zlib library, which is based on GNU zip. + +adler32(string[, start]) -- Compute an Adler-32 checksum. +compress(data[, level]) -- Compress data, with compression level 0-9 or -1. +compressobj([level[, ...]]) -- Return a compressor object. +crc32(string[, start]) -- Compute a CRC-32 checksum. +decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string. +decompressobj([wbits[, zdict]]) -- Return a decompressor object. + +'wbits' is window buffer size and container format. +Compressor objects support compress() and flush() methods; decompressor +objects support decompress() and flush(). +""" from zlib import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi index d5da4be036129..f43fea2d723f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi @@ -1,3 +1,5 @@ +"""Python bindings to the Zstandard (zstd) compression library (RFC-8878). +""" import enum from _typeshed import ReadableBuffer from collections.abc import Iterable, Mapping @@ -35,20 +37,81 @@ zstd_version_info: Final[tuple[int, int, int]] COMPRESSION_LEVEL_DEFAULT: Final = _zstd.ZSTD_CLEVEL_DEFAULT class FrameInfo: + """Information about a Zstandard frame. +""" __slots__ = ("decompressed_size", "dictionary_id") decompressed_size: int dictionary_id: int def __init__(self, decompressed_size: int, dictionary_id: int) -> None: ... -def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: ... -def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: ... -def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: ... +def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: + """Get Zstandard frame information from a frame header. + +*frame_buffer* is a bytes-like object. It should start from the beginning +of a frame, and needs to include at least the frame header (6 to 18 bytes). + +The returned FrameInfo object has two attributes. +'decompressed_size' is the size in bytes of the data in the frame when +decompressed, or None when the decompressed size is unknown. +'dictionary_id' is an int in the range (0, 2**32). 
The special value 0 +means that the dictionary ID was not recorded in the frame header, +the frame may or may not need a dictionary to be decoded, +and the ID of such a dictionary is not specified. +""" +def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: + """Return a ZstdDict representing a trained Zstandard dictionary. + +*samples* is an iterable of samples, where a sample is a bytes-like +object representing a file. + +*dict_size* is the dictionary's maximum size, in bytes. +""" +def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: + """Return a ZstdDict representing a finalized Zstandard dictionary. + +Given a custom content as a basis for dictionary, and a set of samples, +finalize *zstd_dict* by adding headers and statistics according to the +Zstandard dictionary format. + +You may compose an effective dictionary content by hand, which is used as +basis dictionary, and use some samples to finalize a dictionary. The basis +dictionary may be a "raw content" dictionary. See *is_raw* in ZstdDict. + +*samples* is an iterable of samples, where a sample is a bytes-like object +representing a file. +*dict_size* is the dictionary's maximum size, in bytes. +*level* is the expected compression level. The statistics for each +compression level differ, so tuning the dictionary to the compression level +can provide improvements. +""" def compress( data: ReadableBuffer, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None -) -> bytes: ... -def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: ... +) -> bytes: + """Return Zstandard compressed *data* as bytes. + +*level* is an int specifying the compression level to use, defaulting to +COMPRESSION_LEVEL_DEFAULT ('3'). +*options* is a dict object that contains advanced compression +parameters. See CompressionParameter for more on options. +*zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See +the function train_dict for how to train a ZstdDict on sample data. + +For incremental compression, use a ZstdCompressor instead. +""" +def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: + """Decompress one or more frames of Zstandard compressed *data*. + +*zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See +the function train_dict for how to train a ZstdDict on sample data. +*options* is a dict object that contains advanced compression +parameters. See DecompressionParameter for more on options. + +For incremental decompression, use a ZstdDecompressor instead. +""" @final class CompressionParameter(enum.IntEnum): + """Compression parameters. +""" compression_level = _zstd.ZSTD_c_compressionLevel window_log = _zstd.ZSTD_c_windowLog hash_log = _zstd.ZSTD_c_hashLog @@ -68,15 +131,31 @@ class CompressionParameter(enum.IntEnum): nb_workers = _zstd.ZSTD_c_nbWorkers job_size = _zstd.ZSTD_c_jobSize overlap_log = _zstd.ZSTD_c_overlapLog - def bounds(self) -> tuple[int, int]: ... + def bounds(self) -> tuple[int, int]: + """Return the (lower, upper) int bounds of a compression parameter. + +Both the lower and upper bounds are inclusive. +""" @final class DecompressionParameter(enum.IntEnum): + """Decompression parameters. +""" window_log_max = _zstd.ZSTD_d_windowLogMax - def bounds(self) -> tuple[int, int]: ... 
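A small round-trip sketch of the one-shot helpers documented above, assuming a Python 3.14+ interpreter where the compression.zstd module is available:

from compression import zstd

payload = b"typeshed stubs " * 1024
blob = zstd.compress(payload, level=zstd.COMPRESSION_LEVEL_DEFAULT)
assert zstd.decompress(blob) == payload

info = zstd.get_frame_info(blob)            # parse only the frame header
print(info.decompressed_size, info.dictionary_id)
print(len(blob), "<", len(payload))         # compressed size vs. original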
+ def bounds(self) -> tuple[int, int]: + """Return the (lower, upper) int bounds of a decompression parameter. + +Both the lower and upper bounds are inclusive. +""" @final class Strategy(enum.IntEnum): + """Compression strategies, listed from fastest to strongest. + +Note that new strategies might be added in the future. +Only the order (from fast to strong) is guaranteed, +the numeric value might change. +""" fast = _zstd.ZSTD_fast dfast = _zstd.ZSTD_dfast greedy = _zstd.ZSTD_greedy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi index e67b3d992f2f9..be8e472ac04d1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi @@ -24,6 +24,14 @@ class _FileBinaryWrite(SupportsWrite[bytes], Protocol): def close(self) -> None: ... class ZstdFile(_streams.BaseStream): + """A file-like object providing transparent Zstandard (de)compression. + +A ZstdFile can act as a wrapper for an existing file object, or refer +directly to a named file on disk. + +ZstdFile provides a *binary* file interface. Data is read and returned as +bytes, and may only be written to objects that support the Buffer Protocol. +""" FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME @@ -37,7 +45,25 @@ class ZstdFile(_streams.BaseStream): level: None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> None: ... + ) -> None: + """Open a Zstandard compressed file in binary mode. + +*file* can be either an file-like object, or a file name to open. + +*mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for +creating exclusively, or 'a' for appending. These can equivalently be +given as 'rb', 'wb', 'xb' and 'ab' respectively. + +*level* is an optional int specifying the compression level to use, +or COMPRESSION_LEVEL_DEFAULT if not given. + +*options* is an optional dict for advanced compression parameters. +See CompressionParameter and DecompressionParameter for the possible +options. + +*zstd_dict* is an optional ZstdDict object, a pre-trained Zstandard +dictionary. See train_dict() to train ZstdDict on sample data. +""" @overload def __init__( self, @@ -49,15 +75,77 @@ class ZstdFile(_streams.BaseStream): options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, ) -> None: ... - def write(self, data: ReadableBuffer, /) -> int: ... - def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: ... # type: ignore[override] - def read(self, size: int | None = -1) -> bytes: ... - def read1(self, size: int | None = -1) -> bytes: ... - def readinto(self, b: WriteableBuffer) -> int: ... - def readinto1(self, b: WriteableBuffer) -> int: ... - def readline(self, size: int | None = -1) -> bytes: ... - def seek(self, offset: int, whence: int = 0) -> int: ... - def peek(self, size: int = -1) -> bytes: ... + def write(self, data: ReadableBuffer, /) -> int: + """Write a bytes-like object *data* to the file. + +Returns the number of uncompressed bytes written, which is +always the length of data in bytes. Note that due to buffering, +the file on disk may not reflect the data written until .flush() +or .close() is called. +""" + def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: # type: ignore[override] + """Flush remaining data to the underlying stream. 
+ +The mode argument can be FLUSH_BLOCK or FLUSH_FRAME. Abuse of this +method will reduce compression ratio, use it only when necessary. + +If the program is interrupted afterwards, all data can be recovered. +To ensure saving to disk, also need to use os.fsync(fd). + +This method does nothing in reading mode. +""" + def read(self, size: int | None = -1) -> bytes: + """Read up to size uncompressed bytes from the file. + +If size is negative or omitted, read until EOF is reached. +Returns b'' if the file is already at EOF. +""" + def read1(self, size: int | None = -1) -> bytes: + """Read up to size uncompressed bytes, while trying to avoid +making multiple reads from the underlying stream. Reads up to a +buffer's worth of data if size is negative. + +Returns b'' if the file is at EOF. +""" + def readinto(self, b: WriteableBuffer) -> int: + """Read bytes into b. + +Returns the number of bytes read (0 for EOF). +""" + def readinto1(self, b: WriteableBuffer) -> int: + """Read bytes into b, while trying to avoid making multiple reads +from the underlying stream. + +Returns the number of bytes read (0 for EOF). +""" + def readline(self, size: int | None = -1) -> bytes: + """Read a line of uncompressed bytes from the file. + +The terminating newline (if present) is retained. If size is +non-negative, no more than size bytes will be read (in which +case the line may be incomplete). Returns b'' if already at EOF. +""" + def seek(self, offset: int, whence: int = 0) -> int: + """Change the file position. + +The new position is specified by offset, relative to the +position indicated by whence. Possible values for whence are: + + 0: start of stream (default): offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive + +Returns the new file position. + +Note that seeking is emulated, so depending on the arguments, +this operation may be extremely slow. +""" + def peek(self, size: int = -1) -> bytes: + """Return buffered data without advancing the file position. + +Always returns at least one byte of data, unless at EOF. +The exact number of bytes returned is unspecified. +""" @property def name(self) -> str | bytes: ... @property @@ -75,7 +163,34 @@ def open( encoding: str | None = None, errors: str | None = None, newline: str | None = None, -) -> ZstdFile: ... +) -> ZstdFile: + """Open a Zstandard compressed file in binary or text mode. + +file can be either a file name (given as a str, bytes, or PathLike object), +in which case the named file is opened, or it can be an existing file object +to read from or write to. + +The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a', +'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode. + +The level, options, and zstd_dict parameters specify the settings the same +as ZstdFile. + +When using read mode (decompression), the options parameter is a dict +representing advanced decompression options. The level parameter is not +supported in this case. When using write mode (compression), only one of +level, an int representing the compression level, or options, a dict +representing advanced compression options, may be passed. In both modes, +zstd_dict is a ZstdDict instance containing a trained Zstandard dictionary. + +For binary mode, this function is equivalent to the ZstdFile constructor: +ZstdFile(filename, mode, ...). In this case, the encoding, errors and +newline parameters must not be provided. 
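A short sketch of the file-level API documented above, using text mode so that zstd.open wraps the ZstdFile in an io.TextIOWrapper; the file name is illustrative.

from compression import zstd

with zstd.open("notes.zst", "wt", encoding="utf-8") as fh:
    fh.write("first line\nsecond line\n")

with zstd.open("notes.zst", "rt", encoding="utf-8") as fh:
    for line in fh:               # decompressed transparently, line by line
        print(line.rstrip())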
+ +For text mode, an ZstdFile object is created, and wrapped in an +io.TextIOWrapper instance with the specified encoding, error handling +behavior, and line ending(s). +""" @overload def open( file: StrOrBytesPath | _FileBinaryWrite, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi index ad4d20ea54453..f15c1dc0af774 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi @@ -1,3 +1,5 @@ +"""Execute computations asynchronously using threads or processes. +""" import sys from ._base import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi index be48a6e4289c8..4c4d618dd28a1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi @@ -18,41 +18,150 @@ FINISHED: Final = "FINISHED" _STATE_TO_DESCRIPTION_MAP: Final[dict[str, str]] LOGGER: Logger -class Error(Exception): ... -class CancelledError(Error): ... +class Error(Exception): + """Base class for all future-related exceptions. +""" +class CancelledError(Error): + """The Future was cancelled. +""" if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: - class TimeoutError(Error): ... + class TimeoutError(Error): + """The operation exceeded the given deadline. +""" -class InvalidStateError(Error): ... -class BrokenExecutor(RuntimeError): ... +class InvalidStateError(Error): + """The operation is not allowed in this state. +""" +class BrokenExecutor(RuntimeError): + """ +Raised when a executor has become non-functional after a severe failure. +""" _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _P = ParamSpec("_P") class Future(Generic[_T]): + """Represents the result of an asynchronous computation. +""" _condition: threading.Condition _state: str _result: _T | None _exception: BaseException | None _waiters: list[_Waiter] - def cancel(self) -> bool: ... - def cancelled(self) -> bool: ... - def running(self) -> bool: ... - def done(self) -> bool: ... - def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... - def result(self, timeout: float | None = None) -> _T: ... - def set_running_or_notify_cancel(self) -> bool: ... - def set_result(self, result: _T) -> None: ... - def exception(self, timeout: float | None = None) -> BaseException | None: ... - def set_exception(self, exception: BaseException | None) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def cancel(self) -> bool: + """Cancel the future if possible. + +Returns True if the future was cancelled, False otherwise. A future +cannot be cancelled if it is running or has already completed. +""" + def cancelled(self) -> bool: + """Return True if the future was cancelled. +""" + def running(self) -> bool: + """Return True if the future is currently executing. +""" + def done(self) -> bool: + """Return True if the future was cancelled or finished executing. +""" + def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: + """Attaches a callable that will be called when the future finishes. + +Args: + fn: A callable that will be called with this future as its only + argument when the future completes or is cancelled. 
The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added. +""" + def result(self, timeout: float | None = None) -> _T: + """Return the result of the call that the future represents. + +Args: + timeout: The number of seconds to wait for the result if the future + isn't done. If None, then there is no limit on the wait time. + +Returns: + The result of the call that the future represents. + +Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + Exception: If the call raised then that exception will be raised. +""" + def set_running_or_notify_cancel(self) -> bool: + """Mark the future as running or process any cancel notifications. + +Should only be used by Executor implementations and unit tests. + +If the future has been cancelled (cancel() was called and returned +True) then any threads waiting on the future completing (though calls +to as_completed() or wait()) are notified and False is returned. + +If the future was not cancelled then it is put in the running state +(future calls to running() will return True) and True is returned. + +This method should be called by Executor implementations before +executing the work associated with this future. If this method returns +False then the work should not be executed. + +Returns: + False if the Future was cancelled, True otherwise. + +Raises: + RuntimeError: if this method was already called or if set_result() + or set_exception() was called. +""" + def set_result(self, result: _T) -> None: + """Sets the return value of work associated with the future. + +Should only be used by Executor implementations and unit tests. +""" + def exception(self, timeout: float | None = None) -> BaseException | None: + """Return the exception raised by the call that the future represents. + +Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. + +Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + +Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. +""" + def set_exception(self, exception: BaseException | None) -> None: + """Sets the result of the future as being the given exception. + +Should only be used by Executor implementations and unit tests. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class Executor: - def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... + """This is an abstract base class for concrete asynchronous executors. +""" + def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: + """Submits a callable to be executed with the given arguments. + +Schedules the callable to be executed as fn(*args, **kwargs) and returns +a Future instance representing the execution of the callable. + +Returns: + A Future representing the given call. 
+""" if sys.version_info >= (3, 14): def map( self, @@ -61,13 +170,73 @@ class Executor: timeout: float | None = None, chunksize: int = 1, buffersize: int | None = None, - ) -> Iterator[_T]: ... + ) -> Iterator[_T]: + """Returns an iterator equivalent to map(fn, iter). + +Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken into + before being passed to a child process. This argument is only + used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. + buffersize: The number of submitted tasks whose results have not + yet been yielded. If the buffer is full, iteration over the + iterables pauses until a result is yielded from the buffer. + If None, all input elements are eagerly collected, and a task is + submitted for each. + +Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + +Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. +""" else: def map( self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 - ) -> Iterator[_T]: ... + ) -> Iterator[_T]: + """Returns an iterator equivalent to map(fn, iter). + +Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken into + before being passed to a child process. This argument is only + used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. + +Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + +Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. +""" - def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: + """Clean-up the resources associated with the Executor. + +It is safe to call this method several times. Otherwise, no other +methods can be called after this one. + +Args: + wait: If True then shutdown will not return until all running + futures have finished executing and the resources used by the + executor have been reclaimed. + cancel_futures: If True then shutdown will cancel all pending + futures. Futures that are completed or running will not be + cancelled. +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -84,17 +253,62 @@ class _AsCompletedFuture(Protocol[_T_co]): # Not used by as_completed, but needed to propagate the generic type def result(self, timeout: float | None = None) -> _T_co: ... -def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... +def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: + """An iterator over the given futures that yields each as it completes. + +Args: + fs: The sequence of Futures (possibly created by different Executors) to + iterate over. + timeout: The maximum number of seconds to wait. 
If None, then there + is no limit on the wait time. + +Returns: + An iterator that yields the given Futures as they complete (finished or + cancelled). If any given Futures are duplicated, they will be returned + once. + +Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. +""" class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): + """DoneAndNotDoneFutures(done, not_done) +""" done: set[Future[_T]] not_done: set[Future[_T]] def wait( fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" -) -> DoneAndNotDoneFutures[_T]: ... +) -> DoneAndNotDoneFutures[_T]: + """Wait for the futures in the given sequence to complete. + +Args: + fs: The sequence of Futures (possibly created by different Executors) to + wait upon. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + return_when: Indicates when this function should return. The options + are: + + FIRST_COMPLETED - Return when any future finishes or is + cancelled. + FIRST_EXCEPTION - Return when any future finishes by raising an + exception. If no future raises an exception + then it is equivalent to ALL_COMPLETED. + ALL_COMPLETED - Return when all futures finish or are cancelled. + +Returns: + A named 2-tuple of sets. The first set, named 'done', contains the + futures that completed (is finished or cancelled) before the wait + completed. The second set, named 'not_done', contains uncompleted + futures. Duplicate futures given to *fs* are removed and will be + returned only once. +""" class _Waiter: + """Provides the event that wait() and as_completed() block on. +""" event: threading.Event finished_futures: list[Future[Any]] def add_result(self, future: Future[Any]) -> None: ... @@ -102,17 +316,25 @@ class _Waiter: def add_cancelled(self, future: Future[Any]) -> None: ... class _AsCompletedWaiter(_Waiter): + """Used by as_completed(). +""" lock: threading.Lock -class _FirstCompletedWaiter(_Waiter): ... +class _FirstCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_COMPLETED). +""" class _AllCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED). +""" num_pending_calls: int stop_on_exception: bool lock: threading.Lock def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... class _AcquireFutures: + """A context manager that does an ordered acquire of Future conditions. +""" futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi index e101022babcb6..7cec6e114d3e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi @@ -1,3 +1,5 @@ +"""Implements InterpreterPoolExecutor. +""" import sys from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor @@ -37,7 +39,10 @@ if sys.version_info >= (3, 14): def __del__(self) -> None: ... def run(self, task: _Task) -> None: ... # type: ignore[override] - class BrokenInterpreterPool(BrokenThreadPool): ... + class BrokenInterpreterPool(BrokenThreadPool): + """ +Raised when a worker thread in an InterpreterPoolExecutor failed initializing. 
+""" class InterpreterPoolExecutor(ThreadPoolExecutor): BROKEN: type[BrokenInterpreterPool] @@ -59,7 +64,17 @@ if sys.version_info >= (3, 14): thread_name_prefix: str = "", initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: ... + ) -> None: + """Initializes a new InterpreterPoolExecutor instance. + +Args: + max_workers: The maximum number of interpreters that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. + initializer: A callable or script used to initialize + each worker interpreter. + initargs: A tuple of arguments to pass to the initializer. +""" @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi index 071b3aba5d330..c1e59fd272110 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi @@ -1,3 +1,44 @@ +"""Implements ProcessPoolExecutor. + +The following diagram and text describe the data-flow through the system: + +|======================= In-process =====================|== Out-of-process ==| + ++----------+ +----------+ +--------+ +-----------+ +---------+ +| | => | Work Ids | | | | Call Q | | Process | +| | +----------+ | | +-----------+ | Pool | +| | | ... | | | | ... | +---------+ +| | | 6 | => | | => | 5, call() | => | | +| | | 7 | | | | ... | | | +| Process | | ... | | Local | +-----------+ | Process | +| Pool | +----------+ | Worker | | #1..n | +| Executor | | Thread | | | +| | +----------- + | | +-----------+ | | +| | <=> | Work Items | <=> | | <= | Result Q | <= | | +| | +------------+ | | +-----------+ | | +| | | 6: call() | | | | ... | | | +| | | future | | | | 4, result | | | +| | | ... | | | | 3, except | | | ++----------+ +------------+ +--------+ +-----------+ +---------+ + +Executor.submit() called: +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict +- adds the id of the _WorkItem to the "Work Ids" queue + +Local worker thread: +- reads work ids from the "Work Ids" queue and looks up the corresponding + WorkItem from the "Work Items" dict: if the work item has been cancelled then + it is simply removed from the dict, otherwise it is repackaged as a + _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" + until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because + calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). +- reads _ResultItems from "Result Q", updates the future stored in the + "Work Items" dict and deletes the dict entry + +Process #1..n: +- reads _CallItems from "Call Q", executes the calls, and puts the resulting + _ResultItems in "Result Q" +""" import sys from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, MutableSequence from multiprocessing.connection import Connection @@ -71,6 +112,8 @@ class _CallItem: def __init__(self, work_id: int, fn: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... class _SafeQueue(Queue[Future[Any]]): + """Safe Queue set exception to the future object linked to a job +""" pending_work_items: dict[int, _WorkItem[Any]] if sys.version_info < (3, 12): shutdown_lock: Lock @@ -97,8 +140,18 @@ class _SafeQueue(Queue[Future[Any]]): def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... 
-def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... -def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: ... +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: + """ Iterates over zip()ed iterables in chunks. +""" +def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: + """Processes a chunk of an iterable passed to map. + +Runs the function passed to map() on a chunk of the +iterable passed to map. + +This function is run in a separate process. + +""" if sys.version_info >= (3, 11): def _sendback_result( @@ -107,12 +160,16 @@ if sys.version_info >= (3, 11): result: Any | None = None, exception: Exception | None = None, exit_pid: int | None = None, - ) -> None: ... + ) -> None: + """Safely send back the given result or exception +""" else: def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None - ) -> None: ... + ) -> None: + """Safely send back the given result or exception +""" if sys.version_info >= (3, 11): def _process_worker( @@ -121,7 +178,19 @@ if sys.version_info >= (3, 11): initializer: Callable[[Unpack[_Ts]], object] | None, initargs: tuple[Unpack[_Ts]], max_tasks: int | None = None, - ) -> None: ... + ) -> None: + """Evaluates calls from call_queue and places the results in result_queue. + +This worker is run in a separate process. + +Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. + initializer: A callable initializer, or None + initargs: A tuple of args for the initializer +""" else: def _process_worker( @@ -129,9 +198,31 @@ else: result_queue: SimpleQueue[_ResultItem], initializer: Callable[[Unpack[_Ts]], object] | None, initargs: tuple[Unpack[_Ts]], - ) -> None: ... + ) -> None: + """Evaluates calls from call_queue and places the results in result_queue. + + This worker is run in a separate process. + + Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. + initializer: A callable initializer, or None + initargs: A tuple of args for the initializer + """ class _ExecutorManagerThread(Thread): + """Manages the communication between this process and the worker processes. + +The manager is run in a local thread. + +Args: + executor: A reference to the ProcessPoolExecutor that owns + this thread. A weakref will be own by the manager as well as + references to internal objects used to introspect the state of + the executor. +""" thread_wakeup: _ThreadWakeup shutdown_lock: Lock executor_reference: ref[Any] @@ -156,9 +247,18 @@ _system_limits_checked: bool _system_limited: bool | None def _check_system_limits() -> None: ... -def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: ... +def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: + """ +Specialized implementation of itertools.chain.from_iterable. +Each item in *iterable* should be a list. This function is +careful not to keep references to yielded objects. +""" -class BrokenProcessPool(BrokenExecutor): ... +class BrokenProcessPool(BrokenExecutor): + """ +Raised when a process in a ProcessPoolExecutor terminated abruptly +while a future was in the running state. 
+""" class ProcessPoolExecutor(Executor): _mp_context: BaseContext | None @@ -186,7 +286,25 @@ class ProcessPoolExecutor(Executor): initargs: tuple[()] = (), *, max_tasks_per_child: int | None = None, - ) -> None: ... + ) -> None: + """Initializes a new ProcessPoolExecutor instance. + +Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + mp_context: A multiprocessing context to launch the workers created + using the multiprocessing.get_context('start method') API. This + object should provide SimpleQueue, Queue and Process. + initializer: A callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. + max_tasks_per_child: The maximum number of tasks a worker process + can complete before it will exit and be replaced with a fresh + worker process. The default of None means worker process will + live as long as the executor. Requires a non-'fork' mp_context + start method. When given, we default to using 'spawn' if no + mp_context is supplied. +""" @overload def __init__( self, @@ -215,7 +333,18 @@ class ProcessPoolExecutor(Executor): mp_context: BaseContext | None = None, initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: ... + ) -> None: + """Initializes a new ProcessPoolExecutor instance. + + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + mp_context: A multiprocessing context to launch the workers. This + object should provide SimpleQueue, Queue and Process. + initializer: A callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. + """ @overload def __init__( self, @@ -238,5 +367,21 @@ class ProcessPoolExecutor(Executor): def _adjust_process_count(self) -> None: ... if sys.version_info >= (3, 14): - def kill_workers(self) -> None: ... - def terminate_workers(self) -> None: ... + def kill_workers(self) -> None: + """Attempts to kill the executor's workers. +Iterates through all of the current worker processes and kills +each one that is still alive. + +After killing workers, the pool will be in a broken state +and no longer usable (for instance, new tasks should not be +submitted). +""" + def terminate_workers(self) -> None: + """Attempts to terminate the executor's workers. +Iterates through all of the current worker processes and terminates +each one that is still alive. + +After terminating workers, the pool will be in a broken state +and no longer usable (for instance, new tasks should not be +submitted). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi index 50a6a9c6f43ea..98747afb2c6d5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi @@ -1,3 +1,5 @@ +"""Implements ThreadPoolExecutor. +""" import queue import sys from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet @@ -57,7 +59,11 @@ if sys.version_info >= (3, 14): task: _Task def __init__(self, future: Future[Any], task: _Task) -> None: ... def run(self, ctx: WorkerContext) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" def _worker(executor_reference: ref[Any], ctx: WorkerContext, work_queue: queue.SimpleQueue[Any]) -> None: ... @@ -69,7 +75,11 @@ else: kwargs: Mapping[str, Any] def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... def run(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" def _worker( executor_reference: ref[Any], @@ -78,7 +88,10 @@ else: initargs: tuple[Unpack[_Ts]], ) -> None: ... -class BrokenThreadPool(BrokenExecutor): ... +class BrokenThreadPool(BrokenExecutor): + """ +Raised when a worker thread in a ThreadPoolExecutor failed initializing. +""" class ThreadPoolExecutor(Executor): if sys.version_info >= (3, 14): @@ -118,7 +131,17 @@ class ThreadPoolExecutor(Executor): thread_name_prefix: str = "", initializer: Callable[[], object] | None = None, initargs: tuple[()] = (), - ) -> None: ... + ) -> None: + """Initializes a new ThreadPoolExecutor instance. + +Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. + initializer: A callable used to initialize worker threads. + initargs: A tuple of arguments to pass to the initializer. + ctxkwargs: Additional arguments to cls.prepare_context(). +""" @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi index 3839e6bef09b6..f4357e9a9e3e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi @@ -1,3 +1,5 @@ +"""Subinterpreters High Level Module. +""" import sys import threading import types @@ -38,16 +40,39 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < _P = ParamSpec("_P") class ExecutionFailed(InterpreterError): + """An unhandled exception happened during execution. + +This is raised from Interpreter.exec() and Interpreter.call(). +""" excinfo: types.SimpleNamespace def __init__(self, excinfo: types.SimpleNamespace) -> None: ... - def create() -> Interpreter: ... - def list_all() -> list[Interpreter]: ... - def get_current() -> Interpreter: ... - def get_main() -> Interpreter: ... + def create() -> Interpreter: + """Return a new (idle) Python interpreter. +""" + def list_all() -> list[Interpreter]: + """Return all existing interpreters. +""" + def get_current() -> Interpreter: + """Return the currently running interpreter. +""" + def get_main() -> Interpreter: + """Return the main interpreter. +""" class Interpreter: + """A single Python interpreter. + +Attributes: + +"id" - the unique process-global ID number for the interpreter +"whence" - indicates where the interpreter was created + +If the interpreter wasn't created by this module +then any method that modifies the interpreter will fail, +i.e. .close(), .prepare_main(), .exec(), and .call() +""" def __new__(cls, id: int, /, _whence: _Whence | None = None, _ownsref: bool | None = None) -> Self: ... 
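The thread_name_prefix/initializer/initargs parameters described in the ThreadPoolExecutor.__init__ docstring above can be exercised with a short sketch (illustrative only; `init_worker` and the tag value are made up for the example):

    import threading
    from concurrent.futures import ThreadPoolExecutor

    worker_state = threading.local()

    def init_worker(tag: str) -> None:              # runs once in each worker thread
        worker_state.tag = f"{tag}:{threading.current_thread().name}"

    with ThreadPoolExecutor(max_workers=2, thread_name_prefix="demo",
                            initializer=init_worker, initargs=("demo",)) as pool:
        print(sorted(pool.map(lambda n: n * 2, [1, 2, 3])))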
def __reduce__(self) -> tuple[type[Self], int]: ... def __hash__(self) -> int: ... @@ -58,11 +83,55 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < def whence( self, ) -> Literal["unknown", "runtime init", "legacy C-API", "C-API", "cross-interpreter C-API", "_interpreters module"]: ... - def is_running(self) -> bool: ... - def close(self) -> None: ... + def is_running(self) -> bool: + """Return whether or not the identified interpreter is running. +""" + def close(self) -> None: + """Finalize and destroy the interpreter. + +Attempting to destroy the current interpreter results +in an InterpreterError. +""" def prepare_main( self, ns: _SharedDict | None = None, /, **kwargs: Any - ) -> None: ... # kwargs has same value restrictions as _SharedDict - def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: ... - def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ... - def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: ... + ) -> None: # kwargs has same value restrictions as _SharedDict + """Bind the given values into the interpreter's __main__. + +The values must be shareable. +""" + def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: + """Run the given source code in the interpreter. + +This is essentially the same as calling the builtin "exec" +with this interpreter, using the __dict__ of its __main__ +module as both globals and locals. + +There is no return value. + +If the code raises an unhandled exception then an ExecutionFailed +exception is raised, which summarizes the unhandled exception. +The actual exception is discarded because objects cannot be +shared between interpreters. + +This blocks the current Python thread until done. During +that time, the previous interpreter is allowed to run +in other threads. +""" + def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: + """Call the object in the interpreter with given args/kwargs. + +Nearly all callables, args, kwargs, and return values are +supported. All "shareable" objects are supported, as are +"stateless" functions (meaning non-closures that do not use +any globals). This method will fall back to pickle. + +If the callable raises an exception then the error display +(including full traceback) is sent back between the interpreters +and an ExecutionFailed exception is raised, much like what +happens with Interpreter.exec(). +""" + def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: + """Return a new thread that calls the object in the interpreter. + +The return value and any raised exception are discarded. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi index 7cf1ea34786ed..46a0acf6814b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi @@ -1,3 +1,5 @@ +"""Common code between queues and channels. 
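A rough sketch of how the Interpreter methods stubbed above (create(), exec(), call(), close()) fit together — assumes Python 3.14+, where concurrent.interpreters exists, and is not part of the vendored stubs:

    from concurrent import interpreters              # Python 3.14+

    interp = interpreters.create()                   # new, idle subinterpreter
    try:
        interp.exec("import math")                   # runs in the interpreter's __main__
        print(interp.call(pow, 2, 10))               # result sent back; unhandled errors raise ExecutionFailed
    finally:
        interp.close()                               # closing the *current* interpreter would raise InterpreterError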
+""" import sys from collections.abc import Callable from typing import Final, NewType @@ -6,16 +8,28 @@ from typing_extensions import Never, Self, TypeAlias if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python <3.13 from _interpqueues import _UnboundOp - class ItemInterpreterDestroyed(Exception): ... + class ItemInterpreterDestroyed(Exception): + """Raised when trying to get an item whose interpreter was destroyed. +""" # Actually a descriptor that behaves similarly to classmethod but prevents # access from instances. classonly = classmethod class UnboundItem: + """Represents a cross-interpreter item no longer bound to an interpreter. + +An item is unbound when the interpreter that added it to the +cross-interpreter container is destroyed. +""" __slots__ = () def __new__(cls) -> Never: ... @classonly - def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: ... + def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: + """A non-data descriptor that makes a value only visible on the class. + +This is like the "classmethod" builtin, but does not show up on +instances of the class. It may be used as a decorator. +""" # Sentinel types and alias that don't exist at runtime. _UnboundErrorType = NewType("_UnboundErrorType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi index bdf08d93d1e00..e4976492efad6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi @@ -1,3 +1,5 @@ +"""Cross-interpreter Queues High Level Module. +""" import queue import sys from typing import Final, SupportsIndex @@ -23,15 +25,37 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < "list_all", ] - class QueueEmpty(QueueError, queue.Empty): ... - class QueueFull(QueueError, queue.Full): ... - class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): ... + class QueueEmpty(QueueError, queue.Empty): + """Raised from get_nowait() when the queue is empty. + +It is also raised from get() if it times out. +""" + class QueueFull(QueueError, queue.Full): + """Raised from put_nowait() when the queue is full. + +It is also raised from put() if it times out. +""" + class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): + """Raised from get() and get_nowait(). +""" UNBOUND: Final[UnboundItem] - def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: ... - def list_all() -> list[Queue]: ... + def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: + """Return a new cross-interpreter queue. + +The queue may be used to pass data safely between interpreters. + +"unbounditems" sets the default for Queue.put(); see that method for +supported values. The default value is UNBOUND, which replaces +the unbound item. +""" + def list_all() -> list[Queue]: + """Return a list of all open queues. +""" class Queue: + """A cross-interpreter queue. +""" def __new__(cls, id: int, /) -> Self: ... def __del__(self) -> None: ... def __hash__(self) -> int: ... @@ -54,7 +78,42 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < *, unbounditems: _AnyUnbound | None = None, _delay: float = 0.01, - ) -> None: ... + ) -> None: + """Add the object to the queue. 
+ +If "block" is true, this blocks while the queue is full. + +For most objects, the object received through Queue.get() will +be a new one, equivalent to the original and not sharing any +actual underlying data. The notable exceptions include +cross-interpreter types (like Queue) and memoryview, where the +underlying data is actually shared. Furthermore, some types +can be sent through a queue more efficiently than others. This +group includes various immutable types like int, str, bytes, and +tuple (if the items are likewise efficiently shareable). See interpreters.is_shareable(). + +"unbounditems" controls the behavior of Queue.get() for the given +object if the current interpreter (calling put()) is later +destroyed. + +If "unbounditems" is None (the default) then it uses the +queue's default, set with create_queue(), +which is usually UNBOUND. + +If "unbounditems" is UNBOUND_ERROR then get() will raise an +ItemInterpreterDestroyed exception if the original interpreter +has been destroyed. This does not otherwise affect the queue; +the next call to put() will work like normal, returning the next +item in the queue. + +If "unbounditems" is UNBOUND_REMOVE then the item will be removed +from the queue as soon as the original interpreter is destroyed. +Be aware that this will introduce an imbalance between put() +and get() calls. + +If "unbounditems" is UNBOUND then it is returned by get() in place +of the unbound item. +""" else: def put( self, @@ -67,8 +126,21 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < def put_nowait(self, obj: object, *, unbounditems: _AnyUnbound | None = None) -> None: ... if sys.version_info >= (3, 14): - def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... + def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: + """Return the next object from the queue. + +If "block" is true, this blocks while the queue is empty. + +If the next item's original interpreter has been destroyed +then the "next object" is determined by the value of the +"unbounditems" argument to put(). +""" else: def get(self, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... - def get_nowait(self) -> object: ... + def get_nowait(self) -> object: + """Return the next object from the channel. + +If the queue is empty then raise QueueEmpty. Otherwise this +is the same as get(). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi index 1909d80e3d189..a5f2170bac067 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi @@ -1,3 +1,147 @@ +"""Configuration file parser. + +A configuration file consists of sections, lead by a "[section]" header, +and followed by "name: value" entries, with continuations and such in +the style of RFC 822. + +Intrinsic defaults can be specified by passing them into the +ConfigParser constructor as a dictionary. + +class: + +ConfigParser -- responsible for parsing a list of + configuration files, and managing the parsed database. 
+ + methods: + + __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, + delimiters=('=', ':'), comment_prefixes=('#', ';'), + inline_comment_prefixes=None, strict=True, + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=, converters=, + allow_unnamed_section=False): + Create the parser. When `defaults` is given, it is initialized into the + dictionary or intrinsic defaults. The keys must be strings, the values + must be appropriate for %()s string interpolation. + + When `dict_type` is given, it will be used to create the dictionary + objects for the list of sections, for the options within a section, and + for the default values. + + When `delimiters` is given, it will be used as the set of substrings + that divide keys from values. + + When `comment_prefixes` is given, it will be used as the set of + substrings that prefix comments in empty lines. Comments can be + indented. + + When `inline_comment_prefixes` is given, it will be used as the set of + substrings that prefix comments in non-empty lines. + + When `strict` is True, the parser won't allow for any section or option + duplicates while reading from a single source (file, string or + dictionary). Default is True. + + When `empty_lines_in_values` is False (default: True), each empty line + marks the end of an option. Otherwise, internal empty lines of + a multiline option are kept as part of the value. + + When `allow_no_value` is True (default: False), options without + values are accepted; the value presented for these is None. + + When `default_section` is given, the name of the special section is + named accordingly. By default it is called ``"DEFAULT"`` but this can + be customized to point to any other valid section name. Its current + value can be retrieved using the ``parser_instance.default_section`` + attribute and may be modified at runtime. + + When `interpolation` is given, it should be an Interpolation subclass + instance. It will be used as the handler for option value + pre-processing when using getters. RawConfigParser objects don't do + any sort of interpolation, whereas ConfigParser uses an instance of + BasicInterpolation. The library also provides a ``zc.buildout`` + inspired ExtendedInterpolation implementation. + + When `converters` is given, it should be a dictionary where each key + represents the name of a type converter and each value is a callable + implementing the conversion from string to the desired datatype. Every + converter gets its corresponding get*() method on the parser object and + section proxies. + + When `allow_unnamed_section` is True (default: False), options + without section are accepted: the section for these is + ``configparser.UNNAMED_SECTION``. + + sections() + Return all the configuration section names, sans DEFAULT. + + has_section(section) + Return whether the given section exists. + + has_option(section, option) + Return whether the given option exists in the given section. + + options(section) + Return list of configuration options for the named section. + + read(filenames, encoding=None) + Read and parse the iterable of named configuration files, given by + name. A single filename is also allowed. Non-existing files + are ignored. Return list of successfully read files. + + read_file(f, filename=None) + Read and parse one configuration file, given as a file object. + The filename defaults to f.name; it is only used in error + messages (if f has no `name` attribute, the string `` is used). 
+ + read_string(string) + Read configuration from a given string. + + read_dict(dictionary) + Read configuration from a dictionary. Keys are section names, + values are dictionaries with keys and values that should be present + in the section. If the used dictionary type preserves order, sections + and their keys will be added in order. Values are automatically + converted to strings. + + get(section, option, raw=False, vars=None, fallback=_UNSET) + Return a string value for the named option. All % interpolations are + expanded in the return values, based on the defaults passed into the + constructor and the DEFAULT section. Additional substitutions may be + provided using the `vars` argument, which must be a dictionary whose + contents override any pre-existing defaults. If `option` is a key in + `vars`, the value from `vars` is used. + + getint(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to an integer. + + getfloat(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a float. + + getboolean(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a boolean (currently case + insensitively defined as 0, false, no, off for False, and 1, true, + yes, on for True). Returns False or True. + + items(section=_UNSET, raw=False, vars=None) + If section is given, return a list of tuples with (name, value) for + each option in the section. Otherwise, return a list of tuples with + (section_name, section_proxy) for each section, including DEFAULTSECT. + + remove_section(section) + Remove the given file section and all its options. + + remove_option(section, option) + Remove the given option from the given section. + + set(section, option, value) + Set the given option. + + write(fp, space_around_delimiters=True) + Write the configuration state in .ini format. If + `space_around_delimiters` is True (the default), delimiters + between keys and values are surrounded by spaces. +""" import sys from _typeshed import MaybeNone, StrOrBytesPath, SupportsWrite from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence @@ -128,22 +272,46 @@ DEFAULTSECT: Final = "DEFAULT" MAX_INTERPOLATION_DEPTH: Final = 10 class Interpolation: + """Dummy interpolation that passes the value through with no changes. +""" def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, defaults: _Section) -> str: ... def before_set(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... def before_read(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... def before_write(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... -class BasicInterpolation(Interpolation): ... -class ExtendedInterpolation(Interpolation): ... +class BasicInterpolation(Interpolation): + """Interpolation as implemented in the classic ConfigParser. + +The option values can contain format strings which refer to other values in +the same section, or values in the special default section. + +For example: + + something: %(dir)s/whatever + +would resolve the "%(dir)s" to the value of dir. All reference +expansions are done late, on demand. If a user needs to use a bare % in +a configuration file, she can escape it by writing %%. Other % usage +is considered a user error and raises `InterpolationSyntaxError`. 
+""" +class ExtendedInterpolation(Interpolation): + """Advanced variant of interpolation, supports the syntax used by +`zc.buildout`. Enables interpolation between sections. +""" if sys.version_info < (3, 13): @deprecated( "Deprecated since Python 3.2; removed in Python 3.13. Use `BasicInterpolation` or `ExtendedInterpolation` instead." ) class LegacyInterpolation(Interpolation): + """Deprecated interpolation used in old versions of ConfigParser. + Use BasicInterpolation or ExtendedInterpolation instead. +""" def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): + """ConfigParser that does not do interpolation. +""" _SECT_TMPL: ClassVar[str] # undocumented _OPT_TMPL: ClassVar[str] # undocumented _OPT_NV_TMPL: ClassVar[str] # undocumented @@ -264,18 +432,69 @@ class RawConfigParser(_Parser): def __iter__(self) -> Iterator[str]: ... def __contains__(self, key: object) -> bool: ... def defaults(self) -> _Section: ... - def sections(self) -> _SectionNameList: ... - def add_section(self, section: _SectionName) -> None: ... - def has_section(self, section: _SectionName) -> bool: ... - def options(self, section: _SectionName) -> list[str]: ... - def has_option(self, section: _SectionName, option: str) -> bool: ... - def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... - def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... - def read_string(self, string: str, source: str = "") -> None: ... - def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... + def sections(self) -> _SectionNameList: + """Return a list of section names, excluding [DEFAULT] +""" + def add_section(self, section: _SectionName) -> None: + """Create a new section in the configuration. + +Raise DuplicateSectionError if a section by the specified name +already exists. Raise ValueError if name is DEFAULT. +""" + def has_section(self, section: _SectionName) -> bool: + """Indicate whether the named section is present in the configuration. + +The DEFAULT section is not acknowledged. +""" + def options(self, section: _SectionName) -> list[str]: + """Return a list of option names for the given section name. +""" + def has_option(self, section: _SectionName, option: str) -> bool: + """Check for the existence of a given option in a given section. +If the specified `section` is None or an empty string, DEFAULT is +assumed. If the specified `section` does not exist, returns False. +""" + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: + """Read and parse a filename or an iterable of filenames. + +Files that cannot be opened are silently ignored; this is +designed so that you can specify an iterable of potential +configuration file locations (e.g. current directory, user's +home directory, systemwide directory), and all existing +configuration files in the iterable will be read. A single +filename may also be given. + +Return list of successfully read files. +""" + def read_file(self, f: Iterable[str], source: str | None = None) -> None: + """Like read() but the argument must be a file-like object. + +The `f` argument must be iterable, returning one line at a time. +Optional second argument is the `source` specifying the name of the +file being read. If not given, it is taken from f.name. If `f` has no +`name` attribute, `` is used. 
+""" + def read_string(self, string: str, source: str = "") -> None: + """Read configuration from a given string. +""" + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: + """Read configuration from a dictionary. + +Keys are section names, values are dictionaries with keys and values +that should be present in the section. If the used dictionary type +preserves order, sections and their keys will be added in order. + +All types held in the dictionary are converted to strings during +reading, including section names, option names and keys. + +Optional second argument is the `source` specifying the name of the +dictionary being read. +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `parser.read_file()` instead.") - def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: + """Deprecated, use read_file instead. +""" # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload @@ -308,27 +527,81 @@ class RawConfigParser(_Parser): ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: ... + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: + """Get an option value for a given section. + +If `vars` is provided, it must be a dictionary. The option is looked up +in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. +If the key is not found and `fallback` is provided, it is used as +a fallback value. `None` can be provided as a `fallback` value. + +If interpolation is enabled and the optional argument `raw` is False, +all interpolations are expanded in the return values. + +Arguments `raw`, `vars`, and `fallback` are keyword only. + +The section DEFAULT is special. +""" @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T ) -> str | _T | MaybeNone: ... @overload - def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: + """Return a list of (name, value) tuples for each option in a section. + +All % interpolations are expanded in the return values, based on the +defaults passed into the constructor, unless the optional argument +`raw` is true. Additional substitutions may be provided using the +`vars` argument, which must be a dictionary whose contents overrides +any pre-existing defaults. + +The section DEFAULT is special. +""" @overload def items(self, section: _SectionName, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... - def set(self, section: _SectionName, option: str, value: str | None = None) -> None: ... - def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... - def remove_option(self, section: _SectionName, option: str) -> bool: ... - def remove_section(self, section: _SectionName) -> bool: ... + def set(self, section: _SectionName, option: str, value: str | None = None) -> None: + """Set an option. 
+""" + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: + """Write an .ini-format representation of the configuration state. + +If `space_around_delimiters` is True (the default), delimiters +between keys and values are surrounded by spaces. + +Please note that comments in the original configuration file are not +preserved when writing the configuration back. +""" + def remove_option(self, section: _SectionName, option: str) -> bool: + """Remove an option. +""" + def remove_section(self, section: _SectionName) -> bool: + """Remove a file section. +""" def optionxform(self, optionstr: str) -> str: ... @property def converters(self) -> ConverterMapping: ... class ConfigParser(RawConfigParser): + """ConfigParser implementing interpolation. +""" # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... + def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: + """Get an option value for a given section. + +If `vars` is provided, it must be a dictionary. The option is looked up +in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. +If the key is not found and `fallback` is provided, it is used as +a fallback value. `None` can be provided as a `fallback` value. + +If interpolation is enabled and the optional argument `raw` is False, +all interpolations are expanded in the return values. + +Arguments `raw`, `vars`, and `fallback` are keyword only. + +The section DEFAULT is special. +""" @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T @@ -336,10 +609,16 @@ class ConfigParser(RawConfigParser): if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `ConfigParser` instead.") - class SafeConfigParser(ConfigParser): ... + class SafeConfigParser(ConfigParser): + """ConfigParser alias for backwards compatibility purposes. +""" class SectionProxy(MutableMapping[str, str]): - def __init__(self, parser: RawConfigParser, name: str) -> None: ... + """A proxy for a single section from a parser. +""" + def __init__(self, parser: RawConfigParser, name: str) -> None: + """Creates a view on a section of the specified `name` in `parser`. +""" def __getitem__(self, key: str) -> str: ... def __setitem__(self, key: str, value: str) -> None: ... def __delitem__(self, key: str) -> None: ... @@ -361,7 +640,13 @@ class SectionProxy(MutableMapping[str, str]): vars: _Section | None = None, _impl: Any | None = None, **kwargs: Any, # passed to the underlying parser's get() method - ) -> str | None: ... + ) -> str | None: + """Get an option value. + +Unless `fallback` is provided, `None` will be returned if the option +is not found. + +""" @overload def get( self, @@ -391,6 +676,12 @@ class SectionProxy(MutableMapping[str, str]): def __getattr__(self, key: str) -> Callable[..., Any]: ... class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): + """Enables reuse of get*() methods between the parser and section proxies. + +If a parser class implements a getter directly, the value for the given +key will be ``None``. The presence of the converter name here enables +section proxies to find and use the implementation on the parser class. 
+""" GETTERCRE: ClassVar[Pattern[Any]] def __init__(self, parser: RawConfigParser) -> None: ... def __getitem__(self, key: str) -> _ConverterCallback: ... @@ -400,20 +691,35 @@ class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): def __len__(self) -> int: ... class Error(Exception): + """Base class for ConfigParser exceptions. +""" message: str def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): + """Raised when no section matches a requested option. +""" section: _SectionName def __init__(self, section: _SectionName) -> None: ... class DuplicateSectionError(Error): + """Raised when a section is repeated in an input source. + +Possible repetitions that raise this exception are: multiple creation +using the API or in strict parsers when a section is found more than once +in a single input file, string or dictionary. +""" section: _SectionName source: str | None lineno: int | None def __init__(self, section: _SectionName, source: str | None = None, lineno: int | None = None) -> None: ... class DuplicateOptionError(Error): + """Raised by strict parsers when an option is repeated in an input source. + +Current implementation raises this exception only when an option is found +more than once in a single file, string or dictionary. +""" section: _SectionName option: str source: str | None @@ -421,25 +727,40 @@ class DuplicateOptionError(Error): def __init__(self, section: _SectionName, option: str, source: str | None = None, lineno: int | None = None) -> None: ... class NoOptionError(Error): + """A requested option was not found. +""" section: _SectionName option: str def __init__(self, option: str, section: _SectionName) -> None: ... class InterpolationError(Error): + """Base class for interpolation-related exceptions. +""" section: _SectionName option: str def __init__(self, option: str, section: _SectionName, msg: str) -> None: ... class InterpolationDepthError(InterpolationError): + """Raised when substitutions are nested too deeply. +""" def __init__(self, option: str, section: _SectionName, rawval: object) -> None: ... class InterpolationMissingOptionError(InterpolationError): + """A string substitution required a setting which was not available. +""" reference: str def __init__(self, option: str, section: _SectionName, rawval: object, reference: str) -> None: ... -class InterpolationSyntaxError(InterpolationError): ... +class InterpolationSyntaxError(InterpolationError): + """Raised when the source text contains invalid syntax. + +Current implementation raises this exception when the source text into +which substitutions are made does not conform to the required syntax. +""" class ParsingError(Error): + """Raised when a configuration file does not follow legal syntax. +""" source: str errors: list[tuple[int, str]] if sys.version_info >= (3, 13): @@ -462,25 +783,38 @@ class ParsingError(Error): if sys.version_info < (3, 12): @property @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") - def filename(self) -> str: ... + def filename(self) -> str: + """Deprecated, use `source'. +""" @filename.setter @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") def filename(self, value: str) -> None: ... class MissingSectionHeaderError(ParsingError): + """Raised when a key-value pair is found before any section header. +""" lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... 
if sys.version_info >= (3, 13): class MultilineContinuationError(ParsingError): + """Raised when a key without value is followed by continuation line +""" lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... if sys.version_info >= (3, 14): class UnnamedSectionDisabledError(Error): + """Raised when an attempt to use UNNAMED_SECTION is made with the +feature disabled. +""" msg: Final = "Support for UNNAMED_SECTION is disabled." def __init__(self) -> None: ... - class InvalidWriteError(Error): ... + class InvalidWriteError(Error): + """Raised when attempting to write data that the parser would read back differently. +ex: writing a key which begins with the section header pattern would read back as a +new section +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi index 383a1b7f334b4..dca6225851d58 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi @@ -1,3 +1,5 @@ +"""Utilities for with-statement contexts. See PEP 343. +""" import abc import sys from _typeshed import FileDescriptorOrPath, Unused @@ -47,30 +49,55 @@ _CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any, Any] | _ExitFunc) # allowlist for use as a Protocol. @runtime_checkable class AbstractContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + """An abstract base class for context managers. +""" __slots__ = () - def __enter__(self) -> _T_co: ... + def __enter__(self) -> _T_co: + """Return `self` upon entering the runtime context. +""" @abstractmethod def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> _ExitT_co: ... + ) -> _ExitT_co: + """Raise any exception triggered within the runtime context. +""" # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @runtime_checkable class AbstractAsyncContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + """An abstract base class for asynchronous context managers. +""" __slots__ = () - async def __aenter__(self) -> _T_co: ... + async def __aenter__(self) -> _T_co: + """Return `self` upon entering the runtime context. +""" @abstractmethod async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> _ExitT_co: ... + ) -> _ExitT_co: + """Raise any exception triggered within the runtime context. +""" class ContextDecorator: - def _recreate_cm(self) -> Self: ... + """A base class or mixin that enables context managers to work as decorators. +""" + def _recreate_cm(self) -> Self: + """Return a recreated instance of self. + +Allows an otherwise one-shot context manager like +_GeneratorContextManager to support use as +a decorator via implicit recreation. + +This is a private interface just for _GeneratorContextManager. +See issue #11647 for details. +""" def __call__(self, func: _F) -> _F: ... class _GeneratorContextManagerBase(Generic[_G_co]): + """Shared functionality for @contextmanager and @asynccontextmanager. +""" # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. 
see #6676 def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... gen: _G_co @@ -83,17 +110,49 @@ class _GeneratorContextManager( AbstractContextManager[_T_co, bool | None], ContextDecorator, ): + """Helper for @contextmanager decorator. +""" def __exit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... -def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: + """@contextmanager decorator. + +Typical usage: + + @contextmanager + def some_generator(): + + try: + yield + finally: + + +This makes this: + + with some_generator() as : + + +equivalent to this: + + + try: + = + + finally: + +""" if sys.version_info >= (3, 10): _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) class AsyncContextDecorator: - def _recreate_cm(self) -> Self: ... + """A base class or mixin that enables async context managers to work as decorators. +""" + def _recreate_cm(self) -> Self: + """Return a recreated instance of self. + """ def __call__(self, func: _AF) -> _AF: ... class _AsyncGeneratorContextManager( @@ -101,6 +160,8 @@ if sys.version_info >= (3, 10): AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator, ): + """Helper for @asynccontextmanager decorator. +""" async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... @@ -109,11 +170,39 @@ else: class _AsyncGeneratorContextManager( _GeneratorContextManagerBase[AsyncGenerator[_T_co, _SendT_contra]], AbstractAsyncContextManager[_T_co, bool | None] ): + """Helper for @asynccontextmanager decorator. +""" async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... -def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... +def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: + """@asynccontextmanager decorator. + +Typical usage: + + @asynccontextmanager + async def some_async_generator(): + + try: + yield + finally: + + +This makes this: + + async with some_async_generator() as : + + +equivalent to this: + + + try: + = + + finally: + +""" @type_check_only class _SupportsClose(Protocol): def close(self) -> object: ... @@ -121,6 +210,22 @@ class _SupportsClose(Protocol): _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) class closing(AbstractContextManager[_SupportsCloseT, None]): + """Context to automatically close something at the end of a block. + +Code like this: + + with closing(.open()) as f: + + +is equivalent to this: + + f = .open() + try: + + finally: + f.close() + +""" def __init__(self, thing: _SupportsCloseT) -> None: ... def __exit__(self, *exc_info: Unused) -> None: ... @@ -132,10 +237,36 @@ if sys.version_info >= (3, 10): _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) class aclosing(AbstractAsyncContextManager[_SupportsAcloseT, None]): + """Async context manager for safely finalizing an asynchronously cleaned-up +resource such as an async generator, calling its ``aclose()`` method. 
+ +Code like this: + + async with aclosing(.fetch()) as agen: + + +is equivalent to this: + + agen = .fetch() + try: + + finally: + await agen.aclose() + +""" def __init__(self, thing: _SupportsAcloseT) -> None: ... async def __aexit__(self, *exc_info: Unused) -> None: ... class suppress(AbstractContextManager[None, bool]): + """Context manager to suppress specified exceptions + +After the exception is suppressed, execution proceeds with the next +statement following the with statement. + + with suppress(FileNotFoundError): + os.remove(somefile) + # Execution still resumes here if the file was already removed +""" def __init__(self, *exceptions: type[BaseException]) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None @@ -147,19 +278,62 @@ class _RedirectStream(AbstractContextManager[_T_io, None]): self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> None: ... -class redirect_stdout(_RedirectStream[_T_io]): ... -class redirect_stderr(_RedirectStream[_T_io]): ... +class redirect_stdout(_RedirectStream[_T_io]): + """Context manager for temporarily redirecting stdout to another file. + +# How to send help() to stderr +with redirect_stdout(sys.stderr): + help(dir) + +# How to write help() to a file +with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) +""" +class redirect_stderr(_RedirectStream[_T_io]): + """Context manager for temporarily redirecting stderr to another file. +""" class _BaseExitStack(Generic[_ExitT_co]): - def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: ... - def push(self, exit: _CM_EF) -> _CM_EF: ... - def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... - def pop_all(self) -> Self: ... + """A base class for ExitStack and AsyncExitStack. +""" + def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: + """Enters the supplied context manager. + +If successful, also pushes its __exit__ method as a callback and +returns the result of the __enter__ method. +""" + def push(self, exit: _CM_EF) -> _CM_EF: + """Registers a callback with the standard __exit__ method signature. + +Can suppress exceptions the same way __exit__ method can. +Also accepts any object with an __exit__ method (registering a call +to the method instead of the object itself). +""" + def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: + """Registers an arbitrary callback and arguments. + +Cannot suppress exceptions. +""" + def pop_all(self) -> Self: + """Preserve the context stack by transferring it to a new instance. +""" # In reality this is a subclass of `AbstractContextManager`; # see #7961 for why we don't do that in the stub class ExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): - def close(self) -> None: ... + """Context manager for dynamic management of a stack of exit callbacks. + +For example: + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception. +""" + def close(self) -> None: + """Immediately unwind the context stack. +""" def __enter__(self) -> Self: ... 
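To illustrate ExitStack's last-in, first-out unwinding together with redirect_stdout (both documented above), a small sketch; the callback registered later runs before the redirect is undone:

    import io
    from contextlib import ExitStack, redirect_stdout

    out = io.StringIO()
    with ExitStack() as stack:
        stack.enter_context(redirect_stdout(out))   # undone last
        stack.callback(print, "cleanup")            # runs first on exit
        print("body")

    # The callback fired while stdout was still redirected.
    assert out.getvalue() == "body\ncleanup\n"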
def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -173,12 +347,41 @@ _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any, Any] | _Exit # In reality this is a subclass of `AbstractAsyncContextManager`; # see #7961 for why we don't do that in the stub class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): - async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: ... - def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... + """Async context manager for dynamic management of a stack of exit +callbacks. + +For example: + async with AsyncExitStack() as stack: + connections = [await stack.enter_async_context(get_connection()) + for i in range(5)] + # All opened connections will automatically be released at the + # end of the async with statement, even if attempts to open a + # connection later in the list raise an exception. +""" + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: + """Enters the supplied async context manager. + +If successful, also pushes its __aexit__ method as a callback and +returns the result of the __aenter__ method. +""" + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: + """Registers a coroutine function with the standard __aexit__ method +signature. + +Can suppress exceptions the same way __aexit__ method can. +Also accepts any object with an __aexit__ method (registering a call +to the method instead of the object itself). +""" def push_async_callback( self, callback: Callable[_P, Awaitable[_T]], /, *args: _P.args, **kwds: _P.kwargs - ) -> Callable[_P, Awaitable[_T]]: ... - async def aclose(self) -> None: ... + ) -> Callable[_P, Awaitable[_T]]: + """Registers an arbitrary coroutine function and arguments. + +Cannot suppress exceptions. +""" + async def aclose(self) -> None: + """Immediately unwind the context stack. +""" async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -186,6 +389,15 @@ class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T, None], AbstractAsyncContextManager[_T, None]): + """Context manager that does no additional processing. + +Used as a stand-in for a normal context manager, when a particular +block of code is only sometimes used with a normal context manager: + +cm = optional_cm if condition else nullcontext() +with cm: + # Perform operation, using optional_cm if condition is True +""" enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @@ -198,6 +410,15 @@ if sys.version_info >= (3, 10): else: class nullcontext(AbstractContextManager[_T, None]): + """Context manager that does no additional processing. + + Used as a stand-in for a normal context manager, when a particular + block of code is only sometimes used with a normal context manager: + + cm = optional_cm if condition else nullcontext() + with cm: + # Perform operation, using optional_cm if condition is True + """ enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... 
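A small sketch of nullcontext as a stand-in when a real context manager is only sometimes needed; the function and default value are illustrative:

    from contextlib import nullcontext

    def run(cm=None):
        with (cm if cm is not None else nullcontext("default")) as value:
            return value

    assert run() == "default"   # nullcontext() returns its enter_result unchanged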
@@ -210,6 +431,8 @@ if sys.version_info >= (3, 11): _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None, None], Generic[_T_fd_or_any_path]): + """Non thread-safe context manager to change the current working directory. +""" path: _T_fd_or_any_path def __init__(self, path: _T_fd_or_any_path) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi index 373899ea2635f..07f9b54735f97 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi @@ -1,3 +1,53 @@ +"""Generic (shallow and deep) copying operations. + +Interface summary: + + import copy + + x = copy.copy(y) # make a shallow copy of y + x = copy.deepcopy(y) # make a deep copy of y + x = copy.replace(y, a=1, b=2) # new object with fields replaced, as defined by `__replace__` + +For module specific errors, copy.Error is raised. + +The difference between shallow and deep copying is only relevant for +compound objects (objects that contain other objects, like lists or +class instances). + +- A shallow copy constructs a new compound object and then (to the + extent possible) inserts *the same objects* into it that the + original contains. + +- A deep copy constructs a new compound object and then, recursively, + inserts *copies* into it of the objects found in the original. + +Two problems often exist with deep copy operations that don't exist +with shallow copy operations: + + a) recursive objects (compound objects that, directly or indirectly, + contain a reference to themselves) may cause a recursive loop + + b) because deep copy copies *everything* it may copy too much, e.g. + administrative data structures that should be shared even between + copies + +Python's deep copy operation avoids these problems by: + + a) keeping a table of objects already copied during the current + copying pass + + b) letting user-defined classes override the copying operation or the + set of components copied + +This version does not copy types like module, class, function, method, +nor stack trace, stack frame, nor file, socket, window, nor any +similar types. + +Classes can use the same interfaces to control copying that they use +to control pickling: they can define methods called __getinitargs__(), +__getstate__() and __setstate__(). See the documentation for module +"pickle" for information on these methods. +""" import sys from typing import Any, Protocol, TypeVar, type_check_only @@ -15,13 +65,26 @@ class _SupportsReplace(Protocol[_RT_co]): PyStringMap: Any # Note: memo and _nil are internal kwargs. -def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ... -def copy(x: _T) -> _T: ... +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: + """Deep copy operation on arbitrary Python objects. + +See the module's __doc__ string for more info. +""" +def copy(x: _T) -> _T: + """Shallow copy operation on arbitrary Python objects. + +See the module's __doc__ string for more info. +""" if sys.version_info >= (3, 13): __all__ += ["replace"] # The types accepted by `**changes` match those of `obj.__replace__`. - def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: ... + def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: + """Return a new object replacing specified fields with new values. 
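The shallow/deep distinction described in the copy module docstring above, condensed into a short check:

    import copy

    outer = [[1, 2], [3, 4]]
    shallow = copy.copy(outer)
    deep = copy.deepcopy(outer)

    outer[0].append(99)
    assert shallow[0] == [1, 2, 99]   # shallow copy shares the inner lists
    assert deep[0] == [1, 2]          # deep copy duplicated them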
+ +This is especially useful for immutable objects, like named tuples or +frozen dataclasses. +""" class Error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi index 8f7fd957fc526..888e90710ea73 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi @@ -1,3 +1,8 @@ +"""Helper to provide extensibility for pickle. + +This is only useful to add pickle support for extension types defined in +C, not for instances of user-defined classes. +""" from collections.abc import Callable, Hashable from typing import Any, SupportsInt, TypeVar from typing_extensions import TypeAlias @@ -13,8 +18,12 @@ def pickle( constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, ) -> None: ... def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... -def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... -def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... +def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: + """Register an extension code. +""" +def remove_extension(module: Hashable, name: Hashable, code: int) -> None: + """Unregister an extension code. For testing only. +""" def clear_extension_cache() -> None: ... _DispatchTableType: TypeAlias = dict[type, Callable[[Any], str | _Reduce[Any]]] # imported by multiprocessing.reduction diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi index f926321969897..0dde139555d77 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi @@ -1,3 +1,5 @@ +"""Wrapper to the POSIX crypt library call and associated functionality. +""" import sys from typing import Final, NamedTuple, type_check_only from typing_extensions import disjoint_base @@ -11,10 +13,16 @@ if sys.platform != "win32": total_size: int if sys.version_info >= (3, 12): - class _Method(_MethodBase): ... + class _Method(_MethodBase): + """Class representing a salt method per the Modular Crypt Format or the + legacy 2-character crypt method. +""" else: @disjoint_base - class _Method(_MethodBase): ... + class _Method(_MethodBase): + """Class representing a salt method per the Modular Crypt Format or the + legacy 2-character crypt method. +""" METHOD_CRYPT: Final[_Method] METHOD_MD5: Final[_Method] @@ -22,5 +30,19 @@ if sys.platform != "win32": METHOD_SHA512: Final[_Method] METHOD_BLOWFISH: Final[_Method] methods: list[_Method] - def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... - def crypt(word: str, salt: str | _Method | None = None) -> str: ... + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: + """Generate a salt for the specified method. + + If not specified, the strongest available method will be used. + + """ + def crypt(word: str, salt: str | _Method | None = None) -> str: + """Return a string representing the one-way hash of a password, with a salt + prepended. + + If ``salt`` is not specified or is ``None``, the strongest + available method will be selected and a salt generated. Otherwise, + ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as + returned by ``crypt.mksalt()``. 
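A sketch of copyreg.pickle() registering a reduction function; copy() consults the same dispatch table, so the registration also affects copying. The Point class and reducer are illustrative, not part of the stubs:

    import copy
    import copyreg

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        return (Point, (p.x, p.y))      # (constructor, args) per the pickle protocol

    copyreg.pickle(Point, reduce_point)
    q = copy.copy(Point(1, 2))
    assert (q.x, q.y) == (1, 2)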
+ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi index 2c8e7109cdfc3..198837aa56161 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi @@ -1,3 +1,66 @@ +""" +CSV parsing and writing. + +This module provides classes that assist in the reading and writing +of Comma Separated Value (CSV) files, and implements the interface +described by PEP 305. Although many CSV files are simple to parse, +the format is not formally defined by a stable specification and +is subtle enough that parsing lines of a CSV file with something +like line.split(",") is bound to fail. The module supports three +basic APIs: reading, writing, and registration of dialects. + + +DIALECT REGISTRATION: + +Readers and writers support a dialect argument, which is a convenient +handle on a group of settings. When the dialect argument is a string, +it identifies one of the dialects previously registered with the module. +If it is a class or instance, the attributes of the argument are used as +the settings for the reader or writer: + + class excel: + delimiter = ',' + quotechar = '"' + escapechar = None + doublequote = True + skipinitialspace = False + lineterminator = '\\r\\n' + quoting = QUOTE_MINIMAL + +SETTINGS: + + * quotechar - specifies a one-character string to use as the + quoting character. It defaults to '"'. + * delimiter - specifies a one-character string to use as the + field separator. It defaults to ','. + * skipinitialspace - specifies how to interpret spaces which + immediately follow a delimiter. It defaults to False, which + means that spaces immediately following a delimiter is part + of the following field. + * lineterminator - specifies the character sequence which should + terminate rows. + * quoting - controls when quotes should be generated by the writer. + It can take on any of the following module constants: + + csv.QUOTE_MINIMAL means only when required, for example, when a + field contains either the quotechar or the delimiter + csv.QUOTE_ALL means that quotes are always placed around fields. + csv.QUOTE_NONNUMERIC means that quotes are always placed around + fields which do not parse as integers or floating-point + numbers. + csv.QUOTE_STRINGS means that quotes are always placed around + fields which are strings. Note that the Python value None + is not a string. + csv.QUOTE_NOTNULL means that quotes are only placed around fields + that are not the Python value None. + csv.QUOTE_NONE means that quotes are never placed around fields. + * escapechar - specifies a one-character string used to escape + the delimiter when quoting is set to QUOTE_NONE. + * doublequote - controls the handling of quotes inside fields. When + True, two consecutive quotes are interpreted as one during read, + and when writing, each quote character embedded in the data is + written as two quotes +""" import sys from _csv import ( QUOTE_ALL as QUOTE_ALL, @@ -59,6 +122,13 @@ if sys.version_info < (3, 13): _T = TypeVar("_T") class Dialect: + """Describe a CSV dialect. + +This must be subclassed (see csv.excel). Valid attributes are: +delimiter, quotechar, escapechar, doublequote, skipinitialspace, +lineterminator, quoting. + +""" delimiter: str quotechar: str | None escapechar: str | None @@ -69,9 +139,15 @@ class Dialect: strict: bool def __init__(self) -> None: ... -class excel(Dialect): ... -class excel_tab(excel): ... -class unix_dialect(Dialect): ... 
+class excel(Dialect): + """Describe the usual properties of Excel-generated CSV files. +""" +class excel_tab(excel): + """Describe the usual properties of Excel-generated TAB-delimited files. +""" +class unix_dialect(Dialect): + """Describe the usual properties of Unix-generated CSV files. +""" class DictReader(Generic[_T]): fieldnames: Sequence[_T] | None @@ -119,7 +195,11 @@ class DictReader(Generic[_T]): def __iter__(self) -> Self: ... def __next__(self) -> dict[_T | Any, str | Any]: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class DictWriter(Generic[_T]): fieldnames: Collection[_T] @@ -147,9 +227,20 @@ class DictWriter(Generic[_T]): def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class Sniffer: + """ +"Sniffs" the format of a CSV file (i.e. delimiter, quotechar) +Returns a Dialect object. +""" preferred: list[str] - def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: + """ +Returns a dialect (or None) corresponding to the sample +""" def has_header(self, sample: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi index 19bd261c67e06..ae616f3576ca2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi @@ -1,3 +1,5 @@ +"""create and manipulate C data types in Python +""" import sys from _ctypes import ( RTLD_GLOBAL as RTLD_GLOBAL, @@ -45,12 +47,23 @@ _DLLT = TypeVar("_DLLT", bound=CDLL) if sys.version_info >= (3, 14): @overload @deprecated("ctypes.POINTER with string") - def POINTER(cls: str) -> type[Any]: ... + def POINTER(cls: str) -> type[Any]: + """Create and return a new ctypes pointer type. + +Pointer types are cached and reused internally, +so calling this function repeatedly is cheap. +""" @overload def POINTER(cls: None) -> type[c_void_p]: ... @overload def POINTER(cls: type[_CT]) -> type[_Pointer[_CT]]: ... - def pointer(obj: _CT) -> _Pointer[_CT]: ... + def pointer(obj: _CT) -> _Pointer[_CT]: + """Create a new pointer instance, pointing to 'obj'. + +The returned object is of the type POINTER(type(obj)). Note that if you +just want to pass a pointer to an object to a foreign function call, you +should use byref(obj) which is much faster. +""" else: from _ctypes import POINTER as POINTER, pointer as pointer @@ -77,6 +90,19 @@ else: _NameTypes: TypeAlias = str | None class CDLL: + """An instance of this class represents a loaded dll/shared +library, exporting functions using the standard C calling +convention (named 'cdecl' on Windows). + +The exported functions can be accessed as attributes, or by +indexing with the function name. Examples: + +.qsort -> callable object +['qsort'] -> callable object + +Calling the functions releases the Python GIL during the call and +reacquires it afterwards. 
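A round-trip sketch with DictWriter and DictReader as documented above; note that field values are read back as strings:

    import csv
    import io

    buf = io.StringIO()
    writer = csv.DictWriter(buf, fieldnames=["name", "age"])
    writer.writeheader()
    writer.writerow({"name": "Ada", "age": 36})

    rows = list(csv.DictReader(io.StringIO(buf.getvalue())))
    assert rows == [{"name": "Ada", "age": "36"}]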
+""" _func_flags_: ClassVar[int] _func_restype_: ClassVar[type[_CDataType]] _name: str @@ -98,14 +124,22 @@ if sys.platform == "win32": class OleDLL(CDLL): ... class WinDLL(CDLL): ... -class PyDLL(CDLL): ... +class PyDLL(CDLL): + """This class represents the Python library itself. It allows +accessing Python API functions. The GIL is not released, and +Python exceptions are handled correctly. +""" class LibraryLoader(Generic[_DLLT]): def __init__(self, dlltype: type[_DLLT]) -> None: ... def __getattr__(self, name: str) -> _DLLT: ... def __getitem__(self, name: str) -> _DLLT: ... def LoadLibrary(self, name: str) -> _DLLT: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" cdll: LibraryLoader[CDLL] if sys.platform == "win32": @@ -133,7 +167,22 @@ def CFUNCTYPE( *argtypes: type[_CData | _CDataType], use_errno: bool = False, use_last_error: bool = False, -) -> type[_CFunctionType]: ... +) -> type[_CFunctionType]: + """CFUNCTYPE(restype, *argtypes, + use_errno=False, use_last_error=False) -> function prototype. + +restype: the result type +argtypes: a sequence specifying the argument types + +The function prototype can be called in different ways to create a +callable object: + +prototype(integer address) -> foreign function +prototype(callable) -> create and return a C callable function from callable +prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method +prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal +prototype((function name, dll object)[, paramflags]) -> foreign function exported by name +""" if sys.platform == "win32": def WINFUNCTYPE( @@ -157,11 +206,19 @@ _CVoidConstPLike: TypeAlias = _CVoidPLike | bytes _CastT = TypeVar("_CastT", bound=_CanCastTo) def cast(obj: _CData | _CDataType | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... -def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: + """create_string_buffer(aBytes) -> character array +create_string_buffer(anInteger) -> character array +create_string_buffer(aBytes, anInteger) -> character array +""" c_buffer = create_string_buffer -def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: + """create_unicode_buffer(aString) -> character array +create_unicode_buffer(anInteger) -> character array +create_unicode_buffer(aString, anInteger) -> character array +""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -200,20 +257,36 @@ class _MemsetFunctionType(_CFunctionType): memset: _MemsetFunctionType -def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: ... +def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: + """string_at(ptr[, size]) -> string + +Return the byte string at void *ptr. +""" if sys.platform == "win32": def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... -def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: ... +def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: + """wstring_at(ptr[, size]) -> string + +Return the wide-character string at void *ptr. 
+""" if sys.version_info >= (3, 14): - def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: ... + def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: + """memoryview_at(ptr, size[, readonly]) -> memoryview + +Return a memoryview representing the memory at void *ptr. +""" class py_object(_CanCastTo, _SimpleCData[_T]): _type_: ClassVar[Literal["O"]] if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class c_bool(_SimpleCData[bool]): _type_: ClassVar[Literal["?"]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi index 97852f67aa6eb..6d9542f1397fd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi @@ -4,13 +4,21 @@ from ctypes import Structure, Union # At runtime, the native endianness is an alias for Structure, # while the other is a subclass with a metaclass added in. class BigEndianStructure(Structure): + """Structure with big endian byte order +""" __slots__ = () -class LittleEndianStructure(Structure): ... +class LittleEndianStructure(Structure): + """Structure base class +""" # Same thing for these: one is an alias of Union at runtime if sys.version_info >= (3, 11): class BigEndianUnion(Union): + """Union with big endian byte order +""" __slots__ = () - class LittleEndianUnion(Union): ... + class LittleEndianUnion(Union): + """Union base class +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi index c5dd954660638..7f7d32926292f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi @@ -1,3 +1,10 @@ +""" +Enough Mach-O to make your head spin. + +See the relevant header files in /usr/include/mach-o + +And also Apple's documentation. +""" from typing import Final __version__: Final[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi index c7e94daa21497..6de817768c9af 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi @@ -1,8 +1,22 @@ +""" +dyld emulation +""" from collections.abc import Mapping from ctypes.macholib.dylib import dylib_info as dylib_info from ctypes.macholib.framework import framework_info as framework_info __all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"] -def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... -def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: ... +def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: + """ +Find a library or framework using dyld semantics +""" +def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: + """ +Find a framework using dyld semantics in a very loose manner. 
+ +Will take input such as: + Python + Python.framework + Python.framework/Versions/Current +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi index 95945edfd155c..04df3c0802443 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi @@ -1,3 +1,6 @@ +""" +Generic dylib path manipulation +""" from typing import TypedDict, type_check_only __all__ = ["dylib_info"] @@ -11,4 +14,23 @@ class _DylibInfo(TypedDict): version: str | None suffix: str | None -def dylib_info(filename: str) -> _DylibInfo | None: ... +def dylib_info(filename: str) -> _DylibInfo | None: + """ +A dylib name can take one of the following four forms: + Location/Name.SomeVersion_Suffix.dylib + Location/Name.SomeVersion.dylib + Location/Name_Suffix.dylib + Location/Name.dylib + +returns None if not found or a mapping equivalent to: + dict( + location='Location', + name='Name.SomeVersion_Suffix.dylib', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) + +Note that SomeVersion and Suffix are optional and may be None +if not present. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi index e92bf3700e840..27840d212f2fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi @@ -1,3 +1,6 @@ +""" +Generic framework path manipulation +""" from typing import TypedDict, type_check_only __all__ = ["framework_info"] @@ -11,4 +14,23 @@ class _FrameworkInfo(TypedDict): version: str | None suffix: str | None -def framework_info(filename: str) -> _FrameworkInfo | None: ... +def framework_info(filename: str) -> _FrameworkInfo | None: + """ +A framework name can take one of the following four forms: + Location/Name.framework/Versions/SomeVersion/Name_Suffix + Location/Name.framework/Versions/SomeVersion/Name + Location/Name.framework/Name_Suffix + Location/Name.framework/Name + +returns None if not found, or a mapping equivalent to: + dict( + location='Location', + name='Name.framework/Versions/SomeVersion/Name_Suffix', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) + +Note that SomeVersion and Suffix are optional and may be None +if not present +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi index 4f18c1d8db345..fb80e2836bb75 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi @@ -6,6 +6,8 @@ if sys.platform == "win32": def find_msvcrt() -> str | None: ... if sys.version_info >= (3, 14): - def dllist() -> list[str]: ... + def dllist() -> list[str]: + """Return a list of loaded shared libraries in the current process. +""" def test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi index 3e32487ad99f2..cf0b1e31a0756 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi @@ -1,3 +1,14 @@ +"""curses + +The main package for curses support for Python. Normally used by importing +the package, and perhaps a particular module inside it. 
+ + import curses + from curses import textpad + curses.initscr() + ... + +""" import sys from _curses import * from _curses import window as window @@ -22,7 +33,13 @@ COLS: int COLORS: Final[int] COLOR_PAIRS: Final[int] -def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: ... +def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: + """Wrapper function that initializes curses and calls another function, +restoring normal keyboard/screen behavior on error. +The callable object 'func' is then passed the main window 'stdscr' +as its first argument, followed by any other arguments passed to +wrapper(). +""" # At runtime this class is unexposed and calls itself curses.ncurses_version. # That name would conflict with the actual curses.ncurses_version, which is diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi index 0234434b8c3de..04a9b0d89b0ab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi @@ -1,3 +1,5 @@ +"""Constants and membership tests for ASCII characters +""" from typing import Final, TypeVar _CharT = TypeVar("_CharT", str, int) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi index 861559d38bc5a..01e73ed12f3c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi @@ -1 +1,5 @@ +"""curses.panel + +Module for using panels with curses. +""" from _curses_panel import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi index 48ef67c9d85f0..238044287398d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi @@ -1,11 +1,45 @@ +"""Simple textbox editing widget with Emacs-like keybindings. +""" from _curses import window from collections.abc import Callable -def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: ... +def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: + """Draw a rectangle with corners at the provided upper-left +and lower-right coordinates. +""" class Textbox: + """Editing widget using the interior of a window object. + Supports the following Emacs-like key bindings: + +Ctrl-A Go to left edge of window. +Ctrl-B Cursor left, wrapping to previous line if appropriate. +Ctrl-D Delete character under cursor. +Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on). +Ctrl-F Cursor right, wrapping to next line when appropriate. +Ctrl-G Terminate, returning the window contents. +Ctrl-H Delete character backward. +Ctrl-J Terminate if the window is 1 line, otherwise insert newline. +Ctrl-K If line is blank, delete it, otherwise clear to end of line. +Ctrl-L Refresh screen. +Ctrl-N Cursor down; move down one line. +Ctrl-O Insert a blank line at cursor location. +Ctrl-P Cursor up; move up one line. + +Move operations do nothing if the cursor is at an edge where the movement +is not possible. 
The following synonyms are supported where possible: + +KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N +KEY_BACKSPACE = Ctrl-h +""" stripspaces: bool def __init__(self, win: window, insert_mode: bool = False) -> None: ... - def edit(self, validate: Callable[[int], int] | None = None) -> str: ... - def do_command(self, ch: str | int) -> None: ... - def gather(self) -> str: ... + def edit(self, validate: Callable[[int], int] | None = None) -> str: + """Edit in the widget window and collect the results. +""" + def do_command(self, ch: str | int) -> None: + """Process a single editing command. +""" + def gather(self) -> str: + """Collect and return the contents of the window. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi index 3a1c8cb5d62dd..d090d300d9c9b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi @@ -64,11 +64,46 @@ if sys.version_info >= (3, 10): class KW_ONLY: ... @overload -def asdict(obj: DataclassInstance) -> dict[str, Any]: ... +def asdict(obj: DataclassInstance) -> dict[str, Any]: + """Return the fields of a dataclass instance as a new dictionary mapping +field names to field values. + +Example usage:: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert asdict(c) == {'x': 1, 'y': 2} + +If given, 'dict_factory' will be used instead of built-in dict. +The function applies recursively to field values that are +dataclass instances. This will also look into built-in containers: +tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. +""" @overload def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: + """Return the fields of a dataclass instance as a new tuple of field values. + +Example usage:: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert astuple(c) == (1, 2) + +If given, 'tuple_factory' will be used instead of built-in tuple. +The function applies recursively to field values that are +dataclass instances. This will also look into built-in containers: +tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. +""" @overload def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... @@ -88,7 +123,20 @@ if sys.version_info >= (3, 11): kw_only: bool = False, slots: bool = False, weakref_slot: bool = False, - ) -> type[_T]: ... + ) -> type[_T]: + """Add dunder methods based on the fields defined in the class. + +Examines PEP 526 __annotations__ to determine fields. + +If init is true, an __init__() method is added to the class. If repr +is true, a __repr__() method is added. If order is true, rich +comparison dunder methods are added. If unsafe_hash is true, a +__hash__() method is added. If frozen is true, fields may not be +assigned to after instance creation. If match_args is true, the +__match_args__ tuple is added. If kw_only is true, then by default +all fields are keyword-only. If slots is true, a new class with a +__slots__ attribute is returned. +""" @overload def dataclass( cls: None = None, @@ -121,7 +169,21 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> type[_T]: ... 
+ ) -> type[_T]: + """Returns the same class as was passed in, with dunder methods + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. If match_args is true, + the __match_args__ tuple is added. If kw_only is true, then by + default all fields are keyword-only. If slots is true, an + __slots__ attribute is added. + """ @overload def dataclass( cls: None = None, @@ -150,7 +212,18 @@ else: order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - ) -> type[_T]: ... + ) -> type[_T]: + """Returns the same class as was passed in, with dunder methods + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. + """ @overload def dataclass( cls: None = None, @@ -255,7 +328,11 @@ class Field(Generic[_T]): ) -> None: ... def __set_name__(self, owner: Type[Any], name: str) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @@ -272,7 +349,22 @@ if sys.version_info >= (3, 14): metadata: Mapping[Any, Any] | None = None, kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., doc: str | None = None, - ) -> _T: ... + ) -> _T: + """Return an object to identify dataclass fields. + +default is the default value of the field. default_factory is a +0-argument function called to initialize a field's value. If init +is true, the field will be a parameter to the class's __init__() +function. If repr is true, the field will be included in the +object's repr(). If hash is true, the field will be included in the +object's hash(). If compare is true, the field will be used in +comparison functions. metadata, if specified, must be a mapping +which is stored but not otherwise examined by dataclass. If kw_only +is true, the field will become a keyword-only parameter to +__init__(). doc is an optional docstring for this field. + +It is an error to specify both default and default_factory. +""" @overload def field( *, @@ -312,7 +404,22 @@ elif sys.version_info >= (3, 10): compare: bool = True, metadata: Mapping[Any, Any] | None = None, kw_only: bool | Literal[_MISSING_TYPE.MISSING] = ..., - ) -> _T: ... + ) -> _T: + """Return an object to identify dataclass fields. + +default is the default value of the field. default_factory is a +0-argument function called to initialize a field's value. If init +is true, the field will be a parameter to the class's __init__() +function. If repr is true, the field will be included in the +object's repr(). If hash is true, the field will be included in the +object's hash(). 
If compare is true, the field will be used in +comparison functions. metadata, if specified, must be a mapping +which is stored but not otherwise examined by dataclass. If kw_only +is true, the field will become a keyword-only parameter to +__init__(). + +It is an error to specify both default and default_factory. +""" @overload def field( *, @@ -349,7 +456,20 @@ else: hash: bool | None = None, compare: bool = True, metadata: Mapping[Any, Any] | None = None, - ) -> _T: ... + ) -> _T: + """Return an object to identify dataclass fields. + + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is True, the field will be a parameter to the class's __init__() + function. If repr is True, the field will be included in the + object's repr(). If hash is True, the field will be included in + the object's hash(). If compare is True, the field will be used + in comparison functions. metadata, if specified, must be a + mapping which is stored but not otherwise examined by dataclass. + + It is an error to specify both default and default_factory. + """ @overload def field( *, @@ -373,11 +493,19 @@ else: metadata: Mapping[Any, Any] | None = None, ) -> Any: ... -def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: + """Return a tuple describing the fields of this dataclass. + +Accepts a dataclass or an instance of one. Tuple elements are of +type Field. +""" # HACK: `obj: Never` typing matches if object argument is using `Any` type. @overload -def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] +def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] + """Returns True if obj is a dataclass or an instance of a +dataclass. +""" @overload def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ... @overload @@ -413,7 +541,32 @@ if sys.version_info >= (3, 14): weakref_slot: bool = False, module: str | None = None, decorator: _DataclassFactory = ..., - ) -> type: ... + ) -> type: + """Return a new dynamically created dataclass. + +The dataclass name will be 'cls_name'. 'fields' is an iterable +of either (name), (name, type) or (name, type, Field) objects. If type is +omitted, use the string 'typing.Any'. Field objects are created by +the equivalent of calling 'field(name, type [, Field-info])'.:: + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + +is equivalent to:: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + +For the bases and namespace parameters, see the builtin type() function. + +The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, +slots, and weakref_slot are passed to dataclass(). + +If module parameter is defined, the '__module__' attribute of the dataclass is +set to that value. +""" elif sys.version_info >= (3, 12): def make_dataclass( @@ -433,7 +586,32 @@ elif sys.version_info >= (3, 12): slots: bool = False, weakref_slot: bool = False, module: str | None = None, - ) -> type: ... + ) -> type: + """Return a new dynamically created dataclass. + +The dataclass name will be 'cls_name'. 
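A sketch combining field(default_factory=...), fields() and is_dataclass() from the signatures above; the Config class is illustrative:

    from dataclasses import dataclass, field, fields, is_dataclass

    @dataclass
    class Config:
        name: str
        tags: list[str] = field(default_factory=list)   # mutable defaults need a factory

    assert is_dataclass(Config)
    assert [f.name for f in fields(Config)] == ["name", "tags"]
    assert Config("dev").tags == []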
'fields' is an iterable +of either (name), (name, type) or (name, type, Field) objects. If type is +omitted, use the string 'typing.Any'. Field objects are created by +the equivalent of calling 'field(name, type [, Field-info])'.:: + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + +is equivalent to:: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + +For the bases and namespace parameters, see the builtin type() function. + +The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, +slots, and weakref_slot are passed to dataclass(). + +If module parameter is defined, the '__module__' attribute of the dataclass is +set to that value. +""" elif sys.version_info >= (3, 11): def make_dataclass( @@ -452,7 +630,29 @@ elif sys.version_info >= (3, 11): kw_only: bool = False, slots: bool = False, weakref_slot: bool = False, - ) -> type: ... + ) -> type: + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'.:: + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to:: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ elif sys.version_info >= (3, 10): def make_dataclass( @@ -470,7 +670,29 @@ elif sys.version_info >= (3, 10): match_args: bool = True, kw_only: bool = False, slots: bool = False, - ) -> type: ... + ) -> type: + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ else: def make_dataclass( @@ -485,6 +707,41 @@ else: order: bool = False, unsafe_hash: bool = False, frozen: bool = False, - ) -> type: ... + ) -> type: + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). 
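The make_dataclass() docstring above in runnable form; class and field names are illustrative:

    from dataclasses import field, make_dataclass

    C = make_dataclass("C", ["x", ("y", int), ("z", int, field(default=0))])
    c = C(1, 2)
    assert (c.x, c.y, c.z) == (1, 2, 0)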
+ """ + +def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: + """Return a new object replacing specified fields with new values. + +This is especially useful for frozen classes. Example usage:: + + @dataclass(frozen=True) + class C: + x: int + y: int -def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: ... + c = C(1, 2) + c1 = replace(c, x=3) + assert c1.x == 3 and c1.y == 2 +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi index 8a0536c006d57..c3a35b19f358c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi @@ -1,3 +1,8 @@ +"""Specific date/time and related types. + +See https://data.iana.org/time-zones/tz-link.html for +time zone and DST data sources. +""" import sys from abc import abstractmethod from time import struct_time @@ -13,26 +18,44 @@ MINYEAR: Final = 1 MAXYEAR: Final = 9999 class tzinfo: + """Abstract base class for time zone info objects. +""" @abstractmethod - def tzname(self, dt: datetime | None, /) -> str | None: ... + def tzname(self, dt: datetime | None, /) -> str | None: + """datetime -> string name of time zone. +""" @abstractmethod - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: + """datetime -> timedelta showing offset from UTC, negative values indicating West of UTC +""" @abstractmethod - def dst(self, dt: datetime | None, /) -> timedelta | None: ... - def fromutc(self, dt: datetime, /) -> datetime: ... + def dst(self, dt: datetime | None, /) -> timedelta | None: + """datetime -> DST offset as timedelta positive east of UTC. +""" + def fromutc(self, dt: datetime, /) -> datetime: + """datetime in UTC -> datetime in local time. +""" # Alias required to avoid name conflicts with date(time).tzinfo. _TzInfo: TypeAlias = tzinfo @final class timezone(tzinfo): + """Fixed offset from UTC implementation of tzinfo. +""" utc: ClassVar[timezone] min: ClassVar[timezone] max: ClassVar[timezone] def __new__(cls, offset: timedelta, name: str = ...) -> Self: ... - def tzname(self, dt: datetime | None, /) -> str: ... - def utcoffset(self, dt: datetime | None, /) -> timedelta: ... - def dst(self, dt: datetime | None, /) -> None: ... + def tzname(self, dt: datetime | None, /) -> str: + """If name is specified when timezone is created, returns the name. Otherwise returns offset as 'UTC(+|-)HH:MM'. +""" + def utcoffset(self, dt: datetime | None, /) -> timedelta: + """Return fixed offset. +""" + def dst(self, dt: datetime | None, /) -> None: + """Return None. +""" def __hash__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... @@ -53,68 +76,124 @@ class _IsoCalendarDate(tuple[int, int, int]): @disjoint_base class date: + """date(year, month, day) --> date object +""" min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] def __new__(cls, year: SupportsIndex, month: SupportsIndex, day: SupportsIndex) -> Self: ... @classmethod - def fromtimestamp(cls, timestamp: float, /) -> Self: ... + def fromtimestamp(cls, timestamp: float, /) -> Self: + """Create a date from a POSIX timestamp. + +The timestamp is a number, e.g. created via time.time(), that is interpreted +as local time. +""" @classmethod - def today(cls) -> Self: ... + def today(cls) -> Self: + """Current date or datetime: same as self.__class__.fromtimestamp(time.time()). 
+""" @classmethod - def fromordinal(cls, n: int, /) -> Self: ... + def fromordinal(cls, n: int, /) -> Self: + """int -> date corresponding to a proleptic Gregorian ordinal. +""" @classmethod - def fromisoformat(cls, date_string: str, /) -> Self: ... + def fromisoformat(cls, date_string: str, /) -> Self: + """str -> Construct a date from a string in ISO 8601 format. +""" @classmethod - def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: + """int, int, int -> Construct a date from the ISO year, week number and weekday. + +This is the inverse of the date.isocalendar() function +""" @property def year(self) -> int: ... @property def month(self) -> int: ... @property def day(self) -> int: ... - def ctime(self) -> str: ... + def ctime(self) -> str: + """Return ctime() style string. +""" if sys.version_info >= (3, 14): @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: ... + def strptime(cls, date_string: str, format: str, /) -> Self: + """string, format -> new date parsed from a string (like time.strptime()). +""" # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): - def strftime(self, format: str) -> str: ... + def strftime(self, format: str) -> str: + """format -> strftime() style string. +""" else: - def strftime(self, format: str, /) -> str: ... + def strftime(self, format: str, /) -> str: + """format -> strftime() style string. +""" - def __format__(self, fmt: str, /) -> str: ... - def isoformat(self) -> str: ... - def timetuple(self) -> struct_time: ... - def toordinal(self) -> int: ... + def __format__(self, fmt: str, /) -> str: + """Formats self with strftime. +""" + def isoformat(self) -> str: + """Return string in ISO 8601 format, YYYY-MM-DD. +""" + def timetuple(self) -> struct_time: + """Return time tuple, compatible with time.localtime(). +""" + def toordinal(self) -> int: + """Return proleptic Gregorian ordinal. January 1 of year 1 is day 1. +""" if sys.version_info >= (3, 13): - def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: + """The same as replace(). +""" - def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... + def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: + """Return date with new specified fields. +""" def __le__(self, value: date, /) -> bool: ... def __lt__(self, value: date, /) -> bool: ... def __ge__(self, value: date, /) -> bool: ... def __gt__(self, value: date, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __add__(self, value: timedelta, /) -> Self: ... - def __radd__(self, value: timedelta, /) -> Self: ... + def __add__(self, value: timedelta, /) -> Self: + """Return self+value. +""" + def __radd__(self, value: timedelta, /) -> Self: + """Return value+self. +""" @overload - def __sub__(self, value: datetime, /) -> NoReturn: ... + def __sub__(self, value: datetime, /) -> NoReturn: + """Return self-value. +""" @overload def __sub__(self, value: Self, /) -> timedelta: ... @overload def __sub__(self, value: timedelta, /) -> Self: ... def __hash__(self) -> int: ... 
- def weekday(self) -> int: ... - def isoweekday(self) -> int: ... - def isocalendar(self) -> _IsoCalendarDate: ... + def weekday(self) -> int: + """Return the day of the week represented by the date. +Monday == 0 ... Sunday == 6 +""" + def isoweekday(self) -> int: + """Return the day of the week represented by the date. +Monday == 1 ... Sunday == 7 +""" + def isocalendar(self) -> _IsoCalendarDate: + """Return a named tuple containing ISO year, week number, and weekday. +""" @disjoint_base class time: + """time([hour[, minute[, second[, microsecond[, tzinfo]]]]]) --> a time object + +All arguments are optional. tzinfo may be None, or an instance of +a tzinfo subclass. The remaining arguments may be ints. +""" min: ClassVar[time] max: ClassVar[time] resolution: ClassVar[timedelta] @@ -146,26 +225,48 @@ class time: def __gt__(self, value: time, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def isoformat(self, timespec: str = "auto") -> str: ... + def isoformat(self, timespec: str = "auto") -> str: + """Return string in ISO 8601 format, [HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. + +The optional argument timespec specifies the number of additional terms +of the time to include. Valid options are 'auto', 'hours', 'minutes', +'seconds', 'milliseconds' and 'microseconds'. +""" @classmethod - def fromisoformat(cls, time_string: str, /) -> Self: ... + def fromisoformat(cls, time_string: str, /) -> Self: + """string -> time from a string in ISO 8601 format +""" if sys.version_info >= (3, 14): @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: ... + def strptime(cls, date_string: str, format: str, /) -> Self: + """string, format -> new time parsed from a string (like time.strptime()). +""" # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): - def strftime(self, format: str) -> str: ... + def strftime(self, format: str) -> str: + """format -> strftime() style string. +""" else: - def strftime(self, format: str, /) -> str: ... + def strftime(self, format: str, /) -> str: + """format -> strftime() style string. +""" - def __format__(self, fmt: str, /) -> str: ... - def utcoffset(self) -> timedelta | None: ... - def tzname(self) -> str | None: ... - def dst(self) -> timedelta | None: ... + def __format__(self, fmt: str, /) -> str: + """Formats self with strftime. +""" + def utcoffset(self) -> timedelta | None: + """Return self.tzinfo.utcoffset(self). +""" + def tzname(self) -> str | None: + """Return self.tzinfo.tzname(self). +""" + def dst(self) -> timedelta | None: + """Return self.tzinfo.dst(self). +""" if sys.version_info >= (3, 13): def __replace__( self, @@ -177,7 +278,9 @@ class time: microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: ... + ) -> Self: + """The same as replace(). +""" def replace( self, @@ -188,13 +291,22 @@ class time: tzinfo: _TzInfo | None = ..., *, fold: int = ..., - ) -> Self: ... + ) -> Self: + """Return time with new specified fields. +""" _Date: TypeAlias = date _Time: TypeAlias = time @disjoint_base class timedelta: + """Difference between two datetime values. + +timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0) + +All arguments are optional and default to 0. +Arguments may be integers or floats, and may be positive or negative. 
+""" min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] @@ -209,41 +321,82 @@ class timedelta: weeks: float = 0, ) -> Self: ... @property - def days(self) -> int: ... + def days(self) -> int: + """Number of days. +""" @property - def seconds(self) -> int: ... + def seconds(self) -> int: + """Number of seconds (>= 0 and less than 1 day). +""" @property - def microseconds(self) -> int: ... - def total_seconds(self) -> float: ... - def __add__(self, value: timedelta, /) -> timedelta: ... - def __radd__(self, value: timedelta, /) -> timedelta: ... - def __sub__(self, value: timedelta, /) -> timedelta: ... - def __rsub__(self, value: timedelta, /) -> timedelta: ... - def __neg__(self) -> timedelta: ... - def __pos__(self) -> timedelta: ... - def __abs__(self) -> timedelta: ... - def __mul__(self, value: float, /) -> timedelta: ... - def __rmul__(self, value: float, /) -> timedelta: ... + def microseconds(self) -> int: + """Number of microseconds (>= 0 and less than 1 second). +""" + def total_seconds(self) -> float: + """Total seconds in the duration. +""" + def __add__(self, value: timedelta, /) -> timedelta: + """Return self+value. +""" + def __radd__(self, value: timedelta, /) -> timedelta: + """Return value+self. +""" + def __sub__(self, value: timedelta, /) -> timedelta: + """Return self-value. +""" + def __rsub__(self, value: timedelta, /) -> timedelta: + """Return value-self. +""" + def __neg__(self) -> timedelta: + """-self +""" + def __pos__(self) -> timedelta: + """+self +""" + def __abs__(self) -> timedelta: + """abs(self) +""" + def __mul__(self, value: float, /) -> timedelta: + """Return self*value. +""" + def __rmul__(self, value: float, /) -> timedelta: + """Return value*self. +""" @overload - def __floordiv__(self, value: timedelta, /) -> int: ... + def __floordiv__(self, value: timedelta, /) -> int: + """Return self//value. +""" @overload def __floordiv__(self, value: int, /) -> timedelta: ... @overload - def __truediv__(self, value: timedelta, /) -> float: ... + def __truediv__(self, value: timedelta, /) -> float: + """Return self/value. +""" @overload def __truediv__(self, value: float, /) -> timedelta: ... - def __mod__(self, value: timedelta, /) -> timedelta: ... - def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: ... + def __mod__(self, value: timedelta, /) -> timedelta: + """Return self%value. +""" + def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: + """Return divmod(self, value). +""" def __le__(self, value: timedelta, /) -> bool: ... def __lt__(self, value: timedelta, /) -> bool: ... def __ge__(self, value: timedelta, /) -> bool: ... def __gt__(self, value: timedelta, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... - def __bool__(self) -> bool: ... + def __bool__(self) -> bool: + """True if self else False +""" def __hash__(self) -> int: ... @disjoint_base class datetime(date): + """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) + +The year, month and day arguments are required. tzinfo may be None, or an +instance of a tzinfo subclass. The remaining arguments may be ints. +""" min: ClassVar[datetime] max: ClassVar[datetime] def __new__( @@ -276,26 +429,53 @@ class datetime(date): # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): @classmethod - def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: ... 
+ def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: + """timestamp[, tz] -> tz's local time from POSIX timestamp. +""" else: @classmethod - def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: ... + def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: + """timestamp[, tz] -> tz's local time from POSIX timestamp. +""" @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)") - def utcfromtimestamp(cls, t: float, /) -> Self: ... + def utcfromtimestamp(cls, t: float, /) -> Self: + """Construct a naive UTC datetime from a POSIX timestamp. +""" @classmethod - def now(cls, tz: _TzInfo | None = None) -> Self: ... + def now(cls, tz: _TzInfo | None = None) -> Self: + """Returns new datetime object representing current time local to tz. + + tz + Timezone object. + +If no tz is specified, uses local timezone. +""" @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)") - def utcnow(cls) -> Self: ... + def utcnow(cls) -> Self: + """Return a new datetime representing UTC day and time. +""" @classmethod - def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... - def timestamp(self) -> float: ... - def utctimetuple(self) -> struct_time: ... - def date(self) -> _Date: ... - def time(self) -> _Time: ... - def timetz(self) -> _Time: ... + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: + """date, time -> datetime with same date and time fields +""" + def timestamp(self) -> float: + """Return POSIX timestamp as float. +""" + def utctimetuple(self) -> struct_time: + """Return UTC time tuple, compatible with time.localtime(). +""" + def date(self) -> _Date: + """Return date object with same year, month and day. +""" + def time(self) -> _Time: + """Return time object with same time but with tzinfo=None. +""" + def timetz(self) -> _Time: + """Return time object with same time and tzinfo. +""" if sys.version_info >= (3, 13): def __replace__( self, @@ -310,7 +490,9 @@ class datetime(date): microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., fold: int = ..., - ) -> Self: ... + ) -> Self: + """The same as replace(). +""" def replace( self, @@ -324,14 +506,32 @@ class datetime(date): tzinfo: _TzInfo | None = ..., *, fold: int = ..., - ) -> Self: ... - def astimezone(self, tz: _TzInfo | None = None) -> Self: ... - def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: ... + ) -> Self: + """Return datetime with new specified fields. +""" + def astimezone(self, tz: _TzInfo | None = None) -> Self: + """tz -> convert to local time in new timezone tz +""" + def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: + """[sep] -> string in ISO 8601 format, YYYY-MM-DDT[HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. +sep is used to separate the year from the time, and defaults to 'T'. +The optional argument timespec specifies the number of additional terms +of the time to include. Valid options are 'auto', 'hours', 'minutes', +'seconds', 'milliseconds' and 'microseconds'. +""" @classmethod - def strptime(cls, date_string: str, format: str, /) -> Self: ... - def utcoffset(self) -> timedelta | None: ... - def tzname(self) -> str | None: ... - def dst(self) -> timedelta | None: ... 
+ def strptime(cls, date_string: str, format: str, /) -> Self: + """string, format -> new datetime parsed from a string (like time.strptime()). +""" + def utcoffset(self) -> timedelta | None: + """Return self.tzinfo.utcoffset(self). +""" + def tzname(self) -> str | None: + """Return self.tzinfo.tzname(self). +""" + def dst(self) -> timedelta | None: + """Return self.tzinfo.dst(self). +""" def __le__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __lt__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __ge__(self, value: datetime, /) -> bool: ... # type: ignore[override] @@ -339,7 +539,9 @@ class datetime(date): def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload # type: ignore[override] - def __sub__(self, value: Self, /) -> timedelta: ... + def __sub__(self, value: Self, /) -> timedelta: + """Return self-value. +""" @overload def __sub__(self, value: timedelta, /) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi index 7cbb63cf2f06e..4d37f4298a730 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi @@ -1,3 +1,31 @@ +"""Generic interface to all dbm clones. + +Use + + import dbm + d = dbm.open(file, 'w', 0o666) + +The returned object is a dbm.sqlite3, dbm.gnu, dbm.ndbm or dbm.dumb database object, dependent on the +type of database being opened (determined by the whichdb function) in the case +of an existing dbm. If the dbm does not exist and the create or new flag ('c' +or 'n') was specified, the dbm type will be determined by the availability of +the modules (tested in the above order). + +It has the following interface (key and data are strings): + + d[key] = data # store data at key (may override data at + # existing key) + data = d[key] # retrieve data at key (raise KeyError if no + # such key) + del d[key] # delete data stored at key (raises KeyError + # if no such key) + flag = key in d # true if the key exists + list = d.keys() # return a list of all existing keys (slow!) + +Future versions may change the order in which implementations are +tested for existence, and add interfaces to other dbm-like +implementations. +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -97,9 +125,51 @@ class _error(Exception): ... error: tuple[type[_error], type[OSError]] if sys.version_info >= (3, 11): - def whichdb(filename: StrOrBytesPath) -> str | None: ... - def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... + def whichdb(filename: StrOrBytesPath) -> str | None: + """Guess which db package to use to open a db file. + +Return values: + +- None if the database file can't be read; +- empty string if the file can be read but can't be recognized +- the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. + +Importing the given module may still fail, and opening the +database using that module may still fail. +""" + def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: + """Open or create database at path given by *file*. + +Optional argument *flag* can be 'r' (default) for read-only access, 'w' +for read-write access of an existing database, 'c' for read-write access +to a new or existing database, and 'n' for read-write access to a new +database. 
+ +Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it +only if it doesn't exist; and 'n' always creates a new database. +""" else: - def whichdb(filename: str) -> str | None: ... - def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... + def whichdb(filename: str) -> str | None: + """Guess which db package to use to open a db file. + + Return values: + + - None if the database file can't be read; + - empty string if the file can be read but can't be recognized + - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. + + Importing the given module may still fail, and opening the + database using that module may still fail. + """ + def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: + """Open or create database at path given by *file*. + + Optional argument *flag* can be 'r' (default) for read-only access, 'w' + for read-write access of an existing database, 'c' for read-write access + to a new or existing database, and 'n' for read-write access to a new + database. + + Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it + only if it doesn't exist; and 'n' always creates a new database. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi index 1c0b7756f2925..8766a13c4bce3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi @@ -1,3 +1,25 @@ +"""A dumb and slow but simple dbm clone. + +For database spam, spam.dir contains the index (a text file), +spam.bak *may* contain a backup of the index (also a text file), +while spam.dat contains the data (a binary file). + +XXX TO DO: + +- seems to contain a bug when updating... + +- reclaim free space (currently, space once occupied by deleted or expanded +items is never reused) + +- support concurrent access (currently, if two processes take turns making +updates, they can mess up the index) + +- support efficient access to large databases (currently, the whole index +is read when the database is opened, and some updates rewrite the whole index) + +- support opening for read-only (flag = 'm') + +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -31,7 +53,33 @@ class _Database(MutableMapping[_KeyType, bytes]): ) -> None: ... if sys.version_info >= (3, 11): - def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: ... + def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: + """Open the database file, filename, and return corresponding object. + +The flag argument, used to control how the database is opened in the +other DBM implementations, supports only the semantics of 'c' and 'n' +values. Other values will default to the semantics of 'c' value: +the database will always opened for update and will be created if it +does not exist. + +The optional mode argument is the UNIX mode of the file, used only when +the database has to be created. It defaults to octal code 0o666 (and +will be modified by the prevailing umask). + +""" else: - def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... + def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: + """Open the database file, filename, and return corresponding object. + + The flag argument, used to control how the database is opened in the + other DBM implementations, supports only the semantics of 'c' and 'n' + values. 
Other values will default to the semantics of 'c' value: + the database will always opened for update and will be created if it + does not exist. + + The optional mode argument is the UNIX mode of the file, used only when + the database has to be created. It defaults to octal code 0o666 (and + will be modified by the prevailing umask). + + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi index 2dac3d12b0ca4..8715bba5f43dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi @@ -1 +1,3 @@ +"""Provide the _gdbm module as a dbm submodule. +""" from _gdbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi index 66c943ab640be..6738efacb5241 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi @@ -1 +1,3 @@ +"""Provide the _dbm module as a dbm submodule. +""" from _dbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi index e2fba93b20017..c2ac90fef2cd1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi @@ -26,4 +26,17 @@ class _Database(MutableMapping[bytes, bytes]): def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... -def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: ... +def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: + """Open a dbm.sqlite3 database and return the dbm object. + +The 'filename' parameter is the name of the database file. + +The optional 'flag' parameter can be one of ...: + 'r' (default): open an existing database for read only access + 'w': open an existing database for read/write access + 'c': create a database if it does not exist; open for read/write access + 'n': always create a new, empty database; open for read/write access + +The optional 'mode' parameter is the Unix file access mode of the database; +only used when creating a new database. Default: 0o666. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi index 2e06c2d1b724a..e5a27d4b4099d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi @@ -1,3 +1,102 @@ +"""Decimal fixed-point and floating-point arithmetic. + +This is an implementation of decimal floating-point arithmetic based on +the General Decimal Arithmetic Specification: + + http://speleotrove.com/decimal/decarith.html + +and IEEE standard 854-1987: + + http://en.wikipedia.org/wiki/IEEE_854-1987 + +Decimal floating point has finite precision with arbitrarily large bounds. + +The purpose of this module is to support arithmetic using familiar +"schoolhouse" rules and to avoid some of the tricky representation +issues associated with binary floating point. The package is especially +useful for financial applications or for contexts where users have +expectations that are at odds with binary floating point (for instance, +in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead +of 0.0; Decimal('1.00') % Decimal('0.1') returns the expected +Decimal('0.00')). 
+ +Here are some examples of using the decimal module: + +>>> from decimal import * +>>> setcontext(ExtendedContext) +>>> Decimal(0) +Decimal('0') +>>> Decimal('1') +Decimal('1') +>>> Decimal('-.0123') +Decimal('-0.0123') +>>> Decimal(123456) +Decimal('123456') +>>> Decimal('123.45e12345678') +Decimal('1.2345E+12345680') +>>> Decimal('1.33') + Decimal('1.27') +Decimal('2.60') +>>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41') +Decimal('-2.20') +>>> dig = Decimal(1) +>>> print(dig / Decimal(3)) +0.333333333 +>>> getcontext().prec = 18 +>>> print(dig / Decimal(3)) +0.333333333333333333 +>>> print(dig.sqrt()) +1 +>>> print(Decimal(3).sqrt()) +1.73205080756887729 +>>> print(Decimal(3) ** 123) +4.85192780976896427E+58 +>>> inf = Decimal(1) / Decimal(0) +>>> print(inf) +Infinity +>>> neginf = Decimal(-1) / Decimal(0) +>>> print(neginf) +-Infinity +>>> print(neginf + inf) +NaN +>>> print(neginf * inf) +-Infinity +>>> print(dig / 0) +Infinity +>>> getcontext().traps[DivisionByZero] = 1 +>>> print(dig / 0) +Traceback (most recent call last): + ... + ... + ... +decimal.DivisionByZero: x / 0 +>>> c = Context() +>>> c.traps[InvalidOperation] = 0 +>>> print(c.flags[InvalidOperation]) +0 +>>> c.divide(Decimal(0), Decimal(0)) +Decimal('NaN') +>>> c.traps[InvalidOperation] = 1 +>>> print(c.flags[InvalidOperation]) +1 +>>> c.flags[InvalidOperation] = 0 +>>> print(c.flags[InvalidOperation]) +0 +>>> print(c.divide(Decimal(0), Decimal(0))) +Traceback (most recent call last): + ... + ... + ... +decimal.InvalidOperation: 0 / 0 +>>> print(c.flags[InvalidOperation]) +1 +>>> c.flags[InvalidOperation] = 0 +>>> c.traps[InvalidOperation] = 0 +>>> print(c.divide(Decimal(0), Decimal(0))) +NaN +>>> print(c.flags[InvalidOperation]) +1 +>>> +""" import numbers import sys from _decimal import ( @@ -49,6 +148,8 @@ class _ContextManager: def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... class DecimalTuple(NamedTuple): + """DecimalTuple(sign, digits, exponent) +""" sign: int digits: tuple[int, ...] exponent: int | Literal["n", "N", "F"] @@ -70,51 +171,163 @@ class FloatOperation(DecimalException, TypeError): ... @disjoint_base class Decimal: + """Construct a new Decimal object. 'value' can be an integer, string, tuple, +or another Decimal object. If no value is given, return Decimal('0'). The +context does not affect the conversion and is only passed to determine if +the InvalidOperation trap is active. + +""" def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ... if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: Decimal | float, /) -> Self: ... + def from_number(cls, number: Decimal | float, /) -> Self: + """Class method that converts a real number to a decimal number, exactly. + + >>> Decimal.from_number(314) # int + Decimal('314') + >>> Decimal.from_number(0.1) # float + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_number(Decimal('3.14')) # another decimal instance + Decimal('3.14') + + +""" @classmethod - def from_float(cls, f: float, /) -> Self: ... - def __bool__(self) -> bool: ... - def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def from_float(cls, f: float, /) -> Self: + """Class method that converts a float to a decimal number, exactly. +Since 0.1 is not exactly representable in binary floating point, +Decimal.from_float(0.1) is not the same as Decimal('0.1'). 
+ + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(float('-inf')) + Decimal('-Infinity') + + +""" + def __bool__(self) -> bool: + """True if self else False +""" + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Compare self to other. Return a decimal value: + + a or b is a NaN ==> Decimal('NaN') + a < b ==> Decimal('-1') + a == b ==> Decimal('0') + a > b ==> Decimal('1') + +""" def __hash__(self) -> int: ... - def as_tuple(self) -> DecimalTuple: ... - def as_integer_ratio(self) -> tuple[int, int]: ... - def to_eng_string(self, context: Context | None = None) -> str: ... - def __abs__(self) -> Decimal: ... - def __add__(self, value: _Decimal, /) -> Decimal: ... - def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... + def as_tuple(self) -> DecimalTuple: + """Return a tuple representation of the number. + +""" + def as_integer_ratio(self) -> tuple[int, int]: + """Decimal.as_integer_ratio() -> (int, int) + +Return a pair of integers, whose ratio is exactly equal to the original +Decimal and with a positive denominator. The ratio is in lowest terms. +Raise OverflowError on infinities and a ValueError on NaNs. + +""" + def to_eng_string(self, context: Context | None = None) -> str: + """Convert to an engineering-type string. Engineering notation has an exponent +which is a multiple of 3, so there are up to 3 digits left of the decimal +place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). + +The value of context.capitals determines whether the exponent sign is lower +or upper case. Otherwise, the context does not affect the operation. + +""" + def __abs__(self) -> Decimal: + """abs(self) +""" + def __add__(self, value: _Decimal, /) -> Decimal: + """Return self+value. +""" + def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return divmod(self, value). +""" def __eq__(self, value: object, /) -> bool: ... - def __floordiv__(self, value: _Decimal, /) -> Decimal: ... + def __floordiv__(self, value: _Decimal, /) -> Decimal: + """Return self//value. +""" def __ge__(self, value: _ComparableNum, /) -> bool: ... def __gt__(self, value: _ComparableNum, /) -> bool: ... def __le__(self, value: _ComparableNum, /) -> bool: ... def __lt__(self, value: _ComparableNum, /) -> bool: ... - def __mod__(self, value: _Decimal, /) -> Decimal: ... - def __mul__(self, value: _Decimal, /) -> Decimal: ... - def __neg__(self) -> Decimal: ... - def __pos__(self) -> Decimal: ... - def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: ... - def __radd__(self, value: _Decimal, /) -> Decimal: ... - def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: ... - def __rfloordiv__(self, value: _Decimal, /) -> Decimal: ... - def __rmod__(self, value: _Decimal, /) -> Decimal: ... - def __rmul__(self, value: _Decimal, /) -> Decimal: ... - def __rsub__(self, value: _Decimal, /) -> Decimal: ... - def __rtruediv__(self, value: _Decimal, /) -> Decimal: ... - def __sub__(self, value: _Decimal, /) -> Decimal: ... - def __truediv__(self, value: _Decimal, /) -> Decimal: ... - def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def __float__(self) -> float: ... - def __int__(self) -> int: ... + def __mod__(self, value: _Decimal, /) -> Decimal: + """Return self%value. 
+""" + def __mul__(self, value: _Decimal, /) -> Decimal: + """Return self*value. +""" + def __neg__(self) -> Decimal: + """-self +""" + def __pos__(self) -> Decimal: + """+self +""" + def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: + """Return pow(self, value, mod). +""" + def __radd__(self, value: _Decimal, /) -> Decimal: + """Return value+self. +""" + def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return divmod(value, self). +""" + def __rfloordiv__(self, value: _Decimal, /) -> Decimal: + """Return value//self. +""" + def __rmod__(self, value: _Decimal, /) -> Decimal: + """Return value%self. +""" + def __rmul__(self, value: _Decimal, /) -> Decimal: + """Return value*self. +""" + def __rsub__(self, value: _Decimal, /) -> Decimal: + """Return value-self. +""" + def __rtruediv__(self, value: _Decimal, /) -> Decimal: + """Return value/self. +""" + def __sub__(self, value: _Decimal, /) -> Decimal: + """Return self-value. +""" + def __truediv__(self, value: _Decimal, /) -> Decimal: + """Return self/value. +""" + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the remainder from dividing self by other. This differs from +self % other in that the sign of the remainder is chosen so as to minimize +its absolute value. More precisely, the return value is self - n * other +where n is the integer nearest to the exact value of self / other, and +if two integers are equally near then the even one is chosen. + +If the result is zero then its sign will be the sign of self. + +""" + def __float__(self) -> float: + """float(self) +""" + def __int__(self) -> int: + """int(self) +""" def __trunc__(self) -> int: ... @property def real(self) -> Decimal: ... @property def imag(self) -> Decimal: ... - def conjugate(self) -> Decimal: ... + def conjugate(self) -> Decimal: + """Return self. + +""" def __complex__(self) -> complex: ... @overload def __round__(self) -> int: ... @@ -122,53 +335,325 @@ class Decimal: def __round__(self, ndigits: int, /) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... - def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... - def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: ... - def normalize(self, context: Context | None = None) -> Decimal: ... - def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... - def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... - def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... - def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... - def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... - def sqrt(self, context: Context | None = None) -> Decimal: ... - def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def adjusted(self) -> int: ... - def canonical(self) -> Decimal: ... - def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def copy_abs(self) -> Decimal: ... 
- def copy_negate(self) -> Decimal: ... - def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def exp(self, context: Context | None = None) -> Decimal: ... - def is_canonical(self) -> bool: ... - def is_finite(self) -> bool: ... - def is_infinite(self) -> bool: ... - def is_nan(self) -> bool: ... - def is_normal(self, context: Context | None = None) -> bool: ... - def is_qnan(self) -> bool: ... - def is_signed(self) -> bool: ... - def is_snan(self) -> bool: ... - def is_subnormal(self, context: Context | None = None) -> bool: ... - def is_zero(self) -> bool: ... - def ln(self, context: Context | None = None) -> Decimal: ... - def log10(self, context: Context | None = None) -> Decimal: ... - def logb(self, context: Context | None = None) -> Decimal: ... - def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def logical_invert(self, context: Context | None = None) -> Decimal: ... - def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def next_minus(self, context: Context | None = None) -> Decimal: ... - def next_plus(self, context: Context | None = None) -> Decimal: ... - def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def number_class(self, context: Context | None = None) -> str: ... - def radix(self) -> Decimal: ... - def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... - def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: + """Fused multiply-add. Return self*other+third with no rounding of the +intermediate product self*other. + + >>> Decimal(2).fma(3, 5) + Decimal('11') + + +""" + def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: + """Return pow(value, self, mod). +""" + def normalize(self, context: Context | None = None) -> Decimal: + """Normalize the number by stripping the rightmost trailing zeros and +converting any result equal to Decimal('0') to Decimal('0e0'). Used +for producing canonical values for members of an equivalence class. +For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize +to the equivalent value Decimal('32.1'). + +""" + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: + """Return a value equal to the first operand after rounding and having the +exponent of the second operand. + + >>> Decimal('1.41421356').quantize(Decimal('1.000')) + Decimal('1.414') + +Unlike other operations, if the length of the coefficient after the quantize +operation would be greater than precision, then an InvalidOperation is signaled. +This guarantees that, unless there is an error condition, the quantized exponent +is always equal to that of the right-hand operand. + +Also unlike other operations, quantize never signals Underflow, even if the +result is subnormal and inexact. + +If the exponent of the second operand is larger than that of the first, then +rounding may be necessary. 
In this case, the rounding mode is determined by the +rounding argument if given, else by the given context argument; if neither +argument is given, the rounding mode of the current thread's context is used. + +""" + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: + """Test whether self and other have the same exponent or whether both are NaN. + +This operation is unaffected by context and is quiet: no flags are changed +and no rounding is performed. As an exception, the C version may raise +InvalidOperation if the second operand cannot be converted exactly. + +""" + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """Round to the nearest integer, signaling Inexact or Rounded as appropriate if +rounding occurs. The rounding mode is determined by the rounding parameter +if given, else by the given context. If neither parameter is given, then the +rounding mode of the current default context is used. + +""" + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """Round to the nearest integer without signaling Inexact or Rounded. The +rounding mode is determined by the rounding parameter if given, else by +the given context. If neither parameter is given, then the rounding mode +of the current default context is used. + +""" + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: + """Identical to the to_integral_value() method. The to_integral() name has been +kept for compatibility with older versions. + +""" + def sqrt(self, context: Context | None = None) -> Decimal: + """Return the square root of the argument to full precision. The result is +correctly rounded using the ROUND_HALF_EVEN rounding mode. + +""" + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Maximum of self and other. If one operand is a quiet NaN and the other is +numeric, the numeric operand is returned. + +""" + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Minimum of self and other. If one operand is a quiet NaN and the other is +numeric, the numeric operand is returned. + +""" + def adjusted(self) -> int: + """Return the adjusted exponent of the number. Defined as exp + digits - 1. + +""" + def canonical(self) -> Decimal: + """Return the canonical encoding of the argument. Currently, the encoding +of a Decimal instance is always canonical, so this operation returns its +argument unchanged. + +""" + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Identical to compare, except that all NaNs signal. + +""" + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Compare two operands using their abstract representation rather than +their numerical value. Similar to the compare() method, but the result +gives a total ordering on Decimal instances. Two Decimal instances with +the same numeric value but different representations compare unequal +in this ordering: + + >>> Decimal('12.0').compare_total(Decimal('12')) + Decimal('-1') + +Quiet and signaling NaNs are also included in the total ordering. The result +of this function is Decimal('0') if both operands have the same representation, +Decimal('-1') if the first operand is lower in the total order than the second, +and Decimal('1') if the first operand is higher in the total order than the +second operand. 
See the specification for details of the total order. + +This operation is unaffected by context and is quiet: no flags are changed +and no rounding is performed. As an exception, the C version may raise +InvalidOperation if the second operand cannot be converted exactly. + +""" + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Compare two operands using their abstract representation rather than their +value as in compare_total(), but ignoring the sign of each operand. + +x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). + +This operation is unaffected by context and is quiet: no flags are changed +and no rounding is performed. As an exception, the C version may raise +InvalidOperation if the second operand cannot be converted exactly. + +""" + def copy_abs(self) -> Decimal: + """Return the absolute value of the argument. This operation is unaffected by +context and is quiet: no flags are changed and no rounding is performed. + +""" + def copy_negate(self) -> Decimal: + """Return the negation of the argument. This operation is unaffected by context +and is quiet: no flags are changed and no rounding is performed. + +""" + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return a copy of the first operand with the sign set to be the same as the +sign of the second operand. For example: + + >>> Decimal('2.3').copy_sign(Decimal('-1.5')) + Decimal('-2.3') + +This operation is unaffected by context and is quiet: no flags are changed +and no rounding is performed. As an exception, the C version may raise +InvalidOperation if the second operand cannot be converted exactly. + +""" + def exp(self, context: Context | None = None) -> Decimal: + """Return the value of the (natural) exponential function e**x at the given +number. The function always uses the ROUND_HALF_EVEN mode and the result +is correctly rounded. + +""" + def is_canonical(self) -> bool: + """Return True if the argument is canonical and False otherwise. Currently, +a Decimal instance is always canonical, so this operation always returns +True. + +""" + def is_finite(self) -> bool: + """Return True if the argument is a finite number, and False if the argument +is infinite or a NaN. + +""" + def is_infinite(self) -> bool: + """Return True if the argument is either positive or negative infinity and +False otherwise. + +""" + def is_nan(self) -> bool: + """Return True if the argument is a (quiet or signaling) NaN and False +otherwise. + +""" + def is_normal(self, context: Context | None = None) -> bool: + """Return True if the argument is a normal finite non-zero number with an +adjusted exponent greater than or equal to Emin. Return False if the +argument is zero, subnormal, infinite or a NaN. + +""" + def is_qnan(self) -> bool: + """Return True if the argument is a quiet NaN, and False otherwise. + +""" + def is_signed(self) -> bool: + """Return True if the argument has a negative sign and False otherwise. +Note that both zeros and NaNs can carry signs. + +""" + def is_snan(self) -> bool: + """Return True if the argument is a signaling NaN and False otherwise. + +""" + def is_subnormal(self, context: Context | None = None) -> bool: + """Return True if the argument is subnormal, and False otherwise. A number is +subnormal if it is non-zero, finite, and has an adjusted exponent less +than Emin. + +""" + def is_zero(self) -> bool: + """Return True if the argument is a (positive or negative) zero and False +otherwise. 
+ +""" + def ln(self, context: Context | None = None) -> Decimal: + """Return the natural (base e) logarithm of the operand. The function always +uses the ROUND_HALF_EVEN mode and the result is correctly rounded. + +""" + def log10(self, context: Context | None = None) -> Decimal: + """Return the base ten logarithm of the operand. The function always uses the +ROUND_HALF_EVEN mode and the result is correctly rounded. + +""" + def logb(self, context: Context | None = None) -> Decimal: + """For a non-zero number, return the adjusted exponent of the operand as a +Decimal instance. If the operand is a zero, then Decimal('-Infinity') is +returned and the DivisionByZero condition is raised. If the operand is +an infinity then Decimal('Infinity') is returned. + +""" + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'and' of the two (logical) operands. + +""" + def logical_invert(self, context: Context | None = None) -> Decimal: + """Return the digit-wise inversion of the (logical) operand. + +""" + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'or' of the two (logical) operands. + +""" + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the digit-wise 'exclusive or' of the two (logical) operands. + +""" + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Similar to the max() method, but the comparison is done using the absolute +values of the operands. + +""" + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Similar to the min() method, but the comparison is done using the absolute +values of the operands. + +""" + def next_minus(self, context: Context | None = None) -> Decimal: + """Return the largest number representable in the given context (or in the +current default context if no context is given) that is smaller than the +given operand. + +""" + def next_plus(self, context: Context | None = None) -> Decimal: + """Return the smallest number representable in the given context (or in the +current default context if no context is given) that is larger than the +given operand. + +""" + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: + """If the two operands are unequal, return the number closest to the first +operand in the direction of the second operand. If both operands are +numerically equal, return a copy of the first operand with the sign set +to be the same as the sign of the second operand. + +""" + def number_class(self, context: Context | None = None) -> str: + """Return a string describing the class of the operand. The returned value +is one of the following ten strings: + + * '-Infinity', indicating that the operand is negative infinity. + * '-Normal', indicating that the operand is a negative normal number. + * '-Subnormal', indicating that the operand is negative and subnormal. + * '-Zero', indicating that the operand is a negative zero. + * '+Zero', indicating that the operand is a positive zero. + * '+Subnormal', indicating that the operand is positive and subnormal. + * '+Normal', indicating that the operand is a positive normal number. + * '+Infinity', indicating that the operand is positive infinity. + * 'NaN', indicating that the operand is a quiet NaN (Not a Number). + * 'sNaN', indicating that the operand is a signaling NaN. 
+ + +""" + def radix(self) -> Decimal: + """Return Decimal(10), the radix (base) in which the Decimal class does +all its arithmetic. Included for compatibility with the specification. + +""" + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the result of rotating the digits of the first operand by an amount +specified by the second operand. The second operand must be an integer in +the range -precision through precision. The absolute value of the second +operand gives the number of places to rotate. If the second operand is +positive then rotation is to the left; otherwise rotation is to the right. +The coefficient of the first operand is padded on the left with zeros to +length precision if necessary. The sign and exponent of the first operand are +unchanged. + +""" + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the first operand with the exponent adjusted the second. Equivalently, +return the first operand multiplied by 10**other. The second operand must be +an integer. + +""" + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: + """Return the result of shifting the digits of the first operand by an amount +specified by the second operand. The second operand must be an integer in +the range -precision through precision. The absolute value of the second +operand gives the number of places to shift. If the second operand is +positive, then the shift is to the left; otherwise the shift is to the +right. Digits shifted into the coefficient are zeros. The sign and exponent +of the first operand are unchanged. + +""" def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any, /) -> Self: ... @@ -176,6 +661,18 @@ class Decimal: @disjoint_base class Context: + """The context affects almost all operations and controls rounding, +Over/Underflow, raising of exceptions and much more. A new context +can be constructed as follows: + + >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, + ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, + ... traps=[InvalidOperation, DivisionByZero, Overflow], + ... flags=[]) + >>> + + +""" # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, # even settable attributes like `prec` and `rounding`, # but that's inexpressible in the stub. @@ -201,74 +698,300 @@ class Context: traps: dict[_TrapType, bool] | Container[_TrapType] | None = None, ) -> None: ... def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... - def clear_flags(self) -> None: ... - def clear_traps(self) -> None: ... - def copy(self) -> Context: ... + def clear_flags(self) -> None: + """Reset all flags to False. + +""" + def clear_traps(self) -> None: + """Set all traps to False. + +""" + def copy(self) -> Context: + """Return a duplicate of the context with all flags cleared. + +""" def __copy__(self) -> Context: ... # see https://github.com/python/cpython/issues/94107 __hash__: ClassVar[None] # type: ignore[assignment] - def Etiny(self) -> int: ... - def Etop(self) -> int: ... - def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: ... - def create_decimal_from_float(self, f: float, /) -> Decimal: ... - def abs(self, x: _Decimal, /) -> Decimal: ... - def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def canonical(self, x: Decimal, /) -> Decimal: ... - def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... 
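# A minimal sketch (standard-library decimal only) of the Context behaviour
# documented above: precision-aware division plus flags and traps.
from decimal import Context, Decimal, DivisionByZero

c = Context(prec=5)
assert c.divide(Decimal(1), Decimal(3)) == Decimal("0.33333")   # rounded to 5 digits
c.traps[DivisionByZero] = False                                  # record the condition instead of raising
assert c.divide(Decimal(1), Decimal(0)) == Decimal("Infinity")
assert c.flags[DivisionByZero]                                   # flag was set instead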
- def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def copy_abs(self, x: _Decimal, /) -> Decimal: ... - def copy_decimal(self, x: _Decimal, /) -> Decimal: ... - def copy_negate(self, x: _Decimal, /) -> Decimal: ... - def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: ... - def exp(self, x: _Decimal, /) -> Decimal: ... - def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: ... - def is_canonical(self, x: _Decimal, /) -> bool: ... - def is_finite(self, x: _Decimal, /) -> bool: ... - def is_infinite(self, x: _Decimal, /) -> bool: ... - def is_nan(self, x: _Decimal, /) -> bool: ... - def is_normal(self, x: _Decimal, /) -> bool: ... - def is_qnan(self, x: _Decimal, /) -> bool: ... - def is_signed(self, x: _Decimal, /) -> bool: ... - def is_snan(self, x: _Decimal, /) -> bool: ... - def is_subnormal(self, x: _Decimal, /) -> bool: ... - def is_zero(self, x: _Decimal, /) -> bool: ... - def ln(self, x: _Decimal, /) -> Decimal: ... - def log10(self, x: _Decimal, /) -> Decimal: ... - def logb(self, x: _Decimal, /) -> Decimal: ... - def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def logical_invert(self, x: _Decimal, /) -> Decimal: ... - def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def minus(self, x: _Decimal, /) -> Decimal: ... - def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def next_minus(self, x: _Decimal, /) -> Decimal: ... - def next_plus(self, x: _Decimal, /) -> Decimal: ... - def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def normalize(self, x: _Decimal, /) -> Decimal: ... - def number_class(self, x: _Decimal, /) -> str: ... - def plus(self, x: _Decimal, /) -> Decimal: ... - def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... - def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def radix(self) -> Decimal: ... - def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: ... - def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def sqrt(self, x: _Decimal, /) -> Decimal: ... - def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: ... - def to_eng_string(self, x: _Decimal, /) -> str: ... - def to_sci_string(self, x: _Decimal, /) -> str: ... - def to_integral_exact(self, x: _Decimal, /) -> Decimal: ... - def to_integral_value(self, x: _Decimal, /) -> Decimal: ... - def to_integral(self, x: _Decimal, /) -> Decimal: ... + def Etiny(self) -> int: + """Return a value equal to Emin - prec + 1, which is the minimum exponent value +for subnormal results. 
When underflow occurs, the exponent is set to Etiny. + +""" + def Etop(self) -> int: + """Return a value equal to Emax - prec + 1. This is the maximum exponent +if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() +must not be negative. + +""" + def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: + """Create a new Decimal instance from num, using self as the context. Unlike the +Decimal constructor, this function observes the context limits. + +""" + def create_decimal_from_float(self, f: float, /) -> Decimal: + """Create a new Decimal instance from float f. Unlike the Decimal.from_float() +class method, this function observes the context limits. + +""" + def abs(self, x: _Decimal, /) -> Decimal: + """Return the absolute value of x. + +""" + def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the sum of x and y. + +""" + def canonical(self, x: Decimal, /) -> Decimal: + """Return a new instance of x. + +""" + def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y numerically. + +""" + def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y numerically. All NaNs signal. + +""" + def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y using their abstract representation. + +""" + def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare x and y using their abstract representation, ignoring sign. + +""" + def copy_abs(self, x: _Decimal, /) -> Decimal: + """Return a copy of x with the sign set to 0. + +""" + def copy_decimal(self, x: _Decimal, /) -> Decimal: + """Return a copy of Decimal x. + +""" + def copy_negate(self, x: _Decimal, /) -> Decimal: + """Return a copy of x with the sign inverted. + +""" + def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Copy the sign from y to x. + +""" + def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return x divided by y. + +""" + def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return x divided by y, truncated to an integer. + +""" + def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: + """Return quotient and remainder of the division x / y. + +""" + def exp(self, x: _Decimal, /) -> Decimal: + """Return e ** x. + +""" + def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: + """Return x multiplied by y, plus z. + +""" + def is_canonical(self, x: _Decimal, /) -> bool: + """Return True if x is canonical, False otherwise. + +""" + def is_finite(self, x: _Decimal, /) -> bool: + """Return True if x is finite, False otherwise. + +""" + def is_infinite(self, x: _Decimal, /) -> bool: + """Return True if x is infinite, False otherwise. + +""" + def is_nan(self, x: _Decimal, /) -> bool: + """Return True if x is a qNaN or sNaN, False otherwise. + +""" + def is_normal(self, x: _Decimal, /) -> bool: + """Return True if x is a normal number, False otherwise. + +""" + def is_qnan(self, x: _Decimal, /) -> bool: + """Return True if x is a quiet NaN, False otherwise. + +""" + def is_signed(self, x: _Decimal, /) -> bool: + """Return True if x is negative, False otherwise. + +""" + def is_snan(self, x: _Decimal, /) -> bool: + """Return True if x is a signaling NaN, False otherwise. + +""" + def is_subnormal(self, x: _Decimal, /) -> bool: + """Return True if x is subnormal, False otherwise. + +""" + def is_zero(self, x: _Decimal, /) -> bool: + """Return True if x is a zero, False otherwise. 
+ +""" + def ln(self, x: _Decimal, /) -> Decimal: + """Return the natural (base e) logarithm of x. + +""" + def log10(self, x: _Decimal, /) -> Decimal: + """Return the base 10 logarithm of x. + +""" + def logb(self, x: _Decimal, /) -> Decimal: + """Return the exponent of the magnitude of the operand's MSD. + +""" + def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise and of x and y. + +""" + def logical_invert(self, x: _Decimal, /) -> Decimal: + """Invert all digits of x. + +""" + def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise or of x and y. + +""" + def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Digit-wise xor of x and y. + +""" + def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically and return the maximum. + +""" + def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically with their sign ignored. + +""" + def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically and return the minimum. + +""" + def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Compare the values numerically with their sign ignored. + +""" + def minus(self, x: _Decimal, /) -> Decimal: + """Minus corresponds to the unary prefix minus operator in Python, but applies +the context to the result. + +""" + def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the product of x and y. + +""" + def next_minus(self, x: _Decimal, /) -> Decimal: + """Return the largest representable number smaller than x. + +""" + def next_plus(self, x: _Decimal, /) -> Decimal: + """Return the smallest representable number larger than x. + +""" + def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the number closest to x, in the direction towards y. + +""" + def normalize(self, x: _Decimal, /) -> Decimal: + """Reduce x to its simplest form. Alias for reduce(x). + +""" + def number_class(self, x: _Decimal, /) -> str: + """Return an indication of the class of x. + +""" + def plus(self, x: _Decimal, /) -> Decimal: + """Plus corresponds to the unary prefix plus operator in Python, but applies +the context to the result. + +""" + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: + """Compute a**b. If 'a' is negative, then 'b' must be integral. The result +will be inexact unless 'a' is integral and the result is finite and can +be expressed exactly in 'precision' digits. In the Python version the +result is always correctly rounded, in the C version the result is almost +always correctly rounded. + +If modulo is given, compute (a**b) % modulo. The following restrictions +hold: + + * all three arguments must be integral + * 'b' must be nonnegative + * at least one of 'a' or 'b' must be nonzero + * modulo must be nonzero and less than 10**prec in absolute value + + +""" + def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a value equal to x (rounded), having the exponent of y. + +""" + def radix(self) -> Decimal: + """Return 10. + +""" + def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the remainder from integer division. The sign of the result, +if non-zero, is the same as that of the original dividend. + +""" + def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return x - y * n, where n is the integer nearest the exact value of x / y +(if the result is 0 then its sign will be the sign of x). 
+ +""" + def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a copy of x, rotated by y places. + +""" + def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: + """Return True if the two operands have the same exponent. + +""" + def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the first operand after adding the second value to its exp. + +""" + def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return a copy of x, shifted by y places. + +""" + def sqrt(self, x: _Decimal, /) -> Decimal: + """Square root of a non-negative number to context precision. + +""" + def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: + """Return the difference between x and y. + +""" + def to_eng_string(self, x: _Decimal, /) -> str: + """Convert a number to a string, using engineering notation. + +""" + def to_sci_string(self, x: _Decimal, /) -> str: + """Convert a number to a string using scientific notation. + +""" + def to_integral_exact(self, x: _Decimal, /) -> Decimal: + """Round to an integer. Signal if the result is rounded or inexact. + +""" + def to_integral_value(self, x: _Decimal, /) -> Decimal: + """Round to an integer. + +""" + def to_integral(self, x: _Decimal, /) -> Decimal: + """Identical to to_integral_value(x). + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi index 6efe68322bb65..aa804b1198227 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi @@ -1,3 +1,30 @@ +""" +Module difflib -- helpers for computing deltas between objects. + +Function get_close_matches(word, possibilities, n=3, cutoff=0.6): + Use SequenceMatcher to return list of the best "good enough" matches. + +Function context_diff(a, b): + For two lists of strings, return a delta in context diff format. + +Function ndiff(a, b): + Return a delta: the difference between `a` and `b` (lists of strings). + +Function restore(delta, which): + Return one of the two sequences that generated an ndiff delta. + +Function unified_diff(a, b): + For two lists of strings, return a delta in unified diff format. + +Class SequenceMatcher: + A flexible class for comparing pairs of sequences of any type. + +Class Differ: + For producing human-readable deltas from sequences of lines of text. + +Class HtmlDiff: + For producing HTML side by side comparison with change highlights. +""" import re import sys from collections.abc import Callable, Iterable, Iterator, Sequence @@ -22,13 +49,110 @@ __all__ = [ _T = TypeVar("_T") class Match(NamedTuple): + """Match(a, b, size) +""" a: int b: int size: int class SequenceMatcher(Generic[_T]): + """ +SequenceMatcher is a flexible class for comparing pairs of sequences of +any type, so long as the sequence elements are hashable. The basic +algorithm predates, and is a little fancier than, an algorithm +published in the late 1980's by Ratcliff and Obershelp under the +hyperbolic name "gestalt pattern matching". The basic idea is to find +the longest contiguous matching subsequence that contains no "junk" +elements (R-O doesn't address junk). The same idea is then applied +recursively to the pieces of the sequences to the left and to the right +of the matching subsequence. This does not yield minimal edit +sequences, but does tend to yield matches that "look right" to people. + +SequenceMatcher tries to compute a "human-friendly diff" between two +sequences. Unlike e.g. 
UNIX(tm) diff, the fundamental notion is the +longest *contiguous* & junk-free matching subsequence. That's what +catches peoples' eyes. The Windows(tm) windiff has another interesting +notion, pairing up elements that appear uniquely in each sequence. +That, and the method here, appear to yield more intuitive difference +reports than does diff. This method appears to be the least vulnerable +to syncing up on blocks of "junk lines", though (like blank lines in +ordinary text files, or maybe "
<P>
" lines in HTML files). That may be +because this is the only method of the 3 that has a *concept* of +"junk" . + +Example, comparing two strings, and considering blanks to be "junk": + +>>> s = SequenceMatcher(lambda x: x == " ", +... "private Thread currentThread;", +... "private volatile Thread currentThread;") +>>> + +.ratio() returns a float in [0, 1], measuring the "similarity" of the +sequences. As a rule of thumb, a .ratio() value over 0.6 means the +sequences are close matches: + +>>> print(round(s.ratio(), 2)) +0.87 +>>> + +If you're only interested in where the sequences match, +.get_matching_blocks() is handy: + +>>> for block in s.get_matching_blocks(): +... print("a[%d] and b[%d] match for %d elements" % block) +a[0] and b[0] match for 8 elements +a[8] and b[17] match for 21 elements +a[29] and b[38] match for 0 elements + +Note that the last tuple returned by .get_matching_blocks() is always a +dummy, (len(a), len(b), 0), and this is the only case in which the last +tuple element (number of elements matched) is 0. + +If you want to know how to change the first sequence into the second, +use .get_opcodes(): + +>>> for opcode in s.get_opcodes(): +... print("%6s a[%d:%d] b[%d:%d]" % opcode) + equal a[0:8] b[0:8] +insert a[8:8] b[8:17] + equal a[8:29] b[17:38] + +See the Differ class for a fancy human-friendly file differencer, which +uses SequenceMatcher both to compare sequences of lines, and to compare +sequences of characters within similar (near-matching) lines. + +See also function get_close_matches() in this module, which shows how +simple code building on SequenceMatcher can be used to do useful work. + +Timing: Basic R-O is cubic time worst case and quadratic time expected +case. SequenceMatcher is quadratic time for the worst case and has +expected-case behavior dependent in a complicated way on how many +elements the sequences have in common; best case time is linear. +""" @overload - def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: + """Construct a SequenceMatcher. + +Optional arg isjunk is None (the default), or a one-argument +function that takes a sequence element and returns true iff the +element is junk. None is equivalent to passing "lambda x: 0", i.e. +no elements are considered to be junk. For example, pass + lambda x: x in " \\t" +if you're comparing lines as sequences of characters, and don't +want to synch up on blanks or hard tabs. + +Optional arg a is the first of two sequences to be compared. By +default, an empty string. The elements of a must be hashable. See +also .set_seqs() and .set_seq1(). + +Optional arg b is the second of two sequences to be compared. By +default, an empty string. The elements of b must be hashable. See +also .set_seqs() and .set_seq2(). + +Optional arg autojunk should be set to False to disable the +"automatic junk heuristic" that treats popular elements as junk +(see module documentation for more information). +""" @overload def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload @@ -39,36 +163,417 @@ class SequenceMatcher(Generic[_T]): b: Sequence[str] = "", autojunk: bool = True, ) -> None: ... - def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... - def set_seq1(self, a: Sequence[_T]) -> None: ... - def set_seq2(self, b: Sequence[_T]) -> None: ... 
- def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... - def get_matching_blocks(self) -> list[Match]: ... - def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: ... - def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... - def ratio(self) -> float: ... - def quick_ratio(self) -> float: ... - def real_quick_ratio(self) -> float: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: + """Set the two sequences to be compared. + +>>> s = SequenceMatcher() +>>> s.set_seqs("abcd", "bcde") +>>> s.ratio() +0.75 +""" + def set_seq1(self, a: Sequence[_T]) -> None: + """Set the first sequence to be compared. + +The second sequence to be compared is not changed. + +>>> s = SequenceMatcher(None, "abcd", "bcde") +>>> s.ratio() +0.75 +>>> s.set_seq1("bcde") +>>> s.ratio() +1.0 +>>> + +SequenceMatcher computes and caches detailed information about the +second sequence, so if you want to compare one sequence S against +many sequences, use .set_seq2(S) once and call .set_seq1(x) +repeatedly for each of the other sequences. + +See also set_seqs() and set_seq2(). +""" + def set_seq2(self, b: Sequence[_T]) -> None: + """Set the second sequence to be compared. + +The first sequence to be compared is not changed. + +>>> s = SequenceMatcher(None, "abcd", "bcde") +>>> s.ratio() +0.75 +>>> s.set_seq2("abcd") +>>> s.ratio() +1.0 +>>> + +SequenceMatcher computes and caches detailed information about the +second sequence, so if you want to compare one sequence S against +many sequences, use .set_seq2(S) once and call .set_seq1(x) +repeatedly for each of the other sequences. + +See also set_seqs() and set_seq1(). +""" + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: + """Find longest matching block in a[alo:ahi] and b[blo:bhi]. + +By default it will find the longest match in the entirety of a and b. + +If isjunk is not defined: + +Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where + alo <= i <= i+k <= ahi + blo <= j <= j+k <= bhi +and for all (i',j',k') meeting those conditions, + k >= k' + i <= i' + and if i == i', j <= j' + +In other words, of all maximal matching blocks, return one that +starts earliest in a, and of all those maximal matching blocks that +start earliest in a, return the one that starts earliest in b. + +>>> s = SequenceMatcher(None, " abcd", "abcd abcd") +>>> s.find_longest_match(0, 5, 0, 9) +Match(a=0, b=4, size=5) + +If isjunk is defined, first the longest matching block is +determined as above, but with the additional restriction that no +junk element appears in the block. Then that block is extended as +far as possible by matching (only) junk elements on both sides. So +the resulting block never matches on junk except as identical junk +happens to be adjacent to an "interesting" match. + +Here's the same example as before, but considering blanks to be +junk. That prevents " abcd" from matching the " abcd" at the tail +end of the second sequence directly. Instead only the "abcd" can +match, and matches the leftmost "abcd" in the second sequence: + +>>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd") +>>> s.find_longest_match(0, 5, 0, 9) +Match(a=1, b=0, size=4) + +If no blocks match, return (alo, blo, 0). 
+ +>>> s = SequenceMatcher(None, "ab", "c") +>>> s.find_longest_match(0, 2, 0, 1) +Match(a=0, b=0, size=0) +""" + def get_matching_blocks(self) -> list[Match]: + """Return list of triples describing matching subsequences. + +Each triple is of the form (i, j, n), and means that +a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in +i and in j. New in Python 2.5, it's also guaranteed that if +(i, j, n) and (i', j', n') are adjacent triples in the list, and +the second is not the last triple in the list, then i+n != i' or +j+n != j'. IOW, adjacent triples never describe adjacent equal +blocks. + +The last triple is a dummy, (len(a), len(b), 0), and is the only +triple with n==0. + +>>> s = SequenceMatcher(None, "abxcd", "abcd") +>>> list(s.get_matching_blocks()) +[Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)] +""" + def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: + """Return list of 5-tuples describing how to turn a into b. + +Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple +has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the +tuple preceding it, and likewise for j1 == the previous j2. + +The tags are strings, with these meanings: + +'replace': a[i1:i2] should be replaced by b[j1:j2] +'delete': a[i1:i2] should be deleted. + Note that j1==j2 in this case. +'insert': b[j1:j2] should be inserted at a[i1:i1]. + Note that i1==i2 in this case. +'equal': a[i1:i2] == b[j1:j2] + +>>> a = "qabxcd" +>>> b = "abycdf" +>>> s = SequenceMatcher(None, a, b) +>>> for tag, i1, i2, j1, j2 in s.get_opcodes(): +... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % +... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) + delete a[0:1] (q) b[0:0] () + equal a[1:3] (ab) b[0:2] (ab) +replace a[3:4] (x) b[2:3] (y) + equal a[4:6] (cd) b[3:5] (cd) + insert a[6:6] () b[5:6] (f) +""" + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: + """Isolate change clusters by eliminating ranges with no changes. + +Return a generator of groups with up to n lines of context. +Each group is in the same format as returned by get_opcodes(). + +>>> from pprint import pprint +>>> a = list(map(str, range(1,40))) +>>> b = a[:] +>>> b[8:8] = ['i'] # Make an insertion +>>> b[20] += 'x' # Make a replacement +>>> b[23:28] = [] # Make a deletion +>>> b[30] += 'y' # Make another replacement +>>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes())) +[[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)], + [('equal', 16, 19, 17, 20), + ('replace', 19, 20, 20, 21), + ('equal', 20, 22, 21, 23), + ('delete', 22, 27, 23, 23), + ('equal', 27, 30, 23, 26)], + [('equal', 31, 34, 27, 30), + ('replace', 34, 35, 30, 31), + ('equal', 35, 38, 31, 34)]] +""" + def ratio(self) -> float: + """Return a measure of the sequences' similarity (float in [0,1]). + +Where T is the total number of elements in both sequences, and +M is the number of matches, this is 2.0*M / T. +Note that this is 1 if the sequences are identical, and 0 if +they have nothing in common. + +.ratio() is expensive to compute if you haven't already computed +.get_matching_blocks() or .get_opcodes(), in which case you may +want to try .quick_ratio() or .real_quick_ratio() first to get an +upper bound. 
+ +>>> s = SequenceMatcher(None, "abcd", "bcde") +>>> s.ratio() +0.75 +>>> s.quick_ratio() +0.75 +>>> s.real_quick_ratio() +1.0 +""" + def quick_ratio(self) -> float: + """Return an upper bound on ratio() relatively quickly. + +This isn't defined beyond that it is an upper bound on .ratio(), and +is faster to compute. +""" + def real_quick_ratio(self) -> float: + """Return an upper bound on ratio() very quickly. + +This isn't defined beyond that it is an upper bound on .ratio(), and +is faster to compute than either .ratio() or .quick_ratio(). +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" @overload -def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: + """Use SequenceMatcher to return list of the best "good enough" matches. + +word is a sequence for which close matches are desired (typically a +string). + +possibilities is a list of sequences against which to match word +(typically a list of strings). + +Optional arg n (default 3) is the maximum number of close matches to +return. n must be > 0. + +Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities +that don't score at least that similar to word are ignored. + +The best (no more than n) matches among the possibilities are returned +in a list, sorted by similarity score, most similar first. + +>>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"]) +['apple', 'ape'] +>>> import keyword as _keyword +>>> get_close_matches("wheel", _keyword.kwlist) +['while'] +>>> get_close_matches("Apple", _keyword.kwlist) +[] +>>> get_close_matches("accept", _keyword.kwlist) +['except'] +""" @overload def get_close_matches( word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 ) -> list[Sequence[_T]]: ... class Differ: - def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... - def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... + """ +Differ is a class for comparing sequences of lines of text, and +producing human-readable differences or deltas. Differ uses +SequenceMatcher both to compare sequences of lines, and to compare +sequences of characters within similar (near-matching) lines. + +Each line of a Differ delta begins with a two-letter code: + + '- ' line unique to sequence 1 + '+ ' line unique to sequence 2 + ' ' line common to both sequences + '? ' line not present in either input sequence + +Lines beginning with '? ' attempt to guide the eye to intraline +differences, and were not present in either input sequence. These lines +can be confusing if the sequences contain tab characters. + +Note that Differ makes no claim to produce a *minimal* diff. To the +contrary, minimal diffs are often counter-intuitive, because they synch +up anywhere possible, sometimes accidental matches 100 pages apart. +Restricting synch points to contiguous matches preserves some notion of +locality, at the occasional cost of producing a longer diff. + +Example: Comparing two texts. + +First we set up the texts, sequences of individual single-line strings +ending with newlines (such sequences can also be obtained from the +`readlines()` method of file-like objects): + +>>> text1 = ''' 1. 
Beautiful is better than ugly. +... 2. Explicit is better than implicit. +... 3. Simple is better than complex. +... 4. Complex is better than complicated. +... '''.splitlines(keepends=True) +>>> len(text1) +4 +>>> text1[0][-1] +'\\n' +>>> text2 = ''' 1. Beautiful is better than ugly. +... 3. Simple is better than complex. +... 4. Complicated is better than complex. +... 5. Flat is better than nested. +... '''.splitlines(keepends=True) + +Next we instantiate a Differ object: + +>>> d = Differ() + +Note that when instantiating a Differ object we may pass functions to +filter out line and character 'junk'. See Differ.__init__ for details. + +Finally, we compare the two: + +>>> result = list(d.compare(text1, text2)) + +'result' is a list of strings, so let's pretty-print it: + +>>> from pprint import pprint as _pprint +>>> _pprint(result) +[' 1. Beautiful is better than ugly.\\n', + '- 2. Explicit is better than implicit.\\n', + '- 3. Simple is better than complex.\\n', + '+ 3. Simple is better than complex.\\n', + '? ++\\n', + '- 4. Complex is better than complicated.\\n', + '? ^ ---- ^\\n', + '+ 4. Complicated is better than complex.\\n', + '? ++++ ^ ^\\n', + '+ 5. Flat is better than nested.\\n'] + +As a single multi-line string it looks like this: + +>>> print(''.join(result), end="") + 1. Beautiful is better than ugly. +- 2. Explicit is better than implicit. +- 3. Simple is better than complex. ++ 3. Simple is better than complex. +? ++ +- 4. Complex is better than complicated. +? ^ ---- ^ ++ 4. Complicated is better than complex. +? ++++ ^ ^ ++ 5. Flat is better than nested. +""" + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: + """ +Construct a text differencer, with optional filters. + +The two optional keyword parameters are for filter functions: + +- `linejunk`: A function that should accept a single string argument, + and return true iff the string is junk. The module-level function + `IS_LINE_JUNK` may be used to filter out lines without visible + characters, except for at most one splat ('#'). It is recommended + to leave linejunk None; the underlying SequenceMatcher class has + an adaptive notion of "noise" lines that's better than any static + definition the author has ever been able to craft. + +- `charjunk`: A function that should accept a string of length 1. The + module-level function `IS_CHARACTER_JUNK` may be used to filter out + whitespace characters (a blank or tab; **note**: bad idea to include + newline in this!). Use of IS_CHARACTER_JUNK is recommended. +""" + def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: + """ +Compare two sequences of lines; generate the resulting delta. + +Each sequence must contain individual single-line strings ending with +newlines. Such sequences can be obtained from the `readlines()` method +of file-like objects. The delta generated also consists of newline- +terminated strings, ready to be printed as-is via the writelines() +method of a file-like object. + +Example: + +>>> print(''.join(Differ().compare('one\\ntwo\\nthree\\n'.splitlines(True), +... 'ore\\ntree\\nemu\\n'.splitlines(True))), +... end="") +- one +? ^ ++ ore +? ^ +- two +- three +? - ++ tree ++ emu +""" if sys.version_info >= (3, 14): - def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: ... 
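Again not part of the patch: the compare() example above rendered as a small runnable script, so Differ.compare and the module-level ndiff wrapper (documented further down) can be tried side by side.

from difflib import Differ, ndiff

a = "one\ntwo\nthree\n".splitlines(keepends=True)
b = "ore\ntree\nemu\n".splitlines(keepends=True)

# Differ.compare yields newline-terminated lines prefixed with '  ', '- ', '+ ' or '? '.
print("".join(Differ().compare(a, b)), end="")

# ndiff() wraps Differ with the default linejunk/charjunk filters.
print("".join(ndiff(a, b)), end="")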
+ def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: + """ +Return True for ignorable line: if `line` is blank or contains a single '#'. + +Examples: + +>>> IS_LINE_JUNK('\\n') +True +>>> IS_LINE_JUNK(' # \\n') +True +>>> IS_LINE_JUNK('hello\\n') +False +""" else: - def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: ... + def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: + """ +Return True for ignorable line: iff `line` is blank or contains a single '#'. + +Examples: + +>>> IS_LINE_JUNK('\\n') +True +>>> IS_LINE_JUNK(' # \\n') +True +>>> IS_LINE_JUNK('hello\\n') +False +""" + +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: # ws is undocumented + """ +Return True for ignorable character: iff `ch` is a space or tab. -def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... # ws is undocumented +Examples: + +>>> IS_CHARACTER_JUNK(' ') +True +>>> IS_CHARACTER_JUNK('\\t') +True +>>> IS_CHARACTER_JUNK('\\n') +False +>>> IS_CHARACTER_JUNK('x') +False +""" def unified_diff( a: Sequence[str], b: Sequence[str], @@ -78,7 +583,45 @@ def unified_diff( tofiledate: str = "", n: int = 3, lineterm: str = "\n", -) -> Iterator[str]: ... +) -> Iterator[str]: + """ +Compare two sequences of lines; generate the delta as a unified diff. + +Unified diffs are a compact way of showing line changes and a few +lines of context. The number of context lines is set by 'n' which +defaults to three. + +By default, the diff control lines (those with ---, +++, or @@) are +created with a trailing newline. This is helpful so that inputs +created from file.readlines() result in diffs that are suitable for +file.writelines() since both the inputs and outputs have trailing +newlines. + +For inputs that do not have trailing newlines, set the lineterm +argument to "" so that the output will be uniformly newline free. + +The unidiff format normally has a header for filenames and modification +times. Any or all of these may be specified using strings for +'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. +The modification times are normally expressed in the ISO 8601 format. + +Example: + +>>> for line in unified_diff('one two three four'.split(), +... 'zero one tree four'.split(), 'Original', 'Current', +... '2005-01-26 23:30:50', '2010-04-02 10:20:52', +... lineterm=''): +... print(line) # doctest: +NORMALIZE_WHITESPACE +--- Original 2005-01-26 23:30:50 ++++ Current 2010-04-02 10:20:52 +@@ -1,4 +1,4 @@ ++zero + one +-two +-three ++tree + four +""" def context_diff( a: Sequence[str], b: Sequence[str], @@ -88,22 +631,121 @@ def context_diff( tofiledate: str = "", n: int = 3, lineterm: str = "\n", -) -> Iterator[str]: ... +) -> Iterator[str]: + """ +Compare two sequences of lines; generate the delta as a context diff. + +Context diffs are a compact way of showing line changes and a few +lines of context. The number of context lines is set by 'n' which +defaults to three. + +By default, the diff control lines (those with *** or ---) are +created with a trailing newline. This is helpful so that inputs +created from file.readlines() result in diffs that are suitable for +file.writelines() since both the inputs and outputs have trailing +newlines. + +For inputs that do not have trailing newlines, set the lineterm +argument to "" so that the output will be uniformly newline free. + +The context diff format normally has a header for filenames and +modification times. 
Any or all of these may be specified using +strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. +The modification times are normally expressed in the ISO 8601 format. +If not specified, the strings default to blanks. + +Example: + +>>> print(''.join(context_diff('one\\ntwo\\nthree\\nfour\\n'.splitlines(True), +... 'zero\\none\\ntree\\nfour\\n'.splitlines(True), 'Original', 'Current')), +... end="") +*** Original +--- Current +*************** +*** 1,4 **** + one +! two +! three + four +--- 1,4 ---- ++ zero + one +! tree + four +""" def ndiff( a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., -) -> Iterator[str]: ... +) -> Iterator[str]: + """ +Compare `a` and `b` (lists of strings); return a `Differ`-style delta. + +Optional keyword parameters `linejunk` and `charjunk` are for filter +functions, or can be None: + +- linejunk: A function that should accept a single string argument and + return true iff the string is junk. The default is None, and is + recommended; the underlying SequenceMatcher class has an adaptive + notion of "noise" lines. + +- charjunk: A function that accepts a character (string of length + 1), and returns true iff the character is junk. The default is + the module-level function IS_CHARACTER_JUNK, which filters out + whitespace characters (a blank or tab; note: it's a bad idea to + include newline in this!). + +Tools/scripts/ndiff.py is a command-line front-end to this function. + +Example: + +>>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), +... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) +>>> print(''.join(diff), end="") +- one +? ^ ++ ore +? ^ +- two +- three +? - ++ tree ++ emu +""" class HtmlDiff: + """For producing HTML side by side comparison with change highlights. + +This class can be used to create an HTML table (or a complete HTML file +containing the table) showing a side by side, line by line comparison +of text with inter-line and intra-line change highlights. The table can +be generated in either full or contextual difference mode. + +The following methods are provided for HTML generation: + +make_table -- generates HTML for a single side by side table +make_file -- generates complete HTML file with a single side by side table + +See tools/scripts/diff.py for an example usage of this class. +""" def __init__( self, tabsize: int = 8, wrapcolumn: int | None = None, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., - ) -> None: ... + ) -> None: + """HtmlDiff instance initializer + +Arguments: +tabsize -- tab stop spacing, defaults to 8. +wrapcolumn -- column number where lines are broken and wrapped, + defaults to None where lines are not wrapped. +linejunk,charjunk -- keyword arguments passed into ndiff() (used by + HtmlDiff() to generate the side by side HTML differences). See + ndiff() documentation for argument default values and descriptions. +""" def make_file( self, fromlines: Sequence[str], @@ -114,7 +756,23 @@ class HtmlDiff: numlines: int = 5, *, charset: str = "utf-8", - ) -> str: ... + ) -> str: + """Returns HTML file of side by side comparison with change highlights + +Arguments: +fromlines -- list of "from" lines +tolines -- list of "to" lines +fromdesc -- "from" file column header string +todesc -- "to" file column header string +context -- set to True for contextual differences (defaults to False + which shows full differences). +numlines -- number of context lines. 
When context is set True, + controls number of lines displayed before and after the change. + When context is False, controls the number of lines to place + the "next" link anchors before the next change (so click of + "next" link jumps to just before the change). +charset -- charset of the HTML document +""" def make_table( self, fromlines: Sequence[str], @@ -123,9 +781,45 @@ class HtmlDiff: todesc: str = "", context: bool = False, numlines: int = 5, - ) -> str: ... + ) -> str: + """Returns HTML table of side by side comparison with change highlights + +Arguments: +fromlines -- list of "from" lines +tolines -- list of "to" lines +fromdesc -- "from" file column header string +todesc -- "to" file column header string +context -- set to True for contextual differences (defaults to False + which shows full differences). +numlines -- number of context lines. When context is set True, + controls number of lines displayed before and after the change. + When context is False, controls the number of lines to place + the "next" link anchors before the next change (so click of + "next" link jumps to just before the change). +""" + +def restore(delta: Iterable[str], which: int) -> Iterator[str]: + """ +Generate one of the two sequences that generated a delta. + +Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract +lines originating from file 1 or 2 (parameter `which`), stripping off line +prefixes. + +Examples: -def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... +>>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), +... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) +>>> diff = list(diff) +>>> print(''.join(restore(diff, 1)), end="") +one +two +three +>>> print(''.join(restore(diff, 2)), end="") +ore +tree +emu +""" def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Iterable[bytes | bytearray], @@ -136,4 +830,13 @@ def diff_bytes( tofiledate: bytes | bytearray = b"", n: int = 3, lineterm: bytes | bytearray = b"\n", -) -> Iterator[bytes]: ... +) -> Iterator[bytes]: + """ +Compare `a` and `b`, two sequences of lines represented as bytes rather +than str. This is a wrapper for `dfunc`, which is typically either +unified_diff() or context_diff(). Inputs are losslessly converted to +strings so that `dfunc` only has to worry about strings, and encoded +back to bytes on return. This is necessary to compare files with +unknown or inconsistent encoding. All other inputs (except `n`) must be +bytes rather than str. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi index 896b50fa93847..cc67e045679e2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi @@ -1,3 +1,5 @@ +"""Disassembler of Python byte code into mnemonics. 
+""" import sys import types from collections.abc import Callable, Iterator @@ -45,6 +47,8 @@ _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeTyp if sys.version_info >= (3, 11): class Positions(NamedTuple): + """Positions(lineno, end_lineno, col_offset, end_col_offset) +""" lineno: int | None = None end_lineno: int | None = None col_offset: int | None = None @@ -52,6 +56,8 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 13): class _Instruction(NamedTuple): + """_Instruction(opname, opcode, arg, argval, argrepr, offset, start_offset, starts_line, line_number, label, positions, cache_info) +""" opname: str opcode: int arg: int | None @@ -67,6 +73,8 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 11): class _Instruction(NamedTuple): + """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target, positions) +""" opname: str opcode: int arg: int | None @@ -79,6 +87,8 @@ elif sys.version_info >= (3, 11): else: class _Instruction(NamedTuple): + """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target) +""" opname: str opcode: int arg: int | None @@ -90,23 +100,68 @@ else: if sys.version_info >= (3, 12): class Instruction(_Instruction): + """Details for a bytecode operation. + +Defined fields: + opname - human readable name for operation + opcode - numeric code for operation + arg - numeric argument to operation (if any), otherwise None + argval - resolved arg value (if known), otherwise same as arg + argrepr - human readable description of operation argument + offset - start index of operation within bytecode sequence + start_offset - start index of operation within bytecode sequence including extended args if present; + otherwise equal to Instruction.offset + starts_line - True if this opcode starts a source line, otherwise False + line_number - source line number associated with this opcode (if any), otherwise None + label - A label if this instruction is a jump target, otherwise None + positions - Optional dis.Positions object holding the span of source code + covered by this instruction + cache_info - information about the format and content of the instruction's cache + entries (if any) +""" if sys.version_info < (3, 13): - def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: + """Format instruction details for inclusion in disassembly output + + *lineno_width* sets the width of the line number field (0 omits it) + *mark_as_current* inserts a '-->' marker arrow as part of the line + *offset_width* sets the width of the instruction offset field + """ if sys.version_info >= (3, 13): @property - def oparg(self) -> int: ... + def oparg(self) -> int: + """Alias for Instruction.arg. +""" @property - def baseopcode(self) -> int: ... + def baseopcode(self) -> int: + """Numeric code for the base operation if operation is specialized. + +Otherwise equal to Instruction.opcode. +""" @property - def baseopname(self) -> str: ... + def baseopname(self) -> str: + """Human readable name for the base operation if operation is specialized. + +Otherwise equal to Instruction.opname. +""" @property - def cache_offset(self) -> int: ... + def cache_offset(self) -> int: + """Start index of the cache entries following the operation. +""" @property - def end_offset(self) -> int: ... 
+ def end_offset(self) -> int: + """End index of the cache entries following the operation. +""" @property - def jump_target(self) -> int: ... + def jump_target(self) -> int: + """Bytecode index of the jump target if this is a jump operation. + +Otherwise return None. +""" @property - def is_jump_target(self) -> bool: ... + def is_jump_target(self) -> bool: + """True if other code jumps to here, otherwise False +""" if sys.version_info >= (3, 14): @staticmethod def make( @@ -126,9 +181,36 @@ if sys.version_info >= (3, 12): else: @disjoint_base class Instruction(_Instruction): - def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: ... + """Details for a bytecode operation + + Defined fields: + opname - human readable name for operation + opcode - numeric code for operation + arg - numeric argument to operation (if any), otherwise None + argval - resolved arg value (if known), otherwise same as arg + argrepr - human readable description of operation argument + offset - start index of operation within bytecode sequence + starts_line - line started by this opcode (if any), otherwise None + is_jump_target - True if other code jumps to here, otherwise False + positions - Optional dis.Positions object holding the span of source code + covered by this instruction + """ + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: + """Format instruction details for inclusion in disassembly output + + *lineno_width* sets the width of the line number field (0 omits it) + *mark_as_current* inserts a '-->' marker arrow as part of the line + *offset_width* sets the width of the instruction offset field + """ class Bytecode: + """The bytecode operations of a piece of code + +Instantiate this with a function, method, other compiled object, string of +code, or a code object (as returned by compile()). + +Iterating over this yields the bytecode operations as Instruction instances. +""" codeobj: types.CodeType first_line: int if sys.version_info >= (3, 14): @@ -175,21 +257,43 @@ class Bytecode: if sys.version_info >= (3, 11): @classmethod - def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: + """Construct a Bytecode from the given traceback +""" else: @classmethod - def from_traceback(cls, tb: types.TracebackType) -> Self: ... + def from_traceback(cls, tb: types.TracebackType) -> Self: + """ Construct a Bytecode from the given traceback +""" def __iter__(self) -> Iterator[Instruction]: ... - def info(self) -> str: ... - def dis(self) -> str: ... + def info(self) -> str: + """Return formatted information about the code object. +""" + def dis(self) -> str: + """Return a formatted view of the bytecode operations. +""" COMPILER_FLAG_NAMES: Final[dict[int, str]] -def findlabels(code: _HaveCodeType) -> list[int]: ... -def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... -def pretty_flags(flags: int) -> str: ... -def code_info(x: _HaveCodeType | str) -> str: ... +def findlabels(code: _HaveCodeType) -> list[int]: + """Detect all offsets in a byte code which are jump targets. + +Return the list of offsets. + +""" +def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: + """Find the offsets in a byte code which are start of lines in the source. 
+ +Generate pairs (offset, lineno) +lineno will be an integer or None the offset does not have a source line. +""" +def pretty_flags(flags: int) -> str: + """Return pretty representation of code flags. +""" +def code_info(x: _HaveCodeType | str) -> str: + """Formatted details of methods, functions, or code. +""" if sys.version_info >= (3, 14): # 3.14 added `show_positions` @@ -202,7 +306,15 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: ... + ) -> None: + """Disassemble classes, methods, functions, and other compiled objects. + +With no argument, disassemble the last traceback. + +Compiled objects currently include generator objects, async generator +objects, and coroutine objects, all of which store their code object +in a special attribute. +""" def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -212,7 +324,9 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: ... + ) -> None: + """Disassemble a code object. +""" def distb( tb: types.TracebackType | None = None, *, @@ -221,7 +335,9 @@ if sys.version_info >= (3, 14): adaptive: bool = False, show_offsets: bool = False, show_positions: bool = False, - ) -> None: ... + ) -> None: + """Disassemble a traceback (default: last traceback). +""" elif sys.version_info >= (3, 13): # 3.13 added `show_offsets` @@ -233,7 +349,15 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: ... + ) -> None: + """Disassemble classes, methods, functions, and other compiled objects. + +With no argument, disassemble the last traceback. + +Compiled objects currently include generator objects, async generator +objects, and coroutine objects, all of which store their code object +in a special attribute. +""" def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -242,7 +366,9 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: ... + ) -> None: + """Disassemble a code object. +""" def distb( tb: types.TracebackType | None = None, *, @@ -250,7 +376,9 @@ elif sys.version_info >= (3, 13): show_caches: bool = False, adaptive: bool = False, show_offsets: bool = False, - ) -> None: ... + ) -> None: + """Disassemble a traceback (default: last traceback). +""" elif sys.version_info >= (3, 11): # 3.11 added `show_caches` and `adaptive` @@ -261,35 +389,93 @@ elif sys.version_info >= (3, 11): depth: int | None = None, show_caches: bool = False, adaptive: bool = False, - ) -> None: ... + ) -> None: + """Disassemble classes, methods, functions, and other compiled objects. + + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ def disassemble( co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False - ) -> None: ... + ) -> None: + """Disassemble a code object. +""" def distb( tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False - ) -> None: ... + ) -> None: + """Disassemble a traceback (default: last traceback). +""" else: def dis( x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None - ) -> None: ... 
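A short, illustrative driver for the dis entry points stubbed above (dis, code_info, get_instructions); it is not part of the vendored stubs and should run on any recent CPython.

import dis

def add(a, b):
    return a + b

dis.dis(add)                        # formatted disassembly, written to stdout
print(dis.code_info(add))           # formatted details of the underlying code object

# get_instructions() yields Instruction named tuples with the fields listed above.
for ins in dis.get_instructions(add):
    print(ins.offset, ins.opname, ins.argrepr)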
- def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... - def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + ) -> None: + """Disassemble classes, methods, functions, and other compiled objects. + + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: + """Disassemble a code object. +""" + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: + """Disassemble a traceback (default: last traceback). +""" if sys.version_info >= (3, 13): # 3.13 made `show_cache` `None` by default def get_instructions( x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool | None = None, adaptive: bool = False - ) -> Iterator[Instruction]: ... + ) -> Iterator[Instruction]: + """Iterator for the opcodes in methods, functions or code + +Generates a series of Instruction named tuples giving the details of +each operations in the supplied code. + +If *first_line* is not None, it indicates the line number that should +be reported for the first source line in the disassembled code. +Otherwise, the source line information (if any) is taken directly from +the disassembled code object. +""" elif sys.version_info >= (3, 11): def get_instructions( x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False - ) -> Iterator[Instruction]: ... + ) -> Iterator[Instruction]: + """Iterator for the opcodes in methods, functions or code + + Generates a series of Instruction named tuples giving the details of + each operations in the supplied code. + + If *first_line* is not None, it indicates the line number that should + be reported for the first source line in the disassembled code. + Otherwise, the source line information (if any) is taken directly from + the disassembled code object. + """ else: - def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: + """Iterator for the opcodes in methods, functions or code + + Generates a series of Instruction named tuples giving the details of + each operations in the supplied code. + + If *first_line* is not None, it indicates the line number that should + be reported for the first source line in the disassembled code. + Otherwise, the source line information (if any) is taken directly from + the disassembled code object. + """ + +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: + """Print details of methods, functions, or code to *file*. -def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... +If *file* is not provided, the output is printed on stdout. +""" disco = disassemble diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi index 328a5b7834419..bce6972bf50f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi @@ -1,3 +1,12 @@ +"""distutils + +The main package for the Python Module Distribution Utilities. 
Normally +used from a setup script as + + from distutils.core import setup + + setup (...) +""" # Attempts to improve these stubs are probably not the best use of time: # - distutils is deleted in Python 3.12 and newer # - Most users already do not use stdlib distutils, due to setuptools monkeypatching diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi index 16684ff069568..22b3a7afb4bc3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi @@ -1,3 +1,8 @@ +"""distutils.archive_util + +Utility functions for creating archive files (tarballs, zip files, +that sort of thing). +""" from _typeshed import StrOrBytesPath, StrPath from typing import Literal, overload @@ -11,7 +16,23 @@ def make_archive( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: ... +) -> str: + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "gztar", + "bztar", "xztar", or "ztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. + """ @overload def make_archive( base_name: StrPath, @@ -31,5 +52,28 @@ def make_tarball( dry_run: bool | Literal[0, 1] = 0, owner: str | None = None, group: str | None = None, -) -> str: ... -def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: ... +) -> str: + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or + None. ("compress" will be deprecated in Python 3.2) + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_dir' + ".tar", possibly plus + the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). + + Returns the output filename. + """ +def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises DistutilsExecError. Returns the name of the output zip + file. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi index 3e432f94b525d..4add3a267b923 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi @@ -1,3 +1,11 @@ +"""distutils.bcppcompiler + +Contains BorlandCCompiler, an implementation of the abstract CCompiler class +for the Borland C++ compiler. +""" from distutils.ccompiler import CCompiler -class BCPPCompiler(CCompiler): ... +class BCPPCompiler(CCompiler): + """Concrete class that implements an interface to the Borland C/C++ + compiler, as defined by the CCompiler abstract class. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi index 5bff209807eef..017f0e2c8bbd3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi @@ -1,3 +1,8 @@ +"""distutils.ccompiler + +Contains CCompiler, an abstract base class that defines the interface +for the Distutils compiler abstraction model. +""" from _typeshed import BytesPath, StrPath, Unused from collections.abc import Callable, Iterable, Sequence from distutils.file_util import _BytesPathT, _StrPathT @@ -9,19 +14,68 @@ _Ts = TypeVarTuple("_Ts") def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] -) -> list[str]: ... -def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... -def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... +) -> list[str]: + """Generate linker options for searching library directories and + linking with specific libraries. 'libraries' and 'library_dirs' are, + respectively, lists of library names (not filenames!) and search + directories. Returns a list of command-line options suitable for use + with some compiler (depending on the two format strings passed in). + """ +def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: + """Generate C pre-processor options (-D, -U, -I) as used by at least + two types of compilers: the typical Unix compiler and Visual C++. + 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) + means undefine (-U) macro 'name', and (name,value) means define (-D) + macro 'name' to 'value'. 'include_dirs' is just a list of directory + names to be added to the header file search path (-I). Returns a list + of command-line options suitable for either Unix compilers or Visual + C++. + """ +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: + """Determine the default compiler to use for the given platform. + + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. + + The default values are os.name and sys.platform in case the + parameters are not given. + """ def new_compiler( plat: str | None = None, compiler: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0, -) -> CCompiler: ... -def show_compilers() -> None: ... +) -> CCompiler: + """Generate an instance of some CCompiler subclass for the supplied + platform/compiler combination. 
'plat' defaults to 'os.name' + (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler + for that platform. Currently only 'posix' and 'nt' are supported, and + the default compilers are "traditional Unix interface" (UnixCCompiler + class) and Visual C++ (MSVCCompiler class). Note that it's perfectly + possible to ask for a Unix compiler object under Windows, and a + Microsoft compiler object under Unix -- if you supply a value for + 'compiler', 'plat' is ignored. + """ +def show_compilers() -> None: + """Print list of available compilers (used by the "--help-compiler" + options to "build", "build_ext", "build_clib"). + """ class CCompiler: + """Abstract base class to define the interface that must be implemented + by real compiler classes. Also has some utility methods used by + several compiler classes. + + The basic idea behind a compiler abstraction class is that each + instance can be used for all the compile/link steps in building a + single project. Thus, attributes common to all of those compile and + link steps -- include directories, macros to define, libraries to link + against, etc. -- are attributes of the compiler instance. To allow for + variability in how individual files are treated, most of those + attributes may be varied on a per-compilation or per-link basis. + """ dry_run: bool force: bool verbose: bool @@ -35,20 +89,100 @@ class CCompiler: def __init__( self, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, force: bool | Literal[0, 1] = 0 ) -> None: ... - def add_include_dir(self, dir: str) -> None: ... - def set_include_dirs(self, dirs: list[str]) -> None: ... - def add_library(self, libname: str) -> None: ... - def set_libraries(self, libnames: list[str]) -> None: ... - def add_library_dir(self, dir: str) -> None: ... - def set_library_dirs(self, dirs: list[str]) -> None: ... - def add_runtime_library_dir(self, dir: str) -> None: ... - def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... - def define_macro(self, name: str, value: str | None = None) -> None: ... - def undefine_macro(self, name: str) -> None: ... - def add_link_object(self, object: str) -> None: ... - def set_link_objects(self, objects: list[str]) -> None: ... - def detect_language(self, sources: str | list[str]) -> str | None: ... - def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: ... + def add_include_dir(self, dir: str) -> None: + """Add 'dir' to the list of directories that will be searched for + header files. The compiler is instructed to search directories in + the order in which they are supplied by successive calls to + 'add_include_dir()'. + """ + def set_include_dirs(self, dirs: list[str]) -> None: + """Set the list of directories that will be searched to 'dirs' (a + list of strings). Overrides any preceding calls to + 'add_include_dir()'; subsequence calls to 'add_include_dir()' add + to the list passed to 'set_include_dirs()'. This does not affect + any list of standard include directories that the compiler may + search by default. + """ + def add_library(self, libname: str) -> None: + """Add 'libname' to the list of libraries that will be included in + all links driven by this compiler object. Note that 'libname' + should *not* be the name of a file containing a library, but the + name of the library itself: the actual filename will be inferred by + the linker, the compiler, or the compiler class (depending on the + platform). 
+ + The linker will be instructed to link against libraries in the + order they were supplied to 'add_library()' and/or + 'set_libraries()'. It is perfectly valid to duplicate library + names; the linker will be instructed to link against libraries as + many times as they are mentioned. + """ + def set_libraries(self, libnames: list[str]) -> None: + """Set the list of libraries to be included in all links driven by + this compiler object to 'libnames' (a list of strings). This does + not affect any standard system libraries that the linker may + include by default. + """ + def add_library_dir(self, dir: str) -> None: + """Add 'dir' to the list of directories that will be searched for + libraries specified to 'add_library()' and 'set_libraries()'. The + linker will be instructed to search for libraries in the order they + are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. + """ + def set_library_dirs(self, dirs: list[str]) -> None: + """Set the list of library search directories to 'dirs' (a list of + strings). This does not affect any standard library search path + that the linker may search by default. + """ + def add_runtime_library_dir(self, dir: str) -> None: + """Add 'dir' to the list of directories that will be searched for + shared libraries at runtime. + """ + def set_runtime_library_dirs(self, dirs: list[str]) -> None: + """Set the list of directories to search for shared libraries at + runtime to 'dirs' (a list of strings). This does not affect any + standard search path that the runtime linker may search by + default. + """ + def define_macro(self, name: str, value: str | None = None) -> None: + """Define a preprocessor macro for all compilations driven by this + compiler object. The optional parameter 'value' should be a + string; if it is not supplied, then the macro will be defined + without an explicit value and the exact outcome depends on the + compiler used (XXX true? does ANSI say anything about this?) + """ + def undefine_macro(self, name: str) -> None: + """Undefine a preprocessor macro for all compilations driven by + this compiler object. If the same macro is defined by + 'define_macro()' and undefined by 'undefine_macro()' the last call + takes precedence (including multiple redefinitions or + undefinitions). If the macro is redefined/undefined on a + per-compilation basis (ie. in the call to 'compile()'), then that + takes precedence. + """ + def add_link_object(self, object: str) -> None: + """Add 'object' to the list of object files (or analogues, such as + explicitly named library files or the output of "resource + compilers") to be included in every link driven by this compiler + object. + """ + def set_link_objects(self, objects: list[str]) -> None: + """Set the list of object files (or analogues) to be included in + every link to 'objects'. This does not affect any standard object + files that the linker may include by default (such as system + libraries). + """ + def detect_language(self, sources: str | list[str]) -> str | None: + """Detect the language of a given file, or list of files. Uses + language_map, and language_order to do the job. + """ + def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: + """Search the specified list of directories for a static or shared + library file 'lib' and return the full path to that file. If + 'debug' true, look for a debugging version (if that makes sense on + the current platform). Return None if 'lib' wasn't found in any of + the specified directories. 
+ """ def has_function( self, funcname: str, @@ -56,11 +190,40 @@ class CCompiler: include_dirs: list[str] | None = None, libraries: list[str] | None = None, library_dirs: list[str] | None = None, - ) -> bool: ... - def library_dir_option(self, dir: str) -> str: ... - def library_option(self, lib: str) -> str: ... - def runtime_library_dir_option(self, dir: str) -> str: ... - def set_executables(self, **args: str) -> None: ... + ) -> bool: + """Return a boolean indicating whether funcname is supported on + the current platform. The optional arguments can be used to + augment the compilation environment. + """ + def library_dir_option(self, dir: str) -> str: + """Return the compiler option to add 'dir' to the list of + directories searched for libraries. + """ + def library_option(self, lib: str) -> str: + """Return the compiler option to add 'lib' to the list of libraries + linked into the shared library or executable. + """ + def runtime_library_dir_option(self, dir: str) -> str: + """Return the compiler option to add 'dir' to the list of + directories searched for runtime libraries. + """ + def set_executables(self, **args: str) -> None: + """Define the executables (and options for them) that will be run + to perform the various stages of compilation. The exact set of + executables that may be specified here depends on the compiler + class (via the 'executables' class attribute), but most will have: + compiler the C/C++ compiler + linker_so linker used to create shared objects and libraries + linker_exe linker used to create binary executables + archiver static library creator + + On platforms with a command-line (Unix, DOS/Windows), each of these + is a string that will be split into executable name and (optional) + list of arguments. (Splitting the string is done similarly to how + Unix shells operate: words are delimited by spaces, but quotes and + backslashes can override this. See + 'distutils.util.split_quoted()'.) + """ def compile( self, sources: Sequence[StrPath], @@ -71,7 +234,55 @@ class CCompiler: extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, depends: list[str] | None = None, - ) -> list[str]: ... + ) -> list[str]: + """Compile one or more source files. + + 'sources' must be a list of filenames, most likely C/C++ + files, but in reality anything that can be handled by a + particular compiler and compiler class (eg. MSVCCompiler can + handle resource files in 'sources'). Return a list of object + filenames, one per source filename in 'sources'. Depending on + the implementation, not all source files will necessarily be + compiled, but all corresponding object filenames will be + returned. + + If 'output_dir' is given, object files will be put under it, while + retaining their original path component. That is, "foo/bar.c" + normally compiles to "foo/bar.o" (for a Unix implementation); if + 'output_dir' is "build", then it would compile to + "build/foo/bar.o". + + 'macros', if given, must be a list of macro definitions. A macro + definition is either a (name, value) 2-tuple or a (name,) 1-tuple. + The former defines a macro; if the value is None, the macro is + defined without an explicit value. The 1-tuple case undefines a + macro. Later definitions/redefinitions/ undefinitions take + precedence. + + 'include_dirs', if given, must be a list of strings, the + directories to add to the default include file search path for this + compilation only. 
+ + 'debug' is a boolean; if true, the compiler will be instructed to + output debug symbols in (or alongside) the object file(s). + + 'extra_preargs' and 'extra_postargs' are implementation- dependent. + On platforms that have the notion of a command-line (e.g. Unix, + DOS/Windows), they are most likely lists of strings: extra + command-line arguments to prepend/append to the compiler command + line. On other platforms, consult the implementation class + documentation. In any event, they are intended as an escape hatch + for those occasions when the abstract compiler framework doesn't + cut the mustard. + + 'depends', if given, is a list of filenames that all targets + depend on. If a source file is older than any file in + depends, then the source file will be recompiled. This + supports dependency tracking, but only at a coarse + granularity. + + Raises CompileError on failure. + """ def create_static_lib( self, objects: list[str], @@ -79,7 +290,29 @@ class CCompiler: output_dir: str | None = None, debug: bool | Literal[0, 1] = 0, target_lang: str | None = None, - ) -> None: ... + ) -> None: + """Link a bunch of stuff together to create a static library file. + The "bunch of stuff" consists of the list of object files supplied + as 'objects', the extra object files supplied to + 'add_link_object()' and/or 'set_link_objects()', the libraries + supplied to 'add_library()' and/or 'set_libraries()', and the + libraries supplied as 'libraries' (if any). + + 'output_libname' should be a library name, not a filename; the + filename will be inferred from the library name. 'output_dir' is + the directory where the library file will be put. + + 'debug' is a boolean; if true, debugging information will be + included in the library (note that on most platforms, it is the + compile step where this matters: the 'debug' flag is included here + just for consistency). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LibError on failure. + """ def link( self, target_desc: str, @@ -95,7 +328,50 @@ class CCompiler: extra_postargs: list[str] | None = None, build_temp: str | None = None, target_lang: str | None = None, - ) -> None: ... + ) -> None: + """Link a bunch of stuff together to create an executable or + shared library file. + + The "bunch of stuff" consists of the list of object files supplied + as 'objects'. 'output_filename' should be a filename. If + 'output_dir' is supplied, 'output_filename' is relative to it + (i.e. 'output_filename' can provide directory components if + needed). + + 'libraries' is a list of libraries to link against. These are + library names, not filenames, since they're translated into + filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" + on Unix and "foo.lib" on DOS/Windows). However, they can include a + directory component, which means the linker will look in that + specific directory rather than searching all the normal locations. + + 'library_dirs', if supplied, should be a list of directories to + search for libraries that were specified as bare library names + (ie. no directory component). These are on top of the system + default and those supplied to 'add_library_dir()' and/or + 'set_library_dirs()'. 'runtime_library_dirs' is a list of + directories that will be embedded into the shared library and used + to search for other shared libraries that *it* depends on at + run-time. (This may only be relevant on Unix.) 
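The compile, archive, and link steps described here usually run back to back; a short sketch under the same assumptions as above, with illustrative file and library names:

    from distutils.ccompiler import new_compiler

    cc = new_compiler()
    # Returns the object file names, placed under build/ with their path kept.
    objects = cc.compile(["src/spam.c"], output_dir="build",
                         macros=[("NDEBUG", None)], include_dirs=["include"])
    # "spam" is a library name, not a file name: build/libspam.a on Unix.
    cc.create_static_lib(objects, "spam", output_dir="build")
    # Or produce a shared library; CCompiler.SHARED_LIBRARY selects target_desc.
    cc.link(cc.SHARED_LIBRARY, objects, "libspam.so",
            output_dir="build", libraries=["m"])
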
+ + 'export_symbols' is a list of symbols that the shared library will + export. (This appears to be relevant only on Windows.) + + 'debug' is as for 'compile()' and 'create_static_lib()', with the + slight distinction that it actually matters on most platforms (as + opposed to 'create_static_lib()', which includes a 'debug' flag + mostly for form's sake). + + 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except + of course that they supply command-line arguments for the + particular linker being used). + + 'target_lang' is the target language for which the given objects + are being compiled. This allows specific linkage time treatment of + certain languages. + + Raises LinkError on failure. + """ def link_executable( self, objects: list[str], @@ -147,7 +423,16 @@ class CCompiler: include_dirs: list[str] | None = None, extra_preargs: list[str] | None = None, extra_postargs: list[str] | None = None, - ) -> None: ... + ) -> None: + """Preprocess a single C/C++ source file, named in 'source'. + Output will be written to file named 'output_file', or stdout if + 'output_file' not supplied. 'macros' is a list of macro + definitions as for 'compile()', which will augment the macros set + with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a + list of directory names that will be added to the default list. + + Raises PreprocessError on failure. + """ @overload def executable_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi index 7f97bc3a2c9e0..cb48c0d275bf5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi @@ -1,3 +1,8 @@ +"""distutils.cmd + +Provides the Command class, the base class for the command classes +in the distutils.command package. +""" from _typeshed import BytesPath, StrOrBytesPath, StrPath, Unused from abc import abstractmethod from collections.abc import Callable, Iterable @@ -30,28 +35,109 @@ _CommandT = TypeVar("_CommandT", bound=Command) _Ts = TypeVarTuple("_Ts") class Command: + """Abstract base class for defining command classes, the "worker bees" + of the Distutils. A useful analogy for command classes is to think of + them as subroutines with local variables called "options". The options + are "declared" in 'initialize_options()' and "defined" (given their + final values, aka "finalized") in 'finalize_options()', both of which + must be defined by every command class. The distinction between the + two is necessary because option values might come from the outside + world (command line, config file, ...), and any options dependent on + other options must be computed *after* these outside influences have + been processed -- hence 'finalize_options()'. The "body" of the + subroutine, where it does all its work based on the values of its + options, is the 'run()' method, which must also be implemented by every + command class. + """ dry_run: bool | Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run distribution: Distribution # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] - def __init__(self, dist: Distribution) -> None: ... + def __init__(self, dist: Distribution) -> None: + """Create and initialize a new Command object. 
Most importantly, + invokes the 'initialize_options()' method, which is the real + initializer and depends on the actual command being + instantiated. + """ @abstractmethod - def initialize_options(self) -> None: ... + def initialize_options(self) -> None: + """Set default values for all the options that this command + supports. Note that these defaults may be overridden by other + commands, by the setup script, by config files, or by the + command-line. Thus, this is not the place to code dependencies + between options; generally, 'initialize_options()' implementations + are just a bunch of "self.foo = None" assignments. + + This method must be implemented by all command classes. + """ @abstractmethod - def finalize_options(self) -> None: ... + def finalize_options(self) -> None: + """Set final values for all the options that this command supports. + This is always called as late as possible, ie. after any option + assignments from the command-line or from other commands have been + done. Thus, this is the place to code option dependencies: if + 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as + long as 'foo' still has the same value it was assigned in + 'initialize_options()'. + + This method must be implemented by all command classes. + """ @abstractmethod - def run(self) -> None: ... - def announce(self, msg: str, level: int = 1) -> None: ... - def debug_print(self, msg: str) -> None: ... - def ensure_string(self, option: str, default: str | None = None) -> None: ... - def ensure_string_list(self, option: str) -> None: ... - def ensure_filename(self, option: str) -> None: ... + def run(self) -> None: + """A command's raison d'etre: carry out the action it exists to + perform, controlled by the options initialized in + 'initialize_options()', customized by other commands, the setup + script, the command-line, and config files, and finalized in + 'finalize_options()'. All terminal output and filesystem + interaction should be done by 'run()'. + + This method must be implemented by all command classes. + """ + def announce(self, msg: str, level: int = 1) -> None: + """If the current verbosity level is of greater than or equal to + 'level' print 'msg' to stdout. + """ + def debug_print(self, msg: str) -> None: + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. + """ + def ensure_string(self, option: str, default: str | None = None) -> None: + """Ensure that 'option' is a string; if not defined, set it to + 'default'. + """ + def ensure_string_list(self, option: str) -> None: + """Ensure that 'option' is a list of strings. If 'option' is + currently a string, we split it either on /,\\s*/ or /\\s+/, so + "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become + ["foo", "bar", "baz"]. + """ + def ensure_filename(self, option: str) -> None: + """Ensure that 'option' is the name of an existing file. +""" def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... - def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: + """Set the values of any "undefined" options from corresponding + option values in some other command object. "Undefined" here means + "is None", which is the convention used to indicate that an option + has not been changed between 'initialize_options()' and + 'finalize_options()'. 
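The declare/finalize/run split described in these docstrings maps onto a subclass shaped like the following sketch; the command, its option, and the message are hypothetical, but the hook names and the 'set_undefined_options()' call are the standard pattern:

    from distutils.cmd import Command

    class frobnicate(Command):                       # hypothetical command
        description = "frobnicate the build tree"
        user_options = [("build-dir=", "b", "directory to frobnicate")]

        def initialize_options(self):
            self.build_dir = None                    # declared, not yet final

        def finalize_options(self):
            # Borrow build_lib from the 'build' command if the user left it unset.
            self.set_undefined_options("build", ("build_lib", "build_dir"))

        def run(self):
            self.announce("frobnicating %s" % self.build_dir, level=2)
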
Usually called from 'finalize_options()' for + options that depend on some other command rather than another + option of the same command. 'src_cmd' is the other command from + which option values will be taken (a command object will be created + for it if necessary); the remaining arguments are + '(src_option,dst_option)' tuples which mean "take the value of + 'src_option' in the 'src_cmd' command object, and copy it to + 'dst_option' in the current command object". + """ # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. @overload - def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ... + def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: + """Wrapper around Distribution's 'get_command_obj()' method: find + (create if necessary and 'create' is true) the command object for + 'command', call its 'ensure_finalized()' method, and return the + finalized command object. + """ @overload def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ... @overload @@ -154,8 +240,18 @@ class Command: def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ... @overload def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ... - def run_command(self, command: str) -> None: ... - def get_sub_commands(self) -> list[str]: ... + def run_command(self, command: str) -> None: + """Run some other command: uses the 'run_command()' method of + Distribution, which creates and finalizes the command object if + necessary and then invokes its 'run()' method. + """ + def get_sub_commands(self) -> list[str]: + """Determine the sub-commands that are relevant in the current + distribution (ie., that need to be run). This is based on the + 'sub_commands' class attribute: each tuple in that list may include + a method that we call to determine if the subcommand needs to be + run for the current distribution. Return a list of command names. + """ def warn(self, msg: str) -> None: ... def execute( self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 @@ -170,7 +266,11 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, link: str | None = None, level: Unused = 1, - ) -> tuple[_StrPathT | str, bool]: ... + ) -> tuple[_StrPathT | str, bool]: + """Copy a file respecting verbose, dry-run and force flags. (The + former two default to whatever is in the Distribution object, and + the latter defaults to false for commands that don't define it.) +""" @overload def copy_file( self, @@ -189,12 +289,19 @@ class Command: preserve_times: bool | Literal[0, 1] = 1, preserve_symlinks: bool | Literal[0, 1] = 0, level: Unused = 1, - ) -> list[str]: ... + ) -> list[str]: + """Copy an entire directory tree respecting verbose, dry-run, + and force flags. + """ @overload - def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ... + def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: + """Move a file respecting dry-run flag. +""" @overload def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... - def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ... 
+ def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: + """Spawn an external command respecting dry-run flag. +""" @overload def make_archive( self, @@ -224,6 +331,14 @@ class Command: exec_msg: str | None = None, skip_msg: str | None = None, level: Unused = 1, - ) -> None: ... + ) -> None: + """Special case of 'execute()' for operations that process one or + more input files and generate one output file. Works just like + 'execute()', except the operation is skipped and a different + message printed if 'outfile' already exists and is newer than all + files listed in 'infiles'. If the command defined 'self.force', + and it is true, then the command is unconditionally run -- does no + timestamp checks. + """ def ensure_finalized(self) -> None: ... def dump_options(self, header=None, indent: str = "") -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi index 4d7372858af34..97efecec9fe65 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi @@ -1,3 +1,8 @@ +"""distutils.command + +Package containing implementation of all the standard Distutils +commands. +""" import sys from . import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi index 6f996207077e0..14f971394cfcd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi @@ -1,10 +1,17 @@ +"""distutils.command.bdist + +Implements the Distutils 'bdist' command (create a built [binary] +distribution). +""" from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar from ..cmd import Command -def show_formats() -> None: ... +def show_formats() -> None: + """Print list of available formats (arguments to "--format" option). + """ class bdist(Command): description: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi index 297a0c39ed430..2cf149b6b06e5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi @@ -1,3 +1,9 @@ +"""distutils.command.bdist_dumb + +Implements the Distutils 'bdist_dumb' command (create a "dumb" built +distribution -- i.e., just an archive to be unpacked under $prefix or +$exec_prefix). +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi index 83b4161094c51..0b3db82337b71 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi @@ -1,3 +1,8 @@ +"""distutils.command.bdist_rpm + +Implements the Distutils 'bdist_rpm' command (create RPM source and binary +distributions). 
+""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi index cf333bc5400dd..52c7e3ada9972 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -1,3 +1,8 @@ +"""distutils.command.bdist_wininst + +Implements the Distutils 'bdist_wininst' command: create a windows installer +exe-program. +""" from _typeshed import StrOrBytesPath from distutils.cmd import Command from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi index 3ec0c9614d62a..0e4609dd4d5c7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi @@ -1,3 +1,7 @@ +"""distutils.command.build + +Implements the Distutils 'build' command. +""" from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi index 69cfbe7120d8e..135365f2add16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi @@ -1,3 +1,9 @@ +"""distutils.command.build_clib + +Implements the Distutils 'build_clib' command, to build a C/C++ library +that is included in the module distribution and needed by an extension +module. +""" from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -23,7 +29,16 @@ class build_clib(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_library_list(self, libraries) -> None: ... + def check_library_list(self, libraries) -> None: + """Ensure that the list of libraries is valid. + + `library` is presumably provided as a command option 'libraries'. + This method checks that it is a list of 2-tuples, where the tuples + are (library_name, build_info_dict). + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. + """ def get_library_names(self): ... def get_source_files(self): ... def build_libraries(self, libraries) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi index c5a9b5d508f0d..797b1909a847a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi @@ -1,3 +1,9 @@ +"""distutils.command.build_ext + +Implements the Distutils 'build_ext' command, for building extension +modules (currently limited to C extensions, should accommodate C++ +extensions ASAP). +""" from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -38,15 +44,55 @@ class build_ext(Command): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_extensions_list(self, extensions) -> None: ... 
+ def check_extensions_list(self, extensions) -> None: + """Ensure that the list of extensions (presumably provided as a + command option 'extensions') is valid, i.e. it is a list of + Extension objects. We also support the old-style list of 2-tuples, + where the tuples are (ext_name, build_info), which are converted to + Extension instances here. + + Raise DistutilsSetupError if the structure is invalid anywhere; + just returns otherwise. + """ def get_source_files(self): ... def get_outputs(self): ... def build_extensions(self) -> None: ... def build_extension(self, ext) -> None: ... - def swig_sources(self, sources, extension): ... - def find_swig(self): ... - def get_ext_fullpath(self, ext_name: str) -> str: ... - def get_ext_fullname(self, ext_name: str) -> str: ... - def get_ext_filename(self, ext_name: str) -> str: ... - def get_export_symbols(self, ext): ... - def get_libraries(self, ext): ... + def swig_sources(self, sources, extension): + """Walk the list of source files in 'sources', looking for SWIG + interface (.i) files. Run SWIG on all that are found, and + return a modified 'sources' list with SWIG source files replaced + by the generated C (or C++) files. + """ + def find_swig(self): + """Return the name of the SWIG executable. On Unix, this is + just "swig" -- it should be in the PATH. Tries a bit harder on + Windows. + """ + def get_ext_fullpath(self, ext_name: str) -> str: + """Returns the path of the filename for a given extension. + + The file is located in `build_lib` or directly in the package + (inplace option). + """ + def get_ext_fullname(self, ext_name: str) -> str: + """Returns the fullname of a given extension name. + + Adds the `package.` prefix +""" + def get_ext_filename(self, ext_name: str) -> str: + """Convert the name of an extension (eg. "foo.bar") into the name + of the file from which it will be loaded (eg. "foo/bar.so", or + "foo\\bar.pyd"). + """ + def get_export_symbols(self, ext): + """Return the list of symbols that a shared extension has to + export. This either uses 'ext.export_symbols' or, if it's not + provided, "PyInit_" + module_name. Only relevant on Windows, where + the .pyd file (DLL) must export the module "PyInit_" function. + """ + def get_libraries(self, ext): + """Return the list of libraries to link against when building a + shared extension. On most platforms, this is just 'ext.libraries'; + on Windows, we add the Python library (eg. python20.dll). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi index 23ed230bb2d8c..36ca381b90cc0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi @@ -1,3 +1,7 @@ +"""distutils.command.build_py + +Implements the Distutils 'build_py' command. +""" from _typeshed import Incomplete from typing import ClassVar, Literal @@ -22,15 +26,39 @@ class build_py(Command): data_files: Incomplete def finalize_options(self) -> None: ... def run(self) -> None: ... - def get_data_files(self): ... - def find_data_files(self, package, src_dir): ... - def build_package_data(self) -> None: ... - def get_package_dir(self, package): ... 
+ def get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples +""" + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir' +""" + def build_package_data(self) -> None: + """Copy data files into build directory +""" + def get_package_dir(self, package): + """Return the directory, relative to the top of the source + distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any). +""" def check_package(self, package, package_dir): ... def check_module(self, module, module_file): ... def find_package_modules(self, package, package_dir): ... - def find_modules(self): ... - def find_all_modules(self): ... + def find_modules(self): + """Finds individually-specified Python modules, ie. those listed by + module name in 'self.py_modules'. Returns a list of tuples (package, + module_base, filename): 'package' is a tuple of the path through + package-space to the module; 'module_base' is the bare (no + packages, no dots) module name, and 'filename' is the path to the + ".py" file (relative to the distribution root) that implements the + module. + """ + def find_all_modules(self): + """Compute the list of all modules that will be built, whether + they are specified one-module-at-a-time ('self.py_modules') or + by whole packages ('self.packages'). Return a list of tuples + (package, module, module_file), just like 'find_modules()' and + 'find_package_modules()' do. +""" def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... def get_outputs(self, include_bytecode: bool | Literal[0, 1] = 1) -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi index 8372919bbd530..d655f16128620 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi @@ -1,3 +1,7 @@ +"""distutils.command.build_scripts + +Implements the Distutils 'build_scripts' command. +""" from _typeshed import Incomplete from typing import ClassVar @@ -19,7 +23,12 @@ class build_scripts(Command): def finalize_options(self) -> None: ... def get_source_files(self): ... def run(self) -> None: ... - def copy_scripts(self): ... + def copy_scripts(self): + """Copy each script listed in 'self.scripts'; if it's marked as a + Python script in the Unix way (first line matches 'first_line_re', + ie. starts with "\\#!" and contains "python"), then adjust the first + line to refer to the current Python interpreter as we copy. + """ class build_scripts_2to3(build_scripts, Mixin2to3): def copy_scripts(self): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi index 2c807fd2c4396..5bc852788a4ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi @@ -1,3 +1,7 @@ +"""distutils.command.check + +Implements the Distutils 'check' command. +""" from _typeshed import Incomplete from typing import Any, ClassVar, Final, Literal from typing_extensions import TypeAlias @@ -26,15 +30,35 @@ class SilentReporter(_Reporter): HAS_DOCUTILS: Final[bool] class check(Command): + """This command checks the meta-data of the package. 
+ """ description: str user_options: ClassVar[list[tuple[str, str, str]]] boolean_options: ClassVar[list[str]] restructuredtext: int metadata: int strict: int - def initialize_options(self) -> None: ... + def initialize_options(self) -> None: + """Sets default values for options. +""" def finalize_options(self) -> None: ... - def warn(self, msg): ... - def run(self) -> None: ... - def check_metadata(self) -> None: ... - def check_restructuredtext(self) -> None: ... + def warn(self, msg): + """Counts the number of warnings that occurs. +""" + def run(self) -> None: + """Runs the command. +""" + def check_metadata(self) -> None: + """Ensures that all required elements of meta-data are supplied. + + Required fields: + name, version, URL + + Recommended fields: + (author and author_email) or (maintainer and maintainer_email) + + Warns if any are missing. + """ + def check_restructuredtext(self) -> None: + """Checks if the long string fields are reST-compliant. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi index 0f3768d6dcf4d..1cdcc0f853d16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi @@ -1,3 +1,7 @@ +"""distutils.command.clean + +Implements the Distutils 'clean' command. +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi index 381e8e466bf16..bb9994ac2cf87 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi @@ -1,3 +1,13 @@ +"""distutils.command.config + +Implements the Distutils 'config' command, a (mostly) empty command class +that exists mainly to be sub-classed by specific module distributions and +applications. The idea is that while every "config" command is different, +at least they're all named the same, and users always see "config" in the +list of standard commands. Also, this is a good place to put common +configure-like tasks: "try to compile this C code", or "figure out where +this header file lives". +""" from _typeshed import StrOrBytesPath from collections.abc import Sequence from re import Pattern @@ -29,7 +39,13 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: ... + ) -> bool: + """Construct a source file from 'body' (a string containing lines + of C/C++ code) and 'headers' (a list of header files to include) + and run it through the preprocessor. Return true if the + preprocessor succeeded, false if there were any errors. + ('body' probably isn't of much use, but what the heck.) + """ def search_cpp( self, pattern: Pattern[str] | str, @@ -37,10 +53,20 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: ... + ) -> bool: + """Construct a source file (just like 'try_cpp()'), run it through + the preprocessor, and return true if any line of the output matches + 'pattern'. 'pattern' should either be a compiled regex object or a + string containing a regex. 
If both 'body' and 'headers' are None, + preprocesses an empty file -- which can be useful to determine the + symbols the preprocessor and compiler set by default. + """ def try_compile( self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: ... + ) -> bool: + """Try to compile a source file built from 'body' and 'headers'. + Return true on success, false otherwise. + """ def try_link( self, body: str, @@ -49,7 +75,11 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: ... + ) -> bool: + """Try to compile and link a source file, built from 'body' and + 'headers', to executable form. Return true on success, false + otherwise. + """ def try_run( self, body: str, @@ -58,7 +88,11 @@ class config(Command): libraries: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c", - ) -> bool: ... + ) -> bool: + """Try to compile, link to an executable, and run a program + built from 'body' and 'headers'. Return true on success, false + otherwise. + """ def check_func( self, func: str, @@ -68,7 +102,20 @@ class config(Command): library_dirs: Sequence[str] | None = None, decl: bool | Literal[0, 1] = 0, call: bool | Literal[0, 1] = 0, - ) -> bool: ... + ) -> bool: + """Determine if function 'func' is available by constructing a + source file that refers to 'func', and compiles and links it. + If everything succeeds, returns true; otherwise returns false. + + The constructed source file starts out by including the header + files listed in 'headers'. If 'decl' is true, it then declares + 'func' (as "int func()"); you probably shouldn't supply 'headers' + and set 'decl' true in the same call, or you might get errors about + a conflicting declarations for 'func'. Finally, the constructed + 'main()' function either references 'func' or (if 'call' is true) + calls it. 'libraries' and 'library_dirs' are used when + linking. + """ def check_lib( self, library: str, @@ -76,9 +123,25 @@ class config(Command): headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, other_libraries: list[str] = [], - ) -> bool: ... + ) -> bool: + """Determine if 'library' is available to be linked against, + without actually checking that any particular symbols are provided + by it. 'headers' will be used in constructing the source file to + be compiled, but the only effect of this is to check if all the + header files listed are available. Any libraries listed in + 'other_libraries' will be included in the link, in case 'library' + has symbols that depend on other libraries. + """ def check_header( self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" - ) -> bool: ... + ) -> bool: + """Determine if the system header file named by 'header_file' + exists and can be found by the preprocessor; return true if so, + false otherwise. + """ + +def dump_file(filename: StrOrBytesPath, head=None) -> None: + """Dumps a file content into log.info. -def dump_file(filename: StrOrBytesPath, head=None) -> None: ... + If head is not None, will be dumped before the file content. 
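These probes are normally driven from a 'config' subclass registered in a setup script; a minimal sketch that checks for zlib (the subclass name, the header, and the symbol being probed are illustrative):

    from distutils.command.config import config

    class config_zlib(config):                        # hypothetical subclass
        def run(self):
            have_header = self.check_header("zlib.h")
            have_lib = self.check_lib("z", headers=["zlib.h"])
            # call=1 makes the generated main() actually call the function.
            have_func = self.check_func("deflate", headers=["zlib.h"],
                                        libraries=["z"], call=1)
            self.announce("zlib usable: %s" % (have_header and have_lib and have_func))

    # In setup.py this would be wired up with cmdclass={"config": config_zlib}.
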
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi index 1714e01a2c284..20112feb6598b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi @@ -1,3 +1,7 @@ +"""distutils.command.install + +Implements the Distutils 'install' command. +""" import sys from _typeshed import Incomplete from collections.abc import Callable @@ -43,29 +47,74 @@ class install(Command): build_base: Incomplete build_lib: Incomplete record: Incomplete - def initialize_options(self) -> None: ... + def initialize_options(self) -> None: + """Initializes options. +""" config_vars: Incomplete install_libbase: Incomplete - def finalize_options(self) -> None: ... - def dump_dirs(self, msg) -> None: ... - def finalize_unix(self) -> None: ... - def finalize_other(self) -> None: ... - def select_scheme(self, name) -> None: ... - def expand_basedirs(self) -> None: ... - def expand_dirs(self) -> None: ... - def convert_paths(self, *names) -> None: ... + def finalize_options(self) -> None: + """Finalizes options. +""" + def dump_dirs(self, msg) -> None: + """Dumps the list of user options. +""" + def finalize_unix(self) -> None: + """Finalizes options for posix platforms. +""" + def finalize_other(self) -> None: + """Finalizes options for non-posix platforms +""" + def select_scheme(self, name) -> None: + """Sets the install directories by applying the install schemes. +""" + def expand_basedirs(self) -> None: + """Calls `os.path.expanduser` on install_base, install_platbase and + root. +""" + def expand_dirs(self) -> None: + """Calls `os.path.expanduser` on install dirs. +""" + def convert_paths(self, *names) -> None: + """Call `convert_path` over `names`. +""" path_file: Incomplete extra_dirs: Incomplete - def handle_extra_path(self) -> None: ... - def change_roots(self, *names) -> None: ... - def create_home_path(self) -> None: ... - def run(self) -> None: ... - def create_path_file(self) -> None: ... - def get_outputs(self): ... - def get_inputs(self): ... - def has_lib(self): ... - def has_headers(self): ... - def has_scripts(self): ... - def has_data(self): ... + def handle_extra_path(self) -> None: + """Set `path_file` and `extra_dirs` using `extra_path`. +""" + def change_roots(self, *names) -> None: + """Change the install directories pointed by name using root. +""" + def create_home_path(self) -> None: + """Create directories under ~. +""" + def run(self) -> None: + """Runs the command. +""" + def create_path_file(self) -> None: + """Creates the .pth file +""" + def get_outputs(self): + """Assembles the outputs of all the sub-commands. +""" + def get_inputs(self): + """Returns the inputs of all the sub-commands +""" + def has_lib(self): + """Returns true if the current distribution has any Python + modules to install. +""" + def has_headers(self): + """Returns true if the current distribution has any headers to + install. +""" + def has_scripts(self): + """Returns true if the current distribution has any scripts to. + install. +""" + def has_data(self): + """Returns true if the current distribution has any data to. + install. 
+""" # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi index 609de62b04b52..3c41c12e33316 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi @@ -1,3 +1,8 @@ +"""distutils.command.install_data + +Implements the Distutils 'install_data' command, for installing +platform-independent data files. +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi index 75bb906ce5824..7e09ded1a2d38 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi @@ -1,9 +1,16 @@ +"""distutils.command.install_egg_info + +Implements the Distutils 'install_egg_info' command, for installing +a package's PKG-INFO metadata. +""" from _typeshed import Incomplete from typing import ClassVar from ..cmd import Command class install_egg_info(Command): + """Install an .egg-info file for the package +""" description: ClassVar[str] user_options: ClassVar[list[tuple[str, str, str]]] install_dir: Incomplete @@ -14,6 +21,19 @@ class install_egg_info(Command): def run(self) -> None: ... def get_outputs(self) -> list[str]: ... -def safe_name(name): ... -def safe_version(version): ... -def to_filename(name): ... +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ +def safe_version(version): + """Convert an arbitrary string to a standard version string + + Spaces become dots, and all other non-alphanumeric characters become + dashes, with runs of multiple dashes condensed to a single dash. + """ +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi index 3caad8a07dca4..60a4403d280f5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi @@ -1,3 +1,8 @@ +"""distutils.command.install_headers + +Implements the Distutils 'install_headers' command, to install C/C++ header +files to the Python include directory. +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi index a537e254904aa..7cd7a3c05dc91 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi @@ -1,3 +1,8 @@ +"""distutils.command.install_lib + +Implements the Distutils 'install_lib' command +(install all Python modules). 
+""" from _typeshed import Incomplete from typing import ClassVar, Final @@ -22,5 +27,14 @@ class install_lib(Command): def build(self) -> None: ... def install(self): ... def byte_compile(self, files) -> None: ... - def get_outputs(self): ... - def get_inputs(self): ... + def get_outputs(self): + """Return the list of files that would be installed if this command + were actually run. Not affected by the "dry-run" flag or whether + modules have actually been built yet. + """ + def get_inputs(self): + """Get the list of files that are input to this command, ie. the + files that get installed as they are named in the build tree. + The files in this list correspond one-to-one to the output + filenames returned by 'get_outputs()'. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi index 658594f32e43c..b79ad99524bbd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi @@ -1,3 +1,8 @@ +"""distutils.command.install_scripts + +Implements the Distutils 'install_scripts' command, for installing +Python scripts. +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi index c3bd62aaa7aa0..92082edf5c54b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi @@ -1,3 +1,7 @@ +"""distutils.command.register + +Implements the Distutils 'register' command (register with the repository). +""" from collections.abc import Callable from typing import Any, ClassVar @@ -12,9 +16,44 @@ class register(PyPIRCCommand): def initialize_options(self) -> None: ... def finalize_options(self) -> None: ... def run(self) -> None: ... - def check_metadata(self) -> None: ... - def classifiers(self) -> None: ... - def verify_metadata(self) -> None: ... - def send_metadata(self) -> None: ... + def check_metadata(self) -> None: + """Deprecated API. +""" + def classifiers(self) -> None: + """ Fetch the list of classifiers from the server. + """ + def verify_metadata(self) -> None: + """ Send the metadata to the package index server to be checked. + """ + def send_metadata(self) -> None: + """ Send the metadata to the package index server. + + Well, do the following: + 1. figure who the user is, and then + 2. send the data as a Basic auth'ed POST. + + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: + + [distutils] + index-servers = + pypi + + [pypi] + username: fred + password: sekrit + + Otherwise, to figure who the user is, we offer the user three + choices: + + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. + + """ def build_post_data(self, action): ... - def post_to_server(self, data, auth=None): ... + def post_to_server(self, data, auth=None): + """ Post a query to the server, and return a string response. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi index 48a140714dda7..74210597cd9df 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi @@ -1,14 +1,25 @@ +"""distutils.command.sdist + +Implements the Distutils 'sdist' command (create a source distribution). +""" from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar from ..cmd import Command -def show_formats() -> None: ... +def show_formats() -> None: + """Print all possible values for the 'formats' option (used by + the "--help-formats" command-line option). + """ class sdist(Command): description: str - def checking_metadata(self): ... + def checking_metadata(self): + """Callable used for the check sub-command. + + Placed here so user_options can view it +""" user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] @@ -33,13 +44,72 @@ class sdist(Command): def finalize_options(self) -> None: ... filelist: Incomplete def run(self) -> None: ... - def check_metadata(self) -> None: ... - def get_file_list(self) -> None: ... - def add_defaults(self) -> None: ... - def read_template(self) -> None: ... - def prune_file_list(self) -> None: ... - def write_manifest(self) -> None: ... - def read_manifest(self) -> None: ... - def make_release_tree(self, base_dir, files) -> None: ... - def make_distribution(self) -> None: ... - def get_archive_files(self): ... + def check_metadata(self) -> None: + """Deprecated API. +""" + def get_file_list(self) -> None: + """Figure out the list of files to include in the source + distribution, and put it in 'self.filelist'. This might involve + reading the manifest template (and writing the manifest), or just + reading the manifest, or just using the default file set -- it all + depends on the user's options. + """ + def add_defaults(self) -> None: + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. + """ + def read_template(self) -> None: + """Read and parse manifest template file named by self.template. + + (usually "MANIFEST.in") The parsing and processing is done by + 'self.filelist', which updates itself accordingly. + """ + def prune_file_list(self) -> None: + """Prune off branches that might slip into the file list as created + by 'read_template()', but really don't belong there: + * the build tree (typically "build") + * the release tree itself (only an issue if we ran "sdist" + previously with --keep-temp, or it aborted) + * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories + """ + def write_manifest(self) -> None: + """Write the file list in 'self.filelist' (presumably as filled in + by 'add_defaults()' and 'read_template()') to the manifest file + named by 'self.manifest'. 
+ """ + def read_manifest(self) -> None: + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + def make_release_tree(self, base_dir, files) -> None: + """Create the directory tree that will become the source + distribution archive. All directories implied by the filenames in + 'files' are created under 'base_dir', and then we hard link or copy + (if hard linking is unavailable) those files into place. + Essentially, this duplicates the developer's source tree, but in a + directory named after the distribution, containing only the files + to be distributed. + """ + def make_distribution(self) -> None: + """Create the source distribution(s). First, we create the release + tree with 'make_release_tree()'; then, we create all required + archive files (according to 'self.formats') from the release tree. + Finally, we clean up by blowing away the release tree (unless + 'self.keep_temp' is true). The list of archive files created is + stored so it can be retrieved later by 'get_archive_files()'. + """ + def get_archive_files(self): + """Return the list of archive files created when the command + was run, or None if the command hasn't run yet. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi index afcfbaf48677e..1f250fdfb3106 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi @@ -1,3 +1,9 @@ +""" +distutils.command.upload + +Implements the Distutils 'upload' subcommand (upload package to a package +index). +""" from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi index 5814a82841cc9..7611e26e47774 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi @@ -1,3 +1,8 @@ +"""distutils.pypirc + +Provides the PyPIRCCommand class, the base class for the command classes +that uses .pypirc in the distutils.command package. +""" from abc import abstractmethod from distutils.cmd import Command from typing import ClassVar @@ -5,13 +10,28 @@ from typing import ClassVar DEFAULT_PYPIRC: str class PyPIRCCommand(Command): + """Base command that knows how to handle the .pypirc file + """ DEFAULT_REPOSITORY: ClassVar[str] DEFAULT_REALM: ClassVar[str] repository: None realm: None user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] - def initialize_options(self) -> None: ... - def finalize_options(self) -> None: ... + def initialize_options(self) -> None: + """Initialize options. +""" + def finalize_options(self) -> None: + """Finalizes options. +""" @abstractmethod - def run(self) -> None: ... + def run(self) -> None: + """A command's raison d'etre: carry out the action it exists to + perform, controlled by the options initialized in + 'initialize_options()', customized by other commands, the setup + script, the command-line, and config files, and finalized in + 'finalize_options()'. All terminal output and filesystem + interaction should be done by 'run()'. + + This method must be implemented by all command classes. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi index 174f249913514..c323fbb1fc30a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi @@ -1,3 +1,10 @@ +"""distutils.core + +The only module that needs to be imported to use the Distutils; provides +the 'setup' function (which is to be called from the setup script). Also +indirectly provides the Distribution and Command classes, although they are +really defined in distutils.dist and distutils.cmd. +""" from _typeshed import Incomplete, StrOrBytesPath from collections.abc import Mapping from distutils.cmd import Command as Command @@ -54,5 +61,66 @@ def setup( fullname: str = ..., # Custom Distributions could accept more params **attrs: Any, -) -> Distribution: ... -def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... +) -> Distribution: + """The gateway to the Distutils: do everything your setup script needs + to do, in a highly flexible and user-driven way. Briefly: create a + Distribution instance; find and parse config files; parse the command + line; run each Distutils command found there, customized by the options + supplied to 'setup()' (as keyword arguments), in config files, and on + the command line. + + The Distribution instance might be an instance of a class supplied via + the 'distclass' keyword argument to 'setup'; if no such class is + supplied, then the Distribution class (in dist.py) is instantiated. + All other arguments to 'setup' (except for 'cmdclass') are used to set + attributes of the Distribution instance. + + The 'cmdclass' argument, if supplied, is a dictionary mapping command + names to command classes. Each command encountered on the command line + will be turned into a command class, which is in turn instantiated; any + class found in 'cmdclass' is used in place of the default, which is + (for command 'foo_bar') class 'foo_bar' in module + 'distutils.command.foo_bar'. The command class must provide a + 'user_options' attribute which is a list of option specifiers for + 'distutils.fancy_getopt'. Any command-line options between the current + and the next command are used to set attributes of the current command + object. + + When the entire command-line has been successfully parsed, calls the + 'run()' method on each command object in turn. This method will be + driven entirely by the Distribution object (which each command object + has a reference to, thanks to its constructor), and the + command-specific options that became attributes of each command + object. + """ +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: + """Run a setup script in a somewhat controlled environment, and + return the Distribution instance that drives things. This is useful + if you need to find out the distribution meta-data (passed as + keyword args from 'script' to 'setup()', or the contents of the + config files or command-line. + + 'script_name' is a file that will be read and run with 'exec()'; + 'sys.argv[0]' will be replaced with 'script' for the duration of the + call. 'script_args' is a list of strings; if supplied, + 'sys.argv[1:]' will be replaced by 'script_args' for the duration of + the call. 
+ + 'stop_after' tells 'setup()' when to stop processing; possible + values: + init + stop after the Distribution instance has been created and + populated with the keyword arguments to 'setup()' + config + stop after config files have been parsed (and their data + stored in the Distribution instance) + commandline + stop after the command-line ('sys.argv[1:]' or 'script_args') + have been parsed (and the data stored in the Distribution) + run [default] + stop after all commands have been run (the same as if 'setup()' + had been called in the usual way + + Returns the Distribution instance, which provides all information + used to drive the Distutils. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi index 80924d63e4714..8cb498c268d66 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi @@ -1,20 +1,56 @@ +"""distutils.cygwinccompiler + +Provides the CygwinCCompiler class, a subclass of UnixCCompiler that +handles the Cygwin port of the GNU C compiler to Windows. It also contains +the Mingw32CCompiler class which handles the mingw32 port of GCC (same as +cygwin in no-cygwin mode). +""" from distutils.unixccompiler import UnixCCompiler from distutils.version import LooseVersion from re import Pattern from typing import Final, Literal -def get_msvcr() -> list[str] | None: ... +def get_msvcr() -> list[str] | None: + """Include the appropriate MSVC runtime library if Python was built + with MSVC 7.0 or later. + """ -class CygwinCCompiler(UnixCCompiler): ... -class Mingw32CCompiler(CygwinCCompiler): ... +class CygwinCCompiler(UnixCCompiler): + """ Handles the Cygwin port of the GNU C compiler to Windows. + """ +class Mingw32CCompiler(CygwinCCompiler): + """ Handles the Mingw32 port of the GNU C compiler to Windows. + """ CONFIG_H_OK: Final = "ok" CONFIG_H_NOTOK: Final = "not ok" CONFIG_H_UNCERTAIN: Final = "uncertain" -def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ... +def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: + """Check if the current Python installation appears amenable to building + extensions with GCC. + + Returns a tuple (status, details), where 'status' is one of the following + constants: + + - CONFIG_H_OK: all is well, go ahead and compile + - CONFIG_H_NOTOK: doesn't look good + - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h + + 'details' is a human-readable string explaining the situation. + + Note there are two ways to conclude "OK": either 'sys.version' contains + the string "GCC" (implying that this Python was built with GCC), or the + installed "pyconfig.h" contains the string "__GNUC__". + """ RE_VERSION: Final[Pattern[bytes]] -def get_versions() -> tuple[LooseVersion | None, ...]: ... -def is_cygwingcc() -> bool: ... +def get_versions() -> tuple[LooseVersion | None, ...]: + """ Try to find out the versions of gcc, ld and dllwrap. + + If not possible it returns None for it. + """ +def is_cygwingcc() -> bool: + """Try to determine if the gcc that would be used is from cygwin. 
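A build front-end might consult these helpers before attempting a GCC-based build on Windows; a sketch using the status constants defined above:

    from distutils import cygwinccompiler

    status, details = cygwinccompiler.check_config_h()
    if status != cygwinccompiler.CONFIG_H_OK:
        print("building with GCC may fail:", status, "-", details)
    gcc, ld, dllwrap = cygwinccompiler.get_versions()   # any entry may be None
    print("gcc", gcc, "| cygwin gcc?", cygwinccompiler.is_cygwingcc())
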
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi index 058377accabcc..2bfd910109cc4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi @@ -1,3 +1,9 @@ +"""distutils.dep_util + +Utility functions for simple, timestamp-based dependency of files +and groups of files; also, function based entirely on such +timestamp dependency analysis. +""" from _typeshed import StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Iterable from typing import Literal, TypeVar @@ -5,10 +11,32 @@ from typing import Literal, TypeVar _SourcesT = TypeVar("_SourcesT", bound=StrOrBytesPath) _TargetsT = TypeVar("_TargetsT", bound=StrOrBytesPath) -def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: ... +def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: + """Return true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. Return false if + both exist and 'target' is the same age or younger than 'source'. + Raise DistutilsFileError if 'source' does not exist. + """ def newer_pairwise( sources: SupportsLenAndGetItem[_SourcesT], targets: SupportsLenAndGetItem[_TargetsT] -) -> tuple[list[_SourcesT], list[_TargetsT]]: ... +) -> tuple[list[_SourcesT], list[_TargetsT]]: + """Walk two filename lists in parallel, testing if each source is newer + than its corresponding target. Return a pair of lists (sources, + targets) where source is newer than target, according to the semantics + of 'newer()'. + """ def newer_group( sources: Iterable[StrOrBytesPath], target: StrOrBytesPath, missing: Literal["error", "ignore", "newer"] = "error" -) -> Literal[0, 1]: ... +) -> Literal[0, 1]: + """Return true if 'target' is out-of-date with respect to any file + listed in 'sources'. In other words, if 'target' exists and is newer + than every file in 'sources', return false; otherwise return true. + 'missing' controls what we do when a source file is missing; the + default ("error") is to blow up with an OSError from inside 'stat()'; + if it is "ignore", we silently drop any missing source files; if it is + "newer", any missing source files make us assume that 'target' is + out-of-date (this is handy in "dry-run" mode: it'll make you pretend to + carry out commands that wouldn't work because inputs are missing, but + that doesn't matter because you're not actually going to run the + commands). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi index 23e2c3bc28b98..fe718ccfa75c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi @@ -1,15 +1,37 @@ +"""distutils.dir_util + +Utility functions for manipulating directories and directory trees. +""" from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal -def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: ... +def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> list[str]: + """Create a directory and any missing ancestor directories. 
+ + If the directory already exists (or if 'name' is the empty string, which + means the current directory, which of course exists), then do nothing. + Raise DistutilsFileError if unable to create some directory along the way + (eg. some sub-path exists, but is a file rather than a directory). + If 'verbose' is true, print a one-line summary of each mkdir to stdout. + Return the list of directories actually created. + """ def create_tree( base_dir: StrPath, files: Iterable[StrPath], mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> None: ... +) -> None: + """Create all the empty directories under 'base_dir' needed to put 'files' + there. + + 'base_dir' is just the name of a directory which doesn't necessarily + exist yet; 'files' is a list of filenames to be interpreted relative to + 'base_dir'. 'base_dir' + the directory portion of every file in 'files' + will be created if it doesn't already exist. 'mode', 'verbose' and + 'dry_run' flags are as for 'mkpath()'. + """ def copy_tree( src: StrPath, dst: str, @@ -19,5 +41,29 @@ def copy_tree( update: bool | Literal[0, 1] = 0, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> list[str]: ... -def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: ... +) -> list[str]: + """Copy an entire directory tree 'src' to a new location 'dst'. + + Both 'src' and 'dst' must be directory names. If 'src' is not a + directory, raise DistutilsFileError. If 'dst' does not exist, it is + created with 'mkpath()'. The end result of the copy is that every + file in 'src' is copied to 'dst', and directories under 'src' are + recursively copied to 'dst'. Return the list of files that were + copied or might have been copied, using their output name. The + return value is unaffected by 'update' or 'dry_run': it is simply + the list of all files under 'src', with the names changed to be + under 'dst'. + + 'preserve_mode' and 'preserve_times' are the same as for + 'copy_file'; note that they only apply to regular files, not to + directories. If 'preserve_symlinks' is true, symlinks will be + copied as symlinks (on platforms that support them!); otherwise + (the default), the destination of the symlink will be copied. + 'update' and 'verbose' are the same as for 'copy_file'. + """ +def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: + """Recursively remove an entire directory tree. + + Any errors are ignored (apart from being reported to stdout if 'verbose' + is true). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi index 412b94131b54e..57d3283cc952d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi @@ -1,3 +1,8 @@ +"""distutils.dist + +Provides the Distribution class, which represents the module distribution +being built/installed/distributed. +""" from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite from collections.abc import Iterable, MutableMapping from distutils.cmd import Command @@ -31,6 +36,9 @@ _OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str _CommandT = TypeVar("_CommandT", bound=Command) class DistributionMetadata: + """Dummy class to hold the distribution meta-data: name, version, + author, and so forth. 
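A usage sketch for the dir_util helpers documented above; all paths are hypothetical:

    from distutils.dir_util import copy_tree, mkpath, remove_tree

    created = mkpath("build/lib/demo", verbose=1)            # dirs actually created
    copied = copy_tree("src/demo", "build/lib/demo", update=1)
    remove_tree("build/tmp", dry_run=1)                      # reports only, removes nothing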
+ """ def __init__(self, path: StrOrBytesPath | None = None) -> None: ... name: str | None version: str | None @@ -49,9 +57,15 @@ class DistributionMetadata: provides: list[str] | None requires: list[str] | None obsoletes: list[str] | None - def read_pkg_file(self, file: IO[str]) -> None: ... - def write_pkg_info(self, base_dir: StrPath) -> None: ... - def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... + def read_pkg_file(self, file: IO[str]) -> None: + """Reads the metadata values from a file object. +""" + def write_pkg_info(self, base_dir: StrPath) -> None: + """Write the PKG-INFO file into the release tree. + """ + def write_pkg_file(self, file: SupportsWrite[str]) -> None: + """Write the PKG-INFO format data to a file object. + """ def get_name(self) -> str: ... def get_version(self) -> str: ... def get_fullname(self) -> str: ... @@ -78,10 +92,36 @@ class DistributionMetadata: def set_obsoletes(self, value: Iterable[str]) -> None: ... class Distribution: + """The core of the Distutils. Most of the work hiding behind 'setup' + is really done within a Distribution instance, which farms the work out + to the Distutils commands specified on the command line. + + Setup scripts will almost never instantiate Distribution directly, + unless the 'setup()' function is totally inadequate to their needs. + However, it is conceivable that a setup script might wish to subclass + Distribution for some specialized purpose, and then pass the subclass + to 'setup()' as the 'distclass' keyword argument. If so, it is + necessary to respect the expectations that 'setup' has of Distribution. + See the code for 'setup()', in core.py, for details. + """ cmdclass: dict[str, type[Command]] metadata: DistributionMetadata - def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ... - def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... + def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: + """Construct a new Distribution instance: initialize all the + attributes of a Distribution, and then use 'attrs' (a dictionary + mapping attribute names to values) to assign some of those + attributes their "real" values. (Any attributes not mentioned in + 'attrs' will be assigned to some null value: 0, None, an empty list + or dictionary, etc.) Most importantly, initialize the + 'command_obj' attribute to the empty dictionary; this will be + filled in with real command objects by 'parse_command_line()'. + """ + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: + """Get the option dictionary for a given command. If that + command's option dictionary hasn't been created yet, then create it + and return the new dictionary; otherwise, return the existing + option dictionary. + """ def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... global_options: ClassVar[_OptionsList] common_usage: ClassVar[str] @@ -113,18 +153,82 @@ class Distribution: have_run: Incomplete want_user_cfg: bool def dump_option_dicts(self, header=None, commands=None, indent: str = "") -> None: ... - def find_config_files(self): ... + def find_config_files(self): + """Find as many configuration files as should be processed for this + platform, and return a list of filenames in the order in which they + should be parsed. The filenames returned are guaranteed to exist + (modulo nasty race conditions). + + There are three possible config files: distutils.cfg in the + Distutils installation directory (ie. 
where the top-level + Distutils __inst__.py file lives), a file in the user's home + directory named .pydistutils.cfg on Unix and pydistutils.cfg + on Windows/Mac; and setup.cfg in the current directory. + + The file in the user's home directory can be disabled with the + --no-user-cfg option. + """ commands: Incomplete - def parse_command_line(self): ... - def finalize_options(self) -> None: ... - def handle_display_options(self, option_order): ... - def print_command_list(self, commands, header, max_length) -> None: ... - def print_commands(self) -> None: ... - def get_command_list(self): ... - def get_command_packages(self): ... + def parse_command_line(self): + """Parse the setup script's command line, taken from the + 'script_args' instance attribute (which defaults to 'sys.argv[1:]' + -- see 'setup()' in core.py). This list is first processed for + "global options" -- options that set attributes of the Distribution + instance. Then, it is alternately scanned for Distutils commands + and options for that command. Each new command terminates the + options for the previous command. The allowed options for a + command are determined by the 'user_options' attribute of the + command class -- thus, we have to be able to load command classes + in order to parse the command line. Any error in that 'options' + attribute raises DistutilsGetoptError; any error on the + command-line raises DistutilsArgError. If no Distutils commands + were found on the command line, raises DistutilsArgError. Return + true if command-line was successfully parsed and we should carry + on with executing commands; false if no errors but we shouldn't + execute commands (currently, this only happens if user asks for + help). + """ + def finalize_options(self) -> None: + """Set final values for all the options on the Distribution + instance, analogous to the .finalize_options() method of Command + objects. + """ + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + def print_command_list(self, commands, header, max_length) -> None: + """Print a subset of the list of all commands -- used by + 'print_commands()'. + """ + def print_commands(self) -> None: + """Print out a help message listing all available commands with a + description of each. The list is divided into "standard commands" + (listed in distutils.command.__all__) and "extra commands" + (mentioned in self.cmdclass, but not a standard command). The + descriptions come from the command class attribute + 'description'. + """ + def get_command_list(self): + """Get a list of (command, description) tuples. + The list is divided into "standard commands" (listed in + distutils.command.__all__) and "extra commands" (mentioned in + self.cmdclass, but not a standard command). The descriptions come + from the command class attribute 'description'. + """ + def get_command_packages(self): + """Return a list of packages from which commands are loaded. +""" # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. @overload - def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ... + def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: + """Return the command object for 'command'. 
Normally this object + is cached on a previous call to 'get_command_obj()'; if no command + object for 'command' is in the cache, then we either create and + return it (if 'create' is true) or return None. + """ @overload def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ... @overload @@ -169,7 +273,18 @@ class Distribution: @overload def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ... @overload - def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ... + def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: + """Return the class that implements the Distutils command named by + 'command'. First we check the 'cmdclass' dictionary; if the + command is mentioned there, we fetch the class object from the + dictionary and return it. Otherwise we load the command module + ("distutils.command." + command) and fetch the command class from + the module. The loaded class is also stored in 'cmdclass' + to speed future calls to 'get_command_class()'. + + Raises DistutilsModuleError if the expected module could not be + found, or if that module does not define the expected class. + """ @overload def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ... @overload @@ -211,7 +326,25 @@ class Distribution: @overload def get_command_class(self, command: str) -> type[Command]: ... @overload - def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ... + def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: + """Reinitializes a command to the state it was in when first + returned by 'get_command_obj()': ie., initialized but not yet + finalized. This provides the opportunity to sneak option + values in programmatically, overriding or supplementing + user-supplied values from the config files and command line. + You'll have to re-finalize the command object (by calling + 'finalize_options()' or 'ensure_finalized()') before using it for + real. + + 'command' should be a command name (string) or command object. If + 'reinit_subcommands' is true, also reinitializes the command's + sub-commands, as declared by the 'sub_commands' class attribute (if + it has one). See the "install" command for an example. Only + reinitializes the sub-commands that actually matter, ie. those + whose test predicates return true. + + Returns the reinitialized command object. + """ @overload def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ... @overload @@ -257,8 +390,19 @@ class Distribution: @overload def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ... def announce(self, msg, level: int = 2) -> None: ... - def run_commands(self) -> None: ... - def run_command(self, command: str) -> None: ... + def run_commands(self) -> None: + """Run each command that was seen on the setup script command line. + Uses the list of commands found and cache of command objects + created by 'get_command_obj()'. + """ + def run_command(self, command: str) -> None: + """Do whatever it takes to run a command (including nothing at all, + if the command has already been run). Specifically: if we have + already created and run the command named by 'command', return + silently without doing anything. If the command named by 'command' + doesn't even have a command object yet, create one. 
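A sketch of the command-object lifecycle that get_command_obj() and reinitialize_command() describe above; the option set here (build_base, an attribute of the build command) is an assumption not shown in this stub:

    from distutils.dist import Distribution

    dist = Distribution({"name": "demo", "version": "0.1", "py_modules": ["demo"]})
    build = dist.get_command_obj("build")         # created on first call, then cached
    build = dist.reinitialize_command("build")    # back to "initialized, not finalized"
    build.build_base = "custom-build"             # sneak an option value in programmatically
    build.ensure_finalized()                      # re-finalize before using it for real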
Then invoke + 'run()' on that command object (or an existing one). + """ def has_pure_modules(self) -> bool: ... def has_ext_modules(self) -> bool: ... def has_c_libraries(self) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi index e483362bfbf19..c208bfa8046a4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi @@ -1,19 +1,86 @@ -class DistutilsError(Exception): ... -class DistutilsModuleError(DistutilsError): ... -class DistutilsClassError(DistutilsError): ... -class DistutilsGetoptError(DistutilsError): ... -class DistutilsArgError(DistutilsError): ... -class DistutilsFileError(DistutilsError): ... -class DistutilsOptionError(DistutilsError): ... -class DistutilsSetupError(DistutilsError): ... -class DistutilsPlatformError(DistutilsError): ... -class DistutilsExecError(DistutilsError): ... -class DistutilsInternalError(DistutilsError): ... -class DistutilsTemplateError(DistutilsError): ... -class DistutilsByteCompileError(DistutilsError): ... -class CCompilerError(Exception): ... -class PreprocessError(CCompilerError): ... -class CompileError(CCompilerError): ... -class LibError(CCompilerError): ... -class LinkError(CCompilerError): ... -class UnknownFileError(CCompilerError): ... +"""distutils.errors + +Provides exceptions used by the Distutils modules. Note that Distutils +modules may raise standard exceptions; in particular, SystemExit is +usually raised for errors that are obviously the end-user's fault +(eg. bad command-line arguments). + +This module is safe to use in "from ... import *" mode; it only exports +symbols whose names start with "Distutils" and end with "Error". +""" +class DistutilsError(Exception): + """The root of all Distutils evil. +""" +class DistutilsModuleError(DistutilsError): + """Unable to load an expected module, or to find an expected class + within some module (in particular, command modules and classes). +""" +class DistutilsClassError(DistutilsError): + """Some command class (or possibly distribution class, if anyone + feels a need to subclass Distribution) is found not to be holding + up its end of the bargain, ie. implementing some part of the + "command "interface. +""" +class DistutilsGetoptError(DistutilsError): + """The option table provided to 'fancy_getopt()' is bogus. +""" +class DistutilsArgError(DistutilsError): + """Raised by fancy_getopt in response to getopt.error -- ie. an + error in the command line usage. +""" +class DistutilsFileError(DistutilsError): + """Any problems in the filesystem: expected file not found, etc. + Typically this is for problems that we detect before OSError + could be raised. +""" +class DistutilsOptionError(DistutilsError): + """Syntactic/semantic errors in command options, such as use of + mutually conflicting options, or inconsistent options, + badly-spelled values, etc. No distinction is made between option + values originating in the setup script, the command line, config + files, or what-have-you -- but if we *know* something originated in + the setup script, we'll raise DistutilsSetupError instead. +""" +class DistutilsSetupError(DistutilsError): + """For errors that can be definitely blamed on the setup script, + such as invalid keyword arguments to 'setup()'. 
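A brief sketch of catching one of the exceptions defined above; it relies on mkpath() (documented earlier in this diff) refusing to create a directory where a plain file already exists, and the file name is hypothetical:

    from distutils.dir_util import mkpath
    from distutils.errors import DistutilsFileError

    try:
        # Fails with DistutilsFileError if "README.txt" exists as a regular file.
        mkpath("README.txt/subdir")
    except DistutilsFileError as exc:
        print("could not create directory:", exc)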
+""" +class DistutilsPlatformError(DistutilsError): + """We don't know how to do something on the current platform (but + we do know how to do it on some platform) -- eg. trying to compile + C files on a platform not supported by a CCompiler subclass. +""" +class DistutilsExecError(DistutilsError): + """Any problems executing an external program (such as the C + compiler, when compiling C files). +""" +class DistutilsInternalError(DistutilsError): + """Internal inconsistencies or impossibilities (obviously, this + should never be seen if the code is working!). +""" +class DistutilsTemplateError(DistutilsError): + """Syntax error in a file list template. +""" +class DistutilsByteCompileError(DistutilsError): + """Byte compile error. +""" +class CCompilerError(Exception): + """Some compile/link operation failed. +""" +class PreprocessError(CCompilerError): + """Failure to preprocess one or more C/C++ files. +""" +class CompileError(CCompilerError): + """Failure to compile one or more C/C++ source files. +""" +class LibError(CCompilerError): + """Failure to create a static library from one or more C/C++ object + files. +""" +class LinkError(CCompilerError): + """Failure to link one or more C/C++ object files into an executable + or shared library file. +""" +class UnknownFileError(CCompilerError): + """Attempt to process an unknown file type. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi index 789bbf6ec3d12..e803c1799951b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi @@ -1,4 +1,72 @@ +"""distutils.extension + +Provides the Extension class, used to describe C/C++ extension +modules in setup scripts. +""" class Extension: + """Just a collection of attributes that describes an extension + module and everything needed to build it (hopefully in a portable + way, but there are hooks that let you be as unportable as you need). + + Instance attributes: + name : string + the full name of the extension, including any packages -- ie. + *not* a filename or pathname, but Python dotted name + sources : [string] + list of source filenames, relative to the distribution root + (where the setup script lives), in Unix form (slash-separated) + for portability. Source files may be C, C++, SWIG (.i), + platform-specific resource files, or whatever else is recognized + by the "build_ext" command as source for a Python extension. + include_dirs : [string] + list of directories to search for C/C++ header files (in Unix + form for portability) + define_macros : [(name : string, value : string|None)] + list of macros to define; each macro is defined using a 2-tuple, + where 'value' is either the string to define it to or None to + define it without a particular value (equivalent of "#define + FOO" in source or -DFOO on Unix C compiler command line) + undef_macros : [string] + list of macros to undefine explicitly + library_dirs : [string] + list of directories to search for C/C++ libraries at link time + libraries : [string] + list of library names (not filenames or paths) to link against + runtime_library_dirs : [string] + list of directories to search for C/C++ libraries at run time + (for shared extensions, this is when the extension is loaded) + extra_objects : [string] + list of extra files to link with (eg. 
object files not implied + by 'sources', static library that must be explicitly specified, + binary resource files, etc.) + extra_compile_args : [string] + any extra platform- and compiler-specific information to use + when compiling the source files in 'sources'. For platforms and + compilers where "command line" makes sense, this is typically a + list of command-line arguments, but for other platforms it could + be anything. + extra_link_args : [string] + any extra platform- and compiler-specific information to use + when linking object files together to create the extension (or + to create a new static Python interpreter). Similar + interpretation as for 'extra_compile_args'. + export_symbols : [string] + list of symbols to be exported from a shared extension. Not + used on all platforms, and not generally necessary for Python + extensions, which typically export exactly one symbol: "init" + + extension_name. + swig_opts : [string] + any extra options to pass to SWIG if a source file has the .i + extension. + depends : [string] + list of files that the extension depends on + language : string + extension language (i.e. "c", "c++", "objc"). Will be detected + from the source extensions if not provided. + optional : boolean + specifies that a build failure in the extension should not abort the + build process, but simply not install the failing extension. + """ name: str sources: list[str] include_dirs: list[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi index f3fa2a1255a6d..809333d123068 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -1,3 +1,12 @@ +"""distutils.fancy_getopt + +Wrapper around the standard getopt module that provides the following +additional features: + * short and long options are tied together + * options have help strings, so fancy_getopt could potentially + create a complete usage summary + * options set attributes of a passed-in object +""" from collections.abc import Iterable, Mapping from getopt import _SliceableT, _StrSequenceT_co from re import Pattern @@ -12,18 +21,45 @@ neg_alias_re: Final[Pattern[str]] longopt_xlate: Final[dict[int, int]] class FancyGetopt: + """Wrapper around the standard 'getopt()' module that provides some + handy extra functionality: + * short and long options are tied together + * options have help strings, and help text can be assembled + from them + * options set attributes of a passed-in object + * boolean options can have "negative aliases" -- eg. if + --quiet is the "negative alias" of --verbose, then "--quiet" + on the command line sets 'verbose' to false + """ def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO: kinda wrong, `getopt(object=object())` is invalid @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None = None, object: None = None - ) -> tuple[_StrSequenceT_co, OptionDummy]: ... + ) -> tuple[_StrSequenceT_co, OptionDummy]: + """Parse command-line options in args. Store as attributes on object. + + If 'args' is None or not supplied, uses 'sys.argv[1:]'. If + 'object' is None or not supplied, creates a new OptionDummy + object, stores option values there, and returns a tuple (args, + object). 
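A sketch of describing a C extension with the attributes listed above; the module name, sources, and flags are hypothetical:

    from distutils.core import setup
    from distutils.extension import Extension

    ext = Extension(
        "demo._speedups",                     # dotted module name, not a path
        sources=["src/speedups.c"],
        include_dirs=["include"],
        define_macros=[("NDEBUG", None)],     # -DNDEBUG
        libraries=["m"],                      # -lm
        optional=True,                        # a build failure won't abort the build
    )
    setup(name="demo", version="0.1", ext_modules=[ext])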
If 'object' is supplied, it is modified in place and + 'getopt()' just returns 'args'; in both cases, the returned + 'args' is a modified copy of the passed-in 'args' list, which + is left untouched. + """ @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None, object: Any ) -> _StrSequenceT_co: ... # object is an arbitrary non-slotted object - def get_option_order(self) -> list[tuple[str, str]]: ... - def generate_help(self, header: str | None = None) -> list[str]: ... + def get_option_order(self) -> list[tuple[str, str]]: + """Returns the list of (option, value) tuples processed by the + previous run of 'getopt()'. Raises RuntimeError if + 'getopt()' hasn't been called yet. + """ + def generate_help(self, header: str | None = None) -> list[str]: + """Generate help text (a list of strings, one per suggested line of + output) from the option table for this FancyGetopt object. + """ # Same note as FancyGetopt.getopt @overload @@ -37,8 +73,22 @@ def fancy_getopt( WS_TRANS: Final[dict[int, str]] -def wrap_text(text: str, width: int) -> list[str]: ... -def translate_longopt(opt: str) -> str: ... +def wrap_text(text: str, width: int) -> list[str]: + """wrap_text(text : string, width : int) -> [string] + + Split 'text' into multiple lines of no more than 'width' characters + each, and return the list of strings that results. + """ +def translate_longopt(opt: str) -> str: + """Convert a long option name to a valid Python identifier by + changing "-" to "_". + """ class OptionDummy: - def __init__(self, options: Iterable[str] = []) -> None: ... + """Dummy class just used as a place to hold command-line option + values as instance attributes. +""" + def __init__(self, options: Iterable[str] = []) -> None: + """Create a new OptionDummy instance. The attributes listed in + 'options' will be initialized to None. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi index c763f91a958d7..c8905305b6fa0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi @@ -1,3 +1,7 @@ +"""distutils.file_util + +Utility functions for operating on single files. +""" from _typeshed import BytesPath, StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal, TypeVar, overload @@ -15,7 +19,31 @@ def copy_file( link: str | None = None, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, -) -> tuple[_StrPathT | str, bool]: ... +) -> tuple[_StrPathT | str, bool]: + """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is + copied there with the same name; otherwise, it must be a filename. (If + the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' + is true (the default), the file's mode (type and permission bits, or + whatever is analogous on the current platform) is copied. If + 'preserve_times' is true (the default), the last-modified and + last-access times are copied as well. If 'update' is true, 'src' will + only be copied if 'dst' does not exist, or if 'dst' does exist but is + older than 'src'. + + 'link' allows you to make hard links (os.link) or symbolic links + (os.symlink) instead of copying: set it to "hard" or "sym"; if it is + None (the default), files are copied. Don't set 'link' on systems that + don't support it: 'copy_file()' doesn't check if hard or symbolic + linking is available. 
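A sketch of the FancyGetopt behaviour described above, with a hypothetical option table; long option names have "-" translated to "_" when stored as attributes on the returned OptionDummy:

    from distutils.fancy_getopt import FancyGetopt

    option_table = [
        ("verbose", "v", "run verbosely"),
        ("build-dir=", "b", "directory to build in"),   # trailing '=' means it takes a value
    ]
    parser = FancyGetopt(option_table)
    args, opts = parser.getopt(["-v", "--build-dir", "out", "target"])
    print(args)                          # ['target']
    print(opts.verbose, opts.build_dir)  # 1 out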
If hardlink fails, falls back to + _copy_file_contents(). + + Under Mac OS, uses the native file copy function in macostools; on + other systems, uses '_copy_file_contents()' to copy file contents. + + Return a tuple (dest_name, copied): 'dest_name' is the actual name of + the output file, and 'copied' is true if the file was copied (or would + have been copied, if 'dry_run' true). + """ @overload def copy_file( src: BytesPath, @@ -30,9 +58,19 @@ def copy_file( @overload def move_file( src: StrPath, dst: _StrPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 -) -> _StrPathT | str: ... +) -> _StrPathT | str: + """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will + be moved into it with the same name; otherwise, 'src' is just renamed + to 'dst'. Return the new full name of the file. + + Handles cross-device moves on Unix using 'copy_file()'. What about + other systems??? + """ @overload def move_file( src: BytesPath, dst: _BytesPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 ) -> _BytesPathT | bytes: ... -def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: ... +def write_file(filename: StrOrBytesPath, contents: Iterable[str]) -> None: + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi index 607a78a1fbaca..b66f520c9b807 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi @@ -1,15 +1,36 @@ +"""distutils.filelist + +Provides the FileList class, used for poking about the filesystem +and building lists of files. +""" from collections.abc import Iterable from re import Pattern from typing import Literal, overload # class is entirely undocumented class FileList: + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + + Instance attributes: + dir + directory from which files will be taken -- only used if + 'allfiles' not supplied to constructor + files + list of filenames currently being built/filtered/manipulated + allfiles + complete list of files under consideration (ie. without any + filtering applied) + """ allfiles: Iterable[str] | None files: list[str] def __init__(self, warn: None = None, debug_print: None = None) -> None: ... def set_allfiles(self, allfiles: Iterable[str]) -> None: ... def findall(self, dir: str = ".") -> None: ... - def debug_print(self, msg: str) -> None: ... + def debug_print(self, msg: str) -> None: + """Print 'msg' to stdout if the global DEBUG (taken from the + DISTUTILS_DEBUG environment variable) flag is true. + """ def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... def sort(self) -> None: ... @@ -18,7 +39,31 @@ class FileList: @overload def include_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: ... + ) -> bool: + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. Patterns + are not quite the same as implemented by the 'fnmatch' module: '*' + and '?' 
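A sketch of the single-file helpers documented above; the paths are hypothetical and the destination directories are assumed to exist:

    from distutils.file_util import copy_file, move_file, write_file

    dest, copied = copy_file("README.txt", "build/README.txt", update=1)
    write_file("build/VERSION", ["0.1.0"])     # one string per line, no line terminators
    move_file("build/VERSION", "dist/VERSION")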
match non-special characters, where "special" is platform- + dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. + + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found, False otherwise. + """ @overload def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -32,7 +77,13 @@ class FileList: @overload def exclude_pattern( self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 - ) -> bool: ... + ) -> bool: + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. Other parameters are the same as for + 'include_pattern()', above. + The list 'self.files' is modified in place. + Return True if files are found, False otherwise. + """ @overload def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -44,12 +95,26 @@ class FileList: is_regex: bool | Literal[0, 1] = 0, ) -> bool: ... -def findall(dir: str = ".") -> list[str]: ... -def glob_to_re(pattern: str) -> str: ... +def findall(dir: str = ".") -> list[str]: + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. + """ +def glob_to_re(pattern: str) -> str: + """Translate a shell-like glob pattern to a regular expression; return + a string containing the regex. Differs from 'fnmatch.translate()' in + that '*' does not match "special characters" (which are + platform-specific). + """ @overload def translate_pattern( pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 -) -> Pattern[str]: ... +) -> Pattern[str]: + """Translate a shell-like wildcard pattern to a compiled regular + expression. Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). + """ @overload def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi index 7246dd6be0cdf..d24819777b4de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi @@ -1,3 +1,5 @@ +"""A simple log mechanism styled after PEP 282. 
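A sketch of the pattern selection that FileList documents above; the directory and patterns are hypothetical:

    from distutils.filelist import FileList

    fl = FileList()
    fl.findall("src")                        # populate the candidate file list
    fl.include_pattern("*.py", anchor=0)     # any .py file anywhere under src
    fl.exclude_pattern("*_test.py", anchor=0)
    fl.sort()
    print(fl.files)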
+""" from typing import Any, Final DEBUG: Final = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi index 80872a6b739f0..7560949a5693b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi @@ -1,3 +1,11 @@ +"""distutils.msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio. +""" from distutils.ccompiler import CCompiler -class MSVCCompiler(CCompiler): ... +class MSVCCompiler(CCompiler): + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi index ae07a49504fe1..15b36ed55c10f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi @@ -1,3 +1,10 @@ +"""distutils.spawn + +Provides the 'spawn()' function, a front-end to various platform- +specific functions for launching another program in a sub-process. +Also provides the 'find_executable()' to search the path for a given +executable name. +""" from collections.abc import Iterable from typing import Literal @@ -6,5 +13,25 @@ def spawn( search_path: bool | Literal[0, 1] = 1, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, -) -> None: ... -def find_executable(executable: str, path: str | None = None) -> str | None: ... +) -> None: + """Run another program, specified as a command list 'cmd', in a new process. + + 'cmd' is just the argument list for the new process, ie. + cmd[0] is the program to run and cmd[1:] are the rest of its arguments. + There is no way to run a program with a name different from that of its + executable. + + If 'search_path' is true (the default), the system's executable + search path will be used to find the program; otherwise, cmd[0] + must be the exact path to the executable. If 'dry_run' is true, + the command will not actually be run. + + Raise DistutilsExecError if running the program fails in any way; just + return on success. + """ +def find_executable(executable: str, path: str | None = None) -> str | None: + """Tries to find 'executable' in the directories listed in 'path'. + + A string listing directories separated by 'os.pathsep'; defaults to + os.environ['PATH']. Returns the complete filename or None if not found. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi index 4a9c45eb562a4..2cf01d35a98f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi @@ -1,3 +1,13 @@ +"""Provide access to Python's configuration information. The specific +configuration variables available depend heavily on the platform and +configuration. The values may be retrieved using +get_config_var(name), and the list of variables is available via +get_config_vars().keys(). Additional convenience functions are also +available. + +Written by: Fred L. Drake, Jr. 
+Email: +""" import sys from collections.abc import Mapping from distutils.ccompiler import CCompiler @@ -11,23 +21,80 @@ BASE_EXEC_PREFIX: Final[str] project_base: Final[str] python_build: Final[bool] -def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ... +def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: + """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in + 'string' according to 'vars' (a dictionary mapping variable names to + values). Variables not present in 'vars' are silently expanded to the + empty string. The variable values in 'vars' should not contain further + variable expansions; if 'vars' is the output of 'parse_makefile()', + you're fine. Returns a variable-expanded version of 's'. + """ @overload @deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") -def get_config_var(name: Literal["SO"]) -> int | str | None: ... +def get_config_var(name: Literal["SO"]) -> int | str | None: + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ @overload def get_config_var(name: str) -> int | str | None: ... @overload -def get_config_vars() -> dict[str, str | int]: ... +def get_config_vars() -> dict[str, str | int]: + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ @overload def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ... -def get_config_h_filename() -> str: ... -def get_makefile_filename() -> str: ... -def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: ... +def get_config_h_filename() -> str: + """Return the path of pyconfig.h. +""" +def get_makefile_filename() -> str: + """Return the path of the Makefile. +""" +def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: + """Return the directory containing installed Python header files. + + If 'plat_specific' is false (the default), this is the path to the + non-platform-specific header files, i.e. Python.h and so on; + otherwise, this is the path to platform-specific header files + (namely pyconfig.h). + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ def get_python_lib( plat_specific: bool | Literal[0, 1] = 0, standard_lib: bool | Literal[0, 1] = 0, prefix: str | None = None -) -> str: ... -def customize_compiler(compiler: CCompiler) -> None: ... +) -> str: + """Return the directory containing the Python library (standard or + site additions). + + If 'plat_specific' is true, return the directory containing + platform-specific modules, i.e. any module from a non-pure-Python + module distribution; otherwise, return the platform-shared library + directory. If 'standard_lib' is true, return the directory + containing standard Python library modules; otherwise, return the + directory for site-specific modules. + + If 'prefix' is supplied, use it instead of sys.base_prefix or + sys.base_exec_prefix -- i.e., ignore 'plat_specific'. + """ +def customize_compiler(compiler: CCompiler) -> None: + """Do any platform-specific customization of a CCompiler instance. 
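A sketch of querying the configuration values described above; the exact results naturally vary by platform and Python build:

    from distutils import sysconfig

    print(sysconfig.get_python_inc())                   # directory containing Python.h
    print(sysconfig.get_python_lib(plat_specific=1))    # platform-specific library directory
    print(sysconfig.get_config_var("EXT_SUFFIX"))       # e.g. ".cpython-311-x86_64-linux-gnu.so"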
+ + Mainly needed on Unix, so we can plug in the information that + varies across Unices and is stored in Python's Makefile. + """ if sys.version_info < (3, 10): - def get_python_version() -> str: ... + def get_python_version() -> str: + """Return a string containing the major and minor Python version, + leaving off the patchlevel. Sample return values could be '1.5' + or '2.2'. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi index 54951af7e55d6..b3897c2310de9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi @@ -1,6 +1,70 @@ +"""text_file + +provides the TextFile class, which gives an interface to text files +that (optionally) takes care of stripping comments, ignoring blank +lines, and joining lines with backslashes. +""" from typing import IO, Literal class TextFile: + """Provides a file-like object that takes care of all the things you + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true} + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) + join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. + collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method! 
In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not. +""" def __init__( self, filename: str | None = None, @@ -12,10 +76,46 @@ class TextFile: skip_blanks: bool | Literal[0, 1] = ..., join_lines: bool | Literal[0, 1] = ..., collapse_join: bool | Literal[0, 1] = ..., - ) -> None: ... - def open(self, filename: str) -> None: ... - def close(self) -> None: ... - def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... - def readline(self) -> str | None: ... - def readlines(self) -> list[str]: ... - def unreadline(self, line: str) -> str: ... + ) -> None: + """Construct a new TextFile object. At least one of 'filename' + (a string) and 'file' (a file-like object) must be supplied. + They keyword argument options are described above and affect + the values returned by 'readline()'. +""" + def open(self, filename: str) -> None: + """Open a new file named 'filename'. This overrides both the + 'filename' and 'file' arguments to the constructor. +""" + def close(self) -> None: + """Close the current file and forget everything we know about it + (filename, current line number). +""" + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: + """Print (to stderr) a warning message tied to the current logical + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". If 'line' supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line. +""" + def readline(self) -> str | None: + """Read and return a single logical line from the current file (or + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'strip_blanks' is + not. +""" + def readlines(self) -> list[str]: + """Read and return the list of all logical lines remaining in the + current file. +""" + def unreadline(self, line: str) -> str: + """Push 'line' (a string) onto an internal buffer that will be + checked by future 'readline()' calls. Handy for implementing + a parser with line-at-a-time lookahead. 
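A sketch of the readline() loop implied by the TextFile semantics above; the file name is hypothetical, and end-of-file is signalled by None rather than by an empty string:

    from distutils.text_file import TextFile

    tf = TextFile("options.txt", strip_comments=1, skip_blanks=1, join_lines=1)
    while True:
        line = tf.readline()
        if line is None:        # None means end-of-file; "" may just be a blank line
            break
        print(line)
    tf.close()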
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi index e1d443471af36..6964eda86b479 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi @@ -1,3 +1,17 @@ +"""distutils.unixccompiler + +Contains the UnixCCompiler class, a subclass of CCompiler that handles +the "typical" Unix-style command-line C compiler: + * macros defined with -Dname[=value] + * macros undefined with -Uname + * include search directories specified with -Idir + * libraries specified with -lllib + * library search directories specified with -Ldir + * compile handled by 'cc' (or similar) executable with -c option: + compiles .c to .o + * link static library handled by 'ar' command (possibly with 'ranlib') + * link shared library handled by 'cc -shared' +""" from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi index 0e1bb4165d99d..e41378f729433 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi @@ -1,3 +1,8 @@ +"""distutils.util + +Miscellaneous utility functions -- anything that doesn't fit into +one of the other *util.py modules. +""" from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any, Literal @@ -5,21 +10,91 @@ from typing_extensions import TypeVarTuple, Unpack _Ts = TypeVarTuple("_Ts") -def get_host_platform() -> str: ... +def get_host_platform() -> str: + """Return a string that identifies the current platform. This is used mainly to + distinguish platform-specific build directories and platform-specific built + distributions. Typically includes the OS name and version and the + architecture (as supplied by 'os.uname()'), although the exact information + included depends on the OS; eg. on Linux, the kernel version isn't + particularly important. + + Examples of returned values: + linux-i586 + linux-alpha (?) + solaris-2.6-sun4u + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + + """ def get_platform() -> str: ... -def convert_path(pathname: str) -> str: ... -def change_root(new_root: StrPath, pathname: StrPath) -> str: ... -def check_environ() -> None: ... -def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... -def split_quoted(s: str) -> list[str]: ... +def convert_path(pathname: str) -> str: + """Return 'pathname' as a name that will work on the native filesystem, + i.e. split it on '/' and put it back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ +def change_root(new_root: StrPath, pathname: StrPath) -> str: + """Return 'pathname' with 'new_root' prepended. If 'pathname' is + relative, this is equivalent to "os.path.join(new_root,pathname)". 
+ Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + """ +def check_environ() -> None: + """Ensure that 'os.environ' has all the environment variables we + guarantee that users can use in config files, command-line options, + etc. Currently this includes: + HOME - user's home directory (Unix only) + PLAT - description of the current platform, including hardware + and OS (see 'get_platform()') + """ +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: + """Perform shell/Perl-style variable substitution on 'string'. Every + occurrence of '$' followed by a name is considered a variable, and + variable is substituted by the value found in the 'local_vars' + dictionary, or in 'os.environ' if it's not in 'local_vars'. + 'os.environ' is first checked/augmented to guarantee that it contains + certain values: see 'check_environ()'. Raise ValueError for any + variables not found in either 'local_vars' or 'os.environ'. + """ +def split_quoted(s: str) -> list[str]: + """Split a string up according to Unix shell-like rules for quotes and + backslashes. In short: words are delimited by spaces, as long as those + spaces are not escaped by a backslash, or inside a quoted string. + Single and double quotes are equivalent, and the quote characters can + be backslash-escaped. The backslash is stripped from any two-character + escape sequence, leaving only the escaped character. The quote + characters are stripped from any quoted string. Returns a list of + words. + """ def execute( func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0, -) -> None: ... -def strtobool(val: str) -> Literal[0, 1]: ... +) -> None: + """Perform some action that affects the outside world (eg. by + writing to the filesystem). Such actions are special because they + are disabled by the 'dry_run' flag. This method takes care of all + that bureaucracy for you; all you have to do is supply the + function to call and an argument tuple for it (to embody the + "external action" being performed), and an optional message to + print. + """ +def strtobool(val: str) -> Literal[0, 1]: + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ def byte_compile( py_files: list[str], optimize: int = 0, @@ -29,14 +104,51 @@ def byte_compile( verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0, direct: bool | None = None, -) -> None: ... -def rfc822_escape(header: str) -> str: ... +) -> None: + """Byte-compile a collection of Python source files to .pyc + files in a __pycache__ subdirectory. 'py_files' is a list + of files to compile; any files that don't end in ".py" are silently + skipped. 'optimize' must be one of the following: + 0 - don't optimize + 1 - normal optimization (like "python -O") + 2 - extra optimization (like "python -OO") + If 'force' is true, all files are recompiled regardless of + timestamps. + + The source filename encoded in each bytecode file defaults to the + filenames listed in 'py_files'; you can modify these with 'prefix' and + 'basedir'. 'prefix' is a string that will be stripped off of each + source filename, and 'base_dir' is a directory name that will be + prepended (after 'prefix' is stripped). 
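A few one-liners exercising the utility helpers documented above:

    from distutils.util import convert_path, split_quoted, strtobool

    print(convert_path("demo/data/config.txt"))          # Unix-style name -> native separators
    print(split_quoted('gcc -DNAME="some value" -O2'))   # ['gcc', '-DNAME=some value', '-O2']
    print(strtobool("Yes"))                              # 1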
You can supply either or both + (or neither) of 'prefix' and 'base_dir', as you wish. + + If 'dry_run' is true, doesn't actually do anything that would + affect the filesystem. + + Byte-compilation is either done directly in this interpreter process + with the standard py_compile module, or indirectly by writing a + temporary script and executing it. Normally, you should let + 'byte_compile()' figure out to use direct compilation or not (see + the source for details). The 'direct' flag is used by the script + generated in indirect mode; unless you know what you're doing, leave + it set to None. + """ +def rfc822_escape(header: str) -> str: + """Return a version of the string escaped for inclusion in an + RFC-822 header, by ensuring there are 8 spaces space after each newline. + """ def run_2to3( files: Iterable[str], fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Unused = None, -) -> None: ... +) -> None: + """Invoke 2to3 on a list of Python files. + The files should all come from the build area, as the + modification is done in-place. To reduce the build time, + only files modified since the last invocation of this + function should be passed in the files argument. +""" def copydir_run_2to3( src: StrPath, dest: StrPath, @@ -44,9 +156,19 @@ def copydir_run_2to3( fixer_names: Iterable[str] | None = None, options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None, -) -> list[str]: ... +) -> list[str]: + """Recursively copy a directory, only copying new and changed files, + running run_2to3 over all newly copied Python modules afterward. + + If you give a template string, it's parsed like a MANIFEST.in. + """ class Mixin2to3: + """Mixin class for commands that run 2to3. + To configure 2to3, setup scripts may either change + the class variables, or inherit from individual commands + to override how 2to3 is invoked. +""" fixer_names: Iterable[str] | None options: Mapping[str, Any] | None explicit: Container[str] | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi index 47da65ef87aab..f7c70bddb0a12 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi @@ -1,8 +1,31 @@ +"""Provides classes to represent module version numbers (one class for +each style of version numbering). There are currently two such classes +implemented: StrictVersion and LooseVersion. + +Every version number class implements the following interface: + * the 'parse' method takes a string and parses it to some internal + representation; if the string is an invalid version number, + 'parse' raises a ValueError exception + * the class constructor takes an optional string argument which, + if supplied, is passed to 'parse' + * __str__ reconstructs the string that was passed to 'parse' (or + an equivalent string -- ie. one that will generate an equivalent + version number instance) + * __repr__ generates Python code to recreate the version number instance + * _cmp compares the current instance with either another instance + of the same class or a string (which will be parsed to an instance + of the same class, thus must follow the same rules) +""" from abc import abstractmethod from re import Pattern from typing_extensions import Self class Version: + """Abstract base class for version numbering classes. 
Just provides + constructor (__init__) and reproducer (__repr__), because those + seem to be the same for all version numbering classes; and route + rich comparisons to _cmp. + """ def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self | str) -> bool: ... def __le__(self, other: Self | str) -> bool: ... @@ -18,6 +41,40 @@ class Version: def _cmp(self, other: Self | str) -> bool: ... class StrictVersion(Version): + """Version numbering for anal retentives and software idealists. + Implements the standard interface for version number classes as + described above. A version number consists of two or three + dot-separated numeric components, with an optional "pre-release" tag + on the end. The pre-release tag consists of the letter 'a' or 'b' + followed by a number. If the numeric components of two version + numbers are equal, then one with a pre-release tag will always + be deemed earlier (lesser) than one without. + + The following are valid version numbers (shown in the order that + would be obtained by sorting according to the supplied cmp function): + + 0.4 0.4.0 (these two are equivalent) + 0.4.1 + 0.5a1 + 0.5b3 + 0.5 + 0.9.6 + 1.0 + 1.0.4a3 + 1.0.4b1 + 1.0.4 + + The following are examples of invalid version numbers: + + 1 + 2.7.2.2 + 1.3.a4 + 1.3pl1 + 1.3c4 + + The rationale for this version numbering system will be explained + in the distutils documentation. + """ version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None @@ -27,6 +84,36 @@ class StrictVersion(Version): def _cmp(self, other: Self | str) -> bool: ... class LooseVersion(Version): + """Version numbering for anarchists and software realists. + Implements the standard interface for version number classes as + described above. A version number consists of a series of numbers, + separated by either periods or strings of letters. When comparing + version numbers, the numeric components will be compared + numerically, and the alphabetic components lexically. The following + are all valid version numbers, in no particular order: + + 1.5.1 + 1.5.2b2 + 161 + 3.10a + 8.02 + 3.4j + 1996.07.12 + 3.2.pl0 + 3.1.1.6 + 2g6 + 11g + 0.960923 + 2.2beta29 + 1.13++ + 5.5.kw + 2.0b1pl0 + + In fact, there is no such thing as an invalid version number under + this scheme; the rules for comparison are simple and predictable, + but may not always give the results you want (for some definition + of "want"). + """ component_re: Pattern[str] vstring: str version: tuple[str | int, ...] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi index 1bb96e1a77868..3bc81a758b34a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi @@ -1,3 +1,41 @@ +"""Module doctest -- a framework for running examples in docstrings. + +In simplest use, end each module M to be tested with: + +def _test(): + import doctest + doctest.testmod() + +if __name__ == "__main__": + _test() + +Then running the module as a script will cause the examples in the +docstrings to get executed and verified: + +python M.py + +This won't display anything unless an example fails, in which case the +failing example(s) and the cause(s) of the failure(s) are printed to stdout +(why not stderr? because stderr is a lame hack <0.2 wink>), and the final +line of output is "Test failed.". 
+ +Run it with the -v switch instead: + +python M.py -v + +and a detailed report of all examples tried is printed to stdout, along +with assorted summaries at the end. + +You can force verbose mode by passing "verbose=True" to testmod, or prohibit +it by passing "verbose=False". In either of those cases, sys.argv is not +examined by testmod. + +There are a variety of other ways to run doctests, including integration +with the unittest framework, and support for running non-Python text +files containing doctests. There are also many ways to override parts +of doctest's default behaviors. See the Library Reference Manual for +details. +""" import sys import types import unittest @@ -54,6 +92,8 @@ if sys.version_info >= (3, 13): else: class TestResults(NamedTuple): + """TestResults(failed, attempted) +""" failed: int attempted: int @@ -82,6 +122,40 @@ BLANKLINE_MARKER: Final = "" ELLIPSIS_MARKER: Final = "..." class Example: + """ +A single doctest example, consisting of source code and expected +output. `Example` defines the following attributes: + + - source: A single Python statement, always ending with a newline. + The constructor adds a newline if needed. + + - want: The expected output from running the source code (either + from stdout, or a traceback in case of exception). `want` ends + with a newline unless it's empty, in which case it's an empty + string. The constructor adds a newline if needed. + + - exc_msg: The exception message generated by the example, if + the example is expected to generate an exception; or `None` if + it is not expected to generate an exception. This exception + message is compared against the return value of + `traceback.format_exception_only()`. `exc_msg` ends with a + newline unless it's `None`. The constructor adds a newline + if needed. + + - lineno: The line number within the DocTest string containing + this Example where the Example begins. This line number is + zero-based, with respect to the beginning of the DocTest. + + - indent: The example's indentation in the DocTest string. + I.e., the number of space characters that precede the + example's first prompt. + + - options: A dictionary mapping from option flags to True or + False, which is used to override default options for this + example. Any option flags not contained in this dictionary + are left at their default value (as specified by the + DocTestRunner's optionflags). By default, no options are set. +""" source: str want: str exc_msg: str | None @@ -101,6 +175,29 @@ class Example: def __eq__(self, other: object) -> bool: ... class DocTest: + """ +A collection of doctest examples that should be run in a single +namespace. Each `DocTest` defines the following attributes: + + - examples: the list of examples. + + - globs: The namespace (aka globals) that the examples should + be run in. + + - name: A name identifying the DocTest (typically, the name of + the object whose docstring this DocTest was extracted from). + + - filename: The name of the file that this DocTest was extracted + from, or `None` if the filename is unknown. + + - lineno: The line number within filename where this DocTest + begins, or `None` if the line number is unavailable. This + line number is zero-based, with respect to the beginning of + the file. + + - docstring: The string that the examples were extracted from, + or `None` if the string is unavailable. 
+""" examples: list[Example] globs: dict[str, Any] name: str @@ -115,20 +212,74 @@ class DocTest: filename: str | None, lineno: int | None, docstring: str | None, - ) -> None: ... + ) -> None: + """ +Create a new DocTest containing the given examples. The +DocTest's globals are initialized with a copy of `globs`. +""" def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... def __eq__(self, other: object) -> bool: ... class DocTestParser: - def parse(self, string: str, name: str = "") -> list[str | Example]: ... - def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... - def get_examples(self, string: str, name: str = "") -> list[Example]: ... + """ +A class used to parse strings containing doctest examples. +""" + def parse(self, string: str, name: str = "") -> list[str | Example]: + """ +Divide the given string into examples and intervening text, +and return them as a list of alternating Examples and strings. +Line numbers for the Examples are 0-based. The optional +argument `name` is a name identifying this string, and is only +used for error messages. +""" + def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: + """ +Extract all doctest examples from the given string, and +collect them into a `DocTest` object. + +`globs`, `name`, `filename`, and `lineno` are attributes for +the new `DocTest` object. See the documentation for `DocTest` +for more information. +""" + def get_examples(self, string: str, name: str = "") -> list[Example]: + """ +Extract all doctest examples from the given string, and return +them as a list of `Example` objects. Line numbers are +0-based, because it's most common in doctests that nothing +interesting appears on the same line as opening triple-quote, +and so the first interesting line is called "line 1" then. + +The optional argument `name` is a name identifying this +string, and is only used for error messages. +""" class DocTestFinder: + """ +A class used to extract the DocTests that are relevant to a given +object, from its docstring and the docstrings of its contained +objects. Doctests can currently be extracted from the following +object types: modules, functions, classes, methods, staticmethods, +classmethods, and properties. +""" def __init__( self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True - ) -> None: ... + ) -> None: + """ +Create a new doctest finder. + +The optional argument `parser` specifies a class or +function that should be used to create new DocTest objects (or +objects that implement the same interface as DocTest). The +signature for this factory function should match the signature +of the DocTest constructor. + +If the optional argument `recurse` is false, then `find` will +only examine the given object, and not any contained objects. + +If the optional argument `exclude_empty` is false, then `find` +will include tests for objects with empty docstrings. +""" def find( self, obj: object, @@ -136,11 +287,106 @@ class DocTestFinder: module: None | bool | types.ModuleType = None, globs: dict[str, Any] | None = None, extraglobs: dict[str, Any] | None = None, - ) -> list[DocTest]: ... + ) -> list[DocTest]: + """ +Return a list of the DocTests that are defined by the given +object's docstring, or by any of its contained objects' +docstrings. + +The optional parameter `module` is the module that contains +the given object. 
If the module is not specified or is None, then +the test finder will attempt to automatically determine the +correct module. The object's module is used: + + - As a default namespace, if `globs` is not specified. + - To prevent the DocTestFinder from extracting DocTests + from objects that are imported from other modules. + - To find the name of the file containing the object. + - To help find the line number of the object within its + file. + +Contained objects whose module does not match `module` are ignored. + +If `module` is False, no attempt to find the module will be made. +This is obscure, of use mostly in tests: if `module` is False, or +is None but cannot be found automatically, then all objects are +considered to belong to the (non-existent) module, so all contained +objects will (recursively) be searched for doctests. + +The globals for each DocTest is formed by combining `globs` +and `extraglobs` (bindings in `extraglobs` override bindings +in `globs`). A new copy of the globals dictionary is created +for each DocTest. If `globs` is not specified, then it +defaults to the module's `__dict__`, if specified, or {} +otherwise. If `extraglobs` is not specified, then it defaults +to {}. + +""" _Out: TypeAlias = Callable[[str], object] class DocTestRunner: + """ +A class used to run DocTest test cases, and accumulate statistics. +The `run` method is used to process a single DocTest case. It +returns a TestResults instance. + + >>> save_colorize = _colorize.COLORIZE + >>> _colorize.COLORIZE = False + + >>> tests = DocTestFinder().find(_TestClass) + >>> runner = DocTestRunner(verbose=False) + >>> tests.sort(key = lambda test: test.name) + >>> for test in tests: + ... print(test.name, '->', runner.run(test)) + _TestClass -> TestResults(failed=0, attempted=2) + _TestClass.__init__ -> TestResults(failed=0, attempted=2) + _TestClass.get -> TestResults(failed=0, attempted=2) + _TestClass.square -> TestResults(failed=0, attempted=1) + +The `summarize` method prints a summary of all the test cases that +have been run by the runner, and returns an aggregated TestResults +instance: + + >>> runner.summarize(verbose=1) + 4 items passed all tests: + 2 tests in _TestClass + 2 tests in _TestClass.__init__ + 2 tests in _TestClass.get + 1 test in _TestClass.square + 7 tests in 4 items. + 7 passed. + Test passed. + TestResults(failed=0, attempted=7) + +The aggregated number of tried examples and failed examples is also +available via the `tries`, `failures` and `skips` attributes: + + >>> runner.tries + 7 + >>> runner.failures + 0 + >>> runner.skips + 0 + +The comparison between expected outputs and actual outputs is done +by an `OutputChecker`. This comparison may be customized with a +number of option flags; see the documentation for `testmod` for +more information. If the option flags are insufficient, then the +comparison may also be customized by passing a subclass of +`OutputChecker` to the constructor. + +The test runner's display output can be controlled in two ways. +First, an output function (`out`) can be passed to +`TestRunner.run`; this function will be called with strings that +should be displayed. It defaults to `sys.stdout.write`. If +capturing the output is not sufficient, then the display output +can be also customized by subclassing DocTestRunner, and +overriding the methods `report_start`, `report_success`, +`report_unexpected_exception`, and `report_failure`. 
+ + >>> _colorize.COLORIZE = save_colorize +""" DIVIDER: str optionflags: int original_optionflags: int @@ -149,34 +395,222 @@ class DocTestRunner: if sys.version_info >= (3, 13): skips: int test: DocTest - def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... - def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... - def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... - def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... - def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: + """ +Create a new test runner. + +Optional keyword arg `checker` is the `OutputChecker` that +should be used to compare the expected outputs and actual +outputs of doctest examples. + +Optional keyword arg 'verbose' prints lots of stuff if true, +only failures if false; by default, it's true iff '-v' is in +sys.argv. + +Optional argument `optionflags` can be used to control how the +test runner compares expected output to actual output, and how +it displays failures. See the documentation for `testmod` for +more information. +""" + def report_start(self, out: _Out, test: DocTest, example: Example) -> None: + """ +Report that the test runner is about to process the given +example. (Only displays a message if verbose=True) +""" + def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: + """ +Report that the given example ran successfully. (Only +displays a message if verbose=True) +""" + def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: + """ +Report that the given example failed. +""" + def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: + """ +Report that the given example raised an unexpected exception. +""" def run( self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True - ) -> TestResults: ... - def summarize(self, verbose: bool | None = None) -> TestResults: ... + ) -> TestResults: + """ +Run the examples in `test`, and display the results using the +writer function `out`. + +The examples are run in the namespace `test.globs`. If +`clear_globs` is true (the default), then this namespace will +be cleared after the test runs, to help with garbage +collection. If you would like to examine the namespace after +the test completes, then use `clear_globs=False`. + +`compileflags` gives the set of flags that should be used by +the Python compiler when running the examples. If not +specified, then it will default to the set of future-import +flags that apply to `globs`. + +The output of each example is checked using +`DocTestRunner.check_output`, and the results are formatted by +the `DocTestRunner.report_*` methods. +""" + def summarize(self, verbose: bool | None = None) -> TestResults: + """ +Print a summary of all the test cases that have been run by +this DocTestRunner, and return a TestResults instance. + +The optional `verbose` argument controls how detailed the +summary is. If the verbosity is not specified, then the +DocTestRunner's verbosity is used. +""" def merge(self, other: DocTestRunner) -> None: ... 
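A minimal sketch of the find/run/summarize workflow that the DocTestRunner docstring above describes, using a hypothetical `square` helper in place of doctest's private `_TestClass`; this is illustrative only and not part of the vendored stub:

import doctest

def square(x: int) -> int:
    """Return x squared.

    >>> square(3)
    9
    """
    return x * x

# Collect the doctests attached to `square` and run them with a quiet runner.
finder = doctest.DocTestFinder()
runner = doctest.DocTestRunner(verbose=False)
for test in finder.find(square):
    runner.run(test)

# summarize() returns an aggregated TestResults instance.
print(runner.summarize())  # TestResults(failed=0, attempted=1)
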
class OutputChecker: - def check_output(self, want: str, got: str, optionflags: int) -> bool: ... - def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... + """ +A class used to check whether the actual output from a doctest +example matches the expected output. `OutputChecker` defines two +methods: `check_output`, which compares a given pair of outputs, +and returns true if they match; and `output_difference`, which +returns a string describing the differences between two outputs. +""" + def check_output(self, want: str, got: str, optionflags: int) -> bool: + """ +Return True iff the actual output from an example (`got`) +matches the expected output (`want`). These strings are +always considered to match if they are identical; but +depending on what option flags the test runner is using, +several non-exact match types are also possible. See the +documentation for `TestRunner` for more information about +option flags. +""" + def output_difference(self, example: Example, got: str, optionflags: int) -> str: + """ +Return a string describing the differences between the +expected output for a given example (`example`) and the actual +output (`got`). `optionflags` is the set of option flags used +to compare `want` and `got`. +""" class DocTestFailure(Exception): + """A DocTest example has failed in debugging mode. + +The exception instance has variables: + +- test: the DocTest object being run + +- example: the Example object that failed + +- got: the actual output +""" test: DocTest example: Example got: str def __init__(self, test: DocTest, example: Example, got: str) -> None: ... class UnexpectedException(Exception): + """A DocTest example has encountered an unexpected exception + +The exception instance has variables: + +- test: the DocTest object being run + +- example: the Example object that failed + +- exc_info: the exception info +""" test: DocTest example: Example exc_info: ExcInfo def __init__(self, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... -class DebugRunner(DocTestRunner): ... +class DebugRunner(DocTestRunner): + """Run doc tests but raise an exception as soon as there is a failure. + +If an unexpected exception occurs, an UnexpectedException is raised. +It contains the test, the example, and the original exception: + + >>> runner = DebugRunner(verbose=False) + >>> test = DocTestParser().get_doctest('>>> raise KeyError\\n42', + ... {}, 'foo', 'foo.py', 0) + >>> try: + ... runner.run(test) + ... except UnexpectedException as f: + ... failure = f + + >>> failure.test is test + True + + >>> failure.example.want + '42\\n' + + >>> exc_info = failure.exc_info + >>> raise exc_info[1] # Already has the traceback + Traceback (most recent call last): + ... + KeyError + +We wrap the original exception to give the calling application +access to the test and example information. + +If the output doesn't match, then a DocTestFailure is raised: + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 1 + ... >>> x + ... 2 + ... ''', {}, 'foo', 'foo.py', 0) + + >>> try: + ... runner.run(test) + ... except DocTestFailure as f: + ... failure = f + +DocTestFailure objects provide access to the test: + + >>> failure.test is test + True + +As well as to the example: + + >>> failure.example.want + '2\\n' + +and the actual output: + + >>> failure.got + '1\\n' + +If a failure or error occurs, the globals are left intact: + + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 1} + + >>> test = DocTestParser().get_doctest(''' + ... 
>>> x = 2 + ... >>> raise KeyError + ... ''', {}, 'foo', 'foo.py', 0) + + >>> runner.run(test) + Traceback (most recent call last): + ... + doctest.UnexpectedException: + + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 2} + +But the globals are cleared if there is no error: + + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 2 + ... ''', {}, 'foo', 'foo.py', 0) + + >>> runner.run(test) + TestResults(failed=0, attempted=1) + + >>> test.globs + {} + +""" master: DocTestRunner | None @@ -190,7 +624,70 @@ def testmod( extraglobs: dict[str, Any] | None = None, raise_on_error: bool = False, exclude_empty: bool = False, -) -> TestResults: ... +) -> TestResults: + """m=None, name=None, globs=None, verbose=None, report=True, + optionflags=0, extraglobs=None, raise_on_error=False, + exclude_empty=False + +Test examples in docstrings in functions and classes reachable +from module m (or the current module if m is not supplied), starting +with m.__doc__. + +Also test examples reachable from dict m.__test__ if it exists. +m.__test__ maps names to functions, classes and strings; +function and class docstrings are tested even if the name is private; +strings are tested directly, as if they were docstrings. + +Return (#failures, #tests). + +See help(doctest) for an overview. + +Optional keyword arg "name" gives the name of the module; by default +use m.__name__. + +Optional keyword arg "globs" gives a dict to be used as the globals +when executing examples; by default, use m.__dict__. A copy of this +dict is actually used for each docstring, so that each docstring's +examples start with a clean slate. + +Optional keyword arg "extraglobs" gives a dictionary that should be +merged into the globals that are used to execute examples. By +default, no extra globals are used. This is new in 2.4. + +Optional keyword arg "verbose" prints lots of stuff if true, prints +only failures if false; by default, it's true iff "-v" is in sys.argv. + +Optional keyword arg "report" prints a summary at the end when true, +else prints nothing at the end. In verbose mode, the summary is +detailed, else very brief (in fact, empty if all tests passed). + +Optional keyword arg "optionflags" or's together module constants, +and defaults to 0. This is new in 2.3. Possible values (see the +docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + +Optional keyword arg "raise_on_error" raises an exception on the +first unexpected exception or failure. This allows failures to be +post-mortem debugged. + +Advanced tomfoolery: testmod runs methods of a local instance of +class doctest.Tester, then merges the results into (or creates) +global Tester instance doctest.master. Methods of doctest.master +can be called directly too, if you want to do something unusual. +Passing report=0 to testmod is especially useful then, to delay +displaying a summary. Invoke doctest.master.summarize(verbose) +when you're done fiddling. +""" def testfile( filename: str, module_relative: bool = True, @@ -204,7 +701,83 @@ def testfile( raise_on_error: bool = False, parser: DocTestParser = ..., encoding: str | None = None, -) -> TestResults: ... +) -> TestResults: + """ +Test examples in the given file. Return (#failures, #tests). 
+ +Optional keyword arg "module_relative" specifies how filenames +should be interpreted: + + - If "module_relative" is True (the default), then "filename" + specifies a module-relative path. By default, this path is + relative to the calling module's directory; but if the + "package" argument is specified, then it is relative to that + package. To ensure os-independence, "filename" should use + "/" characters to separate path segments, and should not + be an absolute path (i.e., it may not begin with "/"). + + - If "module_relative" is False, then "filename" specifies an + os-specific path. The path may be absolute or relative (to + the current working directory). + +Optional keyword arg "name" gives the name of the test; by default +use the file's basename. + +Optional keyword argument "package" is a Python package or the +name of a Python package whose directory should be used as the +base directory for a module relative filename. If no package is +specified, then the calling module's directory is used as the base +directory for module relative filenames. It is an error to +specify "package" if "module_relative" is False. + +Optional keyword arg "globs" gives a dict to be used as the globals +when executing examples; by default, use {}. A copy of this dict +is actually used for each docstring, so that each docstring's +examples start with a clean slate. + +Optional keyword arg "extraglobs" gives a dictionary that should be +merged into the globals that are used to execute examples. By +default, no extra globals are used. + +Optional keyword arg "verbose" prints lots of stuff if true, prints +only failures if false; by default, it's true iff "-v" is in sys.argv. + +Optional keyword arg "report" prints a summary at the end when true, +else prints nothing at the end. In verbose mode, the summary is +detailed, else very brief (in fact, empty if all tests passed). + +Optional keyword arg "optionflags" or's together module constants, +and defaults to 0. Possible values (see the docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + +Optional keyword arg "raise_on_error" raises an exception on the +first unexpected exception or failure. This allows failures to be +post-mortem debugged. + +Optional keyword arg "parser" specifies a DocTestParser (or +subclass) that should be used to extract tests from the files. + +Optional keyword arg "encoding" specifies an encoding that should +be used to convert the file to unicode. + +Advanced tomfoolery: testmod runs methods of a local instance of +class doctest.Tester, then merges the results into (or creates) +global Tester instance doctest.master. Methods of doctest.master +can be called directly too, if you want to do something unusual. +Passing report=0 to testmod is especially useful then, to delay +displaying a summary. Invoke doctest.master.summarize(verbose) +when you're done fiddling. +""" def run_docstring_examples( f: object, globs: dict[str, Any], @@ -212,8 +785,49 @@ def run_docstring_examples( name: str = "NoName", compileflags: int | None = None, optionflags: int = 0, -) -> None: ... -def set_unittest_reportflags(flags: int) -> int: ... +) -> None: + """ +Test examples in the given object's docstring (`f`), using `globs` +as globals. Optional argument `name` is used in failure messages. +If the optional argument `verbose` is true, then generate output +even if there are no failures. 
+ +`compileflags` gives the set of flags that should be used by the +Python compiler when running the examples. If not specified, then +it will default to the set of future-import flags that apply to +`globs`. + +Optional keyword arg `optionflags` specifies options for the +testing and output. See the documentation for `testmod` for more +information. +""" +def set_unittest_reportflags(flags: int) -> int: + """Sets the unittest option flags. + +The old flag is returned so that a runner could restore the old +value if it wished to: + + >>> import doctest + >>> old = doctest._unittest_reportflags + >>> doctest.set_unittest_reportflags(REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) == old + True + + >>> doctest._unittest_reportflags == (REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) + True + +Only reporting flags can be set: + + >>> doctest.set_unittest_reportflags(ELLIPSIS) + Traceback (most recent call last): + ... + ValueError: ('Only reporting flags allowed', 8) + + >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) + True +""" class DocTestCase(unittest.TestCase): def __init__( @@ -241,7 +855,41 @@ def DocTestSuite( extraglobs: dict[str, Any] | None = None, test_finder: DocTestFinder | None = None, **options: Any, -) -> _DocTestSuite: ... +) -> _DocTestSuite: + """ +Convert doctest tests for a module to a unittest test suite. + +This converts each documentation string in a module that +contains doctest tests to a unittest test case. If any of the +tests in a doc string fail, then the test case fails. An exception +is raised showing the name of the file containing the test and a +(sometimes approximate) line number. + +The `module` argument provides the module to be tested. The argument +can be either a module or a module name. + +If no argument is given, the calling module is used. + +A number of options may be provided as keyword arguments: + +setUp + A set-up function. This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. + +tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. + +globs + A dictionary containing initial global variables for the tests. + +optionflags + A set of doctest option flags expressed as an integer. +""" class DocFileCase(DocTestCase): ... @@ -254,9 +902,137 @@ def DocFileTest( encoding: str | None = None, **options: Any, ) -> DocFileCase: ... -def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... -def script_from_examples(s: str) -> str: ... -def testsource(module: None | str | types.ModuleType, name: str) -> str: ... -def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... -def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... -def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... +def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: + """A unittest suite for one or more doctest files. + +The path to each doctest file is given as a string; the +interpretation of that string depends on the keyword argument +"module_relative". 
+ +A number of options may be provided as keyword arguments: + +module_relative + If "module_relative" is True, then the given file paths are + interpreted as os-independent module-relative paths. By + default, these paths are relative to the calling module's + directory; but if the "package" argument is specified, then + they are relative to that package. To ensure os-independence, + "filename" should use "/" characters to separate path + segments, and may not be an absolute path (i.e., it may not + begin with "/"). + + If "module_relative" is False, then the given file paths are + interpreted as os-specific paths. These paths may be absolute + or relative (to the current working directory). + +package + A Python package or the name of a Python package whose directory + should be used as the base directory for module relative paths. + If "package" is not specified, then the calling module's + directory is used as the base directory for module relative + filenames. It is an error to specify "package" if + "module_relative" is False. + +setUp + A set-up function. This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. + +tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. + +globs + A dictionary containing initial global variables for the tests. + +optionflags + A set of doctest option flags expressed as an integer. + +parser + A DocTestParser (or subclass) that should be used to extract + tests from the files. + +encoding + An encoding that will be used to convert the files to unicode. +""" +def script_from_examples(s: str) -> str: + """Extract script from text with examples. + +Converts text with examples to a Python script. Example input is +converted to regular code. Example output and all other words +are converted to comments: + +>>> text = ''' +... Here are examples of simple math. +... +... Python has super accurate integer addition +... +... >>> 2 + 2 +... 5 +... +... And very friendly error messages: +... +... >>> 1/0 +... To Infinity +... And +... Beyond +... +... You can use logic if you want: +... +... >>> if 0: +... ... blah +... ... blah +... ... +... +... Ho hum +... ''' + +>>> print(script_from_examples(text)) +# Here are examples of simple math. +# +# Python has super accurate integer addition +# +2 + 2 +# Expected: +## 5 +# +# And very friendly error messages: +# +1/0 +# Expected: +## To Infinity +## And +## Beyond +# +# You can use logic if you want: +# +if 0: + blah + blah +# +# Ho hum + +""" +def testsource(module: None | str | types.ModuleType, name: str) -> str: + """Extract the test sources from a doctest docstring as a script. + +Provide the module (or dotted name of the module) containing the +test to be debugged and the name (within the module) of the object +with the doc string with tests to be debugged. +""" +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: + """Debug a single doctest docstring, in argument `src` +""" +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: + """Debug a test script. `src` is the script, as a string. 
+""" +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: + """Debug a single doctest docstring. + +Provide the module (or dotted name of the module) containing the +test to be debugged and the name (within the module) of the object +with the docstring with tests to be debugged. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi index 53f8c350b01e3..5ed907c877549 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi @@ -1,3 +1,5 @@ +"""A package for parsing, handling, and generating email messages. +""" from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -33,13 +35,21 @@ _ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 _ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 @overload -def message_from_string(s: str) -> Message: ... +def message_from_string(s: str) -> Message: + """Parse a string into a Message object model. + +Optional _class and strict are passed to the Parser constructor. +""" @overload def message_from_string(s: str, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload def message_from_string(s: str, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... @overload -def message_from_bytes(s: bytes | bytearray) -> Message: ... +def message_from_bytes(s: bytes | bytearray) -> Message: + """Parse a bytes string into a Message object model. + +Optional _class and strict are passed to the Parser constructor. +""" @overload def message_from_bytes(s: bytes | bytearray, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload @@ -47,13 +57,21 @@ def message_from_bytes( s: bytes | bytearray, _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT] ) -> _MessageT: ... @overload -def message_from_file(fp: IO[str]) -> Message: ... +def message_from_file(fp: IO[str]) -> Message: + """Read a file and parse its contents into a Message object model. + +Optional _class and strict are passed to the Parser constructor. +""" @overload def message_from_file(fp: IO[str], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload def message_from_file(fp: IO[str], _class: Callable[[], _MessageT] = ..., *, policy: Policy[_MessageT]) -> _MessageT: ... @overload -def message_from_binary_file(fp: IO[bytes]) -> Message: ... +def message_from_binary_file(fp: IO[bytes]) -> Message: + """Read a binary file and parse its contents into a Message object model. + +Optional _class and strict are passed to the Parser constructor. +""" @overload def message_from_binary_file(fp: IO[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi index dededd006e5b5..b2e2b5e617135 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi @@ -1,3 +1,71 @@ +"""Header value parser implementing various email-related RFC parsing rules. + +The parsing methods defined in this module implement various email related +parsing rules. Principal among them is RFC 5322, which is the followon +to RFC 2822 and primarily a clarification of the former. 
It also implements +RFC 2047 encoded word decoding. + +RFC 5322 goes to considerable trouble to maintain backward compatibility with +RFC 822 in the parse phase, while cleaning up the structure on the generation +phase. This parser supports correct RFC 5322 generation by tagging white space +as folding white space only when folding is allowed in the non-obsolete rule +sets. Actually, the parser is even more generous when accepting input than RFC +5322 mandates, following the spirit of Postel's Law, which RFC 5322 encourages. +Where possible deviations from the standard are annotated on the 'defects' +attribute of tokens that deviate. + +The general structure of the parser follows RFC 5322, and uses its terminology +where there is a direct correspondence. Where the implementation requires a +somewhat different structure than that used by the formal grammar, new terms +that mimic the closest existing terms are used. Thus, it really helps to have +a copy of RFC 5322 handy when studying this code. + +Input to the parser is a string that has already been unfolded according to +RFC 5322 rules. According to the RFC this unfolding is the very first step, and +this parser leaves the unfolding step to a higher level message parser, which +will have already detected the line breaks that need unfolding while +determining the beginning and end of each header. + +The output of the parser is a TokenList object, which is a list subclass. A +TokenList is a recursive data structure. The terminal nodes of the structure +are Terminal objects, which are subclasses of str. These do not correspond +directly to terminal objects in the formal grammar, but are instead more +practical higher level combinations of true terminals. + +All TokenList and Terminal objects have a 'value' attribute, which produces the +semantically meaningful value of that part of the parse subtree. The value of +all whitespace tokens (no matter how many sub-tokens they may contain) is a +single space, as per the RFC rules. This includes 'CFWS', which is herein +included in the general class of whitespace tokens. There is one exception to +the rule that whitespace tokens are collapsed into single spaces in values: in +the value of a 'bare-quoted-string' (a quoted-string with no leading or +trailing whitespace), any whitespace that appeared between the quotation marks +is preserved in the returned value. Note that in all Terminal strings quoted +pairs are turned into their unquoted values. + +All TokenList and Terminal objects also have a string value, which attempts to +be a "canonical" representation of the RFC-compliant form of the substring that +produced the parsed subtree, including minimal use of quoted pair quoting. +Whitespace runs are not collapsed. + +Comment tokens also have a 'content' attribute providing the string found +between the parens (including any nested comments) with whitespace preserved. + +All TokenList and Terminal objects have a 'defects' attribute which is a +possibly empty list all of the defects found while creating the token. Defects +may appear on any token in the tree, and a composite list of all defects in the +subtree is available through the 'all_defects' attribute of any node. (For +Terminal notes x.defects == x.all_defects.) + +Each object in a parse tree is called a 'token', and each has a 'token_type' +attribute that gives the name from the RFC 5322 grammar that it represents. +Not all RFC 5322 nodes are produced, and there is one non-RFC 5322 node that +may be produced: 'ptext'. 
A 'ptext' is a string of printable ascii characters. +It is returned in place of lists of (ctext/quoted-pair) and +(qtext/quoted-pair). + +XXX: provide complete list of token types. +""" from collections.abc import Iterable, Iterator from email.errors import HeaderParseError, MessageDefect from email.policy import Policy @@ -22,7 +90,9 @@ NLSET: Final[set[str]] SPECIALSNL: Final[set[str]] # Added in Python 3.9.23, 3.10.17, 3.11.12, 3.12.9, 3.13.2 -def make_quoted_pairs(value: Any) -> str: ... +def make_quoted_pairs(value: Any) -> str: + """Escape dquote and backslash for use within a quoted-string. +""" def quote_string(value: Any) -> str: ... rfc2047_matcher: Final[Pattern[str]] @@ -39,7 +109,9 @@ class TokenList(list[TokenList | Terminal]): def all_defects(self) -> list[MessageDefect]: ... def startswith_fws(self) -> bool: ... @property - def as_ew_allowed(self) -> bool: ... + def as_ew_allowed(self) -> bool: + """True if all top level tokens of this part may be RFC2047 encoded. +""" @property def comments(self) -> list[str]: ... def fold(self, *, policy: Policy) -> str: ... @@ -340,59 +412,368 @@ class ValueTerminal(Terminal): def startswith_fws(self) -> bool: ... class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... -class _InvalidEwError(HeaderParseError): ... +class _InvalidEwError(HeaderParseError): + """Invalid encoded word found while parsing headers. +""" DOT: Final[ValueTerminal] ListSeparator: Final[ValueTerminal] RouteComponentMarker: Final[ValueTerminal] -def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... -def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: ... -def get_unstructured(value: str) -> UnstructuredTokenList: ... -def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... -def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... -def get_atext(value: str) -> tuple[ValueTerminal, str]: ... -def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: ... -def get_comment(value: str) -> tuple[Comment, str]: ... -def get_cfws(value: str) -> tuple[CFWSList, str]: ... -def get_quoted_string(value: str) -> tuple[QuotedString, str]: ... -def get_atom(value: str) -> tuple[Atom, str]: ... -def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: ... -def get_dot_atom(value: str) -> tuple[DotAtom, str]: ... -def get_word(value: str) -> tuple[Any, str]: ... -def get_phrase(value: str) -> tuple[Phrase, str]: ... -def get_local_part(value: str) -> tuple[LocalPart, str]: ... -def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: ... -def get_dtext(value: str) -> tuple[ValueTerminal, str]: ... -def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: ... -def get_domain(value: str) -> tuple[Domain, str]: ... -def get_addr_spec(value: str) -> tuple[AddrSpec, str]: ... -def get_obs_route(value: str) -> tuple[ObsRoute, str]: ... -def get_angle_addr(value: str) -> tuple[AngleAddr, str]: ... -def get_display_name(value: str) -> tuple[DisplayName, str]: ... -def get_name_addr(value: str) -> tuple[NameAddr, str]: ... -def get_mailbox(value: str) -> tuple[Mailbox, str]: ... -def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: ... -def get_mailbox_list(value: str) -> tuple[MailboxList, str]: ... -def get_group_list(value: str) -> tuple[GroupList, str]: ... -def get_group(value: str) -> tuple[Group, str]: ... -def get_address(value: str) -> tuple[Address, str]: ... -def get_address_list(value: str) -> tuple[AddressList, str]: ... 
-def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: ... -def get_msg_id(value: str) -> tuple[MsgID, str]: ... -def parse_message_id(value: str) -> MessageID: ... -def parse_mime_version(value: str) -> MIMEVersion: ... -def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: ... -def get_ttext(value: str) -> tuple[ValueTerminal, str]: ... -def get_token(value: str) -> tuple[Token, str]: ... -def get_attrtext(value: str) -> tuple[ValueTerminal, str]: ... -def get_attribute(value: str) -> tuple[Attribute, str]: ... -def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: ... -def get_extended_attribute(value: str) -> tuple[Attribute, str]: ... -def get_section(value: str) -> tuple[Section, str]: ... -def get_value(value: str) -> tuple[Value, str]: ... -def get_parameter(value: str) -> tuple[Parameter, str]: ... -def parse_mime_parameters(value: str) -> MimeParameters: ... -def parse_content_type_header(value: str) -> ContentType: ... -def parse_content_disposition_header(value: str) -> ContentDisposition: ... -def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: ... +def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: + """FWS = 1*WSP + +This isn't the RFC definition. We're using fws to represent tokens where +folding can be done, but when we are parsing the *un*folding has already +been done so we don't need to watch out for CRLF. + +""" +def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: + """encoded-word = "=?" charset "?" encoding "?" encoded-text "?=" + + """ +def get_unstructured(value: str) -> UnstructuredTokenList: + """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + + obs-NO-WS-CTL is control characters except WSP/CR/LF. + +So, basically, we have printable runs, plus control characters or nulls in +the obsolete syntax, separated by whitespace. Since RFC 2047 uses the +obsolete syntax in its specification, but requires whitespace on either +side of the encoded words, I can see no reason to need to separate the +non-printable-non-whitespace from the printable runs if they occur, so we +parse this into xtext tokens separated by WSP tokens. + +Because an 'unstructured' value must by definition constitute the entire +value, this 'get' routine does not return a remaining value, only the +parsed TokenList. + +""" +def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: + """ctext = + +This is not the RFC ctext, since we are handling nested comments in comment +and unquoting quoted-pairs here. We allow anything except the '()' +characters, but if we find any ASCII other than the RFC defined printable +ASCII, a NonPrintableDefect is added to the token's defects list. Since +quoted pairs are converted to their unquoted values, what is returned is +a 'ptext' token. In this case it is a WhiteSpaceTerminal, so it's value +is ' '. + +""" +def get_qcontent(value: str) -> tuple[ValueTerminal, str]: + """qcontent = qtext / quoted-pair + +We allow anything except the DQUOTE character, but if we find any ASCII +other than the RFC defined printable ASCII, a NonPrintableDefect is +added to the token's defects list. Any quoted pairs are converted to their +unquoted values, so what is returned is a 'ptext' token. In this case it +is a ValueTerminal. 
+ +""" +def get_atext(value: str) -> tuple[ValueTerminal, str]: + """atext = + +We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to +the token's defects list if we find non-atext characters. +""" +def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: + """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE + +A quoted-string without the leading or trailing white space. Its +value is the text between the quote marks, with whitespace +preserved and quoted pairs decoded. +""" +def get_comment(value: str) -> tuple[Comment, str]: + """comment = "(" *([FWS] ccontent) [FWS] ")" + ccontent = ctext / quoted-pair / comment + +We handle nested comments here, and quoted-pair in our qp-ctext routine. +""" +def get_cfws(value: str) -> tuple[CFWSList, str]: + """CFWS = (1*([FWS] comment) [FWS]) / FWS + + """ +def get_quoted_string(value: str) -> tuple[QuotedString, str]: + """quoted-string = [CFWS] [CFWS] + +'bare-quoted-string' is an intermediate class defined by this +parser and not by the RFC grammar. It is the quoted string +without any attached CFWS. +""" +def get_atom(value: str) -> tuple[Atom, str]: + """atom = [CFWS] 1*atext [CFWS] + +An atom could be an rfc2047 encoded word. +""" +def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: + """dot-text = 1*atext *("." 1*atext) + + """ +def get_dot_atom(value: str) -> tuple[DotAtom, str]: + """dot-atom = [CFWS] dot-atom-text [CFWS] + +Any place we can have a dot atom, we could instead have an rfc2047 encoded +word. +""" +def get_word(value: str) -> tuple[Any, str]: + """word = atom / quoted-string + +Either atom or quoted-string may start with CFWS. We have to peel off this +CFWS first to determine which type of word to parse. Afterward we splice +the leading CFWS, if any, into the parsed sub-token. + +If neither an atom or a quoted-string is found before the next special, a +HeaderParseError is raised. + +The token returned is either an Atom or a QuotedString, as appropriate. +This means the 'word' level of the formal grammar is not represented in the +parse tree; this is because having that extra layer when manipulating the +parse tree is more confusing than it is helpful. + +""" +def get_phrase(value: str) -> tuple[Phrase, str]: + """phrase = 1*word / obs-phrase + obs-phrase = word *(word / "." / CFWS) + +This means a phrase can be a sequence of words, periods, and CFWS in any +order as long as it starts with at least one word. If anything other than +words is detected, an ObsoleteHeaderDefect is added to the token's defect +list. We also accept a phrase that starts with CFWS followed by a dot; +this is registered as an InvalidHeaderDefect, since it is not supported by +even the obsolete grammar. + +""" +def get_local_part(value: str) -> tuple[LocalPart, str]: + """local-part = dot-atom / quoted-string / obs-local-part + + """ +def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: + """obs-local-part = word *("." word) + """ +def get_dtext(value: str) -> tuple[ValueTerminal, str]: + """dtext = / obs-dtext + obs-dtext = obs-NO-WS-CTL / quoted-pair + +We allow anything except the excluded characters, but if we find any +ASCII other than the RFC defined printable ASCII, a NonPrintableDefect is +added to the token's defects list. Quoted pairs are converted to their +unquoted values, so what is returned is a ptext token, in this case a +ValueTerminal. If there were quoted-printables, an ObsoleteHeaderDefect is +added to the returned token's defect list. 
+ +""" +def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: + """domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS] + + """ +def get_domain(value: str) -> tuple[Domain, str]: + """domain = dot-atom / domain-literal / obs-domain +obs-domain = atom *("." atom)) + +""" +def get_addr_spec(value: str) -> tuple[AddrSpec, str]: + """addr-spec = local-part "@" domain + + """ +def get_obs_route(value: str) -> tuple[ObsRoute, str]: + """obs-route = obs-domain-list ":" +obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) + +Returns an obs-route token with the appropriate sub-tokens (that is, +there is no obs-domain-list in the parse tree). +""" +def get_angle_addr(value: str) -> tuple[AngleAddr, str]: + """angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr +obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS] + +""" +def get_display_name(value: str) -> tuple[DisplayName, str]: + """display-name = phrase + +Because this is simply a name-rule, we don't return a display-name +token containing a phrase, but rather a display-name token with +the content of the phrase. + +""" +def get_name_addr(value: str) -> tuple[NameAddr, str]: + """name-addr = [display-name] angle-addr + + """ +def get_mailbox(value: str) -> tuple[Mailbox, str]: + """mailbox = name-addr / addr-spec + + """ +def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: + """Read everything up to one of the chars in endchars. + +This is outside the formal grammar. The InvalidMailbox TokenList that is +returned acts like a Mailbox, but the data attributes are None. + +""" +def get_mailbox_list(value: str) -> tuple[MailboxList, str]: + """mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list + obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS]) + +For this routine we go outside the formal grammar in order to improve error +handling. We recognize the end of the mailbox list only at the end of the +value or at a ';' (the group terminator). This is so that we can turn +invalid mailboxes into InvalidMailbox tokens and continue parsing any +remaining valid mailboxes. We also allow all mailbox entries to be null, +and this condition is handled appropriately at a higher level. + +""" +def get_group_list(value: str) -> tuple[GroupList, str]: + """group-list = mailbox-list / CFWS / obs-group-list +obs-group-list = 1*([CFWS] ",") [CFWS] + +""" +def get_group(value: str) -> tuple[Group, str]: + """group = display-name ":" [group-list] ";" [CFWS] + + """ +def get_address(value: str) -> tuple[Address, str]: + """address = mailbox / group + +Note that counter-intuitively, an address can be either a single address or +a list of addresses (a group). This is why the returned Address object has +a 'mailboxes' attribute which treats a single address as a list of length +one. When you need to differentiate between to two cases, extract the single +element, which is either a mailbox or a group token. + +""" +def get_address_list(value: str) -> tuple[AddressList, str]: + """address_list = (address *("," address)) / obs-addr-list + obs-addr-list = *([CFWS] ",") address *("," [address / CFWS]) + +We depart from the formal grammar here by continuing to parse until the end +of the input, assuming the input to be entirely composed of an +address-list. This is always true in email parsing, and allows us +to skip invalid addresses to parse additional valid ones. 
+ +""" +def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: + """no-fold-literal = "[" *dtext "]" + """ +def get_msg_id(value: str) -> tuple[MsgID, str]: + """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS] +id-left = dot-atom-text / obs-id-left +id-right = dot-atom-text / no-fold-literal / obs-id-right +no-fold-literal = "[" *dtext "]" +""" +def parse_message_id(value: str) -> MessageID: + """message-id = "Message-ID:" msg-id CRLF + """ +def parse_mime_version(value: str) -> MIMEVersion: + """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS] + + """ +def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: + """Read everything up to the next ';'. + +This is outside the formal grammar. The InvalidParameter TokenList that is +returned acts like a Parameter, but the data attributes are None. + +""" +def get_ttext(value: str) -> tuple[ValueTerminal, str]: + """ttext = + +We allow any non-TOKEN_ENDS in ttext, but add defects to the token's +defects list if we find non-ttext characters. We also register defects for +*any* non-printables even though the RFC doesn't exclude all of them, +because we follow the spirit of RFC 5322. + +""" +def get_token(value: str) -> tuple[Token, str]: + """token = [CFWS] 1*ttext [CFWS] + +The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or +tspecials. We also exclude tabs even though the RFC doesn't. + +The RFC implies the CFWS but is not explicit about it in the BNF. + +""" +def get_attrtext(value: str) -> tuple[ValueTerminal, str]: + """attrtext = 1*(any non-ATTRIBUTE_ENDS character) + +We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the +token's defects list if we find non-attrtext characters. We also register +defects for *any* non-printables even though the RFC doesn't exclude all of +them, because we follow the spirit of RFC 5322. + +""" +def get_attribute(value: str) -> tuple[Attribute, str]: + """[CFWS] 1*attrtext [CFWS] + +This version of the BNF makes the CFWS explicit, and as usual we use a +value terminal for the actual run of characters. The RFC equivalent of +attrtext is the token characters, with the subtraction of '*', "'", and '%'. +We include tab in the excluded set just as we do for token. + +""" +def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: + """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%') + +This is a special parsing routine so that we get a value that +includes % escapes as a single string (which we decode as a single +string later). + +""" +def get_extended_attribute(value: str) -> tuple[Attribute, str]: + """[CFWS] 1*extended_attrtext [CFWS] + +This is like the non-extended version except we allow % characters, so that +we can pick up an encoded value as a single string. + +""" +def get_section(value: str) -> tuple[Section, str]: + """'*' digits + +The formal BNF is more complicated because leading 0s are not allowed. We +check for that and add a defect. We also assume no CFWS is allowed between +the '*' and the digits, though the RFC is not crystal clear on that. +The caller should already have dealt with leading CFWS. + +""" +def get_value(value: str) -> tuple[Value, str]: + """quoted-string / attribute + + """ +def get_parameter(value: str) -> tuple[Parameter, str]: + """attribute [section] ["*"] [CFWS] "=" value + +The CFWS is implied by the RFC but not made explicit in the BNF. This +simplified form of the BNF from the RFC is made to conform with the RFC BNF +through some extra checks. 
We do it this way because it makes both error +recovery and working with the resulting parse tree easier. +""" +def parse_mime_parameters(value: str) -> MimeParameters: + """parameter *( ";" parameter ) + +That BNF is meant to indicate this routine should only be called after +finding and handling the leading ';'. There is no corresponding rule in +the formal RFC grammar, but it is more convenient for us for the set of +parameters to be treated as its own TokenList. + +This is 'parse' routine because it consumes the remaining value, but it +would never be called to parse a full header. Instead it is called to +parse everything after the non-parameter value of a specific MIME header. + +""" +def parse_content_type_header(value: str) -> ContentType: + """maintype "/" subtype *( ";" parameter ) + +The maintype and substype are tokens. Theoretically they could +be checked against the official IANA list + x-token, but we +don't do that. +""" +def parse_content_disposition_header(value: str) -> ContentDisposition: + """disposition-type *( ";" parameter ) + + """ +def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: + """mechanism + + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi index 0fb890d424b10..c7877e8a25c4d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi @@ -1,3 +1,7 @@ +"""Policy framework for the email package. + +Allows fine grained feature control of how the package parses and emits data. +""" from abc import ABCMeta, abstractmethod from email.errors import MessageDefect from email.header import Header @@ -18,6 +22,26 @@ class _MessageFactory(Protocol[_MessageT]): # assume that the __init__ arguments and attributes of _PolicyBase are # the same as those of Policy. class _PolicyBase(Generic[_MessageT_co]): + """Policy Object basic framework. + +This class is useless unless subclassed. A subclass should define +class attributes with defaults for any values that are to be +managed by the Policy object. The constructor will then allow +non-default values to be set for these attributes at instance +creation time. The instance will be callable, taking these same +attributes keyword arguments, and returning a new instance +identical to the called instance except for those values changed +by the keyword arguments. Instances may be added, yielding new +instances with any non-default values from the right hand +operand overriding those in the left hand operand. That is, + + A + B == A() + +The repr of an instance can be used to reconstruct the object +if and only if the repr of the values can be used to reconstruct +those values. + +""" max_line_length: int | None linesep: str cte_type: str @@ -38,7 +62,12 @@ class _PolicyBase(Generic[_MessageT_co]): message_factory: _MessageFactory[_MessageT_co] | None = None, # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = True, - ) -> None: ... + ) -> None: + """Create new Policy, possibly overriding some defaults. + +See class docstring for a list of overridable attributes. + +""" def clone( self, *, @@ -50,31 +79,271 @@ class _PolicyBase(Generic[_MessageT_co]): message_factory: _MessageFactory[_MessageT_co] | None = ..., # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 verify_generated_headers: bool = ..., - ) -> Self: ... - def __add__(self, other: Policy) -> Self: ... 
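Since clone() and addition never mutate the original policy, they compose naturally; a short sketch using the concrete policies shipped in email.policy:

    from email import policy

    # clone() returns a new policy with only the named attributes changed.
    long_lines = policy.default.clone(max_line_length=120)

    # Addition keeps the left operand's values and overrides them with the
    # right operand's non-default values, per the __add__ docstring above.
    strict_smtp = policy.SMTP + policy.strict
    print(repr(strict_smtp.linesep), strict_smtp.raise_on_defect)   # '\r\n' True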
+ ) -> Self: + """Return a new instance with specified attributes changed. + +The new instance has the same attribute values as the current object, +except for the changes passed in as keyword arguments. + +""" + def __add__(self, other: Policy) -> Self: + """Non-default values from right operand override those from left. + +The object returned is a new instance of the subclass. + +""" class Policy(_PolicyBase[_MessageT_co], metaclass=ABCMeta): + """Controls for how messages are interpreted and formatted. + +Most of the classes and many of the methods in the email package accept +Policy objects as parameters. A Policy object contains a set of values and +functions that control how input is interpreted and how output is rendered. +For example, the parameter 'raise_on_defect' controls whether or not an RFC +violation results in an error being raised or not, while 'max_line_length' +controls the maximum length of output lines when a Message is serialized. + +Any valid attribute may be overridden when a Policy is created by passing +it as a keyword argument to the constructor. Policy objects are immutable, +but a new Policy object can be created with only certain values changed by +calling the Policy instance with keyword arguments. Policy objects can +also be added, producing a new Policy object in which the non-default +attributes set in the right hand operand overwrite those specified in the +left operand. + +Settable attributes: + +raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + +linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + +cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + +max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + +mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + +message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + +verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. +""" # Every Message object has a `defects` attribute, so the following # methods will work for any Message object. - def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... - def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: ... - def header_max_count(self, name: str) -> int | None: ... + def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: + """Based on policy, either raise defect or call register_defect. + + handle_defect(obj, defect) + +defect should be a Defect subclass, but in any case must be an +Exception subclass. obj is the object on which the defect should be +registered if it is not raised. If the raise_on_defect is True, the +defect is raised as an error, otherwise the object and the defect are +passed to register_defect. 
+ +This method is intended to be called by parsers that discover defects. +The email package parsers always call it with Defect instances. + +""" + def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: + """Record 'defect' on 'obj'. + +Called by handle_defect if raise_on_defect is False. This method is +part of the Policy API so that Policy subclasses can implement custom +defect handling. The default implementation calls the append method of +the defects attribute of obj. The objects used by the email package by +default that get passed to this method will always have a defects +attribute with an append method. + +""" + def header_max_count(self, name: str) -> int | None: + """Return the maximum allowed number of headers named 'name'. + +Called when a header is added to a Message object. If the returned +value is not 0 or None, and there are already a number of headers with +the name 'name' equal to the value returned, a ValueError is raised. + +Because the default behavior of Message's __setitem__ is to append the +value to the list of headers, it is easy to create duplicate headers +without realizing it. This method allows certain headers to be limited +in the number of instances of that header that may be added to a +Message programmatically. (The limit is not observed by the parser, +which will faithfully produce as many headers as exist in the message +being parsed.) + +The default implementation returns None for all header names. +""" @abstractmethod - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: + """Given a list of linesep terminated strings constituting the lines of +a single header, return the (name, value) tuple that should be stored +in the model. The input lines should retain their terminating linesep +characters. The lines passed in by the email package may contain +surrogateescaped binary data. +""" @abstractmethod - def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + """Given the header name and the value provided by the application +program, return the (name, value) that should be stored in the model. +""" @abstractmethod - def header_fetch_parse(self, name: str, value: str) -> str: ... + def header_fetch_parse(self, name: str, value: str) -> str: + """Given the header name and the value from the model, return the value +to be returned to the application program that is requesting that +header. The value passed in by the email package may contain +surrogateescaped binary data if the lines were parsed by a BytesParser. +The returned value should not contain any surrogateescaped data. + +""" @abstractmethod - def fold(self, name: str, value: str) -> str: ... + def fold(self, name: str, value: str) -> str: + """Given the header name and the value from the model, return a string +containing linesep characters that implement the folding of the header +according to the policy controls. The value passed in by the email +package may contain surrogateescaped binary data if the lines were +parsed by a BytesParser. The returned value should not contain any +surrogateescaped data. + +""" @abstractmethod - def fold_binary(self, name: str, value: str) -> bytes: ... 
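The effect of raise_on_defect on handle_defect() is easiest to see when parsing a slightly malformed message; a sketch (the exact defect classes recorded can vary):

    from email import errors, message_from_string, policy

    raw = "Subject: hello\nnot-a-header-line\n\nbody\n"

    # Default policy: the defect is registered on the message, not raised.
    msg = message_from_string(raw, policy=policy.default)
    print([type(d).__name__ for d in msg.defects])

    # raise_on_defect=True: handle_defect() raises the defect instead.
    try:
        message_from_string(raw, policy=policy.strict)
    except errors.MessageDefect as exc:
        print("raised:", type(exc).__name__)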
+ def fold_binary(self, name: str, value: str) -> bytes: + """Given the header name and the value from the model, return binary +data containing linesep characters that implement the folding of the +header according to the policy controls. The value passed in by the +email package may contain surrogateescaped binary data. + +""" class Compat32(Policy[_MessageT_co]): - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... - def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... - def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] - def fold(self, name: str, value: str) -> str: ... - def fold_binary(self, name: str, value: str) -> bytes: ... + """Controls for how messages are interpreted and formatted. + +Most of the classes and many of the methods in the email package accept +Policy objects as parameters. A Policy object contains a set of values and +functions that control how input is interpreted and how output is rendered. +For example, the parameter 'raise_on_defect' controls whether or not an RFC +violation results in an error being raised or not, while 'max_line_length' +controls the maximum length of output lines when a Message is serialized. + +Any valid attribute may be overridden when a Policy is created by passing +it as a keyword argument to the constructor. Policy objects are immutable, +but a new Policy object can be created with only certain values changed by +calling the Policy instance with keyword arguments. Policy objects can +also be added, producing a new Policy object in which the non-default +attributes set in the right hand operand overwrite those specified in the +left operand. + +Settable attributes: + +raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + +linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + +cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + +max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + +mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + +message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + +verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. +This particular policy is the backward compatibility Policy. It +replicates the behavior of the email package version 5.1. +""" + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: + """Given a list of linesep terminated strings constituting the lines of +a single header, return the (name, value) tuple that should be stored +in the model. The input lines should retain their terminating linesep +characters. The lines passed in by the email package may contain +surrogateescaped binary data. 
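The practical difference between compat32 and the modern policies shows up as soon as headers are read back; a sketch comparing the two on an RFC 2047 encoded Subject (the sample header is made up):

    from email import message_from_string, policy

    raw = "Subject: =?utf-8?q?caf=C3=A9?=\n\nbody\n"

    legacy = message_from_string(raw)                       # compat32 is the default
    modern = message_from_string(raw, policy=policy.default)

    print(legacy["Subject"])   # the raw encoded word, left undecoded
    print(modern["Subject"])   # café, decoded by the modern header machinery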
+The name is parsed as everything up to the ':' and returned unmodified. +The value is determined by stripping leading whitespace off the +remainder of the first line joined with all subsequent lines, and +stripping any trailing carriage return or linefeed characters. + +""" + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + """Given the header name and the value provided by the application +program, return the (name, value) that should be stored in the model. +The name and value are returned unmodified. +""" + def header_fetch_parse(self, name: str, value: str) -> str | Header: # type: ignore[override] + """Given the header name and the value from the model, return the value +to be returned to the application program that is requesting that +header. The value passed in by the email package may contain +surrogateescaped binary data if the lines were parsed by a BytesParser. +The returned value should not contain any surrogateescaped data. + +If the value contains binary data, it is converted into a Header object +using the unknown-8bit charset. Otherwise it is returned unmodified. +""" + def fold(self, name: str, value: str) -> str: + """Given the header name and the value from the model, return a string +containing linesep characters that implement the folding of the header +according to the policy controls. The value passed in by the email +package may contain surrogateescaped binary data if the lines were +parsed by a BytesParser. The returned value should not contain any +surrogateescaped data. + +Headers are folded using the Header folding algorithm, which preserves +existing line breaks in the value, and wraps each resulting line to the +max_line_length. Non-ASCII binary data are CTE encoded using the +unknown-8bit charset. + +""" + def fold_binary(self, name: str, value: str) -> bytes: + """Given the header name and the value from the model, return binary +data containing linesep characters that implement the folding of the +header according to the policy controls. The value passed in by the +email package may contain surrogateescaped binary data. + +Headers are folded using the Header folding algorithm, which preserves +existing line breaks in the value, and wraps each resulting line to the +max_line_length. If cte_type is 7bit, non-ascii binary data is CTE +encoded using the unknown-8bit charset. Otherwise the original source +header is used, with its existing line breaks and/or binary data. + +""" compat32: Compat32[Message[str, str]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi index 563cd7f669a22..43b7cc7aea562 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi @@ -1,13 +1,56 @@ +"""Base64 content transfer encoding per RFCs 2045-2047. + +This module handles the content transfer encoding method defined in RFC 2045 +to encode arbitrary 8-bit data using the three 8-bit bytes in four 7-bit +characters encoding known as Base64. + +It is used in the MIME standards for email to attach images, audio, and text +using some 8-bit character sets to messages. + +This module provides an interface to encode and decode both headers and bodies +with Base64 encoding. + +RFC 2045 defines a method for including character set information in an +'encoded-word' in a header. This method is commonly used for 8-bit real names +in To:, From:, Cc:, etc. fields, as well as Subject: lines. 
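A small sketch of the helpers below, the dumb encoding layer that higher-level code such as email.header builds on (the exact encoded output is abbreviated):

    from email import base64mime

    word = base64mime.header_encode("Grüße", charset="utf-8")
    print(word)                                 # '=?utf-8?b?...?=' encoded word
    print(base64mime.header_length(b"hello"))   # 8, the length once base64-encoded
    print(base64mime.decode("SGVsbG8h"))        # b'Hello!'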
+ +This module does not do the line wrapping or end-of-line character conversion +necessary for proper internationalized headers; it only does dumb encoding and +decoding. To deal with the various line wrapping issues, use the email.header +module. +""" __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] from _typeshed import ReadableBuffer -def header_length(bytearray: str | bytes | bytearray) -> int: ... -def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... +def header_length(bytearray: str | bytes | bytearray) -> int: + """Return the length of s when it is encoded with base64. +""" +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: + """Encode a single header line with Base64 encoding in a given charset. + +charset names the character set to use to encode the header. It defaults +to iso-8859-1. Base64 encoding is defined in RFC 2045. +""" # First argument should be a buffer that supports slicing and len(). -def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... -def decode(string: str | ReadableBuffer) -> bytes: ... +def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: + """Encode a string with base64. + +Each line will be wrapped at, at most, maxlinelen characters (defaults to +76 characters). + +Each line of encoded text will end with eol, which defaults to "\\n". Set +this to "\\r\\n" if you will be using the result of this function directly +in an email. +""" +def decode(string: str | ReadableBuffer) -> bytes: + """Decode a raw base64 string, returning a bytes object. + +This function does not parse a full MIME header value encoded with +base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high +level email.header class for that functionality. +""" body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi index e1930835bbd11..d78abd8f91d8a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi @@ -16,6 +16,49 @@ ALIASES: Final[dict[str, str]] CODEC_MAP: Final[dict[str, str | None]] # undocumented class Charset: + """Map character sets to their email properties. + +This class provides information about the requirements imposed on email +for a specific character set. It also provides convenience routines for +converting between character sets, given the availability of the +applicable codecs. Given a character set, it will do its best to provide +information on how to use that character set in an email in an +RFC-compliant way. + +Certain character sets must be encoded with quoted-printable or base64 +when used in email headers or bodies. Certain character sets must be +converted outright, and are not allowed in email. Instances of this +module expose the following information about a character set: + +input_charset: The initial character set specified. Common aliases + are converted to their 'official' email names (e.g. latin_1 + is converted to iso-8859-1). Defaults to 7-bit us-ascii. + +header_encoding: If the character set must be encoded before it can be + used in an email header, this attribute will be set to + charset.QP (for quoted-printable), charset.BASE64 (for + base64 encoding), or charset.SHORTEST for the shortest of + QP or BASE64 encoding. Otherwise, it will be None. 
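A Charset instance bundles these per-charset decisions; a brief sketch (the exact encoded form of the header depends on whether quoted-printable or base64 turns out shorter):

    from email.charset import Charset

    cs = Charset("utf-8")
    print(cs.get_output_charset())      # utf-8
    print(cs.get_body_encoding())       # base64
    print(cs.header_encode("Grüße"))    # an RFC 2047 encoded word, e.g. '=?utf-8?...?='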
+ +body_encoding: Same as header_encoding, but describes the encoding for the + mail message's body, which indeed may be different than the + header encoding. charset.SHORTEST is not allowed for + body_encoding. + +output_charset: Some character sets must be converted before they can be + used in email headers or bodies. If the input_charset is + one of them, this attribute will contain the name of the + charset output will be converted to. Otherwise, it will + be None. + +input_codec: The name of the Python codec used to convert the + input_charset to Unicode. If no conversion codec is + necessary, this attribute will be None. + +output_codec: The name of the Python codec used to convert Unicode + to the output_charset. If no conversion codec is necessary, + this attribute will have the same value as the input_codec. +""" input_charset: str header_encoding: int body_encoding: int @@ -23,12 +66,63 @@ class Charset: input_codec: str | None output_codec: str | None def __init__(self, input_charset: str = "us-ascii") -> None: ... - def get_body_encoding(self) -> str | Callable[[Message], None]: ... - def get_output_charset(self) -> str | None: ... - def header_encode(self, string: str) -> str: ... - def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: ... + def get_body_encoding(self) -> str | Callable[[Message], None]: + """Return the content-transfer-encoding used for body encoding. + +This is either the string 'quoted-printable' or 'base64' depending on +the encoding used, or it is a function in which case you should call +the function with a single argument, the Message object being +encoded. The function should then set the Content-Transfer-Encoding +header itself to whatever is appropriate. + +Returns "quoted-printable" if self.body_encoding is QP. +Returns "base64" if self.body_encoding is BASE64. +Returns conversion function otherwise. +""" + def get_output_charset(self) -> str | None: + """Return the output character set. + +This is self.output_charset if that is not None, otherwise it is +self.input_charset. +""" + def header_encode(self, string: str) -> str: + """Header-encode a string by converting it first to bytes. + +The type of encoding (base64 or quoted-printable) will be based on +this charset's `header_encoding`. + +:param string: A unicode string for the header. It must be possible + to encode this string to bytes using the character set's + output codec. +:return: The encoded string, with RFC 2047 chrome. +""" + def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: + """Header-encode a string by converting it first to bytes. + +This is similar to `header_encode()` except that the string is fit +into maximum line lengths as given by the argument. + +:param string: A unicode string for the header. It must be possible + to encode this string to bytes using the character set's + output codec. +:param maxlengths: Maximum line length iterator. Each element + returned from this iterator will provide the next maximum line + length. This parameter is used as an argument to built-in next() + and should never be exhausted. The maximum line lengths should + not count the RFC 2047 chrome. These line lengths are only a + hint; the splitter does the best it can. +:return: Lines of encoded strings, each with RFC 2047 chrome. +""" @overload - def body_encode(self, string: None) -> None: ... + def body_encode(self, string: None) -> None: + """Body-encode a string by converting it first to bytes. 
+ +The type of encoding (base64 or quoted-printable) will be based on +self.body_encoding. If body_encoding is None, we assume the +output charset is a 7bit encoding, so re-encoding the decoded +string using the ascii codec produces the correct string version +of the content. +""" @overload def body_encode(self, string: str | bytes) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -37,6 +131,39 @@ class Charset: def add_charset( charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None -) -> None: ... -def add_alias(alias: str, canonical: str) -> None: ... -def add_codec(charset: str, codecname: str) -> None: ... +) -> None: + """Add character set properties to the global registry. + +charset is the input character set, and must be the canonical name of a +character set. + +Optional header_enc and body_enc is either charset.QP for +quoted-printable, charset.BASE64 for base64 encoding, charset.SHORTEST for +the shortest of qp or base64 encoding, or None for no encoding. SHORTEST +is only valid for header_enc. It describes how message headers and +message bodies in the input charset are to be encoded. Default is no +encoding. + +Optional output_charset is the character set that the output should be +in. Conversions will proceed from input charset, to Unicode, to the +output charset when the method Charset.convert() is called. The default +is to output in the same character set as the input. + +Both input_charset and output_charset must have Unicode codec entries in +the module's charset-to-codec mapping; use add_codec(charset, codecname) +to add codecs the module does not know about. See the codecs module's +documentation for more information. +""" +def add_alias(alias: str, canonical: str) -> None: + """Add a character set alias. + +alias is the alias name, e.g. latin-1 +canonical is the character set's canonical name, e.g. iso-8859-1 +""" +def add_codec(charset: str, codecname: str) -> None: + """Add a codec that map characters in the given charset to/from Unicode. + +charset is the canonical name of a character set. codecname is the name +of a Python codec, as appropriate for the second argument to the unicode() +built-in, or to the encode() method of a Unicode string. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi index 55223bdc07621..2b1768be7058b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi @@ -1,8 +1,22 @@ +"""Encodings and related functions. +""" from email.message import Message __all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] -def encode_base64(msg: Message) -> None: ... -def encode_quopri(msg: Message) -> None: ... -def encode_7or8bit(msg: Message) -> None: ... -def encode_noop(msg: Message) -> None: ... +def encode_base64(msg: Message) -> None: + """Encode the message's payload in Base64. + +Also, add an appropriate Content-Transfer-Encoding header. +""" +def encode_quopri(msg: Message) -> None: + """Encode the message's payload in quoted-printable. + +Also, add an appropriate Content-Transfer-Encoding header. +""" +def encode_7or8bit(msg: Message) -> None: + """Set the Content-Transfer-Encoding header to 7bit or 8bit. +""" +def encode_noop(msg: Message) -> None: + """Do nothing. 
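These encoder hooks are normally installed via the _encoder argument of the MIME classes, which call them once the payload has been set; a brief sketch:

    from email import encoders
    from email.mime.application import MIMEApplication

    # MIMEApplication uses encode_base64 by default; it is passed here explicitly.
    part = MIMEApplication(b"\x00\x01binary payload", _encoder=encoders.encode_base64)
    print(part["Content-Transfer-Encoding"])   # base64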
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi index b501a58665560..d1b98bb98a224 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi @@ -1,42 +1,98 @@ +"""email package exception classes. +""" import sys -class MessageError(Exception): ... -class MessageParseError(MessageError): ... -class HeaderParseError(MessageParseError): ... -class BoundaryError(MessageParseError): ... -class MultipartConversionError(MessageError, TypeError): ... -class CharsetError(MessageError): ... +class MessageError(Exception): + """Base class for errors in the email package. +""" +class MessageParseError(MessageError): + """Base class for message parsing errors. +""" +class HeaderParseError(MessageParseError): + """Error while parsing headers. +""" +class BoundaryError(MessageParseError): + """Couldn't find terminating boundary. +""" +class MultipartConversionError(MessageError, TypeError): + """Conversion to a multipart is prohibited. +""" +class CharsetError(MessageError): + """An illegal charset was given. +""" # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -class HeaderWriteError(MessageError): ... +class HeaderWriteError(MessageError): + """Error while writing headers. +""" class MessageDefect(ValueError): + """Base class for a message defect. +""" def __init__(self, line: str | None = None) -> None: ... -class NoBoundaryInMultipartDefect(MessageDefect): ... -class StartBoundaryNotFoundDefect(MessageDefect): ... -class FirstHeaderLineIsContinuationDefect(MessageDefect): ... -class MisplacedEnvelopeHeaderDefect(MessageDefect): ... -class MultipartInvariantViolationDefect(MessageDefect): ... -class InvalidMultipartContentTransferEncodingDefect(MessageDefect): ... -class UndecodableBytesDefect(MessageDefect): ... -class InvalidBase64PaddingDefect(MessageDefect): ... -class InvalidBase64CharactersDefect(MessageDefect): ... -class InvalidBase64LengthDefect(MessageDefect): ... -class CloseBoundaryNotFoundDefect(MessageDefect): ... -class MissingHeaderBodySeparatorDefect(MessageDefect): ... +class NoBoundaryInMultipartDefect(MessageDefect): + """A message claimed to be a multipart but had no boundary parameter. +""" +class StartBoundaryNotFoundDefect(MessageDefect): + """The claimed start boundary was never found. +""" +class FirstHeaderLineIsContinuationDefect(MessageDefect): + """A message had a continuation line as its first header line. +""" +class MisplacedEnvelopeHeaderDefect(MessageDefect): + """A 'Unix-from' header was found in the middle of a header block. +""" +class MultipartInvariantViolationDefect(MessageDefect): + """A message claimed to be a multipart but no subparts were found. +""" +class InvalidMultipartContentTransferEncodingDefect(MessageDefect): + """An invalid content transfer encoding was set on the multipart itself. +""" +class UndecodableBytesDefect(MessageDefect): + """Header contained bytes that could not be decoded +""" +class InvalidBase64PaddingDefect(MessageDefect): + """base64 encoded sequence had an incorrect length +""" +class InvalidBase64CharactersDefect(MessageDefect): + """base64 encoded sequence had characters not in base64 alphabet +""" +class InvalidBase64LengthDefect(MessageDefect): + """base64 encoded sequence had invalid length (1 mod 4) +""" +class CloseBoundaryNotFoundDefect(MessageDefect): + """A start boundary was found, but not the corresponding close boundary. 
+""" +class MissingHeaderBodySeparatorDefect(MessageDefect): + """Found line with no leading whitespace and no colon before blank line. +""" MalformedHeaderDefect = MissingHeaderBodySeparatorDefect -class HeaderDefect(MessageDefect): ... -class InvalidHeaderDefect(HeaderDefect): ... -class HeaderMissingRequiredValue(HeaderDefect): ... +class HeaderDefect(MessageDefect): + """Base class for a header defect. +""" +class InvalidHeaderDefect(HeaderDefect): + """Header is not valid, message gives details. +""" +class HeaderMissingRequiredValue(HeaderDefect): + """A header that must have a value had none +""" class NonPrintableDefect(HeaderDefect): + """ASCII characters outside the ascii-printable range found +""" def __init__(self, non_printables: str | None) -> None: ... -class ObsoleteHeaderDefect(HeaderDefect): ... -class NonASCIILocalPartDefect(HeaderDefect): ... +class ObsoleteHeaderDefect(HeaderDefect): + """Header uses syntax declared obsolete by RFC 5322 +""" +class NonASCIILocalPartDefect(HeaderDefect): + """local_part contains non-ASCII characters +""" if sys.version_info >= (3, 10): - class InvalidDateDefect(HeaderDefect): ... + class InvalidDateDefect(HeaderDefect): + """Header has unparsable or invalid date +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi index d9279e9cd996d..8a6b7c881047f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi @@ -1,3 +1,19 @@ +"""FeedParser - An email feed parser. + +The feed parser implements an interface for incrementally parsing an email +message, line by line. This has advantages for certain applications, such as +those reading email messages off a socket. + +FeedParser.feed() is the primary interface for pushing new data into the +parser. It returns when there's nothing more it can do with the available +data. When you have no more data to push into the parser, call .close(). +This completes the parsing and returns the root message object. + +The other advantage of this parser is that it will never raise a parsing +exception. Instead, when it finds something unexpected, it adds a 'defect' to +the current message. Defects are just instances that live on the message +object's .defects attribute. +""" from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -7,16 +23,38 @@ from typing import Generic, overload __all__ = ["FeedParser", "BytesFeedParser"] class FeedParser(Generic[_MessageT]): + """A feed-style parser of email. +""" @overload - def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: + """_factory is called with no arguments to create a new message obj + +The policy keyword specifies a policy object that controls a number of +aspects of the parser's operation. The default policy maintains +backward compatibility. + +""" @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... - def feed(self, data: str) -> None: ... - def close(self) -> _MessageT: ... + def feed(self, data: str) -> None: + """Push more data into the parser. +""" + def close(self) -> _MessageT: + """Parse all remaining data and return the root message object. 
+""" class BytesFeedParser(FeedParser[_MessageT]): + """Like FeedParser, but feed accepts bytes. +""" @overload - def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: ... + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: + """_factory is called with no arguments to create a new message obj + +The policy keyword specifies a policy object that controls a number of +aspects of the parser's operation. The default policy maintains +backward compatibility. + +""" @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... def feed(self, data: bytes | bytearray) -> None: ... # type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi index d30e686299fab..9adc28b4c7b42 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi @@ -1,3 +1,5 @@ +"""Classes to generate plain text from a message object tree. +""" from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy @@ -10,6 +12,11 @@ __all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] _MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Any) class Generator(Generic[_MessageT]): + """Generates output from a Message object tree. + +This basic generator writes the message to the given file object as plain +text. +""" maxheaderlen: int | None policy: Policy[_MessageT] | None @overload @@ -20,7 +27,29 @@ class Generator(Generic[_MessageT]): maxheaderlen: int | None = None, *, policy: None = None, - ) -> None: ... + ) -> None: + """Create the generator for message flattening. + +outfp is the output file-like object for writing the message to. It +must have a write() method. + +Optional mangle_from_ is a flag that, when True (the default if policy +is not set), escapes From_ lines in the body of the message by putting +a '>' in front of them. + +Optional maxheaderlen specifies the longest length for a non-continued +header. When a header line is longer (in characters, with tabs +expanded to 8 spaces) than maxheaderlen, the header will split as +defined in the Header class. Set maxheaderlen to zero to disable +header wrapping. The default is 78, as recommended (but not required) +by RFC 2822. + +The policy keyword specifies a policy object that controls a number of +aspects of the generator's operation. If no policy is specified, +the policy associated with the Message object passed to the +flatten method is used. + +""" @overload def __init__( self, @@ -31,10 +60,39 @@ class Generator(Generic[_MessageT]): policy: Policy[_MessageT], ) -> None: ... def write(self, s: str) -> None: ... - def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: ... - def clone(self, fp: SupportsWrite[str]) -> Self: ... + def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: + """Print the message object tree rooted at msg to the output file +specified when the Generator instance was created. + +unixfrom is a flag that forces the printing of a Unix From_ delimiter +before the first object in the message tree. If the original message +has no From_ delimiter, a 'standard' one is crafted. By default, this +is False to inhibit the printing of any From_ delimiter. 
+ +Note that for subobjects, no From_ line is printed. + +linesep specifies the characters used to indicate a new line in +the output. The default value is determined by the policy specified +when the Generator instance was created or, if none was specified, +from the policy associated with the msg. + +""" + def clone(self, fp: SupportsWrite[str]) -> Self: + """Clone this generator with the exact same options. +""" class BytesGenerator(Generator[_MessageT]): + """Generates a bytes version of a Message object tree. + +Functionally identical to the base Generator except that the output is +bytes and not string. When surrogates were used in the input to encode +bytes, these are decoded back to bytes for output. If the policy has +cte_type set to 7bit, then the message is transformed such that the +non-ASCII bytes are properly content transfer encoded, using the charset +unknown-8bit. + +The outfp object must accept bytes in its write method. +""" @overload def __init__( self: BytesGenerator[Any], # The Policy of the message is used. @@ -43,7 +101,29 @@ class BytesGenerator(Generator[_MessageT]): maxheaderlen: int | None = None, *, policy: None = None, - ) -> None: ... + ) -> None: + """Create the generator for message flattening. + +outfp is the output file-like object for writing the message to. It +must have a write() method. + +Optional mangle_from_ is a flag that, when True (the default if policy +is not set), escapes From_ lines in the body of the message by putting +a '>' in front of them. + +Optional maxheaderlen specifies the longest length for a non-continued +header. When a header line is longer (in characters, with tabs +expanded to 8 spaces) than maxheaderlen, the header will split as +defined in the Header class. Set maxheaderlen to zero to disable +header wrapping. The default is 78, as recommended (but not required) +by RFC 2822. + +The policy keyword specifies a policy object that controls a number of +aspects of the generator's operation. If no policy is specified, +the policy associated with the Message object passed to the +flatten method is used. + +""" @overload def __init__( self, @@ -55,6 +135,11 @@ class BytesGenerator(Generator[_MessageT]): ) -> None: ... class DecodedGenerator(Generator[_MessageT]): + """Generates a text representation of a message. + +Like the Generator base class, except that non-text parts are substituted +with a format string representing the part. +""" @overload def __init__( self: DecodedGenerator[Any], # The Policy of the message is used. @@ -64,7 +149,28 @@ class DecodedGenerator(Generator[_MessageT]): fmt: str | None = None, *, policy: None = None, - ) -> None: ... + ) -> None: + """Like Generator.__init__() except that an additional optional +argument is allowed. + +Walks through all subparts of a message. If the subpart is of main +type 'text', then it prints the decoded payload of the subpart. + +Otherwise, fmt is a format string that is used instead of the message +payload. 
fmt is expanded with the following keywords (in +%(keyword)s format): + +type : Full MIME type of the non-text part +maintype : Main MIME type of the non-text part +subtype : Sub-MIME type of the non-text part +filename : Filename of the non-text part +description: Description associated with the non-text part +encoding : Content transfer encoding of the non-text part + +The default value for fmt is None, meaning + +[Non-text (%(type)s) part of message omitted, filename %(filename)s] +""" @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi index a26bbb516e096..f047800948109 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi @@ -1,3 +1,5 @@ +"""Header encoding and decoding functionality. +""" from collections.abc import Iterable from email.charset import Charset from typing import Any, ClassVar @@ -13,9 +15,87 @@ class Header: header_name: str | None = None, continuation_ws: str = " ", errors: str = "strict", - ) -> None: ... - def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... - def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... + ) -> None: + """Create a MIME-compliant header that can contain many character sets. + +Optional s is the initial header value. If None, the initial header +value is not set. You can later append to the header with .append() +method calls. s may be a byte string or a Unicode string, but see the +.append() documentation for semantics. + +Optional charset serves two purposes: it has the same meaning as the +charset argument to the .append() method. It also sets the default +character set for all subsequent .append() calls that omit the charset +argument. If charset is not provided in the constructor, the us-ascii +charset is used both as s's initial charset and as the default for +subsequent .append() calls. + +The maximum line length can be specified explicitly via maxlinelen. For +splitting the first line to a shorter value (to account for the field +header which isn't included in s, e.g. 'Subject') pass in the name of +the field in header_name. The default maxlinelen is 78 as recommended +by RFC 2822. + +continuation_ws must be RFC 2822 compliant folding whitespace (usually +either a space or a hard tab) which will be prepended to continuation +lines. + +errors is passed through to the .append() call. +""" + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: + """Append a string to the MIME header. + +Optional charset, if given, should be a Charset instance or the name +of a character set (which will be converted to a Charset instance). A +value of None (the default) means that the charset given in the +constructor is used. + +s may be a byte string or a Unicode string. If it is a byte string +(i.e. isinstance(s, str) is false), then charset is the encoding of +that byte string, and a UnicodeError will be raised if the string +cannot be decoded with that charset. If s is a Unicode string, then +charset is a hint specifying the character set of the characters in +the string. In either case, when producing an RFC 2822 compliant +header using RFC 2047 rules, the string will be encoded using the +output codec of the charset. 
If the string cannot be encoded to the +output codec, a UnicodeError will be raised. + +Optional 'errors' is passed as the errors argument to the decode +call if s is a byte string. +""" + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: + """Encode a message header into an RFC-compliant format. + +There are many issues involved in converting a given string for use in +an email header. Only certain character sets are readable in most +email clients, and as header strings can only contain a subset of +7-bit ASCII, care must be taken to properly convert and encode (with +Base64 or quoted-printable) header strings. In addition, there is a +75-character length limit on any given encoded header field, so +line-wrapping must be performed, even with double-byte character sets. + +Optional maxlinelen specifies the maximum length of each generated +line, exclusive of the linesep string. Individual lines may be longer +than maxlinelen if a folding point cannot be found. The first line +will be shorter by the length of the header name plus ": " if a header +name was specified at Header construction time. The default value for +maxlinelen is determined at header construction time. + +Optional splitchars is a string containing characters which should be +given extra weight by the splitting algorithm during normal header +wrapping. This is in very rough support of RFC 2822's 'higher level +syntactic breaks': split points preceded by a splitchar are preferred +during line splitting, with the characters preferred in the order in +which they appear in the string. Space and tab may be included in the +string to indicate whether preference should be given to one over the +other as a split point when other split chars do not appear in the line +being split. Splitchars does not affect RFC 2047 encoded lines. + +Optional linesep is a string to be used to separate the lines of +the value. The default value is the most useful for typical +Python applications, but it can be set to \\r\\n to produce RFC-compliant +line separators when needed. +""" __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... def __ne__(self, value: object, /) -> bool: ... @@ -23,10 +103,42 @@ class Header: # decode_header() either returns list[tuple[str, None]] if the header # contains no encoded parts, or list[tuple[bytes, str | None]] if the header # contains at least one encoded part. -def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... +def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: + """Decode a message header value without converting charset. + +For historical reasons, this function may return either: + +1. A list of length 1 containing a pair (str, None). +2. A list of (bytes, charset) pairs containing each of the decoded + parts of the header. Charset is None for non-encoded parts of the header, + otherwise a lower-case string containing the name of the character set + specified in the encoded string. + +header may be a string that may or may not contain RFC2047 encoded words, +or it may be a Header object. + +An email.errors.HeaderParseError may be raised when certain decoding error +occurs (e.g. a base64 decoding exception). + +This function exists for backwards compatibility only. For new code, we +recommend using email.headerregistry.HeaderRegistry instead. 
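The legacy Header API round-trips through encode(), decode_header() and make_header(); a sketch (the encoded form is abbreviated and the sample text is made up):

    from email.header import Header, decode_header, make_header

    h = Header("Grüße aus Köln", charset="utf-8", header_name="Subject")
    wire = h.encode()                 # one or more '=?utf-8?...?=' encoded words
    parts = decode_header(wire)       # e.g. [(b'Gr\xc3\xbc...', 'utf-8')]
    print(str(make_header(parts)))    # Grüße aus Köln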
+""" def make_header( decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], maxlinelen: int | None = None, header_name: str | None = None, continuation_ws: str = " ", -) -> Header: ... +) -> Header: + """Create a Header from a sequence of pairs as returned by decode_header() + +decode_header() takes a header value string and returns a sequence of +pairs of the format (decoded_string, charset) where charset is the string +name of the character set. + +This function takes one of those sequence of pairs and returns a Header +instance. Optional maxlinelen, header_name, and continuation_ws are as in +the Header constructor. + +This function exists for backwards compatibility only, and is not +recommended for use in new code. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi index bea68307e0091..91109914e2d18 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi @@ -1,3 +1,8 @@ +"""Representing and manipulating email headers via custom objects. + +This module provides an implementation of the HeaderRegistry API. +The implementation is designed to flexibly follow RFC5322 rules. +""" import types from collections.abc import Iterable, Mapping from datetime import datetime as _datetime @@ -17,6 +22,37 @@ from typing import Any, ClassVar, Literal, Protocol, type_check_only from typing_extensions import Self class BaseHeader(str): + """Base class for message headers. + +Implements generic behavior and provides tools for subclasses. + +A subclass must define a classmethod named 'parse' that takes an unfolded +value string and a dictionary as its arguments. The dictionary will +contain one key, 'defects', initialized to an empty list. After the call +the dictionary must contain two additional keys: parse_tree, set to the +parse tree obtained from parsing the header, and 'decoded', set to the +string value of the idealized representation of the data from the value. +(That is, encoded words are decoded, and values that have canonical +representations are so represented.) + +The defects key is intended to collect parsing defects, which the message +parser will subsequently dispose of as appropriate. The parser should not, +insofar as practical, raise any errors. Defects should be added to the +list instead. The standard header parsers register defects for RFC +compliance issues, for obsolete RFC syntax, and for unrecoverable parsing +errors. + +The parse method may add additional keys to the dictionary. In this case +the subclass must define an 'init' method, which will be passed the +dictionary as its keyword arguments. The method should use (usually by +setting them as the value of similarly named attributes) and remove all the +extra keys added by its parse method, and then use super to call its parent +class with the remaining arguments and keywords. + +The subclass should also make sure that a 'max_count' attribute is defined +that is either None or 1. XXX: need to better define this API. + +""" # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) max_count: ClassVar[Literal[1] | None] @property @@ -25,12 +61,45 @@ class BaseHeader(str): def defects(self) -> tuple[MessageDefect, ...]: ... def __new__(cls, name: str, value: Any) -> Self: ... 
def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... - def fold(self, *, policy: Policy) -> str: ... + def fold(self, *, policy: Policy) -> str: + """Fold header according to policy. + +The parsed representation of the header is folded according to +RFC5322 rules, as modified by the policy. If the parse tree +contains surrogateescaped bytes, the bytes are CTE encoded using +the charset 'unknown-8bit". + +Any non-ASCII characters in the parse tree are CTE encoded using +charset utf-8. XXX: make this a policy setting. + +The returned value is an ASCII-only string possibly containing linesep +characters, and ending with a linesep character. The string includes +the header name and the ': ' separator. + +""" class UnstructuredHeader: max_count: ClassVar[Literal[1] | None] @staticmethod - def value_parser(value: str) -> UnstructuredTokenList: ... + def value_parser(value: str) -> UnstructuredTokenList: + """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + + obs-NO-WS-CTL is control characters except WSP/CR/LF. + +So, basically, we have printable runs, plus control characters or nulls in +the obsolete syntax, separated by whitespace. Since RFC 2047 uses the +obsolete syntax in its specification, but requires whitespace on either +side of the encoded words, I can see no reason to need to separate the +non-printable-non-whitespace from the printable runs if they occur, so we +parse this into xtext tokens separated by WSP tokens. + +Because an 'unstructured' value must by definition constitute the entire +value, this 'get' routine does not return a remaining value, only the +parsed TokenList. + +""" @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -38,12 +107,38 @@ class UniqueUnstructuredHeader(UnstructuredHeader): max_count: ClassVar[Literal[1]] class DateHeader: + """Header whose value consists of a single timestamp. + +Provides an additional attribute, datetime, which is either an aware +datetime using a timezone, or a naive datetime if the timezone +in the input string is -0000. Also accepts a datetime as input. +The 'value' attribute is the normalized form of the timestamp, +which means it is the output of format_datetime on the datetime. +""" max_count: ClassVar[Literal[1] | None] def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... @property def datetime(self) -> _datetime | None: ... @staticmethod - def value_parser(value: str) -> UnstructuredTokenList: ... + def value_parser(value: str) -> UnstructuredTokenList: + """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + + obs-NO-WS-CTL is control characters except WSP/CR/LF. + +So, basically, we have printable runs, plus control characters or nulls in +the obsolete syntax, separated by whitespace. Since RFC 2047 uses the +obsolete syntax in its specification, but requires whitespace on either +side of the encoded words, I can see no reason to need to separate the +non-printable-non-whitespace from the printable runs if they occur, so we +parse this into xtext tokens separated by WSP tokens. + +Because an 'unstructured' value must by definition constitute the entire +value, this 'get' routine does not return a remaining value, only the +parsed TokenList. 
+ +""" @classmethod def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... @@ -91,7 +186,10 @@ class MIMEVersionHeader: @property def minor(self) -> int | None: ... @staticmethod - def value_parser(value: str) -> MIMEVersion: ... + def value_parser(value: str) -> MIMEVersion: + """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS] + + """ @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -111,14 +209,23 @@ class ContentTypeHeader(ParameterizedMIMEHeader): @property def subtype(self) -> str: ... @staticmethod - def value_parser(value: str) -> ContentType: ... + def value_parser(value: str) -> ContentType: + """maintype "/" subtype *( ";" parameter ) + +The maintype and substype are tokens. Theoretically they could +be checked against the official IANA list + x-token, but we +don't do that. +""" class ContentDispositionHeader(ParameterizedMIMEHeader): # init is redefined but has the same signature as parent class, so is omitted from the stub @property def content_disposition(self) -> str | None: ... @staticmethod - def value_parser(value: str) -> ContentDisposition: ... + def value_parser(value: str) -> ContentDisposition: + """disposition-type *( ";" parameter ) + + """ class ContentTransferEncodingHeader: max_count: ClassVar[Literal[1]] @@ -128,14 +235,19 @@ class ContentTransferEncodingHeader: @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod - def value_parser(value: str) -> ContentTransferEncoding: ... + def value_parser(value: str) -> ContentTransferEncoding: + """mechanism + + """ class MessageIDHeader: max_count: ClassVar[Literal[1]] @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod - def value_parser(value: str) -> MessageID: ... + def value_parser(value: str) -> MessageID: + """message-id = "Message-ID:" msg-id CRLF + """ @type_check_only class _HeaderParser(Protocol): @@ -146,15 +258,39 @@ class _HeaderParser(Protocol): def parse(cls, value: str, kwds: dict[str, Any], /) -> None: ... class HeaderRegistry: + """A header_factory and header registry. +""" registry: dict[str, type[_HeaderParser]] base_class: type[BaseHeader] default_class: type[_HeaderParser] def __init__( self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True - ) -> None: ... - def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... + ) -> None: + """Create a header_factory that works with the Policy API. + +base_class is the class that will be the last class in the created +header class's __bases__ list. default_class is the class that will be +used if "name" (see __call__) does not appear in the registry. +use_default_map controls whether or not the default mapping of names to +specialized classes is copied in to the registry when the factory is +created. The default is True. + +""" + def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: + """Register cls as the specialized class for handling "name" headers. + + """ def __getitem__(self, name: str) -> type[BaseHeader]: ... - def __call__(self, name: str, value: Any) -> BaseHeader: ... + def __call__(self, name: str, value: Any) -> BaseHeader: + """Create a header instance for header 'name' from 'value'. 
+ +Creates a header instance by creating a specialized class for parsing +and representing the specified header by combining the factory +base_class with a specialized class from the registry or the +default_class, and passing the name and value to the constructed +class's constructor. + +""" class Address: @property @@ -164,10 +300,28 @@ class Address: @property def domain(self) -> str: ... @property - def addr_spec(self) -> str: ... + def addr_spec(self) -> str: + """The addr_spec (username@domain) portion of the address, quoted +according to RFC 5322 rules, but with no Content Transfer Encoding. +""" def __init__( self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None - ) -> None: ... + ) -> None: + """Create an object representing a full email address. + +An address can have a 'display_name', a 'username', and a 'domain'. In +addition to specifying the username and domain separately, they may be +specified together by using the addr_spec keyword *instead of* the +username and domain keywords. If an addr_spec string is specified it +must be properly quoted according to RFC 5322 rules; an error will be +raised if it is not. + +An Address object has display_name, username, domain, and addr_spec +attributes, all of which are read-only. The addr_spec and the string +value of the object are both quoted according to RFC5322 rules, but +without any Content Transfer Encoding. + +""" __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... @@ -176,6 +330,20 @@ class Group: def display_name(self) -> str | None: ... @property def addresses(self) -> tuple[Address, ...]: ... - def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: + """Create an object representing an address group. + +An address group consists of a display_name followed by colon and a +list of addresses (see Address) terminated by a semi-colon. The Group +is created by specifying a display_name and a possibly empty list of +Address objects. A Group can also be used to represent a single +address that is not in a group, which is convenient when manipulating +lists that are a combination of Groups and individual Addresses. In +this case the display_name should be set to None. In particular, the +string representation of a Group whose display_name is None is the same +as the Address object, if there is one and only one Address object in +the addresses list. + +""" __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi index d964d68438336..51cbec229ad29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi @@ -1,12 +1,31 @@ +"""Various types of useful iterators and generators. +""" from _typeshed import SupportsWrite from collections.abc import Iterator from email.message import Message __all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] -def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... -def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... -def walk(self: Message) -> Iterator[Message]: ... 
+def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: + """Iterate over the parts, returning string payloads line-by-line. + +Optional decode (default False) is passed through to .get_payload(). +""" +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: + """Iterate over the subparts with a given MIME type. + +Use 'maintype' as the main MIME type to match against; this defaults to +"text". Optional 'subtype' is the MIME subtype to match against; if +omitted, only the main type is matched. +""" +def walk(self: Message) -> Iterator[Message]: + """Walk over the message tree, yielding each subpart. + +The walk is performed in depth-first order. This method is a +generator. +""" # We include the seemingly private function because it is documented in the stdlib documentation. -def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... +def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: + """A handy debugging aid +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi index 794882b140e61..227cac54340b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi @@ -1,3 +1,5 @@ +"""Basic message object for the email package object model. +""" from _typeshed import MaybeNone from collections.abc import Generator, Iterator, Sequence from email import _ParamsType, _ParamType @@ -33,6 +35,20 @@ class _SupportsDecodeToPayload(Protocol): def decode(self, encoding: str, errors: str, /) -> _PayloadType | _MultipartPayloadType: ... class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): + """Basic message object. + +A message object is defined as something that has a bunch of RFC 2822 +headers and a payload. It may optionally have an envelope header +(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a +multipart or a message/rfc822), then the payload is a list of Message +objects, otherwise it is a string. + +Message objects implement part of the 'mapping' interface, which assumes +there is exactly one occurrence of the header per message. Some headers +do in fact appear multiple times (e.g. Received) and for those headers, +you must use the explicit API to set or get all the headers. Not all of +the mapping methods are implemented. +""" # The policy attributes and arguments in this class and its subclasses # would ideally use Policy[Self], but this is not possible. policy: Policy[Any] # undocumented @@ -40,14 +56,41 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): epilogue: str | None defects: list[MessageDefect] def __init__(self, policy: Policy[Any] = ...) -> None: ... - def is_multipart(self) -> bool: ... + def is_multipart(self) -> bool: + """Return True if the message consists of multiple parts. +""" def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... - def attach(self, payload: _PayloadType) -> None: ... + def attach(self, payload: _PayloadType) -> None: + """Add the given payload to the current payload. + +The current payload will always be a list of objects after this method +is called. If you want to set the payload to a scalar object, use +set_payload() instead. 
+""" # `i: int` without a multipart payload results in an error # `| MaybeNone` acts like `| Any`: can be None for cleared or unset payload, but annoying to check @overload # multipart - def get_payload(self, i: int, decode: Literal[True]) -> None: ... + def get_payload(self, i: int, decode: Literal[True]) -> None: + """Return a reference to the payload. + +The payload will either be a list object or a string. If you mutate +the list object, you modify the message's payload in place. Optional +i returns that index into the payload. + +Optional decode is a flag indicating whether the payload should be +decoded or not, according to the Content-Transfer-Encoding header +(default is False). + +When True and the message is not a multipart, the payload will be +decoded if this header's value is `quoted-printable' or `base64'. If +some other encoding is used, or the header is missing, or if the +payload has bogus data (i.e. bogus base64 or uuencoded data), the +payload is returned as-is. + +If the message is a multipart and the decode flag is True, then None +is returned. +""" @overload # multipart def get_payload(self, i: int, decode: Literal[False] = False) -> _PayloadType | MaybeNone: ... @overload # either @@ -62,79 +105,340 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): @overload def set_payload( self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None - ) -> None: ... + ) -> None: + """Set the payload to the given value. + +Optional charset sets the message's default character set. See +set_charset() for details. +""" @overload def set_payload( self, payload: _SupportsEncodeToPayload | _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: Charset | str, ) -> None: ... - def set_charset(self, charset: _CharsetType) -> None: ... - def get_charset(self) -> _CharsetType: ... - def __len__(self) -> int: ... + def set_charset(self, charset: _CharsetType) -> None: + """Set the charset of the payload to a given character set. + +charset can be a Charset instance, a string naming a character set, or +None. If it is a string it will be converted to a Charset instance. +If charset is None, the charset parameter will be removed from the +Content-Type field. Anything else will generate a TypeError. + +The message will be assumed to be of type text/* encoded with +charset.input_charset. It will be converted to charset.output_charset +and encoded properly, if needed, when generating the plain text +representation of the message. MIME headers (MIME-Version, +Content-Type, Content-Transfer-Encoding) will be added as needed. +""" + def get_charset(self) -> _CharsetType: + """Return the Charset instance associated with the message's payload. + """ + def __len__(self) -> int: + """Return the total number of headers, including duplicates. +""" def __contains__(self, name: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... # Same as `get` with `failobj=None`, but with the expectation that it won't return None in most scenarios # This is important for protocols using __getitem__, like SupportsKeysAndGetItem # Morally, the return type should be `AnyOf[_HeaderType, None]`, # so using "the Any trick" instead. - def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: ... - def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: ... - def __delitem__(self, name: str) -> None: ... - def keys(self) -> list[str]: ... - def values(self) -> list[_HeaderT_co]: ... - def items(self) -> list[tuple[str, _HeaderT_co]]: ... 
+ def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: + """Get a header value. + +Return None if the header is missing instead of raising an exception. + +Note that if the header appeared multiple times, exactly which +occurrence gets returned is undefined. Use get_all() to get all +the values matching a header field name. +""" + def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: + """Set the value of a header. + +Note: this does not overwrite an existing header with the same field +name. Use __delitem__() first to delete any existing headers. +""" + def __delitem__(self, name: str) -> None: + """Delete all occurrences of a header, if present. + +Does not raise an exception if the header is missing. +""" + def keys(self) -> list[str]: + """Return a list of all the message's header field names. + +These will be sorted in the order they appeared in the original +message, or were added to the message, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" + def values(self) -> list[_HeaderT_co]: + """Return a list of all the message's header values. + +These will be sorted in the order they appeared in the original +message, or were added to the message, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" + def items(self) -> list[tuple[str, _HeaderT_co]]: + """Get all the message's header fields and values. + +These will be sorted in the order they appeared in the original +message, or were added to the message, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" @overload - def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: ... + def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: + """Get a header value. + +Like __getitem__() but return failobj instead of None when the field +is missing. +""" @overload def get(self, name: str, failobj: _T) -> _HeaderT_co | _T: ... @overload - def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: ... + def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: + """Return a list of all the values for the named field. + +These will be sorted in the order they appeared in the original +message, and may contain duplicates. Any fields deleted and +re-inserted are always appended to the header list. + +If no such fields exist, failobj is returned (defaults to None). +""" @overload def get_all(self, name: str, failobj: _T) -> list[_HeaderT_co] | _T: ... - def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... - def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: ... - def get_content_type(self) -> str: ... - def get_content_maintype(self) -> str: ... - def get_content_subtype(self) -> str: ... - def get_default_type(self) -> str: ... - def set_default_type(self, ctype: str) -> None: ... + def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: + """Extended header setting. + +name is the header field to add. keyword arguments can be used to set +additional parameters for the header field, with underscores converted +to dashes. Normally the parameter will be added as key="value" unless +value is None, in which case only the key will be added. 
If a +parameter value contains non-ASCII characters it can be specified as a +three-tuple of (charset, language, value), in which case it will be +encoded according to RFC2231 rules. Otherwise it will be encoded using +the utf-8 charset and a language of ''. + +Examples: + +msg.add_header('content-disposition', 'attachment', filename='bud.gif') +msg.add_header('content-disposition', 'attachment', + filename=('utf-8', '', 'Fußballer.ppt')) +msg.add_header('content-disposition', 'attachment', + filename='Fußballer.ppt')) +""" + def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: + """Replace a header. + +Replace the first matching header found in the message, retaining +header order and case. If no matching header was found, a KeyError is +raised. +""" + def get_content_type(self) -> str: + """Return the message's content type. + +The returned string is coerced to lower case of the form +'maintype/subtype'. If there was no Content-Type header in the +message, the default type as given by get_default_type() will be +returned. Since according to RFC 2045, messages always have a default +type this will always return a value. + +RFC 2045 defines a message's default type to be text/plain unless it +appears inside a multipart/digest container, in which case it would be +message/rfc822. +""" + def get_content_maintype(self) -> str: + """Return the message's main content type. + +This is the 'maintype' part of the string returned by +get_content_type(). +""" + def get_content_subtype(self) -> str: + """Returns the message's sub-content type. + +This is the 'subtype' part of the string returned by +get_content_type(). +""" + def get_default_type(self) -> str: + """Return the 'default' content type. + +Most messages have a default content type of text/plain, except for +messages that are subparts of multipart/digest containers. Such +subparts have a default content type of message/rfc822. +""" + def set_default_type(self, ctype: str) -> None: + """Set the 'default' content type. + +ctype should be either "text/plain" or "message/rfc822", although this +is not enforced. The default content type is not stored in the +Content-Type header. +""" @overload def get_params( self, failobj: None = None, header: str = "content-type", unquote: bool = True - ) -> list[tuple[str, str]] | None: ... + ) -> list[tuple[str, str]] | None: + """Return the message's Content-Type parameters, as a list. + +The elements of the returned list are 2-tuples of key/value pairs, as +split on the '=' sign. The left hand side of the '=' is the key, +while the right hand side is the value. If there is no '=' sign in +the parameter the value is the empty string. The value is as +described in the get_param() method. + +Optional failobj is the object to return if there is no Content-Type +header. Optional header is the header to search instead of +Content-Type. If unquote is True, the value is unquoted. +""" @overload def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... @overload def get_param( self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True - ) -> _ParamType | None: ... + ) -> _ParamType | None: + """Return the parameter value if found in the Content-Type header. + +Optional failobj is the object to return if there is no Content-Type +header, or the Content-Type header has no such parameter. Optional +header is the header to search instead of Content-Type. 
+ +Parameter keys are always compared case insensitively. The return +value can either be a string, or a 3-tuple if the parameter was RFC +2231 encoded. When it's a 3-tuple, the elements of the value are of +the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and +LANGUAGE can be None, in which case you should consider VALUE to be +encoded in the us-ascii charset. You can usually ignore LANGUAGE. +The parameter value (either the returned string, or the VALUE item in +the 3-tuple) is always unquoted, unless unquote is set to False. + +If your application doesn't care whether the parameter was RFC 2231 +encoded, it can turn the return value into a string as follows: + + rawparam = msg.get_param('foo') + param = email.utils.collapse_rfc2231_value(rawparam) + +""" @overload def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... - def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... - def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... + def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: + """Remove the given parameter completely from the Content-Type header. + +The header will be re-written in place without the parameter or its +value. All values will be quoted as necessary unless requote is +False. Optional header specifies an alternative to the Content-Type +header. +""" + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: + """Set the main type and subtype for the Content-Type header. + +type must be a string in the form "maintype/subtype", otherwise a +ValueError is raised. + +This method replaces the Content-Type header, keeping all the +parameters in place. If requote is False, this leaves the existing +header's quoting as is. Otherwise, the parameters will be quoted (the +default). + +An alternative header can be specified in the header argument. When +the Content-Type header is set, we'll always also add a MIME-Version +header. +""" @overload - def get_filename(self, failobj: None = None) -> str | None: ... + def get_filename(self, failobj: None = None) -> str | None: + """Return the filename associated with the payload if present. + +The filename is extracted from the Content-Disposition header's +'filename' parameter, and it is unquoted. If that header is missing +the 'filename' parameter, this method falls back to looking for the +'name' parameter. +""" @overload def get_filename(self, failobj: _T) -> str | _T: ... @overload - def get_boundary(self, failobj: None = None) -> str | None: ... + def get_boundary(self, failobj: None = None) -> str | None: + """Return the boundary associated with the payload if present. + +The boundary is extracted from the Content-Type header's 'boundary' +parameter, and it is unquoted. +""" @overload def get_boundary(self, failobj: _T) -> str | _T: ... - def set_boundary(self, boundary: str) -> None: ... + def set_boundary(self, boundary: str) -> None: + """Set the boundary parameter in Content-Type to 'boundary'. + +This is subtly different than deleting the Content-Type header and +adding a new one with a new boundary parameter via add_header(). The +main difference is that using the set_boundary() method preserves the +order of the Content-Type header in the original message. + +HeaderParseError is raised if the message has no Content-Type header. +""" @overload - def get_content_charset(self) -> str | None: ... 
+ def get_content_charset(self) -> str | None: + """Return the charset parameter of the Content-Type header. + +The returned string is always coerced to lower case. If there is no +Content-Type header, or if that header has no charset parameter, +failobj is returned. +""" @overload def get_content_charset(self, failobj: _T) -> str | _T: ... @overload - def get_charsets(self, failobj: None = None) -> list[str | None]: ... + def get_charsets(self, failobj: None = None) -> list[str | None]: + """Return a list containing the charset(s) used in this message. + +The returned list of items describes the Content-Type headers' +charset parameter for this message and all the subparts in its +payload. + +Each item will either be a string (the value of the charset parameter +in the Content-Type header of that part) or the value of the +'failobj' parameter (defaults to None), if the part does not have a +main MIME type of "text", or the charset is not defined. + +The list will contain one string for each part of the message, plus +one for the container message (i.e. self), so that a non-multipart +message will still return a list of length 1. +""" @overload def get_charsets(self, failobj: _T) -> list[str | _T]: ... - def walk(self) -> Generator[Self, None, None]: ... - def get_content_disposition(self) -> str | None: ... - def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: ... - def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: ... - def __bytes__(self) -> bytes: ... + def walk(self) -> Generator[Self, None, None]: + """Walk over the message tree, yielding each subpart. + +The walk is performed in depth-first order. This method is a +generator. +""" + def get_content_disposition(self) -> str | None: + """Return the message's content-disposition if it exists, or None. + +The return values can be either 'inline', 'attachment' or None +according to the rfc2183. +""" + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: + """Return the entire formatted message as a string. + +Optional 'unixfrom', when true, means include the Unix From_ envelope +header. For backward compatibility reasons, if maxheaderlen is +not specified it defaults to 0, so you must override it explicitly +if you want a different maxheaderlen. 'policy' is passed to the +Generator instance used to serialize the message; if it is not +specified the policy associated with the message instance is used. + +If the message object contains binary data that is not encoded +according to RFC standards, the non-compliant data will be replaced by +unicode "unknown character" code points. +""" + def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: + """Return the entire formatted message as a bytes object. + +Optional 'unixfrom', when true, means include the Unix From_ envelope +header. 'policy' is passed to the BytesGenerator instance used to +serialize the message; if not specified the policy associated with +the message instance is used. +""" + def __bytes__(self) -> bytes: + """Return the entire formatted message as a bytes object. + """ def set_param( self, param: str, @@ -144,20 +448,73 @@ class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): charset: str | None = None, language: str = "", replace: bool = False, - ) -> None: ... + ) -> None: + """Set a parameter in the Content-Type header. 
+ +If the parameter already exists in the header, its value will be +replaced with the new value. + +If header is Content-Type and has not yet been defined for this +message, it will be set to "text/plain" and the new parameter and +value will be appended as per RFC 2045. + +An alternate header can be specified in the header argument, and all +parameters will be quoted as necessary unless requote is False. + +If charset is specified, the parameter will be encoded according to RFC +2231. Optional language specifies the RFC 2231 language, defaulting +to the empty string. Both charset and language should be strings. +""" # The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: ... - def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: ... + def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: + """Store name and value in the model without modification. + +This is an "internal" API, intended only for use by a parser. +""" + def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: + """Return the (name, value) header pairs without modification. + +This is an "internal" API, intended only for use by a generator. +""" class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def __init__(self, policy: Policy[Any] | None = None) -> None: ... - def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: ... - def attach(self, payload: Self) -> None: ... # type: ignore[override] + def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: + """Return best candidate mime part for display as 'body' of message. + +Do a depth first search, starting with self, looking for the first part +matching each of the items in preferencelist, and return the part +corresponding to the first item that has a match, or None if no items +have a match. If 'related' is not included in preferencelist, consider +the root part of any multipart/related encountered as a candidate +match. Ignore parts with 'Content-Disposition: attachment'. +""" + def attach(self, payload: Self) -> None: # type: ignore[override] + """Add the given payload to the current payload. + +The current payload will always be a list of objects after this method +is called. If you want to set the payload to a scalar object, use +set_payload() instead. +""" # The attachments are created via type(self) in the attach method. It's theoretically # possible to sneak other attachment types into a MIMEPart instance, but could cause # cause unforseen consequences. - def iter_attachments(self) -> Iterator[Self]: ... - def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: ... + def iter_attachments(self) -> Iterator[Self]: + """Return an iterator over the non-main parts of a multipart. + +Skip the first of each occurrence of text/plain, text/html, +multipart/related, or multipart/alternative in the multipart (unless +they have a 'Content-Disposition: attachment' header) and include all +remaining subparts in the returned iterator. When applied to a +multipart/related, return all parts except the root part. Return an +empty iterator when applied to a multipart/alternative or a +non-multipart. +""" + def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: + """Return an iterator over all immediate subparts of a multipart. + +Return an empty iterator for a non-multipart. 
+""" def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... def make_related(self, boundary: str | None = None) -> None: ... @@ -168,7 +525,17 @@ class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... - def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: + """Return the entire formatted message as a string. + +Optional 'unixfrom', when true, means include the Unix From_ envelope +header. maxheaderlen is retained for backward compatibility with the +base Message class, but defaults to None, meaning that the policy value +for max_line_length controls the header maximum length. 'policy' is +passed to the Generator instance used to serialize the message; if it +is not specified the policy associated with the message instance is +used. +""" def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi index a7ab9dc75ce24..55a88d7d80331 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi @@ -1,3 +1,5 @@ +"""Class representing application/* type MIME documents. +""" from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -6,6 +8,8 @@ from email.policy import Policy __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): + """Class for generating application/* MIME documents. +""" def __init__( self, _data: str | bytes | bytearray, @@ -14,4 +18,18 @@ class MIMEApplication(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: ... + ) -> None: + """Create an application/* type MIME document. + +_data contains the bytes for the raw application data. + +_subtype is the MIME content type subtype, defaulting to +'octet-stream'. + +_encoder is a function which will perform the actual encoding for +transport of the application data, defaulting to base64 encoding. + +Any additional keyword arguments are passed to the base class +constructor, which turns them into parameters on the Content-Type +header. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi index 090dfb960db6f..02ec41e39623b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi @@ -1,3 +1,5 @@ +"""Class representing audio/* type MIME documents. +""" from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -6,6 +8,8 @@ from email.policy import Policy __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): + """Class for generating audio/* MIME documents. 
+""" def __init__( self, _audiodata: str | bytes | bytearray, @@ -14,4 +18,24 @@ class MIMEAudio(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: ... + ) -> None: + """Create an audio/* type MIME document. + +_audiodata contains the bytes for the raw audio data. If this data +can be decoded as au, wav, aiff, or aifc, then the +subtype will be automatically included in the Content-Type header. +Otherwise, you can specify the specific audio subtype via the +_subtype parameter. If _subtype is not given, and no subtype can be +guessed, a TypeError is raised. + +_encoder is a function which will perform the actual encoding for +transport of the image data. It takes one argument, which is this +Image instance. It should use get_payload() and set_payload() to +change the payload to the encoded form. It should also add any +Content-Transfer-Encoding or other headers to the message as +necessary. The default encoding is Base64. + +Any additional keyword arguments are passed to the base class +constructor, which turns them into parameters on the Content-Type +header. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi index b733709f1b5a0..bc1a615b5883a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi @@ -1,3 +1,5 @@ +"""Base class for MIME specializations. +""" import email.message from email import _ParamsType from email.policy import Policy @@ -5,4 +7,12 @@ from email.policy import Policy __all__ = ["MIMEBase"] class MIMEBase(email.message.Message): - def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... + """Base class for MIME specializations. +""" + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: + """This constructor adds a Content-Type: and a MIME-Version: header. + +The Content-Type: header is taken from the _maintype and _subtype +arguments. Additional parameters for this header are taken from the +keyword arguments. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi index b47afa6ce5925..20f228cc21c3f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi @@ -1,3 +1,5 @@ +"""Class representing image/* type MIME documents. +""" from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -6,6 +8,8 @@ from email.policy import Policy __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): + """Class for generating image/* type MIME documents. +""" def __init__( self, _imagedata: str | bytes | bytearray, @@ -14,4 +18,23 @@ class MIMEImage(MIMENonMultipart): *, policy: Policy | None = None, **_params: _ParamsType, - ) -> None: ... + ) -> None: + """Create an image/* type MIME document. + +_imagedata contains the bytes for the raw image data. If the data +type can be detected (jpeg, png, gif, tiff, rgb, pbm, pgm, ppm, +rast, xbm, bmp, webp, and exr attempted), then the subtype will be +automatically included in the Content-Type header. Otherwise, you can +specify the specific image subtype via the _subtype parameter. 
+ +_encoder is a function which will perform the actual encoding for +transport of the image data. It takes one argument, which is this +Image instance. It should use get_payload() and set_payload() to +change the payload to the encoded form. It should also add any +Content-Transfer-Encoding or other headers to the message as +necessary. The default encoding is Base64. + +Any additional keyword arguments are passed to the base class +constructor, which turns them into parameters on the Content-Type +header. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi index a1e370e2eab51..ca033b0758ca9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi @@ -1,3 +1,5 @@ +"""Class representing message/* MIME documents. +""" from email._policybase import _MessageT from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy @@ -5,4 +7,15 @@ from email.policy import Policy __all__ = ["MIMEMessage"] class MIMEMessage(MIMENonMultipart): - def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: ... + """Class representing message/* MIME documents. +""" + def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: + """Create a message/* type MIME document. + +_msg is a message object and must be an instance of Message, or a +derived class of Message, otherwise a TypeError is raised. + +Optional _subtype defines the subtype of the contained message. The +default is "rfc822" (this is defined by the MIME standard, even though +the term "rfc822" is technically outdated by RFC 2822). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi index fb9599edbcb8f..3ec6352afdfc4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi @@ -1,3 +1,5 @@ +"""Base class for MIME multipart/* type messages. +""" from collections.abc import Sequence from email import _ParamsType from email._policybase import _MessageT @@ -7,6 +9,8 @@ from email.policy import Policy __all__ = ["MIMEMultipart"] class MIMEMultipart(MIMEBase): + """Base class for MIME multipart/* type messages. +""" def __init__( self, _subtype: str = "mixed", @@ -15,4 +19,22 @@ class MIMEMultipart(MIMEBase): *, policy: Policy[_MessageT] | None = None, **_params: _ParamsType, - ) -> None: ... + ) -> None: + """Creates a multipart/* type message. + +By default, creates a multipart/mixed message, with proper +Content-Type and MIME-Version headers. + +_subtype is the subtype of the multipart content type, defaulting to +'mixed'. + +boundary is the multipart boundary string. By default it is +calculated as needed. + +_subparts is a sequence of initial subparts for the payload. It +must be an iterable object, such as a list. You can always +attach new subparts to the message by using the attach() method. + +Additional parameters for the Content-Type header are taken from the +keyword arguments (or passed into the _params argument). 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi index 5497d89b10726..4b4de010d3b0b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi @@ -1,5 +1,9 @@ +"""Base class for MIME type messages that are not multipart. +""" from email.mime.base import MIMEBase __all__ = ["MIMENonMultipart"] -class MIMENonMultipart(MIMEBase): ... +class MIMENonMultipart(MIMEBase): + """Base class for MIME non-multipart type messages. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi index edfa67a092427..aaf9542d11b09 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi @@ -1,9 +1,23 @@ +"""Class representing text/* type MIME documents. +""" from email._policybase import Policy from email.mime.nonmultipart import MIMENonMultipart __all__ = ["MIMEText"] class MIMEText(MIMENonMultipart): + """Class for generating text/* type MIME documents. +""" def __init__( self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None - ) -> None: ... + ) -> None: + """Create a text/* type MIME document. + +_text is the string for this message object. + +_subtype is the MIME sub content type, defaulting to "plain". + +_charset is the character set parameter added to the Content-Type +header. This defaults to "us-ascii". Note that as a side-effect, the +Content-Transfer-Encoding header will also be set. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi index a4924a6cbd88f..2be6f8ae654a2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi @@ -1,3 +1,5 @@ +"""A parser of RFC 2822 and MIME email messages. +""" from _typeshed import SupportsRead from collections.abc import Callable from email._policybase import _MessageT @@ -11,13 +13,47 @@ __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedPa class Parser(Generic[_MessageT]): @overload - def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: ... + def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: + """Parser of RFC 2822 and MIME email messages. + +Creates an in-memory object tree representing the email message, which +can then be manipulated and turned over to a Generator to return the +textual representation of the message. + +The string must be formatted as a block of RFC 2822 headers and header +continuation lines, optionally preceded by a 'Unix-from' header. The +header block is terminated either by the end of the string or by a +blank line. + +_class is the class to instantiate for new message objects when they +must be created. This class must have a constructor that can take +zero arguments. Default is Message.Message. + +The policy keyword specifies a policy object that controls a number of +aspects of the parser's operation. The default policy maintains +backward compatibility. + +""" @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload def __init__(self, _class: Callable[[], _MessageT] | None, *, policy: Policy[_MessageT] = ...) -> None: ... 
- def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: ... - def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: + """Create a message structure from the data in a file. + +Reads all the data from the file and returns the root of the message +structure. Optional headersonly is a flag specifying whether to stop +parsing after reading the headers or not. The default is False, +meaning it parses the entire contents of the file. +""" + def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: + """Create a message structure from a string. + +Returns the root of the message structure. Optional headersonly is a +flag specifying whether to stop parsing after reading the headers or +not. The default is False, meaning it parses the entire contents of +the file. +""" class HeaderParser(Parser[_MessageT]): def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> _MessageT: ... @@ -26,13 +62,42 @@ class HeaderParser(Parser[_MessageT]): class BytesParser(Generic[_MessageT]): parser: Parser[_MessageT] @overload - def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: ... + def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: + """Parser of binary RFC 2822 and MIME email messages. + +Creates an in-memory object tree representing the email message, which +can then be manipulated and turned over to a Generator to return the +textual representation of the message. + +The input must be formatted as a block of RFC 2822 headers and header +continuation lines, optionally preceded by a 'Unix-from' header. The +header block is terminated either by the end of the input or by a +blank line. + +_class is the class to instantiate for new message objects when they +must be created. This class must have a constructor that can take +zero arguments. Default is Message.Message. +""" @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload def __init__(self, _class: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... - def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: ... - def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: ... + def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: + """Create a message structure from the data in a binary file. + +Reads all the data from the file and returns the root of the message +structure. Optional headersonly is a flag specifying whether to stop +parsing after reading the headers or not. The default is False, +meaning it parses the entire contents of the file. +""" + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: + """Create a message structure from a byte string. + +Returns the root of the message structure. Optional headersonly is a +flag specifying whether to stop parsing after reading the headers or +not. The default is False, meaning it parses the entire contents of +the file. +""" class BytesHeaderParser(BytesParser[_MessageT]): def parse(self, fp: _WrappedBuffer, headersonly: bool = True) -> _MessageT: ... 
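As a quick illustration of the runtime behavior the parser, message, headerregistry, and policy stubs above describe, here is a minimal sketch. It is not part of the vendored typeshed diff; it only uses documented stdlib names (email.parser.BytesParser, email.policy.default, email.message.EmailMessage, email.headerregistry.Address), and the sample message bytes are invented for demonstration.

from email import policy
from email.headerregistry import Address
from email.message import EmailMessage
from email.parser import BytesParser

# Invented sample input: a tiny RFC 5322 message as raw bytes.
raw = (
    b"From: Ana <ana@example.com>\r\n"
    b"To: bob@example.com\r\n"
    b"Subject: hi\r\n"
    b"\r\n"
    b"hello\r\n"
)

# parsebytes() builds the message tree; with policy.default the headers come
# back as headerregistry objects (str subclasses) rather than plain strings.
msg = BytesParser(policy=policy.default).parsebytes(raw)
assert msg["Subject"] == "hi"
print(msg["From"].addresses)   # tuple of Address objects

# get_body() does a depth-first search for the best "body" candidate; for this
# single-part text/plain message that is the message itself.
body = msg.get_body(preferencelist=("plain",))
print(body.get_content())

# Building a reply with the same object model: EmailPolicy's header factory
# lets Address objects be assigned directly to address headers.
reply = EmailMessage()
reply["To"] = Address(display_name="Ana", username="ana", domain="example.com")
reply["From"] = "bob@example.com"
reply["Subject"] = "Re: hi"
reply.set_content("got it")
print(reply.as_string())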
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi index 35c999919eede..3dcdb37dc44d3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi @@ -1,3 +1,6 @@ +"""This will be the home for the policy that hooks in the new +code that adds all the email6 features. +""" from collections.abc import Callable from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, _MessageT, compat32 as compat32 from email.contentmanager import ContentManager @@ -8,6 +11,115 @@ from typing_extensions import Self __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] class EmailPolicy(Policy[_MessageT]): + """Controls for how messages are interpreted and formatted. + +Most of the classes and many of the methods in the email package accept +Policy objects as parameters. A Policy object contains a set of values and +functions that control how input is interpreted and how output is rendered. +For example, the parameter 'raise_on_defect' controls whether or not an RFC +violation results in an error being raised or not, while 'max_line_length' +controls the maximum length of output lines when a Message is serialized. + +Any valid attribute may be overridden when a Policy is created by passing +it as a keyword argument to the constructor. Policy objects are immutable, +but a new Policy object can be created with only certain values changed by +calling the Policy instance with keyword arguments. Policy objects can +also be added, producing a new Policy object in which the non-default +attributes set in the right hand operand overwrite those specified in the +left operand. + +Settable attributes: + +raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + +linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + +cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + +max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + +mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + +message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + +verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. +PROVISIONAL + +The API extensions enabled by this policy are currently provisional. +Refer to the documentation for details. + +This policy adds new header parsing and folding algorithms. Instead of +simple strings, headers are custom objects with custom attributes +depending on the type of the field. The folding algorithm fully +implements RFCs 2047 and 5322. 
+ +In addition to the settable attributes listed above that apply to +all Policies, this policy adds the following additional attributes: + +utf8 -- if False (the default) message headers will be + serialized as ASCII, using encoded words to encode + any non-ASCII characters in the source strings. If + True, the message headers will be serialized using + utf8 and will not contain encoded words (see RFC + 6532 for more on this serialization format). + +refold_source -- if the value for a header in the Message object + came from the parsing of some source, this attribute + indicates whether or not a generator should refold + that value when transforming the message back into + stream form. The possible values are: + + none -- all source values use original folding + long -- source values that have any line that is + longer than max_line_length will be + refolded + all -- all values are refolded. + + The default is 'long'. + +header_factory -- a callable that takes two arguments, 'name' and + 'value', where 'name' is a header field name and + 'value' is an unfolded header field value, and + returns a string-like object that represents that + header. A default header_factory is provided that + understands some of the RFC5322 header field types. + (Currently address fields and date fields have + special treatment, while all other fields are + treated as unstructured. This list will be + completed before the extension is marked stable.) + +content_manager -- an object with at least two methods: get_content + and set_content. When the get_content or + set_content method of a Message object is called, + it calls the corresponding method of this object, + passing it the message object as its first argument, + and any arguments or keywords that were passed to + it as additional arguments. The default + content_manager is + :data:`~email.contentmanager.raw_data_manager`. + +""" utf8: bool refold_source: str header_factory: Callable[[str, Any], Any] @@ -46,11 +158,87 @@ class EmailPolicy(Policy[_MessageT]): header_factory: Callable[[str, str], str] = ..., content_manager: ContentManager = ..., ) -> None: ... - def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... - def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: ... - def header_fetch_parse(self, name: str, value: str) -> Any: ... - def fold(self, name: str, value: str) -> Any: ... - def fold_binary(self, name: str, value: str) -> bytes: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: + """Given a list of linesep terminated strings constituting the lines of +a single header, return the (name, value) tuple that should be stored +in the model. The input lines should retain their terminating linesep +characters. The lines passed in by the email package may contain +surrogateescaped binary data. +The name is parsed as everything up to the ':' and returned unmodified. +The value is determined by stripping leading whitespace off the +remainder of the first line joined with all subsequent lines, and +stripping any trailing carriage return or linefeed characters. (This +is the same as Compat32). + +""" + def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: + """Given the header name and the value provided by the application +program, return the (name, value) that should be stored in the model. +The name is returned unchanged. If the input value has a 'name' +attribute and it matches the name ignoring case, the value is returned +unchanged. 
Otherwise the name and value are passed to header_factory +method, and the resulting custom header object is returned as the +value. In this case a ValueError is raised if the input value contains +CR or LF characters. + +""" + def header_fetch_parse(self, name: str, value: str) -> Any: + """Given the header name and the value from the model, return the value +to be returned to the application program that is requesting that +header. The value passed in by the email package may contain +surrogateescaped binary data if the lines were parsed by a BytesParser. +The returned value should not contain any surrogateescaped data. + +If the value has a 'name' attribute, it is returned to unmodified. +Otherwise the name and the value with any linesep characters removed +are passed to the header_factory method, and the resulting custom +header object is returned. Any surrogateescaped bytes get turned +into the unicode unknown-character glyph. + +""" + def fold(self, name: str, value: str) -> Any: + """Given the header name and the value from the model, return a string +containing linesep characters that implement the folding of the header +according to the policy controls. The value passed in by the email +package may contain surrogateescaped binary data if the lines were +parsed by a BytesParser. The returned value should not contain any +surrogateescaped data. + +Header folding is controlled by the refold_source policy setting. A +value is considered to be a 'source value' if and only if it does not +have a 'name' attribute (having a 'name' attribute means it is a header +object of some sort). If a source value needs to be refolded according +to the policy, it is converted into a custom header object by passing +the name and the value with any linesep characters removed to the +header_factory method. Folding of a custom header object is done by +calling its fold method with the current policy. + +Source values are split into lines using splitlines. If the value is +not to be refolded, the lines are rejoined using the linesep from the +policy and returned. The exception is lines containing non-ascii +binary data. In that case the value is refolded regardless of the +refold_source setting, which causes the binary data to be CTE encoded +using the unknown-8bit charset. + +""" + def fold_binary(self, name: str, value: str) -> bytes: + """Given the header name and the value from the model, return binary +data containing linesep characters that implement the folding of the +header according to the policy controls. The value passed in by the +email package may contain surrogateescaped binary data. + +The same as fold if cte_type is 7bit, except that the returned value is +bytes. + +If cte_type is 8bit, non-ASCII binary data is converted back into +bytes. Headers with binary data are not refolded, regardless of the +refold_header setting, since there is no way to know whether the binary +data consists of single byte characters or multibyte characters. + +If utf8 is true, headers are encoded to utf8, otherwise to ascii with +non-ASCII unicode rendered as encoded words. + +""" def clone( self, *, @@ -66,7 +254,13 @@ class EmailPolicy(Policy[_MessageT]): refold_source: str = ..., header_factory: Callable[[str, str], str] = ..., content_manager: ContentManager = ..., - ) -> Self: ... + ) -> Self: + """Return a new instance with specified attributes changed. + +The new instance has the same attribute values as the current object, +except for the changes passed in as keyword arguments. 
+ +""" default: EmailPolicy[EmailMessage] SMTP: EmailPolicy[EmailMessage] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi index 87d08eecc70ce..1235cb497b4d2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi @@ -1,3 +1,26 @@ +"""Quoted-printable content transfer encoding per RFCs 2045-2047. + +This module handles the content transfer encoding method defined in RFC 2045 +to encode US ASCII-like 8-bit data called 'quoted-printable'. It is used to +safely encode text that is in a character set similar to the 7-bit US ASCII +character set, but that includes some 8-bit characters that are normally not +allowed in email bodies or headers. + +Quoted-printable is very space-inefficient for encoding binary files; use the +email.base64mime module for that instead. + +This module provides an interface to encode and decode both headers and bodies +with quoted-printable encoding. + +RFC 2045 defines a method for including character set information in an +'encoded-word' in a header. This method is commonly used for 8-bit real names +in To:/From:/Cc: etc. fields, as well as Subject: lines. + +This module does not do the line wrapping or end-of-line character +conversion necessary for proper internationalized headers; it only +does dumb encoding and decoding. To deal with the various line +wrapping issues, use the email.header module. +""" from collections.abc import Iterable __all__ = [ @@ -13,16 +36,74 @@ __all__ = [ "unquote", ] -def header_check(octet: int) -> bool: ... -def body_check(octet: int) -> bool: ... -def header_length(bytearray: Iterable[int]) -> int: ... -def body_length(bytearray: Iterable[int]) -> int: ... -def unquote(s: str | bytes | bytearray) -> str: ... +def header_check(octet: int) -> bool: + """Return True if the octet should be escaped with header quopri. +""" +def body_check(octet: int) -> bool: + """Return True if the octet should be escaped with body quopri. +""" +def header_length(bytearray: Iterable[int]) -> int: + """Return a header quoted-printable encoding length. + +Note that this does not include any RFC 2047 chrome added by +`header_encode()`. + +:param bytearray: An array of bytes (a.k.a. octets). +:return: The length in bytes of the byte array when it is encoded with + quoted-printable for headers. +""" +def body_length(bytearray: Iterable[int]) -> int: + """Return a body quoted-printable encoding length. + +:param bytearray: An array of bytes (a.k.a. octets). +:return: The length in bytes of the byte array when it is encoded with + quoted-printable for bodies. +""" +def unquote(s: str | bytes | bytearray) -> str: + """Turn a string in the form =AB to the ASCII character with value 0xab +""" def quote(c: str | bytes | bytearray) -> str: ... -def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... -def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... -def decode(encoded: str, eol: str = "\n") -> str: ... -def header_decode(s: str) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: + """Encode a single header line with quoted-printable (like) encoding. 
+ +Defined in RFC 2045, this 'Q' encoding is similar to quoted-printable, but +used specifically for email header fields to allow charsets with mostly 7 +bit characters (and some 8 bit) to remain more or less readable in non-RFC +2045 aware mail clients. + +charset names the character set to use in the RFC 2046 header. It +defaults to iso-8859-1. +""" +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: + """Encode with quoted-printable, wrapping at maxlinelen characters. + +Each line of encoded text will end with eol, which defaults to "\\n". Set +this to "\\r\\n" if you will be using the result of this function directly +in an email. + +Each line will be wrapped at, at most, maxlinelen characters before the +eol string (maxlinelen defaults to 76 characters, the maximum value +permitted by RFC 2045). Long lines will have the 'soft line break' +quoted-printable character "=" appended to them, so the decoded text will +be identical to the original text. + +The minimum maxlinelen is 4 to have room for a quoted character ("=XX") +followed by a soft line break. Smaller values will generate a +ValueError. + +""" +def decode(encoded: str, eol: str = "\n") -> str: + """Decode a quoted-printable string. + +Lines are separated with eol, which defaults to \\n. +""" +def header_decode(s: str) -> str: + """Decode a string encoded with RFC 2045 MIME header 'Q' encoding. + +This function does not parse a full MIME header value encoded with +quoted-printable (like =?iso-8859-1?q?Hello_World?=) -- please use +the high level email.header class for that functionality. +""" body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi index efc32a7abce29..1fea5d713d40f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi @@ -1,3 +1,5 @@ +"""Miscellaneous utilities. +""" import datetime import sys from _typeshed import Unused @@ -27,21 +29,62 @@ __all__ = [ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None] -def quote(str: str) -> str: ... -def unquote(str: str) -> str: ... +def quote(str: str) -> str: + """Prepare string to be used in a quoted string. + +Turns backslash and double quote characters into quoted pairs. These +are the only characters that need to be quoted inside a quoted string. +Does not add the surrounding double quotes. +""" +def unquote(str: str) -> str: + """Remove quotes from a string. +""" # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ... -def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... +def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: + """ +Parse addr into its constituent realname and email address parts. + +Return a tuple of realname and email address, unless the parse fails, in +which case return a 2-tuple of ('', ''). + +If strict is True, use a strict parser which rejects malformed inputs. +""" +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: + """The inverse of parseaddr(), this takes a 2-tuple of the form +(realname, email_address) and returns the string value suitable +for an RFC 2822 From, To or Cc header. + +If the first element of pair is false, then the second element is +returned unmodified. 
+ +The optional charset is the character set that is used to encode +realname in case realname is not ASCII safe. Can be an instance of str or +a Charset-like object which has a header_encode method. Default is +'utf-8'. +""" # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 -def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ... +def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: + """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. + +When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in +its place. + +If strict is true, use a strict parser which rejects malformed inputs. +""" @overload -def parsedate(data: None) -> None: ... +def parsedate(data: None) -> None: + """Convert a time string to a time tuple. +""" @overload def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... @overload -def parsedate_tz(data: None) -> None: ... +def parsedate_tz(data: None) -> None: + """Convert a date string to a time tuple. + +Accounts for military timezones. +""" @overload def parsedate_tz(data: str) -> _PDTZ | None: ... @@ -54,25 +97,99 @@ if sys.version_info >= (3, 10): else: def parsedate_to_datetime(data: str) -> datetime.datetime: ... -def mktime_tz(data: _PDTZ) -> int: ... -def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... -def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... +def mktime_tz(data: _PDTZ) -> int: + """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp. +""" +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: + """Returns a date string as specified by RFC 2822, e.g.: + +Fri, 09 Nov 2001 01:08:47 -0000 + +Optional timeval if given is a floating-point time value as accepted by +gmtime() and localtime(), otherwise the current time is used. + +Optional localtime is a flag that when True, interprets timeval, and +returns a date relative to the local timezone instead of UTC, properly +taking daylight savings time into account. + +Optional argument usegmt means that the timezone is written out as +an ascii string, not numeric one (so "GMT" instead of "+0000"). This +is needed for HTTP, and is only used when localtime==False. +""" +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: + """Turn a datetime into a date string as specified in RFC 2822. + +If usegmt is True, dt must be an aware datetime with an offset of zero. In +this case 'GMT' will be rendered instead of the normal +0000 required by +RFC2822. This is to support HTTP headers involving date stamps. +""" if sys.version_info >= (3, 14): - def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: + """Return local time as an aware datetime object. + +If called without arguments, return current time. Otherwise *dt* +argument should be a datetime instance, and it is converted to the +local time zone according to the system time zone database. If *dt* is +naive (that is, dt.tzinfo is None), it is assumed to be in local time. + +""" elif sys.version_info >= (3, 12): @overload - def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ... + def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: + """Return local time as an aware datetime object. 
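A minimal sketch (stdlib-only, not part of the patch; addresses hypothetical) of the parseaddr/formataddr/getaddresses round trip documented above, including the strict keyword noted in the comments:

    from email.utils import formataddr, getaddresses, parseaddr

    pair = parseaddr("Jane Doe <jane@example.com>")        # ('Jane Doe', 'jane@example.com')
    formataddr(pair)                                        # 'Jane Doe <jane@example.com>'
    formataddr(("Zoë", "zoe@example.com"))                  # non-ASCII realname is encoded via the charset
    getaddresses(["Jane <jane@example.com>, Bob <bob@example.com>"], strict=True)
    # [('Jane', 'jane@example.com'), ('Bob', 'bob@example.com')]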
+ +If called without arguments, return current time. Otherwise *dt* +argument should be a datetime instance, and it is converted to the +local time zone according to the system time zone database. If *dt* is +naive (that is, dt.tzinfo is None), it is assumed to be in local time. +The isdst parameter is ignored. + +""" @overload @deprecated("The `isdst` parameter does nothing and will be removed in Python 3.14.") def localtime(dt: datetime.datetime | None = None, isdst: Unused = None) -> datetime.datetime: ... else: - def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... + def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: + """Return local time as an aware datetime object. + + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. + In this case, a positive or zero value for *isdst* causes localtime to + presume initially that summer time (for example, Daylight Saving Time) + is or is not (respectively) in effect for the specified time. A + negative value for *isdst* causes the localtime() function to attempt + to divine whether summer time is in effect for the specified time. -def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... -def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... # May return list[str]. See issue #10431 for details. -def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... + """ + +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: + """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: + +<142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> + +Optional idstring if given is a string used to strengthen the +uniqueness of the message id. Optional domain if given provides the +portion of the message id after the '@'. It defaults to the locally +defined hostname. +""" +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: # May return list[str]. See issue #10431 for details. + """Decode string according to RFC 2231 +""" +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: + """Encode string according to RFC 2231. + +If neither charset nor language is given, then s is returned as-is. If +charset is given but not language, the string is encoded using the empty +string for language. +""" def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... -def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... +def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: + """Decode parameters list according to RFC 2231. + +params is a sequence of 2-tuples containing (param name, string value). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi index 61f86d243c720..1dbb623a90395 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi @@ -1,9 +1,48 @@ +"""Standard "encodings" Package + + Standard Python encoding modules are stored in this package + directory. 
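A minimal sketch (stdlib-only, not part of the patch) of the date, Message-ID and RFC 2231 helpers documented above:

    from email.utils import decode_rfc2231, formatdate, make_msgid, parsedate_to_datetime

    stamp = formatdate(usegmt=True)          # e.g. 'Fri, 09 Nov 2001 01:08:47 GMT'
    parsedate_to_datetime(stamp)             # aware datetime recovered from the string
    make_msgid(domain="example.com")         # '<...@example.com>', unique on every call
    decode_rfc2231("utf-8''caf%C3%A9")       # ('utf-8', '', 'caf%C3%A9') -- charset, language, raw value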
+ + Codec modules must have names corresponding to normalized encoding + names as defined in the normalize_encoding() function below, e.g. + 'utf-8' must be implemented by the module 'utf_8.py'. + + Each codec module must export the following interface: + + * getregentry() -> codecs.CodecInfo object + The getregentry() API must return a CodecInfo object with encoder, decoder, + incrementalencoder, incrementaldecoder, streamwriter and streamreader + attributes which adhere to the Python Codec Interface Standard. + + In addition, a module may optionally also define the following + APIs which are then used by the package's codec search function: + + * getaliases() -> sequence of encoding name strings to use as aliases + + Alias names returned by getaliases() must be normalized encoding + names as defined by normalize_encoding(). + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import sys from codecs import CodecInfo class CodecRegistryError(LookupError, SystemError): ... -def normalize_encoding(encoding: str | bytes) -> str: ... +def normalize_encoding(encoding: str | bytes) -> str: + """Normalize an encoding name. + +Normalization works as follows: all non-alphanumeric +characters except the dot used for Python package names are +collapsed and replaced with a single underscore, e.g. ' -;#' +becomes '_'. Leading and trailing underscores are removed. + +Note that encoding names should be ASCII only. + +""" def search_function(encoding: str) -> CodecInfo | None: ... if sys.version_info >= (3, 14) and sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi index 079af85d51ee4..8df8b5b4bcef3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi @@ -1 +1,18 @@ +"""Encoding Aliases Support + +This module is used by the encodings package search function to +map encodings names to module names. + +Note that the search function normalizes the encoding names before +doing the lookup, so the mapping will have to map normalized +encoding names to module names. + +Contents: + + The following aliases dictionary contains mappings of all IANA + character set names for which the Python core library provides + codecs. In addition to these, a few Python specific codec + aliases have also been added. + +""" aliases: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi index a85585af32ed9..839a54a56cfac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi @@ -1,3 +1,11 @@ +"""Python 'ascii' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi index 0c4f1cb1fe599..d99547405e919 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi @@ -1,3 +1,9 @@ +"""Python 'base64_codec' Codec - base64 content transfer encoding. + +This codec de/encodes from bytes to bytes. 
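A minimal sketch (stdlib-only, not part of the patch) of the lookup contract described in the encodings package docstring above: an encoding name is normalized, mapped through the aliases table, and resolved to a codec module.

    import codecs
    from encodings import aliases, normalize_encoding

    normalize_encoding("utf-8")     # 'utf_8' -- the module encodings/utf_8.py implements it
    aliases.aliases["utf8"]         # 'utf_8' -- aliases maps normalized alias names to module names
    codecs.lookup("UTF-8").name     # 'utf-8' -- search_function ties these together for codecs.lookup()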
+ +Written by Marc-Andre Lemburg (mal@lemburg.com). +""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi index 468346a93da98..41cf27b9dc9be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi @@ -1,3 +1,11 @@ +"""Python 'bz2_codec' Codec - bz2 compression encoding. + +This codec de/encodes from bytes to bytes and is therefore usable with +bytes.transform() and bytes.untransform(). + +Adapted by Raymond Hettinger from zlib_codec.py which was written +by Marc-Andre Lemburg (mal@lemburg.com). +""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi index a971a15860b52..ed4c6998e29f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi @@ -1,3 +1,14 @@ +"""Generic Python Character Mapping Codec. + + Use this codec directly rather than through the automatic + conversion mechanisms supplied by unicode() and .encode(). + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _codecs import _CharMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi index f62195662ce96..2d9d546d0aae1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi index f62195662ce96..9911bf2bcadb8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1006 generated from 'MAPPINGS/VENDORS/MISC/CP1006.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi index f62195662ce96..e8aa63d4861fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1026 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP1026.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi index 42781b4892984..5928bba909ac9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec for CP1125 + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi index f62195662ce96..db39537e8e697 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi index f62195662ce96..768b5ba52ffec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi index f62195662ce96..3cfd94e72c721 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1251 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1251.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi index f62195662ce96..73a62eb34bf25 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1252 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1252.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi index f62195662ce96..4112c83862fee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi index f62195662ce96..7616527f6613f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi index f62195662ce96..f6be3fe317870 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi index f62195662ce96..bb72d150c919c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi index f62195662ce96..9c314fafa000b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1257 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1257.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi index f62195662ce96..9e062ce722bb5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi index f62195662ce96..f8929b8551420 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp273 generated from 'python-mappings/CP273.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi index f62195662ce96..c286b10e89a69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp424 generated from 'MAPPINGS/VENDORS/MISC/CP424.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi index 42781b4892984..dde30f54793e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi index f62195662ce96..9f1feafe2627b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi index f62195662ce96..0da760c221487 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi @@ -1,3 +1,7 @@ +"""Python Character Mapping Codec cp720 generated on Windows: +Vista 6.0.6002 SP2 Multiprocessor Free with the command: + python Tools/unicode/genwincodec.py 720 +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi index 42781b4892984..dee32d17102f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi index 42781b4892984..e3510bfd79a37 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi index 42781b4892984..cf5950bb1bcc8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP850.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi index 42781b4892984..08c02032beb9d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi index 42781b4892984..dfe94baa778c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi index f62195662ce96..6960ee53524c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp856 generated from 'MAPPINGS/VENDORS/MISC/CP856.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi index 42781b4892984..03974a0f3fe7a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi index 42781b4892984..c00a097e44526 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec for CP858, modified from cp850. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi index 42781b4892984..f310db660229a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi index 42781b4892984..3ccf87543a751 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP861.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi index 42781b4892984..49d3c3058038d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi index 42781b4892984..bce9e460911e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP863.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi index 42781b4892984..b127633844fa8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP864.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi index 42781b4892984..4fe0d2fb5eed7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP865.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi index 42781b4892984..010e87152bed9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP866.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi index 42781b4892984..56729dd9a51d5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP869.TXT' with gencodec.py. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi index f62195662ce96..beaedab8cee3c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp874 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP874.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi index f62195662ce96..c4ef03a7d68f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi index 3fd4fe38898a8..38105cadc8751 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi @@ -1,3 +1,9 @@ +"""Python 'hex_codec' Codec - 2-digit hex content transfer encoding. + +This codec de/encodes from bytes to bytes. + +Written by Marc-Andre Lemburg (mal@lemburg.com). +""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi index f62195662ce96..e1b72d93939db 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi @@ -1,3 +1,13 @@ +"""Python Character Mapping Codec generated from 'hp_roman8.txt' with gencodec.py. + +Based on data from ftp://dkuug.dk/i18n/charmaps/HP-ROMAN8 (Keld Simonsen) + +Original source: LaserJet IIP Printer User's Manual HP part no +33471-90901, Hewlet-Packard, June 1989. + +(Used with permission) + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi index f62195662ce96..aac4d48233a4c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi index f62195662ce96..485826075f70c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi index f62195662ce96..14a2435b249e5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi index f62195662ce96..5784003dee398 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi index f62195662ce96..1a579986fc269 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_14 generated from 'MAPPINGS/ISO8859/8859-14.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi index f62195662ce96..ab0702687963c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_15 generated from 'MAPPINGS/ISO8859/8859-15.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi index f62195662ce96..86b2cea4a71fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_16 generated from 'MAPPINGS/ISO8859/8859-16.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi index f62195662ce96..7d092ab745e91 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_2 generated from 'MAPPINGS/ISO8859/8859-2.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi index f62195662ce96..3904ff4fa378a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi index f62195662ce96..f47898334be55 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi index f62195662ce96..058a1cc3193eb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi index f62195662ce96..cc20e96c929ce 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi index f62195662ce96..63d81d0f5aafb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi index f62195662ce96..8349fa7c1f97f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi index f62195662ce96..63dc869b5033d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi index f62195662ce96..0dde852029a6e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi index f62195662ce96..a3cde64e83566 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi @@ -1,3 +1,5 @@ +"""Python Character Mapping Codec koi8_t +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi index f62195662ce96..9bbec4eee5790 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec koi8_u generated from 'python-mappings/KOI8-U.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi index f62195662ce96..79fb883736d59 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec kz1048 generated from 'MAPPINGS/VENDORS/MISC/KZ1048.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi index 3b06773eac03c..3b8f1333efdb5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi @@ -1,3 +1,11 @@ +"""Python 'latin-1' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi index 42781b4892984..01017388d52cc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi index f62195662ce96..010aa9bb6994a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi index f62195662ce96..62a00f17b54f7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi index f62195662ce96..291eeaf426cd7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_farsi generated from 'MAPPINGS/VENDORS/APPLE/FARSI.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi index f62195662ce96..782d5af6052b1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi index f62195662ce96..0bd4684e85a53 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' with gencodec.py. 
+ +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi index f62195662ce96..a68a7948f779c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi @@ -1,3 +1,11 @@ +"""Python Character Mapping Codec mac_latin2 generated from 'MAPPINGS/VENDORS/MICSFT/MAC/LATIN2.TXT' with gencodec.py. + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. +(c) Copyright 2000 Guido van Rossum. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi index f62195662ce96..b6194f11f0213 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi index f62195662ce96..ac963c8b4078c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi index f62195662ce96..bcb971dc59151 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec mac_turkish generated from 'MAPPINGS/VENDORS/APPLE/TURKISH.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi index f62195662ce96..dddb00be487eb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi @@ -1,3 +1,8 @@ +"""Python Character Mapping Codec for PalmOS 3.5. + +Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi index f62195662ce96..0bae97e2c54dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi @@ -1,3 +1,11 @@ +"""Python Character Mapping Codec generated from 'PTCP154.txt' with gencodec.py. + +Written by Marc-Andre Lemburg (mal@lemburg.com). 
+ +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. +(c) Copyright 2000 Guido van Rossum. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi index eb99e667b4167..9cee3cfa76323 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi @@ -1,20 +1,44 @@ +"""Codec for the Punycode encoding, as specified in RFC 3492 + +Written by Martin v. Löwis. +""" import codecs from typing import Literal -def segregate(str: str) -> tuple[bytes, list[int]]: ... -def selective_len(str: str, max: int) -> int: ... -def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: ... -def insertion_unsort(str: str, extended: list[int]) -> list[int]: ... +def segregate(str: str) -> tuple[bytes, list[int]]: + """3.1 Basic code point segregation +""" +def selective_len(str: str, max: int) -> int: + """Return the length of str, considering only characters below max. +""" +def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: + """Return a pair (index, pos), indicating the next occurrence of +char in str. index is the position of the character considering +only ordinals up to and including char, and pos is the position in +the full string. index/pos is the starting position in the full +string. +""" +def insertion_unsort(str: str, extended: list[int]) -> list[int]: + """3.2 Insertion unsort coding +""" def T(j: int, bias: int) -> int: ... digits: Literal[b"abcdefghijklmnopqrstuvwxyz0123456789"] -def generate_generalized_integer(N: int, bias: int) -> bytes: ... +def generate_generalized_integer(N: int, bias: int) -> bytes: + """3.3 Generalized variable-length integers +""" def adapt(delta: int, first: bool, numchars: int) -> int: ... -def generate_integers(baselen: int, deltas: list[int]) -> bytes: ... +def generate_integers(baselen: int, deltas: list[int]) -> bytes: + """3.4 Bias adaptation +""" def punycode_encode(text: str) -> bytes: ... -def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: ... -def insertion_sort(base: str, extended: bytes, errors: str) -> str: ... +def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: + """3.3 Generalized variable-length integers +""" +def insertion_sort(base: str, extended: bytes, errors: str) -> str: + """3.2 Insertion sort coding +""" def punycode_decode(text: memoryview | bytes | bytearray | str, errors: str) -> str: ... class Codec(codecs.Codec): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi index e9deadd8d463c..015db644f9b15 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi @@ -1,3 +1,7 @@ +"""Codec for quoted-printable encoding. + +This codec de/encodes from bytes to bytes. 
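The punycode helpers stubbed above (segregate, insertion_unsort, the generalized-integer functions) back the registered "punycode" codec and are rarely called directly. A minimal sketch through the codec registry, with a sample string of my own:

text = "bücher"
encoded = text.encode("punycode")          # delegates to punycode_encode(); b"bcher-kva" per RFC 3492
assert encoded.decode("punycode") == text  # punycode_decode() reverses it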
+""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi index 2887739468f24..2991147946c35 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi @@ -1,3 +1,11 @@ +"""Python 'raw-unicode-escape' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi index 8d71bc9575949..70d1a9cf93e47 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi @@ -1,3 +1,9 @@ +"""Python Character Mapping Codec for ROT13. + +This codec de/encodes from str to str. + +Written by Marc-Andre Lemburg (mal@lemburg.com). +""" import codecs from _typeshed import SupportsRead, SupportsWrite diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi index f62195662ce96..4c1354f8cb7c0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi @@ -1,3 +1,6 @@ +"""Python Character Mapping Codec tis_620 generated from 'python-mappings/TIS-620.TXT' with gencodec.py. + +""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi index 4775dac752f28..97ae3c0831e75 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi @@ -1,3 +1,14 @@ +"""Python 'undefined' Codec + + This codec will always raise a UnicodeError exception when being + used. It is intended for use by the site.py file to switch off + automatic string to Unicode coercion. + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi index ceaa39a3859ae..4e93b926ba4e4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi @@ -1,3 +1,11 @@ +"""Python 'unicode-escape' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi index 3b712cde420ae..c5e02c77f31d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi @@ -1,3 +1,11 @@ +"""Python 'utf-16' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi index cc7d1534fc69b..ce5c1cc66a0bd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi @@ -1,3 +1,11 @@ +"""Python 'utf-16-be' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi index ba103eb088e3c..35b65f06bfe25 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi @@ -1,3 +1,11 @@ +"""Python 'utf-16-le' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. + +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi index c925be712c728..48c33cfee86e7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi @@ -1,3 +1,6 @@ +""" +Python 'utf-32' Codec +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi index 9d28f5199c501..fc525a20df6e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi @@ -1,3 +1,6 @@ +""" +Python 'utf-32-be' Codec +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi index 5be14a91a3e6c..4e2e4df224fea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi @@ -1,3 +1,6 @@ +""" +Python 'utf-32-le' Codec +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi index dc1162f34c287..d2b4072637814 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi @@ -1,3 +1,7 @@ +"""Python 'utf-7' Codec + +Written by Brian Quinlan (brian@sweetapp.com). +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi index 918712d804730..3862d0eab3b97 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi @@ -1,3 +1,11 @@ +"""Python 'utf-8' Codec + + +Written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
+ +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi index af69217d67321..2a7063a200da1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -1,3 +1,12 @@ +"""Python 'utf-8-sig' Codec +This work similar to UTF-8 with the following changes: + +* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the + first three bytes. + +* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these + bytes will be skipped. +""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi index e32ba8ac0a1a7..0c127094b6859 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi @@ -1,3 +1,11 @@ +"""Python 'uu_codec' Codec - UU content transfer encoding. + +This codec de/encodes from bytes to bytes. + +Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were +adapted from uu.py which was written by Lance Ellinghouse and +modified by Jack Jansen and Fredrik Lundh. +""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi index 0f13d0e810e91..25a18bc939ecb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi @@ -1,3 +1,9 @@ +"""Python 'zlib_codec' Codec - zlib compression encoding. + +This codec de/encodes from bytes to bytes. + +Written by Marc-Andre Lemburg (mal@lemburg.com). +""" import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi index 332fb1845917d..aca7c1a5200f4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi @@ -1,6 +1,9 @@ __all__ = ["version", "bootstrap"] -def version() -> str: ... +def version() -> str: + """ +Returns a string specifying the bundled version of pip. +""" def bootstrap( *, root: str | None = None, @@ -9,4 +12,10 @@ def bootstrap( altinstall: bool = False, default_pip: bool = False, verbosity: int = 0, -) -> None: ... +) -> None: + """ +Bootstrap pip into the current Python installation (or the given root +directory). + +Note that calling this function will alter both sys.path and os.environ. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi index 4ac860f5e611d..1234838b6810a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi @@ -57,20 +57,40 @@ _Signature: TypeAlias = Any # TODO: Unable to import Signature from inspect mod if sys.version_info >= (3, 11): class nonmember(Generic[_EnumMemberT]): + """ +Protects item from becoming an Enum member during class creation. +""" value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... 
class member(Generic[_EnumMemberT]): + """ +Forces item to become an Enum member during class creation. +""" value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... class _EnumDict(dict[str, Any]): + """ +Track enum member order and ensure member names are not reused. + +EnumType will use the names found in self._member_names as the +enumeration member names. +""" if sys.version_info >= (3, 13): def __init__(self, cls_name: str | None = None) -> None: ... else: def __init__(self) -> None: ... - def __setitem__(self, key: str, value: Any) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: + """ +Changes anything not dundered or not a descriptor. + +If an enum member name is used twice, an error is raised; duplicate +values are not checked for. + +Single underscore (sunder) names are reserved. +""" if sys.version_info >= (3, 11): # See comment above `typing.MutableMapping.update` # for why overloads are preferable to a Union here @@ -90,6 +110,9 @@ if sys.version_info >= (3, 13): # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(type): + """ +Metaclass for Enum +""" if sys.version_info >= (3, 11): def __new__( metacls: type[_typeshed.Self], @@ -108,27 +131,88 @@ class EnumMeta(type): @classmethod def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] - def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... - def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: + """ +Return members in definition order. +""" + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: + """ +Return members in reverse definition order. +""" if sys.version_info >= (3, 12): - def __contains__(self: type[Any], value: object) -> bool: ... + def __contains__(self: type[Any], value: object) -> bool: + """Return True if `value` is in `cls`. + +`value` is in `cls` if: +1) `value` is a member of `cls`, or +2) `value` is the value of one of the `cls`'s members. +3) `value` is a pseudo-member (flags) +""" elif sys.version_info >= (3, 11): - def __contains__(self: type[Any], member: object) -> bool: ... + def __contains__(self: type[Any], member: object) -> bool: + """ + Return True if member is a member of this enum + raises TypeError if member is not an enum member + + note: in 3.12 TypeError will no longer be raised, and True will also be + returned if member is the value of a member in this enum + """ elif sys.version_info >= (3, 10): def __contains__(self: type[Any], obj: object) -> bool: ... else: def __contains__(self: type[Any], member: object) -> bool: ... - def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: + """ +Return the member matching `name`. +""" @_builtins_property - def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... - def __len__(self) -> int: ... - def __bool__(self) -> Literal[True]: ... + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: + """ +Returns a mapping of member name->value. + +This mapping lists all enum members, including aliases. Note that this +is a read-only view of the internal mapping. +""" + def __len__(self) -> int: + """ +Return the number of members (no aliases) +""" + def __bool__(self) -> Literal[True]: + """ +classes/types should always be True. 
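A small sketch tying the EnumMeta docstrings above together (iteration order, name lookup, __members__ vs. __len__); Color is an illustrative enum of my own:

from enum import Enum

class Color(Enum):
    RED = 1
    GREEN = 2
    CRIMSON = 1                     # alias of RED

assert list(Color) == [Color.RED, Color.GREEN]        # __iter__: definition order, aliases excluded
assert Color["RED"] is Color.RED                      # __getitem__ by member name
assert Color.RED in Color                             # __contains__
assert len(Color) == 2 and bool(Color) is True        # __len__ ignores aliases; classes are truthy
assert set(Color.__members__) == {"RED", "GREEN", "CRIMSON"}   # read-only mapping, aliases included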
+""" def __dir__(self) -> list[str]: ... # Overload 1: Value lookup on an already existing enum class (simple case) @overload - def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: + """ +Either returns an existing member, or creates a new enum class. + +This method is used both when an enum class is given a value to match +to an enumeration member (i.e. Color(3)) and for the functional API +(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + +The value lookup branch is chosen if the enum is final. + +When used for the functional API: + +`value` will be the name of the new class. + +`names` should be either a string of white-space/comma delimited names +(values will start at `start`), or an iterator/mapping of name, value pairs. + +`module` should be set to the module this class is being created in; +if it is not set, an attempt to find that module will be made, but if +it fails the class will not be picklable. + +`qualname` should be set to the actual location this class can be found +at in its module; by default it is set to the global scope. If this is +not correct, unpickling will fail in some circumstances. + +`type`, if set, will be mixed in as the first base class. +""" # Overload 2: Functional API for constructing new enum classes. if sys.version_info >= (3, 11): @@ -143,7 +227,33 @@ class EnumMeta(type): type: type | None = None, start: int = 1, boundary: FlagBoundary | None = None, - ) -> type[Enum]: ... + ) -> type[Enum]: + """ +Either returns an existing member, or creates a new enum class. + +This method is used both when an enum class is given a value to match +to an enumeration member (i.e. Color(3)) and for the functional API +(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + +The value lookup branch is chosen if the enum is final. + +When used for the functional API: + +`value` will be the name of the new class. + +`names` should be either a string of white-space/comma delimited names +(values will start at `start`), or an iterator/mapping of name, value pairs. + +`module` should be set to the module this class is being created in; +if it is not set, an attempt to find that module will be made, but if +it fails the class will not be picklable. + +`qualname` should be set to the actual location this class can be found +at in its module; by default it is set to the global scope. If this is +not correct, unpickling will fail in some circumstances. + +`type`, if set, will be mixed in as the first base class. +""" else: @overload def __call__( @@ -155,7 +265,31 @@ class EnumMeta(type): qualname: str | None = None, type: type | None = None, start: int = 1, - ) -> type[Enum]: ... + ) -> type[Enum]: + """ + Either returns an existing member, or creates a new enum class. + + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='RED GREEN BLUE')). + + When used for the functional API: + + `value` will be the name of the new class. + + `names` should be either a string of white-space/comma delimited names + (values will start at `start`), or an iterator/mapping of name, value pairs. + + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. 
+ + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. + + `type`, if set, will be mixed in as the first base class. + """ # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) # @@ -166,7 +300,33 @@ class EnumMeta(type): # if sys.version_info >= (3, 12): @overload - def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: ... + def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: + """ +Either returns an existing member, or creates a new enum class. + +This method is used both when an enum class is given a value to match +to an enumeration member (i.e. Color(3)) and for the functional API +(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + +The value lookup branch is chosen if the enum is final. + +When used for the functional API: + +`value` will be the name of the new class. + +`names` should be either a string of white-space/comma delimited names +(values will start at `start`), or an iterator/mapping of name, value pairs. + +`module` should be set to the module this class is being created in; +if it is not set, an attempt to find that module will be made, but if +it fails the class will not be picklable. + +`qualname` should be set to the actual location this class can be found +at in its module; by default it is set to the global scope. If this is +not correct, unpickling will fail in some circumstances. + +`type`, if set, will be mixed in as the first base class. +""" if sys.version_info >= (3, 14): @property def __signature__(cls) -> _Signature: ... @@ -180,6 +340,13 @@ if sys.version_info >= (3, 11): EnumType = EnumMeta class property(types.DynamicClassAttribute): + """ +This is a descriptor, used to define attributes that act differently +when accessed through an enum member and through an enum class. +Instance access is the same as property(), but access to an attribute +through the enum class will instead look in the class' _member_map_ for +a corresponding enum member. +""" def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... name: str clsname: str @@ -190,10 +357,52 @@ else: _magic_enum_attr = types.DynamicClassAttribute class Enum(metaclass=EnumMeta): + """ +Create a collection of name/value pairs. + +Example enumeration: + +>>> class Color(Enum): +... RED = 1 +... BLUE = 2 +... GREEN = 3 + +Access them by: + +- attribute access: + + >>> Color.RED + + +- value lookup: + + >>> Color(1) + + +- name lookup: + + >>> Color['RED'] + + +Enumerations can be iterated over, and know how many members they have: + +>>> len(Color) +3 + +>>> list(Color) +[, , ] + +Methods can be added to enumerations, and members can have their own +attributes -- see the documentation for details. +""" @_magic_enum_attr - def name(self) -> str: ... + def name(self) -> str: + """The name of the Enum member. +""" @_magic_enum_attr - def value(self) -> Any: ... + def value(self) -> Any: + """The value of the Enum member. +""" _name_: str _value_: Any _ignore_: str | list[str] @@ -202,16 +411,30 @@ class Enum(metaclass=EnumMeta): @classmethod def _missing_(cls, value: object) -> Any: ... @staticmethod - def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... + def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: + """ +Generate the next value when not given. 
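The functional-API branch of EnumMeta.__call__ described above, in two lines; the Color enum is an example of my own:

from enum import Enum

Color = Enum("Color", "RED GREEN BLUE")   # names from a whitespace-delimited string, values start at 1
assert Color(1) is Color.RED              # the other branch: value lookup on the finished class
assert Color["BLUE"].value == 3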
+ +name: the name of the member +start: the initial start value or None +count: the number of existing members +last_values: the list of values assigned +""" # It's not true that `__new__` will accept any argument type, # so ideally we'd use `Any` to indicate that the argument type is inexpressible. # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` # (see #7752, #2539, mypy/#5788), # and in practice using `object` here has the same effect as using `Any`. def __new__(cls, value: object) -> Self: ... - def __dir__(self) -> list[str]: ... + def __dir__(self) -> list[str]: + """ +Returns public methods and other interesting attributes. +""" def __hash__(self) -> int: ... - def __format__(self, format_spec: str) -> str: ... + def __format__(self, format_spec: str) -> str: + """ + Returns format using actual value type unless __str__ has been overridden. + """ def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... @@ -226,13 +449,21 @@ class Enum(metaclass=EnumMeta): def _add_alias_(self, name: str) -> None: ... if sys.version_info >= (3, 11): - class ReprEnum(Enum): ... + class ReprEnum(Enum): + """ +Only changes the repr(), leaving str() and format() to the mixed-in type. +""" if sys.version_info >= (3, 12): class IntEnum(int, ReprEnum): + """ +Enum where members are also (and must be) ints +""" _value_: int @_magic_enum_attr - def value(self) -> int: ... + def value(self) -> int: + """The value of the Enum member. +""" def __new__(cls, value: int) -> Self: ... else: @@ -243,30 +474,51 @@ else: @disjoint_base class IntEnum(int, _IntEnumBase): + """ + Enum where members are also (and must be) ints + """ _value_: int @_magic_enum_attr - def value(self) -> int: ... + def value(self) -> int: + """The value of the Enum member. +""" def __new__(cls, value: int) -> Self: ... -def unique(enumeration: _EnumerationT) -> _EnumerationT: ... +def unique(enumeration: _EnumerationT) -> _EnumerationT: + """ +Class decorator for enumerations ensuring unique member values. +""" _auto_null: Any class Flag(Enum): + """ +Support for flags +""" _name_: str | None # type: ignore[assignment] _value_: int @_magic_enum_attr - def name(self) -> str | None: ... # type: ignore[override] + def name(self) -> str | None: # type: ignore[override] + """The name of the Enum member. +""" @_magic_enum_attr - def value(self) -> int: ... - def __contains__(self, other: Self) -> bool: ... + def value(self) -> int: + """The value of the Enum member. +""" + def __contains__(self, other: Self) -> bool: + """ +Returns True if self has at least the same flags set as other. +""" def __bool__(self) -> bool: ... def __or__(self, other: Self) -> Self: ... def __and__(self, other: Self) -> Self: ... def __xor__(self, other: Self) -> Self: ... def __invert__(self) -> Self: ... if sys.version_info >= (3, 11): - def __iter__(self) -> Iterator[Self]: ... + def __iter__(self) -> Iterator[Self]: + """ +Returns flags in definition order. +""" def __len__(self) -> int: ... __ror__ = __or__ __rand__ = __and__ @@ -274,14 +526,25 @@ class Flag(Enum): if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): + """ +Enum where members are also (and must be) strings +""" def __new__(cls, value: str) -> Self: ... _value_: str @_magic_enum_attr - def value(self) -> str: ... + def value(self) -> str: + """The value of the Enum member. 
+""" @staticmethod - def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ... + def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: + """ +Return the lower-cased version of the member name. +""" class EnumCheck(StrEnum): + """ +various conditions to check an enumeration for +""" CONTINUOUS = "no skipped integer values" NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags" UNIQUE = "one name per value" @@ -291,10 +554,20 @@ if sys.version_info >= (3, 11): UNIQUE: Final = EnumCheck.UNIQUE class verify: + """ +Check an enumeration for various constraints. (see EnumCheck) +""" def __init__(self, *checks: EnumCheck) -> None: ... def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... class FlagBoundary(StrEnum): + """ +control how out of range values are handled +"strict" -> error is raised [default for Flag] +"conform" -> extra bits are discarded +"eject" -> lose flag status +"keep" -> keep flag status and all bits [default for IntFlag] +""" STRICT = "strict" CONFORM = "conform" EJECT = "eject" @@ -305,14 +578,35 @@ if sys.version_info >= (3, 11): EJECT: Final = FlagBoundary.EJECT KEEP: Final = FlagBoundary.KEEP - def global_str(self: Enum) -> str: ... - def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... - def global_enum_repr(self: Enum) -> str: ... - def global_flag_repr(self: Flag) -> str: ... + def global_str(self: Enum) -> str: + """ +use enum_name instead of class.enum_name +""" + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: + """ +decorator that makes the repr() of an enum member reference its module +instead of its class; also exports all members to the enum's module's +global namespace +""" + def global_enum_repr(self: Enum) -> str: + """ +use module.enum_name instead of class.enum_name + +the module is the last module in case of a multi-module name +""" + def global_flag_repr(self: Flag) -> str: + """ +use module.flag_name instead of class.flag_name + +the module is the last module in case of a multi-module name +""" if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + """ +Support for integer-based Flags +""" def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -326,6 +620,9 @@ elif sys.version_info >= (3, 11): # The body of the class is the same, but the base classes are different. @disjoint_base class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + """ + Support for integer-based Flags + """ def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -338,6 +635,9 @@ elif sys.version_info >= (3, 11): else: @disjoint_base class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + """ + Support for integer-based Flags + """ def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -348,6 +648,9 @@ else: __rxor__ = __xor__ class auto: + """ +Instances are replaced with an appropriate value in Enum class suites. +""" _value_: Any @_magic_enum_attr def value(self) -> Any: ... 
@@ -358,7 +661,9 @@ class auto: # shouldn't have these, but they're needed for int versions of auto (mostly the __or__). # Ideally type checkers would special case auto enough to handle this, # but until then this is a slightly inaccurate helping hand. - def __or__(self, other: int | Self) -> Self: ... + def __or__(self, other: int | Self) -> Self: + """Return self|value. +""" def __and__(self, other: int | Self) -> Self: ... def __xor__(self, other: int | Self) -> Self: ... __ror__ = __or__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi index e025e1fd13b9b..2fc54d52889f1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi @@ -1,3 +1,16 @@ +"""This module makes available standard errno system symbols. + +The value of each symbol is the corresponding integer value, +e.g., on most systems, errno.ENOENT equals the integer 2. + +The dictionary errno.errorcode maps numeric codes to symbol names, +e.g., errno.errorcode[2] could be the string 'ENOENT'. + +Symbols that are not relevant to the underlying system are not defined. + +To map error codes to error messages, use the function os.strerror(), +e.g. os.strerror(2) could return 'No such file or directory'. +""" import sys from collections.abc import Mapping from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi index 33d08995eb759..a4b630709ac4d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi @@ -1,23 +1,46 @@ +"""faulthandler module. +""" import sys from _typeshed import FileDescriptorLike -def cancel_dump_traceback_later() -> None: ... -def disable() -> None: ... -def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... +def cancel_dump_traceback_later() -> None: + """Cancel the previous call to dump_traceback_later(). +""" +def disable() -> None: + """Disable the fault handler. +""" +def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: + """Dump the traceback of the current thread, or of all threads if all_threads is True, into file. +""" if sys.version_info >= (3, 14): - def dump_c_stack(file: FileDescriptorLike = ...) -> None: ... + def dump_c_stack(file: FileDescriptorLike = ...) -> None: + """Dump the C stack of the current thread. +""" -def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: ... +def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: + """Dump the traceback of all threads in timeout seconds, +or each timeout seconds if repeat is True. If exit is True, call _exit(1) which is not safe. +""" if sys.version_info >= (3, 14): - def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: ... + def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: + """Enable the fault handler. +""" else: - def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... + def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: + """Enable the fault handler. +""" -def is_enabled() -> bool: ... +def is_enabled() -> bool: + """Check if the handler is enabled. 
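Typical use of the two modules above, following their docstrings; writing to sys.stderr is just the conventional choice:

import errno, faulthandler, os, sys

assert errno.errorcode[errno.ENOENT] == "ENOENT"
print(os.strerror(errno.ENOENT))                     # e.g. 'No such file or directory'

faulthandler.enable(file=sys.stderr, all_threads=True)
assert faulthandler.is_enabled()
faulthandler.dump_traceback(file=sys.stderr, all_threads=False)
faulthandler.disable()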
+""" if sys.platform != "win32": - def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: ... - def unregister(signum: int, /) -> None: ... + def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: + """Register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file. +""" + def unregister(signum: int, /) -> None: + """Unregister the handler of the signal 'signum' registered by register(). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi index 5a3e89b0c6766..714dd0257a336 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi @@ -1,3 +1,8 @@ +"""This module performs file control and I/O control on file +descriptors. It is an interface to the fcntl() and ioctl() Unix +routines. File descriptors can be obtained with the fileno() method of +a file or socket object. +""" import sys from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer from typing import Any, Final, Literal, overload @@ -137,12 +142,53 @@ if sys.platform != "win32": F_RDAHEAD: Final[int] @overload - def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: ... + def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: + """Perform the operation `cmd` on file descriptor fd. + +The values used for `cmd` are operating system dependent, and are available +as constants in the fcntl module, using the same names as used in +the relevant C header files. The argument arg is optional, and +defaults to 0; it may be an int or a string. If arg is given as a string, +the return value of fcntl is a string of that length, containing the +resulting value put in the arg buffer by the operating system. The length +of the arg string is not allowed to exceed 1024 bytes. If the arg given +is an integer or if none is specified, the result value is an integer +corresponding to the return value of the fcntl call in the C code. +""" @overload def fcntl(fd: FileDescriptorLike, cmd: int, arg: str | ReadOnlyBuffer, /) -> bytes: ... # If arg is an int, return int @overload - def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: ... + def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: + """Perform the operation `request` on file descriptor `fd`. + +The values used for `request` are operating system dependent, and are available +as constants in the fcntl or termios library modules, using the same names as +used in the relevant C header files. + +The argument `arg` is optional, and defaults to 0; it may be an int or a +buffer containing character data (most likely a string or an array). + +If the argument is a mutable buffer (such as an array) and if the +mutate_flag argument (which is only allowed in this case) is true then the +buffer is (in effect) passed to the operating system and changes made by +the OS will be reflected in the contents of the buffer after the call has +returned. The return value is the integer returned by the ioctl system +call. + +If the argument is a mutable buffer and the mutable_flag argument is false, +the behavior is as if a string had been passed. 
+ +If the argument is an immutable buffer (most likely a string) then a copy +of the buffer is passed to the operating system and the return value is a +string of the same length containing whatever the operating system put in +the buffer. The length of the arg buffer in this case is not allowed to +exceed 1024 bytes. + +If the arg given is an integer or if none is specified, the result value is +an integer corresponding to the return value of the ioctl call in the C +code. +""" # The return type works as follows: # - If arg is a read-write buffer, return int if mutate_flag is True, otherwise bytes # - If arg is a read-only buffer, return bytes (and ignore the value of mutate_flag) @@ -154,5 +200,33 @@ if sys.platform != "win32": def ioctl(fd: FileDescriptorLike, request: int, arg: WriteableBuffer, mutate_flag: Literal[False], /) -> bytes: ... @overload def ioctl(fd: FileDescriptorLike, request: int, arg: Buffer, mutate_flag: bool = True, /) -> Any: ... - def flock(fd: FileDescriptorLike, operation: int, /) -> None: ... - def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: ... + def flock(fd: FileDescriptorLike, operation: int, /) -> None: + """Perform the lock operation `operation` on file descriptor `fd`. + +See the Unix manual page for flock(2) for details (On some systems, this +function is emulated using fcntl()). +""" + def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: + """A wrapper around the fcntl() locking calls. + +`fd` is the file descriptor of the file to lock or unlock, and operation is one +of the following values: + + LOCK_UN - unlock + LOCK_SH - acquire a shared lock + LOCK_EX - acquire an exclusive lock + +When operation is LOCK_SH or LOCK_EX, it can also be bitwise ORed with +LOCK_NB to avoid blocking on lock acquisition. If LOCK_NB is used and the +lock cannot be acquired, an OSError will be raised and the exception will +have an errno attribute set to EACCES or EAGAIN (depending on the operating +system -- for portability, check for either value). + +`len` is the number of bytes to lock, with the default meaning to lock to +EOF. `start` is the byte offset, relative to `whence`, to that the lock +starts. `whence` is as with fileobj.seek(), specifically: + + 0 - relative to the start of the file (SEEK_SET) + 1 - relative to the current buffer position (SEEK_CUR) + 2 - relative to the end of the file (SEEK_END) +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi index 620cc177a415a..d0ab8bfeda6ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi @@ -1,3 +1,14 @@ +"""Utilities for comparing files and directories. + +Classes: + dircmp + +Functions: + cmp(f1, f2, shallow=True) -> int + cmpfiles(a, b, common) -> ([], [], []) + clear_cache() + +""" import sys from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence @@ -9,12 +20,82 @@ __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: Final[list[str]] BUFSIZE: Final = 8192 -def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: + """Compare two files. 
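How the filecmp entry points above are usually combined; the directory names and file list are hypothetical:

import filecmp

same = filecmp.cmp("a/config.ini", "b/config.ini", shallow=False)   # compare contents, not just stat()
equal, different, funny = filecmp.cmpfiles("a", "b", ["config.ini", "data.bin"])
filecmp.clear_cache()                                               # drop the memoised comparison results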
+ +Arguments: + +f1 -- First file name + +f2 -- Second file name + +shallow -- treat files as identical if their stat signatures (type, size, + mtime) are identical. Otherwise, files are considered different + if their sizes or contents differ. [default: True] + +Return value: + +True if the files are the same, False otherwise. + +This function uses a cache for past comparisons and the results, +with cache entries invalidated if their stat information +changes. The cache may be cleared by calling clear_cache(). + +""" def cmpfiles( a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True -) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... +) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: + """Compare common files in two directories. + +a, b -- directory names +common -- list of file names found in both directories +shallow -- if true, do comparison based solely on stat() information + +Returns a tuple of three lists: + files that compare equal + files that are different + filenames that aren't regular files. + +""" class dircmp(Generic[AnyStr]): + """A class that manages the comparison of 2 directories. + +dircmp(a, b, ignore=None, hide=None, *, shallow=True) + A and B are directories. + IGNORE is a list of names to ignore, + defaults to DEFAULT_IGNORES. + HIDE is a list of names to hide, + defaults to [os.curdir, os.pardir]. + SHALLOW specifies whether to just check the stat signature (do not read + the files). + defaults to True. + +High level usage: + x = dircmp(dir1, dir2) + x.report() -> prints a report on the differences between dir1 and dir2 + or + x.report_partial_closure() -> prints report on differences between dir1 + and dir2, and reports on common immediate subdirectories. + x.report_full_closure() -> like report_partial_closure, + but fully recursive. + +Attributes: + left_list, right_list: The files in dir1 and dir2, + filtered by hide and ignore. + common: a list of names in both dir1 and dir2. + left_only, right_only: names only in dir1, dir2. + common_dirs: subdirectories in both dir1 and dir2. + common_files: files in both dir1 and dir2. + common_funny: names in both dir1 and dir2 where the type differs between + dir1 and dir2, or the name is not stat-able. + same_files: list of identical files. + diff_files: list of filenames which differ. + funny_files: list of files which could not be compared. + subdirs: a dictionary of dircmp instances (or MyDirCmp instances if this + object is of type MyDirCmp, a subclass of dircmp), keyed by names + in common_dirs. + """ if sys.version_info >= (3, 13): def __init__( self, @@ -60,6 +141,12 @@ class dircmp(Generic[AnyStr]): def phase3(self) -> None: ... def phase4(self) -> None: ... def phase4_closure(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" -def clear_cache() -> None: ... +def clear_cache() -> None: + """Clear the filecmp cache. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi index 910d638142751..6a689ed6b3a1c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi @@ -1,3 +1,69 @@ +"""Helper class to quickly write a loop over all standard input files. 
+ +Typical use is: + + import fileinput + for line in fileinput.input(encoding="utf-8"): + process(line) + +This iterates over the lines of all files listed in sys.argv[1:], +defaulting to sys.stdin if the list is empty. If a filename is '-' it +is also replaced by sys.stdin and the optional arguments mode and +openhook are ignored. To specify an alternative list of filenames, +pass it as the argument to input(). A single file name is also allowed. + +Functions filename(), lineno() return the filename and cumulative line +number of the line that has just been read; filelineno() returns its +line number in the current file; isfirstline() returns true iff the +line just read is the first line of its file; isstdin() returns true +iff the line was read from sys.stdin. Function nextfile() closes the +current file so that the next iteration will read the first line from +the next file (if any); lines not read from the file will not count +towards the cumulative line count; the filename is not changed until +after the first line of the next file has been read. Function close() +closes the sequence. + +Before any lines have been read, filename() returns None and both line +numbers are zero; nextfile() has no effect. After all lines have been +read, filename() and the line number functions return the values +pertaining to the last line read; nextfile() has no effect. + +All files are opened in text mode by default, you can override this by +setting the mode parameter to input() or FileInput.__init__(). +If an I/O error occurs during opening or reading a file, the OSError +exception is raised. + +If sys.stdin is used more than once, the second and further use will +return no lines, except perhaps for interactive use, or if it has been +explicitly reset (e.g. using sys.stdin.seek(0)). + +Empty files are opened and immediately closed; the only time their +presence in the list of filenames is noticeable at all is when the +last file opened is empty. + +It is possible that the last line of a file doesn't end in a newline +character; otherwise lines are returned including the trailing +newline. + +Class FileInput is the implementation; its methods filename(), +lineno(), fileline(), isfirstline(), isstdin(), nextfile() and close() +correspond to the functions in the module. In addition it has a +readline() method which returns the next input line, and a +__getitem__() method which implements the sequence behavior. The +sequence must be accessed in strictly sequential order; sequence +access and readline() cannot be mixed. + +Optional in-place filtering: if the keyword argument inplace=True is +passed to input() or to the FileInput constructor, the file is moved +to a backup file and standard output is directed to the input file. +This makes it possible to write a filter that rewrites its input file +in place. If the keyword argument backup="." is also +given, it specifies the extension for the backup file, and the backup +file remains around; by default, the extension is ".bak" and it is +deleted when the output file is closed. In-place filtering is +disabled when standard input is read. XXX The current implementation +does not work for MS-DOS 8+3 filesystems. +""" import sys from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable @@ -42,7 +108,13 @@ if sys.version_info >= (3, 10): openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, encoding: str | None = None, errors: str | None = None, - ) -> FileInput[str]: ... 
+ ) -> FileInput[str]: + """Return an instance of the FileInput class, which can be iterated. + +The parameters are passed to the constructor of the FileInput class. +The returned instance, in addition to being an iterator, +keeps global state for the functions of this module,. +""" @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -76,7 +148,13 @@ else: *, mode: _TextMode = "r", openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, - ) -> FileInput[str]: ... + ) -> FileInput[str]: + """Return an instance of the FileInput class, which can be iterated. + + The parameters are passed to the constructor of the FileInput class. + The returned instance, in addition to being an iterator, + keeps global state for the functions of this module,. + """ @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -96,16 +174,64 @@ else: openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... -def close() -> None: ... -def nextfile() -> None: ... -def filename() -> str: ... -def lineno() -> int: ... -def filelineno() -> int: ... -def fileno() -> int: ... -def isfirstline() -> bool: ... -def isstdin() -> bool: ... +def close() -> None: + """Close the sequence. +""" +def nextfile() -> None: + """ +Close the current file so that the next iteration will read the first +line from the next file (if any); lines not read from the file will +not count towards the cumulative line count. The filename is not +changed until after the first line of the next file has been read. +Before the first line has been read, this function has no effect; +it cannot be used to skip the first file. After the last line of the +last file has been read, this function has no effect. +""" +def filename() -> str: + """ +Return the name of the file currently being read. +Before the first line has been read, returns None. +""" +def lineno() -> int: + """ +Return the cumulative line number of the line that has just been read. +Before the first line has been read, returns 0. After the last line +of the last file has been read, returns the line number of that line. +""" +def filelineno() -> int: + """ +Return the line number in the current file. Before the first line +has been read, returns 0. After the last line of the last file has +been read, returns the line number of that line within the file. +""" +def fileno() -> int: + """ +Return the file number of the current file. When no file is currently +opened, returns -1. +""" +def isfirstline() -> bool: + """ +Returns true the line just read is the first line of its file, +otherwise returns false. +""" +def isstdin() -> bool: + """ +Returns true if the last line was read from sys.stdin, +otherwise returns false. +""" class FileInput(Generic[AnyStr]): + """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None) + +Class FileInput is the implementation of the module; its methods +filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(), +nextfile() and close() correspond to the functions of the same name +in the module. +In addition it has a readline() method which returns the next +input line, and a __getitem__() method which implements the +sequence behavior. The sequence must be accessed in strictly +sequential order; random access and readline() cannot be mixed. 
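The "typical use" loop from the module docstring, expanded slightly to show the per-line bookkeeping functions; filenames come from sys.argv[1:] or stdin:

import fileinput

for line in fileinput.input(encoding="utf-8"):
    if fileinput.isfirstline():
        print(f"--- {fileinput.filename()} ---")
    print(fileinput.lineno(), line.rstrip())    # cumulative line number across all files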
+""" if sys.version_info >= (3, 10): # encoding and errors are added @overload @@ -197,7 +323,11 @@ class FileInput(Generic[AnyStr]): def fileno(self) -> int: ... def isfirstline(self) -> bool: ... def isstdin(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" if sys.version_info >= (3, 10): def hook_compressed( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi index 345c4576497de..7d17b35450b5f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi @@ -1,3 +1,14 @@ +"""Filename matching with shell patterns. + +fnmatch(FILENAME, PATTERN) matches according to the local convention. +fnmatchcase(FILENAME, PATTERN) always takes case in account. + +The functions operate by translating the pattern into a regular +expression. They cache the compiled regular expressions for speed. + +The function translate(PATTERN) returns a regular expression +corresponding to PATTERN. (It does not compile it.) +""" import sys from collections.abc import Iterable from typing import AnyStr @@ -6,10 +17,37 @@ __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] if sys.version_info >= (3, 14): __all__ += ["filterfalse"] -def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... -def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... -def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... -def translate(pat: str) -> str: ... +def fnmatch(name: AnyStr, pat: AnyStr) -> bool: + """Test whether FILENAME matches PATTERN. + +Patterns are Unix shell style: + +* matches everything +? matches any single character +[seq] matches any character in seq +[!seq] matches any char not in seq + +An initial period in FILENAME is not special. +Both FILENAME and PATTERN are first case-normalized +if the operating system requires it. +If you don't want this, use fnmatchcase(FILENAME, PATTERN). +""" +def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: + """Test whether FILENAME matches PATTERN, including case. + +This is a version of fnmatch() which doesn't case-normalize +its arguments. +""" +def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: + """Construct a list from those elements of the iterable NAMES that match PAT. +""" +def translate(pat: str) -> str: + """Translate a shell PATTERN to a regular expression. + +There is no way to quote meta-characters. +""" if sys.version_info >= (3, 14): - def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... + def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: + """Construct a list from those elements of the iterable NAMES that do not match PAT. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi index 05c3c8b3dd41c..a7347732f67a5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi @@ -1,3 +1,22 @@ +"""Generic output formatting. + +Formatter objects transform an abstract flow of formatting events into +specific output events on writer objects. 
Formatters manage several stack +structures to allow various properties of a writer object to be changed and +restored; writers need not be able to handle relative changes nor any sort +of ``change back'' operation. Specific writer properties which may be +controlled via formatter objects are horizontal alignment, font, and left +margin indentations. A mechanism is provided which supports providing +arbitrary, non-exclusive style settings to a writer as well. Additional +interfaces facilitate formatting events which are not reversible, such as +paragraph separation. + +Writer objects encapsulate device interfaces. Abstract devices, such as +file formats, are supported as well as physical devices. The provided +implementations all work with abstract devices. The interface makes +available mechanisms for setting the properties which formatter objects +manage and inserting data into the output. +""" from collections.abc import Iterable from typing import IO, Any from typing_extensions import TypeAlias @@ -7,6 +26,15 @@ _FontType: TypeAlias = tuple[str, bool, bool, bool] _StylesType: TypeAlias = tuple[Any, ...] class NullFormatter: + """A formatter which does nothing. + + If the writer parameter is omitted, a NullWriter instance is created. + No methods of the writer are called by NullFormatter instances. + + Implementations should inherit from this class if implementing a writer + interface but don't need to inherit any implementation. + + """ writer: NullWriter | None def __init__(self, writer: NullWriter | None = None) -> None: ... def end_paragraph(self, blankline: int) -> None: ... @@ -28,6 +56,13 @@ class NullFormatter: def assert_line_data(self, flag: int = 1) -> None: ... class AbstractFormatter: + """The standard formatter. + + This implementation has demonstrated wide applicability to many writers, + and may be used directly in most circumstances. It has been used to + implement a full-featured World Wide Web browser. + + """ writer: NullWriter align: str | None align_stack: list[str | None] @@ -64,6 +99,13 @@ class AbstractFormatter: def assert_line_data(self, flag: int = 1) -> None: ... class NullWriter: + """Minimal writer interface to use in testing & inheritance. + + A writer which only provides the interface definition; no actions are + taken on any methods. This should be the base class for all writers + which do not need to inherit any implementation methods. + + """ def flush(self) -> None: ... def new_alignment(self, align: str | None) -> None: ... def new_font(self, font: _FontType) -> None: ... @@ -77,9 +119,22 @@ class NullWriter: def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... -class AbstractWriter(NullWriter): ... +class AbstractWriter(NullWriter): + """A writer which can be used in debugging formatters, but not much else. + + Each method simply announces itself by printing its name and + arguments on standard output. + + """ class DumbWriter(NullWriter): + """Simple writer class which writes output on the file object passed in + as the file parameter or, if file is omitted, on standard output. The + output is simply word-wrapped to the number of columns specified by + the maxcol parameter. This class is suitable for reflowing a sequence + of paragraphs. + + """ file: IO[str] maxcol: int def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi index ef4066aa65b52..ed2177505f362 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi @@ -1,3 +1,5 @@ +"""Fraction, infinite-precision, rational numbers. +""" import sys from collections.abc import Callable from decimal import Decimal @@ -14,154 +16,345 @@ class _ConvertibleToIntegerRatio(Protocol): def as_integer_ratio(self) -> tuple[int | Rational, int | Rational]: ... class Fraction(Rational): + """This class implements rational numbers. + +In the two-argument form of the constructor, Fraction(8, 6) will +produce a rational number equivalent to 4/3. Both arguments must +be Rational. The numerator defaults to 0 and the denominator +defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. + +Fractions can also be constructed from: + + - numeric strings similar to those accepted by the + float constructor (for example, '-2.3' or '1e10') + + - strings of the form '123/456' + + - float and Decimal instances + + - other Rational instances (including integers) + +""" __slots__ = ("_numerator", "_denominator") @overload - def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: ... + def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: + """Constructs a Rational. + +Takes a string like '3/2' or '1.5', another Rational instance, a +numerator/denominator pair, or a float. + +Examples +-------- + +>>> Fraction(10, -8) +Fraction(-5, 4) +>>> Fraction(Fraction(1, 7), 5) +Fraction(1, 35) +>>> Fraction(Fraction(1, 7), Fraction(2, 3)) +Fraction(3, 14) +>>> Fraction('314') +Fraction(314, 1) +>>> Fraction('-35/4') +Fraction(-35, 4) +>>> Fraction('3.1415') # conversion from numeric string +Fraction(6283, 2000) +>>> Fraction('-47e-2') # string may include a decimal exponent +Fraction(-47, 100) +>>> Fraction(1.47) # direct construction from float (exact conversion) +Fraction(6620291452234629, 4503599627370496) +>>> Fraction(2.25) +Fraction(9, 4) +>>> Fraction(Decimal('1.47')) +Fraction(147, 100) + +""" @overload def __new__(cls, numerator: float | Decimal | str) -> Self: ... if sys.version_info >= (3, 14): @overload - def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: ... + def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: + """Constructs a Rational. + +Takes a string like '3/2' or '1.5', another Rational instance, a +numerator/denominator pair, or a float. + +Examples +-------- + +>>> Fraction(10, -8) +Fraction(-5, 4) +>>> Fraction(Fraction(1, 7), 5) +Fraction(1, 35) +>>> Fraction(Fraction(1, 7), Fraction(2, 3)) +Fraction(3, 14) +>>> Fraction('314') +Fraction(314, 1) +>>> Fraction('-35/4') +Fraction(-35, 4) +>>> Fraction('3.1415') # conversion from numeric string +Fraction(6283, 2000) +>>> Fraction('-47e-2') # string may include a decimal exponent +Fraction(-47, 100) +>>> Fraction(1.47) # direct construction from float (exact conversion) +Fraction(6620291452234629, 4503599627370496) +>>> Fraction(2.25) +Fraction(9, 4) +>>> Fraction(Decimal('1.47')) +Fraction(147, 100) + +""" @classmethod - def from_float(cls, f: float) -> Self: ... + def from_float(cls, f: float) -> Self: + """Converts a finite float to a rational number, exactly. + +Beware that Fraction.from_float(0.3) != Fraction(3, 10). + +""" @classmethod - def from_decimal(cls, dec: Decimal) -> Self: ... 
- def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... - def as_integer_ratio(self) -> tuple[int, int]: ... + def from_decimal(cls, dec: Decimal) -> Self: + """Converts a finite Decimal instance to a rational number, exactly. +""" + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: + """Closest Fraction to self with denominator at most max_denominator. + +>>> Fraction('3.141592653589793').limit_denominator(10) +Fraction(22, 7) +>>> Fraction('3.141592653589793').limit_denominator(100) +Fraction(311, 99) +>>> Fraction(4321, 8765).limit_denominator(10000) +Fraction(4321, 8765) + +""" + def as_integer_ratio(self) -> tuple[int, int]: + """Return a pair of integers, whose ratio is equal to the original Fraction. + +The ratio is in lowest terms and has a positive denominator. +""" if sys.version_info >= (3, 12): - def is_integer(self) -> bool: ... + def is_integer(self) -> bool: + """Return True if the Fraction is an integer. +""" @property def numerator(a) -> int: ... @property def denominator(a) -> int: ... @overload - def __add__(a, b: int | Fraction) -> Fraction: ... + def __add__(a, b: int | Fraction) -> Fraction: + """a + b +""" @overload def __add__(a, b: float) -> float: ... @overload def __add__(a, b: complex) -> complex: ... @overload - def __radd__(b, a: int | Fraction) -> Fraction: ... + def __radd__(b, a: int | Fraction) -> Fraction: + """a + b +""" @overload def __radd__(b, a: float) -> float: ... @overload def __radd__(b, a: complex) -> complex: ... @overload - def __sub__(a, b: int | Fraction) -> Fraction: ... + def __sub__(a, b: int | Fraction) -> Fraction: + """a - b +""" @overload def __sub__(a, b: float) -> float: ... @overload def __sub__(a, b: complex) -> complex: ... @overload - def __rsub__(b, a: int | Fraction) -> Fraction: ... + def __rsub__(b, a: int | Fraction) -> Fraction: + """a - b +""" @overload def __rsub__(b, a: float) -> float: ... @overload def __rsub__(b, a: complex) -> complex: ... @overload - def __mul__(a, b: int | Fraction) -> Fraction: ... + def __mul__(a, b: int | Fraction) -> Fraction: + """a * b +""" @overload def __mul__(a, b: float) -> float: ... @overload def __mul__(a, b: complex) -> complex: ... @overload - def __rmul__(b, a: int | Fraction) -> Fraction: ... + def __rmul__(b, a: int | Fraction) -> Fraction: + """a * b +""" @overload def __rmul__(b, a: float) -> float: ... @overload def __rmul__(b, a: complex) -> complex: ... @overload - def __truediv__(a, b: int | Fraction) -> Fraction: ... + def __truediv__(a, b: int | Fraction) -> Fraction: + """a / b +""" @overload def __truediv__(a, b: float) -> float: ... @overload def __truediv__(a, b: complex) -> complex: ... @overload - def __rtruediv__(b, a: int | Fraction) -> Fraction: ... + def __rtruediv__(b, a: int | Fraction) -> Fraction: + """a / b +""" @overload def __rtruediv__(b, a: float) -> float: ... @overload def __rtruediv__(b, a: complex) -> complex: ... @overload - def __floordiv__(a, b: int | Fraction) -> int: ... + def __floordiv__(a, b: int | Fraction) -> int: + """a // b +""" @overload def __floordiv__(a, b: float) -> float: ... @overload - def __rfloordiv__(b, a: int | Fraction) -> int: ... + def __rfloordiv__(b, a: int | Fraction) -> int: + """a // b +""" @overload def __rfloordiv__(b, a: float) -> float: ... @overload - def __mod__(a, b: int | Fraction) -> Fraction: ... + def __mod__(a, b: int | Fraction) -> Fraction: + """a % b +""" @overload def __mod__(a, b: float) -> float: ... 
@overload - def __rmod__(b, a: int | Fraction) -> Fraction: ... + def __rmod__(b, a: int | Fraction) -> Fraction: + """a % b +""" @overload def __rmod__(b, a: float) -> float: ... @overload - def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: + """(a // b, a % b) +""" @overload def __divmod__(a, b: float) -> tuple[float, Fraction]: ... @overload - def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: + """(a // b, a % b) +""" @overload def __rdivmod__(a, b: float) -> tuple[float, Fraction]: ... if sys.version_info >= (3, 14): @overload - def __pow__(a, b: int, modulo: None = None) -> Fraction: ... + def __pow__(a, b: int, modulo: None = None) -> Fraction: + """a ** b + +If b is not an integer, the result will be a float or complex +since roots are generally irrational. If b is an integer, the +result will be rational. + +""" @overload def __pow__(a, b: float | Fraction, modulo: None = None) -> float: ... @overload def __pow__(a, b: complex, modulo: None = None) -> complex: ... else: @overload - def __pow__(a, b: int) -> Fraction: ... + def __pow__(a, b: int) -> Fraction: + """a ** b + +If b is not an integer, the result will be a float or complex +since roots are generally irrational. If b is an integer, the +result will be rational. + +""" @overload def __pow__(a, b: float | Fraction) -> float: ... @overload def __pow__(a, b: complex) -> complex: ... if sys.version_info >= (3, 14): @overload - def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: ... + def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: + """a ** b +""" @overload def __rpow__(b, a: complex, modulo: None = None) -> complex: ... else: @overload - def __rpow__(b, a: float | Fraction) -> float: ... + def __rpow__(b, a: float | Fraction) -> float: + """a ** b +""" @overload def __rpow__(b, a: complex) -> complex: ... - def __pos__(a) -> Fraction: ... - def __neg__(a) -> Fraction: ... - def __abs__(a) -> Fraction: ... - def __trunc__(a) -> int: ... - def __floor__(a) -> int: ... - def __ceil__(a) -> int: ... - @overload - def __round__(self, ndigits: None = None) -> int: ... + def __pos__(a) -> Fraction: + """+a: Coerces a subclass instance to Fraction +""" + def __neg__(a) -> Fraction: + """-a +""" + def __abs__(a) -> Fraction: + """abs(a) +""" + def __trunc__(a) -> int: + """math.trunc(a) +""" + def __floor__(a) -> int: + """math.floor(a) +""" + def __ceil__(a) -> int: + """math.ceil(a) +""" + @overload + def __round__(self, ndigits: None = None) -> int: + """round(self, ndigits) + +Rounds half toward even. +""" @overload def __round__(self, ndigits: int) -> Fraction: ... - def __hash__(self) -> int: ... # type: ignore[override] - def __eq__(a, b: object) -> bool: ... - def __lt__(a, b: _ComparableNum) -> bool: ... - def __gt__(a, b: _ComparableNum) -> bool: ... - def __le__(a, b: _ComparableNum) -> bool: ... - def __ge__(a, b: _ComparableNum) -> bool: ... - def __bool__(a) -> bool: ... + def __hash__(self) -> int: # type: ignore[override] + """hash(self) +""" + def __eq__(a, b: object) -> bool: + """a == b +""" + def __lt__(a, b: _ComparableNum) -> bool: + """a < b +""" + def __gt__(a, b: _ComparableNum) -> bool: + """a > b +""" + def __le__(a, b: _ComparableNum) -> bool: + """a <= b +""" + def __ge__(a, b: _ComparableNum) -> bool: + """a >= b +""" + def __bool__(a) -> bool: + """a != 0 +""" def __copy__(self) -> Self: ... 
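# Illustrative aside, not part of the vendored stub: the constructor and
# limit_denominator() docstrings above already show these exact values,
# collected here as a runnable sketch.
from fractions import Fraction

assert Fraction(10, -8) == Fraction(-5, 4)                 # lowest terms, positive denominator
assert Fraction("3.1415") == Fraction(6283, 2000)          # numeric string
assert Fraction(2.25) == Fraction(9, 4)                    # exact conversion from float
assert Fraction("3.141592653589793").limit_denominator(10) == Fraction(22, 7)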
def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): - def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... + def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: + """int(a) +""" # Not actually defined within fractions.py, but provides more useful # overrides @property - def real(self) -> Fraction: ... + def real(self) -> Fraction: + """Real numbers are their real component. +""" @property - def imag(self) -> Literal[0]: ... - def conjugate(self) -> Fraction: ... + def imag(self) -> Literal[0]: + """Real numbers have no imaginary component. +""" + def conjugate(self) -> Fraction: + """Conjugate is a no-op for Reals. +""" if sys.version_info >= (3, 14): @classmethod - def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: ... + def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: + """Converts a finite real number to a rational number, exactly. + +Beware that Fraction.from_number(0.3) != Fraction(3, 10). + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi index 44bc2165fe0e3..94cde6eac5d15 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi @@ -1,3 +1,32 @@ +"""An FTP client class and some helper functions. + +Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds + +Example: + +>>> from ftplib import FTP +>>> ftp = FTP('ftp.python.org') # connect to host, default port +>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@ +'230 Guest login ok, access restrictions apply.' +>>> ftp.retrlines('LIST') # list directory contents +total 9 +drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . +drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. +drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin +drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc +d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming +drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib +drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub +drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr +-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg +'226 Transfer complete.' +>>> ftp.quit() +'221 Goodbye.' +>>> + +A nice test that reveals some of the network dialogue would be: +python ftplib.py -d localhost -l -p -l +""" import sys from _typeshed import SupportsRead, SupportsReadline from collections.abc import Callable, Iterable, Iterator @@ -24,6 +53,28 @@ class error_proto(Error): ... all_errors: tuple[type[Exception], ...] class FTP: + """An FTP client class. + +To create a connection, call the class using these arguments: + host, user, passwd, acct, timeout, source_address, encoding + +The first four arguments are all strings, and have default value ''. +The parameter ´timeout´ must be numeric and defaults to None if not +passed, meaning that no timeout will be set on any ftp socket(s). +If a timeout is passed, then this is now the default timeout for all ftp +socket operations for this instance. +The last parameter is the encoding of filenames, which defaults to utf-8. + +Then use self.connect() with optional host and port argument. + +To download a file, use ftp.retrlines('RETR ' + filename), +or ftp.retrbinary() with slightly different arguments. +To upload a file, use ftp.storlines() or ftp.storbinary(), +which have an open file as argument (see their definitions +below for details). +The download/upload functions first issue appropriate TYPE +and PORT or PASV commands. 
+""" debugging: int host: str port: int @@ -51,35 +102,117 @@ class FTP: source_address: tuple[str, int] | None = None, *, encoding: str = "utf-8", - ) -> None: ... + ) -> None: + """Initialization method (called by class instantiation). +Initialize host to localhost, port to standard ftp port. +Optional arguments are host (for connect()), +and user, passwd, acct (for login()). +""" def connect( self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None - ) -> str: ... - def getwelcome(self) -> str: ... - def set_debuglevel(self, level: int) -> None: ... - def debug(self, level: int) -> None: ... - def set_pasv(self, val: bool | Literal[0, 1]) -> None: ... + ) -> str: + """Connect to host. Arguments are: +- host: hostname to connect to (string, default previous host) +- port: port to connect to (integer, default previous port) +- timeout: the timeout to set against the ftp socket(s) +- source_address: a 2-tuple (host, port) for the socket to bind + to as its source address before connecting. +""" + def getwelcome(self) -> str: + """Get the welcome message from the server. +(this is read and squirreled away by connect()) +""" + def set_debuglevel(self, level: int) -> None: + """Set the debugging level. +The required argument level means: +0: no debugging output (default) +1: print commands and responses but not body text etc. +2: also print raw lines read and sent before stripping CR/LF +""" + def debug(self, level: int) -> None: + """Set the debugging level. +The required argument level means: +0: no debugging output (default) +1: print commands and responses but not body text etc. +2: also print raw lines read and sent before stripping CR/LF +""" + def set_pasv(self, val: bool | Literal[0, 1]) -> None: + """Use passive or active mode for data transfers. +With a false argument, use the normal PORT mode, +With a true argument, use the PASV command. +""" def sanitize(self, s: str) -> str: ... def putline(self, line: str) -> None: ... def putcmd(self, line: str) -> None: ... def getline(self) -> str: ... def getmultiline(self) -> str: ... def getresp(self) -> str: ... - def voidresp(self) -> str: ... - def abort(self) -> str: ... - def sendcmd(self, cmd: str) -> str: ... - def voidcmd(self, cmd: str) -> str: ... - def sendport(self, host: str, port: int) -> str: ... - def sendeprt(self, host: str, port: int) -> str: ... - def makeport(self) -> socket: ... - def makepasv(self) -> tuple[str, int]: ... - def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... + def voidresp(self) -> str: + """Expect a response beginning with '2'. +""" + def abort(self) -> str: + """Abort a file transfer. Uses out-of-band data. +This does not follow the procedure from the RFC to send Telnet +IP and Synch; that doesn't seem to work with the servers I've +tried. Instead, just send the ABOR command as OOB data. +""" + def sendcmd(self, cmd: str) -> str: + """Send a command and return the response. +""" + def voidcmd(self, cmd: str) -> str: + """Send a command and expect a response beginning with '2'. +""" + def sendport(self, host: str, port: int) -> str: + """Send a PORT command with the current host and the given +port number. +""" + def sendeprt(self, host: str, port: int) -> str: + """Send an EPRT command with the current host and the given port number. +""" + def makeport(self) -> socket: + """Create a new socket and send a PORT command for it. 
+""" + def makepasv(self) -> tuple[str, int]: + """Internal: Does the PASV or EPSV handshake -> (address, port) +""" + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: + """Login, default anonymous. +""" # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers - def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ... - def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: + """Initiate a transfer over the data connection. + +If the transfer is active, send a port command and the +transfer command, and accept the connection. If the server is +passive, send a pasv command, connect to it, and start the +transfer command. Either way, return the socket for the +connection and the expected size of the transfer. The +expected size may be None if it could not be determined. + +Optional 'rest' argument can be a string that is sent as the +argument to a REST command. This is essentially a server +marker used to tell the server to skip over any data up to the +given marker. +""" + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: + """Like ntransfercmd() but returns only the socket. +""" def retrbinary( self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None - ) -> str: ... + ) -> str: + """Retrieve data in binary mode. A new port is created for you. + +Args: + cmd: A RETR command. + callback: A single parameter callable to be called on each + block of data read. + blocksize: The maximum number of bytes to read from the + socket at one time. [default: 8192] + rest: Passed to transfercmd(). [default: None] + +Returns: + The response code. +""" def storbinary( self, cmd: str, @@ -87,25 +220,132 @@ class FTP: blocksize: int = 8192, callback: Callable[[bytes], object] | None = None, rest: int | str | None = None, - ) -> str: ... - def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... - def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... - def acct(self, password: str) -> str: ... - def nlst(self, *args: str) -> list[str]: ... + ) -> str: + """Store a file in binary mode. A new port is created for you. + +Args: + cmd: A STOR command. + fp: A file-like object with a read(num_bytes) method. + blocksize: The maximum data size to read from fp and send over + the connection at once. [default: 8192] + callback: An optional single parameter callable that is called on + each block of data after it is sent. [default: None] + rest: Passed to transfercmd(). [default: None] + +Returns: + The response code. +""" + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: + """Retrieve data in line mode. A new port is created for you. + +Args: + cmd: A RETR, LIST, or NLST command. + callback: An optional single parameter callable that is called + for each line with the trailing CRLF stripped. + [default: print_line()] + +Returns: + The response code. +""" + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: + """Store a file in line mode. A new port is created for you. + +Args: + cmd: A STOR command. + fp: A file-like object with a readline() method. 
+ callback: An optional single parameter callable that is called on + each line after it is sent. [default: None] + +Returns: + The response code. +""" + def acct(self, password: str) -> str: + """Send new account name. +""" + def nlst(self, *args: str) -> list[str]: + """Return a list of files in a given directory (default the current). +""" # Technically only the last arg can be a Callable but ... - def dir(self, *args: str | Callable[[str], object]) -> None: ... - def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: ... - def rename(self, fromname: str, toname: str) -> str: ... - def delete(self, filename: str) -> str: ... - def cwd(self, dirname: str) -> str: ... - def size(self, filename: str) -> int | None: ... - def mkd(self, dirname: str) -> str: ... - def rmd(self, dirname: str) -> str: ... - def pwd(self) -> str: ... - def quit(self) -> str: ... - def close(self) -> None: ... + def dir(self, *args: str | Callable[[str], object]) -> None: + """List a directory in long form. +By default list current directory to stdout. +Optional last argument is callback function; all +non-empty arguments before it are concatenated to the +LIST command. (This *should* only be used for a pathname.) +""" + def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: + """List a directory in a standardized format by using MLSD +command (RFC-3659). If path is omitted the current directory +is assumed. "facts" is a list of strings representing the type +of information desired (e.g. ["type", "size", "perm"]). + +Return a generator object yielding a tuple of two elements +for every file found in path. +First element is the file name, the second one is a dictionary +including a variable number of "facts" depending on the server +and whether "facts" argument has been provided. +""" + def rename(self, fromname: str, toname: str) -> str: + """Rename a file. +""" + def delete(self, filename: str) -> str: + """Delete a file. +""" + def cwd(self, dirname: str) -> str: + """Change to a directory. +""" + def size(self, filename: str) -> int | None: + """Retrieve the size of a file. +""" + def mkd(self, dirname: str) -> str: + """Make a directory, return its full pathname. +""" + def rmd(self, dirname: str) -> str: + """Remove a directory. +""" + def pwd(self) -> str: + """Return current working directory. +""" + def quit(self) -> str: + """Quit, and close the connection. +""" + def close(self) -> None: + """Close the connection without assuming anything about it. +""" class FTP_TLS(FTP): + """A FTP subclass which adds TLS support to FTP as described +in RFC-4217. + +Connect as usual to port 21 implicitly securing the FTP control +connection before authenticating. + +Securing the data connection requires user to explicitly ask +for it by calling prot_p() method. + +Usage example: +>>> from ftplib import FTP_TLS +>>> ftps = FTP_TLS('ftp.python.org') +>>> ftps.login() # login anonymously previously securing control channel +'230 Guest login ok, access restrictions apply.' +>>> ftps.prot_p() # switch to secure data connection +'200 Protection level set to P' +>>> ftps.retrlines('LIST') # list directory content securely +total 9 +drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . +drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. 
+drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin +drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc +d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming +drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib +drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub +drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr +-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg +'226 Transfer complete.' +>>> ftps.quit() +'221 Goodbye.' +>>> +""" if sys.version_info >= (3, 12): def __init__( self, @@ -139,15 +379,41 @@ class FTP_TLS(FTP): certfile: str | None context: SSLContext def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... - def auth(self) -> str: ... - def prot_p(self) -> str: ... - def prot_c(self) -> str: ... - def ccc(self) -> str: ... - -def parse150(resp: str) -> int | None: ... # undocumented -def parse227(resp: str) -> tuple[str, int]: ... # undocumented -def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented -def parse257(resp: str) -> str: ... # undocumented + def auth(self) -> str: + """Set up secure control connection by using TLS/SSL. +""" + def prot_p(self) -> str: + """Set up secure data connection. +""" + def prot_c(self) -> str: + """Set up clear text data connection. +""" + def ccc(self) -> str: + """Switch back to a clear-text control connection. +""" + +def parse150(resp: str) -> int | None: # undocumented + """Parse the '150' response for a RETR request. +Returns the expected transfer size or None; size is not guaranteed to +be present in the 150 message. +""" +def parse227(resp: str) -> tuple[str, int]: # undocumented + """Parse the '227' response for a PASV request. +Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)' +Return ('host.addr.as.numbers', port#) tuple. +""" +def parse229(resp: str, peer: Any) -> tuple[str, int]: # undocumented + """Parse the '229' response for an EPSV request. +Raises error_proto if it does not contain '(|||port|)' +Return ('host.addr.as.numbers', port#) tuple. +""" +def parse257(resp: str) -> str: # undocumented + """Parse the '257' response for a MKD or PWD request. +This is a response to a MKD or PWD request: a directory name. +Returns the directoryname in the 257 reply. +""" def ftpcp( source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" -) -> None: ... # undocumented +) -> None: # undocumented + """Copy file from one FTP-instance to another. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi index 47baf917294da..6a36572440556 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi @@ -1,3 +1,5 @@ +"""functools.py - Tools for working with functions and callable objects +""" import sys import types from _typeshed import SupportsAllComparisons, SupportsItems @@ -33,16 +35,47 @@ _RWrapper = TypeVar("_RWrapper") if sys.version_info >= (3, 14): @overload - def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: ... + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: + """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. + +This effectively reduces the iterable to a single value. If initial is present, +it is placed before the items of the iterable in the calculation, and serves as +a default when the iterable is empty. 
+ +For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) +calculates ((((1 + 2) + 3) + 4) + 5). +""" else: @overload - def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: ... + def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: + """reduce(function, iterable[, initial], /) -> value + +Apply a function of two arguments cumulatively to the items of an iterable, from left to right. + +This effectively reduces the iterable to a single value. If initial is present, +it is placed before the items of the iterable in the calculation, and serves as +a default when the iterable is empty. + +For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) +calculates ((((1 + 2) + 3) + 4) + 5). +""" @overload -def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: ... +def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: + """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. + +This effectively reduces the iterable to a single value. If initial is present, +it is placed before the items of the iterable in the calculation, and serves as +a default when the iterable is empty. + +For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) +calculates ((((1 + 2) + 3) + 4) + 5). +""" class _CacheInfo(NamedTuple): + """CacheInfo(hits, misses, maxsize, currsize) +""" hits: int misses: int maxsize: int | None @@ -55,16 +88,55 @@ class _CacheParameters(TypedDict): @final class _lru_cache_wrapper(Generic[_T]): + """Create a cached callable that wraps another function. + +user_function: the function being cached + +maxsize: 0 for no caching + None for unlimited cache size + n for a bounded cache + +typed: False cache f(3) and f(3.0) as identical calls + True cache f(3) and f(3.0) as distinct calls + +cache_info_type: namedtuple class with the fields: + hits misses currsize maxsize +""" __wrapped__: Callable[..., _T] - def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... - def cache_info(self) -> _CacheInfo: ... - def cache_clear(self) -> None: ... + def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: + """Call self as a function. +""" + def cache_info(self) -> _CacheInfo: + """Report cache statistics +""" + def cache_clear(self) -> None: + """Clear the cache and cache statistics +""" def cache_parameters(self) -> _CacheParameters: ... def __copy__(self) -> _lru_cache_wrapper[_T]: ... def __deepcopy__(self, memo: Any, /) -> _lru_cache_wrapper[_T]: ... @overload -def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... +def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: + """Least-recently-used cache decorator. + +If *maxsize* is set to None, the LRU features are disabled and the cache +can grow without bound. + +If *typed* is True, arguments of different types will be cached separately. +For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as +distinct calls with distinct results. Some types such as str and int may +be cached separately even when typed is false. + +Arguments to the cached function must be hashable. + +View the cache statistics named tuple (hits, misses, maxsize, currsize) +with f.cache_info(). Clear the cache and statistics with f.cache_clear(). +Access the underlying function with f.__wrapped__. 
+ +See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU) + +""" @overload def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... @@ -115,12 +187,31 @@ if sys.version_info >= (3, 14): wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: + """Update a wrapper function to look like the wrapped function + +wrapper is the function to be updated +wrapped is the original function +assigned is a tuple naming the attributes assigned directly +from the wrapped function to the wrapper function (defaults to +functools.WRAPPER_ASSIGNMENTS) +updated is a tuple naming the attributes of the wrapper that +are updated with the corresponding attribute from the wrapped +function (defaults to functools.WRAPPER_UPDATES) +""" def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: + """Decorator factory to apply update_wrapper() to a wrapper function + +Returns a decorator that invokes update_wrapper() with the decorated +function as the wrapper argument and the arguments to wraps() as the +remaining arguments. Default arguments are as for update_wrapper(). +This is a convenience function to simplify applying partial() to +update_wrapper(). +""" elif sys.version_info >= (3, 12): def update_wrapper( @@ -128,12 +219,31 @@ elif sys.version_info >= (3, 12): wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: + """Update a wrapper function to look like the wrapped function + +wrapper is the function to be updated +wrapped is the original function +assigned is a tuple naming the attributes assigned directly +from the wrapped function to the wrapper function (defaults to +functools.WRAPPER_ASSIGNMENTS) +updated is a tuple naming the attributes of the wrapper that +are updated with the corresponding attribute from the wrapped +function (defaults to functools.WRAPPER_UPDATES) +""" def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: + """Decorator factory to apply update_wrapper() to a wrapper function + +Returns a decorator that invokes update_wrapper() with the decorated +function as the wrapper argument and the arguments to wraps() as the +remaining arguments. Default arguments are as for update_wrapper(). +This is a convenience function to simplify applying partial() to +update_wrapper(). 
+""" else: def update_wrapper( @@ -141,31 +251,76 @@ else: wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: ... + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: + """Update a wrapper function to look like the wrapped function + + wrapper is the function to be updated + wrapped is the original function + assigned is a tuple naming the attributes assigned directly + from the wrapped function to the wrapper function (defaults to + functools.WRAPPER_ASSIGNMENTS) + updated is a tuple naming the attributes of the wrapper that + are updated with the corresponding attribute from the wrapped + function (defaults to functools.WRAPPER_UPDATES) + """ def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), updated: Iterable[str] = ("__dict__",), - ) -> _Wrapper[_PWrapped, _RWrapped]: ... + ) -> _Wrapper[_PWrapped, _RWrapped]: + """Decorator factory to apply update_wrapper() to a wrapper function + + Returns a decorator that invokes update_wrapper() with the decorated + function as the wrapper argument and the arguments to wraps() as the + remaining arguments. Default arguments are as for update_wrapper(). + This is a convenience function to simplify applying partial() to + update_wrapper(). + """ -def total_ordering(cls: type[_T]) -> type[_T]: ... -def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... +def total_ordering(cls: type[_T]) -> type[_T]: + """Class decorator that fills in missing ordering methods +""" +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: + """Convert a cmp= function into a key= function. + + mycmp + Function that compares two objects. +""" @disjoint_base class partial(Generic[_T]): + """Create a new function with partial application of the given arguments +and keywords. +""" @property - def func(self) -> Callable[..., _T]: ... + def func(self) -> Callable[..., _T]: + """function object to use in future partial calls +""" @property - def args(self) -> tuple[Any, ...]: ... + def args(self) -> tuple[Any, ...]: + """tuple of arguments to future partial calls +""" @property - def keywords(self) -> dict[str, Any]: ... + def keywords(self) -> dict[str, Any]: + """dictionary of keyword arguments to future partial calls +""" def __new__(cls, func: Callable[..., _T], /, *args: Any, **kwargs: Any) -> Self: ... - def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __call__(self, /, *args: Any, **kwargs: Any) -> _T: + """Call self as a function. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor: TypeAlias = Any class partialmethod(Generic[_T]): + """Method descriptor with partial application of the given arguments +and keywords. + +Supports wrapping existing descriptors and handles non-descriptor +callables as instance methods. +""" func: Callable[..., _T] | _Descriptor args: tuple[Any, ...] keywords: dict[str, Any] @@ -183,7 +338,11 @@ class partialmethod(Generic[_T]): def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... 
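# Illustrative aside, not part of the vendored stub: a minimal sketch of the
# partial() and wraps() behaviour documented above; 'greet' is an example name.
import functools

def greet(greeting: str, name: str) -> str:
    """Return a greeting."""
    return f"{greeting}, {name}!"

hello = functools.partial(greet, "Hello")
assert hello("world") == "Hello, world!"
assert hello.func is greet and hello.args == ("Hello",)

@functools.wraps(greet)
def wrapper(*args, **kwargs):
    return greet(*args, **kwargs)

assert wrapper.__name__ == "greet"      # metadata copied per WRAPPER_ASSIGNMENTS
assert wrapper.__wrapped__ is greet     # set by update_wrapper()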
@property def __isabstractmethod__(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" if sys.version_info >= (3, 11): _RegType: TypeAlias = type[Any] | types.UnionType @@ -208,16 +367,33 @@ class _SingleDispatchCallable(Generic[_T]): def _clear_cache(self) -> None: ... def __call__(self, /, *args: Any, **kwargs: Any) -> _T: ... -def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: + """Single-dispatch generic function decorator. + +Transforms a function into a generic function, which can have different +behaviours depending upon the type of its first argument. The decorated +function acts as the default implementation, and additional +implementations can be registered using the register() attribute of the +generic function. +""" class singledispatchmethod(Generic[_T]): + """Single-dispatch generic method descriptor. + +Supports wrapping existing descriptors and handles non-descriptor +callables as instance methods. +""" dispatcher: _SingleDispatchCallable[_T] func: Callable[..., _T] def __init__(self, func: Callable[..., _T]) -> None: ... @property def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: + """generic_method.register(cls, func) -> func + +Registers a new implementation for the given *cls* on a *generic_method*. +""" @overload def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload @@ -235,9 +411,15 @@ class cached_property(Generic[_T_co]): def __set_name__(self, owner: type[Any], name: str) -> None: ... # __set__ is not defined at runtime, but @cached_property is designed to be settable def __set__(self, instance: object, value: _T_co) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" -def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: ... +def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: + """Simple lightweight unbounded cache. Sometimes called "memoize". +""" def _make_key( args: tuple[Hashable, ...], kwds: SupportsItems[Any, Any], @@ -247,11 +429,25 @@ def _make_key( tuple: type = ..., type: Any = ..., len: Callable[[Sized], int] = ..., -) -> Hashable: ... +) -> Hashable: + """Make a cache key from optionally typed positional and keyword arguments + +The key is constructed in a way that is flat as possible rather than +as a nested structure that would take more memory. + +If there is only a single argument and its data type is known to cache +its hash value, then that argument is returned without a wrapper. This +saves space and improves lookup speed. + +""" if sys.version_info >= (3, 14): @final - class _PlaceholderType: ... + class _PlaceholderType: + """The type of the Placeholder singleton. + +Used as a placeholder for partial arguments. 
+""" Placeholder: Final[_PlaceholderType] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi index 06fb6b47c2d1d..92c63d5890b45 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi @@ -1,3 +1,24 @@ +"""This module provides access to the garbage collector for reference cycles. + +enable() -- Enable automatic garbage collection. +disable() -- Disable automatic garbage collection. +isenabled() -- Returns true if automatic collection is enabled. +collect() -- Do a full collection right now. +get_count() -- Return the current collection counts. +get_stats() -- Return list of dictionaries containing per-generation stats. +set_debug() -- Set debugging flags. +get_debug() -- Get debugging flags. +set_threshold() -- Set the collection thresholds. +get_threshold() -- Return the current collection thresholds. +get_objects() -- Return a list of all objects tracked by the collector. +is_tracked() -- Returns true if a given object is tracked. +is_finalized() -- Returns true if a given object has been already finalized. +get_referrers() -- Return the list of objects that refer to an object. +get_referents() -- Return the list of objects that an object refers to. +freeze() -- Freeze all tracked objects and ignore them for future collections. +unfreeze() -- Unfreeze all objects in the permanent generation. +get_freeze_count() -- Return the number of objects in the permanent generation. +""" from collections.abc import Callable from typing import Any, Final, Literal from typing_extensions import TypeAlias @@ -13,21 +34,91 @@ _CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], callbacks: list[_CallbackType] garbage: list[Any] -def collect(generation: int = 2) -> int: ... -def disable() -> None: ... -def enable() -> None: ... -def get_count() -> tuple[int, int, int]: ... -def get_debug() -> int: ... -def get_objects(generation: int | None = None) -> list[Any]: ... -def freeze() -> None: ... -def unfreeze() -> None: ... -def get_freeze_count() -> int: ... -def get_referents(*objs: Any) -> list[Any]: ... -def get_referrers(*objs: Any) -> list[Any]: ... -def get_stats() -> list[dict[str, Any]]: ... -def get_threshold() -> tuple[int, int, int]: ... -def is_tracked(obj: Any, /) -> bool: ... -def is_finalized(obj: Any, /) -> bool: ... -def isenabled() -> bool: ... -def set_debug(flags: int, /) -> None: ... -def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: ... +def collect(generation: int = 2) -> int: + """Run the garbage collector. + +With no arguments, run a full collection. The optional argument +may be an integer specifying which generation to collect. A ValueError +is raised if the generation number is invalid. + +The number of unreachable objects is returned. +""" +def disable() -> None: + """Disable automatic garbage collection. +""" +def enable() -> None: + """Enable automatic garbage collection. +""" +def get_count() -> tuple[int, int, int]: + """Return a three-tuple of the current collection counts. +""" +def get_debug() -> int: + """Get the garbage collection debugging flags. +""" +def get_objects(generation: int | None = None) -> list[Any]: + """Return a list of objects tracked by the collector (excluding the list returned). + + generation + Generation to extract the objects from. + +If generation is not None, return only the objects tracked by the collector +that are in that generation. 
+""" +def freeze() -> None: + """Freeze all current tracked objects and ignore them for future collections. + +This can be used before a POSIX fork() call to make the gc copy-on-write friendly. +Note: collection before a POSIX fork() call may free pages for future allocation +which can cause copy-on-write. +""" +def unfreeze() -> None: + """Unfreeze all objects in the permanent generation. + +Put all objects in the permanent generation back into oldest generation. +""" +def get_freeze_count() -> int: + """Return the number of objects in the permanent generation. +""" +def get_referents(*objs: Any) -> list[Any]: + """Return the list of objects that are directly referred to by 'objs'. +""" +def get_referrers(*objs: Any) -> list[Any]: + """Return the list of objects that directly refer to any of 'objs'. +""" +def get_stats() -> list[dict[str, Any]]: + """Return a list of dictionaries containing per-generation statistics. +""" +def get_threshold() -> tuple[int, int, int]: + """Return the current collection thresholds. +""" +def is_tracked(obj: Any, /) -> bool: + """Returns true if the object is tracked by the garbage collector. + +Simple atomic objects will return false. +""" +def is_finalized(obj: Any, /) -> bool: + """Returns true if the object has been already finalized by the GC. +""" +def isenabled() -> bool: + """Returns true if automatic garbage collection is enabled. +""" +def set_debug(flags: int, /) -> None: + """Set the garbage collection debugging flags. + + flags + An integer that can have the following bits turned on: + DEBUG_STATS - Print statistics during collection. + DEBUG_COLLECTABLE - Print collectable objects found. + DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects + found. + DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them. + DEBUG_LEAK - Debug leaking programs (everything but STATS). + +Debugging information is written to sys.stderr. +""" +def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: + """set_threshold(threshold0, [threshold1, [threshold2]]) +Set the collection thresholds (the collection frequency). + +Setting 'threshold0' to zero disables collection. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi index 3caed77a661ac..b5089fe749894 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi @@ -1,3 +1,8 @@ +""" +Path operations common to more than one OS +Do not use directly. The OS specific modules import the appropriate +functions from this module themselves. +""" import os import sys from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT @@ -28,7 +33,9 @@ if sys.version_info >= (3, 13): # Iterable[T], so that list[T] | Literal[""] could be used as a return # type. But because this only works when T is str, we need Sequence[T] instead. @overload -def commonprefix(m: Sequence[LiteralString]) -> LiteralString: ... +def commonprefix(m: Sequence[LiteralString]) -> LiteralString: + """Given a list of pathnames, returns the longest common leading component +""" @overload def commonprefix(m: Sequence[StrPath]) -> str: ... @overload @@ -37,27 +44,60 @@ def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... 
@overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... -def exists(path: FileDescriptorOrPath) -> bool: ... -def getsize(filename: FileDescriptorOrPath) -> int: ... -def isfile(path: FileDescriptorOrPath) -> bool: ... -def isdir(s: FileDescriptorOrPath) -> bool: ... +def exists(path: FileDescriptorOrPath) -> bool: + """Test whether a path exists. Returns False for broken symbolic links +""" +def getsize(filename: FileDescriptorOrPath) -> int: + """Return the size of a file, reported by os.stat(). +""" +def isfile(path: FileDescriptorOrPath) -> bool: + """Test whether a path is a regular file +""" +def isdir(s: FileDescriptorOrPath) -> bool: + """Return true if the pathname refers to an existing directory. +""" if sys.version_info >= (3, 12): - def islink(path: StrOrBytesPath) -> bool: ... + def islink(path: StrOrBytesPath) -> bool: + """Test whether a path is a symbolic link +""" # These return float if os.stat_float_times() == True, # but int is a subclass of float. -def getatime(filename: FileDescriptorOrPath) -> float: ... -def getmtime(filename: FileDescriptorOrPath) -> float: ... -def getctime(filename: FileDescriptorOrPath) -> float: ... -def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... -def sameopenfile(fp1: int, fp2: int) -> bool: ... -def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... +def getatime(filename: FileDescriptorOrPath) -> float: + """Return the last access time of a file, reported by os.stat(). +""" +def getmtime(filename: FileDescriptorOrPath) -> float: + """Return the last modification time of a file, reported by os.stat(). +""" +def getctime(filename: FileDescriptorOrPath) -> float: + """Return the metadata change time of a file, reported by os.stat(). +""" +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: + """Test whether two pathnames reference the same actual file or directory + +This is determined by the device number and i-node number and +raises an exception if an os.stat() call on either pathname fails. +""" +def sameopenfile(fp1: int, fp2: int) -> bool: + """Test whether two open file objects reference the same file +""" +def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: + """Test whether two stat buffers reference the same file +""" if sys.version_info >= (3, 13): - def isjunction(path: StrOrBytesPath) -> bool: ... - def isdevdrive(path: StrOrBytesPath) -> bool: ... - def lexists(path: StrOrBytesPath) -> bool: ... + def isjunction(path: StrOrBytesPath) -> bool: + """Test whether a path is a junction +Junctions are not supported on the current platform +""" + def isdevdrive(path: StrOrBytesPath) -> bool: + """Determines whether the specified path is on a Windows Dev Drive. +Dev Drives are not supported on the current platform +""" + def lexists(path: StrOrBytesPath) -> bool: + """Test whether a path exists. Returns True for broken symbolic links +""" # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.4 _AllowMissingType = NewType("_AllowMissingType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi index c15db8122cfcf..5530bfc9f81be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi @@ -1,3 +1,18 @@ +"""Parser for command line options. + +This module helps scripts to parse the command line arguments in +sys.argv. 
It supports the same conventions as the Unix getopt() +function (including the special meanings of arguments of the form '-' +and '--'). Long options similar to those supported by GNU software +may be used as well via an optional third argument. This module +provides two functions and an exception: + +getopt() -- Parse command line options +gnu_getopt() -- Like getopt(), but allow option and non-option arguments +to be intermixed. +GetoptError -- exception (class) raised with 'opt' attribute, which is the +option involved with the exception. +""" from collections.abc import Iterable, Sequence from typing import Protocol, TypeVar, overload, type_check_only @@ -14,10 +29,50 @@ __all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] def getopt( args: _SliceableT[_StrSequenceT_co], shortopts: str, longopts: Iterable[str] | str = [] -) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: ... +) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: + """getopt(args, options[, long_options]) -> opts, args + +Parses command line options and parameter list. args is the +argument list to be parsed, without the leading reference to the +running program. Typically, this means "sys.argv[1:]". shortopts +is the string of option letters that the script wants to +recognize, with options that require an argument followed by a +colon and options that accept an optional argument followed by +two colons (i.e., the same format that Unix getopt() uses). If +specified, longopts is a list of strings with the names of the +long options which should be supported. The leading '--' +characters should not be included in the option name. Options +which require an argument should be followed by an equal sign +('='). Options which accept an optional argument should be +followed by an equal sign and question mark ('=?'). + +The return value consists of two elements: the first is a list of +(option, value) pairs; the second is the list of program arguments +left after the option list was stripped (this is a trailing slice +of the first argument). Each option-and-value pair returned has +the option as its first element, prefixed with a hyphen (e.g., +'-x'), and the option argument as its second element, or an empty +string if the option has no argument. The options occur in the +list in the same order in which they were found, thus allowing +multiple occurrences. Long and short options may be mixed. + +""" def gnu_getopt( args: Sequence[str], shortopts: str, longopts: Iterable[str] | str = [] -) -> tuple[list[tuple[str, str]], list[str]]: ... +) -> tuple[list[tuple[str, str]], list[str]]: + """getopt(args, options[, long_options]) -> opts, args + +This function works like getopt(), except that GNU style scanning +mode is used by default. This means that option and non-option +arguments may be intermixed. The getopt() function stops +processing options as soon as a non-option argument is +encountered. + +If the first character of the option string is '+', or if the +environment variable POSIXLY_CORRECT is set, then option +processing stops as soon as a non-option argument is encountered. + +""" class GetoptError(Exception): msg: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi index bb3013dfbf393..3ad62219f048c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi @@ -1,14 +1,66 @@ +"""Utilities to get a password and/or the current user name. 
+ +getpass(prompt[, stream[, echo_char]]) - Prompt for a password, with echo +turned off and optional keyboard feedback. +getuser() - Get the user name from the environment or password database. + +GetPassWarning - This UserWarning is issued when getpass() cannot prevent + echoing of the password contents while reading. + +On Windows, the msvcrt module will be used. + +""" import sys from typing import TextIO __all__ = ["getpass", "getuser", "GetPassWarning"] if sys.version_info >= (3, 14): - def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: ... + def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: + """Prompt for a password, with echo turned off. + +Args: + prompt: Written on stream to ask for the input. Default: 'Password: ' + stream: A writable file object to display the prompt. Defaults to + the tty. If no tty is available defaults to sys.stderr. + echo_char: A single ASCII character to mask input (e.g., '*'). + If None, input is hidden. +Returns: + The seKr3t input. +Raises: + EOFError: If our input tty or stdin was closed. + GetPassWarning: When we were unable to turn echo off on the input. + +Always restores terminal settings before returning. +""" else: - def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... + def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: + """Prompt for a password, with echo turned off. + +Args: + prompt: Written on stream to ask for the input. Default: 'Password: ' + stream: A writable file object to display the prompt. Defaults to + the tty. If no tty is available defaults to sys.stderr. +Returns: + The seKr3t input. +Raises: + EOFError: If our input tty or stdin was closed. + GetPassWarning: When we were unable to turn echo off on the input. + +Always restores terminal settings before returning. +""" + +def getuser() -> str: + """Get the username from the environment or password database. + +First try various environment variables, then the password +database. This works on Windows as long as USERNAME is set. +Any failure to find a username raises OSError. -def getuser() -> str: ... +.. versionchanged:: 3.13 + Previously, various exceptions beyond just :exc:`OSError` + were raised. +""" class GetPassWarning(UserWarning): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi index e9ffd7a4a4a42..3a5cc0e5071b7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi @@ -1,3 +1,14 @@ +"""Internationalization and localization support. + +This module provides internationalization (I18N) and localization (L10N) +support for your Python programs by providing an interface to the GNU gettext +message catalog library. + +I18N refers to the operation by which a program is made aware of multiple +languages. L10N refers to the adaptation of your program, once +internationalized, to the local language and cultural habits. + +""" import io import sys from _typeshed import StrPath @@ -186,4 +197,7 @@ if sys.version_info < (3, 11): Catalog = translation -def c2py(plural: str) -> Callable[[int], int]: ... +def c2py(plural: str) -> Callable[[int], int]: + """Gets a C expression as used in PO files for plural forms and returns a +Python function that implements an equivalent expression. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi index 942fd73961963..a28283efb09aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi @@ -1,3 +1,5 @@ +"""Filename globbing utility. +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, Sequence @@ -31,7 +33,20 @@ if sys.version_info >= (3, 11): dir_fd: int | None = None, recursive: bool = False, include_hidden: bool = False, - ) -> list[AnyStr]: ... + ) -> list[AnyStr]: + """Return a list of paths matching a pathname pattern. + +The pattern may contain simple shell-style wildcards a la +fnmatch. Unlike fnmatch, filenames starting with a +dot are special cases that are not matched by '*' and '?' +patterns by default. + +If `include_hidden` is true, the patterns '*', '?', '**' will match hidden +directories. + +If `recursive` is true, the pattern '**' will match any files and +zero or more directories and subdirectories. +""" def iglob( pathname: AnyStr, *, @@ -39,24 +54,88 @@ if sys.version_info >= (3, 11): dir_fd: int | None = None, recursive: bool = False, include_hidden: bool = False, - ) -> Iterator[AnyStr]: ... + ) -> Iterator[AnyStr]: + """Return an iterator which yields the paths matching a pathname pattern. + +The pattern may contain simple shell-style wildcards a la +fnmatch. However, unlike fnmatch, filenames starting with a +dot are special cases that are not matched by '*' and '?' +patterns. + +If recursive is true, the pattern '**' will match any files and +zero or more directories and subdirectories. +""" elif sys.version_info >= (3, 10): def glob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> list[AnyStr]: ... + ) -> list[AnyStr]: + """Return a list of paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ def iglob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False - ) -> Iterator[AnyStr]: ... + ) -> Iterator[AnyStr]: + """Return an iterator which yields the paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ else: - def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... - def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: + """Return a list of paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. 
+ """ + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: + """Return an iterator which yields the paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. -def escape(pathname: AnyStr) -> AnyStr: ... + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ + +def escape(pathname: AnyStr) -> AnyStr: + """Escape all special characters. + """ def has_magic(s: str | bytes) -> bool: ... # undocumented if sys.version_info >= (3, 13): def translate( pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None - ) -> str: ... + ) -> str: + """Translate a pathname with shell wildcards to a regular expression. + +If `recursive` is true, the pattern segment '**' will match any number of +path segments. + +If `include_hidden` is true, wildcards can match path segments beginning +with a dot ('.'). + +If a sequence of separator characters is given to `seps`, they will be +used to split the pattern into segments and match path separators. If not +given, os.path.sep and os.path.altsep (where available) are used. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi index 1ca8cbe12b085..f673d641451ab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi @@ -11,18 +11,94 @@ if sys.version_info >= (3, 11): from types import GenericAlias class TopologicalSorter(Generic[_T]): + """Provides functionality to topologically sort a graph of hashable nodes +""" @overload def __init__(self, graph: None = None) -> None: ... @overload def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... - def add(self, node: _T, *predecessors: _T) -> None: ... - def prepare(self) -> None: ... - def is_active(self) -> bool: ... + def add(self, node: _T, *predecessors: _T) -> None: + """Add a new node and its predecessors to the graph. + +Both the *node* and all elements in *predecessors* must be hashable. + +If called multiple times with the same node argument, the set of dependencies +will be the union of all dependencies passed in. + +It is possible to add a node with no dependencies (*predecessors* is not provided) +as well as provide a dependency twice. If a node that has not been provided before +is included among *predecessors* it will be automatically added to the graph with +no predecessors of its own. + +Raises ValueError if called after "prepare". +""" + def prepare(self) -> None: + """Mark the graph as finished and check for cycles in the graph. + +If any cycle is detected, "CycleError" will be raised, but "get_ready" can +still be used to obtain as many nodes as possible until cycles block more +progress. After a call to this function, the graph cannot be modified and +therefore no more nodes can be added using "add". + +Raise ValueError if nodes have already been passed out of the sorter. + +""" + def is_active(self) -> bool: + """Return ``True`` if more progress can be made and ``False`` otherwise. + +Progress can be made if cycles do not block the resolution and either there +are still nodes ready that haven't yet been returned by "get_ready" or the +number of nodes marked "done" is less than the number that have been returned +by "get_ready". 
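The escape() and translate() helpers stubbed above, sketched with an illustrative filename; translate() is only available on 3.13+:

    import glob
    import sys

    # escape() neutralises wildcard characters in a literal name.
    pattern = glob.escape("report[draft].txt")      # -> 'report[[]draft].txt'
    print(glob.glob(pattern))

    if sys.version_info >= (3, 13):
        # translate() produces a regular-expression string for the shell pattern.
        print(glob.translate("**/*.py", recursive=True))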
+ +Raises ValueError if called without calling "prepare" previously. +""" def __bool__(self) -> bool: ... - def done(self, *nodes: _T) -> None: ... - def get_ready(self) -> tuple[_T, ...]: ... - def static_order(self) -> Iterable[_T]: ... + def done(self, *nodes: _T) -> None: + """Marks a set of nodes returned by "get_ready" as processed. + +This method unblocks any successor of each node in *nodes* for being returned +in the future by a call to "get_ready". + +Raises ValueError if any node in *nodes* has already been marked as +processed by a previous call to this method, if a node was not added to the +graph by using "add" or if called without calling "prepare" previously or if +node has not yet been returned by "get_ready". +""" + def get_ready(self) -> tuple[_T, ...]: + """Return a tuple of all the nodes that are ready. + +Initially it returns all nodes with no predecessors; once those are marked +as processed by calling "done", further calls will return all new nodes that +have all their predecessors already processed. Once no more progress can be made, +empty tuples are returned. + +Raises ValueError if called without calling "prepare" previously. +""" + def static_order(self) -> Iterable[_T]: + """Returns an iterable of nodes in a topological order. + +The particular order that is returned may depend on the specific +order in which the items were inserted in the graph. + +Using this method does not require to call "prepare" or "done". If any +cycle is detected, :exc:`CycleError` will be raised. +""" if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" + +class CycleError(ValueError): + """Subclass of ValueError raised by TopologicalSorter.prepare if cycles +exist in the working graph. -class CycleError(ValueError): ... +If multiple cycles exist, only one undefined choice among them will be reported +and included in the exception. The detected cycle can be accessed via the second +element in the *args* attribute of the exception instance and consists in a list +of nodes, such that each node is, in the graph, an immediate predecessor of the +next node in the list. In the reported list, the first and the last node will be +the same, to make it clear that it is cyclic. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi index 965ecece2a56d..07f3a73897194 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi @@ -1,3 +1,18 @@ +"""Access to the Unix group database. + +Group entries are reported as 4-tuples containing the following fields +from the group database, in order: + + gr_name - name of the group + gr_passwd - group password (encrypted); often empty + gr_gid - numeric ID of the group + gr_mem - list of members + +The gid is an integer, name and password are strings. (Note that most +users are not explicitly listed as members of the groups they are in +according to the password database. Check both databases to get +complete membership information.) 
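A sketch of the TopologicalSorter workflow documented above, covering both static_order() and the prepare()/get_ready()/done() protocol; the node names are illustrative:

    from graphlib import TopologicalSorter, CycleError

    # Each key depends on the nodes in its value: "compile" needs "parse", and so on.
    graph = {"link": {"compile"}, "compile": {"parse"}, "parse": set()}

    print(list(TopologicalSorter(graph).static_order()))   # ['parse', 'compile', 'link']

    ts = TopologicalSorter(graph)
    ts.prepare()
    while ts.is_active():
        ready = ts.get_ready()
        # ... process the ready nodes, possibly in parallel ...
        ts.done(*ready)

    try:
        TopologicalSorter({"a": {"b"}, "b": {"a"}}).prepare()
    except CycleError as exc:
        print("cycle:", exc.args[1])    # the detected cycle, per the CycleError docstring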
+""" import sys from _typeshed import structseq from typing import Any, Final, final @@ -5,18 +20,45 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_group(structseq[Any], tuple[str, str | None, int, list[str]]): + """grp.struct_group: Results from getgr*() routines. + +This object may be accessed either as a tuple of + (gr_name,gr_passwd,gr_gid,gr_mem) +or via the object attributes as named in the above tuple. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("gr_name", "gr_passwd", "gr_gid", "gr_mem") @property - def gr_name(self) -> str: ... + def gr_name(self) -> str: + """group name +""" @property - def gr_passwd(self) -> str | None: ... + def gr_passwd(self) -> str | None: + """password +""" @property - def gr_gid(self) -> int: ... + def gr_gid(self) -> int: + """group id +""" @property - def gr_mem(self) -> list[str]: ... + def gr_mem(self) -> list[str]: + """group members +""" + + def getgrall() -> list[struct_group]: + """Return a list of all available group entries, in arbitrary order. + +An entry whose name starts with '+' or '-' represents an instruction +to use YP/NIS and may not be accessible via getgrnam or getgrgid. +""" + def getgrgid(id: int) -> struct_group: + """Return the group database entry for the given numeric group ID. + +If id is not valid, raise KeyError. +""" + def getgrnam(name: str) -> struct_group: + """Return the group database entry for the given group name. - def getgrall() -> list[struct_group]: ... - def getgrgid(id: int) -> struct_group: ... - def getgrnam(name: str) -> struct_group: ... +If name is not valid, raise KeyError. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi index b18f76f06e3ee..6b91b4a27d6c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi @@ -1,3 +1,8 @@ +"""Functions that read and write gzipped files. + +The user of the file doesn't have to worry about the compression, +but random access is not allowed. +""" import sys import zlib from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath, WriteableBuffer @@ -51,7 +56,25 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> GzipFile: ... +) -> GzipFile: + """Open a gzip-compressed file in binary or text mode. + +The filename argument can be an actual filename (a str or bytes object), or +an existing file object to read from or write to. + +The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for +binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is +"rb", and the default compresslevel is 9. + +For binary mode, this function is equivalent to the GzipFile constructor: +GzipFile(filename, mode, compresslevel). In this case, the encoding, errors +and newline arguments must not be provided. + +For text mode, a GzipFile object is created, and wrapped in an +io.TextIOWrapper instance with the specified encoding, error handling +behavior, and line ending(s). + +""" @overload def open( filename: StrOrBytesPath | _WritableFileobj, @@ -81,6 +104,10 @@ def open( ) -> GzipFile | TextIOWrapper: ... class _PaddedFile: + """Minimal read-only file object that prepends a string to the contents +of an actual file. Shouldn't be used outside of gzip.py, as it lacks +essential functionality. +""" file: _ReadableFileobj def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... def read(self, size: int) -> bytes: ... 
@@ -88,9 +115,18 @@ class _PaddedFile: def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... -class BadGzipFile(OSError): ... +class BadGzipFile(OSError): + """Exception raised in some cases for invalid gzip files. +""" class GzipFile(BaseStream): + """The GzipFile class simulates most of the methods of a file object with +the exception of the truncate() method. + +This class only supports opening files in binary mode. If you need to open a +compressed file in text mode, use the gzip.open() function. + +""" myfileobj: FileIO | None mode: object name: str @@ -104,7 +140,40 @@ class GzipFile(BaseStream): compresslevel: int = 9, fileobj: _ReadableFileobj | None = None, mtime: float | None = None, - ) -> None: ... + ) -> None: + """Constructor for the GzipFile class. + +At least one of fileobj and filename must be given a +non-trivial value. + +The new class instance is based on fileobj, which can be a regular +file, an io.BytesIO object, or any other object which simulates a file. +It defaults to None, in which case filename is opened to provide +a file object. + +When fileobj is not None, the filename argument is only used to be +included in the gzip file header, which may include the original +filename of the uncompressed file. It defaults to the filename of +fileobj, if discernible; otherwise, it defaults to the empty string, +and in this case the original filename is not included in the header. + +The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or +'xb' depending on whether the file will be read or written. The default +is the mode of fileobj if discernible; otherwise, the default is 'rb'. +A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and +'wb', 'a' and 'ab', and 'x' and 'xb'. + +The compresslevel argument is an integer from 0 to 9 controlling the +level of compression; 1 is fastest and produces the least compression, +and 9 is slowest and produces the most compression. 0 is no compression +at all. The default is 9. + +The optional mtime argument is the timestamp requested by gzip. The time +is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. +If mtime is omitted or None, the current time is used. Use mtime = 0 +to generate a compressed stream that does not depend on creation time. + +""" @overload def __init__( self, @@ -147,16 +216,30 @@ class GzipFile(BaseStream): def filename(self) -> str: ... @property - def mtime(self) -> int | None: ... + def mtime(self) -> int | None: + """Last modification time read from stream, or None +""" crc: int def write(self, data: ReadableBuffer) -> int: ... def read(self, size: int | None = -1) -> bytes: ... - def read1(self, size: int = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: + """Implements BufferedIOBase.read1() + +Reads up to a buffer's worth of data if size is negative. +""" def peek(self, n: int) -> bytes: ... def close(self) -> None: ... def flush(self, zlib_mode: int = 2) -> None: ... - def fileno(self) -> int: ... - def rewind(self) -> None: ... + def fileno(self) -> int: + """Invoke the underlying file object's fileno() method. + +This will raise AttributeError if the underlying file object +doesn't support fileno(). +""" + def rewind(self) -> None: + """Return the uncompressed stream file position indicator to the +beginning of the file +""" def seek(self, offset: int, whence: int = 0) -> int: ... def readline(self, size: int | None = -1) -> bytes: ... 
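A sketch of gzip.open() and the GzipFile constructor described above; "example.txt.gz" is an illustrative path:

    import gzip

    # Text mode wraps a GzipFile in an io.TextIOWrapper, as the open() docstring says.
    with gzip.open("example.txt.gz", "wt", encoding="utf-8") as f:
        f.write("hello\n")

    # Binary mode is equivalent to the GzipFile constructor.
    with gzip.open("example.txt.gz", "rb") as f:
        data = f.read()

    # mtime=0 makes the stream independent of creation time (useful for reproducible output).
    with gzip.GzipFile("example.txt.gz", "wb", compresslevel=9, mtime=0) as f:
        f.write(b"hello\n")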
@@ -168,9 +251,24 @@ class _GzipReader(DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... if sys.version_info >= (3, 14): - def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: ... + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: + """Compress data in one shot and return the compressed string. + +compresslevel sets the compression level in range of 0-9. +mtime can be used to set the modification time. +The modification time is set to 0 by default, for reproducibility. +""" else: - def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... + def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: + """Compress data in one shot and return the compressed string. + +compresslevel sets the compression level in range of 0-9. +mtime can be used to set the modification time. The modification time is +set to the current time by default. +""" -def decompress(data: ReadableBuffer) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: + """Decompress a gzip compressed string in one shot. +Return the decompressed string. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi index 924136301b215..1c1a2ce69d80f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi @@ -1,3 +1,53 @@ +"""hashlib module - A common interface to many hash functions. + +new(name, data=b'', **kwargs) - returns a new hash object implementing the + given hash function; initializing the hash + using the given binary data. + +Named constructor functions are also available, these are faster +than using new(name): + +md5(), sha1(), sha224(), sha256(), sha384(), sha512(), blake2b(), blake2s(), +sha3_224, sha3_256, sha3_384, sha3_512, shake_128, and shake_256. + +More algorithms may be available on your platform but the above are guaranteed +to exist. See the algorithms_guaranteed and algorithms_available attributes +to find out what algorithm names can be passed to new(). + +NOTE: If you want the adler32 or crc32 hash functions they are available in +the zlib module. + +Choose your hash function wisely. Some have known collision weaknesses. +sha384 and sha512 will be slow on 32 bit platforms. + +Hash objects have these methods: + - update(data): Update the hash object with the bytes in data. Repeated calls + are equivalent to a single call with the concatenation of all + the arguments. + - digest(): Return the digest of the bytes passed to the update() method + so far as a bytes object. + - hexdigest(): Like digest() except the digest is returned as a string + of double length, containing only hexadecimal digits. + - copy(): Return a copy (clone) of the hash object. This can be used to + efficiently compute the digests of data that share a common + initial substring. 
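A round-trip sketch of the one-shot gzip.compress()/decompress() helpers stubbed just above:

    import gzip

    payload = b"spam " * 100
    blob = gzip.compress(payload, compresslevel=9, mtime=0)   # fixed mtime for reproducibility
    assert gzip.decompress(blob) == payload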
+ +For example, to obtain the digest of the byte string 'Nobody inspects the +spammish repetition': + + >>> import hashlib + >>> m = hashlib.md5() + >>> m.update(b"Nobody inspects") + >>> m.update(b" the spammish repetition") + >>> m.digest() + b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9' + +More condensed: + + >>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest() + 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2' + +""" import sys from _blake2 import blake2b as blake2b, blake2s as blake2s from _hashlib import ( @@ -66,7 +116,10 @@ else: "pbkdf2_hmac", ) -def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: ... +def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: + """new(name, data=b'') - Return a new hashing object using the named algorithm; +optionally initialized with data (which must be a bytes-like object). +""" algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] @@ -83,7 +136,17 @@ if sys.version_info >= (3, 11): def file_digest( fileobj: _BytesIOLike | _FileDigestFileObj, digest: str | Callable[[], _HashObject], /, *, _bufsize: int = 262144 - ) -> HASH: ... + ) -> HASH: + """Hash the contents of a file-like object. Returns a digest object. + +*fileobj* must be a file-like object opened for reading in binary mode. +It accepts file objects from open(), io.BytesIO(), and SocketIO objects. +The function may bypass Python's I/O and use the file descriptor *fileno* +directly. + +*digest* must either be a hash algorithm name as a *str*, a hash +constructor, or a callable that returns a hash object. +""" # Legacy typing-only alias _Hash = HASH diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi index 220c41f303fba..2c7943f1b2538 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi @@ -1,3 +1,34 @@ +"""Heap queue algorithm (a.k.a. priority queue). + +Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for +all k, counting elements from 0. For the sake of comparison, +non-existing elements are considered to be infinite. The interesting +property of a heap is that a[0] is always its smallest element. + +Usage: + +heap = [] # creates an empty heap +heappush(heap, item) # pushes a new item on the heap +item = heappop(heap) # pops the smallest item from the heap +item = heap[0] # smallest item on the heap without popping it +heapify(x) # transforms list into a heap, in-place, in linear time +item = heappushpop(heap, item) # pushes a new item and then returns + # the smallest item; the heap size is unchanged +item = heapreplace(heap, item) # pops and returns smallest item, and adds + # new item; the heap size is unchanged + +Our API differs from textbook heap algorithms as follows: + +- We use 0-based indexing. This makes the relationship between the + index for a node and the indexes for its children slightly less + obvious, but is more suitable since Python uses 0-based indexing. + +- Our heappop() method returns the smallest item, not the largest. + +These two make it possible to view the heap as a regular Python list +without surprises: heap[0] is the smallest item, and heap.sort() +maintains the heap invariant! 
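A sketch of hashlib.new() and the 3.11+ file_digest() helper documented above; "example.bin" is an illustrative path:

    import hashlib
    import sys

    h = hashlib.new("sha256", b"Nobody inspects")
    h.update(b" the spammish repetition")
    print(h.hexdigest())

    if sys.version_info >= (3, 11):
        with open("example.bin", "rb") as f:               # binary mode, as required above
            digest = hashlib.file_digest(f, "sha256")
        print(digest.hexdigest())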
+""" from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Generator, Iterable @@ -11,7 +42,33 @@ __about__: Final[str] def merge( *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False -) -> Generator[_S]: ... -def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... -def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... -def _heapify_max(heap: list[Any], /) -> None: ... # undocumented +) -> Generator[_S]: + """Merge multiple sorted inputs into a single sorted output. + +Similar to sorted(itertools.chain(*iterables)) but returns a generator, +does not pull the data into memory all at once, and assumes that each of +the input streams is already sorted (smallest to largest). + +>>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25])) +[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25] + +If *key* is not None, applies a key function to each element to determine +its sort order. + +>>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len)) +['dog', 'cat', 'fish', 'horse', 'kangaroo'] + +""" +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: + """Find the n largest elements in a dataset. + +Equivalent to: sorted(iterable, key=key, reverse=True)[:n] +""" +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: + """Find the n smallest elements in a dataset. + +Equivalent to: sorted(iterable, key=key)[:n] +""" +def _heapify_max(heap: list[Any], /) -> None: # undocumented + """Maxheap variant of heapify. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi index 070c59b1c166d..20ce107d93525 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi @@ -1,3 +1,7 @@ +"""HMAC (Keyed-Hashing for Message Authentication) module. + +Implements the HMAC algorithm as described by RFC 2104. +""" from _hashlib import _HashObject, compare_digest as compare_digest from _typeshed import ReadableBuffer, SizedBuffer from collections.abc import Callable @@ -15,20 +19,74 @@ digest_size: None # In reality digestmod has a default value, but the function always throws an error # if the argument is not given, so we pretend it is a required argument. @overload -def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... +def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: + """Create a new hashing object and return it. + +key: bytes or buffer, The starting key for the hash. +msg: bytes or buffer, Initial input for the hash, or None. +digestmod: A hash name suitable for hashlib.new(). *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. + + Required as of 3.8, despite its position after the optional + msg argument. Passing it as a keyword argument is + recommended, though not required for legacy API reasons. + +You can now feed arbitrary bytes into the object using its update() +method, and can ask for the hash value at any time by calling its digest() +or hexdigest() methods. +""" @overload def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... class HMAC: + """RFC 2104 HMAC class. 
Also complies with RFC 4231. + +This supports the API for Cryptographic Hash Functions (PEP 247). +""" __slots__ = ("_hmac", "_inner", "_outer", "block_size", "digest_size") digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... - def update(self, msg: ReadableBuffer) -> None: ... - def digest(self) -> bytes: ... - def hexdigest(self) -> str: ... - def copy(self) -> HMAC: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: + """Create a new HMAC object. + +key: bytes or buffer, key for the keyed hash object. +msg: bytes or buffer, Initial input for the hash or None. +digestmod: A hash name suitable for hashlib.new(). *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. + + Required as of 3.8, despite its position after the optional + msg argument. Passing it as a keyword argument is + recommended, though not required for legacy API reasons. +""" + def update(self, msg: ReadableBuffer) -> None: + """Feed data from msg into this hashing object. +""" + def digest(self) -> bytes: + """Return the hash value of this hashing object. + +This returns the hmac value as bytes. The object is +not altered in any way by this function; you can continue +updating the object after calling this function. +""" + def hexdigest(self) -> str: + """Like digest(), but returns a string of hexadecimal digits instead. + """ + def copy(self) -> HMAC: + """Return a separate copy of this hashing object. + +An update to this copy won't affect the original object. +""" + +def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: + """Fast inline implementation of HMAC. -def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... +key: bytes or buffer, The key for the keyed hash object. +msg: bytes or buffer, Input message. +digest: A hash name suitable for hashlib.new() for best performance. *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi index afba90832535d..f17c0ae4020b9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi @@ -1,6 +1,22 @@ +""" +General functions for HTML manipulation. +""" from typing import AnyStr __all__ = ["escape", "unescape"] -def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... -def unescape(s: AnyStr) -> AnyStr: ... +def escape(s: AnyStr, quote: bool = True) -> AnyStr: + """ +Replace special characters "&", "<" and ">" to HTML-safe sequences. +If the optional flag quote is true (the default), the quotation mark +characters, both double quote (") and single quote (') characters are also +translated. +""" +def unescape(s: AnyStr) -> AnyStr: + """ +Convert all named and numeric character references (e.g. >, >, +&x3e;) in the string s to the corresponding unicode characters. +This function uses the rules defined by the HTML 5 standard +for both valid and invalid character references, and the list of +HTML 5 named character references defined in html.entities.html5. 
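A sketch of the hmac API documented above (keyed hashing plus constant-time comparison); the key and message are illustrative:

    import hashlib
    import hmac

    key = b"illustrative-key"
    msg = b"payload"

    tag = hmac.new(key, msg, digestmod=hashlib.sha256).hexdigest()
    expected = hmac.new(key, msg, digestmod="sha256").hexdigest()
    print(hmac.compare_digest(tag, expected))     # True; constant-time comparison

    raw = hmac.digest(key, msg, "sha256")         # one-shot helper documented above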
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi index e5890d1ecfbd8..eaea3c72f04ce 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi @@ -1,3 +1,5 @@ +"""HTML character entity references. +""" from typing import Final __all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi index 7edd39e8c7037..9941559b19496 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi @@ -1,3 +1,5 @@ +"""A parser for HTML and XHTML. +""" from _markupbase import ParserBase from re import Pattern from typing import Final @@ -5,14 +7,48 @@ from typing import Final __all__ = ["HTMLParser"] class HTMLParser(ParserBase): + """Find tags and other markup and call handler functions. + +Usage: + p = HTMLParser() + p.feed(data) + ... + p.close() + +Start tags are handled by calling self.handle_starttag() or +self.handle_startendtag(); end tags by self.handle_endtag(). The +data between tags is passed from the parser to the derived class +by calling self.handle_data() with the data as argument (the data +may be split up in arbitrary chunks). If convert_charrefs is +True the character references are converted automatically to the +corresponding Unicode character (and self.handle_data() is no +longer split in chunks), otherwise they are passed by calling +self.handle_entityref() or self.handle_charref() with the string +containing respectively the named or numeric reference as the +argument. +""" CDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] - def __init__(self, *, convert_charrefs: bool = True) -> None: ... - def feed(self, data: str) -> None: ... - def close(self) -> None: ... - def get_starttag_text(self) -> str | None: ... + def __init__(self, *, convert_charrefs: bool = True) -> None: + """Initialize and reset this instance. + +If convert_charrefs is True (the default), all character references +are automatically converted to the corresponding Unicode characters. +""" + def feed(self, data: str) -> None: + """Feed data to the parser. + +Call this as often as you want, with as little or as much text +as you want (may include '\\n'). +""" + def close(self) -> None: + """Handle any buffered data. +""" + def get_starttag_text(self) -> str | None: + """Return full source of start tag: '<...>'. +""" def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi index f60c3909736d3..a99aea1b903f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi @@ -10,6 +10,24 @@ else: __all__ = ["HTTPStatus"] class HTTPStatus(IntEnum): + """HTTP status codes and reason phrases + +Status codes from the following RFCs are all observed: + + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP +""" @property def phrase(self) -> str: ... @property @@ -105,6 +123,13 @@ class HTTPStatus(IntEnum): if sys.version_info >= (3, 11): class HTTPMethod(StrEnum): + """HTTP methods and descriptions + +Methods from the following RFCs are all observed: + + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 5789: PATCH Method for HTTP +""" @property def description(self) -> str: ... CONNECT = "CONNECT" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi index d259e84e6f2aa..acae10bb7e216 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi @@ -1,3 +1,72 @@ +"""HTTP/1.1 client library + + + + +HTTPConnection goes through a number of "states", which define when a client +may legally make another request or fetch the response for a particular +request. This diagram details these state transitions: + + (null) + | + | HTTPConnection() + v + Idle + | + | putrequest() + v + Request-started + | + | ( putheader() )* endheaders() + v + Request-sent + |\\_____________________________ + | | getresponse() raises + | response = getresponse() | ConnectionError + v v + Unread-response Idle + [Response-headers-read] + |\\____________________ + | | + | response.read() | putrequest() + v v + Idle Req-started-unread-response + ______/| + / | + response.read() | | ( putheader() )* endheaders() + v v + Request-started Req-sent-unread-response + | + | response.read() + v + Request-sent + +This diagram presents the following rules: + -- a second request may not be started until {response-headers-read} + -- a response [object] cannot be retrieved until {request-sent} + -- there is no differentiation between an unread response body and a + partially read response body + +Note: this enforcement is applied by the HTTPConnection class. The + HTTPResponse class does not enforce this state machine, which + implies sophisticated clients may accelerate the request/response + pipeline. Caution should be taken, though: accelerating the states + beyond the above pattern may imply knowledge of the server's + connection-close behavior for certain requests. 
For example, it + is impossible to tell whether the server will close the connection + UNTIL the response headers have been read; this means that further + requests cannot be placed into the pipeline until it is known that + the server will NOT be closing the connection. + +Logical State __state __response +------------- ------- ---------- +Idle _CS_IDLE None +Request-started _CS_REQ_STARTED None +Request-sent _CS_REQ_SENT None +Unread-response _CS_IDLE +Req-started-unread-response _CS_REQ_STARTED +Req-sent-unread-response _CS_REQ_SENT +""" import email.message import io import ssl @@ -119,10 +188,21 @@ NETWORK_AUTHENTICATION_REQUIRED: Final = 511 responses: dict[int, str] class HTTPMessage(email.message.Message[str, str]): - def getallmatchingheaders(self, name: str) -> list[str]: ... # undocumented + def getallmatchingheaders(self, name: str) -> list[str]: # undocumented + """Find all header lines matching a given header name. + +Look through the list of headers and find all lines matching a given +header name (and their continuation lines). A list of the lines is +returned, without interpretation. If the header does not occur, an +empty list is returned. If the header occurs multiple times, all +occurrences are returned. Case is not important in the header name. + +""" @overload -def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... +def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: + """Parses only RFC2822 headers from a file pointer. +""" @overload def parse_headers(fp: SupportsReadline[bytes]) -> HTTPMessage: ... @@ -145,24 +225,80 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incomp url: str def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... def peek(self, n: int = -1) -> bytes: ... - def read(self, amt: int | None = None) -> bytes: ... - def read1(self, n: int = -1) -> bytes: ... - def readinto(self, b: WriteableBuffer) -> int: ... + def read(self, amt: int | None = None) -> bytes: + """Read and return the response body, or up to the next amt bytes. +""" + def read1(self, n: int = -1) -> bytes: + """Read with at most one underlying system call. If at least one +byte is buffered, return that instead. +""" + def readinto(self, b: WriteableBuffer) -> int: + """Read up to len(b) bytes into bytearray b and return the number +of bytes read. +""" def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] @overload - def getheader(self, name: str) -> str | None: ... + def getheader(self, name: str) -> str | None: + """Returns the value of the header matching *name*. + +If there are multiple matching headers, the values are +combined into a single string separated by commas and spaces. + +If no matching header is found, returns *default* or None if +the *default* is not specified. + +If the headers are unknown, raises http.client.ResponseNotReady. + +""" @overload def getheader(self, name: str, default: _T) -> str | _T: ... - def getheaders(self) -> list[tuple[str, str]]: ... - def isclosed(self) -> bool: ... + def getheaders(self) -> list[tuple[str, str]]: + """Return list of (header, value) tuples. +""" + def isclosed(self) -> bool: + """True if the connection is closed. +""" def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... 
- def info(self) -> email.message.Message: ... - def geturl(self) -> str: ... - def getcode(self) -> int: ... + def info(self) -> email.message.Message: + """Returns an instance of the class mimetools.Message containing +meta-information associated with the URL. + +When the method is HTTP, these headers are those returned by +the server at the head of the retrieved HTML page (including +Content-Length and Content-Type). + +When the method is FTP, a Content-Length header will be +present if (as is now usual) the server passed back a file +length in response to the FTP retrieval request. A +Content-Type header will be present if the MIME type can be +guessed. + +When the method is local-file, returned headers will include +a Date representing the file's last-modified time, a +Content-Length giving file size, and a Content-Type +containing a guess at the file's type. See also the +description of the mimetools module. + +""" + def geturl(self) -> str: + """Return the real URL of the page. + +In some cases, the HTTP server redirects a client to another +URL. The urlopen() function handles this transparently, but in +some cases the caller needs to know which URL the client was +redirected to. The geturl() method can be used to get at this +redirected URL. + +""" + def getcode(self) -> int: + """Return the HTTP status code that was sent with the response, +or None if the URL is not an HTTP URL. + +""" def begin(self) -> None: ... class HTTPConnection: @@ -190,21 +326,90 @@ class HTTPConnection: headers: Mapping[str, _HeaderValue] = {}, *, encode_chunked: bool = False, - ) -> None: ... - def getresponse(self) -> HTTPResponse: ... + ) -> None: + """Send a complete request to the server. +""" + def getresponse(self) -> HTTPResponse: + """Get the response from the server. + +If the HTTPConnection is in the correct state, returns an +instance of HTTPResponse or of whatever object is returned by +the response_class variable. + +If a request has not been sent or if a previous response has +not be handled, ResponseNotReady is raised. If the HTTP +response indicates that the connection should be closed, then +it will be closed before the response is returned. When the +connection is closed, the underlying socket is closed. +""" def set_debuglevel(self, level: int) -> None: ... if sys.version_info >= (3, 12): - def get_proxy_response_headers(self) -> HTTPMessage | None: ... + def get_proxy_response_headers(self) -> HTTPMessage | None: + """ +Returns a dictionary with the headers of the response +received from the proxy server to the CONNECT request +sent to set the tunnel. + +If the CONNECT request was not sent, the method returns None. +""" + + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: + """Set up host and port for HTTP CONNECT tunnelling. + +In a connection that uses HTTP CONNECT tunnelling, the host passed to +the constructor is used as a proxy server that relays all communication +to the endpoint passed to `set_tunnel`. This done by sending an HTTP +CONNECT request to the proxy server when the connection is established. + +This method must be called before the HTTP connection has been +established. + +The headers argument should be a mapping of extra HTTP headers to send +with the CONNECT request. 
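A hedged sketch of the request()/getresponse() flow described above; example.org is an illustrative host and network access is assumed:

    import http.client

    conn = http.client.HTTPSConnection("example.org", timeout=10)
    try:
        conn.request("GET", "/", headers={"Accept": "text/html"})
        resp = conn.getresponse()                  # HTTPResponse, documented above
        print(resp.status, resp.reason)
        print(resp.getheader("Content-Type"))
        body = resp.read()                         # read the body before issuing another request
    finally:
        conn.close()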
+ +As HTTP/1.1 is used for HTTP CONNECT tunnelling request, as per the RFC +(https://tools.ietf.org/html/rfc7231#section-4.3.6), a HTTP Host: +header must be provided, matching the authority-form of the request +target provided as the destination for the CONNECT request. If a +HTTP Host: header is not provided via the headers argument, one +is generated and transmitted automatically. +""" + def connect(self) -> None: + """Connect to the host and port specified in __init__. +""" + def close(self) -> None: + """Close the connection to the HTTP server. +""" + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: + """Send a request to the server. + +'method' specifies an HTTP request method, e.g. 'GET'. +'url' specifies the object being requested, e.g. '/index.html'. +'skip_host' if True does not add automatically a 'Host:' header +'skip_accept_encoding' if True does not add automatically an + 'Accept-Encoding:' header +""" + def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: + """Send a request header line to the server. + +For example: h.putheader('Accept', 'text/html') +""" + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: + """Indicate that the last header line has been sent to the server. - def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... - def connect(self) -> None: ... - def close(self) -> None: ... - def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... - def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: ... - def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... - def send(self, data: _DataType | str) -> None: ... +This method sends the request to the server. The optional message_body +argument can be used to pass a message body associated with the +request. +""" + def send(self, data: _DataType | str) -> None: + """Send 'data' to the server. +``data`` can be a string object, a bytes object, an array object, a +file-like object that supports a .read() method, or an iterable object. +""" class HTTPSConnection(HTTPConnection): + """This class allows communication via SSL. +""" # Can be `None` if `.connect()` was not called: sock: ssl.SSLSocket | MaybeNone if sys.version_info >= (3, 12): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi index 31e1d3fc83785..083be4b39f145 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi @@ -1,3 +1,29 @@ +"""HTTP cookie handling for web clients. + +This module has (now fairly distant) origins in Gisle Aas' Perl module +HTTP::Cookies, from the libwww-perl library. + +Docstrings, comments and debug strings in this code refer to the +attributes of the HTTP cookie system as cookie-attributes, to distinguish +them clearly from Python attributes. 
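The same GET request spelled out with the lower-level putrequest()/putheader()/endheaders() methods documented above, which request() composes internally; the host and header value are illustrative:

    import http.client

    conn = http.client.HTTPConnection("example.org")
    conn.putrequest("GET", "/")                    # adds Host: and Accept-Encoding: unless skipped
    conn.putheader("User-Agent", "example-client/0.1")
    conn.endheaders()                              # optionally pass a message_body here
    resp = conn.getresponse()
    print(resp.status)
    conn.close()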
+ +Class diagram (note that BSDDBCookieJar and the MSIE* classes are not +distributed with the Python standard library, but are available from +http://wwwsearch.sf.net/): + + CookieJar____ + / \\ \\ + FileCookieJar \\ \\ + / | \\ \\ \\ + MozillaCookieJar | LWPCookieJar \\ \\ + | | \\ + | ---MSIEBase | \\ + | / | | \\ + | / MSIEDBCookieJar BSDDBCookieJar + |/ + MSIECookieJar + +""" import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -22,6 +48,11 @@ _T = TypeVar("_T") class LoadError(OSError): ... class CookieJar: + """Collection of HTTP cookies. + +You may not need to know about this class: try +urllib.request.build_opener(HTTPCookieProcessor).open(url). +""" non_word_re: ClassVar[Pattern[str]] # undocumented quote_re: ClassVar[Pattern[str]] # undocumented strict_domain_re: ClassVar[Pattern[str]] # undocumented @@ -29,43 +60,169 @@ class CookieJar: dots_re: ClassVar[Pattern[str]] # undocumented magic_re: ClassVar[Pattern[str]] # undocumented def __init__(self, policy: CookiePolicy | None = None) -> None: ... - def add_cookie_header(self, request: Request) -> None: ... - def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... + def add_cookie_header(self, request: Request) -> None: + """Add correct Cookie: header to request (urllib.request.Request object). + +The Cookie2 header is also added unless policy.hide_cookie2 is true. + +""" + def extract_cookies(self, response: HTTPResponse, request: Request) -> None: + """Extract cookies from response, where allowable given the request. +""" def set_policy(self, policy: CookiePolicy) -> None: ... - def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... - def set_cookie(self, cookie: Cookie) -> None: ... - def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... - def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... - def clear_session_cookies(self) -> None: ... - def clear_expired_cookies(self) -> None: ... # undocumented + def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: + """Return sequence of Cookie objects extracted from response object. +""" + def set_cookie(self, cookie: Cookie) -> None: + """Set a cookie, without checking whether or not it should be set. +""" + def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: + """Set a cookie if policy says it's OK to do so. +""" + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: + """Clear some cookies. + +Invoking this method without arguments will clear all cookies. If +given a single argument, only cookies belonging to that domain will be +removed. If given two arguments, cookies belonging to the specified +path within that domain are removed. If given three arguments, then +the cookie with the specified name, path and domain is removed. + +Raises KeyError if no matching cookie exists. + +""" + def clear_session_cookies(self) -> None: + """Discard all session cookies. + +Note that the .save() method won't save session cookies anyway, unless +you ask otherwise by passing a true ignore_discard argument. + +""" + def clear_expired_cookies(self) -> None: # undocumented + """Discard all expired cookies. 
+ +You probably don't need to call this method: expired cookies are never +sent back to the server (provided you're using DefaultCookiePolicy), +this method is called by CookieJar itself every so often, and the +.save() method won't save expired cookies anyway (unless you ask +otherwise by passing a true ignore_expires argument). + +""" def __iter__(self) -> Iterator[Cookie]: ... - def __len__(self) -> int: ... + def __len__(self) -> int: + """Return number of contained cookies. +""" class FileCookieJar(CookieJar): + """CookieJar that can be loaded from and saved to a file. +""" filename: str | None delayload: bool - def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... - def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... - def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... - def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: + """ +Cookies are NOT loaded from the named file until either the .load() or +.revert() method is called. + +""" + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: + """Save cookies to a file. +""" + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: + """Load cookies from a file. +""" + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: + """Clear all cookies and reload cookies from a saved file. + +Raises LoadError (or OSError) if reversion is not successful; the +object's state will not be altered if this happens. + +""" class MozillaCookieJar(FileCookieJar): + """ + +WARNING: you may want to backup your browser's cookies file if you use +this class to save cookies. I *think* it works, but there have been +bugs in the past! + +This class differs from CookieJar only in the format it uses to save and +load cookies to and from a file. This class uses the Mozilla/Netscape +'cookies.txt' format. curl and lynx use this file format, too. + +Don't expect cookies saved while the browser is running to be noticed by +the browser (in fact, Mozilla on unix will overwrite your saved cookies if +you change them on disk while it's running; on Windows, you probably can't +save at all while the browser is running). + +Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to +Netscape cookies on saving. + +In particular, the cookie version and port number information is lost, +together with information about whether or not Path, Port and Discard were +specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the +domain as set in the HTTP header started with a dot (yes, I'm aware some +domains in Netscape files start with a dot and some don't -- trust me, you +really don't want to know any more about this). + +Note that though Mozilla and Netscape use the same format, they use +slightly different headers. The class saves cookies using the Netscape +header by default (Mozilla can cope with that). 
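A sketch of the FileCookieJar workflow documented above, wired into urllib.request via HTTPCookieProcessor as the CookieJar docstring suggests; the file name and URL are illustrative and the open() call needs network access:

    import urllib.request
    from http.cookiejar import MozillaCookieJar

    jar = MozillaCookieJar("cookies.txt")
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))

    opener.open("https://example.org/")            # cookies set by the server land in the jar
    for cookie in jar:
        print(cookie.name, cookie.domain)

    jar.save(ignore_discard=True)                  # session cookies are skipped unless asked for
    jar.load("cookies.txt")                        # reload from the Netscape-format file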
+ +""" if sys.version_info < (3, 10): header: ClassVar[str] # undocumented class LWPCookieJar(FileCookieJar): - def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented + """ +The LWPCookieJar saves a sequence of "Set-Cookie3" lines. +"Set-Cookie3" is the format used by the libwww-perl library, not known +to be compatible with any browser, but which is easy to read and +doesn't lose information about RFC 2965 cookies. + +Additional methods + +as_lwp_str(ignore_discard=True, ignore_expired=True) + +""" + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: # undocumented + """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. + +ignore_discard and ignore_expires: see docstring for FileCookieJar.save + +""" class CookiePolicy: + """Defines which cookies get accepted from and returned to server. + +May also modify cookies, though this is probably a bad idea. + +The subclass DefaultCookiePolicy defines the standard rules for Netscape +and RFC 2965 cookies -- override that if you want a customized policy. + +""" netscape: bool rfc2965: bool hide_cookie2: bool - def set_ok(self, cookie: Cookie, request: Request) -> bool: ... - def return_ok(self, cookie: Cookie, request: Request) -> bool: ... - def domain_return_ok(self, domain: str, request: Request) -> bool: ... - def path_return_ok(self, path: str, request: Request) -> bool: ... + def set_ok(self, cookie: Cookie, request: Request) -> bool: + """Return true if (and only if) cookie should be accepted from server. + +Currently, pre-expired cookies never get this far -- the CookieJar +class deletes such cookies itself. + +""" + def return_ok(self, cookie: Cookie, request: Request) -> bool: + """Return true if (and only if) cookie should be returned to server. +""" + def domain_return_ok(self, domain: str, request: Request) -> bool: + """Return false if cookies should not be returned, given cookie domain. + """ + def path_return_ok(self, path: str, request: Request) -> bool: + """Return false if cookies should not be returned, given cookie path. + """ class DefaultCookiePolicy(CookiePolicy): + """Implements the standard rules for accepting and returning cookies. +""" rfc2109_as_netscape: bool strict_domain: bool strict_rfc2965_unverifiable: bool @@ -93,12 +250,22 @@ class DefaultCookiePolicy(CookiePolicy): strict_ns_set_initial_dollar: bool = False, strict_ns_set_path: bool = False, secure_protocols: Sequence[str] = ("https", "wss"), - ) -> None: ... - def blocked_domains(self) -> tuple[str, ...]: ... - def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... + ) -> None: + """Constructor arguments should be passed as keyword arguments only. +""" + def blocked_domains(self) -> tuple[str, ...]: + """Return the sequence of blocked domains (as a tuple). +""" + def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: + """Set the sequence of blocked domains. +""" def is_blocked(self, domain: str) -> bool: ... - def allowed_domains(self) -> tuple[str, ...] | None: ... - def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: ... + def allowed_domains(self) -> tuple[str, ...] | None: + """Return None, or the sequence of allowed domains (as a tuple). +""" + def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: + """Set the sequence of allowed domains, or None. +""" def is_not_allowed(self, domain: str) -> bool: ... 
def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented def set_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented @@ -114,6 +281,22 @@ class DefaultCookiePolicy(CookiePolicy): def return_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented class Cookie: + """HTTP Cookie. + +This class represents both Netscape and RFC 2965 cookies. + +This is deliberately a very simple class. It just holds attributes. It's +possible to construct Cookie instances that don't comply with the cookie +standards. CookieJar.make_cookies is the factory function for Cookie +objects -- it deals with cookie parsing, supplying defaults, and +normalising to the representation used in this class. CookiePolicy is +responsible for checking them to see whether they should be accepted from +and returned to the server. + +Note that the port may be present in the headers, but unspecified ("Port" +rather than"Port=80", for example); if this is the case, port is None. + +""" version: int | None name: str value: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi index 4df12e3125d4b..b07dad10db629 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi @@ -1,3 +1,92 @@ +""" +Here's a sample session to show how to use this module. +At the moment, this is the only documentation. + +The Basics +---------- + +Importing is easy... + + >>> from http import cookies + +Most of the time you start by creating a cookie. + + >>> C = cookies.SimpleCookie() + +Once you've created your Cookie, you can add values just as if it were +a dictionary. + + >>> C = cookies.SimpleCookie() + >>> C["fig"] = "newton" + >>> C["sugar"] = "wafer" + >>> C.output() + 'Set-Cookie: fig=newton\\r\\nSet-Cookie: sugar=wafer' + +Notice that the printable representation of a Cookie is the +appropriate format for a Set-Cookie: header. This is the +default behavior. You can change the header and printed +attributes by using the .output() function + + >>> C = cookies.SimpleCookie() + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> print(C.output(header="Cookie:")) + Cookie: rocky=road; Path=/cookie + >>> print(C.output(attrs=[], header="Cookie:")) + Cookie: rocky=road + +The load() method of a Cookie extracts cookies from a string. In a +CGI script, you would use this method to extract the cookies from the +HTTP_COOKIE environment variable. + + >>> C = cookies.SimpleCookie() + >>> C.load("chips=ahoy; vienna=finger") + >>> C.output() + 'Set-Cookie: chips=ahoy\\r\\nSet-Cookie: vienna=finger' + +The load() method is darn-tootin smart about identifying cookies +within a string. Escaped quotation marks, nested semicolons, and other +such trickeries do not confuse it. + + >>> C = cookies.SimpleCookie() + >>> C.load('keebler="E=everybody; L=\\\\"Loves\\\\"; fudge=\\\\012;";') + >>> print(C) + Set-Cookie: keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;" + +Each element of the Cookie also supports all of the RFC 2109 +Cookie attributes. Here's an example which sets the Path +attribute. + + >>> C = cookies.SimpleCookie() + >>> C["oreo"] = "doublestuff" + >>> C["oreo"]["path"] = "/" + >>> print(C) + Set-Cookie: oreo=doublestuff; Path=/ + +Each dictionary element has a 'value' attribute, which gives you +back the value associated with the key. 
+ + >>> C = cookies.SimpleCookie() + >>> C["twix"] = "none for you" + >>> C["twix"].value + 'none for you' + +The SimpleCookie expects that all values should be standard strings. +Just to be sure, SimpleCookie invokes the str() builtin to convert +the value to a string, when the values are set dictionary-style. + + >>> C = cookies.SimpleCookie() + >>> C["number"] = 7 + >>> C["string"] = "seven" + >>> C["number"].value + '7' + >>> C["string"].value + 'seven' + >>> C.output() + 'Set-Cookie: number=7\\r\\nSet-Cookie: string=seven' + +Finis. +""" from collections.abc import Iterable, Mapping from types import GenericAlias from typing import Any, Generic, TypeVar, overload @@ -9,7 +98,13 @@ _DataType: TypeAlias = str | Mapping[str, str | Morsel[Any]] _T = TypeVar("_T") @overload -def _quote(str: None) -> None: ... +def _quote(str: None) -> None: + """Quote a string for use in a cookie header. + +If the string does not need to be double-quoted, then just return the +string. Otherwise, surround the string in doublequotes and quote +(with a \\) special characters. +""" @overload def _quote(str: str) -> str: ... @overload @@ -20,6 +115,13 @@ def _unquote(str: str) -> str: ... class CookieError(Exception): ... class Morsel(dict[str, Any], Generic[_T]): + """A class to hold ONE (key, value) pair. + +In a cookie, each such pair may have several attributes, so this class is +used to keep the attributes associated with the appropriate key,value pair. +This class also includes a coded_value attribute, which is used to hold +the network representation of the value. +""" @property def value(self) -> str: ... @property @@ -41,16 +143,50 @@ class Morsel(dict[str, Any], Generic[_T]): def OutputString(self, attrs: list[str] | None = None) -> str: ... def __eq__(self, morsel: object) -> bool: ... def __setitem__(self, K: str, V: Any) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): + """A container class for a set of Morsels. +""" def __init__(self, input: _DataType | None = None) -> None: ... - def value_decode(self, val: str) -> tuple[_T, str]: ... - def value_encode(self, val: _T) -> tuple[_T, str]: ... - def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... + def value_decode(self, val: str) -> tuple[_T, str]: + """real_value, coded_value = value_decode(STRING) +Called prior to setting a cookie's value from the network +representation. The VALUE is the value read from HTTP +header. +Override this function to modify the behavior of cookies. +""" + def value_encode(self, val: _T) -> tuple[_T, str]: + """real_value, coded_value = value_encode(VALUE) +Called prior to setting a cookie's value from the dictionary +representation. The VALUE is the value being assigned. +Override this function to modify the behavior of cookies. +""" + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: + """Return a string suitable for HTTP. +""" __str__ = output - def js_output(self, attrs: list[str] | None = None) -> str: ... - def load(self, rawdata: _DataType) -> None: ... - def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... + def js_output(self, attrs: list[str] | None = None) -> str: + """Return a string suitable for JavaScript. 
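In the same spirit as the doctest session above, a short sketch of the round trip SimpleCookie is meant for (the header values are made up):

    from http.cookies import SimpleCookie

    # Parse an incoming Cookie: header value.
    incoming = SimpleCookie()
    incoming.load("session=abc123; theme=dark")
    print(incoming["theme"].value)        # 'dark'

    # Build Set-Cookie: headers for a response.
    outgoing = SimpleCookie()
    outgoing["session"] = "abc123"
    outgoing["session"]["path"] = "/"
    outgoing["session"]["httponly"] = True
    print(outgoing.output())              # e.g. 'Set-Cookie: session=abc123; HttpOnly; Path=/'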
+""" + def load(self, rawdata: _DataType) -> None: + """Load cookies from a string (presumably HTTP_COOKIE) or +from a dictionary. Loading cookies from a dictionary 'd' +is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) +""" + def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: + """Dictionary style assignment. +""" -class SimpleCookie(BaseCookie[str]): ... +class SimpleCookie(BaseCookie[str]): + """ +SimpleCookie supports strings as cookie values. When setting +the value using the dictionary assignment notation, SimpleCookie +calls the builtin str() to convert the value to a string. Values +received from HTTP are kept as strings. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi index 2c1a374331bcc..cd23a69ebc198 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi @@ -1,3 +1,35 @@ +"""HTTP server classes. + +Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, +and (deprecated) CGIHTTPRequestHandler for CGI scripts. + +It does, however, optionally implement HTTP/1.1 persistent connections. + +Notes on CGIHTTPRequestHandler +------------------------------ + +This class is deprecated. It implements GET and POST requests to cgi-bin scripts. + +If the os.fork() function is not present (Windows), subprocess.Popen() is used, +with slightly altered but never documented semantics. Use from a threaded +process is likely to trigger a warning at os.fork() time. + +In all cases, the implementation is intentionally naive -- all +requests are executed synchronously. + +SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL +-- it may execute arbitrary Python code or external programs. + +Note that status code 200 is sent prior to execution of a CGI script, so +scripts cannot send other status codes such as 302 (redirect). + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" import _socket import email.message import io @@ -58,11 +90,114 @@ if sys.version_info >= (3, 14): password: _PasswordType | None = None, alpn_protocols: Iterable[str] | None = None, ) -> None: ... - def server_activate(self) -> None: ... + def server_activate(self) -> None: + """Wrap the socket in SSLSocket. +""" class ThreadingHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer): ... class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + """HTTP request handler base class. + +The following explanation of HTTP serves to guide you through the +code as well as to expose any misunderstandings I may have about +HTTP (so you don't need to read the code to figure out I'm wrong +:-). + +HTTP (HyperText Transfer Protocol) is an extensible protocol on +top of a reliable stream transport (e.g. TCP/IP). The protocol +recognizes three parts to a request: + +1. One line identifying the request type and path +2. An optional set of RFC-822-style headers +3. An optional data part + +The headers and data are separated by a blank line. + +The first line of the request has the form + + + +where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, +and should be the string "HTTP/1.0" or "HTTP/1.1". 
+ is encoded using the URL encoding scheme (using %xx to signify +the ASCII character with hex code xx). + +The specification specifies that lines are separated by CRLF but +for compatibility with the widest range of clients recommends +servers also handle LF. Similarly, whitespace in the request line +is treated sensibly (allowing multiple spaces between components +and allowing trailing whitespace). + +Similarly, for output, lines ought to be separated by CRLF pairs +but most clients grok LF characters just fine. + +If the first line of the request has the form + + + +(i.e. is left out) then this is assumed to be an HTTP +0.9 request; this form has no optional headers and data part and +the reply consists of just the data. + +The reply form of the HTTP 1.x protocol again has three parts: + +1. One line giving the response code +2. An optional set of RFC-822-style headers +3. The data + +Again, the headers and data are separated by a blank line. + +The response code line has the form + + + +where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or +failure of the request, and is an optional +human-readable string explaining what the response code means. + +This server parses the request and the headers, and then calls a +function specific to the request type (). Specifically, +a request SPAM will be handled by a method do_SPAM(). If no +such method exists the server sends an error response to the +client. If it exists, it is called with no arguments: + +do_SPAM() + +Note that the request name is case sensitive (i.e. SPAM and spam +are different requests). + +The various request details are stored in instance variables: + +- client_address is the client IP address in the form (host, +port); + +- command, path and version are the broken-down request line; + +- headers is an instance of email.message.Message (or a derived +class) containing the header information; + +- rfile is a file object open for reading positioned at the +start of the optional input data part; + +- wfile is a file object open for writing. + +IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + +The first thing to be written must be the response line. Then +follow 0 or more header lines, then a blank line, and then the +actual data (if any). The meaning of the header lines depends on +the command executed by the server; in most cases, when data is +returned, there should be at least one header line of the form + +Content-type: / + +where and should be registered MIME types, +e.g. "text/html" or "text/plain". + +""" client_address: tuple[str, int] close_connection: bool requestline: str @@ -80,24 +215,134 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def handle_one_request(self) -> None: ... - def handle_expect_100(self) -> bool: ... - def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... - def send_response(self, code: int, message: str | None = None) -> None: ... - def send_header(self, keyword: str, value: str) -> None: ... - def send_response_only(self, code: int, message: str | None = None) -> None: ... - def end_headers(self) -> None: ... + def handle_one_request(self) -> None: + """Handle a single HTTP request. 
+ +You normally don't need to override this method; see the class +__doc__ string for information on how to handle specific HTTP +commands such as GET and POST. + +""" + def handle_expect_100(self) -> bool: + """Decide what to do with an "Expect: 100-continue" header. + +If the client is expecting a 100 Continue response, we must +respond with either a 100 Continue or a final response before +waiting for the request body. The default is to always respond +with a 100 Continue. You can behave differently (for example, +reject unauthorized requests) by overriding this method. + +This method should either return True (possibly after sending +a 100 Continue response) or send an error response and return +False. + +""" + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: + """Send and log an error reply. + +Arguments are +* code: an HTTP error code + 3 digits +* message: a simple optional 1 line reason phrase. + *( HTAB / SP / VCHAR / %x80-FF ) + defaults to short entry matching the response code +* explain: a detailed message defaults to the long entry + matching the response code. + +This sends an error response (so it must be called before any +output has been generated), logs the error, and finally sends +a piece of HTML explaining the error to the user. + +""" + def send_response(self, code: int, message: str | None = None) -> None: + """Add the response header to the headers buffer and log the +response code. + +Also send two standard headers with the server software +version and the current date. + +""" + def send_header(self, keyword: str, value: str) -> None: + """Send a MIME header to the headers buffer. +""" + def send_response_only(self, code: int, message: str | None = None) -> None: + """Send the response header only. +""" + def end_headers(self) -> None: + """Send the blank line ending the MIME headers. +""" def flush_headers(self) -> None: ... - def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... - def log_error(self, format: str, *args: Any) -> None: ... - def log_message(self, format: str, *args: Any) -> None: ... - def version_string(self) -> str: ... - def date_time_string(self, timestamp: float | None = None) -> str: ... - def log_date_time_string(self) -> str: ... - def address_string(self) -> str: ... - def parse_request(self) -> bool: ... # undocumented + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: + """Log an accepted request. + +This is called by send_response(). + +""" + def log_error(self, format: str, *args: Any) -> None: + """Log an error. + +This is called when a request cannot be fulfilled. By +default it passes the message on to log_message(). + +Arguments are the same as for log_message(). + +XXX This should go to the separate error log. + +""" + def log_message(self, format: str, *args: Any) -> None: + """Log an arbitrary message. + +This is used by all other logging functions. Override +it if you have specific logging wishes. + +The first argument, FORMAT, is a format string for the +message to be logged. If the format string contains +any % escapes requiring parameters, they should be +specified as subsequent arguments (it's just like +printf!). + +The client ip and current date/time are prefixed to +every message. + +Unicode control characters are replaced with escaped hex +before writing the output to stderr. + +""" + def version_string(self) -> str: + """Return the server software version string. 
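The write protocol spelled out in the class docstring (status line, headers, blank line, body) maps directly onto the methods stubbed above; a minimal handler might look like this, with an arbitrary host and port:

    from http.server import BaseHTTPRequestHandler, HTTPServer

    class HelloHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            body = b"hello\n"
            self.send_response(200)                         # status line plus Server/Date headers
            self.send_header("Content-Type", "text/plain")
            self.send_header("Content-Length", str(len(body)))
            self.end_headers()                              # the blank line ending the headers
            self.wfile.write(body)                          # the actual data

    if __name__ == "__main__":
        HTTPServer(("127.0.0.1", 8000), HelloHandler).serve_forever()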
+""" + def date_time_string(self, timestamp: float | None = None) -> str: + """Return the current date and time formatted for a message header. +""" + def log_date_time_string(self) -> str: + """Return the current time formatted for logging. +""" + def address_string(self) -> str: + """Return the client address. +""" + def parse_request(self) -> bool: # undocumented + """Parse a request (internal). + +The request should be stored in self.raw_requestline; the results +are in self.command, self.path, self.request_version and +self.headers. + +Return True for success, False for failure; on failure, any relevant +error response has already been sent back. + +""" class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + """Simple HTTP request handler with GET and HEAD commands. + +This serves files from the current directory and any of its +subdirectories. The MIME type for files is determined by +calling the .guess_type() method. + +The GET and HEAD requests are identical except that the HEAD +request omits the actual contents of the file. + +""" extensions_map: dict[str, str] if sys.version_info >= (3, 12): index_pages: ClassVar[tuple[str, ...]] @@ -110,33 +355,153 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): *, directory: StrPath | None = None, ) -> None: ... - def do_GET(self) -> None: ... - def do_HEAD(self) -> None: ... - def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented - def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented - def translate_path(self, path: str) -> str: ... # undocumented - def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented - def guess_type(self, path: StrPath) -> str: ... # undocumented + def do_GET(self) -> None: + """Serve a GET request. +""" + def do_HEAD(self) -> None: + """Serve a HEAD request. +""" + def send_head(self) -> io.BytesIO | BinaryIO | None: # undocumented + """Common code for GET and HEAD commands. + +This sends the response code and MIME headers. + +Return value is either a file object (which has to be copied +to the outputfile by the caller unless the command was HEAD, +and must be closed by the caller under all circumstances), or +None, in which case the caller has nothing further to do. + +""" + def list_directory(self, path: StrPath) -> io.BytesIO | None: # undocumented + """Helper to produce a directory listing (absent index.html). + +Return value is either a file object, or None (indicating an +error). In either case, the headers are sent, making the +interface the same as for send_head(). + +""" + def translate_path(self, path: str) -> str: # undocumented + """Translate a /-separated PATH to the local filename syntax. + +Components that mean special things to the local file system +(e.g. drive or directory names) are ignored. (XXX They should +probably be diagnosed.) + +""" + def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: # undocumented + """Copy all data between two file objects. -def executable(path: StrPath) -> bool: ... # undocumented +The SOURCE argument is a file object open for reading +(or anything with a read() method) and the DESTINATION +argument is a file object open for writing (or +anything with a write() method). + +The only reason for overriding this would be to change +the block size or perhaps to replace newlines by CRLF +-- note however that this the default server uses this +to copy binary data as well. 
+ +""" + def guess_type(self, path: StrPath) -> str: # undocumented + """Guess the type of a file. + +Argument is a PATH (a filename). + +Return value is a string of the form type/subtype, +usable for a MIME Content-type header. + +The default implementation looks the file's extension +up in the table self.extensions_map, using application/octet-stream +as a default; however it would be permissible (if +slow) to look inside the data to make a better guess. + +""" + +def executable(path: StrPath) -> bool: # undocumented + """Test for executable file. +""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + """Complete HTTP server with GET, HEAD and POST commands. + +GET and HEAD also support running CGI scripts. + +The POST command is *only* implemented for CGI scripts. + +""" cgi_directories: list[str] have_fork: bool # undocumented - def do_POST(self) -> None: ... - def is_cgi(self) -> bool: ... # undocumented - def is_executable(self, path: StrPath) -> bool: ... # undocumented - def is_python(self, path: StrPath) -> bool: ... # undocumented - def run_cgi(self) -> None: ... # undocumented + def do_POST(self) -> None: + """Serve a POST request. + +This is only implemented for CGI scripts. + +""" + def is_cgi(self) -> bool: # undocumented + """Test whether self.path corresponds to a CGI script. + +Returns True and updates the cgi_info attribute to the tuple +(dir, rest) if self.path requires running a CGI script. +Returns False otherwise. + +If any exception is raised, the caller should assume that +self.path was rejected as invalid and act accordingly. + +The default implementation tests whether the normalized url +path begins with one of the strings in self.cgi_directories +(and the next character is a '/' or the end of the string). + +""" + def is_executable(self, path: StrPath) -> bool: # undocumented + """Test whether argument path is an executable file. +""" + def is_python(self, path: StrPath) -> bool: # undocumented + """Test whether argument path is a Python script. +""" + def run_cgi(self) -> None: # undocumented + """Execute a CGI script. +""" else: class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + """Complete HTTP server with GET, HEAD and POST commands. + + GET and HEAD also support running CGI scripts. + + The POST command is *only* implemented for CGI scripts. + + """ cgi_directories: list[str] have_fork: bool # undocumented - def do_POST(self) -> None: ... - def is_cgi(self) -> bool: ... # undocumented - def is_executable(self, path: StrPath) -> bool: ... # undocumented - def is_python(self, path: StrPath) -> bool: ... # undocumented - def run_cgi(self) -> None: ... # undocumented + def do_POST(self) -> None: + """Serve a POST request. + + This is only implemented for CGI scripts. + + """ + def is_cgi(self) -> bool: # undocumented + """Test whether self.path corresponds to a CGI script. + + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. + + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. + + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). 
+ + """ + def is_executable(self, path: StrPath) -> bool: # undocumented + """Test whether argument path is an executable file. +""" + def is_python(self, path: StrPath) -> bool: # undocumented + """Test whether argument path is a Python script. +""" + def run_cgi(self) -> None: # undocumented + """Execute a CGI script. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi index 536985a592b7f..34362ab1928ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi @@ -1,3 +1,14 @@ +"""IMAP4 client. + +Based on RFC 2060. + +Public class: IMAP4 +Public variable: Debug +Public functions: Internaldate2tuple + Int2AP + ParseFlags + Time2Internaldate +""" import subprocess import sys import time @@ -23,6 +34,52 @@ _AnyResponseData: TypeAlias = list[None] | list[bytes | tuple[bytes, bytes]] Commands: dict[str, tuple[str, ...]] class IMAP4: + """IMAP4 client class. + +Instantiate with: IMAP4([host[, port[, timeout=None]]]) + + host - host's name (default: localhost); + port - port number (default: standard IMAP4 port). + timeout - socket timeout (default: None) + If timeout is not given or is None, + the global default socket timeout is used + +All IMAP4rev1 commands are supported by methods of the same +name (in lowercase). + +All arguments to commands are converted to strings, except for +AUTHENTICATE, and the last argument to APPEND which is passed as +an IMAP4 literal. If necessary (the string contains any +non-printing characters or white-space and isn't enclosed with +either parentheses or double quotes) each string is quoted. +However, the 'password' argument to the LOGIN command is always +quoted. If you want to avoid having an argument string quoted +(eg: the 'flags' argument to STORE) then enclose the string in +parentheses (eg: "(\\Deleted)"). + +Each command returns a tuple: (type, [data, ...]) where 'type' +is usually 'OK' or 'NO', and 'data' is either the text from the +tagged response, or untagged results from command. Each 'data' +is either a string, or a tuple. If a tuple, then the first part +is the header of the response, and the second part contains +the data (ie: 'literal' value). + +Errors raise the exception class .error(""). +IMAP4 server errors raise .abort(""), +which is a sub-class of 'error'. Mailbox status changes +from READ-WRITE to READ-ONLY raise the exception class +.readonly(""), which is a sub-class of 'abort'. + +"error" exceptions imply a program error. +"abort" exceptions imply the connection should be reset, and + the command re-tried. +"readonly" exceptions imply the command should be re-tried. + +Note: to use this module, you must read the RFCs pertaining to the +IMAP4 protocol, as the semantics of the arguments to each IMAP4 +command are left to the invoker, not to mention the results. Also, +most IMAP servers implement a sub-set of the commands available here. +""" class error(Exception): ... class abort(error): ... class readonly(abort): ... @@ -41,7 +98,12 @@ class IMAP4: capabilities: tuple[str, ...] PROTOCOL_VERSION: str def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... - def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: + """Setup connection to remote server on "host:port" + (default: localhost:standard IMAP4 port). 
+This connection will be used by the routines: + read, readline, send, shutdown. +""" if sys.version_info >= (3, 14): @property @deprecated("IMAP4.file is unsupported, can cause errors, and may be removed.") @@ -53,72 +115,359 @@ class IMAP4: host: str port: int sock: _socket - def read(self, size: int) -> bytes: ... - def readline(self) -> bytes: ... - def send(self, data: ReadableBuffer) -> None: ... - def shutdown(self) -> None: ... - def socket(self) -> _socket: ... - def recent(self) -> _CommandResults: ... - def response(self, code: str) -> _CommandResults: ... - def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... - def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... - def capability(self) -> _CommandResults: ... - def check(self) -> _CommandResults: ... - def close(self) -> _CommandResults: ... - def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: ... - def create(self, mailbox: str) -> _CommandResults: ... - def delete(self, mailbox: str) -> _CommandResults: ... - def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... - def enable(self, capability: str) -> _CommandResults: ... + def read(self, size: int) -> bytes: + """Read 'size' bytes from remote. +""" + def readline(self) -> bytes: + """Read line from remote. +""" + def send(self, data: ReadableBuffer) -> None: + """Send data to remote. +""" + def shutdown(self) -> None: + """Close I/O established in "open". +""" + def socket(self) -> _socket: + """Return socket instance used to connect to IMAP4 server. + +socket = .socket() +""" + def recent(self) -> _CommandResults: + """Return most recent 'RECENT' responses if any exist, +else prompt server for an update using the 'NOOP' command. + +(typ, [data]) = .recent() + +'data' is None if no new messages, +else list of RECENT responses, most recent last. +""" + def response(self, code: str) -> _CommandResults: + """Return data for response 'code' if received, or None. + +Old value for response 'code' is cleared. + +(code, [data]) = .response(code) +""" + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: + """Append message to named mailbox. + +(typ, [data]) = .append(mailbox, flags, date_time, message) + + All args except 'message' can be None. +""" + def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: + """Authenticate command - requires response processing. + +'mechanism' specifies which authentication mechanism is to +be used - it must appear in .capabilities in the +form AUTH=. + +'authobject' must be a callable object: + + data = authobject(response) + +It will be called to process server continuation responses; the +response argument it is passed will be a bytes. It should return bytes +data that will be base64 encoded and sent to the server. It should +return None if the client abort response '*' should be sent instead. +""" + def capability(self) -> _CommandResults: + """(typ, [data]) = .capability() +Fetch capabilities list from server. +""" + def check(self) -> _CommandResults: + """Checkpoint mailbox on server. + +(typ, [data]) = .check() +""" + def close(self) -> _CommandResults: + """Close currently selected mailbox. + +Deleted messages are removed from writable mailbox. +This is the recommended command before 'LOGOUT'. 
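The authenticate() stub above takes a callable that is handed each decoded server continuation and returns the raw bytes to send back; a hedged sketch for SASL PLAIN, with a made-up host and credentials:

    from imaplib import IMAP4

    def plain_auth(server_response: bytes) -> bytes:
        # imaplib base64-encodes whatever this returns before sending it.
        return b"\0user@example.com\0app-password"

    M = IMAP4("imap.example.com")      # in practice you would use IMAP4_SSL, defined further down
    typ, data = M.authenticate("PLAIN", plain_auth)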
+ +(typ, [data]) = .close() +""" + def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: + """Copy 'message_set' messages onto end of 'new_mailbox'. + +(typ, [data]) = .copy(message_set, new_mailbox) +""" + def create(self, mailbox: str) -> _CommandResults: + """Create new mailbox. + +(typ, [data]) = .create(mailbox) +""" + def delete(self, mailbox: str) -> _CommandResults: + """Delete old mailbox. + +(typ, [data]) = .delete(mailbox) +""" + def deleteacl(self, mailbox: str, who: str) -> _CommandResults: + """Delete the ACLs (remove any rights) set for who on mailbox. + +(typ, [data]) = .deleteacl(mailbox, who) +""" + def enable(self, capability: str) -> _CommandResults: + """Send an RFC5161 enable string to the server. + +(typ, [data]) = .enable(capability) +""" def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def expunge(self) -> _CommandResults: ... - def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... - def getacl(self, mailbox: str) -> _CommandResults: ... - def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... - def getquota(self, root: str) -> _CommandResults: ... - def getquotaroot(self, mailbox: str) -> _CommandResults: ... + def expunge(self) -> _CommandResults: + """Permanently remove deleted items from selected mailbox. + +Generates 'EXPUNGE' response for each deleted message. + +(typ, [data]) = .expunge() + +'data' is list of 'EXPUNGE'd message numbers in order received. +""" + def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: + """Fetch (parts of) messages. + +(typ, [data, ...]) = .fetch(message_set, message_parts) + +'message_parts' should be a string of selected parts +enclosed in parentheses, eg: "(UID BODY[TEXT])". + +'data' are tuples of message part envelope and data. +""" + def getacl(self, mailbox: str) -> _CommandResults: + """Get the ACLs for a mailbox. + +(typ, [data]) = .getacl(mailbox) +""" + def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: + """(typ, [data]) = .getannotation(mailbox, entry, attribute) +Retrieve ANNOTATIONs. +""" + def getquota(self, root: str) -> _CommandResults: + """Get the quota root's resource usage and limits. + +Part of the IMAP4 QUOTA extension defined in rfc2087. + +(typ, [data]) = .getquota(root) +""" + def getquotaroot(self, mailbox: str) -> _CommandResults: + """Get the list of quota roots for the named mailbox. + +(typ, [[QUOTAROOT responses...], [QUOTA responses]]) = .getquotaroot(mailbox) +""" if sys.version_info >= (3, 14): - def idle(self, duration: float | None = None) -> Idler: ... - - def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... - def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... - def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... - def logout(self) -> tuple[str, _AnyResponseData]: ... - def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... - def myrights(self, mailbox: str) -> _CommandResults: ... - def namespace(self) -> _CommandResults: ... - def noop(self) -> tuple[str, _list[bytes]]: ... - def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: ... - def proxyauth(self, user: str) -> _CommandResults: ... - def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... 
- def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... - def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... - def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... - def setannotation(self, *args: str) -> _CommandResults: ... - def setquota(self, root: str, limits: str) -> _CommandResults: ... - def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def idle(self, duration: float | None = None) -> Idler: + """Return an iterable IDLE context manager producing untagged responses. +If the argument is not None, limit iteration to 'duration' seconds. + +with M.idle(duration=29 * 60) as idler: + for typ, data in idler: + print(typ, data) + +Note: 'duration' requires a socket connection (not IMAP4_stream). +""" + + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: + """List mailbox names in directory matching pattern. + +(typ, [data]) = .list(directory='""', pattern='*') + +'data' is list of LIST responses. +""" + def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: + """Identify client using plaintext password. + +(typ, [data]) = .login(user, password) + +NB: 'password' will be quoted. +""" + def login_cram_md5(self, user: str, password: str) -> _CommandResults: + """Force use of CRAM-MD5 authentication. + +(typ, [data]) = .login_cram_md5(user, password) +""" + def logout(self) -> tuple[str, _AnyResponseData]: + """Shutdown connection to server. + +(typ, [data]) = .logout() + +Returns server 'BYE' response. +""" + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: + """List 'subscribed' mailbox names in directory matching pattern. + +(typ, [data, ...]) = .lsub(directory='""', pattern='*') + +'data' are tuples of message part envelope and data. +""" + def myrights(self, mailbox: str) -> _CommandResults: + """Show my ACLs for a mailbox (i.e. the rights that I have on mailbox). + +(typ, [data]) = .myrights(mailbox) +""" + def namespace(self) -> _CommandResults: + """Returns IMAP namespaces ala rfc2342 + +(typ, [data, ...]) = .namespace() +""" + def noop(self) -> tuple[str, _list[bytes]]: + """Send NOOP command. + +(typ, [data]) = .noop() +""" + def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: + """Fetch truncated part of a message. + +(typ, [data, ...]) = .partial(message_num, message_part, start, length) + +'data' is tuple of message part envelope and data. +""" + def proxyauth(self, user: str) -> _CommandResults: + """Assume authentication as "user". + +Allows an authorised administrator to proxy into any user's +mailbox. + +(typ, [data]) = .proxyauth(user) +""" + def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: + """Rename old mailbox name to new. + +(typ, [data]) = .rename(oldmailbox, newmailbox) +""" + def search(self, charset: str | None, *criteria: str) -> _CommandResults: + """Search mailbox for matching messages. + +(typ, [data]) = .search(charset, criterion, ...) + +'data' is space separated list of matching message numbers. +If UTF8 is enabled, charset MUST be None. +""" + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: + """Select a mailbox. + +Flush all untagged responses. + +(typ, [data]) = .select(mailbox='INBOX', readonly=False) + +'data' is count of messages in mailbox ('EXISTS' response). 
+ +Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so +other responses should be obtained via .response('FLAGS') etc. +""" + def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: + """Set a mailbox acl. + +(typ, [data]) = .setacl(mailbox, who, what) +""" + def setannotation(self, *args: str) -> _CommandResults: + """(typ, [data]) = .setannotation(mailbox[, entry, attribute]+) +Set ANNOTATIONs. +""" + def setquota(self, root: str, limits: str) -> _CommandResults: + """Set the quota root's resource limits. + +(typ, [data]) = .setquota(root, limits) +""" + def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: + """IMAP4rev1 extension SORT command. + +(typ, [data]) = .sort(sort_criteria, charset, search_criteria, ...) +""" def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... - def status(self, mailbox: str, names: str) -> _CommandResults: ... - def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... - def subscribe(self, mailbox: str) -> _CommandResults: ... - def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: ... - def uid(self, command: str, *args: str) -> _CommandResults: ... - def unsubscribe(self, mailbox: str) -> _CommandResults: ... - def unselect(self) -> _CommandResults: ... - def xatom(self, name: str, *args: str) -> _CommandResults: ... + def status(self, mailbox: str, names: str) -> _CommandResults: + """Request named status conditions for mailbox. + +(typ, [data]) = .status(mailbox, names) +""" + def store(self, message_set: str, command: str, flags: str) -> _CommandResults: + """Alters flag dispositions for messages in mailbox. + +(typ, [data]) = .store(message_set, command, flags) +""" + def subscribe(self, mailbox: str) -> _CommandResults: + """Subscribe to new mailbox. + +(typ, [data]) = .subscribe(mailbox) +""" + def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: + """IMAPrev1 extension THREAD command. + +(type, [data]) = .thread(threading_algorithm, charset, search_criteria, ...) +""" + def uid(self, command: str, *args: str) -> _CommandResults: + """Execute "command arg ..." with messages identified by UID, + rather than message number. + +(typ, [data]) = .uid(command, arg1, arg2, ...) + +Returns response appropriate to 'command'. +""" + def unsubscribe(self, mailbox: str) -> _CommandResults: + """Unsubscribe from old mailbox. + +(typ, [data]) = .unsubscribe(mailbox) +""" + def unselect(self) -> _CommandResults: + """Free server's resources associated with the selected mailbox +and returns the server to the authenticated state. +This command performs the same actions as CLOSE, except +that no messages are permanently removed from the currently +selected mailbox. + +(typ, [data]) = .unselect() +""" + def xatom(self, name: str, *args: str) -> _CommandResults: + """Allow simple extension commands + notified by server in CAPABILITY response. + +Assumes command is legal in current state. + +(typ, [data]) = .xatom(name, arg, ...) + +Returns response appropriate to extension command 'name'. +""" def print_log(self) -> None: ... if sys.version_info >= (3, 14): class Idler: + """Iterable IDLE context manager: start IDLE & produce untagged responses. + +An object of this type is returned by the IMAP4.idle() method. + +Note: The name and structure of this class are subject to change. 
+""" def __init__(self, imap: IMAP4, duration: float | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: object, exc_val: Unused, exc_tb: Unused) -> Literal[False]: ... def __iter__(self) -> Self: ... def __next__(self) -> tuple[str, float | None]: ... - def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: ... + def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: + """Yield a burst of responses no more than 'interval' seconds apart. + +with M.idle() as idler: + # get a response and any others following by < 0.1 seconds + batch = list(idler.burst()) + print(f'processing {len(batch)} responses...') + print(batch) + +Note: This generator requires a socket connection (not IMAP4_stream). +""" class IMAP4_SSL(IMAP4): + """IMAP4 client class over SSL connection + +Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) + + host - host's name (default: localhost); + port - port number (default: standard IMAP4 SSL port); + ssl_context - a SSLContext object that contains your certificate chain + and private key (default: None) + timeout - socket timeout (default: None) If timeout is not given or is None, + the global default socket timeout is used + +for more documentation see the docstring of the parent class IMAP4. +""" if sys.version_info < (3, 12): keyfile: str certfile: str @@ -144,10 +493,23 @@ class IMAP4_SSL(IMAP4): else: file: IO[Any] - def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: + """Setup connection to remote server on "host:port". + (default: localhost:standard IMAP4 SSL port). +This connection will be used by the routines: + read, readline, send, shutdown. +""" def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): + """IMAP4 client class over a stream + +Instantiate with: IMAP4_stream(command) + + "command" - a string that can be passed to subprocess.Popen() + +for more documentation see the docstring of the parent class IMAP4. +""" command: str def __init__(self, command: str) -> None: ... if sys.version_info >= (3, 14): @@ -159,16 +521,42 @@ class IMAP4_stream(IMAP4): process: subprocess.Popen[bytes] writefile: IO[Any] readfile: IO[Any] - def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: + """Setup a stream connection. +This connection will be used by the routines: + read, readline, send, shutdown. +""" class _Authenticator: + """Private class to provide en/decoding +for base64-based authentication conversation. +""" mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... def encode(self, inp: bytes | bytearray | memoryview) -> str: ... def decode(self, inp: str | SizedBuffer) -> bytes: ... -def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... -def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... -def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... -def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: + """Parse an IMAP4 INTERNALDATE string. 
+ +Return corresponding local time. The return value is a +time.struct_time tuple or None if the string has wrong format. +""" +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: + """Convert integer to A-P string representation. +""" +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: + """Convert IMAP4 flags response to python tuple. +""" +def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: + """Convert date_time to IMAP4 INTERNALDATE representation. + +Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. The +date_time argument can be a number (int or float) representing +seconds since epoch (as returned by time.time()), a 9-tuple +representing local time, an instance of time.struct_time (as +returned by time.localtime()), an aware datetime instance or a +double-quoted string. In the last case, it is assumed to already +be in the correct format. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi index e45ca3eb5bdbc..bcd398f0b4b88 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi @@ -1,3 +1,5 @@ +"""Recognize image file formats based on their first few bytes. +""" from _typeshed import StrPath from collections.abc import Callable from typing import Any, BinaryIO, Protocol, overload, type_check_only @@ -11,7 +13,9 @@ class _ReadableBinary(Protocol): def seek(self, offset: int, /) -> Any: ... @overload -def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: + """Return the type of image contained in a file or byte stream. +""" @overload def what(file: Any, h: bytes) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi index b5b4223aa58e9..a5e9ec829d1ac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi @@ -1,3 +1,10 @@ +"""This module provides the components needed to build your own __import__ +function. Undocumented functions are obsolete. + +In most cases it is preferred you consider using the importlib module's +functionality over this module. + +""" import types from _imp import ( acquire_lock as acquire_lock, @@ -26,16 +33,61 @@ PY_FROZEN: Final = 7 PY_CODERESOURCE: Final = 8 IMP_HOOK: Final = 9 -def new_module(name: str) -> types.ModuleType: ... -def get_magic() -> bytes: ... -def get_tag() -> str: ... -def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... -def source_from_cache(path: StrPath) -> str: ... -def get_suffixes() -> list[tuple[str, str, int]]: ... +def new_module(name: str) -> types.ModuleType: + """**DEPRECATED** + + Create a new module. + + The module is not entered into sys.modules. + + """ +def get_magic() -> bytes: + """**DEPRECATED** + + Return the magic number for .pyc files. + """ +def get_tag() -> str: + """Return the magic tag for .pyc files. +""" +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: + """**DEPRECATED** + + Given the path to a .py file, return the path to its .pyc file. + + The .py file does not need to exist; this simply returns the path to the + .pyc file calculated as if the .py file were imported. + + If debug_override is not None, then it must be a boolean and is used in + place of sys.flags.optimize. 
+ + If sys.implementation.cache_tag is None then NotImplementedError is raised. + + """ +def source_from_cache(path: StrPath) -> str: + """**DEPRECATED** + + Given the path to a .pyc. file, return the path to its .py file. + + The .pyc file does not need to exist; this simply returns the path to + the .py file calculated to correspond to the .pyc file. If path does + not conform to PEP 3147 format, ValueError will be raised. If + sys.implementation.cache_tag is None then NotImplementedError is raised. + + """ +def get_suffixes() -> list[tuple[str, str, int]]: + """**DEPRECATED** +""" class NullImporter: + """**DEPRECATED** + + Null import object. + + """ def __init__(self, path: StrPath) -> None: ... - def find_module(self, fullname: Any) -> None: ... + def find_module(self, fullname: Any) -> None: + """Always returns None. +""" # Technically, a text file has to support a slightly different set of operations than a binary file, # but we ignore that here. @@ -50,14 +102,51 @@ class _FileLike(Protocol): # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... -def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... -def load_package(name: str, path: StrPath) -> types.ModuleType: ... -def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: + """**DEPRECATED** +""" +def load_package(name: str, path: StrPath) -> types.ModuleType: + """**DEPRECATED** +""" +def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: + """**DEPRECATED** + + Load a module, given information returned by find_module(). + + The module name must include the full package name, if any. + + """ # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. def find_module( name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None -) -> tuple[IO[Any], str, tuple[str, str, int]]: ... -def reload(module: types.ModuleType) -> types.ModuleType: ... -def init_builtin(name: str) -> types.ModuleType | None: ... -def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... # file argument is ignored +) -> tuple[IO[Any], str, tuple[str, str, int]]: + """**DEPRECATED** + + Search for a module. + + If path is omitted or None, search for a built-in, frozen or special + module and continue search in sys.path. The module name cannot + contain '.'; to search for a submodule of a package, pass the + submodule name and the package's __path__. + + """ +def reload(module: types.ModuleType) -> types.ModuleType: + """**DEPRECATED** + + Reload the module and return it. + + The module must have been successfully imported before. + + """ +def init_builtin(name: str) -> types.ModuleType | None: + """**DEPRECATED** + + Load and return a built-in module by name, or None is such module doesn't + exist + """ +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: # file argument is ignored + """**DEPRECATED** + + Load an extension module. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi index d60f90adee19c..22ee90bae3b65 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi @@ -1,3 +1,5 @@ +"""A pure Python implementation of import. +""" import sys from importlib._bootstrap import __import__ as __import__ from importlib.abc import Loader @@ -7,11 +9,33 @@ from typing_extensions import deprecated __all__ = ["__import__", "import_module", "invalidate_caches", "reload"] # `importlib.import_module` return type should be kept the same as `builtins.__import__` -def import_module(name: str, package: str | None = None) -> ModuleType: ... +def import_module(name: str, package: str | None = None) -> ModuleType: + """Import a module. + +The 'package' argument is required when performing a relative import. It +specifies the package to use as the anchor point from which to resolve the +relative import to an absolute import. + +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `importlib.util.find_spec()` instead.") - def find_loader(name: str, path: str | None = None) -> Loader | None: ... + def find_loader(name: str, path: str | None = None) -> Loader | None: + """Return the loader for the specified module. + + This is a backward-compatible wrapper around find_spec(). + + This function is deprecated in favor of importlib.util.find_spec(). + + """ + +def invalidate_caches() -> None: + """Call the invalidate_caches() method on all meta path finders stored in +sys.meta_path (where implemented). +""" +def reload(module: ModuleType) -> ModuleType: + """Reload the module and return it. + +The module must have been successfully imported before. -def invalidate_caches() -> None: ... -def reload(module: ModuleType) -> ModuleType: ... +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi index 90ab340219172..20a094cbde421 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi @@ -1,3 +1,5 @@ +"""Subset of importlib.abc used to reduce importlib.util imports. +""" import sys import types from abc import ABCMeta @@ -6,15 +8,43 @@ from typing_extensions import deprecated if sys.version_info >= (3, 10): class Loader(metaclass=ABCMeta): - def load_module(self, fullname: str) -> types.ModuleType: ... + """Abstract base class for import loaders. +""" + def load_module(self, fullname: str) -> types.ModuleType: + """Return the loaded module. + +The module must be added to sys.modules and have import-related +attributes set properly. The fullname is a str. + +ImportError is raised on failure. + +This method is deprecated in favor of loader.exec_module(). If +exec_module() exists then it is used to provide a backwards-compatible +functionality for this method. + +""" if sys.version_info < (3, 12): @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "The module spec is now used by the import machinery to generate a module repr." ) - def module_repr(self, module: types.ModuleType) -> str: ... + def module_repr(self, module: types.ModuleType) -> str: + """Return a module's repr. + + Used by the module type when the method does not raise + NotImplementedError. + + This method is deprecated. 
+ + """ + + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: + """Return a module to initialize and into which to load. - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... +This method should raise ImportError if anything prevents it +from creating a new module. It may return None to indicate +that the spec should create the new module. +""" # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi index 02427ff420620..908b1e66af4ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi @@ -1,2 +1,10 @@ +"""Core implementation of import. + +This module is NOT meant to be directly imported! It has been designed such +that it can be bootstrapped into Python as the implementation of import. As +such it requires the injection of specific modules and attributes in order to +work. One should use importlib as the public-facing version of this module. + +""" from _frozen_importlib import * from _frozen_importlib import __import__ as __import__, _init_module_attrs as _init_module_attrs diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi index 6210ce7083afa..cd0334025260b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi @@ -1,2 +1,10 @@ +"""Core implementation of path-based import. + +This module is NOT meant to be directly imported! It has been designed such +that it can be bootstrapped into Python as the implementation of import. As +such it requires the injection of specific modules and attributes in order to +work. One should use importlib as the public-facing version of this module. + +""" from _frozen_importlib_external import * from _frozen_importlib_external import _NamespaceLoader as _NamespaceLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi index ef7761f7119b9..cc518d8832242 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi @@ -1,3 +1,5 @@ +"""Abstract base classes related to import. +""" import _ast import sys import types @@ -29,151 +31,433 @@ if sys.version_info >= (3, 10): from importlib._abc import Loader as Loader else: class Loader(metaclass=ABCMeta): - def load_module(self, fullname: str) -> types.ModuleType: ... - def module_repr(self, module: types.ModuleType) -> str: ... - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + """Abstract base class for import loaders. +""" + def load_module(self, fullname: str) -> types.ModuleType: + """Return the loaded module. + + The module must be added to sys.modules and have import-related + attributes set properly. The fullname is a str. + + ImportError is raised on failure. + + This method is deprecated in favor of loader.exec_module(). If + exec_module() exists then it is used to provide a backwards-compatible + functionality for this method. 
+ + """ + def module_repr(self, module: types.ModuleType) -> str: + """Return a module's repr. + + Used by the module type when the method does not raise + NotImplementedError. + + This method is deprecated. + + """ + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: + """Return a module to initialize and into which to load. + + This method should raise ImportError if anything prevents it + from creating a new module. It may return None to indicate + that the spec should create the new module. + """ # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use `MetaPathFinder` or `PathEntryFinder` instead.") - class Finder(metaclass=ABCMeta): ... + class Finder(metaclass=ABCMeta): + """Legacy abstract base class for import finders. + + It may be subclassed for compatibility with legacy third party + reimplementations of the import system. Otherwise, finder + implementations should derive from the more specific MetaPathFinder + or PathEntryFinder ABCs. + + Deprecated since Python 3.3 + """ @deprecated("Deprecated since Python 3.7. Use `importlib.resources.abc.TraversableResources` instead.") class ResourceLoader(Loader): + """Abstract base class for loaders which can return data from their +back-end storage to facilitate reading data to perform an import. + +This ABC represents one of the optional protocols specified by PEP 302. + +For directly loading resources, use TraversableResources instead. This class +primarily exists for backwards compatibility with other ABCs in this module. + +""" @abstractmethod - def get_data(self, path: str) -> bytes: ... + def get_data(self, path: str) -> bytes: + """Abstract method which when implemented should return the bytes for +the specified path. The path must be a str. +""" class InspectLoader(Loader): - def is_package(self, fullname: str) -> bool: ... - def get_code(self, fullname: str) -> types.CodeType | None: ... + """Abstract base class for loaders which support inspection about the +modules they can load. + +This ABC represents one of the optional protocols specified by PEP 302. + +""" + def is_package(self, fullname: str) -> bool: + """Optional method which when implemented should return whether the +module is a package. The fullname is a str. Returns a bool. + +Raises ImportError if the module cannot be found. +""" + def get_code(self, fullname: str) -> types.CodeType | None: + """Method which returns the code object for the module. + +The fullname is a str. Returns a types.CodeType if possible, else +returns None if a code object does not make sense +(e.g. built-in module). Raises ImportError if the module cannot be +found. +""" @abstractmethod - def get_source(self, fullname: str) -> str | None: ... - def exec_module(self, module: types.ModuleType) -> None: ... + def get_source(self, fullname: str) -> str | None: + """Abstract method which should return the source code for the +module. The fullname is a str. Returns a str. + +Raises ImportError if the module cannot be found. +""" + def exec_module(self, module: types.ModuleType) -> None: + """Execute the module. +""" @staticmethod def source_to_code( data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath = "" - ) -> types.CodeType: ... + ) -> types.CodeType: + """Compile 'data' into a code object. 
+ +The 'data' argument can be anything that compile() can handle. The'path' +argument should be where the data was retrieved (when applicable). +""" class ExecutionLoader(InspectLoader): + """Abstract base class for loaders that wish to support the execution of +modules as scripts. + +This ABC represents one of the optional protocols specified in PEP 302. + +""" @abstractmethod - def get_filename(self, fullname: str) -> str: ... + def get_filename(self, fullname: str) -> str: + """Abstract method which should return the value that __file__ is to be +set to. + +Raises ImportError if the module cannot be found. +""" class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): # type: ignore[misc] # incompatible definitions of source_to_code in the base classes + """Abstract base class for loading source code (and optionally any +corresponding bytecode). + +To support loading from source code, the abstractmethods inherited from +ResourceLoader and ExecutionLoader need to be implemented. To also support +loading from bytecode, the optional methods specified directly by this ABC +is required. + +Inherited abstractmethods not implemented in this ABC: + + * ResourceLoader.get_data + * ExecutionLoader.get_filename + +""" @deprecated("Deprecated since Python 3.3. Use `importlib.resources.abc.SourceLoader.path_stats` instead.") - def path_mtime(self, path: str) -> float: ... - def set_data(self, path: str, data: bytes) -> None: ... - def get_source(self, fullname: str) -> str | None: ... - def path_stats(self, path: str) -> Mapping[str, Any]: ... + def path_mtime(self, path: str) -> float: + """Return the (int) modification time for the path (str). +""" + def set_data(self, path: str, data: bytes) -> None: + """Write the bytes to the path (if possible). + +Accepts a str path and data as bytes. + +Any needed intermediary directories are to be created. If for some +reason the file cannot be written because of permissions, fail +silently. +""" + def get_source(self, fullname: str) -> str | None: + """Concrete implementation of InspectLoader.get_source. +""" + def path_stats(self, path: str) -> Mapping[str, Any]: + """Return a metadata dict for the source pointed to by the path (str). +Possible keys: +- 'mtime' (mandatory) is the numeric timestamp of last source + code modification; +- 'size' (optional) is the size in bytes of the source code. +""" # The base classes differ starting in 3.10: if sys.version_info >= (3, 10): # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(metaclass=ABCMeta): + """Abstract base class for import finders on sys.meta_path. +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `MetaPathFinder.find_spec()` instead.") - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: + """Return a loader for the module. + + If no module is found, return None. The fullname is a str and + the path is a list of strings or None. + + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() exists then backwards-compatible + functionality is provided for this method. - def invalidate_caches(self) -> None: ... + """ + + def invalidate_caches(self) -> None: + """An optional method for clearing the finder's cache, if any. +This method is used by importlib.invalidate_caches(). 
+""" # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(metaclass=ABCMeta): + """Abstract base class for path entry finders used by PathFinder. +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `PathEntryFinder.find_spec()` instead.") - def find_module(self, fullname: str) -> Loader | None: ... + def find_module(self, fullname: str) -> Loader | None: + """Try to find a loader for the specified module by delegating to + self.find_loader(). + + This method is deprecated in favor of finder.find_spec(). + + """ @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: + """Return (loader, namespace portion) for the path entry. - def invalidate_caches(self) -> None: ... + The fullname is a str. The namespace portion is a sequence of + path entries contributing to part of a namespace package. The + sequence may be empty. If loader is not None, the portion will + be ignored. + + The portion will be discarded if another path entry finder + locates the module as a normal module or package. + + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() is provided than backwards-compatible + functionality is provided. + """ + + def invalidate_caches(self) -> None: + """An optional method for clearing the finder's cache, if any. +This method is used by PathFinder.invalidate_caches(). +""" # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... else: # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(Finder): - def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... - def invalidate_caches(self) -> None: ... + """Abstract base class for import finders on sys.meta_path. +""" + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: + """Return a loader for the module. + + If no module is found, return None. The fullname is a str and + the path is a list of strings or None. + + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() exists then backwards-compatible + functionality is provided for this method. + + """ + def invalidate_caches(self) -> None: + """An optional method for clearing the finder's cache, if any. + This method is used by importlib.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): - def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... - def invalidate_caches(self) -> None: ... + """Abstract base class for path entry finders used by PathFinder. +""" + def find_module(self, fullname: str) -> Loader | None: + """Try to find a loader for the specified module by delegating to + self.find_loader(). + + This method is deprecated in favor of finder.find_spec(). 
+ + """ + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: + """Return (loader, namespace portion) for the path entry. + + The fullname is a str. The namespace portion is a sequence of + path entries contributing to part of a namespace package. The + sequence may be empty. If loader is not None, the portion will + be ignored. + + The portion will be discarded if another path entry finder + locates the module as a normal module or package. + + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() is provided than backwards-compatible + functionality is provided. + """ + def invalidate_caches(self) -> None: + """An optional method for clearing the finder's cache, if any. + This method is used by PathFinder.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): + """Abstract base class partially implementing the ResourceLoader and +ExecutionLoader ABCs. +""" name: str path: str - def __init__(self, fullname: str, path: str) -> None: ... - def get_data(self, path: str) -> bytes: ... - def get_filename(self, fullname: str | None = None) -> str: ... - def load_module(self, fullname: str | None = None) -> types.ModuleType: ... + def __init__(self, fullname: str, path: str) -> None: + """Cache the module name and the path to the file found by the +finder. +""" + def get_data(self, path: str) -> bytes: + """Return the data from path as raw bytes. +""" + def get_filename(self, fullname: str | None = None) -> str: + """Return the path to the source file as found by the finder. +""" + def load_module(self, fullname: str | None = None) -> types.ModuleType: + """Load a module from a file. + +This method is deprecated. Use exec_module() instead. + +""" if sys.version_info < (3, 11): class ResourceReader(metaclass=ABCMeta): + """Abstract base class for loaders to provide resource reading support. +""" @abstractmethod - def open_resource(self, resource: str) -> IO[bytes]: ... + def open_resource(self, resource: str) -> IO[bytes]: + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ @abstractmethod - def resource_path(self, resource: str) -> str: ... + def resource_path(self, resource: str) -> str: + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ if sys.version_info >= (3, 10): @abstractmethod - def is_resource(self, path: str) -> bool: ... + def is_resource(self, path: str) -> bool: + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. + """ else: @abstractmethod - def is_resource(self, name: str) -> bool: ... + def is_resource(self, name: str) -> bool: + """Return True if the named 'name' is consider a resource. +""" @abstractmethod - def contents(self) -> Iterator[str]: ... + def contents(self) -> Iterator[str]: + """Return an iterable of entries in `package`. +""" @runtime_checkable class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. 
+ """ @abstractmethod - def is_dir(self) -> bool: ... + def is_dir(self) -> bool: + """ + Return True if self is a dir + """ @abstractmethod - def is_file(self) -> bool: ... + def is_file(self) -> bool: + """ + Return True if self is a file + """ @abstractmethod - def iterdir(self) -> Iterator[Traversable]: ... + def iterdir(self) -> Iterator[Traversable]: + """ + Yield Traversable objects in self + """ if sys.version_info >= (3, 11): @abstractmethod def joinpath(self, *descendants: str) -> Traversable: ... else: @abstractmethod - def joinpath(self, child: str, /) -> Traversable: ... + def joinpath(self, child: str, /) -> Traversable: + """ + Return Traversable child in self + """ # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @overload @abstractmethod - def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... @property @abstractmethod - def name(self) -> str: ... + def name(self) -> str: + """ + The base name of this object without any parent references. + """ if sys.version_info >= (3, 10): - def __truediv__(self, child: str, /) -> Traversable: ... + def __truediv__(self, child: str, /) -> Traversable: + """ + Return Traversable child in self + """ else: @abstractmethod - def __truediv__(self, child: str, /) -> Traversable: ... + def __truediv__(self, child: str, /) -> Traversable: + """ + Return Traversable child in self + """ @abstractmethod - def read_bytes(self) -> bytes: ... + def read_bytes(self) -> bytes: + """ + Read contents of self as bytes + """ @abstractmethod - def read_text(self, encoding: str | None = None) -> str: ... + def read_text(self, encoding: str | None = None) -> str: + """ + Read contents of self as text + """ class TraversableResources(ResourceReader): + """ + The required interface for providing traversable + resources. + """ @abstractmethod - def files(self) -> Traversable: ... + def files(self) -> Traversable: + """Return a Traversable object for the loaded package. +""" def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi index 767046b70a3d1..802c90838782a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi @@ -1,3 +1,5 @@ +"""The machinery of importlib: finders, loaders, hooks, etc. +""" import sys from importlib._bootstrap import BuiltinImporter as BuiltinImporter, FrozenImporter as FrozenImporter, ModuleSpec as ModuleSpec from importlib._bootstrap_external import ( @@ -19,7 +21,9 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 14): from importlib._bootstrap_external import AppleFrameworkLoader as AppleFrameworkLoader -def all_suffixes() -> list[str]: ... 
+def all_suffixes() -> list[str]: + """Returns a list of all recognized module suffixes for this process +""" if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi index 9286e92331c82..edf7f9bfa5061 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -35,7 +35,16 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 10): from importlib.metadata._meta import PackageMetadata as PackageMetadata, SimplePath - def packages_distributions() -> Mapping[str, list[str]]: ... + def packages_distributions() -> Mapping[str, list[str]]: + """ +Return a mapping of top-level packages to their +distributions. + +>>> import collections.abc +>>> pkgs = packages_distributions() +>>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) +True +""" _SimplePath: TypeAlias = SimplePath @@ -43,13 +52,29 @@ else: _SimplePath: TypeAlias = Path class PackageNotFoundError(ModuleNotFoundError): + """The package was not found. +""" @property - def name(self) -> str: ... # type: ignore[override] + def name(self) -> str: # type: ignore[override] + """module name +""" if sys.version_info >= (3, 13): _EntryPointBase = object elif sys.version_info >= (3, 11): class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ def __getitem__(self, item: int) -> str: ... _EntryPointBase = DeprecatedTuple @@ -61,13 +86,32 @@ else: if sys.version_info >= (3, 11): class EntryPoint(_EntryPointBase): + """An entry point as defined by Python packaging conventions. + +See `the packaging docs on entry points +`_ +for more information. + +>>> ep = EntryPoint( +... name=None, group=None, value='package.module:attr [extra1, extra2]') +>>> ep.module +'package.module' +>>> ep.attr +'attr' +>>> ep.extras +['extra1', 'extra2'] +""" pattern: ClassVar[Pattern[str]] name: str value: str group: str def __init__(self, name: str, value: str, group: str) -> None: ... - def load(self) -> Any: ... # Callable[[], Any] or an importable module + def load(self) -> Any: # Callable[[], Any] or an importable module + """Load the entry point from its definition. If only a module +is indicated by the value, return that module. Otherwise, +return the named object. +""" @property def extras(self) -> list[str]: ... @property @@ -84,19 +128,60 @@ if sys.version_info >= (3, 11): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> bool: ... # undocumented + ) -> bool: # undocumented + """ +EntryPoint matches the given parameters. + +>>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') +>>> ep.matches(group='foo') +True +>>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') +True +>>> ep.matches(group='foo', name='other') +False +>>> ep.matches() +True +>>> ep.matches(extras=['extra1', 'extra2']) +True +>>> ep.matches(module='bing') +True +>>> ep.matches(attr='bong') +True +""" def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: object) -> bool: ... if sys.version_info < (3, 12): - def __iter__(self) -> Iterator[Any]: ... 
# result of iter((str, Self)), really + def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really + """ + Supply iter so one may construct dicts of EntryPoints by name. + """ else: @disjoint_base class EntryPoint(_EntryPointBase): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + `_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ pattern: ClassVar[Pattern[str]] - def load(self) -> Any: ... # Callable[[], Any] or an importable module + def load(self) -> Any: # Callable[[], Any] or an importable module + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. + """ @property def extras(self) -> list[str]: ... @property @@ -114,15 +199,43 @@ else: module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> bool: ... # undocumented + ) -> bool: # undocumented + """ + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[Any]: ... # result of iter((str, Self)), really + def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really + """ + Supply iter so one may construct dicts of EntryPoints by name. + """ if sys.version_info >= (3, 12): class EntryPoints(tuple[EntryPoint, ...]): + """ +An immutable collection of selectable EntryPoint objects. +""" __slots__ = () - def __getitem__(self, name: str) -> EntryPoint: ... # type: ignore[override] + def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] + """ +Get the EntryPoint in self matching name. +""" def select( self, *, @@ -132,20 +245,66 @@ if sys.version_info >= (3, 12): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> EntryPoints: ... + ) -> EntryPoints: + """ +Select entry points from self that match the +given parameters (typically group and/or name). +""" @property - def names(self) -> set[str]: ... + def names(self) -> set[str]: + """ +Return the set of all names of all entry points. +""" @property - def groups(self) -> set[str]: ... + def groups(self) -> set[str]: + """ +Return the set of all groups of all entry points. +""" elif sys.version_info >= (3, 10): class DeprecatedList(list[_T]): + """ + Allow an otherwise immutable object to implement mutability + for compatibility. + + >>> recwarn = getfixture('recwarn') + >>> dl = DeprecatedList(range(3)) + >>> dl[0] = 1 + >>> dl.append(3) + >>> del dl[3] + >>> dl.reverse() + >>> dl.sort() + >>> dl.extend([4]) + >>> dl.pop(-1) + 4 + >>> dl.remove(1) + >>> dl += [5] + >>> dl + [6] + [1, 2, 5, 6] + >>> dl + (6,) + [1, 2, 5, 6] + >>> dl.insert(0, 0) + >>> dl + [0, 1, 2, 5] + >>> dl == [0, 1, 2, 5] + True + >>> dl == (0, 1, 2, 5) + True + >>> len(recwarn) + 1 + """ __slots__ = () class EntryPoints(DeprecatedList[EntryPoint]): # use as list is deprecated since 3.10 + """ + An immutable collection of selectable EntryPoint objects. 
+ """ # int argument is deprecated since 3.10 __slots__ = () - def __getitem__(self, name: int | str) -> EntryPoint: ... # type: ignore[override] + def __getitem__(self, name: int | str) -> EntryPoint: # type: ignore[override] + """ + Get the EntryPoint in self matching name. + """ def select( self, *, @@ -155,14 +314,49 @@ elif sys.version_info >= (3, 10): module: str = ..., attr: str = ..., extras: list[str] = ..., - ) -> EntryPoints: ... + ) -> EntryPoints: + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ @property - def names(self) -> set[str]: ... + def names(self) -> set[str]: + """ + Return the set of all names of all entry points. + """ @property - def groups(self) -> set[str]: ... + def groups(self) -> set[str]: + """ + Return the set of all groups of all entry points. + + For coverage while SelectableGroups is present. + >>> EntryPoints().groups + set() + """ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class Deprecated(Generic[_KT, _VT]): + """ + Compatibility add-in for mapping to indicate that + mapping behavior is deprecated. + + >>> recwarn = getfixture('recwarn') + >>> class DeprecatedDict(Deprecated, dict): pass + >>> dd = DeprecatedDict(foo='bar') + >>> dd.get('baz', None) + >>> dd['foo'] + 'bar' + >>> list(dd) + ['foo'] + >>> list(dd.keys()) + ['foo'] + >>> 'foo' in dd + True + >>> list(dd.values()) + ['bar'] + >>> len(recwarn) + 1 + """ def __getitem__(self, name: _KT) -> _VT: ... @overload def get(self, name: _KT, default: None = None) -> _VT | None: ... @@ -177,12 +371,21 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `select` instead.") class SelectableGroups(Deprecated[str, EntryPoints], dict[str, EntryPoints]): # use as dict is deprecated since 3.10 + """ + A backward- and forward-compatible result from + entry_points that fully implements the dict interface. + """ @classmethod def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property def groups(self) -> set[str]: ... @property - def names(self) -> set[str]: ... + def names(self) -> set[str]: + """ + for coverage: + >>> SelectableGroups().names + set() + """ @overload def select(self) -> Self: ... @overload @@ -198,9 +401,13 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): ) -> EntryPoints: ... class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package +""" def read_text(self, encoding: str = "utf-8") -> str: ... def read_binary(self) -> bytes: ... - def locate(self) -> PathLike[str]: ... + def locate(self) -> PathLike[str]: + """Return a path-like object for this path +""" # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: hash: FileHash | None size: int | None @@ -218,60 +425,204 @@ else: _distribution_parent = object class Distribution(_distribution_parent): + """ +An abstract Python distribution package. + +Custom providers may derive from this class and define +the abstract methods to provide a concrete implementation +for their environment. Some providers may opt to override +the default implementation of some properties to bypass +the file-reading mechanism. +""" @abc.abstractmethod - def read_text(self, filename: str) -> str | None: ... + def read_text(self, filename: str) -> str | None: + """Attempt to load metadata file given by the name. 
+ +Python distribution metadata is organized by blobs of text +typically represented as "files" in the metadata directory +(e.g. package-1.0.dist-info). These files include things +like: + +- METADATA: The distribution metadata including fields + like Name and Version and Description. +- entry_points.txt: A series of entry points as defined in + `the entry points spec `_. +- RECORD: A record of files according to + `this recording spec `_. + +A package may provide any set of files, including those +not listed here or none at all. + +:param filename: The name of the file in the distribution info. +:return: The text if found, otherwise None. +""" @abc.abstractmethod - def locate_file(self, path: StrPath) -> _SimplePath: ... + def locate_file(self, path: StrPath) -> _SimplePath: + """ +Given a path to a file in this distribution, return a SimplePath +to it. +""" @classmethod - def from_name(cls, name: str) -> Distribution: ... + def from_name(cls, name: str) -> Distribution: + """Return the Distribution for the given package name. + +:param name: The name of the distribution package to search for. +:return: The Distribution instance (or subclass thereof) for the named + package, if found. +:raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. +:raises ValueError: When an invalid value is supplied for name. +""" @overload @classmethod - def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... + def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: + """Return an iterable of Distribution objects for all packages. + +Pass a ``context`` or pass keyword arguments for constructing +a context. + +:context: A ``DistributionFinder.Context`` object. +:return: Iterable of Distribution objects for packages matching + the context. +""" @overload @classmethod def discover( cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... @staticmethod - def at(path: StrPath) -> PathDistribution: ... + def at(path: StrPath) -> PathDistribution: + """Return a Distribution for the indicated metadata path. + +:param path: a string or path-like object +:return: a concrete Distribution instance for the path +""" if sys.version_info >= (3, 10): @property - def metadata(self) -> PackageMetadata: ... + def metadata(self) -> PackageMetadata: + """Return the parsed metadata for this Distribution. + +The returned object will have keys that name the various bits of +metadata per the +`Core metadata specifications `_. + +Custom providers may provide the METADATA file or override this +property. +""" @property - def entry_points(self) -> EntryPoints: ... + def entry_points(self) -> EntryPoints: + """ +Return EntryPoints for this distribution. + +Custom providers may provide the ``entry_points.txt`` file +or override this property. +""" else: @property - def metadata(self) -> Message: ... + def metadata(self) -> Message: + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ @property def entry_points(self) -> list[EntryPoint]: ... @property - def version(self) -> str: ... + def version(self) -> str: + """Return the 'Version' metadata for the distribution package. +""" @property - def files(self) -> list[PackagePath] | None: ... + def files(self) -> list[PackagePath] | None: + """Files in this distribution. 
+ +:return: List of PackagePath for this distribution or None + +Result is `None` if the metadata file that enumerates files +(i.e. RECORD for dist-info, or installed-files.txt or +SOURCES.txt for egg-info) is missing. +Result may be empty if the metadata exists but is empty. + +Custom providers are recommended to provide a "RECORD" file (in +``read_text``) or override this property to allow for callers to be +able to resolve filenames provided by the package. +""" @property - def requires(self) -> list[str] | None: ... + def requires(self) -> list[str] | None: + """Generated requirements specified for this Distribution +""" if sys.version_info >= (3, 10): @property - def name(self) -> str: ... + def name(self) -> str: + """Return the 'Name' metadata for the distribution package. +""" if sys.version_info >= (3, 13): @property def origin(self) -> types.SimpleNamespace | None: ... class DistributionFinder(MetaPathFinder): + """ +A MetaPathFinder capable of discovering installed distributions. + +Custom providers should implement this interface in order to +supply metadata. +""" class Context: + """ +Keyword arguments presented by the caller to +``distributions()`` or ``Distribution.discover()`` +to narrow the scope of a search for distributions +in all DistributionFinders. + +Each DistributionFinder may expect any parameters +and should attempt to honor the canonical +parameters defined below when appropriate. + +This mechanism gives a custom provider a means to +solicit additional details from the caller beyond +"name" and "path" when searching distributions. +For example, imagine a provider that exposes suites +of packages in either a "public" or "private" ``realm``. +A caller may wish to query only for distributions in +a particular realm and could call +``distributions(realm="private")`` to signal to the +custom provider to only include distributions from that +realm. +""" name: str | None def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... @property - def path(self) -> list[str]: ... + def path(self) -> list[str]: + """ +The sequence of directory path that a distribution finder +should search. + +Typically refers to Python installed package paths such as +"site-packages" directories and defaults to ``sys.path``. +""" @abc.abstractmethod - def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... + def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: + """ +Find distributions. + +Return an iterable of all Distribution instances capable of +loading the metadata for packages matching the ``context``, +a DistributionFinder.Context instance. +""" class MetadataPathFinder(DistributionFinder): @classmethod - def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: + """ +Find distributions. + +Return an iterable of all Distribution instances capable of +loading the metadata for packages matching ``context.name`` +(or all names if ``None`` indicated) along the paths in the list +of directories ``context.path``. +""" if sys.version_info >= (3, 11): @classmethod def invalidate_caches(cls) -> None: ... @@ -281,40 +632,128 @@ class MetadataPathFinder(DistributionFinder): class PathDistribution(Distribution): _path: _SimplePath - def __init__(self, path: _SimplePath) -> None: ... 
- def read_text(self, filename: StrPath) -> str | None: ... + def __init__(self, path: _SimplePath) -> None: + """Construct a distribution. + +:param path: SimplePath indicating the metadata directory. +""" + def read_text(self, filename: StrPath) -> str | None: + """Attempt to load metadata file given by the name. + +Python distribution metadata is organized by blobs of text +typically represented as "files" in the metadata directory +(e.g. package-1.0.dist-info). These files include things +like: + +- METADATA: The distribution metadata including fields + like Name and Version and Description. +- entry_points.txt: A series of entry points as defined in + `the entry points spec `_. +- RECORD: A record of files according to + `this recording spec `_. + +A package may provide any set of files, including those +not listed here or none at all. + +:param filename: The name of the file in the distribution info. +:return: The text if found, otherwise None. +""" def locate_file(self, path: StrPath) -> _SimplePath: ... -def distribution(distribution_name: str) -> Distribution: ... +def distribution(distribution_name: str) -> Distribution: + """Get the ``Distribution`` instance for the named package. + +:param distribution_name: The name of the distribution package as a string. +:return: A ``Distribution`` instance (or subclass thereof). +""" @overload -def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... +def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: + """Get all ``Distribution`` instances in the current environment. + +:return: An iterable of ``Distribution`` instances. +""" @overload def distributions( *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... if sys.version_info >= (3, 10): - def metadata(distribution_name: str) -> PackageMetadata: ... + def metadata(distribution_name: str) -> PackageMetadata: + """Get the metadata for the named package. + +:param distribution_name: The name of the distribution package to query. +:return: A PackageMetadata containing the parsed metadata. +""" else: - def metadata(distribution_name: str) -> Message: ... + def metadata(distribution_name: str) -> Message: + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: An email.Message containing the parsed metadata. + """ if sys.version_info >= (3, 12): def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... - ) -> EntryPoints: ... + ) -> EntryPoints: + """Return EntryPoint objects for all installed packages. + +Pass selection parameters (group or name) to filter the +result to entry points matching those properties (see +EntryPoints.select()). + +:return: EntryPoints for all installed packages. +""" elif sys.version_info >= (3, 10): @overload - def entry_points() -> SelectableGroups: ... + def entry_points() -> SelectableGroups: + """Return EntryPoint objects for all installed packages. + + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). + + For compatibility, returns ``SelectableGroups`` object unless + selection parameters are supplied. In the future, this function + will return ``EntryPoints`` instead of ``SelectableGroups`` + even when no selection parameters are supplied. 
+ + For maximum future compatibility, pass selection parameters + or invoke ``.select`` with parameters on the result. + + :return: EntryPoints or SelectableGroups for all installed packages. + """ @overload def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... ) -> EntryPoints: ... else: - def entry_points() -> dict[str, list[EntryPoint]]: ... - -def version(distribution_name: str) -> str: ... -def files(distribution_name: str) -> list[PackagePath] | None: ... -def requires(distribution_name: str) -> list[str] | None: ... + def entry_points() -> dict[str, list[EntryPoint]]: + """Return EntryPoint objects for all installed packages. + + :return: EntryPoint objects for all installed packages. + """ + +def version(distribution_name: str) -> str: + """Get the version string for the named package. + +:param distribution_name: The name of the distribution package to query. +:return: The version string for the package as defined in the package's + "Version" metadata key. +""" +def files(distribution_name: str) -> list[PackagePath] | None: + """Return a list of files for the named package. + +:param distribution_name: The name of the distribution package to query. +:return: List of files composing the distribution. +""" +def requires(distribution_name: str) -> list[str] | None: + """ +Return a list of requirements for the named package. + +:return: An iterable of requirements, suitable for + packaging.requirement.Requirement. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi index 9f791dab254fd..194b203053cd1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -14,19 +14,29 @@ class PackageMetadata(Protocol): def __getitem__(self, key: str) -> str: ... def __iter__(self) -> Iterator[str]: ... @property - def json(self) -> dict[str, str | list[str]]: ... + def json(self) -> dict[str, str | list[str]]: + """ +A JSON-compatible form of the metadata. +""" @overload - def get_all(self, name: str, failobj: None = None) -> list[Any] | None: ... + def get_all(self, name: str, failobj: None = None) -> list[Any] | None: + """Helper for @overload to raise when called. +""" @overload def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ... if sys.version_info >= (3, 12): @overload - def get(self, name: str, failobj: None = None) -> str | None: ... + def get(self, name: str, failobj: None = None) -> str | None: + """Helper for @overload to raise when called. +""" @overload def get(self, name: str, failobj: _T) -> _T | str: ... if sys.version_info >= (3, 13): class SimplePath(Protocol): + """ +A minimal subset of pathlib.Path required by Distribution. +""" def joinpath(self, other: StrPath, /) -> SimplePath: ... def __truediv__(self, other: StrPath, /) -> SimplePath: ... # Incorrect at runtime @@ -38,6 +48,9 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 12): class SimplePath(Protocol[_T_co]): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ # At runtime this is defined as taking `str | _T`, but that causes trouble. # See #11436. def joinpath(self, other: str, /) -> _T_co: ... @@ -49,6 +62,9 @@ elif sys.version_info >= (3, 12): else: class SimplePath(Protocol): + """ + A minimal subset of pathlib.Path required by PathDistribution. 
+ """ # Actually takes only self at runtime, but that's clearly wrong def joinpath(self, other: Any, /) -> SimplePath: ... # Not defined as a property at runtime, but it should be diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi index 4a6c73921535a..0c91f06d784a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi @@ -1,3 +1,9 @@ +""" +Compatibility shim for .resources.readers as found on Python 3.10. + +Consumers that can rely on Python 3.11 should use the other +module directly. +""" # On py311+, things are actually defined in importlib.resources.readers, # and re-exported here, # but doing it this way leads to less code duplication for us @@ -32,7 +38,12 @@ if sys.version_info >= (3, 10): class FileReader(abc.TraversableResources): path: pathlib.Path def __init__(self, loader: FileLoader) -> None: ... - def resource_path(self, resource: StrPath) -> str: ... + def resource_path(self, resource: StrPath) -> str: + """ +Return the file system path to prevent +`resources.path()` from creating a temporary +copy. +""" def files(self) -> pathlib.Path: ... class ZipReader(abc.TraversableResources): @@ -40,10 +51,20 @@ if sys.version_info >= (3, 10): archive: str def __init__(self, loader: zipimporter, module: str) -> None: ... def open_resource(self, resource: str) -> BufferedReader: ... - def is_resource(self, path: StrPath) -> bool: ... + def is_resource(self, path: StrPath) -> bool: + """ +Workaround for `zipfile.Path.is_file` returning true +for non-existent paths. +""" def files(self) -> zipfile.Path: ... class MultiplexedPath(abc.Traversable): + """ +Given a series of Traversable objects, implement a merged +version of the interface across all objects. Useful for +namespace packages which may be multihomed at a single +name. +""" def __init__(self, *paths: abc.Traversable) -> None: ... def iterdir(self) -> Iterator[abc.Traversable]: ... def read_bytes(self) -> NoReturn: ... @@ -68,5 +89,10 @@ if sys.version_info >= (3, 10): class NamespaceReader(abc.TraversableResources): path: MultiplexedPath def __init__(self, namespace_path: Iterable[str]) -> None: ... - def resource_path(self, resource: str) -> str: ... + def resource_path(self, resource: str) -> str: + """ +Return the file system path to prevent +`resources.path()` from creating a temporary +copy. +""" def files(self) -> MultiplexedPath: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi index 28adc37da4a42..15bf92bb1baea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi @@ -1,3 +1,11 @@ +""" +Read resources contained within a package. + +This codebase is shared between importlib.resources in the stdlib +and importlib_resources in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" import os import sys from collections.abc import Iterator @@ -58,27 +66,69 @@ if sys.version_info >= (3, 13): ) else: - def open_binary(package: Package, resource: Resource) -> BinaryIO: ... - def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... - def read_binary(package: Package, resource: Resource) -> bytes: ... 
- def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... - def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: ... - def is_resource(package: Package, name: str) -> bool: ... + def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource. +""" + def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: + """Return a file-like object opened for text reading of the resource. +""" + def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource. +""" + def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") - def contents(package: Package) -> Iterator[str]: ... + def contents(package: Package) -> Iterator[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ else: - def contents(package: Package) -> Iterator[str]: ... + def contents(package: Package) -> Iterator[str]: + """Return an iterable of entries in 'package'. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ if sys.version_info >= (3, 11): from importlib.resources._common import as_file as as_file else: - def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ if sys.version_info >= (3, 11): from importlib.resources._common import files as files else: - def files(package: Package) -> Traversable: ... 
+ def files(package: Package) -> Traversable: + """ + Get a Traversable resource from a package + """ if sys.version_info >= (3, 11): from importlib.resources.abc import ResourceReader as ResourceReader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi index 11a93ca82d8df..bb29887137deb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi @@ -17,17 +17,37 @@ if sys.version_info >= (3, 11): def package_to_anchor( func: Callable[[Anchor | None], Traversable], - ) -> Callable[[Anchor | None, Anchor | None], Traversable]: ... + ) -> Callable[[Anchor | None, Anchor | None], Traversable]: + """ +Replace 'package' parameter as 'anchor' and warn about the change. + +Other errors should fall through. + +>>> files('a', 'b') +Traceback (most recent call last): +TypeError: files() takes from 0 to 1 positional arguments but 2 were given + +Remove this compatibility in Python 3.14. +""" @overload - def files(anchor: Anchor | None = None) -> Traversable: ... + def files(anchor: Anchor | None = None) -> Traversable: + """ +Get a Traversable resource for an anchor. +""" @overload @deprecated("Deprecated since Python 3.12; will be removed in Python 3.15. Use `anchor` parameter instead.") def files(package: Anchor | None = None) -> Traversable: ... else: - def files(package: Package) -> Traversable: ... + def files(package: Package) -> Traversable: + """ + Get a Traversable resource from a package + """ - def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: ... + def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: + """ +Return the package's loader if it's a ResourceReader. +""" if sys.version_info >= (3, 12): def resolve(cand: Anchor | None) -> types.ModuleType: ... @@ -36,7 +56,19 @@ if sys.version_info >= (3, 11): def resolve(cand: Package) -> types.ModuleType: ... if sys.version_info < (3, 12): - def get_package(package: Package) -> types.ModuleType: ... + def get_package(package: Package) -> types.ModuleType: + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. + """ + + def from_package(package: types.ModuleType) -> Traversable: + """ +Return a Traversable object for the given package. - def from_package(package: types.ModuleType) -> Traversable: ... - def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: ... +""" + def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: + """ +Given a Traversable object, return that object as a +path on the local file system in a context manager. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi index 71e01bcd3d5ec..b03d8bb1b8099 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi @@ -1,3 +1,5 @@ +"""Simplified function-based API for importlib.resources +""" import sys # Even though this file is 3.13+ only, Pyright will complain in stubtest for older versions. 
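The hunks above stub the anchor-based resources API (files(), as_file(), and the functional helpers in importlib.resources._common). A minimal usage sketch of that API follows; "mypkg" and "config.json" are hypothetical names used only for illustration and do not come from this patch:

from importlib.resources import as_file, files

def load_config_text() -> str:
    # files() takes an anchor (a package name or module object) and returns
    # a Traversable rooted at that package's resources.
    return files("mypkg").joinpath("config.json").read_text(encoding="utf-8")

def print_config_path() -> None:
    # as_file() yields a real filesystem path for the Traversable, extracting
    # to a temporary file when the package is imported from a zip archive.
    with as_file(files("mypkg") / "config.json") as path:
        print(path)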
@@ -11,21 +13,40 @@ if sys.version_info >= (3, 13): from typing import BinaryIO, Literal, overload from typing_extensions import Unpack, deprecated - def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: ... + def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: + """Open for binary reading the *resource* within *package*. +""" @overload def open_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" - ) -> TextIOWrapper: ... + ) -> TextIOWrapper: + """Open for text reading the *resource* within *package*. +""" @overload def open_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> TextIOWrapper: ... - def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: ... + def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: + """Read and return contents of *resource* within *package* as bytes. +""" @overload def read_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" - ) -> str: ... + ) -> str: + """Read and return contents of *resource* within *package* as str. +""" @overload def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: ... - def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: ... - def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: ... + def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: + """Return the path to the *resource* as an actual file system path. +""" + def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: + """Return ``True`` if there is a resource named *name* in the package, + +Otherwise returns ``False``. +""" @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") - def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: ... + def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: + """Return an iterable over the named resources within the package. + +The iterable returns :class:`str` resources (e.g. files). +The iterable does not recurse into subdirectories. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi index 80d92a608604e..93de4dcb2effa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi @@ -6,47 +6,114 @@ from typing import IO, Any, Literal, Protocol, overload, runtime_checkable if sys.version_info >= (3, 11): class ResourceReader(metaclass=ABCMeta): + """Abstract base class for loaders to provide resource reading support. +""" @abstractmethod - def open_resource(self, resource: str) -> IO[bytes]: ... + def open_resource(self, resource: str) -> IO[bytes]: + """Return an opened, file-like object for binary reading. + +The 'resource' argument is expected to represent only a file name. +If the resource cannot be found, FileNotFoundError is raised. +""" @abstractmethod - def resource_path(self, resource: str) -> str: ... + def resource_path(self, resource: str) -> str: + """Return the file system path to the specified resource. + +The 'resource' argument is expected to represent only a file name. +If the resource does not exist on the file system, raise +FileNotFoundError. 
+""" @abstractmethod - def is_resource(self, path: str) -> bool: ... + def is_resource(self, path: str) -> bool: + """Return True if the named 'path' is a resource. + +Files are resources, directories are not. +""" @abstractmethod - def contents(self) -> Iterator[str]: ... + def contents(self) -> Iterator[str]: + """Return an iterable of entries in `package`. +""" @runtime_checkable class Traversable(Protocol): + """ +An object with a subset of pathlib.Path methods suitable for +traversing directories and opening files. + +Any exceptions that occur when accessing the backing resource +may propagate unaltered. +""" @abstractmethod - def is_dir(self) -> bool: ... + def is_dir(self) -> bool: + """ +Return True if self is a directory +""" @abstractmethod - def is_file(self) -> bool: ... + def is_file(self) -> bool: + """ +Return True if self is a file +""" @abstractmethod - def iterdir(self) -> Iterator[Traversable]: ... + def iterdir(self) -> Iterator[Traversable]: + """ +Yield Traversable objects in self +""" @abstractmethod - def joinpath(self, *descendants: str) -> Traversable: ... + def joinpath(self, *descendants: str) -> Traversable: + """ +Return Traversable resolved with any descendants applied. + +Each descendant should be a path segment relative to self +and each may contain multiple levels separated by +``posixpath.sep`` (``/``). +""" # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @overload @abstractmethod - def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: ... + def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: + """ +mode may be 'r' or 'rb' to open as text or binary. Return a handle +suitable for reading (same as pathlib.Path.open). + +When opening as text, accepts encoding parameters such as those +accepted by io.TextIOWrapper. +""" @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... @property @abstractmethod - def name(self) -> str: ... - def __truediv__(self, child: str, /) -> Traversable: ... + def name(self) -> str: + """ +The base name of this object without any parent references. +""" + def __truediv__(self, child: str, /) -> Traversable: + """ +Return Traversable child in self +""" @abstractmethod - def read_bytes(self) -> bytes: ... + def read_bytes(self) -> bytes: + """ +Read contents of self as bytes +""" @abstractmethod - def read_text(self, encoding: str | None = None) -> str: ... + def read_text(self, encoding: str | None = None) -> str: + """ +Read contents of self as text +""" class TraversableResources(ResourceReader): + """ +The required interface for providing traversable +resources. +""" @abstractmethod - def files(self) -> Traversable: ... + def files(self) -> Traversable: + """Return a Traversable object for the loaded package. +""" def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi index c4c758111c2dd..c67da99ac38cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi @@ -1,3 +1,6 @@ +""" +Interface adapters for low-level readers. +""" import abc import sys from collections.abc import Iterator @@ -9,19 +12,39 @@ if sys.version_info >= (3, 11): from .abc import Traversable, TraversableResources class SimpleReader(abc.ABC): + """ +The minimum, low-level interface required from a resource +provider. +""" @property @abc.abstractmethod - def package(self) -> str: ... + def package(self) -> str: + """ +The name of the package for which this reader loads resources. +""" @abc.abstractmethod - def children(self) -> list[SimpleReader]: ... + def children(self) -> list[SimpleReader]: + """ +Obtain an iterable of SimpleReader for available +child containers (e.g. directories). +""" @abc.abstractmethod - def resources(self) -> list[str]: ... + def resources(self) -> list[str]: + """ +Obtain available named resources for this virtual package. +""" @abc.abstractmethod - def open_binary(self, resource: str) -> BinaryIO: ... + def open_binary(self, resource: str) -> BinaryIO: + """ +Obtain a File-like for a named resource. +""" @property def name(self) -> str: ... class ResourceHandle(Traversable, metaclass=abc.ABCMeta): + """ +Handle to a named resource in a ResourceReader. +""" parent: ResourceContainer def __init__(self, parent: ResourceContainer, name: str) -> None: ... def is_file(self) -> Literal[True]: ... @@ -43,6 +66,9 @@ if sys.version_info >= (3, 11): def joinpath(self, name: Never) -> NoReturn: ... # type: ignore[override] class ResourceContainer(Traversable, metaclass=abc.ABCMeta): + """ +Traversable container for a package's resources via its reader. +""" reader: SimpleReader def __init__(self, reader: SimpleReader) -> None: ... def is_dir(self) -> Literal[True]: ... @@ -53,4 +79,9 @@ if sys.version_info >= (3, 11): def joinpath(self, *descendants: str) -> Traversable: ... class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta): + """ +A TraversableResources based on SimpleReader. Resource providers +may derive from this class to provide the TraversableResources +interface by supplying the SimpleReader interface. +""" def files(self) -> ResourceContainer: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi index 58d8c6617082a..03ee7324ea648 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi @@ -1,3 +1,9 @@ +""" +Compatibility shim for .resources.simple as found on Python 3.10. + +Consumers that can rely on Python 3.11 should use the other +module directly. +""" import sys if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi index 05c4d0d1edb30..eb55331f9069a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi @@ -1,3 +1,5 @@ +"""Utility code for constructing importers, etc. 
+""" import importlib.machinery import sys import types @@ -21,28 +23,81 @@ if sys.version_info < (3, 12): "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: + """Decorator to handle selecting the proper module for loaders. + + The decorated function is passed the module to use instead of the module + name. The module passed in to the function is either from sys.modules if + it already exists or is a new module. If the module is new, then __name__ + is set the first argument to the method, __loader__ is set to self, and + __package__ is set accordingly (if self.is_package() is defined) will be set + before it is passed to the decorated function (if self.is_package() does + not work for the module it will be set post-load). + + If an exception is raised and the decorator created the module it is + subsequently removed from sys.modules. + + The decorator assumes that the decorated function takes the module name as + the second argument. + + """ @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: + """Set __loader__ on the returned module. + + This function is deprecated. + + """ @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." ) - def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... + def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: + """Set __package__ on the returned module. + + This function is deprecated. + + """ + +def resolve_name(name: str, package: str | None) -> str: + """Resolve a relative module name to an absolute one. +""" +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: + """Return the spec for the specified module. + +First, sys.modules is checked to see if the module was already imported. If +so, then sys.modules[name].__spec__ is returned. If that happens to be +set to None, then ValueError is raised. If the module is not in +sys.modules, then sys.meta_path is searched for a suitable spec with the +value of 'path' given to the finders. None is returned if no spec could +be found. + +If the name is for submodule (contains a dot), the parent module is +automatically imported. + +The name and package arguments work the same as importlib.import_module(). +In other words, relative module names (with leading dots) work. -def resolve_name(name: str, package: str | None) -> str: ... -def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... +""" class LazyLoader(Loader): + """A loader that creates a module which defers loading until attribute access. +""" def __init__(self, loader: Loader) -> None: ... @classmethod - def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: ... - def exec_module(self, module: types.ModuleType) -> None: ... + def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: + """Construct a callable which returns the eager loader made lazy. 
+""" + def exec_module(self, module: types.ModuleType) -> None: + """Make the module load lazily. +""" -def source_hash(source_bytes: ReadableBuffer) -> bytes: ... +def source_hash(source_bytes: ReadableBuffer) -> bytes: + """Return the hash of *source_bytes* as used in hash-based pyc files. +""" if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi index 55ae61617af7e..6c77116a35483 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi @@ -1,3 +1,30 @@ +"""Get useful information from live Python objects. + +This module encapsulates the interface provided by the internal special +attributes (co_*, im_*, tb_*, etc.) in a friendlier fashion. +It also provides some help for examining source code and class layout. + +Here are some of the useful functions provided by this module: + + ismodule(), isclass(), ismethod(), ispackage(), isfunction(), + isgeneratorfunction(), isgenerator(), istraceback(), isframe(), + iscode(), isbuiltin(), isroutine() - check object types + getmembers() - get members of an object that satisfy a given condition + + getfile(), getsourcefile(), getsource() - find an object's source code + getdoc(), getcomments() - get documentation on an object + getmodule() - determine the module that an object came from + getclasstree() - arrange classes so as to represent their hierarchy + + getargvalues(), getcallargs() - get info about function arguments + getfullargspec() - same, with support for Python 3 features + formatargvalues() - format an argument spec + getouterframes(), getinnerframes() - get info about frames + currentframe() - get the current stack frame + stack(), trace() - get info about frames on the stack or in a traceback + + signature() - get a Signature object for the callable +""" import dis import enum import sys @@ -157,6 +184,8 @@ _V_contra = TypeVar("_V_contra", contravariant=True) class EndOfBlock(Exception): ... class BlockFinder: + """Provide a tokeneater() method to detect the end of a code block. +""" indent: int islambda: bool started: bool @@ -189,7 +218,10 @@ _GetMembersPredicate: TypeAlias = Callable[[Any], bool] _GetMembersReturn: TypeAlias = list[tuple[str, _T]] @overload -def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... +def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: + """Return all members of an object as (name, value) pairs sorted by name. +Optionally, only return members that satisfy a given predicate. +""" @overload def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload @@ -197,45 +229,120 @@ def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> if sys.version_info >= (3, 11): @overload - def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: ... + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: + """Return all members of an object as (name, value) pairs sorted by name +without triggering dynamic lookup via the descriptor protocol, +__getattr__ or __getattribute__. Optionally, only return members that +satisfy a given predicate. 
+ +Note: this function may not be able to retrieve all members + that getmembers can fetch (like dynamically created attributes) + and may find members that getmembers can't (like descriptors + that raise AttributeError). It can also return descriptor objects + instead of instance members in some cases. +""" @overload def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ... -def getmodulename(path: StrPath) -> str | None: ... -def ismodule(object: object) -> TypeIs[ModuleType]: ... -def isclass(object: object) -> TypeIs[type[Any]]: ... -def ismethod(object: object) -> TypeIs[MethodType]: ... +def getmodulename(path: StrPath) -> str | None: + """Return the module name for a given file, or None. +""" +def ismodule(object: object) -> TypeIs[ModuleType]: + """Return true if the object is a module. +""" +def isclass(object: object) -> TypeIs[type[Any]]: + """Return true if the object is a class. +""" +def ismethod(object: object) -> TypeIs[MethodType]: + """Return true if the object is an instance method. +""" if sys.version_info >= (3, 14): # Not TypeIs because it does not return True for all modules - def ispackage(object: object) -> TypeGuard[ModuleType]: ... - -def isfunction(object: object) -> TypeIs[FunctionType]: ... + def ispackage(object: object) -> TypeGuard[ModuleType]: + """Return true if the object is a package. +""" + +def isfunction(object: object) -> TypeIs[FunctionType]: + """Return true if the object is a user-defined function. + +Function objects provide these attributes: + __doc__ documentation string + __name__ name with which this function was defined + __qualname__ qualified name of this function + __module__ name of the module the function was defined in or None + __code__ code object containing compiled function bytecode + __defaults__ tuple of any default values for arguments + __globals__ global namespace in which this function was defined + __annotations__ dict of parameter annotations + __kwdefaults__ dict of keyword only parameters with defaults + __dict__ namespace which is supporting arbitrary function attributes + __closure__ a tuple of cells or None + __type_params__ tuple of type parameters +""" if sys.version_info >= (3, 12): - def markcoroutinefunction(func: _F) -> _F: ... + def markcoroutinefunction(func: _F) -> _F: + """ +Decorator to ensure callable is recognised as a coroutine function. +""" @overload -def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: ... +def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: + """Return true if the object is a user-defined generator function. + +Generator function objects provide the same attributes as functions. +See help(isfunction) for a list of attributes. +""" @overload def isgeneratorfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... @overload def isgeneratorfunction(obj: object) -> TypeGuard[Callable[..., GeneratorType[Any, Any, Any]]]: ... @overload -def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: + """Return true if the object is a coroutine function. + +Coroutine functions are normally defined with "async def" syntax, but may +be marked via markcoroutinefunction. 
+""" @overload def iscoroutinefunction(obj: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... @overload def iscoroutinefunction(obj: Callable[_P, object]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, Any]]]: ... @overload def iscoroutinefunction(obj: object) -> TypeGuard[Callable[..., CoroutineType[Any, Any, Any]]]: ... -def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: ... -def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: ... -def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: ... +def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: + """Return true if the object is a generator. + +Generator objects provide these attributes: + gi_code code object + gi_frame frame object or possibly None once the generator has + been exhausted + gi_running set to 1 when generator is executing, 0 otherwise + gi_yieldfrom object being iterated by yield from or None + + __iter__() defined to support iteration over container + close() raises a new GeneratorExit exception inside the + generator to terminate the iteration + send() resumes the generator and "sends" a value that becomes + the result of the current yield-expression + throw() used to raise an exception inside the generator +""" +def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: + """Return true if the object is a coroutine. +""" +def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: + """Return true if object can be passed to an ``await`` expression. +""" @overload -def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: ... +def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: + """Return true if the object is an asynchronous generator function. + +Asynchronous generator functions are defined with "async def" +syntax and have "yield" expressions in their body. +""" @overload def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... @overload @@ -248,14 +355,78 @@ class _SupportsSet(Protocol[_T_contra, _V_contra]): class _SupportsDelete(Protocol[_T_contra]): def __delete__(self, instance: _T_contra, /) -> None: ... -def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: ... -def istraceback(object: object) -> TypeIs[TracebackType]: ... -def isframe(object: object) -> TypeIs[FrameType]: ... -def iscode(object: object) -> TypeIs[CodeType]: ... -def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: ... +def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: + """Return true if the object is an asynchronous generator. +""" +def istraceback(object: object) -> TypeIs[TracebackType]: + """Return true if the object is a traceback. + +Traceback objects provide these attributes: + tb_frame frame object at this level + tb_lasti index of last attempted instruction in bytecode + tb_lineno current line number in Python source code + tb_next next inner traceback object (called by this level) +""" +def isframe(object: object) -> TypeIs[FrameType]: + """Return true if the object is a frame object. 
+ +Frame objects provide these attributes: + f_back next outer frame object (this frame's caller) + f_builtins built-in namespace seen by this frame + f_code code object being executed in this frame + f_globals global namespace seen by this frame + f_lasti index of last attempted instruction in bytecode + f_lineno current line number in Python source code + f_locals local namespace seen by this frame + f_trace tracing function for this frame, or None + f_trace_lines is a tracing event triggered for each source line? + f_trace_opcodes are per-opcode events being requested? + + clear() used to clear all references to local variables +""" +def iscode(object: object) -> TypeIs[CodeType]: + """Return true if the object is a code object. + +Code objects provide these attributes: + co_argcount number of arguments (not including *, ** args + or keyword only arguments) + co_code string of raw compiled bytecode + co_cellvars tuple of names of cell variables + co_consts tuple of constants used in the bytecode + co_filename name of file in which this code object was created + co_firstlineno number of first line in Python source code + co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg + | 16=nested | 32=generator | 64=nofree | 128=coroutine + | 256=iterable_coroutine | 512=async_generator + | 0x4000000=has_docstring + co_freevars tuple of names of free variables + co_posonlyargcount number of positional only arguments + co_kwonlyargcount number of keyword only arguments (not including ** arg) + co_lnotab encoded mapping of line numbers to bytecode indices + co_name name with which this code object was defined + co_names tuple of names other than arguments and function locals + co_nlocals number of local variables + co_stacksize virtual machine stack space required + co_varnames tuple of names of arguments and local variables + co_qualname fully qualified function name + + co_lines() returns an iterator that yields successive bytecode ranges + co_positions() returns an iterator of source code positions for each bytecode instruction + replace() returns a copy of the code object with a new values +""" +def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: + """Return true if the object is a built-in function or method. + +Built-in functions and methods provide these attributes: + __doc__ documentation string + __name__ original name of this function or method + __self__ instance to which a method is bound, or None +""" if sys.version_info >= (3, 11): - def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: ... + def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: + """Return true if the object is a method wrapper. +""" def isroutine( object: object, @@ -268,12 +439,49 @@ def isroutine( | WrapperDescriptorType | MethodDescriptorType | ClassMethodDescriptorType -]: ... -def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: ... -def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: ... -def isabstract(object: object) -> bool: ... -def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: ... -def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: ... +]: + """Return true if the object is any kind of function or method. +""" +def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: + """Return true if the object is a method descriptor. + +But not if ismethod() or isclass() or isfunction() are true. 
+ +This is new in Python 2.2, and, for example, is true of int.__add__. +An object passing this test has a __get__ attribute, but not a +__set__ attribute or a __delete__ attribute. Beyond that, the set +of attributes varies; __name__ is usually sensible, and __doc__ +often is. + +Methods implemented via descriptors that also pass one of the other +tests return false from the ismethoddescriptor() test, simply because +the other tests promise more -- you can, e.g., count on having the +__func__ attribute (etc) when an object passes ismethod(). +""" +def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: + """Return true if the object is a member descriptor. + +Member descriptors are specialized descriptors defined in extension +modules. +""" +def isabstract(object: object) -> bool: + """Return true if the object is an abstract base class (ABC). +""" +def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: + """Return true if the object is a getset descriptor. + +getset descriptors are specialized descriptors defined in extension +modules. +""" +def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: + """Return true if the object is a data descriptor. + +Data descriptors have a __set__ or a __delete__ attribute. Examples are +properties (defined in Python) and getsets and members (defined in C). +Typically, data descriptors will also have __name__ and __doc__ attributes +(properties, getsets, and members have both of these attributes), but this +is not guaranteed. +""" # # Retrieving source code @@ -282,26 +490,78 @@ _SourceObjectType: TypeAlias = ( ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] ) -def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... -def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... +def findsource(object: _SourceObjectType) -> tuple[list[str], int]: + """Return the entire source file and starting line number for an object. + +The argument may be a module, class, method, function, traceback, frame, +or code object. The source code is returned as a list of all the lines +in the file and the line number indexes a line in that list. An OSError +is raised if the source code cannot be retrieved. +""" +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: + """Return an absolute path to the source or compiled file for an object. + +The idea is for each object to have a unique origin, so this routine +normalizes the result as much as possible. +""" # Special-case the two most common input types here # to avoid the annoyingly vague `Sequence[str]` return type @overload -def getblock(lines: list[str]) -> list[str]: ... +def getblock(lines: list[str]) -> list[str]: + """Extract the block of code at the top of the given list of lines. +""" @overload def getblock(lines: tuple[str, ...]) -> tuple[str, ...]: ... @overload def getblock(lines: Sequence[str]) -> Sequence[str]: ... -def getdoc(object: object) -> str | None: ... -def getcomments(object: object) -> str | None: ... -def getfile(object: _SourceObjectType) -> str: ... -def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... -def getsourcefile(object: _SourceObjectType) -> str | None: ... -def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... -def getsource(object: _SourceObjectType) -> str: ... -def cleandoc(doc: str) -> str: ... 
-def indentsize(line: str) -> int: ... +def getdoc(object: object) -> str | None: + """Get the documentation string for an object. + +All tabs are expanded to spaces. To clean up docstrings that are +indented to line up with blocks of code, any whitespace than can be +uniformly removed from the second line onwards is removed. +""" +def getcomments(object: object) -> str | None: + """Get lines of comments immediately preceding an object's source code. + +Returns None when source can't be found. +""" +def getfile(object: _SourceObjectType) -> str: + """Work out which source or compiled file an object was defined in. +""" +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: + """Return the module an object was defined in, or None if not found. +""" +def getsourcefile(object: _SourceObjectType) -> str | None: + """Return the filename that can be used to locate an object's source. +Return None if no way can be identified to get the source. +""" +def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: + """Return a list of source lines and starting line number for an object. + +The argument may be a module, class, method, function, traceback, frame, +or code object. The source code is returned as a list of the lines +corresponding to the object and the line number indicates where in the +original source file the first line of code was found. An OSError is +raised if the source code cannot be retrieved. +""" +def getsource(object: _SourceObjectType) -> str: + """Return the text of the source code for an object. + +The argument may be a module, class, method, function, traceback, frame, +or code object. The source code is returned as a single string. An +OSError is raised if the source code cannot be retrieved. +""" +def cleandoc(doc: str) -> str: + """Clean up indentation from docstrings. + +Any whitespace that can be uniformly removed from the second line +onwards is removed. +""" +def indentsize(line: str) -> int: + """Return the indent size, in spaces, at the start of a line of text. +""" _IntrospectableCallable: TypeAlias = Callable[..., Any] @@ -317,7 +577,9 @@ if sys.version_info >= (3, 14): locals: Mapping[str, Any] | None = None, eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 - ) -> Signature: ... + ) -> Signature: + """Get a signature object for the passed callable. +""" elif sys.version_info >= (3, 10): def signature( @@ -327,27 +589,71 @@ elif sys.version_info >= (3, 10): globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None, eval_str: bool = False, - ) -> Signature: ... + ) -> Signature: + """Get a signature object for the passed callable. +""" else: - def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: + """Get a signature object for the passed callable. +""" -class _void: ... -class _empty: ... +class _void: + """A private marker - used in Parameter & Signature. +""" +class _empty: + """Marker object for Signature.empty and Parameter.empty. +""" class Signature: + """A Signature object represents the overall signature of a function. +It stores a Parameter object for each parameter accepted by the +function, as well as information specific to the function itself. 
+ +A Signature object has the following public attributes and methods: + +* parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in `code.co_varnames`). +* return_annotation : object + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is set to `Signature.empty`. +* bind(*args, **kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. +* bind_partial(*args, **kwargs) -> BoundArguments + Creates a partial mapping from positional and keyword arguments + to parameters (simulating 'functools.partial' behavior.) +""" __slots__ = ("_return_annotation", "_parameters") def __init__( self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True - ) -> None: ... + ) -> None: + """Constructs Signature from the given list of Parameter +objects and 'return_annotation'. All arguments are optional. +""" empty = _empty @property def parameters(self) -> types.MappingProxyType[str, Parameter]: ... @property def return_annotation(self) -> Any: ... - def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... - def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... - def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... + def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: + """Get a BoundArguments object, that maps the passed `args` +and `kwargs` to the function's signature. Raises `TypeError` +if the passed arguments can not be bound. +""" + def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: + """Get a BoundArguments object, that partially maps the +passed `args` and `kwargs` to the function's signature. +Raises `TypeError` if the passed arguments can not be bound. +""" + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: + """Creates a customized copy of the Signature. +Pass 'parameters' and/or 'return_annotation' arguments +to override them in the new copy. +""" __replace__ = replace if sys.version_info >= (3, 14): @classmethod @@ -360,7 +666,9 @@ class Signature: locals: Mapping[str, Any] | None = None, eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 - ) -> Self: ... + ) -> Self: + """Constructs Signature for the given callable object. +""" elif sys.version_info >= (3, 10): @classmethod def from_callable( @@ -371,14 +679,37 @@ class Signature: globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None, eval_str: bool = False, - ) -> Self: ... + ) -> Self: + """Constructs Signature for the given callable object. +""" else: @classmethod - def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: + """Constructs Signature for the given callable object. +""" if sys.version_info >= (3, 14): - def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: ... + def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: + """Create a string representation of the Signature object. + +If *max_width* integer is passed, +signature will try to fit into the *max_width*. 
+If signature is longer than *max_width*, +all parameters will be on separate lines. + +If *quote_annotation_strings* is False, annotations +in the signature are displayed without opening and closing quotation +marks. This is useful when the signature was created with the +STRING format or when ``from __future__ import annotations`` was used. +""" elif sys.version_info >= (3, 13): - def format(self, *, max_width: int | None = None) -> str: ... + def format(self, *, max_width: int | None = None) -> str: + """Create a string representation of the Signature object. + +If *max_width* integer is passed, +signature will try to fit into the *max_width*. +If signature is longer than *max_width*, +all parameters will be on separate lines. +""" def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -392,10 +723,56 @@ elif sys.version_info >= (3, 10): globals: Mapping[str, Any] | None = None, # value types depend on the key locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, - ) -> dict[str, AnnotationForm]: ... # values are type expressions + ) -> dict[str, AnnotationForm]: # values are type expressions + """Compute the annotations dict for an object. + +obj may be a callable, class, or module. +Passing in an object of any other type raises TypeError. + +Returns a dict. get_annotations() returns a new dict every time +it's called; calling it twice on the same object will return two +different but equivalent dicts. + +This function handles several details for you: + + * If eval_str is true, values of type str will + be un-stringized using eval(). This is intended + for use with stringized annotations + ("from __future__ import annotations"). + * If obj doesn't have an annotations dict, returns an + empty dict. (Functions and methods always have an + annotations dict; classes, modules, and other types of + callables may not.) + * Ignores inherited annotations on classes. If a class + doesn't have its own annotations dict, returns an empty dict. + * All accesses to object members and dict values are done + using getattr() and dict.get() for safety. + * Always, always, always returns a freshly-created dict. + +eval_str controls whether or not values of type str are replaced +with the result of calling eval() on those values: + + * If eval_str is true, eval() is called on values of type str. + * If eval_str is false (the default), values of type str are unchanged. + +globals and locals are passed in to eval(); see the documentation +for eval() for more information. If either globals or locals is +None, this function may replace that value with a context-specific +default, contingent on type(obj): + + * If obj is a module, globals defaults to obj.__dict__. + * If obj is a class, globals defaults to + sys.modules[obj.__module__].__dict__ and locals + defaults to the obj class namespace. + * If obj is a callable, globals defaults to obj.__globals__, + although if obj is a wrapped function (using + functools.update_wrapper()) it is first unwrapped. +""" # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): + """An enumeration. +""" POSITIONAL_ONLY = 0 POSITIONAL_OR_KEYWORD = 1 VAR_POSITIONAL = 2 @@ -413,10 +790,45 @@ if sys.version_info >= (3, 12): def getasyncgenstate( agen: AsyncGenerator[Any, Any], - ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: ... - def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: ... 
+ ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: + """Get current state of an asynchronous generator object. + +Possible states are: + AGEN_CREATED: Waiting to start execution. + AGEN_RUNNING: Currently being executed by the interpreter. + AGEN_SUSPENDED: Currently suspended at a yield expression. + AGEN_CLOSED: Execution has completed. +""" + def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: + """ +Get the mapping of asynchronous generator local variables to their current +values. + +A dict is returned, with the keys the local variable names and values the +bound values. +""" class Parameter: + """Represents a parameter in a function signature. + +Has the following public attributes: + +* name : str + The name of the parameter as a string. +* default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is set to + `Parameter.empty`. +* annotation + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is set to + `Parameter.empty`. +* kind : str + Describes how argument values are bound to the parameter. + Possible values: `Parameter.POSITIONAL_ONLY`, + `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, + `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. +""" __slots__ = ("_name", "_kind", "_default", "_annotation") def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... empty = _empty @@ -441,7 +853,9 @@ class Parameter: kind: _ParameterKind | type[_void] = ..., default: Any = ..., annotation: Any = ..., - ) -> Self: ... + ) -> Self: + """Creates a customized copy of the Parameter. +""" if sys.version_info >= (3, 13): __replace__ = replace @@ -449,6 +863,21 @@ class Parameter: def __hash__(self) -> int: ... class BoundArguments: + """Result of `Signature.bind` call. Holds the mapping of arguments +to the function's parameters. + +Has the following public attributes: + +* arguments : dict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. +* signature : Signature + The Signature object that created this instance. +* args : tuple + Tuple of positional arguments values. +* kwargs : dict + Dict of keyword arguments values. +""" __slots__ = ("arguments", "_signature", "__weakref__") arguments: OrderedDict[str, Any] @property @@ -458,7 +887,15 @@ class BoundArguments: @property def signature(self) -> Signature: ... def __init__(self, signature: Signature, arguments: OrderedDict[str, Any]) -> None: ... - def apply_defaults(self) -> None: ... + def apply_defaults(self) -> None: + """Set default values for missing arguments. + +For variable-positional arguments (*args) the default is an +empty tuple. + +For variable-keyword arguments (**kwargs) the default is an +empty dict. +""" def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -468,28 +905,71 @@ class BoundArguments: _ClassTreeItem: TypeAlias = list[tuple[type, ...]] | list[_ClassTreeItem] -def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: ... -def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: ... +def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: + """Arrange the given list of classes into a hierarchy of nested lists. 
+ +Where a nested list appears, it contains classes derived from the class +whose entry immediately precedes the list. Each entry is a 2-tuple +containing a class and a tuple of its base classes. If the 'unique' +argument is true, exactly one entry appears in the returned structure +for each class in the given list. Otherwise, classes using multiple +inheritance and their descendants will appear multiple times. +""" +def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: + """Recursive helper function for getclasstree(). +""" class Arguments(NamedTuple): + """Arguments(args, varargs, varkw) +""" args: list[str] varargs: str | None varkw: str | None -def getargs(co: CodeType) -> Arguments: ... +def getargs(co: CodeType) -> Arguments: + """Get information about the arguments accepted by a code object. + +Three things are returned: (args, varargs, varkw), where +'args' is the list of argument names. Keyword-only arguments are +appended. 'varargs' and 'varkw' are the names of the * and ** +arguments or None. +""" if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.0; removed in Python 3.11.") class ArgSpec(NamedTuple): + """ArgSpec(args, varargs, keywords, defaults) +""" args: list[str] varargs: str | None keywords: str | None defaults: tuple[Any, ...] @deprecated("Deprecated since Python 3.0; removed in Python 3.11. Use `inspect.signature()` instead.") - def getargspec(func: object) -> ArgSpec: ... + def getargspec(func: object) -> ArgSpec: + """Get the names and default values of a function's parameters. + + A tuple of four things is returned: (args, varargs, keywords, defaults). + 'args' is a list of the argument names, including keyword-only argument names. + 'varargs' and 'keywords' are the names of the * and ** parameters or None. + 'defaults' is an n-tuple of the default values of the last n parameters. + + This function is deprecated, as it does not support annotations or + keyword-only parameters and will raise ValueError if either is present + on the supplied callable. + + For a more structured introspection API, use inspect.signature() instead. + + Alternatively, use getfullargspec() for an API with a similar namedtuple + based interface, but full support for annotations and keyword-only + parameters. + + Deprecated since Python 3.5, use `inspect.getfullargspec()`. + """ class FullArgSpec(NamedTuple): + """FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations) +""" args: list[str] varargs: str | None varkw: str | None @@ -498,15 +978,39 @@ class FullArgSpec(NamedTuple): kwonlydefaults: dict[str, Any] | None annotations: dict[str, Any] -def getfullargspec(func: object) -> FullArgSpec: ... +def getfullargspec(func: object) -> FullArgSpec: + """Get the names and default values of a callable object's parameters. + +A tuple of seven things is returned: +(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations). +'args' is a list of the parameter names. +'varargs' and 'varkw' are the names of the * and ** parameters or None. +'defaults' is an n-tuple of the default values of the last n parameters. +'kwonlyargs' is a list of keyword-only parameter names. +'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults. +'annotations' is a dictionary mapping parameter names to annotations. 
+ +Notable differences from inspect.signature(): + - the "self" parameter is always reported, even for bound methods + - wrapper chains defined by __wrapped__ *not* unwrapped automatically +""" class ArgInfo(NamedTuple): + """ArgInfo(args, varargs, keywords, locals) +""" args: list[str] varargs: str | None keywords: str | None locals: dict[str, Any] -def getargvalues(frame: FrameType) -> ArgInfo: ... +def getargvalues(frame: FrameType) -> ArgInfo: + """Get information about arguments passed into a particular frame. + +A tuple of four things is returned: (args, varargs, varkw, locals). +'args' is a list of the argument names. +'varargs' and 'varkw' are the names of the * and ** arguments or None. +'locals' is the locals dictionary of the given frame. +""" if sys.version_info >= (3, 14): def formatannotation(annotation: object, base_module: str | None = None, *, quote_annotation_strings: bool = True) -> str: ... @@ -534,7 +1038,18 @@ if sys.version_info < (3, 11): formatvalue: Callable[[Any], str] = ..., formatreturns: Callable[[Any], str] = ..., formatannotation: Callable[[Any], str] = ..., - ) -> str: ... + ) -> str: + """Format an argument spec from the values returned by getfullargspec. + + The first seven arguments are (args, varargs, varkw, defaults, + kwonlyargs, kwonlydefaults, annotations). The other five arguments + are the corresponding optional formatting functions that are called to + turn names and values into strings. The last argument is an optional + function to format the sequence of arguments. + + Deprecated since Python 3.5: use the `signature` function and `Signature` + objects. + """ def formatargvalues( args: list[str], @@ -545,18 +1060,57 @@ def formatargvalues( formatvarargs: Callable[[str], str] | None = ..., formatvarkw: Callable[[str], str] | None = ..., formatvalue: Callable[[Any], str] | None = ..., -) -> str: ... -def getmro(cls: type) -> tuple[type, ...]: ... -def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: ... +) -> str: + """Format an argument spec from the 4 values returned by getargvalues. + +The first four arguments are (args, varargs, varkw, locals). The +next four arguments are the corresponding optional formatting functions +that are called to turn names and values into strings. The ninth +argument is an optional function to format the sequence of arguments. +""" +def getmro(cls: type) -> tuple[type, ...]: + """Return tuple of base classes (including cls) in method resolution order. +""" +def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: + """Get the mapping of arguments to values. + +A dict is returned, with keys the function argument names (including the +names of the * and ** arguments, if any), and values the respective bound +values from 'positional' and 'named'. +""" class ClosureVars(NamedTuple): + """ClosureVars(nonlocals, globals, builtins, unbound) +""" nonlocals: Mapping[str, Any] globals: Mapping[str, Any] builtins: Mapping[str, Any] unbound: AbstractSet[str] -def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... -def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... +def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: + """ +Get the mapping of free variables to their current values. + +Returns a named tuple of dicts mapping the current nonlocal, global +and builtin references as seen by the body of the function. 
A final +set of unbound names that could not be resolved is also provided. +""" +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: + """Get the object wrapped by *func*. + +Follows the chain of :attr:`__wrapped__` attributes returning the last +object in the chain. + +*stop* is an optional callback accepting an object in the wrapper chain +as its sole argument that allows the unwrapping to be terminated early if +the callback returns a true value. If the callback never returns a true +value, the last object in the chain is returned as usual. For example, +:func:`signature` uses this to stop unwrapping if any object in the +chain has a ``__signature__`` attribute defined. + +:exc:`ValueError` is raised if a cycle is encountered. + + """ # # The interpreter stack @@ -564,6 +1118,8 @@ def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any if sys.version_info >= (3, 11): class _Traceback(NamedTuple): + """_Traceback(filename, lineno, function, code_context, index) +""" filename: str lineno: int function: str @@ -571,6 +1127,8 @@ if sys.version_info >= (3, 11): index: int | None # type: ignore[assignment] class _FrameInfo(NamedTuple): + """_FrameInfo(frame, filename, lineno, function, code_context, index) +""" frame: FrameType filename: str lineno: int @@ -638,6 +1196,8 @@ if sys.version_info >= (3, 11): else: class Traceback(NamedTuple): + """Traceback(filename, lineno, function, code_context, index) +""" filename: str lineno: int function: str @@ -645,6 +1205,8 @@ else: index: int | None # type: ignore[assignment] class FrameInfo(NamedTuple): + """FrameInfo(frame, filename, lineno, function, code_context, index) +""" frame: FrameType filename: str lineno: int @@ -652,19 +1214,55 @@ else: code_context: list[str] | None index: int | None # type: ignore[assignment] -def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... -def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... -def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... -def getlineno(frame: FrameType) -> int: ... -def currentframe() -> FrameType | None: ... -def stack(context: int = 1) -> list[FrameInfo]: ... -def trace(context: int = 1) -> list[FrameInfo]: ... +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: + """Get information about a frame or traceback object. + +A tuple of five things is returned: the filename, the line number of +the current line, the function name, a list of lines of context from +the source code, and the index of the current line within that list. +The optional second argument specifies the number of lines of context +to return, which are centered around the current line. +""" +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: + """Get a list of records for a frame and all higher (calling) frames. + +Each record contains a frame object, filename, line number, function +name, a list of lines of context, and index within the context. +""" +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: + """Get a list of records for a traceback's frame and all lower frames. + +Each record contains a frame object, filename, line number, function +name, a list of lines of context, and index within the context. +""" +def getlineno(frame: FrameType) -> int: + """Get the line number from a frame object, allowing for optimization. 
+""" +def currentframe() -> FrameType | None: + """Return the frame of the caller or None if this is not possible. +""" +def stack(context: int = 1) -> list[FrameInfo]: + """Return a list of records for the stack above the caller's frame. +""" +def trace(context: int = 1) -> list[FrameInfo]: + """Return a list of records for the stack below the current exception. +""" # # Fetching attributes statically # -def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: ... +def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: + """Retrieve attributes without triggering dynamic lookup via the +descriptor protocol, __getattr__ or __getattribute__. + +Note: this function may not be able to retrieve all attributes +that getattr can fetch (like dynamically created attributes) +and may find attributes that getattr can't (like descriptors +that raise AttributeError). It can also return descriptor objects +instead of instance members in some cases. See the +documentation for details. +""" # # Current State of Generators and Coroutines @@ -677,7 +1275,15 @@ GEN_CLOSED: Final = "GEN_CLOSED" def getgeneratorstate( generator: Generator[Any, Any, Any], -) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: ... +) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: + """Get current state of a generator-iterator. + +Possible states are: + GEN_CREATED: Waiting to start execution. + GEN_RUNNING: Currently being executed by the interpreter. + GEN_SUSPENDED: Currently suspended at a yield expression. + GEN_CLOSED: Execution has completed. +""" CORO_CREATED: Final = "CORO_CREATED" CORO_RUNNING: Final = "CORO_RUNNING" @@ -686,21 +1292,68 @@ CORO_CLOSED: Final = "CORO_CLOSED" def getcoroutinestate( coroutine: Coroutine[Any, Any, Any], -) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: ... -def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: ... -def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: ... +) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: + """Get current state of a coroutine object. + +Possible states are: + CORO_CREATED: Waiting to start execution. + CORO_RUNNING: Currently being executed by the interpreter. + CORO_SUSPENDED: Currently suspended at an await expression. + CORO_CLOSED: Execution has completed. +""" +def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: + """ +Get the mapping of generator local variables to their current values. + +A dict is returned, with the keys the local variable names and values the +bound values. +""" +def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: + """ +Get the mapping of coroutine local variables to their current values. + +A dict is returned, with the keys the local variable names and values the +bound values. +""" # Create private type alias to avoid conflict with symbol of same # name created in Attribute class. _Object: TypeAlias = object class Attribute(NamedTuple): + """Attribute(name, kind, defining_class, object) +""" name: str kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type object: _Object -def classify_class_attrs(cls: type) -> list[Attribute]: ... +def classify_class_attrs(cls: type) -> list[Attribute]: + """Return list of attribute-descriptor tuples. + +For each name in dir(cls), the return list contains a 4-tuple +with these elements: + + 0. 
The name (a string). + + 1. The kind of attribute this is, one of these strings: + 'class method' created via classmethod() + 'static method' created via staticmethod() + 'property' created via property() + 'method' any other flavor of method or descriptor + 'data' not a method + + 2. The class which defined this attribute (a class). + + 3. The object as obtained by calling getattr; if this fails, or if the + resulting object does not live anywhere in the class' mro (including + metaclasses) then the object is looked up in the defining class's + dict (found by walking the mro). + +If one of the items in dir(cls) is stored in the metaclass it will now +be discovered and not have None be listed as the class in which it was +defined. Any items whose home class cannot be discovered are skipped. +""" class ClassFoundException(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi index d301d700e9d0f..6bb344429a755 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi @@ -1,3 +1,37 @@ +"""The io module provides the Python interfaces to stream handling. The +builtin open function is defined in this module. + +At the top of the I/O hierarchy is the abstract base class IOBase. It +defines the basic interface to a stream. Note, however, that there is no +separation between reading and writing to streams; implementations are +allowed to raise an OSError if they do not support a given operation. + +Extending IOBase is RawIOBase which deals simply with the reading and +writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide +an interface to OS files. + +BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its +subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer +streams that are readable, writable, and both respectively. +BufferedRandom provides a buffered interface to random access +streams. BytesIO is a simple stream of in-memory bytes. + +Another IOBase subclass, TextIOBase, deals with the encoding and decoding +of streams into text. TextIOWrapper, which extends it, is a buffered text +interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO +is an in-memory stream for text. + +Argument names are not part of the specification, and only the arguments +of open() are intended to be used as keyword arguments. + +data: + +DEFAULT_BUFFER_SIZE + + An int containing the default buffer size used by the module's buffered + I/O classes. open() uses the file's blksize (as obtained by os.stat) if + possible. +""" import abc import sys from _io import ( @@ -60,16 +94,83 @@ SEEK_CUR: Final = 1 SEEK_END: Final = 2 class UnsupportedOperation(OSError, ValueError): ... -class IOBase(_IOBase, metaclass=abc.ABCMeta): ... -class RawIOBase(_RawIOBase, IOBase): ... -class BufferedIOBase(_BufferedIOBase, IOBase): ... -class TextIOBase(_TextIOBase, IOBase): ... +class IOBase(_IOBase, metaclass=abc.ABCMeta): + """The abstract base class for all I/O classes. + +This class provides dummy implementations for many methods that +derived classes can override selectively; the default implementations +represent a file that cannot be read, written or seeked. + +Even though IOBase does not declare read, readinto, or write because +their signatures will vary, implementations and clients should +consider those methods part of the interface. 
Also, implementations +may raise UnsupportedOperation when operations they do not support are +called. + +The basic type used for binary data read from or written to a file is +bytes. Other bytes-like objects are accepted as method arguments too. +In some cases (such as readinto), a writable object is required. Text +I/O classes work with str data. + +Note that calling any method (except additional calls to close(), +which are ignored) on a closed stream should raise a ValueError. + +IOBase (and its subclasses) support the iterator protocol, meaning +that an IOBase object can be iterated over yielding the lines in a +stream. + +IOBase also supports the :keyword:`with` statement. In this example, +fp is closed after the suite of the with statement is complete: + +with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') +""" +class RawIOBase(_RawIOBase, IOBase): + """Base class for raw binary I/O. +""" +class BufferedIOBase(_BufferedIOBase, IOBase): + """Base class for buffered IO objects. + +The main difference with RawIOBase is that the read() method +supports omitting the size argument, and does not have a default +implementation that defers to readinto(). + +In addition, read(), readinto() and write() may raise +BlockingIOError if the underlying raw stream is in non-blocking +mode and not ready; unlike their raw counterparts, they will never +return None. + +A typical implementation should not inherit from a RawIOBase +implementation, but wrap one. +""" +class TextIOBase(_TextIOBase, IOBase): + """Base class for text I/O. + +This class provides a character and line based interface to stream +I/O. There is no readinto method because Python's character strings +are immutable. +""" if sys.version_info >= (3, 14): class Reader(Protocol[_T_co]): + """Protocol for simple I/O reader instances. + +This protocol only supports blocking I/O. +""" __slots__ = () - def read(self, size: int = ..., /) -> _T_co: ... + def read(self, size: int = ..., /) -> _T_co: + """Read data from the input stream and return it. + +If *size* is specified, at most *size* items (bytes/characters) will be +read. +""" class Writer(Protocol[_T_contra]): + """Protocol for simple I/O writer instances. + +This protocol only supports blocking I/O. +""" __slots__ = () - def write(self, data: _T_contra, /) -> int: ... + def write(self, data: _T_contra, /) -> int: + """Write *data* to the output stream and return the number of items written. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi index e2f3defa2deac..458aa23d6b6d2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi @@ -1,3 +1,9 @@ +"""A fast, lightweight IPv4/IPv6 manipulation library in Python. + +This library is used to create/poke/manipulate IPv4 and IPv6 addresses +and networks. + +""" import sys from collections.abc import Iterable, Iterator from typing import Any, Final, Generic, Literal, TypeVar, overload @@ -13,45 +19,141 @@ _N = TypeVar("_N", IPv4Network, IPv6Network) _RawIPAddress: TypeAlias = int | str | bytes | IPv4Address | IPv6Address _RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface -def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... +def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: + """Take an IP string/int and return an object of the correct type. + +Args: + address: A string or integer, the IP address. 
Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + +Returns: + An IPv4Address or IPv6Address object. + +Raises: + ValueError: if the *address* passed isn't either a v4 or a v6 + address + +""" def ip_network( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True -) -> IPv4Network | IPv6Network: ... +) -> IPv4Network | IPv6Network: + """Take an IP string/int and return an object of the correct type. + +Args: + address: A string or integer, the IP network. Either IPv4 or + IPv6 networks may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + +Returns: + An IPv4Network or IPv6Network object. + +Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if the network has host bits set. + +""" def ip_interface( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], -) -> IPv4Interface | IPv6Interface: ... +) -> IPv4Interface | IPv6Interface: + """Take an IP string/int and return an object of the correct type. + +Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + +Returns: + An IPv4Interface or IPv6Interface object. + +Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. + +Notes: + The IPv?Interface classes describe an Address on a particular + Network, so they're basically a combination of both the Address + and Network classes. + +""" class _IPAddressBase: + """The mother class. +""" __slots__ = () @property - def compressed(self) -> str: ... + def compressed(self) -> str: + """Return the shorthand version of the IP address as a string. +""" @property - def exploded(self) -> str: ... + def exploded(self) -> str: + """Return the longhand version of the IP address as a string. +""" @property - def reverse_pointer(self) -> str: ... + def reverse_pointer(self) -> str: + """The name of the reverse DNS pointer for the IP address, e.g.: +>>> ipaddress.ip_address("127.0.0.1").reverse_pointer +'1.0.0.127.in-addr.arpa' +>>> ipaddress.ip_address("2001:db8::1").reverse_pointer +'1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' + +""" if sys.version_info < (3, 14): @property def version(self) -> int: ... class _BaseAddress(_IPAddressBase): + """A generic IP object. + +This IP class contains the version independent methods which are +used by single IP addresses. +""" __slots__ = () def __add__(self, other: int) -> Self: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... def __sub__(self, other: int) -> Self: ... - def __format__(self, fmt: str) -> str: ... + def __format__(self, fmt: str) -> str: + """Returns an IP address as a formatted string. + +Supported presentation types are: +'s': returns the IP address as a string (default) +'b': converts to binary and returns a zero-padded string +'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string +'n': the same as 'b' for IPv4 and 'x' for IPv6 + +For binary and hex presentation types, the alternate form specifier +'#' and the grouping option '_' are supported. +""" def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self, other: Self) -> bool: ... - def __gt__(self, other: Self) -> bool: ... - def __le__(self, other: Self) -> bool: ... 
+ def __ge__(self, other: Self) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __gt__(self, other: Self) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __le__(self, other: Self) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" class _BaseNetwork(_IPAddressBase, Generic[_A]): + """A generic IP network object. + +This IP class contains the version independent methods which are +used by networks. +""" network_address: _A netmask: _A def __contains__(self, other: Any) -> bool: ... @@ -61,42 +163,227 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): def __hash__(self) -> int: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self, other: Self) -> bool: ... - def __gt__(self, other: Self) -> bool: ... - def __le__(self, other: Self) -> bool: ... + def __ge__(self, other: Self) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __gt__(self, other: Self) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __le__(self, other: Self) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" else: - def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" - def address_exclude(self, other: Self) -> Iterator[Self]: ... + def address_exclude(self, other: Self) -> Iterator[Self]: + """Remove an address from a larger block. + +For example: + + addr1 = ip_network('192.0.2.0/28') + addr2 = ip_network('192.0.2.1/32') + list(addr1.address_exclude(addr2)) = + [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), + IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] + +or IPv6: + + addr1 = ip_network('2001:db8::1/32') + addr2 = ip_network('2001:db8::1/128') + list(addr1.address_exclude(addr2)) = + [ip_network('2001:db8::1/128'), + ip_network('2001:db8::2/127'), + ip_network('2001:db8::4/126'), + ip_network('2001:db8::8/125'), + ... + ip_network('2001:db8:8000::/33')] + +Args: + other: An IPv4Network or IPv6Network object of the same type. + +Returns: + An iterator of the IPv(4|6)Network objects which is self + minus other. 
+ +Raises: + TypeError: If self and other are of differing address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. + +""" @property def broadcast_address(self) -> _A: ... - def compare_networks(self, other: Self) -> int: ... - def hosts(self) -> Iterator[_A] | list[_A]: ... + def compare_networks(self, other: Self) -> int: + """Compare two IP objects. + +This is only concerned about the comparison of the integer +representation of the network addresses. This means that the +host bits aren't considered at all in this method. If you want +to compare host bits, you can easily enough do a +'HostA._ip < HostB._ip' + +Args: + other: An IP object. + +Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') + IPv6Network('2001:db8::1000/124') < + IPv6Network('2001:db8::2000/124') + 0 if self == other + eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') + IPv6Network('2001:db8::1000/124') == + IPv6Network('2001:db8::1000/124') + 1 if self > other + eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') + IPv6Network('2001:db8::2000/124') > + IPv6Network('2001:db8::1000/124') + + Raises: + TypeError if the IP versions are different. + +""" + def hosts(self) -> Iterator[_A] | list[_A]: + """Generate Iterator over usable hosts in a network. + +This is like __iter__ except it doesn't return the network +or broadcast addresses. + +""" @property - def is_global(self) -> bool: ... + def is_global(self) -> bool: + """Test if this address is allocated for public networks. + +Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + +""" @property - def is_link_local(self) -> bool: ... + def is_link_local(self) -> bool: + """Test if the address is reserved for link-local. + +Returns: + A boolean, True if the address is reserved per RFC 4291. + +""" @property - def is_loopback(self) -> bool: ... + def is_loopback(self) -> bool: + """Test if the address is a loopback address. + +Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + +""" @property - def is_multicast(self) -> bool: ... + def is_multicast(self) -> bool: + """Test if the address is reserved for multicast use. + +Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + +""" @property - def is_private(self) -> bool: ... + def is_private(self) -> bool: + """Test if this network belongs to a private range. + +Returns: + A boolean, True if the network is reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + +""" @property - def is_reserved(self) -> bool: ... + def is_reserved(self) -> bool: + """Test if the address is otherwise IETF reserved. + +Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + +""" @property - def is_unspecified(self) -> bool: ... + def is_unspecified(self) -> bool: + """Test if the address is unspecified. + +Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + +""" @property - def num_addresses(self) -> int: ... - def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... + def num_addresses(self) -> int: + """Number of hosts in the current subnet. 
+""" + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: + """Tell if self is partly contained in other. +""" @property def prefixlen(self) -> int: ... - def subnet_of(self, other: Self) -> bool: ... - def supernet_of(self, other: Self) -> bool: ... - def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... - def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... + def subnet_of(self, other: Self) -> bool: + """Return True if this network is a subnet of other. +""" + def supernet_of(self, other: Self) -> bool: + """Return True if this network is a supernet of other. +""" + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: + """The subnets which join to make the current subnet. + +In the case that self contains only one IP +(self._prefixlen == 32 for IPv4 or self._prefixlen == 128 +for IPv6), yield an iterator with just ourself. + +Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. + This should not be set if prefixlen_diff is also set. + +Returns: + An iterator of IPv(4|6) objects. + +Raises: + ValueError: The prefixlen_diff is too small or too large. + OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + +""" + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: + """The supernet containing the current network. + +Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. + +Returns: + An IPv4 network object. + +Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have + a negative prefix length. + OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + +""" @property def with_hostmask(self) -> str: ... @property @@ -107,6 +394,12 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]): def hostmask(self) -> _A: ... class _BaseV4: + """Base IPv4 object. + +The following methods are used by IPv4 objects in both single IP +addresses and networks. + +""" __slots__ = () if sys.version_info >= (3, 14): version: Final = 4 @@ -118,30 +411,158 @@ class _BaseV4: def max_prefixlen(self) -> Literal[32]: ... class IPv4Address(_BaseV4, _BaseAddress): + """Represent and manipulate single IPv4 Addresses. +""" __slots__ = ("_ip", "__weakref__") - def __init__(self, address: object) -> None: ... + def __init__(self, address: object) -> None: + """ +Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv4Address('192.0.2.1') == IPv4Address(3221225985). + or, more generally + IPv4Address(int(IPv4Address('192.0.2.1'))) == + IPv4Address('192.0.2.1') + +Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + +""" @property - def is_global(self) -> bool: ... 
+ def is_global(self) -> bool: + """``True`` if the address is defined as globally reachable by +iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ +(for IPv6) with the following exception: + +For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the +semantics of the underlying IPv4 addresses and the following condition holds +(see :attr:`IPv6Address.ipv4_mapped`):: + + address.is_global == address.ipv4_mapped.is_global + +``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` +IPv4 range where they are both ``False``. +""" @property - def is_link_local(self) -> bool: ... + def is_link_local(self) -> bool: + """Test if the address is reserved for link-local. + +Returns: + A boolean, True if the address is link-local per RFC 3927. + +""" @property - def is_loopback(self) -> bool: ... + def is_loopback(self) -> bool: + """Test if the address is a loopback address. + +Returns: + A boolean, True if the address is a loopback per RFC 3330. + +""" @property - def is_multicast(self) -> bool: ... + def is_multicast(self) -> bool: + """Test if the address is reserved for multicast use. + +Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + +""" @property - def is_private(self) -> bool: ... + def is_private(self) -> bool: + """``True`` if the address is defined as not globally reachable by +iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ +(for IPv6) with the following exceptions: + +* ``is_private`` is ``False`` for ``100.64.0.0/10`` +* For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: + + address.is_private == address.ipv4_mapped.is_private + +``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` +IPv4 range where they are both ``False``. +""" @property - def is_reserved(self) -> bool: ... + def is_reserved(self) -> bool: + """Test if the address is otherwise IETF reserved. + +Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + +""" @property - def is_unspecified(self) -> bool: ... + def is_unspecified(self) -> bool: + """Test if the address is unspecified. + +Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. + +""" @property - def packed(self) -> bytes: ... + def packed(self) -> bytes: + """The binary representation of this address. +""" if sys.version_info >= (3, 13): @property - def ipv6_mapped(self) -> IPv6Address: ... + def ipv6_mapped(self) -> IPv6Address: + """Return the IPv4-mapped IPv6 address. + +Returns: + The IPv4-mapped IPv6 address per RFC 4291. + +""" class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): - def __init__(self, address: object, strict: bool = True) -> None: ... + """This class represents and manipulates 32-bit IPv4 network + addresses.. + +Attributes: [examples for IPv4Network('192.0.2.0/27')] + .network_address: IPv4Address('192.0.2.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast_address: IPv4Address('192.0.2.32') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + +""" + def __init__(self, address: object, strict: bool = True) -> None: + """Instantiate a new IPv4 network object. + +Args: + address: A string or integer representing the IP [& network]. + '192.0.2.0/24' + '192.0.2.0/255.255.255.0' + '192.0.2.0/0.0.0.255' + are all functionally the same in IPv4. 
Similarly, + '192.0.2.1' + '192.0.2.1/255.255.255.255' + '192.0.2.1/32' + are also functionally equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.0.2.1') == IPv4Network(3221225985) + or, more generally + IPv4Interface(int(IPv4Interface('192.0.2.1'))) == + IPv4Interface('192.0.2.1') + +Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict is True and a network address is not + supplied. +""" class IPv4Interface(IPv4Address): netmask: IPv4Address @@ -160,6 +581,12 @@ class IPv4Interface(IPv4Address): def with_prefixlen(self) -> str: ... class _BaseV6: + """Base IPv6 object. + +The following methods are used by IPv6 objects in both single IP +addresses and networks. + +""" __slots__ = () if sys.version_info >= (3, 14): version: Final = 6 @@ -171,41 +598,212 @@ class _BaseV6: def max_prefixlen(self) -> Literal[128]: ... class IPv6Address(_BaseV6, _BaseAddress): + """Represent and manipulate single IPv6 Addresses. +""" __slots__ = ("_ip", "_scope_id", "__weakref__") - def __init__(self, address: object) -> None: ... + def __init__(self, address: object) -> None: + """Instantiate a new IPv6 address object. + +Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:db8::') == + IPv6Address(42540766411282592856903984951653826560) + or, more generally + IPv6Address(int(IPv6Address('2001:db8::'))) == + IPv6Address('2001:db8::') + +Raises: + AddressValueError: If address isn't a valid IPv6 address. + +""" @property - def is_global(self) -> bool: ... + def is_global(self) -> bool: + """``True`` if the address is defined as globally reachable by +iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ +(for IPv6) with the following exception: + +For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the +semantics of the underlying IPv4 addresses and the following condition holds +(see :attr:`IPv6Address.ipv4_mapped`):: + + address.is_global == address.ipv4_mapped.is_global + +``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` +IPv4 range where they are both ``False``. +""" @property - def is_link_local(self) -> bool: ... + def is_link_local(self) -> bool: + """Test if the address is reserved for link-local. + +Returns: + A boolean, True if the address is reserved per RFC 4291. + +""" @property - def is_loopback(self) -> bool: ... + def is_loopback(self) -> bool: + """Test if the address is a loopback address. + +Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + +""" @property - def is_multicast(self) -> bool: ... + def is_multicast(self) -> bool: + """Test if the address is reserved for multicast use. + +Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + +""" @property - def is_private(self) -> bool: ... 
+ def is_private(self) -> bool: + """``True`` if the address is defined as not globally reachable by +iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ +(for IPv6) with the following exceptions: + +* ``is_private`` is ``False`` for ``100.64.0.0/10`` +* For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: + + address.is_private == address.ipv4_mapped.is_private + +``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` +IPv4 range where they are both ``False``. +""" @property - def is_reserved(self) -> bool: ... + def is_reserved(self) -> bool: + """Test if the address is otherwise IETF reserved. + +Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + +""" @property - def is_unspecified(self) -> bool: ... + def is_unspecified(self) -> bool: + """Test if the address is unspecified. + +Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + +""" @property - def packed(self) -> bytes: ... + def packed(self) -> bytes: + """The binary representation of this address. +""" @property - def ipv4_mapped(self) -> IPv4Address | None: ... + def ipv4_mapped(self) -> IPv4Address | None: + """Return the IPv4 mapped address. + +Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + +""" @property - def is_site_local(self) -> bool: ... + def is_site_local(self) -> bool: + """Test if the address is reserved for site-local. + +Note that the site-local address space has been deprecated by RFC 3879. +Use is_private to test if this address is in the space of unique local +addresses as defined by RFC 4193. + +Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + +""" @property - def sixtofour(self) -> IPv4Address | None: ... + def sixtofour(self) -> IPv4Address | None: + """Return the IPv4 6to4 embedded address. + +Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + +""" @property - def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... + def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: + """Tuple of embedded teredo IPs. + +Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + +""" @property - def scope_id(self) -> str | None: ... + def scope_id(self) -> str | None: + """Identifier of a particular zone of the address's scope. + +See RFC 4007 for details. + +Returns: + A string identifying the zone of the address if specified, else None. + +""" def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): - def __init__(self, address: object, strict: bool = True) -> None: ... - @property - def is_site_local(self) -> bool: ... + """This class represents and manipulates 128-bit IPv6 networks. + +Attributes: [examples for IPv6('2001:db8::1000/124')] + .network_address: IPv6Address('2001:db8::1000') + .hostmask: IPv6Address('::f') + .broadcast_address: IPv6Address('2001:db8::100f') + .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') + .prefixlen: 124 + +""" + def __init__(self, address: object, strict: bool = True) -> None: + """Instantiate a new IPv6 Network object. 
+ +Args: + address: A string or integer representing the IPv6 network or the + IP and prefix/netmask. + '2001:db8::/128' + '2001:db8:0000:0000:0000:0000:0000:0000/128' + '2001:db8::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:db8::') == + IPv6Network(42540766411282592856903984951653826560) + or, more generally + IPv6Network(int(IPv6Network('2001:db8::'))) == + IPv6Network('2001:db8::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 2001:db8::1000/124 and not an + IP address on a network, eg, 2001:db8::1/124. + +Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. +""" + @property + def is_site_local(self) -> bool: + """Test if the address is reserved for site-local. + +Note that the site-local address space has been deprecated by RFC 3879. +Use is_private to test if this address is in the space of unique local +addresses as defined by RFC 4193. + +Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + +""" class IPv6Interface(IPv6Address): netmask: IPv6Address @@ -223,25 +821,110 @@ class IPv6Interface(IPv6Address): @property def with_prefixlen(self) -> str: ... -def v4_int_to_packed(address: int) -> bytes: ... -def v6_int_to_packed(address: int) -> bytes: ... +def v4_int_to_packed(address: int) -> bytes: + """Represent an address as 4 packed bytes in network (big-endian) order. + +Args: + address: An integer representation of an IPv4 IP address. + +Returns: + The integer address packed as 4 bytes in network (big-endian) order. + +Raises: + ValueError: If the integer is negative or too large to be an + IPv4 IP address. + +""" +def v6_int_to_packed(address: int) -> bytes: + """Represent an address as 16 packed bytes in network (big-endian) order. + +Args: + address: An integer representation of an IPv6 IP address. + +Returns: + The integer address packed as 16 bytes in network (big-endian) order. + +""" # Third overload is technically incorrect, but convenient when first and last are return values of ip_address() @overload -def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... +def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: + """Summarize a network range given the first and last IP addresses. + +Example: + >>> list(summarize_address_range(IPv4Address('192.0.2.0'), + ... IPv4Address('192.0.2.130'))) + ... #doctest: +NORMALIZE_WHITESPACE + [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), + IPv4Network('192.0.2.130/32')] + +Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. + +Returns: + An iterator of the summarized IPv(4|6) network objects. + +Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version of the first address is not 4 or 6. + +""" @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... 
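
The ipaddress docstrings above describe how the module-level helpers and the network methods compose; the following is a minimal, self-contained sketch that exercises only the documented API (nothing here is specific to this stub file, and the concrete prefixes are just illustrative):

    import ipaddress

    # ip_network() parses strings or ints; strict=True rejects host bits set.
    net = ipaddress.ip_network("192.0.2.0/28")

    # hosts() yields usable addresses (network and broadcast excluded for IPv4).
    usable = list(net.hosts())                      # 192.0.2.1 .. 192.0.2.14

    # subnets()/supernet() move down and up the prefix hierarchy.
    halves = list(net.subnets(prefixlen_diff=1))    # two /29 networks
    parent = net.supernet(new_prefix=24)            # 192.0.2.0/24
    assert net.subnet_of(parent)

    # address_exclude() removes one block from a larger one.
    hole = ipaddress.ip_network("192.0.2.1/32")
    remainder = list(net.address_exclude(hole))     # /32, /31, /30, /29 pieces

    # summarize_address_range() goes the other way: an address range -> networks.
    first = ipaddress.ip_address("192.0.2.0")
    last = ipaddress.ip_address("192.0.2.130")
    summary = list(ipaddress.summarize_address_range(first, last))
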
@overload def summarize_address_range( first: IPv4Address | IPv6Address, last: IPv4Address | IPv6Address ) -> Iterator[IPv4Network] | Iterator[IPv6Network]: ... -def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... +def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: + """Collapse a list of IP objects. + +Example: + collapse_addresses([IPv4Network('192.0.2.0/25'), + IPv4Network('192.0.2.128/25')]) -> + [IPv4Network('192.0.2.0/24')] + +Args: + addresses: An iterable of IPv4Network or IPv6Network objects. + +Returns: + An iterator of the collapsed IPv(4|6)Network objects. + +Raises: + TypeError: If passed a list of mixed version objects. + +""" @overload -def get_mixed_type_key(obj: _A) -> tuple[int, _A]: ... +def get_mixed_type_key(obj: _A) -> tuple[int, _A]: + """Return a key suitable for sorting between networks and addresses. + +Address and Network objects are not sortable by default; they're +fundamentally different so the expression + + IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') + +doesn't make any sense. There are some times however, where you may wish +to have ipaddress sort these for you anyway. If you need to do this, you +can use this function as the key= argument to sorted(). + +Args: + obj: either a Network or Address object. +Returns: + appropriate key. + +""" @overload def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... -class AddressValueError(ValueError): ... -class NetmaskValueError(ValueError): ... +class AddressValueError(ValueError): + """A Value Error related to the address. +""" +class NetmaskValueError(ValueError): + """A Value Error related to the netmask. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi index fe4ccbdf8ae97..e34674de9ef01 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi @@ -1,3 +1,33 @@ +"""Functional tools for creating and using iterators. + +Infinite iterators: +count(start=0, step=1) --> start, start+step, start+2*step, ... +cycle(p) --> p0, p1, ... plast, p0, p1, ... +repeat(elem [,n]) --> elem, elem, elem, ... endlessly or up to n times + +Iterators terminating on the shortest input sequence: +accumulate(p[, func]) --> p0, p0+p1, p0+p1+p2 +batched(p, n) --> [p0, p1, ..., p_n-1], [p_n, p_n+1, ..., p_2n-1], ... +chain(p, q, ...) --> p0, p1, ... plast, q0, q1, ... +chain.from_iterable([p, q, ...]) --> p0, p1, ... plast, q0, q1, ... +compress(data, selectors) --> (d[0] if s[0]), (d[1] if s[1]), ... +dropwhile(predicate, seq) --> seq[n], seq[n+1], starting when predicate fails +groupby(iterable[, keyfunc]) --> sub-iterators grouped by value of keyfunc(v) +filterfalse(predicate, seq) --> elements of seq where predicate(elem) is False +islice(seq, [start,] stop [, step]) --> elements from + seq[start:stop:step] +pairwise(s) --> (s[0],s[1]), (s[1],s[2]), (s[2], s[3]), ... +starmap(fun, seq) --> fun(*seq[0]), fun(*seq[1]), ... +tee(it, n=2) --> (it1, it2 , ... itn) splits one iterator into n +takewhile(predicate, seq) --> seq[0], seq[1], until predicate fails +zip_longest(p, q, ...) --> (p[0], q[0]), (p[1], q[1]), ... + +Combinatoric generators: +product(p, q, ... 
[repeat=1]) --> cartesian product +permutations(p[, r]) +combinations(p, r) +combinations_with_replacement(p, r) +""" import sys from _typeshed import MaybeNone from collections.abc import Callable, Iterable, Iterator @@ -29,101 +59,221 @@ _Predicate: TypeAlias = Callable[[_T], object] # but we can't enforce the add method @disjoint_base class count(Generic[_N]): + """Return a count object whose .__next__() method returns consecutive values. + +Equivalent to: + def count(firstval=0, step=1): + x = firstval + while 1: + yield x + x += step +""" @overload def __new__(cls) -> count[int]: ... @overload def __new__(cls, start: _N, step: _Step = ...) -> count[_N]: ... @overload def __new__(cls, *, step: _N) -> count[_N]: ... - def __next__(self) -> _N: ... - def __iter__(self) -> Self: ... + def __next__(self) -> _N: + """Implement next(self). +""" + def __iter__(self) -> Self: + """Implement iter(self). +""" @disjoint_base class cycle(Generic[_T]): + """Return elements from the iterable until it is exhausted. Then repeat the sequence indefinitely. +""" def __new__(cls, iterable: Iterable[_T], /) -> Self: ... - def __next__(self) -> _T: ... - def __iter__(self) -> Self: ... + def __next__(self) -> _T: + """Implement next(self). +""" + def __iter__(self) -> Self: + """Implement iter(self). +""" @disjoint_base class repeat(Generic[_T]): + """repeat(object [,times]) -> create an iterator which returns the object +for the specified number of times. If not specified, returns the object +endlessly. +""" @overload def __new__(cls, object: _T) -> Self: ... @overload def __new__(cls, object: _T, times: int) -> Self: ... - def __next__(self) -> _T: ... - def __iter__(self) -> Self: ... - def __length_hint__(self) -> int: ... + def __next__(self) -> _T: + """Implement next(self). +""" + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __length_hint__(self) -> int: + """Private method returning an estimate of len(list(it)). +""" @disjoint_base class accumulate(Generic[_T]): + """Return series of accumulated sums (or other binary function results). +""" @overload def __new__(cls, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> Self: ... @overload def __new__(cls, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" @disjoint_base class chain(Generic[_T]): + """Return a chain object whose .__next__() method returns elements from the +first iterable until it is exhausted, then elements from the next +iterable, until all of the iterables are exhausted. +""" def __new__(cls, *iterables: Iterable[_T]) -> Self: ... - def __next__(self) -> _T: ... - def __iter__(self) -> Self: ... + def __next__(self) -> _T: + """Implement next(self). +""" + def __iter__(self) -> Self: + """Implement iter(self). +""" @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable - def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: + """Alternative chain() constructor taking a single iterable argument that evaluates lazily. 
+""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @disjoint_base class compress(Generic[_T]): + """Return data elements corresponding to true selector elements. + +Forms a shorter iterator from selected data elements using the selectors to +choose the data elements. +""" def __new__(cls, data: Iterable[_T], selectors: Iterable[Any]) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" @disjoint_base class dropwhile(Generic[_T]): + """Drop items from the iterable while predicate(item) is true. + +Afterwards, return every element until the iterable is exhausted. +""" def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" @disjoint_base class filterfalse(Generic[_T]): + """Return those items of iterable for which function(item) is false. + +If function is None, return the items that are false. +""" def __new__(cls, function: _Predicate[_T] | None, iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" @disjoint_base class groupby(Generic[_T_co, _S_co]): + """make an iterator that returns consecutive keys and groups from the iterable + + iterable + Elements to divide into groups according to the key function. + key + A function for computing the group category for each element. + If the key function is not specified or is None, the element itself + is used for grouping. +""" @overload def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: + """Implement next(self). +""" @disjoint_base class islice(Generic[_T]): + """islice(iterable, stop) --> islice object +islice(iterable, start, stop[, step]) --> islice object + +Return an iterator whose next() method returns selected values from an +iterable. If start is specified, will skip all preceding elements; +otherwise, start defaults to zero. Step defaults to one. If +specified as another value, step determines how many values are +skipped between successive calls. Works like a slice() on a list +but returns an iterator. +""" @overload def __new__(cls, iterable: Iterable[_T], stop: int | None, /) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T], start: int | None, stop: int | None, step: int | None = ..., /) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" @disjoint_base class starmap(Generic[_T_co]): + """Return an iterator whose values are returned from the function evaluated with an argument tuple taken from the given sequence. +""" def __new__(cls, function: Callable[..., _T], iterable: Iterable[Iterable[Any]], /) -> starmap[_T]: ... - def __iter__(self) -> Self: ... 
- def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" @disjoint_base class takewhile(Generic[_T]): + """Return successive entries from an iterable as long as the predicate evaluates to true for each entry. +""" def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T: + """Implement next(self). +""" -def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: ... +def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: + """Returns a tuple of n independent iterators. +""" @disjoint_base class zip_longest(Generic[_T_co]): + """Return a zip_longest object whose .__next__() method returns a tuple where +the i-th element comes from the i-th iterable argument. The .__next__() +method continues until the longest iterable in the argument sequence +is exhausted and then it raises StopIteration. When the shorter iterables +are exhausted, the fillvalue is substituted in their place. The fillvalue +defaults to None or can be specified by a keyword argument. +""" # one iterable (fillvalue doesn't matter) @overload def __new__(cls, iter1: Iterable[_T1], /, *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ... @@ -198,11 +348,29 @@ class zip_longest(Generic[_T_co]): *iterables: Iterable[_T], fillvalue: _T, ) -> zip_longest[tuple[_T, ...]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" @disjoint_base class product(Generic[_T_co]): + """Cartesian product of input iterables. Equivalent to nested for-loops. + +For example, product(A, B) returns the same as: ((x,y) for x in A for y in B). +The leftmost iterators are in the outermost for-loop, so the output tuples +cycle in a manner similar to an odometer (with the rightmost element changing +on every iteration). + +To compute the product of an iterable with itself, specify the number +of repetitions with the optional repeat keyword argument. For example, +product(A, repeat=4) means the same as product(A, A, A, A). + +product('ab', range(3)) --> ('a',0) ('a',1) ('a',2) ('b',0) ('b',1) ('b',2) +product((0,1), (0,1), (0,1)) --> (0,0,0) (0,0,1) (0,1,0) (0,1,1) (1,0,0) ... +""" @overload def __new__(cls, iter1: Iterable[_T1], /) -> product[tuple[_T1]]: ... @overload @@ -284,11 +452,19 @@ class product(Generic[_T_co]): ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6, _T7, _T8, _T9, _T10]]: ... @overload def __new__(cls, *iterables: Iterable[_T1], repeat: int = 1) -> product[tuple[_T1, ...]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" @disjoint_base class permutations(Generic[_T_co]): + """Return successive r-length permutations of elements in the iterable. + +permutations(range(3), 2) --> (0,1), (0,2), (1,0), (1,2), (2,0), (2,1) +""" @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> permutations[tuple[_T, _T]]: ... @overload @@ -299,11 +475,19 @@ class permutations(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> permutations[tuple[_T, _T, _T, _T, _T]]: ... 
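
The itertools class docstrings above are terse; a short runnable sketch of a few of the documented iterators, using only behavior the docstrings themselves state (the sample inputs are arbitrary):

    import itertools

    # count()/islice(): take the first five values of an infinite counter.
    evens = list(itertools.islice(itertools.count(0, 2), 5))      # [0, 2, 4, 6, 8]

    # accumulate(): running totals (the default function is addition).
    totals = list(itertools.accumulate([1, 2, 3, 4]))              # [1, 3, 6, 10]

    # groupby(): consecutive runs grouped by the key (the element itself here).
    runs = [(k, list(g)) for k, g in itertools.groupby("aaabbc")]

    # zip_longest(): pads the shorter input with fillvalue.
    pairs = list(itertools.zip_longest("ab", [1, 2, 3], fillvalue=None))
    # [('a', 1), ('b', 2), (None, 3)]

    # product()/permutations(): combinatoric generators.
    grid = list(itertools.product((0, 1), repeat=2))               # (0,0) (0,1) (1,0) (1,1)
    perms = list(itertools.permutations(range(3), 2))              # 6 ordered pairs
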
@overload def __new__(cls, iterable: Iterable[_T], r: int | None = ...) -> permutations[tuple[_T, ...]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" @disjoint_base class combinations(Generic[_T_co]): + """Return successive r-length combinations of elements in the iterable. + +combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3) +""" @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... @overload @@ -314,11 +498,19 @@ class combinations(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" @disjoint_base class combinations_with_replacement(Generic[_T_co]): + """Return successive r-length combinations of elements in the iterable allowing individual elements to have successive repeats. + +combinations_with_replacement('ABC', 2) --> ('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C') +""" @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations_with_replacement[tuple[_T, _T]]: ... @overload @@ -329,23 +521,56 @@ class combinations_with_replacement(Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations_with_replacement[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations_with_replacement[tuple[_T, ...]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" if sys.version_info >= (3, 10): @disjoint_base class pairwise(Generic[_T_co]): + """Return an iterator of overlapping pairs taken from the input iterator. + + s -> (s0,s1), (s1,s2), (s2, s3), ... +""" def __new__(cls, iterable: Iterable[_T], /) -> pairwise[tuple[_T, _T]]: ... - def __iter__(self) -> Self: ... - def __next__(self) -> _T_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _T_co: + """Implement next(self). +""" if sys.version_info >= (3, 12): @disjoint_base class batched(Generic[_T_co]): + """Batch data into tuples of length n. The last batch may be shorter than n. + +Loops over the input iterable and accumulates data into tuples +up to size n. The input is consumed lazily, just enough to +fill a batch. The result is yielded as soon as a batch is full +or when the input iterable is exhausted. + + >>> for batch in batched('ABCDEFG', 3): + ... print(batch) + ... + ('A', 'B', 'C') + ('D', 'E', 'F') + ('G',) + +If "strict" is True, raises a ValueError if the final batch is shorter +than n. +""" if sys.version_info >= (3, 13): def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ... else: def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... - def __iter__(self) -> Self: ... - def __next__(self) -> tuple[_T_co, ...]: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> tuple[_T_co, ...]: + """Implement next(self). 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi index 63e9718ee1512..8d9912dd25f0e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi @@ -1,3 +1,100 @@ +"""JSON (JavaScript Object Notation) is a subset of +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data +interchange format. + +:mod:`json` exposes an API familiar to users of the standard library +:mod:`marshal` and :mod:`pickle` modules. It is derived from a +version of the externally maintained simplejson library. + +Encoding basic Python object hierarchies:: + + >>> import json + >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) + '["foo", {"bar": ["baz", null, 1.0, 2]}]' + >>> print(json.dumps("\\"foo\\bar")) + "\\"foo\\bar" + >>> print(json.dumps('\\u1234')) + "\\u1234" + >>> print(json.dumps('\\\\')) + "\\\\" + >>> print(json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)) + {"a": 0, "b": 0, "c": 0} + >>> from io import StringIO + >>> io = StringIO() + >>> json.dump(['streaming API'], io) + >>> io.getvalue() + '["streaming API"]' + +Compact encoding:: + + >>> import json + >>> mydict = {'4': 5, '6': 7} + >>> json.dumps([1,2,3,mydict], separators=(',', ':')) + '[1,2,3,{"4":5,"6":7}]' + +Pretty printing:: + + >>> import json + >>> print(json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)) + { + "4": 5, + "6": 7 + } + +Decoding JSON:: + + >>> import json + >>> obj = ['foo', {'bar': ['baz', None, 1.0, 2]}] + >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj + True + >>> json.loads('"\\\\"foo\\\\bar"') == '"foo\\x08ar' + True + >>> from io import StringIO + >>> io = StringIO('["streaming API"]') + >>> json.load(io)[0] == 'streaming API' + True + +Specializing JSON object decoding:: + + >>> import json + >>> def as_complex(dct): + ... if '__complex__' in dct: + ... return complex(dct['real'], dct['imag']) + ... return dct + ... + >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', + ... object_hook=as_complex) + (1+2j) + >>> from decimal import Decimal + >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') + True + +Specializing JSON object encoding:: + + >>> import json + >>> def encode_complex(obj): + ... if isinstance(obj, complex): + ... return [obj.real, obj.imag] + ... raise TypeError(f'Object of type {obj.__class__.__name__} ' + ... f'is not JSON serializable') + ... + >>> json.dumps(2 + 1j, default=encode_complex) + '[2.0, 1.0]' + >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) + '[2.0, 1.0]' + >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) + '[2.0, 1.0]' + + +Using json from the shell to validate and pretty-print:: + + $ echo '{"json":"obj"}' | python -m json + { + "json": "obj" + } + $ echo '{ 1.2:3.4}' | python -m json + Expecting property name enclosed in double quotes: line 1 column 3 (char 2) +""" from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable from typing import Any @@ -20,7 +117,47 @@ def dumps( default: Callable[[Any], Any] | None = None, sort_keys: bool = False, **kwds: Any, -) -> str: ... +) -> str: + """Serialize ``obj`` to a JSON formatted ``str``. + +If ``skipkeys`` is true then ``dict`` keys that are not basic types +(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped +instead of raising a ``TypeError``. 
+ +If ``ensure_ascii`` is false, then the return value can contain non-ASCII +characters if they appear in strings contained in ``obj``. Otherwise, all +such characters are escaped in JSON strings. + +If ``check_circular`` is false, then the circular reference check +for container types will be skipped and a circular reference will +result in an ``RecursionError`` (or worse). + +If ``allow_nan`` is false, then it will be a ``ValueError`` to +serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in +strict compliance of the JSON specification, instead of using the +JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + +If ``indent`` is a non-negative integer, then JSON array elements and +object members will be pretty-printed with that indent level. An indent +level of 0 will only insert newlines. ``None`` is the most compact +representation. + +If specified, ``separators`` should be an ``(item_separator, key_separator)`` +tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and +``(',', ': ')`` otherwise. To get the most compact JSON representation, +you should specify ``(',', ':')`` to eliminate whitespace. + +``default(obj)`` is a function that should return a serializable version +of obj or raise TypeError. The default simply raises TypeError. + +If *sort_keys* is true (default: ``False``), then the output of +dictionaries will be sorted by key. + +To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the +``.default()`` method to serialize additional types), specify it with +the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + +""" def dump( obj: Any, fp: SupportsWrite[str], @@ -35,7 +172,48 @@ def dump( default: Callable[[Any], Any] | None = None, sort_keys: bool = False, **kwds: Any, -) -> None: ... +) -> None: + """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a +``.write()``-supporting file-like object). + +If ``skipkeys`` is true then ``dict`` keys that are not basic types +(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped +instead of raising a ``TypeError``. + +If ``ensure_ascii`` is false, then the strings written to ``fp`` can +contain non-ASCII characters if they appear in strings contained in +``obj``. Otherwise, all such characters are escaped in JSON strings. + +If ``check_circular`` is false, then the circular reference check +for container types will be skipped and a circular reference will +result in an ``RecursionError`` (or worse). + +If ``allow_nan`` is false, then it will be a ``ValueError`` to +serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) +in strict compliance of the JSON specification, instead of using the +JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + +If ``indent`` is a non-negative integer, then JSON array elements and +object members will be pretty-printed with that indent level. An indent +level of 0 will only insert newlines. ``None`` is the most compact +representation. + +If specified, ``separators`` should be an ``(item_separator, key_separator)`` +tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and +``(',', ': ')`` otherwise. To get the most compact JSON representation, +you should specify ``(',', ':')`` to eliminate whitespace. + +``default(obj)`` is a function that should return a serializable version +of obj or raise TypeError. The default simply raises TypeError. + +If *sort_keys* is true (default: ``False``), then the output of +dictionaries will be sorted by key. + +To use a custom ``JSONEncoder`` subclass (e.g. 
one that overrides the +``.default()`` method to serialize additional types), specify it with +the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + +""" def loads( s: str | bytes | bytearray, *, @@ -46,7 +224,39 @@ def loads( parse_constant: Callable[[str], Any] | None = None, object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, -) -> Any: ... +) -> Any: + """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance +containing a JSON document) to a Python object. + +``object_hook`` is an optional function that will be called with the +result of any object literal decode (a ``dict``). The return value of +``object_hook`` will be used instead of the ``dict``. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting). + +``object_pairs_hook`` is an optional function that will be called with the +result of any object literal decoded with an ordered list of pairs. The +return value of ``object_pairs_hook`` will be used instead of the ``dict``. +This feature can be used to implement custom decoders. If ``object_hook`` +is also defined, the ``object_pairs_hook`` takes priority. + +``parse_float``, if specified, will be called with the string +of every JSON float to be decoded. By default this is equivalent to +float(num_str). This can be used to use another datatype or parser +for JSON floats (e.g. decimal.Decimal). + +``parse_int``, if specified, will be called with the string +of every JSON int to be decoded. By default this is equivalent to +int(num_str). This can be used to use another datatype or parser +for JSON integers (e.g. float). + +``parse_constant``, if specified, will be called with one of the +following strings: -Infinity, Infinity, NaN. +This can be used to raise an exception if invalid JSON numbers +are encountered. + +To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` +kwarg; otherwise ``JSONDecoder`` is used. +""" def load( fp: SupportsRead[str | bytes], *, @@ -57,5 +267,22 @@ def load( parse_constant: Callable[[str], Any] | None = None, object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, -) -> Any: ... +) -> Any: + """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing +a JSON document) to a Python object. + +``object_hook`` is an optional function that will be called with the +result of any object literal decode (a ``dict``). The return value of +``object_hook`` will be used instead of the ``dict``. This feature +can be used to implement custom decoders (e.g. JSON-RPC class hinting). + +``object_pairs_hook`` is an optional function that will be called with the +result of any object literal decoded with an ordered list of pairs. The +return value of ``object_pairs_hook`` will be used instead of the ``dict``. +This feature can be used to implement custom decoders. If ``object_hook`` +is also defined, the ``object_pairs_hook`` takes priority. + +To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` +kwarg; otherwise ``JSONDecoder`` is used. +""" def detect_encoding(b: bytes | bytearray) -> str: ... 
# undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi index 8debfe6cd65a9..41e4cd16e41d2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi @@ -1,9 +1,20 @@ +"""Implementation of JSONDecoder +""" from collections.abc import Callable from typing import Any __all__ = ["JSONDecoder", "JSONDecodeError"] class JSONDecodeError(ValueError): + """Subclass of ValueError with the following additional properties: + +msg: The unformatted error message +doc: The JSON document being parsed +pos: The start index of doc where parsing failed +lineno: The line corresponding to pos +colno: The column corresponding to pos + +""" msg: str doc: str pos: int @@ -12,6 +23,34 @@ class JSONDecodeError(ValueError): def __init__(self, msg: str, doc: str, pos: int) -> None: ... class JSONDecoder: + """Simple JSON decoder + +Performs the following translations in decoding by default: + ++---------------+-------------------+ +| JSON | Python | ++===============+===================+ +| object | dict | ++---------------+-------------------+ +| array | list | ++---------------+-------------------+ +| string | str | ++---------------+-------------------+ +| number (int) | int | ++---------------+-------------------+ +| number (real) | float | ++---------------+-------------------+ +| true | True | ++---------------+-------------------+ +| false | False | ++---------------+-------------------+ +| null | None | ++---------------+-------------------+ + +It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as +their corresponding ``float`` values, which is outside the JSON spec. + +""" object_hook: Callable[[dict[str, Any]], Any] parse_float: Callable[[str], Any] parse_int: Callable[[str], Any] @@ -27,6 +66,50 @@ class JSONDecoder: parse_constant: Callable[[str], Any] | None = None, strict: bool = True, object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, - ) -> None: ... - def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented - def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... + ) -> None: + """``object_hook``, if specified, will be called with the result +of every JSON object decoded and its return value will be used in +place of the given ``dict``. This can be used to provide custom +deserializations (e.g. to support JSON-RPC class hinting). + +``object_pairs_hook``, if specified will be called with the result of +every JSON object decoded with an ordered list of pairs. The return +value of ``object_pairs_hook`` will be used instead of the ``dict``. +This feature can be used to implement custom decoders. +If ``object_hook`` is also defined, the ``object_pairs_hook`` takes +priority. + +``parse_float``, if specified, will be called with the string +of every JSON float to be decoded. By default this is equivalent to +float(num_str). This can be used to use another datatype or parser +for JSON floats (e.g. decimal.Decimal). + +``parse_int``, if specified, will be called with the string +of every JSON int to be decoded. By default this is equivalent to +int(num_str). This can be used to use another datatype or parser +for JSON integers (e.g. float). + +``parse_constant``, if specified, will be called with one of the +following strings: -Infinity, Infinity, NaN. +This can be used to raise an exception if invalid JSON numbers +are encountered. 
+ +If ``strict`` is false (true is the default), then control +characters will be allowed inside strings. Control characters in +this context are those with character codes in the 0-31 range, +including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``. +""" + def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: # _w is undocumented + """Return the Python representation of ``s`` (a ``str`` instance +containing a JSON document). + +""" + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: + """Decode a JSON document from ``s`` (a ``str`` beginning with +a JSON document) and return a 2-tuple of the Python +representation and the index in ``s`` where the document ended. + +This can be used to decode a JSON document from a string that may +have extraneous data at the end. + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi index 83b78666d4a70..205cefa0fb604 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi @@ -1,3 +1,5 @@ +"""Implementation of JSONEncoder +""" from collections.abc import Callable, Iterator from re import Pattern from typing import Any, Final @@ -8,12 +10,54 @@ HAS_UTF8: Final[Pattern[bytes]] # undocumented ESCAPE_DCT: Final[dict[str, str]] # undocumented INFINITY: Final[float] # undocumented -def py_encode_basestring(s: str) -> str: ... # undocumented -def py_encode_basestring_ascii(s: str) -> str: ... # undocumented -def encode_basestring(s: str, /) -> str: ... # undocumented -def encode_basestring_ascii(s: str, /) -> str: ... # undocumented +def py_encode_basestring(s: str) -> str: # undocumented + """Return a JSON representation of a Python string + + """ +def py_encode_basestring_ascii(s: str) -> str: # undocumented + """Return an ASCII-only JSON representation of a Python string + + """ +def encode_basestring(s: str, /) -> str: # undocumented + """encode_basestring(string) -> string + +Return a JSON representation of a Python string +""" +def encode_basestring_ascii(s: str, /) -> str: # undocumented + """encode_basestring_ascii(string) -> string + +Return an ASCII-only JSON representation of a Python string +""" class JSONEncoder: + """Extensible JSON encoder for Python data structures. + +Supports the following objects and types by default: + ++-------------------+---------------+ +| Python | JSON | ++===================+===============+ +| dict | object | ++-------------------+---------------+ +| list, tuple | array | ++-------------------+---------------+ +| str | string | ++-------------------+---------------+ +| int, float | number | ++-------------------+---------------+ +| True | true | ++-------------------+---------------+ +| False | false | ++-------------------+---------------+ +| None | null | ++-------------------+---------------+ + +To extend this to recognize other objects, subclass and implement a +``.default()`` method with another method that returns a serializable +object for ``o`` if possible, otherwise it should call the superclass +implementation (to raise ``TypeError``). + +""" item_separator: str key_separator: str @@ -34,7 +78,80 @@ class JSONEncoder: indent: int | str | None = None, separators: tuple[str, str] | None = None, default: Callable[..., Any] | None = None, - ) -> None: ... - def default(self, o: Any) -> Any: ... - def encode(self, o: Any) -> str: ... - def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... 
+ ) -> None: + """Constructor for JSONEncoder, with sensible defaults. + +If skipkeys is false, then it is a TypeError to attempt +encoding of keys that are not str, int, float, bool or None. +If skipkeys is True, such items are simply skipped. + +If ensure_ascii is true, the output is guaranteed to be str +objects with all incoming non-ASCII characters escaped. If +ensure_ascii is false, the output can contain non-ASCII characters. + +If check_circular is true, then lists, dicts, and custom encoded +objects will be checked for circular references during encoding to +prevent an infinite recursion (which would cause an RecursionError). +Otherwise, no such check takes place. + +If allow_nan is true, then NaN, Infinity, and -Infinity will be +encoded as such. This behavior is not JSON specification compliant, +but is consistent with most JavaScript based encoders and decoders. +Otherwise, it will be a ValueError to encode such floats. + +If sort_keys is true, then the output of dictionaries will be +sorted by key; this is useful for regression tests to ensure +that JSON serializations can be compared on a day-to-day basis. + +If indent is a non-negative integer, then JSON array +elements and object members will be pretty-printed with that +indent level. An indent level of 0 will only insert newlines. +None is the most compact representation. + +If specified, separators should be an (item_separator, key_separator) +tuple. The default is (', ', ': ') if *indent* is ``None`` and +(',', ': ') otherwise. To get the most compact JSON representation, +you should specify (',', ':') to eliminate whitespace. + +If specified, default is a function that gets called for objects +that can't otherwise be serialized. It should return a JSON encodable +version of the object or raise a ``TypeError``. + +""" + def default(self, o: Any) -> Any: + """Implement this method in a subclass such that it returns +a serializable object for ``o``, or calls the base implementation +(to raise a ``TypeError``). + +For example, to support arbitrary iterators, you could +implement default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + # Let the base class default method raise the TypeError + return super().default(o) + +""" + def encode(self, o: Any) -> str: + """Return a JSON string representation of a Python data structure. + +>>> from json.encoder import JSONEncoder +>>> JSONEncoder().encode({"foo": ["bar", "baz"]}) +'{"foo": ["bar", "baz"]}' + +""" + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: + """Encode the given object and yield each string +representation as available. 
+ +For example:: + + for chunk in JSONEncoder().iterencode(bigobject): + mysocket.write(chunk) + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi index 68b42e92d295e..176199758b490 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi @@ -1,3 +1,5 @@ +"""JSON token scanner +""" from _json import make_scanner as make_scanner from re import Pattern from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi index 7e7363e797f3f..1603efe4b546f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi @@ -1 +1,6 @@ +"""Command-line tool to validate and pretty-print JSON + +See `json.__main__` for a usage example (invocation as +`python -m json.tool` is supported for backwards compatibility). +""" def main() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi index 6b8bdad6beb6a..6f3207b54f81a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi @@ -1,15 +1,30 @@ +"""Keywords (from "Grammar/python.gram") + +This file is automatically generated; please don't muck it up! + +To update the symbols in this file, 'cd' to the top directory of +the python source tree and run: + + PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen Grammar/python.gram Grammar/Tokens Lib/keyword.py + +Alternatively, you can run 'make regen-keyword'. +""" from collections.abc import Sequence from typing import Final __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] -def iskeyword(s: str, /) -> bool: ... +def iskeyword(s: str, /) -> bool: + """x.__contains__(y) <==> y in x. +""" # a list at runtime, but you're not meant to mutate it; # type it as a sequence kwlist: Final[Sequence[str]] -def issoftkeyword(s: str, /) -> bool: ... +def issoftkeyword(s: str, /) -> bool: + """x.__contains__(y) <==> y in x. +""" # a list at runtime, but you're not meant to mutate it; # type it as a sequence diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi index 4c87b664eb200..104c54bb9f99c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi @@ -1,3 +1,10 @@ +"""A bottom-up tree matching algorithm implementation meant to speed +up 2to3's matching process. After the tree patterns are reduced to +their rarest linear path, a linear Aho-Corasick automaton is +created. The linear automaton traverses the linear paths from the +leaves to the root of the AST and returns a set of nodes for further +matching. This reduces significantly the number of candidate nodes. +""" from _typeshed import Incomplete, SupportsGetItem from collections import defaultdict from collections.abc import Iterable @@ -6,6 +13,8 @@ from .fixer_base import BaseFix from .pytree import Leaf, Node class BMNode: + """Class for a node of the Aho-Corasick automaton used in matching +""" count: Incomplete transition_table: Incomplete fixers: Incomplete @@ -14,15 +23,42 @@ class BMNode: def __init__(self) -> None: ... class BottomMatcher: + """The main matcher class. 
After instantiating the patterns should + be added using the add_fixer method +""" match: Incomplete root: Incomplete nodes: Incomplete fixers: Incomplete logger: Incomplete def __init__(self) -> None: ... - def add_fixer(self, fixer: BaseFix) -> None: ... - def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ... - def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ... - def print_ac(self) -> None: ... + def add_fixer(self, fixer: BaseFix) -> None: + """Reduces a fixer's pattern tree to a linear path and adds it + to the matcher(a common Aho-Corasick automaton). The fixer is + appended on the matching states and called when they are + reached +""" + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: + """Recursively adds a linear pattern to the AC automaton +""" + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: + """The main interface with the bottom matcher. The tree is + traversed from the bottom using the constructed + automaton. Nodes are only checked once as the tree is + retraversed. When the automaton fails, we give it one more + shot(in case the above tree matches as a whole with the + rejected leaf), then we break for the next leaf. There is the + special case of multiple arguments(see code comments) where we + recheck the nodes + + Args: + The leaves of the AST tree to be matched + + Returns: + A dictionary of node matches with fixers as the keys + """ + def print_ac(self) -> None: + """Prints a graphviz diagram of the BM automaton(for debugging) +""" def type_repr(type_num: int) -> str | int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi index 06813c94308a0..2983afc568251 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi @@ -1,3 +1,5 @@ +"""Base class for fixers (optional, but recommended). +""" from _typeshed import Incomplete, StrPath from abc import ABCMeta, abstractmethod from collections.abc import MutableMapping @@ -8,6 +10,13 @@ from .pytree import Base, Leaf, Node _N = TypeVar("_N", bound=Base) class BaseFix: + """Optional base class for fixers. + + The subclass name must be FixFooBar where FooBar is the result of + removing underscores and capitalizing the words of the fix name. + For example, the class name for a fixer named 'has_key' should be + FixHasKey. + """ PATTERN: ClassVar[str | None] pattern: Incomplete | None pattern_tree: Incomplete | None @@ -22,21 +31,88 @@ class BaseFix: BM_compatible: ClassVar[bool] syms: Incomplete log: Incomplete - def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ... - def compile_pattern(self) -> None: ... - def set_filename(self, filename: StrPath) -> None: ... - def match(self, node: _N) -> Literal[False] | dict[str, _N]: ... + def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: + """Initializer. Subclass may override. + + Args: + options: a dict containing the options passed to RefactoringTool + that could be used to customize the fixer through the command line. + log: a list to append warnings and other messages to. + """ + def compile_pattern(self) -> None: + """Compiles self.PATTERN into self.pattern. + + Subclass may override if it doesn't want to use + self.{pattern,PATTERN} in .match(). 
+ """ + def set_filename(self, filename: StrPath) -> None: + """Set the filename. + + The main refactoring tool should call this. + """ + def match(self, node: _N) -> Literal[False] | dict[str, _N]: + """Returns match for a given parse tree node. + + Should return a true or false object (not necessarily a bool). + It may return a non-empty dict of matching sub-nodes as + returned by a matching pattern. + + Subclass may override. + """ @abstractmethod - def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ... - def new_name(self, template: str = "xxx_todo_changeme") -> str: ... + def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: + """Returns the transformation for a given parse tree node. + + Args: + node: the root of the parse tree that matched the fixer. + results: a dict mapping symbolic names to part of the match. + + Returns: + None, or a node that is a modified copy of the + argument node. The node argument may also be modified in-place to + effect the same change. + + Subclass *must* override. + """ + def new_name(self, template: str = "xxx_todo_changeme") -> str: + """Return a string suitable for use as an identifier + + The new name is guaranteed not to conflict with other identifiers. + """ first_log: bool def log_message(self, message: str) -> None: ... - def cannot_convert(self, node: Base, reason: str | None = None) -> None: ... - def warning(self, node: Base, reason: str) -> None: ... - def start_tree(self, tree: Node, filename: StrPath) -> None: ... - def finish_tree(self, tree: Node, filename: StrPath) -> None: ... + def cannot_convert(self, node: Base, reason: str | None = None) -> None: + """Warn the user that a given chunk of code is not valid Python 3, + but that it cannot be converted automatically. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + def warning(self, node: Base, reason: str) -> None: + """Used for warning the user about possible uncertainty in the + translation. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + def start_tree(self, tree: Node, filename: StrPath) -> None: + """Some fixers need to maintain tree-wide state. + This method is called once, at the start of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + def finish_tree(self, tree: Node, filename: StrPath) -> None: + """Some fixers need to maintain tree-wide state. + This method is called once, at the conclusion of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ class ConditionalFix(BaseFix, metaclass=ABCMeta): + """ Base class for fixers which not execute if an import is found. +""" skip_on: ClassVar[str | None] def start_tree(self, tree: Node, filename: StrPath, /) -> None: ... def should_skip(self, node: Base) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi index e53e3dd864579..9124c31fdbacd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi @@ -1,3 +1,7 @@ +"""Fixer for apply(). + +This converts apply(func, v, k) into (func)(*v, **k). +""" from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi index 1bf7db2f76e98..18fd9de57eb24 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -1,3 +1,5 @@ +"""Fixer that replaces deprecated unittest method names. +""" from typing import ClassVar, Final, Literal from ..fixer_base import BaseFix diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi index 8ed5ccaa7fd39..41be9dc244ded 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -1,3 +1,5 @@ +"""Fixer for basestring -> str. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi index 1efca6228ea28..dfc572b3ce6e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -1,3 +1,5 @@ +"""Fixer that changes buffer(...) into memoryview(...). +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi index 08c54c3bc376b..d5aac66aca903 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi @@ -1,3 +1,28 @@ +"""Fixer for dict methods. + +d.keys() -> list(d.keys()) +d.items() -> list(d.items()) +d.values() -> list(d.values()) + +d.iterkeys() -> iter(d.keys()) +d.iteritems() -> iter(d.items()) +d.itervalues() -> iter(d.values()) + +d.viewkeys() -> d.keys() +d.viewitems() -> d.items() +d.viewvalues() -> d.values() + +Except in certain very specific contexts: the iter() can be dropped +when the context is list(), sorted(), iter() or for...in; the list() +can be dropped when the context is list() or sorted() (but not iter() +or for...in!). Special contexts that apply to both: list(), sorted(), tuple() +set(), any(), all(), sum(). + +Note: iter(d.keys()) could be written as iter(d) but since the +original d.iterkeys() was also redundant we don't fix this. And there +are (rare) contexts where it makes a difference (e.g. when passing it +as an argument to a function that introspects the argument). +""" from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi index 30930a2c381e9..c5f07a3ac742f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi @@ -1,3 +1,24 @@ +"""Fixer for except statements with named exceptions. + +The following cases will be converted: + +- "except E, T:" where T is a name: + + except E as T: + +- "except E, T:" where T is not a name, tuple or list: + + except E as t: + T = t + + This is done because the target of an "except" clause must be a + name. 
+ +- "except E, T:" where T is a tuple or list literal: + + except E as t: + T = t.args +""" from collections.abc import Generator, Iterable from typing import ClassVar, Literal, TypeVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi index 71e2a820a564d..04a663e397eca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi @@ -1,3 +1,10 @@ +"""Fixer for exec. + +This converts usages of the exec statement into calls to a built-in +exec() function. + +exec code in ns1, ns2 -> exec(code, ns1, ns2) +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi index 8122a6389b124..e4b82b3e44ffe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -1,3 +1,8 @@ +"""Fixer for execfile. + +This converts usages of the execfile function into calls to the built-in +exec() function. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi index 7fc910c0a1bcd..b49c19caa2cb8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -1,3 +1,6 @@ +""" +Convert use of sys.exitfunc to use the atexit module. +""" from _typeshed import Incomplete, StrPath from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi index 638889be8b65b..d621ace99f3cc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi @@ -1,3 +1,14 @@ +"""Fixer that changes filter(F, X) into list(filter(F, X)). + +We avoid the transformation if the filter() call is directly contained +in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or +for V in <>:. + +NOTE: This is still not correct if the original code was depending on +filter(F, X) to return a string if X is a string and a tuple if X is a +tuple. That would require type inference, which we don't do. Let +Python 2.6 figure it out. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi index 60487bb1f2a62..61883779c514a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -1,3 +1,5 @@ +"""Fix function attribute names (f.func_x -> f.__x__). +""" from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi index 12ed93f21223d..aa0c4ae23925f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi @@ -1,3 +1,7 @@ +"""Remove __future__ imports + +from __future__ import foo is replaced with an empty line. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi index aa3ccf50be9e8..d55be458e39d2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -1,3 +1,6 @@ +""" +Fixer that changes os.getcwdu() to os.getcwd(). +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi index f6f5a072e21b5..65a8d792c5213 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -1,3 +1,30 @@ +"""Fixer for has_key(). + +Calls to .has_key() methods are expressed in terms of the 'in' +operator: + + d.has_key(k) -> k in d + +CAVEATS: +1) While the primary target of this fixer is dict.has_key(), the + fixer will change any has_key() method call, regardless of its + class. + +2) Cases like this will not be converted: + + m = d.has_key + if m(k): + ... + + Only *calls* to has_key() are converted. While it is possible to + convert the above to something like + + m = d.__contains__ + if m(k): + ... + + this is currently not done. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi index 6b2723d09d436..8705524ac96b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -1,3 +1,30 @@ +"""Adjust some old Python 2 idioms to their modern counterparts. + +* Change some type comparisons to isinstance() calls: + type(x) == T -> isinstance(x, T) + type(x) is T -> isinstance(x, T) + type(x) != T -> not isinstance(x, T) + type(x) is not T -> not isinstance(x, T) + +* Change "while 1:" into "while True:". + +* Change both + + v = list(EXPR) + v.sort() + foo(v) + +and the more general + + v = EXPR + v.sort() + foo(v) + +into + + v = sorted(EXPR) + foo(v) +""" from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi index bf4b2d00925eb..9133c010df63f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi @@ -1,3 +1,14 @@ +"""Fixer for import statements. +If spam is being imported from the local directory, this import: + from spam import eggs +Becomes: + from .spam import eggs + +And this import: + import spam +Becomes: + from . 
import spam +""" from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Literal @@ -5,7 +16,10 @@ from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Node -def traverse_imports(names) -> Generator[str, None, None]: ... +def traverse_imports(names) -> Generator[str, None, None]: + """ + Walks over all the names imported in a dotted_as_names node. + """ class FixImport(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi index c747af529f440..1b189d8afe03b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi @@ -1,3 +1,5 @@ +"""Fix incompatible imports and module references. +""" from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi index 618ecd0424d86..e5d66c4e24a8f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -1,3 +1,6 @@ +"""Fix incompatible imports and module references that must be fixed after +fix_imports. +""" from typing import Final from . import fix_imports diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi index fc1279535bedb..46ef83f93265c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi @@ -1,3 +1,5 @@ +"""Fixer that changes input(...) into eval(input(...)). +""" from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi index 804b7b2517a50..1776f31b6a528 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi @@ -1,3 +1,7 @@ +"""Fixer for intern(). + +intern(s) -> sys.intern(s) +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi index 31eefd6253174..ba1e08af281a5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -1,3 +1,10 @@ +"""Fixer that cleans up a tuple argument to isinstance after the tokens +in it were fixed. This is mainly used to remove double occurrences of +tokens as a leftover of the long -> int / unicode -> str conversion. + +eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) + -> isinstance(x, int) +""" from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi index 229d86ee71bb7..1cf2f5627184e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -1,3 +1,11 @@ +""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and + itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) + + imports from itertools are fixed in fix_itertools_import.py + + If itertools is imported as something else (ie: import itertools as it; + it.izip(spam, eggs)) method calls will not get fixed. + """ from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi index 39a4da506867e..8991a1454bf2c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -1,3 +1,5 @@ +""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) +""" from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi index 9ccf2711d7d12..6e880f043f1e7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi @@ -1,3 +1,5 @@ +"""Fixer that turns 'long' into 'int' everywhere. +""" from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi index 6e60282cf0be5..d6625ce9d357d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi @@ -1,3 +1,20 @@ +"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there +exists a 'from future_builtins import map' statement in the top-level +namespace. + +As a special case, map(None, X) is changed into list(X). (This is +necessary because the semantics are changed in this case -- the new +map(None, X) is equivalent to [(x,) for x in X].) + +We avoid the transformation (except for the special case mentioned +above) if the map() call is directly contained in iter(<>), list(<>), +tuple(<>), sorted(<>), ...join(<>), or for V in <>:. + +NOTE: This is still not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi index 1b1ec82032b4f..e48cd179e941d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -1,15 +1,47 @@ +"""Fixer for __metaclass__ = X -> (metaclass=X) methods. 
+ + The various forms of classef (inherits nothing, inherits once, inherits + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. + +""" from collections.abc import Generator from typing import ClassVar, Literal from .. import fixer_base from ..pytree import Base -def has_metaclass(parent): ... -def fixup_parse_tree(cls_node) -> None: ... -def fixup_simple_stmt(parent, i, stmt_node) -> None: ... +def has_metaclass(parent): + """ we have to check the cls_node without changing it. + There are two possibilities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ +def fixup_parse_tree(cls_node) -> None: + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ +def fixup_simple_stmt(parent, i, stmt_node) -> None: + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. We just want the __metaclass__ part so we move + everything after the semi-colon into its own simple_stmt node + """ def remove_trailing_newline(node) -> None: ... def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... -def fixup_indent(suite) -> None: ... +def fixup_indent(suite) -> None: + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ class FixMetaclass(fixer_base.BaseFix): BM_compatible: ClassVar[Literal[True]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi index ca9b71e43f856..640a1cc38ced3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -1,3 +1,5 @@ +"""Fix bound method attributes (method.im_? -> method.__?__). +""" from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi index 6ff1220b04728..2a93bf7d29f15 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi @@ -1,3 +1,5 @@ +"""Fixer that turns <> into !=. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi index b13914ae8c018..c5b23da8913d5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi @@ -1,3 +1,5 @@ +"""Fixer for it.next() -> next(it), per PEP 3114. 
+""" from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi index 5c37fc12ef089..fcce615738ad1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -1,3 +1,5 @@ +"""Fixer for __nonzero__ -> __bool__ methods. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi index 113145e395f62..a96101bc9d8dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -1,3 +1,5 @@ +"""Fixer that turns 1L into 1, 0755 into 0o755. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi index b9863d38347be..f97d15ee3d7e7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi @@ -1,3 +1,13 @@ +"""Fixer for operator functions. + +operator.isCallable(obj) -> callable(obj) +operator.sequenceIncludes(obj) -> operator.contains(obj) +operator.isSequenceType(obj) -> isinstance(obj, collections.abc.Sequence) +operator.isMappingType(obj) -> isinstance(obj, collections.abc.Mapping) +operator.isNumberType(obj) -> isinstance(obj, numbers.Number) +operator.repeat(obj, n) -> operator.mul(obj, n) +operator.irepeat(obj, n) -> operator.imul(obj, n) +""" from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi index 237df6c5ff2c1..ee44f1c79aee4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi @@ -1,3 +1,7 @@ +"""Fixer that adds parentheses where they are required + +This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi index e9564b04ac75f..597172431ccb0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi @@ -1,3 +1,14 @@ +"""Fixer for print. + +Change: + 'print' into 'print()' + 'print ...' into 'print(...)' + 'print ... ,' into 'print(..., end=" ")' + 'print >>x, ...' 
into 'print(..., file=x)' + +No changes are applied if print_function is imported from __future__ + +""" from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi index e02c3080f4093..0867c8497e22c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi @@ -1,3 +1,25 @@ +"""Fixer for 'raise E, V, T' + +raise -> raise +raise E -> raise E +raise E, V -> raise E(V) +raise E, V, T -> raise E(V).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), V -> raise E(V) +raise "foo", V, T -> warns about string exceptions + + +CAVEATS: +1) "raise E, V" will be incorrectly translated if V is an exception + instance. The correct Python 3 idiom is + + raise E from V + + but since we can't detect instance-hood by syntax alone and since + any client code would have to be changed as well, we don't automate + this. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi index d1a0eb0e0a7ea..567d3f46b81d7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -1,3 +1,5 @@ +"""Fixer that changes raw_input(...) into input(...). +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi index f8ad876c21a69..2205476c64c7e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -1,3 +1,8 @@ +"""Fixer for reduce(). + +Makes sure reduce() is imported from the functools module if reduce is +used in that module. +""" from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi index 820075438eca1..e2bbb8a3da65e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi @@ -1,3 +1,7 @@ +"""Fixer for reload(). + +reload(s) -> importlib.reload(s) +""" from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi index 652d8f15ea1a9..e675428ccaf55 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi @@ -1,3 +1,8 @@ +"""Fix incompatible renames + +Fixes: + * sys.maxint -> sys.maxsize +""" from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi index 3b192d396dd68..4f4d0439a0372 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi @@ -1,3 +1,5 @@ +"""Fixer that transforms `xyzzy` into repr(xyzzy). +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi index 6962ff326f56a..c06fbc683f836 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -1,3 +1,6 @@ +""" +Optional fixer to transform set() calls to set literals. +""" from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi index ba914bcab5d6b..f2842de99afa8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -1,3 +1,5 @@ +"""Fixer for StandardError -> Exception. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi index 0fa1a47870872..f09393c84f46b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -1,3 +1,9 @@ +"""Fixer for sys.exc_{type, value, traceback} + +sys.exc_type -> sys.exc_info()[0] +sys.exc_value -> sys.exc_info()[1] +sys.exc_traceback -> sys.exc_info()[2] +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi index 4c99855e5c373..a1f1e6aaa6057 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi @@ -1,3 +1,11 @@ +"""Fixer for generator.throw(E, V, T). + +g.throw(E) -> g.throw(E) +g.throw(E, V) -> g.throw(E(V)) +g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) + +g.throw("foo"[, V[, T]]) will warn about string exceptions. +""" from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi index 7f4f7f4e8656e..e98aa87078d2a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -1,3 +1,21 @@ +"""Fixer for function definitions with tuple parameters. + +def func(((a, b), c), d): + ... + + -> + +def func(x, d): + ((a, b), c) = x + ... + +It will also support lambdas: + + lambda (x, y): x + y -> lambda t: t[0] + t[1] + + # The parens are a syntax error in Python 3 + lambda (x): x + y -> lambda x: x + y +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi index e26dbec71a97d..9d8f687cae2a9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi @@ -1,3 +1,20 @@ +"""Fixer for removing uses of the types module. + +These work for only the known names in the types module. The forms above +can include types. or not. ie, It is assumed the module is imported either as: + + import types + from types import ... # either * or specific types + +The import statements are not modified. + +There should be another fixer that handles at least the following constants: + + type([]) -> list + type(()) -> tuple + type('') -> str + +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi index 85d1315213b96..d4a63f023455a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -1,3 +1,12 @@ +"""Fixer for unicode. + +* Changes unicode to str and unichr to chr. + +* If "...\\u..." is not unicode literal change it into "...\\\\u...". + +* Change u"..." into "...". + +""" from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi index abdcc0f62970f..f1dd7cc4a99ba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -1,3 +1,7 @@ +"""Fix changes imports of urllib which are now incompatible. + This is rather similar to fix_imports, but because of the more + complex nature of the fixing for urllib, it has its own fixer. +""" from collections.abc import Generator from typing import Final, Literal @@ -9,7 +13,17 @@ def build_pattern() -> Generator[str, None, None]: ... class FixUrllib(FixImports): def build_pattern(self): ... - def transform_import(self, node, results) -> None: ... - def transform_member(self, node, results): ... - def transform_dot(self, node, results) -> None: ... + def transform_import(self, node, results) -> None: + """Transform for the basic import case. Replaces the old + import name with a comma separated list of its + replacements. + """ + def transform_member(self, node, results): + """Transform for imports of specific module elements. 
Replaces + the module to be imported from with the appropriate new + module. + """ + def transform_dot(self, node, results) -> None: + """Transform for calls to module members in code. +""" def transform(self, node, results) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi index 4ce5cb2c4ac16..be919631b00de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -1,3 +1,9 @@ +"""Fixer that changes 'a ,b' into 'a, b'. + +This also changes '{a :b}' into '{a: b}', but does not touch other +uses of colons. It does not touch other uses of whitespace. + +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi index 71318b7660b68..04be7d3590ec3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -1,3 +1,5 @@ +"""Fixer that changes xrange(...) into range(...). +""" from _typeshed import Incomplete, StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi index b4794143a0031..87505bec7d7d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -1,3 +1,7 @@ +"""Fix "for x in f.xreadlines()" -> "for x in f". + +This fixer will also convert g(f.xreadlines) into g(f.__iter__). +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi index 805886ee31805..82f3dc0c02bee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi @@ -1,3 +1,11 @@ +""" +Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) +unless there exists a 'from future_builtins import zip' statement in the +top-level namespace. + +We avoid the transformation if the zip() call is directly contained in +iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. +""" from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi index 5b7fdfca5d65d..4f3b31db89210 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi @@ -1,3 +1,6 @@ +""" +Main program for 2to3. +""" from _typeshed import FileDescriptorOrPath from collections.abc import Container, Iterable, Iterator, Mapping, Sequence from logging import _ExcInfoType @@ -5,9 +8,19 @@ from typing import AnyStr, Literal from . import refactor as refactor -def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ... +def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: + """Return a unified diff of two strings. 
+""" class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + """ + A refactoring tool that can avoid overwriting its input files. + Prints output to stdout. + + Output files can optionally be written to a different directory and or + have an extra file suffix appended to their name for use in situations + where you do not want to replace the input files. + """ nobackups: bool show_diffs: bool def __init__( @@ -20,7 +33,25 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): input_base_dir: str = "", output_dir: str = "", append_suffix: str = "", - ) -> None: ... + ) -> None: + """ + Args: + fixers: A list of fixers to import. + options: A dict with RefactoringTool configuration. + explicit: A list of fixers to run even if they are explicit. + nobackups: If true no backup '.bak' files will be created for those + files that are being refactored. + show_diffs: Should diffs of the refactoring be printed to stdout? + input_base_dir: The base directory for all input files. This class + will strip this path prefix off of filenames before substituting + it with output_dir. Only meaningful if output_dir is supplied. + All files processed by refactor() must start with this path. + output_dir: If supplied, all converted files will be written into + this directory tree instead of input_base_dir. + append_suffix: If supplied, all files output by this tool will have + this appended to their filename. Useful for changing .py to + .py3 for example by passing append_suffix='3'. + """ # Same as super.log_error and Logger.error def log_error( # type: ignore[override] self, @@ -39,4 +70,13 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ... # type: ignore[override] def warn(msg: object) -> None: ... -def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ... +def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi index de8a874f434d0..18e1656194b96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -1,3 +1,5 @@ +"""The pgen2 package. +""" from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi index dea13fb9d0f8b..426cd416fa705 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -1,3 +1,8 @@ +"""Parser driver. + +This provides a high-level interface to parse a file into a syntax tree. + +""" from _typeshed import StrPath from collections.abc import Iterable from logging import Logger @@ -16,12 +21,24 @@ class Driver: def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... 
def parse_tokens( self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False - ) -> _NL: ... - def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... - def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... - def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... - def parse_string(self, text: str, debug: bool = False) -> _NL: ... + ) -> _NL: + """Parse a series of tokens and return the syntax tree. +""" + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: + """Parse a stream and return the syntax tree. +""" + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: + """Parse a stream and return the syntax tree. +""" + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: + """Parse a file and return the syntax tree. +""" + def parse_string(self, text: str, debug: bool = False) -> _NL: + """Parse a string and return the syntax tree. +""" def load_grammar( gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None -) -> Grammar: ... +) -> Grammar: + """Load the grammar (maybe from a pickle). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi index bef0a7922683b..33a215e8e43a9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,3 +1,13 @@ +"""This module defines the data structures used to represent a grammar. + +These are a bit arcane because they are derived from the data +structures used by Python's 'pgen' parser generator. + +There's also a table here mapping operators to their names in the +token module; the Python tokenize module reports all operators as the +fallback token code OP, but the parser needs the actual token code. + +""" from _typeshed import StrPath from typing_extensions import Self, TypeAlias @@ -6,6 +16,58 @@ _DFA: TypeAlias = list[list[tuple[int, int]]] _DFAS: TypeAlias = tuple[_DFA, dict[int, int]] class Grammar: + """Pgen parsing tables conversion class. + + Once initialized, this class supplies the grammar tables for the + parsing engine implemented by parse.py. The parsing engine + accesses the instance variables directly. The class here does not + provide initialization of the tables; several subclasses exist to + do this (see the conv and pgen modules). + + The load() method reads the tables from a pickle file, which is + much faster than the other ways offered by subclasses. The pickle + file is written by calling dump() (after loading the grammar + tables using a subclass). The report() method prints a readable + representation of the tables to stdout, for debugging. + + The instance variables are as follows: + + symbol2number -- a dict mapping symbol names to numbers. Symbol + numbers are always 256 or higher, to distinguish + them from token numbers, which are between 0 and + 255 (inclusive). + + number2symbol -- a dict mapping numbers to symbol names; + these two are each other's inverse. + + states -- a list of DFAs, where each DFA is a list of + states, each state is a list of arcs, and each + arc is a (i, j) pair where i is a label and j is + a state number. The DFA number is the index into + this list. (This name is slightly confusing.) 
+ Final states are represented by a special arc of + the form (0, j) where j is its own state number. + + dfas -- a dict mapping symbol numbers to (DFA, first) + pairs, where DFA is an item from the states list + above, and first is a set of tokens that can + begin this grammar rule (represented by a dict + whose values are always 1). + + labels -- a list of (x, y) pairs where x is either a token + number or a symbol number, and y is either None + or a string; the strings are keywords. The label + number is the index in this list; label numbers + are used to mark state transitions (arcs) in the + DFAs. + + start -- the number of the grammar's start symbol. + + keywords -- a dict mapping keyword strings to arc labels. + + tokens -- a dict mapping token numbers to arc labels. + + """ symbol2number: dict[str, int] number2symbol: dict[int, str] states: list[_DFA] @@ -15,10 +77,19 @@ class Grammar: tokens: dict[int, int] symbol2label: dict[str, int] start: int - def dump(self, filename: StrPath) -> None: ... - def load(self, filename: StrPath) -> None: ... - def copy(self) -> Self: ... - def report(self) -> None: ... + def dump(self, filename: StrPath) -> None: + """Dump the grammar tables to a pickle file. +""" + def load(self, filename: StrPath) -> None: + """Load the grammar tables from a pickle file. +""" + def copy(self) -> Self: + """ + Copy the grammar. + """ + def report(self) -> None: + """Dump the grammar tables to standard output, for debugging. +""" opmap_raw: str opmap: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi index c3fabe8a5177f..1d0be4b33d493 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi @@ -1,3 +1,5 @@ +"""Safely evaluate Python string literals without using eval(). +""" from re import Match simple_escapes: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi index 320c5f018d43f..494e0996c09c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -1,3 +1,11 @@ +"""Parser engine for the grammar tables generated by pgen. + +The grammar table must be loaded first. + +See Parser/parser.c in the Python distribution for additional info on +how this parsing engine works. + +""" from _typeshed import Incomplete from collections.abc import Sequence from typing_extensions import TypeAlias @@ -9,6 +17,8 @@ from .grammar import _DFAS, Grammar _Context: TypeAlias = Sequence[Incomplete] class ParseError(Exception): + """Exception to signal the parser is stuck. +""" msg: str type: int value: str | None @@ -16,15 +26,93 @@ class ParseError(Exception): def __init__(self, msg: str, type: int, value: str | None, context: _Context) -> None: ... class Parser: + """Parser engine. + + The proper usage sequence is: + + p = Parser(grammar, [converter]) # create instance + p.setup([start]) # prepare for parsing + : + if p.addtoken(...): # parse a token; may raise ParseError + break + root = p.rootnode # root of abstract syntax tree + + A Parser instance may be reused by calling setup() repeatedly. 
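As a rough illustration of how the ParseError declared above surfaces in practice, the sketch below drives the engine indirectly through the Driver from driver.pyi rather than calling addtoken() by hand; it assumes a Python that still ships lib2to3, and the broken input is an arbitrary example.

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver, parse

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    try:
        d.parse_string("def broken(:\n    pass\n")
    except parse.ParseError as err:
        # msg/type/value mirror the attributes declared on ParseError above.
        print(err.msg, err.type, repr(err.value))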
+ + A Parser instance contains state pertaining to the current token + sequence, and should not be used concurrently by different threads + to parse separate token sequences. + + See driver.py for how to get input tokens by tokenizing a file or + string. + + Parsing is complete when addtoken() returns True; the root of the + abstract syntax tree can then be retrieved from the rootnode + instance variable. When a syntax error occurs, addtoken() raises + the ParseError exception. There is no error recovery; the parser + cannot be used after a syntax error was reported (but it can be + reinitialized by calling setup()). + + """ grammar: Grammar convert: _Convert stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None used_names: set[str] - def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... - def setup(self, start: int | None = None) -> None: ... - def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... - def classify(self, type: int, value: str | None, context: _Context) -> int: ... - def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... - def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... - def pop(self) -> None: ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: + """Constructor. + + The grammar argument is a grammar.Grammar instance; see the + grammar module for more information. + + The parser is not ready yet for parsing; you must call the + setup() method to get it started. + + The optional convert argument is a function mapping concrete + syntax tree nodes to abstract syntax tree nodes. If not + given, no conversion is done and the syntax tree produced is + the concrete syntax tree. If given, it must be a function of + two arguments, the first being the grammar (a grammar.Grammar + instance), and the second being the concrete syntax tree node + to be converted. The syntax tree is converted from the bottom + up. + + A concrete syntax tree node is a (type, value, context, nodes) + tuple, where type is the node type (a token or symbol number), + value is None for symbols and a string for tokens, context is + None or an opaque value used for error reporting (typically a + (lineno, offset) pair), and nodes is a list of children for + symbols, and None for tokens. + + An abstract syntax tree node may be anything; this is entirely + up to the converter function. + + """ + def setup(self, start: int | None = None) -> None: + """Prepare for parsing. + + This *must* be called before starting to parse. + + The optional argument is an alternative start symbol; it + defaults to the grammar's start symbol. + + You can use a Parser instance to parse any number of programs; + each time you call setup() the parser is reset to an initial + state determined by the (implicit or explicit) start symbol. + + """ + def addtoken(self, type: int, value: str | None, context: _Context) -> bool: + """Add a token; return True iff this is the end of the program. +""" + def classify(self, type: int, value: str | None, context: _Context) -> int: + """Turn a token into a label. (Internal) +""" + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: + """Shift a token. (Internal) +""" + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: + """Push a nonterminal. (Internal) +""" + def pop(self) -> None: + """Pop a nonterminal. 
(Internal) +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi index 6898517acee64..bf8c78a97760d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi @@ -1,3 +1,5 @@ +"""Token constants (from "token.h"). +""" from typing import Final ENDMARKER: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index af54de1b51d33..a46433b312451 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -1,3 +1,27 @@ +"""Tokenization help for Python programs. + +generate_tokens(readline) is a generator that breaks a stream of +text into Python tokens. It accepts a readline-like method which is called +repeatedly to get the next line of input (or "" for EOF). It generates +5-tuples with these members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators + +Older entry points + tokenize_loop(readline, tokeneater) + tokenize(readline, tokeneater=printtoken) +are the same, except instead of generating tokens, tokeneater is a callback +function to which the 5 fields described above are passed as 5 arguments, +each time a new token is found. +""" from collections.abc import Callable, Iterable, Iterator from typing_extensions import TypeAlias @@ -82,7 +106,19 @@ _TokenInfo: TypeAlias = tuple[int, str, _Coord, _Coord, str] class TokenError(Exception): ... class StopTokenizing(Exception): ... -def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ... +def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: + """ + The tokenize() function accepts two parameters: one representing the + input stream, and one providing an output mechanism for tokenize(). + + The first parameter, readline, must be a callable object which provides + the same interface as the readline() method of built-in file objects. + Each call to the function should return one line of input as a string. + + The second parameter, tokeneater, must also be a callable object. It is + called once for each token, with five arguments, corresponding to the + tuples generated by generate_tokens(). + """ class Untokenizer: tokens: list[str] @@ -92,5 +128,37 @@ class Untokenizer: def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... -def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... -def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... +def untokenize(iterable: Iterable[_TokenInfo]) -> str: + """Transform tokens back into Python source code. + + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. 
+ + Round-trip invariant for full input: + Untokenized source will match input source exactly + + Round-trip invariant for limited input: + # Output text will tokenize the back to the input + t1 = [tok[:2] for tok in generate_tokens(f.readline)] + newcode = untokenize(t1) + readline = iter(newcode.splitlines(1)).next + t2 = [tok[:2] for tokin generate_tokens(readline)] + assert t1 == t2 + """ +def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: + """ + The generate_tokens() generator requires one argument, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as a string. Alternately, readline + can be a callable function terminating with StopIteration: + readline = open(myfile).next # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + physical line. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi index 86c74b54888af..9add5c89d95fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,7 +1,14 @@ +"""Export the Python grammar and symbols. +""" from .pgen2.grammar import Grammar class Symbols: - def __init__(self, grammar: Grammar) -> None: ... + def __init__(self, grammar: Grammar) -> None: + """Initializer. + + Creates an attribute for each grammar symbol (nonterminal), + whose value is the symbol's type (an int >= 256). + """ class python_symbols(Symbols): and_expr: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi index 51bdbc75e1421..18b945242f0be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi @@ -1,3 +1,11 @@ +""" +Python parse tree definitions. + +This is a very concrete parse tree; we need to keep every token and +even the comments and whitespace between tokens. + +There's also a pattern matching implementation here. +""" from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused from abc import abstractmethod from collections.abc import Iterable, Iterator, MutableSequence @@ -17,35 +25,93 @@ HUGE: Final = 0x7FFFFFFF def type_repr(type_num: int) -> str | int: ... class Base: + """ + Abstract base class for Node and Leaf. + + This provides some default functionality and boilerplate using the + template pattern. + + A node may be a subnode of at most one parent. + """ type: int parent: Node | None prefix: str children: list[_NL] was_changed: bool was_checked: bool - def __eq__(self, other: object) -> bool: ... + def __eq__(self, other: object) -> bool: + """ + Compare two nodes for equality. + + This calls the method _eq(). + """ __hash__: ClassVar[None] # type: ignore[assignment] @abstractmethod - def _eq(self, other: Base) -> bool: ... + def _eq(self, other: Base) -> bool: + """ + Compare two nodes for equality. + + This is called by __eq__ and __ne__. 
It is only called if the two nodes + have the same type. This must be implemented by the concrete subclass. + Nodes should be considered equal if they have the same structure, + ignoring the prefix string and other context information. + """ @abstractmethod - def clone(self) -> Self: ... + def clone(self) -> Self: + """ + Return a cloned (deep) copy of self. + + This must be implemented by the concrete subclass. + """ @abstractmethod - def post_order(self) -> Iterator[Self]: ... + def post_order(self) -> Iterator[Self]: + """ + Return a post-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ @abstractmethod - def pre_order(self) -> Iterator[Self]: ... - def replace(self, new: _NL | list[_NL]) -> None: ... - def get_lineno(self) -> int: ... + def pre_order(self) -> Iterator[Self]: + """ + Return a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + def replace(self, new: _NL | list[_NL]) -> None: + """Replace this node with a new one in the parent. +""" + def get_lineno(self) -> int: + """Return the line number which generated the invocant node. +""" def changed(self) -> None: ... - def remove(self) -> int | None: ... + def remove(self) -> int | None: + """ + Remove the node from the tree. Returns the position of the node in its + parent's children before it was removed. + """ @property - def next_sibling(self) -> _NL | None: ... + def next_sibling(self) -> _NL | None: + """ + The node immediately following the invocant in their parent's children + list. If the invocant does not have a next sibling, it is None + """ @property - def prev_sibling(self) -> _NL | None: ... + def prev_sibling(self) -> _NL | None: + """ + The node immediately preceding the invocant in their parent's children + list. If the invocant does not have a previous sibling, it is None. + """ def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... - def get_suffix(self) -> str: ... + def get_suffix(self) -> str: + """ + Return the string immediately following the invocant node. This is + effectively equivalent to node.next_sibling.prefix + """ class Node(Base): + """Concrete implementation for interior nodes. +""" fixers_applied: MutableSequence[BaseFix] | None # Is Unbound until set in refactor.RefactoringTool future_features: frozenset[Incomplete] @@ -58,17 +124,52 @@ class Node(Base): context: Unused = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] | None = None, - ) -> None: ... - def _eq(self, other: Base) -> bool: ... - def clone(self) -> Node: ... - def post_order(self) -> Iterator[Self]: ... - def pre_order(self) -> Iterator[Self]: ... - def set_child(self, i: int, child: _NL) -> None: ... - def insert_child(self, i: int, child: _NL) -> None: ... - def append_child(self, child: _NL) -> None: ... - def __unicode__(self) -> str: ... + ) -> None: + """ + Initializer. + + Takes a type constant (a symbol number >= 256), a sequence of + child nodes, and an optional context keyword argument. + + As a side effect, the parent pointers of the children are updated. + """ + def _eq(self, other: Base) -> bool: + """Compare two nodes for equality. +""" + def clone(self) -> Node: + """Return a cloned (deep) copy of self. +""" + def post_order(self) -> Iterator[Self]: + """Return a post-order iterator for the tree. +""" + def pre_order(self) -> Iterator[Self]: + """Return a pre-order iterator for the tree. +""" + def set_child(self, i: int, child: _NL) -> None: + """ + Equivalent to 'node.children[i] = child'. 
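To make the Node/Leaf behaviour described above concrete, here is a short sketch (again assuming lib2to3 is available) showing that the tree keeps every token, comment and whitespace prefix, so str() reproduces the input exactly.

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    src = "x = 1  # keep me\n"
    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string(src)

    print(str(tree) == src)            # True: comments and spacing live in prefixes
    for leaf in tree.leaves():
        print(leaf.type, repr(leaf.value), repr(leaf.prefix))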
This method also sets the + child's parent attribute appropriately. + """ + def insert_child(self, i: int, child: _NL) -> None: + """ + Equivalent to 'node.children.insert(i, child)'. This method also sets + the child's parent attribute appropriately. + """ + def append_child(self, child: _NL) -> None: + """ + Equivalent to 'node.children.append(child)'. This method also sets the + child's parent attribute appropriately. + """ + def __unicode__(self) -> str: + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ class Leaf(Base): + """Concrete implementation for leaf nodes. +""" lineno: int column: int value: str @@ -80,39 +181,182 @@ class Leaf(Base): context: _Context | None = None, prefix: str | None = None, fixers_applied: MutableSequence[BaseFix] = [], - ) -> None: ... - def _eq(self, other: Base) -> bool: ... - def clone(self) -> Leaf: ... - def post_order(self) -> Iterator[Self]: ... - def pre_order(self) -> Iterator[Self]: ... - def __unicode__(self) -> str: ... + ) -> None: + """ + Initializer. + + Takes a type constant (a token number < 256), a string value, and an + optional context keyword argument. + """ + def _eq(self, other: Base) -> bool: + """Compare two nodes for equality. +""" + def clone(self) -> Leaf: + """Return a cloned (deep) copy of self. +""" + def post_order(self) -> Iterator[Self]: + """Return a post-order iterator for the tree. +""" + def pre_order(self) -> Iterator[Self]: + """Return a pre-order iterator for the tree. +""" + def __unicode__(self) -> str: + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ + +def convert(gr: Grammar, raw_node: _RawNode) -> _NL: + """ + Convert raw node information to a Node or Leaf instance. -def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is build + strictly bottom-up. + """ class BasePattern: + """ + A pattern is a tree matching pattern. + + It looks for a specific node type (token or symbol), and + optionally for a specific content. + + This is an abstract base class. There are three concrete + subclasses: + + - LeafPattern matches a single leaf node; + - NodePattern matches a single node (usually non-leaf); + - WildcardPattern matches a sequence of nodes of variable length. + """ type: int content: str | None name: str | None - def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns - def match(self, node: _NL, results: _Results | None = None) -> bool: ... - def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ... - def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ... + def optimize(self) -> BasePattern: # sic, subclasses are free to optimize themselves into different patterns + """ + A subclass can define this as a hook for optimizations. + + Returns either self or another node with the same effect. + """ + def match(self, node: _NL, results: _Results | None = None) -> bool: + """ + Does this pattern exactly match a node? + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + Default implementation for non-wildcard patterns. 
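A minimal sketch of the pattern classes just introduced, matching an assignment of a literal number; python_symbols.expr_stmt and the token constants come from the pygram and token modules covered earlier, and the result keys "target" and "value" are arbitrary names chosen for this example.

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver, token

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")

    # content must match the node's children exactly, per the docstrings above.
    pattern = pytree.NodePattern(
        type=pygram.python_symbols.expr_stmt,
        content=[
            pytree.LeafPattern(token.NAME, name="target"),
            pytree.LeafPattern(token.EQUAL),
            pytree.LeafPattern(token.NUMBER, name="value"),
        ],
    )
    results = {}
    for node in tree.pre_order():
        if pattern.match(node, results):
            print(results["target"].value, "=", results["value"].value)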
+ """ + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: + """ + Does this pattern exactly match a sequence of nodes? + + Default implementation for non-wildcard patterns. + """ + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: + """ + Generator yielding all matches for this pattern. + + Default implementation for non-wildcard patterns. + """ class LeafPattern(BasePattern): - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: + """ + Initializer. Takes optional type, content, and name. + + The type, if given must be a token type (< 256). If not given, + this matches any *leaf* node; the content may still be required. + + The content, if given, must be a string. + + If a name is given, the matching node is stored in the results + dict under that key. + """ class NodePattern(BasePattern): wildcards: bool - def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: + """ + Initializer. Takes optional type, content, and name. + + The type, if given, must be a symbol type (>= 256). If the + type is None this matches *any* single node (leaf or not), + except if content is not None, in which it only matches + non-leaf nodes that also match the content pattern. + + The content, if not None, must be a sequence of Patterns that + must match the node's children exactly. If the content is + given, the type must not be None. + + If a name is given, the matching node is stored in the results + dict under that key. + """ class WildcardPattern(BasePattern): + """ + A wildcard pattern can match zero or more nodes. + + This has all the flexibility needed to implement patterns like: + + .* .+ .? .{m,n} + (a b c | d e | f) + (...)* (...)+ (...)? (...){m,n} + + except it always uses non-greedy matching. + """ min: int max: int - def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: + """ + Initializer. + + Args: + content: optional sequence of subsequences of patterns; + if absent, matches one node; + if present, each subsequence is an alternative [*] + min: optional minimum number of times to match, default 0 + max: optional maximum number of times to match, default HUGE + name: optional name assigned to this match + + [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is + equivalent to (a b c | d e | f g h); if content is None, + this is equivalent to '.' in regular expression terms. + The min and max parameters work as follows: + min=0, max=maxint: .* + min=1, max=maxint: .+ + min=0, max=1: .? + min=1, max=1: . + If content is not None, replace the dot with the parenthesized + list of alternatives, e.g. (a b c | d e | f g h)* + """ class NegatedPattern(BasePattern): - def __init__(self, content: str | None = None) -> None: ... + def __init__(self, content: str | None = None) -> None: + """ + Initializer. + + The argument is either a pattern or None. If it is None, this + only matches an empty sequence (effectively '$' in regex + lingo). 
If it is not None, this matches whenever the argument + pattern doesn't have any matches. + """ def generate_matches( patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL] -) -> Iterator[tuple[int, _Results]]: ... +) -> Iterator[tuple[int, _Results]]: + """ + Generator yielding matches for a sequence of patterns and nodes. + + Args: + patterns: a sequence of patterns + nodes: a sequence of nodes + + Yields: + (count, results) tuples where: + count: the entire sequence of patterns matches nodes[:count]; + results: dict containing named submatches. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi index a7f3825406488..0d53fe6534513 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi @@ -1,3 +1,9 @@ +"""Refactoring framework. + +Used as a main program, this can refactor any number of files and/or +recursively descend down directories. Imported as a module, this +provides infrastructure to write your own refactoring tool. +""" from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem from collections.abc import Container, Generator, Iterable, Mapping from logging import Logger, _ExcInfoType @@ -11,10 +17,17 @@ from .pgen2.driver import Driver from .pgen2.grammar import Grammar from .pytree import Node -def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... -def get_fixers_from_package(pkg_name: str) -> list[str]: ... +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: + """Return a sorted list of all available fix names in the given package. +""" +def get_fixers_from_package(pkg_name: str) -> list[str]: + """ + Return the fully qualified names for fixers in the package pkg_name. + """ -class FixerError(Exception): ... +class FixerError(Exception): + """A fixer could not be loaded. +""" class RefactoringTool: CLASS_PREFIX: ClassVar[str] @@ -37,40 +50,147 @@ class RefactoringTool: bmi_post_order: list[BaseFix] def __init__( self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None - ) -> None: ... - def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ... - def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ... + ) -> None: + """Initializer. + + Args: + fixer_names: a list of fixers to import + options: a dict with configuration. + explicit: a list of fixers to run even if they are explicit. + """ + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: + """Inspects the options to load the requested patterns and handlers. + + Returns: + (pre_order, post_order), where pre_order is the list of fixers that + want a pre-order AST traversal, and post_order is the list that want + post-order traversal. + """ + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: + """Called when an error occurs. +""" @overload - def log_message(self, msg: object) -> None: ... + def log_message(self, msg: object) -> None: + """Hook to log a message. +""" @overload def log_message(self, msg: str, *args: object) -> None: ... @overload def log_debug(self, msg: object) -> None: ... @overload def log_debug(self, msg: str, *args: object) -> None: ... - def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ... 
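Tying the pieces together, a hedged sketch of running the refactoring framework over a string the way 2to3 does; it assumes an interpreter that still ships lib2to3 and simply loads every non-explicit fixer from the standard lib2to3.fixes package.

    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    fixers = get_fixers_from_package("lib2to3.fixes")
    rt = RefactoringTool(fixers)
    # refactor_string() returns the refactored tree, or None if parsing failed.
    tree = rt.refactor_string("print 'hello'\n", "<example>")
    print(str(tree))        # print('hello')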
- def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... - def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... - def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ... - def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ... - def refactor_string(self, data: str, name: str) -> Node | None: ... + def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: + """Called with the old version, new version, and filename of a + refactored file. +""" + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: + """Refactor a list of files and directories. +""" + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: + """Descends down a directory and refactor every Python file found. + + Python files are assumed to have a .py extension. + + Files and subdirectories starting with '.' are skipped. + """ + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: + """ + Do our best to decode a Python source file correctly. + """ + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: + """Refactors a file. +""" + def refactor_string(self, data: str, name: str) -> Node | None: + """Refactor a given input string. + + Args: + data: a string holding the code to be refactored. + name: a human-readable name for use in error/log messages. + + Returns: + An AST corresponding to the refactored input stream; None if + there were errors during the parse. + """ def refactor_stdin(self, doctests_only: bool = False) -> None: ... - def refactor_tree(self, tree: Node, name: str) -> bool: ... - def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ... + def refactor_tree(self, tree: Node, name: str) -> bool: + """Refactors a parse tree (modifying the tree in place). + + For compatible patterns the bottom matcher module is + used. Otherwise the tree is traversed node-to-node for + matches. + + Args: + tree: a pytree.Node instance representing the root of the tree + to be refactored. + name: a human-readable name for this tree. + + Returns: + True if the tree was modified, False otherwise. + """ + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: + """Traverse an AST, applying a set of fixers to each node. + + This is a helper method for refactor_tree(). + + Args: + fixers: a list of fixer instances. + traversal: a generator that yields AST nodes. + + Returns: + None + """ def processed_file( self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None - ) -> None: ... - def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ... + ) -> None: + """ + Called when a file has been refactored and there may be changes. + """ + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: + """Writes a string to a file. + + It first shows a unified diff between the old text and the new text, and + then rewrites the file; the latter is only done if the write option is + set. + """ PS1: Final = ">>> " PS2: Final = "... 
" - def refactor_docstring(self, input: str, filename: StrPath) -> str: ... - def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ... + def refactor_docstring(self, input: str, filename: StrPath) -> str: + """Refactors a docstring, looking for doctests. + + This returns a modified version of the input string. It looks + for doctests, which start with a ">>>" prompt, and may be + continued with "..." prompts, as long as the "..." is indented + the same as the ">>>". + + (Unfortunately we can't use the doctest module's parser, + since, like most parsers, it is not geared towards preserving + the original source.) + """ + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: + """Refactors one doctest. + + A doctest is given as a block of lines, the first of which starts + with ">>>" (possibly indented), while the remaining lines start + with "..." (identically indented). + + """ def summarize(self) -> None: ... - def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ... + def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: + """Parses a block into a tree. + + This is necessary to get correct line number / offset information + in the parser diagnostics and embedded into the parse tree. + """ def wrap_toks( self, block: Iterable[str], lineno: int, indent: int - ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ... - def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... + ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: + """Wraps a tokenize stream to systematically modify start/end. +""" + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: + """Generates lines as expected by tokenize from a list of lines. + + This strips the first len(indent + self.PS1) characters off each line. + """ class MultiprocessingUnsupported(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi index 5379a21e7d123..aca0d8d35698f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi @@ -1,3 +1,9 @@ +"""Cache lines from Python source files. + +This is intended to read lines from modules imported -- hence if a filename +is not found, it will look down the module search path for a file by +that name. +""" from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias @@ -11,9 +17,36 @@ _SourceLoader: TypeAlias = tuple[Callable[[], str | None]] cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented -def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... -def clearcache() -> None: ... -def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... -def checkcache(filename: str | None = None) -> None: ... -def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... -def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: + """Get a line for a Python source file from the cache. +Update the cache if it doesn't contain an entry for this file already. +""" +def clearcache() -> None: + """Clear the cache entirely. 
+""" +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: + """Get the lines for a Python source file from the cache. +Update the cache if it doesn't contain an entry for this file already. +""" +def checkcache(filename: str | None = None) -> None: + """Discard cache entries that are out of date. +(This is not checked upon each call!) +""" +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: + """Update a cache entry and return its list of lines. +If something's wrong, print a message, discard the cache entry, +and return an empty list. +""" +def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: + """Seed the cache for filename with module_globals. + +The module loader will be asked for the source only when getlines is +called, not immediately. + +If there is an entry in the cache already, it is not altered. + +:return: True if a lazy load is registered in the cache, + otherwise False. To register such a load a module loader with a + get_source method must be found, the filename must be a cacheable + filename, and the filename must not be already cached. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi index fae9f849b6373..bc61693520760 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi @@ -1,3 +1,14 @@ +"""Locale support module. + +The module provides low-level access to the C lib's locale APIs and adds high +level number formatting APIs as well as a locale aliasing engine to complement +these. + +The aliasing engine includes support for many commonly used locale names and +maps them to values suitable for passing to the C lib's setlocale() function. It +also includes default encodings for all supported locale names. + +""" import sys from _locale import ( CHAR_MAX as CHAR_MAX, @@ -131,31 +142,122 @@ class Error(Exception): ... def getdefaultlocale( envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") -) -> tuple[_str | None, _str | None]: ... -def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... -def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... -def getpreferredencoding(do_setlocale: bool = True) -> _str: ... -def normalize(localename: _str) -> _str: ... +) -> tuple[_str | None, _str | None]: + """Tries to determine the default locale settings and returns +them as tuple (language code, encoding). + +According to POSIX, a program which has not called +setlocale(LC_ALL, "") runs using the portable 'C' locale. +Calling setlocale(LC_ALL, "") lets it use the default locale as +defined by the LANG variable. Since we don't want to interfere +with the current locale setting we thus emulate the behavior +in the way described above. + +To maintain compatibility with other platforms, not only the +LANG variable is tested, but a list of variables given as +envvars parameter. The first found to be defined will be +used. envvars defaults to the search path used in GNU gettext; +it must always contain the variable name 'LANG'. + +Except for the code 'C', the language code corresponds to RFC +1766. code and encoding can be None in case the values cannot +be determined. + +""" +def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: + """Returns the current setting for the given locale category as +tuple (language code, encoding). 
+ +category may be one of the LC_* value except LC_ALL. It +defaults to LC_CTYPE. + +Except for the code 'C', the language code corresponds to RFC +1766. code and encoding can be None in case the values cannot +be determined. + +""" +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: + """Set the locale for the given category. The locale can be +a string, an iterable of two strings (language code and encoding), +or None. + +Iterables are converted to strings using the locale aliasing +engine. Locale strings are passed directly to the C lib. + +category may be given as one of the LC_* values. + +""" +def getpreferredencoding(do_setlocale: bool = True) -> _str: + """Return the charset that the user is likely using, +according to the system configuration. +""" +def normalize(localename: _str) -> _str: + """Returns a normalized locale code for the given locale +name. + +The returned locale code is formatted for use with +setlocale(). + +If normalization fails, the original name is returned +unchanged. + +If the given encoding is not known, the function defaults to +the default encoding for the locale code just like setlocale() +does. + +""" if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `locale.setlocale(locale.LC_ALL, '')` instead.") - def resetlocale(category: int = ...) -> None: ... + def resetlocale(category: int = ...) -> None: + """ Sets the locale for category to the default setting. + + The default setting is determined by calling + getdefaultlocale(). category defaults to LC_ALL. + + """ else: - def resetlocale(category: int = ...) -> None: ... + def resetlocale(category: int = ...) -> None: + """ Sets the locale for category to the default setting. + + The default setting is determined by calling + getdefaultlocale(). category defaults to LC_ALL. + + """ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.7; removed in Python 3.12. Use `locale.format_string()` instead.") def format( percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any - ) -> _str: ... - -def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... -def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... -def delocalize(string: _str) -> _str: ... -def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... -def atoi(string: _str) -> int: ... -def str(val: float) -> _str: ... + ) -> _str: + """Deprecated, use format_string instead. +""" + +def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: + """Formats a string in the same way that the % formatting would use, +but takes the current locale into account. + +Grouping is applied if the third parameter is true. +Conversion uses monetary thousands separator and grouping strings if +forth parameter monetary is true. +""" +def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: + """Formats val according to the currency settings +in the current locale. +""" +def delocalize(string: _str) -> _str: + """Parses a string as a normalized number according to the locale settings. +""" +def atof(string: _str, func: Callable[[_str], float] = ...) -> float: + """Parses a string as a float according to the locale settings. 
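A brief sketch of the usual calling pattern for these functions; it assumes the process environment names a locale the C library supports (otherwise setlocale() raises locale.Error), and the exact separators in the output depend on that locale.

    import locale

    locale.setlocale(locale.LC_ALL, "")          # adopt the user's default locale
    print(locale.getlocale())
    grouped = locale.format_string("%d", 1234567, grouping=True)
    print(grouped)                               # e.g. '1,234,567' under en_US
    print(locale.atoi(grouped))                  # parses back to 1234567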
+""" +def atoi(string: _str) -> int: + """Converts a string to an integer according to the locale settings. +""" +def str(val: float) -> _str: + """Convert float to string, taking the locale into account. +""" locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi index 8248f82ea87ac..7638e1c12ebd1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi @@ -1,3 +1,11 @@ +""" +Logging package for Python. Based on PEP 282 and comments thereto in +comp.lang.python. + +Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. + +To use, simply 'import logging' and log away! +""" import sys import threading from _typeshed import StrPath, SupportsWrite @@ -91,27 +99,110 @@ _levelToName: dict[int, str] _nameToLevel: dict[str, int] class Filterer: + """ +A base class for loggers and handlers which allows them to share +common code. +""" filters: list[_FilterType] - def addFilter(self, filter: _FilterType) -> None: ... - def removeFilter(self, filter: _FilterType) -> None: ... + def addFilter(self, filter: _FilterType) -> None: + """ +Add the specified filter to this handler. +""" + def removeFilter(self, filter: _FilterType) -> None: + """ +Remove the specified filter from this handler. +""" if sys.version_info >= (3, 12): - def filter(self, record: LogRecord) -> bool | LogRecord: ... + def filter(self, record: LogRecord) -> bool | LogRecord: + """ +Determine if a record is loggable by consulting all the filters. + +The default is to allow the record to be logged; any filter can veto +this by returning a false value. +If a filter attached to a handler returns a log record instance, +then that instance is used in place of the original log record in +any further processing of the event by that handler. +If a filter returns any other true value, the original log record +is used in any further processing of the event by that handler. + +If none of the filters return false values, this method returns +a log record. +If any of the filters return a false value, this method returns +a false value. + +.. versionchanged:: 3.2 + + Allow filters to be just callables. + +.. versionchanged:: 3.12 + Allow filters to return a LogRecord instead of + modifying it in place. +""" else: - def filter(self, record: LogRecord) -> bool: ... + def filter(self, record: LogRecord) -> bool: + """ + Determine if a record is loggable by consulting all the filters. + + The default is to allow the record to be logged; any filter can veto + this and the record is then dropped. Returns a zero value if a record + is to be dropped, else non-zero. + + .. versionchanged:: 3.2 + + Allow filters to be just callables. + """ class Manager: # undocumented + """ +There is [under normal circumstances] just one Manager instance, which +holds the hierarchy of loggers. +""" root: RootLogger disable: int emittedNoHandlerWarning: bool loggerDict: dict[str, Logger | PlaceHolder] loggerClass: type[Logger] | None logRecordFactory: Callable[..., LogRecord] | None - def __init__(self, rootnode: RootLogger) -> None: ... - def getLogger(self, name: str) -> Logger: ... - def setLoggerClass(self, klass: type[Logger]) -> None: ... - def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: ... 
+ def __init__(self, rootnode: RootLogger) -> None: + """ +Initialize the manager with the root node of the logger hierarchy. +""" + def getLogger(self, name: str) -> Logger: + """ +Get a logger with the specified name (channel name), creating it +if it doesn't yet exist. This name is a dot-separated hierarchical +name, such as "a", "a.b", "a.b.c" or similar. + +If a PlaceHolder existed for the specified name [i.e. the logger +didn't exist but a child of it did], replace it with the created +logger and fix up the parent/child references which pointed to the +placeholder to now point to the logger. +""" + def setLoggerClass(self, klass: type[Logger]) -> None: + """ +Set the class to be used when instantiating a logger with this Manager. +""" + def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: + """ +Set the factory to be used when instantiating a log record with this +Manager. +""" class Logger(Filterer): + """ +Instances of the Logger class represent a single logging channel. A +"logging channel" indicates an area of an application. Exactly how an +"area" is defined is up to the application developer. Since an +application can have any number of areas, logging channels are identified +by a unique string. Application areas can be nested (e.g. an area +of "input processing" might include sub-areas "read CSV files", "read +XLS files" and "read Gnumeric files"). To cater for this natural nesting, +channel names are organized into a namespace hierarchy where levels are +separated by periods, much like the Java or Python package namespace. So +in the instance given above, channel names might be "input" for the upper +level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels. +There is no arbitrary limit to the depth of nesting. +""" name: str # undocumented level: int # undocumented parent: Logger | None # undocumented @@ -120,11 +211,40 @@ class Logger(Filterer): disabled: bool # undocumented root: ClassVar[RootLogger] # undocumented manager: Manager # undocumented - def __init__(self, name: str, level: _Level = 0) -> None: ... - def setLevel(self, level: _Level) -> None: ... - def isEnabledFor(self, level: int) -> bool: ... - def getEffectiveLevel(self) -> int: ... - def getChild(self, suffix: str) -> Self: ... # see python/typing#980 + def __init__(self, name: str, level: _Level = 0) -> None: + """ +Initialize the logger with a name and an optional level. +""" + def setLevel(self, level: _Level) -> None: + """ +Set the logging level of this logger. level must be an int or a str. +""" + def isEnabledFor(self, level: int) -> bool: + """ +Is this logger enabled for level 'level'? +""" + def getEffectiveLevel(self) -> int: + """ +Get the effective level for this logger. + +Loop through this logger and its parents in the logger hierarchy, +looking for a non-zero logging level. Return the first one found. +""" + def getChild(self, suffix: str) -> Self: # see python/typing#980 + """ +Get a logger which is a descendant to this one. + +This is a convenience method, such that + +logging.getLogger('abc').getChild('def.ghi') + +is the same as + +logging.getLogger('abc.def.ghi') + +It's useful, for example, when the parent logger is named using +__name__ rather than a literal string. +""" if sys.version_info >= (3, 12): def getChildren(self) -> set[Logger]: ... @@ -136,7 +256,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with severity 'DEBUG'. 
+ +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) +""" def info( self, msg: object, @@ -145,7 +273,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with severity 'INFO'. + +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.info("Houston, we have a %s", "notable problem", exc_info=True) +""" def warning( self, msg: object, @@ -154,7 +290,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with severity 'WARNING'. + +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) +""" @deprecated("Deprecated since Python 3.3. Use `Logger.warning()` instead.") def warn( self, @@ -173,7 +317,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with severity 'ERROR'. + +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.error("Houston, we have a %s", "major problem", exc_info=True) +""" def exception( self, msg: object, @@ -182,7 +334,10 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Convenience method for logging an ERROR with exception information. +""" def critical( self, msg: object, @@ -191,7 +346,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with severity 'CRITICAL'. + +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.critical("Houston, we have a %s", "major disaster", exc_info=True) +""" def log( self, level: int, @@ -201,7 +364,15 @@ class Logger(Filterer): stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, - ) -> None: ... + ) -> None: + """ +Log 'msg % args' with the integer severity 'level'. + +To pass exception information, use the keyword argument exc_info with +a true value, e.g. + +logger.log(level, "We have a %s", "mysterious problem", exc_info=True) +""" def _log( self, level: int, @@ -211,12 +382,32 @@ class Logger(Filterer): extra: Mapping[str, object] | None = None, stack_info: bool = False, stacklevel: int = 1, - ) -> None: ... # undocumented + ) -> None: # undocumented + """ +Low-level logging routine which creates a LogRecord and then calls +all the handlers of this logger to handle the record. +""" fatal = critical - def addHandler(self, hdlr: Handler) -> None: ... - def removeHandler(self, hdlr: Handler) -> None: ... - def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... - def handle(self, record: LogRecord) -> None: ... + def addHandler(self, hdlr: Handler) -> None: + """ +Add the specified handler to this logger. +""" + def removeHandler(self, hdlr: Handler) -> None: + """ +Remove the specified handler from this logger. 
+""" + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: + """ +Find the stack frame of the caller so that we can note the source +file name, line number and function name. +""" + def handle(self, record: LogRecord) -> None: + """ +Call the handlers for the specified record. + +This method is used for unpickled records received from a socket, as +well as those created locally. Logger-level filtering is applied. +""" def makeRecord( self, name: str, @@ -229,9 +420,31 @@ class Logger(Filterer): func: str | None = None, extra: Mapping[str, object] | None = None, sinfo: str | None = None, - ) -> LogRecord: ... - def hasHandlers(self) -> bool: ... - def callHandlers(self, record: LogRecord) -> None: ... # undocumented + ) -> LogRecord: + """ +A factory method which can be overridden in subclasses to create +specialized LogRecords. +""" + def hasHandlers(self) -> bool: + """ +See if this logger has any handlers configured. + +Loop through all handlers for this logger and its parents in the +logger hierarchy. Return True if a handler was found, else False. +Stop searching up the hierarchy whenever a logger with the "propagate" +attribute set to zero is found - that will be the last logger which +is checked for the existence of handlers. +""" + def callHandlers(self, record: LogRecord) -> None: # undocumented + """ +Pass a record to all relevant handlers. + +Loop through all handlers for this logger and its parents in the +logger hierarchy. If no handler was found, output a one-off error +message to sys.stderr. Stop searching up the hierarchy whenever a +logger with the "propagate" attribute set to zero is found - that +will be the last logger whose handlers are called. +""" CRITICAL: Final = 50 FATAL: Final = CRITICAL @@ -243,30 +456,154 @@ DEBUG: Final = 10 NOTSET: Final = 0 class Handler(Filterer): + """ +Handler instances dispatch logging events to specific destinations. + +The base handler class. Acts as a placeholder which defines the Handler +interface. Handlers can optionally use Formatter instances to format +records as desired. By default, no formatter is specified; in this case, +the 'raw' message as determined by record.message is logged. +""" level: int # undocumented formatter: Formatter | None # undocumented lock: threading.Lock | None # undocumented name: str | None # undocumented - def __init__(self, level: _Level = 0) -> None: ... + def __init__(self, level: _Level = 0) -> None: + """ +Initializes the instance - basically setting the formatter to None +and the filter list to empty. +""" def get_name(self) -> str: ... # undocumented def set_name(self, name: str) -> None: ... # undocumented - def createLock(self) -> None: ... - def acquire(self) -> None: ... - def release(self) -> None: ... - def setLevel(self, level: _Level) -> None: ... - def setFormatter(self, fmt: Formatter | None) -> None: ... - def flush(self) -> None: ... - def close(self) -> None: ... - def handle(self, record: LogRecord) -> bool: ... - def handleError(self, record: LogRecord) -> None: ... - def format(self, record: LogRecord) -> str: ... - def emit(self, record: LogRecord) -> None: ... + def createLock(self) -> None: + """ +Acquire a thread lock for serializing access to the underlying I/O. +""" + def acquire(self) -> None: + """ +Acquire the I/O thread lock. +""" + def release(self) -> None: + """ +Release the I/O thread lock. +""" + def setLevel(self, level: _Level) -> None: + """ +Set the logging level of this handler. level must be an int or a str. 
+""" + def setFormatter(self, fmt: Formatter | None) -> None: + """ +Set the formatter for this handler. +""" + def flush(self) -> None: + """ +Ensure all logging output has been flushed. + +This version does nothing and is intended to be implemented by +subclasses. +""" + def close(self) -> None: + """ +Tidy up any resources used by the handler. + +This version removes the handler from an internal map of handlers, +_handlers, which is used for handler lookup by name. Subclasses +should ensure that this gets called from overridden close() +methods. +""" + def handle(self, record: LogRecord) -> bool: + """ +Conditionally emit the specified logging record. + +Emission depends on filters which may have been added to the handler. +Wrap the actual emission of the record with acquisition/release of +the I/O thread lock. + +Returns an instance of the log record that was emitted +if it passed all filters, otherwise a false value is returned. +""" + def handleError(self, record: LogRecord) -> None: + """ +Handle errors which occur during an emit() call. + +This method should be called from handlers when an exception is +encountered during an emit() call. If raiseExceptions is false, +exceptions get silently ignored. This is what is mostly wanted +for a logging system - most users will not care about errors in +the logging system, they are more interested in application errors. +You could, however, replace this with a custom handler if you wish. +The record which was being processed is passed in to this method. +""" + def format(self, record: LogRecord) -> str: + """ +Format the specified record. + +If a formatter is set, use it. Otherwise, use the default formatter +for the module. +""" + def emit(self, record: LogRecord) -> None: + """ +Do whatever it takes to actually log the specified logging record. + +This version is intended to be implemented by subclasses and so +raises a NotImplementedError. +""" if sys.version_info >= (3, 12): - def getHandlerByName(name: str) -> Handler | None: ... - def getHandlerNames() -> frozenset[str]: ... + def getHandlerByName(name: str) -> Handler | None: + """ +Get a handler with the specified *name*, or None if there isn't one with +that name. +""" + def getHandlerNames() -> frozenset[str]: + """ +Return all known handler names as an immutable set. +""" class Formatter: + """ +Formatter instances are used to convert a LogRecord to text. + +Formatters need to know how a LogRecord is constructed. They are +responsible for converting a LogRecord to (usually) a string which can +be interpreted by either a human or an external system. The base Formatter +allows a formatting string to be specified. If none is supplied, the +style-dependent default value, "%(message)s", "{message}", or +"${message}", is used. + +The Formatter can be initialized with a format string which makes use of +knowledge of the LogRecord attributes - e.g. the default value mentioned +above makes use of the fact that the user's message and arguments are pre- +formatted into a LogRecord's message attribute. 
Currently, the useful +attributes in a LogRecord are described by: + +%(name)s Name of the logger (logging channel) +%(levelno)s Numeric logging level for the message (DEBUG, INFO, + WARNING, ERROR, CRITICAL) +%(levelname)s Text logging level for the message ("DEBUG", "INFO", + "WARNING", "ERROR", "CRITICAL") +%(pathname)s Full pathname of the source file where the logging + call was issued (if available) +%(filename)s Filename portion of pathname +%(module)s Module (name portion of filename) +%(lineno)d Source line number where the logging call was issued + (if available) +%(funcName)s Function name +%(created)f Time when the LogRecord was created (time.time_ns() / 1e9 + return value) +%(asctime)s Textual time when the LogRecord was created +%(msecs)d Millisecond portion of the creation time +%(relativeCreated)d Time in milliseconds when the LogRecord was created, + relative to the time the logging module was loaded + (typically at application startup time) +%(thread)d Thread ID (if available) +%(threadName)s Thread name (if available) +%(taskName)s Task name (if available) +%(process)d Process ID (if available) +%(processName)s Process name (if available) +%(message)s The result of record.getMessage(), computed just as + the record is emitted +""" converter: Callable[[float | None], struct_time] _fmt: str | None # undocumented datefmt: str | None # undocumented @@ -283,36 +620,170 @@ class Formatter: validate: bool = True, *, defaults: Mapping[str, Any] | None = None, - ) -> None: ... + ) -> None: + """ +Initialize the formatter with specified format strings. + +Initialize the formatter either with the specified format string, or a +default as described above. Allow for specialized date formatting with +the optional datefmt argument. If datefmt is omitted, you get an +ISO8601-like (or RFC 3339-like) format. + +Use a style parameter of '%', '{' or '$' to specify that you want to +use one of %-formatting, :meth:`str.format` (``{}``) formatting or +:class:`string.Template` formatting in your format string. + +.. versionchanged:: 3.2 + Added the ``style`` parameter. +""" else: def __init__( self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True - ) -> None: ... + ) -> None: + """ + Initialize the formatter with specified format strings. + + Initialize the formatter either with the specified format string, or a + default as described above. Allow for specialized date formatting with + the optional datefmt argument. If datefmt is omitted, you get an + ISO8601-like (or RFC 3339-like) format. + + Use a style parameter of '%', '{' or '$' to specify that you want to + use one of %-formatting, :meth:`str.format` (``{}``) formatting or + :class:`string.Template` formatting in your format string. + + .. versionchanged:: 3.2 + Added the ``style`` parameter. + """ + + def format(self, record: LogRecord) -> str: + """ +Format the specified record as text. + +The record's attribute dictionary is used as the operand to a +string formatting operation which yields the returned string. +Before formatting the dictionary, a couple of preparatory steps +are carried out. The message attribute of the record is computed +using LogRecord.getMessage(). If the formatting string uses the +time (as determined by a call to usesTime(), formatTime() is +called to format the event time. If there is exception information, +it is formatted using formatException() and appended to the message. 
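[Editor's note: a minimal sketch of a Formatter built from the LogRecord attributes listed above; it also sets the 'converter' attribute described in the stub to switch formatTime() to UTC. The logger name is arbitrary.]

import logging
import time

# %-style format string using asctime, levelname, name and message;
# datefmt is passed through to formatTime().
fmt = logging.Formatter(
    fmt="%(asctime)s %(levelname)-8s %(name)s: %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
# 'converter' selects the time conversion used by formatTime();
# time.gmtime makes this particular formatter render timestamps in UTC.
fmt.converter = time.gmtime

handler = logging.StreamHandler()
handler.setFormatter(fmt)
logging.getLogger("example").addHandler(handler)
logging.getLogger("example").error("rendered with the fields listed above")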
+""" + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: + """ +Return the creation time of the specified LogRecord as formatted text. - def format(self, record: LogRecord) -> str: ... - def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... - def formatException(self, ei: _SysExcInfoType) -> str: ... +This method should be called from format() by a formatter which +wants to make use of a formatted time. This method can be overridden +in formatters to provide for any specific requirement, but the +basic behaviour is as follows: if datefmt (a string) is specified, +it is used with time.strftime() to format the creation time of the +record. Otherwise, an ISO8601-like (or RFC 3339-like) format is used. +The resulting string is returned. This function uses a user-configurable +function to convert the creation time to a tuple. By default, +time.localtime() is used; to change this for a particular formatter +instance, set the 'converter' attribute to a function with the same +signature as time.localtime() or time.gmtime(). To change it for all +formatters, for example if you want all logging times to be shown in GMT, +set the 'converter' attribute in the Formatter class. +""" + def formatException(self, ei: _SysExcInfoType) -> str: + """ +Format and return the specified exception information as a string. + +This default implementation just uses +traceback.print_exception() +""" def formatMessage(self, record: LogRecord) -> str: ... # undocumented - def formatStack(self, stack_info: str) -> str: ... - def usesTime(self) -> bool: ... # undocumented + def formatStack(self, stack_info: str) -> str: + """ +This method is provided as an extension point for specialized +formatting of stack information. + +The input data is a string as returned from a call to +:func:`traceback.print_stack`, but with the last trailing newline +removed. + +The base implementation just returns the value passed in. +""" + def usesTime(self) -> bool: # undocumented + """ +Check if the format uses the creation time of the record. +""" class BufferingFormatter: + """ +A formatter suitable for formatting a number of records. +""" linefmt: Formatter - def __init__(self, linefmt: Formatter | None = None) -> None: ... - def formatHeader(self, records: Sequence[LogRecord]) -> str: ... - def formatFooter(self, records: Sequence[LogRecord]) -> str: ... - def format(self, records: Sequence[LogRecord]) -> str: ... + def __init__(self, linefmt: Formatter | None = None) -> None: + """ +Optionally specify a formatter which will be used to format each +individual record. +""" + def formatHeader(self, records: Sequence[LogRecord]) -> str: + """ +Return the header string for the specified records. +""" + def formatFooter(self, records: Sequence[LogRecord]) -> str: + """ +Return the footer string for the specified records. +""" + def format(self, records: Sequence[LogRecord]) -> str: + """ +Format the specified records and return the result as a string. +""" class Filter: + """ +Filter instances are used to perform arbitrary filtering of LogRecords. + +Loggers and Handlers can optionally use Filter instances to filter +records as desired. The base filter class only allows events which are +below a certain point in the logger hierarchy. For example, a filter +initialized with "A.B" will allow events logged by loggers "A.B", +"A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If +initialized with the empty string, all events are passed. 
+""" name: str # undocumented nlen: int # undocumented - def __init__(self, name: str = "") -> None: ... + def __init__(self, name: str = "") -> None: + """ +Initialize a filter. + +Initialize with the name of the logger which, together with its +children, will have its events allowed through the filter. If no +name is specified, allow every event. +""" if sys.version_info >= (3, 12): - def filter(self, record: LogRecord) -> bool | LogRecord: ... + def filter(self, record: LogRecord) -> bool | LogRecord: + """ +Determine if the specified record is to be logged. + +Returns True if the record should be logged, or False otherwise. +If deemed appropriate, the record may be modified in-place. +""" else: - def filter(self, record: LogRecord) -> bool: ... + def filter(self, record: LogRecord) -> bool: + """ + Determine if the specified record is to be logged. + + Returns True if the record should be logged, or False otherwise. + If deemed appropriate, the record may be modified in-place. + """ class LogRecord: + """ +A LogRecord instance represents an event being logged. + +LogRecord instances are created every time something is logged. They +contain all the information pertinent to the event being logged. The +main information passed in is in msg and args, which are combined +using str(msg) % args to create the message field of the record. The +record also includes information such as when the record was created, +the source line where the logging call was made, and any exception +information to be logged. +""" # args can be set to None by logging.handlers.QueueHandler # (see https://bugs.python.org/issue44473) args: _ArgsType | None @@ -352,23 +823,76 @@ class LogRecord: exc_info: _SysExcInfoType | None, func: str | None = None, sinfo: str | None = None, - ) -> None: ... - def getMessage(self) -> str: ... + ) -> None: + """ +Initialize a logging record with interesting information. +""" + def getMessage(self) -> str: + """ +Return the message for this LogRecord. + +Return the message for this LogRecord after merging any user-supplied +arguments with the message. +""" # Allows setting contextual information on LogRecord objects as per the docs, see #7833 def __setattr__(self, name: str, value: Any, /) -> None: ... _L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) class LoggerAdapter(Generic[_L]): + """ +An adapter for loggers which makes it easier to specify contextual +information in logging output. +""" logger: _L manager: Manager # undocumented if sys.version_info >= (3, 13): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: + """ +Initialize the adapter with a logger and a dict-like object which +provides contextual information. This constructor signature allows +easy stacking of LoggerAdapters, if so desired. + +You can effectively pass keyword arguments as shown in the +following example: + +adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + +By default, LoggerAdapter objects will drop the "extra" argument +passed on the individual log calls to use its own instead. + +Initializing it with merge_extra=True will instead merge both +maps when logging, the individual call extra taking precedence +over the LoggerAdapter instance extra + +.. versionchanged:: 3.13 + The *merge_extra* argument was added. 
+""" elif sys.version_info >= (3, 10): - def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: + """ + Initialize the adapter with a logger and a dict-like object which + provides contextual information. This constructor signature allows + easy stacking of LoggerAdapters, if so desired. + + You can effectively pass keyword arguments as shown in the + following example: + + adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + """ else: - def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: + """ + Initialize the adapter with a logger and a dict-like object which + provides contextual information. This constructor signature allows + easy stacking of LoggerAdapters, if so desired. + + You can effectively pass keyword arguments as shown in the + following example: + + adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + """ if sys.version_info >= (3, 10): extra: Mapping[str, object] | None @@ -378,7 +902,16 @@ class LoggerAdapter(Generic[_L]): if sys.version_info >= (3, 13): merge_extra: bool - def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: ... + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: + """ +Process the logging message and keyword arguments passed in to +a logging call to insert contextual information. You can either +manipulate the message itself, the keyword args or both. Return +the message and kwargs modified (or not) to suit your needs. + +Normally, you'll only need to override this one method in a +LoggerAdapter subclass for your specific needs. +""" def debug( self, msg: object, @@ -388,7 +921,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate a debug call to the underlying logger. +""" def info( self, msg: object, @@ -398,7 +934,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate an info call to the underlying logger. +""" def warning( self, msg: object, @@ -408,7 +947,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate a warning call to the underlying logger. +""" @deprecated("Deprecated since Python 3.3. Use `LoggerAdapter.warning()` instead.") def warn( self, @@ -429,7 +971,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate an error call to the underlying logger. +""" def exception( self, msg: object, @@ -439,7 +984,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate an exception call to the underlying logger. +""" def critical( self, msg: object, @@ -449,7 +997,10 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... + ) -> None: + """ +Delegate a critical call to the underlying logger. 
+""" def log( self, level: int, @@ -460,11 +1011,27 @@ class LoggerAdapter(Generic[_L]): stacklevel: int = 1, extra: Mapping[str, object] | None = None, **kwargs: object, - ) -> None: ... - def isEnabledFor(self, level: int) -> bool: ... - def getEffectiveLevel(self) -> int: ... - def setLevel(self, level: _Level) -> None: ... - def hasHandlers(self) -> bool: ... + ) -> None: + """ +Delegate a log call to the underlying logger, after adding +contextual information from this adapter instance. +""" + def isEnabledFor(self, level: int) -> bool: + """ +Is this logger enabled for level 'level'? +""" + def getEffectiveLevel(self) -> int: + """ +Get the effective level for the underlying logger. +""" + def setLevel(self, level: _Level) -> None: + """ +Set the specified level on the underlying logger. +""" + def hasHandlers(self) -> bool: + """ +See if the underlying logger has any handlers. +""" if sys.version_info >= (3, 11): def _log( self, @@ -475,7 +1042,10 @@ class LoggerAdapter(Generic[_L]): exc_info: _ExcInfoType | None = None, extra: Mapping[str, object] | None = None, stack_info: bool = False, - ) -> None: ... # undocumented + ) -> None: # undocumented + """ +Low-level log implementation, proxied to allow nested logger adapters. +""" else: def _log( self, @@ -485,16 +1055,34 @@ class LoggerAdapter(Generic[_L]): exc_info: _ExcInfoType | None = None, extra: Mapping[str, object] | None = None, stack_info: bool = False, - ) -> None: ... # undocumented + ) -> None: # undocumented + """ + Low-level log implementation, proxied to allow nested logger adapters. + """ @property def name(self) -> str: ... # undocumented if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" + +def getLogger(name: str | None = None) -> Logger: + """ +Return a logger with the specified name, creating it if necessary. -def getLogger(name: str | None = None) -> Logger: ... -def getLoggerClass() -> type[Logger]: ... -def getLogRecordFactory() -> Callable[..., LogRecord]: ... +If no name is specified, return the root logger. +""" +def getLoggerClass() -> type[Logger]: + """ +Return the class to be used when instantiating a logger. +""" +def getLogRecordFactory() -> Callable[..., LogRecord]: + """ +Return the factory to be used when instantiating a log record. +""" def debug( msg: object, *args: object, @@ -502,7 +1090,12 @@ def debug( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'DEBUG' on the root logger. If the logger has +no handlers, call basicConfig() to add a console handler with a pre-defined +format. +""" def info( msg: object, *args: object, @@ -510,7 +1103,12 @@ def info( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'INFO' on the root logger. If the logger has +no handlers, call basicConfig() to add a console handler with a pre-defined +format. +""" def warning( msg: object, *args: object, @@ -518,7 +1116,12 @@ def warning( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'WARNING' on the root logger. 
If the logger has +no handlers, call basicConfig() to add a console handler with a pre-defined +format. +""" @deprecated("Deprecated since Python 3.3. Use `warning()` instead.") def warn( msg: object, @@ -535,7 +1138,12 @@ def error( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'ERROR' on the root logger. If the logger has +no handlers, call basicConfig() to add a console handler with a pre-defined +format. +""" def critical( msg: object, *args: object, @@ -543,7 +1151,12 @@ def critical( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'CRITICAL' on the root logger. If the logger +has no handlers, call basicConfig() to add a console handler with a +pre-defined format. +""" def exception( msg: object, *args: object, @@ -551,7 +1164,12 @@ def exception( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log a message with severity 'ERROR' on the root logger, with exception +information. If the logger has no handlers, basicConfig() is called to add +a console handler with a pre-defined format. +""" def log( level: int, msg: object, @@ -560,14 +1178,44 @@ def log( stack_info: bool = False, stacklevel: int = 1, extra: Mapping[str, object] | None = None, -) -> None: ... +) -> None: + """ +Log 'msg % args' with the integer severity 'level' on the root logger. If +the logger has no handlers, call basicConfig() to add a console handler +with a pre-defined format. +""" fatal = critical -def disable(level: int = 50) -> None: ... -def addLevelName(level: int, levelName: str) -> None: ... +def disable(level: int = 50) -> None: + """ +Disable all logging calls of severity 'level' and below. +""" +def addLevelName(level: int, levelName: str) -> None: + """ +Associate 'levelName' with 'level'. + +This is used when converting levels to text during message formatting. +""" @overload -def getLevelName(level: int) -> str: ... +def getLevelName(level: int) -> str: + """ +Return the textual or numeric representation of logging level 'level'. + +If the level is one of the predefined levels (CRITICAL, ERROR, WARNING, +INFO, DEBUG) then you get the corresponding string. If you have +associated levels with names using addLevelName then the name you have +associated with 'level' is returned. + +If a numeric value corresponding to one of the defined levels is passed +in, the corresponding string representation is returned. + +If a string representation of the level is passed in, the corresponding +numeric value is returned. + +If no matching numeric or string value is passed in, the string +'Level %s' % level is returned. +""" @overload @deprecated("The str -> int case is considered a mistake.") def getLevelName(level: str) -> Any: ... @@ -575,7 +1223,13 @@ def getLevelName(level: str) -> Any: ... if sys.version_info >= (3, 11): def getLevelNamesMapping() -> dict[str, int]: ... -def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... +def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: + """ +Make a LogRecord whose attributes are defined by the specified dictionary, +This function is useful for converting a logging event received over +a socket connection (which is sent as a dictionary) into a LogRecord +instance. 
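[Editor's note: a minimal sketch of makeLogRecord(), described just above, rebuilding a LogRecord from a plain dictionary such as one received over a socket; the dictionary values are invented.]

import logging

# Keys mirror LogRecord attributes; unspecified attributes keep their defaults.
event = {
    "name": "example.remote",
    "levelno": logging.ERROR,
    "levelname": "ERROR",
    "msg": "remote failure",
    "args": None,
    "exc_info": None,
}
record = logging.makeLogRecord(event)
logging.getLogger(record.name).handle(record)   # re-dispatch the record locally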
+""" def basicConfig( *, filename: StrPath | None = ..., @@ -589,28 +1243,141 @@ def basicConfig( force: bool | None = ..., encoding: str | None = ..., errors: str | None = ..., -) -> None: ... -def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented -def setLoggerClass(klass: type[Logger]) -> None: ... -def captureWarnings(capture: bool) -> None: ... -def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... +) -> None: + """ +Do basic configuration for the logging system. + +This function does nothing if the root logger already has handlers +configured, unless the keyword argument *force* is set to ``True``. +It is a convenience method intended for use by simple scripts +to do one-shot configuration of the logging package. + +The default behaviour is to create a StreamHandler which writes to +sys.stderr, set a formatter using the BASIC_FORMAT format string, and +add the handler to the root logger. + +A number of optional keyword arguments may be specified, which can alter +the default behaviour. + +filename Specifies that a FileHandler be created, using the specified + filename, rather than a StreamHandler. +filemode Specifies the mode to open the file, if filename is specified + (if filemode is unspecified, it defaults to 'a'). +format Use the specified format string for the handler. +datefmt Use the specified date/time format. +style If a format string is specified, use this to specify the + type of format string (possible values '%', '{', '$', for + %-formatting, :meth:`str.format` and :class:`string.Template` + - defaults to '%'). +level Set the root logger level to the specified level. +stream Use the specified stream to initialize the StreamHandler. Note + that this argument is incompatible with 'filename' - if both + are present, 'stream' is ignored. +handlers If specified, this should be an iterable of already created + handlers, which will be added to the root logger. Any handler + in the list which does not have a formatter assigned will be + assigned the formatter created in this function. +force If this keyword is specified as true, any existing handlers + attached to the root logger are removed and closed, before + carrying out the configuration as specified by the other + arguments. +encoding If specified together with a filename, this encoding is passed to + the created FileHandler, causing it to be used when the file is + opened. +errors If specified together with a filename, this value is passed to the + created FileHandler, causing it to be used when the file is + opened in text mode. If not specified, the default value is + `backslashreplace`. + +Note that you could specify a stream created using open(filename, mode) +rather than passing the filename and mode in. However, it should be +remembered that StreamHandler does not close its stream (since it may be +using sys.stdout or sys.stderr), whereas FileHandler closes its stream +when the handler is closed. + +.. versionchanged:: 3.2 + Added the ``style`` parameter. + +.. versionchanged:: 3.3 + Added the ``handlers`` parameter. A ``ValueError`` is now thrown for + incompatible arguments (e.g. ``handlers`` specified together with + ``filename``/``filemode``, or ``filename``/``filemode`` specified + together with ``stream``, or ``handlers`` specified together with + ``stream``. + +.. versionchanged:: 3.8 + Added the ``force`` parameter. + +.. versionchanged:: 3.9 + Added the ``encoding`` and ``errors`` parameters. +""" +def shutdown(handlerList: Sequence[Any] = ...) 
-> None: # handlerList is undocumented + """ +Perform any cleanup actions in the logging system (e.g. flushing +buffers). + +Should be called at application exit. +""" +def setLoggerClass(klass: type[Logger]) -> None: + """ +Set the class to be used when instantiating a logger. The class should +define __init__() such that only a name argument is required, and the +__init__() should call Logger.__init__() +""" +def captureWarnings(capture: bool) -> None: + """ +If capture is true, redirect all warnings to the logging package. +If capture is False, ensure that warnings are not redirected to logging +but to their original destinations. +""" +def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: + """ +Set the factory to be used when instantiating a log record. + +:param factory: A callable which will be called to instantiate +a log record. +""" lastResort: Handler | None _StreamT = TypeVar("_StreamT", bound=SupportsWrite[str]) class StreamHandler(Handler, Generic[_StreamT]): + """ +A handler class which writes logging records, appropriately formatted, +to a stream. Note that this class does not close the stream, as +sys.stdout or sys.stderr may be used. +""" stream: _StreamT # undocumented terminator: str @overload - def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: + """ +Initialize the handler. + +If stream is not specified, sys.stderr is used. +""" @overload def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 - def setStream(self, stream: _StreamT) -> _StreamT | None: ... + def setStream(self, stream: _StreamT) -> _StreamT | None: + """ +Sets the StreamHandler's stream to the specified value, +if it is different. + +Returns the old stream, if the stream was changed, or None +if it wasn't. +""" if sys.version_info >= (3, 11): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class FileHandler(StreamHandler[TextIOWrapper]): + """ +A handler class which writes formatted logging records to disk files. +""" baseFilename: str # undocumented mode: str # undocumented encoding: str | None # undocumented @@ -618,20 +1385,55 @@ class FileHandler(StreamHandler[TextIOWrapper]): errors: str | None # undocumented def __init__( self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: ... - def _open(self) -> TextIOWrapper: ... # undocumented + ) -> None: + """ +Open the specified file and use it as the stream for logging. +""" + def _open(self) -> TextIOWrapper: # undocumented + """ +Open the current base file with the (original) mode and encoding. +Return the resulting stream. +""" -class NullHandler(Handler): ... +class NullHandler(Handler): + """ +This handler does nothing. It's intended to be used to avoid the +"No handlers could be found for logger XXX" one-off warning. This is +important for library code, which may contain code to log events. If a user +of the library does not configure logging, the one-off warning might be +produced; to avoid this, the library developer simply needs to instantiate +a NullHandler and add it to the top-level logger of the library module or +package. 
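[Editor's note: the library pattern the NullHandler docstring above describes, as a short sketch; the package name "mylibrary" is hypothetical.]

import logging

# In a library's top-level __init__: attach a NullHandler so that an
# application which never configures logging does not see the one-off
# "No handlers could be found" warning.
logging.getLogger("mylibrary").addHandler(logging.NullHandler())

# Library modules then log normally; output appears only if the application
# configures handlers for "mylibrary" or an ancestor logger.
logging.getLogger("mylibrary.db").debug("connection pool initialised")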
+""" class PlaceHolder: # undocumented + """ +PlaceHolder instances are used in the Manager logger hierarchy to take +the place of nodes for which no loggers have been defined. This class is +intended for internal use only and not as part of the public API. +""" loggerMap: dict[Logger, None] - def __init__(self, alogger: Logger) -> None: ... - def append(self, alogger: Logger) -> None: ... + def __init__(self, alogger: Logger) -> None: + """ +Initialize with the specified logger being a child of this placeholder. +""" + def append(self, alogger: Logger) -> None: + """ +Add the specified logger as a child of this placeholder. +""" # Below aren't in module docs but still visible class RootLogger(Logger): - def __init__(self, level: int) -> None: ... + """ +A root logger is not that different to any other logger, except that +it must have a logging level and there is only one instance of it in +the hierarchy. +""" + def __init__(self, level: int) -> None: + """ +Initialize the logger with the name "root". +""" root: RootLogger @@ -647,7 +1449,9 @@ class PercentStyle: # undocumented def __init__(self, fmt: str) -> None: ... def usesTime(self) -> bool: ... - def validate(self) -> None: ... + def validate(self) -> None: + """Validate the input format, ensure it matches the correct style +""" def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi index 72412ddc2cea5..81292bd302b1b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi @@ -1,3 +1,12 @@ +""" +Configuration functions for the logging package for Python. The core package +is based on PEP 282 and comments thereto in comp.lang.python, and influenced +by Apache's log4j system. + +Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. + +To use, simply 'import logging' and log away! +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence @@ -63,7 +72,9 @@ class _DictConfigArgs(TypedDict, total=False): # # Also accept a TypedDict type, to allow callers to use TypedDict # types, and for somewhat stricter type checking of dict literals. -def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... +def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: + """Configure logging using a dictionary. +""" if sys.version_info >= (3, 10): def fileConfig( @@ -71,29 +82,72 @@ if sys.version_info >= (3, 10): defaults: Mapping[str, str] | None = None, disable_existing_loggers: bool = True, encoding: str | None = None, - ) -> None: ... + ) -> None: + """ +Read the logging configuration from a ConfigParser-format file. + +This can be called several times from an application, allowing an end user +the ability to select from various pre-canned configurations (if the +developer provides a mechanism to present the choices and load the chosen +configuration). +""" else: def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, defaults: Mapping[str, str] | None = None, disable_existing_loggers: bool = True, - ) -> None: ... + ) -> None: + """ + Read the logging configuration from a ConfigParser-format file. 
+ + This can be called several times from an application, allowing an end user + the ability to select from various pre-canned configurations (if the + developer provides a mechanism to present the choices and load the chosen + configuration). + """ def valid_ident(s: str) -> Literal[True]: ... # undocumented -def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... -def stopListening() -> None: ... +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: + """ +Start up a socket server on the specified port, and listen for new +configurations. + +These will be sent as a file suitable for processing by fileConfig(). +Returns a Thread object on which you can call start() to start the server, +and which you can join() when appropriate. To stop the server, call +stopListening(). + +Use the ``verify`` argument to verify any bytes received across the wire +from a client. If specified, it should be a callable which receives a +single argument - the bytes of configuration data received across the +network - and it should return either ``None``, to indicate that the +passed in bytes could not be verified and should be discarded, or a +byte string which is then passed to the configuration machinery as +normal. Note that you can return transformed bytes, e.g. by decrypting +the bytes passed in. +""" +def stopListening() -> None: + """ +Stop the listening server which was created with a call to listen(). +""" class ConvertingMixin: # undocumented + """For ConvertingXXX's, this mixin class provides common functions +""" def convert_with_key(self, key: Any, value: Any, replace: bool = True) -> Any: ... def convert(self, value: Any) -> Any: ... class ConvertingDict(dict[Hashable, Any], ConvertingMixin): # undocumented + """A converting dictionary wrapper. +""" def __getitem__(self, key: Hashable) -> Any: ... def get(self, key: Hashable, default: Any = None) -> Any: ... def pop(self, key: Hashable, default: Any = None) -> Any: ... class ConvertingList(list[Any], ConvertingMixin): # undocumented + """A converting list wrapper. +""" @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -102,6 +156,8 @@ class ConvertingList(list[Any], ConvertingMixin): # undocumented if sys.version_info >= (3, 12): class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented + """A converting tuple wrapper. +""" @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -110,12 +166,17 @@ if sys.version_info >= (3, 12): else: @disjoint_base class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented + """A converting tuple wrapper. +""" @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload def __getitem__(self, key: slice) -> Any: ... class BaseConfigurator: + """ +The configurator base class which defines some useful defaults. +""" CONVERT_PATTERN: Pattern[str] WORD_PATTERN: Pattern[str] DOT_PATTERN: Pattern[str] @@ -127,24 +188,64 @@ class BaseConfigurator: config: dict[str, Any] # undocumented def __init__(self, config: _DictConfigArgs | dict[str, Any]) -> None: ... - def resolve(self, s: str) -> Any: ... - def ext_convert(self, value: str) -> Any: ... - def cfg_convert(self, value: str) -> Any: ... - def convert(self, value: Any) -> Any: ... - def configure_custom(self, config: dict[str, Any]) -> Any: ... - def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: ... 
+ def resolve(self, s: str) -> Any: + """ +Resolve strings to objects using standard import and attribute +syntax. +""" + def ext_convert(self, value: str) -> Any: + """Default converter for the ext:// protocol. +""" + def cfg_convert(self, value: str) -> Any: + """Default converter for the cfg:// protocol. +""" + def convert(self, value: Any) -> Any: + """ +Convert values to an appropriate type. dicts, lists and tuples are +replaced by their converting alternatives. Strings are checked to +see if they have a conversion format and are converted if they do. +""" + def configure_custom(self, config: dict[str, Any]) -> Any: + """Configure an object with a user-supplied factory. +""" + def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: + """Utility function which converts lists to tuples. +""" class DictConfigurator(BaseConfigurator): - def configure(self) -> None: ... # undocumented - def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: ... # undocumented - def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: ... # undocumented - def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: ... # undocumented - def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: ... # undocumented - def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: ... # undocumented + """ +Configure logging using a dictionary-like object to describe the +configuration. +""" + def configure(self) -> None: # undocumented + """Do the configuration. +""" + def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: # undocumented + """Configure a formatter from a dictionary. +""" + def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: # undocumented + """Configure a filter from a dictionary. +""" + def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: # undocumented + """Add filters to a filterer from a list of names. +""" + def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: # undocumented + """Configure a handler from a dictionary. +""" + def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: # undocumented + """Add handlers to a logger from a list of names. +""" def common_logger_config( self, logger: Logger, config: _LoggerConfiguration, incremental: bool = False - ) -> None: ... # undocumented - def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented - def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + ) -> None: # undocumented + """ +Perform configuration which is common to root and non-root loggers. +""" + def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented + """Configure a non-root logger from a dictionary. +""" + def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented + """Configure a root logger from a dictionary. 
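[Editor's note: a sketch of the ext:// conversion handled by the configurator classes above, which resolves a configuration string to an external Python object; here ext://sys.stdout is used as a handler stream.]

import logging.config

logging.config.dictConfig({
    "version": 1,
    "handlers": {
        "stdout": {
            "class": "logging.StreamHandler",
            # ext:// is resolved via ext_convert()/resolve() to sys.stdout itself
            "stream": "ext://sys.stdout",
        }
    },
    "root": {"level": "DEBUG", "handlers": ["stdout"]},
})
logging.getLogger("example").debug("written to sys.stdout via ext://")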
+""" dictConfigClass = DictConfigurator diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi index 535f1c6851831..f203dd418d93e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi @@ -1,3 +1,11 @@ +""" +Additional handlers for the logging package for Python. The core package is +based on PEP 282 and comments thereto in comp.lang.python. + +Copyright (C) 2001-2021 Vinay Sajip. All Rights Reserved. + +To use, simply 'import logging.handlers' and log away! +""" import datetime import http.client import ssl @@ -22,24 +30,86 @@ SYSLOG_UDP_PORT: Final = 514 SYSLOG_TCP_PORT: Final = 514 class WatchedFileHandler(FileHandler): + """ +A handler for logging to a file, which watches the file +to see if it has changed while in use. This can happen because of +usage of programs such as newsyslog and logrotate which perform +log file rotation. This handler, intended for use under Unix, +watches the file to see if it has changed since the last emit. +(A file has changed if its device or inode have changed.) +If it has changed, the old file stream is closed, and the file +opened to get a new stream. + +This handler is not appropriate for use under Windows, because +under Windows open files cannot be moved or renamed - logging +opens the files with exclusive locks - and so there is no need +for such a handler. + +This handler is based on a suggestion and patch by Chad J. +Schroeder. +""" dev: int # undocumented ino: int # undocumented def __init__( self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... def _statstream(self) -> None: ... # undocumented - def reopenIfNeeded(self) -> None: ... + def reopenIfNeeded(self) -> None: + """ +Reopen log file if needed. + +Checks if the underlying file has changed, and if it +has, close the old stream and reopen the file to get the +current stream. +""" class BaseRotatingHandler(FileHandler): + """ +Base class for handlers that rotate log files at a certain point. +Not meant to be instantiated directly. Instead, use RotatingFileHandler +or TimedRotatingFileHandler. +""" namer: Callable[[str], str] | None rotator: Callable[[str, str], None] | None def __init__( self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None - ) -> None: ... - def rotation_filename(self, default_name: str) -> str: ... - def rotate(self, source: str, dest: str) -> None: ... + ) -> None: + """ +Use the specified filename for streamed logging +""" + def rotation_filename(self, default_name: str) -> str: + """ +Modify the filename of a log file when rotating. + +This is provided so that a custom filename can be provided. + +The default implementation calls the 'namer' attribute of the +handler, if it's callable, passing the default name to +it. If the attribute isn't callable (the default is None), the name +is returned unchanged. + +:param default_name: The default name for the log file. +""" + def rotate(self, source: str, dest: str) -> None: + """ +When rotating, rotate the current log. + +The default implementation calls the 'rotator' attribute of the +handler, if it's callable, passing the source and dest arguments to +it. If the attribute isn't callable (the default is None), the source +is simply renamed to the destination. + +:param source: The source filename. 
This is normally the base + filename, e.g. 'test.log' +:param dest: The destination filename. This is normally + what the source is rotated to, e.g. 'test.log.1'. +""" class RotatingFileHandler(BaseRotatingHandler): + """ +Handler for logging to a set of files, which switches from one file +to the next when the current file reaches a certain size. +""" maxBytes: int # undocumented backupCount: int # undocumented def __init__( @@ -51,11 +121,47 @@ class RotatingFileHandler(BaseRotatingHandler): encoding: str | None = None, delay: bool = False, errors: str | None = None, - ) -> None: ... - def doRollover(self) -> None: ... - def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + ) -> None: + """ +Open the specified file and use it as the stream for logging. + +By default, the file grows indefinitely. You can specify particular +values of maxBytes and backupCount to allow the file to rollover at +a predetermined size. + +Rollover occurs whenever the current log file is nearly maxBytes in +length. If backupCount is >= 1, the system will successively create +new files with the same pathname as the base file, but with extensions +".1", ".2" etc. appended to it. For example, with a backupCount of 5 +and a base file name of "app.log", you would get "app.log", +"app.log.1", "app.log.2", ... through to "app.log.5". The file being +written to is always "app.log" - when it gets filled up, it is closed +and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. +exist, then they are renamed to "app.log.2", "app.log.3" etc. +respectively. + +If maxBytes is zero, rollover never occurs. +""" + def doRollover(self) -> None: + """ +Do a rollover, as described in __init__(). +""" + def shouldRollover(self, record: LogRecord) -> int: # undocumented + """ +Determine if rollover should occur. + +Basically, see if the supplied record would cause the file to exceed +the size limit we have. +""" class TimedRotatingFileHandler(BaseRotatingHandler): + """ +Handler for logging to a file, rotating the log file at certain timed +intervals. + +If backupCount is > 0, when rollover is done, no more than backupCount +files are kept - the oldest ones are deleted. +""" when: str # undocumented backupCount: int # undocumented utc: bool # undocumented @@ -77,12 +183,44 @@ class TimedRotatingFileHandler(BaseRotatingHandler): atTime: datetime.time | None = None, errors: str | None = None, ) -> None: ... - def doRollover(self) -> None: ... - def shouldRollover(self, record: LogRecord) -> int: ... # undocumented - def computeRollover(self, currentTime: int) -> int: ... # undocumented - def getFilesToDelete(self) -> list[str]: ... # undocumented + def doRollover(self) -> None: + """ +do a rollover; in this case, a date/time stamp is appended to the filename +when the rollover happens. However, you want the file to be named for the +start of the interval, not the current time. If there is a backup count, +then we have to get a list of matching filenames, sort them and remove +the one with the oldest suffix. +""" + def shouldRollover(self, record: LogRecord) -> int: # undocumented + """ +Determine if rollover should occur. + +record is not used, as we are just comparing times, but it is needed so +the method signatures are the same +""" + def computeRollover(self, currentTime: int) -> int: # undocumented + """ +Work out the rollover time based on the specified time. +""" + def getFilesToDelete(self) -> list[str]: # undocumented + """ +Determine the files to delete when rolling over. 
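[Editor's note: a sketch of the size-based rollover behaviour described for RotatingFileHandler above; the file name and limits are arbitrary.]

import logging
from logging.handlers import RotatingFileHandler

# Roll over once app.log approaches 1 MiB, keeping app.log.1 .. app.log.5.
handler = RotatingFileHandler("app.log", maxBytes=1_048_576, backupCount=5)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))

log = logging.getLogger("example.app")
log.addHandler(handler)
log.warning("this record may eventually trigger doRollover()")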
+ +More specific than the earlier method, which just used glob.glob(). +""" class SocketHandler(Handler): + """ +A handler class which writes logging records, in pickle format, to +a streaming socket. The socket is kept open across logging calls. +If the peer resets it, an attempt is made to reconnect on the next call. +The pickle which is sent is that of the LogRecord's attribute dictionary +(__dict__), so that the receiver does not need to have the logging module +installed in order to process the logging event. + +To unpickle the record at the receiving end into a LogRecord, use the +makeLogRecord function. +""" host: str # undocumented port: int | None # undocumented address: tuple[str, int] | str # undocumented @@ -92,16 +230,63 @@ class SocketHandler(Handler): retryStart: float # undocumented retryFactor: float # undocumented retryMax: float # undocumented - def __init__(self, host: str, port: int | None) -> None: ... - def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented - def makePickle(self, record: LogRecord) -> bytes: ... - def send(self, s: ReadableBuffer) -> None: ... - def createSocket(self) -> None: ... + def __init__(self, host: str, port: int | None) -> None: + """ +Initializes the handler with a specific host address and port. + +When the attribute *closeOnError* is set to True - if a socket error +occurs, the socket is silently closed and then reopened on the next +logging call. +""" + def makeSocket(self, timeout: float = 1) -> socket: # timeout is undocumented + """ +A factory method which allows subclasses to define the precise +type of socket they want. +""" + def makePickle(self, record: LogRecord) -> bytes: + """ +Pickles the record in binary format with a length prefix, and +returns it ready for transmission across the socket. +""" + def send(self, s: ReadableBuffer) -> None: + """ +Send a pickled string to the socket. + +This function allows for partial sends which can happen when the +network is busy. +""" + def createSocket(self) -> None: + """ +Try to create a socket, using an exponential backoff with +a max retry time. Thanks to Robert Olson for the original patch +(SF #815911) which has been slightly refactored. +""" class DatagramHandler(SocketHandler): - def makeSocket(self) -> socket: ... # type: ignore[override] + """ +A handler class which writes logging records, in pickle format, to +a datagram socket. The pickle which is sent is that of the LogRecord's +attribute dictionary (__dict__), so that the receiver does not need to +have the logging module installed in order to process the logging event. + +To unpickle the record at the receiving end into a LogRecord, use the +makeLogRecord function. + +""" + def makeSocket(self) -> socket: # type: ignore[override] + """ +The factory method of SocketHandler is here overridden to create +a UDP socket (SOCK_DGRAM). +""" class SysLogHandler(Handler): + """ +A handler class which sends formatted logging records to a syslog +server. Based on Sam Rushing's syslog module: +http://www.nightmare.com/squirl/python-ext/misc/syslog.py +Contributed by Nicolas Untz (after which minor refactoring changes +have been made). +""" LOG_EMERG: int LOG_ALERT: int LOG_CRIT: int @@ -152,25 +337,102 @@ class SysLogHandler(Handler): facility: str | int = 1, socktype: SocketKind | None = None, timeout: float | None = None, - ) -> None: ... + ) -> None: + """ +Initialize a handler. + +If address is specified as a string, a UNIX socket is used. 
To log to a +local syslogd, "SysLogHandler(address="/dev/log")" can be used. +If facility is not specified, LOG_USER is used. If socktype is +specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific +socket type will be used. For Unix sockets, you can also specify a +socktype of None, in which case socket.SOCK_DGRAM will be used, falling +back to socket.SOCK_STREAM. +""" else: def __init__( self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None - ) -> None: ... + ) -> None: + """ +Initialize a handler. + +If address is specified as a string, a UNIX socket is used. To log to a +local syslogd, "SysLogHandler(address="/dev/log")" can be used. +If facility is not specified, LOG_USER is used. If socktype is +specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific +socket type will be used. For Unix sockets, you can also specify a +socktype of None, in which case socket.SOCK_DGRAM will be used, falling +back to socket.SOCK_STREAM. +""" if sys.version_info >= (3, 11): - def createSocket(self) -> None: ... + def createSocket(self) -> None: + """ +Try to create a socket and, if it's not a datagram socket, connect it +to the other end. This method is called during handler initialization, +but it's not regarded as an error if the other end isn't listening yet +--- the method will be called again when emitting an event, +if there is no socket at that point. +""" - def encodePriority(self, facility: int | str, priority: int | str) -> int: ... - def mapPriority(self, levelName: str) -> str: ... + def encodePriority(self, facility: int | str, priority: int | str) -> int: + """ +Encode the facility and priority. You can pass in strings or +integers - if strings are passed, the facility_names and +priority_names mapping dictionaries are used to convert them to +integers. +""" + def mapPriority(self, levelName: str) -> str: + """ +Map a logging level name to a key in the priority_names map. +This is useful in two scenarios: when custom levels are being +used, and in the case where you can't do a straightforward +mapping by lowercasing the logging level name because of locale- +specific issues (see SF #1524081). +""" class NTEventLogHandler(Handler): + """ +A handler class which sends events to the NT Event Log. Adds a +registry entry for the specified application name. If no dllname is +provided, win32service.pyd (which contains some basic message +placeholders) is used. Note that use of these placeholders will make +your event logs big, as the entire message source is held in the log. +If you want slimmer logs, you have to pass in the name of your own DLL +which contains the message definitions you want to use in the event log. +""" def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... - def getEventCategory(self, record: LogRecord) -> int: ... + def getEventCategory(self, record: LogRecord) -> int: + """ +Return the event category for the record. + +Override this if you want to specify your own categories. This version +returns 0. +""" # TODO: correct return value? - def getEventType(self, record: LogRecord) -> int: ... - def getMessageID(self, record: LogRecord) -> int: ... + def getEventType(self, record: LogRecord) -> int: + """ +Return the event type for the record. + +Override this if you want to specify your own types. 
This version does +a mapping using the handler's typemap attribute, which is set up in +__init__() to a dictionary which contains mappings for DEBUG, INFO, +WARNING, ERROR and CRITICAL. If you are using your own levels you will +either need to override this method or place a suitable dictionary in +the handler's typemap attribute. +""" + def getMessageID(self, record: LogRecord) -> int: + """ +Return the message ID for the event record. If you are using your +own messages, you could do this by having the msg passed to the +logger being an ID rather than a formatting string. Then, in here, +you could use a dictionary lookup to get the message ID. This +version returns 1, which is the base message ID in win32service.pyd. +""" class SMTPHandler(Handler): + """ +A handler class which sends an SMTP email for each logging event. +""" mailhost: str # undocumented mailport: int | None # undocumented username: str | None # undocumented @@ -190,23 +452,84 @@ class SMTPHandler(Handler): credentials: tuple[str, str] | None = None, secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, timeout: float = 5.0, - ) -> None: ... - def getSubject(self, record: LogRecord) -> str: ... + ) -> None: + """ +Initialize the handler. + +Initialize the instance with the from and to addresses and subject +line of the email. To specify a non-standard SMTP port, use the +(host, port) tuple format for the mailhost argument. To specify +authentication credentials, supply a (username, password) tuple +for the credentials argument. To specify the use of a secure +protocol (TLS), pass in a tuple for the secure argument. This will +only be used when authentication credentials are supplied. The tuple +will be either an empty tuple, or a single-value tuple with the name +of a keyfile, or a 2-value tuple with the names of the keyfile and +certificate file. (This tuple is passed to the +`ssl.SSLContext.load_cert_chain` method). +A timeout in seconds can be specified for the SMTP connection (the +default is one second). +""" + def getSubject(self, record: LogRecord) -> str: + """ +Determine the subject for the email. + +If you want to specify a subject line which is record-dependent, +override this method. +""" class BufferingHandler(Handler): + """ +A handler class which buffers logging records in memory. Whenever each +record is added to the buffer, a check is made to see if the buffer should +be flushed. If it should, then flush() is expected to do what's needed. + """ capacity: int # undocumented buffer: list[LogRecord] # undocumented - def __init__(self, capacity: int) -> None: ... - def shouldFlush(self, record: LogRecord) -> bool: ... + def __init__(self, capacity: int) -> None: + """ +Initialize the handler with the buffer size. +""" + def shouldFlush(self, record: LogRecord) -> bool: + """ +Should the handler flush its buffer? + +Returns true if the buffer is up to capacity. This method can be +overridden to implement custom flushing strategies. +""" class MemoryHandler(BufferingHandler): + """ +A handler class which buffers logging records in memory, periodically +flushing them to a target handler. Flushing occurs whenever the buffer +is full, or when an event of a certain severity or greater is seen. +""" flushLevel: int # undocumented target: Handler | None # undocumented flushOnClose: bool # undocumented - def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... - def setTarget(self, target: Handler | None) -> None: ... 
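[Editor's note: a small sketch of the buffer-then-flush behaviour described for MemoryHandler above; capacity and logger name are arbitrary.]

import logging
from logging.handlers import MemoryHandler

target = logging.StreamHandler()
# Buffer up to 100 records; flush them to the target when the buffer fills
# or when a record at ERROR (flushLevel) or above is seen.
memory = MemoryHandler(capacity=100, flushLevel=logging.ERROR, target=target)

log = logging.getLogger("example.batch")
log.setLevel(logging.INFO)
log.addHandler(memory)
log.info("buffered until capacity is reached or an ERROR arrives")
log.error("this record triggers a flush of the whole buffer")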
+ def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: + """ +Initialize the handler with the buffer size, the level at which +flushing should occur and an optional target. + +Note that without a target being set either here or via setTarget(), +a MemoryHandler is no use to anyone! + +The ``flushOnClose`` argument is ``True`` for backward compatibility +reasons - the old behaviour is that when the handler is closed, the +buffer is flushed, even if the flush level hasn't been exceeded nor the +capacity exceeded. To prevent this, set ``flushOnClose`` to ``False``. +""" + def setTarget(self, target: Handler | None) -> None: + """ +Set the target handler for this handler. +""" class HTTPHandler(Handler): + """ +A class which sends records to a web server, using either GET or +POST semantics. +""" host: str # undocumented url: str # undocumented method: str # undocumented @@ -221,9 +544,24 @@ class HTTPHandler(Handler): secure: bool = False, credentials: tuple[str, str] | None = None, context: ssl.SSLContext | None = None, - ) -> None: ... - def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... - def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented + ) -> None: + """ +Initialize the instance with the host, the request URL, and the method +("GET" or "POST") +""" + def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: + """ +Default implementation of mapping the log record into a dict +that is sent as the CGI data. Overwrite in your class. +Contributed by Franz Glasner. +""" + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: # undocumented + """ +get a HTTP[S]Connection. + +Override when a custom connection is required, for example if +there is a proxy. +""" @type_check_only class _QueueLike(Protocol[_T]): @@ -231,28 +569,116 @@ class _QueueLike(Protocol[_T]): def put_nowait(self, item: _T, /) -> None: ... class QueueHandler(Handler): + """ +This handler sends events to a queue. Typically, it would be used together +with a multiprocessing Queue to centralise logging to file in one process +(in a multi-process application), so as to avoid file write contention +between processes. + +This code is new in Python 3.2, but this class can be copy pasted into +user code for use with earlier Python versions. +""" queue: _QueueLike[Any] - def __init__(self, queue: _QueueLike[Any]) -> None: ... - def prepare(self, record: LogRecord) -> Any: ... - def enqueue(self, record: LogRecord) -> None: ... + def __init__(self, queue: _QueueLike[Any]) -> None: + """ +Initialise an instance, using the passed queue. +""" + def prepare(self, record: LogRecord) -> Any: + """ +Prepare a record for queuing. The object returned by this method is +enqueued. + +The base implementation formats the record to merge the message and +arguments, and removes unpickleable items from the record in-place. +Specifically, it overwrites the record's `msg` and +`message` attributes with the merged message (obtained by +calling the handler's `format` method), and sets the `args`, +`exc_info` and `exc_text` attributes to None. + +You might want to override this method if you want to convert +the record to a dict or JSON string, or send a modified copy +of the record while leaving the original intact. +""" + def enqueue(self, record: LogRecord) -> None: + """ +Enqueue a record. + +The base implementation uses put_nowait. 
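[Editor's note: QueueHandler above is typically paired with QueueListener, covered next; a sketch with a standard library queue, so producers only pay the cost of enqueue() while a listener thread does the I/O.]

import logging
import queue
from logging.handlers import QueueHandler, QueueListener

q = queue.Queue()
logging.getLogger("example.worker").addHandler(QueueHandler(q))

listener = QueueListener(q, logging.StreamHandler())
listener.start()                                  # background thread drains the queue
logging.getLogger("example.worker").warning("routed through the queue")
listener.stop()                                   # enqueue sentinel, join the thread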
You may want to override +this method if you want to use blocking, timeouts or custom queue +implementations. +""" if sys.version_info >= (3, 12): listener: QueueListener | None class QueueListener: + """ +This class implements an internal threaded listener which watches for +LogRecords being added to a queue, removes them and passes them to a +list of handlers for processing. +""" handlers: tuple[Handler, ...] # undocumented respect_handler_level: bool # undocumented queue: _QueueLike[Any] # undocumented _thread: Thread | None # undocumented - def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... - def dequeue(self, block: bool) -> LogRecord: ... - def prepare(self, record: LogRecord) -> Any: ... - def start(self) -> None: ... - def stop(self) -> None: ... - def enqueue_sentinel(self) -> None: ... - def handle(self, record: LogRecord) -> None: ... + def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: + """ +Initialise an instance with the specified queue and +handlers. +""" + def dequeue(self, block: bool) -> LogRecord: + """ +Dequeue a record and return it, optionally blocking. + +The base implementation uses get. You may want to override this method +if you want to use timeouts or work with custom queue implementations. +""" + def prepare(self, record: LogRecord) -> Any: + """ +Prepare a record for handling. + +This method just returns the passed-in record. You may want to +override this method if you need to do any custom marshalling or +manipulation of the record before passing it to the handlers. +""" + def start(self) -> None: + """ +Start the listener. + +This starts up a background thread to monitor the queue for +LogRecords to process. +""" + def stop(self) -> None: + """ +Stop the listener. + +This asks the thread to terminate, and then waits for it to do so. +Note that if you don't call this before your application exits, there +may be some records still left on the queue, which won't be processed. +""" + def enqueue_sentinel(self) -> None: + """ +This is used to enqueue the sentinel record. + +The base implementation uses put_nowait. You may want to override this +method if you want to use timeouts or work with custom queue +implementations. +""" + def handle(self, record: LogRecord) -> None: + """ +Handle a record. + +This just loops through the handlers offering them the record +to handle. +""" if sys.version_info >= (3, 14): - def __enter__(self) -> Self: ... + def __enter__(self) -> Self: + """ +For use as a context manager. Starts the listener. +""" def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> None: ... + ) -> None: + """ +For use as a context manager. Stops the listener. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi index b7ef607b75cbf..0a76e80017d86 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi @@ -1,3 +1,12 @@ +"""Interface to the liblzma compression library. + +This module provides a class for reading and writing compressed files, +classes for incremental (de)compression, and convenience functions for +one-shot (de)compression. + +These classes and functions support both the XZ and legacy LZMA +container formats, as well as raw compressed data streams. 
+""" import sys from _lzma import ( CHECK_CRC32 as CHECK_CRC32, @@ -89,6 +98,14 @@ _OpenTextWritingMode: TypeAlias = Literal["wt", "xt", "at"] _PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible definitions of writelines in the base classes + """A file object providing transparent LZMA (de)compression. + +An LZMAFile can act as a wrapper for an existing file object, or +refer directly to a named file on disk. + +Note that LZMAFile provides a *binary* file interface - data read +is returned as bytes, and data to be written must be given as bytes. +""" def __init__( self, filename: _PathOrFile | None = None, @@ -98,14 +115,99 @@ class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible def check: int = -1, preset: int | None = None, filters: _FilterChain | None = None, - ) -> None: ... + ) -> None: + """Open an LZMA-compressed file in binary mode. + +filename can be either an actual file name (given as a str, +bytes, or PathLike object), in which case the named file is +opened, or it can be an existing file object to read from or +write to. + +mode can be "r" for reading (default), "w" for (over)writing, +"x" for creating exclusively, or "a" for appending. These can +equivalently be given as "rb", "wb", "xb" and "ab" respectively. + +format specifies the container format to use for the file. +If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the +default is FORMAT_XZ. + +check specifies the integrity check to use. This argument can +only be used when opening a file for writing. For FORMAT_XZ, +the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not +support integrity checks - for these formats, check must be +omitted, or be CHECK_NONE. + +When opening a file for reading, the *preset* argument is not +meaningful, and should be omitted. The *filters* argument should +also be omitted, except when format is FORMAT_RAW (in which case +it is required). + +When opening a file for writing, the settings used by the +compressor can be specified either as a preset compression +level (with the *preset* argument), or in detail as a custom +filter chain (with the *filters* argument). For FORMAT_XZ and +FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset +level. For FORMAT_RAW, the caller must always specify a filter +chain; the raw compressor does not support preset compression +levels. + +preset (if provided) should be an integer in the range 0-9, +optionally OR-ed with the constant PRESET_EXTREME. + +filters (if provided) should be a sequence of dicts. Each dict +should have an entry for "id" indicating ID of the filter, plus +additional entries for options to the filter. +""" def __enter__(self) -> Self: ... - def peek(self, size: int = -1) -> bytes: ... - def read(self, size: int | None = -1) -> bytes: ... - def read1(self, size: int = -1) -> bytes: ... - def readline(self, size: int | None = -1) -> bytes: ... - def write(self, data: ReadableBuffer) -> int: ... - def seek(self, offset: int, whence: int = 0) -> int: ... + def peek(self, size: int = -1) -> bytes: + """Return buffered data without advancing the file position. + +Always returns at least one byte of data, unless at EOF. +The exact number of bytes returned is unspecified. +""" + def read(self, size: int | None = -1) -> bytes: + """Read up to size uncompressed bytes from the file. + +If size is negative or omitted, read until EOF is reached. +Returns b"" if the file is already at EOF. 
+""" + def read1(self, size: int = -1) -> bytes: + """Read up to size uncompressed bytes, while trying to avoid +making multiple reads from the underlying stream. Reads up to a +buffer's worth of data if size is negative. + +Returns b"" if the file is at EOF. +""" + def readline(self, size: int | None = -1) -> bytes: + """Read a line of uncompressed bytes from the file. + +The terminating newline (if present) is retained. If size is +non-negative, no more than size bytes will be read (in which +case the line may be incomplete). Returns b'' if already at EOF. +""" + def write(self, data: ReadableBuffer) -> int: + """Write a bytes object to the file. + +Returns the number of uncompressed bytes written, which is +always the length of data in bytes. Note that due to buffering, +the file on disk may not reflect the data written until close() +is called. +""" + def seek(self, offset: int, whence: int = 0) -> int: + """Change the file position. + +The new position is specified by offset, relative to the +position indicated by whence. Possible values for whence are: + + 0: start of stream (default): offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive + +Returns the new file position. + +Note that seeking is emulated, so depending on the parameters, +this operation may be extremely slow. +""" @overload def open( @@ -119,7 +221,30 @@ def open( encoding: None = None, errors: None = None, newline: None = None, -) -> LZMAFile: ... +) -> LZMAFile: + """Open an LZMA-compressed file in binary or text mode. + +filename can be either an actual file name (given as a str, bytes, +or PathLike object), in which case the named file is opened, or it +can be an existing file object to read from or write to. + +The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb", +"a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text +mode. + +The format, check, preset and filters arguments specify the +compression settings, as for LZMACompressor, LZMADecompressor and +LZMAFile. + +For binary mode, this function is equivalent to the LZMAFile +constructor: LZMAFile(filename, mode, ...). In this case, the +encoding, errors and newline arguments must not be provided. + +For text mode, an LZMAFile object is created, and wrapped in an +io.TextIOWrapper instance with the specified encoding, error +handling behavior, and line ending(s). + +""" @overload def open( filename: _PathOrFile, @@ -174,7 +299,21 @@ def open( ) -> LZMAFile | TextIOWrapper: ... def compress( data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None -) -> bytes: ... +) -> bytes: + """Compress a block of data. + +Refer to LZMACompressor's docstring for a description of the +optional arguments *format*, *check*, *preset* and *filters*. + +For incremental compression, use an LZMACompressor instead. +""" def decompress( data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None -) -> bytes: ... +) -> bytes: + """Decompress a block of data. + +Refer to LZMADecompressor's docstring for a description of the +optional arguments *format*, *check* and *filters*. + +For incremental decompression, use an LZMADecompressor instead. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi index 89bd998b4dfeb..0cd45a9c989a5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi @@ -1,3 +1,5 @@ +"""Read/write support for Maildir, mbox, MH, Babyl, and MMDF mailboxes. +""" import email.message import io import sys @@ -47,202 +49,507 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): + """A group of messages in a particular place. +""" _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: + """Initialize a Mailbox instance. +""" @overload def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... @abstractmethod - def add(self, message: _MessageData) -> str: ... + def add(self, message: _MessageData) -> str: + """Add message and return assigned key. +""" @abstractmethod - def remove(self, key: str) -> None: ... + def remove(self, key: str) -> None: + """Remove the keyed message; raise KeyError if it doesn't exist. +""" def __delitem__(self, key: str) -> None: ... - def discard(self, key: str) -> None: ... + def discard(self, key: str) -> None: + """If the keyed message exists, remove it. +""" @abstractmethod - def __setitem__(self, key: str, message: _MessageData) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: + """Replace the keyed message; raise KeyError if it doesn't exist. +""" @overload - def get(self, key: str, default: None = None) -> _MessageT | None: ... + def get(self, key: str, default: None = None) -> _MessageT | None: + """Return the keyed message, or default if it doesn't exist. +""" @overload def get(self, key: str, default: _T) -> _MessageT | _T: ... - def __getitem__(self, key: str) -> _MessageT: ... + def __getitem__(self, key: str) -> _MessageT: + """Return the keyed message; raise KeyError if it doesn't exist. +""" @abstractmethod - def get_message(self, key: str) -> _MessageT: ... - def get_string(self, key: str) -> str: ... + def get_message(self, key: str) -> _MessageT: + """Return a Message representation or raise a KeyError. +""" + def get_string(self, key: str) -> str: + """Return a string representation or raise a KeyError. + +Uses email.message.Message to create a 7bit clean string +representation of the message. +""" @abstractmethod - def get_bytes(self, key: str) -> bytes: ... + def get_bytes(self, key: str) -> bytes: + """Return a byte string representation or raise a KeyError. +""" # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here @abstractmethod - def get_file(self, key: str) -> Any: ... + def get_file(self, key: str) -> Any: + """Return a file-like representation or raise a KeyError. +""" @abstractmethod - def iterkeys(self) -> Iterator[str]: ... - def keys(self) -> list[str]: ... - def itervalues(self) -> Iterator[_MessageT]: ... + def iterkeys(self) -> Iterator[str]: + """Return an iterator over keys. +""" + def keys(self) -> list[str]: + """Return a list of keys. +""" + def itervalues(self) -> Iterator[_MessageT]: + """Return an iterator over all messages. +""" def __iter__(self) -> Iterator[_MessageT]: ... 
- def values(self) -> list[_MessageT]: ... - def iteritems(self) -> Iterator[tuple[str, _MessageT]]: ... - def items(self) -> list[tuple[str, _MessageT]]: ... + def values(self) -> list[_MessageT]: + """Return a list of messages. Memory intensive. +""" + def iteritems(self) -> Iterator[tuple[str, _MessageT]]: + """Return an iterator over (key, message) tuples. +""" + def items(self) -> list[tuple[str, _MessageT]]: + """Return a list of (key, message) tuples. Memory intensive. +""" @abstractmethod - def __contains__(self, key: str) -> bool: ... + def __contains__(self, key: str) -> bool: + """Return True if the keyed message exists, False otherwise. +""" @abstractmethod - def __len__(self) -> int: ... - def clear(self) -> None: ... + def __len__(self) -> int: + """Return a count of messages in the mailbox. +""" + def clear(self) -> None: + """Delete all messages. +""" @overload - def pop(self, key: str, default: None = None) -> _MessageT | None: ... + def pop(self, key: str, default: None = None) -> _MessageT | None: + """Delete the keyed message and return it, or default. +""" @overload def pop(self, key: str, default: _T) -> _MessageT | _T: ... - def popitem(self) -> tuple[str, _MessageT]: ... - def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... + def popitem(self) -> tuple[str, _MessageT]: + """Delete an arbitrary (key, message) pair and return it. +""" + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: + """Change the messages that correspond to certain keys. +""" @abstractmethod - def flush(self) -> None: ... + def flush(self) -> None: + """Write any pending changes to the disk. +""" @abstractmethod - def lock(self) -> None: ... + def lock(self) -> None: + """Lock the mailbox. +""" @abstractmethod - def unlock(self) -> None: ... + def unlock(self) -> None: + """Unlock the mailbox if it is locked. +""" @abstractmethod - def close(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def close(self) -> None: + """Flush and close the mailbox. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class Maildir(Mailbox[MaildirMessage]): + """A qmail-style Maildir mailbox. +""" colon: str def __init__( self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True - ) -> None: ... - def add(self, message: _MessageData) -> str: ... - def remove(self, key: str) -> None: ... - def __setitem__(self, key: str, message: _MessageData) -> None: ... - def get_message(self, key: str) -> MaildirMessage: ... - def get_bytes(self, key: str) -> bytes: ... - def get_file(self, key: str) -> _ProxyFile[bytes]: ... + ) -> None: + """Initialize a Maildir instance. +""" + def add(self, message: _MessageData) -> str: + """Add message and return assigned key. +""" + def remove(self, key: str) -> None: + """Remove the keyed message; raise KeyError if it doesn't exist. +""" + def __setitem__(self, key: str, message: _MessageData) -> None: + """Replace the keyed message; raise KeyError if it doesn't exist. +""" + def get_message(self, key: str) -> MaildirMessage: + """Return a Message representation or raise a KeyError. +""" + def get_bytes(self, key: str) -> bytes: + """Return a bytes representation or raise a KeyError. 
+""" + def get_file(self, key: str) -> _ProxyFile[bytes]: + """Return a file-like representation or raise a KeyError. +""" if sys.version_info >= (3, 13): - def get_info(self, key: str) -> str: ... - def set_info(self, key: str, info: str) -> None: ... - def get_flags(self, key: str) -> str: ... - def set_flags(self, key: str, flags: str) -> None: ... - def add_flag(self, key: str, flag: str) -> None: ... - def remove_flag(self, key: str, flag: str) -> None: ... + def get_info(self, key: str) -> str: + """Get the keyed message's "info" as a string. +""" + def set_info(self, key: str, info: str) -> None: + """Set the keyed message's "info" string. +""" + def get_flags(self, key: str) -> str: + """Return as a string the standard flags that are set on the keyed message. +""" + def set_flags(self, key: str, flags: str) -> None: + """Set the given flags and unset all others on the keyed message. +""" + def add_flag(self, key: str, flag: str) -> None: + """Set the given flag(s) without changing others on the keyed message. +""" + def remove_flag(self, key: str, flag: str) -> None: + """Unset the given string flag(s) without changing others on the keyed message. +""" - def iterkeys(self) -> Iterator[str]: ... - def __contains__(self, key: str) -> bool: ... - def __len__(self) -> int: ... - def flush(self) -> None: ... - def lock(self) -> None: ... - def unlock(self) -> None: ... - def close(self) -> None: ... - def list_folders(self) -> list[str]: ... - def get_folder(self, folder: str) -> Maildir: ... - def add_folder(self, folder: str) -> Maildir: ... - def remove_folder(self, folder: str) -> None: ... - def clean(self) -> None: ... - def next(self) -> str | None: ... + def iterkeys(self) -> Iterator[str]: + """Return an iterator over keys. +""" + def __contains__(self, key: str) -> bool: + """Return True if the keyed message exists, False otherwise. +""" + def __len__(self) -> int: + """Return a count of messages in the mailbox. +""" + def flush(self) -> None: + """Write any pending changes to disk. +""" + def lock(self) -> None: + """Lock the mailbox. +""" + def unlock(self) -> None: + """Unlock the mailbox if it is locked. +""" + def close(self) -> None: + """Flush and close the mailbox. +""" + def list_folders(self) -> list[str]: + """Return a list of folder names. +""" + def get_folder(self, folder: str) -> Maildir: + """Return a Maildir instance for the named folder. +""" + def add_folder(self, folder: str) -> Maildir: + """Create a folder and return a Maildir instance representing it. +""" + def remove_folder(self, folder: str) -> None: + """Delete the named folder, which must be empty. +""" + def clean(self) -> None: + """Delete old files in "tmp". +""" + def next(self) -> str | None: + """Return the next message in a one-time iteration. +""" class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): - def add(self, message: _MessageData) -> str: ... - def remove(self, key: str) -> None: ... - def __setitem__(self, key: str, message: _MessageData) -> None: ... - def iterkeys(self) -> Iterator[str]: ... - def __contains__(self, key: str) -> bool: ... - def __len__(self) -> int: ... - def lock(self) -> None: ... - def unlock(self) -> None: ... - def flush(self) -> None: ... - def close(self) -> None: ... + """A single-file mailbox. +""" + def add(self, message: _MessageData) -> str: + """Add message and return assigned key. +""" + def remove(self, key: str) -> None: + """Remove the keyed message; raise KeyError if it doesn't exist. 
+""" + def __setitem__(self, key: str, message: _MessageData) -> None: + """Replace the keyed message; raise KeyError if it doesn't exist. +""" + def iterkeys(self) -> Iterator[str]: + """Return an iterator over keys. +""" + def __contains__(self, key: str) -> bool: + """Return True if the keyed message exists, False otherwise. +""" + def __len__(self) -> int: + """Return a count of messages in the mailbox. +""" + def lock(self) -> None: + """Lock the mailbox. +""" + def unlock(self) -> None: + """Unlock the mailbox if it is locked. +""" + def flush(self) -> None: + """Write any pending changes to disk. +""" + def close(self) -> None: + """Flush and close the mailbox. +""" class _mboxMMDF(_singlefileMailbox[_MessageT]): - def get_message(self, key: str) -> _MessageT: ... - def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... - def get_bytes(self, key: str, from_: bool = False) -> bytes: ... - def get_string(self, key: str, from_: bool = False) -> str: ... + """An mbox or MMDF mailbox. +""" + def get_message(self, key: str) -> _MessageT: + """Return a Message representation or raise a KeyError. +""" + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: + """Return a file-like representation or raise a KeyError. +""" + def get_bytes(self, key: str, from_: bool = False) -> bytes: + """Return a string representation or raise a KeyError. +""" + def get_string(self, key: str, from_: bool = False) -> str: + """Return a string representation or raise a KeyError. +""" class mbox(_mboxMMDF[mboxMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... + """A classic mbox mailbox. +""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: + """Initialize an mbox mailbox. +""" class MMDF(_mboxMMDF[MMDFMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... + """An MMDF mailbox. +""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: + """Initialize an MMDF mailbox. +""" class MH(Mailbox[MHMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... - def add(self, message: _MessageData) -> str: ... - def remove(self, key: str) -> None: ... - def __setitem__(self, key: str, message: _MessageData) -> None: ... - def get_message(self, key: str) -> MHMessage: ... - def get_bytes(self, key: str) -> bytes: ... - def get_file(self, key: str) -> _ProxyFile[bytes]: ... - def iterkeys(self) -> Iterator[str]: ... - def __contains__(self, key: str) -> bool: ... - def __len__(self) -> int: ... - def flush(self) -> None: ... - def lock(self) -> None: ... - def unlock(self) -> None: ... - def close(self) -> None: ... - def list_folders(self) -> list[str]: ... - def get_folder(self, folder: StrPath) -> MH: ... - def add_folder(self, folder: StrPath) -> MH: ... - def remove_folder(self, folder: StrPath) -> None: ... - def get_sequences(self) -> dict[str, list[int]]: ... - def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... - def pack(self) -> None: ... + """An MH mailbox. +""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: + """Initialize an MH instance. 
+""" + def add(self, message: _MessageData) -> str: + """Add message and return assigned key. +""" + def remove(self, key: str) -> None: + """Remove the keyed message; raise KeyError if it doesn't exist. +""" + def __setitem__(self, key: str, message: _MessageData) -> None: + """Replace the keyed message; raise KeyError if it doesn't exist. +""" + def get_message(self, key: str) -> MHMessage: + """Return a Message representation or raise a KeyError. +""" + def get_bytes(self, key: str) -> bytes: + """Return a bytes representation or raise a KeyError. +""" + def get_file(self, key: str) -> _ProxyFile[bytes]: + """Return a file-like representation or raise a KeyError. +""" + def iterkeys(self) -> Iterator[str]: + """Return an iterator over keys. +""" + def __contains__(self, key: str) -> bool: + """Return True if the keyed message exists, False otherwise. +""" + def __len__(self) -> int: + """Return a count of messages in the mailbox. +""" + def flush(self) -> None: + """Write any pending changes to the disk. +""" + def lock(self) -> None: + """Lock the mailbox. +""" + def unlock(self) -> None: + """Unlock the mailbox if it is locked. +""" + def close(self) -> None: + """Flush and close the mailbox. +""" + def list_folders(self) -> list[str]: + """Return a list of folder names. +""" + def get_folder(self, folder: StrPath) -> MH: + """Return an MH instance for the named folder. +""" + def add_folder(self, folder: StrPath) -> MH: + """Create a folder and return an MH instance representing it. +""" + def remove_folder(self, folder: StrPath) -> None: + """Delete the named folder, which must be empty. +""" + def get_sequences(self) -> dict[str, list[int]]: + """Return a name-to-key-list dictionary to define each sequence. +""" + def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: + """Set sequences using the given name-to-key-list dictionary. +""" + def pack(self) -> None: + """Re-name messages to eliminate numbering gaps. Invalidates keys. +""" class Babyl(_singlefileMailbox[BabylMessage]): - def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... - def get_message(self, key: str) -> BabylMessage: ... - def get_bytes(self, key: str) -> bytes: ... - def get_file(self, key: str) -> IO[bytes]: ... - def get_labels(self) -> list[str]: ... + """An Rmail-style Babyl mailbox. +""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: + """Initialize a Babyl mailbox. +""" + def get_message(self, key: str) -> BabylMessage: + """Return a Message representation or raise a KeyError. +""" + def get_bytes(self, key: str) -> bytes: + """Return a string representation or raise a KeyError. +""" + def get_file(self, key: str) -> IO[bytes]: + """Return a file-like representation or raise a KeyError. +""" + def get_labels(self) -> list[str]: + """Return a list of user-defined labels in the mailbox. +""" class Message(email.message.Message): - def __init__(self, message: _MessageData | None = None) -> None: ... + """Message with mailbox-format-specific properties. +""" + def __init__(self, message: _MessageData | None = None) -> None: + """Initialize a Message instance. +""" class MaildirMessage(Message): - def get_subdir(self) -> str: ... - def set_subdir(self, subdir: Literal["new", "cur"]) -> None: ... - def get_flags(self) -> str: ... - def set_flags(self, flags: Iterable[str]) -> None: ... - def add_flag(self, flag: str) -> None: ... 
- def remove_flag(self, flag: str) -> None: ... - def get_date(self) -> int: ... - def set_date(self, date: float) -> None: ... - def get_info(self) -> str: ... - def set_info(self, info: str) -> None: ... + """Message with Maildir-specific properties. +""" + def get_subdir(self) -> str: + """Return 'new' or 'cur'. +""" + def set_subdir(self, subdir: Literal["new", "cur"]) -> None: + """Set subdir to 'new' or 'cur'. +""" + def get_flags(self) -> str: + """Return as a string the flags that are set. +""" + def set_flags(self, flags: Iterable[str]) -> None: + """Set the given flags and unset all others. +""" + def add_flag(self, flag: str) -> None: + """Set the given flag(s) without changing others. +""" + def remove_flag(self, flag: str) -> None: + """Unset the given string flag(s) without changing others. +""" + def get_date(self) -> int: + """Return delivery date of message, in seconds since the epoch. +""" + def set_date(self, date: float) -> None: + """Set delivery date of message, in seconds since the epoch. +""" + def get_info(self) -> str: + """Get the message's "info" as a string. +""" + def set_info(self, info: str) -> None: + """Set the message's "info" string. +""" class _mboxMMDFMessage(Message): - def get_from(self) -> str: ... - def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... - def get_flags(self) -> str: ... - def set_flags(self, flags: Iterable[str]) -> None: ... - def add_flag(self, flag: str) -> None: ... - def remove_flag(self, flag: str) -> None: ... + """Message with mbox- or MMDF-specific properties. +""" + def get_from(self) -> str: + """Return contents of "From " line. +""" + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: + """Set "From " line, formatting and appending time_ if specified. +""" + def get_flags(self) -> str: + """Return as a string the flags that are set. +""" + def set_flags(self, flags: Iterable[str]) -> None: + """Set the given flags and unset all others. +""" + def add_flag(self, flag: str) -> None: + """Set the given flag(s) without changing others. +""" + def remove_flag(self, flag: str) -> None: + """Unset the given string flag(s) without changing others. +""" -class mboxMessage(_mboxMMDFMessage): ... +class mboxMessage(_mboxMMDFMessage): + """Message with mbox-specific properties. +""" class MHMessage(Message): - def get_sequences(self) -> list[str]: ... - def set_sequences(self, sequences: Iterable[str]) -> None: ... - def add_sequence(self, sequence: str) -> None: ... - def remove_sequence(self, sequence: str) -> None: ... + """Message with MH-specific properties. +""" + def get_sequences(self) -> list[str]: + """Return a list of sequences that include the message. +""" + def set_sequences(self, sequences: Iterable[str]) -> None: + """Set the list of sequences that include the message. +""" + def add_sequence(self, sequence: str) -> None: + """Add sequence to list of sequences including the message. +""" + def remove_sequence(self, sequence: str) -> None: + """Remove sequence from the list of sequences including the message. +""" class BabylMessage(Message): - def get_labels(self) -> list[str]: ... - def set_labels(self, labels: Iterable[str]) -> None: ... - def add_label(self, label: str) -> None: ... - def remove_label(self, label: str) -> None: ... - def get_visible(self) -> Message: ... - def set_visible(self, visible: _MessageData) -> None: ... - def update_visible(self) -> None: ... 
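# A tiny sketch of the MaildirMessage state API documented above; the message
# text and the chosen flags are illustrative assumptions.
import mailbox

msg = mailbox.MaildirMessage("Subject: ping\n\nbody\n")
msg.set_subdir("cur")         # "cur" marks a message the mail reader has seen
msg.add_flag("S")             # "S" (seen) is one of the standard Maildir flags
msg.add_flag("R")             # "R" means replied-to
print(msg.get_flags())        # flags come back as a sorted string, here "RS"
print(msg.get_info())         # the "info" string encodes the flags, e.g. "2,RS"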
+ """Message with Babyl-specific properties. +""" + def get_labels(self) -> list[str]: + """Return a list of labels on the message. +""" + def set_labels(self, labels: Iterable[str]) -> None: + """Set the list of labels on the message. +""" + def add_label(self, label: str) -> None: + """Add label to list of labels on the message. +""" + def remove_label(self, label: str) -> None: + """Remove label from the list of labels on the message. +""" + def get_visible(self) -> Message: + """Return a Message representation of visible headers. +""" + def set_visible(self, visible: _MessageData) -> None: + """Set the Message representation of visible headers. +""" + def update_visible(self) -> None: + """Update and/or sensibly generate a set of visible headers. +""" -class MMDFMessage(_mboxMMDFMessage): ... +class MMDFMessage(_mboxMMDFMessage): + """Message with MMDF-specific properties. +""" class _ProxyFile(Generic[AnyStr]): - def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... - def read(self, size: int | None = None) -> AnyStr: ... - def read1(self, size: int | None = None) -> AnyStr: ... - def readline(self, size: int | None = None) -> AnyStr: ... - def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... - def __iter__(self) -> Iterator[AnyStr]: ... - def tell(self) -> int: ... - def seek(self, offset: int, whence: int = 0) -> None: ... - def close(self) -> None: ... - def __enter__(self) -> Self: ... + """A read-only wrapper of a file. +""" + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: + """Initialize a _ProxyFile. +""" + def read(self, size: int | None = None) -> AnyStr: + """Read bytes. +""" + def read1(self, size: int | None = None) -> AnyStr: + """Read bytes. +""" + def readline(self, size: int | None = None) -> AnyStr: + """Read a line. +""" + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: + """Read multiple lines. +""" + def __iter__(self) -> Iterator[AnyStr]: + """Iterate over lines. +""" + def tell(self) -> int: + """Return the position. +""" + def seek(self, offset: int, whence: int = 0) -> None: + """Change position. +""" + def close(self) -> None: + """Close the file. +""" + def __enter__(self) -> Self: + """Context management protocol support. +""" def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -250,13 +557,31 @@ class _ProxyFile(Generic[AnyStr]): def flush(self) -> None: ... @property def closed(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class _PartialFile(_ProxyFile[AnyStr]): - def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... + """A read-only wrapper of part of a file. +""" + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: + """Initialize a _PartialFile. +""" -class Error(Exception): ... -class NoSuchMailboxError(Error): ... -class NotEmptyError(Error): ... -class ExternalClashError(Error): ... -class FormatError(Error): ... +class Error(Exception): + """Raised for module-specific errors. +""" +class NoSuchMailboxError(Error): + """The specified mailbox does not exist and won't be created. 
+""" +class NotEmptyError(Error): + """The specified mailbox is not empty and deletion was requested. +""" +class ExternalClashError(Error): + """Another process caused an action to fail. +""" +class FormatError(Error): + """A file appears to have an invalid format. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi index ce549e01f528c..685f2dacba371 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi @@ -1,3 +1,5 @@ +"""Mailcap file handling. See RFC 1524. +""" from collections.abc import Mapping, Sequence from typing_extensions import TypeAlias @@ -7,5 +9,22 @@ __all__ = ["getcaps", "findmatch"] def findmatch( caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = [] -) -> tuple[str | None, _Cap | None]: ... -def getcaps() -> dict[str, list[_Cap]]: ... +) -> tuple[str | None, _Cap | None]: + """Find a match for a mailcap entry. + + Return a tuple containing the command line, and the mailcap entry + used; (None, None) if no match is found. This may invoke the + 'test' command of several matching entries before deciding which + entry to use. + + """ +def getcaps() -> dict[str, list[_Cap]]: + """Return a dictionary containing the mailcap database. + + The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain') + to a list of dictionaries corresponding to mailcap entries. The list + collects all the entries for that MIME type from all available mailcap + files. Each dictionary contains key-value pairs for that MIME type, + where the viewing command is stored with the key "view". + + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi index 46c421e4ce307..49f5f12e4fbdf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi @@ -1,3 +1,31 @@ +"""This module contains functions that can read and write Python values in +a binary format. The format is specific to Python, but independent of +machine architecture issues. + +Not all Python object types are supported; in general, only objects +whose value is independent from a particular invocation of Python can be +written and read by this module. The following types are supported: +None, integers, floating-point numbers, strings, bytes, bytearrays, +tuples, lists, sets, dictionaries, and code objects, where it +should be understood that tuples, lists and dictionaries are only +supported as long as the values contained therein are themselves +supported; and recursive lists and dictionaries should not be written +(they will cause infinite loops). + +Variables: + +version -- indicates the format that the module uses. Version 0 is the + historical format, version 1 shares interned strings and version 2 + uses a binary format for floating-point numbers. + Version 3 shares common object references (New in version 3.4). + +Functions: + +dump() -- write value to a file +load() -- read value from a file +dumps() -- marshal value as a bytes object +loads() -- read value from a bytes-like object +""" import builtins import sys import types @@ -29,21 +57,137 @@ _Marshallable: TypeAlias = ( ) if sys.version_info >= (3, 14): - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: ... 
- def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: ... + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: + """Write the value on the open file. + + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + allow_code + Allow to write code objects. + +If the value has (or contains an object that has) an unsupported type, a +ValueError exception is raised - but garbage data will also be written +to the file. The object will not be properly read back by load(). +""" + def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: + """Return the bytes object that would be written to a file by dump(value, file). + + value + Must be a supported type. + version + Indicates the data format that dumps should use. + allow_code + Allow to write code objects. + +Raise a ValueError exception if value has (or contains an object that has) an +unsupported type. +""" elif sys.version_info >= (3, 13): - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: ... - def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: ... + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: + """Write the value on the open file. + + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + allow_code + Allow to write code objects. + +If the value has (or contains an object that has) an unsupported type, a +ValueError exception is raised - but garbage data will also be written +to the file. The object will not be properly read back by load(). +""" + def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: + """Return the bytes object that would be written to a file by dump(value, file). + + value + Must be a supported type. + version + Indicates the data format that dumps should use. + allow_code + Allow to write code objects. + +Raise a ValueError exception if value has (or contains an object that has) an +unsupported type. +""" else: - def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ... - def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ... + def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: + """Write the value on the open file. + + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + +If the value has (or contains an object that has) an unsupported type, a +ValueError exception is raised - but garbage data will also be written +to the file. The object will not be properly read back by load(). +""" + def dumps(value: _Marshallable, version: int = 4, /) -> bytes: + """Return the bytes object that would be written to a file by dump(value, file). + + value + Must be a supported type. + version + Indicates the data format that dumps should use. + +Raise a ValueError exception if value has (or contains an object that has) an +unsupported type. +""" if sys.version_info >= (3, 13): - def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: ... - def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: ... 
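# A round-trip sketch for the dump()/dumps()/load()/loads() functions documented
# in this hunk; the cache file name is an illustrative assumption.
import marshal

value = {"answer": 42, "coords": (1.5, 2.5), "tags": ["a", "b"]}

payload = marshal.dumps(value)           # bytes in marshal's Python-specific format
assert marshal.loads(payload) == value   # extra trailing bytes in the input are ignored

with open("cache.marshal", "wb") as fh:
    marshal.dump(value, fh)              # ValueError if an unsupported type is present
with open("cache.marshal", "rb") as fh:
    assert marshal.load(fh) == value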
+ def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: + """Read one value from the open file and return it. + + file + Must be readable binary file. + allow_code + Allow to load code objects. + +If no valid value is read (e.g. because the data has a different Python +version's incompatible marshal format), raise EOFError, ValueError or +TypeError. + +Note: If an object containing an unsupported type was marshalled with +dump(), load() will substitute None for the unmarshallable type. +""" + def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: + """Convert the bytes-like object to a value. + + allow_code + Allow to load code objects. + +If no valid value is found, raise EOFError, ValueError or TypeError. Extra +bytes in the input are ignored. +""" else: - def load(file: SupportsRead[bytes], /) -> Any: ... - def loads(bytes: ReadableBuffer, /) -> Any: ... + def load(file: SupportsRead[bytes], /) -> Any: + """Read one value from the open file and return it. + + file + Must be readable binary file. + +If no valid value is read (e.g. because the data has a different Python +version's incompatible marshal format), raise EOFError, ValueError or +TypeError. + +Note: If an object containing an unsupported type was marshalled with +dump(), load() will substitute None for the unmarshallable type. +""" + def loads(bytes: ReadableBuffer, /) -> Any: + """Convert the bytes-like object to a value. + +If no valid value is found, raise EOFError, ValueError or TypeError. Extra +bytes in the input are ignored. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi index 1903d488f7bb3..1b9df2bcb830f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi @@ -1,3 +1,6 @@ +"""This module provides access to the mathematical functions +defined by the C standard. +""" import sys from _typeshed import SupportsMul, SupportsRMul from collections.abc import Iterable @@ -15,83 +18,265 @@ inf: Final[float] nan: Final[float] tau: Final[float] -def acos(x: _SupportsFloatOrIndex, /) -> float: ... -def acosh(x: _SupportsFloatOrIndex, /) -> float: ... -def asin(x: _SupportsFloatOrIndex, /) -> float: ... -def asinh(x: _SupportsFloatOrIndex, /) -> float: ... -def atan(x: _SupportsFloatOrIndex, /) -> float: ... -def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: ... -def atanh(x: _SupportsFloatOrIndex, /) -> float: ... +def acos(x: _SupportsFloatOrIndex, /) -> float: + """Return the arc cosine (measured in radians) of x. + +The result is between 0 and pi. +""" +def acosh(x: _SupportsFloatOrIndex, /) -> float: + """Return the inverse hyperbolic cosine of x. +""" +def asin(x: _SupportsFloatOrIndex, /) -> float: + """Return the arc sine (measured in radians) of x. + +The result is between -pi/2 and pi/2. +""" +def asinh(x: _SupportsFloatOrIndex, /) -> float: + """Return the inverse hyperbolic sine of x. +""" +def atan(x: _SupportsFloatOrIndex, /) -> float: + """Return the arc tangent (measured in radians) of x. + +The result is between -pi/2 and pi/2. +""" +def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: + """Return the arc tangent (measured in radians) of y/x. + +Unlike atan(y/x), the signs of both x and y are considered. +""" +def atanh(x: _SupportsFloatOrIndex, /) -> float: + """Return the inverse hyperbolic tangent of x. 
+""" if sys.version_info >= (3, 11): - def cbrt(x: _SupportsFloatOrIndex, /) -> float: ... + def cbrt(x: _SupportsFloatOrIndex, /) -> float: + """Return the cube root of x. +""" @type_check_only class _SupportsCeil(Protocol[_T_co]): def __ceil__(self) -> _T_co: ... @overload -def ceil(x: _SupportsCeil[_T], /) -> _T: ... +def ceil(x: _SupportsCeil[_T], /) -> _T: + """Return the ceiling of x as an Integral. + +This is the smallest integer >= x. +""" @overload def ceil(x: _SupportsFloatOrIndex, /) -> int: ... -def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: ... -def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... -def cos(x: _SupportsFloatOrIndex, /) -> float: ... -def cosh(x: _SupportsFloatOrIndex, /) -> float: ... -def degrees(x: _SupportsFloatOrIndex, /) -> float: ... -def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: ... -def erf(x: _SupportsFloatOrIndex, /) -> float: ... -def erfc(x: _SupportsFloatOrIndex, /) -> float: ... -def exp(x: _SupportsFloatOrIndex, /) -> float: ... +def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: + """Number of ways to choose k items from n items without repetition and without order. + +Evaluates to n! / (k! * (n - k)!) when k <= n and evaluates +to zero when k > n. + +Also called the binomial coefficient because it is equivalent +to the coefficient of k-th term in polynomial expansion of the +expression (1 + x)**n. + +Raises TypeError if either of the arguments are not integers. +Raises ValueError if either of the arguments are negative. +""" +def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: + """Return a float with the magnitude (absolute value) of x but the sign of y. + +On platforms that support signed zeros, copysign(1.0, -0.0) +returns -1.0. +""" +def cos(x: _SupportsFloatOrIndex, /) -> float: + """Return the cosine of x (measured in radians). +""" +def cosh(x: _SupportsFloatOrIndex, /) -> float: + """Return the hyperbolic cosine of x. +""" +def degrees(x: _SupportsFloatOrIndex, /) -> float: + """Convert angle x from radians to degrees. +""" +def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: + """Return the Euclidean distance between two points p and q. + +The points should be specified as sequences (or iterables) of +coordinates. Both inputs must have the same dimension. + +Roughly equivalent to: + sqrt(sum((px - qx) ** 2.0 for px, qx in zip(p, q))) +""" +def erf(x: _SupportsFloatOrIndex, /) -> float: + """Error function at x. +""" +def erfc(x: _SupportsFloatOrIndex, /) -> float: + """Complementary error function at x. +""" +def exp(x: _SupportsFloatOrIndex, /) -> float: + """Return e raised to the power of x. +""" if sys.version_info >= (3, 11): - def exp2(x: _SupportsFloatOrIndex, /) -> float: ... + def exp2(x: _SupportsFloatOrIndex, /) -> float: + """Return 2 raised to the power of x. +""" -def expm1(x: _SupportsFloatOrIndex, /) -> float: ... -def fabs(x: _SupportsFloatOrIndex, /) -> float: ... -def factorial(x: SupportsIndex, /) -> int: ... +def expm1(x: _SupportsFloatOrIndex, /) -> float: + """Return exp(x)-1. + +This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x. +""" +def fabs(x: _SupportsFloatOrIndex, /) -> float: + """Return the absolute value of the float x. +""" +def factorial(x: SupportsIndex, /) -> int: + """Find n!. +""" @type_check_only class _SupportsFloor(Protocol[_T_co]): def __floor__(self) -> _T_co: ... 
@overload -def floor(x: _SupportsFloor[_T], /) -> _T: ... +def floor(x: _SupportsFloor[_T], /) -> _T: + """Return the floor of x as an Integral. + +This is the largest integer <= x. +""" @overload def floor(x: _SupportsFloatOrIndex, /) -> int: ... -def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... -def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: ... -def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: ... -def gamma(x: _SupportsFloatOrIndex, /) -> float: ... -def gcd(*integers: SupportsIndex) -> int: ... -def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... +def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: + """Return fmod(x, y), according to platform C. + +x % y may differ. +""" +def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: + """Return the mantissa and exponent of x, as pair (m, e). + +m is a float and e is an int, such that x = m * 2.**e. +If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0. +""" +def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: + """Return an accurate floating-point sum of values in the iterable seq. + +Assumes IEEE-754 floating-point arithmetic. +""" +def gamma(x: _SupportsFloatOrIndex, /) -> float: + """Gamma function at x. +""" +def gcd(*integers: SupportsIndex) -> int: + """Greatest Common Divisor. +""" +def hypot(*coordinates: _SupportsFloatOrIndex) -> float: + """Multidimensional Euclidean distance from the origin to a point. + +Roughly equivalent to: + sqrt(sum(x**2 for x in coordinates)) + +For a two dimensional point (x, y), gives the hypotenuse +using the Pythagorean theorem: sqrt(x*x + y*y). + +For example, the hypotenuse of a 3/4/5 right triangle is: + + >>> hypot(3.0, 4.0) + 5.0 +""" def isclose( a: _SupportsFloatOrIndex, b: _SupportsFloatOrIndex, *, rel_tol: _SupportsFloatOrIndex = 1e-09, abs_tol: _SupportsFloatOrIndex = 0.0, -) -> bool: ... -def isinf(x: _SupportsFloatOrIndex, /) -> bool: ... -def isfinite(x: _SupportsFloatOrIndex, /) -> bool: ... -def isnan(x: _SupportsFloatOrIndex, /) -> bool: ... -def isqrt(n: SupportsIndex, /) -> int: ... -def lcm(*integers: SupportsIndex) -> int: ... -def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: ... -def lgamma(x: _SupportsFloatOrIndex, /) -> float: ... -def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... -def log10(x: _SupportsFloatOrIndex, /) -> float: ... -def log1p(x: _SupportsFloatOrIndex, /) -> float: ... -def log2(x: _SupportsFloatOrIndex, /) -> float: ... -def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: ... +) -> bool: + """Determine whether two floating-point numbers are close in value. + + rel_tol + maximum difference for being considered "close", relative to the + magnitude of the input values + abs_tol + maximum difference for being considered "close", regardless of the + magnitude of the input values + +Return True if a is close in value to b, and False otherwise. + +For the values to be considered close, the difference between them +must be smaller than at least one of the tolerances. + +-inf, inf and NaN behave similarly to the IEEE 754 Standard. That +is, NaN is not close to anything, even itself. inf and -inf are +only close to themselves. +""" +def isinf(x: _SupportsFloatOrIndex, /) -> bool: + """Return True if x is a positive or negative infinity, and False otherwise. +""" +def isfinite(x: _SupportsFloatOrIndex, /) -> bool: + """Return True if x is neither an infinity nor a NaN, and False otherwise. 
+""" +def isnan(x: _SupportsFloatOrIndex, /) -> bool: + """Return True if x is a NaN (not a number), and False otherwise. +""" +def isqrt(n: SupportsIndex, /) -> int: + """Return the integer part of the square root of the input. +""" +def lcm(*integers: SupportsIndex) -> int: + """Least Common Multiple. +""" +def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: + """Return x * (2**i). + +This is essentially the inverse of frexp(). +""" +def lgamma(x: _SupportsFloatOrIndex, /) -> float: + """Natural logarithm of absolute value of Gamma function at x. +""" +def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: + """log(x, [base=math.e]) +Return the logarithm of x to the given base. + +If the base is not specified, returns the natural logarithm (base e) of x. +""" +def log10(x: _SupportsFloatOrIndex, /) -> float: + """Return the base 10 logarithm of x. +""" +def log1p(x: _SupportsFloatOrIndex, /) -> float: + """Return the natural logarithm of 1+x (base e). + +The result is computed in a way which is accurate for x near zero. +""" +def log2(x: _SupportsFloatOrIndex, /) -> float: + """Return the base 2 logarithm of x. +""" +def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: + """Return the fractional and integer parts of x. + +Both results carry the sign of x and are floats. +""" if sys.version_info >= (3, 12): - def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: ... + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: + """Return the floating-point value the given number of steps after x towards y. + +If steps is not specified or is None, it defaults to 1. + +Raises a TypeError, if x or y is not a double, or if steps is not an integer. +Raises ValueError if steps is negative. +""" else: - def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... + def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: + """Return the next floating-point value after x towards y. +""" + +def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: + """Number of ways to choose k items from n items without repetition and with order. -def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: ... -def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... +Evaluates to n! / (n - k)! when k <= n and evaluates +to zero when k > n. + +If k is not specified or is None, then k defaults to n +and the function returns n!. + +Raises TypeError if either of the arguments are not integers. +Raises ValueError if either of the arguments are negative. +""" +def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: + """Return x**y (x to the power of y). +""" _PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] _NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] @@ -111,30 +296,75 @@ _SupportsProdNoDefaultT = TypeVar("_SupportsProdNoDefaultT", bound=_SupportsProd # For more details on its limitations and false positives, see #13572. # Instead, just like `builtins.sum`, we explicitly handle several useful cases. @overload -def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: ... 
# type: ignore[overload-overlap] +def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: # type: ignore[overload-overlap] + """Calculate the product of all the elements in the input iterable. + +The default start value for the product is 1. + +When the iterable is empty, return the start value. This function is +intended specifically for use with numeric values and may reject +non-numeric types. +""" @overload def prod(iterable: Iterable[_SupportsProdNoDefaultT], /) -> _SupportsProdNoDefaultT | Literal[1]: ... @overload def prod(iterable: Iterable[_MultiplicableT1], /, *, start: _MultiplicableT2) -> _MultiplicableT1 | _MultiplicableT2: ... -def radians(x: _SupportsFloatOrIndex, /) -> float: ... -def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: ... -def sin(x: _SupportsFloatOrIndex, /) -> float: ... -def sinh(x: _SupportsFloatOrIndex, /) -> float: ... +def radians(x: _SupportsFloatOrIndex, /) -> float: + """Convert angle x from degrees to radians. +""" +def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: + """Difference between x and the closest integer multiple of y. + +Return x - n*y where n*y is the closest integer multiple of y. +In the case where x is exactly halfway between two multiples of +y, the nearest even value of n is used. The result is always exact. +""" +def sin(x: _SupportsFloatOrIndex, /) -> float: + """Return the sine of x (measured in radians). +""" +def sinh(x: _SupportsFloatOrIndex, /) -> float: + """Return the hyperbolic sine of x. +""" if sys.version_info >= (3, 12): - def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: ... + def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: + """Return the sum of products of values from two iterables p and q. -def sqrt(x: _SupportsFloatOrIndex, /) -> float: ... -def tan(x: _SupportsFloatOrIndex, /) -> float: ... -def tanh(x: _SupportsFloatOrIndex, /) -> float: ... +Roughly equivalent to: + + sum(map(operator.mul, p, q, strict=True)) + +For float and mixed int/float inputs, the intermediate products +and sums are computed with extended precision. +""" + +def sqrt(x: _SupportsFloatOrIndex, /) -> float: + """Return the square root of x. +""" +def tan(x: _SupportsFloatOrIndex, /) -> float: + """Return the tangent of x (measured in radians). +""" +def tanh(x: _SupportsFloatOrIndex, /) -> float: + """Return the hyperbolic tangent of x. +""" # Is different from `_typeshed.SupportsTrunc`, which is not generic @type_check_only class _SupportsTrunc(Protocol[_T_co]): def __trunc__(self) -> _T_co: ... -def trunc(x: _SupportsTrunc[_T], /) -> _T: ... -def ulp(x: _SupportsFloatOrIndex, /) -> float: ... +def trunc(x: _SupportsTrunc[_T], /) -> _T: + """Truncates the Real x to the nearest Integral toward 0. + +Uses the __trunc__ magic method. +""" +def ulp(x: _SupportsFloatOrIndex, /) -> float: + """Return the value of the least significant bit of the float x. +""" if sys.version_info >= (3, 13): - def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: ... + def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: + """Fused multiply-add operation. + +Compute (x * y) + z with a single round. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi index 9914a34a2d6a6..5b5450897fd4b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi @@ -1,3 +1,27 @@ +"""Guess the MIME type of a file. + +This module defines two useful functions: + +guess_type(url, strict=True) -- guess the MIME type and encoding of a URL. + +guess_extension(type, strict=True) -- guess the extension for a given MIME type. + +It also contains the following, for tuning the behavior: + +Data: + +knownfiles -- list of files to parse +inited -- flag set when init() has been called +suffix_map -- dictionary mapping suffixes to suffixes +encodings_map -- dictionary mapping suffixes to encodings +types_map -- dictionary mapping suffixes to types + +Functions: + +init([files]) -- parse a list of files, default knownfiles (on Windows, the + default values are taken from the registry) +read_mime_types(file) -- parse one file, return a dictionary or None +""" import sys from _typeshed import StrPath from collections.abc import Sequence @@ -22,15 +46,70 @@ __all__ = [ if sys.version_info >= (3, 13): __all__ += ["guess_file_type"] -def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... -def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... -def guess_extension(type: str, strict: bool = True) -> str | None: ... +def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: + """Guess the type of a file based on its URL. + +Return value is a tuple (type, encoding) where type is None if the +type can't be guessed (no or unknown suffix) or a string of the +form type/subtype, usable for a MIME Content-type header; and +encoding is None for no encoding or the name of the program used +to encode (e.g. compress or gzip). The mappings are table +driven. Encoding suffixes are case sensitive; type suffixes are +first tried case sensitive, then case insensitive. + +The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped +to ".tar.gz". (This is table-driven too, using the dictionary +suffix_map). + +Optional 'strict' argument when false adds a bunch of commonly found, but +non-standard types. +""" +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: + """Guess the extensions for a file based on its MIME type. + +Return value is a list of strings giving the possible filename +extensions, including the leading dot ('.'). The extension is not +guaranteed to have been associated with any particular data +stream, but would be mapped to the MIME type 'type' by +guess_type(). If no extension can be guessed for 'type', None +is returned. + +Optional 'strict' argument when false adds a bunch of commonly found, +but non-standard types. +""" +def guess_extension(type: str, strict: bool = True) -> str | None: + """Guess the extension for a file based on its MIME type. + +Return value is a string giving a filename extension, including the +leading dot ('.'). The extension is not guaranteed to have been +associated with any particular data stream, but would be mapped to the +MIME type 'type' by guess_type(). If no extension can be guessed for +'type', None is returned. + +Optional 'strict' argument when false adds a bunch of commonly found, +but non-standard types. +""" def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... 
-def add_type(type: str, ext: str, strict: bool = True) -> None: ... +def add_type(type: str, ext: str, strict: bool = True) -> None: + """Add a mapping between a type and an extension. + +When the extension is already known, the new +type will replace the old one. When the type +is already known the extension will be added +to the list of known extensions. + +If strict is true, information will be added to +list of standard types, else to the list of non-standard +types. +""" if sys.version_info >= (3, 13): - def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... + def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: + """Guess the type of a file based on its path. + +Similar to guess_type(), but takes file path instead of URL. +""" inited: bool knownfiles: list[str] @@ -40,17 +119,101 @@ types_map: dict[str, str] common_types: dict[str, str] class MimeTypes: + """MIME-types datastore. + +This datastore can handle information from mime.types-style files +and supports basic determination of MIME type from a filename or +URL, and can guess a reasonable extension given a MIME type. +""" suffix_map: dict[str, str] encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... - def add_type(self, type: str, ext: str, strict: bool = True) -> None: ... - def guess_extension(self, type: str, strict: bool = True) -> str | None: ... - def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... - def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... - def read(self, filename: str, strict: bool = True) -> None: ... - def readfp(self, fp: IO[str], strict: bool = True) -> None: ... - def read_windows_registry(self, strict: bool = True) -> None: ... + def add_type(self, type: str, ext: str, strict: bool = True) -> None: + """Add a mapping between a type and an extension. + +When the extension is already known, the new +type will replace the old one. When the type +is already known the extension will be added +to the list of known extensions. + +If strict is true, information will be added to +list of standard types, else to the list of non-standard +types. + +Valid extensions are empty or start with a '.'. +""" + def guess_extension(self, type: str, strict: bool = True) -> str | None: + """Guess the extension for a file based on its MIME type. + +Return value is a string giving a filename extension, +including the leading dot ('.'). The extension is not +guaranteed to have been associated with any particular data +stream, but would be mapped to the MIME type 'type' by +guess_type(). If no extension can be guessed for 'type', None +is returned. + +Optional 'strict' argument when false adds a bunch of commonly found, +but non-standard types. +""" + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: + """Guess the type of a file which is either a URL or a path-like object. + +Return value is a tuple (type, encoding) where type is None if +the type can't be guessed (no or unknown suffix) or a string +of the form type/subtype, usable for a MIME Content-type +header; and encoding is None for no encoding or the name of +the program used to encode (e.g. compress or gzip). The +mappings are table driven. 
Encoding suffixes are case +sensitive; type suffixes are first tried case sensitive, then +case insensitive. + +The suffixes .tgz, .taz and .tz (case sensitive!) are all +mapped to '.tar.gz'. (This is table-driven too, using the +dictionary suffix_map.) + +Optional 'strict' argument when False adds a bunch of commonly found, +but non-standard types. +""" + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: + """Guess the extensions for a file based on its MIME type. + +Return value is a list of strings giving the possible filename +extensions, including the leading dot ('.'). The extension is not +guaranteed to have been associated with any particular data stream, +but would be mapped to the MIME type 'type' by guess_type(). + +Optional 'strict' argument when false adds a bunch of commonly found, +but non-standard types. +""" + def read(self, filename: str, strict: bool = True) -> None: + """ +Read a single mime.types-format file, specified by pathname. + +If strict is true, information will be added to +list of standard types, else to the list of non-standard +types. +""" + def readfp(self, fp: IO[str], strict: bool = True) -> None: + """ +Read a single mime.types-format file. + +If strict is true, information will be added to +list of standard types, else to the list of non-standard +types. +""" + def read_windows_registry(self, strict: bool = True) -> None: + """ +Load the MIME types database from Windows registry. + +If strict is true, information will be added to +list of standard types, else to the list of non-standard +types. +""" if sys.version_info >= (3, 13): - def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: ... + def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: + """Guess the type of a file based on its path. + +Similar to guess_type(), but takes file path instead of URL. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi index 8a5baba629141..b56a4ba347f68 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi @@ -33,6 +33,28 @@ PAGESIZE: Final[int] @disjoint_base class mmap: + """Windows: mmap(fileno, length[, tagname[, access[, offset]]]) + +Maps length bytes from the file specified by the file handle fileno, +and returns a mmap object. If length is larger than the current size +of the file, the file is extended to contain length bytes. If length +is 0, the maximum length of the map is the current size of the file, +except that if the file is empty Windows raises an exception (you cannot +create an empty mapping on Windows). + +Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]]) + +Maps length bytes from the file specified by the file descriptor fileno, +and returns a mmap object. If length is 0, the maximum length of the map +will be the current size of the file when mmap is called. +flags specifies the nature of the mapping. MAP_PRIVATE creates a +private copy-on-write mapping, so changes to the contents of the mmap +object will be private to this process, and MAP_SHARED creates a mapping +that's shared with all other processes mapping the same areas of the file. +The default value is MAP_SHARED. + +To map anonymous memory, pass -1 as the fileno (both versions). 
+""" if sys.platform == "win32": def __new__(self, fileno: int, length: int, tagname: str | None = None, access: int = 0, offset: int = 0) -> Self: ... else: @@ -67,7 +89,9 @@ class mmap: def size(self) -> int: ... def tell(self) -> int: ... def write_byte(self, byte: int) -> None: ... - def __len__(self) -> int: ... + def __len__(self) -> int: + """Return len(self). +""" closed: bool if sys.platform != "win32": def madvise(self, option: int, start: int = 0, length: int = ...) -> None: ... @@ -77,12 +101,18 @@ class mmap: def read(self, n: int | None = None) -> bytes: ... def write(self, bytes: ReadableBuffer) -> int: ... @overload - def __getitem__(self, key: int, /) -> int: ... + def __getitem__(self, key: int, /) -> int: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> bytes: ... - def __delitem__(self, key: int | slice, /) -> NoReturn: ... + def __delitem__(self, key: int | slice, /) -> NoReturn: + """Delete self[key]. +""" @overload - def __setitem__(self, key: int, value: int, /) -> None: ... + def __setitem__(self, key: int, value: int, /) -> None: + """Set self[key] to value. +""" @overload def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, @@ -93,8 +123,12 @@ class mmap: def __iter__(self) -> Iterator[int]: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def __buffer__(self, flags: int, /) -> memoryview: ... - def __release_buffer__(self, buffer: memoryview, /) -> None: ... + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object. +""" if sys.version_info >= (3, 13): def seekable(self) -> Literal[True]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi index 6db665a18e691..3659ceed9c4e4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi @@ -1,3 +1,5 @@ +"""Find modules used by a script, using introspection. +""" import sys from collections.abc import Container, Iterable, Iterator, Sequence from types import CodeType @@ -60,9 +62,24 @@ class ModuleFinder: def find_module( self, name: str, path: str | None, parent: Module | None = None ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented - def report(self) -> None: ... - def any_missing(self) -> list[str]: ... # undocumented - def any_missing_maybe(self) -> tuple[list[str], list[str]]: ... # undocumented + def report(self) -> None: + """Print a report to stdout, listing the found modules with their +paths, as well as modules that are missing, or seem to be missing. +""" + def any_missing(self) -> list[str]: # undocumented + """Return a list of modules that appear to be missing. Use +any_missing_maybe() if you want to know which modules are +certain to be missing, and which *may* be missing. +""" + def any_missing_maybe(self) -> tuple[list[str], list[str]]: # undocumented + """Return two lists, one with modules that are certainly missing +and one with modules that *may* be missing. The latter names could +either be submodules *or* just global names in the package. 
+ +The reason it can't always be determined is that it's impossible to +tell which names are imported when "from module import *" is done +with an extension module, short of actually importing it. +""" def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented def test() -> ModuleFinder | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi index cd4fa102c0f3e..cdee41d297abb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi @@ -18,36 +18,79 @@ _RecvT_co = TypeVar("_RecvT_co", covariant=True, default=Any) class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]): def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property - def closed(self) -> bool: ... # undocumented + def closed(self) -> bool: # undocumented + """True if the connection is closed +""" @property - def readable(self) -> bool: ... # undocumented + def readable(self) -> bool: # undocumented + """True if the connection is readable +""" @property - def writable(self) -> bool: ... # undocumented - def fileno(self) -> int: ... - def close(self) -> None: ... - def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... - def send(self, obj: _SendT_contra) -> None: ... - def recv_bytes(self, maxlength: int | None = None) -> bytes: ... - def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... - def recv(self) -> _RecvT_co: ... - def poll(self, timeout: float | None = 0.0) -> bool: ... + def writable(self) -> bool: # undocumented + """True if the connection is writable +""" + def fileno(self) -> int: + """File descriptor or handle of the connection +""" + def close(self) -> None: + """Close the connection +""" + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: + """Send the bytes data from a bytes-like object +""" + def send(self, obj: _SendT_contra) -> None: + """Send a (picklable) object +""" + def recv_bytes(self, maxlength: int | None = None) -> bytes: + """ +Receive bytes data as a bytes object. +""" + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: + """ +Receive bytes data into a writeable bytes-like object. +Return the number of bytes read. +""" + def recv(self) -> _RecvT_co: + """Receive a (picklable) object +""" + def poll(self, timeout: float | None = 0.0) -> bool: + """Whether there is any input available to be read +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __del__(self) -> None: ... -class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... +class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): + """ +Connection class based on an arbitrary file descriptor (Unix only), or +a socket handle (Windows). +""" if sys.platform == "win32": class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... class Listener: + """ +Returns a listener object. + +This is a wrapper for a bound socket which is 'listening' for +connections, or for a Windows named pipe. +""" def __init__( self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... - def accept(self) -> Connection[Incomplete, Incomplete]: ... 
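To see the Connection methods documented above (send(), recv(), poll(), send_bytes()) in action, a minimal sketch that keeps both ends of a pipe in one process purely for illustration:

    from multiprocessing import Pipe

    parent_conn, child_conn = Pipe()       # two Connection objects joined by a pipe
    child_conn.send({"status": "ok"})      # send a picklable object
    if parent_conn.poll(timeout=1.0):      # check whether input is available
        print(parent_conn.recv())          # {'status': 'ok'}
    child_conn.send_bytes(b"raw payload")
    print(parent_conn.recv_bytes())        # b'raw payload'
    parent_conn.close()
    child_conn.close()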
- def close(self) -> None: ... + def accept(self) -> Connection[Incomplete, Incomplete]: + """ +Accept a connection on the bound socket or named pipe of `self`. + +Returns a `Connection` object. +""" + def close(self) -> None: + """ +Close the bound socket or named pipe of `self`. +""" @property def address(self) -> _Address: ... @property @@ -67,8 +110,16 @@ else: def answer_challenge(connection: Connection[Any, Any], authkey: bytes) -> None: ... def wait( object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None -) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: ... -def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: ... +) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: + """ +Wait till an object in object_list is ready/readable. + +Returns list of those objects in object_list which are ready/readable. +""" +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: + """ +Returns a connection to the address of a `Listener` +""" # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection @@ -77,7 +128,10 @@ def Client(address: _Address, family: str | None = None, authkey: bytes | None = # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... + def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: + """ +Returns pair of connection objects at either end of a pipe +""" else: def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi index 03d1d2e5c2203..3b8356360ab4e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi @@ -37,13 +37,29 @@ class BaseContext: # N.B. The methods below are applied at runtime to generate # multiprocessing.*, so the signatures should be identical (modulo self). @staticmethod - def current_process() -> BaseProcess: ... + def current_process() -> BaseProcess: + """ +Return process object representing the current process +""" @staticmethod - def parent_process() -> BaseProcess | None: ... + def parent_process() -> BaseProcess | None: + """ +Return process object representing the parent process +""" @staticmethod - def active_children() -> list[BaseProcess]: ... - def cpu_count(self) -> int: ... - def Manager(self) -> SyncManager: ... + def active_children() -> list[BaseProcess]: + """ +Return list of process objects corresponding to live child processes +""" + def cpu_count(self) -> int: + """Returns the number of CPUs in the system +""" + def Manager(self) -> SyncManager: + """Returns a manager associated with a running server process + +The managers methods such as `Lock()`, `Condition()` and `Queue()` +can be used to create shared objects. +""" # N.B. Keep this in sync with multiprocessing.connection.Pipe. 
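A small client/server sketch for the Listener/Client pair documented above; the address is hypothetical, and in real code the two sides would normally run in different processes:

    from multiprocessing.connection import Client, Listener

    address = ("localhost", 6000)               # hypothetical host/port
    with Listener(address) as listener:
        client = Client(address)                # normally called from another process
        with listener.accept() as server_side:
            client.send("ping")                 # send a picklable object
            print(server_side.recv())           # 'ping'
        client.close()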
# _ConnectionBase is the common base class of Connection and PipeConnection @@ -52,41 +68,71 @@ class BaseContext: # The two connections should have the same generic types but inverted (Connection[_T1, _T2], Connection[_T2, _T1]). # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": - def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: ... + def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: + """Returns two connection object connected by a pipe +""" else: def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... def Barrier( self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None - ) -> synchronize.Barrier: ... - def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... - def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... - def Event(self) -> synchronize.Event: ... - def Lock(self) -> synchronize.Lock: ... - def RLock(self) -> synchronize.RLock: ... - def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... - def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... - def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... - def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... + ) -> synchronize.Barrier: + """Returns a barrier object +""" + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: + """Returns a bounded semaphore object +""" + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: + """Returns a condition object +""" + def Event(self) -> synchronize.Event: + """Returns an event object +""" + def Lock(self) -> synchronize.Lock: + """Returns a non-recursive lock object +""" + def RLock(self) -> synchronize.RLock: + """Returns a recursive lock object +""" + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: + """Returns a semaphore object +""" + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: + """Returns a queue object +""" + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: + """Returns a queue object +""" + def SimpleQueue(self) -> queues.SimpleQueue[Any]: + """Returns a queue object +""" def Pool( self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = (), maxtasksperchild: int | None = None, - ) -> _Pool: ... + ) -> _Pool: + """Returns a process pool object +""" @overload - def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... + def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: + """Returns a shared object +""" @overload def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... @overload - def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... + def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: + """Returns a shared array +""" @overload def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload def Value( self, typecode_or_type: type[_SimpleCData[_T]], *args: Any, lock: Literal[True] | _LockLike = True - ) -> Synchronized[_T]: ... + ) -> Synchronized[_T]: + """Returns a synchronized shared object +""" @overload def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> Synchronized[_CT]: ... 
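The BaseContext factory methods above mirror the module-level multiprocessing API; a small sketch using an explicit context (the 'spawn' start method is just one choice):

    import multiprocessing as mp

    ctx = mp.get_context("spawn")      # pick a start method explicitly
    queue = ctx.Queue()                # ctx.Queue(), ctx.Lock(), ctx.Event(), ... as documented above
    lock = ctx.Lock()
    counter = ctx.Value("i", 0)        # synchronized shared int
    shared = ctx.Array("d", 3)         # synchronized shared array of three doubles

    with lock:
        counter.value += 1
    shared[0] = 3.14
    queue.put(("done", counter.value))
    print(queue.get())                 # ('done', 1)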
@overload @@ -98,7 +144,9 @@ class BaseContext: @overload def Array( self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] - ) -> SynchronizedArray[_T]: ... + ) -> SynchronizedArray[_T]: + """Returns a synchronized shared array +""" @overload def Array( self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True @@ -119,12 +167,30 @@ class BaseContext: def Array( self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True ) -> Any: ... - def freeze_support(self) -> None: ... - def get_logger(self) -> Logger: ... - def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... - def allow_connection_pickling(self) -> None: ... - def set_executable(self, executable: str) -> None: ... - def set_forkserver_preload(self, module_names: list[str]) -> None: ... + def freeze_support(self) -> None: + """Check whether this is a fake forked process in a frozen executable. +If so then run code specified by commandline and exit. +""" + def get_logger(self) -> Logger: + """Return package logger -- if it does not already exist then +it is created. +""" + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: + """Turn on logging and add a handler which prints to stderr +""" + def allow_connection_pickling(self) -> None: + """Install support for sending connections and sockets +between processes +""" + def set_executable(self, executable: str) -> None: + """Sets the path to a python.exe or pythonw.exe binary used to run +child processes instead of sys.executable when using the 'spawn' +start method. Useful for people embedding Python. +""" + def set_forkserver_preload(self, module_names: list[str]) -> None: + """Set list of module names to try to load in forkserver process. +This is really just a hint. +""" if sys.platform != "win32": @overload def get_context(self, method: None = None) -> DefaultContext: ... @@ -150,7 +216,10 @@ class BaseContext: def get_start_method(self, allow_none: bool) -> str | None: ... def set_start_method(self, method: str | None, force: bool = False) -> None: ... @property - def reducer(self) -> str: ... + def reducer(self) -> str: + """Controls how objects will be reduced to a form that can be +shared with other processes. +""" @reducer.setter def reducer(self, reduction: str) -> None: ... def _check_available(self) -> None: ... @@ -164,7 +233,9 @@ class DefaultContext(BaseContext): Process: ClassVar[type[Process]] def __init__(self, context: BaseContext) -> None: ... def get_start_method(self, allow_none: bool = False) -> str: ... - def get_all_start_methods(self) -> list[str]: ... + def get_all_start_methods(self) -> list[str]: + """Returns a list of the supported start methods, default first. +""" _default_context: DefaultContext diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi index c4af295d23161..3d499b902e26d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -10,10 +10,30 @@ MAXFDS_TO_SEND: Final = 256 SIGNED_STRUCT: Final[Struct] class ForkServer: - def set_forkserver_preload(self, modules_names: list[str]) -> None: ... - def get_inherited_fds(self) -> list[int] | None: ... 
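A short sketch of the start-method and logging helpers documented above; which start methods are available depends on the platform:

    import multiprocessing as mp

    print(mp.get_all_start_methods())           # e.g. ['fork', 'spawn', 'forkserver'] on Linux
    mp.set_start_method("spawn", force=True)    # force=True overrides an earlier choice
    logger = mp.log_to_stderr("INFO")           # turn on logging with a stderr handler
    logger.info("multiprocessing logging enabled")
    mp.set_forkserver_preload(["collections"])  # hint modules for the forkserver (mainly relevant on Unix)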
- def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: ... - def ensure_running(self) -> None: ... + def set_forkserver_preload(self, modules_names: list[str]) -> None: + """Set list of module names to try to load in forkserver process. +""" + def get_inherited_fds(self) -> list[int] | None: + """Return list of fds inherited from parent process. + +This returns None if the current process was not started by fork +server. +""" + def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: + """Request forkserver to create a child process. + +Returns a pair of fds (status_r, data_w). The calling process can read +the child process's pid and (eventually) its returncode from status_r. +The calling process should write to data_w the pickled preparation and +process data. +""" + def ensure_running(self) -> None: + """Make sure that a fork server is running. + +This can be called from any process. Note that usually a child +process will just reuse the forkserver started by its parent, so +ensure_running() will do nothing. +""" if sys.version_info >= (3, 14): def main( @@ -24,7 +44,9 @@ if sys.version_info >= (3, 14): sys_path: list[str] | None = None, *, authkey_r: int | None = None, - ) -> None: ... + ) -> None: + """Run forkserver. +""" else: def main( @@ -33,7 +55,9 @@ else: preload: Sequence[str], main_path: str | None = None, sys_path: Unused = None, - ) -> None: ... + ) -> None: + """Run forkserver. +""" def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi index 38191a099f1ec..d6a89156870bb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi @@ -8,6 +8,9 @@ from typing_extensions import TypeAlias __all__ = ["BufferWrapper"] class Arena: + """ +A shared memory area backed by a temporary file (POSIX). +""" size: int buffer: mmap if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi index 5efe69a973777..8dc0d9ce92538 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi @@ -38,6 +38,9 @@ class Namespace: _Namespace: TypeAlias = Namespace class Token: + """ +Type to uniquely identify a shared object +""" __slots__ = ("typeid", "address", "id") typeid: str | bytes | None address: _Address | None @@ -47,6 +50,9 @@ class Token: def __setstate__(self, state: tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]) -> None: ... class BaseProxy: + """ +A base for proxies of shared objects +""" _address_to_local: dict[_Address, Any] _mutex: Any def __init__( @@ -60,15 +66,25 @@ class BaseProxy: manager_owned: bool = False, ) -> None: ... def __deepcopy__(self, memo: Any | None) -> Any: ... - def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: ... - def _getvalue(self) -> Any: ... + def _callmethod(self, methodname: str, args: tuple[Any, ...] 
= (), kwds: dict[Any, Any] = {}) -> None: + """ +Try to call a method of the referent and return a copy of the result +""" + def _getvalue(self) -> Any: + """ +Get a copy of the value of the referent +""" def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... class ValueProxy(BaseProxy, Generic[_T]): def get(self) -> _T: ... def set(self, value: _T) -> None: ... value: _T - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" if sys.version_info >= (3, 13): class _BaseDictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -96,7 +112,11 @@ if sys.version_info >= (3, 13): def values(self) -> list[_VT]: ... # type: ignore[override] class DictProxy(_BaseDictProxy[_KT, _VT]): - def __class_getitem__(cls, args: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, args: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" else: class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -203,7 +223,11 @@ class ListProxy(BaseListProxy[_T]): def __iadd__(self, value: Iterable[_T], /) -> Self: ... # type: ignore[override] def __imul__(self, value: SupportsIndex, /) -> Self: ... # type: ignore[override] if sys.version_info >= (3, 13): - def __class_getitem__(cls, args: Any, /) -> Any: ... + def __class_getitem__(cls, args: Any, /) -> Any: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" # Send is (kind, result) # Receive is (id, methodname, args, kwds) @@ -211,6 +235,9 @@ _ServerConnection: TypeAlias = Connection[tuple[str, Any], tuple[str, str, Itera # Returned by BaseManager.get_server() class Server: + """ +Server class which runs in a process controlled by a manager object +""" address: _Address | None id_to_obj: dict[str, tuple[Any, set[str], dict[str, str]]] fallback_mapping: dict[str, Callable[[_ServerConnection, str, Any], Any]] @@ -223,28 +250,61 @@ class Server: authkey: bytes, serializer: str, ) -> None: ... - def serve_forever(self) -> None: ... + def serve_forever(self) -> None: + """ +Run the server forever +""" def accepter(self) -> None: ... if sys.version_info >= (3, 10): - def handle_request(self, conn: _ServerConnection) -> None: ... + def handle_request(self, conn: _ServerConnection) -> None: + """ +Handle a new connection +""" else: - def handle_request(self, c: _ServerConnection) -> None: ... + def handle_request(self, c: _ServerConnection) -> None: + """ + Handle a new connection + """ - def serve_client(self, conn: _ServerConnection) -> None: ... + def serve_client(self, conn: _ServerConnection) -> None: + """ +Handle requests from the proxies in a particular process/thread +""" def fallback_getvalue(self, conn: _ServerConnection, ident: str, obj: _T) -> _T: ... def fallback_str(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def fallback_repr(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def dummy(self, c: _ServerConnection) -> None: ... - def debug_info(self, c: _ServerConnection) -> str: ... - def number_of_objects(self, c: _ServerConnection) -> int: ... - def shutdown(self, c: _ServerConnection) -> None: ... - def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: ... 
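The proxy types above (DictProxy, ListProxy, ValueProxy) are what a started manager hands back; a minimal sketch using the high-level Manager() helper (in a real script this belongs under an `if __name__ == "__main__":` guard):

    from multiprocessing import Manager

    with Manager() as manager:                 # spawns a server process
        shared_dict = manager.dict()           # returns a DictProxy
        shared_list = manager.list([1, 2, 3])  # returns a ListProxy
        shared_dict["answer"] = 42
        shared_list.append(4)
        print(dict(shared_dict), list(shared_list))   # {'answer': 42} [1, 2, 3, 4]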
- def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: ... - def accept_connection(self, c: _ServerConnection, name: str) -> None: ... + def debug_info(self, c: _ServerConnection) -> str: + """ +Return some info --- useful to spot problems with refcounting +""" + def number_of_objects(self, c: _ServerConnection) -> int: + """ +Number of shared objects +""" + def shutdown(self, c: _ServerConnection) -> None: + """ +Shutdown this process +""" + def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: + """ +Create a new shared object and return its id +""" + def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: + """ +Return the methods of the shared object indicated by token +""" + def accept_connection(self, c: _ServerConnection, name: str) -> None: + """ +Spawn a new thread to serve this connection +""" def incref(self, c: _ServerConnection, ident: str) -> None: ... def decref(self, c: _ServerConnection, ident: str) -> None: ... class BaseManager: + """ +Base class for managers +""" if sys.version_info >= (3, 11): def __init__( self, @@ -264,11 +324,23 @@ class BaseManager: ctx: BaseContext | None = None, ) -> None: ... - def get_server(self) -> Server: ... - def connect(self) -> None: ... - def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: ... + def get_server(self) -> Server: + """ +Return server object with serve_forever() method and address attribute +""" + def connect(self) -> None: + """ +Connect manager object to the server process +""" + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: + """ +Spawn a server process for this manager object +""" shutdown: _Finalize # only available after start() was called - def join(self, timeout: float | None = None) -> None: ... # undocumented + def join(self, timeout: float | None = None) -> None: # undocumented + """ +Join the manager process (if it has been spawned) +""" @property def address(self) -> _Address | None: ... @classmethod @@ -280,13 +352,25 @@ class BaseManager: exposed: Sequence[str] | None = None, method_to_typeid: Mapping[str, str] | None = None, create_method: bool = True, - ) -> None: ... + ) -> None: + """ +Register a typeid with the manager type +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class SyncManager(BaseManager): + """ +Subclass of `BaseManager` which supports a number of shared object types. + +The types registered are those intended for the synchronization +of threads, plus `dict`, `list` and `Namespace`. + +The `multiprocessing.Manager()` function creates started instances of +this class. +""" def Barrier( self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None ) -> threading.Barrier: ... @@ -339,12 +423,36 @@ class SyncManager(BaseManager): class RemoteError(Exception): ... class SharedMemoryServer(Server): - def track_segment(self, c: _ServerConnection, segment_name: str) -> None: ... - def release_segment(self, c: _ServerConnection, segment_name: str) -> None: ... - def list_segments(self, c: _ServerConnection) -> list[str]: ... + def track_segment(self, c: _ServerConnection, segment_name: str) -> None: + """Adds the supplied shared memory block name to Server's tracker. 
+""" + def release_segment(self, c: _ServerConnection, segment_name: str) -> None: + """Calls unlink() on the shared memory block with the supplied name +and removes it from the tracker instance inside the Server. +""" + def list_segments(self, c: _ServerConnection) -> list[str]: + """Returns a list of names of shared memory blocks that the Server +is currently tracking. +""" class SharedMemoryManager(BaseManager): - def get_server(self) -> SharedMemoryServer: ... - def SharedMemory(self, size: int) -> _SharedMemory: ... - def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: ... + """Like SyncManager but uses SharedMemoryServer instead of Server. + +It provides methods for creating and returning SharedMemory instances +and for creating a list-like object (ShareableList) backed by shared +memory. It also provides methods that create and return Proxy Objects +that support synchronization across processes (i.e. multi-process-safe +locks and semaphores). +""" + def get_server(self) -> SharedMemoryServer: + """Better than monkeypatching for now; merge into Server ultimately +""" + def SharedMemory(self, size: int) -> _SharedMemory: + """Returns a new SharedMemory instance with the specified size in +bytes, to be tracked by the manager. +""" + def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: + """Returns a new ShareableList instance populated with the values +from the input sequence, to be tracked by the manager. +""" def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi index b79f9e77359ae..e31ca99591fbf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi @@ -17,7 +17,11 @@ class ApplyResult(Generic[_T]): def wait(self, timeout: float | None = None) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" # alias created during issue #17805 AsyncResult = ApplyResult @@ -41,6 +45,9 @@ class IMapIterator(Generic[_T]): class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool: + """ +Class which supports an async version of applying functions to arguments. +""" def __init__( self, processes: int | None = None, @@ -51,7 +58,11 @@ class Pool: ) -> None: ... @staticmethod def Process(ctx: DefaultContext, *args: Any, **kwds: Any) -> Process: ... - def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: ... + def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: + """ +Equivalent of `func(*args, **kwds)`. +Pool must be running. +""" def apply_async( self, func: Callable[..., _T], @@ -59,8 +70,15 @@ class Pool: kwds: Mapping[str, Any] = {}, callback: Callable[[_T], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> AsyncResult[_T]: ... - def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... + ) -> AsyncResult[_T]: + """ +Asynchronous version of `apply()` method. 
+""" + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: + """ +Apply `func` to each element in `iterable`, collecting the results +in a list that is returned. +""" def map_async( self, func: Callable[[_S], _T], @@ -68,10 +86,24 @@ class Pool: chunksize: int | None = None, callback: Callable[[list[_T]], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> MapResult[_T]: ... - def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... - def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... - def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... + ) -> MapResult[_T]: + """ +Asynchronous version of `map()` method. +""" + def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: + """ +Equivalent of `map()` -- can be MUCH slower than `Pool.map()`. +""" + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: + """ +Like `imap()` method but ordering of results is arbitrary. +""" + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: + """ +Like `map()` method but the elements of the `iterable` are expected to +be iterables as well and will be unpacked as arguments. Hence +`func` and (a, b) becomes func(a, b). +""" def starmap_async( self, func: Callable[..., _T], @@ -79,7 +111,10 @@ class Pool: chunksize: int | None = None, callback: Callable[[list[_T]], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, - ) -> AsyncResult[list[_T]]: ... + ) -> AsyncResult[list[_T]]: + """ +Asynchronous version of `starmap()` method. +""" def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi index 4d129b27b0e87..d4d0f6bf235bd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi @@ -4,6 +4,11 @@ from typing import Any __all__ = ["BaseProcess", "current_process", "active_children", "parent_process"] class BaseProcess: + """ +Process objects represent activity that is run in a separate process + +The class is analogous to `threading.Thread` +""" name: str daemon: bool authkey: bytes @@ -18,22 +23,68 @@ class BaseProcess: *, daemon: bool | None = None, ) -> None: ... - def run(self) -> None: ... - def start(self) -> None: ... - def terminate(self) -> None: ... - def kill(self) -> None: ... - def close(self) -> None: ... - def join(self, timeout: float | None = None) -> None: ... - def is_alive(self) -> bool: ... + def run(self) -> None: + """ +Method to be run in sub-process; can be overridden in sub-class +""" + def start(self) -> None: + """ +Start child process +""" + def terminate(self) -> None: + """ +Terminate process; sends SIGTERM signal or uses TerminateProcess() +""" + def kill(self) -> None: + """ +Terminate process; sends SIGKILL signal or uses TerminateProcess() +""" + def close(self) -> None: + """ +Close the Process object. + +This method releases resources held by the Process object. 
It is +an error to call this method if the child process is still running. +""" + def join(self, timeout: float | None = None) -> None: + """ +Wait until child process terminates +""" + def is_alive(self) -> bool: + """ +Return whether process is alive +""" @property - def exitcode(self) -> int | None: ... + def exitcode(self) -> int | None: + """ +Return exit code of process or `None` if it has yet to stop +""" @property - def ident(self) -> int | None: ... + def ident(self) -> int | None: + """ +Return identifier (PID) of process or `None` if it has yet to start +""" @property - def pid(self) -> int | None: ... + def pid(self) -> int | None: + """ +Return identifier (PID) of process or `None` if it has yet to start +""" @property - def sentinel(self) -> int: ... + def sentinel(self) -> int: + """ +Return a file descriptor (Unix) or handle (Windows) suitable for +waiting for process termination. +""" -def current_process() -> BaseProcess: ... -def active_children() -> list[BaseProcess]: ... -def parent_process() -> BaseProcess | None: ... +def current_process() -> BaseProcess: + """ +Return process object representing the current process +""" +def active_children() -> list[BaseProcess]: + """ +Return list of process objects corresponding to live child processes +""" +def parent_process() -> BaseProcess | None: + """ +Return process object representing the parent process +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi index a6b00d744c421..d21cc96dd3bf8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi @@ -21,7 +21,11 @@ class Queue(Generic[_T]): def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... if sys.version_info >= (3, 12): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... @@ -33,4 +37,8 @@ class SimpleQueue(Generic[_T]): def empty(self) -> bool: ... def get(self) -> _T: ... def put(self, obj: _T) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi index 490ae195c20e2..047471be64d16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi @@ -18,17 +18,23 @@ else: HAVE_SEND_HANDLE: Final[bool] class ForkingPickler(pickle.Pickler): + """Pickler subclass used by multiprocessing. +""" dispatch_table: _DispatchTableType def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod - def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: + """Register a reduce function for a type. +""" @classmethod def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... 
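A compact sketch of the process lifecycle documented above (start/join/exitcode) using the concrete multiprocessing.Process class together with a Queue; the worker function is hypothetical:

    from multiprocessing import Process, Queue

    def worker(q):
        q.put("hello from the child")

    if __name__ == "__main__":
        q = Queue()
        p = Process(target=worker, args=(q,), name="demo-worker")
        p.start()                 # spawn the child and invoke run()
        print(q.get())            # 'hello from the child'
        p.join(timeout=5.0)       # wait for termination
        print(p.exitcode, p.pid)  # 0 and the child's PID once it has exited
        p.close()                 # release the Process object's resources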
loads = pickle.loads register = ForkingPickler.register -def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: + """Replacement for pickle.dump() using ForkingPickler. +""" if sys.platform == "win32": def duplicate( @@ -46,11 +52,21 @@ else: if sys.version_info < (3, 14): ACKNOWLEDGE: Final[bool] - def recvfds(sock: socket, size: int) -> list[int]: ... - def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... - def recv_handle(conn: HasFileno) -> int: ... - def sendfds(sock: socket, fds: list[int]) -> None: ... - def DupFd(fd: int) -> Any: ... # Return type is really hard to get right + def recvfds(sock: socket, size: int) -> list[int]: + """Receive an array of fds over an AF_UNIX socket. +""" + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: + """Send a handle over a local connection. +""" + def recv_handle(conn: HasFileno) -> int: + """Receive a handle over a local connection. +""" + def sendfds(sock: socket, fds: list[int]) -> None: + """Send an array of fds over an AF_UNIX socket. +""" + def DupFd(fd: int) -> Any: # Return type is really hard to get right + """Return a wrapper for an fd. +""" # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -71,6 +87,10 @@ else: _DupFd = DupFd class AbstractReducer(metaclass=ABCMeta): + """Abstract base class for use in implementing a Reduction class +suitable for use in replacing the standard reduction mechanism +used in multiprocessing. +""" ForkingPickler = _ForkingPickler register = _register dump = _dump diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi index 5fee7cf31e17f..38185f87d39e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -14,7 +14,13 @@ else: __all__ += ["DupFd"] class DupFd: + """Wrapper for fd which can be used at any time. +""" def __init__(self, fd: int) -> None: ... - def detach(self) -> int: ... + def detach(self) -> int: + """Get the fd. This should only be called once. +""" -def stop(timeout: float | None = None) -> None: ... +def stop(timeout: float | None = None) -> None: + """Stop the background thread and clear registered resources. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi index cb2f27a628614..a9ffd3d68eb26 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -6,9 +6,18 @@ __all__ = ["ensure_running", "register", "unregister"] class ResourceTracker: def getfd(self) -> int | None: ... - def ensure_running(self) -> None: ... - def register(self, name: Sized, rtype: str) -> None: ... - def unregister(self, name: Sized, rtype: str) -> None: ... + def ensure_running(self) -> None: + """Make sure that resource tracker process is running. + +This can be run from any process. Usually a child process will use +the resource created by its parent. +""" + def register(self, name: Sized, rtype: str) -> None: + """Register name of resource with resource tracker. 
+""" + def unregister(self, name: Sized, rtype: str) -> None: + """Unregister name of resource with resource tracker. +""" if sys.version_info >= (3, 12): def __del__(self) -> None: ... @@ -18,4 +27,6 @@ register = _resource_tracker.register unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd -def main(fd: FileDescriptorOrPath) -> None: ... +def main(fd: FileDescriptorOrPath) -> None: + """Run resource tracker. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi index f75a372a69a2d..1e0e3a17d2db9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -1,3 +1,8 @@ +"""Provides shared memory for direct access across processes. + +The API of this package is currently provisional. Refer to the +documentation for details. +""" import sys from collections.abc import Iterable from types import GenericAlias @@ -9,22 +14,67 @@ __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: + """Creates a new shared memory block or attaches to an existing +shared memory block. + +Every shared memory block is assigned a unique name. This enables +one process to create a shared memory block with a particular name +so that a different process can attach to that same shared memory +block using that same name. + +As a resource for sharing data across processes, shared memory blocks +may outlive the original process that created them. When one process +no longer needs access to a shared memory block that might still be +needed by other processes, the close() method should be called. +When a shared memory block is no longer needed by any process, the +unlink() method should be called to ensure proper cleanup. +""" if sys.version_info >= (3, 13): def __init__(self, name: str | None = None, create: bool = False, size: int = 0, *, track: bool = True) -> None: ... else: def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... @property - def buf(self) -> memoryview | None: ... + def buf(self) -> memoryview | None: + """A memoryview of contents of the shared memory block. +""" @property - def name(self) -> str: ... + def name(self) -> str: + """Unique name that identifies the shared memory block. +""" @property - def size(self) -> int: ... - def close(self) -> None: ... - def unlink(self) -> None: ... + def size(self) -> int: + """Size in bytes. +""" + def close(self) -> None: + """Closes access to the shared memory from this instance but does +not destroy the shared memory block. +""" + def unlink(self) -> None: + """Requests that the underlying shared memory block be destroyed. + +Unlink should be called once (and only once) across all handles +which have access to the shared memory block, even if these +handles belong to different processes. Closing and unlinking may +happen in any order, but trying to access data inside a shared +memory block after unlinking may result in memory errors, +depending on platform. + +This method has no effect on Windows, where the only way to +delete a shared memory block is to close all handles. +""" def __del__(self) -> None: ... class ShareableList(Generic[_SLT]): + """Pattern for a mutable list-like object shareable via a shared +memory block. 
It differs from the built-in list type in that these +lists can not change their overall length (i.e. no append, insert, +etc.) + +Because values are packed into a memoryview as bytes, the struct +packing format for any storable value must require no more than 8 +characters to describe its format. +""" shm: SharedMemory @overload def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... @@ -35,7 +85,18 @@ class ShareableList(Generic[_SLT]): def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... def __len__(self) -> int: ... @property - def format(self) -> str: ... - def count(self, value: _SLT) -> int: ... - def index(self, value: _SLT) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def format(self) -> str: + """The struct packing format used by all currently stored items. +""" + def count(self, value: _SLT) -> int: + """L.count(value) -> integer -- return number of occurrences of value. +""" + def index(self, value: _SLT) -> int: + """L.index(value) -> integer -- return first index of value. +Raises ValueError if the value is not present. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi index e2ec15f05ea23..f561480e57aeb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -13,15 +13,24 @@ _T = TypeVar("_T") _CT = TypeVar("_CT", bound=_CData) @overload -def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: ... +def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: + """ +Returns a ctypes object allocated from shared memory +""" @overload def RawValue(typecode_or_type: str, *args: Any) -> Any: ... @overload -def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... +def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: + """ +Returns a ctypes array allocated from shared memory +""" @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload -def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: + """ +Return a synchronization wrapper for a Value +""" @overload def Value( typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None @@ -37,7 +46,10 @@ def Value( @overload def Array( typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None -) -> _CT: ... 
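To make the SharedMemory/ShareableList lifecycle above concrete (create, attach by name, close, unlink), a minimal single-process sketch; in practice the attaching side would be a different process that receives the block's name:

    from multiprocessing import shared_memory

    block = shared_memory.SharedMemory(create=True, size=16)
    block.buf[:5] = b"hello"                                # write through the memoryview
    attached = shared_memory.SharedMemory(name=block.name)  # attach by name
    print(bytes(attached.buf[:5]))                          # b'hello'

    numbers = shared_memory.ShareableList([1, 2.0, "three"])
    print(numbers[2], numbers.format)                       # 'three' and its struct packing format

    attached.close()
    block.close()
    block.unlink()                                          # destroy the block once nobody needs it
    numbers.shm.close()
    numbers.shm.unlink()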
+) -> _CT: + """ +Return a synchronization wrapper for a RawArray +""" @overload def Array( typecode_or_type: type[c_char], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi index 4a97532228974..58aa3cf225d22 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi @@ -17,16 +17,37 @@ WINSERVICE: Final[bool] def set_executable(exe: str) -> None: ... def get_executable() -> str: ... -def is_forking(argv: Sequence[str]) -> bool: ... -def freeze_support() -> None: ... -def get_command_line(**kwds: Any) -> list[str]: ... -def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... +def is_forking(argv: Sequence[str]) -> bool: + """ +Return whether commandline indicates we are forking +""" +def freeze_support() -> None: + """ +Run code for process object if this is not the main process +""" +def get_command_line(**kwds: Any) -> list[str]: + """ +Returns prefix of command line used for spawning a child process +""" +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: + """ +Run code specified by data received over pipe +""" # undocumented def _main(fd: int, parent_sentinel: int) -> int: ... -def get_preparation_data(name: str) -> dict[str, Any]: ... +def get_preparation_data(name: str) -> dict[str, Any]: + """ +Return info about parent needed by child to unpickle process object +""" old_main_modules: list[ModuleType] -def prepare(data: Mapping[str, Any]) -> None: ... -def import_main_path(main_path: str) -> None: ... +def prepare(data: Mapping[str, Any]) -> None: + """ +Try to get current process ready to unpickle process object +""" +def import_main_path(main_path: str) -> None: + """ +Set sys.modules['__main__'] to module at main_path +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi index 3583194c77e29..06eee50fa945b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi @@ -48,8 +48,14 @@ if sys.version_info >= (3, 14): def warn(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... -def get_logger() -> Logger: ... -def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... +def get_logger() -> Logger: + """ +Returns logger used by multiprocessing +""" +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: + """ +Turn on logging and add a handler which prints to stderr +""" def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... abstract_sockets_supported: Final[bool] @@ -58,6 +64,9 @@ def get_temp_dir() -> str: ... def register_after_fork(obj: _T, func: Callable[[_T], object]) -> None: ... class Finalize(Generic[_R_co]): + """ +Class which supports object finalization using weakrefs +""" # "args" and "kwargs" are passed as arguments to "callback". @overload def __init__( @@ -88,11 +97,23 @@ _finalizer_registry: MutableMapping[Incomplete, Incomplete] = {}, sub_debug: Callable[..., object] = ..., getpid: Callable[[], int] = ..., - ) -> _R_co: ... - def cancel(self) -> None: ... - def still_active(self) -> bool: ... - -def is_exiting() -> bool: ...
+ ) -> _R_co: + """ +Run the callback unless it has already been called or cancelled +""" + def cancel(self) -> None: + """ +Cancel finalization of the object +""" + def still_active(self) -> bool: + """ +Return whether this finalizer is still waiting to invoke callback +""" + +def is_exiting() -> bool: + """ +Returns true if the process is shutting down +""" class ForkAwareThreadLock: acquire: Callable[[bool, float], bool] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi index 480f55a46d645..82d19d60bec26 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi @@ -1,3 +1,5 @@ +"""An object-oriented interface to .netrc files. +""" import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -5,6 +7,8 @@ from typing_extensions import TypeAlias __all__ = ["netrc", "NetrcParseError"] class NetrcParseError(Exception): + """Exception raised on syntax errors in the .netrc file. +""" filename: str | None lineno: int | None msg: str @@ -20,4 +24,6 @@ class netrc: hosts: dict[str, _NetrcTuple] macros: dict[str, list[str]] def __init__(self, file: StrOrBytesPath | None = None) -> None: ... - def authenticators(self, host: str) -> _NetrcTuple | None: ... + def authenticators(self, host: str) -> _NetrcTuple | None: + """Return a (user, account, password) tuple for given host. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi index 1fb1e79f69a1f..6823e240a79a1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi @@ -1,3 +1,31 @@ +"""An NNTP client class based on: +- RFC 977: Network News Transfer Protocol +- RFC 2980: Common NNTP Extensions +- RFC 3977: Network News Transfer Protocol (version 2) + +Example: + +>>> from nntplib import NNTP +>>> s = NNTP('news') +>>> resp, count, first, last, name = s.group('comp.lang.python') +>>> print('Group', name, 'has', count, 'articles, range', first, 'to', last) +Group comp.lang.python has 51 articles, range 5770 to 5821 +>>> resp, subs = s.xhdr('subject', '{0}-{1}'.format(first, last)) +>>> resp = s.quit() +>>> + +Here 'resp' is the server response line. +Error responses are turned into exceptions. + +To post an article from a file: +>>> f = open(filename, 'rb') # file containing article, including header +>>> resp = s.post(f) +>>> + +For descriptions of all methods, read the comments in the code below. +Note that all arguments and return values representing article numbers +are strings, not numbers, since they are rarely used for calculations. +""" import datetime import socket import ssl @@ -22,29 +50,48 @@ __all__ = [ _File: TypeAlias = IO[bytes] | bytes | str | None class NNTPError(Exception): + """Base class for all nntplib exceptions +""" response: str -class NNTPReplyError(NNTPError): ... -class NNTPTemporaryError(NNTPError): ... -class NNTPPermanentError(NNTPError): ... -class NNTPProtocolError(NNTPError): ... -class NNTPDataError(NNTPError): ... 
+class NNTPReplyError(NNTPError): + """Unexpected [123]xx reply +""" +class NNTPTemporaryError(NNTPError): + """4xx errors +""" +class NNTPPermanentError(NNTPError): + """5xx errors +""" +class NNTPProtocolError(NNTPError): + """Response does not begin with [1-5] +""" +class NNTPDataError(NNTPError): + """Error in response data +""" NNTP_PORT: Final = 119 NNTP_SSL_PORT: Final = 563 class GroupInfo(NamedTuple): + """GroupInfo(group, last, first, flag) +""" group: str last: str first: str flag: str class ArticleInfo(NamedTuple): + """ArticleInfo(number, message_id, lines) +""" number: int message_id: str lines: list[bytes] -def decode_header(header_str: str) -> str: ... +def decode_header(header_str: str) -> str: + """Takes a unicode string representing a munged header value + and decodes it as a (possibly non-ASCII) readable value. +""" class NNTP: encoding: str @@ -70,39 +117,221 @@ class NNTP: readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: ... + ) -> None: + """Initialize an instance. Arguments: + - host: hostname to connect to + - port: port to connect to (default the standard NNTP port) + - user: username to authenticate with + - password: password to use with username + - readermode: if true, send 'mode reader' command after + connecting. + - usenetrc: allow loading username and password from ~/.netrc file + if not specified explicitly + - timeout: timeout (in seconds) used for socket connections + + readermode is sometimes necessary if you are connecting to an + NNTP server on the local machine and intend to call + reader-specific commands, such as `group'. If you get + unexpected NNTPPermanentErrors, you might need to set + readermode. + """ def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def getwelcome(self) -> str: ... - def getcapabilities(self) -> dict[str, _list[str]]: ... - def set_debuglevel(self, level: int) -> None: ... - def debug(self, level: int) -> None: ... - def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... - def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... - def description(self, group: str) -> str: ... - def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... - def group(self, name: str) -> tuple[str, int, int, int, str]: ... - def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... - def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... - def next(self) -> tuple[str, int, str]: ... - def last(self) -> tuple[str, int, str]: ... - def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... - def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... - def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... - def slave(self) -> str: ... - def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... - def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def getwelcome(self) -> str: + """Get the welcome message from the server + (this is read and squirreled away by __init__()). 
+ If the response code is 200, posting is allowed; + if it 201, posting is not allowed. +""" + def getcapabilities(self) -> dict[str, _list[str]]: + """Get the server capabilities, as read by __init__(). + If the CAPABILITIES command is not supported, an empty dict is + returned. +""" + def set_debuglevel(self, level: int) -> None: + """Set the debugging level. Argument 'level' means: + 0: no debugging output (default) + 1: print commands and responses but not body text etc. + 2: also print raw lines read and sent before stripping CR/LF +""" + def debug(self, level: int) -> None: + """Set the debugging level. Argument 'level' means: + 0: no debugging output (default) + 1: print commands and responses but not body text etc. + 2: also print raw lines read and sent before stripping CR/LF +""" + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: + """Process a CAPABILITIES command. Not supported by all servers. + Return: + - resp: server response if successful + - caps: a dictionary mapping capability names to lists of tokens + (for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] }) + """ + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: + """Process a NEWGROUPS command. Arguments: + - date: a date or datetime object + Return: + - resp: server response if successful + - list: list of newsgroup names + """ + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: + """Process a NEWNEWS command. Arguments: + - group: group name or '*' + - date: a date or datetime object + Return: + - resp: server response if successful + - list: list of message ids + """ + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: + """Process a LIST or LIST ACTIVE command. Arguments: + - group_pattern: a pattern indicating which groups to query + - file: Filename string or file object to store the result in + Returns: + - resp: server response if successful + - list: list of (group, last, first, flag) (strings) + """ + def description(self, group: str) -> str: + """Get a description for a single group. If more than one + group matches ('group' is a pattern), return the first. If no + group matches, return an empty string. + + This elides the response code from the server, since it can + only be '215' or '285' (for xgtitle) anyway. If the response + code is needed, use the 'descriptions' method. + + NOTE: This neither checks for a wildcard in 'group' nor does + it check whether the group actually exists. +""" + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: + """Get descriptions for a range of groups. +""" + def group(self, name: str) -> tuple[str, int, int, int, str]: + """Process a GROUP command. Argument: + - group: the group name + Returns: + - resp: server response if successful + - count: number of articles + - first: first article number + - last: last article number + - name: the group name + """ + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: + """Process a HELP command. Argument: + - file: Filename string or file object to store the result in + Returns: + - resp: server response if successful + - list: list of strings returned by the server in response to the + HELP command + """ + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: + """Process a STAT command. 
Argument: + - message_spec: article number or message id (if not specified, + the current article is selected) + Returns: + - resp: server response if successful + - art_num: the article number + - message_id: the message id + """ + def next(self) -> tuple[str, int, str]: + """Process a NEXT command. No arguments. Return as for STAT. +""" + def last(self) -> tuple[str, int, str]: + """Process a LAST command. No arguments. Return as for STAT. +""" + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: + """Process a HEAD command. Argument: + - message_spec: article number or message id + - file: filename string or file object to store the headers in + Returns: + - resp: server response if successful + - ArticleInfo: (article number, message id, list of header lines) + """ + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: + """Process a BODY command. Argument: + - message_spec: article number or message id + - file: filename string or file object to store the body in + Returns: + - resp: server response if successful + - ArticleInfo: (article number, message id, list of body lines) + """ + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: + """Process an ARTICLE command. Argument: + - message_spec: article number or message id + - file: filename string or file object to store the article in + Returns: + - resp: server response if successful + - ArticleInfo: (article number, message id, list of article lines) + """ + def slave(self) -> str: + """Process a SLAVE command. Returns: + - resp: server response if successful + """ + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: + """Process an XHDR command (optional server extension). Arguments: + - hdr: the header type (e.g. 'subject') + - str: an article nr, a message id, or a range nr1-nr2 + - file: Filename string or file object to store the result in + Returns: + - resp: server response if successful + - list: list of (nr, value) strings + """ + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: + """Process an XOVER command (optional server extension) Arguments: + - start: start of range + - end: end of range + - file: Filename string or file object to store the result in + Returns: + - resp: server response if successful + - list: list of dicts containing the response fields + """ def over( self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None - ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... - def date(self) -> tuple[str, datetime.datetime]: ... - def post(self, data: bytes | Iterable[bytes]) -> str: ... - def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... - def quit(self) -> str: ... + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: + """Process an OVER command. If the command isn't supported, fall + back to XOVER. 
Arguments: + - message_spec: + - either a message id, indicating the article to fetch + information about + - or a (start, end) tuple, indicating a range of article numbers; + if end is None, information up to the newest message will be + retrieved + - or None, indicating the current article number must be used + - file: Filename string or file object to store the result in + Returns: + - resp: server response if successful + - list: list of dicts containing the response fields + + NOTE: the "message id" form isn't supported by XOVER + """ + def date(self) -> tuple[str, datetime.datetime]: + """Process the DATE command. + Returns: + - resp: server response if successful + - date: datetime object + """ + def post(self, data: bytes | Iterable[bytes]) -> str: + """Process a POST command. Arguments: + - data: bytes object, iterable or file containing the article + Returns: + - resp: server response if successful +""" + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: + """Process an IHAVE command. Arguments: + - message_id: message-id of the article + - data: file containing the article + Returns: + - resp: server response if successful + Note that if the server refuses the article an exception is raised. +""" + def quit(self) -> str: + """Process a QUIT command and close the socket. Returns: + - resp: server response if successful +""" def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... - def starttls(self, context: ssl.SSLContext | None = None) -> None: ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: + """Process a STARTTLS command. Arguments: + - context: SSL context to use for the encrypted connection + """ class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -117,4 +346,7 @@ class NNTP_SSL(NNTP): readermode: bool | None = None, usenetrc: bool = False, timeout: float = ..., - ) -> None: ... + ) -> None: + """This works identically to NNTP.__init__, except for the change + in default port and the `ssl_context` argument for SSL connections. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi index 074df075b9727..78c016e732467 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi @@ -1,3 +1,8 @@ +"""Common pathname manipulations, WindowsNT/95 version. + +Instead of importing this module directly, import os and refer to this +module as os.path. +""" import sys from _typeshed import BytesPath, StrOrBytesPath, StrPath from genericpath import ( @@ -120,4 +125,6 @@ else: realpath = abspath if sys.version_info >= (3, 13): - def isreserved(path: StrOrBytesPath) -> bool: ... + def isreserved(path: StrOrBytesPath) -> bool: + """Return true if the pathname is reserved by the system. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi index 014af8a0fd2ed..c82b1d40798f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi @@ -1,12 +1,29 @@ +"""Convert a NT pathname to a file URL and vice versa. + +This module only exists to provide OS-specific code +for urllib.requests, thus do not use directly. +""" import sys from typing_extensions import deprecated if sys.version_info >= (3, 14): @deprecated("The `nturl2path` module is deprecated since Python 3.14.") - def url2pathname(url: str) -> str: ... 
+ def url2pathname(url: str) -> str: + """OS-specific conversion from a relative URL of the 'file' scheme +to a file system path; not recommended for general use. +""" @deprecated("The `nturl2path` module is deprecated since Python 3.14.") - def pathname2url(p: str) -> str: ... + def pathname2url(p: str) -> str: + """OS-specific conversion from a file system path to a relative URL +of the 'file' scheme; not recommended for general use. +""" else: - def url2pathname(url: str) -> str: ... - def pathname2url(p: str) -> str: ... + def url2pathname(url: str) -> str: + """OS-specific conversion from a relative URL of the 'file' scheme +to a file system path; not recommended for general use. +""" + def pathname2url(p: str) -> str: + """OS-specific conversion from a file system path to a relative URL +of the 'file' scheme; not recommended for general use. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi index 64fb16581e952..c635e9b6746b7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi @@ -1,3 +1,7 @@ +"""Abstract Base Classes (ABCs) for numbers, according to PEP 3141. + +TODO: Fill out more detailed documentation on the operators. +""" # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. # @@ -61,157 +65,350 @@ class _IntegralLike(_RealLike, Protocol): ################# class Number(metaclass=ABCMeta): + """All numbers inherit from this class. + +If you just want to check if an argument x is a number, without +caring what kind, use isinstance(x, Number). +""" __slots__ = () @abstractmethod - def __hash__(self) -> int: ... + def __hash__(self) -> int: + """The type of the None singleton. +""" # See comment at the top of the file # for why some of these return types are purposefully vague class Complex(Number, _ComplexLike): + """Complex defines the operations that work on the builtin complex type. + +In short, those are: a conversion to complex, .real, .imag, +, -, +*, /, **, abs(), .conjugate, ==, and !=. + +If it is given heterogeneous arguments, and doesn't have special +knowledge about them, it should fall back to the builtin complex +type as described below. +""" __slots__ = () @abstractmethod - def __complex__(self) -> complex: ... - def __bool__(self) -> bool: ... + def __complex__(self) -> complex: + """Return a builtin complex instance. Called for complex(self). +""" + def __bool__(self) -> bool: + """True if self != 0. Called for bool(self). +""" @property @abstractmethod - def real(self) -> _RealLike: ... + def real(self) -> _RealLike: + """Retrieve the real component of this number. + +This should subclass Real. +""" @property @abstractmethod - def imag(self) -> _RealLike: ... - @abstractmethod - def __add__(self, other) -> _ComplexLike: ... - @abstractmethod - def __radd__(self, other) -> _ComplexLike: ... - @abstractmethod - def __neg__(self) -> _ComplexLike: ... - @abstractmethod - def __pos__(self) -> _ComplexLike: ... - def __sub__(self, other) -> _ComplexLike: ... - def __rsub__(self, other) -> _ComplexLike: ... - @abstractmethod - def __mul__(self, other) -> _ComplexLike: ... - @abstractmethod - def __rmul__(self, other) -> _ComplexLike: ... - @abstractmethod - def __truediv__(self, other) -> _ComplexLike: ... - @abstractmethod - def __rtruediv__(self, other) -> _ComplexLike: ... - @abstractmethod - def __pow__(self, exponent) -> _ComplexLike: ... 
- @abstractmethod - def __rpow__(self, base) -> _ComplexLike: ... - @abstractmethod - def __abs__(self) -> _RealLike: ... - @abstractmethod - def conjugate(self) -> _ComplexLike: ... - @abstractmethod - def __eq__(self, other: object) -> bool: ... + def imag(self) -> _RealLike: + """Retrieve the imaginary component of this number. + +This should subclass Real. +""" + @abstractmethod + def __add__(self, other) -> _ComplexLike: + """self + other +""" + @abstractmethod + def __radd__(self, other) -> _ComplexLike: + """other + self +""" + @abstractmethod + def __neg__(self) -> _ComplexLike: + """-self +""" + @abstractmethod + def __pos__(self) -> _ComplexLike: + """+self +""" + def __sub__(self, other) -> _ComplexLike: + """self - other +""" + def __rsub__(self, other) -> _ComplexLike: + """other - self +""" + @abstractmethod + def __mul__(self, other) -> _ComplexLike: + """self * other +""" + @abstractmethod + def __rmul__(self, other) -> _ComplexLike: + """other * self +""" + @abstractmethod + def __truediv__(self, other) -> _ComplexLike: + """self / other: Should promote to float when necessary. +""" + @abstractmethod + def __rtruediv__(self, other) -> _ComplexLike: + """other / self +""" + @abstractmethod + def __pow__(self, exponent) -> _ComplexLike: + """self ** exponent; should promote to float or complex when necessary. +""" + @abstractmethod + def __rpow__(self, base) -> _ComplexLike: + """base ** self +""" + @abstractmethod + def __abs__(self) -> _RealLike: + """Returns the Real distance from 0. Called for abs(self). +""" + @abstractmethod + def conjugate(self) -> _ComplexLike: + """(x+y*i).conjugate() returns (x-y*i). +""" + @abstractmethod + def __eq__(self, other: object) -> bool: + """self == other +""" __hash__: ClassVar[None] # type: ignore[assignment] # See comment at the top of the file # for why some of these return types are purposefully vague class Real(Complex, _RealLike): + """To Complex, Real adds the operations that work on real numbers. + +In short, those are: a conversion to float, trunc(), divmod, +%, <, <=, >, and >=. + +Real also provides defaults for the derived operations. +""" __slots__ = () @abstractmethod - def __float__(self) -> float: ... - @abstractmethod - def __trunc__(self) -> _IntegralLike: ... - @abstractmethod - def __floor__(self) -> _IntegralLike: ... + def __float__(self) -> float: + """Any Real can be converted to a native float object. + +Called for float(self). +""" @abstractmethod - def __ceil__(self) -> _IntegralLike: ... + def __trunc__(self) -> _IntegralLike: + """trunc(self): Truncates self to an Integral. + +Returns an Integral i such that: + * i > 0 iff self > 0; + * abs(i) <= abs(self); + * for any Integral j satisfying the first two conditions, + abs(i) >= abs(j) [i.e. i has "maximal" abs among those]. +i.e. "truncate towards 0". +""" + @abstractmethod + def __floor__(self) -> _IntegralLike: + """Finds the greatest Integral <= self. +""" + @abstractmethod + def __ceil__(self) -> _IntegralLike: + """Finds the least Integral >= self. +""" @abstractmethod @overload - def __round__(self, ndigits: None = None) -> _IntegralLike: ... + def __round__(self, ndigits: None = None) -> _IntegralLike: + """Rounds self to ndigits decimal places, defaulting to 0. + +If ndigits is omitted or None, returns an Integral, otherwise +returns a Real. Rounds half toward even. +""" @abstractmethod @overload def __round__(self, ndigits: int) -> _RealLike: ... - def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: ... 
- def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: ... - @abstractmethod - def __floordiv__(self, other) -> _RealLike: ... + def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: + """divmod(self, other): The pair (self // other, self % other). + +Sometimes this can be computed faster than the pair of +operations. +""" + def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: + """divmod(other, self): The pair (other // self, other % self). + +Sometimes this can be computed faster than the pair of +operations. +""" @abstractmethod - def __rfloordiv__(self, other) -> _RealLike: ... + def __floordiv__(self, other) -> _RealLike: + """self // other: The floor() of self/other. +""" @abstractmethod - def __mod__(self, other) -> _RealLike: ... + def __rfloordiv__(self, other) -> _RealLike: + """other // self: The floor() of other/self. +""" @abstractmethod - def __rmod__(self, other) -> _RealLike: ... + def __mod__(self, other) -> _RealLike: + """self % other +""" @abstractmethod - def __lt__(self, other) -> bool: ... + def __rmod__(self, other) -> _RealLike: + """other % self +""" @abstractmethod - def __le__(self, other) -> bool: ... - def __complex__(self) -> complex: ... + def __lt__(self, other) -> bool: + """self < other + +< on Reals defines a total ordering, except perhaps for NaN. +""" + @abstractmethod + def __le__(self, other) -> bool: + """self <= other +""" + def __complex__(self) -> complex: + """complex(self) == complex(float(self), 0) +""" @property - def real(self) -> _RealLike: ... + def real(self) -> _RealLike: + """Real numbers are their real component. +""" @property - def imag(self) -> Literal[0]: ... - def conjugate(self) -> _RealLike: ... + def imag(self) -> Literal[0]: + """Real numbers have no imaginary component. +""" + def conjugate(self) -> _RealLike: + """Conjugate is a no-op for Reals. +""" # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod - def __pos__(self) -> _RealLike: ... + def __pos__(self) -> _RealLike: + """+self +""" @abstractmethod - def __neg__(self) -> _RealLike: ... + def __neg__(self) -> _RealLike: + """-self +""" # See comment at the top of the file # for why some of these return types are purposefully vague class Rational(Real): + """.numerator and .denominator should be in lowest terms. +""" __slots__ = () @property @abstractmethod - def numerator(self) -> _IntegralLike: ... + def numerator(self) -> _IntegralLike: + """The numerator of a rational number in lowest terms. +""" @property @abstractmethod - def denominator(self) -> _IntegralLike: ... - def __float__(self) -> float: ... + def denominator(self) -> _IntegralLike: + """The denominator of a rational number in lowest terms. + +This denominator should be positive. +""" + def __float__(self) -> float: + """float(self) = self.numerator / self.denominator + +It's important that this conversion use the integer's "true" +division rather than casting one side to float before dividing +so that ratios of huge integers convert without overflowing. + +""" # See comment at the top of the file # for why some of these return types are purposefully vague class Integral(Rational, _IntegralLike): + """Integral adds methods that work on integral numbers. + +In short, these are conversion to int, pow with modulus, and the +bit-string operations. +""" __slots__ = () @abstractmethod - def __int__(self) -> int: ... - def __index__(self) -> int: ... 
- @abstractmethod - def __pow__(self, exponent, modulus=None) -> _IntegralLike: ... - @abstractmethod - def __lshift__(self, other) -> _IntegralLike: ... + def __int__(self) -> int: + """int(self) +""" + def __index__(self) -> int: + """Called whenever an index is needed, such as in slicing +""" @abstractmethod - def __rlshift__(self, other) -> _IntegralLike: ... - @abstractmethod - def __rshift__(self, other) -> _IntegralLike: ... - @abstractmethod - def __rrshift__(self, other) -> _IntegralLike: ... - @abstractmethod - def __and__(self, other) -> _IntegralLike: ... - @abstractmethod - def __rand__(self, other) -> _IntegralLike: ... - @abstractmethod - def __xor__(self, other) -> _IntegralLike: ... - @abstractmethod - def __rxor__(self, other) -> _IntegralLike: ... - @abstractmethod - def __or__(self, other) -> _IntegralLike: ... - @abstractmethod - def __ror__(self, other) -> _IntegralLike: ... - @abstractmethod - def __invert__(self) -> _IntegralLike: ... - def __float__(self) -> float: ... + def __pow__(self, exponent, modulus=None) -> _IntegralLike: + """self ** exponent % modulus, but maybe faster. + +Accept the modulus argument if you want to support the +3-argument version of pow(). Raise a TypeError if exponent < 0 +or any argument isn't Integral. Otherwise, just implement the +2-argument version described in Complex. +""" + @abstractmethod + def __lshift__(self, other) -> _IntegralLike: + """self << other +""" + @abstractmethod + def __rlshift__(self, other) -> _IntegralLike: + """other << self +""" + @abstractmethod + def __rshift__(self, other) -> _IntegralLike: + """self >> other +""" + @abstractmethod + def __rrshift__(self, other) -> _IntegralLike: + """other >> self +""" + @abstractmethod + def __and__(self, other) -> _IntegralLike: + """self & other +""" + @abstractmethod + def __rand__(self, other) -> _IntegralLike: + """other & self +""" + @abstractmethod + def __xor__(self, other) -> _IntegralLike: + """self ^ other +""" + @abstractmethod + def __rxor__(self, other) -> _IntegralLike: + """other ^ self +""" + @abstractmethod + def __or__(self, other) -> _IntegralLike: + """self | other +""" + @abstractmethod + def __ror__(self, other) -> _IntegralLike: + """other | self +""" + @abstractmethod + def __invert__(self) -> _IntegralLike: + """~self +""" + def __float__(self) -> float: + """float(self) == float(int(self)) +""" @property - def numerator(self) -> _IntegralLike: ... + def numerator(self) -> _IntegralLike: + """Integers are their own numerators. +""" @property - def denominator(self) -> Literal[1]: ... + def denominator(self) -> Literal[1]: + """Integers have a denominator of 1. +""" # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod - def __pos__(self) -> _IntegralLike: ... + def __pos__(self) -> _IntegralLike: + """+self +""" @abstractmethod - def __neg__(self) -> _IntegralLike: ... + def __neg__(self) -> _IntegralLike: + """-self +""" @abstractmethod - def __abs__(self) -> _IntegralLike: ... + def __abs__(self) -> _IntegralLike: + """Returns the Real distance from 0. Called for abs(self). +""" @abstractmethod @overload - def __round__(self, ndigits: None = None) -> _IntegralLike: ... + def __round__(self, ndigits: None = None) -> _IntegralLike: + """Rounds self to ndigits decimal places, defaulting to 0. + +If ndigits is omitted or None, returns an Integral, otherwise +returns a Real. Rounds half toward even. 
+""" @abstractmethod @overload def __round__(self, ndigits: int) -> _IntegralLike: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi index ed0e96ef1cb9c..8873005c7697e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi @@ -1,3 +1,7 @@ +""" +opcode module - potentially shared between dis and other modules which +operate on bytecodes (e.g. peephole optimizers). +""" import sys from typing import Final, Literal @@ -44,4 +48,6 @@ opmap: Final[dict[str, int]] HAVE_ARGUMENT: Final = 43 EXTENDED_ARG: Final = 69 -def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: ... +def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: + """Compute the stack effect of the opcode. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi index 2f919514b0b8b..d18e1cd698985 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi @@ -1,3 +1,11 @@ +"""Operator interface. + +This module exports a set of functions implemented in C corresponding +to the intrinsic operators of Python. For example, operator.add(x, y) +is equivalent to the expression x+y. The function names are those +used for special methods; variants without leading and trailing +'__' are also provided for convenience. +""" import sys from _operator import ( abs as abs, @@ -182,6 +190,12 @@ if sys.version_info >= (3, 11): # them here. @final class attrgetter(Generic[_T_co]): + """Return a callable object that fetches the given attribute(s) from its operand. +After f = attrgetter('name'), the call f(r) returns r.name. +After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date). +After h = attrgetter('name.first', 'name.last'), the call h(r) returns +(r.name.first, r.name.last). +""" @overload def __new__(cls, attr: str, /) -> attrgetter[Any]: ... @overload @@ -192,10 +206,16 @@ class attrgetter(Generic[_T_co]): def __new__(cls, attr: str, attr2: str, attr3: str, attr4: str, /) -> attrgetter[tuple[Any, Any, Any, Any]]: ... @overload def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... - def __call__(self, obj: Any, /) -> _T_co: ... + def __call__(self, obj: Any, /) -> _T_co: + """Call self as a function. +""" @final class itemgetter(Generic[_T_co]): + """Return a callable object that fetches the given item(s) from its operand. +After f = itemgetter(2), the call f(r) returns r[2]. +After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) +""" @overload def __new__(cls, item: _T, /) -> itemgetter[_T]: ... @overload @@ -209,9 +229,18 @@ class itemgetter(Generic[_T_co]): # then we can't annotate __call__'s return type or it'll break on tuples # # These issues are best demonstrated by the `itertools.check_itertools_recipes.unique_justseen` test. - def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: + """Call self as a function. +""" @final class methodcaller: + """Return a callable object that calls the given method on its operand. +After f = methodcaller('name'), the call f(r) returns r.name(). +After g = methodcaller('name', 'date', foo=1), the call g(r) returns +r.name('date', foo=1). 
+""" def __new__(cls, name: str, /, *args: Any, **kwargs: Any) -> Self: ... - def __call__(self, obj: Any) -> Any: ... + def __call__(self, obj: Any) -> Any: + """Call self as a function. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi index c522917992800..67c46a9bff19e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi @@ -1,3 +1,25 @@ +"""A powerful, extensible, and easy-to-use option parser. + +By Greg Ward + +Originally distributed as Optik. + +For support, use the optik-users@lists.sourceforge.net mailing list +(http://lists.sourceforge.net/lists/listinfo/optik-users). + +Simple usage example: + + from optparse import OptionParser + + parser = OptionParser() + parser.add_option("-f", "--file", dest="filename", + help="write report to FILE", metavar="FILE") + parser.add_option("-q", "--quiet", + action="store_false", dest="verbose", default=True, + help="don't print status messages to stdout") + + (options, args) = parser.parse_args() +""" import builtins from _typeshed import MaybeNone, SupportsWrite from abc import abstractmethod @@ -37,21 +59,78 @@ class OptParseError(Exception): def __init__(self, msg: str) -> None: ... class BadOptionError(OptParseError): + """ +Raised if an invalid option is seen on the command line. +""" opt_str: str def __init__(self, opt_str: str) -> None: ... class AmbiguousOptionError(BadOptionError): + """ +Raised if an ambiguous option is seen on the command line. +""" possibilities: Iterable[str] def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... class OptionError(OptParseError): + """ +Raised if an Option instance is created with invalid or +inconsistent arguments. +""" option_id: str def __init__(self, msg: str, option: Option) -> None: ... -class OptionConflictError(OptionError): ... -class OptionValueError(OptParseError): ... +class OptionConflictError(OptionError): + """ +Raised if conflicting options are added to an OptionParser. +""" +class OptionValueError(OptParseError): + """ +Raised if an invalid option value is encountered on the command +line. +""" class HelpFormatter: + """ +Abstract base class for formatting option help. OptionParser +instances should use one of the HelpFormatter subclasses for +formatting help; by default IndentedHelpFormatter is used. + +Instance attributes: + parser : OptionParser + the controlling OptionParser instance + indent_increment : int + the number of columns to indent per nesting level + max_help_position : int + the maximum starting column for option help text + help_position : int + the calculated starting column for option help text; + initially the same as the maximum + width : int + total number of columns for output (pass None to constructor for + this value to be taken from the $COLUMNS environment variable) + level : int + current indentation level + current_indent : int + current indentation level (in columns) + help_width : int + number of columns available for option help text (calculated) + default_tag : str + text to replace with each option's default value, "%default" + by default. Set to false value to disable default value expansion. + option_strings : { Option : str } + maps Option instances to the snippet of help text explaining + the syntax of that option, e.g. "-h, --help" or + "-fFILE, --file=FILE" + _short_opt_fmt : str + format string controlling how short options with values are + printed in help text. 
Must be either "%s%s" ("-fFILE") or + "%s %s" ("-f FILE"), because those are the two syntaxes that + Optik supports. + _long_opt_fmt : str + similar but for long options; must be either "%s %s" ("--file FILE") + or "%s=%s" ("--file=FILE"). +""" NO_DEFAULT_VALUE: str _long_opt_fmt: str _short_opt_fmt: str @@ -76,7 +155,9 @@ class HelpFormatter: @abstractmethod def format_heading(self, heading: str) -> str: ... def format_option(self, option: Option) -> str: ... - def format_option_strings(self, option: Option) -> str: ... + def format_option_strings(self, option: Option) -> str: + """Return a comma-separated list of option strings & metavariables. +""" @abstractmethod def format_usage(self, usage: str) -> str: ... def indent(self) -> None: ... @@ -86,6 +167,8 @@ class HelpFormatter: def store_option_strings(self, parser: OptionParser) -> None: ... class IndentedHelpFormatter(HelpFormatter): + """Format help with indented section bodies. + """ def __init__( self, indent_increment: int = 2, @@ -97,6 +180,8 @@ class IndentedHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... class TitledHelpFormatter(HelpFormatter): + """Format help with underlined section headers. + """ def __init__( self, indent_increment: int = 0, @@ -108,6 +193,24 @@ class TitledHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... class Option: + """ +Instance attributes: + _short_opts : [string] + _long_opts : [string] + + action : string + type : string + dest : string + default : any + nargs : int + const : any + choices : [string] + callback : function + callback_args : (any*) + callback_kwargs : { string : any } + help : string + metavar : string +""" ACTIONS: tuple[str, ...] ALWAYS_TYPED_ACTIONS: tuple[str, ...] ATTRS: list[str] @@ -172,6 +275,35 @@ class Option: make_option = Option class OptionContainer: + """ +Abstract base class. + +Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + +Instance attributes: + option_list : [Option] + the list of Option objects contained by this OptionContainer + _short_opt : { string : Option } + dictionary mapping short option strings, eg. "-f" or "-X", + to the Option instances that implement them. If an Option + has multiple short option strings, it will appear in this + dictionary multiple times. [1] + _long_opt : { string : Option } + dictionary mapping long option strings, eg. "--file" or + "--exclude", to the Option instances that implement them. + Again, a given Option can occur multiple times in this + dictionary. [1] + defaults : { string : any } + dictionary mapping option destination names to default + values for each destination [1] + +[1] These mappings are common to (shared by) all components of the + controlling OptionParser, where they are initially created. + +""" _long_opt: dict[str, Option] _short_opt: dict[str, Option] conflict_handler: str @@ -185,7 +317,10 @@ class OptionContainer: def _create_option_mappings(self) -> None: ... def _share_option_mappings(self, parser: OptionParser) -> None: ... @overload - def add_option(self, opt: Option, /) -> Option: ... + def add_option(self, opt: Option, /) -> Option: + """add_option(Option) +add_option(opt_str, ..., kwarg=val, ...) +""" @overload def add_option( self, @@ -207,7 +342,9 @@ class OptionContainer: **kwargs, # Allow arbitrary keyword arguments for user defined option_class ) -> Option: ... 
def add_options(self, option_list: Iterable[Option]) -> None: ... - def destroy(self) -> None: ... + def destroy(self) -> None: + """see OptionParser.destroy(). +""" def format_option_help(self, formatter: HelpFormatter) -> str: ... def format_description(self, formatter: HelpFormatter) -> str: ... def format_help(self, formatter: HelpFormatter) -> str: ... @@ -229,8 +366,19 @@ class OptionGroup(OptionContainer): class Values: def __init__(self, defaults: Mapping[str, object] | None = None) -> None: ... def _update(self, dict: Mapping[str, object], mode: Literal["careful", "loose"]) -> None: ... - def _update_careful(self, dict: Mapping[str, object]) -> None: ... - def _update_loose(self, dict: Mapping[str, object]) -> None: ... + def _update_careful(self, dict: Mapping[str, object]) -> None: + """ +Update the option values from an arbitrary dictionary, but only +use keys from dict that already have a corresponding attribute +in self. Any keys in dict without a corresponding attribute +are silently ignored. +""" + def _update_loose(self, dict: Mapping[str, object]) -> None: + """ +Update the option values from an arbitrary dictionary, +using all keys from the dictionary regardless of whether +they have a corresponding attribute in self or not. +""" def ensure_value(self, attr: str, value: object) -> Any: ... # return type cannot be known statically def read_file(self, filename: str, mode: Literal["careful", "loose"] = "careful") -> None: ... def read_module(self, modname: str, mode: Literal["careful", "loose"] = "careful") -> None: ... @@ -243,6 +391,73 @@ class Values: def __eq__(self, other: object) -> bool: ... class OptionParser(OptionContainer): + """ +Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + +Instance attributes: + usage : string + a usage string for your program. Before it is displayed + to the user, "%prog" will be expanded to the name of + your program (self.prog or os.path.basename(sys.argv[0])). + prog : string + the name of the current program (to override + os.path.basename(sys.argv[0])). + description : string + A paragraph of text giving a brief overview of your program. + optparse reformats this paragraph to fit the current terminal + width and prints it when the user requests help (after usage, + but before the list of options). + epilog : string + paragraph of help text to print after option help + + option_groups : [OptionGroup] + list of option groups in this parser (option groups are + irrelevant for parsing the command-line, but very useful + for generating help) + + allow_interspersed_args : bool = true + if true, positional arguments may be interspersed with options. + Assuming -a and -b each take a single argument, the command-line + -ablah foo bar -bboo baz + will be interpreted the same as + -ablah -bboo -- foo bar baz + If this flag were false, that command line would be interpreted as + -ablah -- foo bar -bboo baz + -- ie. we stop processing options as soon as we see the first + non-option argument. (This is the tradition followed by + Python's getopt module, Perl's Getopt::Std, and other argument- + parsing libraries, but it is generally annoying to users.) + + process_default_values : bool = true + if true, option default values are processed similarly to option + values from the command line: that is, they are passed to the + type-checking function for the option's type (as long as the + default value is a string). 
(This really only matters if you + have defined custom types; see SF bug #955889.) Set it to false + to restore the behaviour of Optik 1.4.1 and earlier. + + rargs : [string] + the argument list currently being parsed. Only set when + parse_args() is active, and continually trimmed down as + we consume arguments. Mainly there for the benefit of + callback options. + largs : [string] + the list of leftover arguments that we have skipped while + parsing options. If allow_interspersed_args is false, this + list is always empty. + values : Values + the set of option values currently being accumulated. Only + set when parse_args() is active. Also mainly for callbacks. + +Because of the 'rargs', 'largs', and 'values' attributes, +OptionParser is not thread-safe. If, for some perverse reason, you +need to parse command-line arguments simultaneously in different +threads, use different OptionParser instances. + +""" allow_interspersed_args: bool epilog: str | None formatter: HelpFormatter @@ -275,19 +490,61 @@ class OptionParser(OptionContainer): def _get_all_options(self) -> list[Option]: ... def _get_args(self, args: list[str] | None) -> list[str]: ... def _init_parsing_state(self) -> None: ... - def _match_long_opt(self, opt: str) -> str: ... + def _match_long_opt(self, opt: str) -> str: + """_match_long_opt(opt : string) -> string + +Determine which long option string 'opt' matches, ie. which one +it is an unambiguous abbreviation for. Raises BadOptionError if +'opt' doesn't unambiguously match any long option string. +""" def _populate_option_list(self, option_list: Iterable[Option] | None, add_help: bool = True) -> None: ... - def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: ... + def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: + """_process_args(largs : [string], + rargs : [string], + values : Values) + +Process command-line arguments and populate 'values', consuming +options and arguments from 'rargs'. If 'allow_interspersed_args' is +false, stop at the first non-option argument. If true, accumulate any +interspersed non-option arguments in 'largs'. +""" def _process_long_opt(self, rargs: list[str], values: Values) -> None: ... def _process_short_opts(self, rargs: list[str], values: Values) -> None: ... @overload def add_option_group(self, opt_group: OptionGroup, /) -> OptionGroup: ... @overload def add_option_group(self, title: str, /, description: str | None = None) -> OptionGroup: ... - def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: ... - def disable_interspersed_args(self) -> None: ... - def enable_interspersed_args(self) -> None: ... - def error(self, msg: str) -> NoReturn: ... + def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: + """ +check_values(values : Values, args : [string]) +-> (values : Values, args : [string]) + +Check that the supplied option values and leftover arguments are +valid. Returns the option values and leftover arguments +(possibly adjusted, possibly completely new -- whatever you +like). Default implementation just returns the passed-in +values; subclasses may override as desired. +""" + def disable_interspersed_args(self) -> None: + """Set parsing to stop on the first non-option. Use this if +you have a command processor which runs another command that +has options of its own and you want to make sure these options +don't get confused. 
+""" + def enable_interspersed_args(self) -> None: + """Set parsing to not stop on the first non-option, allowing +interspersing switches with command arguments. This is the +default behavior. See also disable_interspersed_args() and the +class documentation description of the attribute +allow_interspersed_args. +""" + def error(self, msg: str) -> NoReturn: + """error(msg : string) + +Print a usage message incorporating 'msg' to stderr and exit. +If you override this in a subclass, it should not return -- it +should either exit or raise an exception. +""" def exit(self, status: int = 0, msg: str | None = None) -> NoReturn: ... def expand_prog_name(self, s: str) -> str: ... def format_epilog(self, formatter: HelpFormatter) -> str: ... @@ -298,10 +555,43 @@ class OptionParser(OptionContainer): def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... - def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... - def print_usage(self, file: SupportsWrite[str] | None = None) -> None: ... - def print_help(self, file: SupportsWrite[str] | None = None) -> None: ... - def print_version(self, file: SupportsWrite[str] | None = None) -> None: ... + def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: + """ +parse_args(args : [string] = sys.argv[1:], + values : Values = None) +-> (values : Values, args : [string]) + +Parse the command-line options found in 'args' (default: +sys.argv[1:]). Any errors result in a call to 'error()', which +by default prints the usage message to stderr and calls +sys.exit() with an error message. On success returns a pair +(values, args) where 'values' is a Values instance (with all +your option values) and 'args' is the list of arguments left +over after parsing options. +""" + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: + """print_usage(file : file = stdout) + +Print the usage message for the current program (self.usage) to +'file' (default stdout). Any occurrence of the string "%prog" in +self.usage is replaced with the name of the current program +(basename of sys.argv[0]). Does nothing if self.usage is empty +or not defined. +""" + def print_help(self, file: SupportsWrite[str] | None = None) -> None: + """print_help(file : file = stdout) + +Print an extended help message, listing all options and any +help text provided with them, to 'file' (default stdout). +""" + def print_version(self, file: SupportsWrite[str] | None = None) -> None: + """print_version(file : file = stdout) + +Print the version message for this program (self.version) to +'file' (default stdout). As with print_usage(), any occurrence +of "%prog" in self.version is replaced by the current program's +name. Does nothing if self.version is empty or undefined. +""" def set_default(self, dest: str, value: Any) -> None: ... # default value can be "any" type def set_defaults(self, **kwargs: Any) -> None: ... # default values can be "any" type def set_process_default_values(self, process: bool) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 71c79dfac399f..98818110fbd9c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -1,3 +1,25 @@ +"""OS routines for NT or Posix depending on what system we're on. 
+ +This exports: + - all functions from posix or nt, e.g. unlink, stat, etc. + - os.path is either posixpath or ntpath + - os.name is either 'posix' or 'nt' + - os.curdir is a string representing the current directory (always '.') + - os.pardir is a string representing the parent directory (always '..') + - os.sep is the (or a most common) pathname separator ('/' or '\\\\') + - os.extsep is the extension separator (always '.') + - os.altsep is the alternate pathname separator (None or '/') + - os.pathsep is the component separator used in $PATH etc + - os.linesep is the line separator in text files ('\\n' or '\\r\\n') + - os.defpath is the default search path for executables + - os.devnull is the file path of the null device ('/dev/null', etc.) + +Programs that import and use 'os' stand a better chance of being +portable between different platforms. Of course, they must then +only use functions that are defined by all platforms (e.g., unlink +and opendir), and leave all pathname manipulation to os.path +(e.g., split and join). +""" import sys from _typeshed import ( AnyStr_co, @@ -775,6 +797,17 @@ TMP_MAX: Final[int] # Undocumented, but used by tempfile # ----- os classes (structures) ----- @final class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): + """stat_result: Result from stat, fstat, or lstat. + +This object may be accessed either as a tuple of + (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) +or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on. + +Posix/windows: If your platform supports st_blksize, st_blocks, st_rdev, +or st_flags, they are available as attributes only. + +See os.stat for more information. +""" # The constructor of this class takes an iterable of variable length (though it must be at least 10). # # However, this class behaves like a tuple of 10 elements, @@ -789,23 +822,41 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo __match_args__: Final = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid", "st_gid", "st_size") @property - def st_mode(self) -> int: ... # protection bits, + def st_mode(self) -> int: # protection bits, + """protection bits +""" @property - def st_ino(self) -> int: ... # inode number, + def st_ino(self) -> int: # inode number, + """inode +""" @property - def st_dev(self) -> int: ... # device, + def st_dev(self) -> int: # device, + """device +""" @property - def st_nlink(self) -> int: ... # number of hard links, + def st_nlink(self) -> int: # number of hard links, + """number of hard links +""" @property - def st_uid(self) -> int: ... # user id of owner, + def st_uid(self) -> int: # user id of owner, + """user ID of owner +""" @property - def st_gid(self) -> int: ... # group id of owner, + def st_gid(self) -> int: # group id of owner, + """group ID of owner +""" @property - def st_size(self) -> int: ... # size of file, in bytes, + def st_size(self) -> int: # size of file, in bytes, + """total size, in bytes +""" @property - def st_atime(self) -> float: ... # time of most recent access, + def st_atime(self) -> float: # time of most recent access, + """time of last access +""" @property - def st_mtime(self) -> float: ... 
# time of most recent content modification, + def st_mtime(self) -> float: # time of most recent content modification, + """time of last modification +""" # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) if sys.version_info >= (3, 12) and sys.platform == "win32": @property @@ -817,15 +868,23 @@ In the future, this property will contain the last metadata change time.""" def st_ctime(self) -> float: ... else: @property - def st_ctime(self) -> float: ... + def st_ctime(self) -> float: + """time of last change +""" @property - def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds + def st_atime_ns(self) -> int: # time of most recent access, in nanoseconds + """time of last access in nanoseconds +""" @property - def st_mtime_ns(self) -> int: ... # time of most recent content modification in nanoseconds + def st_mtime_ns(self) -> int: # time of most recent content modification in nanoseconds + """time of last modification in nanoseconds +""" # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds @property - def st_ctime_ns(self) -> int: ... + def st_ctime_ns(self) -> int: + """time of last change in nanoseconds +""" if sys.platform == "win32": @property def st_file_attributes(self) -> int: ... @@ -838,11 +897,17 @@ In the future, this property will contain the last metadata change time.""" def st_birthtime_ns(self) -> int: ... # time of file creation in nanoseconds else: @property - def st_blocks(self) -> int: ... # number of blocks allocated for file + def st_blocks(self) -> int: # number of blocks allocated for file + """number of blocks allocated +""" @property - def st_blksize(self) -> int: ... # filesystem blocksize + def st_blksize(self) -> int: # filesystem blocksize + """blocksize for filesystem I/O +""" @property - def st_rdev(self) -> int: ... # type of device if an inode device + def st_rdev(self) -> int: # type of device if an inode device + """device type (if inode device) +""" if sys.platform != "linux": # These properties are available on MacOS, but not Ubuntu. # On other Unix systems (such as FreeBSD), the following attributes may be @@ -862,12 +927,29 @@ In the future, this property will contain the last metadata change time.""" # on the allowlist for use as a Protocol starting in 3.14. @runtime_checkable class PathLike(ABC, Protocol[AnyStr_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + """Abstract base class for implementing the file system path protocol. +""" __slots__ = () @abstractmethod - def __fspath__(self) -> AnyStr_co: ... + def __fspath__(self) -> AnyStr_co: + """Return the file system path representation of the object. +""" @overload -def listdir(path: StrPath | None = None) -> list[str]: ... +def listdir(path: StrPath | None = None) -> list[str]: + """Return a list containing the names of the files in the directory. + +path can be specified as either str, bytes, or a path-like object. If path is bytes, + the filenames returned will also be bytes; in all other circumstances + the filenames returned will be str. +If path is None, uses the path='.'. +On some platforms, path may also be specified as an open file descriptor;\\ + the file descriptor must refer to a directory. + If this functionality is unavailable, using it raises NotImplementedError. + +The list is in arbitrary order. It does not include the special +entries '.' and '..' even if they are present in the directory. 
+""" @overload def listdir(path: BytesPath) -> list[bytes]: ... @overload @@ -878,21 +960,49 @@ class DirEntry(Generic[AnyStr]): # The constructor is hidden @property - def name(self) -> AnyStr: ... + def name(self) -> AnyStr: + """the entry's base filename, relative to scandir() "path" argument +""" @property - def path(self) -> AnyStr: ... - def inode(self) -> int: ... - def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... - def is_file(self, *, follow_symlinks: bool = True) -> bool: ... - def is_symlink(self) -> bool: ... - def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... - def __fspath__(self) -> AnyStr: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def path(self) -> AnyStr: + """the entry's full path name; equivalent to os.path.join(scandir_path, entry.name) +""" + def inode(self) -> int: + """Return inode of the entry; cached per entry. +""" + def is_dir(self, *, follow_symlinks: bool = True) -> bool: + """Return True if the entry is a directory; cached per entry. +""" + def is_file(self, *, follow_symlinks: bool = True) -> bool: + """Return True if the entry is a file; cached per entry. +""" + def is_symlink(self) -> bool: + """Return True if the entry is a symbolic link; cached per entry. +""" + def stat(self, *, follow_symlinks: bool = True) -> stat_result: + """Return stat_result object for the entry; cached per entry. +""" + def __fspath__(self) -> AnyStr: + """Returns the path for the entry. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" if sys.version_info >= (3, 12): - def is_junction(self) -> bool: ... + def is_junction(self) -> bool: + """Return True if the entry is a junction; cached per entry. +""" @final class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): + """statvfs_result: Result from statvfs or fstatvfs. + +This object may be accessed either as a tuple of + (bsize, frsize, blocks, bfree, bavail, files, ffree, favail, flag, namemax), +or via the attributes f_bsize, f_frsize, f_blocks, f_bfree, and so on. + +See os.statvfs for more information. +""" if sys.version_info >= (3, 10): __match_args__: Final = ( "f_bsize", @@ -931,82 +1041,215 @@ class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, in def f_fsid(self) -> int: ... # ----- os function stubs ----- -def fsencode(filename: StrOrBytesPath) -> bytes: ... -def fsdecode(filename: StrOrBytesPath) -> str: ... +def fsencode(filename: StrOrBytesPath) -> bytes: + """Encode filename (an os.PathLike, bytes, or str) to the filesystem +encoding with 'surrogateescape' error handler, return bytes unchanged. +On Windows, use 'strict' error handler if the file system encoding is +'mbcs' (which is the default encoding). +""" +def fsdecode(filename: StrOrBytesPath) -> str: + """Decode filename (an os.PathLike, bytes, or str) from the filesystem +encoding with 'surrogateescape' error handler, return str unchanged. On +Windows, use 'strict' error handler if the file system encoding is +'mbcs' (which is the default encoding). +""" @overload -def fspath(path: str) -> str: ... +def fspath(path: str) -> str: + """Return the file system path representation of the object. + +If the object is str or bytes, then allow it to pass through as-is. If the +object defines __fspath__(), then return the result of that method. All other +types raise a TypeError. +""" @overload def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[AnyStr]) -> AnyStr: ... 
-def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... -def getlogin() -> str: ... -def getpid() -> int: ... -def getppid() -> int: ... -def strerror(code: int, /) -> str: ... -def umask(mask: int, /) -> int: ... +def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: + """Returns the sequence of directories that will be searched for the +named executable (similar to a shell) when launching a process. + +*env* must be an environment variable dict or None. If *env* is None, +os.environ will be used. +""" +def getlogin() -> str: + """Return the actual login name. +""" +def getpid() -> int: + """Return the current process id. +""" +def getppid() -> int: + """Return the parent's process id. + +If the parent process has already exited, Windows machines will still +return its id; others systems will return the id of the 'init' process (1). +""" +def strerror(code: int, /) -> str: + """Translate an error code to a message string. +""" +def umask(mask: int, /) -> int: + """Set the current numeric umask and return the previous umask. +""" @final class uname_result(structseq[str], tuple[str, str, str, str, str]): + """uname_result: Result from os.uname(). + +This object may be accessed either as a tuple of + (sysname, nodename, release, version, machine), +or via the attributes sysname, nodename, release, version, and machine. + +See os.uname for more information. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") @property - def sysname(self) -> str: ... + def sysname(self) -> str: + """operating system name +""" @property - def nodename(self) -> str: ... + def nodename(self) -> str: + """name of machine on network (implementation-defined) +""" @property - def release(self) -> str: ... + def release(self) -> str: + """operating system release +""" @property - def version(self) -> str: ... + def version(self) -> str: + """operating system version +""" @property - def machine(self) -> str: ... + def machine(self) -> str: + """hardware identifier +""" if sys.platform != "win32": - def ctermid() -> str: ... - def getegid() -> int: ... - def geteuid() -> int: ... - def getgid() -> int: ... - def getgrouplist(user: str, group: int, /) -> list[int]: ... - def getgroups() -> list[int]: ... # Unix only, behaves differently on Mac - def initgroups(username: str, gid: int, /) -> None: ... - def getpgid(pid: int) -> int: ... - def getpgrp() -> int: ... - def getpriority(which: int, who: int) -> int: ... - def setpriority(which: int, who: int, priority: int) -> None: ... + def ctermid() -> str: + """Return the name of the controlling terminal for this process. +""" + def getegid() -> int: + """Return the current process's effective group id. +""" + def geteuid() -> int: + """Return the current process's effective user id. +""" + def getgid() -> int: + """Return the current process's group id. +""" + def getgrouplist(user: str, group: int, /) -> list[int]: + """Returns a list of groups to which a user belongs. + + user + username to lookup + group + base group id of the user +""" + def getgroups() -> list[int]: # Unix only, behaves differently on Mac + """Return list of supplemental group IDs for the process. +""" + def initgroups(username: str, gid: int, /) -> None: + """Initialize the group access list. + +Call the system initgroups() to initialize the group access list with all of +the groups of which the specified username is a member, plus the specified +group id. 
+""" + def getpgid(pid: int) -> int: + """Call the system call getpgid(), and return the result. +""" + def getpgrp() -> int: + """Return the current process group id. +""" + def getpriority(which: int, who: int) -> int: + """Return program scheduling priority. +""" + def setpriority(which: int, who: int, priority: int) -> None: + """Set program scheduling priority. +""" if sys.platform != "darwin": - def getresuid() -> tuple[int, int, int]: ... - def getresgid() -> tuple[int, int, int]: ... - - def getuid() -> int: ... - def setegid(egid: int, /) -> None: ... - def seteuid(euid: int, /) -> None: ... - def setgid(gid: int, /) -> None: ... - def setgroups(groups: Sequence[int], /) -> None: ... - def setpgrp() -> None: ... - def setpgid(pid: int, pgrp: int, /) -> None: ... - def setregid(rgid: int, egid: int, /) -> None: ... + def getresuid() -> tuple[int, int, int]: + """Return a tuple of the current process's real, effective, and saved user ids. +""" + def getresgid() -> tuple[int, int, int]: + """Return a tuple of the current process's real, effective, and saved group ids. +""" + + def getuid() -> int: + """Return the current process's user id. +""" + def setegid(egid: int, /) -> None: + """Set the current process's effective group id. +""" + def seteuid(euid: int, /) -> None: + """Set the current process's effective user id. +""" + def setgid(gid: int, /) -> None: + """Set the current process's group id. +""" + def setgroups(groups: Sequence[int], /) -> None: + """Set the groups of the current process to list. +""" + def setpgrp() -> None: + """Make the current process the leader of its process group. +""" + def setpgid(pid: int, pgrp: int, /) -> None: + """Call the system call setpgid(pid, pgrp). +""" + def setregid(rgid: int, egid: int, /) -> None: + """Set the current process's real and effective group ids. +""" if sys.platform != "darwin": - def setresgid(rgid: int, egid: int, sgid: int, /) -> None: ... - def setresuid(ruid: int, euid: int, suid: int, /) -> None: ... - - def setreuid(ruid: int, euid: int, /) -> None: ... - def getsid(pid: int, /) -> int: ... - def setsid() -> None: ... - def setuid(uid: int, /) -> None: ... - def uname() -> uname_result: ... + def setresgid(rgid: int, egid: int, sgid: int, /) -> None: + """Set the current process's real, effective, and saved group ids. +""" + def setresuid(ruid: int, euid: int, suid: int, /) -> None: + """Set the current process's real, effective, and saved user ids. +""" + + def setreuid(ruid: int, euid: int, /) -> None: + """Set the current process's real and effective user ids. +""" + def getsid(pid: int, /) -> int: + """Call the system call getsid(pid) and return the result. +""" + def setsid() -> None: + """Call the system call setsid(). +""" + def setuid(uid: int, /) -> None: + """Set the current process's user id. +""" + def uname() -> uname_result: + """Return an object identifying the current operating system. + +The object behaves like a named tuple with the following fields: + (sysname, nodename, release, version, machine) +""" @overload -def getenv(key: str) -> str | None: ... +def getenv(key: str) -> str | None: + """Get an environment variable, return None if it doesn't exist. +The optional second argument can specify an alternate default. +key, default and the result are str. +""" @overload def getenv(key: str, default: _T) -> str | _T: ... if sys.platform != "win32": @overload - def getenvb(key: bytes) -> bytes | None: ... 
+ def getenvb(key: bytes) -> bytes | None: + """Get an environment variable, return None if it doesn't exist. +The optional second argument can specify an alternate default. +key, default and the result are bytes. +""" @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... - def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: ... - def unsetenv(name: StrOrBytesPath, /) -> None: ... + def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: + """Change or add an environment variable. +""" + def unsetenv(name: StrOrBytesPath, /) -> None: + """Delete an environment variable. +""" else: def putenv(name: str, value: str, /) -> None: ... @@ -1091,50 +1334,220 @@ def fdopen( closefd: bool = ..., opener: _Opener | None = ..., ) -> IO[Any]: ... -def close(fd: int) -> None: ... -def closerange(fd_low: int, fd_high: int, /) -> None: ... -def device_encoding(fd: int) -> str | None: ... -def dup(fd: int, /) -> int: ... -def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... -def fstat(fd: int) -> stat_result: ... -def ftruncate(fd: int, length: int, /) -> None: ... -def fsync(fd: FileDescriptorLike) -> None: ... -def isatty(fd: int, /) -> bool: ... +def close(fd: int) -> None: + """Close a file descriptor. +""" +def closerange(fd_low: int, fd_high: int, /) -> None: + """Closes all file descriptors in [fd_low, fd_high), ignoring errors. +""" +def device_encoding(fd: int) -> str | None: + """Return a string describing the encoding of a terminal's file descriptor. + +The file descriptor must be attached to a terminal. +If the device is not a terminal, return None. +""" +def dup(fd: int, /) -> int: + """Return a duplicate of a file descriptor. +""" +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: + """Duplicate file descriptor. +""" +def fstat(fd: int) -> stat_result: + """Perform a stat system call on the given file descriptor. + +Like stat(), but for an open file descriptor. +Equivalent to os.stat(fd). +""" +def ftruncate(fd: int, length: int, /) -> None: + """Truncate a file, specified by file descriptor, to a specific length. +""" +def fsync(fd: FileDescriptorLike) -> None: + """Force write of fd to disk. +""" +def isatty(fd: int, /) -> bool: + """Return True if the fd is connected to a terminal. + +Return True if the file descriptor is an open file descriptor +connected to the slave end of a terminal. +""" if sys.platform != "win32" and sys.version_info >= (3, 11): - def login_tty(fd: int, /) -> None: ... + def login_tty(fd: int, /) -> None: + """Prepare the tty of which fd is a file descriptor for a new login session. + +Make the calling process a session leader; make the tty the +controlling tty, the stdin, the stdout, and the stderr of the +calling process; close fd. +""" if sys.version_info >= (3, 11): - def lseek(fd: int, position: int, whence: int, /) -> int: ... + def lseek(fd: int, position: int, whence: int, /) -> int: + """Set the position of a file descriptor. Return the new position. + + fd + An open file descriptor, as returned by os.open(). + position + Position, interpreted relative to 'whence'. + whence + The relative position to seek from. Valid values are: + - SEEK_SET: seek from the start of the file. + - SEEK_CUR: seek from the current file position. + - SEEK_END: seek from the end of the file. + +The return value is the number of bytes relative to the beginning of the file. +""" else: - def lseek(fd: int, position: int, how: int, /) -> int: ... 
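A minimal sketch of the lseek() whence values documented above; the temporary file exists only for the demonstration and is removed afterwards.

    import os
    import tempfile

    fd, path = tempfile.mkstemp()          # mkstemp() already returns an open descriptor
    try:
        os.write(fd, b"hello world")
        os.lseek(fd, 0, os.SEEK_SET)       # absolute position from the start of the file
        assert os.read(fd, 5) == b"hello"
        os.lseek(fd, -5, os.SEEK_END)      # offset relative to the end of the file
        assert os.read(fd, 5) == b"world"
    finally:
        os.close(fd)
        os.remove(path)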
- -def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... -def pipe() -> tuple[int, int]: ... -def read(fd: int, length: int, /) -> bytes: ... + def lseek(fd: int, position: int, how: int, /) -> int: + """Set the position of a file descriptor. Return the new position. + +Return the new cursor position in number of bytes +relative to the beginning of the file. +""" + +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: + """Open a file for low level IO. Returns a file descriptor (integer). + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" +def pipe() -> tuple[int, int]: + """Create a pipe. + +Returns a tuple of two file descriptors: + (read_fd, write_fd) +""" +def read(fd: int, length: int, /) -> bytes: + """Read from a file descriptor. Returns a bytes object. +""" if sys.version_info >= (3, 12) or sys.platform != "win32": - def get_blocking(fd: int, /) -> bool: ... - def set_blocking(fd: int, blocking: bool, /) -> None: ... + def get_blocking(fd: int, /) -> bool: + """Get the blocking mode of the file descriptor. + +Return False if the O_NONBLOCK flag is set, True if the flag is cleared. +""" + def set_blocking(fd: int, blocking: bool, /) -> None: + """Set the blocking mode of the specified file descriptor. + +Set the O_NONBLOCK flag if blocking is False, +clear the O_NONBLOCK flag otherwise. +""" if sys.platform != "win32": - def fchown(fd: int, uid: int, gid: int) -> None: ... - def fpathconf(fd: int, name: str | int, /) -> int: ... - def fstatvfs(fd: int, /) -> statvfs_result: ... - def lockf(fd: int, command: int, length: int, /) -> None: ... - def openpty() -> tuple[int, int]: ... # some flavors of Unix + def fchown(fd: int, uid: int, gid: int) -> None: + """Change the owner and group id of the file specified by file descriptor. + +Equivalent to os.chown(fd, uid, gid). +""" + def fpathconf(fd: int, name: str | int, /) -> int: + """Return the configuration limit name for the file descriptor fd. + +If there is no limit, return -1. +""" + def fstatvfs(fd: int, /) -> statvfs_result: + """Perform an fstatvfs system call on the given fd. + +Equivalent to statvfs(fd). +""" + def lockf(fd: int, command: int, length: int, /) -> None: + """Apply, test or remove a POSIX lock on an open file descriptor. + + fd + An open file descriptor. + command + One of F_LOCK, F_TLOCK, F_ULOCK or F_TEST. + length + The number of bytes to lock, starting at the current position. +""" + def openpty() -> tuple[int, int]: # some flavors of Unix + """Open a pseudo-terminal. + +Return a tuple of (master_fd, slave_fd) containing open file descriptors +for both the master and slave ends. +""" if sys.platform != "darwin": - def fdatasync(fd: FileDescriptorLike) -> None: ... - def pipe2(flags: int, /) -> tuple[int, int]: ... # some flavors of Unix - def posix_fallocate(fd: int, offset: int, length: int, /) -> None: ... - def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: ... - - def pread(fd: int, length: int, offset: int, /) -> bytes: ... - def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: ... + def fdatasync(fd: FileDescriptorLike) -> None: + """Force write of fd to disk without forcing update of metadata. 
+""" + def pipe2(flags: int, /) -> tuple[int, int]: # some flavors of Unix + """Create a pipe with flags set atomically. + +Returns a tuple of two file descriptors: + (read_fd, write_fd) + +flags can be constructed by ORing together one or more of these values: +O_NONBLOCK, O_CLOEXEC. +""" + def posix_fallocate(fd: int, offset: int, length: int, /) -> None: + """Ensure a file has allocated at least a particular number of bytes on disk. + +Ensure that the file specified by fd encompasses a range of bytes +starting at offset bytes from the beginning and continuing for length bytes. +""" + def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: + """Announce an intention to access data in a specific pattern. + +Announce an intention to access data in a specific pattern, thus allowing +the kernel to make optimizations. +The advice applies to the region of the file specified by fd starting at +offset and continuing for length bytes. +advice is one of POSIX_FADV_NORMAL, POSIX_FADV_SEQUENTIAL, +POSIX_FADV_RANDOM, POSIX_FADV_NOREUSE, POSIX_FADV_WILLNEED, or +POSIX_FADV_DONTNEED. +""" + + def pread(fd: int, length: int, offset: int, /) -> bytes: + """Read a number of bytes from a file descriptor starting at a particular offset. + +Read length bytes from file descriptor fd, starting at offset bytes from +the beginning of the file. The file offset remains unchanged. +""" + def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: + """Write bytes to a file descriptor starting at a particular offset. + +Write buffer to fd, starting at offset bytes from the beginning of +the file. Returns the number of bytes written. Does not change the +current file offset. +""" # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not - def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: ... - def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: ... + def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: + """Reads from a file descriptor into a number of mutable bytes-like objects. + +Combines the functionality of readv() and pread(). As readv(), it will +transfer data into each buffer until it is full and then move on to the next +buffer in the sequence to hold the rest of the data. Its fourth argument, +specifies the file offset at which the input operation is to be performed. It +will return the total number of bytes read (which can be less than the total +capacity of all the objects). + +The flags argument contains a bitwise OR of zero or more of the following flags: + +- RWF_HIPRI +- RWF_NOWAIT + +Using non-zero flags requires Linux 4.6 or newer. +""" + def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: + """Writes the contents of bytes-like objects to a file descriptor at a given offset. + +Combines the functionality of writev() and pwrite(). All buffers must be a sequence +of bytes-like objects. Buffers are processed in array order. Entire contents of first +buffer is written before proceeding to second, and so on. The operating system may +set a limit (sysconf() value SC_IOV_MAX) on the number of buffers that can be used. +This function writes the contents of each object to the file descriptor and returns +the total number of bytes written. 
+ +The flags argument contains a bitwise OR of zero or more of the following flags: + +- RWF_DSYNC +- RWF_SYNC +- RWF_APPEND + +Using non-zero flags requires Linux 4.7 or newer. +""" if sys.platform != "darwin": if sys.version_info >= (3, 10): RWF_APPEND: Final[int] # docs say available on 3.7+, stubtest says otherwise @@ -1144,7 +1557,9 @@ if sys.platform != "win32": RWF_NOWAIT: Final[int] if sys.platform == "linux": - def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: ... + def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: + """Copy count bytes from file descriptor in_fd to file descriptor out_fd. +""" else: def sendfile( out_fd: FileDescriptor, @@ -1156,25 +1571,76 @@ if sys.platform != "win32": flags: int = 0, ) -> int: ... # FreeBSD and Mac OS X only - def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: ... - def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: ... + def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: + """Read from a file descriptor fd into an iterable of buffers. + +The buffers should be mutable buffers accepting bytes. +readv will transfer data into each buffer until it is full +and then move on to the next buffer in the sequence to hold +the rest of the data. + +readv returns the total number of bytes read, +which may be less than the total capacity of all the buffers. +""" + def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: + """Iterate over buffers, and write the contents of each to a file descriptor. + +Returns the total number of bytes written. +buffers must be a sequence of bytes-like objects. +""" if sys.version_info >= (3, 14): - def readinto(fd: int, buffer: ReadableBuffer, /) -> int: ... + def readinto(fd: int, buffer: ReadableBuffer, /) -> int: + """Read into a buffer object from a file descriptor. + +The buffer should be mutable and bytes-like. On success, returns the number of +bytes read. Less bytes may be read than the size of the buffer. The underlying +system call will be retried when interrupted by a signal, unless the signal +handler raises an exception. Other errors will not be retried and an error will +be raised. + +Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 +(which can be used to check for errors without reading data). Never returns +negative. +""" @final class terminal_size(structseq[int], tuple[int, int]): + """A tuple of (columns, lines) for holding terminal window size +""" if sys.version_info >= (3, 10): __match_args__: Final = ("columns", "lines") @property - def columns(self) -> int: ... + def columns(self) -> int: + """width of the terminal window in characters +""" @property - def lines(self) -> int: ... + def lines(self) -> int: + """height of the terminal window in characters +""" + +def get_terminal_size(fd: int = ..., /) -> terminal_size: + """Return the size of the terminal window as (columns, lines). + +The optional argument fd (default standard output) specifies +which file descriptor should be queried. -def get_terminal_size(fd: int = ..., /) -> terminal_size: ... -def get_inheritable(fd: int, /) -> bool: ... -def set_inheritable(fd: int, inheritable: bool, /) -> None: ... +If the file descriptor is not connected to a terminal, an OSError +is thrown. + +This function will only be defined if an implementation is +available for this system. 
+ +shutil.get_terminal_size is the high-level function which should +normally be used, os.get_terminal_size is the low-level implementation. +""" +def get_inheritable(fd: int, /) -> bool: + """Get the close-on-exe flag of the specified file descriptor. +""" +def set_inheritable(fd: int, inheritable: bool, /) -> None: + """Set the inheritable flag of the specified file descriptor. +""" if sys.platform == "win32": def get_handle_inheritable(handle: int, /) -> bool: ... @@ -1182,33 +1648,144 @@ if sys.platform == "win32": if sys.platform != "win32": # Unix only - def tcgetpgrp(fd: int, /) -> int: ... - def tcsetpgrp(fd: int, pgid: int, /) -> None: ... - def ttyname(fd: int, /) -> str: ... - -def write(fd: int, data: ReadableBuffer, /) -> int: ... + def tcgetpgrp(fd: int, /) -> int: + """Return the process group associated with the terminal specified by fd. +""" + def tcsetpgrp(fd: int, pgid: int, /) -> None: + """Set the process group associated with the terminal specified by fd. +""" + def ttyname(fd: int, /) -> str: + """Return the name of the terminal device connected to 'fd'. + + fd + Integer file descriptor handle. +""" + +def write(fd: int, data: ReadableBuffer, /) -> int: + """Write a bytes object to a file descriptor. +""" def access( path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True -) -> bool: ... -def chdir(path: FileDescriptorOrPath) -> None: ... +) -> bool: + """Use the real uid/gid to test for access to a path. + + path + Path to be tested; can be string, bytes, or a path-like object. + mode + Operating-system mode bitfield. Can be F_OK to test existence, + or the inclusive-OR of R_OK, W_OK, and X_OK. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that + directory. + effective_ids + If True, access will use the effective uid/gid instead of + the real uid/gid. + follow_symlinks + If False, and the last element of the path is a symbolic link, + access will examine the symbolic link itself instead of the file + the link points to. + +dir_fd, effective_ids, and follow_symlinks may not be implemented + on your platform. If they are unavailable, using them will raise a + NotImplementedError. + +Note that most operations will use the effective uid/gid, therefore this + routine can be used in a suid/sgid environment to test if the invoking user + has the specified access to the path. +""" +def chdir(path: FileDescriptorOrPath) -> None: + """Change the current working directory to the specified path. + +path may always be specified as a string. +On some platforms, path may also be specified as an open file descriptor. +If this functionality is unavailable, using it raises an exception. +""" if sys.platform != "win32": - def fchdir(fd: FileDescriptorLike) -> None: ... - -def getcwd() -> str: ... -def getcwdb() -> bytes: ... -def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... + def fchdir(fd: FileDescriptorLike) -> None: + """Change to the directory of the given file descriptor. + +fd must be opened on a directory, not a file. +Equivalent to os.chdir(fd). +""" + +def getcwd() -> str: + """Return a unicode string representing the current working directory. +""" +def getcwdb() -> bytes: + """Return a bytes string representing the current working directory. 
+""" +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: + """Change the access permissions of a file. + + path + Path to be modified. May always be specified as a str, bytes, or a path-like object. + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + mode + Operating-system mode bitfield. + Be careful when using number literals for *mode*. The conventional UNIX notation for + numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in + Python. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that + directory. + follow_symlinks + If False, and the last element of the path is a symbolic link, + chmod will modify the symbolic link itself instead of the file + the link points to. + +It is an error to use dir_fd or follow_symlinks when specifying path as + an open file descriptor. +dir_fd and follow_symlinks may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. +""" if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix def lchflags(path: StrOrBytesPath, flags: int) -> None: ... if sys.platform != "win32": - def chroot(path: StrOrBytesPath) -> None: ... + def chroot(path: StrOrBytesPath) -> None: + """Change root directory to path. +""" def chown( path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True - ) -> None: ... - def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... + ) -> None: + """Change the owner and group id of path to the numeric uid and gid.\\ + + path + Path to be examined; can be string, bytes, a path-like object, or open-file-descriptor int. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that + directory. + follow_symlinks + If False, and the last element of the path is a symbolic link, + stat will examine the symbolic link itself instead of the file + the link points to. + +path may always be specified as a string. +On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +If follow_symlinks is False, and the last element of the path is a symbolic + link, chown will modify the symbolic link itself instead of the file the + link points to. +It is an error to use dir_fd or follow_symlinks when specifying path as + an open file descriptor. +dir_fd and follow_symlinks may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. +""" + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: + """Change the owner and group id of path to the numeric uid and gid. + +This function will not follow symbolic links. +Equivalent to os.chown(path, uid, gid, follow_symlinks=False). +""" def link( src: StrOrBytesPath, @@ -1217,31 +1794,162 @@ def link( src_dir_fd: int | None = None, dst_dir_fd: int | None = None, follow_symlinks: bool = True, -) -> None: ... 
-def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... -def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... +) -> None: + """Create a hard link to a file. + +If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. +If follow_symlinks is False, and the last element of src is a symbolic + link, link will create a link to the symbolic link itself instead of the + file the link points to. +src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your + platform. If they are unavailable, using them will raise a + NotImplementedError. +""" +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: + """Perform a stat system call on the given path, without following symbolic links. + +Like stat(), but do not follow symbolic links. +Equivalent to stat(path, follow_symlinks=False). +""" +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: + """Create a directory. + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + +The mode argument is ignored on Windows. Where it is used, the current umask +value is first masked out. +""" if sys.platform != "win32": - def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: # Unix only + """Create a "fifo" (a POSIX named pipe). + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" -def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: + """makedirs(name [, mode=0o777][, exist_ok=False]) + +Super-mkdir; create a leaf directory and all intermediate ones. Works like +mkdir, except that any intermediate path segment (not just the rightmost) +will be created if it does not exist. If the target directory already +exists, raise an OSError if exist_ok is False. Otherwise no exception is +raised. This is recursive. + +""" if sys.platform != "win32": - def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... - def major(device: int, /) -> int: ... - def minor(device: int, /) -> int: ... - def makedev(major: int, minor: int, /) -> int: ... - def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only - -def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... -def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... -def removedirs(name: StrOrBytesPath) -> None: ... -def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... -def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... 
+ def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: + """Create a node in the file system. + +Create a node in the file system (file, device special file or named pipe) +at path. mode specifies both the permissions to use and the +type of node to be created, being combined (bitwise OR) with one of +S_IFREG, S_IFCHR, S_IFBLK, and S_IFIFO. If S_IFCHR or S_IFBLK is set on mode, +device defines the newly created device special file (probably using +os.makedev()). Otherwise device is ignored. + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" + def major(device: int, /) -> int: + """Extracts a device major number from a raw device number. +""" + def minor(device: int, /) -> int: + """Extracts a device minor number from a raw device number. +""" + def makedev(major: int, minor: int, /) -> int: + """Composes a raw device number from the major and minor device numbers. +""" + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: # Unix only + """Return the configuration limit name for the file or directory path. + +If there is no limit, return -1. +On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. +""" + +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: + """Return a string representing the path to which the symbolic link points. + +If dir_fd is not None, it should be a file descriptor open to a directory, +and path should be relative; path will then be relative to that directory. + +dir_fd may not be implemented on your platform. If it is unavailable, +using it will raise a NotImplementedError. +""" +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: + """Remove a file (same as unlink()). + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" +def removedirs(name: StrOrBytesPath) -> None: + """removedirs(name) + +Super-rmdir; remove a leaf directory and all empty intermediate +ones. Works like rmdir except that, if the leaf directory is +successfully removed, directories corresponding to rightmost path +segments will be pruned away until either the whole path is +consumed or an error occurs. Errors during this latter phase are +ignored -- they generally mean that a directory was not empty. + +""" +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: + """Rename a file or directory. + +If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. +src_dir_fd and dst_dir_fd, may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. +""" +def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: + """renames(old, new) + +Super-rename; create directories as necessary and delete any left +empty. 
Works like rename, except creation of any intermediate +directories needed to make the new pathname good is attempted +first. After the rename, directories corresponding to rightmost +path segments of the old name will be pruned until either the +whole path is consumed or a nonempty directory is found. + +Note: this function can fail with the new directory structure made +if you lack permissions needed to unlink the leaf directory or +file. + +""" def replace( src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None -) -> None: ... -def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +) -> None: + """Rename a file or directory, overwriting the destination. + +If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. +src_dir_fd and dst_dir_fd, may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. +""" +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: + """Remove a directory. + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" @final @type_check_only class _ScandirIterator(Generic[AnyStr]): @@ -1253,25 +1961,86 @@ class _ScandirIterator(Generic[AnyStr]): def close(self) -> None: ... @overload -def scandir(path: None = None) -> _ScandirIterator[str]: ... +def scandir(path: None = None) -> _ScandirIterator[str]: + """Return an iterator of DirEntry objects for given path. + +path can be specified as either str, bytes, or a path-like object. If path +is bytes, the names of yielded DirEntry objects will also be bytes; in +all other circumstances they will be str. + +If path is None, uses the path='.'. +""" @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: + """Perform a stat system call on the given path. + + path + Path to be examined; can be string, bytes, a path-like object or + open-file-descriptor int. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be a relative string; path will then be relative to + that directory. + follow_symlinks + If False, and the last element of the path is a symbolic link, + stat will examine the symbolic link itself instead of the file + the link points to. + +dir_fd and follow_symlinks may not be implemented + on your platform. If they are unavailable, using them will raise a + NotImplementedError. + +It's an error to use dir_fd or follow_symlinks when specifying path as + an open file descriptor. +""" if sys.platform != "win32": - def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: # Unix only + """Perform a statvfs system call on the given path. + +path may always be specified as a string. +On some platforms, path may also be specified as an open file descriptor. 
+ If this functionality is unavailable, using it raises an exception. +""" def symlink( src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None -) -> None: ... +) -> None: + """Create a symbolic link pointing to src named dst. -if sys.platform != "win32": - def sync() -> None: ... # Unix only +target_is_directory is required on Windows if the target is to be + interpreted as a directory. (On Windows, symlink requires + Windows 6.0 or greater, and raises a NotImplementedError otherwise.) + target_is_directory is ignored on non-Windows platforms. -def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 -def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" + +if sys.platform != "win32": + def sync() -> None: # Unix only + """Force write of everything to disk. +""" + +def truncate(path: FileDescriptorOrPath, length: int) -> None: # Unix only up to version 3.4 + """Truncate a file, specified by path, to a specific length. + +On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. +""" +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: + """Remove a file (same as remove()). + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. +""" def utime( path: FileDescriptorOrPath, times: tuple[int, int] | tuple[float, float] | None = None, @@ -1279,13 +2048,96 @@ def utime( ns: tuple[int, int] = ..., dir_fd: int | None = None, follow_symlinks: bool = True, -) -> None: ... +) -> None: + """Set the access and modified time of path. + +path may always be specified as a string. +On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + +If times is not None, it must be a tuple (atime, mtime); + atime and mtime should be expressed as float seconds since the epoch. +If ns is specified, it must be a tuple (atime_ns, mtime_ns); + atime_ns and mtime_ns should be expressed as integer nanoseconds + since the epoch. +If times is None and ns is unspecified, utime uses the current time. +Specifying tuples for both times and ns is an error. + +If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. +If follow_symlinks is False, and the last element of the path is a symbolic + link, utime will modify the symbolic link itself instead of the file the + link points to. +It is an error to use dir_fd or follow_symlinks when specifying path + as an open file descriptor. +dir_fd and follow_symlinks may not be available on your platform. + If they are unavailable, using them will raise a NotImplementedError. +""" _OnError: TypeAlias = Callable[[OSError], object] def walk( top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False -) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... 
+) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: + """Directory tree generator. + +For each directory in the directory tree rooted at top (including top +itself, but excluding '.' and '..'), yields a 3-tuple + + dirpath, dirnames, filenames + +dirpath is a string, the path to the directory. dirnames is a list of +the names of the subdirectories in dirpath (including symlinks to directories, +and excluding '.' and '..'). +filenames is a list of the names of the non-directory files in dirpath. +Note that the names in the lists are just names, with no path components. +To get a full path (which begins with top) to a file or directory in +dirpath, do os.path.join(dirpath, name). + +If optional arg 'topdown' is true or not specified, the triple for a +directory is generated before the triples for any of its subdirectories +(directories are generated top down). If topdown is false, the triple +for a directory is generated after the triples for all of its +subdirectories (directories are generated bottom up). + +When topdown is true, the caller can modify the dirnames list in-place +(e.g., via del or slice assignment), and walk will only recurse into the +subdirectories whose names remain in dirnames; this can be used to prune the +search, or to impose a specific order of visiting. Modifying dirnames when +topdown is false has no effect on the behavior of os.walk(), since the +directories in dirnames have already been generated by the time dirnames +itself is generated. No matter the value of topdown, the list of +subdirectories is retrieved before the tuples for the directory and its +subdirectories are generated. + +By default errors from the os.scandir() call are ignored. If +optional arg 'onerror' is specified, it should be a function; it +will be called with one argument, an OSError instance. It can +report the error to continue with the walk, or raise the exception +to abort the walk. Note that the filename is available as the +filename attribute of the exception object. + +By default, os.walk does not follow symbolic links to subdirectories on +systems that support them. In order to get this functionality, set the +optional argument 'followlinks' to true. + +Caution: if you pass a relative pathname for top, don't change the +current working directory between resumptions of walk. walk never +changes the current directory, and assumes that the client doesn't +either. + +Example: + +import os +from os.path import join, getsize +for root, dirs, files in os.walk('python/Lib/xml'): + print(root, "consumes ") + print(sum(getsize(join(root, name)) for name in files), end=" ") + print("bytes in", len(files), "non-directory files") + if '__pycache__' in dirs: + dirs.remove('__pycache__') # don't visit __pycache__ directories + +""" if sys.platform != "win32": @overload @@ -1296,7 +2148,39 @@ if sys.platform != "win32": *, follow_symlinks: bool = False, dir_fd: int | None = None, - ) -> Iterator[tuple[str, list[str], list[str], int]]: ... + ) -> Iterator[tuple[str, list[str], list[str], int]]: + """Directory tree generator. + +This behaves exactly like walk(), except that it yields a 4-tuple + + dirpath, dirnames, filenames, dirfd + +`dirpath`, `dirnames` and `filenames` are identical to walk() output, +and `dirfd` is a file descriptor referring to the directory `dirpath`. + +The advantage of fwalk() over walk() is that it's safe against symlink +races (when follow_symlinks is False). 
+ +If dir_fd is not None, it should be a file descriptor open to a directory, + and top should be relative; top will then be relative to that directory. + (dir_fd is always supported for fwalk.) + +Caution: +Since fwalk() yields file descriptors, those are only valid until the +next iteration step, so you should dup() them if you want to keep them +for a longer period. + +Example: + +import os +for root, dirs, files, rootfd in os.fwalk('python/Lib/xml'): + print(root, "consumes", end="") + print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files), + end="") + print("bytes in", len(files), "non-directory files") + if '__pycache__' in dirs: + dirs.remove('__pycache__') # don't visit __pycache__ directories +""" @overload def fwalk( top: BytesPath, @@ -1307,9 +2191,31 @@ if sys.platform != "win32": dir_fd: int | None = None, ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": - def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... - def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... - def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: + """Return the value of extended attribute attribute on path. + +path may be either a string, a path-like object, or an open file descriptor. +If follow_symlinks is False, and the last element of the path is a symbolic + link, getxattr will examine the symbolic link itself instead of the file + the link points to. +""" + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: + """Return a list of extended attributes on path. + +path may be either None, a string, a path-like object, or an open file descriptor. +if path is None, listxattr will examine the current directory. +If follow_symlinks is False, and the last element of the path is a symbolic + link, listxattr will examine the symbolic link itself instead of the file + the link points to. +""" + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: + """Remove extended attribute attribute on path. + +path may be either a string, a path-like object, or an open file descriptor. +If follow_symlinks is False, and the last element of the path is a symbolic + link, removexattr will modify the symbolic link itself instead of the file + the link points to. +""" def setxattr( path: FileDescriptorOrPath, attribute: StrOrBytesPath, @@ -1317,21 +2223,54 @@ if sys.platform != "win32": flags: int = 0, *, follow_symlinks: bool = True, - ) -> None: ... + ) -> None: + """Set extended attribute attribute on path to value. + +path may be either a string, a path-like object, or an open file descriptor. +If follow_symlinks is False, and the last element of the path is a symbolic + link, setxattr will modify the symbolic link itself instead of the file + the link points to. +""" -def abort() -> NoReturn: ... +def abort() -> NoReturn: + """Abort the interpreter immediately. + +This function 'dumps core' or otherwise fails in the hardest way possible +on the hosting operating system. This function never returns. +""" # These are defined as execl(file, *args) but the first *arg is mandatory. 
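The comment above about execl() taking a mandatory first *arg reflects the usual l/v naming split in the exec family. A rough sketch of the call shapes, assuming a POSIX system with /bin/echo; each call replaces the current process, so only the first one reached would ever run:

    import os

    # "l" variants take the argument vector as separate positional arguments;
    # "v" variants take it as one list or tuple. argv[0] is conventionally the program name.
    os.execl("/bin/echo", "echo", "hello")    # does not return on success
    os.execv("/bin/echo", ["echo", "hello"])  # equivalent, vector form
    # "p" variants search $PATH, "e" variants take an explicit environment mapping:
    os.execvpe("echo", ["echo", "hello"], {"PATH": "/bin:/usr/bin"})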
-def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... -def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: ... +def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: + """execl(file, *args) + +Execute the executable file with argument list args, replacing the +current process. +""" +def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: + """execlp(file, *args) + +Execute the executable file (which is searched for along $PATH) +with argument list args, replacing the current process. +""" # These are: execle(file, *args, env) but env is pulled from the last element of the args. def execle( file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] -) -> NoReturn: ... +) -> NoReturn: + """execle(file, *args, env) + +Execute the executable file with argument list args and +environment env, replacing the current process. +""" def execlpe( file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] -) -> NoReturn: ... +) -> NoReturn: + """execlpe(file, *args, env) + +Execute the executable file (which is searched for along $PATH) +with argument list args and environment env, replacing the current +process. +""" # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. @@ -1354,19 +2293,67 @@ _ExecVArgs: TypeAlias = ( # we limit to str | bytes. _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] -def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: ... -def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... -def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... -def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... -def _exit(status: int) -> NoReturn: ... -def kill(pid: int, signal: int, /) -> None: ... +def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: + """Execute an executable path with arguments, replacing current process. + + path + Path of executable file. + argv + Tuple or list of strings. +""" +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: + """Execute an executable path with arguments, replacing current process. + + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. +""" +def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: + """execvp(file, args) + +Execute the executable file (which is searched for along $PATH) +with argument list args, replacing the current process. +args may be a list or tuple of strings. +""" +def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: + """execvpe(file, args, env) + +Execute the executable file (which is searched for along $PATH) +with argument list args and environment env, replacing the +current process. +args may be a list or tuple of strings. +""" +def _exit(status: int) -> NoReturn: + """Exit to the system with specified status, without normal exit processing. +""" +def kill(pid: int, signal: int, /) -> None: + """Kill a process with a signal. +""" if sys.platform != "win32": # Unix only - def fork() -> int: ... - def forkpty() -> tuple[int, int]: ... 
# some flavors of Unix - def killpg(pgid: int, signal: int, /) -> None: ... - def nice(increment: int, /) -> int: ... + def fork() -> int: + """Fork a child process. + +Return 0 to child process and PID of child to parent process. +""" + def forkpty() -> tuple[int, int]: # some flavors of Unix + """Fork a new process with a new pseudo-terminal as controlling tty. + +Returns a tuple of (pid, master_fd). +Like fork(), return pid of 0 to the child process, +and pid of child to the parent process. +To both, return fd of newly opened pseudo-terminal. +""" + def killpg(pgid: int, signal: int, /) -> None: + """Kill a process group with a signal. +""" + def nice(increment: int, /) -> int: + """Add increment to the priority of process and return the new priority. +""" if sys.platform != "darwin" and sys.platform != "linux": def plock(op: int, /) -> None: ... @@ -1391,36 +2378,100 @@ class _wrap_close: def writelines(self, lines: Iterable[str], /) -> None: ... def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... -def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... -def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig +def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: + """spawnl(mode, file, *args) -> integer + +Execute file with arguments from args in a subprocess. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" +def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise sig + """spawnle(mode, file, *args, env) -> integer + +Execute file with arguments from args in a subprocess with the +supplied environment. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" if sys.platform != "win32": - def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... - def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: + """spawnv(mode, file, args) -> integer + +Execute file with arguments from args in a subprocess. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: + """spawnve(mode, file, args, env) -> integer + +Execute file with arguments from args in a subprocess with the +specified environment. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" else: def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... -def system(command: StrOrBytesPath) -> int: ... +def system(command: StrOrBytesPath) -> int: + """Execute the command in a subshell. +""" @final class times_result(structseq[float], tuple[float, float, float, float, float]): + """times_result: Result from os.times(). 
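A minimal sketch of the spawn* docstrings above, using P_WAIT so the return value is the child's exit code; "/bin/echo" is an assumed path.

import os

code = os.spawnv(os.P_WAIT, "/bin/echo", ["echo", "spawned"])
print("exit code:", code)                          # 0 if the program exited normally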
+ +This object may be accessed either as a tuple of + (user, system, children_user, children_system, elapsed), +or via the attributes user, system, children_user, children_system, +and elapsed. + +See os.times for more information. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") @property - def user(self) -> float: ... + def user(self) -> float: + """user time +""" @property - def system(self) -> float: ... + def system(self) -> float: + """system time +""" @property - def children_user(self) -> float: ... + def children_user(self) -> float: + """user time of children +""" @property - def children_system(self) -> float: ... + def children_system(self) -> float: + """system time of children +""" @property - def elapsed(self) -> float: ... + def elapsed(self) -> float: + """elapsed time since an arbitrary point in the past +""" + +def times() -> times_result: + """Return a collection containing process timing information. + +The object returned behaves like a named tuple with these fields: + (utime, stime, cutime, cstime, elapsed_time) +All fields are floating-point numbers. +""" +def waitpid(pid: int, options: int, /) -> tuple[int, int]: + """Wait for completion of a given child process. -def times() -> times_result: ... -def waitpid(pid: int, options: int, /) -> tuple[int, int]: ... +Returns a tuple of information regarding the child process: + (pid, status) + +The options argument is ignored on Windows. +""" if sys.platform == "win32": if sys.version_info >= (3, 10): @@ -1435,15 +2486,60 @@ if sys.platform == "win32": def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ... else: - def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... - def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature - def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... - def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... - def wait() -> tuple[int, int]: ... # Unix only + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: + """spawnlp(mode, file, *args) -> integer + +Execute file (which is looked for along $PATH) with arguments from +args in a subprocess with the supplied environment. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise signature + """spawnlpe(mode, file, *args, env) -> integer + +Execute file (which is looked for along $PATH) with arguments from +args in a subprocess with the supplied environment. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: + """spawnvp(mode, file, args) -> integer + +Execute file (which is looked for along $PATH) with arguments from +args in a subprocess. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. 
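A small sketch of os.times() as described above; the returned structseq supports both attribute and tuple access.

import os

t = os.times()
print(t.user, t.system)        # attribute access
print(tuple(t))                # the same five floats as a plain tuple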
+""" + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: + """spawnvpe(mode, file, args, env) -> integer + +Execute file (which is looked for along $PATH) with arguments from +args in a subprocess with the supplied environment. +If mode == P_NOWAIT return the pid of the process. +If mode == P_WAIT return the process's exit code if it exits normally; +otherwise return -SIG, where SIG is the signal that killed it. +""" + def wait() -> tuple[int, int]: # Unix only + """Wait for completion of a child process. + +Returns a tuple of information about the child process: + (pid, status) +""" # Added to MacOS in 3.13 if sys.platform != "darwin" or sys.version_info >= (3, 13): @final class waitid_result(structseq[int], tuple[int, int, int, int, int]): + """waitid_result: Result from waitid. + +This object may be accessed either as a tuple of + (si_pid, si_uid, si_signo, si_status, si_code), +or via the attributes si_pid, si_uid, and so on. + +See os.waitid for more information. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") @@ -1458,20 +2554,62 @@ else: @property def si_code(self) -> int: ... - def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: ... + def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: + """Returns the result of waiting for a process or processes. + + idtype + Must be one of be P_PID, P_PGID or P_ALL. + id + The id to wait on. + options + Constructed from the ORing of one or more of WEXITED, WSTOPPED + or WCONTINUED and additionally may be ORed with WNOHANG or WNOWAIT. + +Returns either waitid_result or None if WNOHANG is specified and there are +no children in a waitable state. +""" from resource import struct_rusage - def wait3(options: int) -> tuple[int, int, struct_rusage]: ... - def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: ... - def WCOREDUMP(status: int, /) -> bool: ... - def WIFCONTINUED(status: int) -> bool: ... - def WIFSTOPPED(status: int) -> bool: ... - def WIFSIGNALED(status: int) -> bool: ... - def WIFEXITED(status: int) -> bool: ... - def WEXITSTATUS(status: int) -> int: ... - def WSTOPSIG(status: int) -> int: ... - def WTERMSIG(status: int) -> int: ... + def wait3(options: int) -> tuple[int, int, struct_rusage]: + """Wait for completion of a child process. + +Returns a tuple of information about the child process: + (pid, status, rusage) +""" + def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: + """Wait for completion of a specific child process. + +Returns a tuple of information about the child process: + (pid, status, rusage) +""" + def WCOREDUMP(status: int, /) -> bool: + """Return True if the process returning status was dumped to a core file. +""" + def WIFCONTINUED(status: int) -> bool: + """Return True if a particular process was continued from a job control stop. + +Return True if the process returning status was continued from a +job control stop. +""" + def WIFSTOPPED(status: int) -> bool: + """Return True if the process returning status was stopped. +""" + def WIFSIGNALED(status: int) -> bool: + """Return True if the process returning status was terminated by a signal. +""" + def WIFEXITED(status: int) -> bool: + """Return True if the process returning status exited via the exit() system call. +""" + def WEXITSTATUS(status: int) -> int: + """Return the process return code from status. 
+""" + def WSTOPSIG(status: int) -> int: + """Return the signal that stopped the process that provided the status value. +""" + def WTERMSIG(status: int) -> int: + """Return the signal that terminated the process that provided the status value. +""" def posix_spawn( path: StrOrBytesPath, argv: _ExecVArgs, @@ -1485,7 +2623,30 @@ else: setsigmask: Iterable[int] = ..., setsigdef: Iterable[int] = ..., scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: ... + ) -> int: + """Execute the program specified by path in a new process. + + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + file_actions + A sequence of file action tuples. + setpgroup + The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. + resetids + If the value is `true` the POSIX_SPAWN_RESETIDS will be activated. + setsid + If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. + setsigmask + The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. + setsigdef + The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. + scheduler + A tuple with the scheduler policy (optional) and parameters. +""" def posix_spawnp( path: StrOrBytesPath, argv: _ExecVArgs, @@ -1499,7 +2660,30 @@ else: setsigmask: Iterable[int] = ..., setsigdef: Iterable[int] = ..., scheduler: tuple[Any, sched_param] | None = ..., - ) -> int: ... + ) -> int: + """Execute the program specified by path in a new process. + + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + file_actions + A sequence of file action tuples. + setpgroup + The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. + resetids + If the value is `True` the POSIX_SPAWN_RESETIDS will be activated. + setsid + If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. + setsigmask + The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. + setsigdef + The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. + scheduler + A tuple with the scheduler policy (optional) and parameters. +""" POSIX_SPAWN_OPEN: Final = 0 POSIX_SPAWN_CLOSE: Final = 1 POSIX_SPAWN_DUP2: Final = 2 @@ -1507,45 +2691,113 @@ else: if sys.platform != "win32": @final class sched_param(structseq[int], tuple[int]): + """Currently has only one field: sched_priority + + sched_priority + A scheduling parameter. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("sched_priority",) def __new__(cls, sched_priority: int) -> Self: ... @property - def sched_priority(self) -> int: ... - - def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix - def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix - def sched_yield() -> None: ... # some flavors of Unix + def sched_priority(self) -> int: + """the scheduling priority +""" + + def sched_get_priority_min(policy: int) -> int: # some flavors of Unix + """Get the minimum scheduling priority for policy. +""" + def sched_get_priority_max(policy: int) -> int: # some flavors of Unix + """Get the maximum scheduling priority for policy. +""" + def sched_yield() -> None: # some flavors of Unix + """Voluntarily relinquish the CPU. +""" if sys.platform != "darwin": - def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: ... # some flavors of Unix - def sched_getscheduler(pid: int, /) -> int: ... # some flavors of Unix - def sched_rr_get_interval(pid: int, /) -> float: ... 
# some flavors of Unix - def sched_setparam(pid: int, param: sched_param, /) -> None: ... # some flavors of Unix - def sched_getparam(pid: int, /) -> sched_param: ... # some flavors of Unix - def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: ... # some flavors of Unix - def sched_getaffinity(pid: int, /) -> set[int]: ... # some flavors of Unix - -def cpu_count() -> int | None: ... + def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: # some flavors of Unix + """Set the scheduling policy for the process identified by pid. + +If pid is 0, the calling process is changed. +param is an instance of sched_param. +""" + def sched_getscheduler(pid: int, /) -> int: # some flavors of Unix + """Get the scheduling policy for the process identified by pid. + +Passing 0 for pid returns the scheduling policy for the calling process. +""" + def sched_rr_get_interval(pid: int, /) -> float: # some flavors of Unix + """Return the round-robin quantum for the process identified by pid, in seconds. + +Value returned is a float. +""" + def sched_setparam(pid: int, param: sched_param, /) -> None: # some flavors of Unix + """Set scheduling parameters for the process identified by pid. + +If pid is 0, sets parameters for the calling process. +param should be an instance of sched_param. +""" + def sched_getparam(pid: int, /) -> sched_param: # some flavors of Unix + """Returns scheduling parameters for the process identified by pid. + +If pid is 0, returns parameters for the calling process. +Return value is an instance of sched_param. +""" + def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: # some flavors of Unix + """Set the CPU affinity of the process identified by pid to mask. + +mask should be an iterable of integers identifying CPUs. +""" + def sched_getaffinity(pid: int, /) -> set[int]: # some flavors of Unix + """Return the affinity of the process identified by pid (or the current process if zero). + +The affinity is returned as a set of CPU identifiers. +""" + +def cpu_count() -> int | None: + """Return the number of logical CPUs in the system. + +Return None if indeterminable. +""" if sys.version_info >= (3, 13): # Documented to return `int | None`, but falls back to `len(sched_getaffinity(0))` when # available. See https://github.com/python/cpython/blob/417c130/Lib/os.py#L1175-L1186. if sys.platform != "win32" and sys.platform != "darwin": - def process_cpu_count() -> int: ... + def process_cpu_count() -> int: + """ +Get the number of CPUs of the current process. + +Return the number of logical CPUs usable by the calling thread of the +current process. Return None if indeterminable. +""" else: def process_cpu_count() -> int | None: ... if sys.platform != "win32": # Unix only - def confstr(name: str | int, /) -> str | None: ... - def getloadavg() -> tuple[float, float, float]: ... - def sysconf(name: str | int, /) -> int: ... + def confstr(name: str | int, /) -> str | None: + """Return a string-valued system configuration variable. +""" + def getloadavg() -> tuple[float, float, float]: + """Return average recent system load information. + +Return the number of processes in the system run queue averaged over +the last 1, 5, and 15 minutes as a tuple of three floats. +Raises OSError if the load average was unobtainable. +""" + def sysconf(name: str | int, /) -> int: + """Return an integer-valued system configuration variable. +""" if sys.platform == "linux": - def getrandom(size: int, flags: int = 0) -> bytes: ... 
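A small sketch contrasting cpu_count() with the affinity-aware count described above; sched_getaffinity is only available on some Unixes, hence the hasattr guard.

import os

print("logical CPUs:", os.cpu_count())
if hasattr(os, "sched_getaffinity"):
    print("usable CPUs:", len(os.sched_getaffinity(0)))   # 0 means the calling process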
+ def getrandom(size: int, flags: int = 0) -> bytes: + """Obtain a series of random bytes. +""" -def urandom(size: int, /) -> bytes: ... +def urandom(size: int, /) -> bytes: + """Return a bytes object containing random bytes suitable for cryptographic use. +""" if sys.platform != "win32": def register_at_fork( @@ -1553,7 +2805,19 @@ if sys.platform != "win32": before: Callable[..., Any] | None = ..., after_in_parent: Callable[..., Any] | None = ..., after_in_child: Callable[..., Any] | None = ..., - ) -> None: ... + ) -> None: + """Register callables to be called when forking a new process. + + before + A callable to be called in the parent before the fork() syscall. + after_in_child + A callable to be called in the child after fork(). + after_in_parent + A callable to be called in the parent after fork(). + +'before' callbacks are called in reverse order. +'after_in_child' and 'after_in_parent' callbacks are called in order. +""" if sys.platform == "win32": class _AddedDllDirectory: @@ -1584,12 +2848,47 @@ if sys.platform == "linux": MFD_HUGE_2GB: Final[int] MFD_HUGE_16GB: Final[int] def memfd_create(name: str, flags: int = ...) -> int: ... - def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: ... + def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: + """Copy count bytes from one file descriptor to another. + + src + Source file descriptor. + dst + Destination file descriptor. + count + Number of bytes to copy. + offset_src + Starting offset in src. + offset_dst + Starting offset in dst. + +If offset_src is None, then src is read from the current position; +respectively for offset_dst. +""" + +def waitstatus_to_exitcode(status: int) -> int: + """Convert a wait status to an exit code. -def waitstatus_to_exitcode(status: int) -> int: ... +On Unix: + +* If WIFEXITED(status) is true, return WEXITSTATUS(status). +* If WIFSIGNALED(status) is true, return -WTERMSIG(status). +* Otherwise, raise a ValueError. + +On Windows, return status shifted right by 8 bits. + +On Unix, if the process is being traced or if waitpid() was called with +WUNTRACED option, the caller must first check if WIFSTOPPED(status) is true. +This function must not be called if WIFSTOPPED(status) is true. +""" if sys.platform == "linux": - def pidfd_open(pid: int, flags: int = ...) -> int: ... + def pidfd_open(pid: int, flags: int = ...) -> int: + """Return a file descriptor referring to the process *pid*. + +The descriptor can be used to perform process management without races and +signals. +""" if sys.version_info >= (3, 12) and sys.platform == "linux": PIDFD_NONBLOCK: Final = 2048 @@ -1606,9 +2905,15 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": SPLICE_F_MORE: Final[int] SPLICE_F_MOVE: Final[int] SPLICE_F_NONBLOCK: Final[int] - def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: ... - def eventfd_read(fd: FileDescriptor) -> int: ... - def eventfd_write(fd: FileDescriptor, value: int) -> None: ... + def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: + """Creates and returns an event notification file descriptor. +""" + def eventfd_read(fd: FileDescriptor) -> int: + """Read eventfd value +""" + def eventfd_write(fd: FileDescriptor, value: int) -> None: + """Write eventfd value. 
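A minimal sketch of the eventfd functions stubbed above (Linux, Python 3.10+): one write/read round trip on the kernel counter.

import os

fd = os.eventfd(0)              # counter starts at 0; flags default to EFD_CLOEXEC
os.eventfd_write(fd, 7)
print(os.eventfd_read(fd))      # -> 7; the read also resets the counter
os.close(fd)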
+""" def splice( src: FileDescriptor, dst: FileDescriptor, @@ -1616,7 +2921,26 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": offset_src: int | None = ..., offset_dst: int | None = ..., flags: int = 0, - ) -> int: ... + ) -> int: + """Transfer count bytes from one pipe to a descriptor or vice versa. + + src + Source file descriptor. + dst + Destination file descriptor. + count + Number of bytes to copy. + offset_src + Starting offset in src. + offset_dst + Starting offset in dst. + flags + Flags to modify the semantics of the call. + +If offset_src is None, then src is read from the current position; +respectively for offset_dst. The offset associated to the file +descriptor that refers to a pipe must be None. +""" if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_FILES: Final[int] @@ -1633,14 +2957,54 @@ if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_SYSVSEM: Final[int] # Linux 2.6.26+ CLONE_THREAD: Final[int] CLONE_VM: Final[int] - def unshare(flags: int) -> None: ... - def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: ... + def unshare(flags: int) -> None: + """Disassociate parts of a process (or thread) execution context. + + flags + Namespaces to be unshared. +""" + def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: + """Move the calling thread into different namespaces. + + fd + A file descriptor to a namespace. + nstype + Type of namespace. +""" if sys.version_info >= (3, 13) and sys.platform != "win32": - def posix_openpt(oflag: int, /) -> int: ... - def grantpt(fd: FileDescriptorLike, /) -> None: ... - def unlockpt(fd: FileDescriptorLike, /) -> None: ... - def ptsname(fd: FileDescriptorLike, /) -> str: ... + def posix_openpt(oflag: int, /) -> int: + """Open and return a file descriptor for a master pseudo-terminal device. + +Performs a posix_openpt() C function call. The oflag argument is used to +set file status flags and file access modes as specified in the manual page +of posix_openpt() of your system. +""" + def grantpt(fd: FileDescriptorLike, /) -> None: + """Grant access to the slave pseudo-terminal device. + + fd + File descriptor of a master pseudo-terminal device. + +Performs a grantpt() C function call. +""" + def unlockpt(fd: FileDescriptorLike, /) -> None: + """Unlock a pseudo-terminal master/slave pair. + + fd + File descriptor of a master pseudo-terminal device. + +Performs an unlockpt() C function call. +""" + def ptsname(fd: FileDescriptorLike, /) -> str: + """Return the name of the slave pseudo-terminal device. + + fd + File descriptor of a master pseudo-terminal device. + +If the ptsname_r() C function is available, it is called; +otherwise, performs a ptsname() C function call. +""" if sys.version_info >= (3, 13) and sys.platform == "linux": TFD_TIMER_ABSTIME: Final = 1 @@ -1649,17 +3013,79 @@ if sys.version_info >= (3, 13) and sys.platform == "linux": TFD_CLOEXEC: Final[int] POSIX_SPAWN_CLOSEFROM: Final[int] - def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: ... + def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: + """Create and return a timer file descriptor. + + clockid + A valid clock ID constant as timer file descriptor. + + time.CLOCK_REALTIME + time.CLOCK_MONOTONIC + time.CLOCK_BOOTTIME + flags + 0 or a bit mask of os.TFD_NONBLOCK or os.TFD_CLOEXEC. + + os.TFD_NONBLOCK + If *TFD_NONBLOCK* is set as a flag, read doesn't blocks. + If *TFD_NONBLOCK* is not set as a flag, read block until the timer fires. 
+ + os.TFD_CLOEXEC + If *TFD_CLOEXEC* is set as a flag, enable the close-on-exec flag +""" def timerfd_settime( fd: FileDescriptor, /, *, flags: int = 0, initial: float = 0.0, interval: float = 0.0 - ) -> tuple[float, float]: ... - def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: ... - def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: ... - def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: ... + ) -> tuple[float, float]: + """Alter a timer file descriptor's internal timer in seconds. + + fd + A timer file descriptor. + flags + 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. + initial + The initial expiration time, in seconds. + interval + The timer's interval, in seconds. +""" + def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: + """Alter a timer file descriptor's internal timer in nanoseconds. + + fd + A timer file descriptor. + flags + 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. + initial + initial expiration timing in seconds. + interval + interval for the timer in seconds. +""" + def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: + """Return a tuple of a timer file descriptor's (interval, next expiration) in float seconds. + + fd + A timer file descriptor. +""" + def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: + """Return a tuple of a timer file descriptor's (interval, next expiration) in nanoseconds. + + fd + A timer file descriptor. +""" if sys.version_info >= (3, 13) or sys.platform != "win32": # Added to Windows in 3.13. - def fchmod(fd: int, mode: int) -> None: ... + def fchmod(fd: int, mode: int) -> None: + """Change the access permissions of the file given by file descriptor fd. + + fd + The file descriptor of the file to be modified. + mode + Operating-system mode bitfield. + Be careful when using number literals for *mode*. The conventional UNIX notation for + numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in + Python. + +Equivalent to os.chmod(fd, mode). +""" if sys.platform != "linux": if sys.version_info >= (3, 13) or sys.platform != "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi index dc688a9f877f0..907017dc0532d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi @@ -1,3 +1,14 @@ +"""Common operations on Posix pathnames. + +Instead of importing this module directly, import os and refer to +this module as os.path. The "os.path" name is an alias for this +module on Posix systems; on other systems (e.g. Windows), +os.path provides the same operations in a manner specific to that +platform, and is an alias to another module (e.g. ntpath). + +Some of this can actually be useful on non-Posix systems too, e.g. +for manipulation of the pathname component of URLs. +""" import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi index 26140c76248ae..344d3cc8367d5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi @@ -1,25 +1,57 @@ +"""This is an interface to Python's internal parser. 
+""" from _typeshed import StrOrBytesPath from collections.abc import Sequence from types import CodeType from typing import Any, ClassVar, final -def expr(source: str) -> STType: ... -def suite(source: str) -> STType: ... -def sequence2st(sequence: Sequence[Any]) -> STType: ... -def tuple2st(sequence: Sequence[Any]) -> STType: ... -def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... -def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... -def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... -def isexpr(st: STType) -> bool: ... -def issuite(st: STType) -> bool: ... +def expr(source: str) -> STType: + """Creates an ST object from an expression. +""" +def suite(source: str) -> STType: + """Creates an ST object from a suite. +""" +def sequence2st(sequence: Sequence[Any]) -> STType: + """Creates an ST object from a tree representation. +""" +def tuple2st(sequence: Sequence[Any]) -> STType: + """Creates an ST object from a tree representation. +""" +def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: + """Creates a list-tree representation of an ST. +""" +def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: + """Creates a tuple-tree representation of an ST. +""" +def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: + """Compiles an ST object into a code object. +""" +def isexpr(st: STType) -> bool: + """Determines if an ST object was created from an expression. +""" +def issuite(st: STType) -> bool: + """Determines if an ST object was created from a suite. +""" class ParserError(Exception): ... @final class STType: + """Intermediate representation of a Python parse tree. +""" __hash__: ClassVar[None] # type: ignore[assignment] - def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... - def isexpr(self) -> bool: ... - def issuite(self) -> bool: ... - def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... - def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... + def compile(self, filename: StrOrBytesPath = ...) -> CodeType: + """Compile this ST object into a code object. +""" + def isexpr(self) -> bool: + """Determines if this ST object was created from an expression. +""" + def issuite(self) -> bool: + """Determines if this ST object was created from a suite. +""" + def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: + """Creates a list-tree representation of this ST. +""" + def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: + """Creates a tuple-tree representation of this ST. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi index fa5143f202927..ebb6967c14288 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi @@ -1,3 +1,9 @@ +"""Object-oriented filesystem paths. + +This module provides classes to represent abstract paths and concrete +paths with operations that have semantics appropriate for different +operating systems. +""" import sys import types from _typeshed import ( @@ -29,6 +35,14 @@ if sys.version_info >= (3, 13): __all__ += ["UnsupportedOperation"] class PurePath(PathLike[str]): + """Base class for manipulating paths without I/O. 
+ +PurePath represents a filesystem path and offers operations which +don't imply any actual filesystem I/O. Depending on your system, +instantiating a PurePath will return either a PurePosixPath or a +PureWindowsPath object. You can also instantiate either of these classes +directly, regardless of your system. +""" if sys.version_info >= (3, 13): __slots__ = ( "_raw_paths", @@ -56,29 +70,66 @@ class PurePath(PathLike[str]): __slots__ = ("_drv", "_root", "_parts", "_str", "_hash", "_pparts", "_cached_cparts") if sys.version_info >= (3, 13): parser: ClassVar[types.ModuleType] - def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: ... + def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: + """ +Return True if this path matches the given glob-style pattern. The +pattern is matched against the entire path. +""" @property - def parts(self) -> tuple[str, ...]: ... + def parts(self) -> tuple[str, ...]: + """An object providing sequence-like access to the +components in the filesystem path. +""" @property - def drive(self) -> str: ... + def drive(self) -> str: + """The drive prefix (letter or UNC path), if any. +""" @property - def root(self) -> str: ... + def root(self) -> str: + """The root of the path, if any. +""" @property - def anchor(self) -> str: ... + def anchor(self) -> str: + """The concatenation of the drive and root, or ''. +""" @property - def name(self) -> str: ... + def name(self) -> str: + """The final path component, if any. +""" @property - def suffix(self) -> str: ... + def suffix(self) -> str: + """ +The final component's last suffix, if any. + +This includes the leading period. For example: '.txt' +""" @property - def suffixes(self) -> list[str]: ... + def suffixes(self) -> list[str]: + """ +A list of the final component's suffixes, if any. + +These include the leading periods. For example: ['.tar', '.gz'] +""" @property - def stem(self) -> str: ... + def stem(self) -> str: + """The final path component, minus its last suffix. +""" if sys.version_info >= (3, 12): - def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... + def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: + """Construct a PurePath from one or several strings and or existing +PurePath objects. The strings and path objects are combined so as +to yield a canonicalized path, which is incorporated into the +new PurePath object. +""" def __init__(self, *args: StrPath) -> None: ... # pyright: ignore[reportInconsistentConstructor] else: - def __new__(cls, *args: StrPath) -> Self: ... + def __new__(cls, *args: StrPath) -> Self: + """Construct a PurePath from one or several strings and or existing + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. + """ def __hash__(self) -> int: ... def __fspath__(self) -> str: ... @@ -88,58 +139,147 @@ class PurePath(PathLike[str]): def __ge__(self, other: PurePath) -> bool: ... def __truediv__(self, key: StrPath) -> Self: ... def __rtruediv__(self, key: StrPath) -> Self: ... - def __bytes__(self) -> bytes: ... - def as_posix(self) -> str: ... - def as_uri(self) -> str: ... - def is_absolute(self) -> bool: ... + def __bytes__(self) -> bytes: + """Return the bytes representation of the path. This is only +recommended to use under Unix. +""" + def as_posix(self) -> str: + """Return the string representation of the path with forward (/) +slashes. 
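A small sketch of the pure-path components documented above; no filesystem access is involved.

from pathlib import PurePosixPath

p = PurePosixPath("/usr/local/share/archive.tar.gz")
print(p.parts)           # ('/', 'usr', 'local', 'share', 'archive.tar.gz')
print(p.name)            # 'archive.tar.gz'
print(p.suffix)          # '.gz'
print(p.suffixes)        # ['.tar', '.gz']
print(p.stem)            # 'archive.tar'
print(p.is_absolute())   # True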
+""" + def as_uri(self) -> str: + """Return the path as a URI. +""" + def is_absolute(self) -> bool: + """True if the path is absolute (has both a root and, if applicable, +a drive). +""" if sys.version_info >= (3, 13): @deprecated( "Deprecated since Python 3.13; will be removed in Python 3.15. " "Use `os.path.isreserved()` to detect reserved paths on Windows." ) - def is_reserved(self) -> bool: ... + def is_reserved(self) -> bool: + """Return True if the path contains one of the special names reserved +by the system, if any. +""" else: - def is_reserved(self) -> bool: ... + def is_reserved(self) -> bool: + """Return True if the path contains one of the special names reserved + by the system, if any. +""" if sys.version_info >= (3, 14): - def is_relative_to(self, other: StrPath) -> bool: ... + def is_relative_to(self, other: StrPath) -> bool: + """Return True if the path is relative to another path or False. + """ elif sys.version_info >= (3, 12): - def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: ... + def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: + """Return True if the path is relative to another path or False. + """ else: - def is_relative_to(self, *other: StrPath) -> bool: ... + def is_relative_to(self, *other: StrPath) -> bool: + """Return True if the path is relative to another path or False. + """ if sys.version_info >= (3, 12): - def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: ... + def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: + """ +Return True if this path matches the given pattern. If the pattern is +relative, matching is done from the right; otherwise, the entire path +is matched. The recursive wildcard '**' is *not* supported by this +method. +""" else: - def match(self, path_pattern: str) -> bool: ... + def match(self, path_pattern: str) -> bool: + """ + Return True if this path matches the given pattern. + """ if sys.version_info >= (3, 14): - def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: ... + def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: + """Return the relative path to another path identified by the passed +arguments. If the operation is not possible (because this is not +related to the other path), raise ValueError. + +The *walk_up* parameter controls whether `..` may be used to resolve +the path. +""" elif sys.version_info >= (3, 12): - def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: ... + def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: + """Return the relative path to another path identified by the passed +arguments. If the operation is not possible (because this is not +related to the other path), raise ValueError. + +The *walk_up* parameter controls whether `..` may be used to resolve +the path. +""" else: - def relative_to(self, *other: StrPath) -> Self: ... - - def with_name(self, name: str) -> Self: ... - def with_stem(self, stem: str) -> Self: ... - def with_suffix(self, suffix: str) -> Self: ... - def joinpath(self, *other: StrPath) -> Self: ... + def relative_to(self, *other: StrPath) -> Self: + """Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + a subpath of the other path), raise ValueError. + """ + + def with_name(self, name: str) -> Self: + """Return a new path with the file name changed. 
+""" + def with_stem(self, stem: str) -> Self: + """Return a new path with the stem changed. +""" + def with_suffix(self, suffix: str) -> Self: + """Return a new path with the file suffix changed. If the path +has no suffix, add given suffix. If the given suffix is an empty +string, remove the suffix from the path. +""" + def joinpath(self, *other: StrPath) -> Self: + """Combine this path with one or several arguments, and return a +new path representing either a subpath (if all arguments are relative +paths) or a totally different path (if one of the arguments is +anchored). +""" @property - def parents(self) -> Sequence[Self]: ... + def parents(self) -> Sequence[Self]: + """A sequence of this path's logical parents. +""" @property - def parent(self) -> Self: ... + def parent(self) -> Self: + """The logical parent of the path. +""" if sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... if sys.version_info >= (3, 12): - def with_segments(self, *args: StrPath) -> Self: ... + def with_segments(self, *args: StrPath) -> Self: + """Construct a new path object from any number of path-like objects. +Subclasses may override this method to customize how new path objects +are created from methods like `iterdir()`. +""" class PurePosixPath(PurePath): + """PurePath subclass for non-Windows systems. + +On a POSIX system, instantiating a PurePath should return this object. +However, you can also instantiate it directly on any system. +""" __slots__ = () class PureWindowsPath(PurePath): + """PurePath subclass for Windows systems. + +On a Windows system, instantiating a PurePath should return this object. +However, you can also instantiate it directly on any system. +""" __slots__ = () class Path(PurePath): + """PurePath subclass that can make system calls. + +Path represents a filesystem path but unlike PurePath, also offers +methods to do system calls on path objects. Depending on your system, +instantiating a Path will return either a PosixPath or a WindowsPath +object. You can also instantiate a PosixPath or WindowsPath directly, +but cannot instantiate a WindowsPath on a POSIX system or vice versa. +""" if sys.version_info >= (3, 14): __slots__ = ("_info",) elif sys.version_info >= (3, 10): @@ -153,74 +293,192 @@ class Path(PurePath): def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: ... @classmethod - def cwd(cls) -> Self: ... + def cwd(cls) -> Self: + """Return a new path pointing to the current working directory. +""" if sys.version_info >= (3, 10): - def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... - def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: + """ +Return the result of the stat() system call on this path, like +os.stat() does. +""" + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + """ +Change the permissions of the path, like os.chmod(). +""" else: - def stat(self) -> stat_result: ... - def chmod(self, mode: int) -> None: ... + def stat(self) -> stat_result: + """ + Return the result of the stat() system call on this path, like + os.stat() does. + """ + def chmod(self, mode: int) -> None: + """ + Change the permissions of the path, like os.chmod(). + """ if sys.version_info >= (3, 13): @classmethod - def from_uri(cls, uri: str) -> Self: ... - def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... - def is_file(self, *, follow_symlinks: bool = True) -> bool: ... 
- def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: ... + def from_uri(cls, uri: str) -> Self: + """Return a new path from the given 'file' URI. +""" + def is_dir(self, *, follow_symlinks: bool = True) -> bool: + """ +Whether this path is a directory. +""" + def is_file(self, *, follow_symlinks: bool = True) -> bool: + """ +Whether this path is a regular file (also True for symlinks pointing +to regular files). +""" + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: + """ +Open the file in text mode, read it, and close the file. +""" else: def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def is_dir(self) -> bool: ... - def is_file(self) -> bool: ... - def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + def is_dir(self) -> bool: + """ + Whether this path is a directory. + """ + def is_file(self) -> bool: + """ + Whether this path is a regular file (also True for symlinks pointing + to regular files). + """ + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: + """ + Open the file in text mode, read it, and close the file. + """ if sys.version_info >= (3, 13): - def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: ... + def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: + """Iterate over this subtree and yield all existing files (of any +kind, including directories) matching the given relative pattern. +""" def rglob( self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Iterator[Self]: ... + ) -> Iterator[Self]: + """Recursively yield all existing files (of any kind, including +directories) matching the given relative pattern, anywhere in +this subtree. +""" elif sys.version_info >= (3, 12): - def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... - def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: ... + def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given relative pattern. + """ + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. + """ else: - def glob(self, pattern: str) -> Generator[Self, None, None]: ... - def rglob(self, pattern: str) -> Generator[Self, None, None]: ... + def glob(self, pattern: str) -> Generator[Self, None, None]: + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given relative pattern. + """ + def rglob(self, pattern: str) -> Generator[Self, None, None]: + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. + """ if sys.version_info >= (3, 12): - def exists(self, *, follow_symlinks: bool = True) -> bool: ... - else: - def exists(self) -> bool: ... 
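A minimal sketch of rglob() and read_text() as documented above; the "src" directory is illustrative.

from pathlib import Path

for stub in Path("src").rglob("*.pyi"):        # recursive: matches anywhere under src/
    if stub.is_file():
        text = stub.read_text(encoding="utf-8")
        print(stub, len(text.splitlines()), "lines")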
+ def exists(self, *, follow_symlinks: bool = True) -> bool: + """ +Whether this path exists. - def is_symlink(self) -> bool: ... - def is_socket(self) -> bool: ... - def is_fifo(self) -> bool: ... - def is_block_device(self) -> bool: ... - def is_char_device(self) -> bool: ... +This method normally follows symlinks; to check whether a symlink exists, +add the argument follow_symlinks=False. +""" + else: + def exists(self) -> bool: + """ + Whether this path exists. + """ + + def is_symlink(self) -> bool: + """ +Whether this path is a symbolic link. +""" + def is_socket(self) -> bool: + """ +Whether this path is a socket. +""" + def is_fifo(self) -> bool: + """ +Whether this path is a FIFO. +""" + def is_block_device(self) -> bool: + """ +Whether this path is a block device. +""" + def is_char_device(self) -> bool: + """ +Whether this path is a character device. +""" if sys.version_info >= (3, 12): - def is_junction(self) -> bool: ... - - def iterdir(self) -> Generator[Self, None, None]: ... - def lchmod(self, mode: int) -> None: ... - def lstat(self) -> stat_result: ... - def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... + def is_junction(self) -> bool: + """ +Whether this path is a junction. +""" + + def iterdir(self) -> Generator[Self, None, None]: + """Yield path objects of the directory contents. + +The children are yielded in arbitrary order, and the +special entries '.' and '..' are not included. +""" + def lchmod(self, mode: int) -> None: + """ +Like chmod(), except if the path points to a symlink, the symlink's +permissions are changed, rather than its target's. +""" + def lstat(self) -> stat_result: + """ +Like stat(), except if the path points to a symlink, the symlink's +status information is returned, rather than its target's. +""" + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: + """ +Create a new directory at this given path. +""" if sys.version_info >= (3, 14): @property - def info(self) -> PathInfo: ... + def info(self) -> PathInfo: + """ +A PathInfo object that exposes the file type and other file attributes +of this path. +""" @overload - def move_into(self, target_dir: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + def move_into(self, target_dir: _PathT) -> _PathT: # type: ignore[overload-overlap] + """ +Move this file or directory tree into the given existing directory. +""" @overload def move_into(self, target_dir: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload - def move(self, target: _PathT) -> _PathT: ... # type: ignore[overload-overlap] + def move(self, target: _PathT) -> _PathT: # type: ignore[overload-overlap] + """ +Recursively move this file or directory tree to the given destination. +""" @overload def move(self, target: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload - def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] + def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] + """ +Copy this file or directory tree into the given existing directory. +""" @overload def copy_into(self, target_dir: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... 
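A minimal sketch of mkdir() and iterdir() as documented above; the directory name is illustrative.

from pathlib import Path

d = Path("build/cache")
d.mkdir(parents=True, exist_ok=True)    # create intermediate dirs, no error if present
(d / "entry.txt").touch()
for child in d.iterdir():               # arbitrary order; '.' and '..' are not included
    print(child, child.is_file())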
# type: ignore[overload-overlap] @overload - def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: ... # type: ignore[overload-overlap] + def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] + """ +Recursively copy this file or directory tree to the given destination. +""" @overload def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] @@ -235,7 +493,11 @@ class Path(PurePath): encoding: str | None = None, errors: str | None = None, newline: str | None = None, - ) -> TextIOWrapper: ... + ) -> TextIOWrapper: + """ +Open the file pointed to by this path and return a file object, as +the built-in open() function does. +""" # Unbuffered binary mode: returns a FileIO @overload def open( @@ -291,65 +553,199 @@ class Path(PurePath): def group(self: Never) -> str: ... # type: ignore[misc] else: if sys.version_info >= (3, 13): - def owner(self, *, follow_symlinks: bool = True) -> str: ... - def group(self, *, follow_symlinks: bool = True) -> str: ... + def owner(self, *, follow_symlinks: bool = True) -> str: + """ +Return the login name of the file owner. +""" + def group(self, *, follow_symlinks: bool = True) -> str: + """ +Return the group name of the file gid. +""" else: - def owner(self) -> str: ... - def group(self) -> str: ... + def owner(self) -> str: + """ + Return the login name of the file owner. + """ + def group(self) -> str: + """ + Return the group name of the file gid. + """ # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms if sys.platform == "win32" and sys.version_info < (3, 12): def is_mount(self: Never) -> bool: ... # type: ignore[misc] else: - def is_mount(self) -> bool: ... + def is_mount(self) -> bool: + """ +Check if this path is a mount point +""" - def readlink(self) -> Self: ... + def readlink(self) -> Self: + """ +Return the path to which the symbolic link points. +""" if sys.version_info >= (3, 10): - def rename(self, target: StrPath) -> Self: ... - def replace(self, target: StrPath) -> Self: ... + def rename(self, target: StrPath) -> Self: + """ +Rename this path to the target path. + +The target path may be absolute or relative. Relative paths are +interpreted relative to the current working directory, *not* the +directory of the Path object. + +Returns the new Path instance pointing to the target path. +""" + def replace(self, target: StrPath) -> Self: + """ +Rename this path to the target path, overwriting if that path exists. + +The target path may be absolute or relative. Relative paths are +interpreted relative to the current working directory, *not* the +directory of the Path object. + +Returns the new Path instance pointing to the target path. +""" else: - def rename(self, target: str | PurePath) -> Self: ... - def replace(self, target: str | PurePath) -> Self: ... - - def resolve(self, strict: bool = False) -> Self: ... - def rmdir(self) -> None: ... - def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... + def rename(self, target: str | PurePath) -> Self: + """ + Rename this path to the target path. + + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. 
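A minimal sketch of Path.open() and replace() as documented above; the file names are illustrative.

from pathlib import Path

tmp = Path("settings.json.tmp")
with tmp.open("w", encoding="utf-8") as f:      # same semantics as the built-in open()
    f.write("{}\n")
tmp.replace("settings.json")                    # overwrites the target if it already exists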
+ + Returns the new Path instance pointing to the target path. + """ + def replace(self, target: str | PurePath) -> Self: + """ + Rename this path to the target path, overwriting if that path exists. + + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. + """ + + def resolve(self, strict: bool = False) -> Self: + """ +Make the path absolute, resolving all symlinks on the way and also +normalizing it. +""" + def rmdir(self) -> None: + """ +Remove this directory. The directory must be empty. +""" + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: + """ +Make this path a symlink pointing to the target path. +Note the order of arguments (link, target) is the reverse of os.symlink. +""" if sys.version_info >= (3, 10): - def hardlink_to(self, target: StrOrBytesPath) -> None: ... - - def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... - def unlink(self, missing_ok: bool = False) -> None: ... + def hardlink_to(self, target: StrOrBytesPath) -> None: + """ +Make this path a hard link pointing to the same file as *target*. + +Note the order of arguments (self, target) is the reverse of os.link's. +""" + + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + """ +Create this file with the given access mode, if it doesn't exist. +""" + def unlink(self, missing_ok: bool = False) -> None: + """ +Remove this file or link. +If the path is a directory, use rmdir() instead. +""" @classmethod - def home(cls) -> Self: ... - def absolute(self) -> Self: ... - def expanduser(self) -> Self: ... - def read_bytes(self) -> bytes: ... - def samefile(self, other_path: StrPath) -> bool: ... - def write_bytes(self, data: ReadableBuffer) -> int: ... + def home(cls) -> Self: + """Return a new path pointing to expanduser('~'). + """ + def absolute(self) -> Self: + """Return an absolute version of this path +No normalization or symlink resolution is performed. + +Use resolve() to resolve symlinks and remove '..' segments. +""" + def expanduser(self) -> Self: + """Return a new path with expanded ~ and ~user constructs +(as returned by os.path.expanduser) +""" + def read_bytes(self) -> bytes: + """ +Open the file in bytes mode, read it, and close the file. +""" + def samefile(self, other_path: StrPath) -> bool: + """Return whether other_path is the same or not as this file +(as returned by os.path.samefile()). +""" + def write_bytes(self, data: ReadableBuffer) -> int: + """ +Open the file in bytes mode, write to it, and close the file. +""" if sys.version_info >= (3, 10): def write_text( self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None - ) -> int: ... + ) -> int: + """ +Open the file in text mode, write to it, and close the file. +""" else: - def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: + """ + Open the file in text mode, write to it, and close the file. + """ if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `hardlink_to()` instead.") - def link_to(self, target: StrOrBytesPath) -> None: ... 
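A minimal sketch of the small read/write helpers and symlink_to() documented above (Unix shown; creating symlinks may need extra privileges on Windows, and the file names are illustrative).

from pathlib import Path

notes = Path("notes.txt")
notes.write_text("draft\n", encoding="utf-8")
print(notes.read_bytes())                  # b'draft\n'
link = Path("latest.txt")
link.unlink(missing_ok=True)
link.symlink_to(notes)                     # the Path object is the link, the argument is the target
print(link.resolve().name)                 # 'notes.txt'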
+ def link_to(self, target: StrOrBytesPath) -> None: + """ + Make the target path a hard link pointing to this path. + + Note this function does not make this path a hard link to *target*, + despite the implication of the function and argument names. The order + of arguments (target, link) is the reverse of Path.symlink_to, but + matches that of os.link. + + Deprecated since Python 3.10 and scheduled for removal in Python 3.12. + Use `hardlink_to()` instead. + """ else: - def link_to(self, target: StrOrBytesPath) -> None: ... + def link_to(self, target: StrOrBytesPath) -> None: + """ + Make the target path a hard link pointing to this path. + + Note this function does not make this path a hard link to *target*, + despite the implication of the function and argument names. The order + of arguments (target, link) is the reverse of Path.symlink_to, but + matches that of os.link. + + """ if sys.version_info >= (3, 12): def walk( self, top_down: bool = True, on_error: Callable[[OSError], object] | None = None, follow_symlinks: bool = False - ) -> Iterator[tuple[Self, list[str], list[str]]]: ... + ) -> Iterator[tuple[Self, list[str], list[str]]]: + """Walk the directory tree from this directory, similar to os.walk(). +""" class PosixPath(Path, PurePosixPath): + """Path subclass for non-Windows systems. + +On a POSIX system, instantiating a Path should return this object. +""" __slots__ = () class WindowsPath(Path, PureWindowsPath): + """Path subclass for Windows systems. + +On a Windows system, instantiating a Path should return this object. +""" __slots__ = () if sys.version_info >= (3, 13): - class UnsupportedOperation(NotImplementedError): ... + class UnsupportedOperation(NotImplementedError): + """An exception that is raised when an unsupported operation is attempted. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi index 9f9a650846deb..5b88245dec9b6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi @@ -1,7 +1,13 @@ +""" +Protocols for supporting classes in pathlib. +""" from typing import Protocol, runtime_checkable @runtime_checkable class PathInfo(Protocol): + """Protocol for path info objects, which support querying the file type. +Methods may return cached results. +""" def exists(self, *, follow_symlinks: bool = True) -> bool: ... def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... def is_file(self, *, follow_symlinks: bool = True) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi index 2f114b20572df..979ab6ec8f97e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi @@ -1,3 +1,340 @@ +""" +The Python Debugger Pdb +======================= + +To use the debugger in its simplest form: + + >>> import pdb + >>> pdb.run('') + +The debugger's prompt is '(Pdb) '. This will stop in the first +function call in . + +Alternatively, if a statement terminated with an unhandled exception, +you can use pdb's post-mortem facility to inspect the contents of the +traceback: + + >>> + + >>> import pdb + >>> pdb.pm() + +The commands recognized by the debugger are listed in the next +section. Most can be abbreviated as indicated; e.g., h(elp) means +that 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel', +nor as 'H' or 'Help' or 'HELP'). 
Optional arguments are enclosed in +square brackets. Alternatives in the command syntax are separated +by a vertical bar (|). + +A blank line repeats the previous command literally, except for +'list', where it lists the next 11 lines. + +Commands that the debugger doesn't recognize are assumed to be Python +statements and are executed in the context of the program being +debugged. Python statements can also be prefixed with an exclamation +point ('!'). This is a powerful way to inspect the program being +debugged; it is even possible to change variables or call functions. +When an exception occurs in such a statement, the exception name is +printed but the debugger's state is not changed. + +The debugger supports aliases, which can save typing. And aliases can +have parameters (see the alias help entry) which allows one a certain +level of adaptability to the context under examination. + +Multiple commands may be entered on a single line, separated by the +pair ';;'. No intelligence is applied to separating the commands; the +input is split at the first ';;', even if it is in the middle of a +quoted string. + +If a file ".pdbrc" exists in your home directory or in the current +directory, it is read in and executed as if it had been typed at the +debugger prompt. This is particularly useful for aliases. If both +files exist, the one in the home directory is read first and aliases +defined there can be overridden by the local file. This behavior can be +disabled by passing the "readrc=False" argument to the Pdb constructor. + +Aside from aliases, the debugger is not directly programmable; but it +is implemented as a class from which you can derive your own debugger +class, which you can make as fancy as you like. + + +Debugger commands +================= + +h(elp) + +Without argument, print the list of available commands. +With a command name as argument, print help about that command. +"help pdb" shows the full pdb documentation. +"help exec" gives help on the ! command. + +w(here) [count] + +Print a stack trace. If count is not specified, print the full stack. +If count is 0, print the current frame entry. If count is positive, +print count entries from the most recent frame. If count is negative, +print -count entries from the least recent frame. +An arrow indicates the "current frame", which determines the +context of most commands. 'bt' is an alias for this command. + +d(own) [count] + +Move the current frame count (default one) levels down in the +stack trace (to a newer frame). + +u(p) [count] + +Move the current frame count (default one) levels up in the +stack trace (to an older frame). + +b(reak) [ ([filename:]lineno | function) [, condition] ] + +Without argument, list all breaks. + +With a line number argument, set a break at this line in the +current file. With a function name, set a break at the first +executable line of that function. If a second argument is +present, it is a string specifying an expression which must +evaluate to true before the breakpoint is honored. + +The line number may be prefixed with a filename and a colon, +to specify a breakpoint in another file (probably one that +hasn't been loaded yet). The file is searched for on +sys.path; the .py suffix may be omitted. + +tbreak [ ([filename:]lineno | function) [, condition] ] + +Same arguments as break, but sets a temporary breakpoint: it +is automatically deleted when first hit. + +cl(ear) [filename:lineno | bpnumber ...] + +With a space separated list of breakpoint numbers, clear +those breakpoints. 
Without argument, clear all breaks (but +first ask confirmation). With a filename:lineno argument, +clear all breaks at that line in that file. + +disable bpnumber [bpnumber ...] + +Disables the breakpoints given as a space separated list of +breakpoint numbers. Disabling a breakpoint means it cannot +cause the program to stop execution, but unlike clearing a +breakpoint, it remains in the list of breakpoints and can be +(re-)enabled. + +enable bpnumber [bpnumber ...] + +Enables the breakpoints given as a space separated list of +breakpoint numbers. + +ignore bpnumber [count] + +Set the ignore count for the given breakpoint number. If +count is omitted, the ignore count is set to 0. A breakpoint +becomes active when the ignore count is zero. When non-zero, +the count is decremented each time the breakpoint is reached +and the breakpoint is not disabled and any associated +condition evaluates to true. + +condition bpnumber [condition] + +Set a new condition for the breakpoint, an expression which +must evaluate to true before the breakpoint is honored. If +condition is absent, any existing condition is removed; i.e., +the breakpoint is made unconditional. + +(Pdb) commands [bpnumber] +(com) ... +(com) end +(Pdb) + +Specify a list of commands for breakpoint number bpnumber. +The commands themselves are entered on the following lines. +Type a line containing just 'end' to terminate the commands. +The commands are executed when the breakpoint is hit. + +To remove all commands from a breakpoint, type commands and +follow it immediately with end; that is, give no commands. + +With no bpnumber argument, commands refers to the last +breakpoint set. + +You can use breakpoint commands to start your program up +again. Simply use the continue command, or step, or any other +command that resumes execution. + +Specifying any command resuming execution (currently continue, +step, next, return, jump, quit and their abbreviations) +terminates the command list (as if that command was +immediately followed by end). This is because any time you +resume execution (even with a simple next or step), you may +encounter another breakpoint -- which could have its own +command list, leading to ambiguities about which list to +execute. + +If you use the 'silent' command in the command list, the usual +message about stopping at a breakpoint is not printed. This +may be desirable for breakpoints that are to print a specific +message and then continue. If none of the other commands +print anything, you will see no sign that the breakpoint was +reached. + +s(tep) + +Execute the current line, stop at the first possible occasion +(either in a function that is called or in the current +function). + +n(ext) + +Continue execution until the next line in the current function +is reached or it returns. + +unt(il) [lineno] + +Without argument, continue execution until the line with a +number greater than the current one is reached. With a line +number, continue execution until a line with a number greater +or equal to that is reached. In both cases, also stop when +the current frame returns. + +j(ump) lineno + +Set the next line that will be executed. Only available in +the bottom-most frame. This lets you jump back and execute +code again, or jump forward to skip code that you don't want +to run. + +It should be noted that not all jumps are allowed -- for +instance it is not possible to jump into the middle of a +for loop or out of a finally clause. + +r(eturn) + +Continue execution until the current function returns. 
+ +retval + +Print the return value for the last return of a function. + +run [args...] + +Restart the debugged python program. If a string is supplied +it is split with "shlex", and the result is used as the new +sys.argv. History, breakpoints, actions and debugger options +are preserved. "restart" is an alias for "run". + +c(ont(inue)) + +Continue execution, only stop when a breakpoint is encountered. + +l(ist) [first[, last] | .] + +List source code for the current file. Without arguments, +list 11 lines around the current line or continue the previous +listing. With . as argument, list 11 lines around the current +line. With one argument, list 11 lines starting at that line. +With two arguments, list the given range; if the second +argument is less than the first, it is a count. + +The current line in the current frame is indicated by "->". +If an exception is being debugged, the line where the +exception was originally raised or propagated is indicated by +">>", if it differs from the current line. + +ll | longlist + +List the whole source code for the current function or frame. + +a(rgs) + +Print the argument list of the current function. + +p expression + +Print the value of the expression. + +pp expression + +Pretty-print the value of the expression. + +whatis expression + +Print the type of the argument. + +source expression + +Try to get source code for the given object and display it. + +display [expression] + +Display the value of the expression if it changed, each time execution +stops in the current frame. + +Without expression, list all display expressions for the current frame. + +undisplay [expression] + +Do not display the expression any more in the current frame. + +Without expression, clear all display expressions for the current frame. + +interact + +Start an interactive interpreter whose global namespace +contains all the (global and local) names found in the current scope. + +alias [name [command]] + +Create an alias called 'name' that executes 'command'. The +command must *not* be enclosed in quotes. Replaceable +parameters can be indicated by %1, %2, and so on, while %* is +replaced by all the parameters. If no command is given, the +current alias for name is shown. If no name is given, all +aliases are listed. + +Aliases may be nested and can contain anything that can be +legally typed at the pdb prompt. Note! You *can* override +internal pdb commands with aliases! Those internal commands +are then hidden until the alias is removed. Aliasing is +recursively applied to the first word of the command line; all +other words in the line are left alone. + +As an example, here are two useful aliases (especially when +placed in the .pdbrc file): + +# Print instance variables (usage "pi classInst") +alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) +# Print instance variables in self +alias ps pi self + +unalias name + +Delete the specified alias. + +debug code + +Enter a recursive debugger that steps through the code +argument (which is an arbitrary expression or statement to be +executed in the current environment). + +q(uit) | exit + +Quit from the debugger. The program being executed is aborted. + +(!) statement + +Execute the (one-line) statement in the context of the current +stack frame. The exclamation point can be omitted unless the +first word of the statement resembles a debugger command, e.g.: +(Pdb) ! 
n=42 +(Pdb) + +To assign to a global variable you must always prefix the command with +a 'global' command, e.g.: +(Pdb) global list_options; list_options = ['-l'] +(Pdb) +""" import signal import sys from bdb import Bdb, _Backend @@ -20,24 +357,88 @@ _Mode: TypeAlias = Literal["inline", "cli"] line_prefix: Final[str] # undocumented -class Restart(Exception): ... - -def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... -def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... +class Restart(Exception): + """Causes a debugger to be restarted for the debugged python program. +""" + +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: + """Execute the *statement* (given as a string or a code object) +under debugger control. + +The debugger prompt appears before any code is executed; you can set +breakpoints and type continue, or you can step through the statement +using step or next. + +The optional *globals* and *locals* arguments specify the +environment in which the code is executed; by default the +dictionary of the module __main__ is used (see the explanation of +the built-in exec() or eval() functions.). +""" +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: + """Evaluate the *expression* (given as a string or a code object) +under debugger control. + +When runeval() returns, it returns the value of the expression. +Otherwise this function is similar to run(). +""" def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... -def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... +def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: + """Call the function (a function or method object, not a string) +with the given arguments. + +When runcall() returns, it returns whatever the function call +returned. The debugger prompt appears as soon as the function is +entered. +""" if sys.version_info >= (3, 14): - def set_default_backend(backend: _Backend) -> None: ... - def get_default_backend() -> _Backend: ... - def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... - async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: ... + def set_default_backend(backend: _Backend) -> None: + """Set the default backend to use for Pdb instances. +""" + def get_default_backend() -> _Backend: + """Get the default backend to use for Pdb instances. +""" + def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: + """Enter the debugger at the calling stack frame. + +This is useful to hard-code a breakpoint at a given point in a +program, even if the code is not otherwise being debugged (e.g. when +an assertion fails). If given, *header* is printed to the console +just before debugging begins. *commands* is an optional list of +pdb commands to run when the debugger starts. +""" + async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: + """Enter the debugger at the calling stack frame, but in async mode. + +This should be used as await pdb.set_trace_async(). Users can do await +if they enter the debugger with this function. Otherwise it's the same +as set_trace(). 
+""" else: - def set_trace(*, header: str | None = None) -> None: ... - -def post_mortem(t: TracebackType | None = None) -> None: ... -def pm() -> None: ... + def set_trace(*, header: str | None = None) -> None: + """Enter the debugger at the calling stack frame. + +This is useful to hard-code a breakpoint at a given point in a +program, even if the code is not otherwise being debugged (e.g. when +an assertion fails). If given, *header* is printed to the console +just before debugging begins. +""" + +def post_mortem(t: TracebackType | None = None) -> None: + """Enter post-mortem debugging of the given *traceback*, or *exception* +object. + +If no traceback is given, it uses the one of the exception that is +currently being handled (an exception must be being handled if the +default is to be used). + +If `t` is an exception object, the `exceptions` command makes it possible to +list and inspect its chained exceptions (if any). +""" +def pm() -> None: + """Enter post-mortem debugging of the traceback found in sys.last_exc. +""" class Pdb(Bdb, Cmd): # Everything here is undocumented, except for __init__ @@ -98,21 +499,44 @@ class Pdb(Bdb, Cmd): if sys.version_info >= (3, 13): user_opcode = Bdb.user_line - def bp_commands(self, frame: FrameType) -> bool: ... + def bp_commands(self, frame: FrameType) -> bool: + """Call every command that was set for the current active breakpoint +(if there is one). + +Returns True if the normal interaction function must be called, +False otherwise. +""" if sys.version_info >= (3, 13): def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ... else: def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... - def displayhook(self, obj: object) -> None: ... - def handle_command_def(self, line: str) -> bool: ... - def defaultFile(self) -> str: ... + def displayhook(self, obj: object) -> None: + """Custom displayhook for the exec in default(), which prevents +assignment of the _ variable in the builtins. +""" + def handle_command_def(self, line: str) -> bool: + """Handles one command line during command list definition. +""" + def defaultFile(self) -> str: + """Produce a reasonable default. +""" def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... if sys.version_info >= (3, 14): - def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: ... + def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: + """Check whether specified line seems to be executable. + +Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank +line or EOF). Warning: testing is not comprehensive. +""" else: - def checkline(self, filename: str, lineno: int) -> int: ... + def checkline(self, filename: str, lineno: int) -> int: + """Check whether specified line seems to be executable. + +Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank +line or EOF). Warning: testing is not comprehensive. +""" def _getval(self, arg: str) -> object: ... if sys.version_info >= (3, 14): @@ -121,7 +545,19 @@ class Pdb(Bdb, Cmd): def print_stack_trace(self) -> None: ... def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... - def lookupmodule(self, filename: str) -> str | None: ... + def lookupmodule(self, filename: str) -> str | None: + """Helper function for break/clear parsing -- may be overridden. 
+ +lookupmodule() translates (possibly incomplete) file or module name +into an absolute file name. + +filename could be in format of: + * an absolute path like '/path/to/file.py' + * a relative path like 'file.py' or 'dir/file.py' + * a module name like 'module' or 'package.module' + +files and modules will be searched in sys.path. +""" if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... @@ -131,43 +567,307 @@ class Pdb(Bdb, Cmd): if sys.version_info >= (3, 13): def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... - def do_commands(self, arg: str) -> bool | None: ... + def do_commands(self, arg: str) -> bool | None: + """(Pdb) commands [bpnumber] +(com) ... +(com) end +(Pdb) + +Specify a list of commands for breakpoint number bpnumber. +The commands themselves are entered on the following lines. +Type a line containing just 'end' to terminate the commands. +The commands are executed when the breakpoint is hit. + +To remove all commands from a breakpoint, type commands and +follow it immediately with end; that is, give no commands. + +With no bpnumber argument, commands refers to the last +breakpoint set. + +You can use breakpoint commands to start your program up +again. Simply use the continue command, or step, or any other +command that resumes execution. + +Specifying any command resuming execution (currently continue, +step, next, return, jump, quit and their abbreviations) +terminates the command list (as if that command was +immediately followed by end). This is because any time you +resume execution (even with a simple next or step), you may +encounter another breakpoint -- which could have its own +command list, leading to ambiguities about which list to +execute. + +If you use the 'silent' command in the command list, the usual +message about stopping at a breakpoint is not printed. This +may be desirable for breakpoints that are to print a specific +message and then continue. If none of the other commands +print anything, you will see no sign that the breakpoint was +reached. +""" if sys.version_info >= (3, 14): - def do_break(self, arg: str, temporary: bool = False) -> bool | None: ... + def do_break(self, arg: str, temporary: bool = False) -> bool | None: + """b(reak) [ ([filename:]lineno | function) [, condition] ] + +Without argument, list all breaks. + +With a line number argument, set a break at this line in the +current file. With a function name, set a break at the first +executable line of that function. If a second argument is +present, it is a string specifying an expression which must +evaluate to true before the breakpoint is honored. + +The line number may be prefixed with a filename and a colon, +to specify a breakpoint in another file (probably one that +hasn't been loaded yet). The file is searched for on +sys.path; the .py suffix may be omitted. +""" else: - def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: ... - - def do_tbreak(self, arg: str) -> bool | None: ... - def do_enable(self, arg: str) -> bool | None: ... - def do_disable(self, arg: str) -> bool | None: ... - def do_condition(self, arg: str) -> bool | None: ... - def do_ignore(self, arg: str) -> bool | None: ... - def do_clear(self, arg: str) -> bool | None: ... - def do_where(self, arg: str) -> bool | None: ... + def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: + """b(reak) [ ([filename:]lineno | function) [, condition] ] + +Without argument, list all breaks. 
+ +With a line number argument, set a break at this line in the +current file. With a function name, set a break at the first +executable line of that function. If a second argument is +present, it is a string specifying an expression which must +evaluate to true before the breakpoint is honored. + +The line number may be prefixed with a filename and a colon, +to specify a breakpoint in another file (probably one that +hasn't been loaded yet). The file is searched for on +sys.path; the .py suffix may be omitted. +""" + + def do_tbreak(self, arg: str) -> bool | None: + """tbreak [ ([filename:]lineno | function) [, condition] ] + +Same arguments as break, but sets a temporary breakpoint: it +is automatically deleted when first hit. +""" + def do_enable(self, arg: str) -> bool | None: + """enable bpnumber [bpnumber ...] + +Enables the breakpoints given as a space separated list of +breakpoint numbers. +""" + def do_disable(self, arg: str) -> bool | None: + """disable bpnumber [bpnumber ...] + +Disables the breakpoints given as a space separated list of +breakpoint numbers. Disabling a breakpoint means it cannot +cause the program to stop execution, but unlike clearing a +breakpoint, it remains in the list of breakpoints and can be +(re-)enabled. +""" + def do_condition(self, arg: str) -> bool | None: + """condition bpnumber [condition] + +Set a new condition for the breakpoint, an expression which +must evaluate to true before the breakpoint is honored. If +condition is absent, any existing condition is removed; i.e., +the breakpoint is made unconditional. +""" + def do_ignore(self, arg: str) -> bool | None: + """ignore bpnumber [count] + +Set the ignore count for the given breakpoint number. If +count is omitted, the ignore count is set to 0. A breakpoint +becomes active when the ignore count is zero. When non-zero, +the count is decremented each time the breakpoint is reached +and the breakpoint is not disabled and any associated +condition evaluates to true. +""" + def do_clear(self, arg: str) -> bool | None: + """cl(ear) [filename:lineno | bpnumber ...] + +With a space separated list of breakpoint numbers, clear +those breakpoints. Without argument, clear all breaks (but +first ask confirmation). With a filename:lineno argument, +clear all breaks at that line in that file. +""" + def do_where(self, arg: str) -> bool | None: + """w(here) [count] + +Print a stack trace. If count is not specified, print the full stack. +If count is 0, print the current frame entry. If count is positive, +print count entries from the most recent frame. If count is negative, +print -count entries from the least recent frame. +An arrow indicates the "current frame", which determines the +context of most commands. 'bt' is an alias for this command. +""" if sys.version_info >= (3, 13): - def do_exceptions(self, arg: str) -> bool | None: ... - - def do_up(self, arg: str) -> bool | None: ... - def do_down(self, arg: str) -> bool | None: ... - def do_until(self, arg: str) -> bool | None: ... - def do_step(self, arg: str) -> bool | None: ... - def do_next(self, arg: str) -> bool | None: ... - def do_run(self, arg: str) -> bool | None: ... - def do_return(self, arg: str) -> bool | None: ... - def do_continue(self, arg: str) -> bool | None: ... - def do_jump(self, arg: str) -> bool | None: ... - def do_debug(self, arg: str) -> bool | None: ... - def do_quit(self, arg: str) -> bool | None: ... - def do_EOF(self, arg: str) -> bool | None: ... - def do_args(self, arg: str) -> bool | None: ... 
- def do_retval(self, arg: str) -> bool | None: ... - def do_p(self, arg: str) -> bool | None: ... - def do_pp(self, arg: str) -> bool | None: ... - def do_list(self, arg: str) -> bool | None: ... - def do_whatis(self, arg: str) -> bool | None: ... - def do_alias(self, arg: str) -> bool | None: ... - def do_unalias(self, arg: str) -> bool | None: ... - def do_help(self, arg: str) -> bool | None: ... + def do_exceptions(self, arg: str) -> bool | None: + """exceptions [number] + +List or change current exception in an exception chain. + +Without arguments, list all the current exception in the exception +chain. Exceptions will be numbered, with the current exception indicated +with an arrow. + +If given an integer as argument, switch to the exception at that index. +""" + + def do_up(self, arg: str) -> bool | None: + """u(p) [count] + +Move the current frame count (default one) levels up in the +stack trace (to an older frame). +""" + def do_down(self, arg: str) -> bool | None: + """d(own) [count] + +Move the current frame count (default one) levels down in the +stack trace (to a newer frame). +""" + def do_until(self, arg: str) -> bool | None: + """unt(il) [lineno] + +Without argument, continue execution until the line with a +number greater than the current one is reached. With a line +number, continue execution until a line with a number greater +or equal to that is reached. In both cases, also stop when +the current frame returns. +""" + def do_step(self, arg: str) -> bool | None: + """s(tep) + +Execute the current line, stop at the first possible occasion +(either in a function that is called or in the current +function). +""" + def do_next(self, arg: str) -> bool | None: + """n(ext) + +Continue execution until the next line in the current function +is reached or it returns. +""" + def do_run(self, arg: str) -> bool | None: + """run [args...] + +Restart the debugged python program. If a string is supplied +it is split with "shlex", and the result is used as the new +sys.argv. History, breakpoints, actions and debugger options +are preserved. "restart" is an alias for "run". +""" + def do_return(self, arg: str) -> bool | None: + """r(eturn) + +Continue execution until the current function returns. +""" + def do_continue(self, arg: str) -> bool | None: + """c(ont(inue)) + +Continue execution, only stop when a breakpoint is encountered. +""" + def do_jump(self, arg: str) -> bool | None: + """j(ump) lineno + +Set the next line that will be executed. Only available in +the bottom-most frame. This lets you jump back and execute +code again, or jump forward to skip code that you don't want +to run. + +It should be noted that not all jumps are allowed -- for +instance it is not possible to jump into the middle of a +for loop or out of a finally clause. +""" + def do_debug(self, arg: str) -> bool | None: + """debug code + +Enter a recursive debugger that steps through the code +argument (which is an arbitrary expression or statement to be +executed in the current environment). +""" + def do_quit(self, arg: str) -> bool | None: + """q(uit) | exit + +Quit from the debugger. The program being executed is aborted. +""" + def do_EOF(self, arg: str) -> bool | None: + """EOF + +Handles the receipt of EOF as a command. +""" + def do_args(self, arg: str) -> bool | None: + """a(rgs) + +Print the argument list of the current function. +""" + def do_retval(self, arg: str) -> bool | None: + """retval + +Print the return value for the last return of a function. 
+""" + def do_p(self, arg: str) -> bool | None: + """p expression + +Print the value of the expression. +""" + def do_pp(self, arg: str) -> bool | None: + """pp expression + +Pretty-print the value of the expression. +""" + def do_list(self, arg: str) -> bool | None: + """l(ist) [first[, last] | .] + +List source code for the current file. Without arguments, +list 11 lines around the current line or continue the previous +listing. With . as argument, list 11 lines around the current +line. With one argument, list 11 lines starting at that line. +With two arguments, list the given range; if the second +argument is less than the first, it is a count. + +The current line in the current frame is indicated by "->". +If an exception is being debugged, the line where the +exception was originally raised or propagated is indicated by +">>", if it differs from the current line. +""" + def do_whatis(self, arg: str) -> bool | None: + """whatis expression + +Print the type of the argument. +""" + def do_alias(self, arg: str) -> bool | None: + """alias [name [command]] + +Create an alias called 'name' that executes 'command'. The +command must *not* be enclosed in quotes. Replaceable +parameters can be indicated by %1, %2, and so on, while %* is +replaced by all the parameters. If no command is given, the +current alias for name is shown. If no name is given, all +aliases are listed. + +Aliases may be nested and can contain anything that can be +legally typed at the pdb prompt. Note! You *can* override +internal pdb commands with aliases! Those internal commands +are then hidden until the alias is removed. Aliasing is +recursively applied to the first word of the command line; all +other words in the line are left alone. + +As an example, here are two useful aliases (especially when +placed in the .pdbrc file): + +# Print instance variables (usage "pi classInst") +alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) +# Print instance variables in self +alias ps pi self +""" + def do_unalias(self, arg: str) -> bool | None: + """unalias name + +Delete the specified alias. +""" + def do_help(self, arg: str) -> bool | None: + """h(elp) + +Without argument, print the list of available commands. +With a command name as argument, print help about that command. +"help pdb" shows the full pdb documentation. +"help exec" gives help on the ! command. +""" do_b = do_break do_cl = do_clear do_w = do_where @@ -188,7 +888,20 @@ class Pdb(Bdb, Cmd): do_rv = do_retval do_l = do_list do_h = do_help - def help_exec(self) -> None: ... + def help_exec(self) -> None: + """(!) statement + +Execute the (one-line) statement in the context of the current +stack frame. The exclamation point can be omitted unless the +first word of the statement resembles a debugger command, e.g.: +(Pdb) ! n=42 +(Pdb) + +To assign to a global variable you must always prefix the command with +a 'global' command, e.g.: +(Pdb) global list_options; list_options = ['-l'] +(Pdb) +""" def help_pdb(self) -> None: ... def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... if sys.version_info >= (3, 13): @@ -204,19 +917,54 @@ class Pdb(Bdb, Cmd): if sys.version_info >= (3, 13) and sys.version_info < (3, 14): # Added in 3.13.8. @property - def rlcompleter(self) -> type[Completer]: ... + def rlcompleter(self) -> type[Completer]: + """Return the `Completer` class from `rlcompleter`, while avoiding the +side effects of changing the completer from `import rlcompleter`. 
+ +This is a compromise between GH-138860 and GH-139289. If GH-139289 is +fixed, then we don't need this and we can just `import rlcompleter` in +`Pdb.__init__`. +""" def _select_frame(self, number: int) -> None: ... def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... def _print_lines( self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None - ) -> None: ... + ) -> None: + """Print a range of lines. +""" def _cmdloop(self) -> None: ... - def do_display(self, arg: str) -> bool | None: ... - def do_interact(self, arg: str) -> bool | None: ... - def do_longlist(self, arg: str) -> bool | None: ... - def do_source(self, arg: str) -> bool | None: ... - def do_undisplay(self, arg: str) -> bool | None: ... + def do_display(self, arg: str) -> bool | None: + """display [expression] + +Display the value of the expression if it changed, each time execution +stops in the current frame. + +Without expression, list all display expressions for the current frame. +""" + def do_interact(self, arg: str) -> bool | None: + """interact + +Start an interactive interpreter whose global namespace +contains all the (global and local) names found in the current scope. +""" + def do_longlist(self, arg: str) -> bool | None: + """ll | longlist + +List the whole source code for the current function or frame. +""" + def do_source(self, arg: str) -> bool | None: + """source expression + +Try to get source code for the given object and display it. +""" + def do_undisplay(self, arg: str) -> bool | None: + """undisplay [expression] + +Do not display the expression any more in the current frame. + +Without expression, clear all display expressions for the current frame. +""" do_ll = do_longlist def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... def _complete_bpnumber(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... @@ -256,4 +1004,6 @@ if sys.version_info < (3, 10): def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): + """String that doesn't quote its repr. +""" def __repr__(self) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi index d94fe208f4468..847657500d83b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi @@ -1,3 +1,27 @@ +"""Create portable serialized representations of Python objects. + +See module copyreg for a mechanism for registering custom picklers. +See module pickletools source for extensive comments. + +Classes: + + Pickler + Unpickler + +Functions: + + dump(object, file) + dumps(object) -> string + load(file) -> object + loads(bytes) -> object + +Misc variables: + + __version__ + format_version + compatible_formats + +""" from _pickle import ( PickleError as PickleError, Pickler as Pickler, @@ -109,11 +133,22 @@ bytes_types: tuple[type[Any], ...] # undocumented @final class PickleBuffer: + """Wrapper for potentially out-of-band buffers +""" def __new__(cls, buffer: ReadableBuffer) -> Self: ... - def raw(self) -> memoryview: ... - def release(self) -> None: ... - def __buffer__(self, flags: int, /) -> memoryview: ... - def __release_buffer__(self, buffer: memoryview, /) -> None: ... + def raw(self) -> memoryview: + """Return a memoryview of the raw memory underlying this buffer. +Will raise BufferError is the buffer isn't contiguous. 
+""" + def release(self) -> None: + """Release the underlying buffer exposed by the PickleBuffer object. +""" + def __buffer__(self, flags: int, /) -> memoryview: + """Return a buffer object that exposes the underlying memory of the object. +""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object. +""" MARK: Final = b"(" STOP: Final = b"." @@ -195,8 +230,45 @@ BYTEARRAY8: Final = b"\x96" NEXT_BUFFER: Final = b"\x97" READONLY_BUFFER: Final = b"\x98" -def encode_long(x: int) -> bytes: ... # undocumented -def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: ... # undocumented +def encode_long(x: int) -> bytes: # undocumented + """Encode a long to a two's complement little-endian binary string. +Note that 0 is a special case, returning an empty string, to save a +byte in the LONG1 pickling context. + +>>> encode_long(0) +b'' +>>> encode_long(255) +b'\\xff\\x00' +>>> encode_long(32767) +b'\\xff\\x7f' +>>> encode_long(-256) +b'\\x00\\xff' +>>> encode_long(-32768) +b'\\x00\\x80' +>>> encode_long(-128) +b'\\x80' +>>> encode_long(127) +b'\\x7f' +>>> +""" +def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: # undocumented + """Decode a long from a two's complement little-endian binary string. + +>>> decode_long(b'') +0 +>>> decode_long(b"\\xff\\x00") +255 +>>> decode_long(b"\\xff\\x7f") +32767 +>>> decode_long(b"\\x00\\xff") +-256 +>>> decode_long(b"\\x00\\x80") +-32768 +>>> decode_long(b"\\x80") +-128 +>>> decode_long(b"\\x7f") +127 +""" # undocumented pure-Python implementations class _Pickler: @@ -212,9 +284,51 @@ class _Pickler: *, fix_imports: bool = True, buffer_callback: _BufferCallback = None, - ) -> None: ... - def dump(self, obj: Any) -> None: ... - def clear_memo(self) -> None: ... + ) -> None: + """This takes a binary file for writing a pickle data stream. + +The optional *protocol* argument tells the pickler to use the +given protocol; supported protocols are 0, 1, 2, 3, 4 and 5. +The default protocol is 5. It was introduced in Python 3.8, and +is incompatible with previous versions. + +Specifying a negative protocol version selects the highest +protocol version supported. The higher the protocol used, the +more recent the version of Python needed to read the pickle +produced. + +The *file* argument must have a write() method that accepts a +single bytes argument. It can thus be a file object opened for +binary writing, an io.BytesIO instance, or any other custom +object that meets this interface. + +If *fix_imports* is True and *protocol* is less than 3, pickle +will try to map the new Python 3 names to the old module names +used in Python 2, so that the pickle data stream is readable +with Python 2. + +If *buffer_callback* is None (the default), buffer views are +serialized into *file* as part of the pickle stream. + +If *buffer_callback* is not None, then it can be called any number +of times with a buffer view. If the callback returns a false value +(such as None), the given buffer is out-of-band; otherwise the +buffer is serialized in-band, i.e. inside the pickle stream. + +It is an error if *buffer_callback* is not None and *protocol* +is None or smaller than 5. +""" + def dump(self, obj: Any) -> None: + """Write a pickled representation of obj to the open file. +""" + def clear_memo(self) -> None: + """Clears the pickler's "memo". 
+ +The memo is the data structure that remembers which objects the +pickler has already seen, so that shared or recursive objects +are pickled by reference and not by value. This method is +useful when re-using picklers. +""" def persistent_id(self, obj: Any) -> Any: ... class _Unpickler: @@ -227,7 +341,48 @@ class _Unpickler: encoding: str = "ASCII", errors: str = "strict", buffers: Iterable[Any] | None = None, - ) -> None: ... - def load(self) -> Any: ... + ) -> None: + """This takes a binary file for reading a pickle data stream. + +The protocol version of the pickle is detected automatically, so +no proto argument is needed. + +The argument *file* must have two methods, a read() method that +takes an integer argument, and a readline() method that requires +no arguments. Both methods should return bytes. Thus *file* +can be a binary file object opened for reading, an io.BytesIO +object, or any other custom object that meets this interface. + +The file-like object must have two methods, a read() method +that takes an integer argument, and a readline() method that +requires no arguments. Both methods should return bytes. +Thus file-like object can be a binary file object opened for +reading, a BytesIO object, or any other custom object that +meets this interface. + +If *buffers* is not None, it should be an iterable of buffer-enabled +objects that is consumed each time the pickle stream references +an out-of-band buffer view. Such buffers have been given in order +to the *buffer_callback* of a Pickler object. + +If *buffers* is None (the default), then the buffers are taken +from the pickle stream, assuming they are serialized there. +It is an error for *buffers* to be None if the pickle stream +was produced with a non-None *buffer_callback*. + +Other optional arguments are *fix_imports*, *encoding* and +*errors*, which are used to control compatibility support for +pickle stream generated by Python 2. If *fix_imports* is True, +pickle will try to map the old Python 2 names to the new names +used in Python 3. The *encoding* and *errors* tell pickle how +to decode 8-bit string instances pickled by Python 2; these +default to 'ASCII' and 'strict', respectively. *encoding* can be +'bytes' to read these 8-bit string instances as bytes objects. +""" + def load(self) -> Any: + """Read a pickled object representation from the open file. + +Return the reconstituted object hierarchy specified in the file. +""" def find_class(self, module: str, name: str) -> Any: ... def persistent_load(self, pid: Any) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi index 8bbfaba31b671..cd3f3401d4446 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi @@ -1,3 +1,14 @@ +""""Executable documentation" for the pickle module. + +Extensive comments about the pickle protocols and pickle-machine opcodes +can be found here. Some functions meant for external use: + +genops(pickle) + Generate all the opcodes in a pickle, as (opcode, arg, position) triples. + +dis(pickle, out=None, memo=None, indentlevel=4) + Print a symbolic disassembly of a pickle. +""" import sys from collections.abc import Callable, Iterator, MutableMapping from typing import IO, Any, Final @@ -22,33 +33,116 @@ class ArgumentDescriptor: doc: str def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... -def read_uint1(f: IO[bytes]) -> int: ... 
+def read_uint1(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_uint1(io.BytesIO(b'\\xff')) +255 +""" uint1: ArgumentDescriptor -def read_uint2(f: IO[bytes]) -> int: ... +def read_uint2(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_uint2(io.BytesIO(b'\\xff\\x00')) +255 +>>> read_uint2(io.BytesIO(b'\\xff\\xff')) +65535 +""" uint2: ArgumentDescriptor -def read_int4(f: IO[bytes]) -> int: ... +def read_int4(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_int4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) +255 +>>> read_int4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == -(2**31) +True +""" int4: ArgumentDescriptor -def read_uint4(f: IO[bytes]) -> int: ... +def read_uint4(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_uint4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) +255 +>>> read_uint4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == 2**31 +True +""" uint4: ArgumentDescriptor -def read_uint8(f: IO[bytes]) -> int: ... +def read_uint8(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_uint8(io.BytesIO(b'\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00')) +255 +>>> read_uint8(io.BytesIO(b'\\xff' * 8)) == 2**64-1 +True +""" uint8: ArgumentDescriptor if sys.version_info >= (3, 12): def read_stringnl( f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1" - ) -> bytes | str: ... + ) -> bytes | str: + """ +>>> import io +>>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) +'abcd' + +>>> read_stringnl(io.BytesIO(b"\\n")) +Traceback (most recent call last): +... +ValueError: no string quotes around b'' + +>>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) +'' + +>>> read_stringnl(io.BytesIO(b"''\\n")) +'' + +>>> read_stringnl(io.BytesIO(b'"abcd"')) +Traceback (most recent call last): +... +ValueError: no newline found when trying to read stringnl + +Embedded escapes are undone in the result. +>>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) +'a\\n\\\\b\\x00c\\td' +""" else: - def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... + def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: + """ + >>> import io + >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) + 'abcd' + + >>> read_stringnl(io.BytesIO(b"\\n")) + Traceback (most recent call last): + ... + ValueError: no string quotes around b'' + + >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) + '' + + >>> read_stringnl(io.BytesIO(b"''\\n")) + '' + + >>> read_stringnl(io.BytesIO(b'"abcd"')) + Traceback (most recent call last): + ... + ValueError: no newline found when trying to read stringnl + + Embedded escapes are undone in the result. + >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) + 'a\\n\\\\b\\x00c\\td' + """ stringnl: ArgumentDescriptor @@ -56,65 +150,229 @@ def read_stringnl_noescape(f: IO[bytes]) -> str: ... stringnl_noescape: ArgumentDescriptor -def read_stringnl_noescape_pair(f: IO[bytes]) -> str: ... +def read_stringnl_noescape_pair(f: IO[bytes]) -> str: + """ +>>> import io +>>> read_stringnl_noescape_pair(io.BytesIO(b"Queue\\nEmpty\\njunk")) +'Queue Empty' +""" stringnl_noescape_pair: ArgumentDescriptor -def read_string1(f: IO[bytes]) -> str: ... +def read_string1(f: IO[bytes]) -> str: + """ +>>> import io +>>> read_string1(io.BytesIO(b"\\x00")) +'' +>>> read_string1(io.BytesIO(b"\\x03abcdef")) +'abc' +""" string1: ArgumentDescriptor -def read_string4(f: IO[bytes]) -> str: ... 
+def read_string4(f: IO[bytes]) -> str:
+ """
+>>> import io
+>>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc"))
+''
+>>> read_string4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef"))
+'abc'
+>>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef"))
+Traceback (most recent call last):
+...
+ValueError: expected 50331648 bytes in a string4, but only 6 remain
+"""

string4: ArgumentDescriptor

-def read_bytes1(f: IO[bytes]) -> bytes: ...
+def read_bytes1(f: IO[bytes]) -> bytes:
+ """
+>>> import io
+>>> read_bytes1(io.BytesIO(b"\\x00"))
+b''
+>>> read_bytes1(io.BytesIO(b"\\x03abcdef"))
+b'abc'
+"""

bytes1: ArgumentDescriptor

-def read_bytes4(f: IO[bytes]) -> bytes: ...
+def read_bytes4(f: IO[bytes]) -> bytes:
+ """
+>>> import io
+>>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc"))
+b''
+>>> read_bytes4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef"))
+b'abc'
+>>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef"))
+Traceback (most recent call last):
+...
+ValueError: expected 50331648 bytes in a bytes4, but only 6 remain
+"""

bytes4: ArgumentDescriptor

-def read_bytes8(f: IO[bytes]) -> bytes: ...
+def read_bytes8(f: IO[bytes]) -> bytes:
+ """
+>>> import io, struct, sys
+>>> read_bytes8(io.BytesIO(b"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00abc"))
+b''
+>>> read_bytes8(io.BytesIO(b"\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00abcdef"))
+b'abc'
+>>> bigsize8 = struct.pack("<Q", sys.maxsize//3)
+>>> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS
+Traceback (most recent call last):
+...
+ValueError: expected ... bytes in a bytes8, but only 6 remain
+"""

bytes8: ArgumentDescriptor

-def read_unicodestringnl(f: IO[bytes]) -> str: ...
+def read_unicodestringnl(f: IO[bytes]) -> str:
+ """
+>>> import io
+>>> read_unicodestringnl(io.BytesIO(b"abc\\\\uabcd\\njunk")) == 'abc\\uabcd'
+True
+"""

unicodestringnl: ArgumentDescriptor

-def read_unicodestring1(f: IO[bytes]) -> str: ...
+def read_unicodestring1(f: IO[bytes]) -> str:
+ """
+>>> import io
+>>> s = 'abcd\\uabcd'
+>>> enc = s.encode('utf-8')
+>>> enc
+b'abcd\\xea\\xaf\\x8d'
+>>> n = bytes([len(enc)]) # little-endian 1-byte length
+>>> t = read_unicodestring1(io.BytesIO(n + enc + b'junk'))
+>>> s == t
+True
+
+>>> read_unicodestring1(io.BytesIO(n + enc[:-1]))
+Traceback (most recent call last):
+...
+ValueError: expected 7 bytes in a unicodestring1, but only 6 remain
+"""

unicodestring1: ArgumentDescriptor

-def read_unicodestring4(f: IO[bytes]) -> str: ...
+def read_unicodestring4(f: IO[bytes]) -> str:
+ """
+>>> import io
+>>> s = 'abcd\\uabcd'
+>>> enc = s.encode('utf-8')
+>>> enc
+b'abcd\\xea\\xaf\\x8d'
+>>> n = bytes([len(enc), 0, 0, 0]) # little-endian 4-byte length
+>>> t = read_unicodestring4(io.BytesIO(n + enc + b'junk'))
+>>> s == t
+True
+
+>>> read_unicodestring4(io.BytesIO(n + enc[:-1]))
+Traceback (most recent call last):
+...
+ValueError: expected 7 bytes in a unicodestring4, but only 6 remain
+"""

unicodestring4: ArgumentDescriptor

-def read_unicodestring8(f: IO[bytes]) -> str: ...
+def read_unicodestring8(f: IO[bytes]) -> str:
+ """
+>>> import io
+>>> s = 'abcd\\uabcd'
+>>> enc = s.encode('utf-8')
+>>> enc
+b'abcd\\xea\\xaf\\x8d'
+>>> n = bytes([len(enc)]) + b'\\0' * 7 # little-endian 8-byte length
+>>> t = read_unicodestring8(io.BytesIO(n + enc + b'junk'))
+>>> s == t
+True
+
+>>> read_unicodestring8(io.BytesIO(n + enc[:-1]))
+Traceback (most recent call last):
+...
+ValueError: expected 7 bytes in a unicodestring8, but only 6 remain
+"""

unicodestring8: ArgumentDescriptor

-def read_decimalnl_short(f: IO[bytes]) -> int: ...
-def read_decimalnl_long(f: IO[bytes]) -> int: ... +def read_decimalnl_short(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_decimalnl_short(io.BytesIO(b"1234\\n56")) +1234 + +>>> read_decimalnl_short(io.BytesIO(b"1234L\\n56")) +Traceback (most recent call last): +... +ValueError: invalid literal for int() with base 10: b'1234L' +""" +def read_decimalnl_long(f: IO[bytes]) -> int: + """ +>>> import io + +>>> read_decimalnl_long(io.BytesIO(b"1234L\\n56")) +1234 + +>>> read_decimalnl_long(io.BytesIO(b"123456789012345678901234L\\n6")) +123456789012345678901234 +""" decimalnl_short: ArgumentDescriptor decimalnl_long: ArgumentDescriptor -def read_floatnl(f: IO[bytes]) -> float: ... +def read_floatnl(f: IO[bytes]) -> float: + """ +>>> import io +>>> read_floatnl(io.BytesIO(b"-1.25\\n6")) +-1.25 +""" floatnl: ArgumentDescriptor -def read_float8(f: IO[bytes]) -> float: ... +def read_float8(f: IO[bytes]) -> float: + """ +>>> import io, struct +>>> raw = struct.pack(">d", -1.25) +>>> raw +b'\\xbf\\xf4\\x00\\x00\\x00\\x00\\x00\\x00' +>>> read_float8(io.BytesIO(raw + b"\\n")) +-1.25 +""" float8: ArgumentDescriptor -def read_long1(f: IO[bytes]) -> int: ... +def read_long1(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_long1(io.BytesIO(b"\\x00")) +0 +>>> read_long1(io.BytesIO(b"\\x02\\xff\\x00")) +255 +>>> read_long1(io.BytesIO(b"\\x02\\xff\\x7f")) +32767 +>>> read_long1(io.BytesIO(b"\\x02\\x00\\xff")) +-256 +>>> read_long1(io.BytesIO(b"\\x02\\x00\\x80")) +-32768 +""" long1: ArgumentDescriptor -def read_long4(f: IO[bytes]) -> int: ... +def read_long4(f: IO[bytes]) -> int: + """ +>>> import io +>>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x00")) +255 +>>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x7f")) +32767 +>>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\xff")) +-256 +>>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\x80")) +-32768 +>>> read_long1(io.BytesIO(b"\\x00\\x00\\x00\\x00")) +0 +""" long4: ArgumentDescriptor @@ -166,12 +424,73 @@ class OpcodeInfo: opcodes: list[OpcodeInfo] -def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... -def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: + """Generate all the opcodes in a pickle. + +'pickle' is a file-like object, or string, containing the pickle. + +Each opcode in the pickle is generated, from the current pickle position, +stopping after a STOP opcode is delivered. A triple is generated for +each opcode: + + opcode, arg, pos + +opcode is an OpcodeInfo record, describing the current opcode. + +If the opcode has an argument embedded in the pickle, arg is its decoded +value, as a Python object. If the opcode doesn't have an argument, arg +is None. + +If the pickle has a tell() method, pos was the value of pickle.tell() +before reading the current opcode. If the pickle is a bytes object, +it's wrapped in a BytesIO object, and the latter's tell() result is +used. Else (the pickle doesn't have a tell(), and it's not obvious how +to query its current position) pos is None. +""" +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: + """Optimize a pickle string by removing unused PUT opcodes +""" def dis( pickle: bytes | bytearray | IO[bytes], out: IO[str] | None = None, memo: MutableMapping[int, Any] | None = None, indentlevel: int = 4, annotate: int = 0, -) -> None: ... +) -> None: + """Produce a symbolic disassembly of a pickle. 
+ +'pickle' is a file-like object, or string, containing a (at least one) +pickle. The pickle is disassembled from the current position, through +the first STOP opcode encountered. + +Optional arg 'out' is a file-like object to which the disassembly is +printed. It defaults to sys.stdout. + +Optional arg 'memo' is a Python dict, used as the pickle's memo. It +may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes. +Passing the same memo object to another dis() call then allows disassembly +to proceed across multiple pickles that were all created by the same +pickler with the same memo. Ordinarily you don't need to worry about this. + +Optional arg 'indentlevel' is the number of blanks by which to indent +a new MARK level. It defaults to 4. + +Optional arg 'annotate' if nonzero instructs dis() to add short +description of the opcode on each line of disassembled output. +The value given to 'annotate' must be an integer and is used as a +hint for the column where annotation should start. The default +value is 0, meaning no annotations. + +In addition to printing the disassembly, some sanity checks are made: + ++ All embedded opcode arguments "make sense". + ++ Explicit and implicit pop operations have enough items on the stack. + ++ When an opcode implicitly refers to a markobject, a markobject is + actually on the stack. + ++ A memo entry isn't referenced before it's defined. + ++ The markobject isn't stored in the memo. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi index fe680bfddf5f2..2fc3232dff2c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi @@ -1,16 +1,91 @@ +"""Conversion pipeline templates. + +The problem: +------------ + +Suppose you have some data that you want to convert to another format, +such as from GIF image format to PPM image format. Maybe the +conversion involves several steps (e.g. piping it through compress or +uuencode). Some of the conversion steps may require that their input +is a disk file, others may be able to read standard input; similar for +their output. The input to the entire conversion may also be read +from a disk file or from an open file, and similar for its output. + +The module lets you construct a pipeline template by sticking one or +more conversion steps together. It will take care of creating and +removing temporary files if they are necessary to hold intermediate +data. You can then use the template to do conversions from many +different sources to many different destinations. The temporary +file names used are different each time the template is used. + +The templates are objects so you can create templates for many +different conversion steps and store them in a dictionary, for +instance. + + +Directions: +----------- + +To create a template: + t = Template() + +To add a conversion step to a template: + t.append(command, kind) +where kind is a string of two characters: the first is '-' if the +command reads its standard input or 'f' if it requires a file; the +second likewise for the output. The command must be valid /bin/sh +syntax. If input or output files are required, they are passed as +$IN and $OUT; otherwise, it must be possible to use the command in +a pipeline. 
+ +To add a conversion step at the beginning: + t.prepend(command, kind) + +To convert a file to another file using a template: + sts = t.copy(infile, outfile) +If infile or outfile are the empty string, standard input is read or +standard output is written, respectively. The return value is the +exit status of the conversion pipeline. + +To open a file for reading or writing through a conversion pipeline: + fp = t.open(file, mode) +where mode is 'r' to read the file, or 'w' to write it -- just like +for the built-in function open() or for os.popen(). + +To create a new template object initialized to a given one: + t2 = t.clone() +""" import os __all__ = ["Template"] class Template: - def reset(self) -> None: ... - def clone(self) -> Template: ... - def debug(self, flag: bool) -> None: ... - def append(self, cmd: str, kind: str) -> None: ... - def prepend(self, cmd: str, kind: str) -> None: ... - def open(self, file: str, rw: str) -> os._wrap_close: ... + """Class representing a pipeline template. +""" + def reset(self) -> None: + """t.reset() restores a pipeline template to its initial state. +""" + def clone(self) -> Template: + """t.clone() returns a new pipeline template with identical + initial state as the current one. +""" + def debug(self, flag: bool) -> None: + """t.debug(flag) turns debugging on or off. +""" + def append(self, cmd: str, kind: str) -> None: + """t.append(cmd, kind) adds a new step at the end. +""" + def prepend(self, cmd: str, kind: str) -> None: + """t.prepend(cmd, kind) adds a new step at the front. +""" + def open(self, file: str, rw: str) -> os._wrap_close: + """t.open(file, rw) returns a pipe or file object open for + reading or writing; the file is the other end of the pipeline. +""" def copy(self, infile: str, outfile: str) -> int: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. -def quote(s: str) -> str: ... +def quote(s: str) -> str: + """Return a shell-escaped version of the string *s*. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi index 7c70dcc4c5ab1..35641908a5705 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi @@ -1,3 +1,5 @@ +"""Utilities to support packages. +""" import sys from _typeshed import StrOrBytesPath, SupportsRead from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol @@ -23,37 +25,211 @@ if sys.version_info < (3, 12): _PathT = TypeVar("_PathT", bound=Iterable[str]) class ModuleInfo(NamedTuple): + """A namedtuple with minimal info about a module. +""" module_finder: MetaPathFinderProtocol | PathEntryFinderProtocol name: str ispkg: bool -def extend_path(path: _PathT, name: str) -> _PathT: ... +def extend_path(path: _PathT, name: str) -> _PathT: + """Extend a package's path. + +Intended use is to place the following code in a package's __init__.py: + + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + +For each directory on sys.path that has a subdirectory that +matches the package name, add the subdirectory to the package's +__path__. This is useful if one wants to distribute different +parts of a single logical package as multiple directories. + +It also looks for *.pkg files beginning where * matches the name +argument. This feature is similar to *.pth files (see site.py), +except that it doesn't special-case lines starting with 'import'. 
+A *.pkg file is trusted at face value: apart from checking for +duplicates, all entries found in a *.pkg file are added to the +path, regardless of whether they are exist the filesystem. (This +is a feature.) + +If the input path is not a list (as is the case for frozen +packages) it is returned unchanged. The input path is not +modified; an extended copy is returned. Items are only appended +to the copy at the end. + +It is assumed that sys.path is a sequence. Items of sys.path that +are not (unicode or 8-bit) strings referring to existing +directories are ignored. Unicode items of sys.path that cause +errors when used as filenames may cause this function to raise an +exception (in line with os.path.isdir() behavior). +""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpImporter: + """PEP 302 Finder that wraps Python's "classic" import algorithm + + ImpImporter(dirname) produces a PEP 302 finder that searches that + directory. ImpImporter(None) produces a PEP 302 finder that searches + the current sys.path, plus any modules that are frozen or built-in. + + Note that ImpImporter does not currently support being used by placement + on sys.meta_path. + """ def __init__(self, path: StrOrBytesPath | None = None) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpLoader: + """PEP 302 Loader that wraps Python's "classic" import algorithm + """ def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") - def find_loader(fullname: str) -> LoaderProtocol | None: ... + def find_loader(fullname: str) -> LoaderProtocol | None: + """Find a "loader" object for fullname + +This is a backwards compatibility wrapper around +importlib.util.find_spec that converts most failures to ImportError +and only returns the loader rather than the full spec +""" @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") - def get_loader(module_or_name: str) -> LoaderProtocol | None: ... + def get_loader(module_or_name: str) -> LoaderProtocol | None: + """Get a "loader" object for module_or_name + +Returns None if the module cannot be found or imported. +If the named module is not already imported, its containing package +(if any) is imported, in order to establish the package __path__. +""" else: - def find_loader(fullname: str) -> LoaderProtocol | None: ... - def get_loader(module_or_name: str) -> LoaderProtocol | None: ... + def find_loader(fullname: str) -> LoaderProtocol | None: + """Find a "loader" object for fullname + + This is a backwards compatibility wrapper around + importlib.util.find_spec that converts most failures to ImportError + and only returns the loader rather than the full spec + """ + def get_loader(module_or_name: str) -> LoaderProtocol | None: + """Get a "loader" object for module_or_name + + Returns None if the module cannot be found or imported. + If the named module is not already imported, its containing package + (if any) is imported, in order to establish the package __path__. 
+ """ + +def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: + """Retrieve a finder for the given path item + +The returned finder is cached in sys.path_importer_cache +if it was newly created by a path hook. + +The cache (or part of it) can be cleared manually if a +rescan of sys.path_hooks is necessary. +""" +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: + """Yield finders for the given module name + +If fullname contains a '.', the finders will be for the package +containing fullname, otherwise they will be all registered top level +finders (i.e. those on both sys.meta_path and sys.path_hooks). + +If the named module is in a package, that package is imported as a side +effect of invoking this function. + +If no module name is specified, all top level finders are produced. +""" +def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: + """Yields ModuleInfo for all submodules on path, +or, if path is None, all top-level modules on sys.path. + +'path' should be either None or a list of paths to look for +modules in. -def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: ... -def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: ... -def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... +'prefix' is a string to output on the front of every module name +on output. +""" def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None -) -> Iterator[ModuleInfo]: ... -def get_data(package: str, resource: str) -> bytes | None: ... -def resolve_name(name: str) -> Any: ... +) -> Iterator[ModuleInfo]: + """Yields ModuleInfo for all modules recursively +on path, or, if path is None, all accessible modules. + +'path' should be either None or a list of paths to look for +modules in. + +'prefix' is a string to output on the front of every module name +on output. + +Note that this function must import all *packages* (NOT all +modules!) on the given path, in order to access the __path__ +attribute to find submodules. + +'onerror' is a function which gets called with one argument (the +name of the package which was being imported) if any exception +occurs while trying to import a package. If no onerror function is +supplied, ImportErrors are caught and ignored, while all other +exceptions are propagated, terminating the search. + +Examples: + +# list all modules python can access +walk_packages() + +# list all submodules of ctypes +walk_packages(ctypes.__path__, ctypes.__name__+'.') +""" +def get_data(package: str, resource: str) -> bytes | None: + """Get a resource from a package. + +This is a wrapper round the PEP 302 loader get_data API. The package +argument should be the name of a package, in standard module format +(foo.bar). The resource argument should be in the form of a relative +filename, using '/' as the path separator. The parent directory name '..' +is not allowed, and nor is a rooted name (starting with a '/'). + +The function returns a binary string, which is the contents of the +specified resource. 
+ +For packages located in the filesystem, which have already been imported, +this is the rough equivalent of + + d = os.path.dirname(sys.modules[package].__file__) + data = open(os.path.join(d, resource), 'rb').read() + +If the package cannot be located or loaded, or it uses a PEP 302 loader +which does not support get_data(), then None is returned. +""" +def resolve_name(name: str) -> Any: + """ +Resolve a name to an object. + +It is expected that `name` will be a string in one of the following +formats, where W is shorthand for a valid Python identifier and dot stands +for a literal period in these pseudo-regexes: + +W(.W)* +W(.W)*:(W(.W)*)? + +The first form is intended for backward compatibility only. It assumes that +some part of the dotted name is a package, and the rest is an object +somewhere within that package, possibly nested inside other objects. +Because the place where the package stops and the object hierarchy starts +can't be inferred by inspection, repeated attempts to import must be done +with this form. + +In the second form, the caller makes the division point clear through the +provision of a single colon: the dotted name to the left of the colon is a +package to be imported, and the dotted name to the right is the object +hierarchy within that package. Only one import is needed in this form. If +it ends with the colon, then a module object is returned. + +The function will return an object (which might be a module), or raise one +of the following exceptions: + +ValueError - if `name` isn't in a recognised format +ImportError - if an import failed when it shouldn't have +AttributeError - if a failure occurred when traversing the object hierarchy + within the imported package to get to the desired object. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi index 69d702bb155cd..70d5957cfb399 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi @@ -1,14 +1,42 @@ +"""This module tries to retrieve as much platform-identifying data as +possible. It makes this information available via function APIs. + +If called from the command line, it prints the platform +information concatenated as single string to stdout. The output +format is usable as part of a filename. + +""" import sys from typing import NamedTuple, type_check_only from typing_extensions import Self, deprecated, disjoint_base -def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... +def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: + """Tries to determine the libc version that the file executable +(which defaults to the Python interpreter) is linked against. + +Returns a tuple of strings (lib,version) which default to the +given parameters in case the lookup fails. + +Note that the function has intimate knowledge of how different +libc versions add symbols to the executable and thus is probably +only usable for executables compiled using gcc. + +The file is read and scanned in chunks of chunksize bytes. + +""" def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... def win32_edition() -> str: ... def win32_is_iot() -> bool: ... 
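Editor's note: the two name formats accepted by pkgutil.resolve_name(), documented just above, are easiest to see with a short example. This is a minimal illustrative sketch, not part of the vendored stub; the looked-up names (collections.abc, os.path.join, json) are arbitrary choices.

    import pkgutil

    # Colon form: everything left of ":" is imported as a module, the rest
    # is resolved as an attribute path within it (one import only).
    mapping_cls = pkgutil.resolve_name("collections.abc:Mapping")

    # Legacy dotted form: the split between package and attribute is found
    # by repeated import attempts, as the docstring above explains.
    join_func = pkgutil.resolve_name("os.path.join")

    # A name ending in ":" resolves to the module object itself.
    json_mod = pkgutil.resolve_name("json:")
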
def mac_ver( release: str = "", versioninfo: tuple[str, str, str] = ("", "", ""), machine: str = "" -) -> tuple[str, tuple[str, str, str], str]: ... +) -> tuple[str, tuple[str, str, str], str]: + """Get macOS version information and return it as tuple (release, +versioninfo, machine) with versioninfo being a tuple (version, +dev_stage, non_release_version). + +Entries which cannot be determined are set to the parameter values +which default to ''. All tuple entries are strings. +""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -17,7 +45,17 @@ if sys.version_info >= (3, 13): vendor: str = "", vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", ""), - ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: + """Version interface for Jython. + +Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being +a tuple (vm_name, vm_release, vm_vendor) and osinfo being a +tuple (os_name, os_version, os_arch). + +Values which cannot be determined are set to the defaults +given as parameters (which all default to ''). + +""" else: def java_ver( @@ -25,10 +63,46 @@ else: vendor: str = "", vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", ""), - ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... + ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: + """ Version interface for Jython. + + Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being + a tuple (vm_name, vm_release, vm_vendor) and osinfo being a + tuple (os_name, os_version, os_arch). + + Values which cannot be determined are set to the defaults + given as parameters (which all default to ''). + + """ + +def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: + """Returns (system, release, version) aliased to common +marketing names used for some systems. -def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... -def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... +It also does some reordering of the information in some cases +where it would otherwise cause confusion. + +""" +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: + """Queries the given executable (defaults to the Python interpreter +binary) for various architecture information. + +Returns a tuple (bits, linkage) which contains information about +the bit architecture and the linkage format used for the +executable. Both values are returned as strings. + +Values that cannot be determined are returned as given by the +parameter presets. If bits is given as '', the sizeof(pointer) +(or sizeof(long) on Python version < 1.5.2) is used as +indicator for the supported pointer size. + +The function relies on the system's "file" command to do the +actual work. This is available on most if not all Unix +platforms. On some non-Unix platforms where the "file" command +does not exist and the executable is set to the Python interpreter +binary defaults from _default_architecture are used. + +""" # This class is not exposed. It calls itself platform.uname_result_base. # At runtime it only has 5 fields. @@ -48,43 +122,171 @@ class _uname_result_base(NamedTuple): # is lazily evaluated rather than being passed in to the constructor. 
if sys.version_info >= (3, 12): class uname_result(_uname_result_base): + """ +A uname_result that's largely compatible with a +simple namedtuple except that 'processor' is +resolved late and cached to avoid calling "uname" +except when needed. +""" __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] - def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: + """Create new instance of uname_result_base(system, node, release, version, machine) +""" @property def processor(self) -> str: ... else: @disjoint_base class uname_result(_uname_result_base): + """ + A uname_result that's largely compatible with a + simple namedtuple except that 'processor' is + resolved late and cached to avoid calling "uname" + except when needed. + """ if sys.version_info >= (3, 10): __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] - def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: ... + def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: + """Create new instance of uname_result_base(system, node, release, version, machine) +""" @property def processor(self) -> str: ... -def uname() -> uname_result: ... -def system() -> str: ... -def node() -> str: ... -def release() -> str: ... -def version() -> str: ... -def machine() -> str: ... -def processor() -> str: ... -def python_implementation() -> str: ... -def python_version() -> str: ... -def python_version_tuple() -> tuple[str, str, str]: ... -def python_branch() -> str: ... -def python_revision() -> str: ... -def python_build() -> tuple[str, str]: ... -def python_compiler() -> str: ... -def platform(aliased: bool = False, terse: bool = False) -> str: ... +def uname() -> uname_result: + """Fairly portable uname interface. Returns a tuple +of strings (system, node, release, version, machine, processor) +identifying the underlying platform. + +Note that unlike the os.uname function this also returns +possible processor information as an additional tuple entry. + +Entries which cannot be determined are set to ''. + +""" +def system() -> str: + """Returns the system/OS name, e.g. 'Linux', 'Windows' or 'Java'. + +An empty string is returned if the value cannot be determined. + +""" +def node() -> str: + """Returns the computer's network name (which may not be fully +qualified) + +An empty string is returned if the value cannot be determined. + +""" +def release() -> str: + """Returns the system's release, e.g. '2.2.0' or 'NT' + +An empty string is returned if the value cannot be determined. + +""" +def version() -> str: + """Returns the system's release version, e.g. '#3 on degas' + +An empty string is returned if the value cannot be determined. + +""" +def machine() -> str: + """Returns the machine type, e.g. 'i386' + +An empty string is returned if the value cannot be determined. + +""" +def processor() -> str: + """Returns the (true) processor name, e.g. 'amdk6' + +An empty string is returned if the value cannot be +determined. Note that many platforms do not provide this +information or simply return the same value as for machine(), +e.g. NetBSD does this. + +""" +def python_implementation() -> str: + """Returns a string identifying the Python implementation. 
+ +Currently, the following implementations are identified: + 'CPython' (C implementation of Python), + 'Jython' (Java implementation of Python), + 'PyPy' (Python implementation of Python). + +""" +def python_version() -> str: + """Returns the Python version as string 'major.minor.patchlevel' + +Note that unlike the Python sys.version, the returned value +will always include the patchlevel (it defaults to 0). + +""" +def python_version_tuple() -> tuple[str, str, str]: + """Returns the Python version as tuple (major, minor, patchlevel) +of strings. + +Note that unlike the Python sys.version, the returned value +will always include the patchlevel (it defaults to 0). + +""" +def python_branch() -> str: + """Returns a string identifying the Python implementation +branch. + +For CPython this is the SCM branch from which the +Python binary was built. + +If not available, an empty string is returned. + +""" +def python_revision() -> str: + """Returns a string identifying the Python implementation +revision. + +For CPython this is the SCM revision from which the +Python binary was built. + +If not available, an empty string is returned. + +""" +def python_build() -> tuple[str, str]: + """Returns a tuple (buildno, builddate) stating the Python +build number and date as strings. + +""" +def python_compiler() -> str: + """Returns a string identifying the compiler used for compiling +Python. + +""" +def platform(aliased: bool = False, terse: bool = False) -> str: + """Returns a single string identifying the underlying platform +with as much useful information as possible (but no more :). + +The output is intended to be human readable rather than +machine parseable. It may look different on different +platforms and this is intended. + +If "aliased" is true, the function will use aliases for +various platforms that report system names which differ from +their common names, e.g. SunOS will be reported as +Solaris. The system_alias() function is used to implement +this. + +Setting terse to true causes the function to return only the +absolute minimum information needed to identify the platform. + +""" if sys.version_info >= (3, 10): - def freedesktop_os_release() -> dict[str, str]: ... + def freedesktop_os_release() -> dict[str, str]: + """Return operation system identification from freedesktop.org os-release + """ if sys.version_info >= (3, 13): class AndroidVer(NamedTuple): + """AndroidVer(release, api_level, manufacturer, model, device, is_emulator) +""" release: str api_level: int manufacturer: str @@ -93,6 +295,8 @@ if sys.version_info >= (3, 13): is_emulator: bool class IOSVersionInfo(NamedTuple): + """IOSVersionInfo(system, release, model, is_simulator) +""" system: str release: str model: str @@ -106,7 +310,15 @@ if sys.version_info >= (3, 13): device: str = "", is_emulator: bool = False, ) -> AndroidVer: ... - def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: ... + def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: + """Get iOS version information, and return it as a namedtuple: + (system, release, model, is_simulator). + +If values can't be determined, they are set to values provided as +parameters. +""" if sys.version_info >= (3, 14): - def invalidate_caches() -> None: ... + def invalidate_caches() -> None: + """Invalidate the cached results. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi index dc3247ee47fb8..87d6d149da8f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi @@ -1,3 +1,59 @@ +"""plistlib.py -- a tool to generate and parse MacOSX .plist files. + +The property list (.plist) file format is a simple XML pickle supporting +basic object types, like dictionaries, lists, numbers and strings. +Usually the top level object is a dictionary. + +To write out a plist file, use the dump(value, file) +function. 'value' is the top level object, 'file' is +a (writable) file object. + +To parse a plist from a file, use the load(file) function, +with a (readable) file object as the only argument. It +returns the top level object (again, usually a dictionary). + +To work with plist data in bytes objects, you can use loads() +and dumps(). + +Values can be strings, integers, floats, booleans, tuples, lists, +dictionaries (but only with string keys), Data, bytes, bytearray, or +datetime.datetime objects. + +Generate Plist example: + + import datetime + import plistlib + + pl = dict( + aString = "Doodah", + aList = ["A", "B", 12, 32.1, [1, 2, 3]], + aFloat = 0.1, + anInt = 728, + aDict = dict( + anotherString = "", + aThirdString = "M\\xe4ssig, Ma\\xdf", + aTrueValue = True, + aFalseValue = False, + ), + someData = b"", + someMoreData = b"" * 10, + aDate = datetime.datetime.now() + ) + print(plistlib.dumps(pl).decode()) + +Parse Plist example: + + import plistlib + + plist = b''' + + foo + bar + + ''' + pl = plistlib.loads(plist) + print(pl["foo"]) +""" import sys from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping @@ -9,6 +65,8 @@ from typing_extensions import Self __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] class PlistFormat(Enum): + """An enumeration. +""" FMT_XML = 1 FMT_BINARY = 2 @@ -21,20 +79,32 @@ if sys.version_info >= (3, 13): fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ..., aware_datetime: bool = False, - ) -> Any: ... + ) -> Any: + """Read a .plist file. 'fp' should be a readable and binary file object. +Return the unpacked root object (which usually is a dictionary). +""" def loads( value: ReadableBuffer | str, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ..., aware_datetime: bool = False, - ) -> Any: ... + ) -> Any: + """Read a .plist file from a bytes object. +Return the unpacked root object (which usually is a dictionary). +""" else: - def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: + """Read a .plist file. 'fp' should be a readable and binary file object. + Return the unpacked root object (which usually is a dictionary). + """ def loads( value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... - ) -> Any: ... + ) -> Any: + """Read a .plist file from a bytes object. + Return the unpacked root object (which usually is a dictionary). + """ if sys.version_info >= (3, 13): def dump( @@ -45,7 +115,10 @@ if sys.version_info >= (3, 13): sort_keys: bool = True, skipkeys: bool = False, aware_datetime: bool = False, - ) -> None: ... + ) -> None: + """Write 'value' to a .plist file. 
'fp' should be a writable, +binary file object. +""" def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, @@ -53,7 +126,9 @@ if sys.version_info >= (3, 13): skipkeys: bool = False, sort_keys: bool = True, aware_datetime: bool = False, - ) -> bytes: ... + ) -> bytes: + """Return a bytes object with the contents for a .plist file. + """ else: def dump( @@ -63,14 +138,19 @@ else: fmt: PlistFormat = ..., sort_keys: bool = True, skipkeys: bool = False, - ) -> None: ... + ) -> None: + """Write 'value' to a .plist file. 'fp' should be a writable, + binary file object. + """ def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., skipkeys: bool = False, sort_keys: bool = True, - ) -> bytes: ... + ) -> bytes: + """Return a bytes object with the contents for a .plist file. + """ class UID: data: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi index 9ff2b764aeb68..44755f30f675a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi @@ -1,3 +1,7 @@ +"""A POP3 client class. + +Based on the J. Myers POP3 draft, Jan. 96 +""" import socket import ssl import sys @@ -20,6 +24,48 @@ CRLF: Final = b"\r\n" HAVE_SSL: Final[bool] class POP3: + """This class supports both the minimal and optional command sets. +Arguments can be strings or integers (where appropriate) +(e.g.: retr(1) and retr('1') both work equally well. + +Minimal Command Set: + USER name user(name) + PASS string pass_(string) + STAT stat() + LIST [msg] list(msg = None) + RETR msg retr(msg) + DELE msg dele(msg) + NOOP noop() + RSET rset() + QUIT quit() + +Optional Commands (some servers support these): + RPOP name rpop(name) + APOP name digest apop(name, digest) + TOP msg n top(msg, n) + UIDL [msg] uidl(msg = None) + CAPA capa() + STLS stls() + UTF8 utf8() + +Raises one exception: 'error_proto'. + +Instantiate with: + POP3(hostname, port=110) + +NB: the POP protocol locks the mailbox from user + authorization until QUIT, so be sure to get in, suck + the messages, and quit, each time you access the + mailbox. + + POP is a line-based protocol, which means large mail + messages consume lots of python cycles reading them + line-by-line. + + If it's available on your mail server, use IMAP4 + instead, it doesn't suffer from the two problems + above. +""" encoding: str host: str port: int @@ -29,34 +75,129 @@ class POP3: def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... - def user(self, user: str) -> bytes: ... - def pass_(self, pswd: str) -> bytes: ... - def stat(self) -> tuple[int, int]: ... - def list(self, which: Any | None = None) -> _LongResp: ... - def retr(self, which: Any) -> _LongResp: ... - def dele(self, which: Any) -> bytes: ... - def noop(self) -> bytes: ... - def rset(self) -> bytes: ... - def quit(self) -> bytes: ... - def close(self) -> None: ... - def rpop(self, user: str) -> bytes: ... + def user(self, user: str) -> bytes: + """Send user name, return response + +(should indicate password required). +""" + def pass_(self, pswd: str) -> bytes: + """Send password, return response + +(response includes message count, mailbox size). 
+ +NB: mailbox is locked by server from here to 'quit()' +""" + def stat(self) -> tuple[int, int]: + """Get mailbox status. + +Result is tuple of 2 ints (message count, mailbox size) +""" + def list(self, which: Any | None = None) -> _LongResp: + """Request listing, return result. + +Result without a message number argument is in form +['response', ['mesg_num octets', ...], octets]. + +Result when a message number argument is given is a +single response: the "scan listing" for that message. +""" + def retr(self, which: Any) -> _LongResp: + """Retrieve whole message number 'which'. + +Result is in form ['response', ['line', ...], octets]. +""" + def dele(self, which: Any) -> bytes: + """Delete message number 'which'. + +Result is 'response'. +""" + def noop(self) -> bytes: + """Does nothing. + +One supposes the response indicates the server is alive. +""" + def rset(self) -> bytes: + """Unmark all messages marked for deletion. +""" + def quit(self) -> bytes: + """Signoff: commit changes on server, unlock mailbox, close connection. +""" + def close(self) -> None: + """Close the connection without assuming anything about it. +""" + def rpop(self, user: str) -> bytes: + """Send RPOP command to access the mailbox with an alternate user. +""" timestamp: Pattern[str] - def apop(self, user: str, password: str) -> bytes: ... - def top(self, which: Any, howmuch: int) -> _LongResp: ... + def apop(self, user: str, password: str) -> bytes: + """Authorisation + +- only possible if server has supplied a timestamp in initial greeting. + +Args: + user - mailbox user; + password - mailbox password. + +NB: mailbox is locked by server from here to 'quit()' +""" + def top(self, which: Any, howmuch: int) -> _LongResp: + """Retrieve message header of message number 'which' +and first 'howmuch' lines of message body. + +Result is in form ['response', ['line', ...], octets]. +""" @overload - def uidl(self) -> _LongResp: ... + def uidl(self) -> _LongResp: + """Return message digest (unique id) list. + +If 'which', result contains unique id for that message +in the form 'response mesgnum uid', otherwise result is +the list ['response', ['mesgnum uid', ...], octets] +""" @overload def uidl(self, which: Any) -> bytes: ... - def utf8(self) -> bytes: ... - def capa(self) -> dict[str, _list[str]]: ... - def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... + def utf8(self) -> bytes: + """Try to enter UTF-8 mode (see RFC 6856). Returns server response. + """ + def capa(self) -> dict[str, _list[str]]: + """Return server capabilities (RFC 2449) as a dictionary +>>> c=poplib.POP3('localhost') +>>> c.capa() +{'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'], + 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [], + 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [], + 'UIDL': [], 'RESP-CODES': []} +>>> + +Really, according to RFC 2449, the cyrus folks should avoid +having the implementation split into multiple arguments... +""" + def stls(self, context: ssl.SSLContext | None = None) -> bytes: + """Start a TLS session on the active connection as specified in RFC 2595. + +context - a ssl.SSLContext +""" class POP3_SSL(POP3): + """POP3 client class over SSL connection + +Instantiate with: POP3_SSL(hostname, port=995, context=None) + + hostname - the hostname of the pop3 over ssl server + port - port number + context - a ssl.SSLContext + +See the methods of the parent class POP3 for more documentation. 
+""" if sys.version_info >= (3, 12): def __init__( self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None ) -> None: ... - def stls(self, context: Any = None) -> NoReturn: ... + def stls(self, context: Any = None) -> NoReturn: + """The method unconditionally raises an exception since the +STLS command doesn't make any sense on an already established +SSL/TLS session. +""" else: def __init__( self, @@ -69,4 +210,8 @@ class POP3_SSL(POP3): ) -> None: ... # "context" is actually the last argument, # but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: + """The method unconditionally raises an exception since the + STLS command doesn't make any sense on an already established + SSL/TLS session. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi index 6d0d76ab82176..9d62e94b5399d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi @@ -1,3 +1,8 @@ +"""This module provides access to operating system functionality that is +standardized by the C Standard and the POSIX standard (a thinly +disguised Unix interface). Refer to the library manual and +corresponding Unix manual entries for more information on calls. +""" import sys if sys.platform != "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi index 84e1b1e028bde..1d0284b8f6e8d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi @@ -1,3 +1,14 @@ +"""Common operations on Posix pathnames. + +Instead of importing this module directly, import os and refer to +this module as os.path. The "os.path" name is an alias for this +module on Posix systems; on other systems (e.g. Windows), +os.path provides the same operations in a manner specific to that +platform, and is an alias to another module (e.g. ntpath). + +Some of this can actually be useful on non-Posix systems too, e.g. +for manipulation of the pathname component of URLs. +""" import sys from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Iterable @@ -82,35 +93,53 @@ devnull: LiteralString # Overloads are necessary to work around python/mypy#17952 & python/mypy#11880 @overload -def abspath(path: PathLike[AnyStr]) -> AnyStr: ... +def abspath(path: PathLike[AnyStr]) -> AnyStr: + """Return an absolute path. +""" @overload def abspath(path: AnyStr) -> AnyStr: ... @overload -def basename(p: PathLike[AnyStr]) -> AnyStr: ... +def basename(p: PathLike[AnyStr]) -> AnyStr: + """Returns the final component of a pathname +""" @overload def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def dirname(p: PathLike[AnyStr]) -> AnyStr: ... +def dirname(p: PathLike[AnyStr]) -> AnyStr: + """Returns the directory component of a pathname +""" @overload def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def expanduser(path: PathLike[AnyStr]) -> AnyStr: ... +def expanduser(path: PathLike[AnyStr]) -> AnyStr: + """Expand ~ and ~user constructions. If user or $HOME is unknown, +do nothing. +""" @overload def expanduser(path: AnyStr) -> AnyStr: ... 
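Editor's note: because posixpath always applies "/" rules regardless of the host platform, the helpers stubbed above are also convenient for the path component of URLs, as the module docstring notes. A minimal sketch, not part of the vendored stub; the example paths are arbitrary.

    import posixpath

    # basename/dirname are pure string operations; no filesystem access.
    posixpath.basename("/usr/local/lib/libpython3.so")  # 'libpython3.so'
    posixpath.dirname("/usr/local/lib/libpython3.so")   # '/usr/local/lib'

    # expanduser consults $HOME (or the password database) to resolve "~".
    posixpath.expanduser("~/notes.txt")                 # e.g. '/home/alice/notes.txt'
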
@overload -def expandvars(path: PathLike[AnyStr]) -> AnyStr: ... +def expandvars(path: PathLike[AnyStr]) -> AnyStr: + """Expand shell variables of form $var and ${var}. Unknown variables +are left unchanged. +""" @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload -def normcase(s: PathLike[AnyStr]) -> AnyStr: ... +def normcase(s: PathLike[AnyStr]) -> AnyStr: + """Normalize case of pathname. Has no effect under Posix +""" @overload def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def normpath(path: PathLike[AnyStr]) -> AnyStr: ... +def normpath(path: PathLike[AnyStr]) -> AnyStr: + """Normalize path, eliminating double slashes, etc. +""" @overload def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload -def commonpath(paths: Iterable[LiteralString]) -> LiteralString: ... +def commonpath(paths: Iterable[LiteralString]) -> LiteralString: + """Given a sequence of path names, returns the longest common sub-path. +""" @overload def commonpath(paths: Iterable[StrPath]) -> str: ... @overload @@ -120,41 +149,77 @@ def commonpath(paths: Iterable[BytesPath]) -> bytes: ... # but must be defined as pos-only in the stub or cross-platform code doesn't type-check, # as the parameter name is different in ntpath.join() @overload -def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: ... +def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: + """Join two or more pathname components, inserting '/' as needed. +If any component is an absolute path, all previous path components +will be discarded. An empty last part will result in a path that +ends with a separator. +""" @overload def join(a: StrPath, /, *paths: StrPath) -> str: ... @overload def join(a: BytesPath, /, *paths: BytesPath) -> bytes: ... @overload -def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: ... +def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: + """Return the canonical path of the specified filename, eliminating any +symbolic links encountered in the path. +""" @overload def realpath(filename: AnyStr, *, strict: bool | _AllowMissingType = False) -> AnyStr: ... @overload -def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: + """Return a relative version of a path +""" @overload def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... @overload def relpath(path: StrPath, start: StrPath | None = None) -> str: ... @overload -def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: + """Split a pathname. Returns tuple "(head, tail)" where "tail" is +everything after the final slash. Either part may be empty. +""" @overload def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload -def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: + """Split a pathname into drive and path. On Posix, drive is always +empty. +""" @overload def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload -def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: + """Split the extension from a pathname. + +Extension is everything from the last dot to the end, ignoring +leading dots. 
Returns "(root, ext)"; ext may be empty. +""" @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... -def isabs(s: StrOrBytesPath) -> bool: ... -def islink(path: FileDescriptorOrPath) -> bool: ... -def ismount(path: FileDescriptorOrPath) -> bool: ... -def lexists(path: FileDescriptorOrPath) -> bool: ... +def isabs(s: StrOrBytesPath) -> bool: + """Test whether a path is absolute +""" +def islink(path: FileDescriptorOrPath) -> bool: + """Test whether a path is a symbolic link +""" +def ismount(path: FileDescriptorOrPath) -> bool: + """Test whether a path is a mount point +""" +def lexists(path: FileDescriptorOrPath) -> bool: + """Test whether a path exists. Returns True for broken symbolic links +""" if sys.version_info >= (3, 12): - def isjunction(path: StrOrBytesPath) -> bool: ... + def isjunction(path: StrOrBytesPath) -> bool: + """Test whether a path is a junction +Junctions are not supported on the current platform +""" @overload - def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: ... + def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: + """Split a pathname into drive, root and tail. + +The tail contains anything after the root. +""" @overload def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi index 1e80462e25657..ebe3dd14848ff 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi @@ -1,3 +1,28 @@ +"""Support to pretty-print lists, tuples, & dictionaries recursively. + +Very simple, but useful, especially in debugging data structures. + +Classes +------- + +PrettyPrinter() + Handle pretty-printing operations onto a stream using a configured + set of formatting parameters. + +Functions +--------- + +pformat() + Format a Python object into a pretty-printed representation. + +pprint() + Pretty-print a Python object to a stream [default is sys.stdout]. + +saferepr() + Generate a 'standard' repr()-like value, but protect against recursive + data structures. + +""" import sys from _typeshed import SupportsWrite from collections import deque @@ -15,7 +40,9 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> str: ... + ) -> str: + """Format a Python object into a pretty-printed representation. +""" else: def pformat( @@ -26,7 +53,9 @@ else: *, compact: bool = False, sort_dicts: bool = True, - ) -> str: ... + ) -> str: + """Format a Python object into a pretty-printed representation. +""" if sys.version_info >= (3, 10): def pp( @@ -39,7 +68,9 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = False, underscore_numbers: bool = False, - ) -> None: ... + ) -> None: + """Pretty-print a Python object +""" else: def pp( @@ -51,7 +82,9 @@ else: *, compact: bool = False, sort_dicts: bool = False, - ) -> None: ... + ) -> None: + """Pretty-print a Python object +""" if sys.version_info >= (3, 10): def pprint( @@ -64,7 +97,9 @@ if sys.version_info >= (3, 10): compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> None: ... + ) -> None: + """Pretty-print a Python object to a stream [default is sys.stdout]. +""" else: def pprint( @@ -76,11 +111,19 @@ else: *, compact: bool = False, sort_dicts: bool = True, - ) -> None: ... 
+ ) -> None: + """Pretty-print a Python object to a stream [default is sys.stdout]. +""" -def isreadable(object: object) -> bool: ... -def isrecursive(object: object) -> bool: ... -def saferepr(object: object) -> str: ... +def isreadable(object: object) -> bool: + """Determine if saferepr(object) is readable by eval(). +""" +def isrecursive(object: object) -> bool: + """Determine if object requires a recursive representation. +""" +def saferepr(object: object) -> str: + """Version of repr() which can handle recursive data structures. +""" class PrettyPrinter: if sys.version_info >= (3, 10): @@ -94,7 +137,33 @@ class PrettyPrinter: compact: bool = False, sort_dicts: bool = True, underscore_numbers: bool = False, - ) -> None: ... + ) -> None: + """Handle pretty printing operations onto a stream using a set of +configured parameters. + +indent + Number of spaces to indent for each level of nesting. + +width + Attempted maximum number of columns in the output. + +depth + The maximum depth to print out nested structures. + +stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. + +compact + If true, several items will be combined in one line. + +sort_dicts + If true, dict keys are sorted. + +underscore_numbers + If true, digit groups are separated with underscores. + +""" else: def __init__( self, @@ -105,13 +174,40 @@ class PrettyPrinter: *, compact: bool = False, sort_dicts: bool = True, - ) -> None: ... + ) -> None: + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. + + compact + If true, several items will be combined in one line. + + sort_dicts + If true, dict keys are sorted. + + """ def pformat(self, object: object) -> str: ... def pprint(self, object: object) -> None: ... def isreadable(self, object: object) -> bool: ... def isrecursive(self, object: object) -> bool: ... - def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... + def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: + """Format object for a specific context, returning a string +and flags indicating whether the representation is 'readable' +and whether the object represents a recursive construct. +""" def _format( self, object: object, stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int ) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi index 696193d9dc169..ff85e2dc18b1b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi @@ -1,3 +1,5 @@ +"""Class for profiling Python code. +""" from _typeshed import StrOrBytesPath from collections.abc import Callable, Mapping from typing import Any, TypeVar @@ -5,16 +7,68 @@ from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... 
+def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: + """Run statement under profiler optionally saving results in filename + +This function takes a single argument that can be passed to the +"exec" statement, and an optional file name. In all cases this +routine attempts to "exec" its first argument and gather profiling +statistics from the execution. If no file name is present, then this +function automatically prints a simple profiling report, sorted by the +standard name string (file/line/function-name) that is presented in +each line. +""" def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 -) -> None: ... +) -> None: + """Run statement under profiler, supplying your own globals and locals, +optionally saving results in filename. + +statement and filename have the same semantics as profile.run +""" _T = TypeVar("_T") _P = ParamSpec("_P") _Label: TypeAlias = tuple[str, int, str] class Profile: + """Profiler class. + +self.cur is always a tuple. Each such tuple corresponds to a stack +frame that is currently active (self.cur[-2]). The following are the +definitions of its members. We use this external "parallel stack" to +avoid contaminating the program that we are profiling. (old profiler +used to write into the frames local dictionary!!) Derived classes +can change the definition of some entries, as long as they leave +[-2:] intact (frame and previous tuple). In case an internal error is +detected, the -3 element is used as the function name. + +[ 0] = Time that needs to be charged to the parent frame's function. + It is used so that a function call will not have to access the + timing data for the parent frame. +[ 1] = Total time spent in this frame's function, excluding time in + subfunctions (this latter is tallied in cur[2]). +[ 2] = Total time spent in subfunctions, excluding time executing the + frame's function (this latter is tallied in cur[1]). +[-3] = Name of the function that corresponds to this frame. +[-2] = Actual frame that we correspond to (used to sync exception handling). +[-1] = Our parent 6-tuple (corresponds to frame.f_back). + +Timing data for each function is stored as a 5-tuple in the dictionary +self.timings[]. The index is always the name stored in self.cur[-3]. +The following are the definitions of the members: + +[0] = The number of times this function was called, not counting direct + or indirect recursion, +[1] = Number of times this function appears on the stack, minus one +[2] = Total time spent internal to this function +[3] = Cumulative time that this function was present on the stack. In + non-recursive functions, this is the total execution time from start + to finish of each invocation of a function, including time spent in + all subfunctions. +[4] = A dictionary indicating for each function name, the number of times + it was called by us. +""" bias: int stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi index c4dee1f6b8f69..474a21e4d2fe8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi @@ -1,3 +1,5 @@ +"""Class for printing reports on profiled python code. 
+""" import sys from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -18,6 +20,8 @@ _Selector: TypeAlias = str | float | int if sys.version_info >= (3, 11): class SortKey(StrEnum): + """An enumeration. +""" CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -30,6 +34,8 @@ if sys.version_info >= (3, 11): else: class SortKey(str, Enum): + """An enumeration. +""" CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -42,6 +48,8 @@ else: @dataclass(unsafe_hash=True) class FunctionProfile: + """FunctionProfile(ncalls: str, tottime: float, percall_tottime: float, cumtime: float, percall_cumtime: float, file_name: str, line_number: int) +""" ncalls: str tottime: float percall_tottime: float @@ -52,12 +60,45 @@ class FunctionProfile: @dataclass(unsafe_hash=True) class StatsProfile: + """Class for keeping track of an item in inventory. +""" total_tt: float func_profiles: dict[str, FunctionProfile] _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] class Stats: + """This class is used for creating reports from data generated by the +Profile class. It is a "friend" of that class, and imports data either +by direct access to members of Profile class, or by reading in a dictionary +that was emitted (via marshal) from the Profile class. + +The big change from the previous Profiler (in terms of raw functionality) +is that an "add()" method has been provided to combine Stats from +several distinct profile runs. Both the constructor and the add() +method now take arbitrarily many file names as arguments. + +All the print methods now take an argument that indicates how many lines +to print. If the arg is a floating-point number between 0 and 1.0, then +it is taken as a decimal percentage of the available lines to be printed +(e.g., .1 means print 10% of all available lines). If it is an integer, +it is taken to mean the number of lines of data that you wish to have +printed. + +The sort_stats() method now processes some additional options (i.e., in +addition to the old -1, 0, 1, or 2 that are respectively interpreted as +'stdname', 'calls', 'time', and 'cumulative'). It takes either an +arbitrary number of quoted strings or SortKey enum to select the sort +order. + +For example sort_stats('time', 'name') or sort_stats(SortKey.TIME, +SortKey.NAME) sorts on the major key of 'internal function time', and on +the minor key of 'the name of the function'. Look at the two tables in +sort_stats() and get_sort_arg_defs(self) for more examples. + +All methods return self, so you can string together commands like: + Stats('foo', 'goo').strip_dirs().sort_stats('calls'). print_stats(5).print_callers(5) +""" sort_arg_dict_default: _SortArgDict def __init__( self, @@ -70,8 +111,12 @@ class Stats: def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... def get_top_level_stats(self) -> None: ... def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... - def dump_stats(self, filename: StrOrBytesPath) -> None: ... - def get_sort_arg_defs(self) -> _SortArgDict: ... + def dump_stats(self, filename: StrOrBytesPath) -> None: + """Write the profile data to a file we know how to load back. +""" + def get_sort_arg_defs(self) -> _SortArgDict: + """Expand all abbreviations that are unique. +""" @overload def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload @@ -80,7 +125,12 @@ class Stats: def strip_dirs(self) -> Self: ... def calc_callees(self) -> None: ... 
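Editor's note: the chaining style described in the Stats docstring above, combined with the SortKey enum stubbed earlier in this file, looks roughly like the following. A sketch only, not part of the vendored stub; the "prof.out" filename and the profiled statement are arbitrary.

    import cProfile
    import pstats

    # Profile a statement and dump the raw timing data to a file.
    cProfile.run("sum(i * i for i in range(10_000))", "prof.out")

    # Load it back, strip directory prefixes, sort by cumulative time,
    # and print the ten most expensive entries, as described above.
    stats = pstats.Stats("prof.out")
    stats.strip_dirs().sort_stats(pstats.SortKey.CUMULATIVE).print_stats(10)
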
def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... - def get_stats_profile(self) -> StatsProfile: ... + def get_stats_profile(self) -> StatsProfile: + """This method returns an instance of StatsProfile, which contains a mapping +of function names to instances of FunctionProfile. Each FunctionProfile +instance holds information related to the function's profile such as how +long the function took to run, how many times it was called, etc... +""" def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... def print_stats(self, *amount: _Selector) -> Self: ... def print_callees(self, *amount: _Selector) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi index d1c78f9e3dd67..52a1824fd7a53 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi @@ -1,3 +1,5 @@ +"""Pseudo terminal utilities. +""" import sys from collections.abc import Callable, Iterable from typing import Final @@ -12,17 +14,43 @@ if sys.platform != "win32": STDERR_FILENO: Final = 2 CHILD: Final = 0 - def openpty() -> tuple[int, int]: ... + def openpty() -> tuple[int, int]: + """openpty() -> (master_fd, slave_fd) +Open a pty master/slave pair, using os.openpty() if possible. +""" if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") - def master_open() -> tuple[int, str]: ... + def master_open() -> tuple[int, str]: + """master_open() -> (master_fd, slave_name) +Open a pty master and return the fd, and the filename of the slave end. +Deprecated, use openpty() instead. +""" @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") - def slave_open(tty_name: str) -> int: ... + def slave_open(tty_name: str) -> int: + """slave_open(tty_name) -> slave_fd +Open the pty slave and acquire the controlling terminal, returning +opened filedescriptor. +Deprecated, use openpty() instead. +""" else: - def master_open() -> tuple[int, str]: ... - def slave_open(tty_name: str) -> int: ... + def master_open() -> tuple[int, str]: + """master_open() -> (master_fd, slave_name) + Open a pty master and return the fd, and the filename of the slave end. + Deprecated, use openpty() instead. +""" + def slave_open(tty_name: str) -> int: + """slave_open(tty_name) -> slave_fd + Open the pty slave and acquire the controlling terminal, returning + opened filedescriptor. + Deprecated, use openpty() instead. +""" - def fork() -> tuple[int, int]: ... - def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... + def fork() -> tuple[int, int]: + """fork() -> (pid, master_fd) +Fork and make the child a session leader with a controlling terminal. +""" + def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: + """Create a spawned process. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi index a84ba324718af..5a257d0b571ba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi @@ -1,3 +1,12 @@ +"""This module provides access to the Unix password database. +It is available on all Unix versions. 
+ +Password database entries are reported as 7-tuples containing the following +items from the password database (see `'), in order: +pw_name, pw_passwd, pw_uid, pw_gid, pw_gecos, pw_dir, pw_shell. +The uid and gid items are integers, all others are strings. An +exception is raised if the entry asked for cannot be found. +""" import sys from _typeshed import structseq from typing import Any, Final, final @@ -5,24 +14,56 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): + """pwd.struct_passwd: Results from getpw*() routines. + +This object may be accessed either as a tuple of + (pw_name,pw_passwd,pw_uid,pw_gid,pw_gecos,pw_dir,pw_shell) +or via the object attributes as named in the above tuple. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("pw_name", "pw_passwd", "pw_uid", "pw_gid", "pw_gecos", "pw_dir", "pw_shell") @property - def pw_name(self) -> str: ... + def pw_name(self) -> str: + """user name +""" @property - def pw_passwd(self) -> str: ... + def pw_passwd(self) -> str: + """password +""" @property - def pw_uid(self) -> int: ... + def pw_uid(self) -> int: + """user id +""" @property - def pw_gid(self) -> int: ... + def pw_gid(self) -> int: + """group id +""" @property - def pw_gecos(self) -> str: ... + def pw_gecos(self) -> str: + """real name +""" @property - def pw_dir(self) -> str: ... + def pw_dir(self) -> str: + """home directory +""" @property - def pw_shell(self) -> str: ... + def pw_shell(self) -> str: + """shell program +""" + + def getpwall() -> list[struct_passwd]: + """Return a list of all available password database entries, in arbitrary order. + +See help(pwd) for more on password database entries. +""" + def getpwuid(uid: int, /) -> struct_passwd: + """Return the password database entry for the given numeric user ID. + +See `help(pwd)` for more on password database entries. +""" + def getpwnam(name: str, /) -> struct_passwd: + """Return the password database entry for the given user name. - def getpwall() -> list[struct_passwd]: ... - def getpwuid(uid: int, /) -> struct_passwd: ... - def getpwnam(name: str, /) -> struct_passwd: ... +See `help(pwd)` for more on password database entries. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi index 334ce79b5dd04..50eaf660cd072 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi @@ -1,3 +1,7 @@ +"""Routine to "compile" a .py file to a .pyc file. + +This module has intimate knowledge of the format of .pyc files. +""" import enum import sys from typing import AnyStr @@ -5,6 +9,32 @@ from typing import AnyStr __all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] class PyCompileError(Exception): + """Exception raised when an error occurs while attempting to +compile the file. 
+ +To raise this exception, use + + raise PyCompileError(exc_type,exc_value,file[,msg]) + +where + + exc_type: exception type to be used in error message + type name can be accesses as class variable + 'exc_type_name' + + exc_value: exception value to be used in error message + can be accesses as class variable 'exc_value' + + file: name of file being compiled to be used in error message + can be accesses as class variable 'file' + + msg: string message to be written as error message + If no value is given, a default exception message will be + given, consistent with 'standard' py_compile output. + message (or default) can be accesses as class variable + 'msg' + +""" exc_type_name: str exc_value: BaseException file: str @@ -12,6 +42,8 @@ class PyCompileError(Exception): def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): + """An enumeration. +""" TIMESTAMP = 1 CHECKED_HASH = 2 UNCHECKED_HASH = 3 @@ -25,10 +57,63 @@ def compile( optimize: int = -1, invalidation_mode: PycInvalidationMode | None = None, quiet: int = 0, -) -> AnyStr | None: ... +) -> AnyStr | None: + """Byte-compile one Python source file to Python bytecode. + +:param file: The source file name. +:param cfile: The target byte compiled file name. When not given, this + defaults to the PEP 3147/PEP 488 location. +:param dfile: Purported file name, i.e. the file name that shows up in + error messages. Defaults to the source file name. +:param doraise: Flag indicating whether or not an exception should be + raised when a compile error is found. If an exception occurs and this + flag is set to False, a string indicating the nature of the exception + will be printed, and the function will return to the caller. If an + exception occurs and this flag is set to True, a PyCompileError + exception will be raised. +:param optimize: The optimization level for the compiler. Valid values + are -1, 0, 1 and 2. A value of -1 means to use the optimization + level of the current interpreter, as given by -O command line options. +:param invalidation_mode: +:param quiet: Return full output with False or 0, errors only with 1, + and no output with 2. + +:return: Path to the resulting byte compiled file. + +Note that it isn't necessary to byte-compile Python modules for +execution efficiency -- Python itself byte-compiles a module when +it is loaded, and if it can, writes out the bytecode to the +corresponding .pyc file. + +However, if a Python installation is shared between users, it is a +good idea to byte-compile all modules upon installation, since +other users may not be able to write in the source directories, +and thus they won't be able to write the .pyc file, and then +they would be byte-compiling every module each time it is loaded. +This can slow down program start-up considerably. + +See compileall.py for a script/module that uses this module to +byte-compile all installed files (or all files in selected +directories). + +Do note that FileExistsError is raised if cfile ends up pointing at a +non-regular file or symlink. Because the compilation uses a file renaming, +the resulting file would be regular and thus not the same type of file as +it was previously. +""" if sys.version_info >= (3, 10): def main() -> None: ... else: - def main(args: list[str] | None = None) -> int: ... + def main(args: list[str] | None = None) -> int: + """Compile several source files. 
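A minimal sketch of py_compile.compile() as documented above (illustrative only; `example.py` is a made-up file name):

    import py_compile

    try:
        # Writes the bytecode to the PEP 3147 __pycache__ location and returns its path.
        pyc_path = py_compile.compile("example.py", doraise=True, quiet=1)
        print("wrote", pyc_path)
    except py_compile.PyCompileError as err:
        print("compilation failed:", err.msg)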
+ + The files named in 'args' (or on the command line, if 'args' is + not specified) are compiled and the resulting bytecode is cached + in the normal manner. This function does not search a directory + structure to locate source files; it only compiles files named + explicitly. If '-' is the only parameter in args, the list of + files is taken from standard input. + + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi index 504a5d5f115a0..b6e3bb27bb21a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi @@ -1,9 +1,53 @@ +"""Parse a Python module and describe its classes and functions. + +Parse enough of a Python file to recognize imports and class and +function definitions, and to find out the superclasses of a class. + +The interface consists of a single function: + readmodule_ex(module, path=None) +where module is the name of a Python module, and path is an optional +list of directories where the module is to be searched. If present, +path is prepended to the system search path sys.path. The return value +is a dictionary. The keys of the dictionary are the names of the +classes and functions defined in the module (including classes that are +defined via the from XXX import YYY construct). The values are +instances of classes Class and Function. One special key/value pair is +present for packages: the key '__path__' has a list as its value which +contains the package search path. + +Classes and Functions have a common superclass: _Object. Every instance +has the following attributes: + module -- name of the module; + name -- name of the object; + file -- file in which the object is defined; + lineno -- line in the file where the object's definition starts; + end_lineno -- line in the file where the object's definition ends; + parent -- parent of this object, if any; + children -- nested objects contained in this object. +The 'children' attribute is a dictionary mapping names to objects. + +Instances of Function describe functions with the attributes from _Object, +plus the following: + is_async -- if a function is defined with an 'async' prefix + +Instances of Class describe classes with the attributes from _Object, +plus the following: + super -- list of super classes (Class instances if possible); + methods -- mapping of method names to beginning line numbers. +If the name of a super class is not recognized, the corresponding +entry in the list of super classes is not a class instance but a +string giving the name of the super class. Since import statements +are recognized and imported modules are scanned as well, this +shouldn't happen often. +""" import sys from collections.abc import Mapping, Sequence __all__ = ["readmodule", "readmodule_ex", "Class", "Function"] class _Object: + """Information about Python class or function. +""" module: str name: str file: int @@ -26,6 +70,8 @@ class _Object: def __init__(self, module: str, name: str, file: str, lineno: int, parent: _Object | None) -> None: ... class Function(_Object): + """Information about a Python function, including methods. +""" if sys.version_info >= (3, 10): is_async: bool @@ -48,6 +94,8 @@ class Function(_Object): def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... class Class(_Object): + """Information about a Python class. 
+""" super: list[Class | str] | None methods: dict[str, int] parent: Class | None @@ -70,5 +118,15 @@ class Class(_Object): self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None ) -> None: ... -def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... -def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: + """Return Class objects for the top-level classes in module. + +This is the original interface, before Functions were added. +""" +def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: + """Return a dictionary with all functions and classes in module. + +Search for module in PATH + sys.path. +If possible, include imported superclasses. +Do this by reading source, without importing (and executing) it. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi index 935f9420f88c0..62f58ffaf7987 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi @@ -1,3 +1,41 @@ +"""Generate Python documentation in HTML or text for interactive use. + +At the Python interactive prompt, calling help(thing) on a Python object +documents the object, and calling help() starts up an interactive +help session. + +Or, at the shell command line outside of Python: + +Run "pydoc " to show documentation on something. may be +the name of a function, module, package, or a dotted reference to a +class or function within a module or module in a package. If the +argument contains a path segment delimiter (e.g. slash on Unix, +backslash on Windows) it is treated as the path to a Python source file. + +Run "pydoc -k " to search for a keyword in the synopsis lines +of all available modules. + +Run "pydoc -n " to start an HTTP server with the given +hostname (default: localhost) on the local machine. + +Run "pydoc -p " to start an HTTP server on the given port on the +local machine. Port number 0 can be used to get an arbitrary unused port. + +Run "pydoc -b" to start an HTTP server on an arbitrary unused port and +open a web browser to interactively browse documentation. Combine with +the -n and -p options to control the hostname and port used. + +Run "pydoc -w " to write out the HTML documentation for a module +to a file named ".html". + +Module docs for core modules are assumed to be in + + https://docs.python.org/X.Y/library/ + +This can be overridden by setting the PYTHONDOCS environment variable +to a different URL or to a local directory containing the Library +Reference Manual pages. +""" import sys from _typeshed import OptExcInfo, SupportsWrite, Unused from abc import abstractmethod @@ -21,57 +59,117 @@ __credits__: Final[str] class _Pager(Protocol): def __call__(self, text: str, title: str = "") -> None: ... -def pathdirs() -> list[str]: ... -def getdoc(object: object) -> str: ... -def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ... -def classname(object: object, modname: str) -> str: ... -def isdata(object: object) -> bool: ... -def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... -def cram(text: str, maxlen: int) -> str: ... -def stripid(text: str) -> str: ... +def pathdirs() -> list[str]: + """Convert sys.path into a list of absolute, existing, unique paths. 
+""" +def getdoc(object: object) -> str: + """Get the doc string or comments for an object. +""" +def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: + """Split a doc string into a synopsis line (if any) and the rest. +""" +def classname(object: object, modname: str) -> str: + """Get a class name and qualify it with a module name if necessary. +""" +def isdata(object: object) -> bool: + """Check if an object is of a type that probably means it's data. +""" +def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: + """Do a series of global replacements on a string. +""" +def cram(text: str, maxlen: int) -> str: + """Omit part of a string if needed to make it fit in a maximum length. +""" +def stripid(text: str) -> str: + """Remove the hexadecimal id from a Python object representation. +""" def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... -def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... -def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: + """Decide whether to show documentation on a variable. +""" +def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: + """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods. +""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13.") - def ispackage(path: str) -> bool: ... # undocumented + def ispackage(path: str) -> bool: # undocumented + """Guess whether a path refers to a package directory. +""" else: - def ispackage(path: str) -> bool: ... # undocumented + def ispackage(path: str) -> bool: # undocumented + """Guess whether a path refers to a package directory. +""" -def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... -def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: ... +def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: + """Return the one-line summary of a file object, if present +""" +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: + """Get the one-line summary out of a module file. +""" class ErrorDuringImport(Exception): + """Errors that occurred while trying to import something to document it. +""" filename: str exc: type[BaseException] | None value: BaseException | None tb: TracebackType | None def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... -def importfile(path: str) -> ModuleType: ... -def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: ... +def importfile(path: str) -> ModuleType: + """Import a Python source file or compiled file given its path. +""" +def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: + """Import a module; handle errors; return None if the module isn't found. + +If the module *is* found but an exception occurs, it's wrapped in an +ErrorDuringImport exception and reraised. Unlike __import__, if a +package path is specified, the module at the end of the path is returned, +not the package at the beginning. If the optional 'forceload' argument +is 1, we reload the module from disk (unless it's a dynamic extension). +""" class Doc: PYTHONDOCS: str - def document(self, object: object, name: str | None = None, *args: Any) -> str: ... - def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... 
+ def document(self, object: object, name: str | None = None, *args: Any) -> str: + """Generate documentation for an object. +""" + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... + def docother(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" @abstractmethod - def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... - def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: + """Raise an exception for unimplemented types. +""" + def getdocloc(self, object: object, basedir: str = ...) -> str | None: + """Return the location of module docs or None +""" class HTMLRepr(Repr): + """Class for safely making an HTML representation of a Python object. +""" def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... @@ -82,12 +180,18 @@ class HTMLRepr(Repr): def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): + """Formatter class for HTML documentation. +""" _repr_instance: HTMLRepr repr = _repr_instance.repr escape = _repr_instance.escape - def page(self, title: str, contents: str) -> str: ... + def page(self, title: str, contents: str) -> str: + """Format an HTML page. +""" if sys.version_info >= (3, 11): - def heading(self, title: str, extras: str = "") -> str: ... + def heading(self, title: str, extras: str = "") -> str: + """Format a page heading. +""" def section( self, title: str, @@ -97,10 +201,16 @@ class HTMLDoc(Doc): prelude: str = "", marginalia: str | None = None, gap: str = " ", - ) -> str: ... - def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... + ) -> str: + """Format a section with a heading. +""" + def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: + """Format a list of items into a multi-column list. +""" else: - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: + """Format a page heading. +""" def section( self, title: str, @@ -111,16 +221,32 @@ class HTMLDoc(Doc): prelude: str = "", marginalia: str | None = None, gap: str = " ", - ) -> str: ... 
- def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... - - def bigsection(self, title: str, *args: Any) -> str: ... - def preformat(self, text: str) -> str: ... + ) -> str: + """Format a section with a heading. +""" + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: + """Format a list of items into a multi-column list. +""" + + def bigsection(self, title: str, *args: Any) -> str: + """Format a section with a big heading. +""" + def preformat(self, text: str) -> str: + """Format literal preformatted text. +""" def grey(self, text: str) -> str: ... - def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... - def classlink(self, object: object, modname: str) -> str: ... - def modulelink(self, object: object) -> str: ... - def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: ... + def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: + """Make a link for an identifier, given name-to-URL mappings. +""" + def classlink(self, object: object, modname: str) -> str: + """Make a link for a class. +""" + def modulelink(self, object: object) -> str: + """Make a link for a module. +""" + def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: + """Make a link for a module or package to display in an index. +""" def markup( self, text: str, @@ -128,11 +254,18 @@ class HTMLDoc(Doc): funcs: Mapping[str, str] = {}, classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, - ) -> str: ... + ) -> str: + """Mark up some plain text, given a context of symbols to look for. +Each context dictionary maps object names to anchor names. +""" def formattree( self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None - ) -> str: ... - def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... + ) -> str: + """Produce HTML for a class tree as given by inspect.getclasstree(). +""" + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: + """Produce HTML documentation for a module object. +""" def docclass( self, object: object, @@ -141,9 +274,15 @@ class HTMLDoc(Doc): funcs: Mapping[str, str] = {}, classes: Mapping[str, str] = {}, *ignored: Unused, - ) -> str: ... - def formatvalue(self, object: object) -> str: ... - def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... + ) -> str: + """Produce HTML documentation for a class object. +""" + def formatvalue(self, object: object) -> str: + """Format an argument default value as text. +""" + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: + """Produce HTML documentation for a data object. +""" if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -155,13 +294,19 @@ class HTMLDoc(Doc): methods: Mapping[str, str] = {}, cl: type | None = None, homecls: type | None = None, - ) -> str: ... + ) -> str: + """Produce HTML documentation for a function or method object. +""" def docproperty( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: ... + ) -> str: + """Produce html documentation for a data descriptor. +""" def docdata( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: ... 
+ ) -> str: + """Produce html documentation for a data descriptor. +""" else: def docroutine( # type: ignore[override] self, @@ -172,16 +317,30 @@ class HTMLDoc(Doc): classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, cl: type | None = None, - ) -> str: ... - def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] - def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + ) -> str: + """Produce HTML documentation for a function or method object. +""" + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] + """Produce html documentation for a data descriptor. +""" + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] + """Produce html documentation for a data descriptor. +""" if sys.version_info >= (3, 11): - def parentlink(self, object: type | ModuleType, modname: str) -> str: ... + def parentlink(self, object: type | ModuleType, modname: str) -> str: + """Make a link for the enclosing class or module. +""" - def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... - def filelink(self, url: str, path: str) -> str: ... + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: + """Generate an HTML index for a directory of modules. +""" + def filelink(self, url: str, path: str) -> str: + """Make a link to source file. +""" class TextRepr(Repr): + """Class for safely making a text representation of a Python object. +""" def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... @@ -189,16 +348,30 @@ class TextRepr(Repr): def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): + """Formatter class for text documentation. +""" _repr_instance: TextRepr repr = _repr_instance.repr - def bold(self, text: str) -> str: ... - def indent(self, text: str, prefix: str = " ") -> str: ... - def section(self, title: str, contents: str) -> str: ... + def bold(self, text: str) -> str: + """Format a string in bold by overstriking. +""" + def indent(self, text: str, prefix: str = " ") -> str: + """Indent text by prepending a given prefix to each line. +""" + def section(self, title: str, contents: str) -> str: + """Format a section with a given heading. +""" def formattree( self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" - ) -> str: ... - def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: ... - def formatvalue(self, object: object) -> str: ... + ) -> str: + """Render in text a class tree as returned by inspect.getclasstree(). +""" + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: + """Produce text documentation for a given class object. +""" + def formatvalue(self, object: object) -> str: + """Format an argument default value as text. +""" if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -207,14 +380,22 @@ class TextDoc(Doc): mod: str | None = None, cl: Any | None = None, homecls: Any | None = None, - ) -> str: ... 
- def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: ... + ) -> str: + """Produce text documentation for a function or method object. +""" + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: + """Produce text documentation for a given module object. +""" def docproperty( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: ... + ) -> str: + """Produce text documentation for a data descriptor. +""" def docdata( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused - ) -> str: ... + ) -> str: + """Produce text documentation for a data descriptor. +""" def docother( self, object: object, @@ -224,12 +405,22 @@ class TextDoc(Doc): *ignored: Unused, maxlen: int | None = None, doc: Any | None = None, - ) -> str: ... + ) -> str: + """Produce text documentation for a data object. +""" else: - def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] - def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] - def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] - def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] + """Produce text documentation for a function or method object. +""" + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: # type: ignore[override] + """Produce text documentation for a given module object. +""" + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] + """Produce text documentation for a data descriptor. +""" + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] + """Produce text documentation for a data descriptor. +""" def docother( # type: ignore[override] self, object: object, @@ -238,24 +429,46 @@ class TextDoc(Doc): parent: str | None = None, maxlen: int | None = None, doc: Any | None = None, - ) -> str: ... + ) -> str: + """Produce text documentation for a data object. +""" if sys.version_info >= (3, 13): - def pager(text: str, title: str = "") -> None: ... + def pager(text: str, title: str = "") -> None: + """The first time this is called, determine what kind of pager to use. +""" else: - def pager(text: str) -> None: ... - -def plain(text: str) -> str: ... -def describe(thing: Any) -> str: ... -def locate(path: str, forceload: bool = ...) -> object: ... + def pager(text: str) -> None: + """The first time this is called, determine what kind of pager to use. +""" + +def plain(text: str) -> str: + """Remove boldface formatting from text. +""" +def describe(thing: Any) -> str: + """Produce a short description of the given thing. +""" +def locate(path: str, forceload: bool = ...) -> object: + """Locate an object by name or dotted path, importing as necessary. +""" if sys.version_info >= (3, 13): - def get_pager() -> _Pager: ... 
- def pipe_pager(text: str, cmd: str, title: str = "") -> None: ... - def tempfile_pager(text: str, cmd: str, title: str = "") -> None: ... - def tty_pager(text: str, title: str = "") -> None: ... - def plain_pager(text: str, title: str = "") -> None: ... + def get_pager() -> _Pager: + """Decide what method to use for paging through text. +""" + def pipe_pager(text: str, cmd: str, title: str = "") -> None: + """Page through text by feeding it to another program. +""" + def tempfile_pager(text: str, cmd: str, title: str = "") -> None: + """Page through text by invoking a program on a temporary file. +""" + def tty_pager(text: str, title: str = "") -> None: + """Page through text on a text terminal. +""" + def plain_pager(text: str, title: str = "") -> None: + """Simply print unformatted text. This is the ultimate fallback. +""" # For backwards compatibility. getpager = get_pager @@ -264,19 +477,33 @@ if sys.version_info >= (3, 13): ttypager = tty_pager plainpager = plain_pager else: - def getpager() -> Callable[[str], None]: ... - def pipepager(text: str, cmd: str) -> None: ... - def tempfilepager(text: str, cmd: str) -> None: ... - def ttypager(text: str) -> None: ... - def plainpager(text: str) -> None: ... + def getpager() -> Callable[[str], None]: + """Decide what method to use for paging through text. +""" + def pipepager(text: str, cmd: str) -> None: + """Page through text by feeding it to another program. +""" + def tempfilepager(text: str, cmd: str) -> None: + """Page through text by invoking a program on a temporary file. +""" + def ttypager(text: str) -> None: + """Page through text on a text terminal. +""" + def plainpager(text: str) -> None: + """Simply print unformatted text. This is the ultimate fallback. +""" text: TextDoc html: HTMLDoc -def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... +def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: + """Given an object or a path to an object, get the object and its name. +""" def render_doc( thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None -) -> str: ... +) -> str: + """Render text documentation, given an object or a path to an object. +""" if sys.version_info >= (3, 11): def doc( @@ -285,7 +512,9 @@ if sys.version_info >= (3, 11): forceload: bool = ..., output: SupportsWrite[str] | None = None, is_cli: bool = False, - ) -> None: ... + ) -> None: + """Display text documentation, given an object or a path to an object. +""" else: def doc( @@ -293,10 +522,16 @@ else: title: str = "Python Library Documentation: %s", forceload: bool = ..., output: SupportsWrite[str] | None = None, - ) -> None: ... + ) -> None: + """Display text documentation, given an object or a path to an object. +""" -def writedoc(thing: str | object, forceload: bool = ...) -> None: ... -def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... +def writedoc(thing: str | object, forceload: bool = ...) -> None: + """Write HTML documentation to a file in the current directory. +""" +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: + """Write out HTML documentation for all modules in a directory tree. +""" class Helper: keywords: dict[str, str | tuple[str, str]] @@ -309,7 +544,9 @@ class Helper: def output(self) -> IO[str]: ... def __call__(self, request: str | Helper | object = ...) -> None: ... def interact(self) -> None: ... - def getline(self, prompt: str) -> str: ... 
+ def getline(self, prompt: str) -> str: + """Read one line, using input() when appropriate. +""" if sys.version_info >= (3, 11): def help(self, request: Any, is_cli: bool = False) -> None: ... else: @@ -327,6 +564,8 @@ class Helper: help: Helper class ModuleScanner: + """An interruptible scanner that searches module synopses. +""" quit: bool def run( self, @@ -336,6 +575,10 @@ class ModuleScanner: onerror: Callable[[str], object] | None = None, ) -> None: ... -def apropos(key: str) -> None: ... +def apropos(key: str) -> None: + """Print all the one-line module summaries that contain a substring. +""" def ispath(x: object) -> TypeGuard[str]: ... -def cli() -> None: ... +def cli() -> None: + """Command-line interface (looks at sys.argv to decide what to do). +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi index 21e676052098d..70c6015b3d6a5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,3 +1,5 @@ +"""Python wrapper for Expat parser. +""" from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable from pyexpat import errors as errors, model as model @@ -19,16 +21,52 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: ... - def ParseFile(self, file: SupportsRead[bytes], /) -> int: ... - def SetBase(self, base: str, /) -> None: ... - def GetBase(self) -> str | None: ... - def GetInputContext(self) -> bytes | None: ... - def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: ... - def SetParamEntityParsing(self, flag: int, /) -> int: ... - def UseForeignDTD(self, flag: bool = True, /) -> None: ... - def GetReparseDeferralEnabled(self) -> bool: ... - def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: ... + """XML parser +""" + def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: + """Parse XML data. + +'isfinal' should be true at end of input. +""" + def ParseFile(self, file: SupportsRead[bytes], /) -> int: + """Parse XML data from file-like object. +""" + def SetBase(self, base: str, /) -> None: + """Set the base URL for the parser. +""" + def GetBase(self) -> str | None: + """Return base URL string for the parser. +""" + def GetInputContext(self) -> bytes | None: + """Return the untranslated text of the input that caused the current event. + +If the event was generated by a large amount of text (such as a start tag +for an element with many attributes), not all of the text may be available. +""" + def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: + """Create a parser for parsing an external entity based on the information passed to the ExternalEntityRefHandler. +""" + def SetParamEntityParsing(self, flag: int, /) -> int: + """Controls parsing of parameter entities (including the external DTD subset). + +Possible flag values are XML_PARAM_ENTITY_PARSING_NEVER, +XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE and +XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag +was successful. +""" + def UseForeignDTD(self, flag: bool = True, /) -> None: + """Allows the application to provide an artificial external subset if one is not specified as part of the document instance. 
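A minimal sketch of pydoc's programmatic entry points stubbed above (illustrative only; `json.dumps` is just a convenient documented object):

    import pydoc

    raw = pydoc.render_doc("json.dumps")        # help text with overstruck "bold"
    print(pydoc.plain(raw).splitlines()[0])     # first line of the rendered documentation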
+ +This readily allows the use of a 'default' document type controlled by the +application, while still getting the advantage of providing document type +information to the parser. 'flag' defaults to True if not provided. +""" + def GetReparseDeferralEnabled(self) -> bool: + """Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0. +""" + def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: + """Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0. +""" @property def intern(self) -> dict[str, str]: ... buffer_size: int @@ -72,11 +110,15 @@ class XMLParserType: ExternalEntityRefHandler: Callable[[str, str | None, str | None, str | None], int] | None SkippedEntityHandler: Callable[[str, bool], Any] | None -def ErrorString(code: int, /) -> str: ... +def ErrorString(code: int, /) -> str: + """Returns string error for given number. +""" # intern is undocumented def ParserCreate( encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None -) -> XMLParserType: ... +) -> XMLParserType: + """Return a new XML parser object. +""" expat_CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi index 493ae03456044..a95389c17d03d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi @@ -1,3 +1,5 @@ +"""Constants used to describe error conditions. +""" import sys from typing import Final from typing_extensions import LiteralString diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi index bac8f3692ce58..298a88cedd9e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi @@ -1,3 +1,5 @@ +"""Constants used to interpret content model information. +""" from typing import Final XML_CTYPE_ANY: Final = 2 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi index 65e2ac1559adf..5ba93db64c506 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi @@ -1,3 +1,5 @@ +"""A multi-producer, multi-consumer queue. +""" import sys from _queue import Empty as Empty, SimpleQueue as SimpleQueue from _typeshed import SupportsRichComparisonT @@ -11,12 +13,20 @@ if sys.version_info >= (3, 13): _T = TypeVar("_T") -class Full(Exception): ... +class Full(Exception): + """Exception raised by Queue.put(block=0)/put_nowait(). +""" if sys.version_info >= (3, 13): - class ShutDown(Exception): ... + class ShutDown(Exception): + """Raised when put/get with shut-down queue. +""" class Queue(Generic[_T]): + """Create a queue object with a given maximum size. + +If maxsize is <= 0, the queue size is infinite. +""" maxsize: int mutex: Lock # undocumented @@ -31,25 +41,122 @@ class Queue(Generic[_T]): queue: Any # undocumented def __init__(self, maxsize: int = 0) -> None: ... def _init(self, maxsize: int) -> None: ... - def empty(self) -> bool: ... - def full(self) -> bool: ... - def get(self, block: bool = True, timeout: float | None = None) -> _T: ... - def get_nowait(self) -> _T: ... + def empty(self) -> bool: + """Return True if the queue is empty, False otherwise (not reliable!). + +This method is likely to be removed at some point. 
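A minimal sketch of the pyexpat parser API stubbed above (illustrative only; the handler attributes assigned here are the standard Expat callback hooks):

    import pyexpat

    def start_element(name, attrs):
        print("start", name, attrs)

    def char_data(data):
        print("text", repr(data))

    parser = pyexpat.ParserCreate()
    parser.StartElementHandler = start_element
    parser.CharacterDataHandler = char_data
    parser.Parse("<greeting lang='en'>hello</greeting>", True)   # isfinal=True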
Use qsize() == 0 +as a direct substitute, but be aware that either approach risks a race +condition where a queue can grow before the result of empty() or +qsize() can be used. + +To create code that needs to wait for all queued tasks to be +completed, the preferred technique is to use the join() method. +""" + def full(self) -> bool: + """Return True if the queue is full, False otherwise (not reliable!). + +This method is likely to be removed at some point. Use qsize() >= n +as a direct substitute, but be aware that either approach risks a race +condition where a queue can shrink before the result of full() or +qsize() can be used. +""" + def get(self, block: bool = True, timeout: float | None = None) -> _T: + """Remove and return an item from the queue. + +If optional args 'block' is true and 'timeout' is None (the default), +block if necessary until an item is available. If 'timeout' is +a non-negative number, it blocks at most 'timeout' seconds and raises +the Empty exception if no item was available within that time. +Otherwise ('block' is false), return an item if one is immediately +available, else raise the Empty exception ('timeout' is ignored +in that case). + +Raises ShutDown if the queue has been shut down and is empty, +or if the queue has been shut down immediately. +""" + def get_nowait(self) -> _T: + """Remove and return an item from the queue without blocking. + +Only get an item if one is immediately available. Otherwise +raise the Empty exception. +""" if sys.version_info >= (3, 13): - def shutdown(self, immediate: bool = False) -> None: ... + def shutdown(self, immediate: bool = False) -> None: + """Shut-down the queue, making queue gets and puts raise ShutDown. + +By default, gets will only raise once the queue is empty. Set +'immediate' to True to make gets raise immediately instead. + +All blocked callers of put() and get() will be unblocked. If +'immediate', a task is marked as done for each item remaining in +the queue, which may unblock callers of join(). +""" def _get(self) -> _T: ... - def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... - def put_nowait(self, item: _T) -> None: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: + """Put an item into the queue. + +If optional args 'block' is true and 'timeout' is None (the default), +block if necessary until a free slot is available. If 'timeout' is +a non-negative number, it blocks at most 'timeout' seconds and raises +the Full exception if no free slot was available within that time. +Otherwise ('block' is false), put an item on the queue if a free slot +is immediately available, else raise the Full exception ('timeout' +is ignored in that case). + +Raises ShutDown if the queue has been shut down. +""" + def put_nowait(self, item: _T) -> None: + """Put an item into the queue without blocking. + +Only enqueue the item if a free slot is immediately available. +Otherwise raise the Full exception. +""" def _put(self, item: _T) -> None: ... - def join(self) -> None: ... - def qsize(self) -> int: ... + def join(self) -> None: + """Blocks until all items in the Queue have been gotten and processed. + +The count of unfinished tasks goes up whenever an item is added to the +queue. The count goes down whenever a consumer thread calls task_done() +to indicate the item was retrieved and all work on it is complete. + +When the count of unfinished tasks drops to zero, join() unblocks. 
+""" + def qsize(self) -> int: + """Return the approximate size of the queue (not reliable!). +""" def _qsize(self) -> int: ... - def task_done(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def task_done(self) -> None: + """Indicate that a formerly enqueued task is complete. + +Used by Queue consumer threads. For each get() used to fetch a task, +a subsequent call to task_done() tells the queue that the processing +on the task is complete. + +If a join() is currently blocking, it will resume when all items +have been processed (meaning that a task_done() call was received +for every item that had been put() into the queue). + +shutdown(immediate=True) calls task_done() for each remaining item in +the queue. + +Raises a ValueError if called more times than there were items +placed in the queue. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class PriorityQueue(Queue[SupportsRichComparisonT]): + """Variant of Queue that retrieves open entries in priority order (lowest first). + +Entries are typically tuples of the form: (priority number, data). +""" queue: list[SupportsRichComparisonT] class LifoQueue(Queue[_T]): + """Variant of Queue that retrieves most recently added entries first. +""" queue: list[_T] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi index be6892fcbcd78..bd56474b91b37 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi @@ -1,3 +1,5 @@ +"""Conversions to/from quoted-printable transport encoding as per RFC 1521. +""" from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite from typing import Protocol, type_check_only @@ -6,7 +8,19 @@ __all__ = ["encode", "decode", "encodestring", "decodestring"] @type_check_only class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... -def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: + """Read 'input', apply quoted-printable encoding, and write to 'output'. + +'input' and 'output' are binary file objects. The 'quotetabs' flag +indicates whether embedded tabs and spaces should be quoted. Note that +line-ending tabs and spaces are always encoded, as per RFC 1521. +The 'header' flag indicates whether we are encoding spaces as _ as per RFC +1522. +""" def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... -def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: + """Read 'input', apply quoted-printable decoding, and write to 'output'. +'input' and 'output' are binary file objects. +If 'header' is true, decode underscore as space (per RFC 1522). +""" def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi index a797794b8050f..358c4bc120563 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi @@ -1,3 +1,50 @@ +"""Random variable generators. 
+ + bytes + ----- + uniform bytes (values between 0 and 255) + + integers + -------- + uniform within range + + sequences + --------- + pick random element + pick random sample + pick weighted random sample + generate random permutation + + distributions on the real line: + ------------------------------ + uniform + triangular + normal (Gaussian) + lognormal + negative exponential + gamma + beta + pareto + Weibull + + distributions on the circle (angles 0 to 2pi) + --------------------------------------------- + circular uniform + von Mises + + discrete distributions + ---------------------- + binomial + + +General notes on the underlying Mersenne Twister core generator: + +* The period is 2**19937-1. +* It is one of the most extensively tested generators in existence. +* The random() method is implemented in C, executes in a single Python step, + and is, therefore, threadsafe. + +""" import _random import sys from _typeshed import SupportsLenAndGetItem @@ -40,8 +87,24 @@ if sys.version_info >= (3, 12): _T = TypeVar("_T") class Random(_random.Random): + """Random number generator base class used by bound module functions. + +Used to instantiate instances of Random to get generators that don't +share state. + +Class Random can also be subclassed if you want to use a different basic +generator of your own devising: in that case, override the following +methods: random(), seed(), getstate(), and setstate(). +Optionally, implement a getrandbits() method so that randrange() +can cover arbitrarily large ranges. + +""" VERSION: ClassVar[int] - def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: ... # noqa: Y041 + def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: # noqa: Y041 + """Initialize an instance. + +Optional argument x controls seeding, as for Random.seed(). +""" # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. @@ -49,13 +112,45 @@ class Random(_random.Random): # this is a workaround for pyright correctly flagging an inconsistent inherited constructor, see #14624 def __new__(cls, x: int | float | str | bytes | bytearray | None = None) -> Self: ... # noqa: Y041 - def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 - def getstate(self) -> tuple[Any, ...]: ... - def setstate(self, state: tuple[Any, ...]) -> None: ... - def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... - def randint(self, a: int, b: int) -> int: ... - def randbytes(self, n: int) -> bytes: ... - def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: # type: ignore[override] # noqa: Y041 + """Initialize internal state from a seed. + +The only supported seed types are None, int, float, +str, bytes, and bytearray. + +None or no argument seeds from current time or from an operating +system specific randomness source if available. + +If *a* is an int, all bits are used. + +For version 2 (the default), all of the bits are used if *a* is a str, +bytes, or bytearray. For version 1 (provided for reproducing random +sequences from older versions of Python), the algorithm for str and +bytes generates a narrower range of seeds. 
+ +""" + def getstate(self) -> tuple[Any, ...]: + """Return internal state; can be passed to setstate() later. +""" + def setstate(self, state: tuple[Any, ...]) -> None: + """Restore internal state from object returned by getstate(). +""" + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: + """Choose a random item from range(stop) or range(start, stop[, step]). + +Roughly equivalent to ``choice(range(start, stop, step))`` but +supports arbitrarily large ranges and is optimized for common cases. + +""" + def randint(self, a: int, b: int) -> int: + """Return random integer in range [a, b], including both end points. + """ + def randbytes(self, n: int) -> bytes: + """Generate n random bytes. +""" + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: + """Choose a random element from a non-empty sequence. +""" def choices( self, population: SupportsLenAndGetItem[_T], @@ -63,47 +158,271 @@ class Random(_random.Random): *, cum_weights: Sequence[float | Fraction] | None = None, k: int = 1, - ) -> list[_T]: ... + ) -> list[_T]: + """Return a k sized list of population elements chosen with replacement. + +If the relative weights or cumulative weights are not specified, +the selections are made with equal probability. + +""" if sys.version_info >= (3, 11): - def shuffle(self, x: MutableSequence[Any]) -> None: ... + def shuffle(self, x: MutableSequence[Any]) -> None: + """Shuffle list x in place, and return None. +""" else: - def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: + """Shuffle list x in place, and return None. + + Optional argument random is a 0-argument function returning a + random float in [0.0, 1.0); if it is the default None, the + standard random.random will be used. + + """ if sys.version_info >= (3, 11): - def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: + """Chooses k unique random elements from a population sequence. + +Returns a new list containing elements from the population while +leaving the original population unchanged. The resulting list is +in selection order so that all sub-slices will also be valid random +samples. This allows raffle winners (the sample) to be partitioned +into grand prize and second place winners (the subslices). + +Members of the population need not be hashable or unique. If the +population contains repeats, then each occurrence is a possible +selection in the sample. + +Repeated elements can be specified one at a time or with the optional +counts parameter. For example: + + sample(['red', 'blue'], counts=[4, 2], k=5) + +is equivalent to: + + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + +To choose a sample from a range of integers, use range() for the +population argument. This is especially fast and space efficient +for sampling from a large population: + + sample(range(10000000), 60) + +""" else: def sample( self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None - ) -> list[_T]: ... + ) -> list[_T]: + """Chooses k unique random elements from a population sequence or set. + + Returns a new list containing elements from the population while + leaving the original population unchanged. 
The resulting list is + in selection order so that all sub-slices will also be valid random + samples. This allows raffle winners (the sample) to be partitioned + into grand prize and second place winners (the subslices). + + Members of the population need not be hashable or unique. If the + population contains repeats, then each occurrence is a possible + selection in the sample. + + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: + + sample(['red', 'blue'], counts=[4, 2], k=5) + + is equivalent to: + + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: + + sample(range(10000000), 60) + + """ + + def uniform(self, a: float, b: float) -> float: + """Get a random number in the range [a, b) or [a, b] depending on rounding. + +The mean (expected value) and variance of the random variable are: + + E[X] = (a + b) / 2 + Var[X] = (b - a) ** 2 / 12 + +""" + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: + """Triangular distribution. - def uniform(self, a: float, b: float) -> float: ... - def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... +Continuous distribution bounded by given lower and upper limits, +and having a given mode value in-between. + +http://en.wikipedia.org/wiki/Triangular_distribution + +The mean (expected value) and variance of the random variable are: + + E[X] = (low + high + mode) / 3 + Var[X] = (low**2 + high**2 + mode**2 - low*high - low*mode - high*mode) / 18 + +""" if sys.version_info >= (3, 12): - def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: ... + def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: + """Binomial random variable. + +Gives the number of successes for *n* independent trials +with the probability of success in each trial being *p*: + + sum(random() < p for i in range(n)) + +Returns an integer in the range: + + 0 <= X <= n + +The integer is chosen with the probability: + + P(X == k) = math.comb(n, k) * p ** k * (1 - p) ** (n - k) + +The mean (expected value) and variance of the random variable are: + + E[X] = n * p + Var[X] = n * p * (1 - p) - def betavariate(self, alpha: float, beta: float) -> float: ... +""" + + def betavariate(self, alpha: float, beta: float) -> float: + """Beta distribution. + +Conditions on the parameters are alpha > 0 and beta > 0. +Returned values range between 0 and 1. + +The mean (expected value) and variance of the random variable are: + + E[X] = alpha / (alpha + beta) + Var[X] = alpha * beta / ((alpha + beta)**2 * (alpha + beta + 1)) + +""" if sys.version_info >= (3, 12): - def expovariate(self, lambd: float = 1.0) -> float: ... + def expovariate(self, lambd: float = 1.0) -> float: + """Exponential distribution. + +lambd is 1.0 divided by the desired mean. It should be +nonzero. (The parameter would be called "lambda", but that is +a reserved word in Python.) Returned values range from 0 to +positive infinity if lambd is positive, and from negative +infinity to 0 if lambd is negative. + +The mean (expected value) and variance of the random variable are: + + E[X] = 1 / lambd + Var[X] = 1 / lambd ** 2 + +""" else: - def expovariate(self, lambd: float) -> float: ... + def expovariate(self, lambd: float) -> float: + """Exponential distribution. 
+ + lambd is 1.0 divided by the desired mean. It should be + nonzero. (The parameter would be called "lambda", but that is + a reserved word in Python.) Returned values range from 0 to + positive infinity if lambd is positive, and from negative + infinity to 0 if lambd is negative. + + """ + + def gammavariate(self, alpha: float, beta: float) -> float: + """Gamma distribution. Not the gamma function! - def gammavariate(self, alpha: float, beta: float) -> float: ... +Conditions on the parameters are alpha > 0 and beta > 0. + +The probability distribution function is: + + x ** (alpha - 1) * math.exp(-x / beta) + pdf(x) = -------------------------------------- + math.gamma(alpha) * beta ** alpha + +The mean (expected value) and variance of the random variable are: + + E[X] = alpha * beta + Var[X] = alpha * beta ** 2 + +""" if sys.version_info >= (3, 11): - def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... - def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: + """Gaussian distribution. + +mu is the mean, and sigma is the standard deviation. This is +slightly faster than the normalvariate() function. + +Not thread-safe without a lock around calls. + +""" + def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: + """Normal distribution. + +mu is the mean, and sigma is the standard deviation. + +""" else: - def gauss(self, mu: float, sigma: float) -> float: ... - def normalvariate(self, mu: float, sigma: float) -> float: ... + def gauss(self, mu: float, sigma: float) -> float: + """Gaussian distribution. + + mu is the mean, and sigma is the standard deviation. This is + slightly faster than the normalvariate() function. + + Not thread-safe without a lock around calls. - def lognormvariate(self, mu: float, sigma: float) -> float: ... - def vonmisesvariate(self, mu: float, kappa: float) -> float: ... - def paretovariate(self, alpha: float) -> float: ... - def weibullvariate(self, alpha: float, beta: float) -> float: ... + """ + def normalvariate(self, mu: float, sigma: float) -> float: + """Normal distribution. + + mu is the mean, and sigma is the standard deviation. + + """ + + def lognormvariate(self, mu: float, sigma: float) -> float: + """Log normal distribution. + +If you take the natural logarithm of this distribution, you'll get a +normal distribution with mean mu and standard deviation sigma. +mu can have any value, and sigma must be greater than zero. + +""" + def vonmisesvariate(self, mu: float, kappa: float) -> float: + """Circular data distribution. + +mu is the mean angle, expressed in radians between 0 and 2*pi, and +kappa is the concentration parameter, which must be greater than or +equal to zero. If kappa is equal to zero, this distribution reduces +to a uniform random angle over the range 0 to 2*pi. + +""" + def paretovariate(self, alpha: float) -> float: + """Pareto distribution. alpha is the shape parameter. +""" + def weibullvariate(self, alpha: float, beta: float) -> float: + """Weibull distribution. + +alpha is the scale parameter and beta is the shape parameter. + +""" # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): - def getrandbits(self, k: int) -> int: ... # k can be passed by keyword - def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... - def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... 
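A minimal sketch of the Random API documented above (illustrative only; the fixed seed just makes the output reproducible):

    import random

    rng = random.Random(42)
    print(rng.randrange(10))                                  # integer in [0, 10)
    print(rng.sample(range(100), k=3))                        # 3 distinct values
    print(rng.choices(["red", "blue"], weights=[4, 2], k=5))  # weighted, with replacement
    print(round(rng.gauss(mu=0.0, sigma=1.0), 3))             # normal variate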
+ """Alternate random number generator using sources provided +by the operating system (such as /dev/urandom on Unix or +CryptGenRandom on Windows). + + Not available on all systems (see os.urandom() for details). + +""" + def getrandbits(self, k: int) -> int: # k can be passed by keyword + """getrandbits(k) -> x. Generates an int with k random bits. +""" + def getstate(self, *args: Any, **kwds: Any) -> NoReturn: + """Method should not be called for a system random number generator. +""" + def setstate(self, *args: Any, **kwds: Any) -> NoReturn: + """Method should not be called for a system random number generator. +""" _inst: Random seed = _inst.seed diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi index fb2a06d5e4c81..b253aff3552c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi @@ -1,3 +1,110 @@ +"""Support for regular expressions (RE). + +This module provides regular expression matching operations similar to +those found in Perl. It supports both 8-bit and Unicode strings; both +the pattern and the strings being processed can contain null bytes and +characters outside the US ASCII range. + +Regular expressions can contain both special and ordinary characters. +Most ordinary characters, like "A", "a", or "0", are the simplest +regular expressions; they simply match themselves. You can +concatenate ordinary characters, so last matches the string 'last'. + +The special characters are: + "." Matches any character except a newline. + "^" Matches the start of the string. + "$" Matches the end of the string or just before the newline at + the end of the string. + "*" Matches 0 or more (greedy) repetitions of the preceding RE. + Greedy means that it will match as many repetitions as possible. + "+" Matches 1 or more (greedy) repetitions of the preceding RE. + "?" Matches 0 or 1 (greedy) of the preceding RE. + *?,+?,?? Non-greedy versions of the previous three special characters. + {m,n} Matches from m to n repetitions of the preceding RE. + {m,n}? Non-greedy version of the above. + "\\\\" Either escapes special characters or signals a special sequence. + [] Indicates a set of characters. + A "^" as the first character indicates a complementing set. + "|" A|B, creates an RE that will match either A or B. + (...) Matches the RE inside the parentheses. + The contents can be retrieved or matched later in the string. + (?aiLmsux) The letters set the corresponding flags defined below. + (?:...) Non-grouping version of regular parentheses. + (?P...) The substring matched by the group is accessible by name. + (?P=name) Matches the text matched earlier by the group named name. + (?#...) A comment; ignored. + (?=...) Matches if ... matches next, but doesn't consume the string. + (?!...) Matches if ... doesn't match next. + (?<=...) Matches if preceded by ... (must be fixed length). + (?= 3, 11) or # sre_constants. Typeshed has it here because its __module__ attribute is set to "re". class error(Exception): + """Exception raised for invalid regular expressions. 
+ +Attributes: + + msg: The unformatted error message + pattern: The regular expression pattern + pos: The index in the pattern where compilation failed (may be None) + lineno: The line corresponding to pos (may be None) + colno: The column corresponding to pos (may be None) +""" msg: str pattern: str | bytes | None pos: int | None @@ -63,30 +180,51 @@ class error(Exception): @final class Match(Generic[AnyStr]): + """The result of re.match() and re.search(). +Match objects always have a boolean value of True. +""" @property - def pos(self) -> int: ... + def pos(self) -> int: + """The index into the string at which the RE engine started looking for a match. +""" @property - def endpos(self) -> int: ... + def endpos(self) -> int: + """The index into the string beyond which the RE engine will not go. +""" @property - def lastindex(self) -> int | None: ... + def lastindex(self) -> int | None: + """The integer index of the last matched capturing group. +""" @property - def lastgroup(self) -> str | None: ... + def lastgroup(self) -> str | None: + """The name of the last matched capturing group. +""" @property - def string(self) -> AnyStr: ... + def string(self) -> AnyStr: + """The string passed to match() or search(). +""" # The regular expression object whose match() or search() method produced # this match instance. @property - def re(self) -> Pattern[AnyStr]: ... + def re(self) -> Pattern[AnyStr]: + """The regular expression object. +""" @overload - def expand(self: Match[str], template: str) -> str: ... + def expand(self: Match[str], template: str) -> str: + """Return the string obtained by doing backslash substitution on the string template, as done by the sub() method. +""" @overload def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... @overload def expand(self, template: AnyStr) -> AnyStr: ... # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def group(self, group: Literal[0] = 0, /) -> AnyStr: ... + def group(self, group: Literal[0] = 0, /) -> AnyStr: + """group([group1, ...]) -> str or tuple. + Return subgroup(s) of the match by indices or names. + For 0 returns the entire match. +""" @overload def group(self, group: str | int, /) -> AnyStr | MaybeNone: ... @overload @@ -94,53 +232,91 @@ class Match(Generic[AnyStr]): # Each item of groups()'s return tuple is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload - def groups(self) -> tuple[AnyStr | MaybeNone, ...]: ... + def groups(self) -> tuple[AnyStr | MaybeNone, ...]: + """Return a tuple containing all the subgroups of the match, from 1. + + default + Is used for groups that did not participate in the match. +""" @overload def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... # Each value in groupdict()'s return dict is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload - def groupdict(self) -> dict[str, AnyStr | MaybeNone]: ... + def groupdict(self) -> dict[str, AnyStr | MaybeNone]: + """Return a dictionary containing all the named subgroups of the match, keyed by the subgroup name. + + default + Is used for groups that did not participate in the match. +""" @overload def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... - def start(self, group: int | str = 0, /) -> int: ... - def end(self, group: int | str = 0, /) -> int: ... - def span(self, group: int | str = 0, /) -> tuple[int, int]: ... + def start(self, group: int | str = 0, /) -> int: + """Return index of the start of the substring matched by group. 
+""" + def end(self, group: int | str = 0, /) -> int: + """Return index of the end of the substring matched by group. +""" + def span(self, group: int | str = 0, /) -> tuple[int, int]: + """For match object m, return the 2-tuple (m.start(group), m.end(group)). +""" @property def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def __getitem__(self, key: Literal[0], /) -> AnyStr: ... + def __getitem__(self, key: Literal[0], /) -> AnyStr: + """Return self[key]. +""" @overload def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ... def __copy__(self) -> Match[AnyStr]: ... def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @final class Pattern(Generic[AnyStr]): + """Compiled regular expression object. +""" @property - def flags(self) -> int: ... + def flags(self) -> int: + """The regex matching flags. +""" @property - def groupindex(self) -> Mapping[str, int]: ... + def groupindex(self) -> Mapping[str, int]: + """A dictionary mapping group names to group numbers. +""" @property - def groups(self) -> int: ... + def groups(self) -> int: + """The number of capturing groups in the pattern. +""" @property - def pattern(self) -> AnyStr: ... + def pattern(self) -> AnyStr: + """The pattern string from which the RE object was compiled. +""" @overload - def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: + """Scan through string looking for a match, and return a corresponding match object instance. + +Return None if no position in the string matches. +""" @overload def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: + """Matches zero or more characters at the beginning of the string. +""" @overload def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: + """Matches against all of the string. +""" @overload def fullmatch( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -148,20 +324,28 @@ class Pattern(Generic[AnyStr]): @overload def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: ... + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: + """Split string by the occurrences of pattern. 
+""" @overload def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | MaybeNone]: ... @overload def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | MaybeNone]: ... # return type depends on the number of groups in the pattern @overload - def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: + """Return a list of all non-overlapping matches of pattern in string. +""" @overload def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... @overload - def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: + """Return an iterator over all non-overlapping matches for the RE pattern in string. + +For each match, the iterator returns a match object. +""" @overload def finditer( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -169,7 +353,9 @@ class Pattern(Generic[AnyStr]): @overload def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... @overload - def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: + """Return the string obtained by replacing the leftmost non-overlapping occurrences of pattern in string by the replacement repl. +""" @overload def sub( self: Pattern[bytes], @@ -180,7 +366,9 @@ class Pattern(Generic[AnyStr]): @overload def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... @overload - def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: + """Return the tuple (new_string, number_of_subs_made) found by replacing the leftmost non-overlapping occurrences of pattern with the replacement repl. +""" @overload def subn( self: Pattern[bytes], @@ -194,11 +382,15 @@ class Pattern(Generic[AnyStr]): def __deepcopy__(self, memo: Any, /) -> Pattern[AnyStr]: ... def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" # ----- re variables and constants ----- class RegexFlag(enum.IntFlag): + """An enumeration. +""" A = sre_compile.SRE_FLAG_ASCII ASCII = A DEBUG = sre_compile.SRE_FLAG_DEBUG @@ -250,39 +442,78 @@ _FlagsType: TypeAlias = int | RegexFlag # pattern arguments do *not* accept arbitrary buffers such as bytearray, # because the pattern must be hashable. @overload -def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: + """Compile a regular expression pattern, returning a Pattern object. +""" @overload def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... 
@overload -def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: + """Scan through string looking for a match to the pattern, returning +a Match object, or None if no match was found. +""" @overload def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: + """Try to apply the pattern at the start of the string, returning +a Match object, or None if no match was found. +""" @overload def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: + """Try to apply the pattern to all of the string, returning +a Match object, or None if no match was found. +""" @overload def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: ... +def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: + """Split the source string by the occurrences of the pattern, +returning a list containing the resulting substrings. If +capturing parentheses are used in pattern, then the text of all +groups in the pattern are also returned as part of the resulting +list. If maxsplit is nonzero, at most maxsplit splits occur, +and the remainder of the string is returned as the final element +of the list. +""" @overload def split( pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 ) -> list[bytes | MaybeNone]: ... @overload -def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: + """Return a list of all non-overlapping matches in the string. + +If one or more capturing groups are present in the pattern, return +a list of groups; this will be a list of tuples if the pattern +has more than one group. + +Empty matches are included in the result. +""" @overload def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... @overload -def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: + """Return an iterator over all non-overlapping matches in the +string. For each match, the iterator returns a Match object. + +Empty matches are included in the result. +""" @overload def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... @overload def sub( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 -) -> str: ... 
+) -> str: + """Return the string obtained by replacing the leftmost +non-overlapping occurrences of the pattern in string by the +replacement repl. repl can be either a string or a callable; +if a string, backslash escapes in it are processed. If it is +a callable, it's passed the Match object and must return +a replacement string to be used. +""" @overload def sub( pattern: bytes | Pattern[bytes], @@ -294,7 +525,16 @@ def sub( @overload def subn( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 -) -> tuple[str, int]: ... +) -> tuple[str, int]: + """Return a 2-tuple containing (new_string, number). +new_string is the string obtained by replacing the leftmost +non-overlapping occurrences of the pattern in the source +string by the replacement repl. number is the number of +substitutions that were made. repl can be either a string or a +callable; if a string, backslash escapes in it are processed. +If it is a callable, it's passed the Match object and must +return a replacement string to be used. +""" @overload def subn( pattern: bytes | Pattern[bytes], @@ -303,12 +543,21 @@ def subn( count: int = 0, flags: _FlagsType = 0, ) -> tuple[bytes, int]: ... -def escape(pattern: AnyStr) -> AnyStr: ... -def purge() -> None: ... +def escape(pattern: AnyStr) -> AnyStr: + """ +Escape special characters in a string. +""" +def purge() -> None: + """Clear the regular expression caches +""" if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `re.compile()` instead.") - def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... # undocumented + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented + """Compile a template pattern, returning a Pattern object, deprecated +""" else: - def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... # undocumented + def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented + """Compile a template pattern, returning a Pattern object +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi index 7325c267b32c2..cf4b4bb4a5ef8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi @@ -1,3 +1,5 @@ +"""Importing this module enables command line editing using libedit readline. +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -8,33 +10,114 @@ if sys.platform != "win32": _Completer: TypeAlias = Callable[[str, int], str | None] _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] - def parse_and_bind(string: str, /) -> None: ... - def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: ... - def get_line_buffer() -> str: ... - def insert_text(string: str, /) -> None: ... - def redisplay() -> None: ... - def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... - def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: ... - def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: ... - def get_history_length() -> int: ... - def set_history_length(length: int, /) -> None: ... - def clear_history() -> None: ... - def get_current_history_length() -> int: ... 
- def get_history_item(index: int, /) -> str: ... - def remove_history_item(pos: int, /) -> None: ... - def replace_history_item(pos: int, line: str, /) -> None: ... - def add_history(string: str, /) -> None: ... - def set_auto_history(enabled: bool, /) -> None: ... - def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: ... - def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: ... - def set_completer(function: _Completer | None = None, /) -> None: ... - def get_completer() -> _Completer | None: ... - def get_completion_type() -> int: ... - def get_begidx() -> int: ... - def get_endidx() -> int: ... - def set_completer_delims(string: str, /) -> None: ... - def get_completer_delims() -> str: ... - def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: ... + def parse_and_bind(string: str, /) -> None: + """Execute the init line provided in the string argument. +""" + def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: + """Execute a readline initialization file. + +The default filename is the last filename used. +""" + def get_line_buffer() -> str: + """Return the current contents of the line buffer. +""" + def insert_text(string: str, /) -> None: + """Insert text into the line buffer at the cursor position. +""" + def redisplay() -> None: + """Change what's displayed on the screen to reflect contents of the line buffer. +""" + def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: + """Load a readline history file. + +The default filename is ~/.history. +""" + def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: + """Save a readline history file. + +The default filename is ~/.history. +""" + def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: + """Append the last nelements items of the history list to file. + +The default filename is ~/.history. +""" + def get_history_length() -> int: + """Return the maximum number of lines that will be written to the history file. +""" + def set_history_length(length: int, /) -> None: + """Set the maximal number of lines which will be written to the history file. + +A negative length is used to inhibit history truncation. +""" + def clear_history() -> None: + """Clear the current readline history. +""" + def get_current_history_length() -> int: + """Return the current (not the maximum) length of history. +""" + def get_history_item(index: int, /) -> str: + """Return the current contents of history item at one-based index. +""" + def remove_history_item(pos: int, /) -> None: + """Remove history item given by its zero-based position. +""" + def replace_history_item(pos: int, line: str, /) -> None: + """Replaces history item given by its position with contents of line. + +pos is zero-based. +""" + def add_history(string: str, /) -> None: + """Add an item to the history buffer. +""" + def set_auto_history(enabled: bool, /) -> None: + """Enables or disables automatic history. +""" + def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: + """Set or remove the function invoked by the rl_startup_hook callback. + +The function is called with no arguments just +before readline prints the first prompt. +""" + def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: + """Set or remove the function invoked by the rl_pre_input_hook callback. 
+ +The function is called with no arguments after the first prompt +has been printed and just before readline starts reading input +characters. +""" + def set_completer(function: _Completer | None = None, /) -> None: + """Set or remove the completer function. + +The function is called as function(text, state), +for state in 0, 1, 2, ..., until it returns a non-string. +It should return the next possible completion starting with 'text'. +""" + def get_completer() -> _Completer | None: + """Get the current completer function. +""" + def get_completion_type() -> int: + """Get the type of completion being attempted. +""" + def get_begidx() -> int: + """Get the beginning index of the completion scope. +""" + def get_endidx() -> int: + """Get the ending index of the completion scope. +""" + def set_completer_delims(string: str, /) -> None: + """Set the word delimiters for completion. +""" + def get_completer_delims() -> str: + """Get the word delimiters for completion. +""" + def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: + """Set or remove the completion display function. + +The function is called as + function(substitution, [matches], longest_match_length) +once each time matches need to be displayed. +""" if sys.version_info >= (3, 13): backend: Literal["readline", "editline"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi index 68ada65693485..da748220c5b4f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi @@ -1,3 +1,5 @@ +"""Redo the builtin repr() (representation) but with limits on most sizes. +""" import sys from array import array from collections import deque @@ -9,7 +11,9 @@ __all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc: TypeAlias = Callable[[Any], str] -def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: + """Decorator to make a repr function return fillvalue for a recursive call +""" class Repr: maxlevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi index f99cd5b088056..89868fe24cfdc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi @@ -30,6 +30,13 @@ if sys.platform != "win32": class struct_rusage( structseq[float], tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] ): + """struct_rusage: Result from getrusage. + +This object may be accessed either as a tuple of + (utime,stime,maxrss,ixrss,idrss,isrss,minflt,majflt, + nswap,inblock,oublock,msgsnd,msgrcv,nsignals,nvcsw,nivcsw) +or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. +""" if sys.version_info >= (3, 10): __match_args__: Final = ( "ru_utime", @@ -51,37 +58,69 @@ if sys.platform != "win32": ) @property - def ru_utime(self) -> float: ... + def ru_utime(self) -> float: + """user time used +""" @property - def ru_stime(self) -> float: ... + def ru_stime(self) -> float: + """system time used +""" @property - def ru_maxrss(self) -> int: ... + def ru_maxrss(self) -> int: + """max. resident set size +""" @property - def ru_ixrss(self) -> int: ... + def ru_ixrss(self) -> int: + """shared memory size +""" @property - def ru_idrss(self) -> int: ... 
+ def ru_idrss(self) -> int: + """unshared data size +""" @property - def ru_isrss(self) -> int: ... + def ru_isrss(self) -> int: + """unshared stack size +""" @property - def ru_minflt(self) -> int: ... + def ru_minflt(self) -> int: + """page faults not requiring I/O +""" @property - def ru_majflt(self) -> int: ... + def ru_majflt(self) -> int: + """page faults requiring I/O +""" @property - def ru_nswap(self) -> int: ... + def ru_nswap(self) -> int: + """number of swap outs +""" @property - def ru_inblock(self) -> int: ... + def ru_inblock(self) -> int: + """block input operations +""" @property - def ru_oublock(self) -> int: ... + def ru_oublock(self) -> int: + """block output operations +""" @property - def ru_msgsnd(self) -> int: ... + def ru_msgsnd(self) -> int: + """IPC messages sent +""" @property - def ru_msgrcv(self) -> int: ... + def ru_msgrcv(self) -> int: + """IPC messages received +""" @property - def ru_nsignals(self) -> int: ... + def ru_nsignals(self) -> int: + """signals received +""" @property - def ru_nvcsw(self) -> int: ... + def ru_nvcsw(self) -> int: + """voluntary context switches +""" @property - def ru_nivcsw(self) -> int: ... + def ru_nivcsw(self) -> int: + """involuntary context switches +""" def getpagesize() -> int: ... def getrlimit(resource: int, /) -> tuple[int, int]: ... @@ -91,5 +130,7 @@ if sys.platform != "win32": if sys.version_info >= (3, 12): def prlimit(pid: int, resource: int, limits: tuple[int, int] | None = None, /) -> tuple[int, int]: ... else: - def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: ... + def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: + """prlimit(pid, resource, [limits]) +""" error = OSError diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi index 8d9477e3ee452..0feeb5166783f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi @@ -1,9 +1,75 @@ +"""Word completion for GNU readline. + +The completer completes keywords, built-ins and globals in a selectable +namespace (which defaults to __main__); when completing NAME.NAME..., it +evaluates (!) the expression up to the last dot and completes its attributes. + +It's very cool to do "import sys" type "sys.", hit the completion key (twice), +and see the list of names defined by the sys module! + +Tip: to use the tab key as the completion key, call + + readline.parse_and_bind("tab: complete") + +Notes: + +- Exceptions raised by the completer function are *ignored* (and generally cause + the completion to fail). This is a feature -- since readline sets the tty + device in raw (or cbreak) mode, printing a traceback wouldn't work well + without some complicated hoopla to save, reset and restore the tty state. + +- The evaluation of the NAME.NAME... form may cause arbitrary application + defined code to be executed if an object with a __getattr__ hook is found. + Since it is the responsibility of the application (or the user) to enable this + feature, I consider this an acceptable risk. More complicated expressions + (e.g. function calls or indexing operations) are *not* evaluated. + +- When the original stdin is not a tty device, GNU readline is never + used, and this module (and the readline module) are silently inactive. 
+ +""" from typing import Any __all__ = ["Completer"] class Completer: - def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... - def complete(self, text: str, state: int) -> str | None: ... - def attr_matches(self, text: str) -> list[str]: ... - def global_matches(self, text: str) -> list[str]: ... + def __init__(self, namespace: dict[str, Any] | None = None) -> None: + """Create a new completer for the command line. + +Completer([namespace]) -> completer instance. + +If unspecified, the default namespace where completions are performed +is __main__ (technically, __main__.__dict__). Namespaces should be +given as dictionaries. + +Completer instances should be used as the completion mechanism of +readline via the set_completer() call: + +readline.set_completer(Completer(my_namespace).complete) +""" + def complete(self, text: str, state: int) -> str | None: + """Return the next possible completion for 'text'. + +This is called successively with state == 0, 1, 2, ... until it +returns None. The completion should begin with 'text'. + +""" + def attr_matches(self, text: str) -> list[str]: + """Compute matches when text contains a dot. + +Assuming the text is of the form NAME.NAME....[NAME], and is +evaluable in self.namespace, it will be evaluated and its attributes +(as revealed by dir()) are used as possible completions. (For class +instances, class members are also considered.) + +WARNING: this can still invoke arbitrary C code, if an object +with a __getattr__ hook is evaluated. + +""" + def global_matches(self, text: str) -> list[str]: + """Compute matches when text is a simple name. + +Return a list of all keywords, built-in functions and names currently +defined in self.namespace that match. + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi index d4406ea4ac41e..9f1e53a465d1e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi @@ -1,3 +1,11 @@ +"""runpy.py - locating and running Python code using the module namespace + +Provides support for locating and running Python scripts using the Python +module namespace instead of the native filesystem. + +This allows Python code to play nicely with non-filesystem based PEP 302 +importers when locating support scripts as well as when importing modules. +""" from _typeshed import Unused from types import ModuleType from typing import Any @@ -6,6 +14,8 @@ from typing_extensions import Self __all__ = ["run_module", "run_path"] class _TempModule: + """Temporarily replace a module in sys.modules with an empty namespace +""" mod_name: str module: ModuleType def __init__(self, mod_name: str) -> None: ... @@ -20,5 +30,38 @@ class _ModifiedArgv0: def run_module( mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False -) -> dict[str, Any]: ... -def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... +) -> dict[str, Any]: + """Execute a module's code without importing it. + +mod_name -- an absolute module name or package name. + +Optional arguments: +init_globals -- dictionary used to pre-populate the module’s +globals dictionary before the code is executed. + +run_name -- if not None, this will be used for setting __name__; +otherwise, __name__ will be set to mod_name + '__main__' if the +named module is a package and to just mod_name otherwise. 
+ +alter_sys -- if True, sys.argv[0] is updated with the value of +__file__ and sys.modules[__name__] is updated with a temporary +module object for the module being executed. Both are +restored to their original values before the function returns. + +Returns the resulting module globals dictionary. +""" +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: + """Execute code located at the specified filesystem location. + +path_name -- filesystem location of a Python script, zipfile, +or directory containing a top level __main__.py script. + +Optional arguments: +init_globals -- dictionary used to pre-populate the module’s +globals dictionary before the code is executed. + +run_name -- if not None, this will be used to set __name__; +otherwise, '' will be used for __name__. + +Returns the resulting module globals dictionary. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi index 52f87ab68ff54..0e56c9f5c0cb4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi @@ -1,3 +1,27 @@ +"""A generally useful event scheduler class. + +Each instance of this class manages its own queue. +No multi-threading is implied; you are supposed to hack that +yourself, or use a single instance per application. + +Each instance is parametrized with two functions, one that is +supposed to return the current time, one that is supposed to +implement a delay. You can implement real-time scheduling by +substituting time and sleep from built-in module time, or you can +implement simulated time by writing your own functions. This can +also be used to integrate scheduling with STDWIN events; the delay +function is allowed to modify the queue. Time can be expressed as +integers or floating-point numbers, as long as it is consistent. + +Events are specified by tuples (time, priority, action, argument, kwargs). +As in UNIX, lower priority numbers mean higher priority; in this +way the queue can be maintained as a priority queue. Execution of the +event means calling the action function, passing it the argument +sequence in "argument" (remember that in Python, multiple function +arguments are be packed in a sequence) and keyword parameters in "kwargs". +The action function may be an instance method so it +has another way to reference private data (besides global variables). +""" import sys from collections.abc import Callable from typing import Any, ClassVar, NamedTuple, type_check_only @@ -9,6 +33,8 @@ _ActionCallback: TypeAlias = Callable[..., Any] if sys.version_info >= (3, 10): class Event(NamedTuple): + """Event(time, priority, sequence, action, argument, kwargs) +""" time: float priority: Any sequence: int @@ -32,15 +58,66 @@ class scheduler: timefunc: Callable[[], float] delayfunc: Callable[[float], object] - def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... + def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: + """Initialize a new instance, passing the time and delay +functions +""" def enterabs( self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... - ) -> Event: ... + ) -> Event: + """Enter a new event in the queue at an absolute time. + +Returns an ID for the event which can be used to remove it, +if necessary. 
+ +""" def enter( self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... - ) -> Event: ... - def run(self, blocking: bool = True) -> float | None: ... - def cancel(self, event: Event) -> None: ... - def empty(self) -> bool: ... + ) -> Event: + """A variant that specifies the time as a relative time. + +This is actually the more commonly used interface. + +""" + def run(self, blocking: bool = True) -> float | None: + """Execute events until the queue is empty. +If blocking is False executes the scheduled events due to +expire soonest (if any) and then return the deadline of the +next scheduled call in the scheduler. + +When there is a positive delay until the first event, the +delay function is called and the event is left in the queue; +otherwise, the event is removed from the queue and executed +(its action function is called, passing it the argument). If +the delay function returns prematurely, it is simply +restarted. + +It is legal for both the delay function and the action +function to modify the queue or to raise an exception; +exceptions are not caught but the scheduler's state remains +well-defined so run() may be called again. + +A questionable hack is added to allow other threads to run: +just after an event is executed, a delay of 0 is executed, to +avoid monopolizing the CPU when other threads are also +runnable. + +""" + def cancel(self, event: Event) -> None: + """Remove an event from the queue. + +This must be presented the ID as returned by enter(). +If the event is not in the queue, this raises ValueError. + +""" + def empty(self) -> bool: + """Check whether the queue is empty. +""" @property - def queue(self) -> list[Event]: ... + def queue(self) -> list[Event]: + """An ordered list of upcoming events. + +Events are named tuples with fields for: + time, priority, action, arguments, kwargs + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi index 4861b6f09340e..4726ab1bd82dd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi @@ -1,3 +1,10 @@ +"""Generate cryptographically strong pseudo-random numbers suitable for +managing secrets such as account authentication, tokens, and similar. + +See PEP 506 for more information. +https://peps.python.org/pep-0506/ + +""" from _typeshed import SupportsLenAndGetItem from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom @@ -7,9 +14,43 @@ __all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "to _T = TypeVar("_T") -def randbelow(exclusive_upper_bound: int) -> int: ... -def randbits(k: int) -> int: ... -def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... -def token_bytes(nbytes: int | None = None) -> bytes: ... -def token_hex(nbytes: int | None = None) -> str: ... -def token_urlsafe(nbytes: int | None = None) -> str: ... +def randbelow(exclusive_upper_bound: int) -> int: + """Return a random int in the range [0, n). +""" +def randbits(k: int) -> int: + """getrandbits(k) -> x. Generates an int with k random bits. +""" +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: + """Choose a random element from a non-empty sequence. +""" +def token_bytes(nbytes: int | None = None) -> bytes: + """Return a random byte string containing *nbytes* bytes. + +If *nbytes* is ``None`` or not supplied, a reasonable +default is used. 
+ +>>> token_bytes(16) #doctest:+SKIP +b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + +""" +def token_hex(nbytes: int | None = None) -> str: + """Return a random text string, in hexadecimal. + +The string has *nbytes* random bytes, each byte converted to two +hex digits. If *nbytes* is ``None`` or not supplied, a reasonable +default is used. + +>>> token_hex(16) #doctest:+SKIP +'f9bf78b9a18ce6d46a0cd2b0b86df9da' + +""" +def token_urlsafe(nbytes: int | None = None) -> str: + """Return a random URL-safe text string, in Base64 encoding. + +The string has *nbytes* random bytes. If *nbytes* is ``None`` +or not supplied, a reasonable default is used. + +>>> token_urlsafe(16) #doctest:+SKIP +'Drmhze6EPcv0fN_81Bj-nA' + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi index 587bc75376ef1..83db6d9eb4a14 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi @@ -1,3 +1,8 @@ +"""This module supports asynchronous I/O on multiple file descriptors. + +*** IMPORTANT NOTICE *** +On Windows, only sockets are supported; on Unix, all file descriptors. +""" import sys from _typeshed import FileDescriptorLike from collections.abc import Iterable @@ -25,6 +30,11 @@ if sys.platform != "win32": # This is actually a function that returns an instance of a class. # The class is not accessible directly, and also calls itself select.poll. class poll: + """Returns a polling object. + +This object supports registering and unregistering file descriptors, and then +polling them for I/O events. +""" # default value is select.POLLIN | select.POLLPRI | select.POLLOUT def register(self, fd: FileDescriptorLike, eventmask: int = 7, /) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int, /) -> None: ... @@ -33,7 +43,30 @@ if sys.platform != "win32": def select( rlist: Iterable[Any], wlist: Iterable[Any], xlist: Iterable[Any], timeout: float | None = None, / -) -> tuple[list[Any], list[Any], list[Any]]: ... +) -> tuple[list[Any], list[Any], list[Any]]: + """Wait until one or more file descriptors are ready for some kind of I/O. + +The first three arguments are iterables of file descriptors to be waited for: +rlist -- wait until ready for reading +wlist -- wait until ready for writing +xlist -- wait for an "exceptional condition" +If only one kind of condition is required, pass [] for the other lists. + +A file descriptor is either a socket or file object, or a small integer +gotten from a fileno() method call on one of those. + +The optional 4th argument specifies a timeout in seconds; it may be +a floating-point number to specify fractions of seconds. If it is absent +or None, the call will never time out. + +The return value is a tuple of three lists corresponding to the first three +arguments; each contains the subset of the corresponding file descriptors +that are ready. + +*** IMPORTANT NOTICE *** +On Windows, only sockets are supported; on Unix, all file +descriptors can be used. +""" error = OSError @@ -114,6 +147,14 @@ if sys.platform != "linux" and sys.platform != "win32": if sys.platform == "linux": @final class epoll: + """select.epoll(sizehint=-1, flags=0) + +Returns an epolling object + +sizehint must be a positive integer or -1 for the default size. The +sizehint is used to optimize internal data structures. It doesn't limit +the maximum number of monitored events. +""" def __new__(self, sizehint: int = ..., flags: int = ...) -> Self: ... 
def __enter__(self) -> Self: ... def __exit__( @@ -123,15 +164,55 @@ if sys.platform == "linux": exc_tb: TracebackType | None = None, /, ) -> None: ... - def close(self) -> None: ... + def close(self) -> None: + """Close the epoll control file descriptor. + +Further operations on the epoll object will raise an exception. +""" closed: bool - def fileno(self) -> int: ... - def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... - def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... - def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... + def fileno(self) -> int: + """Return the epoll control file descriptor. +""" + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: + """Registers a new fd or raises an OSError if the fd is already registered. + + fd + the target file descriptor of the operation + eventmask + a bit set composed of the various EPOLL constants + +The epoll interface supports all file descriptors that support poll. +""" + def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: + """Modify event mask for a registered file descriptor. + + fd + the target file descriptor of the operation + eventmask + a bit set composed of the various EPOLL constants +""" + def unregister(self, fd: FileDescriptorLike) -> None: + """Remove a registered file descriptor from the epoll object. + + fd + the target file descriptor of the operation +""" + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: + """Wait for events on the epoll file descriptor. + + timeout + the maximum time to wait in seconds (as float); + a timeout of None or -1 makes poll wait indefinitely + maxevents + the maximum number of events returned; -1 means no limit + +Returns a list containing any descriptors that have events to report, +as a list of (fd, events) 2-tuples. +""" @classmethod - def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: ... + def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: + """Create an epoll object from a given control fd. +""" EPOLLERR: Final[int] EPOLLEXCLUSIVE: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi index bcca4e341b9a1..20ea1708cd967 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi @@ -1,3 +1,8 @@ +"""Selectors module. + +This module allows high-level and efficient I/O multiplexing, built upon the +`select` module primitives. +""" import sys from _typeshed import FileDescriptor, FileDescriptorLike, Unused from abc import ABCMeta, abstractmethod @@ -11,43 +16,144 @@ EVENT_READ: Final = 1 EVENT_WRITE: Final = 2 class SelectorKey(NamedTuple): + """SelectorKey(fileobj, fd, events, data) + + Object used to associate a file object to its backing + file descriptor, selected event mask, and attached data. +""" fileobj: FileDescriptorLike fd: FileDescriptor events: _EventMask data: Any class BaseSelector(metaclass=ABCMeta): + """Selector abstract base class. + +A selector supports registering file objects to be monitored for specific +I/O events. + +A file object is a file descriptor or any object with a `fileno()` method. +An arbitrary object can be attached to the file object, which can be used +for example to store context information, a callback, etc. 
+ +A selector can use various implementations (select(), poll(), epoll()...) +depending on the platform. The default `Selector` class uses the most +efficient implementation on the current platform. +""" @abstractmethod - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: + """Register a file object. + +Parameters: +fileobj -- file object or file descriptor +events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) +data -- attached data + +Returns: +SelectorKey instance + +Raises: +ValueError if events is invalid +KeyError if fileobj is already registered +OSError if fileobj is closed or otherwise is unacceptable to + the underlying system call (if a system call is made) + +Note: +OSError may or may not be raised +""" @abstractmethod - def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: + """Unregister a file object. + +Parameters: +fileobj -- file object or file descriptor + +Returns: +SelectorKey instance + +Raises: +KeyError if fileobj is not registered + +Note: +If fileobj is registered but has since been closed this does +*not* raise OSError (even if the wrapped syscall does) +""" + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: + """Change a registered file object monitored events or attached data. + +Parameters: +fileobj -- file object or file descriptor +events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) +data -- attached data + +Returns: +SelectorKey instance + +Raises: +Anything that unregister() or register() raises +""" @abstractmethod - def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... - def close(self) -> None: ... - def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: + """Perform the actual selection, until some monitored file objects are +ready or a timeout expires. + +Parameters: +timeout -- if timeout > 0, this specifies the maximum wait time, in + seconds + if timeout <= 0, the select() call won't block, and will + report the currently ready file objects + if timeout is None, select() will block until a monitored + file object becomes ready + +Returns: +list of (key, events) for ready file objects +`events` is a bitwise mask of EVENT_READ|EVENT_WRITE +""" + def close(self) -> None: + """Close the selector. + +This must be called to make sure that any underlying resource is freed. +""" + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: + """Return the key associated to a registered file object. + +Returns: +SelectorKey for this file object +""" @abstractmethod - def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: + """Return a mapping of file objects to selector keys. +""" def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... class _BaseSelectorImpl(BaseSelector, metaclass=ABCMeta): + """Base selector implementation. +""" def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... 
def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class SelectSelector(_BaseSelectorImpl): + """Select-based selector. +""" def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... class _PollLikeSelector(_BaseSelectorImpl): + """Base class shared between poll, epoll and devpoll selectors. +""" def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": - class PollSelector(_PollLikeSelector): ... + class PollSelector(_PollLikeSelector): + """Poll-based selector. +""" if sys.platform == "linux": class EpollSelector(_PollLikeSelector): + """Epoll-based selector. +""" def fileno(self) -> int: ... if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": @@ -64,6 +170,8 @@ if sys.platform != "win32" and sys.platform != "linux": # The runtime logic is more fine-grained than a `sys.platform` check; # not really expressible in the stubs class DefaultSelector(_BaseSelectorImpl): + """Epoll-based selector. +""" def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": def fileno(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi index 654c2ea097f78..c2cefe1ab2540 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi @@ -1,3 +1,60 @@ +"""Manage shelves of pickled objects. + +A "shelf" is a persistent, dictionary-like object. The difference +with dbm databases is that the values (not the keys!) in a shelf can +be essentially arbitrary Python objects -- anything that the "pickle" +module can handle. This includes most class instances, recursive data +types, and objects containing lots of shared sub-objects. The keys +are ordinary strings. + +To summarize the interface (key is a string, data is an arbitrary +object): + + import shelve + d = shelve.open(filename) # open, with (g)dbm filename -- no suffix + + d[key] = data # store data at key (overwrites old data if + # using an existing key) + data = d[key] # retrieve a COPY of the data at key (raise + # KeyError if no such key) -- NOTE that this + # access returns a *copy* of the entry! + del d[key] # delete data stored at key (raises KeyError + # if no such key) + flag = key in d # true if the key exists + list = d.keys() # a list of all existing keys (slow!) + + d.close() # close it + +Dependent on the implementation, closing a persistent dictionary may +or may not be necessary to flush changes to disk. + +Normally, d[key] returns a COPY of the entry. This needs care when +mutable entries are mutated: for example, if d[key] is a list, + d[key].append(anitem) +does NOT modify the entry d[key] itself, as stored in the persistent +mapping -- it only modifies the copy, which is then immediately +discarded, so that the append has NO effect whatsoever. To append an +item to d[key] in a way that will affect the persistent mapping, use: + data = d[key] + data.append(anitem) + d[key] = data + +To avoid the problem with mutable entries, you may pass the keyword +argument writeback=True in the call to shelve.open. 
When you use: + d = shelve.open(filename, writeback=True) +then d keeps a cache of all entries you access, and writes them all back +to the persistent mapping when you call d.close(). This ensures that +such usage as d[key].append(anitem) works as intended. + +However, using keyword argument writeback=True may consume vast amount +of memory for the cache, and it may make d.close() very slow, if you +access many of d's entries after opening it in this way: d has no way to +check which of the entries you access are mutable and/or which ones you +actually mutate, so it must cache, and write back at close, all of the +entries that you access. You can call d.sync() to write back all the +entries in the cache, and empty the cache (d.sync() also synchronizes +the persistent dictionary on disk, if feasible). +""" import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -12,6 +69,11 @@ _T = TypeVar("_T") _VT = TypeVar("_VT") class Shelf(MutableMapping[str, _VT]): + """Base class for shelf implementations. + +This is initialized with a dictionary-like object. +See the module's __doc__ string for an overview of the interface. +""" def __init__( self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" ) -> None: ... @@ -36,6 +98,17 @@ class Shelf(MutableMapping[str, _VT]): def sync(self) -> None: ... class BsdDbShelf(Shelf[_VT]): + """Shelf implementation using the "BSD" db interface. + +This adds methods first(), next(), previous(), last() and +set_location() that have no counterpart in [g]dbm databases. + +The actual database must be opened using one of the "bsddb" +modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or +bsddb.rnopen) and passed to the constructor. + +See the module's __doc__ string for an overview of the interface. +""" def set_location(self, key: str) -> tuple[str, _VT]: ... def next(self) -> tuple[str, _VT]: ... def previous(self) -> tuple[str, _VT]: ... @@ -43,6 +116,11 @@ class BsdDbShelf(Shelf[_VT]): def last(self) -> tuple[str, _VT]: ... class DbfilenameShelf(Shelf[_VT]): + """Shelf implementation using the "dbm" generic dbm interface. + +This is initialized with the filename for the dbm database. +See the module's __doc__ string for an overview of the interface. +""" if sys.version_info >= (3, 11): def __init__( self, filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False @@ -53,7 +131,29 @@ class DbfilenameShelf(Shelf[_VT]): if sys.version_info >= (3, 11): def open( filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False - ) -> Shelf[Any]: ... + ) -> Shelf[Any]: + """Open a persistent dictionary for reading and writing. + +The filename parameter is the base filename for the underlying +database. As a side-effect, an extension may be added to the +filename and more than one file may be created. The optional flag +parameter has the same interpretation as the flag parameter of +dbm.open(). The optional protocol parameter specifies the +version of the pickle protocol. + +See the module's __doc__ string for an overview of the interface. +""" else: - def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... + def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: + """Open a persistent dictionary for reading and writing. 
+ + The filename parameter is the base filename for the underlying + database. As a side-effect, an extension may be added to the + filename and more than one file may be created. The optional flag + parameter has the same interpretation as the flag parameter of + dbm.open(). The optional protocol parameter specifies the + version of the pickle protocol. + + See the module's __doc__ string for an overview of the interface. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi index 1c27483782fb5..9adebbb7ef8d5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi @@ -1,3 +1,5 @@ +"""A lexical analyzer class for simple shell-like syntaxes. +""" import sys from collections import deque from collections.abc import Iterable @@ -14,20 +16,30 @@ class _ShlexInstream(Protocol): def close(self) -> object: ... if sys.version_info >= (3, 12): - def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: + """Split the string *s* using shell-like syntax. +""" else: @overload - def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: ... + def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: + """Split the string *s* using shell-like syntax. +""" @overload @deprecated("Passing None for 's' to shlex.split() is deprecated and will raise an error in Python 3.12.") def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: ... -def join(split_command: Iterable[str]) -> str: ... -def quote(s: str) -> str: ... +def join(split_command: Iterable[str]) -> str: + """Return a shell-escaped string from *split_command*. +""" +def quote(s: str) -> str: + """Return a shell-escaped version of the string *s*. +""" # TODO: Make generic over infile once PEP 696 is implemented. class shlex: + """A lexical analyzer class for simple shell-like syntaxes. +""" commenters: str wordchars: str whitespace: str @@ -52,12 +64,24 @@ class shlex: posix: bool = False, punctuation_chars: bool | str = False, ) -> None: ... - def get_token(self) -> str | None: ... - def push_token(self, tok: str) -> None: ... + def get_token(self) -> str | None: + """Get a token from the input stream (or from stack if it's nonempty) +""" + def push_token(self, tok: str) -> None: + """Push a token onto the stack popped by the get_token method +""" def read_token(self) -> str | None: ... - def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: ... - def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: ... - def pop_source(self) -> None: ... - def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: ... + def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: + """Hook called on a filename to be sourced. +""" + def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: + """Push an input source onto the lexer's input source stack. +""" + def pop_source(self) -> None: + """Pop the input source stack. +""" + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: + """Emit a C-compiler-like, Emacs-friendly error-message leader. +""" def __iter__(self) -> Self: ... def __next__(self) -> str: ... 
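The shlex stubs above describe split(), quote(), join() and the shlex tokenizer class. As an illustrative sketch of how those documented entry points fit together (the command string below is made up purely for the example, not taken from the stubs), a round trip looks like:

    import shlex

    # split() tokenizes a command line using shell-style quoting rules.
    args = shlex.split("grep -i 'hello world' notes.txt")
    # args == ['grep', '-i', 'hello world', 'notes.txt']

    # join() re-quotes each argument only where needed, so the result
    # can be split back into the same token list.
    cmd = shlex.join(args)
    assert shlex.split(cmd) == args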
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi index cc26cfc556a00..fbe57c3da3d29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi @@ -1,3 +1,8 @@ +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" import os import sys from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, MaybeNone, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite @@ -41,31 +46,95 @@ _StrPathT = TypeVar("_StrPathT", bound=StrPath) _BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) class Error(OSError): ... -class SameFileError(Error): ... -class SpecialFileError(OSError): ... +class SameFileError(Error): + """Raised when source and destination are the same file. +""" +class SpecialFileError(OSError): + """Raised when trying to do a kind of operation (e.g. copying) which is +not supported on a special file (e.g. a named pipe) +""" if sys.version_info >= (3, 14): ExecError = RuntimeError # Deprecated in Python 3.14; removal scheduled for Python 3.16 else: - class ExecError(OSError): ... + class ExecError(OSError): + """Raised when a command could not be executed +""" + +class ReadError(OSError): + """Raised when an archive cannot be read +""" +class RegistryError(Exception): + """Raised when a registry operation with the archiving +and unpacking registries fails +""" + +def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: + """copy data from file-like object fsrc to file-like object fdst +""" +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: + """Copy data from src to dst in the most efficient way possible. + +If follow_symlinks is not set and src is a symbolic link, a new +symlink will be created instead of copying the file it points to. + +""" +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: + """Copy mode bits from src to dst. + +If follow_symlinks is not set, symlinks aren't followed if and only +if both `src` and `dst` are symlinks. If `lchmod` isn't available +(e.g. Linux) this method does nothing. + +""" +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: + """Copy file metadata -class ReadError(OSError): ... -class RegistryError(Exception): ... +Copy the permission bits, last access time, last modification time, and +flags from `src` to `dst`. On Linux, copystat() also copies the "extended +attributes" where possible. The file contents, owner, and group are +unaffected. `src` and `dst` are path-like objects or path names given as +strings. -def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... -def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... -def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... -def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +If the optional flag `follow_symlinks` is not set, symlinks aren't +followed if and only if both `src` and `dst` are symlinks. +""" @overload -def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... 
+def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: + """Copy data and mode bits ("cp src dst"). Return the file's destination. + +The destination may be a directory. + +If follow_symlinks is false, symlinks won't be followed. This +resembles GNU's "cp -P src dst". + +If source and destination are the same file, a SameFileError will be +raised. + +""" @overload def copy(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... @overload -def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: ... +def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: + """Copy data and metadata. Return the file's destination. + +Metadata is copied with copystat(). Please see the copystat function +for more information. + +The destination may be a directory. + +If follow_symlinks is false, symlinks won't be followed. This +resembles GNU's "cp -P src dst". +""" @overload def copy2(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... -def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... +def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: + """Function that can be used as copytree() ignore parameter. + +Patterns is a sequence of glob-style patterns +that are used to exclude files +""" def copytree( src: StrPath, dst: _StrPathT, @@ -74,7 +143,45 @@ def copytree( copy_function: Callable[[str, str], object] = ..., ignore_dangling_symlinks: bool = False, dirs_exist_ok: bool = False, -) -> _StrPathT: ... +) -> _StrPathT: + """Recursively copy a directory tree and return the destination directory. + +If exception(s) occur, an Error is raised with a list of reasons. + +If the optional symlinks flag is true, symbolic links in the +source tree result in symbolic links in the destination tree; if +it is false, the contents of the files pointed to by symbolic +links are copied. If the file pointed to by the symlink doesn't +exist, an exception will be added in the list of errors raised in +an Error exception at the end of the copy process. + +You can set the optional ignore_dangling_symlinks flag to true if you +want to silence this exception. Notice that this has no effect on +platforms that don't support os.symlink. + +The optional ignore argument is a callable. If given, it +is called with the `src` parameter, which is the directory +being visited by copytree(), and `names` which is the list of +`src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + +Since copytree() is called recursively, the callable will be +called once for each directory that is copied. It returns a +list of names relative to the `src` directory that should +not be copied. + +The optional copy_function argument is a callable that will be used +to copy each file. It will be called with the source path and the +destination path as arguments. By default, copy2() is used, but any +function that supports the same signature (like copy()) can be used. + +If dirs_exist_ok is false (the default) and `dst` already exists, a +`FileExistsError` is raised. If `dirs_exist_ok` is true, the copying +operation will continue if it encounters existing directories, and files +within the `dst` tree will be overwritten by corresponding files from the +`src` tree. 
+""" _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], str, ExcInfo], object] _OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object] @@ -136,14 +243,46 @@ _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], # N.B. shutil.move appears to take bytes arguments, however, # this does not work when dst is (or is within) an existing directory. # (#6832) -def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: ... +def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: + """Recursively move a file or directory to another location. This is +similar to the Unix "mv" command. Return the file or directory's +destination. + +If dst is an existing directory or a symlink to a directory, then src is +moved inside that directory. The destination path in that directory must +not already exist. + +If dst already exists but is not a directory, it may be overwritten +depending on os.rename() semantics. + +If the destination is on our current filesystem, then rename() is used. +Otherwise, src is copied to the destination and then removed. Symlinks are +recreated under the new name if os.rename() fails because of cross +filesystem renames. + +The optional `copy_function` argument is a callable that will be used +to copy the source or it will be delegated to `copytree`. +By default, copy2() is used, but any function that supports the same +signature (like copy()) can be used. + +A lot more could be done here... A look at a mv.c shows a lot of +the issues this implementation glosses over. + +""" class _ntuple_diskusage(NamedTuple): + """usage(total, used, free) +""" total: int used: int free: int -def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: + """Return disk usage statistics about the given path. + +Returned value is a named tuple with attributes 'total', 'used' and +'free', which are the amount of total, used and free space, in bytes. +""" # While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's @@ -157,7 +296,19 @@ if sys.version_info >= (3, 13): *, dir_fd: int | None = None, follow_symlinks: bool = True, - ) -> None: ... + ) -> None: + """Change owner user and group of the given path. + +user and group can be the uid/gid or the user/group names, and in that case, +they are converted to their respective uid/gid. + +If dir_fd is set, it should be an open file descriptor to the directory to +be used as the root of *path* if it is relative. + +If follow_symlinks is set to False and the last element of the path is a +symbolic link, chown will modify the link itself and not the file being +referenced by the link. +""" @overload def chown( path: FileDescriptorOrPath, @@ -178,7 +329,12 @@ if sys.version_info >= (3, 13): else: @overload - def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: ... + def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: + """Change owner user and group of the given path. + + user and group can be the uid/gid or the user/group names, and in that case, + they are converted to their respective uid/gid. + """ @overload def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ... 
@overload @@ -192,7 +348,16 @@ if sys.platform == "win32" and sys.version_info < (3, 12): def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: ... @overload -def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: ... +def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: + """Given a command, mode, and a PATH string, return the path which +conforms to the given mode on the PATH, or None if there is no such +file. + +`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result +of os.environ.get("PATH"), or can be overridden with a custom search +path. + +""" @overload def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... def make_archive( @@ -205,12 +370,40 @@ def make_archive( owner: str | None = None, group: str | None = None, logger: Any | None = None, -) -> str: ... -def get_archive_formats() -> list[tuple[str, str]]: ... +) -> str: + """Create an archive file (eg. zip or tar). + +'base_name' is the name of the file to create, minus any format-specific +extension; 'format' is the archive format: one of "zip", "tar", "gztar", +"bztar", "xztar", or "zstdtar". Or any other registered format. + +'root_dir' is a directory that will be the root directory of the +archive; ie. we typically chdir into 'root_dir' before creating the +archive. 'base_dir' is the directory where we start archiving from; +ie. 'base_dir' will be the common prefix of all files and +directories in the archive. 'root_dir' and 'base_dir' both default +to the current directory. Returns the name of the archive file. + +'owner' and 'group' are used when creating a tar archive. By default, +uses the current owner and group. +""" +def get_archive_formats() -> list[tuple[str, str]]: + """Returns a list of supported formats for archiving and unarchiving. + +Each element of the returned sequence is a tuple (name, description) +""" @overload def register_archive_format( name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" -) -> None: ... +) -> None: + """Registers an archive format. + +name is the name of the format. function is the callable that will be +used to create archives. If provided, extra_args is a sequence of +(name, value) tuples that will be passed as arguments to the callable. +description can be provided to describe the format, and will be returned +by the get_archive_formats() function. +""" @overload def register_archive_format( name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" @@ -218,7 +411,24 @@ def register_archive_format( def unregister_archive_format(name: str) -> None: ... def unpack_archive( filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None, *, filter: _TarfileFilter | None = None -) -> None: ... +) -> None: + """Unpack an archive. + +`filename` is the name of the archive. + +`extract_dir` is the name of the target directory, where the archive +is unpacked. If not provided, the current working directory is used. + +`format` is the archive format: one of "zip", "tar", "gztar", "bztar", +"xztar", or "zstdtar". Or any other registered format. If not provided, +unpack_archive will use the filename extension and see if an unpacker +was registered for that extension. + +In case none is found, a ValueError is raised. + +If `filter` is given, it is passed to the underlying +extraction function. 
+""" @overload def register_unpack_format( name: str, @@ -226,11 +436,51 @@ def register_unpack_format( function: Callable[..., object], extra_args: Sequence[tuple[str, Any]], description: str = "", -) -> None: ... +) -> None: + """Registers an unpack format. + +`name` is the name of the format. `extensions` is a list of extensions +corresponding to the format. + +`function` is the callable that will be +used to unpack archives. The callable will receive archives to unpack. +If it's unable to handle an archive, it needs to raise a ReadError +exception. + +If provided, `extra_args` is a sequence of +(name, value) tuples that will be passed as arguments to the callable. +description can be provided to describe the format, and will be returned +by the get_unpack_formats() function. +""" @overload def register_unpack_format( name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... -def unregister_unpack_format(name: str) -> None: ... -def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... -def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: ... +def unregister_unpack_format(name: str) -> None: + """Removes the pack format from the registry. +""" +def get_unpack_formats() -> list[tuple[str, list[str], str]]: + """Returns a list of supported formats for unpacking. + +Each element of the returned sequence is a tuple +(name, extensions, description) +""" +def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: + """Get the size of the terminal window. + +For each of the two dimensions, the environment variable, COLUMNS +and LINES respectively, is checked. If the variable is defined and +the value is a positive integer, it is used. + +When COLUMNS or LINES is not defined, which is the common case, +the terminal connected to sys.__stdout__ is queried +by invoking os.get_terminal_size. + +If the terminal size cannot be successfully queried, either because +the system doesn't support querying, or because we are not +connected to a terminal, the value given in fallback parameter +is used. Fallback defaults to (80, 24) which is the default +size used by many terminal emulators. + +The value returned is a named tuple of type os.terminal_size. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi index c2668bd8b32d9..831c7a6b73f95 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi @@ -9,6 +9,8 @@ from typing_extensions import Never, TypeAlias NSIG: int class Signals(IntEnum): + """An enumeration. +""" SIGABRT = 6 SIGFPE = 8 SIGILL = 4 @@ -58,6 +60,8 @@ class Signals(IntEnum): SIGSTKFLT = 16 class Handlers(IntEnum): + """An enumeration. +""" SIG_DFL = 0 SIG_IGN = 1 @@ -67,15 +71,53 @@ SIG_IGN: Final = Handlers.SIG_IGN _SIGNUM: TypeAlias = int | Signals _HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None -def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: ... +def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: + """The default handler for SIGINT installed by Python. + +It raises KeyboardInterrupt. +""" if sys.version_info >= (3, 10): # arguments changed in 3.10.2 - def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... - def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... 
+ def getsignal(signalnum: _SIGNUM) -> _HANDLER: + """Return the current action for the given signal. + +The return value can be: + SIG_IGN -- if the signal is being ignored + SIG_DFL -- if the default action for the signal is in effect + None -- if an unknown handler is in effect + anything else -- the callable Python object used as a handler +""" + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: + """Set the action for the given signal. + +The action can be SIG_DFL, SIG_IGN, or a callable Python object. +The previous action is returned. See getsignal() for possible return values. + +*** IMPORTANT NOTICE *** +A signal handler function is called with two arguments: +the first is the signal number, the second is the interrupted stack frame. +""" else: - def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: ... - def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: ... + def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: + """Return the current action for the given signal. + +The return value can be: + SIG_IGN -- if the signal is being ignored + SIG_DFL -- if the default action for the signal is in effect + None -- if an unknown handler is in effect + anything else -- the callable Python object used as a handler +""" + def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: + """Set the action for the given signal. + +The action can be SIG_DFL, SIG_IGN, or a callable Python object. +The previous action is returned. See getsignal() for possible return values. + +*** IMPORTANT NOTICE *** +A signal handler function is called with two arguments: +the first is the signal number, the second is the interrupted stack frame. +""" SIGABRT: Final = Signals.SIGABRT SIGFPE: Final = Signals.SIGFPE @@ -123,6 +165,8 @@ else: ITIMER_VIRTUAL: int class Sigmasks(IntEnum): + """An enumeration. +""" SIG_BLOCK = 0 SIG_UNBLOCK = 1 SIG_SETMASK = 2 @@ -130,22 +174,63 @@ else: SIG_BLOCK: Final = Sigmasks.SIG_BLOCK SIG_UNBLOCK: Final = Sigmasks.SIG_UNBLOCK SIG_SETMASK: Final = Sigmasks.SIG_SETMASK - def alarm(seconds: int, /) -> int: ... - def getitimer(which: int, /) -> tuple[float, float]: ... - def pause() -> None: ... - def pthread_kill(thread_id: int, signalnum: int, /) -> None: ... + def alarm(seconds: int, /) -> int: + """Arrange for SIGALRM to arrive after the given number of seconds. +""" + def getitimer(which: int, /) -> tuple[float, float]: + """Returns current value of given itimer. +""" + def pause() -> None: + """Wait until a signal arrives. +""" + def pthread_kill(thread_id: int, signalnum: int, /) -> None: + """Send a signal to a thread. +""" if sys.version_info >= (3, 10): # arguments changed in 3.10.2 - def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: ... + def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: + """Fetch and/or change the signal mask of the calling thread. +""" else: - def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: ... + def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: + """Fetch and/or change the signal mask of the calling thread. +""" + + def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: + """Sets given itimer (one of ITIMER_REAL, ITIMER_VIRTUAL or ITIMER_PROF). + +The timer will fire after value seconds and after that every interval seconds. +The itimer can be cleared by setting seconds to zero. + +Returns old values as a tuple: (delay, interval). 
+""" + def siginterrupt(signalnum: int, flag: bool, /) -> None: + """Change system call restart behaviour. - def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: ... - def siginterrupt(signalnum: int, flag: bool, /) -> None: ... - def sigpending() -> Any: ... +If flag is False, system calls will be restarted when interrupted by +signal sig, else system calls will be interrupted. +""" + def sigpending() -> Any: + """Examine pending signals. + +Returns a set of signal numbers that are pending for delivery to +the calling thread. +""" if sys.version_info >= (3, 10): # argument changed in 3.10.2 - def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... + def sigwait(sigset: Iterable[int]) -> _SIGNUM: + """Wait for a signal. + +Suspend execution of the calling thread until the delivery of one of the +signals specified in the signal set sigset. The function accepts the signal +and returns the signal number. +""" else: - def sigwait(sigset: Iterable[int], /) -> _SIGNUM: ... + def sigwait(sigset: Iterable[int], /) -> _SIGNUM: + """Wait for a signal. + +Suspend execution of the calling thread until the delivery of one of the +signals specified in the signal set sigset. The function accepts the signal +and returns the signal number. +""" if sys.platform != "darwin": SIGCLD: Final = Signals.SIGCHLD # alias SIGPOLL: Final = Signals.SIGIO # alias @@ -157,31 +242,81 @@ else: @final class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): + """struct_siginfo: Result from sigwaitinfo or sigtimedwait. + +This object may be accessed either as a tuple of +(si_signo, si_code, si_errno, si_pid, si_uid, si_status, si_band), +or via the attributes si_signo, si_code, and so on. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("si_signo", "si_code", "si_errno", "si_pid", "si_uid", "si_status", "si_band") @property - def si_signo(self) -> int: ... + def si_signo(self) -> int: + """signal number +""" @property - def si_code(self) -> int: ... + def si_code(self) -> int: + """signal code +""" @property - def si_errno(self) -> int: ... + def si_errno(self) -> int: + """errno associated with this signal +""" @property - def si_pid(self) -> int: ... + def si_pid(self) -> int: + """sending process ID +""" @property - def si_uid(self) -> int: ... + def si_uid(self) -> int: + """real user ID of sending process +""" @property - def si_status(self) -> int: ... + def si_status(self) -> int: + """exit value or signal +""" @property - def si_band(self) -> int: ... + def si_band(self) -> int: + """band event for SIGPOLL +""" + + def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: + """Like sigwaitinfo(), but with a timeout. + +The timeout is specified in seconds, with floating-point numbers allowed. +""" + def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: + """Wait synchronously until one of the signals in *sigset* is delivered. + +Returns a struct_siginfo containing information about the signal. +""" + +def strsignal(signalnum: _SIGNUM, /) -> str | None: + """Return the system description of the given signal. + +Returns the description of signal *signalnum*, such as "Interrupt" +for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no +description. Raises :exc:`ValueError` if *signalnum* is invalid. +""" +def valid_signals() -> set[Signals]: + """Return a set of valid signal numbers on this platform. + +The signal numbers returned by this function can be safely passed to +functions like `pthread_sigmask`. 
+""" +def raise_signal(signalnum: _SIGNUM, /) -> None: + """Send a signal to the executing process. +""" +def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: + """Sets the fd to be written to (with the signal number) when a signal comes in. - def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: ... - def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: ... +A library can use this to wakeup select or poll. +The previous fd or -1 is returned. -def strsignal(signalnum: _SIGNUM, /) -> str | None: ... -def valid_signals() -> set[Signals]: ... -def raise_signal(signalnum: _SIGNUM, /) -> None: ... -def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: ... +The fd must be non-blocking. +""" if sys.platform == "linux": - def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: ... + def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: + """Send a signal to a process referred to by a pid file descriptor. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi index 6e39677aaea0e..c9cfed4f6e8da 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi @@ -1,3 +1,73 @@ +"""Append module search paths for third-party packages to sys.path. + +**************************************************************** +* This module is automatically imported during initialization. * +**************************************************************** + +This will append site-specific paths to the module search path. On +Unix (including Mac OSX), it starts with sys.prefix and +sys.exec_prefix (if different) and appends +lib/python/site-packages. +On other platforms (such as Windows), it tries each of the +prefixes directly, as well as with lib/site-packages appended. The +resulting directories, if they exist, are appended to sys.path, and +also inspected for path configuration files. + +If a file named "pyvenv.cfg" exists one directory above sys.executable, +sys.prefix and sys.exec_prefix are set to that directory and +it is also checked for site-packages (sys.base_prefix and +sys.base_exec_prefix will always be the "real" prefixes of the Python +installation). If "pyvenv.cfg" (a bootstrap configuration file) contains +the key "include-system-site-packages" set to anything other than "false" +(case-insensitive), the system-level prefixes will still also be +searched for site-packages; otherwise they won't. + +All of the resulting site-specific directories, if they exist, are +appended to sys.path, and also inspected for path configuration +files. + +A path configuration file is a file whose name has the form +.pth; its contents are additional directories (one per line) +to be added to sys.path. Non-existing directories (or +non-directories) are never added to sys.path; no directory is added to +sys.path more than once. Blank lines and lines beginning with +'#' are skipped. Lines starting with 'import' are executed. + +For example, suppose sys.prefix and sys.exec_prefix are set to +/usr/local and there is a directory /usr/local/lib/python2.5/site-packages +with three subdirectories, foo, bar and spam, and two path +configuration files, foo.pth and bar.pth. 
Assume foo.pth contains the +following: + + # foo package configuration + foo + bar + bletch + +and bar.pth contains: + + # bar package configuration + bar + +Then the following directories are added to sys.path, in this order: + + /usr/local/lib/python2.5/site-packages/bar + /usr/local/lib/python2.5/site-packages/foo + +Note that bletch is omitted because it doesn't exist; bar precedes foo +because bar.pth comes alphabetically before foo.pth; and spam is +omitted because it is not mentioned in either path configuration file. + +The readline module is also automatically configured to enable +completion for systems that support it. This can be overridden in +sitecustomize, usercustomize or PYTHONSTARTUP. Starting Python in +isolated mode (-I) disables automatic readline configuration. + +After these operations, an attempt is made to import a module +named sitecustomize, which can perform arbitrary additional +site-specific customizations. If this import fails with an +ImportError exception, it is silently ignored. +""" import sys from _typeshed import StrPath from collections.abc import Iterable @@ -7,30 +77,106 @@ ENABLE_USER_SITE: bool | None USER_SITE: str | None USER_BASE: str | None -def main() -> None: ... -def abs_paths() -> None: ... # undocumented -def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented -def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... -def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented -def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented -def check_enableusersite() -> bool | None: ... # undocumented +def main() -> None: + """Add standard site-specific directories to the module search path. + +This function is called automatically when this module is imported, +unless the python interpreter was started with the -S flag. +""" +def abs_paths() -> None: # undocumented + """Set all module __file__ and __cached__ attributes to an absolute path +""" +def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: # undocumented + """Process a .pth file within the site-packages directory: +For each line in the file, either combine it with sitedir to a path +and add that to known_paths, or execute it if it starts with 'import '. +""" +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: + """Add 'sitedir' argument to sys.path if missing and handle .pth files in +'sitedir' +""" +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: # undocumented + """Add site-packages to sys.path +""" +def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: # undocumented + """Add a per user site-package to sys.path + +Each user has its own python directory with site-packages in the +home directory. +""" +def check_enableusersite() -> bool | None: # undocumented + """Check if user site directory is safe for inclusion + +The function tests for the command line flag (including environment var), +process uid/gid equal to effective uid/gid. + +None: Disabled for security reasons +False: Disabled by user (command line option) +True: Safe and enabled +""" if sys.version_info >= (3, 13): - def gethistoryfile() -> str: ... 
# undocumented + def gethistoryfile() -> str: # undocumented + """Check if the PYTHON_HISTORY environment variable is set and define +it as the .python_history file. If PYTHON_HISTORY is not set, use the +default .python_history file. +""" -def enablerlcompleter() -> None: ... # undocumented +def enablerlcompleter() -> None: # undocumented + """Enable default readline configuration on interactive prompts, by +registering a sys.__interactivehook__. +""" if sys.version_info >= (3, 13): - def register_readline() -> None: ... # undocumented + def register_readline() -> None: # undocumented + """Configure readline completion on interactive prompts. -def execsitecustomize() -> None: ... # undocumented -def execusercustomize() -> None: ... # undocumented -def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... -def getuserbase() -> str: ... -def getusersitepackages() -> str: ... +If the readline module can be imported, the hook will set the Tab key +as completion key and register ~/.python_history as history file. +This can be overridden in the sitecustomize or usercustomize module, +or in a PYTHONSTARTUP file. +""" + +def execsitecustomize() -> None: # undocumented + """Run custom site specific code, if available. +""" +def execusercustomize() -> None: # undocumented + """Run custom user specific code, if available. +""" +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: + """Returns a list containing all global site-packages directories. + +For each directory present in ``prefixes`` (or the global ``PREFIXES``), +this function will find its `site-packages` subdirectory depending on the +system environment, and will return a list of full paths. +""" +def getuserbase() -> str: + """Returns the `user base` directory path. + +The `user base` directory can be used to store data. If the global +variable ``USER_BASE`` is not initialized yet, this function will also set +it. +""" +def getusersitepackages() -> str: + """Returns the user-specific site-packages directory path. + +If the global variable ``USER_SITE`` is not initialized yet, this +function will also set it. +""" def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented -def removeduppaths() -> set[str]: ... # undocumented -def setcopyright() -> None: ... # undocumented +def removeduppaths() -> set[str]: # undocumented + """Remove duplicate entries from sys.path along with making them +absolute +""" +def setcopyright() -> None: # undocumented + """Set 'copyright' and 'credits' in builtins +""" def sethelper() -> None: ... # undocumented -def setquit() -> None: ... # undocumented +def setquit() -> None: # undocumented + """Define new builtins 'quit' and 'exit'. + +These are objects which make the interpreter exit when called. +The repr of each object contains a hint at how it works. + +""" def venv(known_paths: set[str] | None) -> set[str] | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi index dee7e949f42fa..b18979838eda6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi @@ -1,3 +1,47 @@ +"""An RFC 5321 smtp proxy with optional RFC 1870 and RFC 6531 extensions. + +Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]] + +Options: + + --nosetuid + -n + This program generally tries to setuid `nobody', unless this flag is + set. 
The setuid call will fail if this program is not run as root (in + which case, use this flag). + + --version + -V + Print the version number and exit. + + --class classname + -c classname + Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by + default. + + --size limit + -s limit + Restrict the total size of the incoming message to "limit" number of + bytes via the RFC 1870 SIZE extension. Defaults to 33554432 bytes. + + --smtputf8 + -u + Enable the SMTPUTF8 extension and behave as an RFC 6531 smtp proxy. + + --debug + -d + Turn on debugging prints. + + --help + -h + Print this message and exit. + +Version: %(__version__)s + +If localhost is not given then `localhost' is used, and if localport is not +given then 8025 is used. If remotehost is not given then `localhost' is used, +and if remoteport is not given, then 25 is used. +""" import asynchat import asyncore import socket @@ -79,7 +123,37 @@ class SMTPServer(asyncore.dispatcher): def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... def process_message( self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any - ) -> str | None: ... + ) -> str | None: + """Override this abstract method to handle messages from the client. + + peer is a tuple containing (ipaddr, port) of the client that made the + socket connection to our smtp port. + + mailfrom is the raw address the client claims the message is coming + from. + + rcpttos is a list of raw addresses the client wishes to deliver the + message to. + + data is a string containing the entire full text of the message, + headers (if supplied) and all. It has been `de-transparencied' + according to RFC 821, Section 4.5.2. In other words, a line + containing a `.' followed by other text has had the leading dot + removed. + + kwargs is a dictionary containing additional information. It is + empty if decode_data=True was given as init parameter, otherwise + it will contain the following keys: + 'mail_options': list of parameters to the mail command. All + elements are uppercase strings. Example: + ['BODY=8BITMIME', 'SMTPUTF8']. + 'rcpt_options': same, for the rcpt command. + + This function should return None for a normal `250 Ok' response; + otherwise, it should return the desired response string in RFC 821 + format. + + """ class DebuggingServer(SMTPServer): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi index 6a8467689367a..35f8721a3855e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi @@ -1,3 +1,34 @@ +"""SMTP/ESMTP client class. + +This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP +Authentication) and RFC 2487 (Secure SMTP over TLS). + +Notes: + +Please remember, when doing ESMTP, that the names of the SMTP service +extensions are NOT the same thing as the option keywords for the RCPT +and MAIL commands! + +Example: + + >>> import smtplib + >>> s=smtplib.SMTP("localhost") + >>> print(s.help()) + This is Sendmail version 8.8.4 + Topics: + HELO EHLO MAIL RCPT DATA + RSET NOOP QUIT HELP VRFY + EXPN VERB ETRN DSN + For more info use "HELP ". + To report bugs in the implementation send email to + sendmail-bugs@sendmail.org. + For local information send email to Postmaster at your site. 
+ End of HELP info + >>> s.putcmd("vrfy","someone@here") + >>> s.getreply() + (250, "Somebody OverHere ") + >>> s.quit() +""" import sys from _socket import _Address as _SourceAddress from _typeshed import ReadableBuffer, SizedBuffer @@ -37,34 +68,85 @@ bCRLF: Final[bytes] OLDSTYLE_AUTH: Final[Pattern[str]] -class SMTPException(OSError): ... -class SMTPNotSupportedError(SMTPException): ... -class SMTPServerDisconnected(SMTPException): ... +class SMTPException(OSError): + """Base class for all exceptions raised by this module. +""" +class SMTPNotSupportedError(SMTPException): + """The command or option is not supported by the SMTP server. + +This exception is raised when an attempt is made to run a command or a +command with an option which is not supported by the server. +""" +class SMTPServerDisconnected(SMTPException): + """Not connected to any SMTP server. + +This exception is raised when the server unexpectedly disconnects, +or when an attempt is made to use the SMTP instance before +connecting it to a server. +""" class SMTPResponseException(SMTPException): + """Base class for all exceptions that include an SMTP error code. + +These exceptions are generated in some instances when the SMTP +server returns an error code. The error code is stored in the +`smtp_code' attribute of the error, and the `smtp_error' attribute +is set to the error message. +""" smtp_code: int smtp_error: bytes | str args: tuple[int, bytes | str] | tuple[int, bytes, str] def __init__(self, code: int, msg: bytes | str) -> None: ... class SMTPSenderRefused(SMTPResponseException): + """Sender address refused. + +In addition to the attributes set by on all SMTPResponseException +exceptions, this sets 'sender' to the string that the SMTP refused. +""" smtp_error: bytes sender: str args: tuple[int, bytes, str] def __init__(self, code: int, msg: bytes, sender: str) -> None: ... class SMTPRecipientsRefused(SMTPException): + """All recipient addresses refused. + +The errors for each recipient are accessible through the attribute +'recipients', which is a dictionary of exactly the same sort as +SMTP.sendmail() returns. +""" recipients: _SendErrs args: tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... -class SMTPDataError(SMTPResponseException): ... -class SMTPConnectError(SMTPResponseException): ... -class SMTPHeloError(SMTPResponseException): ... -class SMTPAuthenticationError(SMTPResponseException): ... +class SMTPDataError(SMTPResponseException): + """The SMTP server didn't accept the data. +""" +class SMTPConnectError(SMTPResponseException): + """Error during connection establishment. +""" +class SMTPHeloError(SMTPResponseException): + """The server refused our HELO reply. +""" +class SMTPAuthenticationError(SMTPResponseException): + """Authentication error. + +Most probably the server didn't accept the username/password +combination provided. +""" + +def quoteaddr(addrstring: str) -> str: + """Quote a subset of the email addresses defined by RFC 821. -def quoteaddr(addrstring: str) -> str: ... -def quotedata(data: str) -> str: ... +Should be able to handle anything email.utils.parseaddr can handle. +""" +def quotedata(data: str) -> str: + """Quote data for email. + +Double leading '.', and change Unix newline '\\n', or Mac '\\r' into +internet CRLF end-of-line. +""" @type_check_only class _AuthObject(Protocol): @overload @@ -73,6 +155,34 @@ class _AuthObject(Protocol): def __call__(self, challenge: bytes, /) -> str: ... class SMTP: + """This class manages a connection to an SMTP or ESMTP server. 
+SMTP Objects: + SMTP objects have the following attributes: + helo_resp + This is the message given by the server in response to the + most recent HELO command. + + ehlo_resp + This is the message given by the server in response to the + most recent EHLO command. This is usually multiline. + + does_esmtp + This is a True value _after you do an EHLO command_, if the + server supports ESMTP. + + esmtp_features + This is a dictionary, which, if the server supports ESMTP, + will _after you do an EHLO command_, contain the names of the + SMTP service extensions this server supports, and their + parameters (if any). + + Note, all extension names are mapped to lower case in the + dictionary. + + See each method's docstrings for details. In general, there is a + method of the same name to perform each SMTP command. There is also a + method called 'sendmail' that will do an entire mail transaction. + """ debuglevel: int sock: socket | None # Type of file should match what socket.makefile() returns @@ -94,46 +204,230 @@ class SMTP: local_hostname: str | None = None, timeout: float = ..., source_address: _SourceAddress | None = None, - ) -> None: ... + ) -> None: + """Initialize a new instance. + +If specified, `host` is the name of the remote host to which to +connect. If specified, `port` specifies the port to which to connect. +By default, smtplib.SMTP_PORT is used. If a host is specified the +connect method is called, and if it returns anything other than a +success code an SMTPConnectError is raised. If specified, +`local_hostname` is used as the FQDN of the local host in the HELO/EHLO +command. Otherwise, the local hostname is found using +socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host, +port) for the socket to bind to as its source address before +connecting. If the host is '' and port is 0, the OS default behavior +will be used. + +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... - def set_debuglevel(self, debuglevel: int) -> None: ... - def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... - def send(self, s: ReadableBuffer | str) -> None: ... - def putcmd(self, cmd: str, args: str = "") -> None: ... - def getreply(self) -> _Reply: ... - def docmd(self, cmd: str, args: str = "") -> _Reply: ... - def helo(self, name: str = "") -> _Reply: ... - def ehlo(self, name: str = "") -> _Reply: ... - def has_extn(self, opt: str) -> bool: ... - def help(self, args: str = "") -> bytes: ... - def rset(self) -> _Reply: ... - def noop(self) -> _Reply: ... - def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: ... - def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: ... - def data(self, msg: ReadableBuffer | str) -> _Reply: ... - def verify(self, address: str) -> _Reply: ... + def set_debuglevel(self, debuglevel: int) -> None: + """Set the debug output level. + +A non-false value results in debug messages for connection and for all +messages sent to and received from the server. + +""" + def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: + """Connect to a host on a given port. + +If the hostname ends with a colon (':') followed by a number, and +there is no port specified, that suffix will be stripped off and the +number interpreted as the port number to use. 
+ +Note: This method is automatically invoked by __init__, if a host is +specified during instantiation. + +""" + def send(self, s: ReadableBuffer | str) -> None: + """Send 's' to the server. +""" + def putcmd(self, cmd: str, args: str = "") -> None: + """Send a command to the server. +""" + def getreply(self) -> _Reply: + """Get a reply from the server. + +Returns a tuple consisting of: + + - server response code (e.g. '250', or such, if all goes well) + Note: returns -1 if it can't read response code. + + - server response string corresponding to response code (multiline + responses are converted to a single, multiline string). + +Raises SMTPServerDisconnected if end-of-file is reached. +""" + def docmd(self, cmd: str, args: str = "") -> _Reply: + """Send a command, and return its response code. +""" + def helo(self, name: str = "") -> _Reply: + """SMTP 'helo' command. +Hostname to send for this command defaults to the FQDN of the local +host. +""" + def ehlo(self, name: str = "") -> _Reply: + """SMTP 'ehlo' command. +Hostname to send for this command defaults to the FQDN of the local +host. +""" + def has_extn(self, opt: str) -> bool: + """Does the server support a given SMTP service extension? +""" + def help(self, args: str = "") -> bytes: + """SMTP 'help' command. +Returns help text from server. +""" + def rset(self) -> _Reply: + """SMTP 'rset' command -- resets session. +""" + def noop(self) -> _Reply: + """SMTP 'noop' command -- doesn't do anything :> +""" + def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: + """SMTP 'mail' command -- begins mail xfer session. + +This method may raise the following exceptions: + + SMTPNotSupportedError The options parameter includes 'SMTPUTF8' + but the SMTPUTF8 extension is not supported by + the server. +""" + def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: + """SMTP 'rcpt' command -- indicates 1 recipient for this mail. +""" + def data(self, msg: ReadableBuffer | str) -> _Reply: + """SMTP 'DATA' command -- sends message data to server. + +Automatically quotes lines beginning with a period per rfc821. +Raises SMTPDataError if there is an unexpected reply to the +DATA command; the return value from this method is the final +response code received when the all data is sent. If msg +is a string, lone '\\r' and '\\n' characters are converted to +'\\r\\n' characters. If msg is bytes, it is transmitted as is. +""" + def verify(self, address: str) -> _Reply: + """SMTP 'verify' command -- checks for address validity. +""" vrfy = verify - def expn(self, address: str) -> _Reply: ... - def ehlo_or_helo_if_needed(self) -> None: ... + def expn(self, address: str) -> _Reply: + """SMTP 'expn' command -- expands a mailing list. +""" + def ehlo_or_helo_if_needed(self) -> None: + """Call self.ehlo() and/or self.helo() if needed. + +If there has been no previous EHLO or HELO command this session, this +method tries ESMTP EHLO first. + +This method may raise the following exceptions: + + SMTPHeloError The server didn't reply properly to + the helo greeting. +""" user: str password: str - def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: + """Authentication command - requires response processing. + +'mechanism' specifies which authentication mechanism is to +be used - the valid values are those listed in the 'auth' +element of 'esmtp_features'. 
+ +'authobject' must be a callable object taking a single argument: + + data = authobject(challenge) + +It will be called to process the server's challenge response; the +challenge argument it is passed will be a bytes. It should return +an ASCII string that will be base64 encoded and sent to the server. + +Keyword arguments: + - initial_response_ok: Allow sending the RFC 4954 initial-response + to the AUTH command, if the authentication methods supports it. +""" @overload - def auth_cram_md5(self, challenge: None = None) -> None: ... + def auth_cram_md5(self, challenge: None = None) -> None: + """Authobject to use with CRAM-MD5 authentication. Requires self.user +and self.password to be set. +""" @overload def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... - def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... - def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... - def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: + """Authobject to use with PLAIN authentication. Requires self.user and +self.password to be set. +""" + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: + """Authobject to use with LOGIN authentication. Requires self.user and +self.password to be set. +""" + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: + """Log in on an SMTP server that requires authentication. + +The arguments are: + - user: The user name to authenticate with. + - password: The password for the authentication. + +Keyword arguments: + - initial_response_ok: Allow sending the RFC 4954 initial-response + to the AUTH command, if the authentication methods supports it. + +If there has been no previous EHLO or HELO command this session, this +method tries ESMTP EHLO first. + +This method will return normally if the authentication was successful. + +This method may raise the following exceptions: + + SMTPHeloError The server didn't reply properly to + the helo greeting. + SMTPAuthenticationError The server didn't accept the username/ + password combination. + SMTPNotSupportedError The AUTH command is not supported by the + server. + SMTPException No suitable authentication method was + found. +""" if sys.version_info >= (3, 12): - def starttls(self, *, context: SSLContext | None = None) -> _Reply: ... + def starttls(self, *, context: SSLContext | None = None) -> _Reply: + """Puts the connection to the SMTP server into TLS mode. + +If there has been no previous EHLO or HELO command this session, this +method tries ESMTP EHLO first. + +If the server supports TLS, this will encrypt the rest of the SMTP +session. If you provide the context parameter, +the identity of the SMTP server and client can be checked. This, +however, depends on whether the socket module really checks the +certificates. + +This method may raise the following exceptions: + + SMTPHeloError The server didn't reply properly to + the helo greeting. +""" else: def starttls( self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None - ) -> _Reply: ... + ) -> _Reply: + """Puts the connection to the SMTP server into TLS mode. + + If there has been no previous EHLO or HELO command this session, this + method tries ESMTP EHLO first. + + If the server supports TLS, this will encrypt the rest of the SMTP + session. 
If you provide the keyfile and certfile parameters, + the identity of the SMTP server and client can be checked. This, + however, depends on whether the socket module really checks the + certificates. + + This method may raise the following exceptions: + + SMTPHeloError The server didn't reply properly to + the helo greeting. + """ def sendmail( self, @@ -142,7 +436,69 @@ class SMTP: msg: SizedBuffer | str, mail_options: Sequence[str] = (), rcpt_options: Sequence[str] = (), - ) -> _SendErrs: ... + ) -> _SendErrs: + """This command performs an entire mail transaction. + +The arguments are: + - from_addr : The address sending this mail. + - to_addrs : A list of addresses to send this mail to. A bare + string will be treated as a list with 1 address. + - msg : The message to send. + - mail_options : List of ESMTP options (such as 8bitmime) for the + mail command. + - rcpt_options : List of ESMTP options (such as DSN commands) for + all the rcpt commands. + +msg may be a string containing characters in the ASCII range, or a byte +string. A string is encoded to bytes using the ascii codec, and lone +\\r and \\n characters are converted to \\r\\n characters. + +If there has been no previous EHLO or HELO command this session, this +method tries ESMTP EHLO first. If the server does ESMTP, message size +and each of the specified options will be passed to it. If EHLO +fails, HELO will be tried and ESMTP options suppressed. + +This method will return normally if the mail is accepted for at least +one recipient. It returns a dictionary, with one entry for each +recipient that was refused. Each entry contains a tuple of the SMTP +error code and the accompanying error message sent by the server. + +This method may raise the following exceptions: + + SMTPHeloError The server didn't reply properly to + the helo greeting. + SMTPRecipientsRefused The server rejected ALL recipients + (no mail was sent). + SMTPSenderRefused The server didn't accept the from_addr. + SMTPDataError The server replied with an unexpected + error code (other than a refusal of + a recipient). + SMTPNotSupportedError The mail_options parameter includes 'SMTPUTF8' + but the SMTPUTF8 extension is not supported by + the server. + +Note: the connection will be open even after an exception is raised. + +Example: + + >>> import smtplib + >>> s=smtplib.SMTP("localhost") + >>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"] + >>> msg = '''\\ + ... From: Me@my.org + ... Subject: testin'... + ... + ... This is a test ''' + >>> s.sendmail("me@my.org",tolist,msg) + { "three@three.org" : ( 550 ,"User unknown" ) } + >>> s.quit() + +In the above example, the message was accepted for delivery to three +of the four addresses, and one was rejected, with the error code +550. If all addresses are accepted, then the method will return an +empty dictionary. + +""" def send_message( self, msg: _Message, @@ -150,11 +506,43 @@ class SMTP: to_addrs: str | Sequence[str] | None = None, mail_options: Sequence[str] = (), rcpt_options: Sequence[str] = (), - ) -> _SendErrs: ... - def close(self) -> None: ... - def quit(self) -> _Reply: ... + ) -> _SendErrs: + """Converts message to a bytestring and passes it to sendmail. + +The arguments are as for sendmail, except that msg is an +email.message.Message object. If from_addr is None or to_addrs is +None, these arguments are taken from the headers of the Message as +described in RFC 2822 (a ValueError is raised if there is more than +one set of 'Resent-' headers). 
Regardless of the values of from_addr and +to_addr, any Bcc field (or Resent-Bcc field, when the Message is a +resent) of the Message object won't be transmitted. The Message +object is then serialized using email.generator.BytesGenerator and +sendmail is called to transmit the message. If the sender or any of +the recipient addresses contain non-ASCII and the server advertises the +SMTPUTF8 capability, the policy is cloned with utf8 set to True for the +serialization, and SMTPUTF8 and BODY=8BITMIME are asserted on the send. +If the server does not support SMTPUTF8, an SMTPNotSupported error is +raised. Otherwise the generator is called without modifying the +policy. + +""" + def close(self) -> None: + """Close the connection to the SMTP server. +""" + def quit(self) -> _Reply: + """Terminate the SMTP session. +""" class SMTP_SSL(SMTP): + """This is a subclass derived from SMTP that connects over an SSL +encrypted socket (to use this class you need a socket module that was +compiled with SSL support). If host is not specified, '' (the local +host) is used. If port is omitted, the standard SMTP-over-SSL port +(465) is used. local_hostname and source_address have the same meaning +as they do in the SMTP class. context also optional, can contain a +SSLContext. + +""" keyfile: str | None certfile: str | None context: SSLContext @@ -185,6 +573,19 @@ class SMTP_SSL(SMTP): LMTP_PORT: Final = 2003 class LMTP(SMTP): + """LMTP - Local Mail Transfer Protocol + +The LMTP protocol, which is very similar to ESMTP, is heavily based +on the standard SMTP client. It's common to use Unix sockets for +LMTP, so our connect() method must support that as well as a regular +host:port server. local_hostname and source_address have the same +meaning as they do in the SMTP class. To specify a Unix socket, +you must use an absolute path as the host, starting with a '/'. + +Authentication is supported, using the regular SMTP mechanism. When +using a Unix socket, LMTP generally don't support or require any +authentication, but your mileage might vary. +""" def __init__( self, host: str = "", @@ -192,4 +593,6 @@ class LMTP(SMTP): local_hostname: str | None = None, source_address: _SourceAddress | None = None, timeout: float = ..., - ) -> None: ... + ) -> None: + """Initialize a new instance. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi index f4d487607fbb2..a680cd23ecf84 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi @@ -1,14 +1,48 @@ +"""Routines to help recognizing sound files. + +Function whathdr() recognizes various types of sound file headers. +It understands almost all headers that SOX can decode. + +The return tuple contains the following items, in this order: +- file type (as SOX understands it) +- sampling rate (0 if unknown or hard to decode) +- number of channels (0 if unknown or hard to decode) +- number of frames in the file (-1 if unknown or hard to decode) +- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW + +If the file doesn't have a recognizable type, it returns None. +If the file can't be opened, OSError is raised. + +To compute the total time, divide the number of frames by the +sampling rate (a frame contains a sample for each channel). + +Function what() calls whathdr(). (It used to also use some +heuristics for raw data, but this doesn't work very well.) 
+ +Finally, the function test() is a simple main program that calls +what() for all files mentioned on the argument list. For directory +arguments it calls what() for all files in that directory. Default +argument is "." (testing all files in the current directory). The +option -r tells it to recurse down directories found inside +explicitly given directories. +""" from _typeshed import StrOrBytesPath from typing import NamedTuple __all__ = ["what", "whathdr"] class SndHeaders(NamedTuple): + """SndHeaders(filetype, framerate, nchannels, nframes, sampwidth) +""" filetype: str framerate: int nchannels: int nframes: int sampwidth: int | str -def what(filename: StrOrBytesPath) -> SndHeaders | None: ... -def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: ... +def what(filename: StrOrBytesPath) -> SndHeaders | None: + """Guess the type of a sound file. +""" +def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: + """Recognize sound headers. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi index b10b3560b91fa..e6699c2f4724b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi @@ -1,3 +1,49 @@ +"""This module provides socket operations and some related functions. +On Unix, it supports IP (Internet Protocol) and Unix domain sockets. +On other systems, it only supports IP. Functions specific for a +socket are available as methods of the socket object. + +Functions: + +socket() -- create a new socket object +socketpair() -- create a pair of new socket objects [*] +fromfd() -- create a socket object from an open file descriptor [*] +send_fds() -- Send file descriptor to the socket. +recv_fds() -- Receive file descriptors from the socket. +fromshare() -- create a socket object from data received from socket.share() [*] +gethostname() -- return the current hostname +gethostbyname() -- map a hostname to its IP number +gethostbyaddr() -- map an IP number or hostname to DNS info +getservbyname() -- map a service name and a protocol name to a port number +getprotobyname() -- map a protocol name (e.g. 'tcp') to a number +ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order +htons(), htonl() -- convert 16, 32 bit int from host to network byte order +inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format +inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89) +socket.getdefaulttimeout() -- get the default timeout value +socket.setdefaulttimeout() -- set the default timeout value +create_connection() -- connects to an address, with an optional timeout and + optional source address. +create_server() -- create a TCP socket and bind it to a specified address. + + [*] not available on all platforms! + +Special objects: + +SocketType -- type object for socket objects +error -- exception raised for I/O errors +has_ipv6 -- boolean value indicating if IPv6 is supported + +IntEnum constants: + +AF_INET, AF_UNIX -- socket domains (first argument to socket() call) +SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument) + +Integer constants: + +Many other constants may be defined; these may be used in calls to +the setsockopt() and getsockopt() methods. +""" # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. 
@@ -1076,6 +1122,8 @@ else: class timeout(error): ... class AddressFamily(IntEnum): + """An enumeration. +""" AF_INET = 2 AF_INET6 = 10 AF_APPLETALK = 5 @@ -1180,6 +1228,8 @@ if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darw AF_DIVERT: Final = AddressFamily.AF_DIVERT class SocketKind(IntEnum): + """An enumeration. +""" SOCK_STREAM = 1 SOCK_DGRAM = 2 SOCK_RAW = 3 @@ -1199,6 +1249,8 @@ if sys.platform == "linux": SOCK_NONBLOCK: Final = SocketKind.SOCK_NONBLOCK class MsgFlag(IntFlag): + """An enumeration. +""" MSG_CTRUNC = 8 MSG_DONTROUTE = 4 MSG_OOB = 1 @@ -1260,6 +1312,8 @@ if sys.platform != "win32" and sys.platform != "linux": MSG_EOF: Final = MsgFlag.MSG_EOF class AddressInfo(IntFlag): + """An enumeration. +""" AI_ADDRCONFIG = 32 AI_ALL = 16 AI_CANONNAME = 2 @@ -1300,14 +1354,27 @@ class _SendableFile(Protocol): # def fileno(self) -> int: ... class socket(_socket.socket): + """A subclass of _socket.socket adding the makefile() method. +""" __slots__ = ["__weakref__", "_io_refs", "_closed"] def __init__( self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None ) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def dup(self) -> Self: ... - def accept(self) -> tuple[socket, _RetAddress]: ... + def dup(self) -> Self: + """dup() -> socket object + +Duplicate the socket. Return a new socket object connected to the same +system resource. The new socket is non-inheritable. +""" + def accept(self) -> tuple[socket, _RetAddress]: + """accept() -> (socket object, address info) + +Wait for an incoming connection. Return a new socket +representing the connection, and the address of the client. +For IP sockets, the address info is a pair (hostaddr, port). +""" # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @overload @@ -1319,7 +1386,13 @@ class socket(_socket.socket): encoding: str | None = None, errors: str | None = None, newline: str | None = None, - ) -> SocketIO: ... + ) -> SocketIO: + """makefile(...) -> an I/O stream connected to the socket + +The arguments are as for io.open() after the filename, except the only +supported mode values are 'r' (default), 'w', 'b', or a combination of +those. +""" @overload def makefile( self, @@ -1370,21 +1443,61 @@ class socket(_socket.socket): errors: str | None = None, newline: str | None = None, ) -> TextIOWrapper: ... - def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: + """sendfile(file[, offset[, count]]) -> sent + +Send a file until EOF is reached by using high-performance +os.sendfile() and return the total number of bytes which +were sent. +*file* must be a regular file object opened in binary mode. +If os.sendfile() is not available (e.g. Windows) or file is +not a regular file socket.send() will be used instead. +*offset* tells from where to start reading the file. +If specified, *count* is the total number of bytes to transmit +as opposed to sending the file until EOF is reached. +File position is updated on return or also in case of error in +which case file.tell() can be used to figure out the number of +bytes which were sent. +The socket must be of SOCK_STREAM type. +Non-blocking sockets are not supported. +""" @property - def family(self) -> AddressFamily: ... 
+ def family(self) -> AddressFamily: + """Read-only access to the address family for this socket. + """ @property - def type(self) -> SocketKind: ... - def get_inheritable(self) -> bool: ... - def set_inheritable(self, inheritable: bool) -> None: ... - -def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... + def type(self) -> SocketKind: + """Read-only access to the socket type. + """ + def get_inheritable(self) -> bool: + """Get the inheritable flag of the socket +""" + def set_inheritable(self, inheritable: bool) -> None: + """Set the inheritable flag of the socket +""" + +def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: + """fromfd(fd, family, type[, proto]) -> socket object + +Create a socket object from a duplicate of the given file +descriptor. The remaining arguments are the same as for socket(). +""" if sys.platform != "win32": def send_fds( sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None - ) -> int: ... - def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... + ) -> int: + """send_fds(sock, buffers, fds[, flags[, address]]) -> integer + +Send the list of file descriptors fds over an AF_UNIX socket. +""" + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: + """recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file +descriptors, msg_flags, address) + +Receive up to maxfds file descriptors returning the message +data and a list containing the descriptors. +""" if sys.platform == "win32": def fromshare(info: bytes) -> socket: ... @@ -1395,18 +1508,50 @@ if sys.platform == "win32": else: def socketpair( family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 - ) -> tuple[socket, socket]: ... + ) -> tuple[socket, socket]: + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) +Create a pair of socket objects from the sockets returned by the platform +socketpair() function. +The arguments are the same as for socket() except the default family is AF_UNIX +if defined on the platform; otherwise, the default is AF_INET. +""" class SocketIO(RawIOBase): + """Raw I/O implementation for stream sockets. + +This class supports the makefile() method on sockets. It provides +the raw I/O interface on top of a socket object. +""" def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... - def readinto(self, b: WriteableBuffer) -> int | None: ... - def write(self, b: ReadableBuffer) -> int | None: ... + def readinto(self, b: WriteableBuffer) -> int | None: + """Read up to len(b) bytes into the writable buffer *b* and return +the number of bytes read. If the socket is non-blocking and no bytes +are available, None is returned. + +If *b* is non-empty, a 0 return value indicates that the connection +was shutdown at the other end. +""" + def write(self, b: ReadableBuffer) -> int | None: + """Write the given bytes or bytearray object *b* to the socket +and return the number of bytes written. This can be less than +len(b) if not all data could be written. If the socket is +non-blocking and no bytes could be written None is returned. +""" @property def name(self) -> int: ... # return value is really "int" @property def mode(self) -> Literal["rb", "wb", "rwb"]: ... -def getfqdn(name: str = "") -> str: ... 
+def getfqdn(name: str = "") -> str: + """Get fully qualified domain name from name. + +An empty argument is interpreted as meaning the local host. + +First the hostname returned by gethostbyaddr() is checked, then +possibly existing aliases. In case no FQDN is available and `name` +was given, it is returned unchanged. If `name` was empty, '0.0.0.0' or '::', +hostname from gethostname() is returned. +""" if sys.version_info >= (3, 11): def create_connection( @@ -1415,19 +1560,76 @@ if sys.version_info >= (3, 11): source_address: _Address | None = None, *, all_errors: bool = False, - ) -> socket: ... + ) -> socket: + """Connect to *address* and return the socket object. + +Convenience function. Connect to *address* (a 2-tuple ``(host, +port)``) and return the socket object. Passing the optional +*timeout* parameter will set the timeout on the socket instance +before attempting to connect. If no *timeout* is supplied, the +global default timeout setting returned by :func:`getdefaulttimeout` +is used. If *source_address* is set it must be a tuple of (host, port) +for the socket to bind as a source address before making the connection. +A host of '' or port 0 tells the OS to use the default. When a connection +cannot be created, raises the last error if *all_errors* is False, +and an ExceptionGroup of all errors if *all_errors* is True. +""" else: def create_connection( address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None - ) -> socket: ... - -def has_dualstack_ipv6() -> bool: ... + ) -> socket: + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + A host of '' or port 0 tells the OS to use the default. + """ + +def has_dualstack_ipv6() -> bool: + """Return True if the platform supports creating a SOCK_STREAM socket +which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections. +""" def create_server( address: _Address, *, family: int = ..., backlog: int | None = None, reuse_port: bool = False, dualstack_ipv6: bool = False -) -> socket: ... +) -> socket: + """Convenience function which creates a SOCK_STREAM type socket +bound to *address* (a 2-tuple (host, port)) and return the socket +object. + +*family* should be either AF_INET or AF_INET6. +*backlog* is the queue size passed to socket.listen(). +*reuse_port* dictates whether to use the SO_REUSEPORT socket option. +*dualstack_ipv6*: if true and the platform supports it, it will +create an AF_INET6 socket able to accept both IPv4 or IPv6 +connections. When false it will explicitly disable this option on +platforms that enable it by default (e.g. Linux). + +>>> with create_server(('', 8000)) as server: +... while True: +... conn, addr = server.accept() +... # handle new connection +""" # The 5th tuple item is the socket address, for IP4, IP6, or IP6 if Python is compiled with --disable-ipv6, respectively. 
def getaddrinfo( host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 -) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: ... +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: + """Resolve host and port into list of address info entries. + +Translate the host/port argument into a sequence of 5-tuples that contain +all the necessary arguments for creating a socket connected to that service. +host is a domain name, a string representation of an IPv4/v6 address or +None. port is a string service name such as 'http', a numeric port number or +None. By passing None as the value of host and port, you can pass NULL to +the underlying C API. + +The family, type and proto arguments can be optionally specified in order to +narrow the list of addresses returned. Passing zero as a value for each of +these arguments selects the full range of results. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi index f321d14a792b2..d449c2ebc66d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi @@ -1,3 +1,122 @@ +"""Generic socket server classes. + +This module tries to capture the various aspects of defining a server: + +For socket-based servers: + +- address family: + - AF_INET{,6}: IP (Internet Protocol) sockets (default) + - AF_UNIX: Unix domain sockets + - others, e.g. AF_DECNET are conceivable (see +- socket type: + - SOCK_STREAM (reliable stream, e.g. TCP) + - SOCK_DGRAM (datagrams, e.g. UDP) + +For request-based servers (including socket-based): + +- client address verification before further looking at the request + (This is actually a hook for any processing that needs to look + at the request before anything else, e.g. logging) +- how to handle multiple requests: + - synchronous (one request is handled at a time) + - forking (each request is handled by a new process) + - threading (each request is handled by a new thread) + +The classes in this module favor the server type that is simplest to +write: a synchronous TCP/IP server. This is bad class design, but +saves some typing. (There's also the issue that a deep class hierarchy +slows down method lookups.) + +There are five classes in an inheritance diagram, four of which represent +synchronous servers of four types: + + +------------+ + | BaseServer | + +------------+ + | + v + +-----------+ +------------------+ + | TCPServer |------->| UnixStreamServer | + +-----------+ +------------------+ + | + v + +-----------+ +--------------------+ + | UDPServer |------->| UnixDatagramServer | + +-----------+ +--------------------+ + +Note that UnixDatagramServer derives from UDPServer, not from +UnixStreamServer -- the only difference between an IP and a Unix +stream server is the address family, which is simply repeated in both +unix server classes. + +Forking and threading versions of each type of server can be created +using the ForkingMixIn and ThreadingMixIn mix-in classes. For +instance, a threading UDP server class is created as follows: + + class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass + +The Mix-in class must come first, since it overrides a method defined +in UDPServer! 
Setting the various member variables also changes +the behavior of the underlying server mechanism. + +To implement a service, you must derive a class from +BaseRequestHandler and redefine its handle() method. You can then run +various versions of the service by combining one of the server classes +with your request handler class. + +The request handler class must be different for datagram or stream +services. This can be hidden by using the request handler +subclasses StreamRequestHandler or DatagramRequestHandler. + +Of course, you still have to use your head! + +For instance, it makes no sense to use a forking server if the service +contains state in memory that can be modified by requests (since the +modifications in the child process would never reach the initial state +kept in the parent process and passed to each child). In this case, +you can use a threading server, but you will probably have to use +locks to avoid two requests that come in nearly simultaneous to apply +conflicting changes to the server state. + +On the other hand, if you are building e.g. an HTTP server, where all +data is stored externally (e.g. in the file system), a synchronous +class will essentially render the service "deaf" while one request is +being handled -- which may be for a very long time if a client is slow +to read all the data it has requested. Here a threading or forking +server is appropriate. + +In some cases, it may be appropriate to process part of a request +synchronously, but to finish processing in a forked child depending on +the request data. This can be implemented by using a synchronous +server and doing an explicit fork in the request handler class +handle() method. + +Another approach to handling multiple simultaneous requests in an +environment that supports neither threads nor fork (or where these are +too expensive or inappropriate for the service) is to maintain an +explicit table of partially finished requests and to use a selector to +decide which request to work on next (or whether to handle a new +incoming request). This is particularly important for stream services +where each client can potentially be connected for a long time (if +threads or subprocesses cannot be used). + +Future work: +- Standard classes for Sun RPC (which uses either UDP or TCP) +- Standard mix-in classes to implement various authentication + and encryption schemes + +XXX Open problems: +- What to do with out-of-band data? + +BaseServer: +- split generic "request" functionality out into BaseServer class. + Copyright (C) 2000 Luke Kenneth Casson Leighton + + example: read entries from a SQL database (requires overriding + get_request() to return a table entry from the database). + entry is processed by a RequestHandlerClass. + +""" import sys import types from _socket import _Address, _RetAddress @@ -39,32 +158,178 @@ _AfInet6Address: TypeAlias = tuple[str | bytes | bytearray, int, int, int] # ad # This can possibly be generic at some point: class BaseServer: + """Base class for server classes. 
+ +Methods for the caller: + +- __init__(server_address, RequestHandlerClass) +- serve_forever(poll_interval=0.5) +- shutdown() +- handle_request() # if you do not use serve_forever() +- fileno() -> int # for selector + +Methods that may be overridden: + +- server_bind() +- server_activate() +- get_request() -> request, client_address +- handle_timeout() +- verify_request(request, client_address) +- server_close() +- process_request(request, client_address) +- shutdown_request(request) +- close_request(request) +- service_actions() +- handle_error() + +Methods for derived classes: + +- finish_request(request, client_address) + +Class variables that may be overridden by derived classes or +instances: + +- timeout +- address_family +- socket_type +- allow_reuse_address +- allow_reuse_port + +Instance variables: + +- RequestHandlerClass +- socket + +""" server_address: _Address timeout: float | None RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] def __init__( self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] - ) -> None: ... - def handle_request(self) -> None: ... - def serve_forever(self, poll_interval: float = 0.5) -> None: ... - def shutdown(self) -> None: ... - def server_close(self) -> None: ... - def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + ) -> None: + """Constructor. May be extended, do not override. +""" + def handle_request(self) -> None: + """Handle one request, possibly blocking. + +Respects self.timeout. +""" + def serve_forever(self, poll_interval: float = 0.5) -> None: + """Handle one request at a time until shutdown. + +Polls for shutdown every poll_interval seconds. Ignores +self.timeout. If you need to do periodic tasks, do them in +another thread. +""" + def shutdown(self) -> None: + """Stops the serve_forever loop. + +Blocks until the loop has finished. This must be called while +serve_forever() is running in another thread, or it will +deadlock. +""" + def server_close(self) -> None: + """Called to clean-up the server. + +May be overridden. + +""" + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: + """Finish one request by instantiating RequestHandlerClass. +""" def get_request(self) -> tuple[Any, Any]: ... # Not implemented here, but expected to exist on subclasses - def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... - def handle_timeout(self) -> None: ... - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... - def server_activate(self) -> None: ... - def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: + """Handle an error gracefully. May be overridden. + +The default is to print a traceback and continue. + +""" + def handle_timeout(self) -> None: + """Called if no new request arrives within self.timeout. + +Overridden by ForkingMixIn. +""" + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: + """Call finish_request. + +Overridden by ForkingMixIn and ThreadingMixIn. + +""" + def server_activate(self) -> None: + """Called by constructor to activate the server. + +May be overridden. + +""" + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: + """Verify the request. May be overridden. 
+ +Return True if we should proceed with this request. + +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... - def service_actions(self) -> None: ... - def shutdown_request(self, request: _RequestType) -> None: ... # undocumented - def close_request(self, request: _RequestType) -> None: ... # undocumented + def service_actions(self) -> None: + """Called by the serve_forever() loop. + +May be overridden by a subclass / Mixin to implement any code that +needs to be run during the loop. +""" + def shutdown_request(self, request: _RequestType) -> None: # undocumented + """Called to shutdown and close an individual request. +""" + def close_request(self, request: _RequestType) -> None: # undocumented + """Called to clean up an individual request. +""" class TCPServer(BaseServer): + """Base class for various socket-based server classes. + +Defaults to synchronous IP stream (i.e., TCP). + +Methods for the caller: + +- __init__(server_address, RequestHandlerClass, bind_and_activate=True) +- serve_forever(poll_interval=0.5) +- shutdown() +- handle_request() # if you don't use serve_forever() +- fileno() -> int # for selector + +Methods that may be overridden: + +- server_bind() +- server_activate() +- get_request() -> request, client_address +- handle_timeout() +- verify_request(request, client_address) +- process_request(request, client_address) +- shutdown_request(request) +- close_request(request) +- handle_error() + +Methods for derived classes: + +- finish_request(request, client_address) + +Class variables that may be overridden by derived classes or +instances: + +- timeout +- address_family +- socket_type +- request_queue_size (only for stream sockets) +- allow_reuse_address +- allow_reuse_port + +Instance variables: + +- server_address +- RequestHandlerClass +- socket + +""" address_family: int socket: _socket allow_reuse_address: bool @@ -78,12 +343,31 @@ class TCPServer(BaseServer): server_address: _AfInetAddress | _AfInet6Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: ... - def fileno(self) -> int: ... - def get_request(self) -> tuple[_socket, _RetAddress]: ... - def server_bind(self) -> None: ... + ) -> None: + """Constructor. May be extended, do not override. +""" + def fileno(self) -> int: + """Return socket file number. + +Interface required by selector. + +""" + def get_request(self) -> tuple[_socket, _RetAddress]: + """Get the request and client address from the socket. + +May be overridden. + +""" + def server_bind(self) -> None: + """Called by constructor to bind the socket. + +May be overridden. + +""" class UDPServer(TCPServer): + """UDP server class. +""" max_packet_size: ClassVar[int] def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] @@ -95,7 +379,9 @@ if sys.platform != "win32": server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: ... + ) -> None: + """Constructor. May be extended, do not override. +""" class UnixDatagramServer(UDPServer): server_address: _AfUnixAddress # type: ignore[assignment] @@ -104,25 +390,50 @@ if sys.platform != "win32": server_address: _AfUnixAddress, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, - ) -> None: ... 
+ ) -> None: + """Constructor. May be extended, do not override. +""" if sys.platform != "win32": class ForkingMixIn: + """Mix-in class to handle each request in a new process. +""" timeout: float | None # undocumented active_children: set[int] | None # undocumented max_children: int # undocumented block_on_close: bool - def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented - def handle_timeout(self) -> None: ... # undocumented - def service_actions(self) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def collect_children(self, *, blocking: bool = False) -> None: # undocumented + """Internal routine to wait for children that have exited. +""" + def handle_timeout(self) -> None: # undocumented + """Wait for zombies after self.timeout seconds of inactivity. + +May be extended, do not override. +""" + def service_actions(self) -> None: # undocumented + """Collect the zombie child processes regularly in the ForkingMixIn. + +service_actions is called in the BaseServer's serve_forever loop. +""" + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: + """Fork a new subprocess to process the request. +""" def server_close(self) -> None: ... class ThreadingMixIn: + """Mix-in class to handle each request in a new thread. +""" daemon_threads: bool block_on_close: bool - def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: # undocumented + """Same as in BaseServer but as a thread. + +In addition, exception handling is done here. + +""" + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: + """Start a new thread to process the request. +""" def server_close(self) -> None: ... if sys.platform != "win32": @@ -140,6 +451,21 @@ if sys.platform != "win32": class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: + """Base class for request handler classes. + +This class is instantiated for each request to be handled. The +constructor sets the instance variables request, client_address +and server, and then calls the handle() method. To implement a +specific service, all you need to do is to derive a class which +defines a handle() method. + +The handle() method can find the request as self.request, the +client address as self.client_address, and the server (in case it +needs access to per-server information) as self.server. Since a +separate instance is created for each request, the handle() method +can define other arbitrary instance variables. + +""" # `request` is technically of type _RequestType, # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see @@ -155,6 +481,8 @@ class BaseRequestHandler: def finish(self) -> None: ... class StreamRequestHandler(BaseRequestHandler): + """Define self.rfile and self.wfile for stream sockets. +""" rbufsize: ClassVar[int] # undocumented wbufsize: ClassVar[int] # undocumented timeout: ClassVar[float | None] # undocumented @@ -164,6 +492,8 @@ class StreamRequestHandler(BaseRequestHandler): wfile: BufferedIOBase class DatagramRequestHandler(BaseRequestHandler): + """Define self.rfile and self.wfile for datagram sockets. 
+""" packet: bytes # undocumented socket: _socket # undocumented rfile: BufferedIOBase diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi index 3a5d39997dcc7..1a7059699ef04 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi @@ -1,3 +1,13 @@ +"""This module provides access to the Unix shadow password database. +It is available on various Unix versions. + +Shadow password database entries are reported as 9-tuples of type struct_spwd, +containing the following items from the password database (see `'): +sp_namp, sp_pwdp, sp_lstchg, sp_min, sp_max, sp_warn, sp_inact, sp_expire, sp_flag. +The sp_namp and sp_pwdp are strings, the rest are integers. +An exception is raised if the entry asked for cannot be found. +You have to be root to be able to use this module. +""" import sys from _typeshed import structseq from typing import Any, Final, final @@ -5,6 +15,12 @@ from typing import Any, Final, final if sys.platform != "win32": @final class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): + """spwd.struct_spwd: Results from getsp*() routines. + +This object may be accessed either as a 9-tuple of + (sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag) +or via the object attributes as named in the above tuple. +""" if sys.version_info >= (3, 10): __match_args__: Final = ( "sp_namp", @@ -19,28 +35,58 @@ if sys.platform != "win32": ) @property - def sp_namp(self) -> str: ... + def sp_namp(self) -> str: + """login name +""" @property - def sp_pwdp(self) -> str: ... + def sp_pwdp(self) -> str: + """encrypted password +""" @property - def sp_lstchg(self) -> int: ... + def sp_lstchg(self) -> int: + """date of last change +""" @property - def sp_min(self) -> int: ... + def sp_min(self) -> int: + """min #days between changes +""" @property - def sp_max(self) -> int: ... + def sp_max(self) -> int: + """max #days between changes +""" @property - def sp_warn(self) -> int: ... + def sp_warn(self) -> int: + """#days before pw expires to warn user about it +""" @property - def sp_inact(self) -> int: ... + def sp_inact(self) -> int: + """#days after pw expires until account is disabled +""" @property - def sp_expire(self) -> int: ... + def sp_expire(self) -> int: + """#days since 1970-01-01 when account expires +""" @property - def sp_flag(self) -> int: ... + def sp_flag(self) -> int: + """reserved +""" # Deprecated aliases below. @property - def sp_nam(self) -> str: ... + def sp_nam(self) -> str: + """login name; deprecated +""" @property - def sp_pwd(self) -> str: ... + def sp_pwd(self) -> str: + """encrypted password; deprecated +""" + + def getspall() -> list[struct_spwd]: + """Return a list of all available shadow password database entries, in arbitrary order. + +See `help(spwd)` for more on shadow password database entries. +""" + def getspnam(arg: str, /) -> struct_spwd: + """Return the shadow password database entry for the given user name. - def getspall() -> list[struct_spwd]: ... - def getspnam(arg: str, /) -> struct_spwd: ... +See `help(spwd)` for more on shadow password database entries. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi index 6b0f1ba949106..792e15b45c91e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi @@ -1,3 +1,36 @@ +""" +The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant +interface to the SQLite library, and requires SQLite 3.15.2 or newer. + +To use the module, start by creating a database Connection object: + + import sqlite3 + cx = sqlite3.connect("test.db") # test.db will be created or opened + +The special path name ":memory:" can be provided to connect to a transient +in-memory database: + + cx = sqlite3.connect(":memory:") # connect to a database in RAM + +Once a connection has been established, create a Cursor object and call +its execute() method to perform SQL queries: + + cu = cx.cursor() + + # create a table + cu.execute("create table lang(name, first_appeared)") + + # insert values into a table + cu.execute("insert into lang values (?, ?)", ("C", 1972)) + + # execute a query and iterate over the result + for row in cu.execute("select * from lang"): + print(row) + + cx.close() + +The sqlite3 module is written by Gerhard Häring . +""" import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence @@ -270,6 +303,8 @@ class Warning(Exception): ... @disjoint_base class Connection: + """SQLite database connection object. +""" @property def DataError(self) -> type[DataError]: ... @property @@ -328,19 +363,58 @@ class Connection: uri: bool = False, ) -> None: ... - def close(self) -> None: ... + def close(self) -> None: + """Close the database connection. + +Any pending transaction is not committed implicitly. +""" if sys.version_info >= (3, 11): - def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: ... + def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: + """Open and return a BLOB object. + + table + Table name. + column + Column name. + row + Row index. + readonly + Open the BLOB without write permissions. + name + Database name. +""" + + def commit(self) -> None: + """Commit any pending transaction to the database. - def commit(self) -> None: ... - def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... +If there is no open transaction, this method is a no-op. +""" + def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: + """Creates a new aggregate. + +Note: Passing keyword arguments 'name', 'n_arg' and 'aggregate_class' +to _sqlite3.Connection.create_aggregate() is deprecated. Parameters +'name', 'n_arg' and 'aggregate_class' will become positional-only in +Python 3.15. +""" if sys.version_info >= (3, 11): # num_params determines how many params will be passed to the aggregate class. We provide an overload # for the case where num_params = 1, which is expected to be the common case. @overload def create_window_function( self, name: str, num_params: Literal[1], aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None, / - ) -> None: ... + ) -> None: + """Creates or redefines an aggregate window function. Non-standard. 
+ + name + The name of the SQL aggregate window function to be created or + redefined. + num_params + The number of arguments the step and inverse methods takes. + aggregate_class + A class with step(), finalize(), value(), and inverse() methods. + Set to None to clear the window function. +""" # And for num_params = -1, which means the aggregate must accept any number of parameters. @overload def create_window_function( @@ -351,36 +425,99 @@ class Connection: self, name: str, num_params: int, aggregate_class: Callable[[], _WindowAggregateClass] | None, / ) -> None: ... - def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: ... + def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: + """Creates a collation function. +""" def create_function( self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False - ) -> None: ... + ) -> None: + """Creates a new function. + +Note: Passing keyword arguments 'name', 'narg' and 'func' to +_sqlite3.Connection.create_function() is deprecated. Parameters +'name', 'narg' and 'func' will become positional-only in Python 3.15. +""" @overload - def cursor(self, factory: None = None) -> Cursor: ... + def cursor(self, factory: None = None) -> Cursor: + """Return a cursor for the connection. +""" @overload def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: ... - def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: ... - def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: ... - def executescript(self, sql_script: str, /) -> Cursor: ... - def interrupt(self) -> None: ... + def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: + """Executes an SQL statement. +""" + def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: + """Repeatedly executes an SQL statement. +""" + def executescript(self, sql_script: str, /) -> Cursor: + """Executes multiple SQL statements at once. +""" + def interrupt(self) -> None: + """Abort any pending database operation. +""" if sys.version_info >= (3, 13): - def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: ... + def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: + """Returns iterator to the dump of the database in an SQL text format. + + filter + An optional LIKE pattern for database objects to dump +""" else: - def iterdump(self) -> Generator[str, None, None]: ... + def iterdump(self) -> Generator[str, None, None]: + """Returns iterator to the dump of the database in an SQL text format. +""" - def rollback(self) -> None: ... + def rollback(self) -> None: + """Roll back to the start of any pending transaction. + +If there is no open transaction, this method is a no-op. +""" def set_authorizer( self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None - ) -> None: ... - def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: ... - def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: ... + ) -> None: + """Set authorizer callback. + +Note: Passing keyword argument 'authorizer_callback' to +_sqlite3.Connection.set_authorizer() is deprecated. Parameter +'authorizer_callback' will become positional-only in Python 3.15. 
+""" + def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: + """Set progress handler callback. + + progress_handler + A callable that takes no arguments. + If the callable returns non-zero, the current query is terminated, + and an exception is raised. + n + The number of SQLite virtual machine instructions that are + executed between invocations of 'progress_handler'. + +If 'progress_handler' is None or 'n' is 0, the progress handler is disabled. + +Note: Passing keyword argument 'progress_handler' to +_sqlite3.Connection.set_progress_handler() is deprecated. Parameter +'progress_handler' will become positional-only in Python 3.15. +""" + def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: + """Set a trace callback called for each SQL statement (passed as unicode). + +Note: Passing keyword argument 'trace_callback' to +_sqlite3.Connection.set_trace_callback() is deprecated. Parameter +'trace_callback' will become positional-only in Python 3.15. +""" # enable_load_extension and load_extension is not available on python distributions compiled # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 - def enable_load_extension(self, enable: bool, /) -> None: ... + def enable_load_extension(self, enable: bool, /) -> None: + """Enable dynamic loading of SQLite extension modules. +""" if sys.version_info >= (3, 12): - def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: ... + def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: + """Load SQLite extension module. +""" else: - def load_extension(self, name: str, /) -> None: ... + def load_extension(self, name: str, /) -> None: + """Load SQLite extension module. +""" def backup( self, @@ -390,24 +527,89 @@ class Connection: progress: Callable[[int, int, int], object] | None = None, name: str = "main", sleep: float = 0.25, - ) -> None: ... + ) -> None: + """Makes a backup of the database. +""" if sys.version_info >= (3, 11): - def setlimit(self, category: int, limit: int, /) -> int: ... - def getlimit(self, category: int, /) -> int: ... - def serialize(self, *, name: str = "main") -> bytes: ... - def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: ... + def setlimit(self, category: int, limit: int, /) -> int: + """Set connection run-time limits. + + category + The limit category to be set. + limit + The new limit. If the new limit is a negative number, the limit is + unchanged. + +Attempts to increase a limit above its hard upper bound are silently truncated +to the hard upper bound. Regardless of whether or not the limit was changed, +the prior value of the limit is returned. +""" + def getlimit(self, category: int, /) -> int: + """Get connection run-time limits. + + category + The limit category to be queried. +""" + def serialize(self, *, name: str = "main") -> bytes: + """Serialize a database into a byte string. + + name + Which database to serialize. + +For an ordinary on-disk database file, the serialization is just a copy of the +disk file. For an in-memory database or a "temp" database, the serialization is +the same sequence of bytes which would be written to disk if that database +were backed up to disk. +""" + def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: + """Load a serialized database. + + data + The serialized database content. + name + Which database to reopen with the deserialization. 
+ +The deserialize interface causes the database connection to disconnect from the +target database, and then reopen it as an in-memory database based on the given +serialized data. + +The deserialize interface will fail with SQLITE_BUSY if the database is +currently in a read transaction or is involved in a backup operation. +""" if sys.version_info >= (3, 12): - def getconfig(self, op: int, /) -> bool: ... - def setconfig(self, op: int, enable: bool = True, /) -> bool: ... + def getconfig(self, op: int, /) -> bool: + """Query a boolean connection configuration option. + + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. +""" + def setconfig(self, op: int, enable: bool = True, /) -> bool: + """Set a boolean connection configuration option. + + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. +""" - def __call__(self, sql: str, /) -> _Statement: ... - def __enter__(self) -> Self: ... + def __call__(self, sql: str, /) -> _Statement: + """Call self as a function. +""" + def __enter__(self) -> Self: + """Called when the connection is used as a context manager. + +Returns itself as a convenience to the caller. +""" def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / - ) -> Literal[False]: ... + ) -> Literal[False]: + """Called when the connection is used as a context manager. + +If there was any exception, a rollback takes place; otherwise we commit. +""" @disjoint_base class Cursor: + """SQLite database cursor class. +""" arraysize: int @property def connection(self) -> Connection: ... @@ -420,35 +622,70 @@ class Cursor: @property def rowcount(self) -> int: ... def __init__(self, cursor: Connection, /) -> None: ... - def close(self) -> None: ... - def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: ... - def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: ... - def executescript(self, sql_script: str, /) -> Cursor: ... - def fetchall(self) -> list[Any]: ... - def fetchmany(self, size: int | None = 1) -> list[Any]: ... + def close(self) -> None: + """Closes the cursor. +""" + def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: + """Executes an SQL statement. +""" + def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: + """Repeatedly executes an SQL statement. +""" + def executescript(self, sql_script: str, /) -> Cursor: + """Executes multiple SQL statements at once. +""" + def fetchall(self) -> list[Any]: + """Fetches all rows from the resultset. +""" + def fetchmany(self, size: int | None = 1) -> list[Any]: + """Fetches several rows from the resultset. + + size + The default value is set by the Cursor.arraysize attribute. +""" # Returns either a row (as created by the row_factory) or None, but # putting None in the return annotation causes annoying false positives. - def fetchone(self) -> Any: ... - def setinputsizes(self, sizes: Unused, /) -> None: ... # does nothing - def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: ... # does nothing - def __iter__(self) -> Self: ... - def __next__(self) -> Any: ... + def fetchone(self) -> Any: + """Fetches one row from the resultset. +""" + def setinputsizes(self, sizes: Unused, /) -> None: # does nothing + """Required by DB-API. Does nothing in sqlite3. +""" + def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: # does nothing + """Required by DB-API. Does nothing in sqlite3. 
+""" + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> Any: + """Implement next(self). +""" @final class PrepareProtocol: + """PEP 246 style object adaption protocol type. +""" def __init__(self, *args: object, **kwargs: object) -> None: ... @disjoint_base class Row(Sequence[Any]): def __new__(cls, cursor: Cursor, data: tuple[Any, ...], /) -> Self: ... - def keys(self) -> list[str]: ... + def keys(self) -> list[str]: + """Returns the keys of the row. +""" @overload - def __getitem__(self, key: int | str, /) -> Any: ... + def __getitem__(self, key: int | str, /) -> Any: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> tuple[Any, ...]: ... def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[Any]: ... - def __len__(self) -> int: ... + def __iter__(self) -> Iterator[Any]: + """Implement iter(self). +""" + def __len__(self) -> int: + """Return len(self). +""" # These return NotImplemented for anything that is not a Row. def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: object, /) -> bool: ... @@ -465,14 +702,48 @@ class _Statement: ... if sys.version_info >= (3, 11): @final class Blob: - def close(self) -> None: ... - def read(self, length: int = -1, /) -> bytes: ... - def write(self, data: ReadableBuffer, /) -> None: ... - def tell(self) -> int: ... + def close(self) -> None: + """Close the blob. +""" + def read(self, length: int = -1, /) -> bytes: + """Read data at the current offset position. + + length + Read length in bytes. + +If the end of the blob is reached, the data up to end of file will be returned. +When length is not specified, or is negative, Blob.read() will read until the +end of the blob. +""" + def write(self, data: ReadableBuffer, /) -> None: + """Write data at the current offset. + +This function cannot change the blob length. Writing beyond the end of the +blob will result in an exception being raised. +""" + def tell(self) -> int: + """Return the current access position for the blob. +""" # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, offset: int, origin: int = 0, /) -> None: ... - def __len__(self) -> int: ... - def __enter__(self) -> Self: ... - def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: ... - def __getitem__(self, key: SupportsIndex | slice, /) -> int: ... - def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: ... + def seek(self, offset: int, origin: int = 0, /) -> None: + """Set the current access position to offset. + +The origin argument defaults to os.SEEK_SET (absolute blob positioning). +Other values for origin are os.SEEK_CUR (seek relative to the current position) +and os.SEEK_END (seek relative to the blob's end). +""" + def __len__(self) -> int: + """Return len(self). +""" + def __enter__(self) -> Self: + """Blob context manager enter. +""" + def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: + """Blob context manager exit. +""" + def __getitem__(self, key: SupportsIndex | slice, /) -> int: + """Return self[key]. +""" + def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: + """Set self[key] to value. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi index d8f0b7937e994..eef5169667ea1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi @@ -1,3 +1,5 @@ +"""Internal support module for sre +""" from re import Pattern from sre_constants import * from sre_constants import _NamedIntConstant diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi index 9a1da4ac89e7e..4c8186812ee3f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi @@ -1,3 +1,5 @@ +"""Internal support module for sre +""" import sys from re import error as error from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi index eaacbff312a92..af61c65c64905 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi @@ -1,3 +1,5 @@ +"""Internal support module for sre +""" import sys from collections.abc import Iterable from re import Match, Pattern as _Pattern diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi index faa98cb399200..05a6f027ec316 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi @@ -1,3 +1,92 @@ +"""This module provides some more Pythonic support for SSL. + +Object types: + + SSLSocket -- subtype of socket.socket which does SSL over the socket + +Exceptions: + + SSLError -- exception raised for I/O errors + +Functions: + + cert_time_to_seconds -- convert time string used for certificate + notBefore and notAfter functions to integer + seconds past the Epoch (the time values + returned from time.time()) + + get_server_certificate (addr, ssl_version, ca_certs, timeout) -- Retrieve the + certificate from the server at the specified + address and return it as a PEM-encoded string + + +Integer constants: + +SSL_ERROR_ZERO_RETURN +SSL_ERROR_WANT_READ +SSL_ERROR_WANT_WRITE +SSL_ERROR_WANT_X509_LOOKUP +SSL_ERROR_SYSCALL +SSL_ERROR_SSL +SSL_ERROR_WANT_CONNECT + +SSL_ERROR_EOF +SSL_ERROR_INVALID_ERROR_CODE + +The following group define certificate requirements that one side is +allowing/requiring from the other side: + +CERT_NONE - no certificates from the other side are required (or will + be looked at if provided) +CERT_OPTIONAL - certificates are not required, but if provided will be + validated, and if validation fails, the connection will + also fail +CERT_REQUIRED - certificates are required, and will be validated, and + if validation fails, the connection will also fail + +The following constants identify various SSL protocol variants: + +PROTOCOL_SSLv2 +PROTOCOL_SSLv3 +PROTOCOL_SSLv23 +PROTOCOL_TLS +PROTOCOL_TLS_CLIENT +PROTOCOL_TLS_SERVER +PROTOCOL_TLSv1 +PROTOCOL_TLSv1_1 +PROTOCOL_TLSv1_2 + +The following constants identify various SSL alert message descriptions as per +http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6 + +ALERT_DESCRIPTION_CLOSE_NOTIFY +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE +ALERT_DESCRIPTION_BAD_RECORD_MAC +ALERT_DESCRIPTION_RECORD_OVERFLOW +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE +ALERT_DESCRIPTION_HANDSHAKE_FAILURE 
+ALERT_DESCRIPTION_BAD_CERTIFICATE +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE +ALERT_DESCRIPTION_CERTIFICATE_REVOKED +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN +ALERT_DESCRIPTION_ILLEGAL_PARAMETER +ALERT_DESCRIPTION_UNKNOWN_CA +ALERT_DESCRIPTION_ACCESS_DENIED +ALERT_DESCRIPTION_DECODE_ERROR +ALERT_DESCRIPTION_DECRYPT_ERROR +ALERT_DESCRIPTION_PROTOCOL_VERSION +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY +ALERT_DESCRIPTION_INTERNAL_ERROR +ALERT_DESCRIPTION_USER_CANCELLED +ALERT_DESCRIPTION_NO_RENEGOTIATION +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE +ALERT_DESCRIPTION_UNRECOGNIZED_NAME +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY +""" import enum import socket import sys @@ -65,16 +154,32 @@ class _Cipher(TypedDict): symmetric: str class SSLError(OSError): + """An error occurred in the SSL implementation. +""" library: str reason: str -class SSLZeroReturnError(SSLError): ... -class SSLWantReadError(SSLError): ... -class SSLWantWriteError(SSLError): ... -class SSLSyscallError(SSLError): ... -class SSLEOFError(SSLError): ... +class SSLZeroReturnError(SSLError): + """SSL/TLS session closed cleanly. +""" +class SSLWantReadError(SSLError): + """Non-blocking SSL socket needs to read more data +before the requested operation can be completed. +""" +class SSLWantWriteError(SSLError): + """Non-blocking SSL socket needs to write more data +before the requested operation can be completed. +""" +class SSLSyscallError(SSLError): + """System error when attempting SSL operation. +""" +class SSLEOFError(SSLError): + """SSL/TLS connection terminated abruptly. +""" class SSLCertVerificationError(SSLError, ValueError): + """A certificate could not be verified. +""" verify_code: int verify_message: str @@ -95,22 +200,62 @@ if sys.version_info < (3, 12): ciphers: str | None = None, ) -> SSLSocket: ... @deprecated("Deprecated since Python 3.7; removed in Python 3.12.") - def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed. -def cert_time_to_seconds(cert_time: str) -> int: ... + The function matches IP addresses rather than dNSNames if hostname is a + valid ipaddress string. IPv4 addresses are supported on all platforms. + IPv6 addresses are supported on platforms with IPv6 support (AF_INET6 + and inet_pton). + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + +def cert_time_to_seconds(cert_time: str) -> int: + """Return the time in seconds since the Epoch, given the timestring +representing the "notBefore" or "notAfter" date from a certificate +in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale). + +"notBefore" or "notAfter" dates must use UTC (RFC 5280). + +Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec +UTC should be specified as GMT (see ASN1_TIME_print()) +""" if sys.version_info >= (3, 10): def get_server_certificate( addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... - ) -> str: ... + ) -> str: + """Retrieve the certificate from the server at the specified address, +and return it as a PEM-encoded string. +If 'ca_certs' is specified, validate the server cert against it. 
+If 'ssl_version' is specified, use it in the connection attempt. +If 'timeout' is specified, use it in the connection attempt. +""" else: - def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... - -def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... -def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: + """Retrieve the certificate from the server at the specified address, + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt. +""" + +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: + """Takes a certificate in binary DER format and returns the +PEM version of it as a string. +""" +def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: + """Takes a certificate in ASCII PEM format and returns the +DER-encoded version of it as a byte sequence +""" class DefaultVerifyPaths(NamedTuple): + """DefaultVerifyPaths(cafile, capath, openssl_cafile_env, openssl_cafile, openssl_capath_env, openssl_capath) +""" cafile: str capath: str openssl_cafile_env: str @@ -118,9 +263,13 @@ class DefaultVerifyPaths(NamedTuple): openssl_capath_env: str openssl_capath: str -def get_default_verify_paths() -> DefaultVerifyPaths: ... +def get_default_verify_paths() -> DefaultVerifyPaths: + """Return paths to default cafile and capath. + """ class VerifyMode(enum.IntEnum): + """An enumeration. +""" CERT_NONE = 0 CERT_OPTIONAL = 1 CERT_REQUIRED = 2 @@ -130,6 +279,8 @@ CERT_OPTIONAL: Final = VerifyMode.CERT_OPTIONAL CERT_REQUIRED: Final = VerifyMode.CERT_REQUIRED class VerifyFlags(enum.IntFlag): + """An enumeration. +""" VERIFY_DEFAULT = 0 VERIFY_CRL_CHECK_LEAF = 4 VERIFY_CRL_CHECK_CHAIN = 12 @@ -150,6 +301,8 @@ if sys.version_info >= (3, 10): VERIFY_X509_PARTIAL_CHAIN: Final = VerifyFlags.VERIFY_X509_PARTIAL_CHAIN class _SSLMethod(enum.IntEnum): + """An enumeration. +""" PROTOCOL_SSLv23 = 2 PROTOCOL_SSLv2 = ... PROTOCOL_SSLv3 = ... @@ -171,6 +324,8 @@ PROTOCOL_TLS_CLIENT: Final = _SSLMethod.PROTOCOL_TLS_CLIENT PROTOCOL_TLS_SERVER: Final = _SSLMethod.PROTOCOL_TLS_SERVER class Options(enum.IntFlag): + """An enumeration. +""" OP_ALL = 2147483728 OP_NO_SSLv2 = 0 OP_NO_SSLv3 = 33554432 @@ -216,6 +371,8 @@ HAS_NEVER_CHECK_COMMON_NAME: Final[bool] CHANNEL_BINDING_TYPES: Final[list[str]] class AlertDescription(enum.IntEnum): + """An enumeration. +""" ALERT_DESCRIPTION_ACCESS_DENIED = 49 ALERT_DESCRIPTION_BAD_CERTIFICATE = 42 ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = 114 @@ -281,13 +438,21 @@ class _ASN1ObjectBase(NamedTuple): oid: str class _ASN1Object(_ASN1ObjectBase): + """ASN.1 object identifier lookup + """ def __new__(cls, oid: str) -> Self: ... @classmethod - def fromnid(cls, nid: int) -> Self: ... + def fromnid(cls, nid: int) -> Self: + """Create _ASN1Object from OpenSSL numeric ID + """ @classmethod - def fromname(cls, name: str) -> Self: ... + def fromname(cls, name: str) -> Self: + """Create _ASN1Object from short name, long name or OID + """ class Purpose(_ASN1Object, enum.Enum): + """SSLContext purpose flags with X509v3 Extended Key Usage objects + """ # Normally this class would inherit __new__ from _ASN1Object, but # because this is an enum, the inherited __new__ is replaced at runtime with # Enum.__new__. 
@@ -296,15 +461,27 @@ class Purpose(_ASN1Object, enum.Enum): CLIENT_AUTH = (130, "clientAuth", "TLS Web Client Authentication", "1.3.6.1.5.5.7.3.1") # pyright: ignore[reportCallIssue] class SSLSocket(socket.socket): + """This class implements a subtype of socket.socket that wraps +the underlying OS socket in an SSL context when necessary, and +provides read and write methods over that channel. +""" context: SSLContext server_side: bool server_hostname: str | None session: SSLSession | None @property - def session_reused(self) -> bool | None: ... + def session_reused(self) -> bool | None: + """Was the client session reused during handshake +""" def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def connect(self, addr: socket._Address) -> None: ... - def connect_ex(self, addr: socket._Address) -> int: ... + def connect(self, addr: socket._Address) -> None: + """Connects to remote ADDR, and then wraps the connection in +an SSL channel. +""" + def connect_ex(self, addr: socket._Address) -> int: + """Connects to remote ADDR, and then wraps the connection in +an SSL channel. +""" def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... @@ -318,40 +495,101 @@ class SSLSocket(socket.socket): @overload def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... - def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... - def write(self, data: ReadableBuffer) -> int: ... - def do_handshake(self, block: bool = False) -> None: ... # block is undocumented + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: + """Read up to LEN bytes and return them. +Return zero-length string on EOF. +""" + def write(self, data: ReadableBuffer) -> int: + """Write DATA to the underlying SSL channel. Returns +number of bytes of DATA actually transmitted. +""" + def do_handshake(self, block: bool = False) -> None: # block is undocumented + """Start the SSL/TLS handshake. +""" @overload - def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: + """Returns a formatted version of the data in the certificate provided +by the other end of the SSL channel. + +Return None if no certificate was provided, {} if a certificate was +provided, but not validated. +""" @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... - def cipher(self) -> tuple[str, str, int] | None: ... - def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... - def compression(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... - def selected_alpn_protocol(self) -> str | None: ... + def cipher(self) -> tuple[str, str, int] | None: + """Return the currently selected cipher as a 3-tuple ``(name, +ssl_version, secret_bits)``. +""" + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: + """Return a list of ciphers shared by the client during the handshake or +None if this is not a valid server connection. 
+""" + def compression(self) -> str | None: + """Return the current compression algorithm in use, or ``None`` if +compression was not negotiated or not supported by one of the peers. +""" + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: + """Get channel binding data for current connection. Raise ValueError +if the requested `cb_type` is not supported. Return bytes of the data +or None if the data is not available (e.g. before the handshake). +""" + def selected_alpn_protocol(self) -> str | None: + """Return the currently selected ALPN protocol as a string, or ``None`` +if a next protocol was not negotiated or if ALPN is not supported by one +of the peers. +""" if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") - def selected_npn_protocol(self) -> str | None: ... + def selected_npn_protocol(self) -> str | None: + """Return the currently selected NPN protocol as a string, or ``None`` +if a next protocol was not negotiated or if NPN is not supported by one +of the peers. +""" else: - def selected_npn_protocol(self) -> str | None: ... - - def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... - def unwrap(self) -> socket.socket: ... - def version(self) -> str | None: ... - def pending(self) -> int: ... + def selected_npn_protocol(self) -> str | None: + """Return the currently selected NPN protocol as a string, or ``None`` + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. +""" + + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: + """Accepts a new connection from a remote client, and returns +a tuple containing that new connection wrapped with a server-side +SSL channel, and the address of the remote client. +""" + def unwrap(self) -> socket.socket: + """Start the SSL shutdown handshake. +""" + def version(self) -> str | None: + """Return a string identifying the protocol version used by the +current SSL channel. +""" + def pending(self) -> int: + """Return the number of bytes that can be read immediately. +""" def verify_client_post_handshake(self) -> None: ... # These methods always raise `NotImplementedError`: def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] def recvmsg_into(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] def sendmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: ... - def get_unverified_chain(self) -> list[bytes]: ... + def get_verified_chain(self) -> list[bytes]: + """Returns verified certificate chain provided by the other +end of the SSL channel as a list of DER-encoded bytes. + +If certificate verification was disabled method acts the same as +``SSLSocket.get_unverified_chain``. +""" + def get_unverified_chain(self) -> list[bytes]: + """Returns raw certificate chain provided by the other +end of the SSL channel as a list of DER-encoded bytes. +""" class TLSVersion(enum.IntEnum): + """An enumeration. +""" MINIMUM_SUPPORTED = -2 MAXIMUM_SUPPORTED = -1 SSLv3 = 768 @@ -361,6 +599,9 @@ class TLSVersion(enum.IntEnum): TLSv1_3 = 772 class SSLContext(_SSLContext): + """An SSLContext holds various SSL-related configuration options and +data, such as certificates and possibly a private key. +""" options: Options verify_flags: VerifyFlags verify_mode: VerifyMode @@ -395,7 +636,15 @@ class SSLContext(_SSLContext): cadata: str | ReadableBuffer | None = None, ) -> None: ... 
@overload - def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: + """Returns a list of dicts with information of loaded CA certs. + +If the optional argument is True, returns a DER-encoded copy of the CA +certificate. + +NOTE: Certificates in a capath directory aren't loaded unless they have +been used at least once. +""" @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -437,7 +686,13 @@ def create_default_context( cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, -) -> SSLContext: ... +) -> SSLContext: + """Create a SSLContext object with default settings. + +NOTE: The protocol and settings may change anytime without prior + deprecation. The values represent a fair balance between maximum + compatibility and security. +""" if sys.version_info >= (3, 10): def _create_unverified_context( @@ -451,7 +706,14 @@ if sys.version_info >= (3, 10): cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, - ) -> SSLContext: ... + ) -> SSLContext: + """Create a SSLContext object for Python stdlib modules + +All Python stdlib modules shall use this function to create SSLContext +objects in order to keep common settings in one place. The configuration +is less restrict than create_default_context()'s to increase backward +compatibility. +""" else: def _create_unverified_context( @@ -465,49 +727,138 @@ else: cafile: StrOrBytesPath | None = None, capath: StrOrBytesPath | None = None, cadata: str | ReadableBuffer | None = None, - ) -> SSLContext: ... + ) -> SSLContext: + """Create a SSLContext object for Python stdlib modules + + All Python stdlib modules shall use this function to create SSLContext + objects in order to keep common settings in one place. The configuration + is less restrict than create_default_context()'s to increase backward + compatibility. + """ _create_default_https_context = create_default_context class SSLObject: + """This class implements an interface on top of a low-level SSL object as +implemented by OpenSSL. This object captures the state of an SSL connection +but does not provide any network IO itself. IO needs to be performed +through separate "BIO" objects which are OpenSSL's IO abstraction layer. + +This class does not have a public constructor. Instances are returned by +``SSLContext.wrap_bio``. This class is typically used by framework authors +that want to implement asynchronous IO for SSL through memory buffers. + +When compared to ``SSLSocket``, this object lacks the following features: + + * Any form of network IO, including methods such as ``recv`` and ``send``. + * The ``do_handshake_on_connect`` and ``suppress_ragged_eofs`` machinery. +""" context: SSLContext @property - def server_side(self) -> bool: ... + def server_side(self) -> bool: + """Whether this is a server-side socket. +""" @property - def server_hostname(self) -> str | None: ... + def server_hostname(self) -> str | None: + """The currently set server hostname (for SNI), or ``None`` if no +server hostname is set. +""" session: SSLSession | None @property - def session_reused(self) -> bool: ... + def session_reused(self) -> bool: + """Was the client session reused during handshake +""" def __init__(self, *args: Any, **kwargs: Any) -> None: ... 
- def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... - def write(self, data: ReadableBuffer) -> int: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: + """Read up to 'len' bytes from the SSL object and return them. + +If 'buffer' is provided, read into this buffer and return the number of +bytes read. +""" + def write(self, data: ReadableBuffer) -> int: + """Write 'data' to the SSL object and return the number of bytes +written. + +The 'data' argument must support the buffer interface. +""" @overload - def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: + """Returns a formatted version of the data in the certificate provided +by the other end of the SSL channel. + +Return None if no certificate was provided, {} if a certificate was +provided, but not validated. +""" @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... - def selected_alpn_protocol(self) -> str | None: ... + def selected_alpn_protocol(self) -> str | None: + """Return the currently selected ALPN protocol as a string, or ``None`` +if a next protocol was not negotiated or if ALPN is not supported by one +of the peers. +""" if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") - def selected_npn_protocol(self) -> str | None: ... + def selected_npn_protocol(self) -> str | None: + """Return the currently selected NPN protocol as a string, or ``None`` +if a next protocol was not negotiated or if NPN is not supported by one +of the peers. +""" else: - def selected_npn_protocol(self) -> str | None: ... - - def cipher(self) -> tuple[str, str, int] | None: ... - def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... - def compression(self) -> str | None: ... - def pending(self) -> int: ... - def do_handshake(self) -> None: ... - def unwrap(self) -> None: ... - def version(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + def selected_npn_protocol(self) -> str | None: + """Return the currently selected NPN protocol as a string, or ``None`` + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. +""" + + def cipher(self) -> tuple[str, str, int] | None: + """Return the currently selected cipher as a 3-tuple ``(name, +ssl_version, secret_bits)``. +""" + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: + """Return a list of ciphers shared by the client during the handshake or +None if this is not a valid server connection. +""" + def compression(self) -> str | None: + """Return the current compression algorithm in use, or ``None`` if +compression was not negotiated or not supported by one of the peers. +""" + def pending(self) -> int: + """Return the number of bytes that can be read immediately. +""" + def do_handshake(self) -> None: + """Start the SSL/TLS handshake. +""" + def unwrap(self) -> None: + """Start the SSL shutdown handshake. +""" + def version(self) -> str | None: + """Return a string identifying the protocol version used by the +current SSL channel. +""" + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: + """Get channel binding data for current connection. Raise ValueError +if the requested `cb_type` is not supported. 
Return bytes of the data +or None if the data is not available (e.g. before the handshake). +""" def verify_client_post_handshake(self) -> None: ... if sys.version_info >= (3, 13): - def get_verified_chain(self) -> list[bytes]: ... - def get_unverified_chain(self) -> list[bytes]: ... + def get_verified_chain(self) -> list[bytes]: + """Returns verified certificate chain provided by the other +end of the SSL channel as a list of DER-encoded bytes. + +If certificate verification was disabled method acts the same as +``SSLSocket.get_unverified_chain``. +""" + def get_unverified_chain(self) -> list[bytes]: + """Returns raw certificate chain provided by the other +end of the SSL channel as a list of DER-encoded bytes. +""" class SSLErrorNumber(enum.IntEnum): + """An enumeration. +""" SSL_ERROR_EOF = 8 SSL_ERROR_INVALID_ERROR_CODE = 10 SSL_ERROR_SSL = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi index face28ab0cbb6..da24b541e81a7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi @@ -1,3 +1,7 @@ +"""Constants/functions for interpreting results of os.stat() and os.lstat(). + +Suggested usage: from stat import * +""" import sys from _stat import * from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi index ba9e5f1b6b71f..225dbea69c3a8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi @@ -1,3 +1,108 @@ +""" +Basic statistics module. + +This module provides functions for calculating statistics of data, including +averages, variance, and standard deviation. + +Calculating averages +-------------------- + +================== ================================================== +Function Description +================== ================================================== +mean Arithmetic mean (average) of data. +fmean Fast, floating-point arithmetic mean. +geometric_mean Geometric mean of data. +harmonic_mean Harmonic mean of data. +median Median (middle value) of data. +median_low Low median of data. +median_high High median of data. +median_grouped Median, or 50th percentile, of grouped data. +mode Mode (most common value) of data. +multimode List of modes (most common values of data). +quantiles Divide data into intervals with equal probability. +================== ================================================== + +Calculate the arithmetic mean ("the average") of data: + +>>> mean([-1.0, 2.5, 3.25, 5.75]) +2.625 + + +Calculate the standard median of discrete data: + +>>> median([2, 3, 4, 5]) +3.5 + + +Calculate the median, or 50th percentile, of data grouped into class intervals +centred on the data values provided. E.g. if your data points are rounded to +the nearest whole number: + +>>> median_grouped([2, 2, 3, 3, 3, 4]) #doctest: +ELLIPSIS +2.8333333333... + +This should be interpreted in this way: you have two data points in the class +interval 1.5-2.5, three data points in the class interval 2.5-3.5, and one in +the class interval 3.5-4.5. The median of these data points is 2.8333... + + +Calculating variability or spread +--------------------------------- + +================== ============================================= +Function Description +================== ============================================= +pvariance Population variance of data. 
+variance Sample variance of data. +pstdev Population standard deviation of data. +stdev Sample standard deviation of data. +================== ============================================= + +Calculate the standard deviation of sample data: + +>>> stdev([2.5, 3.25, 5.5, 11.25, 11.75]) #doctest: +ELLIPSIS +4.38961843444... + +If you have previously calculated the mean, you can pass it as the optional +second argument to the four "spread" functions to avoid recalculating it: + +>>> data = [1, 2, 2, 4, 4, 4, 5, 6] +>>> mu = mean(data) +>>> pvariance(data, mu) +2.5 + + +Statistics for relations between two inputs +------------------------------------------- + +================== ==================================================== +Function Description +================== ==================================================== +covariance Sample covariance for two variables. +correlation Pearson's correlation coefficient for two variables. +linear_regression Intercept and slope for simple linear regression. +================== ==================================================== + +Calculate covariance, Pearson's correlation, and simple linear regression +for two inputs: + +>>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] +>>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] +>>> covariance(x, y) +0.75 +>>> correlation(x, y) #doctest: +ELLIPSIS +0.31622776601... +>>> linear_regression(x, y) #doctest: +LinearRegression(slope=0.1, intercept=1.5) + + +Exceptions +---------- + +A single exception is defined: StatisticsError is a subclass of ValueError. + +""" import sys from _typeshed import SupportsRichComparisonT from collections.abc import Callable, Hashable, Iterable, Sequence @@ -44,96 +149,639 @@ _Seed: TypeAlias = int | float | str | bytes | bytearray # noqa: Y041 class StatisticsError(ValueError): ... if sys.version_info >= (3, 11): - def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: + """Convert data to floats and compute the arithmetic mean. + +This runs faster than the mean() function and it always returns a float. +If the input dataset is empty, it raises a StatisticsError. + +>>> fmean([3.5, 4.0, 5.25]) +4.25 + +""" else: - def fmean(data: Iterable[SupportsFloat]) -> float: ... + def fmean(data: Iterable[SupportsFloat]) -> float: + """Convert data to floats and compute the arithmetic mean. + + This runs faster than the mean() function and it always returns a float. + If the input dataset is empty, it raises a StatisticsError. + + >>> fmean([3.5, 4.0, 5.25]) + 4.25 + """ + +def geometric_mean(data: Iterable[SupportsFloat]) -> float: + """Convert data to floats and compute the geometric mean. + +Raises a StatisticsError if the input dataset is empty +or if it contains a negative value. + +Returns zero if the product of inputs is zero. -def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... -def mean(data: Iterable[_NumberT]) -> _NumberT: ... +No special efforts are made to achieve exact results. +(However, this may change in the future.) + +>>> round(geometric_mean([54, 24, 36]), 9) +36.0 + +""" +def mean(data: Iterable[_NumberT]) -> _NumberT: + """Return the sample arithmetic mean of data. 
+ +>>> mean([1, 2, 3, 4, 4]) +2.8 + +>>> from fractions import Fraction as F +>>> mean([F(3, 7), F(1, 21), F(5, 3), F(1, 3)]) +Fraction(13, 21) + +>>> from decimal import Decimal as D +>>> mean([D("0.5"), D("0.75"), D("0.625"), D("0.375")]) +Decimal('0.5625') + +If ``data`` is empty, StatisticsError will be raised. + +""" if sys.version_info >= (3, 10): - def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: + """Return the harmonic mean of data. + +The harmonic mean is the reciprocal of the arithmetic mean of the +reciprocals of the data. It can be used for averaging ratios or +rates, for example speeds. + +Suppose a car travels 40 km/hr for 5 km and then speeds-up to +60 km/hr for another 5 km. What is the average speed? + + >>> harmonic_mean([40, 60]) + 48.0 + +Suppose a car travels 40 km/hr for 5 km, and when traffic clears, +speeds-up to 60 km/hr for the remaining 30 km of the journey. What +is the average speed? + + >>> harmonic_mean([40, 60], weights=[5, 30]) + 56.0 + +If ``data`` is empty, or any element is less than zero, +``harmonic_mean`` will raise ``StatisticsError``. + +""" else: - def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... + def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: + """Return the harmonic mean of data. + + The harmonic mean, sometimes called the subcontrary mean, is the + reciprocal of the arithmetic mean of the reciprocals of the data, + and is often appropriate when averaging quantities which are rates + or ratios, for example speeds. Example: + + Suppose an investor purchases an equal value of shares in each of + three companies, with P/E (price/earning) ratios of 2.5, 3 and 10. + What is the average P/E ratio for the investor's portfolio? + + >>> harmonic_mean([2.5, 3, 10]) # For an equal investment portfolio. + 3.6 + + Using the arithmetic mean would give an average of about 5.167, which + is too high. + + If ``data`` is empty, or any element is less than zero, + ``harmonic_mean`` will raise ``StatisticsError``. + """ + +def median(data: Iterable[_NumberT]) -> _NumberT: + """Return the median (middle value) of numeric data. + +When the number of data points is odd, return the middle data point. +When the number of data points is even, the median is interpolated by +taking the average of the two middle values: + +>>> median([1, 3, 5]) +3 +>>> median([1, 3, 5, 7]) +4.0 + +""" +def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: + """Return the low median of numeric data. + +When the number of data points is odd, the middle value is returned. +When it is even, the smaller of the two middle values is returned. -def median(data: Iterable[_NumberT]) -> _NumberT: ... -def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... -def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... +>>> median_low([1, 3, 5]) +3 +>>> median_low([1, 3, 5, 7]) +3 + +""" +def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: + """Return the high median of data. + +When the number of data points is odd, the middle value is returned. +When it is even, the larger of the two middle values is returned. 
+ +>>> median_high([1, 3, 5]) +3 +>>> median_high([1, 3, 5, 7]) +5 + +""" if sys.version_info >= (3, 11): - def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: + """Estimates the median for numeric data binned around the midpoints +of consecutive, fixed-width intervals. + +The *data* can be any iterable of numeric data with each value being +exactly the midpoint of a bin. At least one value must be present. + +The *interval* is width of each bin. + +For example, demographic information may have been summarized into +consecutive ten-year age groups with each group being represented +by the 5-year midpoints of the intervals: + + >>> demographics = Counter({ + ... 25: 172, # 20 to 30 years old + ... 35: 484, # 30 to 40 years old + ... 45: 387, # 40 to 50 years old + ... 55: 22, # 50 to 60 years old + ... 65: 6, # 60 to 70 years old + ... }) + +The 50th percentile (median) is the 536th person out of the 1071 +member cohort. That person is in the 30 to 40 year old age group. + +The regular median() function would assume that everyone in the +tricenarian age group was exactly 35 years old. A more tenable +assumption is that the 484 members of that age group are evenly +distributed between 30 and 40. For that, we use median_grouped(). + + >>> data = list(demographics.elements()) + >>> median(data) + 35 + >>> round(median_grouped(data, interval=10), 1) + 37.5 + +The caller is responsible for making sure the data points are separated +by exact multiples of *interval*. This is essential for getting a +correct result. The function does not check this precondition. + +Inputs may be any numeric type that can be coerced to a float during +the interpolation step. + +""" else: - def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: + """Return the 50th percentile (median) of grouped continuous data. + + >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) + 3.7 + >>> median_grouped([52, 52, 53, 54]) + 52.5 + + This calculates the median as the 50th percentile, and should be + used when your data is continuous and grouped. In the above example, + the values 1, 2, 3, etc. actually represent the midpoint of classes + 0.5-1.5, 1.5-2.5, 2.5-3.5, etc. The middle value falls somewhere in + class 3.5-4.5, and interpolation is used to estimate it. + + Optional argument ``interval`` represents the class interval, and + defaults to 1. Changing the class interval naturally will change the + interpolated 50th percentile value: + + >>> median_grouped([1, 3, 3, 5, 7], interval=1) + 3.25 + >>> median_grouped([1, 3, 3, 5, 7], interval=2) + 3.5 + + This function does not check whether the data points are at least + ``interval`` apart. + """ + +def mode(data: Iterable[_HashableT]) -> _HashableT: + """Return the most common data point from discrete or nominal data. + +``mode`` assumes discrete data, and returns a single value. 
This is the +standard treatment of the mode as commonly taught in schools: + + >>> mode([1, 1, 2, 3, 3, 3, 3, 4]) + 3 + +This also works with nominal (non-numeric) data: + + >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) + 'red' + +If there are multiple modes with same frequency, return the first one +encountered: + + >>> mode(['red', 'red', 'green', 'blue', 'blue']) + 'red' + +If *data* is empty, ``mode``, raises StatisticsError. + +""" +def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: + """Return a list of the most frequently occurring values. + +Will return more than one result if there are multiple modes +or an empty list if *data* is empty. + +>>> multimode('aabbbbbbbbcc') +['b'] +>>> multimode('aabbbbccddddeeffffgg') +['b', 'd', 'f'] +>>> multimode('') +[] + +""" +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: + """Return the square root of the population variance. + +See ``pvariance`` for arguments and other details. + +>>> pstdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) +0.986893273527251 + +""" +def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: + """Return the population variance of ``data``. + +data should be a sequence or iterable of Real-valued numbers, with at least one +value. The optional argument mu, if given, should be the mean of +the data. If it is missing or None, the mean is automatically calculated. + +Use this function to calculate the variance from the entire population. +To estimate the variance from a sample, the ``variance`` function is +usually a better choice. -def mode(data: Iterable[_HashableT]) -> _HashableT: ... -def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... -def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... -def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +Examples: + +>>> data = [0.0, 0.25, 0.25, 1.25, 1.5, 1.75, 2.75, 3.25] +>>> pvariance(data) +1.25 + +If you have already calculated the mean of the data, you can pass it as +the optional second argument to avoid recalculating it: + +>>> mu = mean(data) +>>> pvariance(data, mu) +1.25 + +Decimals and Fractions are supported: + +>>> from decimal import Decimal as D +>>> pvariance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) +Decimal('24.815') + +>>> from fractions import Fraction as F +>>> pvariance([F(1, 4), F(5, 4), F(1, 2)]) +Fraction(13, 72) + +""" def quantiles( data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" -) -> list[_NumberT]: ... -def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... -def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +) -> list[_NumberT]: + """Divide *data* into *n* continuous intervals with equal probability. + +Returns a list of (n - 1) cut points separating the intervals. + +Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. +Set *n* to 100 for percentiles which gives the 99 cuts points that +separate *data* in to 100 equal sized groups. + +The *data* can be any iterable containing sample. +The cut points are linearly interpolated between data points. + +If *method* is set to *inclusive*, *data* is treated as population +data. The minimum value is treated as the 0th percentile and the +maximum value is treated as the 100th percentile. 
+ +""" +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: + """Return the square root of the sample variance. + +See ``variance`` for arguments and other details. + +>>> stdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) +1.0810874155219827 + +""" +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: + """Return the sample variance of data. + +data should be an iterable of Real-valued numbers, with at least two +values. The optional argument xbar, if given, should be the mean of +the data. If it is missing or None, the mean is automatically calculated. + +Use this function when your data is a sample from a population. To +calculate the variance from the entire population, see ``pvariance``. + +Examples: + +>>> data = [2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5] +>>> variance(data) +1.3720238095238095 + +If you have already calculated the mean of your data, you can pass it as +the optional second argument ``xbar`` to avoid recalculating it: + +>>> m = mean(data) +>>> variance(data, m) +1.3720238095238095 + +This function does not check that ``xbar`` is actually the mean of +``data``. Giving arbitrary values for ``xbar`` may lead to invalid or +impossible results. + +Decimals and Fractions are supported: + +>>> from decimal import Decimal as D +>>> variance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) +Decimal('31.01875') + +>>> from fractions import Fraction as F +>>> variance([F(1, 6), F(1, 2), F(5, 3)]) +Fraction(67, 108) + +""" class NormalDist: + """Normal distribution of a random variable +""" __slots__ = {"_mu": "Arithmetic mean of a normal distribution", "_sigma": "Standard deviation of a normal distribution"} - def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: + """NormalDist where mu is the mean and sigma is the standard deviation. +""" @property - def mean(self) -> float: ... + def mean(self) -> float: + """Arithmetic mean of the normal distribution. +""" @property - def median(self) -> float: ... + def median(self) -> float: + """Return the median of the normal distribution +""" @property - def mode(self) -> float: ... + def mode(self) -> float: + """Return the mode of the normal distribution + +The mode is the value x where which the probability density +function (pdf) takes its maximum value. +""" @property - def stdev(self) -> float: ... + def stdev(self) -> float: + """Standard deviation of the normal distribution. +""" @property - def variance(self) -> float: ... + def variance(self) -> float: + """Square of the standard deviation. +""" @classmethod - def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... - def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: ... - def pdf(self, x: float) -> float: ... - def cdf(self, x: float) -> float: ... - def inv_cdf(self, p: float) -> float: ... - def overlap(self, other: NormalDist) -> float: ... - def quantiles(self, n: int = 4) -> list[float]: ... - def zscore(self, x: float) -> float: ... - def __eq__(x1, x2: object) -> bool: ... - def __add__(x1, x2: float | NormalDist) -> NormalDist: ... - def __sub__(x1, x2: float | NormalDist) -> NormalDist: ... - def __mul__(x1, x2: float) -> NormalDist: ... - def __truediv__(x1, x2: float) -> NormalDist: ... - def __pos__(x1) -> NormalDist: ... - def __neg__(x1) -> NormalDist: ... + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: + """Make a normal distribution instance from sample data. 
+""" + def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: + """Generate *n* samples for a given mean and standard deviation. +""" + def pdf(self, x: float) -> float: + """Probability density function. P(x <= X < x+dx) / dx +""" + def cdf(self, x: float) -> float: + """Cumulative distribution function. P(X <= x) +""" + def inv_cdf(self, p: float) -> float: + """Inverse cumulative distribution function. x : P(X <= x) = p + +Finds the value of the random variable such that the probability of +the variable being less than or equal to that value equals the given +probability. + +This function is also called the percent point function or quantile +function. +""" + def overlap(self, other: NormalDist) -> float: + """Compute the overlapping coefficient (OVL) between two normal distributions. + +Measures the agreement between two normal probability distributions. +Returns a value between 0.0 and 1.0 giving the overlapping area in +the two underlying probability density functions. + + >>> N1 = NormalDist(2.4, 1.6) + >>> N2 = NormalDist(3.2, 2.0) + >>> N1.overlap(N2) + 0.8035050657330205 +""" + def quantiles(self, n: int = 4) -> list[float]: + """Divide into *n* continuous intervals with equal probability. + +Returns a list of (n - 1) cut points separating the intervals. + +Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. +Set *n* to 100 for percentiles which gives the 99 cuts points that +separate the normal distribution in to 100 equal sized groups. +""" + def zscore(self, x: float) -> float: + """Compute the Standard Score. (x - mean) / stdev + +Describes *x* in terms of the number of standard deviations +above or below the mean of the normal distribution. +""" + def __eq__(x1, x2: object) -> bool: + """Two NormalDist objects are equal if their mu and sigma are both equal. +""" + def __add__(x1, x2: float | NormalDist) -> NormalDist: + """Add a constant or another NormalDist instance. + +If *other* is a constant, translate mu by the constant, +leaving sigma unchanged. + +If *other* is a NormalDist, add both the means and the variances. +Mathematically, this works only if the two distributions are +independent or if they are jointly normally distributed. +""" + def __sub__(x1, x2: float | NormalDist) -> NormalDist: + """Subtract a constant or another NormalDist instance. + +If *other* is a constant, translate by the constant mu, +leaving sigma unchanged. + +If *other* is a NormalDist, subtract the means and add the variances. +Mathematically, this works only if the two distributions are +independent or if they are jointly normally distributed. +""" + def __mul__(x1, x2: float) -> NormalDist: + """Multiply both mu and sigma by a constant. + +Used for rescaling, perhaps to change measurement units. +Sigma is scaled with the absolute value of the constant. +""" + def __truediv__(x1, x2: float) -> NormalDist: + """Divide both mu and sigma by a constant. + +Used for rescaling, perhaps to change measurement units. +Sigma is scaled with the absolute value of the constant. +""" + def __pos__(x1) -> NormalDist: + """Return a copy of the instance. +""" + def __neg__(x1) -> NormalDist: + """Negates mu while keeping sigma the same. +""" __radd__ = __add__ - def __rsub__(x1, x2: float | NormalDist) -> NormalDist: ... + def __rsub__(x1, x2: float | NormalDist) -> NormalDist: + """Subtract a NormalDist from a constant or another NormalDist. +""" __rmul__ = __mul__ - def __hash__(self) -> int: ... 
+ def __hash__(self) -> int: + """NormalDist objects hash equal if their mu and sigma are both equal. +""" if sys.version_info >= (3, 12): def correlation( x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear" - ) -> float: ... + ) -> float: + """Pearson's correlation coefficient + +Return the Pearson's correlation coefficient for two inputs. Pearson's +correlation coefficient *r* takes values between -1 and +1. It measures +the strength and direction of a linear relationship. + +>>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] +>>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] +>>> correlation(x, x) +1.0 +>>> correlation(x, y) +-1.0 + +If *method* is "ranked", computes Spearman's rank correlation coefficient +for two inputs. The data is replaced by ranks. Ties are averaged +so that equal values receive the same rank. The resulting coefficient +measures the strength of a monotonic relationship. + +Spearman's rank correlation coefficient is appropriate for ordinal +data or for continuous data that doesn't meet the linear proportion +requirement for Pearson's correlation coefficient. + +""" elif sys.version_info >= (3, 10): - def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... + def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: + """Pearson's correlation coefficient + + Return the Pearson's correlation coefficient for two inputs. Pearson's + correlation coefficient *r* takes values between -1 and +1. It measures the + strength and direction of the linear relationship, where +1 means very + strong, positive linear relationship, -1 very strong, negative linear + relationship, and 0 no linear relationship. + + >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] + >>> correlation(x, x) + 1.0 + >>> correlation(x, y) + -1.0 + + """ if sys.version_info >= (3, 10): - def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: ... + def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: + """Covariance + +Return the sample covariance of two inputs *x* and *y*. Covariance +is a measure of the joint variability of two inputs. + +>>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] +>>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] +>>> covariance(x, y) +0.75 +>>> z = [9, 8, 7, 6, 5, 4, 3, 2, 1] +>>> covariance(x, z) +-7.5 +>>> covariance(z, x) +-7.5 + +""" class LinearRegression(NamedTuple): + """LinearRegression(slope, intercept) +""" slope: float intercept: float if sys.version_info >= (3, 11): def linear_regression( regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /, *, proportional: bool = False - ) -> LinearRegression: ... + ) -> LinearRegression: + """Slope and intercept for simple linear regression. + +Return the slope and intercept of simple linear regression +parameters estimated using ordinary least squares. Simple linear +regression describes relationship between an independent variable +*x* and a dependent variable *y* in terms of a linear function: + + y = slope * x + intercept + noise + +where *slope* and *intercept* are the regression parameters that are +estimated, and noise represents the variability of the data that was +not explained by the linear regression (it is equal to the +difference between predicted and actual values of the dependent +variable). + +The parameters are returned as a named tuple. 
+ +>>> x = [1, 2, 3, 4, 5] +>>> noise = NormalDist().samples(5, seed=42) +>>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] +>>> linear_regression(x, y) #doctest: +ELLIPSIS +LinearRegression(slope=3.17495..., intercept=1.00925...) + +If *proportional* is true, the independent variable *x* and the +dependent variable *y* are assumed to be directly proportional. +The data is fit to a line passing through the origin. + +Since the *intercept* will always be 0.0, the underlying linear +function simplifies to: + + y = slope * x + noise + +>>> y = [3 * x[i] + noise[i] for i in range(5)] +>>> linear_regression(x, y, proportional=True) #doctest: +ELLIPSIS +LinearRegression(slope=2.90475..., intercept=0.0) + +""" elif sys.version_info >= (3, 10): - def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: ... + def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: + """Slope and intercept for simple linear regression. + + Return the slope and intercept of simple linear regression + parameters estimated using ordinary least squares. Simple linear + regression describes relationship between an independent variable + *x* and a dependent variable *y* in terms of linear function: + + y = slope * x + intercept + noise + + where *slope* and *intercept* are the regression parameters that are + estimated, and noise represents the variability of the data that was + not explained by the linear regression (it is equal to the + difference between predicted and actual values of the dependent + variable). + + The parameters are returned as a named tuple. + + >>> x = [1, 2, 3, 4, 5] + >>> noise = NormalDist().samples(5, seed=42) + >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] + >>> linear_regression(x, y) #doctest: +ELLIPSIS + LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) + + """ if sys.version_info >= (3, 13): _Kernel: TypeAlias = Literal[ @@ -153,7 +801,122 @@ if sys.version_info >= (3, 13): ] def kde( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False - ) -> Callable[[float], float]: ... + ) -> Callable[[float], float]: + """Kernel Density Estimation: Create a continuous probability density +function or cumulative distribution function from discrete samples. + +The basic idea is to smooth the data using a kernel function +to help draw inferences about a population from a sample. + +The degree of smoothing is controlled by the scaling parameter h +which is called the bandwidth. Smaller values emphasize local +features while larger values give smoother results. + +The kernel determines the relative weights of the sample data +points. Generally, the choice of kernel shape does not matter +as much as the more influential bandwidth smoothing parameter. + +Kernels that give some weight to every sample point: + + normal (gauss) + logistic + sigmoid + +Kernels that only give weight to sample points within +the bandwidth: + + rectangular (uniform) + triangular + parabolic (epanechnikov) + quartic (biweight) + triweight + cosine + +If *cumulative* is true, will return a cumulative distribution function. + +A StatisticsError will be raised if the data sequence is empty. 
+ +Example +------- + +Given a sample of six data points, construct a continuous +function that estimates the underlying probability density: + + >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> f_hat = kde(sample, h=1.5) + +Compute the area under the curve: + + >>> area = sum(f_hat(x) for x in range(-20, 20)) + >>> round(area, 4) + 1.0 + +Plot the estimated probability density function at +evenly spaced points from -6 to 10: + + >>> for x in range(-6, 11): + ... density = f_hat(x) + ... plot = ' ' * int(density * 400) + 'x' + ... print(f'{x:2}: {density:.3f} {plot}') + ... + -6: 0.002 x + -5: 0.009 x + -4: 0.031 x + -3: 0.070 x + -2: 0.111 x + -1: 0.125 x + 0: 0.110 x + 1: 0.086 x + 2: 0.068 x + 3: 0.059 x + 4: 0.066 x + 5: 0.082 x + 6: 0.082 x + 7: 0.058 x + 8: 0.028 x + 9: 0.009 x + 10: 0.002 x + +Estimate P(4.5 < X <= 7.5), the probability that a new sample value +will be between 4.5 and 7.5: + + >>> cdf = kde(sample, h=1.5, cumulative=True) + >>> round(cdf(7.5) - cdf(4.5), 2) + 0.22 + +References +---------- + +Kernel density estimation and its application: +https://www.itm-conferences.org/articles/itmconf/pdf/2018/08/itmconf_sam2018_00037.pdf + +Kernel functions in common use: +https://en.wikipedia.org/wiki/Kernel_(statistics)#kernel_functions_in_common_use + +Interactive graphical demonstration and exploration: +https://demonstrations.wolfram.com/KernelDensityEstimation/ + +Kernel estimation of cumulative distribution function of a random variable with bounded support +https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf + +""" def kde_random( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: _Seed | None = None - ) -> Callable[[], float]: ... + ) -> Callable[[], float]: + """Return a function that makes a random selection from the estimated +probability density function created by kde(data, h, kernel). + +Providing a *seed* allows reproducible selections within a single +thread. The seed may be an integer, float, str, or bytes. + +A StatisticsError will be raised if the *data* sequence is empty. + +Example: + +>>> data = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] +>>> rand = kde_random(data, h=1.5, seed=8675309) +>>> new_selections = [rand() for i in range(10)] +>>> [round(x, 1) for x in new_selections] +[0.7, 6.2, 1.2, 6.9, 7.0, 1.8, 2.5, -0.5, -1.8, 5.6] + +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi index c8b32a98e26d7..5b8e9e02c1125 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi @@ -1,3 +1,18 @@ +"""A collection of string constants. 
+ +Public module variables: + +whitespace -- a string containing all ASCII whitespace +ascii_lowercase -- a string containing all ASCII lowercase letters +ascii_uppercase -- a string containing all ASCII uppercase letters +ascii_letters -- a string containing all ASCII letters +digits -- a string containing all ASCII decimal digits +hexdigits -- a string containing all ASCII hexadecimal digits +octdigits -- a string containing all ASCII octal digits +punctuation -- a string containing all ASCII punctuation characters +printable -- a string containing all ASCII characters considered printable + +""" import sys from _typeshed import StrOrLiteralStr from collections.abc import Iterable, Mapping, Sequence @@ -30,9 +45,21 @@ octdigits: Final = "01234567" punctuation: Final = r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~""" printable: Final[LiteralString] # string too long -def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: + """capwords(s [,sep]) -> string + +Split the argument into words using split, capitalize each +word using capitalize, and join the capitalized words using +join. If the optional second argument sep is absent or None, +runs of whitespace characters are replaced by a single space +and leading and trailing whitespace are removed, otherwise +sep is used to split and join the words. + +""" class Template: + """A string class for supporting $-substitutions. +""" template: str delimiter: ClassVar[str] idpattern: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi index 9906d31c63915..ced5ba899d636 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi @@ -1,3 +1,5 @@ +"""Support for template string literals (t-strings). +""" from collections.abc import Iterator from types import GenericAlias from typing import Any, Literal, TypeVar, final, overload @@ -6,18 +8,30 @@ _T = TypeVar("_T") @final class Template: # TODO: consider making `Template` generic on `TypeVarTuple` + """Template object +""" strings: tuple[str, ...] interpolations: tuple[Interpolation, ...] def __new__(cls, *args: str | Interpolation) -> Template: ... - def __iter__(self) -> Iterator[str | Interpolation]: ... - def __add__(self, other: Template, /) -> Template: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __iter__(self) -> Iterator[str | Interpolation]: + """Implement iter(self). +""" + def __add__(self, other: Template, /) -> Template: + """Return self+value. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @property - def values(self) -> tuple[Any, ...]: ... # Tuple of interpolation values, which can have any type + def values(self) -> tuple[Any, ...]: # Tuple of interpolation values, which can have any type + """Values of interpolations +""" @final class Interpolation: + """Interpolation object +""" value: Any # TODO: consider making `Interpolation` generic in runtime expression: str conversion: Literal["a", "r", "s"] | None @@ -28,9 +42,13 @@ class Interpolation: def __new__( cls, value: Any, expression: str = "", conversion: Literal["a", "r", "s"] | None = None, format_spec: str = "" ) -> Interpolation: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" @overload -def convert(obj: _T, /, conversion: None) -> _T: ... +def convert(obj: _T, /, conversion: None) -> _T: + """Convert *obj* using formatted string literal semantics. +""" @overload def convert(obj: object, /, conversion: Literal["r", "s", "a"]) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi index d67955e499c85..d92673ccb76d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi @@ -1,3 +1,8 @@ +"""Library that exposes various tables found in the StringPrep RFC 3454. + +There are two kinds of tables: sets, for which a member test is provided, +and mappings, for which a mapping function is provided. +""" from typing import Final b1_set: Final[set[int]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi index 2c26908746ecc..3ae8477fc9cd3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi @@ -1,3 +1,32 @@ +"""Functions to convert between Python values and C structs. +Python bytes objects are used to hold the data representing the C struct +and also as format strings (explained below) to describe the layout of data +in the C struct. + +The optional first format char indicates byte order, size and alignment: + @: native order, size & alignment (default) + =: native order, std. size & alignment + <: little-endian, std. size & alignment + >: big-endian, std. size & alignment + !: same as > + +The remaining chars indicate types of args and must match exactly; +these can be preceded by a decimal repeat count: + x: pad byte (no data); c:char; b:signed byte; B:unsigned byte; + ?: _Bool (requires C99; if not available, char is used instead) + h:short; H:unsigned short; i:int; I:unsigned int; + l:long; L:unsigned long; f:float; d:double; e:half-float. +Special cases (preceding decimal count indicates length): + s:string (array of char); p: pascal string (with count byte). +Special cases (only available in native format): + n:ssize_t; N:size_t; + P:an integer type that is wide enough to hold a pointer. +Special case (not in native mode unless 'long long' in platform C): + q:long long; Q:unsigned long long +Whitespace between formats is ignored. + +The variable struct.error is an exception raised on errors. +""" from _struct import * __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi index e1e25bcb50cbe..cca4bd3213ee0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi @@ -1,3 +1,36 @@ +"""Subprocesses with accessible I/O streams + +This module allows you to spawn processes, connect to their +input/output/error pipes, and obtain their return codes. + +For a complete description of this module see the Python documentation. + +Main API +======== +run(...): Runs a command, waits for it to complete, then returns a + CompletedProcess instance. 
+Popen(...): A class for flexibly executing a command in a new process + +Constants +--------- +DEVNULL: Special value that indicates that os.devnull should be used +PIPE: Special value that indicates a pipe should be created +STDOUT: Special value that indicates that stderr should go to stdout + + +Older API +========= +call(...): Runs a command, waits for it to complete, then returns + the return code. +check_call(...): Same as call() but raises CalledProcessError() + if return code is not 0 +check_output(...): Same as check_call() but returns the contents of + stdout instead of a return code +getoutput(...): Runs a command in the shell, waits for it to complete, + then returns the output +getstatusoutput(...): Runs a command in the shell, waits for it to complete, + then returns a (exitcode, output) tuple +""" import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence @@ -75,6 +108,16 @@ if sys.version_info >= (3, 11): _USE_POSIX_SPAWN: Final[bool] class CompletedProcess(Generic[_T]): + """A process that has finished running. + +This is returned by run(). + +Attributes: + args: The list or str args passed to run(). + returncode: The exit code of the process, negative for signals. + stdout: The standard output (None if not captured). + stderr: The standard error (None if not captured). +""" # morally: _CMD args: Any returncode: int @@ -83,8 +126,14 @@ class CompletedProcess(Generic[_T]): stdout: _T stderr: _T def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... - def check_returncode(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def check_returncode(self) -> None: + """Raise CalledProcessError if the exit code is non-zero. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -121,7 +170,35 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> CompletedProcess[str]: ... + ) -> CompletedProcess[str]: + """Run command with arguments and return a CompletedProcess instance. + +The returned instance will have attributes args, returncode, stdout and +stderr. By default, stdout and stderr are not captured, and those attributes +will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, +or pass capture_output=True to capture both. + +If check is True and the exit code was non-zero, it raises a +CalledProcessError. The CalledProcessError object will have the return code +in the returncode attribute, and output & stderr attributes if those streams +were captured. + +If timeout (seconds) is given and the process takes too long, + a TimeoutExpired exception will be raised. + +There is an optional argument "input", allowing you to +pass bytes or a string to the subprocess's stdin. If you use this argument +you may not also use the Popen constructor's "stdin" argument, as +it will be used internally. + +By default, all communication is in bytes, and therefore any "input" should +be bytes, and the stdout and stderr will be bytes. If in text mode, any +"input" should be a string, and stdout and stderr will be strings decoded +according to locale encoding, or by "encoding" if set. 
Text mode is +triggered by setting any of text, encoding, errors or universal_newlines. + +The other arguments are the same as for the Popen constructor. +""" @overload def run( args: _CMD, @@ -328,7 +405,35 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> CompletedProcess[str]: ... + ) -> CompletedProcess[str]: + """Run command with arguments and return a CompletedProcess instance. + + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, + or pass capture_output=True to capture both. + + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. + + If timeout is given, and the process takes too long, a TimeoutExpired + exception will be raised. + + There is an optional argument "input", allowing you to + pass bytes or a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. + + By default, all communication is in bytes, and therefore any "input" should + be bytes, and the stdout and stderr will be bytes. If in text mode, any + "input" should be a string, and stdout and stderr will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode is + triggered by setting any of text, encoding, errors or universal_newlines. + + The other arguments are the same as for the Popen constructor. + """ @overload def run( args: _CMD, @@ -529,7 +634,34 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> CompletedProcess[str]: ... + ) -> CompletedProcess[str]: + """Run command with arguments and return a CompletedProcess instance. + + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. + + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. + + If timeout is given, and the process takes too long, a TimeoutExpired + exception will be raised. + + There is an optional argument "input", allowing you to + pass bytes or a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. + + By default, all communication is in bytes, and therefore any "input" should + be bytes, and the stdout and stderr will be bytes. If in text mode, any + "input" should be a string, and stdout and stderr will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode is + triggered by setting any of text, encoding, errors or universal_newlines. + + The other arguments are the same as for the Popen constructor. + """ @overload def run( args: _CMD, @@ -723,7 +855,14 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> int: ... + ) -> int: + """Run command with arguments. 
Wait for command to complete or +for timeout seconds, then return the returncode attribute. + +The arguments are the same as for the Popen constructor. Example: + +retcode = call(["ls", "-l"]) +""" elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -754,7 +893,14 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: ... + ) -> int: + """Run command with arguments. Wait for command to complete or + timeout, then return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + """ else: def call( @@ -783,7 +929,14 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> int: ... + ) -> int: + """Run command with arguments. Wait for command to complete or + timeout, then return the returncode attribute. + + The arguments are the same as for the Popen constructor. Example: + + retcode = call(["ls", "-l"]) + """ # Same args as Popen.__init__ if sys.version_info >= (3, 11): @@ -816,7 +969,16 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> int: ... + ) -> int: + """Run command with arguments. Wait for command to complete. If +the exit code was zero then return, otherwise raise +CalledProcessError. The CalledProcessError object will have the +return code in the returncode attribute. + +The arguments are the same as for the call function. Example: + +check_call(["ls", "-l"]) +""" elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -847,7 +1009,16 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> int: ... + ) -> int: + """Run command with arguments. Wait for command to complete. If + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the call function. Example: + + check_call(["ls", "-l"]) + """ else: def check_call( @@ -876,7 +1047,16 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> int: ... + ) -> int: + """Run command with arguments. Wait for command to complete. If + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. + + The arguments are the same as for the call function. Example: + + check_call(["ls", "-l"]) + """ if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -910,7 +1090,41 @@ if sys.version_info >= (3, 11): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> str: ... + ) -> str: + """Run command with arguments and return its output. + +If the exit code was non-zero it raises a CalledProcessError. The +CalledProcessError object will have the return code in the returncode +attribute and output in the output attribute. + +The arguments are the same as for the Popen constructor. Example: + +>>> check_output(["ls", "-l", "/dev/null"]) +b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + +The stdout argument is not allowed as it is used internally. +To capture standard error in the result, use stderr=STDOUT. + +>>> check_output(["/bin/sh", "-c", +... "ls -l non_existent_file ; exit 0"], +... 
stderr=STDOUT) +b'ls: non_existent_file: No such file or directory\\n' + +There is an additional optional argument, "input", allowing you to +pass a string to the subprocess's stdin. If you use this argument +you may not also use the Popen constructor's "stdin" argument, as +it too will be used internally. Example: + +>>> check_output(["sed", "-e", "s/foo/bar/"], +... input=b"when in the course of fooman events\\n") +b'when in the course of barman events\\n' + +By default, all communication is in bytes, and therefore any "input" +should be bytes, and the return value will be bytes. If in text mode, +any "input" should be a string, and the return value will be a string +decoded according to locale encoding, or by "encoding" if set. Text mode +is triggered by setting any of text, encoding, errors or universal_newlines. +""" @overload def check_output( args: _CMD, @@ -1099,7 +1313,41 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> str: ... + ) -> str: + """Run command with arguments and return its output. + + If the exit code was non-zero it raises a CalledProcessError. The + CalledProcessError object will have the return code in the returncode + attribute and output in the output attribute. + + The arguments are the same as for the Popen constructor. Example: + + >>> check_output(["ls", "-l", "/dev/null"]) + b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + + The stdout argument is not allowed as it is used internally. + To capture standard error in the result, use stderr=STDOUT. + + >>> check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT) + b'ls: non_existent_file: No such file or directory\\n' + + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example: + + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... input=b"when in the course of fooman events\\n") + b'when in the course of barman events\\n' + + By default, all communication is in bytes, and therefore any "input" + should be bytes, and the return value will be bytes. If in text mode, + any "input" should be a string, and the return value will be a string + decoded according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or universal_newlines. + """ @overload def check_output( args: _CMD, @@ -1281,7 +1529,41 @@ else: group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> str: ... + ) -> str: + """Run command with arguments and return its output. + + If the exit code was non-zero it raises a CalledProcessError. The + CalledProcessError object will have the return code in the returncode + attribute and output in the output attribute. + + The arguments are the same as for the Popen constructor. Example: + + >>> check_output(["ls", "-l", "/dev/null"]) + b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + + The stdout argument is not allowed as it is used internally. + To capture standard error in the result, use stderr=STDOUT. + + >>> check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT) + b'ls: non_existent_file: No such file or directory\\n' + + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. 
If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example: + + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... input=b"when in the course of fooman events\\n") + b'when in the course of barman events\\n' + + By default, all communication is in bytes, and therefore any "input" + should be bytes, and the return value will be bytes. If in text mode, + any "input" should be a string, and the return value will be a string + decoded according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or universal_newlines. + """ @overload def check_output( args: _CMD, @@ -1436,6 +1718,12 @@ DEVNULL: Final[int] class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): + """This exception is raised when the timeout expires while waiting for a +child process. + +Attributes: + cmd, output, stdout, stderr, timeout +""" def __init__( self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None ) -> None: ... @@ -1448,6 +1736,12 @@ class TimeoutExpired(SubprocessError): stderr: bytes | None class CalledProcessError(SubprocessError): + """Raised when run() is called with check=True and the process +returns a non-zero exit status. + +Attributes: + cmd, returncode, stdout, stderr, output +""" returncode: int # morally: _CMD cmd: Any @@ -1462,6 +1756,61 @@ class CalledProcessError(SubprocessError): ) -> None: ... class Popen(Generic[AnyStr]): + """Execute a child program in a new process. + +For a complete description of the arguments see the Python documentation. + +Arguments: + args: A string, or a sequence of program arguments. + + bufsize: supplied as the buffering argument to the open() function when + creating the stdin/stdout/stderr pipe file objects + + executable: A replacement program to execute. + + stdin, stdout and stderr: These specify the executed programs' standard + input, standard output and standard error file handles, respectively. + + preexec_fn: (POSIX only) An object to be called in the child process + just before the child is executed. + + close_fds: Controls closing or inheriting of file descriptors. + + shell: If true, the command will be executed through the shell. + + cwd: Sets the current directory before the child is executed. + + env: Defines the environment variables for the new process. + + text: If true, decode stdin, stdout and stderr using the given encoding + (if set) or the system default otherwise. + + universal_newlines: Alias of text, provided for backwards compatibility. + + startupinfo and creationflags (Windows only) + + restore_signals (POSIX only) + + start_new_session (POSIX only) + + process_group (POSIX only) + + group (POSIX only) + + extra_groups (POSIX only) + + user (POSIX only) + + umask (POSIX only) + + pass_fds (POSIX only) + + encoding and errors: Text mode encoding and error handling to use for + file objects stdin, stdout and stderr. + +Attributes: + stdin, stdout, stderr, pid, returncode +""" args: _CMD stdin: IO[AnyStr] | None stdout: IO[AnyStr] | None @@ -1502,7 +1851,9 @@ class Popen(Generic[AnyStr]): umask: int = -1, pipesize: int = -1, process_group: int | None = None, - ) -> None: ... + ) -> None: + """Create new Popen instance. +""" @overload def __init__( self: Popen[str], @@ -1690,7 +2041,9 @@ class Popen(Generic[AnyStr]): extra_groups: Iterable[str | int] | None = None, umask: int = -1, pipesize: int = -1, - ) -> None: ... 
+ ) -> None: + """Create new Popen instance. +""" @overload def __init__( self: Popen[str], @@ -1871,7 +2224,9 @@ class Popen(Generic[AnyStr]): group: str | int | None = None, extra_groups: Iterable[str | int] | None = None, umask: int = -1, - ) -> None: ... + ) -> None: + """Create new Popen instance. +""" @overload def __init__( self: Popen[str], @@ -2019,32 +2374,145 @@ class Popen(Generic[AnyStr]): umask: int = -1, ) -> None: ... - def poll(self) -> int | None: ... - def wait(self, timeout: float | None = None) -> int: ... + def poll(self) -> int | None: + """Check if child process has terminated. Set and return returncode +attribute. +""" + def wait(self, timeout: float | None = None) -> int: + """Wait for child process to terminate; returns self.returncode. +""" # morally the members of the returned tuple should be optional # TODO: this should allow ReadableBuffer for Popen[bytes], but adding # overloads for that runs into a mypy bug (python/mypy#14070). - def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... - def send_signal(self, sig: int) -> None: ... - def terminate(self) -> None: ... - def kill(self) -> None: ... + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: + """Interact with process: Send data to stdin and close it. +Read data from stdout and stderr, until end-of-file is +reached. Wait for process to terminate. + +The optional "input" argument should be data to be sent to the +child process, or None, if no data should be sent to the child. +communicate() returns a tuple (stdout, stderr). + +By default, all communication is in bytes, and therefore any +"input" should be bytes, and the (stdout, stderr) will be bytes. +If in text mode (indicated by self.text_mode), any "input" should +be a string, and (stdout, stderr) will be strings decoded +according to locale encoding, or by "encoding" if set. Text mode +is triggered by setting any of text, encoding, errors or +universal_newlines. +""" + def send_signal(self, sig: int) -> None: + """Send a signal to the process. +""" + def terminate(self) -> None: + """Terminate the process with SIGTERM + """ + def kill(self) -> None: + """Kill the process with SIGKILL + """ def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... - def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: ... + def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: + """Return (exitcode, output) of executing cmd in a shell. + +Execute the string 'cmd' in a shell with 'check_output' and +return a 2-tuple (status, output). The locale encoding is used +to decode the output and process newlines. + +A trailing newline is stripped from the output. +The exit status for the command can be interpreted +according to the rules for the function 'wait'. 
Example: + +>>> import subprocess +>>> subprocess.getstatusoutput('ls /bin/ls') +(0, '/bin/ls') +>>> subprocess.getstatusoutput('cat /bin/junk') +(1, 'cat: /bin/junk: No such file or directory') +>>> subprocess.getstatusoutput('/bin/junk') +(127, 'sh: /bin/junk: not found') +>>> subprocess.getstatusoutput('/bin/kill $$') +(-15, '') +""" + def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: + """Return output (stdout or stderr) of executing cmd in a shell. + +Like getstatusoutput(), except the exit status is ignored and the return +value is a string containing the command's output. Example: + +>>> import subprocess +>>> subprocess.getoutput('ls /bin/ls') +'/bin/ls' +""" else: - def getstatusoutput(cmd: _CMD) -> tuple[int, str]: ... - def getoutput(cmd: _CMD) -> str: ... + def getstatusoutput(cmd: _CMD) -> tuple[int, str]: + """Return (exitcode, output) of executing cmd in a shell. + + Execute the string 'cmd' in a shell with 'check_output' and + return a 2-tuple (status, output). The locale encoding is used + to decode the output and process newlines. + + A trailing newline is stripped from the output. + The exit status for the command can be interpreted + according to the rules for the function 'wait'. Example: + + >>> import subprocess + >>> subprocess.getstatusoutput('ls /bin/ls') + (0, '/bin/ls') + >>> subprocess.getstatusoutput('cat /bin/junk') + (1, 'cat: /bin/junk: No such file or directory') + >>> subprocess.getstatusoutput('/bin/junk') + (127, 'sh: /bin/junk: not found') + >>> subprocess.getstatusoutput('/bin/kill $$') + (-15, '') + """ + def getoutput(cmd: _CMD) -> str: + """Return output (stdout or stderr) of executing cmd in a shell. + + Like getstatusoutput(), except the exit status is ignored and the return + value is a string containing the command's output. Example: + + >>> import subprocess + >>> subprocess.getoutput('ls /bin/ls') + '/bin/ls' + """ + +def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: # undocumented + """ +Translate a sequence of arguments into a command line +string, using the same rules as the MS C runtime: + +1) Arguments are delimited by white space, which is either a + space or a tab. + +2) A string surrounded by double quotation marks is + interpreted as a single argument, regardless of white space + contained within. A quoted string can be embedded in an + argument. + +3) A double quotation mark preceded by a backslash is + interpreted as a literal double quotation mark. + +4) Backslashes are interpreted literally, unless they + immediately precede a double quotation mark. -def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented +5) If backslashes immediately precede a double quotation mark, + every pair of backslashes is interpreted as a literal + backslash. If the number of backslashes is odd, the last + backslash escapes the next double quotation mark as + described in rule 3. +""" if sys.platform == "win32": if sys.version_info >= (3, 13): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi index f83a0a4c520e7..7e4e7ff2186ac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi @@ -1,3 +1,107 @@ +"""Stuff to parse Sun and NeXT audio files. + +An audio file consists of a header followed by the data. The structure +of the header is as follows. 
+
+ +---------------+
+ | magic word |
+ +---------------+
+ | header size |
+ +---------------+
+ | data size |
+ +---------------+
+ | encoding |
+ +---------------+
+ | sample rate |
+ +---------------+
+ | # of channels |
+ +---------------+
+ | info |
+ | |
+ +---------------+
+The magic word consists of the 4 characters '.snd'. Apart from the
+info field, all header fields are 4 bytes in size. They are all
+32-bit unsigned integers encoded in big-endian byte order.
+
+The header size really gives the start of the data.
+The data size is the physical size of the data. From the other
+parameters the number of frames can be calculated.
+The encoding gives the way in which audio samples are encoded.
+Possible values are listed below.
+The info field currently consists of an ASCII string giving a
+human-readable description of the audio file. The info field is
+padded with NUL bytes to the header size.
+
+Usage.
+
+Reading audio files:
+ f = sunau.open(file, 'r')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods read(), seek(), and close().
+When the setpos() and rewind() methods are not used, the seek()
+method is not necessary.
+
+This returns an instance of a class with the following public methods:
+ getnchannels() -- returns number of audio channels (1 for
+ mono, 2 for stereo)
+ getsampwidth() -- returns sample width in bytes
+ getframerate() -- returns sampling frequency
+ getnframes() -- returns number of audio frames
+ getcomptype() -- returns compression type ('NONE' or 'ULAW')
+ getcompname() -- returns human-readable version of
+ compression type ('not compressed' matches 'NONE')
+ getparams() -- returns a namedtuple consisting of all of the
+ above in the above order
+ getmarkers() -- returns None (for compatibility with the
+ aifc module)
+ getmark(id) -- raises an error since the mark does not
+ exist (for compatibility with the aifc module)
+ readframes(n) -- returns at most n frames of audio
+ rewind() -- rewind to the beginning of the audio stream
+ setpos(pos) -- seek to the specified position
+ tell() -- return the current position
+ close() -- close the instance (make it unusable)
+The position returned by tell() and the position given to setpos()
+are compatible and have nothing to do with the actual position in the
+file.
+The close() method is called automatically when the class instance
+is destroyed.
+
+Writing audio files:
+ f = sunau.open(file, 'w')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods write(), tell(), seek(), and
+close().
+
+This returns an instance of a class with the following public methods:
+ setnchannels(n) -- set the number of channels
+ setsampwidth(n) -- set the sample width
+ setframerate(n) -- set the frame rate
+ setnframes(n) -- set the number of frames
+ setcomptype(type, name)
+ -- set the compression type and the
+ human-readable compression type
+ setparams(tuple)-- set all parameters at once
+ tell() -- return current position in output file
+ writeframesraw(data)
+ -- write audio frames without patching up the
+ file header
+ writeframes(data)
+ -- write audio frames and patch up the file header
+ close() -- patch up the file header and close the
+ output file
+You should set the parameters before the first writeframesraw or
+writeframes. The total number of frames does not need to be set,
+but when it is set to the correct value, the header does not have to
+be patched up.
+It is best to first set all parameters, perhaps possibly the +compression type, and then write audio frames using writeframesraw. +When all frames have been written, either call writeframes(b'') or +close() to patch up the sizes in the header. +The close() method is called automatically when the class instance +is destroyed. +""" from _typeshed import Unused from typing import IO, Any, Final, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias @@ -22,6 +126,8 @@ AUDIO_FILE_ENCODING_ALAW_8: Final = 27 AUDIO_UNKNOWN_SIZE: Final = 0xFFFFFFFF class _sunau_params(NamedTuple): + """_sunau_params(nchannels, sampwidth, framerate, nframes, comptype, compname) +""" nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi index 5344ce504c6c7..48ee2795602c2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi @@ -1,3 +1,5 @@ +"""Non-terminal symbols of Python grammar (from "graminit.h"). +""" from typing import Final single_input: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi index a727b878688ed..0a28503b3f84e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi @@ -1,3 +1,5 @@ +"""Interface to the compiler's internal symbol tables +""" import sys from _collections_abc import dict_keys from collections.abc import Sequence @@ -9,7 +11,12 @@ __all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] if sys.version_info >= (3, 13): __all__ += ["SymbolTableType"] -def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: ... +def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: + """Return the toplevel *SymbolTable* for the source code. + +*filename* is the name of the file with the code +and *compile_type* is the *compile()* mode argument. +""" if sys.version_info >= (3, 13): from enum import StrEnum @@ -26,63 +33,169 @@ if sys.version_info >= (3, 13): class SymbolTable: def __init__(self, raw_table: Any, filename: str) -> None: ... if sys.version_info >= (3, 13): - def get_type(self) -> SymbolTableType: ... + def get_type(self) -> SymbolTableType: + """Return the type of the symbol table. + +The value returned is one of the values in +the ``SymbolTableType`` enumeration. +""" else: - def get_type(self) -> str: ... - - def get_id(self) -> int: ... - def get_name(self) -> str: ... - def get_lineno(self) -> int: ... - def is_optimized(self) -> bool: ... - def is_nested(self) -> bool: ... - def has_children(self) -> bool: ... - def get_identifiers(self) -> dict_keys[str, int]: ... - def lookup(self, name: str) -> Symbol: ... - def get_symbols(self) -> list[Symbol]: ... - def get_children(self) -> list[SymbolTable]: ... + def get_type(self) -> str: + """Return the type of the symbol table. + + The values returned are 'class', 'module', 'function', + 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. + """ + + def get_id(self) -> int: + """Return an identifier for the table. + """ + def get_name(self) -> str: + """Return the table's name. + +This corresponds to the name of the class, function +or 'top' if the table is for a class, function or +global respectively. +""" + def get_lineno(self) -> int: + """Return the number of the first line in the +block for the table. 
+""" + def is_optimized(self) -> bool: + """Return *True* if the locals in the table +are optimizable. +""" + def is_nested(self) -> bool: + """Return *True* if the block is a nested class +or function. +""" + def has_children(self) -> bool: + """Return *True* if the block has nested namespaces. + """ + def get_identifiers(self) -> dict_keys[str, int]: + """Return a view object containing the names of symbols in the table. + """ + def lookup(self, name: str) -> Symbol: + """Lookup a *name* in the table. + +Returns a *Symbol* instance. +""" + def get_symbols(self) -> list[Symbol]: + """Return a list of *Symbol* instances for +names in the table. +""" + def get_children(self) -> list[SymbolTable]: + """Return a list of the nested symbol tables. + """ class Function(SymbolTable): - def get_parameters(self) -> tuple[str, ...]: ... - def get_locals(self) -> tuple[str, ...]: ... - def get_globals(self) -> tuple[str, ...]: ... - def get_frees(self) -> tuple[str, ...]: ... - def get_nonlocals(self) -> tuple[str, ...]: ... + def get_parameters(self) -> tuple[str, ...]: + """Return a tuple of parameters to the function. + """ + def get_locals(self) -> tuple[str, ...]: + """Return a tuple of locals in the function. + """ + def get_globals(self) -> tuple[str, ...]: + """Return a tuple of globals in the function. + """ + def get_frees(self) -> tuple[str, ...]: + """Return a tuple of free variables in the function. + """ + def get_nonlocals(self) -> tuple[str, ...]: + """Return a tuple of nonlocals in the function. + """ class Class(SymbolTable): if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") - def get_methods(self) -> tuple[str, ...]: ... + def get_methods(self) -> tuple[str, ...]: + """Return a tuple of methods declared in the class. + """ else: - def get_methods(self) -> tuple[str, ...]: ... + def get_methods(self) -> tuple[str, ...]: + """Return a tuple of methods declared in the class. + """ class Symbol: def __init__( self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False ) -> None: ... - def is_nonlocal(self) -> bool: ... - def get_name(self) -> str: ... - def is_referenced(self) -> bool: ... - def is_parameter(self) -> bool: ... + def is_nonlocal(self) -> bool: + """Return *True* if the symbol is nonlocal. +""" + def get_name(self) -> str: + """Return a name of a symbol. + """ + def is_referenced(self) -> bool: + """Return *True* if the symbol is used in +its block. +""" + def is_parameter(self) -> bool: + """Return *True* if the symbol is a parameter. + """ if sys.version_info >= (3, 14): - def is_type_parameter(self) -> bool: ... + def is_type_parameter(self) -> bool: + """Return *True* if the symbol is a type parameter. + """ - def is_global(self) -> bool: ... - def is_declared_global(self) -> bool: ... - def is_local(self) -> bool: ... - def is_annotated(self) -> bool: ... - def is_free(self) -> bool: ... + def is_global(self) -> bool: + """Return *True* if the symbol is global. + """ + def is_declared_global(self) -> bool: + """Return *True* if the symbol is declared global +with a global statement. +""" + def is_local(self) -> bool: + """Return *True* if the symbol is local. + """ + def is_annotated(self) -> bool: + """Return *True* if the symbol is annotated. + """ + def is_free(self) -> bool: + """Return *True* if a referenced symbol is +not assigned to. +""" if sys.version_info >= (3, 14): - def is_free_class(self) -> bool: ... 
+ def is_free_class(self) -> bool: + """Return *True* if a class-scoped symbol is free from +the perspective of a method. +""" - def is_imported(self) -> bool: ... - def is_assigned(self) -> bool: ... + def is_imported(self) -> bool: + """Return *True* if the symbol is created from +an import statement. +""" + def is_assigned(self) -> bool: + """Return *True* if a symbol is assigned to. +""" if sys.version_info >= (3, 14): - def is_comp_iter(self) -> bool: ... - def is_comp_cell(self) -> bool: ... + def is_comp_iter(self) -> bool: + """Return *True* if the symbol is a comprehension iteration variable. + """ + def is_comp_cell(self) -> bool: + """Return *True* if the symbol is a cell in an inlined comprehension. + """ + + def is_namespace(self) -> bool: + """Returns *True* if name binding introduces new namespace. + +If the name is used as the target of a function or class +statement, this will be true. + +Note that a single name can be bound to multiple objects. If +is_namespace() is true, the name may also be bound to other +objects, like an int or list, that does not introduce a new +namespace. +""" + def get_namespaces(self) -> Sequence[SymbolTable]: + """Return a list of namespaces bound to this name +""" + def get_namespace(self) -> SymbolTable: + """Return the single namespace bound to this name. - def is_namespace(self) -> bool: ... - def get_namespaces(self) -> Sequence[SymbolTable]: ... - def get_namespace(self) -> SymbolTable: ... +Raises ValueError if the name is bound to multiple namespaces +or no namespace. +""" class SymbolTableFactory: def new(self, table: Any, filename: str) -> SymbolTable: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index 7807b0eab01f6..cf33181842696 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -1,3 +1,74 @@ +"""This module provides access to some objects used or maintained by the +interpreter and to functions that interact strongly with the interpreter. + +Dynamic objects: + +argv -- command line arguments; argv[0] is the script pathname if known +path -- module search path; path[0] is the script directory, else '' +modules -- dictionary of loaded modules + +displayhook -- called to show results in an interactive session +excepthook -- called to handle any uncaught exception other than SystemExit + To customize printing in an interactive session or to install a custom + top-level exception handler, assign other functions to replace these. + +stdin -- standard input file object; used by input() +stdout -- standard output file object; used by print() +stderr -- standard error object; used for error messages + By assigning other file objects (or objects that behave like files) + to these, it is possible to redirect all of the interpreter's I/O. + +last_exc - the last uncaught exception + Only available in an interactive session after a + traceback has been printed. +last_type -- type of last uncaught exception +last_value -- value of last uncaught exception +last_traceback -- traceback of last uncaught exception + These three are the (deprecated) legacy representation of last_exc. 
+ +Static objects: + +builtin_module_names -- tuple of module names built into this interpreter +copyright -- copyright notice pertaining to this interpreter +exec_prefix -- prefix used to find the machine-specific Python library +executable -- absolute path of the executable binary of the Python interpreter +float_info -- a named tuple with information about the float implementation. +float_repr_style -- string indicating the style of repr() output for floats +hash_info -- a named tuple with information about the hash algorithm. +hexversion -- version information encoded as a single integer +implementation -- Python implementation information. +int_info -- a named tuple with information about the int implementation. +maxsize -- the largest supported length of containers. +maxunicode -- the value of the largest Unicode code point +platform -- platform identifier +prefix -- prefix used to find the Python library +thread_info -- a named tuple with information about the thread implementation. +version -- the version of this interpreter as a string +version_info -- version information as a named tuple +__stdin__ -- the original stdin; don't touch! +__stdout__ -- the original stdout; don't touch! +__stderr__ -- the original stderr; don't touch! +__displayhook__ -- the original displayhook; don't touch! +__excepthook__ -- the original excepthook; don't touch! + +Functions: + +displayhook() -- print an object to the screen, and save it in builtins._ +excepthook() -- print an exception and its traceback to sys.stderr +exception() -- return the current thread's active exception +exc_info() -- return information about the current thread's active exception +exit() -- exit the interpreter by raising SystemExit +getdlopenflags() -- returns flags to be used for dlopen() calls +getprofile() -- get the global profiling function +getrefcount() -- return the reference count for an object (plus one :-) +getrecursionlimit() -- return the max recursion depth for the interpreter +getsizeof() -- return the size of an object in bytes +gettrace() -- get the global debug tracing function +setdlopenflags() -- set the flags to be used for dlopen() calls +setprofile() -- set the global profiling function +setrecursionlimit() -- set the max recursion depth for the interpreter +settrace() -- set the global debug tracing function +""" import sys from _typeshed import MaybeNone, OptExcInfo, ProfileFunction, StrOrBytesPath, TraceFunction, structseq from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol @@ -102,6 +173,10 @@ flags: _flags @final @type_check_only class _flags(_UninstantiableStructseq, tuple[int, ...]): + """sys.flags + +Flags provided through command line arguments or environment vars. +""" # `safe_path` was added in py311 if sys.version_info >= (3, 11): __match_args__: Final = ( @@ -146,56 +221,98 @@ class _flags(_UninstantiableStructseq, tuple[int, ...]): ) @property - def debug(self) -> int: ... + def debug(self) -> int: + """-d +""" @property - def inspect(self) -> int: ... + def inspect(self) -> int: + """-i +""" @property - def interactive(self) -> int: ... + def interactive(self) -> int: + """-i +""" @property - def optimize(self) -> int: ... + def optimize(self) -> int: + """-O or -OO +""" @property - def dont_write_bytecode(self) -> int: ... + def dont_write_bytecode(self) -> int: + """-B +""" @property - def no_user_site(self) -> int: ... + def no_user_site(self) -> int: + """-s +""" @property - def no_site(self) -> int: ... 
+ def no_site(self) -> int: + """-S +""" @property - def ignore_environment(self) -> int: ... + def ignore_environment(self) -> int: + """-E +""" @property - def verbose(self) -> int: ... + def verbose(self) -> int: + """-v +""" @property - def bytes_warning(self) -> int: ... + def bytes_warning(self) -> int: + """-b +""" @property - def quiet(self) -> int: ... + def quiet(self) -> int: + """-q +""" @property - def hash_randomization(self) -> int: ... + def hash_randomization(self) -> int: + """-R +""" @property - def isolated(self) -> int: ... + def isolated(self) -> int: + """-I +""" @property - def dev_mode(self) -> bool: ... + def dev_mode(self) -> bool: + """-X dev +""" @property - def utf8_mode(self) -> int: ... + def utf8_mode(self) -> int: + """-X utf8 +""" if sys.version_info >= (3, 10): @property - def warn_default_encoding(self) -> int: ... + def warn_default_encoding(self) -> int: + """-X warn_default_encoding +""" if sys.version_info >= (3, 11): @property - def safe_path(self) -> bool: ... + def safe_path(self) -> bool: + """-P +""" if sys.version_info >= (3, 13): @property - def gil(self) -> Literal[0, 1]: ... + def gil(self) -> Literal[0, 1]: + """-X gil +""" if sys.version_info >= (3, 14): @property - def thread_inherit_context(self) -> Literal[0, 1]: ... + def thread_inherit_context(self) -> Literal[0, 1]: + """-X thread_inherit_context +""" @property - def context_aware_warnings(self) -> Literal[0, 1]: ... + def context_aware_warnings(self) -> Literal[0, 1]: + """-X context_aware_warnings +""" # Whether or not this exists on lower versions of Python # may depend on which patch release you're using # (it was backported to all Python versions on 3.8+ as a security fix) # Added in: 3.9.14, 3.10.7 # and present in all versions of 3.11 and later. @property - def int_max_str_digits(self) -> int: ... + def int_max_str_digits(self) -> int: + """-X int_max_str_digits +""" float_info: _float_info @@ -203,6 +320,12 @@ float_info: _float_info @final @type_check_only class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): + """sys.float_info + +A named tuple holding information about the float type. It contains low level +information about the precision and internal representation. Please study +your system's :file:`float.h` for more information. +""" if sys.version_info >= (3, 10): __match_args__: Final = ( "max", @@ -219,27 +342,49 @@ class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, ) @property - def max(self) -> float: ... # DBL_MAX + def max(self) -> float: # DBL_MAX + """DBL_MAX -- maximum representable finite float +""" @property - def max_exp(self) -> int: ... # DBL_MAX_EXP + def max_exp(self) -> int: # DBL_MAX_EXP + """DBL_MAX_EXP -- maximum int e such that radix**(e-1) is representable +""" @property - def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP + def max_10_exp(self) -> int: # DBL_MAX_10_EXP + """DBL_MAX_10_EXP -- maximum int e such that 10**e is representable +""" @property - def min(self) -> float: ... # DBL_MIN + def min(self) -> float: # DBL_MIN + """DBL_MIN -- Minimum positive normalized float +""" @property - def min_exp(self) -> int: ... # DBL_MIN_EXP + def min_exp(self) -> int: # DBL_MIN_EXP + """DBL_MIN_EXP -- minimum int e such that radix**(e-1) is a normalized float +""" @property - def min_10_exp(self) -> int: ... 
# DBL_MIN_10_EXP + def min_10_exp(self) -> int: # DBL_MIN_10_EXP + """DBL_MIN_10_EXP -- minimum int e such that 10**e is a normalized float +""" @property - def dig(self) -> int: ... # DBL_DIG + def dig(self) -> int: # DBL_DIG + """DBL_DIG -- maximum number of decimal digits that can be faithfully represented in a float +""" @property - def mant_dig(self) -> int: ... # DBL_MANT_DIG + def mant_dig(self) -> int: # DBL_MANT_DIG + """DBL_MANT_DIG -- mantissa digits +""" @property - def epsilon(self) -> float: ... # DBL_EPSILON + def epsilon(self) -> float: # DBL_EPSILON + """DBL_EPSILON -- Difference between 1 and the next representable float +""" @property - def radix(self) -> int: ... # FLT_RADIX + def radix(self) -> int: # FLT_RADIX + """FLT_RADIX -- radix of exponent +""" @property - def rounds(self) -> int: ... # FLT_ROUNDS + def rounds(self) -> int: # FLT_ROUNDS + """FLT_ROUNDS -- rounding mode used for arithmetic operations +""" hash_info: _hash_info @@ -247,27 +392,50 @@ hash_info: _hash_info @final @type_check_only class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): + """hash_info + +A named tuple providing parameters used for computing +hashes. The attributes are read only. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("width", "modulus", "inf", "nan", "imag", "algorithm", "hash_bits", "seed_bits", "cutoff") @property - def width(self) -> int: ... + def width(self) -> int: + """width of the type used for hashing, in bits +""" @property - def modulus(self) -> int: ... + def modulus(self) -> int: + """prime number giving the modulus on which the hash function is based +""" @property - def inf(self) -> int: ... + def inf(self) -> int: + """value to be used for hash of a positive infinity +""" @property - def nan(self) -> int: ... + def nan(self) -> int: + """value to be used for hash of a nan +""" @property - def imag(self) -> int: ... + def imag(self) -> int: + """multiplier used for the imaginary part of a complex number +""" @property - def algorithm(self) -> str: ... + def algorithm(self) -> str: + """name of the algorithm for hashing of str, bytes and memoryviews +""" @property - def hash_bits(self) -> int: ... + def hash_bits(self) -> int: + """internal output size of hash algorithm +""" @property - def seed_bits(self) -> int: ... + def seed_bits(self) -> int: + """seed size of hash algorithm +""" @property - def cutoff(self) -> int: ... # undocumented + def cutoff(self) -> int: # undocumented + """small string optimization cutoff +""" implementation: _implementation @@ -290,17 +458,30 @@ int_info: _int_info @final @type_check_only class _int_info(structseq[int], tuple[int, int, int, int]): + """sys.int_info + +A named tuple that holds information about Python's +internal representation of integers. The attributes are read only. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("bits_per_digit", "sizeof_digit", "default_max_str_digits", "str_digits_check_threshold") @property - def bits_per_digit(self) -> int: ... + def bits_per_digit(self) -> int: + """size of a digit in bits +""" @property - def sizeof_digit(self) -> int: ... + def sizeof_digit(self) -> int: + """size in bytes of the C type used to represent a digit +""" @property - def default_max_str_digits(self) -> int: ... + def default_max_str_digits(self) -> int: + """maximum string conversion digits limitation +""" @property - def str_digits_check_threshold(self) -> int: ... 
+ def str_digits_check_threshold(self) -> int: + """minimum positive value for int_max_str_digits +""" _ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"] _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None @@ -309,15 +490,25 @@ _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None @final @type_check_only class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]): + """sys.thread_info + +A named tuple holding information about the thread implementation. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("name", "lock", "version") @property - def name(self) -> _ThreadInfoName: ... + def name(self) -> _ThreadInfoName: + """name of the thread implementation +""" @property - def lock(self) -> _ThreadInfoLock: ... + def lock(self) -> _ThreadInfoLock: + """name of the lock implementation +""" @property - def version(self) -> str | None: ... + def version(self) -> str | None: + """name and version of the thread library +""" thread_info: _thread_info _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @@ -326,62 +517,183 @@ _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @final @type_check_only class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]): + """sys.version_info + +Version information as a named tuple. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("major", "minor", "micro", "releaselevel", "serial") @property - def major(self) -> int: ... + def major(self) -> int: + """Major release number +""" @property - def minor(self) -> int: ... + def minor(self) -> int: + """Minor release number +""" @property - def micro(self) -> int: ... + def micro(self) -> int: + """Patch release number +""" @property - def releaselevel(self) -> _ReleaseLevel: ... + def releaselevel(self) -> _ReleaseLevel: + """'alpha', 'beta', 'candidate', or 'final' +""" @property - def serial(self) -> int: ... + def serial(self) -> int: + """Serial release number +""" version_info: _version_info -def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: ... +def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: + """Call func(*args), while tracing is enabled. + +The tracing state is saved, and restored afterwards. This is intended +to be called from a debugger from a checkpoint, to recursively debug +some other code. +""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13. Use `_clear_internal_caches()` instead.") - def _clear_type_cache() -> None: ... + def _clear_type_cache() -> None: + """Clear the internal type lookup cache. +""" else: - def _clear_type_cache() -> None: ... + def _clear_type_cache() -> None: + """Clear the internal type lookup cache. +""" -def _current_frames() -> dict[int, FrameType]: ... -def _getframe(depth: int = 0, /) -> FrameType: ... +def _current_frames() -> dict[int, FrameType]: + """Return a dict mapping each thread's thread id to its current stack frame. -if sys.version_info >= (3, 12): - def _getframemodulename(depth: int = 0) -> str | None: ... +This function should be used for specialized purposes only. +""" +def _getframe(depth: int = 0, /) -> FrameType: + """Return a frame object from the call stack. -def _debugmallocstats() -> None: ... -def __displayhook__(object: object, /) -> None: ... -def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: ... -def exc_info() -> OptExcInfo: ... 
+If optional integer depth is given, return the frame object that many +calls below the top of the stack. If that is deeper than the call +stack, ValueError is raised. The default for depth is zero, returning +the frame at the top of the call stack. -if sys.version_info >= (3, 11): - def exception() -> BaseException | None: ... +This function should be used for internal and specialized purposes +only. +""" + +if sys.version_info >= (3, 12): + def _getframemodulename(depth: int = 0) -> str | None: + """Return the name of the module for a calling frame. + +The default depth returns the module containing the call to this API. +A more typical use in a library will pass a depth of 1 to get the user's +module rather than the library module. + +If no frame, module, or name can be found, returns None. +""" + +def _debugmallocstats() -> None: + """Print summary info to stderr about the state of pymalloc's structures. + +In Py_DEBUG mode, also perform some expensive internal consistency +checks. +""" +def __displayhook__(object: object, /) -> None: + """Print an object to sys.stdout and also save it in builtins._ +""" +def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: + """Handle an exception by displaying it with a traceback on sys.stderr. +""" +def exc_info() -> OptExcInfo: + """Return current exception information: (type, value, traceback). + +Return information about the most recent exception caught by an except +clause in the current stack frame or in an older stack frame. +""" -def exit(status: _ExitCode = None, /) -> NoReturn: ... -def getallocatedblocks() -> int: ... -def getdefaultencoding() -> str: ... +if sys.version_info >= (3, 11): + def exception() -> BaseException | None: + """Return the current exception. + +Return the most recent exception caught by an except clause +in the current stack frame or in an older stack frame, or None +if no such exception exists. +""" + +def exit(status: _ExitCode = None, /) -> NoReturn: + """Exit the interpreter by raising SystemExit(status). + +If the status is omitted or None, it defaults to zero (i.e., success). +If the status is an integer, it will be used as the system exit status. +If it is another kind of object, it will be printed and the system +exit status will be one (i.e., failure). +""" +def getallocatedblocks() -> int: + """Return the number of memory blocks currently allocated. +""" +def getdefaultencoding() -> str: + """Return the current default encoding used by the Unicode implementation. +""" if sys.platform != "win32": - def getdlopenflags() -> int: ... - -def getfilesystemencoding() -> str: ... -def getfilesystemencodeerrors() -> str: ... -def getrefcount(object: Any, /) -> int: ... -def getrecursionlimit() -> int: ... -def getsizeof(obj: object, default: int = ...) -> int: ... -def getswitchinterval() -> float: ... -def getprofile() -> ProfileFunction | None: ... -def setprofile(function: ProfileFunction | None, /) -> None: ... -def gettrace() -> TraceFunction | None: ... -def settrace(function: TraceFunction | None, /) -> None: ... + def getdlopenflags() -> int: + """Return the current value of the flags that are used for dlopen calls. + +The flag constants are defined in the os module. +""" + +def getfilesystemencoding() -> str: + """Return the encoding used to convert Unicode filenames to OS filenames. +""" +def getfilesystemencodeerrors() -> str: + """Return the error mode used Unicode to OS filename conversion. 
+""" +def getrefcount(object: Any, /) -> int: + """Return the reference count of object. + +The count returned is generally one higher than you might expect, +because it includes the (temporary) reference as an argument to +getrefcount(). +""" +def getrecursionlimit() -> int: + """Return the current value of the recursion limit. + +The recursion limit is the maximum depth of the Python interpreter +stack. This limit prevents infinite recursion from causing an overflow +of the C stack and crashing Python. +""" +def getsizeof(obj: object, default: int = ...) -> int: + """getsizeof(object [, default]) -> int + +Return the size of object in bytes. +""" +def getswitchinterval() -> float: + """Return the current thread switch interval; see sys.setswitchinterval(). +""" +def getprofile() -> ProfileFunction | None: + """Return the profiling function set with sys.setprofile. + +See the profiler chapter in the library manual. +""" +def setprofile(function: ProfileFunction | None, /) -> None: + """Set the profiling function. + +It will be called on each function call and return. See the profiler +chapter in the library manual. +""" +def gettrace() -> TraceFunction | None: + """Return the global debug tracing function set with sys.settrace. + +See the debugger chapter in the library manual. +""" +def settrace(function: TraceFunction | None, /) -> None: + """Set the global debug tracing function. + +It will be called on each function call. See the debugger chapter +in the library manual. +""" if sys.platform == "win32": # A tuple of length 5, even though it has more than 5 attributes. @@ -411,23 +723,64 @@ if sys.platform == "win32": def getwindowsversion() -> _WinVersion: ... -def intern(string: str, /) -> str: ... +def intern(string: str, /) -> str: + """``Intern'' the given string. -if sys.version_info >= (3, 13): - def _is_gil_enabled() -> bool: ... - def _clear_internal_caches() -> None: ... - def _is_interned(string: str, /) -> bool: ... +This enters the string in the (global) table of interned strings whose +purpose is to speed up dictionary lookups. Return the string itself or +the previously interned string object with the same value. +""" -def is_finalizing() -> bool: ... -def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... +if sys.version_info >= (3, 13): + def _is_gil_enabled() -> bool: + """Return True if the GIL is currently enabled and False otherwise. +""" + def _clear_internal_caches() -> None: + """Clear all internal performance-related caches. +""" + def _is_interned(string: str, /) -> bool: + """Return True if the given string is "interned". +""" + +def is_finalizing() -> bool: + """Return True if Python is exiting. +""" +def breakpointhook(*args: Any, **kwargs: Any) -> Any: + """This hook function is called by built-in breakpoint(). +""" __breakpointhook__ = breakpointhook # Contains the original value of breakpointhook if sys.platform != "win32": - def setdlopenflags(flags: int, /) -> None: ... - -def setrecursionlimit(limit: int, /) -> None: ... -def setswitchinterval(interval: float, /) -> None: ... + def setdlopenflags(flags: int, /) -> None: + """Set the flags used by the interpreter for dlopen calls. + +This is used, for example, when the interpreter loads extension +modules. Among other things, this will enable a lazy resolving of +symbols when importing a module, if called as sys.setdlopenflags(0). +To share symbols across extension modules, call as +sys.setdlopenflags(os.RTLD_GLOBAL). 
Symbolic names for the flag +modules can be found in the os module (RTLD_xxx constants, e.g. +os.RTLD_LAZY). +""" + +def setrecursionlimit(limit: int, /) -> None: + """Set the maximum depth of the Python interpreter stack to n. + +This limit prevents infinite recursion from causing an overflow of the C +stack and crashing Python. The highest possible limit is platform- +dependent. +""" +def setswitchinterval(interval: float, /) -> None: + """Set the ideal thread switching delay inside the Python interpreter. + +The actual frequency of switching threads can be lower if the +interpreter executes long sequences of uninterruptible code +(this is implementation-specific and workload-dependent). + +The parameter must represent the desired switching delay in seconds +A typical value is 0.005 (5 milliseconds). +""" def gettotalrefcount() -> int: ... # Debug builds only # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. @@ -441,9 +794,23 @@ class UnraisableHookArgs(Protocol): unraisablehook: Callable[[UnraisableHookArgs], Any] -def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: ... -def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... -def audit(event: str, /, *args: Any) -> None: ... +def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: + """Handle an unraisable exception. + +The unraisable argument has the following attributes: + +* exc_type: Exception type. +* exc_value: Exception value, can be None. +* exc_traceback: Exception traceback, can be None. +* err_msg: Error message, can be None. +* object: Object causing the exception, can be None. +""" +def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: + """Adds a new audit hook callback. +""" +def audit(event: str, /, *args: Any) -> None: + """Passes the event to any audit hooks that are attached. +""" _AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None @@ -459,8 +826,16 @@ class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHo @property def finalizer(self) -> _AsyncgenHook: ... -def get_asyncgen_hooks() -> _asyncgen_hooks: ... -def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... +def get_asyncgen_hooks() -> _asyncgen_hooks: + """Return the installed asynchronous generators hooks. + +This returns a namedtuple of the form (firstiter, finalizer). +""" +def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: + """set_asyncgen_hooks([firstiter] [, finalizer]) + +Set a finalizer for async generators objects. +""" if sys.platform == "win32": if sys.version_info >= (3, 13): @@ -472,25 +847,50 @@ if sys.platform == "win32": else: def _enablelegacywindowsfsencoding() -> None: ... -def get_coroutine_origin_tracking_depth() -> int: ... -def set_coroutine_origin_tracking_depth(depth: int) -> None: ... +def get_coroutine_origin_tracking_depth() -> int: + """Check status of origin tracking for coroutine objects in this thread. +""" +def set_coroutine_origin_tracking_depth(depth: int) -> None: + """Enable or disable origin tracking for coroutine objects in this thread. + +Coroutine objects will track 'depth' frames of traceback information +about where they came from, available in their cr_origin attribute. + +Set a depth of 0 to disable. 
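A short sketch of the coroutine origin tracking API described above; the coroutine name is arbitrary and the depth value is only an example:

import sys

async def work() -> None:
    return None

sys.set_coroutine_origin_tracking_depth(2)   # record up to 2 frames of origin
coro = work()
print(coro.cr_origin)   # tuple of (filename, lineno, funcname) entries, or None
coro.close()
sys.set_coroutine_origin_tracking_depth(0)   # disable tracking again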
+""" # The following two functions were added in 3.11.0, 3.10.7, and 3.9.14, # as part of the response to CVE-2020-10735 -def set_int_max_str_digits(maxdigits: int) -> None: ... -def get_int_max_str_digits() -> int: ... +def set_int_max_str_digits(maxdigits: int) -> None: + """Set the maximum string digits limit for non-binary int<->str conversions. +""" +def get_int_max_str_digits() -> int: + """Return the maximum string digits limit for non-binary int<->str conversions. +""" if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): - def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: ... + def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: + """Return the number of elements of the unicode interned dictionary +""" else: - def getunicodeinternedsize() -> int: ... - - def deactivate_stack_trampoline() -> None: ... - def is_stack_trampoline_active() -> bool: ... + def getunicodeinternedsize() -> int: + """Return the number of elements of the unicode interned dictionary +""" + + def deactivate_stack_trampoline() -> None: + """Deactivate the current stack profiler trampoline backend. + +If no stack profiler is activated, this function has no effect. +""" + def is_stack_trampoline_active() -> bool: + """Return *True* if a stack profiler trampoline is active. +""" # It always exists, but raises on non-linux platforms: if sys.platform == "linux": - def activate_stack_trampoline(backend: str, /) -> None: ... + def activate_stack_trampoline(backend: str, /) -> None: + """Activate stack profiler trampoline *backend*. +""" else: def activate_stack_trampoline(backend: str, /) -> NoReturn: ... @@ -499,5 +899,26 @@ if sys.version_info >= (3, 12): monitoring = _monitoring if sys.version_info >= (3, 14): - def is_remote_debug_enabled() -> bool: ... - def remote_exec(pid: int, script: StrOrBytesPath) -> None: ... + def is_remote_debug_enabled() -> bool: + """Return True if remote debugging is enabled, False otherwise. +""" + def remote_exec(pid: int, script: StrOrBytesPath) -> None: + """Executes a file containing Python code in a given remote Python process. + +This function returns immediately, and the code will be executed by the +target process's main thread at the next available opportunity, similarly +to how signals are handled. There is no interface to determine when the +code has been executed. The caller is responsible for making sure that +the file still exists whenever the remote process tries to read it and that +it hasn't been overwritten. + +The remote process must be running a CPython interpreter of the same major +and minor version as the local process. If either the local or remote +interpreter is pre-release (alpha, beta, or release candidate) then the +local and remote interpreters must be the same exact version. + +Args: + pid (int): The process ID of the target Python process. + script (str|bytes): The path to a file containing + the Python code to be executed. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi index 807a979050e80..31301e0a56388 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi @@ -1,3 +1,5 @@ +"""Access to Python's configuration information. +""" import sys from typing import IO, Any, Literal, overload from typing_extensions import deprecated @@ -18,24 +20,77 @@ __all__ = [ @overload @deprecated("SO is deprecated, use EXT_SUFFIX. 
Support is removed in Python 3.11") -def get_config_var(name: Literal["SO"]) -> Any: ... +def get_config_var(name: Literal["SO"]) -> Any: + """Return the value of a single variable using the dictionary returned by +'get_config_vars()'. + +Equivalent to get_config_vars().get(name) +""" @overload def get_config_var(name: str) -> Any: ... @overload -def get_config_vars() -> dict[str, Any]: ... +def get_config_vars() -> dict[str, Any]: + """With no arguments, return a dictionary of all configuration +variables relevant for the current platform. + +On Unix, this means every variable defined in Python's installed Makefile; +On Windows it's a much smaller set. + +With arguments, return a list of values that result from looking up +each argument in the configuration variable dictionary. +""" @overload def get_config_vars(arg: str, /, *args: str) -> list[Any]: ... -def get_scheme_names() -> tuple[str, ...]: ... +def get_scheme_names() -> tuple[str, ...]: + """Return a tuple containing the schemes names. +""" if sys.version_info >= (3, 10): def get_default_scheme() -> str: ... def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... -def get_path_names() -> tuple[str, ...]: ... -def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... -def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... +def get_path_names() -> tuple[str, ...]: + """Return a tuple containing the paths names. +""" +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: + """Return a path corresponding to the scheme. + +``scheme`` is the install scheme name. +""" +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: + """Return a mapping containing an install scheme. + +``scheme`` is the install scheme name. If not provided, it will +return the default scheme for the current platform. +""" def get_python_version() -> str: ... -def get_platform() -> str: ... +def get_platform() -> str: + """Return a string that identifies the current platform. + +This is used mainly to distinguish platform-specific build directories and +platform-specific built distributions. Typically includes the OS name and +version and the architecture (as supplied by 'os.uname()'), although the +exact information included depends on the OS; on Linux, the kernel version +isn't particularly important. + +Examples of returned values: + + +Windows: + +- win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, and EM64T) +- win-arm64 (64-bit Windows on ARM64, aka AArch64) +- win32 (all others - specifically, sys.platform is returned) + +POSIX based OS: + +- linux-x86_64 +- macosx-15.5-arm64 +- macosx-26.0-universal2 (macOS on Apple Silicon or Intel) +- android-24-arm64_v8a + +For other non-POSIX platforms, currently just returns :data:`sys.platform`. +""" if sys.version_info >= (3, 11): def is_python_build(check_home: object = None) -> bool: ... @@ -43,6 +98,16 @@ if sys.version_info >= (3, 11): else: def is_python_build(check_home: bool = False) -> bool: ... -def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... -def get_config_h_filename() -> str: ... -def get_makefile_filename() -> str: ... +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: + """Parse a config.h-style file. + +A dictionary containing name/value pairs is returned. 
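A small sketch of the sysconfig queries documented above (single variables, install paths, platform string); output values vary by interpreter and platform:

import sysconfig

print(sysconfig.get_platform())                 # e.g. "linux-x86_64"
print(sysconfig.get_python_version())           # e.g. "3.12"
print(sysconfig.get_config_var("EXT_SUFFIX"))   # one configuration variable
paths = sysconfig.get_paths()                   # default install scheme
print(paths["purelib"])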
If an +optional dictionary is passed in as the second argument, it is +used instead of a new dictionary. +""" +def get_config_h_filename() -> str: + """Return the path of pyconfig.h. +""" +def get_makefile_filename() -> str: + """Return the path of the Makefile. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi index 1e0d0d3839022..a14d15c2dd50c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi @@ -46,12 +46,25 @@ if sys.platform != "win32": LOG_RAS: Final = 120 LOG_REMOTEAUTH: Final = 104 - def LOG_MASK(pri: int, /) -> int: ... - def LOG_UPTO(pri: int, /) -> int: ... - def closelog() -> None: ... - def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... - def setlogmask(maskpri: int, /) -> int: ... + def LOG_MASK(pri: int, /) -> int: + """Calculates the mask for the individual priority pri. +""" + def LOG_UPTO(pri: int, /) -> int: + """Calculates the mask for all priorities up to and including pri. +""" + def closelog() -> None: + """Reset the syslog module values and call the system library closelog(). +""" + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: + """Set logging options of subsequent syslog() calls. +""" + def setlogmask(maskpri: int, /) -> int: + """Set the priority mask to maskpri and return the previous mask value. +""" @overload - def syslog(priority: int, message: str) -> None: ... + def syslog(priority: int, message: str) -> None: + """syslog([priority=LOG_INFO,] message) +Send the string message to the system logger. +""" @overload def syslog(message: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi index 8a8592f441242..1a62ae6a2db8a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi @@ -1,3 +1,14 @@ +"""The Tab Nanny despises ambiguous indentation. She knows no mercy. + +tabnanny -- Detection of ambiguous indentation + +For the time being this module is intended to be called as a script. +However it is possible to import it into an IDE and use the function +check() described below. + +Warning: The API provided by this module is likely to change in future +releases; such changes may not be backward compatible. +""" from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -7,10 +18,22 @@ verbose: int filename_only: int class NannyNag(Exception): + """ +Raised by process_tokens() if detecting an ambiguous indent. +Captured and handled in check(). +""" def __init__(self, lineno: int, msg: str, line: str) -> None: ... def get_lineno(self) -> int: ... def get_msg(self) -> str: ... def get_line(self) -> str: ... -def check(file: StrOrBytesPath) -> None: ... +def check(file: StrOrBytesPath) -> None: + """check(file_or_dir) + +If file_or_dir is a directory and not a symbolic link, then recursively +descend the directory tree named by file_or_dir, checking all .py files +along the way. If file_or_dir is an ordinary Python source file, it is +checked for whitespace related problems. The diagnostic messages are +written to standard output using the print statement. +""" def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... 
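A brief sketch of driving tabnanny.check() programmatically, per the docstring above; the path passed to check() is hypothetical:

import tabnanny

tabnanny.verbose = 1                 # module-level flag from the stub above
tabnanny.check("example_package")    # file or directory; diagnostics go to stdout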
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi index f6623ea9929d4..cb2c6f2f9febe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi @@ -1,3 +1,5 @@ +"""Read from and write to tar format archives. +""" import bz2 import io import sys @@ -116,6 +118,8 @@ class ExFileObject(io.BufferedReader): # undocumented def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... class TarFile: + """The TarFile Class provides an interface to tar archives. + """ OPEN_METH: ClassVar[Mapping[str, str]] name: StrOrBytesPath | None mode: Literal["r", "a", "w", "x"] @@ -150,7 +154,15 @@ class TarFile: errorlevel: int | None = None, copybufsize: int | None = None, # undocumented stream: bool = False, - ) -> None: ... + ) -> None: + """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to +read from an existing archive, 'a' to append data to an existing +file or 'w' to create a new file overwriting an existing one. 'mode' +defaults to 'r'. +If 'fileobj' is given, it is used for reading or writing data. If it +can be determined, 'mode' is overridden by 'fileobj's mode. +'fileobj' is not closed, when TarFile is closed. +""" else: def __init__( self, @@ -167,13 +179,23 @@ class TarFile: debug: int | None = None, errorlevel: int | None = None, copybufsize: int | None = None, # undocumented - ) -> None: ... + ) -> None: + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self) -> Iterator[TarInfo]: ... + def __iter__(self) -> Iterator[TarInfo]: + """Provide an iterator object. + """ @overload @classmethod def open( @@ -192,7 +214,47 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open a tar archive for reading, writing or appending. Return +an appropriate TarFile class. 
+ +mode: +'r' or 'r:*' open for reading with transparent compression +'r:' open for reading exclusively uncompressed +'r:gz' open for reading with gzip compression +'r:bz2' open for reading with bzip2 compression +'r:xz' open for reading with lzma compression +'r:zst' open for reading with zstd compression +'a' or 'a:' open for appending, creating the file if necessary +'w' or 'w:' open for writing without compression +'w:gz' open for writing with gzip compression +'w:bz2' open for writing with bzip2 compression +'w:xz' open for writing with lzma compression +'w:zst' open for writing with zstd compression + +'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created +'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created +'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created +'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created +'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + +'r|*' open a stream of tar blocks with transparent compression +'r|' open an uncompressed stream of tar blocks for reading +'r|gz' open a gzip compressed stream of tar blocks +'r|bz2' open a bzip2 compressed stream of tar blocks +'r|xz' open an lzma compressed stream of tar blocks +'r|zst' open a zstd compressed stream of tar blocks +'w|' open an uncompressed stream for writing +'w|gz' open a gzip compressed stream for writing +'w|bz2' open a bzip2 compressed stream for writing +'w|xz' open an lzma compressed stream for writing +'w|zst' open a zstd compressed stream for writing +""" if sys.version_info >= (3, 14): @overload @classmethod @@ -215,7 +277,47 @@ class TarFile: level: None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> Self: ... + ) -> Self: + """Open a tar archive for reading, writing or appending. Return +an appropriate TarFile class. 
+ +mode: +'r' or 'r:*' open for reading with transparent compression +'r:' open for reading exclusively uncompressed +'r:gz' open for reading with gzip compression +'r:bz2' open for reading with bzip2 compression +'r:xz' open for reading with lzma compression +'r:zst' open for reading with zstd compression +'a' or 'a:' open for appending, creating the file if necessary +'w' or 'w:' open for writing without compression +'w:gz' open for writing with gzip compression +'w:bz2' open for writing with bzip2 compression +'w:xz' open for writing with lzma compression +'w:zst' open for writing with zstd compression + +'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created +'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created +'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created +'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created +'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + +'r|*' open a stream of tar blocks with transparent compression +'r|' open an uncompressed stream of tar blocks for reading +'r|gz' open a gzip compressed stream of tar blocks +'r|bz2' open a bzip2 compressed stream of tar blocks +'r|xz' open an lzma compressed stream of tar blocks +'r|zst' open a zstd compressed stream of tar blocks +'w|' open an uncompressed stream for writing +'w|gz' open a gzip compressed stream for writing +'w|bz2' open a bzip2 compressed stream for writing +'w|xz' open an lzma compressed stream for writing +'w|zst' open a zstd compressed stream for writing +""" @overload @classmethod @@ -356,7 +458,47 @@ class TarFile: errorlevel: int | None = ..., options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None, - ) -> Self: ... + ) -> Self: + """Open a tar archive for reading, writing or appending. Return +an appropriate TarFile class. 
+ +mode: +'r' or 'r:*' open for reading with transparent compression +'r:' open for reading exclusively uncompressed +'r:gz' open for reading with gzip compression +'r:bz2' open for reading with bzip2 compression +'r:xz' open for reading with lzma compression +'r:zst' open for reading with zstd compression +'a' or 'a:' open for appending, creating the file if necessary +'w' or 'w:' open for writing without compression +'w:gz' open for writing with gzip compression +'w:bz2' open for writing with bzip2 compression +'w:xz' open for writing with lzma compression +'w:zst' open for writing with zstd compression + +'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created +'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created +'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created +'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created +'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + +'r|*' open a stream of tar blocks with transparent compression +'r|' open an uncompressed stream of tar blocks for reading +'r|gz' open a gzip compressed stream of tar blocks +'r|bz2' open a bzip2 compressed stream of tar blocks +'r|xz' open an lzma compressed stream of tar blocks +'r|zst' open a zstd compressed stream of tar blocks +'w|' open an uncompressed stream for writing +'w|gz' open a gzip compressed stream for writing +'w|bz2' open a bzip2 compressed stream for writing +'w|xz' open an lzma compressed stream for writing +'w|zst' open a zstd compressed stream for writing +""" @overload @classmethod def open( @@ -511,7 +653,9 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open uncompressed tar archive name for reading or writing. + """ @overload @classmethod def gzopen( @@ -529,7 +673,10 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open gzip compressed tar archive name for reading or writing. +Appending is not allowed. +""" @overload @classmethod def gzopen( @@ -565,7 +712,10 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open bzip2 compressed tar archive name for reading or writing. +Appending is not allowed. +""" @overload @classmethod def bz2open( @@ -600,7 +750,10 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open lzma compressed tar archive name for reading or writing. +Appending is not allowed. +""" if sys.version_info >= (3, 14): @overload @classmethod @@ -621,7 +774,10 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> Self: ... + ) -> Self: + """Open zstd compressed tar archive name for reading or writing. +Appending is not allowed. +""" @overload @classmethod def zstopen( @@ -643,11 +799,31 @@ class TarFile: errorlevel: int | None = ..., ) -> Self: ... - def getmember(self, name: str) -> TarInfo: ... - def getmembers(self) -> _list[TarInfo]: ... - def getnames(self) -> _list[str]: ... - def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: ... 
- def next(self) -> TarInfo | None: ... + def getmember(self, name: str) -> TarInfo: + """Return a TarInfo object for member 'name'. If 'name' can not be +found in the archive, KeyError is raised. If a member occurs more +than once in the archive, its last occurrence is assumed to be the +most up-to-date version. +""" + def getmembers(self) -> _list[TarInfo]: + """Return the members of the archive as a list of TarInfo objects. The +list has the same order as the members in the archive. +""" + def getnames(self) -> _list[str]: + """Return the members of the archive as a list of their names. It has +the same order as the list returned by getmembers(). +""" + def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: + """Print a table of contents to sys.stdout. If 'verbose' is False, only +the names of the members are printed. If it is True, an 'ls -l'-like +output is produced. 'members' is optional and must be a subset of the +list returned by getmembers(). +""" + def next(self) -> TarInfo | None: + """Return the next member of the archive as a TarInfo object, when +TarFile is opened for reading. Return None if there is no more +available. +""" # Calling this method without `filter` is deprecated, but it may be set either on the class or in an # individual call, so we can't mark it as @deprecated here. def extractall( @@ -657,7 +833,19 @@ class TarFile: *, numeric_owner: bool = False, filter: _TarfileFilter | None = None, - ) -> None: ... + ) -> None: + """Extract all members from the archive to the current working +directory and set owner, modification time and permissions on +directories afterwards. 'path' specifies a different directory +to extract to. 'members' is optional and must be a subset of the +list returned by getmembers(). If 'numeric_owner' is True, only +the numbers for user/group names are used and not the names. + +The 'filter' function will be called on each member just +before extraction. +It can return a changed TarInfo or None to skip the member. +String names of common filters are accepted. +""" # Same situation as for `extractall`. def extract( self, @@ -667,7 +855,19 @@ class TarFile: *, numeric_owner: bool = False, filter: _TarfileFilter | None = None, - ) -> None: ... + ) -> None: + """Extract a member from the archive to the current working directory, +using its full name. Its file information is extracted as accurately +as possible. 'member' may be a filename or a TarInfo object. You can +specify a different directory using 'path'. File attributes (owner, +mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' +is True, only the numbers for user/group names are used and not +the names. + +The 'filter' function will be called before extraction. +It can return a changed TarInfo or None to skip the member. +String names of common filters are accepted. +""" def _extract_member( self, tarinfo: TarInfo, @@ -677,20 +877,59 @@ class TarFile: *, filter_function: _FilterFunction | None = None, extraction_root: str | None = None, - ) -> None: ... # undocumented - def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... - def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented - def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented - def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented - def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... 
# undocumented - def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + ) -> None: # undocumented + """Extract the filtered TarInfo object tarinfo to a physical +file called targetpath. + +filter_function is only used when extracting a *different* +member (e.g. as fallback to creating a symlink) +""" + def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: + """Extract a member from the archive as a file object. 'member' may be +a filename or a TarInfo object. If 'member' is a regular file or +a link, an io.BufferedReader object is returned. For all other +existing members, None is returned. If 'member' does not appear +in the archive, KeyError is raised. +""" + def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a directory called targetpath. + """ + def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a file called targetpath. + """ + def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a file from a TarInfo object with an unknown type +at targetpath. +""" + def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a fifo called targetpath. + """ + def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a character or block device called targetpath. + """ def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented def makelink_with_filter( self, tarinfo: TarInfo, targetpath: StrOrBytesPath, filter_function: _FilterFunction, extraction_root: str - ) -> None: ... # undocumented - def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: ... # undocumented - def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented - def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + ) -> None: # undocumented + """Make a (symbolic) link called targetpath. If it cannot be created +(platform limitation), we try to make a copy of the referenced file +instead of a link. + +filter_function is only used when extracting a *different* +member (e.g. as fallback to creating a link). +""" + def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: # undocumented + """Set owner of targetpath according to tarinfo. If numeric_owner +is True, use .gid/.uid instead of .gname/.uname. If numeric_owner +is False, fall back to .gid/.uid when the search based on name +fails. +""" + def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Set file permissions of targetpath according to tarinfo. + """ + def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Set modification time of targetpath according to tarinfo. + """ def add( self, name: StrPath, @@ -698,23 +937,65 @@ class TarFile: recursive: bool = True, *, filter: Callable[[TarInfo], TarInfo | None] | None = None, - ) -> None: ... - def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: ... + ) -> None: + """Add the file 'name' to the archive. 'name' may be any type of file +(directory, fifo, symbolic link, etc.). If given, 'arcname' +specifies an alternative name for the file in the archive. +Directories are added recursively by default. This can be avoided by +setting 'recursive' to False. 
'filter' is a function +that expects a TarInfo object argument and returns the changed +TarInfo object, if it returns None the TarInfo object will be +excluded from the archive. +""" + def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: + """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents +a non zero-size regular file, the 'fileobj' argument should be a binary file, +and tarinfo.size bytes are read from it and added to the archive. +You can create TarInfo objects directly, or by using gettarinfo(). +""" def gettarinfo( self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None - ) -> TarInfo: ... - def close(self) -> None: ... + ) -> TarInfo: + """Create a TarInfo object from the result of os.stat or equivalent +on an existing file. The file is either named by 'name', or +specified as a file object 'fileobj' with a file descriptor. If +given, 'arcname' specifies an alternative name for the file in the +archive, otherwise, the name is taken from the 'name' attribute of +'fileobj', or the 'name' argument. The name should be a text +string. +""" + def close(self) -> None: + """Close the TarFile. In write-mode, two finishing zero blocks are +appended to the archive. +""" open = TarFile.open -def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... +def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: + """Return True if name points to a tar archive that we +are able to handle, else return False. + +'name' should be a string, file, or file-like object. +""" -class TarError(Exception): ... -class ReadError(TarError): ... -class CompressionError(TarError): ... -class StreamError(TarError): ... -class ExtractError(TarError): ... -class HeaderError(TarError): ... +class TarError(Exception): + """Base exception. +""" +class ReadError(TarError): + """Exception for unreadable tar archives. +""" +class CompressionError(TarError): + """Exception for unavailable compression methods. +""" +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles. +""" +class ExtractError(TarError): + """General exception for extract errors. +""" +class HeaderError(TarError): + """Base exception for header errors. +""" class FilterError(TarError): # This attribute is only set directly on the subclasses, but the documentation guarantees @@ -744,6 +1025,12 @@ def tar_filter(member: TarInfo, dest_path: str) -> TarInfo: ... def data_filter(member: TarInfo, dest_path: str) -> TarInfo: ... class TarInfo: + """Informational class which holds the details about an +archive member given by a tar header block. +TarInfo objects are returned by TarFile.getmember(), +TarFile.getmembers() and TarFile.gettarinfo() and are +usually created internally. +""" __slots__ = ( "name", "mode", @@ -784,7 +1071,10 @@ class TarInfo: uname: str gname: str pax_headers: Mapping[str, str] - def __init__(self, name: str = "") -> None: ... + def __init__(self, name: str = "") -> None: + """Construct a TarInfo object. name is the optional name +of the member. +""" if sys.version_info >= (3, 13): @property @deprecated("Deprecated since Python 3.13; will be removed in Python 3.16.") @@ -796,11 +1086,18 @@ class TarInfo: tarfile: TarFile | None @classmethod - def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: + """Construct a TarInfo object from a 512 byte bytes object. 
+ """ @classmethod - def fromtarfile(cls, tarfile: TarFile) -> Self: ... + def fromtarfile(cls, tarfile: TarFile) -> Self: + """Return the next TarInfo object from TarFile object +tarfile. +""" @property - def linkpath(self) -> str: ... + def linkpath(self) -> str: + """In pax headers, "linkname" is called "linkpath". +""" @linkpath.setter def linkpath(self, linkname: str) -> None: ... def replace( @@ -815,25 +1112,59 @@ class TarInfo: uname: str = ..., gname: str = ..., deep: bool = True, - ) -> Self: ... - def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... - def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + ) -> Self: + """Return a deep copy of self with the given attributes replaced. + """ + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: + """Return the TarInfo's attributes as a dictionary. + """ + def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: + """Return a tar header as a string of 512 byte blocks. + """ def create_ustar_header( self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str - ) -> bytes: ... + ) -> bytes: + """Return the object as a ustar header block. + """ def create_gnu_header( self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str - ) -> bytes: ... - def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: ... + ) -> bytes: + """Return the object as a GNU header block sequence. + """ + def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: + """Return the object as a ustar header block. If it cannot be +represented this way, prepend a pax extended header sequence +with supplement information. +""" @classmethod - def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: ... - def isfile(self) -> bool: ... - def isreg(self) -> bool: ... + def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: + """Return the object as a pax global header block sequence. + """ + def isfile(self) -> bool: + """Return True if the Tarinfo object is a regular file. +""" + def isreg(self) -> bool: + """Return True if the Tarinfo object is a regular file. +""" def issparse(self) -> bool: ... - def isdir(self) -> bool: ... - def issym(self) -> bool: ... - def islnk(self) -> bool: ... - def ischr(self) -> bool: ... - def isblk(self) -> bool: ... - def isfifo(self) -> bool: ... - def isdev(self) -> bool: ... + def isdir(self) -> bool: + """Return True if it is a directory. +""" + def issym(self) -> bool: + """Return True if it is a symbolic link. +""" + def islnk(self) -> bool: + """Return True if it is a hard link. +""" + def ischr(self) -> bool: + """Return True if it is a character device. +""" + def isblk(self) -> bool: + """Return True if it is a block device. +""" + def isfifo(self) -> bool: + """Return True if it is a FIFO. +""" + def isdev(self) -> bool: + """Return True if it is one of character device, block device or FIFO. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi index 88aa43d248996..c221b62748a2a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi @@ -1,3 +1,35 @@ +"""TELNET client class. 
+ +Based on RFC 854: TELNET Protocol Specification, by J. Postel and +J. Reynolds + +Example: + +>>> from telnetlib import Telnet +>>> tn = Telnet('www.python.org', 79) # connect to finger port +>>> tn.write(b'guido\\r\\n') +>>> print(tn.read_all()) +Login Name TTY Idle When Where +guido Guido van Rossum pts/2 snag.cnri.reston.. + +>>> + +Note that read_all() won't read until eof -- it just reads some data +-- but it guarantees to read at least one byte unless EOF is hit. + +It is possible to pass a Telnet object to a selector in order to wait until +more data is available. Note that in this case, read_eager() may return b'' +even if there was data on the socket, because the protocol negotiation may have +eaten the data. This is why EOFError is needed in some cases to distinguish +between "no data" and "connection closed" (since the socket also appears ready +for reading when it is closed). + +To do: +- option negotiation +- timeout should be intrinsic to the connection object instead of an + option on one of the read calls only + +""" import socket from collections.abc import Callable, MutableSequence, Sequence from re import Match, Pattern @@ -87,37 +119,225 @@ EXOPL: Final = b"\xff" NOOPT: Final = b"\x00" class Telnet: + """Telnet interface class. + + An instance of this class represents a connection to a telnet + server. The instance is initially not connected; the open() + method must be used to establish a connection. Alternatively, the + host name and optional port number can be passed to the + constructor, too. + + Don't try to reopen an already connected instance. + + This class has many read_*() methods. Note that some of them + raise EOFError when the end of the connection is read, because + they can return an empty string for other reasons. See the + individual doc strings. + + read_until(expected, [timeout]) + Read until the expected string has been seen, or a timeout is + hit (default is no timeout); may block. + + read_all() + Read all data until EOF; may block. + + read_some() + Read at least one byte or EOF; may block. + + read_very_eager() + Read all data available already queued or on the socket, + without blocking. + + read_eager() + Read either data already queued or some data available on the + socket, without blocking. + + read_lazy() + Read all data in the raw queue (processing it first), without + doing any socket I/O. + + read_very_lazy() + Reads all data in the cooked queue, without doing any socket + I/O. + + read_sb_data() + Reads available data between SB ... SE sequence. Don't block. + + set_option_negotiation_callback(callback) + Each time a telnet option is read on the input flow, this callback + (if set) is called with the following parameters : + callback(telnet socket, command, option) + option will be chr(0) when there is no option. + No other action is done afterwards by telnetlib. + + """ host: str | None # undocumented sock: socket.socket | None # undocumented - def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... - def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... - def msg(self, msg: str, *args: Any) -> None: ... - def set_debuglevel(self, debuglevel: int) -> None: ... - def close(self) -> None: ... - def get_socket(self) -> socket.socket: ... - def fileno(self) -> int: ... - def write(self, buffer: bytes) -> None: ... - def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... - def read_all(self) -> bytes: ... - def read_some(self) -> bytes: ... 
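A small sketch of the Telnet interface documented above; host, port, and timeout are placeholders, and telnetlib itself was removed from the standard library in Python 3.13:

from telnetlib import Telnet

with Telnet("localhost", 23, timeout=5.0) as tn:
    tn.write(b"hello\r\n")
    reply = tn.read_until(b"\n", timeout=2.0)   # returns whatever arrived on timeout
    print(reply.decode(errors="replace"))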
- def read_very_eager(self) -> bytes: ... - def read_eager(self) -> bytes: ... - def read_lazy(self) -> bytes: ... - def read_very_lazy(self) -> bytes: ... - def read_sb_data(self) -> bytes: ... - def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: ... - def process_rawq(self) -> None: ... - def rawq_getchar(self) -> bytes: ... - def fill_rawq(self) -> None: ... - def sock_avail(self) -> bool: ... - def interact(self) -> None: ... - def mt_interact(self) -> None: ... - def listener(self) -> None: ... + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: + """Constructor. + + When called without arguments, create an unconnected instance. + With a hostname argument, it connects the instance; port number + and timeout are optional. + """ + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: + """Connect to a host. + + The optional second argument is the port number, which + defaults to the standard telnet port (23). + + Don't try to reopen an already connected instance. + """ + def msg(self, msg: str, *args: Any) -> None: + """Print a debug message, when the debug level is > 0. + + If extra arguments are present, they are substituted in the + message using the standard string formatting operator. + + """ + def set_debuglevel(self, debuglevel: int) -> None: + """Set the debug level. + + The higher it is, the more debug output you get (on sys.stdout). + + """ + def close(self) -> None: + """Close the connection. +""" + def get_socket(self) -> socket.socket: + """Return the socket object used internally. +""" + def fileno(self) -> int: + """Return the fileno() of the socket object used internally. +""" + def write(self, buffer: bytes) -> None: + """Write a string to the socket, doubling any IAC characters. + + Can block if the connection is blocked. May raise + OSError if the connection is closed. + + """ + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: + """Read until a given string is encountered or until timeout. + + When no match is found, return whatever is available instead, + possibly the empty string. Raise EOFError if the connection + is closed and no cooked data is available. + + """ + def read_all(self) -> bytes: + """Read all data until EOF; block until connection closed. +""" + def read_some(self) -> bytes: + """Read at least one byte of cooked data unless EOF is hit. + + Return b'' if EOF is hit. Block if no data is immediately + available. + + """ + def read_very_eager(self) -> bytes: + """Read everything that's possible without blocking in I/O (eager). + + Raise EOFError if connection closed and no cooked data + available. Return b'' if no cooked data available otherwise. + Don't block unless in the midst of an IAC sequence. + + """ + def read_eager(self) -> bytes: + """Read readily available data. + + Raise EOFError if connection closed and no cooked data + available. Return b'' if no cooked data available otherwise. + Don't block unless in the midst of an IAC sequence. + + """ + def read_lazy(self) -> bytes: + """Process and return data that's already in the queues (lazy). + + Raise EOFError if connection closed and no data available. + Return b'' if no cooked data available otherwise. Don't block + unless in the midst of an IAC sequence. + + """ + def read_very_lazy(self) -> bytes: + """Return any data available in the cooked queue (very lazy). + + Raise EOFError if connection closed and no data available. 
+ Return b'' if no cooked data available otherwise. Don't block. + + """ + def read_sb_data(self) -> bytes: + """Return any data available in the SB ... SE queue. + + Return b'' if no SB ... SE available. Should only be called + after seeing a SB or SE command. When a new SB command is + found, old unread SB data will be discarded. Don't block. + + """ + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: + """Provide a callback function called after each receipt of a telnet option. +""" + def process_rawq(self) -> None: + """Transfer from raw queue to cooked queue. + + Set self.eof when connection is closed. Don't block unless in + the midst of an IAC sequence. + + """ + def rawq_getchar(self) -> bytes: + """Get next char from raw queue. + + Block if no data is immediately available. Raise EOFError + when connection is closed. + + """ + def fill_rawq(self) -> None: + """Fill raw queue from exactly one recv() system call. + + Block if no data is immediately available. Set self.eof when + connection is closed. + + """ + def sock_avail(self) -> bool: + """Test whether data is available on the socket. +""" + def interact(self) -> None: + """Interaction function, emulates a very dumb telnet client. +""" + def mt_interact(self) -> None: + """Multithreaded version of interact(). +""" + def listener(self) -> None: + """Helper for mt_interact() -- this executes in the other thread. +""" def expect( self, list: MutableSequence[Pattern[bytes] | bytes] | Sequence[Pattern[bytes]], timeout: float | None = None - ) -> tuple[int, Match[bytes] | None, bytes]: ... + ) -> tuple[int, Match[bytes] | None, bytes]: + """Read until one from a list of a regular expressions matches. + + The first argument is a list of regular expressions, either + compiled (re.Pattern instances) or uncompiled (strings). + The optional second argument is a timeout, in seconds; default + is no timeout. + + Return a tuple of three items: the index in the list of the + first regular expression that matches; the re.Match object + returned; and the text read up till and including the match. + + If EOF is read and no text was read, raise EOFError. + Otherwise, when nothing matches, return (-1, None, text) where + text is the text received so far (may be the empty string if a + timeout happened). + + If a regular expression ends with a greedy match (e.g. '.*') + or if more than one expression can match the same input, the + results are undeterministic, and may depend on the I/O timing. + + """ def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __del__(self) -> None: ... + def __del__(self) -> None: + """Destructor -- close the connection. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi index 26491074ff71d..17752f23338c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi @@ -1,3 +1,27 @@ +"""Temporary files. + +This module provides generic, low- and high-level interfaces for +creating temporary files and directories. All of the interfaces +provided by this module can be used without fear of race conditions +except for 'mktemp'. 'mktemp' is subject to race conditions and +should not be used; it is provided for backward compatibility only. + +The default path names are returned as str. 
If you supply bytes as +input, all return values will be in bytes. Ex: + + >>> tempfile.mkstemp() + (4, '/tmp/tmptpu9nin8') + >>> tempfile.mkdtemp(suffix=b'') + b'/tmp/tmppbi8f0hy' + +This module also provides some data items to the user: + + TMP_MAX - maximum number of names that will be tried before + giving up. + tempdir - If this is set to a string before the first use of + any routine from this module, it will be considered as + another candidate location to store temporary files. +""" import io import sys from _typeshed import ( @@ -52,7 +76,29 @@ if sys.version_info >= (3, 12): *, errors: str | None = None, delete_on_close: bool = True, - ) -> _TemporaryFileWrapper[str]: ... + ) -> _TemporaryFileWrapper[str]: + """Create and return a temporary file. +Arguments: +'prefix', 'suffix', 'dir' -- as for mkstemp. +'mode' -- the mode argument to io.open (default "w+b"). +'buffering' -- the buffer size argument to io.open (default -1). +'encoding' -- the encoding argument to io.open (default None) +'newline' -- the newline argument to io.open (default None) +'delete' -- whether the file is automatically deleted (default True). +'delete_on_close' -- if 'delete', whether the file is deleted on close + (default True) or otherwise either on context manager exit + (if context manager was used) or on object finalization. . +'errors' -- the errors argument to io.open (default None) +The file is created as mkstemp() would do it. + +Returns an object with a file-like interface; the name of the file +is accessible as its 'name' attribute. The file will be automatically +deleted when it is closed unless the 'delete' argument is set to False. + +On POSIX, NamedTemporaryFiles cannot be automatically deleted if +the creating process is terminated abruptly with a SIGKILL signal. +Windows can delete the file even in this case. +""" @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -95,7 +141,26 @@ else: delete: bool = True, *, errors: str | None = None, - ) -> _TemporaryFileWrapper[str]: ... + ) -> _TemporaryFileWrapper[str]: + """Create and return a temporary file. + Arguments: + 'prefix', 'suffix', 'dir' -- as for mkstemp. + 'mode' -- the mode argument to io.open (default "w+b"). + 'buffering' -- the buffer size argument to io.open (default -1). + 'encoding' -- the encoding argument to io.open (default None) + 'newline' -- the newline argument to io.open (default None) + 'delete' -- whether the file is deleted on close (default True). + 'errors' -- the errors argument to io.open (default None) + The file is created as mkstemp() would do it. + + Returns an object with a file-like interface; the name of the file + is accessible as its 'name' attribute. The file will be automatically + deleted when it is closed unless the 'delete' argument is set to False. + + On POSIX, NamedTemporaryFiles cannot be automatically deleted if + the creating process is terminated abruptly with a SIGKILL signal. + Windows can delete the file even in this case. + """ @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -138,7 +203,20 @@ else: dir: GenericPath[AnyStr] | None = None, *, errors: str | None = None, - ) -> io.TextIOWrapper: ... + ) -> io.TextIOWrapper: + """Create and return a temporary file. +Arguments: +'prefix', 'suffix', 'dir' -- as for mkstemp. +'mode' -- the mode argument to io.open (default "w+b"). +'buffering' -- the buffer size argument to io.open (default -1). 
+'encoding' -- the encoding argument to io.open (default None) +'newline' -- the newline argument to io.open (default None) +'errors' -- the errors argument to io.open (default None) +The file is created as mkstemp() would do it. + +Returns an object with a file-like interface. The file has no +name, and will cease to exist when it is closed. +""" @overload def TemporaryFile( mode: OpenBinaryMode, @@ -212,6 +290,12 @@ else: ) -> IO[Any]: ... class _TemporaryFileWrapper(IO[AnyStr]): + """Temporary file wrapper + +This class provides a wrapper around files opened for +temporary use. In particular, it seeks to automatically +remove the file when it is no longer needed. +""" file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool @@ -223,7 +307,10 @@ class _TemporaryFileWrapper(IO[AnyStr]): def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... - def close(self) -> None: ... + def close(self) -> None: + """ +Close the temporary file, possibly deleting it. +""" # These methods don't exist directly on this object, but # are delegated to the underlying IO object through __getattr__. # We need to add them here so that this class is concrete. @@ -272,6 +359,10 @@ else: # It does not actually derive from IO[AnyStr], but it does mostly behave # like one. class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): + """Temporary file wrapper, specialized to switch from BytesIO +or StringIO to a real file when it exceeds a certain size or +when a fileno is needed. +""" _file: IO[AnyStr] @property def encoding(self) -> str: ... # undocumented @@ -395,10 +486,34 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def readable(self) -> bool: ... def seekable(self) -> bool: ... def writable(self) -> bool: ... - def __next__(self) -> AnyStr: ... # type: ignore[override] - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __next__(self) -> AnyStr: # type: ignore[override] + """Implement next(self). +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class TemporaryDirectory(Generic[AnyStr]): + """Create and return a temporary directory. This has the same +behavior as mkdtemp but can be used as a context manager. For +example: + + with TemporaryDirectory() as tmpdir: + ... + +Upon exiting the context, the directory and everything contained +in it are removed (unless delete=False is passed or an exception +is raised during cleanup and ignore_cleanup_errors is not True). + +Optional Arguments: + suffix - A str suffix for the directory name. (see mkdtemp) + prefix - A str prefix for the directory name. (see mkdtemp) + dir - A directory to create this temp dir in. (see mkdtemp) + ignore_cleanup_errors - False; ignore exceptions during cleanup? + delete - True; whether the directory is automatically deleted. +""" name: AnyStr if sys.version_info >= (3, 12): @overload @@ -454,13 +569,44 @@ class TemporaryDirectory(Generic[AnyStr]): def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... 
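A short usage sketch of the tempfile helpers stubbed above, relying only on the documented defaults (automatic deletion on close / context exit):

import tempfile

with tempfile.NamedTemporaryFile(mode="w+", suffix=".txt") as f:
    f.write("scratch data")
    f.seek(0)
    print(f.name, f.read())          # file is deleted when the block exits

with tempfile.TemporaryDirectory(prefix="work-") as tmpdir:
    print(tmpdir)                    # directory and contents removed on exit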
+ def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" # The overloads overlap, but they should still work fine. @overload def mkstemp( suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False -) -> tuple[int, str]: ... +) -> tuple[int, str]: + """User-callable function to create and return a unique temporary +file. The return value is a pair (fd, name) where fd is the +file descriptor returned by os.open, and name is the filename. + +If 'suffix' is not None, the file name will end with that suffix, +otherwise there will be no suffix. + +If 'prefix' is not None, the file name will begin with that prefix, +otherwise a default prefix is used. + +If 'dir' is not None, the file will be created in that directory, +otherwise a default directory is used. + +If 'text' is specified and true, the file is opened in text +mode. Else (the default) the file is opened in binary mode. + +If any of 'suffix', 'prefix' and 'dir' are not None, they must be the +same type. If they are bytes, the returned name will be bytes; str +otherwise. + +The file is readable and writable only by the creating user ID. +If the operating system uses permission bits to indicate whether a +file is executable, the file is executable by no one. The file +descriptor is not inherited by children of this process. + +Caller is responsible for deleting the file when done with it. +""" @overload def mkstemp( suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False @@ -468,12 +614,43 @@ def mkstemp( # The overloads overlap, but they should still work fine. @overload -def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: + """User-callable function to create and return a unique temporary +directory. The return value is the pathname of the directory. + +Arguments are as for mkstemp, except that the 'text' argument is +not accepted. + +The directory is readable, writable, and searchable only by the +creating user. + +Caller is responsible for deleting the directory when done with it. +""" @overload def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... @deprecated("Deprecated since Python 2.3. Use `mkstemp()` or `NamedTemporaryFile(delete=False)` instead.") -def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... -def gettempdirb() -> bytes: ... -def gettempprefixb() -> bytes: ... -def gettempdir() -> str: ... -def gettempprefix() -> str: ... +def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: + """User-callable function to return a unique temporary file name. The +file is not created. + +Arguments are similar to mkstemp, except that the 'text' argument is +not accepted, and suffix=None, prefix=None and bytes file names are not +supported. + +THIS FUNCTION IS UNSAFE AND SHOULD NOT BE USED. The file name may +refer to a file that did not exist at some point, but by the time +you get around to creating it, someone else may have beaten you to +the punch. +""" +def gettempdirb() -> bytes: + """Returns tempfile.tempdir as bytes. +""" +def gettempprefixb() -> bytes: + """The default prefix for temporary directories as bytes. 
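[Editorial note, not part of the synced stub: the mkstemp()/TemporaryDirectory docstrings above spell out who owns the created file or directory; the sketch below illustrates that intended usage. The suffix/prefix values and the written content are placeholder choices.]

    import os
    import tempfile

    # mkstemp() returns an already-open OS file descriptor plus the path;
    # the caller owns both and must close and delete the file itself.
    fd, path = tempfile.mkstemp(suffix=".txt", prefix="demo-")
    try:
        with os.fdopen(fd, "w") as fh:   # fdopen takes ownership of fd
            fh.write("scratch data")
    finally:
        os.remove(path)

    # TemporaryDirectory removes the whole tree when the context exits.
    with tempfile.TemporaryDirectory(prefix="demo-") as tmpdir:
        print(os.path.join(tmpdir, "workfile.bin"))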
+""" +def gettempdir() -> str: + """Returns tempfile.tempdir as str. +""" +def gettempprefix() -> str: + """The default prefix for temporary directories as string. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi index a35be5dfe740a..6913684279139 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi @@ -1,3 +1,12 @@ +"""This module provides an interface to the Posix calls for tty I/O control. +For a complete description of these calls, see the Posix or Unix manual +pages. It is only available for those Unix versions that support Posix +termios style tty I/O control. + +All functions in this module take a file descriptor fd as their first +argument. This can be an integer file descriptor, such as returned by +sys.stdin.fileno(), or a file object, such as sys.stdin itself. +""" import sys from _typeshed import FileDescriptorLike from typing import Any, Final @@ -291,14 +300,60 @@ if sys.platform != "win32": INIT_C_CC: Final[int] NSWTCH: Final[int] - def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: ... - def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: ... - def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: ... - def tcdrain(fd: FileDescriptorLike, /) -> None: ... - def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: ... - def tcflow(fd: FileDescriptorLike, action: int, /) -> None: ... + def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: + """Get the tty attributes for file descriptor fd. + +Returns a list [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] +where cc is a list of the tty special characters (each a string of +length 1, except the items with indices VMIN and VTIME, which are +integers when these fields are defined). The interpretation of the +flags and the speeds as well as the indexing in the cc array must be +done using the symbolic constants defined in this module. +""" + def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: + """Set the tty attributes for file descriptor fd. + +The attributes to be set are taken from the attributes argument, which +is a list like the one returned by tcgetattr(). The when argument +determines when the attributes are changed: termios.TCSANOW to +change immediately, termios.TCSADRAIN to change after transmitting all +queued output, or termios.TCSAFLUSH to change after transmitting all +queued output and discarding all queued input. +""" + def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: + """Send a break on file descriptor fd. + +A zero duration sends a break for 0.25-0.5 seconds; a nonzero duration +has a system dependent meaning. +""" + def tcdrain(fd: FileDescriptorLike, /) -> None: + """Wait until all output written to file descriptor fd has been transmitted. +""" + def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: + """Discard queued data on file descriptor fd. + +The queue selector specifies which queue: termios.TCIFLUSH for the input +queue, termios.TCOFLUSH for the output queue, or termios.TCIOFLUSH for +both queues. +""" + def tcflow(fd: FileDescriptorLike, action: int, /) -> None: + """Suspend or resume input or output on file descriptor fd. + +The action argument can be termios.TCOOFF to suspend output, +termios.TCOON to restart output, termios.TCIOFF to suspend input, +or termios.TCION to restart input. 
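[Editorial note, not part of the synced stub: a minimal sketch of how the tcgetattr()/tcsetattr() pair documented above is typically combined, here to suppress echo while reading a line from a real tty. Unix only; the prompt string is a placeholder.]

    import sys
    import termios

    fd = sys.stdin.fileno()
    old = termios.tcgetattr(fd)            # save current attributes
    new = termios.tcgetattr(fd)            # independent copy to modify
    new[3] &= ~termios.ECHO                # index 3 is lflag
    try:
        termios.tcsetattr(fd, termios.TCSADRAIN, new)
        secret = input("password: ")
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old)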
+""" if sys.version_info >= (3, 11): - def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: ... - def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: ... + def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: + """Get the tty winsize for file descriptor fd. + +Returns a tuple (ws_row, ws_col). +""" + def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: + """Set the tty winsize for file descriptor fd. + +The winsize to be set is taken from the winsize argument, which +is a two-item tuple (ws_row, ws_col) like the one returned by tcgetwinsize(). +""" class error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi index c00cce3c2d577..cf41ed1a66098 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi @@ -1,9 +1,58 @@ +"""Text wrapping and filling. +""" from collections.abc import Callable from re import Pattern __all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] class TextWrapper: + """ +Object for wrapping/filling text. The public interface consists of +the wrap() and fill() methods; the other methods are just there for +subclasses to override in order to tweak the default behaviour. +If you want to completely replace the main wrapping algorithm, +you'll probably have to override _wrap_chunks(). + +Several instance attributes control various aspects of wrapping: + width (default: 70) + the maximum width of wrapped lines (unless break_long_words + is false) + initial_indent (default: "") + string that will be prepended to the first line of wrapped + output. Counts towards the line's width. + subsequent_indent (default: "") + string that will be prepended to all lines save the first + of wrapped output; also counts towards each line's width. + expand_tabs (default: true) + Expand tabs in input text to spaces before further processing. + Each tab will become 0 .. 'tabsize' spaces, depending on its position + in its line. If false, each tab is treated as a single character. + tabsize (default: 8) + Expand tabs in input text to 0 .. 'tabsize' spaces, unless + 'expand_tabs' is false. + replace_whitespace (default: true) + Replace all whitespace characters in the input text by spaces + after tab expansion. Note that if expand_tabs is false and + replace_whitespace is true, every tab will be converted to a + single space! + fix_sentence_endings (default: false) + Ensure that sentence-ending punctuation is always followed + by two spaces. Off by default because the algorithm is + (unavoidably) imperfect. + break_long_words (default: true) + Break words longer than 'width'. If false, those words will not + be broken, and some lines might be longer than 'width'. + break_on_hyphens (default: true) + Allow breaking hyphenated words. If true, wrapping will occur + preferably on whitespaces and right after hyphens part of + compound words. + drop_whitespace (default: true) + Drop leading and trailing whitespace from lines. + max_lines (default: None) + Truncate wrapped lines. + placeholder (default: ' [...]') + Append to the last line of truncated text. +""" width: int initial_indent: str subsequent_indent: str @@ -42,14 +91,75 @@ class TextWrapper: placeholder: str = " [...]", ) -> None: ... # Private methods *are* part of the documented API for subclasses. - def _munge_whitespace(self, text: str) -> str: ... 
- def _split(self, text: str) -> list[str]: ... - def _fix_sentence_endings(self, chunks: list[str]) -> None: ... - def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: ... - def _wrap_chunks(self, chunks: list[str]) -> list[str]: ... + def _munge_whitespace(self, text: str) -> str: + """_munge_whitespace(text : string) -> string + +Munge whitespace in text: expand tabs and convert all other +whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz" +becomes " foo bar baz". +""" + def _split(self, text: str) -> list[str]: + """_split(text : string) -> [string] + +Split the text to wrap into indivisible chunks. Chunks are +not quite the same as words; see _wrap_chunks() for full +details. As an example, the text + Look, goof-ball -- use the -b option! +breaks into the following chunks: + 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' +if break_on_hyphens is True, or in: + 'Look,', ' ', 'goof-ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', option!' +otherwise. +""" + def _fix_sentence_endings(self, chunks: list[str]) -> None: + """_fix_sentence_endings(chunks : [string]) + +Correct for sentence endings buried in 'chunks'. Eg. when the +original text contains "... foo.\\nBar ...", munge_whitespace() +and split() will convert that to [..., "foo.", " ", "Bar", ...] +which has one too few spaces; this method simply changes the one +space to two. +""" + def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: + """_handle_long_word(chunks : [string], + cur_line : [string], + cur_len : int, width : int) + +Handle a chunk of text (most likely a word, not whitespace) that +is too long to fit in any line. +""" + def _wrap_chunks(self, chunks: list[str]) -> list[str]: + """_wrap_chunks(chunks : [string]) -> [string] + +Wrap a sequence of text chunks and return a list of lines of +length 'self.width' or less. (If 'break_long_words' is false, +some lines may be longer than this.) Chunks correspond roughly +to words and the whitespace between them: each chunk is +indivisible (modulo 'break_long_words'), but a line break can +come between any two chunks. Chunks should not have internal +whitespace; ie. a chunk is either all whitespace or a "word". +Whitespace chunks will be removed from the beginning and end of +lines, but apart from that whitespace is preserved. +""" def _split_chunks(self, text: str) -> list[str]: ... - def wrap(self, text: str) -> list[str]: ... - def fill(self, text: str) -> str: ... + def wrap(self, text: str) -> list[str]: + """wrap(text : string) -> [string] + +Reformat the single paragraph in 'text' so it fits in lines of +no more than 'self.width' columns, and return a list of wrapped +lines. Tabs in 'text' are expanded with string.expandtabs(), +and all other whitespace characters (including newline) are +converted to space. +""" + def fill(self, text: str) -> str: + """fill(text : string) -> string + +Reformat the single paragraph in 'text' to fit in lines of no +more than 'self.width' columns, and return a new string +containing the entire wrapped paragraph. +""" def wrap( text: str, @@ -66,7 +176,16 @@ def wrap( drop_whitespace: bool = True, max_lines: int | None = None, placeholder: str = " [...]", -) -> list[str]: ... +) -> list[str]: + """Wrap a single paragraph of text, returning a list of wrapped lines. 
+ +Reformat the single paragraph in 'text' so it fits in lines of no +more than 'width' columns, and return a list of wrapped lines. By +default, tabs in 'text' are expanded with string.expandtabs(), and +all other whitespace characters (including newline) are converted to +space. See TextWrapper class for available keyword args to customize +wrapping behaviour. +""" def fill( text: str, width: int = 70, @@ -82,7 +201,15 @@ def fill( drop_whitespace: bool = True, max_lines: int | None = None, placeholder: str = " [...]", -) -> str: ... +) -> str: + """Fill a single paragraph of text, returning a new string. + +Reformat the single paragraph in 'text' to fit in lines of no more +than 'width' columns, and return a new string containing the entire +wrapped paragraph. As with wrap(), tabs are expanded and other +whitespace characters converted to space. See TextWrapper class for +available keyword args to customize wrapping behaviour. +""" def shorten( text: str, width: int, @@ -98,6 +225,36 @@ def shorten( drop_whitespace: bool = True, # Omit `max_lines: int = None`, it is forced to 1 here. placeholder: str = " [...]", -) -> str: ... -def dedent(text: str) -> str: ... -def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... +) -> str: + """Collapse and truncate the given text to fit in the given width. + +The text first has its whitespace collapsed. If it then fits in +the *width*, it is returned as is. Otherwise, as many words +as possible are joined and then the placeholder is appended:: + + >>> textwrap.shorten("Hello world!", width=12) + 'Hello world!' + >>> textwrap.shorten("Hello world!", width=11) + 'Hello [...]' +""" +def dedent(text: str) -> str: + """Remove any common leading whitespace from every line in `text`. + +This can be used to make triple-quoted strings line up with the left +edge of the display, while still presenting them in the source code +in indented form. + +Note that tabs and spaces are both treated as whitespace, but they +are not equal: the lines " hello" and "\\thello" are +considered to have no common leading whitespace. + +Entirely blank lines are normalized to a newline character. +""" +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: + """Adds 'prefix' to the beginning of selected lines in 'text'. + +If 'predicate' is provided, 'prefix' will only be added to the lines +where 'predicate(line)' is True. If 'predicate' is not provided, +it will default to adding 'prefix' to all non-empty lines that do not +consist solely of whitespace characters. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi index 28fa5267a9975..5d4befa99c2e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi @@ -1,3 +1,5 @@ +"""Thread module emulating a subset of Java's threading model. +""" import _thread import sys from _thread import _excepthook, _ExceptHookArgs, get_native_id as get_native_id @@ -45,27 +47,112 @@ if sys.version_info >= (3, 12): _profile_hook: ProfileFunction | None -def active_count() -> int: ... +def active_count() -> int: + """Return the number of Thread objects currently alive. + +The returned count is equal to the length of the list returned by +enumerate(). + +""" @deprecated("Deprecated since Python 3.10. Use `active_count()` instead.") -def activeCount() -> int: ... -def current_thread() -> Thread: ... 
+def activeCount() -> int: + """Return the number of Thread objects currently alive. + +This function is deprecated, use active_count() instead. + +""" +def current_thread() -> Thread: + """Return the current Thread object, corresponding to the caller's thread of control. + +If the caller's thread of control was not created through the threading +module, a dummy thread object with limited functionality is returned. + +""" @deprecated("Deprecated since Python 3.10. Use `current_thread()` instead.") -def currentThread() -> Thread: ... -def get_ident() -> int: ... -def enumerate() -> list[Thread]: ... -def main_thread() -> Thread: ... -def settrace(func: TraceFunction) -> None: ... -def setprofile(func: ProfileFunction | None) -> None: ... +def currentThread() -> Thread: + """Return the current Thread object, corresponding to the caller's thread of control. + +This function is deprecated, use current_thread() instead. + +""" +def get_ident() -> int: + """Return a non-zero integer that uniquely identifies the current thread +amongst other threads that exist simultaneously. +This may be used to identify per-thread resources. +Even though on some platforms threads identities may appear to be +allocated consecutive numbers starting at 1, this behavior should not +be relied upon, and the number should be seen purely as a magic cookie. +A thread's identity may be reused for another thread after it exits. +""" +def enumerate() -> list[Thread]: + """Return a list of all Thread objects currently alive. + +The list includes daemonic threads, dummy thread objects created by +current_thread(), and the main thread. It excludes terminated threads and +threads that have not yet been started. + +""" +def main_thread() -> Thread: + """Return the main thread object. + +In normal conditions, the main thread is the thread from which the +Python interpreter was started. +""" +def settrace(func: TraceFunction) -> None: + """Set a trace function for all threads started from the threading module. + +The func will be passed to sys.settrace() for each thread, before its run() +method is called. +""" +def setprofile(func: ProfileFunction | None) -> None: + """Set a profile function for all threads started from the threading module. + +The func will be passed to sys.setprofile() for each thread, before its +run() method is called. +""" if sys.version_info >= (3, 12): - def setprofile_all_threads(func: ProfileFunction | None) -> None: ... - def settrace_all_threads(func: TraceFunction) -> None: ... + def setprofile_all_threads(func: ProfileFunction | None) -> None: + """Set a profile function for all threads started from the threading module +and all Python threads that are currently executing. + +The func will be passed to sys.setprofile() for each thread, before its +run() method is called. +""" + def settrace_all_threads(func: TraceFunction) -> None: + """Set a trace function for all threads started from the threading module +and all Python threads that are currently executing. + +The func will be passed to sys.settrace() for each thread, before its run() +method is called. +""" if sys.version_info >= (3, 10): - def gettrace() -> TraceFunction | None: ... - def getprofile() -> ProfileFunction | None: ... + def gettrace() -> TraceFunction | None: + """Get the trace function as set by threading.settrace(). +""" + def getprofile() -> ProfileFunction | None: + """Get the profiler function as set by threading.setprofile(). +""" -def stack_size(size: int = 0, /) -> int: ... 
+def stack_size(size: int = 0, /) -> int: + """Return the thread stack size used when creating new threads. The +optional size argument specifies the stack size (in bytes) to be used +for subsequently created threads, and must be 0 (use platform or +configured default) or a positive integer value of at least 32,768 (32k). +If changing the thread stack size is unsupported, a ThreadError +exception is raised. If the specified size is invalid, a ValueError +exception is raised, and the stack size is unmodified. 32k bytes + currently the minimum supported stack size value to guarantee +sufficient stack space for the interpreter itself. + +Note that some platforms may have particular restrictions on values for +the stack size, such as requiring a minimum stack size larger than 32 KiB or +requiring allocation in multiples of the system memory page size +- platform documentation should be referred to for more information +(4 KiB pages are common; using multiples of 4096 for the stack size is +the suggested approach in the absence of more specific information). +""" TIMEOUT_MAX: Final[float] @@ -73,9 +160,23 @@ ThreadError = _thread.error local = _thread._local class Thread: + """A class that represents a thread of control. + +This class can be safely subclassed in a limited fashion. There are two ways +to specify the activity: by passing a callable object to the constructor, or +by overriding the run() method in a subclass. + +""" name: str @property - def ident(self) -> int | None: ... + def ident(self) -> int | None: + """Thread identifier of this thread or None if it has not been started. + +This is a nonzero integer. See the get_ident() function. Thread +identifiers may be recycled when a thread exits and another thread is +created. The identifier is available even after the thread has exited. + +""" daemon: bool if sys.version_info >= (3, 14): def __init__( @@ -88,7 +189,36 @@ class Thread: *, daemon: bool | None = None, context: ContextVar[Any] | None = None, - ) -> None: ... + ) -> None: + """This constructor should always be called with keyword arguments. Arguments are: + +*group* should be None; reserved for future extension when a ThreadGroup +class is implemented. + +*target* is the callable object to be invoked by the run() +method. Defaults to None, meaning nothing is called. + +*name* is the thread name. By default, a unique name is constructed of +the form "Thread-N" where N is a small decimal number. + +*args* is a list or tuple of arguments for the target invocation. Defaults to (). + +*kwargs* is a dictionary of keyword arguments for the target +invocation. Defaults to {}. + +*context* is the contextvars.Context value to use for the thread. +The default value is None, which means to check +sys.flags.thread_inherit_context. If that flag is true, use a copy +of the context of the caller. If false, use an empty context. To +explicitly start with an empty context, pass a new instance of +contextvars.Context(). To explicitly start with a copy of the current +context, pass the value from contextvars.copy_context(). + +If a subclass overrides the constructor, it must make sure to invoke +the base class constructor (Thread.__init__()) before doing anything +else to the thread. + +""" else: def __init__( self, @@ -99,22 +229,116 @@ class Thread: kwargs: Mapping[str, Any] | None = None, *, daemon: bool | None = None, - ) -> None: ... + ) -> None: + """This constructor should always be called with keyword arguments. 
Arguments are: + +*group* should be None; reserved for future extension when a ThreadGroup +class is implemented. + +*target* is the callable object to be invoked by the run() +method. Defaults to None, meaning nothing is called. + +*name* is the thread name. By default, a unique name is constructed of +the form "Thread-N" where N is a small decimal number. + +*args* is a list or tuple of arguments for the target invocation. Defaults to (). + +*kwargs* is a dictionary of keyword arguments for the target +invocation. Defaults to {}. - def start(self) -> None: ... - def run(self) -> None: ... - def join(self, timeout: float | None = None) -> None: ... +If a subclass overrides the constructor, it must make sure to invoke +the base class constructor (Thread.__init__()) before doing anything +else to the thread. + +""" + + def start(self) -> None: + """Start the thread's activity. + +It must be called at most once per thread object. It arranges for the +object's run() method to be invoked in a separate thread of control. + +This method will raise a RuntimeError if called more than once on the +same thread object. + +""" + def run(self) -> None: + """Method representing the thread's activity. + +You may override this method in a subclass. The standard run() method +invokes the callable object passed to the object's constructor as the +target argument, if any, with sequential and keyword arguments taken +from the args and kwargs arguments, respectively. + +""" + def join(self, timeout: float | None = None) -> None: + """Wait until the thread terminates. + +This blocks the calling thread until the thread whose join() method is +called terminates -- either normally or through an unhandled exception +or until the optional timeout occurs. + +When the timeout argument is present and not None, it should be a +floating-point number specifying a timeout for the operation in seconds +(or fractions thereof). As join() always returns None, you must call +is_alive() after join() to decide whether a timeout happened -- if the +thread is still alive, the join() call timed out. + +When the timeout argument is not present or None, the operation will +block until the thread terminates. + +A thread can be join()ed many times. + +join() raises a RuntimeError if an attempt is made to join the current +thread as that would cause a deadlock. It is also an error to join() a +thread before it has been started and attempts to do so raises the same +exception. + +""" @property - def native_id(self) -> int | None: ... # only available on some platforms - def is_alive(self) -> bool: ... + def native_id(self) -> int | None: # only available on some platforms + """Native integral thread ID of this thread, or None if it has not been started. + +This is a non-negative integer. See the get_native_id() function. +This represents the Thread ID as reported by the kernel. + +""" + def is_alive(self) -> bool: + """Return whether the thread is alive. + +This method returns True just before the run() method starts until just +after the run() method terminates. See also the module function +enumerate(). + +""" @deprecated("Deprecated since Python 3.10. Read the `daemon` attribute instead.") - def isDaemon(self) -> bool: ... + def isDaemon(self) -> bool: + """Return whether this thread is a daemon. + +This method is deprecated, use the daemon attribute instead. + +""" @deprecated("Deprecated since Python 3.10. Set the `daemon` attribute instead.") - def setDaemon(self, daemonic: bool) -> None: ... 
+ def setDaemon(self, daemonic: bool) -> None: + """Set whether this thread is a daemon. + +This method is deprecated, use the .daemon property instead. + +""" @deprecated("Deprecated since Python 3.10. Read the `name` attribute instead.") - def getName(self) -> str: ... + def getName(self) -> str: + """Return a string used for identification purposes only. + +This method is deprecated, use the name attribute instead. + +""" @deprecated("Deprecated since Python 3.10. Set the `name` attribute instead.") - def setName(self, name: str) -> None: ... + def setName(self, name: str) -> None: + """Set the name string for this thread. + +This method is deprecated, use the name attribute instead. + +""" class _DummyThread(Thread): def __init__(self) -> None: ... @@ -125,18 +349,78 @@ Lock = _thread.LockType # Python implementation of RLock. @final class _RLock: + """This class implements reentrant lock objects. + +A reentrant lock must be released by the thread that acquired it. Once a +thread has acquired a reentrant lock, the same thread may acquire it +again without blocking; the thread must release it once for each time it +has acquired it. + +""" _count: int - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... - def release(self) -> None: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: + """Acquire a lock, blocking or non-blocking. + +When invoked without arguments: if this thread already owns the lock, +increment the recursion level by one, and return immediately. Otherwise, +if another thread owns the lock, block until the lock is unlocked. Once +the lock is unlocked (not owned by any thread), then grab ownership, set +the recursion level to one, and return. If more than one thread is +blocked waiting until the lock is unlocked, only one at a time will be +able to grab ownership of the lock. There is no return value in this +case. + +When invoked with the blocking argument set to true, do the same thing +as when called without arguments, and return true. + +When invoked with the blocking argument set to false, do not block. If a +call without an argument would block, return false immediately; +otherwise, do the same thing as when called without arguments, and +return true. + +When invoked with the floating-point timeout argument set to a positive +value, block for at most the number of seconds specified by timeout +and as long as the lock cannot be acquired. Return true if the lock has +been acquired, false if the timeout has elapsed. + +""" + def release(self) -> None: + """Release a lock, decrementing the recursion level. + +If after the decrement it is zero, reset the lock to unlocked (not owned +by any thread), and if any other threads are blocked waiting for the +lock to become unlocked, allow exactly one of them to proceed. If after +the decrement the recursion level is still nonzero, the lock remains +locked and owned by the calling thread. + +Only call this method when the calling thread owns the lock. A +RuntimeError is raised if this method is called when the lock is +unlocked. + +There is no return value. + +""" __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... if sys.version_info >= (3, 14): - def locked(self) -> bool: ... + def locked(self) -> bool: + """Return whether this object is locked. +""" RLock = _thread.RLock # Actually a function at runtime. class Condition: + """Class that implements a condition variable. 
+ +A condition variable allows one or more threads to wait until they are +notified by another thread. + +If the lock argument is given and not None, it must be a Lock or RLock +object, and it is used as the underlying lock. Otherwise, a new RLock object +is created and used as the underlying lock. + +""" def __init__(self, lock: Lock | _RLock | RLock | None = None) -> None: ... def __enter__(self) -> bool: ... def __exit__( @@ -144,35 +428,206 @@ class Condition: ) -> None: ... def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = None) -> bool: ... - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... - def notify(self, n: int = 1) -> None: ... - def notify_all(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: + """Wait until notified or until a timeout occurs. + +If the calling thread has not acquired the lock when this method is +called, a RuntimeError is raised. + +This method releases the underlying lock, and then blocks until it is +awakened by a notify() or notify_all() call for the same condition +variable in another thread, or until the optional timeout occurs. Once +awakened or timed out, it re-acquires the lock and returns. + +When the timeout argument is present and not None, it should be a +floating-point number specifying a timeout for the operation in seconds +(or fractions thereof). + +When the underlying lock is an RLock, it is not released using its +release() method, since this may not actually unlock the lock when it +was acquired multiple times recursively. Instead, an internal interface +of the RLock class is used, which really unlocks it even when it has +been recursively acquired several times. Another internal interface is +then used to restore the recursion level when the lock is reacquired. + +""" + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: + """Wait until a condition evaluates to True. + +predicate should be a callable which result will be interpreted as a +boolean value. A timeout may be provided giving the maximum time to +wait. + +""" + def notify(self, n: int = 1) -> None: + """Wake up one or more threads waiting on this condition, if any. + +If the calling thread has not acquired the lock when this method is +called, a RuntimeError is raised. + +This method wakes up at most n of the threads waiting for the condition +variable; it is a no-op if no threads are waiting. + +""" + def notify_all(self) -> None: + """Wake up all threads waiting on this condition. + +If the calling thread has not acquired the lock when this method +is called, a RuntimeError is raised. + +""" @deprecated("Deprecated since Python 3.10. Use `notify_all()` instead.") - def notifyAll(self) -> None: ... + def notifyAll(self) -> None: + """Wake up all threads waiting on this condition. + +This method is deprecated, use notify_all() instead. + +""" class Semaphore: + """This class implements semaphore objects. + +Semaphores manage a counter representing the number of release() calls minus +the number of acquire() calls, plus an initial value. The acquire() method +blocks if necessary until it can return without making the counter +negative. If not given, value defaults to 1. + +""" _value: int def __init__(self, value: int = 1) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... 
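[Editorial note, not part of the synced stub: a minimal producer/consumer sketch of the Condition protocol documented above; wait_for() releases the underlying lock while blocking and re-acquires it before testing the predicate. The item value is a placeholder.]

    import threading

    items: list[int] = []
    cond = threading.Condition()

    def consumer() -> None:
        with cond:                               # acquires the underlying lock
            cond.wait_for(lambda: bool(items))   # blocks until notified and non-empty
            print("got", items.pop(0))

    def producer() -> None:
        with cond:
            items.append(42)
            cond.notify()                        # wake one waiting thread

    t = threading.Thread(target=consumer)
    t.start()
    producer()
    t.join()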
- def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... - def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... - def release(self, n: int = 1) -> None: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: + """Acquire a semaphore, decrementing the internal counter by one. + +When invoked without arguments: if the internal counter is larger than +zero on entry, decrement it by one and return immediately. If it is zero +on entry, block, waiting until some other thread has called release() to +make it larger than zero. This is done with proper interlocking so that +if multiple acquire() calls are blocked, release() will wake exactly one +of them up. The implementation may pick one at random, so the order in +which blocked threads are awakened should not be relied on. There is no +return value in this case. + +When invoked with blocking set to true, do the same thing as when called +without arguments, and return true. + +When invoked with blocking set to false, do not block. If a call without +an argument would block, return false immediately; otherwise, do the +same thing as when called without arguments, and return true. + +When invoked with a timeout other than None, it will block for at +most timeout seconds. If acquire does not complete successfully in +that interval, return false. Return true otherwise. + +""" + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: + """Acquire a semaphore, decrementing the internal counter by one. + +When invoked without arguments: if the internal counter is larger than +zero on entry, decrement it by one and return immediately. If it is zero +on entry, block, waiting until some other thread has called release() to +make it larger than zero. This is done with proper interlocking so that +if multiple acquire() calls are blocked, release() will wake exactly one +of them up. The implementation may pick one at random, so the order in +which blocked threads are awakened should not be relied on. There is no +return value in this case. + +When invoked with blocking set to true, do the same thing as when called +without arguments, and return true. + +When invoked with blocking set to false, do not block. If a call without +an argument would block, return false immediately; otherwise, do the +same thing as when called without arguments, and return true. -class BoundedSemaphore(Semaphore): ... +When invoked with a timeout other than None, it will block for at +most timeout seconds. If acquire does not complete successfully in +that interval, return false. Return true otherwise. + +""" + def release(self, n: int = 1) -> None: + """Release a semaphore, incrementing the internal counter by one or more. + +When the counter is zero on entry and another thread is waiting for it +to become larger than zero again, wake up that thread. + +""" + +class BoundedSemaphore(Semaphore): + """Implements a bounded semaphore. + +A bounded semaphore checks to make sure its current value doesn't exceed its +initial value. If it does, ValueError is raised. In most situations +semaphores are used to guard resources with limited capacity. + +If the semaphore is released too many times it's a sign of a bug. If not +given, value defaults to 1. + +Like regular semaphores, bounded semaphores manage a counter representing +the number of release() calls minus the number of acquire() calls, plus an +initial value. 
The acquire() method blocks if necessary until it can return +without making the counter negative. If not given, value defaults to 1. + +""" class Event: - def is_set(self) -> bool: ... + """Class implementing event objects. + +Events manage a flag that can be set to true with the set() method and reset +to false with the clear() method. The wait() method blocks until the flag is +true. The flag is initially false. + +""" + def is_set(self) -> bool: + """Return true if and only if the internal flag is true. +""" @deprecated("Deprecated since Python 3.10. Use `is_set()` instead.") - def isSet(self) -> bool: ... - def set(self) -> None: ... - def clear(self) -> None: ... - def wait(self, timeout: float | None = None) -> bool: ... + def isSet(self) -> bool: + """Return true if and only if the internal flag is true. + +This method is deprecated, use is_set() instead. + +""" + def set(self) -> None: + """Set the internal flag to true. + +All threads waiting for it to become true are awakened. Threads +that call wait() once the flag is true will not block at all. + +""" + def clear(self) -> None: + """Reset the internal flag to false. + +Subsequently, threads calling wait() will block until set() is called to +set the internal flag to true again. + +""" + def wait(self, timeout: float | None = None) -> bool: + """Block until the internal flag is true. + +If the internal flag is true on entry, return immediately. Otherwise, +block until another thread calls set() to set the flag to true, or until +the optional timeout occurs. + +When the timeout argument is present and not None, it should be a +floating-point number specifying a timeout for the operation in seconds +(or fractions thereof). + +This method returns the internal flag on exit, so it will always return +True except if a timeout is given and the operation times out. + +""" excepthook = _excepthook ExceptHookArgs = _ExceptHookArgs class Timer(Thread): + """Call a function after a specified number of seconds: + +t = Timer(30.0, f, args=None, kwargs=None) +t.start() +t.cancel() # stop the timer's action if it's still waiting + +""" args: Iterable[Any] # undocumented finished: Event # undocumented function: Callable[..., Any] # undocumented @@ -186,18 +641,61 @@ class Timer(Thread): args: Iterable[Any] | None = None, kwargs: Mapping[str, Any] | None = None, ) -> None: ... - def cancel(self) -> None: ... + def cancel(self) -> None: + """Stop the timer if it hasn't finished yet. +""" class Barrier: + """Implements a Barrier. + +Useful for synchronizing a fixed number of threads at known synchronization +points. Threads block on 'wait()' and are simultaneously awoken once they +have all made that call. + +""" @property - def parties(self) -> int: ... + def parties(self) -> int: + """Return the number of threads required to trip the barrier. +""" @property - def n_waiting(self) -> int: ... + def n_waiting(self) -> int: + """Return the number of threads currently waiting at the barrier. +""" @property - def broken(self) -> bool: ... - def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... - def wait(self, timeout: float | None = None) -> int: ... - def reset(self) -> None: ... - def abort(self) -> None: ... + def broken(self) -> bool: + """Return True if the barrier is in a broken state. +""" + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: + """Create a barrier, initialised to 'parties' threads. 
+ +'action' is a callable which, when supplied, will be called by one of +the threads after they have all entered the barrier and just prior to +releasing them all. If a 'timeout' is provided, it is used as the +default for all subsequent 'wait()' calls. + +""" + def wait(self, timeout: float | None = None) -> int: + """Wait for the barrier. + +When the specified number of threads have started waiting, they are all +simultaneously awoken. If an 'action' was provided for the barrier, one +of the threads will have executed that callback prior to returning. +Returns an individual index number from 0 to 'parties-1'. + +""" + def reset(self) -> None: + """Reset the barrier to the initial state. + +Any threads currently waiting will get the BrokenBarrier exception +raised. + +""" + def abort(self) -> None: + """Place the barrier into a 'broken' state. + +Useful in case of error. Any currently waiting threads and threads +attempting to 'wait()' will have BrokenBarrierError raised. + +""" class BrokenBarrierError(RuntimeError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi index 5665efbba69d0..58f1978ef3ea9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi @@ -1,3 +1,26 @@ +"""This module provides various functions to manipulate time values. + +There are two standard representations of time. One is the number +of seconds since the Epoch, in UTC (a.k.a. GMT). It may be an integer +or a floating-point number (to represent fractions of seconds). +The epoch is the point where the time starts, the return value of time.gmtime(0). +It is January 1, 1970, 00:00:00 (UTC) on all platforms. + +The other representation is a tuple of 9 integers giving local time. +The tuple items are: + year (including century, e.g. 1998) + month (1-12) + day (1-31) + hours (0-23) + minutes (0-59) + seconds (0-59) + weekday (0-6, Monday is 0) + Julian day (day in the year, 1-366) + DST (Daylight Savings Time) flag (-1, 0 or 1) +If the DST flag is 0, the time is given in the regular time zone; +if it is 1, the time is given in the DST time zone; +if it is -1, mktime() should guess based on the date and time. +""" import sys from _typeshed import structseq from typing import Any, Final, Literal, Protocol, final, type_check_only @@ -40,45 +63,184 @@ if sys.platform == "linux": # https://github.com/python/typeshed/pull/6560#discussion_r767162532 @final class struct_time(structseq[Any | int], _TimeTuple): + """The time value as returned by gmtime(), localtime(), and strptime(), and + accepted by asctime(), mktime() and strftime(). May be considered as a + sequence of 9 integers. + + Note that several fields' values are not the same as those defined by + the C language standard for struct tm. For example, the value of the + field tm_year is the actual year, not year - 1900. See individual + fields' descriptions for details. +""" if sys.version_info >= (3, 10): __match_args__: Final = ("tm_year", "tm_mon", "tm_mday", "tm_hour", "tm_min", "tm_sec", "tm_wday", "tm_yday", "tm_isdst") @property - def tm_year(self) -> int: ... + def tm_year(self) -> int: + """year, for example, 1993 +""" @property - def tm_mon(self) -> int: ... + def tm_mon(self) -> int: + """month of year, range [1, 12] +""" @property - def tm_mday(self) -> int: ... + def tm_mday(self) -> int: + """day of month, range [1, 31] +""" @property - def tm_hour(self) -> int: ... 
+ def tm_hour(self) -> int: + """hours, range [0, 23] +""" @property - def tm_min(self) -> int: ... + def tm_min(self) -> int: + """minutes, range [0, 59] +""" @property - def tm_sec(self) -> int: ... + def tm_sec(self) -> int: + """seconds, range [0, 61]) +""" @property - def tm_wday(self) -> int: ... + def tm_wday(self) -> int: + """day of week, range [0, 6], Monday is 0 +""" @property - def tm_yday(self) -> int: ... + def tm_yday(self) -> int: + """day of year, range [1, 366] +""" @property - def tm_isdst(self) -> int: ... + def tm_isdst(self) -> int: + """1 if summer time is in effect, 0 if not, and -1 if unknown +""" # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. @property - def tm_zone(self) -> str: ... + def tm_zone(self) -> str: + """abbreviation of timezone name +""" @property - def tm_gmtoff(self) -> int: ... - -def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... -def ctime(seconds: float | None = None, /) -> str: ... -def gmtime(seconds: float | None = None, /) -> struct_time: ... -def localtime(seconds: float | None = None, /) -> struct_time: ... -def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: ... -def sleep(seconds: float, /) -> None: ... -def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: ... -def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: ... -def time() -> float: ... + def tm_gmtoff(self) -> int: + """offset from UTC in seconds +""" + +def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: + """asctime([tuple]) -> string + +Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'. +When the time tuple is not present, current time as returned by localtime() +is used. +""" +def ctime(seconds: float | None = None, /) -> str: + """ctime(seconds) -> string + +Convert a time in seconds since the Epoch to a string in local time. +This is equivalent to asctime(localtime(seconds)). When the time tuple is +not present, current time as returned by localtime() is used. +""" +def gmtime(seconds: float | None = None, /) -> struct_time: + """gmtime([seconds]) -> (tm_year, tm_mon, tm_mday, tm_hour, tm_min, + tm_sec, tm_wday, tm_yday, tm_isdst) + +Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a. +GMT). When 'seconds' is not passed in, convert the current time instead. + +If the platform supports the tm_gmtoff and tm_zone, they are available as +attributes only. +""" +def localtime(seconds: float | None = None, /) -> struct_time: + """localtime([seconds]) -> (tm_year,tm_mon,tm_mday,tm_hour,tm_min, + tm_sec,tm_wday,tm_yday,tm_isdst) + +Convert seconds since the Epoch to a time tuple expressing local time. +When 'seconds' is not passed in, convert the current time instead. +""" +def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: + """mktime(tuple) -> floating-point number + +Convert a time tuple in local time to seconds since the Epoch. +Note that mktime(gmtime(0)) will not generally return zero for most +time zones; instead the returned value will either be equal to that +of the timezone or altzone attributes on the time module. +""" +def sleep(seconds: float, /) -> None: + """sleep(seconds) + +Delay execution for a given number of seconds. The argument may be +a floating-point number for subsecond precision. 
+""" +def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: + """strftime(format[, tuple]) -> string + +Convert a time tuple to a string according to a format specification. +See the library reference manual for formatting codes. When the time tuple +is not present, current time as returned by localtime() is used. + +Commonly used format codes: + +%Y Year with century as a decimal number. +%m Month as a decimal number [01,12]. +%d Day of the month as a decimal number [01,31]. +%H Hour (24-hour clock) as a decimal number [00,23]. +%M Minute as a decimal number [00,59]. +%S Second as a decimal number [00,61]. +%z Time zone offset from UTC. +%a Locale's abbreviated weekday name. +%A Locale's full weekday name. +%b Locale's abbreviated month name. +%B Locale's full month name. +%c Locale's appropriate date and time representation. +%I Hour (12-hour clock) as a decimal number [01,12]. +%p Locale's equivalent of either AM or PM. + +Other codes may be available on your platform. See documentation for +the C library strftime function. +""" +def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: + """strptime(string, format) -> struct_time + +Parse a string to a time tuple according to a format specification. +See the library reference manual for formatting codes (same as +strftime()). + +Commonly used format codes: + +%Y Year with century as a decimal number. +%m Month as a decimal number [01,12]. +%d Day of the month as a decimal number [01,31]. +%H Hour (24-hour clock) as a decimal number [00,23]. +%M Minute as a decimal number [00,59]. +%S Second as a decimal number [00,61]. +%z Time zone offset from UTC. +%a Locale's abbreviated weekday name. +%A Locale's full weekday name. +%b Locale's abbreviated month name. +%B Locale's full month name. +%c Locale's appropriate date and time representation. +%I Hour (12-hour clock) as a decimal number [01,12]. +%p Locale's equivalent of either AM or PM. + +Other codes may be available on your platform. See documentation for +the C library strftime function. +""" +def time() -> float: + """time() -> floating-point number + +Return the current time in seconds since the Epoch. +Fractions of a second may be present if the system clock provides them. +""" if sys.platform != "win32": - def tzset() -> None: ... # Unix only + def tzset() -> None: # Unix only + """tzset() + +Initialize, or reinitialize, the local timezone to the value stored in +os.environ['TZ']. The TZ environment variable should be specified in +standard Unix timezone format as documented in the tzset man page +(eg. 'US/Eastern', 'Europe/Amsterdam'). Unknown timezones will silently +fall back to UTC. If the TZ environment variable is not set, the local +timezone is set to the systems best guess of wallclock time. +Changing the TZ environment variable without calling tzset *may* change +the local timezone used by methods such as localtime, but this behaviour +should not be relied on. +""" @type_check_only class _ClockInfo(Protocol): @@ -87,26 +249,88 @@ class _ClockInfo(Protocol): monotonic: bool resolution: float -def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: ... -def monotonic() -> float: ... -def perf_counter() -> float: ... -def process_time() -> float: ... +def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: + """get_clock_info(name: str) -> dict + +Get information of the specified clock. 
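[Editorial note, not part of the synced stub: a short round trip through the conversion functions documented above (time, localtime, strftime, strptime, mktime); the format string uses only codes listed in the strftime docstring.]

    import time

    now = time.time()                                  # seconds since the Epoch
    tm = time.localtime(now)                           # struct_time in local time
    text = time.strftime("%Y-%m-%d %H:%M:%S", tm)
    parsed = time.strptime(text, "%Y-%m-%d %H:%M:%S")
    print(text, int(time.mktime(parsed)))              # back to epoch seconds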
+""" +def monotonic() -> float: + """monotonic() -> float + +Monotonic clock, cannot go backward. +""" +def perf_counter() -> float: + """perf_counter() -> float + +Performance counter for benchmarking. +""" +def process_time() -> float: + """process_time() -> float + +Process time for profiling: sum of the kernel and user-space CPU time. +""" if sys.platform != "win32": - def clock_getres(clk_id: int, /) -> float: ... # Unix only - def clock_gettime(clk_id: int, /) -> float: ... # Unix only - def clock_settime(clk_id: int, time: float, /) -> None: ... # Unix only + def clock_getres(clk_id: int, /) -> float: # Unix only + """clock_getres(clk_id) -> floating-point number + +Return the resolution (precision) of the specified clock clk_id. +""" + def clock_gettime(clk_id: int, /) -> float: # Unix only + """Return the time of the specified clock clk_id as a float. +""" + def clock_settime(clk_id: int, time: float, /) -> None: # Unix only + """clock_settime(clk_id, time) + +Set the time of the specified clock clk_id. +""" if sys.platform != "win32": - def clock_gettime_ns(clk_id: int, /) -> int: ... - def clock_settime_ns(clock_id: int, time: int, /) -> int: ... + def clock_gettime_ns(clk_id: int, /) -> int: + """Return the time of the specified clock clk_id as nanoseconds (int). +""" + def clock_settime_ns(clock_id: int, time: int, /) -> int: + """clock_settime_ns(clk_id, time) + +Set the time of the specified clock clk_id with nanoseconds. +""" if sys.platform == "linux": - def pthread_getcpuclockid(thread_id: int, /) -> int: ... - -def monotonic_ns() -> int: ... -def perf_counter_ns() -> int: ... -def process_time_ns() -> int: ... -def time_ns() -> int: ... -def thread_time() -> float: ... -def thread_time_ns() -> int: ... + def pthread_getcpuclockid(thread_id: int, /) -> int: + """pthread_getcpuclockid(thread_id) -> int + +Return the clk_id of a thread's CPU time clock. +""" + +def monotonic_ns() -> int: + """monotonic_ns() -> int + +Monotonic clock, cannot go backward, as nanoseconds. +""" +def perf_counter_ns() -> int: + """perf_counter_ns() -> int + +Performance counter for benchmarking as nanoseconds. +""" +def process_time_ns() -> int: + """process_time() -> int + +Process time for profiling as nanoseconds: +sum of the kernel and user-space CPU time. +""" +def time_ns() -> int: + """time_ns() -> int + +Return the current time in nanoseconds since the Epoch. +""" +def thread_time() -> float: + """thread_time() -> float + +Thread time for profiling: sum of the kernel and user-space CPU time. +""" +def thread_time_ns() -> int: + """thread_time() -> int + +Thread time for profiling as nanoseconds: +sum of the kernel and user-space CPU time. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi index a5da943c84848..8f2c18dbc24d8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi @@ -1,3 +1,50 @@ +"""Tool for measuring execution time of small code snippets. + +This module avoids a number of common traps for measuring execution +times. See also Tim Peters' introduction to the Algorithms chapter in +the Python Cookbook, published by O'Reilly. + +Library usage: see the Timer class. 
+ +Command line usage: + python timeit.py [-n N] [-r N] [-s S] [-p] [-h] [--] [statement] + +Options: + -n/--number N: how many times to execute 'statement' (default: see below) + -r/--repeat N: how many times to repeat the timer (default 5) + -s/--setup S: statement to be executed once initially (default 'pass'). + Execution time of this setup statement is NOT timed. + -p/--process: use time.process_time() (default is time.perf_counter()) + -v/--verbose: print raw timing results; repeat for more digits precision + -u/--unit: set the output time unit (nsec, usec, msec, or sec) + -h/--help: print this usage message and exit + --: separate options from statement, use when statement starts with - + statement: statement to be timed (default 'pass') + +A multi-line statement may be given by specifying each line as a +separate argument; indented lines are possible by enclosing an +argument in quotes and using leading spaces. Multiple -s options are +treated similarly. + +If -n is not given, a suitable number of loops is calculated by trying +increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the +total time is at least 0.2 seconds. + +Note: there is a certain baseline overhead associated with executing a +pass statement. It differs between versions. The code here doesn't try +to hide it, but you should be aware of it. The baseline overhead can be +measured by invoking the program without arguments. + +Classes: + + Timer + +Functions: + + timeit(string, string) -> float + repeat(string, string) -> list + default_timer() -> float +""" from collections.abc import Callable, Sequence from typing import IO, Any from typing_extensions import TypeAlias @@ -10,17 +57,90 @@ _Stmt: TypeAlias = str | Callable[[], object] default_timer: _Timer class Timer: + """Class for timing execution speed of small code snippets. + +The constructor takes a statement to be timed, an additional +statement used for setup, and a timer function. Both statements +default to 'pass'; the timer function is platform-dependent (see +module doc string). If 'globals' is specified, the code will be +executed within that namespace (as opposed to inside timeit's +namespace). + +To measure the execution time of the first statement, use the +timeit() method. The repeat() method is a convenience to call +timeit() multiple times and return a list of results. + +The statements may contain newlines, as long as they don't contain +multi-line string literals. +""" def __init__( self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None - ) -> None: ... - def print_exc(self, file: IO[str] | None = None) -> None: ... - def timeit(self, number: int = 1000000) -> float: ... - def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ... - def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ... + ) -> None: + """Constructor. See class doc string. +""" + def print_exc(self, file: IO[str] | None = None) -> None: + """Helper to print a traceback from the timed code. + +Typical use: + + t = Timer(...) # outside the try/except + try: + t.timeit(...) # or t.repeat(...) + except: + t.print_exc() + +The advantage over the standard traceback is that source lines +in the compiled template will be displayed. + +The optional file argument directs where the traceback is +sent; it defaults to sys.stderr. +""" + def timeit(self, number: int = 1000000) -> float: + """Time 'number' executions of the main statement. 
+ +To be precise, this executes the setup statement once, and +then returns the time it takes to execute the main statement +a number of times, as float seconds if using the default timer. The +argument is the number of times through the loop, defaulting +to one million. The main statement, the setup statement and +the timer function to be used are passed to the constructor. +""" + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: + """Call timeit() a few times. + +This is a convenience function that calls the timeit() +repeatedly, returning a list of results. The first argument +specifies how many times to call timeit(), defaulting to 5; +the second argument specifies the timer argument, defaulting +to one million. + +Note: it's tempting to calculate mean and standard deviation +from the result vector and report these. However, this is not +very useful. In a typical case, the lowest value gives a +lower bound for how fast your machine can run the given code +snippet; higher values in the result vector are typically not +caused by variability in Python's speed, but by other +processes interfering with your timing accuracy. So the min() +of the result is probably the only number you should be +interested in. After that, you should look at the entire +vector and apply common sense rather than statistics. +""" + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: + """Return the number of loops and time taken so that total time >= 0.2. + +Calls the timeit method with increasing numbers from the sequence +1, 2, 5, 10, 20, 50, ... until the time taken is at least 0.2 +second. Returns (number, time_taken). + +If *callback* is given and is not None, it will be called after +each trial with two arguments: ``callback(number, time_taken)``. +""" def timeit( stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None -) -> float: ... +) -> float: + """Convenience function to create Timer object and call timeit method. +""" def repeat( stmt: _Stmt = "pass", setup: _Stmt = "pass", @@ -28,5 +148,23 @@ def repeat( repeat: int = 5, number: int = 1000000, globals: dict[str, Any] | None = None, -) -> list[float]: ... -def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ... +) -> list[float]: + """Convenience function to create Timer object and call repeat method. +""" +def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: + """Main program, used when run as a script. + +The optional 'args' argument specifies the command line to be parsed, +defaulting to sys.argv[1:]. + +The return value is an exit code to be passed to sys.exit(); it +may be None to indicate success. + +When an exception happens during timing, a traceback is printed to +stderr and the return value is 1. Exceptions at other times +(including the template compilation) are not caught. + +'_wrap_timer' is an internal interface used for unit testing. If it +is not None, it must be a callable that accepts a timer function +and returns another timer function (used for unit testing). 
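+
+An illustrative command-line invocation (the statement and counts are
+arbitrary examples):
+
+    python -m timeit -n 1000 -s "data = list(range(100))" "sorted(data)"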
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index ef57faa2b0097..5db917442d3f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1,3 +1,34 @@ +"""Wrapper functions for Tcl/Tk. + +Tkinter provides classes which allow the display, positioning and +control of widgets. Toplevel widgets are Tk and Toplevel. Other +widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton, +Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox +LabelFrame and PanedWindow. + +Properties of the widgets are specified with keyword arguments. +Keyword arguments have the same name as the corresponding resource +under Tk. + +Widgets are positioned with one of the geometry managers Place, Pack +or Grid. These managers can be called with methods place, pack, grid +available in every Widget. + +Actions are bound to events by resources (e.g. keyword argument +command) or with the method bind. + +Example (Hello, World): +import tkinter +from tkinter.constants import * +tk = tkinter.Tk() +frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2) +frame.pack(fill=BOTH,expand=1) +label = tkinter.Label(frame, text="Hello, World") +label.pack(fill=X, expand=1) +button = tkinter.Button(frame,text="Exit",command=tk.destroy) +button.pack(side=BOTTOM) +tk.mainloop() +""" import _tkinter import sys from _typeshed import Incomplete, MaybeNone, StrOrBytesPath @@ -193,6 +224,8 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 11): class EventType(StrEnum): + """An enumeration. +""" Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -235,6 +268,8 @@ if sys.version_info >= (3, 11): else: class EventType(str, Enum): + """An enumeration. +""" Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -280,6 +315,47 @@ _W = TypeVar("_W", bound=Misc) _W_co = TypeVar("_W_co", covariant=True, bound=Misc, default=Misc) class Event(Generic[_W_co]): + """Container for the properties of an event. + +Instances of this type are generated if one of the following events occurs: + +KeyPress, KeyRelease - for keyboard events +ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events +Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate, +Colormap, Gravity, Reparent, Property, Destroy, Activate, +Deactivate - for window events. + +If a callback function for one of these events is registered +using bind, bind_all, bind_class, or tag_bind, the callback is +called with an Event as first argument. 
It will have the +following attributes (in braces are the event types for which +the attribute is valid): + + serial - serial number of event +num - mouse button pressed (ButtonPress, ButtonRelease) +focus - whether the window has the focus (Enter, Leave) +height - height of the exposed window (Configure, Expose) +width - width of the exposed window (Configure, Expose) +keycode - keycode of the pressed key (KeyPress, KeyRelease) +state - state of the event as a number (ButtonPress, ButtonRelease, + Enter, KeyPress, KeyRelease, + Leave, Motion) +state - state as a string (Visibility) +time - when the event occurred +x - x-position of the mouse +y - y-position of the mouse +x_root - x-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) +y_root - y-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) +char - pressed character (KeyPress, KeyRelease) +send_event - see X/Windows documentation +keysym - keysym of the event as a string (KeyPress, KeyRelease) +keysym_num - keysym of the event as a number (KeyPress, KeyRelease) +type - type of the event as a number +widget - widget in which the event occurred +delta - delta of wheel movement (MouseWheel) +""" serial: int num: int focus: bool @@ -300,67 +376,247 @@ class Event(Generic[_W_co]): widget: _W_co delta: int if sys.version_info >= (3, 14): - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type -def NoDefaultRoot() -> None: ... +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" + +def NoDefaultRoot() -> None: + """Inhibit setting of default root window. + +Call this function to inhibit that the first instance of +Tk is used for windows without an explicit parent window. +""" class Variable: - def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: ... - def set(self, value) -> None: ... + """Class to define value holders for e.g. buttons. + +Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations +that constrain the type of the value returned from get(). +""" + def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: + """Construct a variable + +MASTER can be given as master widget. +VALUE is an optional value (defaults to "") +NAME is an optional Tcl name (defaults to PY_VARnum). + +If NAME matches an existing variable and VALUE is omitted +then the existing value is retained. +""" + def set(self, value) -> None: + """Set the variable to VALUE. +""" initialize = set - def get(self): ... - def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: ... - def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: ... - def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: ... + def get(self): + """Return value of variable. +""" + def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: + """Define a trace callback for the variable. + +Mode is one of "read", "write", "unset", or a list or tuple of +such strings. +Callback must be a function which is called when the variable is +read, written or unset. + +Return the name of the callback. 
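+
+A small illustrative sketch (assumes a Tk root window already exists, so
+the variable can find a default master):
+
+    var = StringVar()
+    name = var.trace_add("write", lambda *args: print("value changed"))
+    var.set("hello")          # triggers the callback
+    var.trace_remove("write", name)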
+""" + def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: + """Delete the trace callback for a variable. + +Mode is one of "read", "write", "unset" or a list or tuple of +such strings. Must be same as were specified in trace_add(). +cbname is the name of the callback returned from trace_add(). +""" + def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: + """Return all trace callback information. +""" if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") - def trace(self, mode, callback) -> str: ... + def trace(self, mode, callback) -> str: + """Define a trace callback for the variable. + +MODE is one of "r", "w", "u" for read, write, undefine. +CALLBACK must be a function which is called when +the variable is read, written or undefined. + +Return the name of the callback. + +This deprecated method wraps a deprecated Tcl method removed +in Tcl 9.0. Use trace_add() instead. +""" @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") - def trace_variable(self, mode, callback) -> str: ... + def trace_variable(self, mode, callback) -> str: + """Define a trace callback for the variable. + +MODE is one of "r", "w", "u" for read, write, undefine. +CALLBACK must be a function which is called when +the variable is read, written or undefined. + +Return the name of the callback. + +This deprecated method wraps a deprecated Tcl method removed +in Tcl 9.0. Use trace_add() instead. +""" @deprecated("Deprecated since Python 3.14. Use `trace_remove()` instead.") - def trace_vdelete(self, mode, cbname) -> None: ... + def trace_vdelete(self, mode, cbname) -> None: + """Delete the trace callback for a variable. + +MODE is one of "r", "w", "u" for read, write, undefine. +CBNAME is the name of the callback returned from trace_variable or trace. + +This deprecated method wraps a deprecated Tcl method removed +in Tcl 9.0. Use trace_remove() instead. +""" @deprecated("Deprecated since Python 3.14. Use `trace_info()` instead.") - def trace_vinfo(self) -> list[Incomplete]: ... + def trace_vinfo(self) -> list[Incomplete]: + """Return all trace callback information. + +This deprecated method wraps a deprecated Tcl method removed +in Tcl 9.0. Use trace_info() instead. +""" else: - def trace(self, mode, callback) -> str: ... - def trace_variable(self, mode, callback) -> str: ... - def trace_vdelete(self, mode, cbname) -> None: ... - def trace_vinfo(self) -> list[Incomplete]: ... + def trace(self, mode, callback) -> str: + """Define a trace callback for the variable. + +MODE is one of "r", "w", "u" for read, write, undefine. +CALLBACK must be a function which is called when +the variable is read, written or undefined. + +Return the name of the callback. + +This deprecated method wraps a deprecated Tcl method that will +likely be removed in the future. Use trace_add() instead. +""" + def trace_variable(self, mode, callback) -> str: + """Define a trace callback for the variable. + +MODE is one of "r", "w", "u" for read, write, undefine. +CALLBACK must be a function which is called when +the variable is read, written or undefined. + +Return the name of the callback. + +This deprecated method wraps a deprecated Tcl method that will +likely be removed in the future. Use trace_add() instead. +""" + def trace_vdelete(self, mode, cbname) -> None: + """Delete the trace callback for a variable. + +MODE is one of "r", "w", "u" for read, write, undefine. 
+CBNAME is the name of the callback returned from trace_variable or trace. + +This deprecated method wraps a deprecated Tcl method that will +likely be removed in the future. Use trace_remove() instead. +""" + def trace_vinfo(self) -> list[Incomplete]: + """Return all trace callback information. + +This deprecated method wraps a deprecated Tcl method that will +likely be removed in the future. Use trace_info() instead. +""" def __eq__(self, other: object) -> bool: ... - def __del__(self) -> None: ... + def __del__(self) -> None: + """Unset the variable in Tcl. +""" __hash__: ClassVar[None] # type: ignore[assignment] class StringVar(Variable): - def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... - def set(self, value: str) -> None: ... + """Value holder for strings variables. +""" + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: + """Construct a string variable. + +MASTER can be given as master widget. +VALUE is an optional value (defaults to "") +NAME is an optional Tcl name (defaults to PY_VARnum). + +If NAME matches an existing variable and VALUE is omitted +then the existing value is retained. +""" + def set(self, value: str) -> None: + """Set the variable to VALUE. +""" initialize = set - def get(self) -> str: ... + def get(self) -> str: + """Return value of variable as string. +""" class IntVar(Variable): - def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... - def set(self, value: int) -> None: ... + """Value holder for integer variables. +""" + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: + """Construct an integer variable. + +MASTER can be given as master widget. +VALUE is an optional value (defaults to 0) +NAME is an optional Tcl name (defaults to PY_VARnum). + +If NAME matches an existing variable and VALUE is omitted +then the existing value is retained. +""" + def set(self, value: int) -> None: + """Set the variable to VALUE. +""" initialize = set - def get(self) -> int: ... + def get(self) -> int: + """Return the value of the variable as an integer. +""" class DoubleVar(Variable): - def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... - def set(self, value: float) -> None: ... + """Value holder for float variables. +""" + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: + """Construct a float variable. + +MASTER can be given as master widget. +VALUE is an optional value (defaults to 0.0) +NAME is an optional Tcl name (defaults to PY_VARnum). + +If NAME matches an existing variable and VALUE is omitted +then the existing value is retained. +""" + def set(self, value: float) -> None: + """Set the variable to VALUE. +""" initialize = set - def get(self) -> float: ... + def get(self) -> float: + """Return the value of the variable as a float. +""" class BooleanVar(Variable): - def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... - def set(self, value: bool) -> None: ... + """Value holder for boolean variables. +""" + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: + """Construct a boolean variable. + +MASTER can be given as master widget. 
+VALUE is an optional value (defaults to False) +NAME is an optional Tcl name (defaults to PY_VARnum). + +If NAME matches an existing variable and VALUE is omitted +then the existing value is retained. +""" + def set(self, value: bool) -> None: + """Set the variable to VALUE. +""" initialize = set - def get(self) -> bool: ... + def get(self) -> bool: + """Return the value of the variable as a bool. +""" -def mainloop(n: int = 0) -> None: ... +def mainloop(n: int = 0) -> None: + """Run the main loop of Tcl. +""" getint = int getdouble = float -def getboolean(s) -> bool: ... +def getboolean(s) -> bool: + """Convert Tcl object to True or False. +""" _Ts = TypeVarTuple("_Ts") @@ -376,140 +632,551 @@ class _BusyInfo(TypedDict): cursor: _Cursor class Misc: + """Internal class. + +Base class which defines methods common for interior widgets. +""" master: Misc | None tk: _tkinter.TkappType children: dict[str, Widget] - def destroy(self) -> None: ... - def deletecommand(self, name: str) -> None: ... - def tk_strictMotif(self, boolean=None): ... - def tk_bisque(self) -> None: ... - def tk_setPalette(self, *args, **kw) -> None: ... - def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... + def destroy(self) -> None: + """Internal function. + +Delete all Tcl commands created for +this widget in the Tcl interpreter. +""" + def deletecommand(self, name: str) -> None: + """Internal function. + +Delete the Tcl command provided in NAME. +""" + def tk_strictMotif(self, boolean=None): + """Set Tcl internal variable, whether the look and feel +should adhere to Motif. + +A parameter of 1 means adhere to Motif (e.g. no color +change if mouse passes over slider). +Returns the set value. +""" + def tk_bisque(self) -> None: + """Change the color scheme to light brown as used in Tk 3.6 and before. +""" + def tk_setPalette(self, *args, **kw) -> None: + """Set a new color scheme for all widget elements. + +A single color as argument will cause that all colors of Tk +widget elements are derived from this. +Alternatively several keyword parameters and its associated +colors can be given. The following keywords are valid: +activeBackground, foreground, selectColor, +activeForeground, highlightBackground, selectBackground, +background, highlightColor, selectForeground, +disabledForeground, insertBackground, troughColor. +""" + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: + """Wait until the variable is modified. + +A parameter of type IntVar, StringVar, DoubleVar or +BooleanVar must be given. +""" waitvar = wait_variable - def wait_window(self, window: Misc | None = None) -> None: ... - def wait_visibility(self, window: Misc | None = None) -> None: ... - def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... - def getvar(self, name: str = "PY_VAR"): ... + def wait_window(self, window: Misc | None = None) -> None: + """Wait until a WIDGET is destroyed. + +If no parameter is given self is used. +""" + def wait_visibility(self, window: Misc | None = None) -> None: + """Wait until the visibility of a WIDGET changes +(e.g. it appears). + +If no parameter is given self is used. +""" + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: + """Set Tcl variable NAME to VALUE. +""" + def getvar(self, name: str = "PY_VAR"): + """Return value of Tcl variable NAME. +""" def getint(self, s) -> int: ... def getdouble(self, s) -> float: ... - def getboolean(self, s) -> bool: ... - def focus_set(self) -> None: ... 
+ def getboolean(self, s) -> bool: + """Return a boolean value for Tcl boolean values true and false given as parameter. +""" + def focus_set(self) -> None: + """Direct input focus to this widget. + +If the application currently does not have the focus +this widget will get the focus if the application gets +the focus through the window manager. +""" focus = focus_set - def focus_force(self) -> None: ... - def focus_get(self) -> Misc | None: ... - def focus_displayof(self) -> Misc | None: ... - def focus_lastfor(self) -> Misc | None: ... - def tk_focusFollowsMouse(self) -> None: ... - def tk_focusNext(self) -> Misc | None: ... - def tk_focusPrev(self) -> Misc | None: ... + def focus_force(self) -> None: + """Direct input focus to this widget even if the +application does not have the focus. Use with +caution! +""" + def focus_get(self) -> Misc | None: + """Return the widget which has currently the focus in the +application. + +Use focus_displayof to allow working with several +displays. Return None if application does not have +the focus. +""" + def focus_displayof(self) -> Misc | None: + """Return the widget which has currently the focus on the +display where this widget is located. + +Return None if the application does not have the focus. +""" + def focus_lastfor(self) -> Misc | None: + """Return the widget which would have the focus if top level +for this widget gets the focus from the window manager. +""" + def tk_focusFollowsMouse(self) -> None: + """The widget under mouse will get automatically focus. Can not +be disabled easily. +""" + def tk_focusNext(self) -> Misc | None: + """Return the next widget in the focus order which follows +widget which has currently the focus. + +The focus order first goes to the next child, then to +the children of the child recursively and then to the +next sibling which is higher in the stacking order. A +widget is omitted if it has the takefocus resource set +to 0. +""" + def tk_focusPrev(self) -> Misc | None: + """Return previous widget in the focus order. See tk_focusNext for details. +""" # .after() can be called without the "func" argument, but it is basically never what you want. # It behaves like time.sleep() and freezes the GUI app. - def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... + def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: + """Call function once after given time. + +MS specifies the time in milliseconds. FUNC gives the +function which shall be called. Additional parameters +are given as parameters to the function call. Return +identifier to cancel scheduling with after_cancel. +""" # after_idle is essentially partialmethod(after, "idle") - def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: ... - def after_cancel(self, id: str) -> None: ... + def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: + """Call FUNC once if the Tcl main loop has no event to +process. + +Return an identifier to cancel the scheduling with +after_cancel. +""" + def after_cancel(self, id: str) -> None: + """Cancel scheduling of function identified with ID. + +Identifier returned by after or after_idle must be +given as first parameter. +""" if sys.version_info >= (3, 13): - def after_info(self, id: str | None = None) -> tuple[str, ...]: ... + def after_info(self, id: str | None = None) -> tuple[str, ...]: + """Return information about existing event handlers. 
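+
+An illustrative sketch (assumes Python 3.13+ and an existing widget ``w``;
+the delay and callback are arbitrary examples):
+
+    ident = w.after(1000, print, "tick")
+    script, kind = w.after_info(ident)   # kind is 'timer' here
+    w.after_cancel(ident)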
+ +With no argument, return a tuple of the identifiers for all existing +event handlers created by the after and after_idle commands for this +interpreter. If id is supplied, it specifies an existing handler; id +must have been the return value from some previous call to after or +after_idle and it must not have triggered yet or been canceled. If the +id doesn't exist, a TclError is raised. Otherwise, the return value is +a tuple containing (script, type) where script is a reference to the +function to be called by the event handler and type is either 'idle' +or 'timer' to indicate what kind of event handler it is. +""" - def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: + """Ring a display's bell. +""" if sys.version_info >= (3, 13): # Supports options from `_BusyInfo`` - def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: ... + def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: + """Return the value of busy configuration option. + +The widget must have been previously made busy by +tk_busy_hold(). Option may have any of the values accepted by +tk_busy_hold(). +""" busy_cget = tk_busy_cget - def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: ... + def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: + """Query or modify the busy configuration options. + +The widget must have been previously made busy by +tk_busy_hold(). Options may have any of the values accepted by +tk_busy_hold(). + +Please note that the option database is referenced by the widget +name or class. For example, if a Frame widget with name "frame" +is to be made busy, the busy cursor can be specified for it by +either call: + + w.option_add('*frame.busyCursor', 'gumby') + w.option_add('*Frame.BusyCursor', 'gumby') +""" tk_busy_config = tk_busy_configure busy_configure = tk_busy_configure busy_config = tk_busy_configure - def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: ... + def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: + """Return a list of widgets that are currently busy. + +If a pattern is given, only busy widgets whose path names match +a pattern are returned. +""" busy_current = tk_busy_current - def tk_busy_forget(self) -> None: ... + def tk_busy_forget(self) -> None: + """Make this widget no longer busy. + +User events will again be received by the widget. +""" busy_forget = tk_busy_forget - def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: ... + def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: + """Make this widget appear busy. + +The specified widget and its descendants will be blocked from +user interactions. Normally update() should be called +immediately afterward to insure that the hold operation is in +effect before the application starts its processing. + +The only supported configuration option is: + + cursor: the cursor to be displayed when the widget is made + busy. +""" tk_busy = tk_busy_hold busy_hold = tk_busy_hold busy = tk_busy_hold - def tk_busy_status(self) -> bool: ... + def tk_busy_status(self) -> bool: + """Return True if the widget is busy, False otherwise. +""" busy_status = tk_busy_status - def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... - def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... - def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... - def grab_current(self): ... 
- def grab_release(self) -> None: ... - def grab_set(self) -> None: ... - def grab_set_global(self) -> None: ... - def grab_status(self) -> Literal["local", "global"] | None: ... + def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: + """Retrieve data from the clipboard on window's display. + +The window keyword defaults to the root window of the Tkinter +application. + +The type keyword specifies the form in which the data is +to be returned and should be an atom name such as STRING +or FILE_NAME. Type defaults to STRING, except on X11, where the default +is to try UTF8_STRING and fall back to STRING. + +This command is equivalent to: + +selection_get(CLIPBOARD) +""" + def clipboard_clear(self, *, displayof: Misc = ...) -> None: + """Clear the data in the Tk clipboard. + +A widget specified for the optional displayof keyword +argument specifies the target display. +""" + def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: + """Append STRING to the Tk clipboard. + +A widget specified at the optional displayof keyword +argument specifies the target display. The clipboard +can be retrieved with selection_get. +""" + def grab_current(self): + """Return widget which has currently the grab in this application +or None. +""" + def grab_release(self) -> None: + """Release grab for this widget if currently set. +""" + def grab_set(self) -> None: + """Set grab for this widget. + +A grab directs all events to this and descendant +widgets in the application. +""" + def grab_set_global(self) -> None: + """Set global grab for this widget. + +A global grab directs all events to this and +descendant widgets on the display. Use with caution - +other applications do not get events anymore. +""" + def grab_status(self) -> Literal["local", "global"] | None: + """Return None, "local" or "global" if this widget has +no, a local or a global grab. +""" def option_add( self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None - ) -> None: ... - def option_clear(self) -> None: ... - def option_get(self, name, className): ... - def option_readfile(self, fileName, priority=None) -> None: ... - def selection_clear(self, **kw) -> None: ... - def selection_get(self, **kw): ... - def selection_handle(self, command, **kw) -> None: ... - def selection_own(self, **kw) -> None: ... - def selection_own_get(self, **kw): ... - def send(self, interp, cmd, *args): ... - def lower(self, belowThis=None) -> None: ... - def tkraise(self, aboveThis=None) -> None: ... + ) -> None: + """Set a VALUE (second parameter) for an option +PATTERN (first parameter). + +An optional third parameter gives the numeric priority +(defaults to 80). +""" + def option_clear(self) -> None: + """Clear the option database. + +It will be reloaded if option_add is called. +""" + def option_get(self, name, className): + """Return the value for an option NAME for this widget +with CLASSNAME. + +Values with higher priority override lower values. +""" + def option_readfile(self, fileName, priority=None) -> None: + """Read file FILENAME into the option database. + +An optional second parameter gives the numeric +priority. +""" + def selection_clear(self, **kw) -> None: + """Clear the current X selection. +""" + def selection_get(self, **kw): + """Return the contents of the current X selection. + +A keyword parameter selection specifies the name of +the selection and defaults to PRIMARY. 
A keyword +parameter displayof specifies a widget on the display +to use. A keyword parameter type specifies the form of data to be +fetched, defaulting to STRING except on X11, where UTF8_STRING is tried +before STRING. +""" + def selection_handle(self, command, **kw) -> None: + """Specify a function COMMAND to call if the X +selection owned by this widget is queried by another +application. + +This function must return the contents of the +selection. The function will be called with the +arguments OFFSET and LENGTH which allows the chunking +of very long selections. The following keyword +parameters can be provided: +selection - name of the selection (default PRIMARY), +type - type of the selection (e.g. STRING, FILE_NAME). +""" + def selection_own(self, **kw) -> None: + """Become owner of X selection. + +A keyword parameter selection specifies the name of +the selection (default PRIMARY). +""" + def selection_own_get(self, **kw): + """Return owner of X selection. + +The following keyword parameter can +be provided: +selection - name of the selection (default PRIMARY), +type - type of the selection (e.g. STRING, FILE_NAME). +""" + def send(self, interp, cmd, *args): + """Send Tcl command CMD to different interpreter INTERP to be executed. +""" + def lower(self, belowThis=None) -> None: + """Lower this widget in the stacking order. +""" + def tkraise(self, aboveThis=None) -> None: + """Raise this widget in the stacking order. +""" lift = tkraise if sys.version_info >= (3, 11): - def info_patchlevel(self) -> _VersionInfoType: ... - - def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... - def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... - def winfo_cells(self) -> int: ... - def winfo_children(self) -> list[Widget | Toplevel]: ... - def winfo_class(self) -> str: ... - def winfo_colormapfull(self) -> bool: ... - def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... - def winfo_depth(self) -> int: ... - def winfo_exists(self) -> bool: ... - def winfo_fpixels(self, number: float | str) -> float: ... - def winfo_geometry(self) -> str: ... - def winfo_height(self) -> int: ... - def winfo_id(self) -> int: ... - def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... - def winfo_ismapped(self) -> bool: ... - def winfo_manager(self) -> str: ... - def winfo_name(self) -> str: ... - def winfo_parent(self) -> str: ... # return value needs nametowidget() - def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... - def winfo_pixels(self, number: float | str) -> int: ... - def winfo_pointerx(self) -> int: ... - def winfo_pointerxy(self) -> tuple[int, int]: ... - def winfo_pointery(self) -> int: ... - def winfo_reqheight(self) -> int: ... - def winfo_reqwidth(self) -> int: ... - def winfo_rgb(self, color: str) -> tuple[int, int, int]: ... - def winfo_rootx(self) -> int: ... - def winfo_rooty(self) -> int: ... - def winfo_screen(self) -> str: ... - def winfo_screencells(self) -> int: ... - def winfo_screendepth(self) -> int: ... - def winfo_screenheight(self) -> int: ... - def winfo_screenmmheight(self) -> int: ... - def winfo_screenmmwidth(self) -> int: ... - def winfo_screenvisual(self) -> str: ... - def winfo_screenwidth(self) -> int: ... - def winfo_server(self) -> str: ... - def winfo_toplevel(self) -> Tk | Toplevel: ... - def winfo_viewable(self) -> bool: ... - def winfo_visual(self) -> str: ... 
- def winfo_visualid(self) -> str: ... - def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... - def winfo_vrootheight(self) -> int: ... - def winfo_vrootwidth(self) -> int: ... - def winfo_vrootx(self) -> int: ... - def winfo_vrooty(self) -> int: ... - def winfo_width(self) -> int: ... - def winfo_x(self) -> int: ... - def winfo_y(self) -> int: ... - def update(self) -> None: ... - def update_idletasks(self) -> None: ... - @overload - def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... + def info_patchlevel(self) -> _VersionInfoType: + """Returns the exact version of the Tcl library. +""" + + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: + """Return integer which represents atom NAME. +""" + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: + """Return name of atom with identifier ID. +""" + def winfo_cells(self) -> int: + """Return number of cells in the colormap for this widget. +""" + def winfo_children(self) -> list[Widget | Toplevel]: + """Return a list of all widgets which are children of this widget. +""" + def winfo_class(self) -> str: + """Return window class name of this widget. +""" + def winfo_colormapfull(self) -> bool: + """Return True if at the last color request the colormap was full. +""" + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: + """Return the widget which is at the root coordinates ROOTX, ROOTY. +""" + def winfo_depth(self) -> int: + """Return the number of bits per pixel. +""" + def winfo_exists(self) -> bool: + """Return true if this widget exists. +""" + def winfo_fpixels(self, number: float | str) -> float: + """Return the number of pixels for the given distance NUMBER +(e.g. "3c") as float. +""" + def winfo_geometry(self) -> str: + """Return geometry string for this widget in the form "widthxheight+X+Y". +""" + def winfo_height(self) -> int: + """Return height of this widget. +""" + def winfo_id(self) -> int: + """Return identifier ID for this widget. +""" + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: + """Return the name of all Tcl interpreters for this display. +""" + def winfo_ismapped(self) -> bool: + """Return true if this widget is mapped. +""" + def winfo_manager(self) -> str: + """Return the window manager name for this widget. +""" + def winfo_name(self) -> str: + """Return the name of this widget. +""" + def winfo_parent(self) -> str: # return value needs nametowidget() + """Return the name of the parent of this widget. +""" + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): + """Return the pathname of the widget given by ID. +""" + def winfo_pixels(self, number: float | str) -> int: + """Rounded integer value of winfo_fpixels. +""" + def winfo_pointerx(self) -> int: + """Return the x coordinate of the pointer on the root window. +""" + def winfo_pointerxy(self) -> tuple[int, int]: + """Return a tuple of x and y coordinates of the pointer on the root window. +""" + def winfo_pointery(self) -> int: + """Return the y coordinate of the pointer on the root window. +""" + def winfo_reqheight(self) -> int: + """Return requested height of this widget. +""" + def winfo_reqwidth(self) -> int: + """Return requested width of this widget. +""" + def winfo_rgb(self, color: str) -> tuple[int, int, int]: + """Return a tuple of integer RGB values in range(65536) for color in this widget. 
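+
+For example (illustrative; assumes an existing widget ``w`` on a typical
+true-color display):
+
+    w.winfo_rgb("red")     # -> (65535, 0, 0)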
+""" + def winfo_rootx(self) -> int: + """Return x coordinate of upper left corner of this widget on the +root window. +""" + def winfo_rooty(self) -> int: + """Return y coordinate of upper left corner of this widget on the +root window. +""" + def winfo_screen(self) -> str: + """Return the screen name of this widget. +""" + def winfo_screencells(self) -> int: + """Return the number of the cells in the colormap of the screen +of this widget. +""" + def winfo_screendepth(self) -> int: + """Return the number of bits per pixel of the root window of the +screen of this widget. +""" + def winfo_screenheight(self) -> int: + """Return the number of pixels of the height of the screen of this widget +in pixel. +""" + def winfo_screenmmheight(self) -> int: + """Return the number of pixels of the height of the screen of +this widget in mm. +""" + def winfo_screenmmwidth(self) -> int: + """Return the number of pixels of the width of the screen of +this widget in mm. +""" + def winfo_screenvisual(self) -> str: + """Return one of the strings directcolor, grayscale, pseudocolor, +staticcolor, staticgray, or truecolor for the default +colormodel of this screen. +""" + def winfo_screenwidth(self) -> int: + """Return the number of pixels of the width of the screen of +this widget in pixel. +""" + def winfo_server(self) -> str: + """Return information of the X-Server of the screen of this widget in +the form "XmajorRminor vendor vendorVersion". +""" + def winfo_toplevel(self) -> Tk | Toplevel: + """Return the toplevel widget of this widget. +""" + def winfo_viewable(self) -> bool: + """Return true if the widget and all its higher ancestors are mapped. +""" + def winfo_visual(self) -> str: + """Return one of the strings directcolor, grayscale, pseudocolor, +staticcolor, staticgray, or truecolor for the +colormodel of this widget. +""" + def winfo_visualid(self) -> str: + """Return the X identifier for the visual for this widget. +""" + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: + """Return a list of all visuals available for the screen +of this widget. + +Each item in the list consists of a visual name (see winfo_visual), a +depth and if includeids is true is given also the X identifier. +""" + def winfo_vrootheight(self) -> int: + """Return the height of the virtual root window associated with this +widget in pixels. If there is no virtual root window return the +height of the screen. +""" + def winfo_vrootwidth(self) -> int: + """Return the width of the virtual root window associated with this +widget in pixel. If there is no virtual root window return the +width of the screen. +""" + def winfo_vrootx(self) -> int: + """Return the x offset of the virtual root relative to the root +window of the screen of this widget. +""" + def winfo_vrooty(self) -> int: + """Return the y offset of the virtual root relative to the root +window of the screen of this widget. +""" + def winfo_width(self) -> int: + """Return the width of this widget. +""" + def winfo_x(self) -> int: + """Return the x coordinate of the upper left corner of this widget +in the parent. +""" + def winfo_y(self) -> int: + """Return the y coordinate of the upper left corner of this widget +in the parent. +""" + def update(self) -> None: + """Enter event loop until all pending events have been processed by Tcl. +""" + def update_idletasks(self) -> None: + """Enter event loop until all idle callbacks have been called. This +will update the display of windows but not process events caused by +the user. 
+""" + @overload + def bindtags(self, tagList: None = None) -> tuple[str, ...]: + """Set or get the list of bindtags for this widget. + +With no argument return the list of all bindtags associated with +this widget. With a list of strings as argument the bindtags are +set to this list. The bindtags determine in which order events are +processed (see bind). +""" @overload def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... # bind with isinstance(func, str) doesn't return anything, but all other @@ -520,7 +1187,45 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: ... + ) -> str: + """Bind to this widget at event SEQUENCE a call to function FUNC. + +SEQUENCE is a string of concatenated event +patterns. An event pattern is of the form + where MODIFIER is one +of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, +Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, +B3, Alt, Button4, B4, Double, Button5, B5 Triple, +Mod1, M1. TYPE is one of Activate, Enter, Map, +ButtonPress, Button, Expose, Motion, ButtonRelease +FocusIn, MouseWheel, Circulate, FocusOut, Property, +Colormap, Gravity Reparent, Configure, KeyPress, Key, +Unmap, Deactivate, KeyRelease Visibility, Destroy, +Leave and DETAIL is the button number for ButtonPress, +ButtonRelease and DETAIL is the Keysym for KeyPress and +KeyRelease. Examples are + for pressing Control and mouse button 1 or + for pressing A and the Alt key (KeyPress can be omitted). +An event pattern can also be a virtual event of the form +<> where AString can be arbitrary. This +event can be generated by event_generate. +If events are concatenated they must appear shortly +after each other. + +FUNC will be called if the event sequence occurs with an +instance of Event as argument. If the return value of FUNC is +"break" no further bound function is invoked. + +An additional boolean parameter ADD specifies whether FUNC will +be called additionally to the other bound function or whether +it will replace the previous function. + +Bind will return an identifier to allow deletion of the bound function with +unbind without memory leak. + +If FUNC or SEQUENCE is omitted the bound function or list +of bound events are returned. +""" @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -533,7 +1238,12 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: ... + ) -> str: + """Bind to all widgets at an event SEQUENCE a call to function FUNC. +An additional boolean parameter ADD specifies whether FUNC will +be called additionally to the other bound function or whether +it will replace the previous function. See bind for the return value. +""" @overload def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -545,34 +1255,92 @@ class Misc: sequence: str | None = None, func: Callable[[Event[Misc]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: ... + ) -> str: + """Bind to widgets with bindtag CLASSNAME at event +SEQUENCE a call of function FUNC. An additional +boolean parameter ADD specifies whether FUNC will be +called additionally to the other bound function or +whether it will replace the previous function. See bind for +the return value. 
+""" @overload def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def unbind(self, sequence: str, funcid: str | None = None) -> None: ... - def unbind_all(self, sequence: str) -> None: ... - def unbind_class(self, className: str, sequence: str) -> None: ... - def mainloop(self, n: int = 0) -> None: ... - def quit(self) -> None: ... + def unbind(self, sequence: str, funcid: str | None = None) -> None: + """Unbind for this widget the event SEQUENCE. + +If FUNCID is given, only unbind the function identified with FUNCID +and also delete the corresponding Tcl command. + +Otherwise destroy the current binding for SEQUENCE, leaving SEQUENCE +unbound. +""" + def unbind_all(self, sequence: str) -> None: + """Unbind for all widgets for event SEQUENCE all functions. +""" + def unbind_class(self, className: str, sequence: str) -> None: + """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE +all functions. +""" + def mainloop(self, n: int = 0) -> None: + """Call the mainloop of Tk. +""" + def quit(self) -> None: + """Quit the Tcl interpreter. All widgets will be destroyed. +""" @property - def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... - def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... + def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: + """Internal function. +""" + def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: + """Return the Tkinter instance of a widget identified by +its Tcl name NAME. +""" def register( self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 - ) -> str: ... - def keys(self) -> list[str]: ... - @overload - def pack_propagate(self, flag: bool) -> bool | None: ... + ) -> str: + """Return a newly created Tcl function. If this +function is called, the Python function FUNC will +be executed. An optional function SUBST can +be given which will be executed before FUNC. +""" + def keys(self) -> list[str]: + """Return a list of all resource names of this widget. +""" + @overload + def pack_propagate(self, flag: bool) -> bool | None: + """Set or get the status for propagation of geometry information. + +A boolean argument specifies whether the geometry information +of the slaves will determine the size of this widget. If no argument +is given the current setting will be returned. +""" @overload def pack_propagate(self) -> None: ... propagate = pack_propagate - def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: ... + def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: + """The anchor value controls how to place the grid within the +master when no row/column has any weight. + +The default anchor is nw. +""" anchor = grid_anchor @overload def grid_bbox( self, column: None = None, row: None = None, col2: None = None, row2: None = None - ) -> tuple[int, int, int, int] | None: ... + ) -> tuple[int, int, int, int] | None: + """Return a tuple of integer coordinates for the bounding +box of this widget controlled by the geometry manager grid. + +If COLUMN, ROW is given the bounding box applies from +the cell with row and column 0 to the specified +cell. If COL2 and ROW2 are given the bounding box +starts at that cell. 
+ +The returned integers specify the offset of the upper left +corner in the master widget and the width and height. +""" @overload def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... @overload @@ -587,7 +1355,13 @@ class Misc: pad: float | str = ..., uniform: str = ..., weight: int = ..., - ) -> _GridIndexInfo | MaybeNone: ... # can be None but annoying to check + ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check + """Configure column INDEX of a grid. + +Valid resources are minsize (minimum size of the column), +weight (how much does additional space propagate to this column) +and pad (how much space to let additionally). +""" def grid_rowconfigure( self, index: int | str | list[int] | tuple[int, ...], @@ -597,23 +1371,56 @@ class Misc: pad: float | str = ..., uniform: str = ..., weight: int = ..., - ) -> _GridIndexInfo | MaybeNone: ... # can be None but annoying to check + ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check + """Configure row INDEX of a grid. + +Valid resources are minsize (minimum size of the row), +weight (how much does additional space propagate to this row) +and pad (how much space to let additionally). +""" columnconfigure = grid_columnconfigure rowconfigure = grid_rowconfigure - def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: ... - @overload - def grid_propagate(self, flag: bool) -> None: ... + def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: + """Return a tuple of column and row which identify the cell +at which the pixel at position X and Y inside the master +widget is located. +""" + @overload + def grid_propagate(self, flag: bool) -> None: + """Set or get the status for propagation of geometry information. + +A boolean argument specifies whether the geometry information +of the slaves will determine the size of this widget. If no argument +is given, the current setting will be returned. +""" @overload def grid_propagate(self) -> bool: ... - def grid_size(self) -> tuple[int, int]: ... + def grid_size(self) -> tuple[int, int]: + """Return a tuple of the number of column and rows in the grid. +""" size = grid_size # Widget because Toplevel or Tk is never a slave - def pack_slaves(self) -> list[Widget]: ... - def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... - def place_slaves(self) -> list[Widget]: ... + def pack_slaves(self) -> list[Widget]: + """Return a list of all slaves of this widget +in its packing order. +""" + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: + """Return a list of all slaves of this widget +in its packing order. +""" + def place_slaves(self) -> list[Widget]: + """Return a list of all slaves of this widget +in its packing order. +""" slaves = pack_slaves - def event_add(self, virtual: str, *sequences: str) -> None: ... - def event_delete(self, virtual: str, *sequences: str) -> None: ... + def event_add(self, virtual: str, *sequences: str) -> None: + """Bind a virtual event VIRTUAL (of the form <>) +to an event SEQUENCE such that the virtual event is triggered +whenever SEQUENCE occurs. +""" + def event_delete(self, virtual: str, *sequences: str) -> None: + """Unbind a virtual event VIRTUAL from SEQUENCE. +""" def event_generate( self, sequence: str, @@ -645,44 +1452,94 @@ class Misc: when: Literal["now", "tail", "head", "mark"] = ..., x: float | str = ..., y: float | str = ..., - ) -> None: ... 
- def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... - def image_names(self) -> tuple[str, ...]: ... - def image_types(self) -> tuple[str, ...]: ... + ) -> None: + """Generate an event SEQUENCE. Additional +keyword arguments specify parameter of the event +(e.g. x, y, rootx, rooty). +""" + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: + """Return a list of all virtual events or the information +about the SEQUENCE bound to the virtual event VIRTUAL. +""" + def image_names(self) -> tuple[str, ...]: + """Return a list of all existing image names. +""" + def image_types(self) -> tuple[str, ...]: + """Return a list of all available image types (e.g. photo bitmap). +""" # See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... - def __getitem__(self, key: str) -> Any: ... - def cget(self, key: str) -> Any: ... - def configure(self, cnf: Any = None) -> Any: ... + def __getitem__(self, key: str) -> Any: + """Return the resource value for a KEY given as string. +""" + def cget(self, key: str) -> Any: + """Return the resource value for a KEY given as string. +""" + def configure(self, cnf: Any = None) -> Any: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" # TODO: config is an alias of configure, but adding that here creates # conflict with the type of config in the subclasses. See #13149 class CallWrapper: + """Internal class. Stores function to call when some user +defined Tcl function is called e.g. after an event occurred. +""" func: Incomplete subst: Incomplete widget: Incomplete - def __init__(self, func, subst, widget) -> None: ... - def __call__(self, *args): ... + def __init__(self, func, subst, widget) -> None: + """Store FUNC, SUBST and WIDGET as members. +""" + def __call__(self, *args): + """Apply first function SUBST to arguments, than FUNC. +""" class XView: + """Mix-in class for querying and changing the horizontal position +of a widget's window. +""" @overload - def xview(self) -> tuple[float, float]: ... + def xview(self) -> tuple[float, float]: + """Query and change the horizontal position of the view. +""" @overload def xview(self, *args) -> None: ... - def xview_moveto(self, fraction: float) -> None: ... + def xview_moveto(self, fraction: float) -> None: + """Adjusts the view in the window so that FRACTION of the +total width of the canvas is off-screen to the left. +""" @overload - def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... + def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: + """Shift the x-view according to NUMBER which is measured in "units" +or "pages" (WHAT). +""" @overload def xview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... class YView: + """Mix-in class for querying and changing the vertical position +of a widget's window. +""" @overload - def yview(self) -> tuple[float, float]: ... + def yview(self) -> tuple[float, float]: + """Query and change the vertical position of the view. +""" @overload def yview(self, *args) -> None: ... - def yview_moveto(self, fraction: float) -> None: ... + def yview_moveto(self, fraction: float) -> None: + """Adjusts the view in the window so that FRACTION of the +total height of the canvas is off-screen to the top. +""" @overload - def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... 
+ def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: + """Shift the y-view according to NUMBER which is measured in +"units" or "pages" (WHAT). +""" @overload def yview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... @@ -719,8 +1576,14 @@ else: type: str class Wm: + """Provides functions for the communication with the window manager. +""" @overload - def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... + def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: + """Instruct the window manager to set the aspect ratio (width/height) +of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple +of the actual values if no argument is given. +""" @overload def wm_aspect( self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None @@ -728,16 +1591,57 @@ class Wm: aspect = wm_aspect if sys.version_info >= (3, 13): @overload - def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: ... + def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, *, return_python_dict: Literal[True]) -> _WmAttributes: ... else: @overload - def wm_attributes(self) -> tuple[Any, ...]: ... + def wm_attributes(self) -> tuple[Any, ...]: + """This subcommand returns or sets platform specific attributes + + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: + + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. + """ @overload - def wm_attributes(self, option: Literal["-alpha"], /) -> float: ... + def wm_attributes(self, option: Literal["-alpha"], /) -> float: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-fullscreen"], /) -> bool: ... @overload @@ -763,12 +1667,36 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: ... 
+ def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-type"], /) -> str: ... if sys.version_info >= (3, 13): @overload - def wm_attributes(self, option: Literal["alpha"], /) -> float: ... + def wm_attributes(self, option: Literal["alpha"], /) -> float: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["fullscreen"], /) -> bool: ... @overload @@ -794,7 +1722,19 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["zoomed"], /) -> bool: ... + def wm_attributes(self, option: Literal["zoomed"], /) -> bool: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["type"], /) -> str: ... @@ -825,7 +1765,19 @@ class Wm: else: # X11 @overload - def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: ... + def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-type"], value: str, /) -> Literal[""]: ... @@ -862,104 +1814,241 @@ class Wm: @overload def wm_attributes( self, *, alpha: float = ..., topmost: bool = ..., zoomed: bool = ..., fullscreen: bool = ..., type: str = ... - ) -> None: ... + ) -> None: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. 
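# The Wm mix-in also wraps the usual window-manager requests (title, geometry,
# size limits, WM_DELETE_WINDOW handling), documented further below; a brief
# sketch of typical calls, with "root" and on_close as illustrative names only.
import tkinter

root = tkinter.Tk()
root.wm_title("Example")
root.wm_geometry("400x300+100+100")   # widthxheight+xoffset+yoffset
root.wm_minsize(200, 150)
root.wm_resizable(True, False)        # allow horizontal, forbid vertical resizing

def on_close() -> None:
    root.wm_withdraw()                # e.g. hide the window instead of quitting

root.wm_protocol("WM_DELETE_WINDOW", on_close)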
+When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" attributes = wm_attributes - def wm_client(self, name: str | None = None) -> str: ... + def wm_client(self, name: str | None = None) -> str: + """Store NAME in WM_CLIENT_MACHINE property of this widget. Return +current value. +""" client = wm_client @overload - def wm_colormapwindows(self) -> list[Misc]: ... + def wm_colormapwindows(self) -> list[Misc]: + """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property +of this widget. This list contains windows whose colormaps differ from their +parents. Return current list of widgets if WLIST is empty. +""" @overload def wm_colormapwindows(self, wlist: list[Misc] | tuple[Misc, ...], /) -> None: ... @overload def wm_colormapwindows(self, first_wlist_item: Misc, /, *other_wlist_items: Misc) -> None: ... colormapwindows = wm_colormapwindows - def wm_command(self, value: str | None = None) -> str: ... + def wm_command(self, value: str | None = None) -> str: + """Store VALUE in WM_COMMAND property. It is the command +which shall be used to invoke the application. Return current +command if VALUE is None. +""" command = wm_command # Some of these always return empty string, but return type is set to None to prevent accidentally using it - def wm_deiconify(self) -> None: ... + def wm_deiconify(self) -> None: + """Deiconify this widget. If it was never mapped it will not be mapped. +On Windows it will raise this widget and give it the focus. +""" deiconify = wm_deiconify - def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: + """Set focus model to MODEL. "active" means that this widget will claim +the focus itself, "passive" means that the window manager shall give +the focus. Return current focus model if MODEL is None. +""" focusmodel = wm_focusmodel - def wm_forget(self, window: Wm) -> None: ... + def wm_forget(self, window: Wm) -> None: + """The window will be unmapped from the screen and will no longer +be managed by wm. toplevel windows will be treated like frame +windows once they are no longer managed by wm, however, the menu +option configuration will be remembered and the menus will return +once the widget is managed again. +""" forget = wm_forget - def wm_frame(self) -> str: ... + def wm_frame(self) -> str: + """Return identifier for decorative frame of this widget if present. +""" frame = wm_frame @overload - def wm_geometry(self, newGeometry: None = None) -> str: ... + def wm_geometry(self, newGeometry: None = None) -> str: + """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return +current value if None is given. +""" @overload def wm_geometry(self, newGeometry: str) -> None: ... geometry = wm_geometry - def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): ... + def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): + """Instruct the window manager that this widget shall only be +resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and +height of a grid unit in pixels. 
BASEWIDTH and BASEHEIGHT are the +number of grid units requested in Tk_GeometryRequest. +""" grid = wm_grid - def wm_group(self, pathName=None): ... + def wm_group(self, pathName=None): + """Set the group leader widgets for related widgets to PATHNAME. Return +the group leader of this widget if None is given. +""" group = wm_group - def wm_iconbitmap(self, bitmap=None, default=None): ... + def wm_iconbitmap(self, bitmap=None, default=None): + """Set bitmap for the iconified widget to BITMAP. Return +the bitmap if None is given. + +Under Windows, the DEFAULT parameter can be used to set the icon +for the widget and any descendants that don't have an icon set +explicitly. DEFAULT can be the relative path to a .ico file +(example: root.iconbitmap(default='myicon.ico') ). See Tk +documentation for more information. +""" iconbitmap = wm_iconbitmap - def wm_iconify(self) -> None: ... + def wm_iconify(self) -> None: + """Display widget as icon. +""" iconify = wm_iconify - def wm_iconmask(self, bitmap=None): ... + def wm_iconmask(self, bitmap=None): + """Set mask for the icon bitmap of this widget. Return the +mask if None is given. +""" iconmask = wm_iconmask - def wm_iconname(self, newName=None) -> str: ... + def wm_iconname(self, newName=None) -> str: + """Set the name of the icon for this widget. Return the name if +None is given. +""" iconname = wm_iconname - def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: ... + def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: + """Sets the titlebar icon for this window based on the named photo +images passed through args. If default is True, this is applied to +all future created toplevels as well. + +The data in the images is taken as a snapshot at the time of +invocation. If the images are later changed, this is not reflected +to the titlebar icons. Multiple images are accepted to allow +different images sizes to be provided. The window manager may scale +provided icons to an appropriate size. + +On Windows, the images are packed into a Windows icon structure. +This will override an icon specified to wm_iconbitmap, and vice +versa. + +On X, the images are arranged into the _NET_WM_ICON X property, +which most modern window managers support. An icon specified by +wm_iconbitmap may exist simultaneously. + +On Macintosh, this currently does nothing. +""" iconphoto = wm_iconphoto - def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... + def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: + """Set the position of the icon of this widget to X and Y. Return +a tuple of the current values of X and X if None is given. +""" iconposition = wm_iconposition - def wm_iconwindow(self, pathName=None): ... + def wm_iconwindow(self, pathName=None): + """Set widget PATHNAME to be displayed instead of icon. Return the current +value if None is given. +""" iconwindow = wm_iconwindow - def wm_manage(self, widget) -> None: ... + def wm_manage(self, widget) -> None: + """The widget specified will become a stand alone top-level window. +The window will be decorated with the window managers title bar, +etc. +""" manage = wm_manage @overload - def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: + """Set max WIDTH and HEIGHT for this widget. 
If the window is gridded +the values are given in grid units. Return the current values if None +is given. +""" @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload - def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: + """Set min WIDTH and HEIGHT for this widget. If the window is gridded +the values are given in grid units. Return the current values if None +is given. +""" @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @overload - def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None + def wm_overrideredirect(self, boolean: None = None) -> bool | None: # returns True or None + """Instruct the window manager to ignore this widget +if BOOLEAN is given with 1. Return the current value if None +is given. +""" @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect - def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: + """Instruct the window manager that the position of this widget shall +be defined by the user if WHO is "user", and by its own policy if WHO is +"program". +""" positionfrom = wm_positionfrom @overload - def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... + def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: + """Bind function FUNC to command NAME for this widget. +Return the function bound to NAME if None is given. NAME could be +e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW". +""" @overload def wm_protocol(self, name: str, func: None = None) -> str: ... @overload def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... protocol = wm_protocol @overload - def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: + """Instruct the window manager whether this width can be resized +in WIDTH or HEIGHT. Both values are boolean values. +""" @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable - def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: + """Instruct the window manager that the size of this widget shall +be defined by the user if WHO is "user", and by its own policy if WHO is +"program". +""" sizefrom = wm_sizefrom @overload - def wm_state(self, newstate: None = None) -> str: ... + def wm_state(self, newstate: None = None) -> str: + """Query or set the state of this widget as one of normal, icon, +iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only). +""" @overload def wm_state(self, newstate: str) -> None: ... state = wm_state @overload - def wm_title(self, string: None = None) -> str: ... + def wm_title(self, string: None = None) -> str: + """Set the title of this widget. +""" @overload def wm_title(self, string: str) -> None: ... title = wm_title @overload - def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... 
+ def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: + """Instruct the window manager that this widget is transient +with regard to widget MASTER. +""" @overload def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... transient = wm_transient - def wm_withdraw(self) -> None: ... + def wm_withdraw(self) -> None: + """Withdraw this widget from the screen such that it is unmapped +and forgotten by the window manager. Re-draw it with wm_deiconify. +""" withdraw = wm_withdraw class Tk(Misc, Wm): + """Toplevel widget of Tk which represents mostly the main window +of an application. It has an associated Tcl interpreter. +""" master: None def __init__( # Make sure to keep in sync with other functions that use the same @@ -972,7 +2061,13 @@ class Tk(Misc, Wm): useTk: bool = True, sync: bool = False, use: str | None = None, - ) -> None: ... + ) -> None: + """Return a new top level widget on screen SCREENNAME. A new Tcl interpreter will +be created. BASENAME will be used for the identification of the profile file (see +readprofile). +It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME +is the name of the widget class. +""" # Keep this in sync with ttktheme.ThemedTk. See issue #13858 @overload def configure( @@ -995,12 +2090,25 @@ class Tk(Misc, Wm): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def destroy(self) -> None: ... - def readprofile(self, baseName: str, className: str) -> None: ... + def destroy(self) -> None: + """Destroy this and all descendants widgets. This will +end the application of this Tcl interpreter. +""" + def readprofile(self, baseName: str, className: str) -> None: + """Internal function. It reads .BASENAME.tcl and .CLASSNAME.tcl into +the Tcl Interpreter and calls exec on the contents of .BASENAME.py and +.CLASSNAME.py if such a file exists in the home directory. +""" report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object] # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo # Please keep in sync with _tkinter.TkappType. @@ -1055,6 +2163,10 @@ class _PackInfo(_InMiscTotal): pady: int | tuple[int, int] class Pack: + """Geometry manager Pack. + +Base class to use the methods pack_* in every widget. +""" # _PackInfo is not the valid type for cnf because pad stuff accepts any # screen units instead of int only. I didn't bother to create another # TypedDict for cnf because it appears to be a legacy thing that was @@ -1075,9 +2187,29 @@ class Pack: pady: float | str | tuple[float | str, float | str] = ..., in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: ... - def pack_forget(self) -> None: ... - def pack_info(self) -> _PackInfo: ... # errors if widget hasn't been packed + ) -> None: + """Pack a widget in the parent widget. 
Use as options: +after=widget - pack it after you have packed widget +anchor=NSEW (or subset) - position widget according to + given direction +before=widget - pack it before you will pack widget +expand=bool - expand widget if parent size grows +fill=NONE or X or Y or BOTH - fill widget if widget grows +in=master - use master to contain this widget +in_=master - see 'in' option description +ipadx=amount - add internal padding in x direction +ipady=amount - add internal padding in y direction +padx=amount - add padding in x direction +pady=amount - add padding in y direction +side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget. +""" + def pack_forget(self) -> None: + """Unmap this widget and do not use it for the packing order. +""" + def pack_info(self) -> _PackInfo: # errors if widget hasn't been packed + """Return information about the packing options +for this widget. +""" pack = pack_configure forget = pack_forget propagate = Misc.pack_propagate @@ -1096,6 +2228,10 @@ class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed rely: str # can be float()ed if not empty string class Place: + """Geometry manager Place. + +Base class to use the methods place_* in every widget. +""" def place_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -1113,9 +2249,35 @@ class Place: rely: str | float = ..., in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: ... - def place_forget(self) -> None: ... - def place_info(self) -> _PlaceInfo: ... + ) -> None: + """Place a widget in the parent widget. Use as options: +in=master - master relative to which the widget is placed +in_=master - see 'in' option description +x=amount - locate anchor of this widget at position x of master +y=amount - locate anchor of this widget at position y of master +relx=amount - locate anchor of this widget between 0.0 and 1.0 + relative to width of master (1.0 is right edge) +rely=amount - locate anchor of this widget between 0.0 and 1.0 + relative to height of master (1.0 is bottom edge) +anchor=NSEW (or subset) - position anchor according to given direction +width=amount - width of this widget in pixel +height=amount - height of this widget in pixel +relwidth=amount - width of this widget between 0.0 and 1.0 + relative to width of master (1.0 is the same width + as the master) +relheight=amount - height of this widget between 0.0 and 1.0 + relative to height of master (1.0 is the same + height as the master) +bordermode="inside" or "outside" - whether to take border width of + master widget into account +""" + def place_forget(self) -> None: + """Unmap this widget. +""" + def place_info(self) -> _PlaceInfo: + """Return information about the placing options +for this widget. +""" place = place_configure info = place_info @@ -1132,6 +2294,10 @@ class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded sticky: str # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty class Grid: + """Geometry manager Grid. + +Base class to use the methods grid_* in every widget. +""" def grid_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -1147,22 +2313,55 @@ class Grid: sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 - ) -> None: ... - def grid_forget(self) -> None: ... - def grid_remove(self) -> None: ... - def grid_info(self) -> _GridInfo: ... + ) -> None: + """Position a widget in the parent widget in a grid. 
Use as options:
+column=number - use cell identified with given column (starting with 0)
+columnspan=number - this widget will span several columns
+in=master - use master to contain this widget
+in_=master - see 'in' option description
+ipadx=amount - add internal padding in x direction
+ipady=amount - add internal padding in y direction
+padx=amount - add padding in x direction
+pady=amount - add padding in y direction
+row=number - use cell identified with given row (starting with 0)
+rowspan=number - this widget will span several rows
+sticky=NSEW - if cell is larger on which sides will this
+ widget stick to the cell boundary
+"""
+ def grid_forget(self) -> None:
+ """Unmap this widget.
+"""
+ def grid_remove(self) -> None:
+ """Unmap this widget but remember the grid options.
+"""
+ def grid_info(self) -> _GridInfo:
+ """Return information about the options
+for positioning this widget in a grid.
+"""
grid = grid_configure
location = Misc.grid_location
size = Misc.grid_size
class BaseWidget(Misc):
+ """Internal class.
+"""
master: Misc
widgetName: str
- def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None: ...
- def destroy(self) -> None: ...
+ def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None:
+ """Construct a widget with the parent widget MASTER, a name WIDGETNAME
+and appropriate options.
+"""
+ def destroy(self) -> None:
+ """Destroy this and all descendants widgets.
+"""
# This class represents any widget except Toplevel or Tk.
class Widget(BaseWidget, Pack, Place, Grid):
+ """Internal class.
+
+Base class for a widget which can be positioned with the geometry managers
+Pack, Place or Grid.
+"""
# Allow bind callbacks to take e.g. Event[Label] instead of Event[Misc].
# Tk and Toplevel get notified for their child widgets' events, but other
# widgets don't.
@@ -1172,13 +2371,53 @@ class Widget(BaseWidget, Pack, Place, Grid):
sequence: str | None = None,
func: Callable[[Event[_W]], object] | None = None,
add: Literal["", "+"] | bool | None = None,
- ) -> str: ...
+ ) -> str:
+ """Bind to this widget at event SEQUENCE a call to function FUNC.
+
+SEQUENCE is a string of concatenated event
+patterns. An event pattern is of the form
+<MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
+of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
+Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
+B3, Alt, Button4, B4, Double, Button5, B5 Triple,
+Mod1, M1. TYPE is one of Activate, Enter, Map,
+ButtonPress, Button, Expose, Motion, ButtonRelease
+FocusIn, MouseWheel, Circulate, FocusOut, Property,
+Colormap, Gravity Reparent, Configure, KeyPress, Key,
+Unmap, Deactivate, KeyRelease Visibility, Destroy,
+Leave and DETAIL is the button number for ButtonPress,
+ButtonRelease and DETAIL is the Keysym for KeyPress and
+KeyRelease. Examples are
+<Control-Button-1> for pressing Control and mouse button 1 or
+<Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
+An event pattern can also be a virtual event of the form
+<<AString>> where AString can be arbitrary. This
+event can be generated by event_generate.
+If events are concatenated they must appear shortly
+after each other.
+
+FUNC will be called if the event sequence occurs with an
+instance of Event as argument. If the return value of FUNC is
+"break" no further bound function is invoked.
+
+An additional boolean parameter ADD specifies whether FUNC will
+be called additionally to the other bound function or whether
+it will replace the previous function.
+ +Bind will return an identifier to allow deletion of the bound function with +unbind without memory leak. + +If FUNC or SEQUENCE is omitted the bound function or list +of bound events are returned. +""" @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... class Toplevel(BaseWidget, Wm): + """Toplevel widget, e.g. for dialogs. +""" # Toplevel and Tk have the same options because they correspond to the same # Tcl/Tk toplevel widget. For some reason, config and configure must be # copy/pasted here instead of aliasing as 'config = Tk.config'. @@ -1210,7 +2449,14 @@ class Toplevel(BaseWidget, Wm): use: int = ..., visual: str | tuple[str, int] = "", width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a toplevel widget with the parent MASTER. + +Valid resource names: background, bd, bg, borderwidth, class, +colormap, container, cursor, height, highlightbackground, +highlightcolor, highlightthickness, menu, relief, screen, takefocus, +use, visual, width. +""" @overload def configure( self, @@ -1232,12 +2478,20 @@ class Toplevel(BaseWidget, Wm): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Button(Widget): + """Button widget. +""" def __init__( self, master: Misc | None = None, @@ -1285,7 +2539,25 @@ class Button(Widget): underline: int = -1, width: float | str = 0, wraplength: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a button widget with the parent MASTER. + +STANDARD OPTIONS + + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, repeatdelay, + repeatinterval, takefocus, text, + textvariable, underline, wraplength + +WIDGET-SPECIFIC OPTIONS + + command, compound, default, height, + overrelief, state, width +""" @overload def configure( self, @@ -1327,14 +2599,38 @@ class Button(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def flash(self) -> None: ... - def invoke(self) -> Any: ... + def flash(self) -> None: + """Flash the button. + +This is accomplished by redisplaying +the button several times, alternating between active and +normal colors. At the end of the flash the button is left +in the same normal/active state as when the command was +invoked. This command is ignored if the button's state is +disabled. 
+""" + def invoke(self) -> Any: + """Invoke the command associated with the button. + +The return value is the return value from the command, +or an empty string if there is no command associated with +the button. This command is ignored if the button's state +is disabled. +""" class Canvas(Widget, XView, YView): + """Canvas widget to display graphical elements like lines or text. +""" def __init__( self, master: Misc | None = None, @@ -1374,7 +2670,17 @@ class Canvas(Widget, XView, YView): xscrollincrement: float | str = 0, yscrollcommand: str | Callable[[float, float], object] = "", yscrollincrement: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a canvas widget with the parent MASTER. + +Valid resource names: background, bd, bg, borderwidth, closeenough, +confine, cursor, height, highlightbackground, highlightcolor, +highlightthickness, insertbackground, insertborderwidth, +insertofftime, insertontime, insertwidth, offset, relief, +scrollregion, selectbackground, selectborderwidth, selectforeground, +state, takefocus, width, xscrollcommand, xscrollincrement, +yscrollcommand, yscrollincrement. +""" @overload def configure( self, @@ -1410,32 +2716,83 @@ class Canvas(Widget, XView, YView): xscrollincrement: float | str = ..., yscrollcommand: str | Callable[[float, float], object] = ..., yscrollincrement: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def addtag(self, *args): ... # internal method - def addtag_above(self, newtag: str, tagOrId: str | int) -> None: ... - def addtag_all(self, newtag: str) -> None: ... - def addtag_below(self, newtag: str, tagOrId: str | int) -> None: ... + def addtag(self, *args): # internal method + """Internal function. +""" + def addtag_above(self, newtag: str, tagOrId: str | int) -> None: + """Add tag NEWTAG to all items above TAGORID. +""" + def addtag_all(self, newtag: str) -> None: + """Add tag NEWTAG to all items. +""" + def addtag_below(self, newtag: str, tagOrId: str | int) -> None: + """Add tag NEWTAG to all items below TAGORID. +""" def addtag_closest( self, newtag: str, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None - ) -> None: ... - def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... - def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: ... - def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: ... - def find(self, *args): ... # internal method - def find_above(self, tagOrId: str | int) -> tuple[int, ...]: ... - def find_all(self) -> tuple[int, ...]: ... - def find_below(self, tagOrId: str | int) -> tuple[int, ...]: ... + ) -> None: + """Add tag NEWTAG to item which is closest to pixel at X, Y. +If several match take the top-most. +All items closer than HALO are considered overlapping (all are +closest). If START is specified the next below this tag is taken. +""" + def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: + """Add tag NEWTAG to all items in the rectangle defined +by X1,Y1,X2,Y2. 
+""" + def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: + """Add tag NEWTAG to all items which overlap the rectangle +defined by X1,Y1,X2,Y2. +""" + def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: + """Add tag NEWTAG to all items with TAGORID. +""" + def find(self, *args): # internal method + """Internal function. +""" + def find_above(self, tagOrId: str | int) -> tuple[int, ...]: + """Return items above TAGORID. +""" + def find_all(self) -> tuple[int, ...]: + """Return all items. +""" + def find_below(self, tagOrId: str | int) -> tuple[int, ...]: + """Return all items below TAGORID. +""" def find_closest( self, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None - ) -> tuple[int, ...]: ... - def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: ... - def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: ... - def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: ... + ) -> tuple[int, ...]: + """Return item which is closest to pixel at X, Y. +If several match take the top-most. +All items closer than HALO are considered overlapping (all are +closest). If START is specified the next below this tag is taken. +""" + def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: + """Return all items in rectangle defined +by X1,Y1,X2,Y2. +""" + def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: + """Return all items which overlap the rectangle +defined by X1,Y1,X2,Y2. +""" + def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: + """Return all items with TAGORID. +""" # Incompatible with Misc.bbox(), tkinter violates LSP - def bbox(self, *args: str | int) -> tuple[int, int, int, int]: ... # type: ignore[override] + def bbox(self, *args: str | int) -> tuple[int, int, int, int]: # type: ignore[override] + """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle +which encloses all items with tags specified as arguments. +""" @overload def tag_bind( self, @@ -1443,18 +2800,35 @@ class Canvas(Widget, XView, YView): sequence: str | None = None, func: Callable[[Event[Canvas]], object] | None = None, add: Literal["", "+"] | bool | None = None, - ) -> str: ... + ) -> str: + """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC. + +An additional boolean parameter ADD specifies whether FUNC will be +called additionally to the other bound function or whether it will +replace the previous function. See bind for the return value. +""" @overload def tag_bind( self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None ) -> None: ... @overload def tag_bind(self, tagOrId: str | int, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: ... - def canvasx(self, screenx, gridspacing=None): ... - def canvasy(self, screeny, gridspacing=None): ... - @overload - def coords(self, tagOrId: str | int, /) -> list[float]: ... + def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: + """Unbind for all items with TAGORID for event SEQUENCE the +function identified with FUNCID. 
+""" + def canvasx(self, screenx, gridspacing=None): + """Return the canvas x coordinate of pixel position SCREENX rounded +to nearest multiple of GRIDSPACING units. +""" + def canvasy(self, screeny, gridspacing=None): + """Return the canvas y coordinate of pixel position SCREENY rounded +to nearest multiple of GRIDSPACING units. +""" + @overload + def coords(self, tagOrId: str | int, /) -> list[float]: + """Return a list of coordinates for the item given in ARGS. +""" @overload def coords(self, tagOrId: str | int, args: list[int] | list[float] | tuple[float, ...], /) -> None: ... @overload @@ -1462,9 +2836,15 @@ class Canvas(Widget, XView, YView): # create_foo() methods accept coords as a list or tuple, or as separate arguments. # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. # Keyword arguments should be the same in all overloads of each method. - def create_arc(self, *args, **kw) -> int: ... - def create_bitmap(self, *args, **kw) -> int: ... - def create_image(self, *args, **kw) -> int: ... + def create_arc(self, *args, **kw) -> int: + """Create arc shaped region with coordinates x1,y1,x2,y2. +""" + def create_bitmap(self, *args, **kw) -> int: + """Create bitmap with coordinates x1,y1. +""" + def create_image(self, *args, **kw) -> int: + """Create image item with coordinates x1,y1. +""" @overload def create_line( self, @@ -1496,7 +2876,9 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: ... + ) -> int: + """Create line with coordinates x1,y1,...,xn,yn. +""" @overload def create_line( self, @@ -1595,7 +2977,9 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: ... + ) -> int: + """Create oval with coordinates x1,y1,x2,y2. +""" @overload def create_oval( self, @@ -1699,7 +3083,9 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: ... + ) -> int: + """Create polygon with coordinates x1,y1,...,xn,yn. +""" @overload def create_polygon( self, @@ -1806,7 +3192,9 @@ class Canvas(Widget, XView, YView): stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., - ) -> int: ... + ) -> int: + """Create rectangle with coordinates x1,y1,x2,y2. +""" @overload def create_rectangle( self, @@ -1897,7 +3285,9 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: float | str = ..., - ) -> int: ... + ) -> int: + """Create text with coordinates x1,y1. +""" @overload def create_text( self, @@ -1933,7 +3323,9 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., window: Widget = ..., - ) -> int: ... + ) -> int: + """Create window with coordinates x1,y1,x2,y2. +""" @overload def create_window( self, @@ -1947,47 +3339,126 @@ class Canvas(Widget, XView, YView): width: float | str = ..., window: Widget = ..., ) -> int: ... - def dchars(self, *args) -> None: ... - def delete(self, *tagsOrCanvasIds: str | int) -> None: ... - @overload - def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: ... + def dchars(self, *args) -> None: + """Delete characters of text items identified by tag or id in ARGS (possibly +several times) from FIRST to LAST character (including). 
+""" + def delete(self, *tagsOrCanvasIds: str | int) -> None: + """Delete items identified by all tag or ids contained in ARGS. +""" + @overload + def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: + """Delete tag or id given as last arguments in ARGS from items +identified by first argument in ARGS. +""" @overload def dtag(self, id: int, tag_to_delete: str, /) -> None: ... - def focus(self, *args): ... - def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: ... - def icursor(self, *args) -> None: ... - def index(self, *args): ... - def insert(self, *args) -> None: ... - def itemcget(self, tagOrId, option): ... + def focus(self, *args): + """Set focus to the first item specified in ARGS. +""" + def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: + """Return tags associated with the first item specified in ARGS. +""" + def icursor(self, *args) -> None: + """Set cursor at position POS in the item identified by TAGORID. +In ARGS TAGORID must be first. +""" + def index(self, *args): + """Return position of cursor as integer in item specified in ARGS. +""" + def insert(self, *args) -> None: + """Insert TEXT in item TAGORID at position POS. ARGS must +be TAGORID POS TEXT. +""" + def itemcget(self, tagOrId, option): + """Return the resource value for an OPTION for item TAGORID. +""" # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( self, tagOrId: str | int, cnf: dict[str, Any] | None = None, **kw: Any - ) -> dict[str, tuple[str, str, str, str, str]] | None: ... + ) -> dict[str, tuple[str, str, str, str, str]] | None: + """Configure resources of an item TAGORID. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method without arguments. +""" itemconfig = itemconfigure - def move(self, *args) -> None: ... - def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... - def postscript(self, cnf={}, **kw): ... + def move(self, *args) -> None: + """Move an item TAGORID given in ARGS. +""" + def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: + """Move the items given by TAGORID in the canvas coordinate +space so that the first coordinate pair of the bottommost +item with tag TAGORID is located at position (X,Y). +X and Y may be the empty string, in which case the +corresponding coordinate will be unchanged. All items matching +TAGORID remain in the same positions relative to each other. +""" + def postscript(self, cnf={}, **kw): + """Print the contents of the canvas to a postscript +file. Valid options: colormap, colormode, file, fontmap, +height, pageanchor, pageheight, pagewidth, pagex, pagey, +rotate, width, x, y. +""" # tkinter does: # lower = tag_lower # lift = tkraise = tag_raise # # But mypy doesn't like aliasing here (maybe because Misc defines the same names) - def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... - def lower(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] - def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: ... - def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: ... # type: ignore[override] - def lift(self, first: str | int, second: str | int | None = ..., /) -> None: ... 
# type: ignore[override] - def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: ... - def scan_mark(self, x, y) -> None: ... - def scan_dragto(self, x, y, gain: int = 10) -> None: ... - def select_adjust(self, tagOrId, index) -> None: ... - def select_clear(self) -> None: ... - def select_from(self, tagOrId, index) -> None: ... - def select_item(self): ... - def select_to(self, tagOrId, index) -> None: ... - def type(self, tagOrId: str | int) -> int | None: ... + def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: + """Lower an item TAGORID given in ARGS +(optional below another item). +""" + def lower(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] + """Lower an item TAGORID given in ARGS +(optional below another item). +""" + def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: + """Raise an item TAGORID given in ARGS +(optional above another item). +""" + def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] + """Raise an item TAGORID given in ARGS +(optional above another item). +""" + def lift(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] + """Raise an item TAGORID given in ARGS +(optional above another item). +""" + def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: + """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE. +""" + def scan_mark(self, x, y) -> None: + """Remember the current X, Y coordinates. +""" + def scan_dragto(self, x, y, gain: int = 10) -> None: + """Adjust the view of the canvas to GAIN times the +difference between X and Y and the coordinates given in +scan_mark. +""" + def select_adjust(self, tagOrId, index) -> None: + """Adjust the end of the selection near the cursor of an item TAGORID to index. +""" + def select_clear(self) -> None: + """Clear the selection if it is in this widget. +""" + def select_from(self, tagOrId, index) -> None: + """Set the fixed end of a selection in item TAGORID to INDEX. +""" + def select_item(self): + """Return the item which has the selection. +""" + def select_to(self, tagOrId, index) -> None: + """Set the variable end of a selection in item TAGORID to INDEX. +""" + def type(self, tagOrId: str | int) -> int | None: + """Return the type of the item TAGORID. +""" class Checkbutton(Widget): + """Checkbutton widget which is either in on- or off-state. +""" def __init__( self, master: Misc | None = None, @@ -2046,7 +3517,17 @@ class Checkbutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = 0, wraplength: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a checkbutton widget with the parent MASTER. + +Valid resource names: activebackground, activeforeground, anchor, +background, bd, bg, bitmap, borderwidth, command, cursor, +disabledforeground, fg, font, foreground, height, +highlightbackground, highlightcolor, highlightthickness, image, +indicatoron, justify, offvalue, onvalue, padx, pady, relief, +selectcolor, selectimage, state, takefocus, text, textvariable, +underline, variable, width, wraplength. +""" @overload def configure( self, @@ -2094,17 +3575,35 @@ class Checkbutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
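# A minimal sketch of the on/off behaviour the Checkbutton stubs describe,
# wired to a BooleanVar; the widget and variable names are illustrative only.
import tkinter

root = tkinter.Tk()
enabled = tkinter.BooleanVar(master=root, value=False)

def on_toggle() -> None:
    print("enabled:", enabled.get())

check = tkinter.Checkbutton(root, text="Enable logging", variable=enabled, command=on_toggle)
check.pack(padx=10, pady=10)
check.select()   # force on-state programmatically (does not run the command)
check.invoke()   # toggle and run the command, per the methods documented below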
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self) -> None: ... - def flash(self) -> None: ... - def invoke(self) -> Any: ... - def select(self) -> None: ... - def toggle(self) -> None: ... + def deselect(self) -> None: + """Put the button in off-state. +""" + def flash(self) -> None: + """Flash the button. +""" + def invoke(self) -> Any: + """Toggle the button and invoke a command if given as resource. +""" + def select(self) -> None: + """Put the button in on-state. +""" + def toggle(self) -> None: + """Toggle the button. +""" class Entry(Widget, XView): + """Entry widget which allows displaying simple text. +""" def __init__( self, master: Misc | None = None, @@ -2148,7 +3647,18 @@ class Entry(Widget, XView): vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", # same as validatecommand width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct an entry widget with the parent MASTER. + +Valid resource names: background, bd, bg, borderwidth, cursor, +exportselection, fg, font, foreground, highlightbackground, +highlightcolor, highlightthickness, insertbackground, +insertborderwidth, insertofftime, insertontime, insertwidth, +invalidcommand, invcmd, justify, relief, selectbackground, +selectborderwidth, selectforeground, show, state, takefocus, +textvariable, validate, validatecommand, vcmd, width, +xscrollcommand. +""" @overload def configure( self, @@ -2191,23 +3701,58 @@ class Entry(Widget, XView): vcmd: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def delete(self, first: str | int, last: str | int | None = None) -> None: ... - def get(self) -> str: ... - def icursor(self, index: str | int) -> None: ... - def index(self, index: str | int) -> int: ... - def insert(self, index: str | int, string: str) -> None: ... - def scan_mark(self, x) -> None: ... - def scan_dragto(self, x) -> None: ... - def selection_adjust(self, index: str | int) -> None: ... - def selection_clear(self) -> None: ... # type: ignore[override] - def selection_from(self, index: str | int) -> None: ... - def selection_present(self) -> bool: ... - def selection_range(self, start: str | int, end: str | int) -> None: ... - def selection_to(self, index: str | int) -> None: ... + def delete(self, first: str | int, last: str | int | None = None) -> None: + """Delete text from FIRST to LAST (not included). +""" + def get(self) -> str: + """Return the text. +""" + def icursor(self, index: str | int) -> None: + """Insert cursor at INDEX. +""" + def index(self, index: str | int) -> int: + """Return position of cursor. +""" + def insert(self, index: str | int, string: str) -> None: + """Insert STRING at INDEX. 
+""" + def scan_mark(self, x) -> None: + """Remember the current X, Y coordinates. +""" + def scan_dragto(self, x) -> None: + """Adjust the view of the canvas to 10 times the +difference between X and Y and the coordinates given in +scan_mark. +""" + def selection_adjust(self, index: str | int) -> None: + """Adjust the end of the selection near the cursor to INDEX. +""" + def selection_clear(self) -> None: # type: ignore[override] + """Clear the selection if it is in this widget. +""" + def selection_from(self, index: str | int) -> None: + """Set the fixed end of a selection to INDEX. +""" + def selection_present(self) -> bool: + """Return True if there are characters selected in the entry, False +otherwise. +""" + def selection_range(self, start: str | int, end: str | int) -> None: + """Set the selection from START to END (not included). +""" + def selection_to(self, index: str | int) -> None: + """Set the variable end of a selection to INDEX. +""" select_adjust = selection_adjust select_clear = selection_clear select_from = selection_from @@ -2216,6 +3761,8 @@ class Entry(Widget, XView): select_to = selection_to class Frame(Widget): + """Frame widget which may contain other widgets and can have a 3D border. +""" def __init__( self, master: Misc | None = None, @@ -2241,7 +3788,13 @@ class Frame(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = 0, visual: str | tuple[str, int] = "", # can't be changed with configure() width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a frame widget with the parent MASTER. + +Valid resource names: background, bd, bg, borderwidth, class, +colormap, container, cursor, height, highlightbackground, +highlightcolor, highlightthickness, relief, takefocus, visual, width. +""" @overload def configure( self, @@ -2262,12 +3815,20 @@ class Frame(Widget): relief: Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): + """Label widget which can display text and bitmaps. +""" def __init__( self, master: Misc | None = None, @@ -2305,7 +3866,24 @@ class Label(Widget): underline: int = -1, width: float | str = 0, wraplength: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a label widget with the parent MASTER. + +STANDARD OPTIONS + + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, takefocus, text, + textvariable, underline, wraplength + +WIDGET-SPECIFIC OPTIONS + + height, state, width + +""" @overload def configure( self, @@ -2342,12 +3920,20 @@ class Label(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. 
To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Listbox(Widget, XView, YView): + """Listbox widget which can display a list of strings. +""" def __init__( self, master: Misc | None = None, @@ -2398,7 +3984,15 @@ class Listbox(Widget, XView, YView): width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct a listbox widget with the parent MASTER. + +Valid resource names: background, bd, bg, borderwidth, cursor, +exportselection, fg, font, foreground, height, highlightbackground, +highlightcolor, highlightthickness, relief, selectbackground, +selectborderwidth, selectforeground, selectmode, setgrid, takefocus, +width, xscrollcommand, yscrollcommand, listvariable. +""" @overload def configure( self, @@ -2433,35 +4027,89 @@ class Listbox(Widget, XView, YView): width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index: str | int) -> None: ... - def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] - def curselection(self): ... - def delete(self, first: str | int, last: str | int | None = None) -> None: ... - def get(self, first: str | int, last: str | int | None = None): ... - def index(self, index: str | int) -> int: ... - def insert(self, index: str | int, *elements: str | float) -> None: ... - def nearest(self, y): ... - def scan_mark(self, x, y) -> None: ... - def scan_dragto(self, x, y) -> None: ... - def see(self, index: str | int) -> None: ... - def selection_anchor(self, index: str | int) -> None: ... + def activate(self, index: str | int) -> None: + """Activate item identified by INDEX. +""" + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: # type: ignore[override] + """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle +which encloses the item identified by the given index. +""" + def curselection(self): + """Return the indices of currently selected item. +""" + def delete(self, first: str | int, last: str | int | None = None) -> None: + """Delete items from FIRST to LAST (included). +""" + def get(self, first: str | int, last: str | int | None = None): + """Get list of items from FIRST to LAST (included). +""" + def index(self, index: str | int) -> int: + """Return index of item identified with INDEX. +""" + def insert(self, index: str | int, *elements: str | float) -> None: + """Insert ELEMENTS at INDEX. +""" + def nearest(self, y): + """Get index of item which is nearest to y coordinate Y. +""" + def scan_mark(self, x, y) -> None: + """Remember the current X, Y coordinates. +""" + def scan_dragto(self, x, y) -> None: + """Adjust the view of the listbox to 10 times the +difference between X and Y and the coordinates given in +scan_mark. +""" + def see(self, index: str | int) -> None: + """Scroll such that INDEX is visible. 
+""" + def selection_anchor(self, index: str | int) -> None: + """Set the fixed end oft the selection to INDEX. +""" select_anchor = selection_anchor - def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: # type: ignore[override] + """Clear the selection from FIRST to LAST (included). +""" select_clear = selection_clear - def selection_includes(self, index: str | int): ... + def selection_includes(self, index: str | int): + """Return True if INDEX is part of the selection. +""" select_includes = selection_includes - def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... + def selection_set(self, first: str | int, last: str | int | None = None) -> None: + """Set the selection from FIRST to LAST (included) without +changing the currently selected elements. +""" select_set = selection_set - def size(self) -> int: ... # type: ignore[override] - def itemcget(self, index: str | int, option): ... - def itemconfigure(self, index: str | int, cnf=None, **kw): ... + def size(self) -> int: # type: ignore[override] + """Return the number of elements in the listbox. +""" + def itemcget(self, index: str | int, option): + """Return the resource value for an ITEM and an OPTION. +""" + def itemconfigure(self, index: str | int, cnf=None, **kw): + """Configure resources of an ITEM. + +The values for resources are specified as keyword arguments. +To get an overview about the allowed keyword arguments +call the method without arguments. +Valid resource names: background, bg, foreground, fg, +selectbackground, selectforeground. +""" itemconfig = itemconfigure class Menu(Widget): + """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus. +""" def __init__( self, master: Misc | None = None, @@ -2492,7 +4140,14 @@ class Menu(Widget): tearoffcommand: Callable[[str, str], object] | str = "", title: str = "", type: Literal["menubar", "tearoff", "normal"] = "normal", - ) -> None: ... + ) -> None: + """Construct menu widget with the parent MASTER. + +Valid resource names: activebackground, activeborderwidth, +activeforeground, background, bd, bg, borderwidth, cursor, +disabledforeground, fg, font, foreground, postcommand, relief, +selectcolor, takefocus, tearoff, tearoffcommand, title, type. +""" @overload def configure( self, @@ -2519,14 +4174,28 @@ class Menu(Widget): tearoffcommand: Callable[[str, str], object] | str = ..., title: str = ..., type: Literal["menubar", "tearoff", "normal"] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... - def activate(self, index: str | int) -> None: ... - def add(self, itemType, cnf={}, **kw): ... # docstring says "Internal function." - def insert(self, index, itemType, cnf={}, **kw): ... # docstring says "Internal function." + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: + """Post the menu at position X,Y with entry ENTRY. +""" + def activate(self, index: str | int) -> None: + """Activate entry at INDEX. 
+""" + def add(self, itemType, cnf={}, **kw): # docstring says "Internal function." + """Internal function. +""" + def insert(self, index, itemType, cnf={}, **kw): # docstring says "Internal function." + """Internal function. +""" def add_cascade( self, cnf: dict[str, Any] | None = {}, @@ -2547,7 +4216,9 @@ class Menu(Widget): menu: Menu = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: ... + ) -> None: + """Add hierarchical menu item. +""" def add_checkbutton( self, cnf: dict[str, Any] | None = {}, @@ -2573,7 +4244,9 @@ class Menu(Widget): state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., variable: Variable = ..., - ) -> None: ... + ) -> None: + """Add checkbutton menu item. +""" def add_command( self, cnf: dict[str, Any] | None = {}, @@ -2593,7 +4266,9 @@ class Menu(Widget): label: str = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: ... + ) -> None: + """Add command menu item. +""" def add_radiobutton( self, cnf: dict[str, Any] | None = {}, @@ -2618,8 +4293,12 @@ class Menu(Widget): underline: int = ..., value: Any = ..., variable: Variable = ..., - ) -> None: ... - def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... + ) -> None: + """Add radio menu item. +""" + def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: + """Add separator. +""" def insert_cascade( self, index: str | int, @@ -2641,7 +4320,9 @@ class Menu(Widget): menu: Menu = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: ... + ) -> None: + """Add hierarchical menu item at INDEX. +""" def insert_checkbutton( self, index: str | int, @@ -2668,7 +4349,9 @@ class Menu(Widget): state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., variable: Variable = ..., - ) -> None: ... + ) -> None: + """Add checkbutton menu item at INDEX. +""" def insert_command( self, index: str | int, @@ -2689,7 +4372,9 @@ class Menu(Widget): label: str = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., - ) -> None: ... + ) -> None: + """Add command menu item at INDEX. +""" def insert_radiobutton( self, index: str | int, @@ -2715,23 +4400,51 @@ class Menu(Widget): underline: int = ..., value: Any = ..., variable: Variable = ..., - ) -> None: ... - def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... - def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... - def entrycget(self, index: str | int, option: str) -> Any: ... + ) -> None: + """Add radio menu item at INDEX. +""" + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: + """Add separator at INDEX. +""" + def delete(self, index1: str | int, index2: str | int | None = None) -> None: + """Delete menu items between INDEX1 and INDEX2 (included). +""" + def entrycget(self, index: str | int, option: str) -> Any: + """Return the resource value of a menu item for OPTION at INDEX. +""" def entryconfigure( self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure a menu item at INDEX. +""" entryconfig = entryconfigure - def index(self, index: str | int) -> int | None: ... - def invoke(self, index: str | int) -> Any: ... 
- def post(self, x: int, y: int) -> None: ... - def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... - def unpost(self) -> None: ... - def xposition(self, index: str | int) -> int: ... - def yposition(self, index: str | int) -> int: ... + def index(self, index: str | int) -> int | None: + """Return the index of a menu item identified by INDEX. +""" + def invoke(self, index: str | int) -> Any: + """Invoke a menu item identified by INDEX and execute +the associated command. +""" + def post(self, x: int, y: int) -> None: + """Display a menu at position X,Y. +""" + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: + """Return the type of the menu item at INDEX. +""" + def unpost(self) -> None: + """Unmap a menu. +""" + def xposition(self, index: str | int) -> int: + """Return the x-position of the leftmost pixel of the menu item +at INDEX. +""" + def yposition(self, index: str | int) -> int: + """Return the y-position of the topmost pixel of the menu item at INDEX. +""" class Menubutton(Widget): + """Menubutton widget, obsolete since Tk8.0. +""" def __init__( self, master: Misc | None = None, @@ -2812,12 +4525,20 @@ class Menubutton(Widget): underline: int = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Message(Widget): + """Message widget to display multiline text. Obsolete since Label does it too. +""" def __init__( self, master: Misc | None = None, @@ -2875,12 +4596,20 @@ class Message(Widget): text: float | str = ..., textvariable: Variable = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Radiobutton(Widget): + """Radiobutton widget which shows only one of several buttons in on-state. +""" def __init__( self, master: Misc | None = None, @@ -2928,7 +4657,17 @@ class Radiobutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = 0, wraplength: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a radiobutton widget with the parent MASTER. + +Valid resource names: activebackground, activeforeground, anchor, +background, bd, bg, bitmap, borderwidth, command, cursor, +disabledforeground, fg, font, foreground, height, +highlightbackground, highlightcolor, highlightthickness, image, +indicatoron, justify, padx, pady, relief, selectcolor, selectimage, +state, takefocus, text, textvariable, underline, value, variable, +width, wraplength. +""" @overload def configure( self, @@ -2975,16 +4714,32 @@ class Radiobutton(Widget): variable: Variable | Literal[""] = ..., width: float | str = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self) -> None: ... - def flash(self) -> None: ... - def invoke(self) -> Any: ... - def select(self) -> None: ... + def deselect(self) -> None: + """Put the button in off-state. +""" + def flash(self) -> None: + """Flash the button. +""" + def invoke(self) -> Any: + """Toggle the button and invoke a command if given as resource. +""" + def select(self) -> None: + """Put the button in on-state. +""" class Scale(Widget): + """Scale widget which can display a numerical scale. +""" def __init__( self, master: Misc | None = None, @@ -3026,7 +4781,16 @@ class Scale(Widget): troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: float | str = 15, - ) -> None: ... + ) -> None: + """Construct a scale widget with the parent MASTER. + +Valid resource names: activebackground, background, bigincrement, bd, +bg, borderwidth, command, cursor, digits, fg, font, foreground, from, +highlightbackground, highlightcolor, highlightthickness, label, +length, orient, relief, repeatdelay, repeatinterval, resolution, +showvalue, sliderlength, sliderrelief, state, takefocus, +tickinterval, to, troughcolor, variable, width. +""" @overload def configure( self, @@ -3066,16 +4830,35 @@ class Scale(Widget): troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def get(self) -> float: ... - def set(self, value) -> None: ... - def coords(self, value: float | None = None) -> tuple[int, int]: ... - def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... + def get(self) -> float: + """Get the current value as integer or float. +""" + def set(self, value) -> None: + """Set the value to VALUE. +""" + def coords(self, value: float | None = None) -> tuple[int, int]: + """Return a tuple (X,Y) of the point along the centerline of the +trough that corresponds to VALUE or the current value if None is +given. +""" + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: + """Return where the point X,Y lies. Valid return values are "slider", +"though1" and "though2". +""" class Scrollbar(Widget): + """Scrollbar widget which displays a slider at a certain position. +""" def __init__( self, master: Misc | None = None, @@ -3107,7 +4890,16 @@ class Scrollbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", troughcolor: str = ..., width: float | str = ..., - ) -> None: ... + ) -> None: + """Construct a scrollbar widget with the parent MASTER. + +Valid resource names: activebackground, activerelief, +background, bd, bg, borderwidth, command, cursor, +elementborderwidth, highlightbackground, +highlightcolor, highlightthickness, jump, orient, +relief, repeatdelay, repeatinterval, takefocus, +troughcolor, width. 
+""" @overload def configure( self, @@ -3134,22 +4926,52 @@ class Scrollbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., troughcolor: str = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index=None): ... - def delta(self, deltax: int, deltay: int) -> float: ... - def fraction(self, x: int, y: int) -> float: ... - def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... - def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ... - def set(self, first: float | str, last: float | str) -> None: ... + def activate(self, index=None): + """Marks the element indicated by index as active. +The only index values understood by this method are "arrow1", +"slider", or "arrow2". If any other value is specified then no +element of the scrollbar will be active. If index is not specified, +the method returns the name of the element that is currently active, +or None if no element is active. +""" + def delta(self, deltax: int, deltay: int) -> float: + """Return the fractional change of the scrollbar setting if it +would be moved by DELTAX or DELTAY pixels. +""" + def fraction(self, x: int, y: int) -> float: + """Return the fractional value which corresponds to a slider +position of X,Y. +""" + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: + """Return the element under position X,Y as one of +"arrow1","slider","arrow2" or "". +""" + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: + """Return the current fractional values (upper and lower end) +of the slider position. +""" + def set(self, first: float | str, last: float | str) -> None: + """Set the fractional values of the slider position (upper and +lower ends as value between 0 and 1). +""" _WhatToCount: TypeAlias = Literal[ "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels" ] class Text(Widget, XView, YView): + """Text widget which can display text in various forms. +""" def __init__( self, master: Misc | None = None, @@ -3205,7 +5027,29 @@ class Text(Widget, XView, YView): wrap: Literal["none", "char", "word"] = "char", xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct a text widget with the parent MASTER. 
+ +STANDARD OPTIONS + + background, borderwidth, cursor, + exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, padx, pady, + relief, selectbackground, + selectborderwidth, selectforeground, + setgrid, takefocus, + xscrollcommand, yscrollcommand, + +WIDGET-SPECIFIC OPTIONS + + autoseparators, height, maxundo, + spacing1, spacing2, spacing3, + state, tabs, undo, width, wrap, + +""" @overload def configure( self, @@ -3256,17 +5100,29 @@ class Text(Widget, XView, YView): wrap: Literal["none", "char", "word"] = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def bbox(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int] | None: # type: ignore[override] + """Return a tuple of (x,y,width,height) which gives the bounding +box of the visible part of the character at the given index. +""" def compare( self, index1: str | float | _tkinter.Tcl_Obj | Widget, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: str | float | _tkinter.Tcl_Obj | Widget, - ) -> bool: ... + ) -> bool: + """Return whether between index INDEX1 and index INDEX2 the +relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. +""" if sys.version_info >= (3, 13): @overload def count( @@ -3275,7 +5131,23 @@ class Text(Widget, XView, YView): index2: str | float | _tkinter.Tcl_Obj | Widget, *, return_ints: Literal[True], - ) -> int: ... + ) -> int: + """Counts the number of relevant things between the two indices. + +If INDEX1 is after INDEX2, the result will be a negative number +(and this holds for each of the possible options). + +The actual items which are counted depends on the options given. +The result is a tuple of integers, one for the result of each +counting option given, if more than one option is specified or +return_ints is false (default), otherwise it is an integer. +Valid counting options are "chars", "displaychars", +"displayindices", "displaylines", "indices", "lines", "xpixels" +and "ypixels". The default value, if no option is specified, is +"indices". There is an additional possible option "update", +which if given then all subsequent options ensure that any +possible out of date information is recalculated. +""" @overload def count( self, @@ -3398,7 +5270,19 @@ class Text(Widget, XView, YView): @overload def count( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget - ) -> tuple[int] | None: ... + ) -> tuple[int] | None: + """Counts the number of relevant things between the two indices. + If index1 is after index2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given by + args. The result is a list of integers, one for the result of each + counting option given. 
Valid counting options are "chars", + "displaychars", "displayindices", "displaylines", "indices", + "lines", "xpixels" and "ypixels". There is an additional possible + option "update", which if given then all subsequent options ensure + that any possible out of date information is recalculated. +""" @overload def count( self, @@ -3447,13 +5331,22 @@ class Text(Widget, XView, YView): ) -> tuple[int, ...]: ... @overload - def debug(self, boolean: None = None) -> bool: ... + def debug(self, boolean: None = None) -> bool: + """Turn on the internal consistency checks of the B-Tree inside the text +widget according to BOOLEAN. +""" @overload def debug(self, boolean: bool) -> None: ... def delete( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None - ) -> None: ... - def dlineinfo(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int, int] | None: ... + ) -> None: + """Delete the characters between INDEX1 and INDEX2 (not included). +""" + def dlineinfo(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int, int] | None: + """Return tuple (x,y,width,height,baseline) giving the bounding box +and baseline position of the visible part of the line containing +the character at INDEX. +""" @overload def dump( self, @@ -3467,7 +5360,19 @@ class Text(Widget, XView, YView): tag: bool = ..., text: bool = ..., window: bool = ..., - ) -> list[tuple[str, str, str]]: ... + ) -> list[tuple[str, str, str]]: + """Return the contents of the widget between index1 and index2. + +The type of contents returned in filtered based on the keyword +parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are +given and true, then the corresponding items are returned. The result +is a list of triples of the form (key, value, index). If none of the +keywords are true then 'all' is used by default. + +If the 'command' argument is given, it is called once for each element +of the list of triples, with the values of each triple serving as the +arguments to the function. In this case the list is not returned. +""" @overload def dump( self, @@ -3496,20 +5401,65 @@ class Text(Widget, XView, YView): text: bool = ..., window: bool = ..., ) -> None: ... - def edit(self, *args): ... # docstring says "Internal method" + def edit(self, *args): # docstring says "Internal method" + """Internal method + +This method controls the undo mechanism and +the modified flag. The exact behavior of the +command depends on the option argument that +follows the edit argument. The following forms +of the command are currently supported: + +edit_modified, edit_redo, edit_reset, edit_separator +and edit_undo + +""" @overload - def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] + def edit_modified(self, arg: None = None) -> bool: # actually returns Literal[0, 1] + """Get or Set the modified flag + +If arg is not specified, returns the modified +flag of the widget. The insert, delete, edit undo and +edit redo commands or the user can set or clear the +modified flag. If boolean is specified, sets the +modified flag of the widget to arg. +""" @overload def edit_modified(self, arg: bool) -> None: ... # actually returns empty string - def edit_redo(self) -> None: ... # actually returns empty string - def edit_reset(self) -> None: ... # actually returns empty string - def edit_separator(self) -> None: ... # actually returns empty string - def edit_undo(self) -> None: ... 
# actually returns empty string + def edit_redo(self) -> None: # actually returns empty string + """Redo the last undone edit + +When the undo option is true, reapplies the last +undone edits provided no other edits were done since +then. Generates an error when the redo stack is empty. +Does nothing when the undo option is false. +""" + def edit_reset(self) -> None: # actually returns empty string + """Clears the undo and redo stacks + """ + def edit_separator(self) -> None: # actually returns empty string + """Inserts a separator (boundary) on the undo stack. + +Does nothing when the undo option is false +""" + def edit_undo(self) -> None: # actually returns empty string + """Undoes the last edit action + +If the undo option is true. An edit action is defined +as all the insert and delete commands that are recorded +on the undo stack in between two separators. Generates +an error when the undo stack is empty. Does nothing +when the undo option is false +""" def get( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None - ) -> str: ... + ) -> str: + """Return the text from INDEX1 to INDEX2 (not included). +""" @overload - def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["image", "name"]) -> str: ... + def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["image", "name"]) -> str: + """Return the value of OPTION of an embedded image at INDEX. +""" @overload def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... @overload @@ -3521,7 +5471,9 @@ class Text(Widget, XView, YView): @overload def image_configure( self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str - ) -> tuple[str, str, str, str, str | int]: ... + ) -> tuple[str, str, str, str, str | int]: + """Configure an embedded image at INDEX. +""" @overload def image_configure( self, @@ -3544,33 +5496,75 @@ class Text(Widget, XView, YView): name: str = ..., padx: float | str = ..., pady: float | str = ..., - ) -> str: ... - def image_names(self) -> tuple[str, ...]: ... - def index(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str: ... + ) -> str: + """Create an embedded image at INDEX. +""" + def image_names(self) -> tuple[str, ...]: + """Return all names of embedded images in this widget. +""" + def index(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str: + """Return the index in the form line.char for INDEX. +""" def insert( self, index: str | float | _tkinter.Tcl_Obj | Widget, chars: str, *args: str | list[str] | tuple[str, ...] - ) -> None: ... + ) -> None: + """Insert CHARS before the characters at INDEX. An additional +tag can be given in ARGS. Additional CHARS and tags can follow in ARGS. +""" @overload - def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: + """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT). +Return the current value if None is given for DIRECTION. +""" @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string - def mark_names(self) -> tuple[str, ...]: ... - def mark_set(self, markName: str, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... - def mark_unset(self, *markNames: str) -> None: ... 
- def mark_next(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... - def mark_previous(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: ... + def mark_names(self) -> tuple[str, ...]: + """Return all mark names. +""" + def mark_set(self, markName: str, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: + """Set mark MARKNAME before the character at INDEX. +""" + def mark_unset(self, *markNames: str) -> None: + """Delete all marks in MARKNAMES. +""" + def mark_next(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: + """Return the name of the next mark after INDEX. +""" + def mark_previous(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: + """Return the name of the previous mark before INDEX. +""" # **kw of peer_create is same as the kwargs of Text.__init__ - def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: ... - def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... + def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: + """Creates a peer text widget with the given newPathName, and any +optional standard configuration options. By default the peer will +have the same start and end line as the parent widget, but +these can be overridden with the standard configuration options. +""" + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: + """Returns a list of peers of this widget (this does not include +the widget itself). +""" def replace( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget, chars: str, *args: str | list[str] | tuple[str, ...], - ) -> None: ... - def scan_mark(self, x: int, y: int) -> None: ... - def scan_dragto(self, x: int, y: int) -> None: ... + ) -> None: + """Replaces the range of characters between index1 and index2 with +the given characters and tags specified by args. + +See the method insert for some more information about args, and the +method delete for information about the indices. +""" + def scan_mark(self, x: int, y: int) -> None: + """Remember the current X, Y coordinates. +""" + def scan_dragto(self, x: int, y: int) -> None: + """Adjust the view of the text to 10 times the +difference between X and Y and the coordinates given in +scan_mark. +""" def search( self, pattern: str, @@ -3583,11 +5577,20 @@ class Text(Widget, XView, YView): nocase: bool | None = None, count: Variable | None = None, elide: bool | None = None, - ) -> str: ... # returns empty string for not found - def see(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: ... + ) -> str: # returns empty string for not found + """Search PATTERN beginning from INDEX until STOPINDEX. +Return the index of the first character of a match or an +empty string. +""" + def see(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: + """Scroll such that the character at INDEX is visible. +""" def tag_add( self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, *args: str | float | _tkinter.Tcl_Obj | Widget - ) -> None: ... + ) -> None: + """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS. +Additional pairs of indices may follow in ARGS. +""" # tag_bind stuff is very similar to Canvas @overload def tag_bind( @@ -3596,12 +5599,23 @@ class Text(Widget, XView, YView): sequence: str | None, func: Callable[[Event[Text]], object] | None, add: Literal["", "+"] | bool | None = None, - ) -> str: ... 
+ ) -> str: + """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC. + +An additional boolean parameter ADD specifies whether FUNC will be +called additionally to the other bound function or whether it will +replace the previous function. See bind for the return value. +""" @overload def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: + """Unbind for all characters with TAGNAME for event SEQUENCE the +function identified with FUNCID. +""" # allowing any string for cget instead of just Literals because there's no other way to look up tag options - def tag_cget(self, tagName: str, option: str): ... + def tag_cget(self, tagName: str, option: str): + """Return the value of OPTION for tag TAGNAME. +""" @overload def tag_configure( self, @@ -3636,36 +5650,62 @@ class Text(Widget, XView, YView): underline: bool = ..., underlinefg: str = ..., wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure a tag TAGNAME. +""" @overload def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure - def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: ... # error if no tag names given - def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... - def tag_names(self, index: str | float | _tkinter.Tcl_Obj | Widget | None = None) -> tuple[str, ...]: ... + def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: # error if no tag names given + """Delete all tags in TAGNAMES. +""" + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: + """Change the priority of tag TAGNAME such that it is lower +than the priority of BELOWTHIS. +""" + def tag_names(self, index: str | float | _tkinter.Tcl_Obj | Widget | None = None) -> tuple[str, ...]: + """Return a list of all tag names. +""" def tag_nextrange( self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, - ) -> tuple[str, str] | tuple[()]: ... + ) -> tuple[str, str] | tuple[()]: + """Return a list of start and end index for the first sequence of +characters between INDEX1 and INDEX2 which all have tag TAGNAME. +The text is searched forward from INDEX1. +""" def tag_prevrange( self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, - ) -> tuple[str, str] | tuple[()]: ... - def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... - def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... + ) -> tuple[str, str] | tuple[()]: + """Return a list of start and end index for the first sequence of +characters between INDEX1 and INDEX2 which all have tag TAGNAME. +The text is searched backwards from INDEX1. +""" + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: + """Change the priority of tag TAGNAME such that it is higher +than the priority of ABOVETHIS. +""" + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: + """Return a list of ranges of text which have tag TAGNAME. 
+""" # tag_remove and tag_delete are different def tag_remove( self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, - ) -> None: ... + ) -> None: + """Remove tag TAGNAME from all characters between INDEX1 and INDEX2. +""" @overload - def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... + def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: + """Return the value of OPTION of an embedded window at INDEX. +""" @overload def window_cget( self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["stretch"] @@ -3681,7 +5721,9 @@ class Text(Widget, XView, YView): @overload def window_configure( self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str - ) -> tuple[str, str, str, str, str | int]: ... + ) -> tuple[str, str, str, str, str | int]: + """Configure an embedded window at INDEX. +""" @overload def window_configure( self, @@ -3707,16 +5749,26 @@ class Text(Widget, XView, YView): pady: float | str = ..., stretch: bool | Literal[0, 1] = ..., window: Misc | str = ..., - ) -> None: ... - def window_names(self) -> tuple[str, ...]: ... - def yview_pickplace(self, *what): ... # deprecated + ) -> None: + """Create a window at INDEX. +""" + def window_names(self) -> tuple[str, ...]: + """Return all names of embedded windows in this widget. +""" + def yview_pickplace(self, *what): # deprecated + """Obsolete function, use see. +""" class _setit: + """Internal class. It wraps the command in the widget OptionMenu. +""" def __init__(self, var, value, callback=None) -> None: ... def __call__(self, *args) -> None: ... # manual page: tk_optionMenu class OptionMenu(Menubutton): + """OptionMenu which allows the user to select a value from a menu. +""" menuname: Incomplete def __init__( # differs from other widgets @@ -3727,7 +5779,12 @@ class OptionMenu(Menubutton): *values: str, # kwarg only from now on command: Callable[[StringVar], object] | None = ..., - ) -> None: ... + ) -> None: + """Construct an optionmenu widget with the parent MASTER, with +the resource textvariable set to VARIABLE, the initially selected +value VALUE, the other menu values VALUES and an additional +keyword argument command. +""" # configure, config, cget are inherited from Menubutton # destroy and __getitem__ are overridden, signature does not change @@ -3747,6 +5804,8 @@ class _BitmapImageLike(_Image): ... class _PhotoImageLike(_Image): ... class Image(_Image): + """Base class for images. +""" name: Incomplete tk: _tkinter.TkappType def __init__(self, imgtype, name=None, cnf={}, master: Misc | _tkinter.TkappType | None = None, **kw) -> None: ... @@ -3755,9 +5814,13 @@ class Image(_Image): def __getitem__(self, key): ... configure: Incomplete config: Incomplete - def type(self): ... + def type(self): + """Return the type of the image, e.g. "photo" or "bitmap". +""" class PhotoImage(Image, _PhotoImageLike): + """Widget which can display images in PGM, PPM, GIF, PNG format. +""" # This should be kept in sync with PIL.ImageTK.PhotoImage.__init__() def __init__( self, @@ -3772,7 +5835,12 @@ class PhotoImage(Image, _PhotoImageLike): height: int = ..., palette: int | str = ..., width: int = ..., - ) -> None: ... + ) -> None: + """Create an image with NAME. + +Valid resource names: data, format, file, gamma, height, palette, +width. 
+""" def configure( self, *, @@ -3783,10 +5851,16 @@ class PhotoImage(Image, _PhotoImageLike): height: int = ..., palette: int | str = ..., width: int = ..., - ) -> None: ... + ) -> None: + """Configure the image. +""" config = configure - def blank(self) -> None: ... - def cget(self, option: str) -> str: ... + def blank(self) -> None: + """Display a transparent image. +""" + def cget(self, option: str) -> str: + """Return the value of OPTION. +""" def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' if sys.version_info >= (3, 13): def copy( @@ -3795,9 +5869,39 @@ class PhotoImage(Image, _PhotoImageLike): from_coords: Iterable[int] | None = None, zoom: int | tuple[int, int] | list[int] | None = None, subsample: int | tuple[int, int] | list[int] | None = None, - ) -> PhotoImage: ... - def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... - def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: ... + ) -> PhotoImage: + """Return a new PhotoImage with the same image as this widget. + +The FROM_COORDS option specifies a rectangular sub-region of the +source image to be copied. It must be a tuple or a list of 1 to 4 +integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally +opposite corners of the rectangle. If x2 and y2 are not specified, +the default value is the bottom-right corner of the source image. +The pixels copied will include the left and top edges of the +specified rectangle but not the bottom or right edges. If the +FROM_COORDS option is not given, the default is the whole source +image. + +If SUBSAMPLE or ZOOM are specified, the image is transformed as in +the subsample() or zoom() methods. The value must be a single +integer or a pair of integers. +""" + def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: + """Return a new PhotoImage based on the same image as this widget +but use only every Xth or Yth pixel. If Y is not given, the +default value is the same as X. + +The FROM_COORDS option specifies a rectangular sub-region of the +source image to be copied, as in the copy() method. +""" + def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: + """Return a new PhotoImage with the same image as this widget +but zoom it with a factor of X in the X direction and Y in the Y +direction. If Y is not given, the default value is the same as X. + +The FROM_COORDS option specifies a rectangular sub-region of the +source image to be copied, as in the copy() method. +""" def copy_replace( self, sourceImage: PhotoImage | str, @@ -3809,13 +5913,66 @@ class PhotoImage(Image, _PhotoImageLike): subsample: int | tuple[int, int] | list[int] | None = None, # `None` defaults to overlay. compositingrule: Literal["overlay", "set"] | None = None, - ) -> None: ... + ) -> None: + """Copy a region from the source image (which must be a PhotoImage) to +this image, possibly with pixel zooming and/or subsampling. If no +options are specified, this command copies the whole of the source +image into this image, starting at coordinates (0, 0). + +The FROM_COORDS option specifies a rectangular sub-region of the +source image to be copied. It must be a tuple or a list of 1 to 4 +integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally +opposite corners of the rectangle. 
If x2 and y2 are not specified, +the default value is the bottom-right corner of the source image. +The pixels copied will include the left and top edges of the +specified rectangle but not the bottom or right edges. If the +FROM_COORDS option is not given, the default is the whole source +image. + +The TO option specifies a rectangular sub-region of the destination +image to be affected. It must be a tuple or a list of 1 to 4 +integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally +opposite corners of the rectangle. If x2 and y2 are not specified, +the default value is (x1,y1) plus the size of the source region +(after subsampling and zooming, if specified). If x2 and y2 are +specified, the source region will be replicated if necessary to fill +the destination region in a tiled fashion. + +If SHRINK is true, the size of the destination image should be +reduced, if necessary, so that the region being copied into is at +the bottom-right corner of the image. + +If SUBSAMPLE or ZOOM are specified, the image is transformed as in +the subsample() or zoom() methods. The value must be a single +integer or a pair of integers. + +The COMPOSITINGRULE option specifies how transparent pixels in the +source image are combined with the destination image. When a +compositing rule of 'overlay' is set, the old contents of the +destination image are visible, as if the source image were printed +on a piece of transparent film and placed over the top of the +destination. When a compositing rule of 'set' is set, the old +contents of the destination image are discarded and the source image +is used as-is. The default compositing rule is 'overlay'. +""" else: - def copy(self) -> PhotoImage: ... - def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... - def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def copy(self) -> PhotoImage: + """Return a new PhotoImage with the same image as this widget. +""" + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: + """Return a new PhotoImage with the same image as this widget + but zoom it with a factor of x in the X direction and y in the Y + direction. If y is not given, the default value is the same as x. + """ + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: + """Return a new PhotoImage based on the same image as this widget + but use only every Xth or Yth pixel. If y is not given, the + default value is the same as x. + """ - def get(self, x: int, y: int) -> tuple[int, int, int]: ... + def get(self, x: int, y: int) -> tuple[int, int, int]: + """Return the color (red, green, blue) of the pixel at X,Y. +""" def put( self, data: ( @@ -3829,7 +5986,10 @@ class PhotoImage(Image, _PhotoImageLike): | tuple[tuple[str, ...], ...] ), to: tuple[int, int] | tuple[int, int, int, int] | None = None, - ) -> None: ... + ) -> None: + """Put row formatted colors to image starting from +position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) +""" if sys.version_info >= (3, 13): def read( self, @@ -3839,7 +5999,28 @@ class PhotoImage(Image, _PhotoImageLike): from_coords: Iterable[int] | None = None, to: Iterable[int] | None = None, shrink: bool = False, - ) -> None: ... + ) -> None: + """Reads image data from the file named FILENAME into the image. + +The FORMAT option specifies the format of the image data in the +file. + +The FROM_COORDS option specifies a rectangular sub-region of the image +file data to be copied to the destination image. 
It must be a tuple +or a list of 1 to 4 integers (x1, y1, x2, y2). (x1, y1) and +(x2, y2) specify diagonally opposite corners of the rectangle. If +x2 and y2 are not specified, the default value is the bottom-right +corner of the source image. The default, if this option is not +specified, is the whole of the image in the image file. + +The TO option specifies the coordinates of the top-left corner of +the region of the image into which data from filename are to be +read. The default is (0, 0). + +If SHRINK is true, the size of the destination image will be +reduced, if necessary, so that the region into which the image file +data are read is at the bottom-right corner of the image. +""" def write( self, filename: StrOrBytesPath, @@ -3848,11 +6029,57 @@ class PhotoImage(Image, _PhotoImageLike): *, background: str | None = None, grayscale: bool = False, - ) -> None: ... + ) -> None: + """Writes image data from the image to a file named FILENAME. + +The FORMAT option specifies the name of the image file format +handler to be used to write the data to the file. If this option +is not given, the format is guessed from the file extension. + +The FROM_COORDS option specifies a rectangular region of the image +to be written to the image file. It must be a tuple or a list of 1 +to 4 integers (x1, y1, x2, y2). If only x1 and y1 are specified, +the region extends from (x1,y1) to the bottom-right corner of the +image. If all four coordinates are given, they specify diagonally +opposite corners of the rectangular region. The default, if this +option is not given, is the whole image. + +If BACKGROUND is specified, the data will not contain any +transparency information. In all transparent pixels the color will +be replaced by the specified color. + +If GRAYSCALE is true, the data will not contain color information. +All pixel data will be transformed into grayscale. +""" @overload def data( self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False - ) -> bytes: ... + ) -> bytes: + """Returns image data. + +The FORMAT option specifies the name of the image file format +handler to be used. If this option is not given, this method uses +a format that consists of a tuple (one element per row) of strings +containing space-separated (one element per pixel/column) colors +in “#RRGGBB” format (where RR is a pair of hexadecimal digits for +the red channel, GG for green, and BB for blue). + +The FROM_COORDS option specifies a rectangular region of the image +to be returned. It must be a tuple or a list of 1 to 4 integers +(x1, y1, x2, y2). If only x1 and y1 are specified, the region +extends from (x1,y1) to the bottom-right corner of the image. If +all four coordinates are given, they specify diagonally opposite +corners of the rectangular region, including (x1, y1) and excluding +(x2, y2). The default, if this option is not given, is the whole +image. + +If BACKGROUND is specified, the data will not contain any +transparency information. In all transparent pixels the color will +be replaced by the specified color. + +If GRAYSCALE is true, the data will not contain color information. +All pixel data will be transformed into grayscale. +""" @overload def data( self, @@ -3866,12 +6093,21 @@ class PhotoImage(Image, _PhotoImageLike): else: def write( self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None - ) -> None: ... 
+ ) -> None: + """Write image to file FILENAME in FORMAT starting from + position FROM_COORDS. +""" - def transparency_get(self, x: int, y: int) -> bool: ... - def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... + def transparency_get(self, x: int, y: int) -> bool: + """Return True if the pixel at x,y is transparent. +""" + def transparency_set(self, x: int, y: int, boolean: bool) -> None: + """Set the transparency of the pixel at x,y. +""" class BitmapImage(Image, _BitmapImageLike): + """Widget which can display images in XBM format. +""" # This should be kept in sync with PIL.ImageTK.BitmapImage.__init__() def __init__( self, @@ -3885,12 +6121,18 @@ class BitmapImage(Image, _BitmapImageLike): foreground: str = ..., maskdata: str = ..., maskfile: StrOrBytesPath = ..., - ) -> None: ... + ) -> None: + """Create a bitmap with NAME. + +Valid resource names: background, data, file, foreground, maskdata, maskfile. +""" def image_names() -> tuple[str, ...]: ... def image_types() -> tuple[str, ...]: ... class Spinbox(Widget, XView): + """spinbox widget. +""" def __init__( self, master: Misc | None = None, @@ -3948,7 +6190,33 @@ class Spinbox(Widget, XView): width: int = 20, wrap: bool = False, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct a spinbox widget with the parent MASTER. + +STANDARD OPTIONS + + activebackground, background, borderwidth, + cursor, exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, justify, relief, + repeatdelay, repeatinterval, + selectbackground, selectborderwidth + selectforeground, takefocus, textvariable + xscrollcommand. + +WIDGET-SPECIFIC OPTIONS + + buttonbackground, buttoncursor, + buttondownrelief, buttonuprelief, + command, disabledbackground, + disabledforeground, format, from, + invalidcommand, increment, + readonlybackground, state, to, + validate, validatecommand values, + width, wrap, +""" @overload def configure( self, @@ -4004,32 +6272,130 @@ class Spinbox(Widget, XView): width: int = ..., wrap: bool = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] - def delete(self, first, last=None) -> Literal[""]: ... - def get(self) -> str: ... - def icursor(self, index): ... - def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... - def index(self, index: str | int) -> int: ... - def insert(self, index: str | int, s: str) -> Literal[""]: ... + def bbox(self, index) -> tuple[int, int, int, int] | None: # type: ignore[override] + """Return a tuple of X1,Y1,X2,Y2 coordinates for a +rectangle which encloses the character given by index. + +The first two elements of the list give the x and y +coordinates of the upper-left corner of the screen +area covered by the character (in pixels relative +to the widget) and the last two elements give the +width and height of the character, in pixels. 
The +bounding box may refer to a region outside the +visible area of the window. +""" + def delete(self, first, last=None) -> Literal[""]: + """Delete one or more elements of the spinbox. + +First is the index of the first character to delete, +and last is the index of the character just after +the last one to delete. If last isn't specified it +defaults to first+1, i.e. a single character is +deleted. This command returns an empty string. +""" + def get(self) -> str: + """Returns the spinbox's string +""" + def icursor(self, index): + """Alter the position of the insertion cursor. + +The insertion cursor will be displayed just before +the character given by index. Returns an empty string +""" + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: + """Returns the name of the widget at position x, y + +Return value is one of: none, buttondown, buttonup, entry +""" + def index(self, index: str | int) -> int: + """Returns the numerical index corresponding to index + """ + def insert(self, index: str | int, s: str) -> Literal[""]: + """Insert string s at index + +Returns an empty string. +""" # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented - def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: ... - def scan(self, *args): ... - def scan_mark(self, x): ... - def scan_dragto(self, x): ... - def selection(self, *args) -> tuple[int, ...]: ... - def selection_adjust(self, index): ... - def selection_clear(self): ... # type: ignore[override] - def selection_element(self, element=None): ... - def selection_from(self, index: int) -> None: ... - def selection_present(self) -> None: ... - def selection_range(self, start: int, end: int) -> None: ... - def selection_to(self, index: int) -> None: ... + def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: + """Causes the specified element to be invoked + +The element could be buttondown or buttonup +triggering the action associated with it. +""" + def scan(self, *args): + """Internal function. +""" + def scan_mark(self, x): + """Records x and the current view in the spinbox window; + +used in conjunction with later scan dragto commands. +Typically this command is associated with a mouse button +press in the widget. It returns an empty string. +""" + def scan_dragto(self, x): + """Compute the difference between the given x argument +and the x argument to the last scan mark command + +It then adjusts the view left or right by 10 times the +difference in x-coordinates. This command is typically +associated with mouse motion events in the widget, to +produce the effect of dragging the spinbox at high speed +through the window. The return value is an empty string. +""" + def selection(self, *args) -> tuple[int, ...]: + """Internal function. +""" + def selection_adjust(self, index): + """Locate the end of the selection nearest to the character +given by index, + +Then adjust that end of the selection to be at index +(i.e including but not going beyond index). The other +end of the selection is made the anchor point for future +select to commands. If the selection isn't currently in +the spinbox, then a new selection is created to include +the characters between index and the most recent selection +anchor point, inclusive. +""" + def selection_clear(self): # type: ignore[override] + """Clear the selection + +If the selection isn't in this widget then the +command has no effect. 
+""" + def selection_element(self, element=None): + """Sets or gets the currently selected element. + +If a spinbutton element is specified, it will be +displayed depressed. +""" + def selection_from(self, index: int) -> None: + """Set the fixed end of a selection to INDEX. +""" + def selection_present(self) -> None: + """Return True if there are characters selected in the spinbox, False +otherwise. +""" + def selection_range(self, start: int, end: int) -> None: + """Set the selection from START to END (not included). +""" + def selection_to(self, index: int) -> None: + """Set the variable end of a selection to INDEX. +""" class LabelFrame(Widget): + """labelframe widget. +""" def __init__( self, master: Misc | None = None, @@ -4062,7 +6428,22 @@ class LabelFrame(Widget): text: float | str = "", visual: str | tuple[str, int] = "", # can't be changed with configure() width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a labelframe widget with the parent MASTER. + +STANDARD OPTIONS + + borderwidth, cursor, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, padx, pady, relief, + takefocus, text + +WIDGET-SPECIFIC OPTIONS + + background, class, colormap, container, + height, labelanchor, labelwidget, + visual, width +""" @overload def configure( self, @@ -4089,12 +6470,20 @@ class LabelFrame(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., text: float | str = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class PanedWindow(Widget): + """panedwindow widget. +""" def __init__( self, master: Misc | None = None, @@ -4122,7 +6511,20 @@ class PanedWindow(Widget): sashwidth: float | str = 3, showhandle: bool = False, width: float | str = "", - ) -> None: ... + ) -> None: + """Construct a panedwindow widget with the parent MASTER. + +STANDARD OPTIONS + + background, borderwidth, cursor, height, + orient, relief, width + +WIDGET-SPECIFIC OPTIONS + + handlepad, handlesize, opaqueresize, + sashcursor, sashpad, sashrelief, + sashwidth, showhandle, +""" @overload def configure( self, @@ -4149,25 +6551,151 @@ class PanedWindow(Widget): sashwidth: float | str = ..., showhandle: bool = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def add(self, child: Widget, **kw) -> None: ... - def remove(self, child) -> None: ... + def add(self, child: Widget, **kw) -> None: + """Add a child widget to the panedwindow in a new pane. + +The child argument is the name of the child widget +followed by pairs of arguments that specify how to +manage the windows. The possible options and values +are the ones accepted by the paneconfigure method. 
+""" + def remove(self, child) -> None: + """Remove the pane containing child from the panedwindow + +All geometry management options for child will be forgotten. +""" forget = remove # type: ignore[assignment] - def identify(self, x: int, y: int): ... - def proxy(self, *args) -> tuple[Incomplete, ...]: ... - def proxy_coord(self) -> tuple[Incomplete, ...]: ... - def proxy_forget(self) -> tuple[Incomplete, ...]: ... - def proxy_place(self, x, y) -> tuple[Incomplete, ...]: ... - def sash(self, *args) -> tuple[Incomplete, ...]: ... - def sash_coord(self, index) -> tuple[Incomplete, ...]: ... - def sash_mark(self, index) -> tuple[Incomplete, ...]: ... - def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: ... - def panecget(self, child, option): ... - def paneconfigure(self, tagOrId, cnf=None, **kw): ... + def identify(self, x: int, y: int): + """Identify the panedwindow component at point x, y + +If the point is over a sash or a sash handle, the result +is a two element list containing the index of the sash or +handle, and a word indicating whether it is over a sash +or a handle, such as {0 sash} or {2 handle}. If the point +is over any other part of the panedwindow, the result is +an empty list. +""" + def proxy(self, *args) -> tuple[Incomplete, ...]: + """Internal function. +""" + def proxy_coord(self) -> tuple[Incomplete, ...]: + """Return the x and y pair of the most recent proxy location + """ + def proxy_forget(self) -> tuple[Incomplete, ...]: + """Remove the proxy from the display. + """ + def proxy_place(self, x, y) -> tuple[Incomplete, ...]: + """Place the proxy at the given x and y coordinates. + """ + def sash(self, *args) -> tuple[Incomplete, ...]: + """Internal function. +""" + def sash_coord(self, index) -> tuple[Incomplete, ...]: + """Return the current x and y pair for the sash given by index. + +Index must be an integer between 0 and 1 less than the +number of panes in the panedwindow. The coordinates given are +those of the top left corner of the region containing the sash. +pathName sash dragto index x y This command computes the +difference between the given coordinates and the coordinates +given to the last sash coord command for the given sash. It then +moves that sash the computed difference. The return value is the +empty string. +""" + def sash_mark(self, index) -> tuple[Incomplete, ...]: + """Records x and y for the sash given by index; + +Used in conjunction with later dragto commands to move the sash. +""" + def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: + """Place the sash given by index at the given coordinates + """ + def panecget(self, child, option): + """Query a management option for window. + +Option may be any value allowed by the paneconfigure subcommand +""" + def paneconfigure(self, tagOrId, cnf=None, **kw): + """Query or modify the management options for window. + +If no option is specified, returns a list describing all +of the available options for pathName. If option is +specified with no value, then the command returns a list +describing the one named option (this list will be identical +to the corresponding sublist of the value returned if no +option is specified). If one or more option-value pairs are +specified, then the command modifies the given widget +option(s) to have the given value(s); in this case the +command returns an empty string. The following options +are supported: + +after window + Insert the window after the window specified. window + should be the name of a window already managed by pathName. 
+before window + Insert the window before the window specified. window + should be the name of a window already managed by pathName. +height size + Specify a height for the window. The height will be the + outer dimension of the window including its border, if + any. If size is an empty string, or if -height is not + specified, then the height requested internally by the + window will be used initially; the height may later be + adjusted by the movement of sashes in the panedwindow. + Size may be any value accepted by Tk_GetPixels. +minsize n + Specifies that the size of the window cannot be made + less than n. This constraint only affects the size of + the widget in the paned dimension -- the x dimension + for horizontal panedwindows, the y dimension for + vertical panedwindows. May be any value accepted by + Tk_GetPixels. +padx n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the X-direction. The value may have any of the forms + accepted by Tk_GetPixels. +pady n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the Y-direction. The value may have any of the forms + accepted by Tk_GetPixels. +sticky style + If a window's pane is larger than the requested + dimensions of the window, this option may be used + to position (or stretch) the window within its pane. + Style is a string that contains zero or more of the + characters n, s, e or w. The string can optionally + contains spaces or commas, but they are ignored. Each + letter refers to a side (north, south, east, or west) + that the window will "stick" to. If both n and s + (or e and w) are specified, the window will be + stretched to fill the entire height (or width) of + its cavity. +width size + Specify a width for the window. The width will be + the outer dimension of the window including its + border, if any. If size is an empty string, or + if -width is not specified, then the width requested + internally by the window will be used initially; the + width may later be adjusted by the movement of sashes + in the panedwindow. Size may be any value accepted by + Tk_GetPixels. + +""" paneconfig = paneconfigure - def panes(self): ... + def panes(self): + """Returns an ordered list of the child panes. +""" def _test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi index d0d6de8426562..aa3f73d890b06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi @@ -5,8 +5,27 @@ from typing import ClassVar __all__ = ["Chooser", "askcolor"] class Chooser(Dialog): + """Create a dialog for the tk_chooseColor command. + +Args: + master: The master widget for this dialog. If not provided, + defaults to options['parent'] (if defined). + options: Dictionary of options for the tk_chooseColor call. + initialcolor: Specifies the selected color when the + dialog is first displayed. This can be a tk color + string or a 3-tuple of ints in the range (0, 255) + for an RGB triplet. + parent: The parent window of the color dialog. The + color dialog is displayed on top of this. + title: A string for the title of the dialog box. +""" command: ClassVar[str] def askcolor( color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... -) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... 
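The pane-management options above (shared by add() and paneconfigure()) are easiest to see in a short script. A minimal sketch for the classic tkinter.PanedWindow; the child widgets and option values are invented for illustration:

    import tkinter as tk

    root = tk.Tk()
    pw = tk.PanedWindow(root, orient="horizontal", showhandle=True, sashwidth=6)
    pw.pack(fill="both", expand=True)

    left = tk.Listbox(pw)
    right = tk.Text(pw)
    # add() accepts the same management options as paneconfigure().
    pw.add(left, minsize=120, sticky="ns")
    pw.add(right, sticky="nsew", padx=4)
    # Management options can be queried or changed after the pane exists.
    pw.paneconfigure(left, width=160)
    print(pw.panes())
    root.mainloop()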
+) -> tuple[None, None] | tuple[tuple[int, int, int], str]: + """Display dialog window for selection of a color. + +Convenience wrapper for the Chooser class. Displays the color +chooser dialog with color as the initial value. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi index 521f451a9b2c5..b23a05cec982a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi @@ -1,3 +1,103 @@ +"""Drag-and-drop support for Tkinter. + +This is very preliminary. I currently only support dnd *within* one +application, between different windows (or within the same window). + +I am trying to make this as generic as possible -- not dependent on +the use of a particular widget or icon type, etc. I also hope that +this will work with Pmw. + +To enable an object to be dragged, you must create an event binding +for it that starts the drag-and-drop process. Typically, you should +bind to a callback function that you write. The function +should call Tkdnd.dnd_start(source, event), where 'source' is the +object to be dragged, and 'event' is the event that invoked the call +(the argument to your callback function). Even though this is a class +instantiation, the returned instance should not be stored -- it will +be kept alive automatically for the duration of the drag-and-drop. + +When a drag-and-drop is already in process for the Tk interpreter, the +call is *ignored*; this normally averts starting multiple simultaneous +dnd processes, e.g. because different button callbacks all +dnd_start(). + +The object is *not* necessarily a widget -- it can be any +application-specific object that is meaningful to potential +drag-and-drop targets. + +Potential drag-and-drop targets are discovered as follows. Whenever +the mouse moves, and at the start and end of a drag-and-drop move, the +Tk widget directly under the mouse is inspected. This is the target +widget (not to be confused with the target object, yet to be +determined). If there is no target widget, there is no dnd target +object. If there is a target widget, and it has an attribute +dnd_accept, this should be a function (or any callable object). The +function is called as dnd_accept(source, event), where 'source' is the +object being dragged (the object passed to dnd_start() above), and +'event' is the most recent event object (generally a event; +it can also be or ). If the dnd_accept() +function returns something other than None, this is the new dnd target +object. If dnd_accept() returns None, or if the target widget has no +dnd_accept attribute, the target widget's parent is considered as the +target widget, and the search for a target object is repeated from +there. If necessary, the search is repeated all the way up to the +root widget. If none of the target widgets can produce a target +object, there is no target object (the target object is None). + +The target object thus produced, if any, is called the new target +object. It is compared with the old target object (or None, if there +was no old target widget). There are several cases ('source' is the +source object, and 'event' is the most recent event object): + +- Both the old and new target objects are None. Nothing happens. + +- The old and new target objects are the same object. Its method +dnd_motion(source, event) is called. + +- The old target object was None, and the new target object is not +None. 
The new target object's method dnd_enter(source, event) is +called. + +- The new target object is None, and the old target object is not +None. The old target object's method dnd_leave(source, event) is +called. + +- The old and new target objects differ and neither is None. The old +target object's method dnd_leave(source, event), and then the new +target object's method dnd_enter(source, event) is called. + +Once this is done, the new target object replaces the old one, and the +Tk mainloop proceeds. The return value of the methods mentioned above +is ignored; if they raise an exception, the normal exception handling +mechanisms take over. + +The drag-and-drop processes can end in two ways: a final target object +is selected, or no final target object is selected. When a final +target object is selected, it will always have been notified of the +potential drop by a call to its dnd_enter() method, as described +above, and possibly one or more calls to its dnd_motion() method; its +dnd_leave() method has not been called since the last call to +dnd_enter(). The target is notified of the drop by a call to its +method dnd_commit(source, event). + +If no final target object is selected, and there was an old target +object, its dnd_leave(source, event) method is called to complete the +dnd sequence. + +Finally, the source object is notified that the drag-and-drop process +is over, by a call to source.dnd_end(target, event), specifying either +the selected target object, or None if no target object was selected. +The source object can use this to implement the commit action; this is +sometimes simpler than to do it in the target's dnd_commit(). The +target's dnd_commit() method could then simply be aliased to +dnd_leave(). + +At any time during a dnd sequence, the application can cancel the +sequence by calling the cancel() method on the object returned by +dnd_start(). This will call dnd_leave() if a target is currently +active; it will never call dnd_commit(). + +""" from tkinter import Event, Misc, Tk, Widget from typing import ClassVar, Protocol, type_check_only diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi index b6ef8f45d0350..ef746e89c2083 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi @@ -1,3 +1,16 @@ +"""File selection dialog classes. + +Classes: + +- FileDialog +- LoadFileDialog +- SaveFileDialog + +This module also presents tk common file dialogues, it provides interfaces +to the native file dialogues available in Tk 4.2 and newer, and the +directory dialogue available in Tk 8.3 and newer. +These interfaces were written by Fredrik Lundh, May 1997. +""" from _typeshed import Incomplete, StrOrBytesPath, StrPath from collections.abc import Hashable, Iterable from tkinter import Button, Entry, Event, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog @@ -22,6 +35,26 @@ __all__ = [ dialogstates: dict[Hashable, tuple[str, str]] class FileDialog: + """Standard file selection dialog -- no checks on selected file. + +Usage: + + d = FileDialog(master) + fname = d.go(dir_or_file, pattern, default, key) + if fname is None: ...canceled... + else: ...open file... + +All arguments to go() are optional. 
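The tkinter.dnd protocol described in the module docstring above is easier to follow as a skeleton in code. A sketch of the documented hooks only (dnd_start, dnd_accept, dnd_enter/dnd_motion/dnd_leave/dnd_commit, dnd_end); the DraggedItem and DropCanvas names are invented for illustration:

    import tkinter
    import tkinter.dnd as dnd

    class DraggedItem:
        # Source object: any application object that targets understand.
        def dnd_end(self, target, event):
            # Called once the drag finishes; target is the chosen object or None.
            print("drag finished, target:", target)

    class DropCanvas(tkinter.Canvas):
        def dnd_accept(self, source, event):
            # Returning a non-None value makes this the active target object.
            return self
        def dnd_enter(self, source, event):
            self.configure(background="lightyellow")
        def dnd_motion(self, source, event):
            pass
        def dnd_leave(self, source, event):
            self.configure(background="white")
        def dnd_commit(self, source, event):
            print("dropped:", source)
            self.dnd_leave(source, event)

    root = tkinter.Tk()
    DropCanvas(root, background="white", width=200, height=120).pack()
    handle = tkinter.Label(root, text="drag me")
    handle.pack()
    handle.bind("<ButtonPress-1>", lambda e: dnd.dnd_start(DraggedItem(), e))
    root.mainloop()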
+ +The 'key' argument specifies a key in the global dictionary +'dialogstates', which keeps track of the values for the directory +and pattern arguments, overriding the values passed in (it does +not keep track of the default argument!). If no key is specified, +the dialog keeps no memory of previous state. Note that memory is +kept even when the dialog is canceled. (All this emulates the +behavior of the Macintosh file selection dialogs.) + +""" title: str master: Misc directory: str | None @@ -57,22 +90,32 @@ class FileDialog: def set_selection(self, file: StrPath) -> None: ... class LoadFileDialog(FileDialog): + """File selection dialog which checks that the file exists. +""" title: str def ok_command(self) -> None: ... class SaveFileDialog(FileDialog): + """File selection dialog which checks that the file may be created. +""" title: str def ok_command(self) -> None: ... class _Dialog(commondialog.Dialog): ... class Open(_Dialog): + """Ask for a filename to open +""" command: ClassVar[str] class SaveAs(_Dialog): + """Ask for a filename to save as +""" command: ClassVar[str] class Directory(commondialog.Dialog): + """Ask for a directory +""" command: ClassVar[str] # TODO: command kwarg available on macos @@ -86,7 +129,9 @@ def asksaveasfilename( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> str: ... # can be empty string +) -> str: # can be empty string + """Ask for a filename to save as +""" def askopenfilename( *, defaultextension: str | None = "", @@ -96,7 +141,9 @@ def askopenfilename( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> str: ... # can be empty string +) -> str: # can be empty string + """Ask for a filename to open +""" def askopenfilenames( *, defaultextension: str | None = "", @@ -106,10 +153,17 @@ def askopenfilenames( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> Literal[""] | tuple[str, ...]: ... +) -> Literal[""] | tuple[str, ...]: + """Ask for multiple filenames to open + +Returns a list of filenames or empty list if +cancel button selected +""" def askdirectory( *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = False, parent: Misc | None = ..., title: str | None = ... -) -> str: ... # can be empty string +) -> str: # can be empty string + """Ask for a directory, and return the file name +""" # TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) def asksaveasfile( @@ -123,7 +177,9 @@ def asksaveasfile( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> IO[Incomplete] | None: ... +) -> IO[Incomplete] | None: + """Ask for a filename to save as, and returned the opened file +""" def askopenfile( mode: str = "r", *, @@ -134,7 +190,9 @@ def askopenfile( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> IO[Incomplete] | None: ... +) -> IO[Incomplete] | None: + """Ask for a filename to open, and returned the opened file +""" def askopenfiles( mode: str = "r", *, @@ -145,5 +203,13 @@ def askopenfiles( parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> tuple[IO[Incomplete], ...]: ... # can be empty tuple -def test() -> None: ... 
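For the filedialog convenience functions above, a short usage sketch; the file-type filter and titles are placeholders:

    from tkinter import Tk, filedialog

    root = Tk()
    root.withdraw()  # the dialogs do not need a visible main window
    path = filedialog.askopenfilename(
        title="Pick a file",
        filetypes=[("Text files", "*.txt"), ("All files", "*.*")],
    )
    if path:  # an empty string means the dialog was cancelled
        print("selected:", path)
    out = filedialog.asksaveasfilename(defaultextension=".txt")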
+) -> tuple[IO[Incomplete], ...]: # can be empty tuple + """Ask for multiple filenames and return the open file +objects + +returns a list of open file objects or an empty list if +cancel selected +""" +def test() -> None: + """Simple test program. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi index 327ba7a2432e0..865d53ce489b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi @@ -40,6 +40,26 @@ class _MetricsDict(TypedDict): fixed: bool class Font: + """Represents a named font. + +Constructor options are: + +font -- font specifier (name, system font, or (family, size, style)-tuple) +name -- name to use for this font configuration (defaults to a unique name) +exists -- does a named font by this name already exist? + Creates a new named font if False, points to the existing font if True. + Raises _tkinter.TclError if the assertion is false. + + the following are ignored if font is specified: + +family -- font 'family', e.g. Courier, Times, Helvetica +size -- font size in points +weight -- font thickness: NORMAL, BOLD +slant -- font slant: ROMAN, ITALIC +underline -- font underlining: false (0), true (1) +overstrike -- font strikeout: false (0), true (1) + +""" name: str delete_font: bool counter: ClassVar[itertools.count[int]] # undocumented @@ -62,7 +82,9 @@ class Font: __hash__: ClassVar[None] # type: ignore[assignment] def __setitem__(self, key: str, value: Any) -> None: ... @overload - def cget(self, option: Literal["family"]) -> str: ... + def cget(self, option: Literal["family"]) -> str: + """Get font attribute +""" @overload def cget(self, option: Literal["size"]) -> int: ... @overload @@ -75,7 +97,9 @@ class Font: def cget(self, option: str) -> Any: ... __getitem__ = cget @overload - def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: + """Return actual font attributes +""" @overload def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... @overload @@ -97,24 +121,43 @@ class Font: slant: Literal["roman", "italic"] = ..., underline: bool = ..., overstrike: bool = ..., - ) -> _FontDict | None: ... + ) -> _FontDict | None: + """Modify font attributes +""" configure = config - def copy(self) -> Font: ... + def copy(self) -> Font: + """Return a distinct copy of the current font +""" @overload - def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: ... + def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: + """Return font metrics. + +For best performance, create a dummy widget +using this font before calling this method. +""" @overload def metrics(self, option: Literal["fixed"], /, *, displayof: tkinter.Misc | None = ...) -> bool: ... @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... - def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: + """Return text width +""" def __eq__(self, other: object) -> bool: ... def __del__(self) -> None: ... -def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... 
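A brief sketch of the tkinter.font.Font API documented above; the family, size, and label text are arbitrary:

    import tkinter
    from tkinter import font

    root = tkinter.Tk()
    heading = font.Font(family="Helvetica", size=14, weight="bold")
    print(heading.actual("family"))          # family actually resolved by Tk
    print(heading.measure("Hello, world"))   # rendered width in pixels
    print(heading.metrics("linespace"))      # pixels needed for one line of text
    heading.configure(underline=True)        # named fonts can be reconfigured live
    print(font.families()[:5])               # a few of the available families
    tkinter.Label(root, text="Title", font=heading).pack()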
-def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: + """Get font families (as a tuple) +""" +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: + """Get names of defined fonts (as a tuple) +""" if sys.version_info >= (3, 10): - def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: + """Given the name of a tk named font, returns a Font representation. + """ else: - def nametofont(name: str) -> Font: ... + def nametofont(name: str) -> Font: + """Given the name of a tk named font, returns a Font representation. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi index cd95f0de5f803..36e669bddaeb5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi @@ -22,6 +22,8 @@ YES: Final = "yes" NO: Final = "no" class Message(Dialog): + """A message box +""" command: ClassVar[str] def showinfo( @@ -32,7 +34,9 @@ def showinfo( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: ... +) -> str: + """Show an info message +""" def showwarning( title: str | None = None, message: str | None = None, @@ -41,7 +45,9 @@ def showwarning( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: ... +) -> str: + """Show a warning message +""" def showerror( title: str | None = None, message: str | None = None, @@ -50,7 +56,9 @@ def showerror( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok"] = "ok", parent: Misc = ..., -) -> str: ... +) -> str: + """Show an error message +""" def askquestion( title: str | None = None, message: str | None = None, @@ -59,7 +67,9 @@ def askquestion( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["yes", "no"] = ..., parent: Misc = ..., -) -> str: ... +) -> str: + """Ask a question +""" def askokcancel( title: str | None = None, message: str | None = None, @@ -68,7 +78,9 @@ def askokcancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["ok", "cancel"] = ..., parent: Misc = ..., -) -> bool: ... +) -> bool: + """Ask if operation should proceed; return true if the answer is ok +""" def askyesno( title: str | None = None, message: str | None = None, @@ -77,7 +89,9 @@ def askyesno( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["yes", "no"] = ..., parent: Misc = ..., -) -> bool: ... +) -> bool: + """Ask a question; return true if the answer is yes +""" def askyesnocancel( title: str | None = None, message: str | None = None, @@ -86,7 +100,9 @@ def askyesnocancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["cancel", "yes", "no"] = ..., parent: Misc = ..., -) -> bool | None: ... +) -> bool | None: + """Ask a question; return true if the answer is yes, None if cancelled. +""" def askretrycancel( title: str | None = None, message: str | None = None, @@ -95,4 +111,6 @@ def askretrycancel( icon: Literal["error", "info", "question", "warning"] = ..., default: Literal["retry", "cancel"] = ..., parent: Misc = ..., -) -> bool: ... 
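A quick sketch of the common messagebox question/info patterns documented above; titles and messages are placeholders:

    from tkinter import Tk, messagebox

    root = Tk()
    root.withdraw()
    if messagebox.askyesno("Quit", "Save changes before exiting?", icon="question"):
        messagebox.showinfo("Saved", "Changes were saved.")
    answer = messagebox.askyesnocancel("Quit", "Save changes?")
    # answer is True for Yes, False for No, None if the dialog was cancelled.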
+) -> bool: + """Ask if operation should be retried; return true if the answer is yes +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi index 6f1abc7144877..6ac86b8379b3f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -1,3 +1,15 @@ +"""A ScrolledText widget feels like a text widget but also has a +vertical scroll bar on its right. (Later, options may be added to +add a horizontal bar as well, to make the bars disappear +automatically when not needed, to move them to the other side of the +window, etc.) + +Configuration options are passed to the Text widget. +A Frame widget is inserted between the master and the text, to hold +the Scrollbar widget. +Most methods calls are inherited from the Text widget; Pack, Grid and +Place methods are redirected to the Frame widget however. +""" from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi index 45dce21a6b1c3..adf5f2d54684b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi @@ -1,13 +1,59 @@ +"""This modules handles dialog boxes. + +It contains the following public symbols: + +SimpleDialog -- A simple but flexible modal dialog box + +Dialog -- a base class for dialogs + +askinteger -- get an integer from the user + +askfloat -- get a float from the user + +askstring -- get a string from the user +""" from tkinter import Event, Frame, Misc, Toplevel class Dialog(Toplevel): - def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... - def body(self, master: Frame) -> Misc | None: ... - def buttonbox(self) -> None: ... + """Class to open dialogs. + +This class is intended as a base class for custom dialogs +""" + def __init__(self, parent: Misc | None, title: str | None = None) -> None: + """Initialize a dialog. + +Arguments: + + parent -- a parent window (the application window) + + title -- the dialog title +""" + def body(self, master: Frame) -> Misc | None: + """create dialog body. + +return widget that should have initial focus. +This method should be overridden, and is called +by the __init__ method. +""" + def buttonbox(self) -> None: + """add standard button box. + +override if you do not want the standard buttons +""" def ok(self, event: Event[Misc] | None = None) -> None: ... def cancel(self, event: Event[Misc] | None = None) -> None: ... - def validate(self) -> bool: ... - def apply(self) -> None: ... + def validate(self) -> bool: + """validate the data + +This method is called automatically to validate the data before the +dialog is destroyed. By default, it always validates OK. +""" + def apply(self) -> None: + """process the data + +This method is called automatically to process the data, *after* +the dialog is destroyed. By default, it does nothing. +""" class SimpleDialog: def __init__( @@ -33,7 +79,17 @@ def askfloat( minvalue: float | None = ..., maxvalue: float | None = ..., parent: Misc | None = ..., -) -> float | None: ... 
+) -> float | None: + """get a float from the user + +Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + +Return value is a float +""" def askinteger( title: str | None, prompt: str, @@ -42,7 +98,17 @@ def askinteger( minvalue: int | None = ..., maxvalue: int | None = ..., parent: Misc | None = ..., -) -> int | None: ... +) -> int | None: + """get an integer from the user + +Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + +Return value is an integer +""" def askstring( title: str | None, prompt: str, @@ -51,4 +117,14 @@ def askstring( show: str | None = ..., # minvalue/maxvalue is accepted but not useful. parent: Misc | None = ..., -) -> str | None: ... +) -> str | None: + """get a string from the user + +Arguments: + + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + +Return value is a string +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi index 7891364fa02c6..e75f0c8e61c85 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi @@ -35,19 +35,120 @@ TCL_IDLE_EVENTS: Final = 32 TCL_ALL_EVENTS: Final = 0 class tixCommand: - def tix_addbitmapdir(self, directory: str) -> None: ... - def tix_cget(self, option: str) -> Any: ... - def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... - def tix_filedialog(self, dlgclass: str | None = None) -> str: ... - def tix_getbitmap(self, name: str) -> str: ... - def tix_getimage(self, name: str) -> str: ... - def tix_option_get(self, name: str) -> Any: ... - def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... + """The tix commands provide access to miscellaneous elements + of Tix's internal state and the Tix application context. + Most of the information manipulated by these commands pertains + to the application as a whole, or to a screen or + display, rather than to a particular window. + + This is a mixin class, assumed to be mixed to Tkinter.Tk + that supports the self.tk.call method. + """ + def tix_addbitmapdir(self, directory: str) -> None: + """Tix maintains a list of directories under which + the tix_getimage and tix_getbitmap commands will + search for image files. The standard bitmap directory + is $TIX_LIBRARY/bitmaps. The addbitmapdir command + adds directory into this list. By using this + command, the image files of an applications can + also be located using the tix_getimage or tix_getbitmap + command. + """ + def tix_cget(self, option: str) -> Any: + """Returns the current value of the configuration + option given by option. Option may be any of the + options described in the CONFIGURATION OPTIONS section. + """ + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: + """Query or modify the configuration options of the Tix application + context. If no option is specified, returns a dictionary all of the + available options. If option is specified with no value, then the + command returns a list describing the one named option (this list + will be identical to the corresponding sublist of the value + returned if no option is specified). If one or more option-value + pairs are specified, then the command modifies the given option(s) + to have the given value(s); in this case the command returns an + empty string. 
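A short sketch of the askinteger/askstring helpers documented above; prompts and bounds are placeholders, and each helper returns None when the user cancels:

    from tkinter import Tk, simpledialog

    root = Tk()
    count = simpledialog.askinteger(
        "Batch size", "How many items?", parent=root, minvalue=1, maxvalue=100
    )
    name = simpledialog.askstring("Project", "Project name:", parent=root)
    if count is None or name is None:
        print("cancelled")
    else:
        print(f"{name}: {count} items")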
Option may be any of the configuration options. + """ + def tix_filedialog(self, dlgclass: str | None = None) -> str: + """Returns the file selection dialog that may be shared among + different calls from this application. This command will create a + file selection dialog widget when it is called the first time. This + dialog will be returned by all subsequent calls to tix_filedialog. + An optional dlgclass parameter can be passed to specified what type + of file selection dialog widget is desired. Possible options are + tix FileSelectDialog or tixExFileSelectDialog. + """ + def tix_getbitmap(self, name: str) -> str: + """Locates a bitmap file of the name name.xpm or name in one of the + bitmap directories (see the tix_addbitmapdir command above). By + using tix_getbitmap, you can avoid hard coding the pathnames of the + bitmap files in your application. When successful, it returns the + complete pathname of the bitmap file, prefixed with the character + '@'. The returned value can be used to configure the -bitmap + option of the TK and Tix widgets. + """ + def tix_getimage(self, name: str) -> str: + """Locates an image file of the name name.xpm, name.xbm or name.ppm + in one of the bitmap directories (see the addbitmapdir command + above). If more than one file with the same name (but different + extensions) exist, then the image type is chosen according to the + depth of the X display: xbm images are chosen on monochrome + displays and color images are chosen on color displays. By using + tix_ getimage, you can avoid hard coding the pathnames of the + image files in your application. When successful, this command + returns the name of the newly created image, which can be used to + configure the -image option of the Tk and Tix widgets. + """ + def tix_option_get(self, name: str) -> Any: + """Gets the options maintained by the Tix + scheme mechanism. Available options include: + + active_bg active_fg bg + bold_font dark1_bg dark1_fg + dark2_bg dark2_fg disabled_fg + fg fixed_font font + inactive_bg inactive_fg input1_bg + input2_bg italic_font light1_bg + light1_fg light2_bg light2_fg + menu_font output1_bg output2_bg + select_bg select_fg selector + """ + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: + """Resets the scheme and fontset of the Tix application to + newScheme and newFontSet, respectively. This affects only those + widgets created after this call. Therefore, it is best to call the + resetoptions command before the creation of any widgets in a Tix + application. + + The optional parameter newScmPrio can be given to reset the + priority level of the Tk options set by the Tix schemes. + + Because of the way Tk handles the X option database, after Tix has + been has imported and inited, it is not possible to reset the color + schemes and font sets using the tix config command. Instead, the + tix_resetoptions command must be used. + """ class Tk(tkinter.Tk, tixCommand): + """Toplevel widget of Tix which represents mostly the main window + of an application. It has an associated Tcl interpreter. +""" def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): + """A TixWidget class is used to package all (or most) Tix widgets. + + Widget initialization is extended in two ways: + 1) It is possible to give a list of options which must be part of + the creation command (so called Tix 'static' options). 
These cannot be + given as a 'config' command later. + 2) It is possible to give the name of an existing TK widget. These are + child widgets created automatically by a Tix mega-widget. The Tk call + to create these widgets is therefore bypassed in TixWidget.__init__ + + Both options are for use by subclasses only. + """ def __init__( self, master: tkinter.Misc | None = None, @@ -57,17 +158,35 @@ class TixWidget(tkinter.Widget): kw: dict[str, Any] = {}, ) -> None: ... def __getattr__(self, name: str): ... - def set_silent(self, value: str) -> None: ... - def subwidget(self, name: str) -> tkinter.Widget: ... - def subwidgets_all(self) -> list[tkinter.Widget]: ... - def config_all(self, option: Any, value: Any) -> None: ... + def set_silent(self, value: str) -> None: + """Set a variable without calling its action routine +""" + def subwidget(self, name: str) -> tkinter.Widget: + """Return the named subwidget (which must have been created by + the sub-class). +""" + def subwidgets_all(self) -> list[tkinter.Widget]: + """Return all subwidgets. +""" + def config_all(self, option: Any, value: Any) -> None: + """Set configuration options for all subwidgets (and self). +""" def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): + """Subwidget class. + + This is used to mirror child widgets automatically created + by Tix/Tk as part of a mega-widget in Python (which is not informed + of this) +""" def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: + """DisplayStyle - handle configuration options shared by + (multiple) Display Items +""" def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... @@ -75,16 +194,43 @@ class DisplayStyle: def config(self, cnf: dict[str, Any] = {}, **kw): ... class Balloon(TixWidget): + """Balloon help widget. + + Subwidget Class + --------- ----- + label Label + message Message +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: + """Bind balloon widget to another. + One balloon widget may be bound to several widgets at the same time +""" def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): + """ButtonBox - A container for pushbuttons. + Subwidgets are the buttons added with the add method. + """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: + """Add a button with given name to box. +""" def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): + """ComboBox - an Entry field with a dropdown menu. The user can select a + choice by either typing in the entry subwidget or selecting from the + listbox subwidget. 
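As a small, heavily hedged illustration of the Tix mega-widgets above: the sketch below assumes the Tix Tcl extension is actually installed (tkinter.tix is legacy and often unavailable), and the balloonmsg option name comes from the Tix Balloon bind subcommand:

    import tkinter
    import tkinter.tix as tix

    root = tix.Tk()                      # Tix needs its own Tk subclass
    button = tkinter.Button(root, text="Run")
    button.pack()
    balloon = tix.Balloon(root)
    balloon.bind_widget(button, balloonmsg="Start the job")  # tooltip-style help
    root.mainloop()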
+ + Subwidget Class + --------- ----- + entry Entry + arrow Button + slistbox ScrolledListBox + tick Button + cross Button : present if created with the fancy option +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... @@ -92,21 +238,62 @@ class ComboBox(TixWidget): def pick(self, index: int) -> None: ... class Control(TixWidget): + """Control - An entry field with value change arrows. The user can + adjust the value by pressing the two arrow buttons or by entering + the value directly into the entry. The new value will be checked + against the user-defined upper and lower limits. + + Subwidget Class + --------- ----- + incr Button + decr Button + entry Entry + label Label +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): + """LabelEntry - Entry field with label. Packages an entry widget + and a label into one mega widget. It can be used to simplify the creation + of ``entry-form'' type of interface. + + Subwidgets Class + ---------- ----- + label Label + entry Entry +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class LabelFrame(TixWidget): + """LabelFrame - Labelled Frame container. Packages a frame widget + and a label into one mega widget. To create widgets inside a + LabelFrame widget, one creates the new widgets relative to the + frame subwidget and manage them inside the frame subwidget. + + Subwidgets Class + ---------- ----- + label Label + frame Frame +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Meter(TixWidget): + """The Meter widget can be used to show the progress of a background + job which may take a long time to execute. + """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class OptionMenu(TixWidget): + """OptionMenu - creates a menu button of options. + + Subwidget Class + --------- ----- + menubutton Menubutton + menu Menu +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... @@ -115,52 +302,155 @@ class OptionMenu(TixWidget): def enable(self, name: str) -> None: ... class PopupMenu(TixWidget): + """PopupMenu widget can be used as a replacement of the tk_popup command. + The advantage of the Tix PopupMenu widget is it requires less application + code to manipulate. + + + Subwidgets Class + ---------- ----- + menubutton Menubutton + menu Menu +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... class Select(TixWidget): + """Select - Container of button subwidgets. It can be used to provide + radio-box or check-box style of selection options for the user. + + Subwidgets are buttons added dynamically using the add method. +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... 
def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): + """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): + """DirList - displays a list view of a directory, its previous + directories and its sub-directories. The user can choose one of + the directories displayed in the list or change to another directory. + + Subwidget Class + --------- ----- + hlist HList + hsb Scrollbar + vsb Scrollbar +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirTree(TixWidget): + """DirTree - Directory Listing in a hierarchical view. + Displays a tree view of a directory, its previous directories and its + sub-directories. The user can choose one of the directories displayed + in the list or change to another directory. + + Subwidget Class + --------- ----- + hlist HList + hsb Scrollbar + vsb Scrollbar +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirSelectDialog(TixWidget): + """The DirSelectDialog widget presents the directories in the file + system in a dialog window. The user can use this dialog window to + navigate through the file system to select the desired directory. + + Subwidgets Class + ---------- ----- + dirbox DirSelectDialog +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def popup(self) -> None: ... def popdown(self) -> None: ... class DirSelectBox(TixWidget): + """DirSelectBox - Motif style file select box. + It is generally used for + the user to choose a file. FileSelectBox stores the files mostly + recently selected into a ComboBox widget so that they can be quickly + selected again. + + Subwidget Class + --------- ----- + selection ComboBox + filter ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... class ExFileSelectBox(TixWidget): + """ExFileSelectBox - MS Windows style file select box. + It provides a convenient method for the user to select files. + + Subwidget Class + --------- ----- + cancel Button + ok Button + hidden Checkbutton + types ComboBox + dir ComboBox + file ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def filter(self) -> None: ... def invoke(self) -> None: ... class FileSelectBox(TixWidget): + """ExFileSelectBox - Motif style file select box. + It is generally used for + the user to choose a file. FileSelectBox stores the files mostly + recently selected into a ComboBox widget so that they can be quickly + selected again. + + Subwidget Class + --------- ----- + selection ComboBox + filter ComboBox + dirlist ScrolledListBox + filelist ScrolledListBox +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def apply_filter(self) -> None: ... def invoke(self) -> None: ... class FileEntry(TixWidget): + """FileEntry - Entry field with button that invokes a FileSelectDialog. + The user can type in the filename manually. 
Alternatively, the user can + press the button widget that sits next to the entry, which will bring + up a file selection dialog. + + Subwidgets Class + ---------- ----- + button Button + entry Entry +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self) -> None: ... def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): + """HList - Hierarchy display widget can be used to display any data + that have a hierarchical structure, for example, file system directory + trees. The list entries are indented and connected by branch lines + according to their places in the hierarchy. + + Subwidgets - None +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... @@ -217,24 +507,83 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): + """The CheckList widget + displays a list of items to be selected by the user. CheckList acts + similarly to the Tk checkbutton or radiobutton widgets, except it is + capable of handling many more items than checkbuttons or radiobuttons. + """ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def autosetmode(self) -> None: ... - def close(self, entrypath: str) -> None: ... - def getmode(self, entrypath: str) -> str: ... - def open(self, entrypath: str) -> None: ... - def getselection(self, mode: str = "on") -> tuple[str, ...]: ... - def getstatus(self, entrypath: str) -> str: ... - def setstatus(self, entrypath: str, mode: str = "on") -> None: ... + def autosetmode(self) -> None: + """This command calls the setmode method for all the entries in this + Tree widget: if an entry has no child entries, its mode is set to + none. Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close. +""" + def close(self, entrypath: str) -> None: + """Close the entry given by entryPath if its mode is close. +""" + def getmode(self, entrypath: str) -> str: + """Returns the current mode of the entry given by entryPath. +""" + def open(self, entrypath: str) -> None: + """Open the entry given by entryPath if its mode is open. +""" + def getselection(self, mode: str = "on") -> tuple[str, ...]: + """Returns a list of items whose status matches status. If status is + not specified, the list of items in the "on" status will be returned. + Mode can be on, off, default +""" + def getstatus(self, entrypath: str) -> str: + """Returns the current status of entryPath. +""" + def setstatus(self, entrypath: str, mode: str = "on") -> None: + """Sets the status of entryPath to be status. A bitmap will be + displayed next to the entry its status is on, off or default. +""" class Tree(TixWidget): + """Tree - The tixTree widget can be used to display hierarchical + data in a tree form. The user can adjust + the view of the tree by opening or closing parts of the tree. +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... - def autosetmode(self) -> None: ... - def close(self, entrypath: str) -> None: ... - def getmode(self, entrypath: str) -> str: ... - def open(self, entrypath: str) -> None: ... 
- def setmode(self, entrypath: str, mode: str = "none") -> None: ... + def autosetmode(self) -> None: + """This command calls the setmode method for all the entries in this + Tree widget: if an entry has no child entries, its mode is set to + none. Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close. +""" + def close(self, entrypath: str) -> None: + """Close the entry given by entryPath if its mode is close. +""" + def getmode(self, entrypath: str) -> str: + """Returns the current mode of the entry given by entryPath. +""" + def open(self, entrypath: str) -> None: + """Open the entry given by entryPath if its mode is open. +""" + def setmode(self, entrypath: str, mode: str = "none") -> None: + """This command is used to indicate whether the entry given by + entryPath has children entries and whether the children are visible. mode + must be one of open, close or none. If mode is set to open, a (+) + indicator is drawn next the entry. If mode is set to close, a (-) + indicator is drawn next the entry. If mode is set to none, no + indicators will be drawn for this entry. The default mode is none. The + open mode indicates the entry has hidden children and this entry can be + opened by the user. The close mode indicates that all the children of the + entry are now visible and the entry can be closed by the user. +""" class TList(TixWidget, tkinter.XView, tkinter.YView): + """TList - Hierarchy display widget which can be + used to display data in a tabular format. The list entries of a TList + widget are similar to the entries in the Tk listbox widget. The main + differences are (1) the TList widget can display the list entries in a + two dimensional format and (2) you can use graphical images as well as + multiple colors and fonts for the list entries. + + Subwidgets - None +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... @@ -261,6 +610,16 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def selection_set(self, first: int, last: int | None = None) -> None: ... class PanedWindow(TixWidget): + """PanedWindow - Multi-pane container widget + allows the user to interactively manipulate the sizes of several + panes. The panes can be arranged either vertically or horizontally.The + user changes the sizes of the panes by dragging the resize handle + between two panes. + + Subwidgets Class + ---------- ----- + g/p widgets added dynamically with the add method. +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -270,6 +629,13 @@ class PanedWindow(TixWidget): def panes(self) -> list[tkinter.Widget]: ... class ListNoteBook(TixWidget): + """A ListNoteBook widget is very similar to the TixNoteBook widget: + it can be used to display many windows in a limited space using a + notebook metaphor. The notebook is divided into a stack of pages + (windows). At one time only one of these pages can be shown. + The user can navigate through these pages by + choosing the name of the desired page in the hlist subwidget. +""" def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def page(self, name: str) -> tkinter.Widget: ... 
@@ -277,6 +643,13 @@ class ListNoteBook(TixWidget): def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): + """NoteBook - Multi-page container widget (tabbed notebook metaphor). + + Subwidgets Class + ---------- ----- + nbframe NoteBookFrame + page widgets added dynamically with the add method +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -286,9 +659,18 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... class InputOnly(TixWidget): + """InputOnly - Invisible widget. Unix only. + + Subwidgets - None +""" def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Form: + """The Tix Form geometry manager + + Widgets can be arranged by specifying attachments to other widgets. + See Tix documentation for complete details +""" def __setitem__(self, key: str, value: Any) -> None: ... def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi index 1d72acd995126..912618cfc94ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi @@ -1,3 +1,16 @@ +"""Ttk wrapper. + +This module provides classes to allow using Tk themed widget set. + +Ttk is based on a revised and enhanced version of +TIP #48 (http://tip.tcl.tk/48) specified style engine. + +Its basic idea is to separate, to the extent possible, the code +implementing a widget's behavior from the code implementing its +appearance. Widget class bindings are primarily responsible for +maintaining the widget state and invoking callbacks, all aspects +of the widgets appearance lies at Themes. +""" import _tkinter import sys import tkinter @@ -35,8 +48,18 @@ __all__ = [ "Spinbox", ] -def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... -def setup_master(master: tkinter.Misc | None = None): ... +def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: + """Returns adict with its values converted from Tcl objects to Python +objects. +""" +def setup_master(master: tkinter.Misc | None = None): + """If master is not None, itself is returned. If master is None, +the default master is returned if there is one, otherwise a new +master is created and returned. + +If it is not allowed to use the default root and master is None, +RuntimeError is raised. +""" _Padding: TypeAlias = ( float @@ -112,24 +135,78 @@ _ThemeSettingsValue = TypedDict( _ThemeSettings: TypeAlias = dict[str, _ThemeSettingsValue] class Style: + """Manipulate style database. +""" master: tkinter.Misc tk: _tkinter.TkappType def __init__(self, master: tkinter.Misc | None = None) -> None: ... # For these methods, values given vary between options. Returned values # seem to be str, but this might not always be the case. @overload - def configure(self, style: str) -> dict[str, Any] | None: ... # Returns None if no configuration. + def configure(self, style: str) -> dict[str, Any] | None: # Returns None if no configuration. + """Query or sets the default value of the specified option(s) in +style. + +Each key in kw is an option and each value is either a string or +a sequence identifying the value for that option. 
+""" @overload def configure(self, style: str, query_opt: str, **kw: Any) -> Any: ... @overload def configure(self, style: str, query_opt: None = None, **kw: Any) -> None: ... @overload - def map(self, style: str, query_opt: str) -> _Statespec: ... + def map(self, style: str, query_opt: str) -> _Statespec: + """Query or sets dynamic values of the specified option(s) in +style. + +Each key in kw is an option and each value should be a list or a +tuple (usually) containing statespecs grouped in tuples, or list, +or something else of your preference. A statespec is compound of +one or more states and then a value. +""" @overload def map(self, style: str, query_opt: None = None, **kw: Iterable[_Statespec]) -> dict[str, _Statespec]: ... - def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: ... - @overload - def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: ... # Always seems to return an empty list + def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: + """Returns the value specified for option in style. + +If state is specified it is expected to be a sequence of one +or more states. If the default argument is set, it is used as +a fallback value in case no specification for option is found. +""" + @overload + def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: # Always seems to return an empty list + """Define the widget layout for given style. If layoutspec is +omitted, return the layout specification for given style. + +layoutspec is expected to be a list or an object different than +None that evaluates to False if you want to "turn off" that style. +If it is a list (or tuple, or something else), each item should be +a tuple where the first item is the layout name and the second item +should have the format described below: + +LAYOUTS + + A layout can contain the value None, if takes no options, or + a dict of options specifying how to arrange the element. + The layout mechanism uses a simplified version of the pack + geometry manager: given an initial cavity, each element is + allocated a parcel. Valid options/values are: + + side: whichside + Specifies which side of the cavity to place the + element; one of top, right, bottom or left. If + omitted, the element occupies the entire cavity. + + sticky: nswe + Specifies where the element is placed inside its + allocated parcel. + + children: [sublayout... ] + Specifies a list of elements to place inside the + element. Each element is a tuple (or other sequence) + where the first item is the layout name, and the other + is a LAYOUT. +""" @overload def layout(self, style: str, layoutspec: None = None) -> _LayoutSpec: ... @overload @@ -145,7 +222,9 @@ class Style: padding: _Padding = ..., sticky: str = ..., width: float | str = ..., - ) -> None: ... + ) -> None: + """Create a new element in the current theme of given etype. +""" @overload def element_create(self, elementname: str, etype: Literal["from"], themename: str, fromelement: str = ..., /) -> None: ... if sys.platform == "win32" and sys.version_info >= (3, 13): # and tk version >= 8.6 @@ -190,23 +269,91 @@ class Style: height: float | str, ) -> None: ... - def element_names(self) -> tuple[str, ...]: ... - def element_options(self, elementname: str) -> tuple[str, ...]: ... - def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: ... 
- def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: ... - def theme_names(self) -> tuple[str, ...]: ... - @overload - def theme_use(self, themename: str) -> None: ... + def element_names(self) -> tuple[str, ...]: + """Returns the list of elements defined in the current theme. +""" + def element_options(self, elementname: str) -> tuple[str, ...]: + """Return the list of elementname's options. +""" + def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: + """Creates a new theme. + +It is an error if themename already exists. If parent is +specified, the new theme will inherit styles, elements and +layouts from the specified parent theme. If settings are present, +they are expected to have the same syntax used for theme_settings. +""" + def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: + """Temporarily sets the current theme to themename, apply specified +settings and then restore the previous theme. + +Each key in settings is a style and each value may contain the +keys 'configure', 'map', 'layout' and 'element create' and they +are expected to have the same format as specified by the methods +configure, map, layout and element_create respectively. +""" + def theme_names(self) -> tuple[str, ...]: + """Returns a list of all known themes. +""" + @overload + def theme_use(self, themename: str) -> None: + """If themename is None, returns the theme in use, otherwise, set +the current theme to themename, refreshes all widgets and emits +a <> event. +""" @overload def theme_use(self, themename: None = None) -> str: ... class Widget(tkinter.Widget): - def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: ... - def identify(self, x: int, y: int) -> str: ... - def instate(self, statespec, callback=None, *args, **kw): ... - def state(self, statespec=None): ... + """Base class for Tk themed widgets. +""" + def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: + """Constructs a Ttk Widget with the parent master. + +STANDARD OPTIONS + + class, cursor, takefocus, style + +SCROLLABLE WIDGET OPTIONS + + xscrollcommand, yscrollcommand + +LABEL WIDGET OPTIONS + + text, textvariable, underline, image, compound, width + +WIDGET STATES + + active, disabled, focus, pressed, selected, background, + readonly, alternate, invalid +""" + def identify(self, x: int, y: int) -> str: + """Returns the name of the element at position x, y, or the empty +string if the point does not lie within any element. + +x and y are pixel coordinates relative to the widget. +""" + def instate(self, statespec, callback=None, *args, **kw): + """Test the widget's state. + +If callback is not specified, returns True if the widget state +matches statespec and False otherwise. If callback is specified, +then it will be invoked with *args, **kw if the widget state +matches statespec. statespec is expected to be a sequence. +""" + def state(self, statespec=None): + """Modify or inquire widget state. + +Widget state is returned if statespec is None, otherwise it is +set according to the statespec flags and then a new state spec +is returned indicating which flags were changed. statespec is +expected to be a sequence. +""" class Button(Widget): + """Ttk Button widget, displays a textual label and/or image, and +evaluates a command when pressed. 
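The ttk.Style methods documented above (configure, map, lookup, theme_use) and the Widget state/instate helpers are usually combined along these lines; a minimal sketch, with the custom style name invented for illustration:

    import tkinter
    from tkinter import ttk

    root = tkinter.Tk()
    style = ttk.Style(root)
    print(style.theme_names())
    style.theme_use("clam")
    style.configure("Big.TButton", padding=6, font=("Helvetica", 12))
    style.map("Big.TButton", foreground=[("disabled", "#999999"), ("pressed", "#004080")])
    print(style.lookup("Big.TButton", "padding"))

    btn = ttk.Button(root, text="OK", style="Big.TButton")
    btn.pack()
    btn.state(["disabled"])              # set state flags on the widget
    print(btn.instate(["disabled"]))     # -> True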
+""" def __init__( self, master: tkinter.Misc | None = None, @@ -226,7 +373,18 @@ class Button(Widget): textvariable: tkinter.Variable = ..., underline: int = -1, width: int | Literal[""] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Button widget with the parent master. + +STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + +WIDGET-SPECIFIC OPTIONS + + command, default, width +""" @overload def configure( self, @@ -245,13 +403,23 @@ class Button(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def invoke(self) -> Any: ... + def invoke(self) -> Any: + """Invokes the command associated with the button. +""" class Checkbutton(Widget): + """Ttk Checkbutton widget which is either in on- or off-state. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -276,7 +444,18 @@ class Checkbutton(Widget): # here: https://sourceforge.net/projects/tcl/files/Tcl/8.6.9/tcltk-release-notes-8.6.9.txt/view variable: tkinter.Variable = ..., width: int | Literal[""] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Checkbutton widget with the parent master. + +STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + +WIDGET-SPECIFIC OPTIONS + + command, offvalue, onvalue, variable +""" @overload def configure( self, @@ -297,13 +476,30 @@ class Checkbutton(Widget): underline: int = ..., variable: tkinter.Variable = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def invoke(self) -> Any: ... + def invoke(self) -> Any: + """Toggles between the selected and deselected states and +invokes the associated command. If the widget is currently +selected, sets the option variable to the offvalue option +and deselects the widget; otherwise, sets the option variable +to the option onvalue. + +Returns the result of the associated command. +""" class Entry(Widget, tkinter.Entry): + """Ttk Entry widget displays a one-line text string and allows that +string to be edited by the user. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -327,7 +523,22 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = "", width: int = 20, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Constructs a Ttk Entry widget with the parent master. 
+ +STANDARD OPTIONS + + class, cursor, style, takefocus, xscrollcommand + +WIDGET-SPECIFIC OPTIONS + + exportselection, invalidcommand, justify, show, state, + textvariable, validate, validatecommand, width + +VALIDATION MODES + + none, key, focus, focusin, focusout, all +""" @overload # type: ignore[override] def configure( self, @@ -349,7 +560,13 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) @@ -374,14 +591,33 @@ class Entry(Widget, tkinter.Entry): validatecommand: str | list[str] | tuple[str, ...] | Callable[[], bool] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def bbox(self, index) -> tuple[int, int, int, int]: ... # type: ignore[override] - def identify(self, x: int, y: int) -> str: ... - def validate(self): ... + def bbox(self, index) -> tuple[int, int, int, int]: # type: ignore[override] + """Return a tuple of (x, y, width, height) which describes the +bounding box of the character given by index. +""" + def identify(self, x: int, y: int) -> str: + """Returns the name of the element at position x, y, or the +empty string if the coordinates are outside the window. +""" + def validate(self): + """Force revalidation, independent of the conditions specified +by the validate option. Returns False if validation fails, True +if it succeeds. Sets or clears the invalid state accordingly. +""" class Combobox(Entry): + """Ttk Combobox widget combines a text field with a pop-down list of +values. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -407,7 +643,18 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = 20, xscrollcommand: str | Callable[[float, float], object] = ..., # undocumented - ) -> None: ... + ) -> None: + """Construct a Ttk Combobox widget with the parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + exportselection, justify, height, postcommand, state, + textvariable, values, width +""" @overload # type: ignore[override] def configure( self, @@ -432,7 +679,13 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. 
+""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) @@ -460,13 +713,29 @@ class Combobox(Entry): values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def current(self, newindex: int | None = None) -> int: ... - def set(self, value: Any) -> None: ... + def current(self, newindex: int | None = None) -> int: + """If newindex is supplied, sets the combobox value to the +element at position newindex in the list of values. Otherwise, +returns the index of the current value in the list of values +or -1 if the current value does not appear in the list. +""" + def set(self, value: Any) -> None: + """Sets the value of the combobox to value. +""" class Frame(Widget): + """Ttk Frame widget is a container, used to group other widgets +together. +""" # This should be kept in sync with tkinter.ttk.LabeledScale.__init__() # (all of these keyword-only arguments are also present there) def __init__( @@ -484,7 +753,17 @@ class Frame(Widget): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a Ttk Frame with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + borderwidth, relief, padding, width, height +""" @overload def configure( self, @@ -499,12 +778,20 @@ class Frame(Widget): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): + """Ttk Label widget displays a textual label and/or image. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -531,7 +818,19 @@ class Label(Widget): underline: int = -1, width: int | Literal[""] = "", wraplength: float | str = ..., - ) -> None: ... + ) -> None: + """Construct a Ttk Label with parent master. + +STANDARD OPTIONS + + class, compound, cursor, image, style, takefocus, text, + textvariable, underline, width + +WIDGET-SPECIFIC OPTIONS + + anchor, background, font, foreground, justify, padding, + relief, text, wraplength +""" @overload def configure( self, @@ -557,12 +856,22 @@ class Label(Widget): underline: int = ..., width: int | Literal[""] = ..., wraplength: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure class Labelframe(Widget): + """Ttk Labelframe widget is a container used to group other widgets +together. It has an optional label, which may be a plain text string +or another widget. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -582,7 +891,17 @@ class Labelframe(Widget): text: float | str = "", underline: int = -1, width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a Ttk Labelframe with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + labelanchor, text, underline, padding, labelwidget, width, + height +""" @overload def configure( self, @@ -601,7 +920,13 @@ class Labelframe(Widget): text: float | str = ..., underline: int = ..., width: float | str = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -609,6 +934,9 @@ class Labelframe(Widget): LabelFrame = Labelframe class Menubutton(Widget): + """Ttk Menubutton widget displays a textual label and/or image, and +displays a menu when pressed. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -628,7 +956,18 @@ class Menubutton(Widget): textvariable: tkinter.Variable = ..., underline: int = -1, width: int | Literal[""] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Menubutton with parent master. + +STANDARD OPTIONS + + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width + +WIDGET-SPECIFIC OPTIONS + + direction, menu +""" @overload def configure( self, @@ -647,12 +986,22 @@ class Menubutton(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Notebook(Widget): + """Ttk Notebook widget manages a collection of windows and displays +a single one at a time. Each child window is associated with a tab, +which the user may select to change the currently-displayed window. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -665,7 +1014,35 @@ class Notebook(Widget): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = 0, - ) -> None: ... + ) -> None: + """Construct a Ttk Notebook with parent master. 
+ +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + height, padding, width + +TAB OPTIONS + + state, sticky, padding, text, image, compound, underline + +TAB IDENTIFIERS (tab_id) + + The tab_id argument found in several methods may take any of + the following forms: + + * An integer between zero and the number of tabs + * The name of a child window + * A positional specification of the form "@x,y", which + defines the tab + * The string "current", which identifies the + currently-selected tab + * The string "end", which returns the number of tabs (only + valid for method index) +""" @overload def configure( self, @@ -677,7 +1054,13 @@ class Notebook(Widget): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -694,18 +1077,82 @@ class Notebook(Widget): image=..., compound: Literal["top", "left", "center", "right", "bottom", "none"] = ..., underline: int = ..., - ) -> None: ... - def forget(self, tab_id) -> None: ... # type: ignore[override] - def hide(self, tab_id) -> None: ... - def identify(self, x: int, y: int) -> str: ... - def index(self, tab_id): ... - def insert(self, pos, child, **kw) -> None: ... - def select(self, tab_id=None): ... - def tab(self, tab_id, option=None, **kw): ... - def tabs(self): ... - def enable_traversal(self) -> None: ... + ) -> None: + """Adds a new tab to the notebook. + +If window is currently managed by the notebook but hidden, it is +restored to its previous position. +""" + def forget(self, tab_id) -> None: # type: ignore[override] + """Removes the tab specified by tab_id, unmaps and unmanages the +associated window. +""" + def hide(self, tab_id) -> None: + """Hides the tab specified by tab_id. + +The tab will not be displayed, but the associated window remains +managed by the notebook and its configuration remembered. Hidden +tabs may be restored with the add command. +""" + def identify(self, x: int, y: int) -> str: + """Returns the name of the tab element at position x, y, or the +empty string if none. +""" + def index(self, tab_id): + """Returns the numeric index of the tab specified by tab_id, or +the total number of tabs if tab_id is the string "end". +""" + def insert(self, pos, child, **kw) -> None: + """Inserts a pane at the specified position. + +pos is either the string end, an integer index, or the name of +a managed child. If child is already managed by the notebook, +moves it to the specified position. +""" + def select(self, tab_id=None): + """Selects the specified tab. + +The associated child window will be displayed, and the +previously-selected window (if different) is unmapped. If tab_id +is omitted, returns the widget name of the currently selected +pane. +""" + def tab(self, tab_id, option=None, **kw): + """Query or modify the options of the specific tab_id. + +If kw is not given, returns a dict of the tab option values. If option +is specified, returns the value of that option. Otherwise, sets the +options to the corresponding values. +""" + def tabs(self): + """Returns a list of windows managed by the notebook. 
+""" + def enable_traversal(self) -> None: + """Enable keyboard traversal for a toplevel window containing +this notebook. + +This will extend the bindings for the toplevel window containing +this notebook as follows: + + Control-Tab: selects the tab following the currently selected + one + + Shift-Control-Tab: selects the tab preceding the currently + selected one + + Alt-K: where K is the mnemonic (underlined) character of any + tab, will select that tab. + +Multiple notebooks in a single toplevel may be enabled for +traversal, including nested notebooks. However, notebook traversal +only works properly if all panes are direct children of the +notebook. +""" class Panedwindow(Widget, tkinter.PanedWindow): + """Ttk Panedwindow widget displays a number of subwindows, stacked +either vertically or horizontally. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -719,8 +1166,29 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: int = 0, - ) -> None: ... - def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: ... + ) -> None: + """Construct a Ttk Panedwindow with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + orient, width, height + +PANE OPTIONS + + weight +""" + def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: + """Add a child widget to the panedwindow in a new pane. + +The child argument is the name of the child widget +followed by pairs of arguments that specify how to +manage the windows. The possible options and values +are the ones accepted by the paneconfigure method. +""" @overload # type: ignore[override] def configure( self, @@ -731,7 +1199,13 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) @@ -745,17 +1219,50 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., width: int = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... forget = tkinter.PanedWindow.forget - def insert(self, pos, child, **kw) -> None: ... - def pane(self, pane, option=None, **kw): ... - def sashpos(self, index, newpos=None): ... + def insert(self, pos, child, **kw) -> None: + """Inserts a pane at the specified positions. + +pos is either the string end, and integer index, or the name +of a child. If child is already managed by the paned window, +moves it to the specified position. +""" + def pane(self, pane, option=None, **kw): + """Query or modify the options of the specified pane. 
+ +pane is either an integer index or the name of a managed subwindow. +If kw is not given, returns a dict of the pane option values. If +option is specified then the value for that option is returned. +Otherwise, sets the options to the corresponding values. +""" + def sashpos(self, index, newpos=None): + """If newpos is specified, sets the position of sash number index. + +May adjust the positions of adjacent sashes to ensure that +positions are monotonically increasing. Sash positions are further +constrained to be between 0 and the total size of the widget. + +Returns the new position of sash number index. +""" PanedWindow = Panedwindow class Progressbar(Widget): + """Ttk Progressbar widget shows the status of a long-running +operation. They can operate in two modes: determinate mode shows the +amount completed relative to the total amount of work to be done, and +indeterminate mode provides an animated display to let the user know +that something is happening. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -772,7 +1279,17 @@ class Progressbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", value: float = 0.0, variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> None: ... + ) -> None: + """Construct a Ttk Progressbar with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + orient, length, mode, maximum, value, variable, phase +""" @overload def configure( self, @@ -788,15 +1305,36 @@ class Progressbar(Widget): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def start(self, interval: Literal["idle"] | int | None = None) -> None: ... - def step(self, amount: float | None = None) -> None: ... - def stop(self) -> None: ... + def start(self, interval: Literal["idle"] | int | None = None) -> None: + """Begin autoincrement mode: schedules a recurring timer event +that calls method step every interval milliseconds. + +interval defaults to 50 milliseconds (20 steps/second) if omitted. +""" + def step(self, amount: float | None = None) -> None: + """Increments the value option by amount. + +amount defaults to 1.0 if omitted. +""" + def stop(self) -> None: + """Stop autoincrement mode: cancels any recurring timer event +initiated by start. +""" class Radiobutton(Widget): + """Ttk Radiobutton widgets are used in groups to show or change a +set of mutually-exclusive options. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -817,7 +1355,18 @@ class Radiobutton(Widget): value: Any = "1", variable: tkinter.Variable | Literal[""] = ..., width: int | Literal[""] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Radiobutton with parent master. 
+
+STANDARD OPTIONS
+
+ class, compound, cursor, image, state, style, takefocus,
+ text, textvariable, underline, width
+
+WIDGET-SPECIFIC OPTIONS
+
+ command, value, variable
+"""
 @overload
 def configure(
 self,
@@ -837,14 +1386,29 @@ class Radiobutton(Widget):
 value: Any = ...,
 variable: tkinter.Variable | Literal[""] = ...,
 width: int | Literal[""] = ...,
- ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ...
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None:
+ """Configure resources of a widget.
+
+The values for resources are specified as keyword
+arguments. To get an overview about
+the allowed keyword arguments call the method keys.
+"""
 @overload
 def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ...
 config = configure
- def invoke(self) -> Any: ...
+ def invoke(self) -> Any:
+ """Sets the option variable to the option value, selects the
+widget, and invokes the associated command.
+
+Returns the result of the command, or an empty string if
+no command is specified.
+"""
 # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible
 class Scale(Widget, tkinter.Scale): # type: ignore[misc]
+ """Ttk Scale widget is typically used to control the numeric value of
+a linked variable that varies uniformly over some range.
+"""
 def __init__(
 self,
 master: tkinter.Misc | None = None,
@@ -862,7 +1426,17 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc]
 to: float = 1.0,
 value: float = 0,
 variable: tkinter.IntVar | tkinter.DoubleVar = ...,
- ) -> None: ...
+ ) -> None:
+ """Construct a Ttk Scale with parent master.
+
+STANDARD OPTIONS
+
+ class, cursor, style, takefocus
+
+WIDGET-SPECIFIC OPTIONS
+
+ command, from, length, orient, to, value, variable
+"""
 @overload # type: ignore[override]
 def configure(
 self,
@@ -879,7 +1453,12 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc]
 to: float = ...,
 value: float = ...,
 variable: tkinter.IntVar | tkinter.DoubleVar = ...,
- ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ...
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None:
+ """Modify or query scale options.
+
+Setting a value for any of the "from", "from_" or "to" options
+generates a <<RangeChanged>> event.
+"""
 @overload
 def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ...
 # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why)
@@ -899,13 +1478,27 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc]
 to: float = ...,
 value: float = ...,
 variable: tkinter.IntVar | tkinter.DoubleVar = ...,
- ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ...
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None:
+ """Configure resources of a widget.
+
+The values for resources are specified as keyword
+arguments. To get an overview about
+the allowed keyword arguments call the method keys.
+"""
 @overload
 def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ...
- def get(self, x: int | None = None, y: int | None = None) -> float: ...
+ def get(self, x: int | None = None, y: int | None = None) -> float:
+ """Get the current value of the value option, or the value
+corresponding to the coordinates x, y if they are specified.
+
+x and y are pixel coordinates relative to the scale widget
+origin.
+"""
 # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible
 class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc]
+ """Ttk Scrollbar controls the viewport of a scrollable widget.
+""" def __init__( self, master: tkinter.Misc | None = None, @@ -917,7 +1510,17 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = "vertical", style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Scrollbar with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + command, orient +""" @overload # type: ignore[override] def configure( self, @@ -928,7 +1531,13 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) @@ -942,11 +1551,20 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... class Separator(Widget): + """Ttk Separator widget displays a horizontal or vertical separator +bar. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -957,7 +1575,17 @@ class Separator(Widget): orient: Literal["horizontal", "vertical"] = "horizontal", style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Separator with parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus + +WIDGET-SPECIFIC OPTIONS + + orient +""" @overload def configure( self, @@ -967,12 +1595,21 @@ class Separator(Widget): orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Sizegrip(Widget): + """Ttk Sizegrip allows the user to resize the containing toplevel +window by pressing and dragging the grip. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -982,7 +1619,13 @@ class Sizegrip(Widget): name: str = ..., style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Sizegrip with parent master. 
+ +STANDARD OPTIONS + + class, cursor, state, style, takefocus +""" @overload def configure( self, @@ -991,12 +1634,23 @@ class Sizegrip(Widget): cursor: tkinter._Cursor = ..., style: str = ..., takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Spinbox(Entry): + """Ttk Spinbox is an Entry with increment and decrement arrows + +It is commonly used for number entry or to select from a list of +string values. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -1026,7 +1680,18 @@ class Spinbox(Entry): width: int = ..., # undocumented wrap: bool = False, xscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Spinbox widget with the parent master. + +STANDARD OPTIONS + + class, cursor, style, takefocus, validate, + validatecommand, xscrollcommand, invalidcommand + +WIDGET-SPECIFIC OPTIONS + + to, from_, increment, values, wrap, format, command +""" @overload # type: ignore[override] def configure( self, @@ -1055,11 +1720,19 @@ class Spinbox(Entry): width: int = ..., wrap: bool = ..., xscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure # type: ignore[assignment] - def set(self, value: Any) -> None: ... + def set(self, value: Any) -> None: + """Sets the value of the Spinbox to value. +""" @type_check_only class _TreeviewItemDict(TypedDict): @@ -1094,6 +1767,12 @@ class _TreeviewColumnDict(TypedDict): id: str class Treeview(Widget, tkinter.XView, tkinter.YView): + """Ttk Treeview widget displays a hierarchical collection of items. + +Each item has a textual label, an optional image, and an optional list +of data values. The data values are displayed in successive columns +after the tree label. +""" def __init__( self, master: tkinter.Misc | None = None, @@ -1115,7 +1794,26 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., xscrollcommand: str | Callable[[float, float], object] = "", yscrollcommand: str | Callable[[float, float], object] = "", - ) -> None: ... + ) -> None: + """Construct a Ttk Treeview with parent master. 
+ +STANDARD OPTIONS + + class, cursor, style, takefocus, xscrollcommand, + yscrollcommand + +WIDGET-SPECIFIC OPTIONS + + columns, displaycolumns, height, padding, selectmode, show + +ITEM OPTIONS + + text, image, values, open, tags + +TAG OPTIONS + + foreground, background, font, image +""" @overload def configure( self, @@ -1132,15 +1830,44 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = ..., xscrollcommand: str | Callable[[float, float], object] = ..., yscrollcommand: str | Callable[[float, float], object] = ..., - ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: + """Configure resources of a widget. + +The values for resources are specified as keyword +arguments. To get an overview about +the allowed keyword arguments call the method keys. +""" @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] - def get_children(self, item: str | int | None = None) -> tuple[str, ...]: ... - def set_children(self, item: str | int, *newchildren: str | int) -> None: ... + def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: # type: ignore[override] + """Returns the bounding box (relative to the treeview widget's +window) of the specified item in the form x y width height. + +If column is specified, returns the bounding box of that cell. +If the item is not visible (i.e., if it is a descendant of a +closed item or is scrolled offscreen), returns an empty string. +""" + def get_children(self, item: str | int | None = None) -> tuple[str, ...]: + """Returns a tuple of children belonging to item. + +If item is not specified, returns root children. +""" + def set_children(self, item: str | int, *newchildren: str | int) -> None: + """Replaces item's child with newchildren. + +Children present in item that are not present in newchildren +are detached from tree. No items in newchildren may be an +ancestor of item. +""" @overload - def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: ... + def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: + """Query or modify the options for the specified column. + +If kw is not given, returns a dict of the column option values. If +option is specified then the value for that option is returned. +Otherwise, sets the options to the corresponding values. +""" @overload def column(self, column: str | int, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 @overload @@ -1161,15 +1888,51 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] = ..., # id is read-only ) -> _TreeviewColumnDict | None: ... - def delete(self, *items: str | int) -> None: ... - def detach(self, *items: str | int) -> None: ... - def exists(self, item: str | int) -> bool: ... + def delete(self, *items: str | int) -> None: + """Delete all specified items and all their descendants. The root +item may not be deleted. +""" + def detach(self, *items: str | int) -> None: + """Unlinks all of the specified items from the tree. + +The items and all of their descendants are still present, and may +be reinserted at another point in the tree, but will not be +displayed. The root item may not be detached. 
+""" + def exists(self, item: str | int) -> bool: + """Returns True if the specified item is present in the tree, +False otherwise. +""" @overload # type: ignore[override] - def focus(self, item: None = None) -> str: ... # can return empty string + def focus(self, item: None = None) -> str: # can return empty string + """If item is specified, sets the focus item to item. Otherwise, +returns the current focus item, or '' if there is none. +""" @overload def focus(self, item: str | int) -> Literal[""]: ... @overload - def heading(self, column: str | int, option: Literal["text"]) -> str: ... + def heading(self, column: str | int, option: Literal["text"]) -> str: + """Query or modify the heading options for the specified column. + +If kw is not given, returns a dict of the heading option values. If +option is specified then the value for that option is returned. +Otherwise, sets the options to the corresponding values. + +Valid options/values are: + text: text + The text to display in the column heading + image: image_name + Specifies an image to display to the right of the column + heading + anchor: anchor + Specifies how the heading text should be aligned. One of + the standard Tk anchor values + command: callback + A callback to be invoked when the heading label is + pressed. + +To configure the tree column heading, call this with column = "#0" +""" @overload def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ... @overload @@ -1192,12 +1955,38 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): command: str | Callable[[], object] = ..., ) -> None: ... # Internal Method. Leave untyped: - def identify(self, component, x, y): ... # type: ignore[override] - def identify_row(self, y: int) -> str: ... - def identify_column(self, x: int) -> str: ... - def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... - def identify_element(self, x: int, y: int) -> str: ... # don't know what possible return values are - def index(self, item: str | int) -> int: ... + def identify(self, component, x, y): # type: ignore[override] + """Returns a description of the specified component under the +point given by x and y, or the empty string if no such component +is present at that position. +""" + def identify_row(self, y: int) -> str: + """Returns the item ID of the item at position y. +""" + def identify_column(self, x: int) -> str: + """Returns the data column identifier of the cell at position x. + +The tree column has ID #0. +""" + def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: + """Returns one of: + +heading: Tree heading area. +separator: Space between two columns headings; +tree: The tree area. +cell: A data cell. + +* Availability: Tk 8.6 +""" + def identify_element(self, x: int, y: int) -> str: # don't know what possible return values are + """Returns the element at position x, y. + +* Availability: Tk 8.6 +""" + def index(self, item: str | int) -> int: + """Returns the integer index of item within its parent's list +of children. +""" def insert( self, parent: str, @@ -1210,9 +1999,29 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): values: list[Any] | tuple[Any, ...] = ..., open: bool = ..., tags: str | list[str] | tuple[str, ...] = ..., - ) -> str: ... - @overload - def item(self, item: str | int, option: Literal["text"]) -> str: ... + ) -> str: + """Creates a new item and return the item identifier of the newly +created item. 
+ +parent is the item ID of the parent item, or the empty string +to create a new top-level item. index is an integer, or the value +end, specifying where in the list of parent's children to insert +the new item. If index is less than or equal to zero, the new node +is inserted at the beginning, if index is greater than or equal to +the current number of children, it is inserted at the end. If iid +is specified, it is used as the item identifier, iid must not +already exist in the tree. Otherwise, a new unique identifier +is generated. +""" + @overload + def item(self, item: str | int, option: Literal["text"]) -> str: + """Query or modify the options for the specified item. + +If no options are given, a dict with options/values for the item +is returned. If option is specified then the value for that option +is returned. Otherwise, sets the options to the corresponding +values as given by kw. +""" @overload def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ... @overload @@ -1237,31 +2046,70 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): open: bool = ..., tags: str | list[str] | tuple[str, ...] = ..., ) -> None: ... - def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: ... + def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: + """Moves item to position index in parent's list of children. + +It is illegal to move an item under one of its descendants. If +index is less than or equal to zero, item is moved to the +beginning, if greater than or equal to the number of children, +it is moved to the end. If item was detached it is reattached. +""" reattach = move - def next(self, item: str | int) -> str: ... # returning empty string means last item - def parent(self, item: str | int) -> str: ... - def prev(self, item: str | int) -> str: ... # returning empty string means first item - def see(self, item: str | int) -> None: ... - def selection(self) -> tuple[str, ...]: ... - @overload - def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + def next(self, item: str | int) -> str: # returning empty string means last item + """Returns the identifier of item's next sibling, or '' if item +is the last child of its parent. +""" + def parent(self, item: str | int) -> str: + """Returns the ID of the parent of item, or '' if item is at the +top level of the hierarchy. +""" + def prev(self, item: str | int) -> str: # returning empty string means first item + """Returns the identifier of item's previous sibling, or '' if +item is the first child of its parent. +""" + def see(self, item: str | int) -> None: + """Ensure that item is visible. + +Sets all of item's ancestors open option to True, and scrolls +the widget if necessary so that item is within the visible +portion of the tree. +""" + def selection(self) -> tuple[str, ...]: + """Returns the tuple of selected items. +""" + @overload + def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: + """The specified items becomes the new selection. +""" @overload def selection_set(self, *items: str | int) -> None: ... @overload - def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: + """Add all of the specified items to the selection. 
+""" @overload def selection_add(self, *items: str | int) -> None: ... @overload - def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: + """Remove all of the specified items from the selection. +""" @overload def selection_remove(self, *items: str | int) -> None: ... @overload - def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: ... + def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: + """Toggle the selection state of each specified item. +""" @overload def selection_toggle(self, *items: str | int) -> None: ... @overload - def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: ... + def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: + """Query or set the value of given item. + +With one argument, return a dictionary of column/value pairs +for the specified item. With two arguments, return the current +value of the specified column. With three arguments, set the +value of given column in given item to the specified value. +""" @overload def set(self, item: str | int, column: str | int, value: None = None) -> Any: ... @overload @@ -1271,13 +2119,24 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def tag_bind( self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None - ) -> str: ... + ) -> str: + """Bind a callback for the given event sequence to the tag tagname. +When an event is delivered to an item, the callbacks for each +of the item's tags option are called. +""" @overload def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... @overload def tag_bind(self, tagname: str, *, callback: str) -> None: ... @overload - def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: ... + def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: + """Query or modify the options for the specified tagname. + +If kw is not given, returns a dict of the option settings for tagname. +If option is specified, returns the value for that option for the +specified tagname. Otherwise, sets the options to the corresponding +values for the given tagname. +""" @overload def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... @overload @@ -1295,11 +2154,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._Image | str = ..., ) -> _TreeviewTagDict | MaybeNone: ... # can be None but annoying to check @overload - def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: + """If item is specified, returns 1 or 0 depending on whether the +specified item has the given tagname. Otherwise, returns a list of +all items which have the specified tag. + +* Availability: Tk 8.6 +""" @overload def tag_has(self, tagname: str, item: str | int) -> bool: ... class LabeledScale(Frame): + """A Ttk Scale widget with a Ttk Label widget indicating its +current value. 
+ +The Ttk Scale can be accessed through instance.scale, and Ttk Label +can be accessed through instance.label +""" label: Label scale: Scale # This should be kept in sync with tkinter.ttk.Frame.__init__() @@ -1323,11 +2194,24 @@ class LabeledScale(Frame): style: str = "", takefocus: bool | Literal[0, 1, ""] | Callable[[str], bool | None] = "", width: float | str = 0, - ) -> None: ... + ) -> None: + """Construct a horizontal LabeledScale with parent master, a +variable to be associated with the Ttk Scale widget and its range. +If variable is not specified, a tkinter.IntVar is created. + +WIDGET-SPECIFIC OPTIONS + + compound: 'top' or 'bottom' + Specifies how to display the label relative to the scale. + Defaults to 'top'. +""" # destroy is overridden, signature does not change value: Any class OptionMenu(Menubutton): + """Themed OptionMenu, based after tkinter's OptionMenu, which allows +the user to select a value from a menu. +""" def __init__( self, master: tkinter.Misc | None, @@ -1338,7 +2222,24 @@ class OptionMenu(Menubutton): style: str = "", direction: Literal["above", "below", "left", "right", "flush"] = "below", command: Callable[[tkinter.StringVar], object] | None = None, - ) -> None: ... + ) -> None: + """Construct a themed OptionMenu widget with master as the parent, +the resource textvariable set to variable, the initially selected +value specified by the default parameter, the menu values given by +*values and additional keywords. + +WIDGET-SPECIFIC OPTIONS + + style: stylename + Menubutton style. + direction: 'above', 'below', 'left', 'right', or 'flush' + Menubutton direction. + command: callback + A callback that will be invoked after selecting an item. +""" # configure, config, cget, destroy are inherited from Menubutton # destroy and __setitem__ are overridden, signature does not change - def set_menu(self, default: str | None = None, *values: str) -> None: ... + def set_menu(self, default: str | None = None, *values: str) -> None: + """Build a new menu of radiobuttons with *values and optionally +a default value. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi index fd1b10da1d12e..cb682651cd8c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi @@ -1,3 +1,5 @@ +"""Token constants. +""" import sys from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi index 00a24b4eea07d..3d91e8486026a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi @@ -1,3 +1,24 @@ +"""Tokenization help for Python programs. + +tokenize(readline) is a generator that breaks a stream of bytes into +Python tokens. It decodes the bytes according to PEP-0263 for +determining source file encoding. + +It accepts a readline-like method which is called repeatedly to get the +next line of input (or b"" for EOF). It generates 5-tuples with these +members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators. 
Additionally, all token lists start with an ENCODING token +which tells you which encoding was used to decode the bytes stream. +""" import sys from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence @@ -141,7 +162,12 @@ class Untokenizer: encoding: str | None def add_whitespace(self, start: _Position) -> None: ... if sys.version_info >= (3, 12): - def add_backslash_continuation(self, start: _Position) -> None: ... + def add_backslash_continuation(self, start: _Position) -> None: + """Add backslash continuation characters if the row has increased +without encountering a newline token. + +This also inserts the correct amount of whitespace before the backslash. +""" def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... @@ -149,11 +175,66 @@ class Untokenizer: def escape_brackets(self, token: str) -> str: ... # Returns str, unless the ENCODING token is present, in which case it returns bytes. -def untokenize(iterable: Iterable[_Token]) -> str | Any: ... -def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... -def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... -def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... -def open(filename: FileDescriptorOrPath) -> TextIO: ... +def untokenize(iterable: Iterable[_Token]) -> str | Any: + """Transform tokens back into Python source code. +It returns a bytes object, encoded using the ENCODING +token, which is the first token sequence output by tokenize. + +Each element returned by the iterable must be a token sequence +with at least two elements, a token number and token value. If +only two tokens are passed, the resulting output is poor. + +The result is guaranteed to tokenize back to match the input so +that the conversion is lossless and round-trips are assured. +The guarantee applies only to the token type and token string as +the spacing between tokens (column positions) may change. +""" +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: + """ +The detect_encoding() function is used to detect the encoding that should +be used to decode a Python source file. It requires one argument, readline, +in the same way as the tokenize() generator. + +It will call readline a maximum of twice, and return the encoding used +(as a string) and a list of any lines (left as bytes) it has read in. + +It detects the encoding from the presence of a utf-8 bom or an encoding +cookie as specified in pep-0263. If both a bom and a cookie are present, +but disagree, a SyntaxError will be raised. If the encoding cookie is an +invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, +'utf-8-sig' is returned. + +If no encoding is specified, then the default of 'utf-8' will be returned. +""" +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: + """ +The tokenize() generator requires one argument, readline, which +must be a callable object which provides the same interface as the +readline() method of built-in file objects. Each call to the function +should return one line of input as bytes. 
Alternatively, readline +can be a callable function terminating with StopIteration: + readline = open(myfile, 'rb').__next__ # Example of alternate readline + +The generator produces 5-tuples with these members: the token type; the +token string; a 2-tuple (srow, scol) of ints specifying the row and +column where the token begins in the source; a 2-tuple (erow, ecol) of +ints specifying the row and column where the token ends in the source; +and the line on which the token was found. The line passed is the +physical line. + +The first token sequence will always be an ENCODING token +which tells you which encoding was used to decode the bytes stream. +""" +def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: + """Tokenize a source reading Python code as unicode strings. + +This has the same API as tokenize(), except that it expects the *readline* +callable to return str objects instead of bytes. +""" +def open(filename: FileDescriptorOrPath) -> TextIO: + """Open a file in read only mode using the encoding detected by +detect_encoding(). +""" def group(*choices: str) -> str: ... # undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi index 4ff4097f8313a..08ed30b61e30a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi @@ -8,6 +8,15 @@ __all__ = ("loads", "load", "TOMLDecodeError") if sys.version_info >= (3, 14): class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML. + +Adds the following attributes to ValueError: +msg: The unformatted error message +doc: The TOML document being parsed +pos: The index of doc where parsing failed +lineno: The line corresponding to pos +colno: The column corresponding to pos +""" msg: str doc: str pos: int @@ -20,7 +29,13 @@ if sys.version_info >= (3, 14): def __init__(self, msg: str | type = ..., doc: str | type = ..., pos: int | type = ..., *args: Any) -> None: ... else: - class TOMLDecodeError(ValueError): ... + class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML. +""" -def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... -def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... +def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: + """Parse TOML from a binary file object. +""" +def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: + """Parse TOML from a string. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi index 7e7cc1e9ac54a..62cded3e4c6ab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi @@ -1,3 +1,23 @@ +"""program/module to trace Python program or function execution + +Sample use, command line: + trace.py -c -f counts --ignore-dir '$prefix' spam.py eggs + trace.py -t --ignore-dir '$prefix' spam.py eggs + trace.py --trackcalls spam.py eggs + +Sample use, programmatically + import sys + + # create a Trace object, telling it what to ignore, and whether to + # do tracing or line-counting or both. 
+ tracer = trace.Trace(ignoredirs=[sys.base_prefix, sys.base_exec_prefix,], + trace=0, count=1) + # run the new command using the given tracer + tracer.run('main()') + # make a report, placing output in /tmp + r = tracer.results() + r.write_results(show_missing=True, coverdir="/tmp") +""" import sys import types from _typeshed import Incomplete, StrPath, TraceFunction @@ -26,7 +46,9 @@ class CoverageResults: callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, outfile: StrPath | None = None, ) -> None: ... # undocumented - def update(self, other: CoverageResults) -> None: ... + def update(self, other: CoverageResults) -> None: + """Merge in the data from another CoverageResults +""" if sys.version_info >= (3, 13): def write_results( self, @@ -35,14 +57,40 @@ class CoverageResults: coverdir: StrPath | None = None, *, ignore_missing_files: bool = False, - ) -> None: ... + ) -> None: + """ +Write the coverage results. + +:param show_missing: Show lines that had no hits. +:param summary: Include coverage summary per module. +:param coverdir: If None, the results of each module are placed in its + directory, otherwise it is included in the directory + specified. +:param ignore_missing_files: If True, counts for files that no longer + exist are silently ignored. Otherwise, a missing file + will raise a FileNotFoundError. +""" else: - def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: + """ + Write the coverage results. + + :param show_missing: Show lines that had no hits. + :param summary: Include coverage summary per module. + :param coverdir: If None, the results of each module are placed in its + directory, otherwise it is included in the directory + specified. + """ def write_results_file( self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None - ) -> tuple[int, int]: ... - def is_ignored_filename(self, filename: str) -> bool: ... # undocumented + ) -> tuple[int, int]: + """Return a coverage results file in path. +""" + def is_ignored_filename(self, filename: str) -> bool: # undocumented + """Return True if the filename does not refer to a file +we want to have reported. +""" class _Ignore: def __init__(self, modules: Iterable[str] | None = None, dirs: Iterable[StrPath] | None = None) -> None: ... @@ -70,16 +118,46 @@ class Trace: infile: StrPath | None = None, outfile: StrPath | None = None, timing: bool = False, - ) -> None: ... + ) -> None: + """ +@param count true iff it should count number of times each + line is executed +@param trace true iff it should print out each line that is + being counted +@param countfuncs true iff it should just output a list of + (filename, modulename, funcname,) for functions + that were called at least once; This overrides + 'count' and 'trace' +@param ignoremods a list of the names of modules to ignore +@param ignoredirs a list of the names of directories to ignore + all of the (recursive) contents of +@param infile file from which to read stored counts to be + added into the results +@param outfile file in which to write the results +@param timing true iff timing information be displayed +""" def run(self, cmd: str | types.CodeType) -> None: ... def runctx( self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None ) -> None: ... 
def runfunc(self, func: Callable[_P, _T], /, *args: _P.args, **kw: _P.kwargs) -> _T: ... def file_module_function_of(self, frame: types.FrameType) -> _FileModuleFunction: ... - def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... - def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... - def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: + """Handler for call events. + +Adds information about who called who to the self._callers dict. +""" + def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: + """Handler for call events. + +Adds (filename, modulename, funcname) to the self._calledfuncs dict. +""" + def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: + """Handler for call events. + +If the code block being entered is to be ignored, returns 'None', +else returns self.localtrace. +""" def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi index d587295cd1cf7..f395946adf107 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi @@ -1,3 +1,5 @@ +"""Extract, format and print information about Python stack traces. +""" import sys from _typeshed import SupportsWrite, Unused from collections.abc import Generator, Iterable, Iterator, Mapping @@ -32,7 +34,14 @@ if sys.version_info >= (3, 14): _FrameSummaryTuple: TypeAlias = tuple[str, int, str, str | None] -def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: + """Print up to 'limit' stack trace entries from the traceback 'tb'. + +If 'limit' is omitted or None, all entries are printed. If 'file' +is omitted or None, the output goes to sys.stderr; otherwise +'file' should be an open file or file-like object with a write() +method. +""" if sys.version_info >= (3, 10): @overload @@ -44,7 +53,17 @@ if sys.version_info >= (3, 10): limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True, - ) -> None: ... + ) -> None: + """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. + +This differs from print_tb() in the following ways: (1) if +traceback is not None, it prints a header "Traceback (most recent +call last):"; (2) it prints the exception type and value after the +stack trace; (3) if type is SyntaxError and value has the +appropriate format, it prints the line where the syntax error +occurred with a caret on the next line indicating the approximate +position of the error. +""" @overload def print_exception( exc: BaseException, /, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True @@ -57,7 +76,15 @@ if sys.version_info >= (3, 10): tb: TracebackType | None = ..., limit: int | None = None, chain: bool = True, - ) -> list[str]: ... + ) -> list[str]: + """Format a stack trace and the exception information. 
+ +The arguments have the same meaning as the corresponding arguments +to print_exception(). The return value is a list of strings, each +ending in a newline and some containing internal newlines. When +these lines are concatenated and printed, exactly the same text is +printed as does print_exception(). +""" @overload def format_exception(exc: BaseException, /, *, limit: int | None = None, chain: bool = True) -> list[str]: ... @@ -69,44 +96,163 @@ else: limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True, - ) -> None: ... + ) -> None: + """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. + + This differs from print_tb() in the following ways: (1) if + traceback is not None, it prints a header "Traceback (most recent + call last):"; (2) it prints the exception type and value after the + stack trace; (3) if type is SyntaxError and value has the + appropriate format, it prints the line where the syntax error + occurred with a caret on the next line indicating the approximate + position of the error. + """ def format_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, limit: int | None = None, chain: bool = True, - ) -> list[str]: ... + ) -> list[str]: + """Format a stack trace and the exception information. + + The arguments have the same meaning as the corresponding arguments + to print_exception(). The return value is a list of strings, each + ending in a newline and some containing internal newlines. When + these lines are concatenated and printed, exactly the same text is + printed as does print_exception(). + """ + +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: + """Shorthand for 'print_exception(sys.exception(), limit=limit, file=file, chain=chain)'. +""" +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: + """This is a shorthand for 'print_exception(sys.last_exc, limit=limit, file=file, chain=chain)'. +""" +def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: + """Print a stack trace from its invocation point. + +The optional 'f' argument can be used to specify an alternate +stack frame at which to start. The optional 'limit' and 'file' +arguments have the same meaning as for print_exception(). +""" +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: + """ +Return a StackSummary object representing a list of +pre-processed entries from traceback. + +This is useful for alternate formatting of stack traces. If +'limit' is omitted or None, all entries are extracted. A +pre-processed stack trace entry is a FrameSummary object +containing attributes filename, lineno, name, and line +representing the information that is usually printed for a stack +trace. The line is a string with leading and trailing +whitespace stripped; if the source is not available it is None. +""" +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: + """Extract the raw traceback from the current stack frame. + +The return value has the same format as for extract_tb(). The +optional 'f' and 'limit' arguments have the same meaning as for +print_stack(). Each item in the list is a quadruple (filename, +line number, function name, text), and the entries are in order +from oldest to newest stack frame. 
+""" +def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: + """Format a list of tuples or FrameSummary objects for printing. + +Given a list of tuples or FrameSummary objects as returned by +extract_tb() or extract_stack(), return a list of strings ready +for printing. -def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... -def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... -def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... -def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... -def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... -def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: ... -def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: ... +Each string in the resulting list corresponds to the item with the +same index in the argument list. Each string ends in a newline; +the strings may contain internal newlines as well, for those items +whose source text line is not None. +""" +def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: + """Print the list of tuples as returned by extract_tb() or +extract_stack() as a formatted stack trace to the given file. +""" if sys.version_info >= (3, 13): @overload - def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: ... + def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: + """Format the exception part of a traceback. + +The return value is a list of strings, each ending in a newline. + +The list contains the exception's message, which is +normally a single string; however, for :exc:`SyntaxError` exceptions, it +contains several lines that (when printed) display detailed information +about where the syntax error occurred. Following the message, the list +contains the exception's ``__notes__``. + +When *show_group* is ``True``, and the exception is an instance of +:exc:`BaseExceptionGroup`, the nested exceptions are included as +well, recursively, with indentation relative to their nesting depth. +""" @overload def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... elif sys.version_info >= (3, 10): @overload - def format_exception_only(exc: BaseException | None, /) -> list[str]: ... + def format_exception_only(exc: BaseException | None, /) -> list[str]: + """Format the exception part of a traceback. + + The return value is a list of strings, each ending in a newline. + + The list contains the exception's message, which is + normally a single string; however, for :exc:`SyntaxError` exceptions, it + contains several lines that (when printed) display detailed information + about where the syntax error occurred. Following the message, the list + contains the exception's ``__notes__``. + """ @overload def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: ... else: - def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... 
+ def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. -def format_exc(limit: int | None = None, chain: bool = True) -> str: ... -def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... -def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... -def clear_frames(tb: TracebackType | None) -> None: ... -def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... -def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + +def format_exc(limit: int | None = None, chain: bool = True) -> str: + """Like print_exc() but return a string. +""" +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: + """A shorthand for 'format_list(extract_tb(tb, limit))'. +""" +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: + """Shorthand for 'format_list(extract_stack(f, limit))'. +""" +def clear_frames(tb: TracebackType | None) -> None: + """Clear all references to local variables in the frames of a traceback. +""" +def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: + """Walk a stack yielding the frame and line number for each frame. + +This will follow f.f_back from the given frame. If no frame is given, the +current stack is used. Usually used with StackSummary.extract. +""" +def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: + """Walk a traceback yielding the frame and line number for each frame. + +This will follow tb.tb_next (and thus is in the opposite order to +walk_stack). Usually used with StackSummary.extract. +""" if sys.version_info >= (3, 11): class _ExceptionPrintContext: @@ -114,6 +260,44 @@ if sys.version_info >= (3, 11): def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... class TracebackException: + """An exception ready for rendering. + +The traceback module captures enough attributes from the original exception +to this intermediary form to ensure that no references are held, while +still being able to fully print or format it. + +max_group_width and max_group_depth control the formatting of exception +groups. The depth refers to the nesting level of the group, and the width +refers to the size of a single exception group's exceptions array. The +formatted output is truncated when either limit is exceeded. + +Use `from_exception` to create TracebackException instances from exception +objects, or the constructor to create TracebackException instances from +individual components. + +- :attr:`__cause__` A TracebackException of the original *__cause__*. +- :attr:`__context__` A TracebackException of the original *__context__*. +- :attr:`exceptions` For exception groups - a list of TracebackException + instances for the nested *exceptions*. ``None`` for other exceptions. +- :attr:`__suppress_context__` The *__suppress_context__* value from the + original exception. 
+- :attr:`stack` A `StackSummary` representing the traceback. +- :attr:`exc_type` (deprecated) The class of the original traceback. +- :attr:`exc_type_str` String display of exc_type +- :attr:`filename` For syntax errors - the filename where the error + occurred. +- :attr:`lineno` For syntax errors - the linenumber where the error + occurred. +- :attr:`end_lineno` For syntax errors - the end linenumber where the error + occurred. Can be `None` if not present. +- :attr:`text` For syntax errors - the text where the error + occurred. +- :attr:`offset` For syntax errors - the offset into the text where the + error occurred. +- :attr:`end_offset` For syntax errors - the end offset into the text where + the error occurred. Can be `None` if not present. +- :attr:`msg` For syntax errors - the compiler error message. +""" __cause__: TracebackException | None __context__: TracebackException | None if sys.version_info >= (3, 11): @@ -211,7 +395,9 @@ class TracebackException: compact: bool = False, max_group_width: int = 15, max_group_depth: int = 10, - ) -> Self: ... + ) -> Self: + """Create a TracebackException from an exception. +""" elif sys.version_info >= (3, 10): @classmethod def from_exception( @@ -222,29 +408,97 @@ class TracebackException: lookup_lines: bool = True, capture_locals: bool = False, compact: bool = False, - ) -> Self: ... + ) -> Self: + """Create a TracebackException from an exception. +""" else: @classmethod def from_exception( cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False - ) -> Self: ... + ) -> Self: + """Create a TracebackException from an exception. +""" def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 11): - def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: + """Format the exception. + +If chain is not *True*, *__cause__* and *__context__* will not be formatted. + +The return value is a generator of strings, each ending in a newline and +some containing internal newlines. `print_exception` is a wrapper around +this method which just prints the lines to a file. + +The message indicating which exception occurred is always the last +string in the output. +""" else: - def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True) -> Generator[str, None, None]: + """Format the exception. + + If chain is not *True*, *__cause__* and *__context__* will not be formatted. + + The return value is a generator of strings, each ending in a newline and + some containing internal newlines. `print_exception` is a wrapper around + this method which just prints the lines to a file. + + The message indicating which exception occurred is always the last + string in the output. + """ if sys.version_info >= (3, 13): - def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: ... + def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: + """Format the exception part of the traceback. + +The return value is a generator of strings, each ending in a newline. + +Generator yields the exception message. 
+For :exc:`SyntaxError` exceptions, it +also yields (before the exception message) +several lines that (when printed) +display detailed information about where the syntax error occurred. +Following the message, generator also yields +all the exception's ``__notes__``. + +When *show_group* is ``True``, and the exception is an instance of +:exc:`BaseExceptionGroup`, the nested exceptions are included as +well, recursively, with indentation relative to their nesting depth. +""" else: - def format_exception_only(self) -> Generator[str, None, None]: ... + def format_exception_only(self) -> Generator[str, None, None]: + """Format the exception part of the traceback. + + The return value is a generator of strings, each ending in a newline. + + Generator yields the exception message. + For :exc:`SyntaxError` exceptions, it + also yields (before the exception message) + several lines that (when printed) + display detailed information about where the syntax error occurred. + Following the message, generator also yields + all the exception's ``__notes__``. + """ if sys.version_info >= (3, 11): - def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: + """Print the result of self.format(chain=chain) to 'file'. +""" class FrameSummary: + """Information about a single frame from a traceback. + +- :attr:`filename` The filename for the frame. +- :attr:`lineno` The line within filename for the frame that was + active when the frame was captured. +- :attr:`name` The name of the function or method that was executing + when the frame was captured. +- :attr:`line` The text from the linecache module for the + of code that was running when the frame was captured. +- :attr:`locals` Either None if locals were not supplied, or a dict + mapping the name to the repr() of the variable. +""" if sys.version_info >= (3, 13): __slots__ = ( "filename", @@ -275,7 +529,16 @@ class FrameSummary: end_lineno: int | None = None, colno: int | None = None, end_colno: int | None = None, - ) -> None: ... + ) -> None: + """Construct a FrameSummary. + +:param lookup_line: If True, `linecache` is consulted for the source + code line. Otherwise, the line will be looked up when first needed. +:param locals: If supplied the frame locals, which will be captured as + object representations. +:param line: If provided, use this instead of looking up the line in + the linecache. +""" end_lineno: int | None colno: int | None end_colno: int | None @@ -289,7 +552,16 @@ class FrameSummary: lookup_line: bool = True, locals: Mapping[str, str] | None = None, line: str | None = None, - ) -> None: ... + ) -> None: + """Construct a FrameSummary. + + :param lookup_line: If True, `linecache` is consulted for the source + code line. Otherwise, the line will be looked up when first needed. + :param locals: If supplied the frame locals, which will be captured as + object representations. + :param line: If provided, use this instead of looking up the line in + the linecache. + """ filename: str lineno: int | None name: str @@ -314,6 +586,8 @@ class FrameSummary: __hash__: ClassVar[None] # type: ignore[assignment] class StackSummary(list[FrameSummary]): + """A list of FrameSummary objects, representing a stack of frames. +""" @classmethod def extract( cls, @@ -322,10 +596,41 @@ class StackSummary(list[FrameSummary]): limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False, - ) -> StackSummary: ... 
+ ) -> StackSummary: + """Create a StackSummary from a traceback or stack object. + +:param frame_gen: A generator that yields (frame, lineno) tuples + whose summaries are to be included in the stack. +:param limit: None to include all frames or the number of frames to + include. +:param lookup_lines: If True, lookup lines for each frame immediately, + otherwise lookup is deferred until the frame is rendered. +:param capture_locals: If True, the local variables from each frame will + be captured as object representations into the FrameSummary. +""" @classmethod - def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: ... + def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: + """ +Create a StackSummary object from a supplied list of +FrameSummary objects or old-style list of tuples. +""" if sys.version_info >= (3, 11): - def format_frame_summary(self, frame_summary: FrameSummary) -> str: ... + def format_frame_summary(self, frame_summary: FrameSummary) -> str: + """Format the lines for a single FrameSummary. + +Returns a string representing one frame involved in the stack. This +gets called for every frame to be printed in the stack summary. +""" + + def format(self) -> list[str]: + """Format the stack ready for printing. + +Returns a list of strings ready for printing. Each string in the +resulting list corresponds to a single frame from the stack. +Each string ends in a newline; the strings may contain internal +newlines as well, for those items with source text lines. - def format(self) -> list[str]: ... +For long sequences of the same frame and line, the first few +repetitions are shown, followed by a summary line stating the exact +number of further repetitions. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi index 31d8f74456395..0cbaa06e2f60f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi @@ -4,8 +4,18 @@ from collections.abc import Sequence from typing import Any, SupportsIndex, overload from typing_extensions import TypeAlias -def get_object_traceback(obj: object) -> Traceback | None: ... -def take_snapshot() -> Snapshot: ... +def get_object_traceback(obj: object) -> Traceback | None: + """ +Get the traceback where the Python object *obj* was allocated. +Return a Traceback instance. + +Return None if the tracemalloc module is not tracing memory allocations or +did not trace the allocation of the object. +""" +def take_snapshot() -> Snapshot: + """ +Take a snapshot of traces of memory blocks allocated by Python. +""" class BaseFilter: inclusive: bool @@ -32,6 +42,9 @@ class Filter(BaseFilter): ) -> None: ... class Statistic: + """ +Statistic difference on memory allocations between two Snapshot instance. +""" __slots__ = ("traceback", "size", "count") count: int size: int @@ -41,6 +54,10 @@ class Statistic: def __hash__(self) -> int: ... class StatisticDiff: + """ +Statistic difference on memory allocations between an old and a new +Snapshot instance. +""" __slots__ = ("traceback", "size", "size_diff", "count", "count_diff") count: int count_diff: int @@ -54,6 +71,9 @@ class StatisticDiff: _FrameTuple: TypeAlias = tuple[str, int] class Frame: + """ +Frame of a traceback. +""" __slots__ = ("_frame",) @property def filename(self) -> str: ... @@ -64,17 +84,32 @@ class Frame: def __hash__(self) -> int: ... 
def __lt__(self, other: Frame) -> bool: ... if sys.version_info >= (3, 11): - def __gt__(self, other: Frame) -> bool: ... - def __ge__(self, other: Frame) -> bool: ... - def __le__(self, other: Frame) -> bool: ... + def __gt__(self, other: Frame) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __ge__(self, other: Frame) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __le__(self, other: Frame) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" else: - def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... - def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... - def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] class Trace: + """ +Trace of a memory block. +""" __slots__ = ("_trace",) @property def domain(self) -> int: ... @@ -87,6 +122,10 @@ class Trace: def __hash__(self) -> int: ... class Traceback(Sequence[Frame]): + """ +Sequence of Frame instances sorted from the oldest frame +to the most recent frame. +""" __slots__ = ("_frames", "_total_nframe") @property def total_nframe(self) -> int | None: ... @@ -102,21 +141,56 @@ class Traceback(Sequence[Frame]): def __hash__(self) -> int: ... def __lt__(self, other: Traceback) -> bool: ... if sys.version_info >= (3, 11): - def __gt__(self, other: Traceback) -> bool: ... - def __ge__(self, other: Traceback) -> bool: ... - def __le__(self, other: Traceback) -> bool: ... + def __gt__(self, other: Traceback) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __ge__(self, other: Traceback) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __le__(self, other: Traceback) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" else: - def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... - def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... - def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). +""" + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: + """Return a >= b. Computed by @total_ordering from (not a < b). +""" + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). +""" class Snapshot: + """ +Snapshot of traces of memory blocks allocated by Python. +""" def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... - def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... - def dump(self, filename: str) -> None: ... - def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... 
+ def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: + """ +Compute the differences with an old snapshot old_snapshot. Get +statistics as a sorted list of StatisticDiff instances, grouped by +group_by. +""" + def dump(self, filename: str) -> None: + """ +Write the snapshot into a file. +""" + def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: + """ +Create a new Snapshot instance with a filtered traces sequence, filters +is a list of Filter or DomainFilter instances. If filters is an empty +list, return a new Snapshot instance with a copy of the traces. +""" @staticmethod - def load(filename: str) -> Snapshot: ... - def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... + def load(filename: str) -> Snapshot: + """ +Load a snapshot from a file. +""" + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: + """ +Group statistics by key_type. Return a sorted list of Statistic +instances. +""" traceback_limit: int traces: Sequence[Trace] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi index ca3f0013b20ec..e400f73bfcfb9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi @@ -1,3 +1,5 @@ +"""Terminal utilities. +""" import sys import termios from typing import IO, Final @@ -22,9 +24,17 @@ if sys.platform != "win32": ISPEED: Final = 4 OSPEED: Final = 5 CC: Final = 6 - def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... - def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: ... + def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: + """Put terminal into raw mode. +""" + def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: + """Put terminal into cbreak mode. +""" if sys.version_info >= (3, 12): - def cfmakeraw(mode: termios._Attr) -> None: ... - def cfmakecbreak(mode: termios._Attr) -> None: ... + def cfmakeraw(mode: termios._Attr) -> None: + """Make termios mode raw. +""" + def cfmakecbreak(mode: termios._Attr) -> None: + """Make termios mode cbreak. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi index 9b9b329bd74bc..44b40b9aa6dc3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi @@ -1,3 +1,79 @@ +""" +Turtle graphics is a popular way for introducing programming to +kids. It was part of the original Logo programming language developed +by Wally Feurzig and Seymour Papert in 1966. + +Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it +the command turtle.forward(15), and it moves (on-screen!) 15 pixels in +the direction it is facing, drawing a line as it moves. Give it the +command turtle.right(25), and it rotates in-place 25 degrees clockwise. + +By combining together these and similar commands, intricate shapes and +pictures can easily be drawn. + +----- turtle.py + +This module is an extended reimplementation of turtle.py from the +Python standard distribution up to Python 2.5. (See: https://www.python.org) + +It tries to keep the merits of turtle.py and to be (nearly) 100% +compatible with it. This means in the first place to enable the +learning programmer to use all the commands, classes and methods +interactively when using the module from within IDLE run with +the -n switch. 
+ +Roughly it has the following features added: + +- Better animation of the turtle movements, especially of turning the + turtle. So the turtles can more easily be used as a visual feedback + instrument by the (beginning) programmer. + +- Different turtle shapes, image files as turtle shapes, user defined + and user controllable turtle shapes, among them compound + (multicolored) shapes. Turtle shapes can be stretched and tilted, which + makes turtles very versatile geometrical objects. + +- Fine control over turtle movement and screen updates via delay(), + and enhanced tracer() and speed() methods. + +- Aliases for the most commonly used commands, like fd for forward etc., + following the early Logo traditions. This reduces the boring work of + typing long sequences of commands, which often occur in a natural way + when kids try to program fancy pictures on their first encounter with + turtle graphics. + +- Turtles now have an undo()-method with configurable undo-buffer. + +- Some simple commands/methods for creating event driven programs + (mouse-, key-, timer-events). Especially useful for programming games. + +- A scrollable Canvas class. The default scrollable Canvas can be + extended interactively as needed while playing around with the turtle(s). + +- A TurtleScreen class with methods controlling background color or + background image, window and canvas size and other properties of the + TurtleScreen. + +- There is a method, setworldcoordinates(), to install a user defined + coordinate-system for the TurtleScreen. + +- The implementation uses a 2-vector class named Vec2D, derived from tuple. + This class is public, so it can be imported by the application programmer, + which makes certain types of computations very natural and compact. + +- Appearance of the TurtleScreen and the Turtles at startup/import can be + configured by means of a turtle.cfg configuration file. + The default configuration mimics the appearance of the old turtle module. + +- If configured appropriately the module reads in docstrings from a docstring + dictionary in some different language, supplied separately and replaces + the English ones by those read in. There is a utility function + write_docstringdict() to write a dictionary with the original (English) + docstrings to disc, so it can serve as a template for translations. + +Behind the scenes there are some features included with possible +extensions in mind. These will be commented and documented elsewhere. +""" import sys from _typeshed import StrPath from collections.abc import Callable, Generator, Sequence @@ -165,6 +241,19 @@ _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] if sys.version_info >= (3, 12): class Vec2D(tuple[float, float]): + """A 2 dimensional vector class, used as a helper class +for implementing turtle graphics. +May be useful for turtle graphics programs also. +Derived from tuple, so a vector is a tuple! + +Provides (for a, b vectors, k number): + a+b vector addition + a-b vector subtraction + a*b inner product + k*a and a*k multiplication with scalar + |a| absolute value of a + a.rotate(angle) rotation +""" def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -175,11 +264,26 @@ if sys.version_info >= (3, 12): def __sub__(self, other: tuple[float, float]) -> Vec2D: ... def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... - def rotate(self, angle: float) -> Vec2D: ... 
+ def rotate(self, angle: float) -> Vec2D: + """rotate self counterclockwise by angle + """ else: @disjoint_base class Vec2D(tuple[float, float]): + """A 2 dimensional vector class, used as a helper class + for implementing turtle graphics. + May be useful for turtle graphics programs also. + Derived from tuple, so a vector is a tuple! + + Provides (for a, b vectors, k number): + a+b vector addition + a-b vector subtraction + a*b inner product + k*a and a*k multiplication with scalar + |a| absolute value of a + a.rotate(angle) rotation + """ def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -190,10 +294,17 @@ else: def __sub__(self, other: tuple[float, float]) -> Vec2D: ... def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... - def rotate(self, angle: float) -> Vec2D: ... + def rotate(self, angle: float) -> Vec2D: + """rotate self counterclockwise by angle + """ # Does not actually inherit from Canvas, but dynamically gets all methods of Canvas class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] + """Modeled after the scrolled canvas class from Grayons's Tkinter book. + +Used as the default canvas, which pops up automatically when +using turtle graphics functions or the Turtle class. +""" bg: str hscroll: Scrollbar vscroll: Scrollbar @@ -202,130 +313,853 @@ class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] ) -> None: ... canvwidth: int canvheight: int - def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: + """Adjust canvas and scrollbars according to given canvas size. +""" class TurtleScreenBase: + """Provide the basic graphics functionality. +Interface between Tkinter and turtle.py. + +To port turtle.py to some different graphics toolkit +a corresponding TurtleScreenBase class has to be implemented. +""" cv: Canvas canvwidth: int canvheight: int xscale: float yscale: float def __init__(self, cv: Canvas) -> None: ... - def mainloop(self) -> None: ... - def textinput(self, title: str, prompt: str) -> str | None: ... + def mainloop(self) -> None: + """Starts event loop - calling Tkinter's mainloop function. + +No argument. + +Must be last statement in a turtle graphics program. +Must NOT be used if a script is run from within IDLE in -n mode +(No subprocess) - for interactive use of turtle graphics. + +Example (for a TurtleScreen instance named screen): +>>> screen.mainloop() + +""" + def textinput(self, title: str, prompt: str) -> str | None: + """Pop up a dialog window for input of a string. + +Arguments: title is the title of the dialog window, +prompt is a text mostly describing what information to input. + +Return the string input +If the dialog is canceled, return None. + +Example (for a TurtleScreen instance named screen): +>>> screen.textinput("NIM", "Name of first player:") + +""" def numinput( self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None - ) -> float | None: ... + ) -> float | None: + """Pop up a dialog window for input of a number. -class Terminator(Exception): ... -class TurtleGraphicsError(Exception): ... +Arguments: title is the title of the dialog window, +prompt is a text mostly describing what numerical information to input. 
+default: default value +minval: minimum value for input +maxval: maximum value for input + +The number input must be in the range minval .. maxval if these are +given. If not, a hint is issued and the dialog remains open for +correction. Return the number input. +If the dialog is canceled, return None. + +Example (for a TurtleScreen instance named screen): +>>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) + +""" + +class Terminator(Exception): + """Will be raised in TurtleScreen.update, if _RUNNING becomes False. + +This stops execution of a turtle graphics script. +Main purpose: use in the Demo-Viewer turtle.Demo.py. +""" +class TurtleGraphicsError(Exception): + """Some TurtleGraphics Error + """ class Shape: + """Data structure modeling shapes. + +attribute _type is one of "polygon", "image", "compound" +attribute _data is - depending on _type a poygon-tuple, +an image or a list constructed using the addcomponent method. +""" def __init__( self, type_: Literal["polygon", "image", "compound"], data: _PolygonCoords | PhotoImage | None = None ) -> None: ... - def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: + """Add component to a shape of type compound. + +Arguments: poly is a polygon, i. e. a tuple of number pairs. +fill is the fillcolor of the component, +outline is the outline color of the component. + +call (for a Shapeobject namend s): +-- s.addcomponent(((0,0), (10,10), (-10,10)), "red", "blue") + +Example: +>>> poly = ((0,0),(10,-5),(0,10),(-10,-5)) +>>> s = Shape("compound") +>>> s.addcomponent(poly, "red", "blue") +>>> # .. add more components and then use register_shape() +""" class TurtleScreen(TurtleScreenBase): + """Provides screen oriented methods like bgcolor etc. + +Only relies upon the methods of TurtleScreenBase and NOT +upon components of the underlying graphics toolkit - +which is Tkinter in this case. +""" def __init__( self, cv: Canvas, mode: Literal["standard", "logo", "world"] = "standard", colormode: float = 1.0, delay: int = 10 ) -> None: ... - def clear(self) -> None: ... + def clear(self) -> None: + """Delete all drawings and all turtles from the TurtleScreen. + +No argument. + +Reset empty TurtleScreen to its initial state: white background, +no backgroundimage, no eventbindings and tracing on. + +Example (for a TurtleScreen instance named screen): +>>> screen.clear() + +Note: this method is not available as function. +""" @overload - def mode(self, mode: None = None) -> str: ... + def mode(self, mode: None = None) -> str: + """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. + +Optional argument: +mode -- one of the strings 'standard', 'logo' or 'world' + +Mode 'standard' is compatible with turtle.py. +Mode 'logo' is compatible with most Logo-Turtle-Graphics. +Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in +this mode angles appear distorted if x/y unit-ratio doesn't equal 1. +If mode is not given, return the current mode. + + Mode Initial turtle heading positive angles + ------------|-------------------------|------------------- + 'standard' to the right (east) counterclockwise + 'logo' upward (north) clockwise + +Examples: +>>> mode('logo') # resets turtle heading to north +>>> mode() +'logo' +""" @overload def mode(self, mode: Literal["standard", "logo", "world"]) -> None: ... 
- def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... - def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... + def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: + """Set up a user defined coordinate-system. + +Arguments: +llx -- a number, x-coordinate of lower left corner of canvas +lly -- a number, y-coordinate of lower left corner of canvas +urx -- a number, x-coordinate of upper right corner of canvas +ury -- a number, y-coordinate of upper right corner of canvas + +Set up user coodinat-system and switch to mode 'world' if necessary. +This performs a screen.reset. If mode 'world' is already active, +all drawings are redrawn according to the new coordinates. + +But ATTENTION: in user-defined coordinatesystems angles may appear +distorted. (see Screen.mode()) + +Example (for a TurtleScreen instance named screen): +>>> screen.setworldcoordinates(-10,-0.5,50,1.5) +>>> for _ in range(36): +... left(10) +... forward(0.5) +""" + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: + """Adds a turtle shape to TurtleScreen's shapelist. + +Arguments: +(1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! +(2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! +(3) name is an arbitrary string and shape is a tuple + of pairs of coordinates. Installs the corresponding + polygon shape +(4) name is an arbitrary string and shape is a + (compound) Shape object. Installs the corresponding + compound shape. +To use a shape, you have to issue the command shape(shapename). + +call: register_shape("turtle.gif") +--or: register_shape("tri", ((0,0), (10,10), (-10,10))) + +Example (for a TurtleScreen instance named screen): +>>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + +""" @overload - def colormode(self, cmode: None = None) -> float: ... + def colormode(self, cmode: None = None) -> float: + """Return the colormode or set it to 1.0 or 255. + +Optional argument: +cmode -- one of the values 1.0 or 255 + +r, g, b values of colortriples have to be in range 0..cmode. + +Example (for a TurtleScreen instance named screen): +>>> screen.colormode() +1.0 +>>> screen.colormode(255) +>>> pencolor(240,160,80) +""" @overload def colormode(self, cmode: float) -> None: ... - def reset(self) -> None: ... - def turtles(self) -> list[Turtle]: ... + def reset(self) -> None: + """Reset all Turtles on the Screen to their initial state. + +No argument. + +Example (for a TurtleScreen instance named screen): +>>> screen.reset() +""" + def turtles(self) -> list[Turtle]: + """Return the list of turtles on the screen. + +Example (for a TurtleScreen instance named screen): +>>> screen.turtles() +[] +""" @overload - def bgcolor(self) -> _AnyColor: ... + def bgcolor(self) -> _AnyColor: + """Set or return backgroundcolor of the TurtleScreen. + +Arguments (if given): a color string or three numbers +in the range 0..colormode or a 3-tuple of such numbers. 
+ +Example (for a TurtleScreen instance named screen): +>>> screen.bgcolor("orange") +>>> screen.bgcolor() +'orange' +>>> screen.bgcolor(0.5,0,0.5) +>>> screen.bgcolor() +'#800080' +""" @overload def bgcolor(self, color: _Color) -> None: ... @overload def bgcolor(self, r: float, g: float, b: float) -> None: ... @overload - def tracer(self, n: None = None) -> int: ... + def tracer(self, n: None = None) -> int: + """Turns turtle animation on/off and set delay for update drawings. + +Optional arguments: +n -- nonnegative integer +delay -- nonnegative integer + +If n is given, only each n-th regular screen update is really performed. +(Can be used to accelerate the drawing of complex graphics.) +Second arguments sets delay value (see RawTurtle.delay()) + +Example (for a TurtleScreen instance named screen): +>>> screen.tracer(8, 25) +>>> dist = 2 +>>> for i in range(200): +... fd(dist) +... rt(90) +... dist += 2 +""" @overload def tracer(self, n: int, delay: int | None = None) -> None: ... @overload - def delay(self, delay: None = None) -> int: ... + def delay(self, delay: None = None) -> int: + """Return or set the drawing delay in milliseconds. + +Optional argument: +delay -- positive integer + +Example (for a TurtleScreen instance named screen): +>>> screen.delay(15) +>>> screen.delay() +15 +""" @overload def delay(self, delay: int) -> None: ... if sys.version_info >= (3, 14): @contextmanager - def no_animation(self) -> Generator[None]: ... - - def update(self) -> None: ... - def window_width(self) -> int: ... - def window_height(self) -> int: ... - def getcanvas(self) -> Canvas: ... - def getshapes(self) -> list[str]: ... - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... - def onkey(self, fun: Callable[[], object], key: str) -> None: ... - def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... - def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... + def no_animation(self) -> Generator[None]: + """Temporarily turn off auto-updating the screen. + +This is useful for drawing complex shapes where even the fastest setting +is too slow. Once this context manager is exited, the drawing will +be displayed. + +Example (for a TurtleScreen instance named screen +and a Turtle instance named turtle): +>>> with screen.no_animation(): +... turtle.circle(50) +""" + + def update(self) -> None: + """Perform a TurtleScreen update. + """ + def window_width(self) -> int: + """Return the width of the turtle window. + +Example (for a TurtleScreen instance named screen): +>>> screen.window_width() +640 +""" + def window_height(self) -> int: + """Return the height of the turtle window. + +Example (for a TurtleScreen instance named screen): +>>> screen.window_height() +480 +""" + def getcanvas(self) -> Canvas: + """Return the Canvas of this TurtleScreen. + +No argument. + +Example (for a Screen instance named screen): +>>> cv = screen.getcanvas() +>>> cv + +""" + def getshapes(self) -> list[str]: + """Return a list of names of all currently available turtle shapes. + +No argument. + +Example (for a TurtleScreen instance named screen): +>>> screen.getshapes() +['arrow', 'blank', 'circle', ... , 'turtle'] +""" + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + """Bind fun to mouse-click event on canvas. + +Arguments: +fun -- a function with two arguments, the coordinates of the + clicked point on the canvas. 
+btn -- the number of the mouse-button, defaults to 1 + +Example (for a TurtleScreen instance named screen) + +>>> screen.onclick(goto) +>>> # Subsequently clicking into the TurtleScreen will +>>> # make the turtle move to the clicked point. +>>> screen.onclick(None) +""" + def onkey(self, fun: Callable[[], object], key: str) -> None: + """Bind fun to key-release event of key. + +Arguments: +fun -- a function with no arguments +key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + +In order to be able to register key-events, TurtleScreen +must have focus. (See method listen.) + +Example (for a TurtleScreen instance named screen): + +>>> def f(): +... fd(50) +... lt(60) +... +>>> screen.onkey(f, "Up") +>>> screen.listen() + +Subsequently the turtle can be moved by repeatedly pressing +the up-arrow key, consequently drawing a hexagon + +""" + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: + """Set focus on TurtleScreen (in order to collect key-events) + +No arguments. +Dummy arguments are provided in order +to be able to pass listen to the onclick method. + +Example (for a TurtleScreen instance named screen): +>>> screen.listen() +""" + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: + """Install a timer, which calls fun after t milliseconds. + +Arguments: +fun -- a function with no arguments. +t -- a number >= 0 + +Example (for a TurtleScreen instance named screen): + +>>> running = True +>>> def f(): +... if running: +... fd(50) +... lt(60) +... screen.ontimer(f, 250) +... +>>> f() # makes the turtle marching around +>>> running = False +""" @overload - def bgpic(self, picname: None = None) -> str: ... + def bgpic(self, picname: None = None) -> str: + """Set background image or return name of current backgroundimage. + +Optional argument: +picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". + +If picname is a filename, set the corresponding image as background. +If picname is "nopic", delete backgroundimage, if present. +If picname is None, return the filename of the current backgroundimage. + +Example (for a TurtleScreen instance named screen): +>>> screen.bgpic() +'nopic' +>>> screen.bgpic("landscape.gif") +>>> screen.bgpic() +'landscape.gif' +""" @overload def bgpic(self, picname: str) -> None: ... @overload - def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: + """Resize the canvas the turtles are drawing on. + +Optional arguments: +canvwidth -- positive integer, new width of canvas in pixels +canvheight -- positive integer, new height of canvas in pixels +bg -- colorstring or color-tuple, new backgroundcolor +If no arguments are given, return current (canvaswidth, canvasheight) + +Do not alter the drawing window. To observe hidden parts of +the canvas use the scrollbars. (Can make visible those parts +of a drawing, which were outside the canvas before!) + +Example (for a Turtle instance named turtle): +>>> turtle.screensize(2000,1500) +>>> # e.g. to search for an erroneously escaped turtle ;-) +""" # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... if sys.version_info >= (3, 14): - def save(self, filename: StrPath, *, overwrite: bool = False) -> None: ... 
+ def save(self, filename: StrPath, *, overwrite: bool = False) -> None: + """Save the drawing as a PostScript file + +Arguments: +filename -- a string, the path of the created file. + Must end with '.ps' or '.eps'. + +Optional arguments: +overwrite -- boolean, if true, then existing files will be overwritten + +Example (for a TurtleScreen instance named screen): +>>> screen.save('my_drawing.eps') +""" onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape - def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: + """Bind fun to key-press event of key if key is given, +or to any key-press-event if no key is given. + +Arguments: +fun -- a function with no arguments +key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + +In order to be able to register key-events, TurtleScreen +must have focus. (See method listen.) + +Example (for a TurtleScreen instance named screen +and a Turtle instance named turtle): + +>>> def f(): +... fd(50) +... lt(60) +... +>>> screen.onkeypress(f, "Up") +>>> screen.listen() + +Subsequently the turtle can be moved by repeatedly pressing +the up-arrow key, or by keeping pressed the up-arrow key. +consequently drawing a hexagon. +""" onkeyrelease = onkey class TNavigator: + """Navigation part of the RawTurtle. +Implements methods for turtle movement. +""" START_ORIENTATION: dict[str, Vec2D] DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int DEFAULT_ANGLEORIENT: int def __init__(self, mode: Literal["standard", "logo", "world"] = "standard") -> None: ... - def reset(self) -> None: ... - def degrees(self, fullcircle: float = 360.0) -> None: ... - def radians(self) -> None: ... + def reset(self) -> None: + """reset turtle to its initial values + +Will be overwritten by parent class +""" + def degrees(self, fullcircle: float = 360.0) -> None: + """Set angle measurement units to degrees. + +Optional argument: +fullcircle - a number + +Set angle measurement units, i. e. set number +of 'degrees' for a full circle. Default value is +360 degrees. + +Example (for a Turtle instance named turtle): +>>> turtle.left(90) +>>> turtle.heading() +90 + +Change angle measurement unit to grad (also known as gon, +grade, or gradian and equals 1/100-th of the right angle.) +>>> turtle.degrees(400.0) +>>> turtle.heading() +100 + +""" + def radians(self) -> None: + """Set the angle measurement units to radians. + +No arguments. + +Example (for a Turtle instance named turtle): +>>> turtle.heading() +90 +>>> turtle.radians() +>>> turtle.heading() +1.5707963267948966 +""" if sys.version_info >= (3, 12): - def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... - - def forward(self, distance: float) -> None: ... - def back(self, distance: float) -> None: ... - def right(self, angle: float) -> None: ... - def left(self, angle: float) -> None: ... - def pos(self) -> Vec2D: ... - def xcor(self) -> float: ... - def ycor(self) -> float: ... + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. +Includes no TPen references. +""" + + def forward(self, distance: float) -> None: + """Move the turtle forward by the specified distance. + +Aliases: forward | fd + +Argument: +distance -- a number (integer or float) + +Move the turtle forward by the specified distance, in the direction +the turtle is headed. 
+ +Example (for a Turtle instance named turtle): +>>> turtle.position() +(0.00, 0.00) +>>> turtle.forward(25) +>>> turtle.position() +(25.00,0.00) +>>> turtle.forward(-75) +>>> turtle.position() +(-50.00,0.00) +""" + def back(self, distance: float) -> None: + """Move the turtle backward by distance. + +Aliases: back | backward | bk + +Argument: +distance -- a number + +Move the turtle backward by distance, opposite to the direction the +turtle is headed. Do not change the turtle's heading. + +Example (for a Turtle instance named turtle): +>>> turtle.position() +(0.00, 0.00) +>>> turtle.backward(30) +>>> turtle.position() +(-30.00, 0.00) +""" + def right(self, angle: float) -> None: + """Turn turtle right by angle units. + +Aliases: right | rt + +Argument: +angle -- a number (integer or float) + +Turn turtle right by angle units. (Units are by default degrees, +but can be set via the degrees() and radians() functions.) +Angle orientation depends on mode. (See this.) + +Example (for a Turtle instance named turtle): +>>> turtle.heading() +22.0 +>>> turtle.right(45) +>>> turtle.heading() +337.0 +""" + def left(self, angle: float) -> None: + """Turn turtle left by angle units. + +Aliases: left | lt + +Argument: +angle -- a number (integer or float) + +Turn turtle left by angle units. (Units are by default degrees, +but can be set via the degrees() and radians() functions.) +Angle orientation depends on mode. (See this.) + +Example (for a Turtle instance named turtle): +>>> turtle.heading() +22.0 +>>> turtle.left(45) +>>> turtle.heading() +67.0 +""" + def pos(self) -> Vec2D: + """Return the turtle's current location (x,y), as a Vec2D-vector. + +Aliases: pos | position + +No arguments. + +Example (for a Turtle instance named turtle): +>>> turtle.pos() +(0.00, 240.00) +""" + def xcor(self) -> float: + """Return the turtle's x coordinate. + +No arguments. + +Example (for a Turtle instance named turtle): +>>> reset() +>>> turtle.left(60) +>>> turtle.forward(100) +>>> print(turtle.xcor()) +50.0 +""" + def ycor(self) -> float: + """Return the turtle's y coordinate +--- +No arguments. + +Example (for a Turtle instance named turtle): +>>> reset() +>>> turtle.left(60) +>>> turtle.forward(100) +>>> print(turtle.ycor()) +86.6025403784 +""" @overload - def goto(self, x: tuple[float, float], y: None = None) -> None: ... + def goto(self, x: tuple[float, float], y: None = None) -> None: + """Move turtle to an absolute position. + +Aliases: setpos | setposition | goto: + +Arguments: +x -- a number or a pair/vector of numbers +y -- a number None + +call: goto(x, y) # two coordinates +--or: goto((x, y)) # a pair (tuple) of coordinates +--or: goto(vec) # e.g. as returned by pos() + +Move turtle to an absolute position. If the pen is down, +a line will be drawn. The turtle's orientation does not change. + +Example (for a Turtle instance named turtle): +>>> tp = turtle.pos() +>>> tp +(0.00, 0.00) +>>> turtle.setpos(60,30) +>>> turtle.pos() +(60.00,30.00) +>>> turtle.setpos((20,80)) +>>> turtle.pos() +(20.00,80.00) +>>> turtle.setpos(tp) +>>> turtle.pos() +(0.00,0.00) +""" @overload def goto(self, x: float, y: float) -> None: ... - def home(self) -> None: ... - def setx(self, x: float) -> None: ... - def sety(self, y: float) -> None: ... + def home(self) -> None: + """Move turtle to the origin - coordinates (0,0). + +No arguments. + +Move turtle to the origin - coordinates (0,0) and set its +heading to its start-orientation (which depends on mode). 
+ +Example (for a Turtle instance named turtle): +>>> turtle.home() +""" + def setx(self, x: float) -> None: + """Set the turtle's first coordinate to x + +Argument: +x -- a number (integer or float) + +Set the turtle's first coordinate to x, leave second coordinate +unchanged. + +Example (for a Turtle instance named turtle): +>>> turtle.position() +(0.00, 240.00) +>>> turtle.setx(10) +>>> turtle.position() +(10.00, 240.00) +""" + def sety(self, y: float) -> None: + """Set the turtle's second coordinate to y + +Argument: +y -- a number (integer or float) + +Set the turtle's first coordinate to x, second coordinate remains +unchanged. + +Example (for a Turtle instance named turtle): +>>> turtle.position() +(0.00, 40.00) +>>> turtle.sety(-10) +>>> turtle.position() +(0.00, -10.00) +""" @overload - def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: + """Return the distance from the turtle to (x,y) in turtle step units. + +Arguments: +x -- a number or a pair/vector of numbers or a turtle instance +y -- a number None None + +call: distance(x, y) # two coordinates +--or: distance((x, y)) # a pair (tuple) of coordinates +--or: distance(vec) # e.g. as returned by pos() +--or: distance(mypen) # where mypen is another turtle + +Example (for a Turtle instance named turtle): +>>> turtle.pos() +(0.00, 0.00) +>>> turtle.distance(30,40) +50.0 +>>> pen = Turtle() +>>> pen.forward(77) +>>> turtle.distance(pen) +77.0 +""" @overload def distance(self, x: float, y: float) -> float: ... @overload - def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: + """Return the angle of the line from the turtle's position to (x, y). + +Arguments: +x -- a number or a pair/vector of numbers or a turtle instance +y -- a number None None + +call: distance(x, y) # two coordinates +--or: distance((x, y)) # a pair (tuple) of coordinates +--or: distance(vec) # e.g. as returned by pos() +--or: distance(mypen) # where mypen is another turtle + +Return the angle, between the line from turtle-position to position +specified by x, y and the turtle's start orientation. (Depends on +modes - "standard" or "logo") + +Example (for a Turtle instance named turtle): +>>> turtle.pos() +(10.00, 10.00) +>>> turtle.towards(0,0) +225.0 +""" @overload def towards(self, x: float, y: float) -> float: ... - def heading(self) -> float: ... - def setheading(self, to_angle: float) -> None: ... - def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... - def speed(self, s: int | None = 0) -> int | None: ... + def heading(self) -> float: + """Return the turtle's current heading. + +No arguments. + +Example (for a Turtle instance named turtle): +>>> turtle.left(67) +>>> turtle.heading() +67.0 +""" + def setheading(self, to_angle: float) -> None: + """Set the orientation of the turtle to to_angle. + +Aliases: setheading | seth + +Argument: +to_angle -- a number (integer or float) + +Set the orientation of the turtle to to_angle. 
+Here are some common directions in degrees: + + standard - mode: logo-mode: +-------------------|-------------------- + 0 - east 0 - north + 90 - north 90 - east + 180 - west 180 - south + 270 - south 270 - west + +Example (for a Turtle instance named turtle): +>>> turtle.setheading(90) +>>> turtle.heading() +90 +""" + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: + """Draw a circle with given radius. + +Arguments: +radius -- a number +extent (optional) -- a number +steps (optional) -- an integer + +Draw a circle with given radius. The center is radius units left +of the turtle; extent - an angle - determines which part of the +circle is drawn. If extent is not given, draw the entire circle. +If extent is not a full circle, one endpoint of the arc is the +current pen position. Draw the arc in counterclockwise direction +if radius is positive, otherwise in clockwise direction. Finally +the direction of the turtle is changed by the amount of extent. + +As the circle is approximated by an inscribed regular polygon, +steps determines the number of steps to use. If not given, +it will be calculated automatically. Maybe used to draw regular +polygons. + +call: circle(radius) # full circle +--or: circle(radius, extent) # arc +--or: circle(radius, extent, steps) +--or: circle(radius, steps=6) # 6-sided polygon + +Example (for a Turtle instance named turtle): +>>> turtle.circle(50) +>>> turtle.circle(120, 180) # semicircle +""" + def speed(self, s: int | None = 0) -> int | None: + """dummy method - to be overwritten by child class +""" fd = forward bk = back backward = back @@ -337,36 +1171,219 @@ class TNavigator: seth = setheading class TPen: + """Drawing part of the RawTurtle. +Implements drawing properties. +""" def __init__(self, resizemode: Literal["auto", "user", "noresize"] = "noresize") -> None: ... @overload - def resizemode(self, rmode: None = None) -> str: ... + def resizemode(self, rmode: None = None) -> str: + """Set resizemode to one of the values: "auto", "user", "noresize". + +(Optional) Argument: +rmode -- one of the strings "auto", "user", "noresize" + +Different resizemodes have the following effects: + - "auto" adapts the appearance of the turtle + corresponding to the value of pensize. + - "user" adapts the appearance of the turtle according to the + values of stretchfactor and outlinewidth (outline), + which are set by shapesize() + - "noresize" no adaption of the turtle's appearance takes place. +If no argument is given, return current resizemode. +resizemode("user") is called by a call of shapesize with arguments. + + +Examples (for a Turtle instance named turtle): +>>> turtle.resizemode("noresize") +>>> turtle.resizemode() +'noresize' +""" @overload def resizemode(self, rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload - def pensize(self, width: None = None) -> int: ... + def pensize(self, width: None = None) -> int: + """Set or return the line thickness. + +Aliases: pensize | width + +Argument: +width -- positive number + +Set the line thickness to width or return it. If resizemode is set +to "auto" and turtleshape is a polygon, that polygon is drawn with +the same line thickness. If no argument is given, current pensize +is returned. + +Example (for a Turtle instance named turtle): +>>> turtle.pensize() +1 +>>> turtle.pensize(10) # from here on lines of width 10 are drawn +""" @overload def pensize(self, width: int) -> None: ... - def penup(self) -> None: ... - def pendown(self) -> None: ... 
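# Illustrative usage sketch (editorial addition, not part of the typeshed stub):
# combining the TNavigator movement methods documented above with the TPen state
# methods that follow (pensize, penup, pendown). Draws a dashed square.
import turtle

t = turtle.Turtle()
t.pensize(3)                    # thicker line
for _ in range(4):              # four sides of a square
    for _ in range(5):          # five dashes per side
        t.pendown()
        t.forward(10)           # drawn segment
        t.penup()
        t.forward(10)           # gap
    t.left(90)
turtle.done()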
- def isdown(self) -> bool: ... + def penup(self) -> None: + """Pull the pen up -- no drawing when moving. + +Aliases: penup | pu | up + +No argument + +Example (for a Turtle instance named turtle): +>>> turtle.penup() +""" + def pendown(self) -> None: + """Pull the pen down -- drawing when moving. + +Aliases: pendown | pd | down + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.pendown() +""" + def isdown(self) -> bool: + """Return True if pen is down, False if it's up. + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.penup() +>>> turtle.isdown() +False +>>> turtle.pendown() +>>> turtle.isdown() +True +""" @overload - def speed(self, speed: None = None) -> int: ... + def speed(self, speed: None = None) -> int: + """Return or set the turtle's speed. + +Optional argument: +speed -- an integer in the range 0..10 or a speedstring (see below) + +Set the turtle's speed to an integer value in the range 0 .. 10. +If no argument is given: return current speed. + +If input is a number greater than 10 or smaller than 0.5, +speed is set to 0. +Speedstrings are mapped to speedvalues in the following way: + 'fastest' : 0 + 'fast' : 10 + 'normal' : 6 + 'slow' : 3 + 'slowest' : 1 +speeds from 1 to 10 enforce increasingly faster animation of +line drawing and turtle turning. + +Attention: +speed = 0 : *no* animation takes place. forward/back makes turtle jump +and likewise left/right make the turtle turn instantly. + +Example (for a Turtle instance named turtle): +>>> turtle.speed(3) +""" @overload def speed(self, speed: _Speed) -> None: ... @overload - def pencolor(self) -> _AnyColor: ... + def pencolor(self) -> _AnyColor: + """Return or set the pencolor. + +Arguments: +Four input formats are allowed: + - pencolor() + Return the current pencolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - pencolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - pencolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - pencolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + +If turtleshape is a polygon, the outline of that polygon is drawn +with the newly set pencolor. + +Example (for a Turtle instance named turtle): +>>> turtle.pencolor('brown') +>>> tup = (0.2, 0.8, 0.55) +>>> turtle.pencolor(tup) +>>> turtle.pencolor() +'#33cc8c' +""" @overload def pencolor(self, color: _Color) -> None: ... @overload def pencolor(self, r: float, g: float, b: float) -> None: ... @overload - def fillcolor(self) -> _AnyColor: ... + def fillcolor(self) -> _AnyColor: + """Return or set the fillcolor. + +Arguments: +Four input formats are allowed: + - fillcolor() + Return the current fillcolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. 
+ - fillcolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - fillcolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - fillcolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + +If turtleshape is a polygon, the interior of that polygon is drawn +with the newly set fillcolor. + +Example (for a Turtle instance named turtle): +>>> turtle.fillcolor('violet') +>>> col = turtle.pencolor() +>>> turtle.fillcolor(col) +>>> turtle.fillcolor(0, .5, 0) +""" @overload def fillcolor(self, color: _Color) -> None: ... @overload def fillcolor(self, r: float, g: float, b: float) -> None: ... @overload - def color(self) -> tuple[_AnyColor, _AnyColor]: ... + def color(self) -> tuple[_AnyColor, _AnyColor]: + """Return or set the pencolor and fillcolor. + +Arguments: +Several input formats are allowed. +They use 0, 1, 2, or 3 arguments as follows: + +color() + Return the current pencolor and the current fillcolor + as a pair of color specification strings as are returned + by pencolor and fillcolor. +color(colorstring), color((r,g,b)), color(r,g,b) + inputs as in pencolor, set both, fillcolor and pencolor, + to the given value. +color(colorstring1, colorstring2), +color((r1,g1,b1), (r2,g2,b2)) + equivalent to pencolor(colorstring1) and fillcolor(colorstring2) + and analogously, if the other input format is used. + +If turtleshape is a polygon, outline and interior of that polygon +is drawn with the newly set colors. +For more info see: pencolor, fillcolor + +Example (for a Turtle instance named turtle): +>>> turtle.color('red', 'green') +>>> turtle.color() +('red', 'green') +>>> colormode(255) +>>> color((40, 80, 120), (160, 200, 240)) +>>> color() +('#285078', '#a0c8f0') +""" @overload def color(self, color: _Color) -> None: ... @overload @@ -374,14 +1391,95 @@ class TPen: @overload def color(self, color1: _Color, color2: _Color) -> None: ... if sys.version_info >= (3, 12): - def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: + """To be overwritten by child class RawTurtle. +Includes no TNavigator references. +""" + + def showturtle(self) -> None: + """Makes the turtle visible. - def showturtle(self) -> None: ... - def hideturtle(self) -> None: ... - def isvisible(self) -> bool: ... +Aliases: showturtle | st + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.hideturtle() +>>> turtle.showturtle() +""" + def hideturtle(self) -> None: + """Makes the turtle invisible. + +Aliases: hideturtle | ht + +No argument. + +It's a good idea to do this while you're in the +middle of a complicated drawing, because hiding +the turtle speeds up the drawing observably. + +Example (for a Turtle instance named turtle): +>>> turtle.hideturtle() +""" + def isvisible(self) -> bool: + """Return True if the Turtle is shown, False if it's hidden. + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.hideturtle() +>>> print(turtle.isvisible()) +False +""" # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload - def pen(self) -> _PenState: ... + def pen(self) -> _PenState: + """Return or set the pen's attributes. + +Arguments: + pen -- a dictionary with some or all of the below listed keys. 
+ **pendict -- one or more keyword-arguments with the below + listed keys as keywords. + +Return or set the pen's attributes in a 'pen-dictionary' +with the following key/value pairs: + "shown" : True/False + "pendown" : True/False + "pencolor" : color-string or color-tuple + "fillcolor" : color-string or color-tuple + "pensize" : positive number + "speed" : number in range 0..10 + "resizemode" : "auto" or "user" or "noresize" + "stretchfactor": (positive number, positive number) + "shearfactor": number + "outline" : positive number + "tilt" : number + +This dictionary can be used as argument for a subsequent +pen()-call to restore the former pen-state. Moreover one +or more of these attributes can be provided as keyword-arguments. +This can be used to set several pen attributes in one statement. + + +Examples (for a Turtle instance named turtle): +>>> turtle.pen(fillcolor="black", pencolor="red", pensize=10) +>>> turtle.pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +>>> penstate=turtle.pen() +>>> turtle.color("yellow","") +>>> turtle.penup() +>>> turtle.pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +>>> p.pen(penstate, fillcolor="green") +>>> p.pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +""" @overload def pen( self, @@ -407,6 +1505,10 @@ class TPen: ht = hideturtle class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods in base classes + """Animation part of the RawTurtle. +Puts RawTurtle upon a TurtleScreen and provides tools for +its animation. +""" screen: TurtleScreen screens: ClassVar[list[TurtleScreen]] def __init__( @@ -416,80 +1518,544 @@ class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods undobuffersize: int = 1000, visible: bool = True, ) -> None: ... - def reset(self) -> None: ... - def setundobuffer(self, size: int | None) -> None: ... - def undobufferentries(self) -> int: ... - def clear(self) -> None: ... - def clone(self) -> Self: ... + def reset(self) -> None: + """Delete the turtle's drawings and restore its default values. + +No argument. + +Delete the turtle's drawings from the screen, re-center the turtle +and set variables to the default values. + +Example (for a Turtle instance named turtle): +>>> turtle.position() +(0.00,-22.00) +>>> turtle.heading() +100.0 +>>> turtle.reset() +>>> turtle.position() +(0.00,0.00) +>>> turtle.heading() +0.0 +""" + def setundobuffer(self, size: int | None) -> None: + """Set or disable undobuffer. + +Argument: +size -- an integer or None + +If size is an integer an empty undobuffer of given size is installed. +Size gives the maximum number of turtle-actions that can be undone +by the undo() function. +If size is None, no undobuffer is present. + +Example (for a Turtle instance named turtle): +>>> turtle.setundobuffer(42) +""" + def undobufferentries(self) -> int: + """Return count of entries in the undobuffer. + +No argument. + +Example (for a Turtle instance named turtle): +>>> while undobufferentries(): +... undo() +""" + def clear(self) -> None: + """Delete the turtle's drawings from the screen. Do not move turtle. + +No arguments. 
+ +Delete the turtle's drawings from the screen. Do not move turtle. +State and position of the turtle as well as drawings of other +turtles are not affected. + +Examples (for a Turtle instance named turtle): +>>> turtle.clear() +""" + def clone(self) -> Self: + """Create and return a clone of the turtle. + +No argument. + +Create and return a clone of the turtle with same position, heading +and turtle properties. + +Example (for a Turtle instance named mick): +mick = Turtle() +joe = mick.clone() +""" @overload - def shape(self, name: None = None) -> str: ... + def shape(self, name: None = None) -> str: + """Set turtle shape to shape with given name / return current shapename. + +Optional argument: +name -- a string, which is a valid shapename + +Set turtle shape to shape with given name or, if name is not given, +return name of current shape. +Shape with name must exist in the TurtleScreen's shape dictionary. +Initially there are the following polygon shapes: +'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. +To learn about how to deal with shapes see Screen-method register_shape. + +Example (for a Turtle instance named turtle): +>>> turtle.shape() +'arrow' +>>> turtle.shape("turtle") +>>> turtle.shape() +'turtle' +""" @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapesize(self) -> tuple[float, float, float]: ... + def shapesize(self) -> tuple[float, float, float]: + """Set/return turtle's stretchfactors/outline. Set resizemode to "user". + +Optional arguments: + stretch_wid : positive number + stretch_len : positive number + outline : positive number + +Return or set the pen's attributes x/y-stretchfactors and/or outline. +Set resizemode to "user". +If and only if resizemode is set to "user", the turtle will be displayed +stretched according to its stretchfactors: +stretch_wid is stretchfactor perpendicular to orientation +stretch_len is stretchfactor in direction of turtles orientation. +outline determines the width of the shapes's outline. + +Examples (for a Turtle instance named turtle): +>>> turtle.resizemode("user") +>>> turtle.shapesize(5, 5, 12) +>>> turtle.shapesize(outline=8) +""" @overload def shapesize( self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None ) -> None: ... @overload - def shearfactor(self, shear: None = None) -> float: ... + def shearfactor(self, shear: None = None) -> float: + """Set or return the current shearfactor. + +Optional argument: shear -- number, tangent of the shear angle + +Shear the turtleshape according to the given shearfactor shear, +which is the tangent of the shear angle. DO NOT change the +turtle's heading (direction of movement). +If shear is not given: return the current shearfactor, i. e. the +tangent of the shear angle, by which lines parallel to the +heading of the turtle are sheared. + +Examples (for a Turtle instance named turtle): +>>> turtle.shape("circle") +>>> turtle.shapesize(5,2) +>>> turtle.shearfactor(0.5) +>>> turtle.shearfactor() +>>> 0.5 +""" @overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapetransform(self) -> tuple[float, float, float, float]: ... + def shapetransform(self) -> tuple[float, float, float, float]: + """Set or return the current transformation matrix of the turtle shape. + +Optional arguments: t11, t12, t21, t22 -- numbers. 
+ +If none of the matrix elements are given, return the transformation +matrix. +Otherwise set the given elements and transform the turtleshape +according to the matrix consisting of first row t11, t12 and +second row t21, 22. +Modify stretchfactor, shearfactor and tiltangle according to the +given matrix. + +Examples (for a Turtle instance named turtle): +>>> turtle.shape("square") +>>> turtle.shapesize(4,2) +>>> turtle.shearfactor(-0.5) +>>> turtle.shapetransform() +(4.0, -1.0, -0.0, 2.0) +""" @overload def shapetransform( self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... - def get_shapepoly(self) -> _PolygonCoords | None: ... + def get_shapepoly(self) -> _PolygonCoords | None: + """Return the current shape polygon as tuple of coordinate pairs. + +No argument. + +Examples (for a Turtle instance named turtle): +>>> turtle.shape("square") +>>> turtle.shapetransform(4, -1, 0, 2) +>>> turtle.get_shapepoly() +((50, -20), (30, 20), (-50, 20), (-30, -20)) + +""" if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") - def settiltangle(self, angle: float) -> None: ... + def settiltangle(self, angle: float) -> None: + """Rotate the turtleshape to point in the specified direction + + Argument: angle -- number + + Rotate the turtleshape to point in the direction specified by angle, + regardless of its current tilt-angle. DO NOT change the turtle's + heading (direction of movement). + + Deprecated since Python 3.1 + + Examples (for a Turtle instance named turtle): + >>> turtle.shape("circle") + >>> turtle.shapesize(5,2) + >>> turtle.settiltangle(45) + >>> turtle.stamp() + >>> turtle.fd(50) + >>> turtle.settiltangle(-45) + >>> turtle.stamp() + >>> turtle.fd(50) + """ @overload - def tiltangle(self, angle: None = None) -> float: ... + def tiltangle(self, angle: None = None) -> float: + """Set or return the current tilt-angle. + +Optional argument: angle -- number + +Rotate the turtleshape to point in the direction specified by angle, +regardless of its current tilt-angle. DO NOT change the turtle's +heading (direction of movement). +If angle is not given: return the current tilt-angle, i. e. the angle +between the orientation of the turtleshape and the heading of the +turtle (its direction of movement). + +Examples (for a Turtle instance named turtle): +>>> turtle.shape("circle") +>>> turtle.shapesize(5, 2) +>>> turtle.tiltangle() +0.0 +>>> turtle.tiltangle(45) +>>> turtle.tiltangle() +45.0 +>>> turtle.stamp() +>>> turtle.fd(50) +>>> turtle.tiltangle(-45) +>>> turtle.tiltangle() +315.0 +>>> turtle.stamp() +>>> turtle.fd(50) +""" @overload def tiltangle(self, angle: float) -> None: ... - def tilt(self, angle: float) -> None: ... + def tilt(self, angle: float) -> None: + """Rotate the turtleshape by angle. + +Argument: +angle - a number + +Rotate the turtleshape by angle from its current tilt-angle, +but do NOT change the turtle's heading (direction of movement). + +Examples (for a Turtle instance named turtle): +>>> turtle.shape("circle") +>>> turtle.shapesize(5,2) +>>> turtle.tilt(30) +>>> turtle.fd(50) +>>> turtle.tilt(30) +>>> turtle.fd(50) +""" # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. - def stamp(self) -> Any: ... - def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... - def clearstamps(self, n: int | None = None) -> None: ... 
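# Illustrative usage sketch (editorial addition, not part of the typeshed stub):
# shape(), shapesize(), tilt() and the stamp()/clearstamp() pair documented
# around this hunk.
import turtle

t = turtle.Turtle()
t.shape("turtle")
t.shapesize(2, 2)               # enlarge the cursor (sets resizemode to "user")
stamp_ids = []
for _ in range(6):
    stamp_ids.append(t.stamp()) # leave a copy of the shape behind
    t.forward(60)
    t.tilt(30)                  # rotate the shape without changing the heading
t.clearstamp(stamp_ids[0])      # remove the first stamp again
turtle.done()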
- def filling(self) -> bool: ... + def stamp(self) -> Any: + """Stamp a copy of the turtleshape onto the canvas and return its id. + +No argument. + +Stamp a copy of the turtle shape onto the canvas at the current +turtle position. Return a stamp_id for that stamp, which can be +used to delete it by calling clearstamp(stamp_id). + +Example (for a Turtle instance named turtle): +>>> turtle.color("blue") +>>> turtle.stamp() +13 +>>> turtle.fd(50) +""" + def clearstamp(self, stampid: int | tuple[int, ...]) -> None: + """Delete stamp with given stampid + +Argument: +stampid - an integer, must be return value of previous stamp() call. + +Example (for a Turtle instance named turtle): +>>> turtle.color("blue") +>>> astamp = turtle.stamp() +>>> turtle.fd(50) +>>> turtle.clearstamp(astamp) +""" + def clearstamps(self, n: int | None = None) -> None: + """Delete all or first/last n of turtle's stamps. + +Optional argument: +n -- an integer + +If n is None, delete all of pen's stamps, +else if n > 0 delete first n stamps +else if n < 0 delete last n stamps. + +Example (for a Turtle instance named turtle): +>>> for i in range(8): +... turtle.stamp(); turtle.fd(30) +... +>>> turtle.clearstamps(2) +>>> turtle.clearstamps(-2) +>>> turtle.clearstamps() +""" + def filling(self) -> bool: + """Return fillstate (True if filling, False else). + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.begin_fill() +>>> if turtle.filling(): +... turtle.pensize(5) +... else: +... turtle.pensize(3) +""" if sys.version_info >= (3, 14): @contextmanager - def fill(self) -> Generator[None]: ... - - def begin_fill(self) -> None: ... - def end_fill(self) -> None: ... + def fill(self) -> Generator[None]: + """A context manager for filling a shape. + +Implicitly ensures the code block is wrapped with +begin_fill() and end_fill(). + +Example (for a Turtle instance named turtle): +>>> turtle.color("black", "red") +>>> with turtle.fill(): +... turtle.circle(60) +""" + + def begin_fill(self) -> None: + """Called just before drawing a shape to be filled. + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.color("black", "red") +>>> turtle.begin_fill() +>>> turtle.circle(60) +>>> turtle.end_fill() +""" + def end_fill(self) -> None: + """Fill the shape drawn after the call begin_fill(). + +No argument. + +Example (for a Turtle instance named turtle): +>>> turtle.color("black", "red") +>>> turtle.begin_fill() +>>> turtle.circle(60) +>>> turtle.end_fill() +""" @overload - def dot(self, size: int | _Color | None = None) -> None: ... + def dot(self, size: int | _Color | None = None) -> None: + """Draw a dot with diameter size, using color. + +Optional arguments: +size -- an integer >= 1 (if given) +color -- a colorstring or a numeric color tuple + +Draw a circular dot with diameter size, using color. +If size is not given, the maximum of pensize+4 and 2*pensize is used. + +Example (for a Turtle instance named turtle): +>>> turtle.dot() +>>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50) +""" @overload def dot(self, size: int | None, color: _Color, /) -> None: ... @overload def dot(self, size: int | None, r: float, g: float, b: float, /) -> None: ... def write( self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal") - ) -> None: ... + ) -> None: + """Write text at the current turtle position. 
+ +Arguments: +arg -- info, which is to be written to the TurtleScreen +move (optional) -- True/False +align (optional) -- one of the strings "left", "center" or right" +font (optional) -- a triple (fontname, fontsize, fonttype) + +Write text - the string representation of arg - at the current +turtle position according to align ("left", "center" or right") +and with the given font. +If move is True, the pen is moved to the bottom-right corner +of the text. By default, move is False. + +Example (for a Turtle instance named turtle): +>>> turtle.write('Home = ', True, align="center") +>>> turtle.write((0,0), True) +""" if sys.version_info >= (3, 14): @contextmanager - def poly(self) -> Generator[None]: ... - - def begin_poly(self) -> None: ... - def end_poly(self) -> None: ... - def get_poly(self) -> _PolygonCoords | None: ... - def getscreen(self) -> TurtleScreen: ... - def getturtle(self) -> Self: ... + def poly(self) -> Generator[None]: + """A context manager for recording the vertices of a polygon. + +Implicitly ensures that the code block is wrapped with +begin_poly() and end_poly() + +Example (for a Turtle instance named turtle) where we create a +triangle as the polygon and move the turtle 100 steps forward: +>>> with turtle.poly(): +... for side in range(3) +... turtle.forward(50) +... turtle.right(60) +>>> turtle.forward(100) +""" + + def begin_poly(self) -> None: + """Start recording the vertices of a polygon. + +No argument. + +Start recording the vertices of a polygon. Current turtle position +is first point of polygon. + +Example (for a Turtle instance named turtle): +>>> turtle.begin_poly() +""" + def end_poly(self) -> None: + """Stop recording the vertices of a polygon. + +No argument. + +Stop recording the vertices of a polygon. Current turtle position is +last point of polygon. This will be connected with the first point. + +Example (for a Turtle instance named turtle): +>>> turtle.end_poly() +""" + def get_poly(self) -> _PolygonCoords | None: + """Return the lastly recorded polygon. + +No argument. + +Example (for a Turtle instance named turtle): +>>> p = turtle.get_poly() +>>> turtle.register_shape("myFavouriteShape", p) +""" + def getscreen(self) -> TurtleScreen: + """Return the TurtleScreen object, the turtle is drawing on. + +No argument. + +Return the TurtleScreen object, the turtle is drawing on. +So TurtleScreen-methods can be called for that object. + +Example (for a Turtle instance named turtle): +>>> ts = turtle.getscreen() +>>> ts + +>>> ts.bgcolor("pink") +""" + def getturtle(self) -> Self: + """Return the Turtleobject itself. + +No argument. + +Only reasonable use: as a function to return the 'anonymous turtle': + +Example: +>>> pet = getturtle() +>>> pet.fd(50) +>>> pet + +>>> turtles() +[] +""" getpen = getturtle - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... - def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... - def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... - def undo(self) -> None: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: + """Bind fun to mouse-click event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). +add -- True or False. 
If True, new binding will be added, otherwise + it will replace a former binding. + +Example for the anonymous turtle, i. e. the procedural way: + +>>> def turn(x, y): +... left(360) +... +>>> onclick(turn) # Now clicking into the turtle will turn it. +>>> onclick(None) # event-binding will be removed +""" + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: + """Bind fun to mouse-button-release event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). + +Example (for a MyTurtle instance named joe): +>>> class MyTurtle(Turtle): +... def glow(self,x,y): +... self.fillcolor("red") +... def unglow(self,x,y): +... self.fillcolor("") +... +>>> joe = MyTurtle() +>>> joe.onclick(joe.glow) +>>> joe.onrelease(joe.unglow) + +Clicking on joe turns fillcolor red, unclicking turns it to +transparent. +""" + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: + """Bind fun to mouse-move event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). + +Every sequence of mouse-move-events on a turtle is preceded by a +mouse-click event on that turtle. + +Example (for a Turtle instance named turtle): +>>> turtle.ondrag(turtle.goto) + +Subsequently clicking and dragging a Turtle will move it +across the screen thereby producing handdrawings (if pen is +down). +""" + def undo(self) -> None: + """undo (repeatedly) the last turtle action. + +No argument. + +undo (repeatedly) the last turtle action. +Number of available undo actions is determined by the size of +the undobuffer. + +Example (for a Turtle instance named turtle): +>>> for i in range(4): +... turtle.fd(50); turtle.lt(80) +... +>>> for i in range(8): +... turtle.undo() +... +""" turtlesize = shapesize class _Screen(TurtleScreen): @@ -501,128 +2067,946 @@ class _Screen(TurtleScreen): height: int | float = 0.75, # noqa: Y041 startx: int | None = None, starty: int | None = None, - ) -> None: ... - def title(self, titlestring: str) -> None: ... - def bye(self) -> None: ... - def exitonclick(self) -> None: ... + ) -> None: + """Set the size and position of the main window. + +Arguments: +width: as integer a size in pixels, as float a fraction of the screen. + Default is 50% of screen. +height: as integer the height in pixels, as float a fraction of the + screen. Default is 75% of screen. +startx: if positive, starting position in pixels from the left + edge of the screen, if negative from the right edge + Default, startx=None is to center window horizontally. +starty: if positive, starting position in pixels from the top + edge of the screen, if negative from the bottom edge + Default, starty=None is to center window vertically. + +Examples (for a Screen instance named screen): +>>> screen.setup (width=200, height=200, startx=0, starty=0) + +sets window to 200x200 pixels, in upper left of screen + +>>> screen.setup(width=.75, height=0.5, startx=None, starty=None) + +sets window to 75% of screen by 50% of screen and centers +""" + def title(self, titlestring: str) -> None: + """Set title of turtle-window + +Argument: +titlestring -- a string, to appear in the titlebar of the + turtle graphics window. 
+ +This is a method of Screen-class. Not available for TurtleScreen- +objects. + +Example (for a Screen instance named screen): +>>> screen.title("Welcome to the turtle-zoo!") +""" + def bye(self) -> None: + """Shut the turtlegraphics window. + +Example (for a TurtleScreen instance named screen): +>>> screen.bye() +""" + def exitonclick(self) -> None: + """Go into mainloop until the mouse is clicked. + +No arguments. + +Bind bye() method to mouseclick on TurtleScreen. +If "using_IDLE" - value in configuration dictionary is False +(default value), enter mainloop. +If IDLE with -n switch (no subprocess) is used, this value should be +set to True in turtle.cfg. In this case IDLE's mainloop +is active also for the client script. + +This is a method of the Screen-class and not available for +TurtleScreen instances. + +Example (for a Screen instance named screen): +>>> screen.exitonclick() + +""" class Turtle(RawTurtle): + """RawTurtle auto-creating (scrolled) canvas. + +When a Turtle object is created or a function derived from some +Turtle method is called a TurtleScreen object is automatically created. +""" def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... RawPen = RawTurtle Pen = Turtle -def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: + """Create and write docstring-dictionary to file. + +Optional argument: +filename -- a string, used as filename + default value is turtle_docstringdict + +Has to be called explicitly, (not used by the turtle-graphics classes) +The docstring dictionary will be written to the Python script .py +It is intended to serve as a template for translation of the docstrings +into different languages. +""" # Functions copied from TurtleScreenBase: -def mainloop() -> None: ... -def textinput(title: str, prompt: str) -> str | None: ... +def mainloop() -> None: + """Starts event loop - calling Tkinter's mainloop function. + +No argument. + +Must be last statement in a turtle graphics program. +Must NOT be used if a script is run from within IDLE in -n mode +(No subprocess) - for interactive use of turtle graphics. + +Example: +>>> mainloop() + +""" +def textinput(title: str, prompt: str) -> str | None: + """Pop up a dialog window for input of a string. + +Arguments: title is the title of the dialog window, +prompt is a text mostly describing what information to input. + +Return the string input +If the dialog is canceled, return None. + +Example: +>>> textinput("NIM", "Name of first player:") + +""" def numinput( title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None -) -> float | None: ... +) -> float | None: + """Pop up a dialog window for input of a number. + +Arguments: title is the title of the dialog window, +prompt is a text mostly describing what numerical information to input. +default: default value +minval: minimum value for input +maxval: maximum value for input + +The number input must be in the range minval .. maxval if these are +given. If not, a hint is issued and the dialog remains open for +correction. Return the number input. +If the dialog is canceled, return None. + +Example: +>>> numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) + +""" # Functions copied from TurtleScreen: -def clear() -> None: ... +def clear() -> None: + """Delete the turtle's drawings from the screen. Do not move + +No arguments. 
+ +Delete the turtle's drawings from the screen. Do not move +State and position of the turtle as well as drawings of other +turtles are not affected. + +Examples: +>>> clear() +""" @overload -def mode(mode: None = None) -> str: ... +def mode(mode: None = None) -> str: + """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. + +Optional argument: +mode -- one of the strings 'standard', 'logo' or 'world' + +Mode 'standard' is compatible with turtle.py. +Mode 'logo' is compatible with most Logo-Turtle-Graphics. +Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in +this mode angles appear distorted if x/y unit-ratio doesn't equal 1. +If mode is not given, return the current mode. + + Mode Initial turtle heading positive angles + ------------|-------------------------|------------------- + 'standard' to the right (east) counterclockwise + 'logo' upward (north) clockwise + +Examples: +>>> mode('logo') # resets turtle heading to north +>>> mode() +'logo' +""" @overload def mode(mode: Literal["standard", "logo", "world"]) -> None: ... -def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... -def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... +def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: + """Set up a user defined coordinate-system. + +Arguments: +llx -- a number, x-coordinate of lower left corner of canvas +lly -- a number, y-coordinate of lower left corner of canvas +urx -- a number, x-coordinate of upper right corner of canvas +ury -- a number, y-coordinate of upper right corner of canvas + +Set up user coodinat-system and switch to mode 'world' if necessary. +This performs a reset. If mode 'world' is already active, +all drawings are redrawn according to the new coordinates. + +But ATTENTION: in user-defined coordinatesystems angles may appear +distorted. (see Screen.mode()) + +Example: +>>> setworldcoordinates(-10,-0.5,50,1.5) +>>> for _ in range(36): +... left(10) +... forward(0.5) +""" +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: + """Adds a turtle shape to TurtleScreen's shapelist. + +Arguments: +(1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! +(2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! +(3) name is an arbitrary string and shape is a tuple + of pairs of coordinates. Installs the corresponding + polygon shape +(4) name is an arbitrary string and shape is a + (compound) Shape object. Installs the corresponding + compound shape. +To use a shape, you have to issue the command shape(shapename). + +call: register_shape("turtle.gif") +--or: register_shape("tri", ((0,0), (10,10), (-10,10))) + +Example: +>>> register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + +""" @overload -def colormode(cmode: None = None) -> float: ... +def colormode(cmode: None = None) -> float: + """Return the colormode or set it to 1.0 or 255. + +Optional argument: +cmode -- one of the values 1.0 or 255 + +r, g, b values of colortriples have to be in range 0..cmode. 
+ +Example: +>>> colormode() +1.0 +>>> colormode(255) +>>> pencolor(240,160,80) +""" @overload def colormode(cmode: float) -> None: ... -def reset() -> None: ... -def turtles() -> list[Turtle]: ... +def reset() -> None: + """Delete the turtle's drawings and restore its default values. + +No argument. + +Delete the turtle's drawings from the screen, re-center the turtle +and set variables to the default values. + +Example: +>>> position() +(0.00,-22.00) +>>> heading() +100.0 +>>> reset() +>>> position() +(0.00,0.00) +>>> heading() +0.0 +""" +def turtles() -> list[Turtle]: + """Return the list of turtles on the + +Example: +>>> turtles() +[] +""" @overload -def bgcolor() -> _AnyColor: ... +def bgcolor() -> _AnyColor: + """Set or return backgroundcolor of the TurtleScreen. + +Arguments (if given): a color string or three numbers +in the range 0..colormode or a 3-tuple of such numbers. + +Example: +>>> bgcolor("orange") +>>> bgcolor() +'orange' +>>> bgcolor(0.5,0,0.5) +>>> bgcolor() +'#800080' +""" @overload def bgcolor(color: _Color) -> None: ... @overload def bgcolor(r: float, g: float, b: float) -> None: ... @overload -def tracer(n: None = None) -> int: ... +def tracer(n: None = None) -> int: + """Turns turtle animation on/off and set delay for update drawings. + +Optional arguments: +n -- nonnegative integer +delay -- nonnegative integer + +If n is given, only each n-th regular screen update is really performed. +(Can be used to accelerate the drawing of complex graphics.) +Second arguments sets delay value (see RawTurtle.delay()) + +Example: +>>> tracer(8, 25) +>>> dist = 2 +>>> for i in range(200): +... fd(dist) +... rt(90) +... dist += 2 +""" @overload def tracer(n: int, delay: int | None = None) -> None: ... @overload -def delay(delay: None = None) -> int: ... +def delay(delay: None = None) -> int: + """Return or set the drawing delay in milliseconds. + +Optional argument: +delay -- positive integer + +Example: +>>> delay(15) +>>> delay() +15 +""" @overload def delay(delay: int) -> None: ... if sys.version_info >= (3, 14): @contextmanager - def no_animation() -> Generator[None]: ... - -def update() -> None: ... -def window_width() -> int: ... -def window_height() -> int: ... -def getcanvas() -> Canvas: ... -def getshapes() -> list[str]: ... -def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... -def onkey(fun: Callable[[], object], key: str) -> None: ... -def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... -def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... + def no_animation() -> Generator[None]: + """Temporarily turn off auto-updating the + +This is useful for drawing complex shapes where even the fastest setting +is too slow. Once this context manager is exited, the drawing will +be displayed. + +Example (for a TurtleScreen instance named screen +and a Turtle instance named turtle): +>>> with no_animation(): +... turtle.circle(50) +""" + +def update() -> None: + """Perform a TurtleScreen update. + """ +def window_width() -> int: + """Return the width of the turtle window. + +Example: +>>> window_width() +640 +""" +def window_height() -> int: + """Return the height of the turtle window. + +Example: +>>> window_height() +480 +""" +def getcanvas() -> Canvas: + """Return the Canvas of this TurtleScreen. + +No argument. + +Example: +>>> cv = getcanvas() +>>> cv + +""" +def getshapes() -> list[str]: + """Return a list of names of all currently available turtle shapes. + +No argument. 
+ +Example: +>>> getshapes() +['arrow', 'blank', 'circle', ... , 'turtle'] +""" +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + """Bind fun to mouse-click event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). +add -- True or False. If True, new binding will be added, otherwise + it will replace a former binding. + +Example for the anonymous turtle, i. e. the procedural way: + +>>> def turn(x, y): +... left(360) +... +>>> onclick(turn) # Now clicking into the turtle will turn it. +>>> onclick(None) # event-binding will be removed +""" +def onkey(fun: Callable[[], object], key: str) -> None: + """Bind fun to key-release event of key. + +Arguments: +fun -- a function with no arguments +key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + +In order to be able to register key-events, TurtleScreen +must have focus. (See method listen.) + +Example: + +>>> def f(): +... fd(50) +... lt(60) +... +>>> onkey(f, "Up") +>>> listen() + +Subsequently the turtle can be moved by repeatedly pressing +the up-arrow key, consequently drawing a hexagon + +""" +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: + """Set focus on TurtleScreen (in order to collect key-events) + +No arguments. +Dummy arguments are provided in order +to be able to pass listen to the onclick method. + +Example: +>>> listen() +""" +def ontimer(fun: Callable[[], object], t: int = 0) -> None: + """Install a timer, which calls fun after t milliseconds. + +Arguments: +fun -- a function with no arguments. +t -- a number >= 0 + +Example: + +>>> running = True +>>> def f(): +... if running: +... fd(50) +... lt(60) +... ontimer(f, 250) +... +>>> f() # makes the turtle marching around +>>> running = False +""" @overload -def bgpic(picname: None = None) -> str: ... +def bgpic(picname: None = None) -> str: + """Set background image or return name of current backgroundimage. + +Optional argument: +picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". + +If picname is a filename, set the corresponding image as background. +If picname is "nopic", delete backgroundimage, if present. +If picname is None, return the filename of the current backgroundimage. + +Example: +>>> bgpic() +'nopic' +>>> bgpic("landscape.gif") +>>> bgpic() +'landscape.gif' +""" @overload def bgpic(picname: str) -> None: ... @overload -def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: + """Resize the canvas the turtles are drawing on. + +Optional arguments: +canvwidth -- positive integer, new width of canvas in pixels +canvheight -- positive integer, new height of canvas in pixels +bg -- colorstring or color-tuple, new backgroundcolor +If no arguments are given, return current (canvaswidth, canvasheight) + +Do not alter the drawing window. To observe hidden parts of +the canvas use the scrollbars. (Can make visible those parts +of a drawing, which were outside the canvas before!) + +Example (for a Turtle instance named turtle): +>>> turtle.screensize(2000,1500) +>>> # e.g. to search for an erroneously escaped turtle ;-) +""" @overload def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... 
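# Illustrative usage sketch (editorial addition, not part of the typeshed stub):
# the module-level (procedural) interface, using colormode(), tracer() and
# update() as documented above to draw a complex figure without animation delay.
from turtle import colormode, done, forward, pencolor, right, tracer, update

colormode(255)                  # accept 0..255 RGB components
tracer(0)                       # turn animation off while drawing
for i in range(360):
    pencolor(i % 256, 120, 200)
    forward(i)
    right(59)
update()                        # show the finished drawing
done()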
if sys.version_info >= (3, 14): - def save(filename: StrPath, *, overwrite: bool = False) -> None: ... + def save(filename: StrPath, *, overwrite: bool = False) -> None: + """Save the drawing as a PostScript file + +Arguments: +filename -- a string, the path of the created file. + Must end with '.ps' or '.eps'. + +Optional arguments: +overwrite -- boolean, if true, then existing files will be overwritten + +Example: +>>> save('my_drawing.eps') +""" onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape -def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: + """Bind fun to key-press event of key if key is given, +or to any key-press-event if no key is given. + +Arguments: +fun -- a function with no arguments +key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + +In order to be able to register key-events, TurtleScreen +must have focus. (See method listen.) + +Example (for a TurtleScreen instance named screen +and a Turtle instance named turtle): + +>>> def f(): +... fd(50) +... lt(60) +... +>>> onkeypress(f, "Up") +>>> listen() + +Subsequently the turtle can be moved by repeatedly pressing +the up-arrow key, or by keeping pressed the up-arrow key. +consequently drawing a hexagon. +""" onkeyrelease = onkey # Functions copied from _Screen: -def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... -def title(titlestring: str) -> None: ... -def bye() -> None: ... -def exitonclick() -> None: ... -def Screen() -> _Screen: ... +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: + """Set the size and position of the main window. + +Arguments: +width: as integer a size in pixels, as float a fraction of the + Default is 50% of +height: as integer the height in pixels, as float a fraction of the + Default is 75% of +startx: if positive, starting position in pixels from the left + edge of the screen, if negative from the right edge + Default, startx=None is to center window horizontally. +starty: if positive, starting position in pixels from the top + edge of the screen, if negative from the bottom edge + Default, starty=None is to center window vertically. + +Examples: +>>> setup (width=200, height=200, startx=0, starty=0) + +sets window to 200x200 pixels, in upper left of screen + +>>> setup(width=.75, height=0.5, startx=None, starty=None) + +sets window to 75% of screen by 50% of screen and centers +""" +def title(titlestring: str) -> None: + """Set title of turtle-window + +Argument: +titlestring -- a string, to appear in the titlebar of the + turtle graphics window. + +This is a method of Screen-class. Not available for TurtleScreen- +objects. + +Example: +>>> title("Welcome to the turtle-zoo!") +""" +def bye() -> None: + """Shut the turtlegraphics window. + +Example: +>>> bye() +""" +def exitonclick() -> None: + """Go into mainloop until the mouse is clicked. + +No arguments. + +Bind bye() method to mouseclick on TurtleScreen. +If "using_IDLE" - value in configuration dictionary is False +(default value), enter mainloop. +If IDLE with -n switch (no subprocess) is used, this value should be +set to True in turtle.cfg. In this case IDLE's mainloop +is active also for the client script. + +This is a method of the Screen-class and not available for +TurtleScreen instances. 
+ +Example: +>>> exitonclick() + +""" +def Screen() -> _Screen: + """Return the singleton screen object. +If none exists at the moment, create a new one and return it, +else return the existing one. +""" # Functions copied from TNavigator: -def degrees(fullcircle: float = 360.0) -> None: ... -def radians() -> None: ... -def forward(distance: float) -> None: ... -def back(distance: float) -> None: ... -def right(angle: float) -> None: ... -def left(angle: float) -> None: ... -def pos() -> Vec2D: ... -def xcor() -> float: ... -def ycor() -> float: ... +def degrees(fullcircle: float = 360.0) -> None: + """Set angle measurement units to degrees. + +Optional argument: +fullcircle - a number + +Set angle measurement units, i. e. set number +of 'degrees' for a full circle. Default value is +360 degrees. + +Example: +>>> left(90) +>>> heading() +90 + +Change angle measurement unit to grad (also known as gon, +grade, or gradian and equals 1/100-th of the right angle.) +>>> degrees(400.0) +>>> heading() +100 + +""" +def radians() -> None: + """Set the angle measurement units to radians. + +No arguments. + +Example: +>>> heading() +90 +>>> radians() +>>> heading() +1.5707963267948966 +""" +def forward(distance: float) -> None: + """Move the turtle forward by the specified distance. + +Aliases: forward | fd + +Argument: +distance -- a number (integer or float) + +Move the turtle forward by the specified distance, in the direction +the turtle is headed. + +Example: +>>> position() +(0.00, 0.00) +>>> forward(25) +>>> position() +(25.00,0.00) +>>> forward(-75) +>>> position() +(-50.00,0.00) +""" +def back(distance: float) -> None: + """Move the turtle backward by distance. + +Aliases: back | backward | bk + +Argument: +distance -- a number + +Move the turtle backward by distance, opposite to the direction the +turtle is headed. Do not change the turtle's heading. + +Example: +>>> position() +(0.00, 0.00) +>>> backward(30) +>>> position() +(-30.00, 0.00) +""" +def right(angle: float) -> None: + """Turn turtle right by angle units. + +Aliases: right | rt + +Argument: +angle -- a number (integer or float) + +Turn turtle right by angle units. (Units are by default degrees, +but can be set via the degrees() and radians() functions.) +Angle orientation depends on mode. (See this.) + +Example: +>>> heading() +22.0 +>>> right(45) +>>> heading() +337.0 +""" +def left(angle: float) -> None: + """Turn turtle left by angle units. + +Aliases: left | lt + +Argument: +angle -- a number (integer or float) + +Turn turtle left by angle units. (Units are by default degrees, +but can be set via the degrees() and radians() functions.) +Angle orientation depends on mode. (See this.) + +Example: +>>> heading() +22.0 +>>> left(45) +>>> heading() +67.0 +""" +def pos() -> Vec2D: + """Return the turtle's current location (x,y), as a Vec2D-vector. + +Aliases: pos | position + +No arguments. + +Example: +>>> pos() +(0.00, 240.00) +""" +def xcor() -> float: + """Return the turtle's x coordinate. + +No arguments. + +Example: +>>> reset() +>>> left(60) +>>> forward(100) +>>> print(xcor()) +50.0 +""" +def ycor() -> float: + """Return the turtle's y coordinate +--- +No arguments. + +Example: +>>> reset() +>>> left(60) +>>> forward(100) +>>> print(ycor()) +86.6025403784 +""" @overload -def goto(x: tuple[float, float], y: None = None) -> None: ... +def goto(x: tuple[float, float], y: None = None) -> None: + """Move turtle to an absolute position. 
+ +Aliases: setpos | setposition | goto: + +Arguments: +x -- a number or a pair/vector of numbers +y -- a number None + +call: goto(x, y) # two coordinates +--or: goto((x, y)) # a pair (tuple) of coordinates +--or: goto(vec) # e.g. as returned by pos() + +Move turtle to an absolute position. If the pen is down, +a line will be drawn. The turtle's orientation does not change. + +Example: +>>> tp = pos() +>>> tp +(0.00, 0.00) +>>> setpos(60,30) +>>> pos() +(60.00,30.00) +>>> setpos((20,80)) +>>> pos() +(20.00,80.00) +>>> setpos(tp) +>>> pos() +(0.00,0.00) +""" @overload def goto(x: float, y: float) -> None: ... -def home() -> None: ... -def setx(x: float) -> None: ... -def sety(y: float) -> None: ... +def home() -> None: + """Move turtle to the origin - coordinates (0,0). + +No arguments. + +Move turtle to the origin - coordinates (0,0) and set its +heading to its start-orientation (which depends on mode). + +Example: +>>> home() +""" +def setx(x: float) -> None: + """Set the turtle's first coordinate to x + +Argument: +x -- a number (integer or float) + +Set the turtle's first coordinate to x, leave second coordinate +unchanged. + +Example: +>>> position() +(0.00, 240.00) +>>> setx(10) +>>> position() +(10.00, 240.00) +""" +def sety(y: float) -> None: + """Set the turtle's second coordinate to y + +Argument: +y -- a number (integer or float) + +Set the turtle's first coordinate to x, second coordinate remains +unchanged. + +Example: +>>> position() +(0.00, 40.00) +>>> sety(-10) +>>> position() +(0.00, -10.00) +""" @overload -def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: + """Return the distance from the turtle to (x,y) in turtle step units. + +Arguments: +x -- a number or a pair/vector of numbers or a turtle instance +y -- a number None None + +call: distance(x, y) # two coordinates +--or: distance((x, y)) # a pair (tuple) of coordinates +--or: distance(vec) # e.g. as returned by pos() +--or: distance(mypen) # where mypen is another turtle + +Example: +>>> pos() +(0.00, 0.00) +>>> distance(30,40) +50.0 +>>> pen = Turtle() +>>> pen.forward(77) +>>> distance(pen) +77.0 +""" @overload def distance(x: float, y: float) -> float: ... @overload -def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: + """Return the angle of the line from the turtle's position to (x, y). + +Arguments: +x -- a number or a pair/vector of numbers or a turtle instance +y -- a number None None + +call: distance(x, y) # two coordinates +--or: distance((x, y)) # a pair (tuple) of coordinates +--or: distance(vec) # e.g. as returned by pos() +--or: distance(mypen) # where mypen is another turtle + +Return the angle, between the line from turtle-position to position +specified by x, y and the turtle's start orientation. (Depends on +modes - "standard" or "logo") + +Example: +>>> pos() +(10.00, 10.00) +>>> towards(0,0) +225.0 +""" @overload def towards(x: float, y: float) -> float: ... -def heading() -> float: ... -def setheading(to_angle: float) -> None: ... -def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... +def heading() -> float: + """Return the turtle's current heading. + +No arguments. + +Example: +>>> left(67) +>>> heading() +67.0 +""" +def setheading(to_angle: float) -> None: + """Set the orientation of the turtle to to_angle. 
+ +Aliases: setheading | seth + +Argument: +to_angle -- a number (integer or float) + +Set the orientation of the turtle to to_angle. +Here are some common directions in degrees: + + standard - mode: logo-mode: +-------------------|-------------------- + 0 - east 0 - north + 90 - north 90 - east + 180 - west 180 - south + 270 - south 270 - west + +Example: +>>> setheading(90) +>>> heading() +90 +""" +def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: + """Draw a circle with given radius. + +Arguments: +radius -- a number +extent (optional) -- a number +steps (optional) -- an integer + +Draw a circle with given radius. The center is radius units left +of the turtle; extent - an angle - determines which part of the +circle is drawn. If extent is not given, draw the entire circle. +If extent is not a full circle, one endpoint of the arc is the +current pen position. Draw the arc in counterclockwise direction +if radius is positive, otherwise in clockwise direction. Finally +the direction of the turtle is changed by the amount of extent. + +As the circle is approximated by an inscribed regular polygon, +steps determines the number of steps to use. If not given, +it will be calculated automatically. Maybe used to draw regular +polygons. + +call: circle(radius) # full circle +--or: circle(radius, extent) # arc +--or: circle(radius, extent, steps) +--or: circle(radius, steps=6) # 6-sided polygon + +Example: +>>> circle(50) +>>> circle(120, 180) # semicircle +""" fd = forward bk = back @@ -636,47 +3020,305 @@ seth = setheading # Functions copied from TPen: @overload -def resizemode(rmode: None = None) -> str: ... +def resizemode(rmode: None = None) -> str: + """Set resizemode to one of the values: "auto", "user", "noresize". + +(Optional) Argument: +rmode -- one of the strings "auto", "user", "noresize" + +Different resizemodes have the following effects: + - "auto" adapts the appearance of the turtle + corresponding to the value of pensize. + - "user" adapts the appearance of the turtle according to the + values of stretchfactor and outlinewidth (outline), + which are set by shapesize() + - "noresize" no adaption of the turtle's appearance takes place. +If no argument is given, return current resizemode. +resizemode("user") is called by a call of shapesize with arguments. + + +Examples: +>>> resizemode("noresize") +>>> resizemode() +'noresize' +""" @overload def resizemode(rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload -def pensize(width: None = None) -> int: ... +def pensize(width: None = None) -> int: + """Set or return the line thickness. + +Aliases: pensize | width + +Argument: +width -- positive number + +Set the line thickness to width or return it. If resizemode is set +to "auto" and turtleshape is a polygon, that polygon is drawn with +the same line thickness. If no argument is given, current pensize +is returned. + +Example: +>>> pensize() +1 +>>> pensize(10) # from here on lines of width 10 are drawn +""" @overload def pensize(width: int) -> None: ... -def penup() -> None: ... -def pendown() -> None: ... -def isdown() -> bool: ... +def penup() -> None: + """Pull the pen up -- no drawing when moving. + +Aliases: penup | pu | up + +No argument + +Example: +>>> penup() +""" +def pendown() -> None: + """Pull the pen down -- drawing when moving. + +Aliases: pendown | pd | down + +No argument. + +Example: +>>> pendown() +""" +def isdown() -> bool: + """Return True if pen is down, False if it's up. + +No argument. 
+ +Example: +>>> penup() +>>> isdown() +False +>>> pendown() +>>> isdown() +True +""" @overload -def speed(speed: None = None) -> int: ... +def speed(speed: None = None) -> int: + """Return or set the turtle's speed. + +Optional argument: +speed -- an integer in the range 0..10 or a speedstring (see below) + +Set the turtle's speed to an integer value in the range 0 .. 10. +If no argument is given: return current speed. + +If input is a number greater than 10 or smaller than 0.5, +speed is set to 0. +Speedstrings are mapped to speedvalues in the following way: + 'fastest' : 0 + 'fast' : 10 + 'normal' : 6 + 'slow' : 3 + 'slowest' : 1 +speeds from 1 to 10 enforce increasingly faster animation of +line drawing and turtle turning. + +Attention: +speed = 0 : *no* animation takes place. forward/back makes turtle jump +and likewise left/right make the turtle turn instantly. + +Example: +>>> speed(3) +""" @overload def speed(speed: _Speed) -> None: ... @overload -def pencolor() -> _AnyColor: ... +def pencolor() -> _AnyColor: + """Return or set the pencolor. + +Arguments: +Four input formats are allowed: + - pencolor() + Return the current pencolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - pencolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - pencolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - pencolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + +If turtleshape is a polygon, the outline of that polygon is drawn +with the newly set pencolor. + +Example: +>>> pencolor('brown') +>>> tup = (0.2, 0.8, 0.55) +>>> pencolor(tup) +>>> pencolor() +'#33cc8c' +""" @overload def pencolor(color: _Color) -> None: ... @overload def pencolor(r: float, g: float, b: float) -> None: ... @overload -def fillcolor() -> _AnyColor: ... +def fillcolor() -> _AnyColor: + """Return or set the fillcolor. + +Arguments: +Four input formats are allowed: + - fillcolor() + Return the current fillcolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - fillcolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - fillcolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - fillcolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + +If turtleshape is a polygon, the interior of that polygon is drawn +with the newly set fillcolor. + +Example: +>>> fillcolor('violet') +>>> col = pencolor() +>>> fillcolor(col) +>>> fillcolor(0, .5, 0) +""" @overload def fillcolor(color: _Color) -> None: ... @overload def fillcolor(r: float, g: float, b: float) -> None: ... @overload -def color() -> tuple[_AnyColor, _AnyColor]: ... +def color() -> tuple[_AnyColor, _AnyColor]: + """Return or set the pencolor and fillcolor. + +Arguments: +Several input formats are allowed. +They use 0, 1, 2, or 3 arguments as follows: + +color() + Return the current pencolor and the current fillcolor + as a pair of color specification strings as are returned + by pencolor and fillcolor. 
+color(colorstring), color((r,g,b)), color(r,g,b) + inputs as in pencolor, set both, fillcolor and pencolor, + to the given value. +color(colorstring1, colorstring2), +color((r1,g1,b1), (r2,g2,b2)) + equivalent to pencolor(colorstring1) and fillcolor(colorstring2) + and analogously, if the other input format is used. + +If turtleshape is a polygon, outline and interior of that polygon +is drawn with the newly set colors. +For more info see: pencolor, fillcolor + +Example: +>>> color('red', 'green') +>>> color() +('red', 'green') +>>> colormode(255) +>>> color((40, 80, 120), (160, 200, 240)) +>>> color() +('#285078', '#a0c8f0') +""" @overload def color(color: _Color) -> None: ... @overload def color(r: float, g: float, b: float) -> None: ... @overload def color(color1: _Color, color2: _Color) -> None: ... -def showturtle() -> None: ... -def hideturtle() -> None: ... -def isvisible() -> bool: ... +def showturtle() -> None: + """Makes the turtle visible. + +Aliases: showturtle | st + +No argument. + +Example: +>>> hideturtle() +>>> showturtle() +""" +def hideturtle() -> None: + """Makes the turtle invisible. + +Aliases: hideturtle | ht + +No argument. + +It's a good idea to do this while you're in the +middle of a complicated drawing, because hiding +the turtle speeds up the drawing observably. + +Example: +>>> hideturtle() +""" +def isvisible() -> bool: + """Return True if the Turtle is shown, False if it's hidden. + +No argument. + +Example: +>>> hideturtle() +>>> print(isvisible()) +False +""" # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload -def pen() -> _PenState: ... +def pen() -> _PenState: + """Return or set the pen's attributes. + +Arguments: + pen -- a dictionary with some or all of the below listed keys. + **pendict -- one or more keyword-arguments with the below + listed keys as keywords. + +Return or set the pen's attributes in a 'pen-dictionary' +with the following key/value pairs: + "shown" : True/False + "pendown" : True/False + "pencolor" : color-string or color-tuple + "fillcolor" : color-string or color-tuple + "pensize" : positive number + "speed" : number in range 0..10 + "resizemode" : "auto" or "user" or "noresize" + "stretchfactor": (positive number, positive number) + "shearfactor": number + "outline" : positive number + "tilt" : number + +This dictionary can be used as argument for a subsequent +pen()-call to restore the former pen-state. Moreover one +or more of these attributes can be provided as keyword-arguments. +This can be used to set several pen attributes in one statement. + + +Examples: +>>> pen(fillcolor="black", pencolor="red", pensize=10) +>>> pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +>>> penstate=pen() +>>> color("yellow","") +>>> penup() +>>> pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +>>> p.pen(penstate, fillcolor="green") +>>> p.pen() +{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, +'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', +'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} +""" @overload def pen( pen: _PenState | None = None, @@ -703,88 +3345,540 @@ ht = hideturtle # Functions copied from RawTurtle: -def setundobuffer(size: int | None) -> None: ... 
-def undobufferentries() -> int: ... +def setundobuffer(size: int | None) -> None: + """Set or disable undobuffer. + +Argument: +size -- an integer or None + +If size is an integer an empty undobuffer of given size is installed. +Size gives the maximum number of turtle-actions that can be undone +by the undo() function. +If size is None, no undobuffer is present. + +Example: +>>> setundobuffer(42) +""" +def undobufferentries() -> int: + """Return count of entries in the undobuffer. + +No argument. + +Example: +>>> while undobufferentries(): +... undo() +""" @overload -def shape(name: None = None) -> str: ... +def shape(name: None = None) -> str: + """Set turtle shape to shape with given name / return current shapename. + +Optional argument: +name -- a string, which is a valid shapename + +Set turtle shape to shape with given name or, if name is not given, +return name of current shape. +Shape with name must exist in the TurtleScreen's shape dictionary. +Initially there are the following polygon shapes: +'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. +To learn about how to deal with shapes see Screen-method register_shape. + +Example: +>>> shape() +'arrow' +>>> shape("turtle") +>>> shape() +'turtle' +""" @overload def shape(name: str) -> None: ... if sys.version_info >= (3, 12): - def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: ... + def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: + """Instantly move turtle to an absolute position. + +Arguments: +x -- a number or None +y -- a number None +fill_gap -- a boolean This argument must be specified by name. + +call: teleport(x, y) # two coordinates +--or: teleport(x) # teleport to x position, keeping y as is +--or: teleport(y=y) # teleport to y position, keeping x as is +--or: teleport(x, y, fill_gap=True) + # teleport but fill the gap in between + +Move turtle to an absolute position. Unlike goto(x, y), a line will not +be drawn. The turtle's orientation does not change. If currently +filling, the polygon(s) teleported from will be filled after leaving, +and filling will begin again after teleporting. This can be disabled +with fill_gap=True, which makes the imaginary line traveled during +teleporting act as a fill barrier like in goto(x, y). + +Example: +>>> tp = pos() +>>> tp +(0.00,0.00) +>>> teleport(60) +>>> pos() +(60.00,0.00) +>>> teleport(y=10) +>>> pos() +(60.00,10.00) +>>> teleport(20, 30) +>>> pos() +(20.00,30.00) +""" # Unsafely overlaps when no arguments are provided @overload -def shapesize() -> tuple[float, float, float]: ... +def shapesize() -> tuple[float, float, float]: + """Set/return turtle's stretchfactors/outline. Set resizemode to "user". + +Optional arguments: + stretch_wid : positive number + stretch_len : positive number + outline : positive number + +Return or set the pen's attributes x/y-stretchfactors and/or outline. +Set resizemode to "user". +If and only if resizemode is set to "user", the turtle will be displayed +stretched according to its stretchfactors: +stretch_wid is stretchfactor perpendicular to orientation +stretch_len is stretchfactor in direction of turtles orientation. +outline determines the width of the shapes's outline. + +Examples: +>>> resizemode("user") +>>> shapesize(5, 5, 12) +>>> shapesize(outline=8) +""" @overload def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... 
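An illustrative aside, not part of the vendored stub or the upstream docstrings: the shape/shapesize/teleport signatures above can be exercised with a minimal sketch like the one below. It assumes a Tk display is available (turtle needs one) and that teleport() exists only on Python 3.12+, matching the version guard in the stub.

import sys
import turtle

t = turtle.Turtle()
t.shape("turtle")             # one of the built-in polygon shapes listed above
t.resizemode("user")          # make the shapesize() stretch factors take effect
t.shapesize(2, 3, outline=4)  # stretch_wid=2, stretch_len=3, outline width 4

if sys.version_info >= (3, 12):
    # teleport() moves without drawing a line, unlike goto()
    t.teleport(60, 30, fill_gap=False)

turtle.done()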
@overload -def shearfactor(shear: None = None) -> float: ... +def shearfactor(shear: None = None) -> float: + """Set or return the current shearfactor. + +Optional argument: shear -- number, tangent of the shear angle + +Shear the turtleshape according to the given shearfactor shear, +which is the tangent of the shear angle. DO NOT change the +turtle's heading (direction of movement). +If shear is not given: return the current shearfactor, i. e. the +tangent of the shear angle, by which lines parallel to the +heading of the turtle are sheared. + +Examples: +>>> shape("circle") +>>> shapesize(5,2) +>>> shearfactor(0.5) +>>> shearfactor() +>>> 0.5 +""" @overload def shearfactor(shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload -def shapetransform() -> tuple[float, float, float, float]: ... +def shapetransform() -> tuple[float, float, float, float]: + """Set or return the current transformation matrix of the turtle shape. + +Optional arguments: t11, t12, t21, t22 -- numbers. + +If none of the matrix elements are given, return the transformation +matrix. +Otherwise set the given elements and transform the turtleshape +according to the matrix consisting of first row t11, t12 and +second row t21, 22. +Modify stretchfactor, shearfactor and tiltangle according to the +given matrix. + +Examples: +>>> shape("square") +>>> shapesize(4,2) +>>> shearfactor(-0.5) +>>> shapetransform() +(4.0, -1.0, -0.0, 2.0) +""" @overload def shapetransform( t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... -def get_shapepoly() -> _PolygonCoords | None: ... +def get_shapepoly() -> _PolygonCoords | None: + """Return the current shape polygon as tuple of coordinate pairs. + +No argument. + +Examples: +>>> shape("square") +>>> shapetransform(4, -1, 0, 2) +>>> get_shapepoly() +((50, -20), (30, 20), (-50, 20), (-30, -20)) + +""" if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") - def settiltangle(angle: float) -> None: ... + def settiltangle(angle: float) -> None: + """Rotate the turtleshape to point in the specified direction + + Argument: angle -- number + + Rotate the turtleshape to point in the direction specified by angle, + regardless of its current tilt-angle. DO NOT change the turtle's + heading (direction of movement). + + Deprecated since Python 3.1 + + Examples: + >>> shape("circle") + >>> shapesize(5,2) + >>> settiltangle(45) + >>> stamp() + >>> fd(50) + >>> settiltangle(-45) + >>> stamp() + >>> fd(50) + """ @overload -def tiltangle(angle: None = None) -> float: ... +def tiltangle(angle: None = None) -> float: + """Set or return the current tilt-angle. + +Optional argument: angle -- number + +Rotate the turtleshape to point in the direction specified by angle, +regardless of its current tilt-angle. DO NOT change the turtle's +heading (direction of movement). +If angle is not given: return the current tilt-angle, i. e. the angle +between the orientation of the turtleshape and the heading of the +turtle (its direction of movement). + +Examples: +>>> shape("circle") +>>> shapesize(5, 2) +>>> tiltangle() +0.0 +>>> tiltangle(45) +>>> tiltangle() +45.0 +>>> stamp() +>>> fd(50) +>>> tiltangle(-45) +>>> tiltangle() +315.0 +>>> stamp() +>>> fd(50) +""" @overload def tiltangle(angle: float) -> None: ... -def tilt(angle: float) -> None: ... +def tilt(angle: float) -> None: + """Rotate the turtleshape by angle. 
+ +Argument: +angle - a number + +Rotate the turtleshape by angle from its current tilt-angle, +but do NOT change the turtle's heading (direction of movement). + +Examples: +>>> shape("circle") +>>> shapesize(5,2) +>>> tilt(30) +>>> fd(50) +>>> tilt(30) +>>> fd(50) +""" # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. -def stamp() -> Any: ... -def clearstamp(stampid: int | tuple[int, ...]) -> None: ... -def clearstamps(n: int | None = None) -> None: ... -def filling() -> bool: ... +def stamp() -> Any: + """Stamp a copy of the turtleshape onto the canvas and return its id. + +No argument. + +Stamp a copy of the turtle shape onto the canvas at the current +turtle position. Return a stamp_id for that stamp, which can be +used to delete it by calling clearstamp(stamp_id). + +Example: +>>> color("blue") +>>> stamp() +13 +>>> fd(50) +""" +def clearstamp(stampid: int | tuple[int, ...]) -> None: + """Delete stamp with given stampid + +Argument: +stampid - an integer, must be return value of previous stamp() call. + +Example: +>>> color("blue") +>>> astamp = stamp() +>>> fd(50) +>>> clearstamp(astamp) +""" +def clearstamps(n: int | None = None) -> None: + """Delete all or first/last n of turtle's stamps. + +Optional argument: +n -- an integer + +If n is None, delete all of pen's stamps, +else if n > 0 delete first n stamps +else if n < 0 delete last n stamps. + +Example: +>>> for i in range(8): +... stamp(); fd(30) +... +>>> clearstamps(2) +>>> clearstamps(-2) +>>> clearstamps() +""" +def filling() -> bool: + """Return fillstate (True if filling, False else). + +No argument. + +Example: +>>> begin_fill() +>>> if filling(): +... pensize(5) +... else: +... pensize(3) +""" if sys.version_info >= (3, 14): @contextmanager - def fill() -> Generator[None]: ... - -def begin_fill() -> None: ... -def end_fill() -> None: ... + def fill() -> Generator[None]: + """A context manager for filling a shape. + +Implicitly ensures the code block is wrapped with +begin_fill() and end_fill(). + +Example: +>>> color("black", "red") +>>> with fill(): +... circle(60) +""" + +def begin_fill() -> None: + """Called just before drawing a shape to be filled. + +No argument. + +Example: +>>> color("black", "red") +>>> begin_fill() +>>> circle(60) +>>> end_fill() +""" +def end_fill() -> None: + """Fill the shape drawn after the call begin_fill(). + +No argument. + +Example: +>>> color("black", "red") +>>> begin_fill() +>>> circle(60) +>>> end_fill() +""" @overload -def dot(size: int | _Color | None = None) -> None: ... +def dot(size: int | _Color | None = None) -> None: + """Draw a dot with diameter size, using color. + +Optional arguments: +size -- an integer >= 1 (if given) +color -- a colorstring or a numeric color tuple + +Draw a circular dot with diameter size, using color. +If size is not given, the maximum of pensize+4 and 2*pensize is used. + +Example: +>>> dot() +>>> fd(50); dot(20, "blue"); fd(50) +""" @overload def dot(size: int | None, color: _Color, /) -> None: ... @overload def dot(size: int | None, r: float, g: float, b: float, /) -> None: ... -def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: + """Write text at the current turtle position. 
+ +Arguments: +arg -- info, which is to be written to the TurtleScreen +move (optional) -- True/False +align (optional) -- one of the strings "left", "center" or right" +font (optional) -- a triple (fontname, fontsize, fonttype) + +Write text - the string representation of arg - at the current +turtle position according to align ("left", "center" or right") +and with the given font. +If move is True, the pen is moved to the bottom-right corner +of the text. By default, move is False. + +Example: +>>> write('Home = ', True, align="center") +>>> write((0,0), True) +""" if sys.version_info >= (3, 14): @contextmanager - def poly() -> Generator[None]: ... - -def begin_poly() -> None: ... -def end_poly() -> None: ... -def get_poly() -> _PolygonCoords | None: ... -def getscreen() -> TurtleScreen: ... -def getturtle() -> Turtle: ... + def poly() -> Generator[None]: + """A context manager for recording the vertices of a polygon. + +Implicitly ensures that the code block is wrapped with +begin_poly() and end_poly() + +Example (for a Turtle instance named turtle) where we create a +triangle as the polygon and move the turtle 100 steps forward: +>>> with poly(): +... for side in range(3) +... forward(50) +... right(60) +>>> forward(100) +""" + +def begin_poly() -> None: + """Start recording the vertices of a polygon. + +No argument. + +Start recording the vertices of a polygon. Current turtle position +is first point of polygon. + +Example: +>>> begin_poly() +""" +def end_poly() -> None: + """Stop recording the vertices of a polygon. + +No argument. + +Stop recording the vertices of a polygon. Current turtle position is +last point of polygon. This will be connected with the first point. + +Example: +>>> end_poly() +""" +def get_poly() -> _PolygonCoords | None: + """Return the lastly recorded polygon. + +No argument. + +Example: +>>> p = get_poly() +>>> register_shape("myFavouriteShape", p) +""" +def getscreen() -> TurtleScreen: + """Return the TurtleScreen object, the turtle is drawing on. + +No argument. + +Return the TurtleScreen object, the turtle is drawing on. +So TurtleScreen-methods can be called for that object. + +Example: +>>> ts = getscreen() +>>> ts + +>>> ts.bgcolor("pink") +""" +def getturtle() -> Turtle: + """Return the Turtleobject itself. + +No argument. + +Only reasonable use: as a function to return the 'anonymous turtle': + +Example: +>>> pet = getturtle() +>>> pet.fd(50) +>>> pet + +>>> turtles() +[] +""" getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... -def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... -def undo() -> None: ... +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + """Bind fun to mouse-button-release event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). + +Example (for a MyTurtle instance named joe): +>>> class MyTurtle(Turtle): +... def glow(self,x,y): +... self.fillcolor("red") +... def unglow(self,x,y): +... self.fillcolor("") +... +>>> joe = MyTurtle() +>>> joe.onclick(joe.glow) +>>> joe.onrelease(joe.unglow) + +Clicking on joe turns fillcolor red, unclicking turns it to +transparent. 
+""" +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + """Bind fun to mouse-move event on this turtle on canvas. + +Arguments: +fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. +btn -- number of the mouse-button defaults to 1 (left mouse button). + +Every sequence of mouse-move-events on a turtle is preceded by a +mouse-click event on that + +Example: +>>> ondrag(goto) + +Subsequently clicking and dragging a Turtle will move it +across the screen thereby producing handdrawings (if pen is +down). +""" +def undo() -> None: + """undo (repeatedly) the last turtle action. + +No argument. + +undo (repeatedly) the last turtle action. +Number of available undo actions is determined by the size of +the undobuffer. + +Example: +>>> for i in range(4): +... fd(50); lt(80) +... +>>> for i in range(8): +... undo() +... +""" turtlesize = shapesize # Functions copied from RawTurtle with a few tweaks: -def clone() -> Turtle: ... +def clone() -> Turtle: + """Create and return a clone of the + +No argument. + +Create and return a clone of the turtle with same position, heading +and turtle properties. + +Example (for a Turtle instance named mick): +mick = Turtle() +joe = mick.clone() +""" # Extra functions present only in the global scope: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi index 649e463ff71f8..c9a73fb2940c7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi @@ -1,3 +1,6 @@ +""" +Define names for built-in types that aren't directly accessible as a builtin. +""" import sys from _typeshed import AnnotationForm, MaybeNone, SupportsKeysAndGetItem from _typeshed.importlib import LoaderProtocol @@ -71,6 +74,21 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Make sure this class definition stays roughly in line with `builtins.function` @final class FunctionType: + """Create a function object. + + code + a code object + globals + the globals dictionary + name + a string that overrides the name from the code object + argdefs + a tuple that specifies the default argument values + closure + a tuple that supplies the bindings for free variables + kwdefaults + a dictionary that specifies the default keyword argument values +""" @property def __closure__(self) -> tuple[CellType, ...] | None: ... __code__: CodeType @@ -111,9 +129,13 @@ class FunctionType: closure: tuple[CellType, ...] | None = None, ) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" @overload - def __get__(self, instance: None, owner: type, /) -> FunctionType: ... + def __get__(self, instance: None, owner: type, /) -> FunctionType: + """Return an attribute of instance, which is of type owner. +""" @overload def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ... @@ -121,6 +143,8 @@ LambdaType = FunctionType @final class CodeType: + """Create a code object. Not for the faint of heart. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @property @@ -261,7 +285,9 @@ class CodeType: co_qualname: str = ..., co_linetable: bytes = ..., co_exceptiontable: bytes = ..., - ) -> Self: ... + ) -> Self: + """Return a copy of the code object with new values for the specified fields. 
+""" elif sys.version_info >= (3, 10): def replace( self, @@ -282,7 +308,9 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_linetable: bytes = ..., - ) -> Self: ... + ) -> Self: + """Return a copy of the code object with new values for the specified fields. +""" else: def replace( self, @@ -303,37 +331,67 @@ class CodeType: co_filename: str = ..., co_name: str = ..., co_lnotab: bytes = ..., - ) -> Self: ... + ) -> Self: + """Return a copy of the code object with new values for the specified fields. +""" if sys.version_info >= (3, 13): __replace__ = replace @final class MappingProxyType(Mapping[_KT_co, _VT_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] + """Read-only proxy of a mapping. +""" __hash__: ClassVar[None] # type: ignore[assignment] def __new__(cls, mapping: SupportsKeysAndGetItem[_KT_co, _VT_co]) -> Self: ... - def __getitem__(self, key: _KT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - def __iter__(self) -> Iterator[_KT_co]: ... - def __len__(self) -> int: ... + def __getitem__(self, key: _KT_co, /) -> _VT_co: # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + """Return self[key]. +""" + def __iter__(self) -> Iterator[_KT_co]: + """Implement iter(self). +""" + def __len__(self) -> int: + """Return len(self). +""" def __eq__(self, value: object, /) -> bool: ... - def copy(self) -> dict[_KT_co, _VT_co]: ... - def keys(self) -> KeysView[_KT_co]: ... - def values(self) -> ValuesView[_VT_co]: ... - def items(self) -> ItemsView[_KT_co, _VT_co]: ... + def copy(self) -> dict[_KT_co, _VT_co]: + """D.copy() -> a shallow copy of D +""" + def keys(self) -> KeysView[_KT_co]: + """D.keys() -> a set-like object providing a view on D's keys +""" + def values(self) -> ValuesView[_VT_co]: + """D.values() -> an object providing a view on D's values +""" + def items(self) -> ItemsView[_KT_co, _VT_co]: + """D.items() -> a set-like object providing a view on D's items +""" @overload - def get(self, key: _KT_co, /) -> _VT_co | None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + def get(self, key: _KT_co, /) -> _VT_co | None: # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter + """Return the value for key if key is in the mapping, else default. +""" @overload def get(self, key: _KT_co, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload def get(self, key: _KT_co, default: _T2, /) -> _VT_co | _T2: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... - def __reversed__(self) -> Iterator[_KT_co]: ... - def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... - def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" + def __reversed__(self) -> Iterator[_KT_co]: + """D.__reversed__() -> reverse iterator +""" + def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: + """Return self|value. +""" + def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: + """Return value|self. +""" if sys.version_info >= (3, 12): @disjoint_base class SimpleNamespace: + """A simple attribute-based namespace. 
+""" __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): def __init__( @@ -347,10 +405,16 @@ if sys.version_info >= (3, 12): def __setattr__(self, name: str, value: Any, /) -> None: ... def __delattr__(self, name: str, /) -> None: ... if sys.version_info >= (3, 13): - def __replace__(self, **kwargs: Any) -> Self: ... + def __replace__(self, **kwargs: Any) -> Self: + """Return a copy of the namespace object with new values for the specified attributes. +""" else: class SimpleNamespace: + """A simple attribute-based namespace. + +SimpleNamespace(**kwargs) +""" __hash__: ClassVar[None] # type: ignore[assignment] def __init__(self, **kwargs: Any) -> None: ... def __eq__(self, value: object, /) -> bool: ... @@ -360,6 +424,10 @@ else: @disjoint_base class ModuleType: + """Create a module object. + +The name must be a string; the optional doc argument can have any type. +""" __name__: str __file__: str | None @property @@ -387,6 +455,14 @@ class ModuleType: @final class CellType: + """Create a new cell object. + + contents + the contents of the cell. If not specified, the cell will be empty, + and + further attempts to access its cell_contents attribute will + raise a ValueError. +""" def __new__(cls, contents: object = ..., /) -> Self: ... __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -404,28 +480,49 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): @property def gi_running(self) -> bool: ... @property - def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: ... + def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: + """object being iterated by yield from, or None +""" if sys.version_info >= (3, 11): @property def gi_suspended(self) -> bool: ... __name__: str __qualname__: str - def __iter__(self) -> Self: ... - def __next__(self) -> _YieldT_co: ... - def send(self, arg: _SendT_contra, /) -> _YieldT_co: ... + def __iter__(self) -> Self: + """Implement iter(self). +""" + def __next__(self) -> _YieldT_co: + """Implement next(self). +""" + def send(self, arg: _SendT_contra, /) -> _YieldT_co: + """send(arg) -> send 'arg' into generator, +return next yielded value or raise StopIteration. +""" @overload def throw( self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: ... + ) -> _YieldT_co: + """throw(value) +throw(type[,value[,tb]]) + +Raise exception in generator, return next yielded value or raise +StopIteration. +the (type, val, tb) signature is deprecated, +and may be removed in a future version of Python. +""" @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: ... + def __class_getitem__(cls, item: Any, /) -> Any: + """See PEP 585 +""" @final class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property - def ag_await(self) -> Awaitable[Any] | None: ... + def ag_await(self) -> Awaitable[Any] | None: + """object being awaited on, or None +""" @property def ag_code(self) -> CodeType: ... @property @@ -438,17 +535,34 @@ class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property def ag_suspended(self) -> bool: ... - def __aiter__(self) -> Self: ... - def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... - def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... 
+ def __aiter__(self) -> Self: + """Return an awaitable, that resolves in asynchronous iterator. +""" + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: + """Return a value or raise StopAsyncIteration. +""" + def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: + """asend(v) -> send 'v' in generator. +""" @overload async def athrow( self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: ... + ) -> _YieldT_co: + """athrow(value) +athrow(type[,value[,tb]]) + +raise exception in generator. +the (type, val, tb) signature is deprecated, +and may be removed in a future version of Python. +""" @overload async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... - def aclose(self) -> Coroutine[Any, Any, None]: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def aclose(self) -> Coroutine[Any, Any, None]: + """aclose() -> raise GeneratorExit inside generator. +""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" # Non-default variations to accommodate coroutines _SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) @@ -459,7 +573,9 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): __name__: str __qualname__: str @property - def cr_await(self) -> Any | None: ... + def cr_await(self) -> Any | None: + """object being awaited on, or None +""" @property def cr_code(self) -> CodeType: ... if sys.version_info >= (3, 12): @@ -477,20 +593,39 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): @property def cr_suspended(self) -> bool: ... - def close(self) -> None: ... - def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: ... - def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: ... + def close(self) -> None: + """close() -> raise GeneratorExit inside coroutine. +""" + def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: + """Return an iterator to be used in await expression. +""" + def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: + """send(arg) -> send 'arg' into coroutine, +return next iterated value or raise StopIteration. +""" @overload def throw( self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: ... + ) -> _YieldT_co: + """throw(value) +throw(type[,value[,traceback]]) + +Raise exception in coroutine, return next iterated value or raise +StopIteration. +the (type, val, tb) signature is deprecated, +and may be removed in a future version of Python. +""" @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def __class_getitem__(cls, item: Any, /) -> Any: ... + def __class_getitem__(cls, item: Any, /) -> Any: + """See PEP 585 +""" @final class MethodType: + """Create a bound instance method object. +""" @property def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function @property @@ -498,18 +633,26 @@ class MethodType: @property def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function @property - def __func__(self) -> Callable[..., Any]: ... + def __func__(self) -> Callable[..., Any]: + """the function (or other callable) implementing a method +""" @property - def __self__(self) -> object: ... 
+ def __self__(self) -> object: + """the instance to which a method is bound +""" @property def __name__(self) -> str: ... # inherited from the added function @property def __qualname__(self) -> str: ... # inherited from the added function def __new__(cls, func: Callable[..., Any], instance: object, /) -> Self: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" if sys.version_info >= (3, 13): - def __get__(self, instance: object, owner: type | None = None, /) -> Self: ... + def __get__(self, instance: object, owner: type | None = None, /) -> Self: + """Return an attribute of instance, which is of type owner. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -522,7 +665,9 @@ class BuiltinFunctionType: def __name__(self) -> str: ... @property def __qualname__(self) -> str: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -536,8 +681,12 @@ class WrapperDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: + """Return an attribute of instance, which is of type owner. +""" @final class MethodWrapperType: @@ -549,7 +698,9 @@ class MethodWrapperType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -562,8 +713,12 @@ class MethodDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: + """Return an attribute of instance, which is of type owner. +""" @final class ClassMethodDescriptorType: @@ -573,11 +728,17 @@ class ClassMethodDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: + """Call self as a function. +""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: + """Return an attribute of instance, which is of type owner. +""" @final class TracebackType: + """Create a new traceback object. +""" def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... tb_next: TracebackType | None # the rest are read-only @@ -593,27 +754,43 @@ class FrameType: @property def f_back(self) -> FrameType | None: ... @property - def f_builtins(self) -> dict[str, Any]: ... 
+ def f_builtins(self) -> dict[str, Any]: + """Return the built-in variables in the frame. +""" @property - def f_code(self) -> CodeType: ... + def f_code(self) -> CodeType: + """Return the code object being executed in this frame. +""" @property - def f_globals(self) -> dict[str, Any]: ... + def f_globals(self) -> dict[str, Any]: + """Return the global variables in the frame. +""" @property - def f_lasti(self) -> int: ... + def f_lasti(self) -> int: + """Return the index of the last attempted instruction in the frame. +""" # see discussion in #6769: f_lineno *can* sometimes be None, # but you should probably file a bug report with CPython if you encounter it being None in the wild. # An `int | None` annotation here causes too many false-positive errors, so applying `int | Any`. @property - def f_lineno(self) -> int | MaybeNone: ... + def f_lineno(self) -> int | MaybeNone: + """Return the current line number in the frame. +""" @property - def f_locals(self) -> dict[str, Any]: ... + def f_locals(self) -> dict[str, Any]: + """Return the mapping used by the frame to look up local variables. +""" f_trace: Callable[[FrameType, str, Any], Any] | None f_trace_lines: bool f_trace_opcodes: bool - def clear(self) -> None: ... + def clear(self) -> None: + """Clear all references held by the frame. +""" if sys.version_info >= (3, 14): @property - def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: ... + def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: + """Return the generator or coroutine associated with this frame, or None. +""" @final class GetSetDescriptorType: @@ -623,9 +800,15 @@ class GetSetDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... - def __set__(self, instance: Any, value: Any, /) -> None: ... - def __delete__(self, instance: Any, /) -> None: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: + """Return an attribute of instance, which is of type owner. +""" + def __set__(self, instance: Any, value: Any, /) -> None: + """Set an attribute of instance to value. +""" + def __delete__(self, instance: Any, /) -> None: + """Delete an attribute of instance. +""" @final class MemberDescriptorType: @@ -635,27 +818,82 @@ class MemberDescriptorType: def __qualname__(self) -> str: ... @property def __objclass__(self) -> type: ... - def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... - def __set__(self, instance: Any, value: Any, /) -> None: ... - def __delete__(self, instance: Any, /) -> None: ... + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: + """Return an attribute of instance, which is of type owner. +""" + def __set__(self, instance: Any, value: Any, /) -> None: + """Set an attribute of instance to value. +""" + def __delete__(self, instance: Any, /) -> None: + """Delete an attribute of instance. +""" def new_class( name: str, bases: Iterable[object] = (), kwds: dict[str, Any] | None = None, exec_body: Callable[[dict[str, Any]], object] | None = None, -) -> type: ... -def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... +) -> type: + """Create a class object dynamically using the appropriate metaclass. +""" +def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: + """Resolve MRO entries dynamically as specified by PEP 560. +""" def prepare_class( name: str, bases: tuple[type, ...] 
= (), kwds: dict[str, Any] | None = None -) -> tuple[type, dict[str, Any], dict[str, Any]]: ... +) -> tuple[type, dict[str, Any], dict[str, Any]]: + """Call the __prepare__ method of the appropriate metaclass. + +Returns (metaclass, namespace, kwds) as a 3-tuple + +*metaclass* is the appropriate metaclass +*namespace* is the prepared class namespace +*kwds* is an updated copy of the passed in kwds argument with any +'metaclass' entry removed. If no kwds argument is passed in, this will +be an empty dict. +""" if sys.version_info >= (3, 12): - def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... + def get_original_bases(cls: type, /) -> tuple[Any, ...]: + """Return the class's "original" bases prior to modification by `__mro_entries__`. + +Examples:: + + from typing import TypeVar, Generic, NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) +""" # Does not actually inherit from property, but saying it does makes sure that # pyright handles this class correctly. class DynamicClassAttribute(property): + """Route attribute access on a class to __getattr__. + +This is a descriptor, used to define attributes that act differently when +accessed through an instance and through a class. Instance access remains +normal, but access to an attribute through a class will be routed to the +class's __getattr__ method; this is done by raising AttributeError. + +This allows one to have properties active on an instance, and have virtual +attributes on the class with the same name. (Enum used this between Python +versions 3.4 - 3.9 .) + +Subclass from this to use a different method of accessing virtual attributes +and still be treated properly by the inspect module. (Enum uses this since +Python 3.10 .) + +""" fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], object] | None # type: ignore[assignment] fdel: Callable[[Any], object] | None # type: ignore[assignment] @@ -681,19 +919,29 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable @overload -def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: + """Convert regular generator function to a coroutine. +""" @overload def coroutine(func: _Fn) -> _Fn: ... @disjoint_base class GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" @property def __origin__(self) -> type | TypeAliasType: ... @property def __args__(self) -> tuple[Any, ...]: ... @property - def __parameters__(self) -> tuple[Any, ...]: ... + def __parameters__(self) -> tuple[Any, ...]: + """Type variables in the GenericAlias. +""" def __new__(cls, origin: type, args: Any, /) -> Self: ... - def __getitem__(self, typeargs: Any, /) -> GenericAlias: ... + def __getitem__(self, typeargs: Any, /) -> GenericAlias: + """Return self[key]. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... 
def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... @@ -703,8 +951,12 @@ class GenericAlias: @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... if sys.version_info >= (3, 10): - def __or__(self, value: Any, /) -> UnionType: ... - def __ror__(self, value: Any, /) -> UnionType: ... + def __or__(self, value: Any, /) -> UnionType: + """Return self|value. +""" + def __ror__(self, value: Any, /) -> UnionType: + """Return value|self. +""" # GenericAlias delegates attr access to `__origin__` def __getattr__(self, name: str) -> Any: ... @@ -712,34 +964,61 @@ class GenericAlias: if sys.version_info >= (3, 10): @final class NoneType: - def __bool__(self) -> Literal[False]: ... + """The type of the None singleton. +""" + def __bool__(self) -> Literal[False]: + """True if self else False +""" @final - class EllipsisType: ... + class EllipsisType: + """The type of the Ellipsis singleton. +""" from builtins import _NotImplementedType NotImplementedType = _NotImplementedType @final class UnionType: + """Represent a union type + +E.g. for int | str +""" @property def __args__(self) -> tuple[Any, ...]: ... @property - def __parameters__(self) -> tuple[Any, ...]: ... + def __parameters__(self) -> tuple[Any, ...]: + """Type variables in the types.UnionType. +""" # `(int | str) | Literal["foo"]` returns a generic alias to an instance of `_SpecialForm` (`Union`). # Normally we'd express this using the return type of `_SpecialForm.__ror__`, # but because `UnionType.__or__` accepts `Any`, type checkers will use # the return type of `UnionType.__or__` to infer the result of this operation # rather than `_SpecialForm.__ror__`. To mitigate this, we use `| Any` # in the return type of `UnionType.__(r)or__`. - def __or__(self, value: Any, /) -> UnionType | Any: ... - def __ror__(self, value: Any, /) -> UnionType | Any: ... + def __or__(self, value: Any, /) -> UnionType | Any: + """Return self|value. +""" + def __ror__(self, value: Any, /) -> UnionType | Any: + """Return value|self. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... # you can only subscript a `UnionType` instance if at least one of the elements # in the union is a generic alias instance that has a non-empty `__parameters__` - def __getitem__(self, parameters: Any) -> object: ... + def __getitem__(self, parameters: Any) -> object: + """Return self[key]. +""" if sys.version_info >= (3, 13): @final - class CapsuleType: ... + class CapsuleType: + """Capsule objects let you wrap a C "void *" pointer in a Python +object. They're a way of passing data through the Python interpreter +without creating your own custom type. + +Capsules are used for communication between extension modules. +They provide a way for an extension module to export a C interface +to other extension modules, so that extension modules can use the +Python import mechanism to link to one another. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi index 2ca65dad4562f..1c96a37dbbd09 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi @@ -1,3 +1,22 @@ +""" +The typing module: Support for gradual typing as defined by PEP 484 and subsequent PEPs. + +Among other things, the module includes the following: +* Generic, Protocol, and internal machinery to support generic aliases. + All subscripted types like X[int], Union[int, str] are generic aliases. 
+* Various "special forms" that have unique meanings in type annotations: + NoReturn, Never, ClassVar, Self, Concatenate, Unpack, and others. +* Classes whose instances can be type arguments to generic classes and functions: + TypeVar, ParamSpec, TypeVarTuple. +* Public helper functions: get_type_hints, overload, cast, final, and others. +* Several protocols to support duck-typing: + SupportsFloat, SupportsIndex, SupportsAbs, and others. +* Special types: NewType, NamedTuple, TypedDict. +* Deprecated aliases for builtin types and collections.abc ABCs. + +Any name not present in __all__ is an implementation detail +that may be changed without notice. Use at your own risk! +""" # Since this module defines "overload" it is not recognized by Ruff as typing.overload # TODO: The collections import is required, otherwise mypy crashes. # https://github.com/python/mypy/issues/16744 @@ -143,14 +162,95 @@ if sys.version_info >= (3, 13): # due to an import cycle. Below instead we use Any with a comment. # from _typeshed import AnnotationForm -class Any: ... +class Any: + """Special type indicating an unconstrained type. + +- Any is compatible with every type. +- Any assumed to have all methods. +- All values assumed to be instances of Any. + +Note that all the above statements are true from the point of view of +static type checkers. At runtime, Any should not be used with instance +checks. +""" class _Final: + """Mixin to prohibit subclassing. +""" __slots__ = ("__weakref__",) -def final(f: _T) -> _T: ... +def final(f: _T) -> _T: + """Decorator to indicate final methods and final classes. + +Use this decorator to indicate to type checkers that the decorated +method cannot be overridden, and decorated class cannot be subclassed. + +For example:: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + +There is no runtime checking of these properties. The decorator +attempts to set the ``__final__`` attribute to ``True`` on the decorated +object to allow runtime introspection. +""" @final class TypeVar: + """Type variable. + +The preferred way to construct a type variable is via the dedicated +syntax for generic functions, classes, and type aliases:: + + class Sequence[T]: # T is a TypeVar + ... + +This syntax can also be used to create bound and constrained type +variables:: + + # S is a TypeVar bound to str + class StrSequence[S: str]: + ... + + # A is a TypeVar constrained to str or bytes + class StrOrBytesSequence[A: (str, bytes)]: + ... + +Type variables can also have defaults: + + class IntDefault[T = int]: + ... + +However, if desired, reusable type variables can also be constructed +manually, like so:: + + T = TypeVar('T') # Can be anything + S = TypeVar('S', bound=str) # Can be any subtype of str + A = TypeVar('A', str, bytes) # Must be exactly str or bytes + D = TypeVar('D', default=int) # Defaults to int + +Type variables exist primarily for the benefit of static type +checkers. They serve as the parameters for generic types as well +as for generic function and type alias definitions. + +The variance of type variables is inferred by type checkers when they +are created through the type parameter syntax and when +``infer_variance=True`` is passed. Manually created type variables may +be explicitly marked covariant or contravariant by passing +``covariant=True`` or ``contravariant=True``. 
By default, manually +created type variables are invariant. See PEP 484 and PEP 695 for more +details. +""" @property def __name__(self) -> str: ... @property @@ -207,8 +307,12 @@ class TypeVar: contravariant: bool = False, ) -> None: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any, /) -> _SpecialForm: ... # AnnotationForm - def __ror__(self, left: Any, /) -> _SpecialForm: ... # AnnotationForm + def __or__(self, right: Any, /) -> _SpecialForm: # AnnotationForm + """Return self|value. +""" + def __ror__(self, left: Any, /) -> _SpecialForm: # AnnotationForm + """Return value|self. +""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... if sys.version_info >= (3, 13): @@ -259,11 +363,51 @@ if sys.version_info >= (3, 11): @final class TypeVarTuple: + """Type variable tuple. A specialized form of type variable that enables +variadic generics. + +The preferred way to construct a type variable tuple is via the +dedicated syntax for generic functions, classes, and type aliases, +where a single '*' indicates a type variable tuple:: + + def move_first_element_to_last[T, *Ts](tup: tuple[T, *Ts]) -> tuple[*Ts, T]: + return (*tup[1:], tup[0]) + +Type variables tuples can have default values: + + type AliasWithDefault[*Ts = (str, int)] = tuple[*Ts] + +For compatibility with Python 3.11 and earlier, TypeVarTuple objects +can also be created as follows:: + + Ts = TypeVarTuple('Ts') # Can be given any name + DefaultTs = TypeVarTuple('Ts', default=(str, int)) + +Just as a TypeVar (type variable) is a placeholder for a single type, +a TypeVarTuple is a placeholder for an *arbitrary* number of types. For +example, if we define a generic class using a TypeVarTuple:: + + class C[*Ts]: ... + +Then we can parameterize that class with an arbitrary number of type +arguments:: + + C[int] # Fine + C[int, str] # Also fine + C[()] # Even this is fine + +For more details, see PEP 646. + +Note that only TypeVarTuples defined in the global scope can be +pickled. +""" @property def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... # AnnotationForm + def __default__(self) -> Any: # AnnotationForm + """The default value for this TypeVarTuple. +""" def has_default(self) -> bool: ... if sys.version_info >= (3, 13): def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm @@ -272,7 +416,9 @@ if sys.version_info >= (3, 11): else: def __init__(self, name: str) -> None: ... - def __iter__(self) -> Any: ... + def __iter__(self) -> Any: + """Implement iter(self). +""" def __typing_subst__(self, arg: Never, /) -> Never: ... def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... if sys.version_info >= (3, 14): @@ -282,6 +428,19 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 10): @final class ParamSpecArgs: + """The args for a ParamSpec object. + +Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + +ParamSpecArgs objects have a reference back to their ParamSpec:: + + >>> P = ParamSpec("P") + >>> P.args.__origin__ is P + True + +This type is meant for runtime introspection and has no special meaning +to static type checkers. +""" @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -294,6 +453,19 @@ if sys.version_info >= (3, 10): @final class ParamSpecKwargs: + """The kwargs for a ParamSpec object. + +Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. 
+ +ParamSpecKwargs objects have a reference back to their ParamSpec:: + + >>> P = ParamSpec("P") + >>> P.kwargs.__origin__ is P + True + +This type is meant for runtime introspection and has no special meaning +to static type checkers. +""" @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -306,6 +478,57 @@ if sys.version_info >= (3, 10): @final class ParamSpec: + """Parameter specification variable. + +The preferred way to construct a parameter specification is via the +dedicated syntax for generic functions, classes, and type aliases, +where the use of '**' creates a parameter specification:: + + type IntFunc[**P] = Callable[P, int] + +The following syntax creates a parameter specification that defaults +to a callable accepting two positional-only arguments of types int +and str: + + type IntFuncDefault[**P = (int, str)] = Callable[P, int] + +For compatibility with Python 3.11 and earlier, ParamSpec objects +can also be created as follows:: + + P = ParamSpec('P') + DefaultP = ParamSpec('DefaultP', default=(int, str)) + +Parameter specification variables exist primarily for the benefit of +static type checkers. They are used to forward the parameter types of +one callable to another callable, a pattern commonly found in +higher-order functions and decorators. They are only valid when used +in ``Concatenate``, or as the first argument to ``Callable``, or as +parameters for user-defined Generics. See class Generic for more +information on generic types. + +An example for annotating a decorator:: + + def add_logging[**P, T](f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + +Parameter specification variables can be introspected. e.g.:: + + >>> P = ParamSpec("P") + >>> P.__name__ + 'P' + +Note that only parameter specification variables defined in the global +scope can be pickled. +""" @property def __name__(self) -> str: ... @property @@ -319,7 +542,9 @@ if sys.version_info >= (3, 10): def __infer_variance__(self) -> bool: ... if sys.version_info >= (3, 13): @property - def __default__(self) -> Any: ... # AnnotationForm + def __default__(self) -> Any: # AnnotationForm + """The default value for this ParamSpec. +""" if sys.version_info >= (3, 13): def __new__( cls, @@ -361,15 +586,23 @@ if sys.version_info >= (3, 10): ) -> None: ... @property - def args(self) -> ParamSpecArgs: ... + def args(self) -> ParamSpecArgs: + """Represents positional arguments. +""" @property - def kwargs(self) -> ParamSpecKwargs: ... + def kwargs(self) -> ParamSpecKwargs: + """Represents keyword arguments. +""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... - def __or__(self, right: Any, /) -> _SpecialForm: ... - def __ror__(self, left: Any, /) -> _SpecialForm: ... + def __or__(self, right: Any, /) -> _SpecialForm: + """Return self|value. +""" + def __ror__(self, left: Any, /) -> _SpecialForm: + """Return value|self. +""" if sys.version_info >= (3, 13): def has_default(self) -> bool: ... 
if sys.version_info >= (3, 14): @@ -381,6 +614,26 @@ if sys.version_info >= (3, 10): TypeGuard: _SpecialForm class NewType: + """NewType creates simple unique types with almost zero runtime overhead. + +NewType(name, tp) is considered a subtype of tp +by static type checkers. At runtime, NewType(name, tp) returns +a dummy callable that simply returns its argument. + +Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int +""" def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm if sys.version_info >= (3, 11): @staticmethod @@ -393,7 +646,24 @@ if sys.version_info >= (3, 10): __supertype__: type | NewType else: - def NewType(name: str, tp: Any) -> Any: ... + def NewType(name: str, tp: Any) -> Any: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: + + UserId = NewType('UserId', int) + + def name_by_id(user_id: UserId) -> str: + ... + + UserId('user') # Fails type check + + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ _F = TypeVar("_F", bound=Callable[..., Any]) _P = _ParamSpec("_P") @@ -410,9 +680,52 @@ _KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. _TC = TypeVar("_TC", bound=type[object]) -def overload(func: _F) -> _F: ... -def no_type_check(arg: _F) -> _F: ... -def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... +def overload(func: _F) -> _F: + """Decorator for overloaded functions/methods. + +In a stub file, place two or more stub definitions for the same +function in a row, each decorated with @overload. + +For example:: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + +In a non-stub file (i.e. a regular .py file), do the same but +follow it with an implementation. The implementation should *not* +be decorated with @overload:: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + ... # implementation goes here + +The overloads for a function can be retrieved at runtime using the +get_overloads() function. +""" +def no_type_check(arg: _F) -> _F: + """Decorator to indicate that annotations are not type hints. + +The argument must be a class or function; if it is a class, it +applies recursively to all methods and classes defined in that class +(but not to methods defined in its superclasses or subclasses). + +This mutates the function(s) or class(es) in place. +""" +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: + """Decorator to give another decorator the @no_type_check effect. + +This wraps the decorator with something that wraps the decorated +function in @no_type_check. +""" # This itself is only available during type checking def type_check_only(func_or_cls: _FT) -> _FT: ... @@ -460,45 +773,77 @@ class _ProtocolMeta(ABCMeta): # Abstract base classes. -def runtime_checkable(cls: _TC) -> _TC: ... 
+def runtime_checkable(cls: _TC) -> _TC: + """Mark a protocol class as a runtime protocol. + +Such protocol can be used with isinstance() and issubclass(). +Raise TypeError if applied to a non-protocol class. +This allows a simple-minded structural check very similar to +one trick ponies in collections.abc such as Iterable. + +For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + +Warning: this will check only the presence of the required methods, +not their type signatures! +""" @runtime_checkable class SupportsInt(Protocol, metaclass=ABCMeta): + """An ABC with one abstract method __int__. +""" __slots__ = () @abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=ABCMeta): + """An ABC with one abstract method __float__. +""" __slots__ = () @abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=ABCMeta): + """An ABC with one abstract method __complex__. +""" __slots__ = () @abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=ABCMeta): + """An ABC with one abstract method __bytes__. +""" __slots__ = () @abstractmethod def __bytes__(self) -> bytes: ... @runtime_checkable class SupportsIndex(Protocol, metaclass=ABCMeta): + """An ABC with one abstract method __index__. +""" __slots__ = () @abstractmethod def __index__(self) -> int: ... @runtime_checkable class SupportsAbs(Protocol[_T_co]): + """An ABC with one abstract method __abs__ that is covariant in its return type. +""" __slots__ = () @abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): + """An ABC with one abstract method __round__ that is covariant in its return type. +""" __slots__ = () @overload @abstractmethod @@ -528,7 +873,9 @@ class Iterable(Protocol[_T_co]): @runtime_checkable class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod - def __next__(self) -> _T_co: ... + def __next__(self) -> _T_co: + """Return the next item from the iterator. When exhausted, raise StopIteration +""" def __iter__(self) -> Iterator[_T_co]: ... @runtime_checkable @@ -542,21 +889,34 @@ _ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) @runtime_checkable class Generator(Iterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra, _ReturnT_co]): - def __next__(self) -> _YieldT_co: ... - @abstractmethod - def send(self, value: _SendT_contra, /) -> _YieldT_co: ... + def __next__(self) -> _YieldT_co: + """Return the next item from the generator. +When exhausted, raise StopIteration. +""" + @abstractmethod + def send(self, value: _SendT_contra, /) -> _YieldT_co: + """Send a value into the generator. +Return next yielded value or raise StopIteration. +""" @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> _YieldT_co: ... + ) -> _YieldT_co: + """Raise an exception in the generator. +Return next yielded value or raise StopIteration. +""" @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... if sys.version_info >= (3, 13): - def close(self) -> _ReturnT_co | None: ... + def close(self) -> _ReturnT_co | None: + """Raise GeneratorExit inside generator. + """ else: - def close(self) -> None: ... + def close(self) -> None: + """Raise GeneratorExit inside generator. 
+ """ def __iter__(self) -> Generator[_YieldT_co, _SendT_contra, _ReturnT_co]: ... @@ -567,10 +927,14 @@ else: from contextlib import AbstractAsyncContextManager, AbstractContextManager @runtime_checkable - class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): ... + class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): + """An abstract base class for context managers. +""" @runtime_checkable - class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): ... + class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): + """An abstract base class for asynchronous context managers. +""" @runtime_checkable class Awaitable(Protocol[_T_co]): @@ -586,17 +950,25 @@ class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, __qualname__: str @abstractmethod - def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: ... + def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: + """Send a value into the coroutine. +Return next yielded value or raise StopIteration. +""" @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> _YieldT_co: ... + ) -> _YieldT_co: + """Raise an exception in the coroutine. +Return next yielded value or raise StopIteration. +""" @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... @abstractmethod - def close(self) -> None: ... + def close(self) -> None: + """Raise GeneratorExit inside coroutine. + """ # NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. # The parameters correspond to Generator, but the 4th is the original type. @@ -617,25 +989,38 @@ class AsyncIterable(Protocol[_T_co]): @runtime_checkable class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod - def __anext__(self) -> Awaitable[_T_co]: ... + def __anext__(self) -> Awaitable[_T_co]: + """Return the next item or raise StopAsyncIteration when exhausted. +""" def __aiter__(self) -> AsyncIterator[_T_co]: ... @runtime_checkable class AsyncGenerator(AsyncIterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra]): - def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: ... - @abstractmethod - def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: ... + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: + """Return the next item from the asynchronous generator. +When exhausted, raise StopAsyncIteration. +""" + @abstractmethod + def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: + """Send a value into the asynchronous generator. +Return next yielded value or raise StopAsyncIteration. +""" @overload @abstractmethod def athrow( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / - ) -> Coroutine[Any, Any, _YieldT_co]: ... + ) -> Coroutine[Any, Any, _YieldT_co]: + """Raise an exception in the asynchronous generator. +Return next yielded value or raise StopAsyncIteration. +""" @overload @abstractmethod def athrow( self, typ: BaseException, val: None = None, tb: TracebackType | None = None, / ) -> Coroutine[Any, Any, _YieldT_co]: ... - def aclose(self) -> Coroutine[Any, Any, None]: ... + def aclose(self) -> Coroutine[Any, Any, None]: + """Raise GeneratorExit inside coroutine. 
+ """ @runtime_checkable class Container(Protocol[_T_co]): @@ -650,6 +1035,11 @@ class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): def __len__(self) -> int: ... class Sequence(Reversible[_T_co], Collection[_T_co]): + """All the operations on a read-only sequence. + +Concrete subclasses must override __new__ or __init__, +__getitem__, and __len__. +""" @overload @abstractmethod def __getitem__(self, index: int) -> _T_co: ... @@ -657,15 +1047,30 @@ class Sequence(Reversible[_T_co], Collection[_T_co]): @abstractmethod def __getitem__(self, index: slice) -> Sequence[_T_co]: ... # Mixin methods - def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... - def count(self, value: Any) -> int: ... + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: + """S.index(value, [start, [stop]]) -> integer -- return first index of value. +Raises ValueError if the value is not present. + +Supporting start and stop arguments is optional, but +recommended. +""" + def count(self, value: Any) -> int: + """S.count(value) -> integer -- return number of occurrences of value +""" def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... class MutableSequence(Sequence[_T]): + """All the operations on a read-write sequence. + +Concrete subclasses must provide __new__ or __init__, +__getitem__, __setitem__, __delitem__, __len__, and insert(). +""" @abstractmethod - def insert(self, index: int, value: _T) -> None: ... + def insert(self, index: int, value: _T) -> None: + """S.insert(index, value) -- insert value before index +""" @overload @abstractmethod def __getitem__(self, index: int) -> _T: ... @@ -685,18 +1090,55 @@ class MutableSequence(Sequence[_T]): @abstractmethod def __delitem__(self, index: slice) -> None: ... # Mixin methods - def append(self, value: _T) -> None: ... - def clear(self) -> None: ... - def extend(self, values: Iterable[_T]) -> None: ... - def reverse(self) -> None: ... - def pop(self, index: int = -1) -> _T: ... - def remove(self, value: _T) -> None: ... + def append(self, value: _T) -> None: + """S.append(value) -- append value to the end of the sequence +""" + def clear(self) -> None: + """S.clear() -> None -- remove all items from S +""" + def extend(self, values: Iterable[_T]) -> None: + """S.extend(iterable) -- extend sequence by appending elements from the iterable +""" + def reverse(self) -> None: + """S.reverse() -- reverse *IN PLACE* +""" + def pop(self, index: int = -1) -> _T: + """S.pop([index]) -> item -- remove and return item at index (default last). +Raise IndexError if list is empty or index is out of range. +""" + def remove(self, value: _T) -> None: + """S.remove(value) -- remove first occurrence of value. +Raise ValueError if the value is not present. +""" def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... class AbstractSet(Collection[_T_co]): + """A set is a finite, iterable container. + +This class provides concrete generic implementations of all +methods except for __contains__, __iter__ and __len__. + +To override the comparisons (presumably for speed, as the +semantics are fixed), redefine __le__ and __ge__, +then the other operations will automatically follow suit. +""" @abstractmethod def __contains__(self, x: object) -> bool: ... - def _hash(self) -> int: ... + def _hash(self) -> int: + """Compute the hash value of a set. + +Note that we don't define __hash__: not all sets are hashable. 
+But if you define a hashable set type, its __hash__ should +call this function. + +This must be compatible __eq__. + +All sets ought to compare equal if they contain the same +elements, regardless of how they are implemented, and +regardless of the order of the elements; so there's not much +freedom for __eq__ or __hash__. We match the algorithm used +by the built-in frozenset type. +""" # Mixin methods def __le__(self, other: AbstractSet[Any]) -> bool: ... def __lt__(self, other: AbstractSet[Any]) -> bool: ... @@ -707,17 +1149,39 @@ class AbstractSet(Collection[_T_co]): def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... def __eq__(self, other: object) -> bool: ... - def isdisjoint(self, other: Iterable[Any]) -> bool: ... + def isdisjoint(self, other: Iterable[Any]) -> bool: + """Return True if two sets have a null intersection. +""" class MutableSet(AbstractSet[_T]): + """A mutable set is a finite, iterable container. + +This class provides concrete generic implementations of all +methods except for __contains__, __iter__, __len__, +add(), and discard(). + +To override the comparisons (presumably for speed, as the +semantics are fixed), all you have to do is redefine __le__ and +then the other operations will automatically follow suit. +""" @abstractmethod - def add(self, value: _T) -> None: ... + def add(self, value: _T) -> None: + """Add an element. +""" @abstractmethod - def discard(self, value: _T) -> None: ... + def discard(self, value: _T) -> None: + """Remove an element. Do not raise an exception if absent. +""" # Mixin methods - def clear(self) -> None: ... - def pop(self) -> _T: ... - def remove(self, value: _T) -> None: ... + def clear(self) -> None: + """This is slow (creates N new iterators!) but effective. +""" + def pop(self) -> _T: + """Return the popped value. Raise KeyError if empty. +""" + def remove(self, value: _T) -> None: + """Remove an element. If not a member, raise a KeyError. +""" def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] @@ -760,36 +1224,65 @@ class ValuesView(MappingView, Collection[_VT_co]): def __iter__(self) -> Iterator[_VT_co]: ... class Mapping(Collection[_KT], Generic[_KT, _VT_co]): + """A Mapping is a generic container for associating key/value +pairs. + +This class provides concrete generic implementations of all +methods except for __getitem__, __iter__, and __len__. +""" # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. @abstractmethod def __getitem__(self, key: _KT, /) -> _VT_co: ... # Mixin methods @overload - def get(self, key: _KT, /) -> _VT_co | None: ... + def get(self, key: _KT, /) -> _VT_co | None: + """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. +""" @overload def get(self, key: _KT, /, default: _VT_co) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload def get(self, key: _KT, /, default: _T) -> _VT_co | _T: ... - def items(self) -> ItemsView[_KT, _VT_co]: ... - def keys(self) -> KeysView[_KT]: ... - def values(self) -> ValuesView[_VT_co]: ... 
+ def items(self) -> ItemsView[_KT, _VT_co]: + """D.items() -> a set-like object providing a view on D's items +""" + def keys(self) -> KeysView[_KT]: + """D.keys() -> a set-like object providing a view on D's keys +""" + def values(self) -> ValuesView[_VT_co]: + """D.values() -> an object providing a view on D's values +""" def __contains__(self, key: object, /) -> bool: ... def __eq__(self, other: object, /) -> bool: ... class MutableMapping(Mapping[_KT, _VT]): + """A MutableMapping is a generic container for associating +key/value pairs. + +This class provides concrete generic implementations of all +methods except for __getitem__, __setitem__, __delitem__, +__iter__, and __len__. +""" @abstractmethod def __setitem__(self, key: _KT, value: _VT, /) -> None: ... @abstractmethod def __delitem__(self, key: _KT, /) -> None: ... - def clear(self) -> None: ... + def clear(self) -> None: + """D.clear() -> None. Remove all items from D. +""" @overload - def pop(self, key: _KT, /) -> _VT: ... + def pop(self, key: _KT, /) -> _VT: + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. +If key is not found, d is returned if given, otherwise KeyError is raised. +""" @overload def pop(self, key: _KT, /, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, /, default: _T) -> _VT | _T: ... - def popitem(self) -> tuple[_KT, _VT]: ... + def popitem(self) -> tuple[_KT, _VT]: + """D.popitem() -> (k, v), remove and return some (key, value) pair +as a 2-tuple; but raise KeyError if D is empty. +""" # This overload should be allowed only if the value type is compatible with None. # # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: @@ -797,7 +1290,9 @@ class MutableMapping(Mapping[_KT, _VT]): # -- collections.ChainMap.setdefault # -- weakref.WeakKeyDictionary.setdefault @overload - def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: ... + def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: + """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D +""" @overload def setdefault(self, key: _KT, default: _VT, /) -> _VT: ... # 'update' used to take a Union, but using overloading is better. @@ -821,7 +1316,12 @@ class MutableMapping(Mapping[_KT, _VT]): # -- weakref.WeakValueDictionary.__ior__ # -- weakref.WeakKeyDictionary.__ior__ @overload - def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ... + def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: + """D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. +If E present and has a .keys() method, does: for k in E.keys(): D[k] = E[k] +If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v +In either case, this is followed by: for k, v in F.items(): D[k] = v +""" @overload def update(self: SupportsGetItem[str, _VT], m: SupportsKeysAndGetItem[str, _VT], /, **kwargs: _VT) -> None: ... @overload @@ -839,6 +1339,17 @@ TYPE_CHECKING: Final[bool] # This differs from runtime, but better reflects the fact that in reality # classes deriving from IO use different names for the arguments. class IO(Generic[AnyStr]): + """Generic base class for TextIO and BinaryIO. + +This is an abstract, generic version of the return of open(). + +NOTE: This does not distinguish between the different possible +classes (text vs. binary, read vs. write vs. read/write, +append-only, unbuffered). 
The TextIO and BinaryIO subclasses +below capture the distinctions between text vs. binary, which is +pervasive in the interface; however we currently do not offer a +way to track the other distinctions in the type system. +""" # At runtime these are all abstract properties, # but making them abstract in the stub is hugely disruptive, for not much gain. # See #8726 @@ -901,11 +1412,15 @@ class IO(Generic[AnyStr]): ) -> None: ... class BinaryIO(IO[bytes]): + """Typed version of the return of open() in binary mode. +""" __slots__ = () @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): + """Typed version of the return of open() in text mode. +""" # See comment regarding the @properties in the `IO` class __slots__ = () @property @@ -945,7 +1460,37 @@ if sys.version_info >= (3, 14): include_extras: bool = False, *, format: Format | None = None, - ) -> dict[str, Any]: ... # AnnotationForm + ) -> dict[str, Any]: # AnnotationForm + """Return type hints for an object. + +This is often the same as obj.__annotations__, but it handles +forward references encoded as string literals and recursively replaces all +'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + +The argument may be a module, class, method, or function. The annotations +are returned as a dictionary. For classes, annotations include also +inherited members. + +TypeError is raised if the argument is not of a type that can contain +annotations, and an empty dictionary is returned if no annotations are +present. + +BEWARE -- the behavior of globalns and localns is counterintuitive +(unless you are familiar with how eval() and exec() work). The +search order is locals first, then globals. + +- If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. For classes, the search + order is globals first then locals. + +- If one dict argument is passed, it is used for both globals and + locals. + +- If two dict arguments are passed, they specify globals and + locals, respectively. +""" else: def get_type_hints( @@ -953,33 +1498,165 @@ else: globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False, - ) -> dict[str, Any]: ... # AnnotationForm + ) -> dict[str, Any]: # AnnotationForm + """Return type hints for an object. + +This is often the same as obj.__annotations__, but it handles +forward references encoded as string literals and recursively replaces all +'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + +The argument may be a module, class, method, or function. The annotations +are returned as a dictionary. For classes, annotations include also +inherited members. + +TypeError is raised if the argument is not of a type that can contain +annotations, and an empty dictionary is returned if no annotations are +present. + +BEWARE -- the behavior of globalns and localns is counterintuitive +(unless you are familiar with how eval() and exec() work). The +search order is locals first, then globals. + +- If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. For classes, the search + order is globals first then locals. 
+ +- If one dict argument is passed, it is used for both globals and + locals. + +- If two dict arguments are passed, they specify globals and + locals, respectively. +""" + +def get_args(tp: Any) -> tuple[Any, ...]: # AnnotationForm + """Get type arguments with all substitutions performed. -def get_args(tp: Any) -> tuple[Any, ...]: ... # AnnotationForm +For unions, basic simplifications used by Union constructor are performed. + +Examples:: + + >>> T = TypeVar('T') + >>> assert get_args(Dict[str, int]) == (str, int) + >>> assert get_args(int) == () + >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) + >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + >>> assert get_args(Callable[[], T][int]) == ([], int) +""" if sys.version_info >= (3, 10): @overload - def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... + def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: + """Get the unsubscripted version of a type. + +This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, +Annotated, and others. Return None for unsupported types. + +Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P +""" @overload def get_origin(tp: UnionType) -> type[UnionType]: ... @overload -def get_origin(tp: GenericAlias) -> type: ... +def get_origin(tp: GenericAlias) -> type: + """Get the unsubscripted version of a type. + +This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, +Annotated, and others. Return None for unsupported types. + +Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P +""" @overload def get_origin(tp: Any) -> Any | None: ... # AnnotationForm @overload -def cast(typ: type[_T], val: Any) -> _T: ... +def cast(typ: type[_T], val: Any) -> _T: + """Cast a value to a type. + +This returns the value unchanged. To the type checker this +signals that the return value has the designated type, but at +runtime we intentionally don't check anything (we want this +to be as fast as possible). +""" @overload def cast(typ: str, val: Any) -> Any: ... @overload def cast(typ: object, val: Any) -> Any: ... if sys.version_info >= (3, 11): - def reveal_type(obj: _T, /) -> _T: ... - def assert_never(arg: Never, /) -> Never: ... - def assert_type(val: _T, typ: Any, /) -> _T: ... # AnnotationForm - def clear_overloads() -> None: ... - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + def reveal_type(obj: _T, /) -> _T: + """Ask a static type checker to reveal the inferred type of an expression. + +When a static type checker encounters a call to ``reveal_type()``, +it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + +Running a static type checker (e.g., mypy) on this example +will produce output similar to 'Revealed type is "builtins.int"'. 
+ +At runtime, the function prints the runtime type of the +argument and returns the argument unchanged. +""" + def assert_never(arg: Never, /) -> Never: + """Statically assert that a line of code is unreachable. + +Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + +If a type checker finds that a call to assert_never() is +reachable, it will emit an error. + +At runtime, this throws an exception when called. +""" + def assert_type(val: _T, typ: Any, /) -> _T: # AnnotationForm + """Ask a static type checker to confirm that the value is of the given type. + +At runtime this does nothing: it returns the first argument unchanged with no +checks or side effects, no matter the actual type of the argument. + +When a static type checker encounters a call to assert_type(), it +emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # OK + assert_type(name, int) # type checker error +""" + def clear_overloads() -> None: + """Clear all overloads in the registry. +""" + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: + """Return all defined overloads for *func* as a sequence. +""" def dataclass_transform( *, eq_default: bool = True, @@ -988,12 +1665,93 @@ if sys.version_info >= (3, 11): frozen_default: bool = False, # on 3.11, runtime accepts it as part of kwargs field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: Any, - ) -> IdentityFunction: ... + ) -> IdentityFunction: + """Decorator to mark an object as providing dataclass-like behaviour. + +The decorator can be applied to a function, class, or metaclass. + +Example usage with a decorator function:: + + @dataclass_transform() + def create_model[T](cls: type[T]) -> type[T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + +On a base class:: + + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + +On a metaclass:: + + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + +The ``CustomerModel`` classes defined above will +be treated by type checkers similarly to classes created with +``@dataclasses.dataclass``. +For example, type checkers will assume these classes have +``__init__`` methods that accept ``id`` and ``name``. + +The arguments to this decorator can be used to customize this behavior: +- ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + ``True`` or ``False`` if it is omitted by the caller. +- ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. +- ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. +- ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. +- ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. +- Arbitrary other keyword arguments are accepted in order to allow for + possible future extensions. + +At runtime, this decorator records its arguments in the +``__dataclass_transform__`` attribute on the decorated object. +It has no other runtime effect. 
+ +See PEP 681 for more details. +""" # Type constructors # Obsolete, will be changed to a function. Use _typeshed._type_checker_internals.NamedTupleFallback instead. class NamedTuple(tuple[Any, ...]): + """Typed version of namedtuple. + +Usage:: + + class Employee(NamedTuple): + name: str + id: int + +This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + +The resulting class has an extra __annotations__ attribute, giving a +dict that maps field names to types. (The field names are also in +the _fields attribute, which is part of the namedtuple API.) +An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) +""" _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] # __orig_bases__ sometimes exists on <3.12, but not consistently @@ -1043,11 +1801,15 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def keys(self) -> dict_keys[str, object]: ... def values(self) -> dict_values[str, object]: ... @overload - def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: + """Return self|value. +""" @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload - def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: ... + def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: + """Return value|self. +""" @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of __or__ and __ior__ @@ -1064,11 +1826,32 @@ if sys.version_info >= (3, 14): locals: Mapping[str, Any] | None = None, type_params: tuple[TypeVar, ParamSpec, TypeVarTuple] | None = None, format: Format | None = None, - ) -> Any: ... # AnnotationForm + ) -> Any: # AnnotationForm + """Evaluate a forward reference as a type hint. + +This is similar to calling the ForwardRef.evaluate() method, +but unlike that method, evaluate_forward_ref() also +recursively evaluates forward references nested within the type hint. + +*forward_ref* must be an instance of ForwardRef. *owner*, if given, +should be the object that holds the annotations that the forward reference +derived from, such as a module, class object, or function. It is used to +infer the namespaces to use for looking up names. *globals* and *locals* +can also be explicitly given to provide the global and local namespaces. +*type_params* is a tuple of type parameters that are in scope when +evaluating the forward reference. This parameter should be provided (though +it may be an empty tuple) if *owner* is not given and the forward reference +does not already have an owner set. *format* specifies the format of the +annotation and is a member of the annotationlib.Format enum, defaulting to +VALUE. + +""" else: @final class ForwardRef(_Final): + """Internal wrapper to hold a forward reference. +""" __slots__ = ( "__forward_arg__", "__forward_code__", @@ -1128,9 +1911,30 @@ else: def __ror__(self, other: Any) -> _SpecialForm: ... if sys.version_info >= (3, 10): - def is_typeddict(tp: object) -> bool: ... - -def _type_repr(obj: object) -> str: ... + def is_typeddict(tp: object) -> bool: + """Check if an annotation is a TypedDict class. + +For example:: + + >>> from typing import TypedDict + >>> class Film(TypedDict): + ... title: str + ... year: int + ... 
+ >>> is_typeddict(Film) + True + >>> is_typeddict(dict) + False +""" + +def _type_repr(obj: object) -> str: + """Return the repr() of an object, special-casing types (internal helper). + +If obj is a type, we return a shorter version than the default +type.__repr__, based on the module and qualified name, which is +typically enough to uniquely identify a type. For everything +else, we fall back on repr(obj). +""" if sys.version_info >= (3, 12): _TypeParameter: typing_extensions.TypeAlias = ( @@ -1142,9 +1946,56 @@ if sys.version_info >= (3, 12): | typing_extensions.TypeVarTuple ) - def override(method: _F, /) -> _F: ... + def override(method: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + +Usage:: + + class Base: + def method(self) -> None: + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + +When this decorator is applied to a method, the type checker will +validate that it overrides a method or attribute with the same name on a +base class. This helps prevent bugs that may occur when a base class is +changed without an equivalent change to a child class. + +There is no runtime checking of this property. The decorator attempts to +set the ``__override__`` attribute to ``True`` on the decorated object to +allow runtime introspection. + +See PEP 698 for details. +""" @final class TypeAliasType: + """Type alias. + +Type aliases are created through the type statement:: + + type Alias = int + +In this example, Alias and int will be treated equivalently by static +type checkers. + +At runtime, Alias is an instance of TypeAliasType. The __name__ +attribute holds the name of the type alias. The value of the type alias +is stored in the __value__ attribute. It is evaluated lazily, so the +value is computed only if the attribute is accessed. + +Type aliases can also be generic:: + + type ListOrSet[T] = list[T] | set[T] + +In this case, the type parameters of the alias are stored in the +__type_params__ attribute. + +See PEP 695 for more information. +""" def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ... @property def __value__(self) -> Any: ... # AnnotationForm @@ -1157,16 +2008,48 @@ if sys.version_info >= (3, 12): # It's writable on types, but not on instances of TypeAliasType. @property def __module__(self) -> str | None: ... # type: ignore[override] - def __getitem__(self, parameters: Any, /) -> GenericAlias: ... # AnnotationForm - def __or__(self, right: Any, /) -> _SpecialForm: ... - def __ror__(self, left: Any, /) -> _SpecialForm: ... + def __getitem__(self, parameters: Any, /) -> GenericAlias: # AnnotationForm + """Return self[key]. +""" + def __or__(self, right: Any, /) -> _SpecialForm: + """Return self|value. +""" + def __ror__(self, left: Any, /) -> _SpecialForm: + """Return value|self. +""" if sys.version_info >= (3, 14): @property def evaluate_value(self) -> EvaluateFunc: ... if sys.version_info >= (3, 13): - def is_protocol(tp: type, /) -> bool: ... - def get_protocol_members(tp: type, /) -> frozenset[str]: ... + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + +Example:: + + >>> from typing import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False +""" + def get_protocol_members(tp: type, /) -> frozenset[str]: + """Return the set of members defined in a Protocol. 
+ +Example:: + + >>> from typing import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) == frozenset({'a', 'b'}) + True + +Raise a TypeError for arguments that are not Protocols. +""" @final @type_check_only class _NoDefaultType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi index 5fd3f4578a8bd..bc57ed7a56bc5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi @@ -214,14 +214,77 @@ _T_contra = _TypeVar("_T_contra", contravariant=True) # on older versions of Python. Protocol: _SpecialForm -def runtime_checkable(cls: _TC) -> _TC: ... +def runtime_checkable(cls: _TC) -> _TC: + """Mark a protocol class as a runtime protocol. + +Such protocol can be used with isinstance() and issubclass(). +Raise TypeError if applied to a non-protocol class. +This allows a simple-minded structural check very similar to +one trick ponies in collections.abc such as Iterable. + +For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + +Warning: this will check only the presence of the required methods, +not their type signatures! +""" # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable Final: _SpecialForm -def final(f: _F) -> _F: ... -def disjoint_base(cls: _TC) -> _TC: ... +def final(f: _F) -> _F: + """Decorator to indicate final methods and final classes. + +Use this decorator to indicate to type checkers that the decorated +method cannot be overridden, and decorated class cannot be subclassed. + +For example:: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + +There is no runtime checking of these properties. The decorator +attempts to set the ``__final__`` attribute to ``True`` on the decorated +object to allow runtime introspection. +""" +def disjoint_base(cls: _TC) -> _TC: + """This decorator marks a class as a disjoint base. + +Child classes of a disjoint base cannot inherit from other disjoint bases that are +not parent classes of the disjoint base. + +For example: + + @disjoint_base + class Disjoint1: pass + + @disjoint_base + class Disjoint2: pass + + class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error + +Type checkers can use knowledge of disjoint bases to detect unreachable code +and determine when two types can overlap. + +See PEP 800. +""" Literal: _SpecialForm @@ -253,11 +316,15 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def values(self) -> dict_values[str, object]: ... def __delitem__(self, k: Never) -> None: ... @overload - def __or__(self, value: Self, /) -> Self: ... + def __or__(self, value: Self, /) -> Self: + """Return self|value. +""" @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload - def __ror__(self, value: Self, /) -> Self: ... + def __ror__(self, value: Self, /) -> Self: + """Return value|self. +""" @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... 
# supposedly incompatible definitions of `__ior__` and `__or__`: @@ -271,16 +338,94 @@ if sys.version_info >= (3, 13): else: def get_type_hints( obj: Any, globalns: dict[str, Any] | None = None, localns: Mapping[str, Any] | None = None, include_extras: bool = False - ) -> dict[str, AnnotationForm]: ... + ) -> dict[str, AnnotationForm]: + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ -def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: ... +def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: + """Get type arguments with all substitutions performed. + +For unions, basic simplifications used by Union constructor are performed. + +Examples:: + + >>> T = TypeVar('T') + >>> assert get_args(Dict[str, int]) == (str, int) + >>> assert get_args(int) == () + >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) + >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + >>> assert get_args(Callable[[], T][int]) == ([], int) +""" if sys.version_info >= (3, 10): @overload - def get_origin(tp: UnionType) -> type[UnionType]: ... + def get_origin(tp: UnionType) -> type[UnionType]: + """Get the unsubscripted version of a type. + +This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, +Annotated, and others. Return None for unsupported types. + +Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P +""" @overload -def get_origin(tp: GenericAlias) -> type: ... +def get_origin(tp: GenericAlias) -> type: + """Get the unsubscripted version of a type. + +This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, +Annotated, and others. Return None for unsupported types. 
+ +Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P +""" @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload @@ -302,12 +447,34 @@ if sys.version_info >= (3, 10): else: @final class ParamSpecArgs: + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... @final class ParamSpecKwargs: + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... @@ -315,7 +482,17 @@ else: Concatenate: _SpecialForm TypeAlias: _SpecialForm TypeGuard: _SpecialForm - def is_typeddict(tp: object) -> bool: ... + def is_typeddict(tp: object) -> bool: + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ # New and changed things in 3.11 if sys.version_info >= (3, 11): @@ -338,11 +515,61 @@ if sys.version_info >= (3, 11): else: Self: _SpecialForm Never: _SpecialForm - def reveal_type(obj: _T, /) -> _T: ... - def assert_never(arg: Never, /) -> Never: ... - def assert_type(val: _T, typ: AnnotationForm, /) -> _T: ... - def clear_overloads() -> None: ... - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + def reveal_type(obj: _T, /) -> _T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + def assert_never(arg: Never, /) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: + """Assert (to the type checker) that the value is of the given type. 
+ + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + def clear_overloads() -> None: + """Clear all overloads in the registry. +""" + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: + """Return all defined overloads for *func* as a sequence. +""" Required: _SpecialForm NotRequired: _SpecialForm @@ -357,9 +584,89 @@ else: frozen_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: object, - ) -> IdentityFunction: ... + ) -> IdentityFunction: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ class NamedTuple(tuple[Any, ...]): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] __orig_bases__: ClassVar[tuple[Any, ...]] @@ -373,6 +680,18 @@ else: def _replace(self, **kwargs: Any) -> Self: ... class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. 
NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ def __init__(self, name: str, tp: AnnotationForm) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType @@ -394,38 +713,109 @@ if sys.version_info >= (3, 12): override as override, ) else: - def override(arg: _F, /) -> _F: ... - def get_original_bases(cls: type, /) -> tuple[Any, ...]: ... + def override(arg: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + def get_original_bases(cls: type, /) -> tuple[Any, ...]: + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @runtime_checkable class Buffer(Protocol, abc.ABC): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ # Not actually a Protocol at runtime; see # https://github.com/python/typeshed/issues/10224 for why we're defining it this way def __buffer__(self, flags: int, /) -> memoryview: ... 
@runtime_checkable class SupportsInt(Protocol, metaclass=abc.ABCMeta): + """An ABC with one abstract method __int__. +""" __slots__ = () @abc.abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=abc.ABCMeta): + """An ABC with one abstract method __float__. +""" __slots__ = () @abc.abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=abc.ABCMeta): + """An ABC with one abstract method __complex__. +""" __slots__ = () @abc.abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=abc.ABCMeta): + """An ABC with one abstract method __bytes__. +""" __slots__ = () @abc.abstractmethod def __bytes__(self) -> bytes: ... @@ -438,12 +828,18 @@ else: @runtime_checkable class SupportsAbs(Protocol[_T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ __slots__ = () @abc.abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ __slots__ = () @overload @abc.abstractmethod @@ -457,15 +853,30 @@ if sys.version_info >= (3, 14): else: @runtime_checkable class Reader(Protocol[_T_co]): + """Protocol for simple I/O reader instances. + +This protocol only supports blocking I/O. +""" __slots__ = () @abc.abstractmethod - def read(self, size: int = ..., /) -> _T_co: ... + def read(self, size: int = ..., /) -> _T_co: + """Read data from the input stream and return it. + +If *size* is specified, at most *size* items (bytes/characters) will be +read. +""" @runtime_checkable class Writer(Protocol[_T_contra]): + """Protocol for simple I/O writer instances. + +This protocol only supports blocking I/O. +""" __slots__ = () @abc.abstractmethod - def write(self, data: _T_contra, /) -> int: ... + def write(self, data: _T_contra, /) -> int: + """Write *data* to the output stream and return the number of items written. +""" if sys.version_info >= (3, 13): from types import CapsuleType as CapsuleType @@ -481,17 +892,93 @@ if sys.version_info >= (3, 13): ) from warnings import deprecated as deprecated else: - def is_protocol(tp: type, /) -> bool: ... - def get_protocol_members(tp: type, /) -> frozenset[str]: ... + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + def get_protocol_members(tp: type, /) -> frozenset[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ @final @type_check_only class _NoDefaultType: ... NoDefault: _NoDefaultType @final - class CapsuleType: ... + class CapsuleType: + """Capsule objects let you wrap a C "void *" pointer in a Python +object. They're a way of passing data through the Python interpreter +without creating your own custom type. + +Capsules are used for communication between extension modules. 
+They provide a way for an extension module to export a C interface +to other extension modules, so that extension modules can use the +Python import mechanism to link to one another. +""" class deprecated: + """Indicate that a class, function or overload is deprecated. + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ message: LiteralString category: type[Warning] | None stacklevel: int @@ -500,6 +987,8 @@ else: @final class TypeVar: + """Type variable. +""" @property def __name__(self) -> str: ... @property @@ -527,13 +1016,19 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: ... - def __ror__(self, left: Any) -> _SpecialForm: ... + def __or__(self, right: Any) -> _SpecialForm: + """Return self|value. +""" + def __ror__(self, left: Any) -> _SpecialForm: + """Return value|self. +""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... @final class ParamSpec: + """Parameter specification. +""" @property def __name__(self) -> str: ... @property @@ -562,11 +1057,17 @@ else: def has_default(self) -> bool: ... def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): - def __or__(self, right: Any) -> _SpecialForm: ... - def __ror__(self, left: Any) -> _SpecialForm: ... + def __or__(self, right: Any) -> _SpecialForm: + """Return self|value. +""" + def __ror__(self, left: Any) -> _SpecialForm: + """Return value|self. +""" @final class TypeVarTuple: + """Type variable tuple. +""" @property def __name__(self) -> str: ... @property @@ -585,6 +1086,32 @@ if sys.version_info >= (3, 14): else: @final class TypeAliasType: + """Create named, parameterized type aliases. + +This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + +is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + +The name ListOrSet can then be used as an alias for the type it refers to. + +The type_params argument should contain all the type parameters used +in the value of the type alias. If the alias is not generic, this +argument is omitted. 
+ +Static type checkers should only support type aliases declared using +TypeAliasType that follow these rules: + +- The first argument (the name) must be a string literal. +- The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + +""" def __init__( self, name: str, value: AnnotationForm, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () ) -> None: ... @@ -600,7 +1127,18 @@ else: def __name__(self) -> str: ... # It's writable on types, but not on instances of TypeAliasType. @property - def __module__(self) -> str | None: ... # type: ignore[override] + def __module__(self) -> str | None: # type: ignore[override] + """str(object='') -> str +str(bytes_or_buffer[, encoding[, errors]]) -> str + +Create a new string object from the given object. If encoding or +errors is specified, then the object must expose a data buffer +that will be decoded using the given encoding and error handler. +Otherwise, returns the result of object.__str__() (if defined) +or repr(object). +encoding defaults to 'utf-8'. +errors defaults to 'strict'. +""" # Returns typing._GenericAlias, which isn't stubbed. def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> AnnotationForm: ... def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... @@ -610,6 +1148,22 @@ else: # PEP 727 class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + +The value should be a positional-only string literal to allow static tools +like editors and documentation generators to use it. + +This complements docstrings. + +The string value passed is available in the attribute ``documentation``. + +Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... +""" documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... @@ -631,6 +1185,8 @@ if sys.version_info >= (3, 14): from annotationlib import Format as Format, get_annotations as get_annotations, type_repr as type_repr else: class Format(enum.IntEnum): + """An enumeration. +""" VALUE = 1 VALUE_WITH_FAKE_GLOBALS = 2 FORWARDREF = 3 @@ -644,7 +1200,42 @@ else: locals: Mapping[str, Any] | None = None, # value types depend on the key eval_str: bool = False, format: Literal[Format.STRING], - ) -> dict[str, str]: ... + ) -> dict[str, str]: + """Compute the annotations dict for an object. + +obj may be a callable, class, or module. +Passing in an object of any other type raises TypeError. + +Returns a dict. get_annotations() returns a new dict every time +it's called; calling it twice on the same object will return two +different but equivalent dicts. + +This is a backport of `inspect.get_annotations`, which has been +in the standard library since Python 3.10. See the standard library +documentation for more: + + https://docs.python.org/3/library/inspect.html#inspect.get_annotations + +This backport adds the *format* argument introduced by PEP 649. The +three formats supported are: +* VALUE: the annotations are returned as-is. This is the default and + it is compatible with the behavior on previous Python versions. +* FORWARDREF: return annotations as-is if possible, but replace any + undefined names with ForwardRef objects. 
The implementation proposed by + PEP 649 relies on language changes that cannot be backported; the + typing-extensions implementation simply returns the same result as VALUE. +* STRING: return annotations as strings, in a format close to the original + source. Again, this behavior cannot be replicated directly in a backport. + As an approximation, typing-extensions retrieves the annotations under + VALUE semantics and then stringifies them. + +The purpose of this backport is to allow users who would like to use +FORWARDREF or STRING semantics once PEP 649 is implemented, but who also +want to support earlier Python versions, to simply write: + + typing_extensions.get_annotations(obj, format=Format.FORWARDREF) + +""" @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -673,7 +1264,29 @@ else: type_params: Iterable[TypeVar | ParamSpec | TypeVarTuple] | None = None, format: Literal[Format.STRING], _recursive_guard: Container[str] = ..., - ) -> str: ... + ) -> str: + """Evaluate a forward reference as a type hint. + +This is similar to calling the ForwardRef.evaluate() method, +but unlike that method, evaluate_forward_ref() also: + +* Recursively evaluates forward references nested within the type hint. +* Rejects certain objects that are not valid type hints. +* Replaces type hints that evaluate to None with types.NoneType. +* Supports the *FORWARDREF* and *STRING* formats. + +*forward_ref* must be an instance of ForwardRef. *owner*, if given, +should be the object that holds the annotations that the forward reference +derived from, such as a module, class object, or function. It is used to +infer the namespaces to use for looking up names. *globals* and *locals* +can also be explicitly given to provide the global and local namespaces. +*type_params* is a tuple of type parameters that are in scope when +evaluating the forward reference. This parameter must be provided (though +it may be an empty tuple) if *owner* is not given and the forward reference +does not already have an owner set. *format* specifies the format of the +annotation and is a member of the annotationlib.Format enum. + +""" @overload def evaluate_forward_ref( forward_ref: ForwardRef, @@ -696,10 +1309,24 @@ else: format: Format | None = None, _recursive_guard: Container[str] = ..., ) -> AnnotationForm: ... - def type_repr(value: object) -> str: ... + def type_repr(value: object) -> str: + """Convert a Python value to a format suitable for use with the STRING format. + +This is intended as a helper for tools that support the STRING format but do +not have access to the code that originally produced the annotations. It uses +repr() for most objects. + +""" # PEP 661 class Sentinel: + """Create a unique sentinel object. + +*name* should be the name of the variable to which the return value shall be assigned. + +*repr*, if supplied, will be used for the repr of the sentinel object. +If not provided, "" will be used. +""" def __init__(self, name: str, repr: str | None = None) -> None: ... if sys.version_info >= (3, 14): def __or__(self, other: Any) -> UnionType: ... 
# other can be any type form legal for unions diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi index 9fff042f0b964..5144d81a3e830 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi @@ -1,3 +1,11 @@ +"""This module provides access to the Unicode Character Database which +defines character properties for all Unicode characters. The data in +this database is based on the UnicodeData.txt file version +16.0.0 which is publicly available from ftp://ftp.unicode.org/. + +The module uses the same names and symbols as defined by the +UnicodeData File Format 16.0.0. +""" import sys from _typeshed import ReadOnlyBuffer from typing import Any, Final, Literal, TypeVar, final, overload @@ -13,32 +21,89 @@ _T = TypeVar("_T") _NormalizationForm: TypeAlias = Literal["NFC", "NFD", "NFKC", "NFKD"] -def bidirectional(chr: str, /) -> str: ... -def category(chr: str, /) -> str: ... -def combining(chr: str, /) -> int: ... +def bidirectional(chr: str, /) -> str: + """Returns the bidirectional class assigned to the character chr as string. + +If no such value is defined, an empty string is returned. +""" +def category(chr: str, /) -> str: + """Returns the general category assigned to the character chr as string. +""" +def combining(chr: str, /) -> int: + """Returns the canonical combining class assigned to the character chr as integer. + +Returns 0 if no combining class is defined. +""" @overload -def decimal(chr: str, /) -> int: ... +def decimal(chr: str, /) -> int: + """Converts a Unicode character into its equivalent decimal value. + +Returns the decimal value assigned to the character chr as integer. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def decimal(chr: str, default: _T, /) -> int | _T: ... -def decomposition(chr: str, /) -> str: ... +def decomposition(chr: str, /) -> str: + """Returns the character decomposition mapping assigned to the character chr as string. + +An empty string is returned in case no such mapping is defined. +""" @overload -def digit(chr: str, /) -> int: ... +def digit(chr: str, /) -> int: + """Converts a Unicode character into its equivalent digit value. + +Returns the digit value assigned to the character chr as integer. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def digit(chr: str, default: _T, /) -> int | _T: ... _EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] -def east_asian_width(chr: str, /) -> _EastAsianWidth: ... -def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: ... -def lookup(name: str | ReadOnlyBuffer, /) -> str: ... -def mirrored(chr: str, /) -> int: ... +def east_asian_width(chr: str, /) -> _EastAsianWidth: + """Returns the east asian width assigned to the character chr as string. +""" +def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: + """Return whether the Unicode string unistr is in the normal form 'form'. + +Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. +""" +def lookup(name: str | ReadOnlyBuffer, /) -> str: + """Look up character by name. + +If a character with the given name is found, return the +corresponding character. If not found, KeyError is raised. +""" +def mirrored(chr: str, /) -> int: + """Returns the mirrored property assigned to the character chr as integer. 
+ +Returns 1 if the character has been identified as a "mirrored" +character in bidirectional text, 0 otherwise. +""" @overload -def name(chr: str, /) -> str: ... +def name(chr: str, /) -> str: + """Returns the name assigned to the character chr as a string. + +If no name is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def name(chr: str, default: _T, /) -> str | _T: ... -def normalize(form: _NormalizationForm, unistr: str, /) -> str: ... +def normalize(form: _NormalizationForm, unistr: str, /) -> str: + """Return the normal form 'form' for the Unicode string unistr. + +Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. +""" @overload -def numeric(chr: str, /) -> float: ... +def numeric(chr: str, /) -> float: + """Converts a Unicode character into its equivalent numeric value. + +Returns the numeric value assigned to the character chr as float. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def numeric(chr: str, default: _T, /) -> float | _T: ... @final @@ -46,28 +111,85 @@ class UCD: # The methods below are constructed from the same array in C # (unicodedata_functions) and hence identical to the functions above. unidata_version: str - def bidirectional(self, chr: str, /) -> str: ... - def category(self, chr: str, /) -> str: ... - def combining(self, chr: str, /) -> int: ... + def bidirectional(self, chr: str, /) -> str: + """Returns the bidirectional class assigned to the character chr as string. + +If no such value is defined, an empty string is returned. +""" + def category(self, chr: str, /) -> str: + """Returns the general category assigned to the character chr as string. +""" + def combining(self, chr: str, /) -> int: + """Returns the canonical combining class assigned to the character chr as integer. + +Returns 0 if no combining class is defined. +""" @overload - def decimal(self, chr: str, /) -> int: ... + def decimal(self, chr: str, /) -> int: + """Converts a Unicode character into its equivalent decimal value. + +Returns the decimal value assigned to the character chr as integer. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def decimal(self, chr: str, default: _T, /) -> int | _T: ... - def decomposition(self, chr: str, /) -> str: ... + def decomposition(self, chr: str, /) -> str: + """Returns the character decomposition mapping assigned to the character chr as string. + +An empty string is returned in case no such mapping is defined. +""" @overload - def digit(self, chr: str, /) -> int: ... + def digit(self, chr: str, /) -> int: + """Converts a Unicode character into its equivalent digit value. + +Returns the digit value assigned to the character chr as integer. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def digit(self, chr: str, default: _T, /) -> int | _T: ... - def east_asian_width(self, chr: str, /) -> _EastAsianWidth: ... - def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: ... - def lookup(self, name: str | ReadOnlyBuffer, /) -> str: ... - def mirrored(self, chr: str, /) -> int: ... + def east_asian_width(self, chr: str, /) -> _EastAsianWidth: + """Returns the east asian width assigned to the character chr as string. +""" + def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: + """Return whether the Unicode string unistr is in the normal form 'form'. 
+ +Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. +""" + def lookup(self, name: str | ReadOnlyBuffer, /) -> str: + """Look up character by name. + +If a character with the given name is found, return the +corresponding character. If not found, KeyError is raised. +""" + def mirrored(self, chr: str, /) -> int: + """Returns the mirrored property assigned to the character chr as integer. + +Returns 1 if the character has been identified as a "mirrored" +character in bidirectional text, 0 otherwise. +""" @overload - def name(self, chr: str, /) -> str: ... + def name(self, chr: str, /) -> str: + """Returns the name assigned to the character chr as a string. + +If no name is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def name(self, chr: str, default: _T, /) -> str | _T: ... - def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: ... + def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: + """Return the normal form 'form' for the Unicode string unistr. + +Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. +""" @overload - def numeric(self, chr: str, /) -> float: ... + def numeric(self, chr: str, /) -> float: + """Converts a Unicode character into its equivalent numeric value. + +Returns the numeric value assigned to the character chr as float. +If no such value is defined, default is returned, or, if not given, +ValueError is raised. +""" @overload def numeric(self, chr: str, default: _T, /) -> float | _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi index 546ea77bb4ca2..61eed4a08d318 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi @@ -1,3 +1,48 @@ +""" +Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's +Smalltalk testing framework (used with permission). + +This module contains the core framework classes that form the basis of +specific test cases and suites (TestCase, TestSuite etc.), and also a +text-based utility class for running the tests and reporting the results + (TextTestRunner). + +Simple usage: + + import unittest + + class IntegerArithmeticTestCase(unittest.TestCase): + def testAdd(self): # test method names begin with 'test' + self.assertEqual((1 + 2), 3) + self.assertEqual(0 + 1, 1) + def testMultiply(self): + self.assertEqual((0 * 10), 0) + self.assertEqual((5 * 8), 40) + + if __name__ == '__main__': + unittest.main() + +Further information is available in the bundled documentation, and from + + http://docs.python.org/library/unittest.html + +Copyright (c) 1999-2003 Steve Purcell +Copyright (c) 2003 Python Software Foundation +This module is free software, and you may redistribute it and/or modify +it under the same terms as Python itself, so long as this copyright message +and disclaimer are retained in their original form. + +IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, +SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF +THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE. 
THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, +AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, +SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. +""" import sys from unittest.async_case import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi index 011a970d8bbce..06f1e660f9363 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi @@ -7,10 +7,14 @@ from unittest.case import TestCase, _BaseTestCaseContext _L = TypeVar("_L", None, _LoggingWatcher) class _LoggingWatcher(NamedTuple): + """_LoggingWatcher(records, output) +""" records: list[logging.LogRecord] output: list[str] class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): + """A context manager for assertLogs() and assertNoLogs() +""" LOGGING_FORMAT: ClassVar[str] logger_name: str level: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi index 0b3fb9122c7b9..2fcfa8690cda1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi @@ -20,6 +20,11 @@ class IsolatedAsyncioTestCase(TestCase): async def asyncTearDown(self) -> None: ... def addAsyncCleanup(self, func: Callable[_P, Awaitable[object]], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... if sys.version_info >= (3, 11): - async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... + async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: + """Enters the supplied asynchronous context manager. + +If successful, also adds its __aexit__ method as a cleanup +function and returns the result of the __aenter__ method. +""" def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi index a602196e73c64..4a1bfda7eb707 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi @@ -1,3 +1,5 @@ +"""Test case implementation +""" import logging import sys import unittest.result @@ -40,26 +42,86 @@ class _AssertRaisesBaseContext(_BaseTestCaseContext): # This returns Self if args is the empty list, and None otherwise. # but it's not possible to construct an overload which expresses that - def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: ... + def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: + """ +If args is empty, assertRaises/Warns is being used as a +context manager, so check for a 'msg' kwarg and return self. +If args is not empty, call a callable passing positional and keyword +arguments. +""" -def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... -def doModuleCleanups() -> None: ... +def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Same as addCleanup, except the cleanup items are called even if +setUpModule fails (unlike tearDownModule). +""" +def doModuleCleanups() -> None: + """Execute all module cleanup functions. Normally called for you after +tearDownModule. +""" if sys.version_info >= (3, 11): - def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: ... 
+ def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: + """Same as enterContext, but module-wide. +""" def expectedFailure(test_item: _FT) -> _FT: ... -def skip(reason: str) -> Callable[[_FT], _FT]: ... -def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... -def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... +def skip(reason: str) -> Callable[[_FT], _FT]: + """ +Unconditionally skip a test. +""" +def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: + """ +Skip a test if the condition is true. +""" +def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: + """ +Skip a test unless the condition is true. +""" class SkipTest(Exception): + """ +Raise this exception in a test to skip it. + +Usually you can use TestCase.skipTest() or one of the skipping decorators +instead of raising this directly. +""" def __init__(self, reason: str) -> None: ... @type_check_only class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... class TestCase: + """A class whose instances are single test cases. + +By default, the test code itself should be placed in a method named +'runTest'. + +If the fixture may be used for many test cases, create as +many test methods as are needed. When instantiating such a TestCase +subclass, specify in the constructor arguments the name of the test method +that the instance is to execute. + +Test authors should subclass TestCase for their own tests. Construction +and deconstruction of the test's environment ('fixture') can be +implemented by overriding the 'setUp' and 'tearDown' methods respectively. + +If it is necessary to override the __init__ method, the base class +__init__ method must always be called. It is important that subclasses +should not change the signature of their __init__ method, since instances +of the classes are instantiated automatically by parts of the framework +in order to be run. + +When subclassing TestCase, you can set these attributes: +* failureException: determines which exception will be raised when + the instance's assertion methods fail; test methods raising this + exception will be deemed to have 'failed' rather than 'errored'. +* longMessage: determines whether long messages (including repr of + objects used in assert methods) will be printed on failure in *addition* + to any explicit message passed. +* maxDiff: sets the maximum length of a diff in failure messages + by assert methods using difflib. It is looked up as an instance + attribute so can be configured by individual tests if required. +""" failureException: type[BaseException] longMessage: bool maxDiff: int | None @@ -67,49 +129,106 @@ class TestCase: _testMethodName: str # undocumented _testMethodDoc: str - def __init__(self, methodName: str = "runTest") -> None: ... + def __init__(self, methodName: str = "runTest") -> None: + """Create an instance of the class that will use the named test +method when executed. Raises a ValueError if the instance does +not have a method with the specified name. +""" def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... - def setUp(self) -> None: ... - def tearDown(self) -> None: ... + def setUp(self) -> None: + """Hook method for setting up the test fixture before exercising it. +""" + def tearDown(self) -> None: + """Hook method for deconstructing the test fixture after testing it. +""" @classmethod - def setUpClass(cls) -> None: ... 
+ def setUpClass(cls) -> None: + """Hook method for setting up class fixture before running tests in the class. +""" @classmethod - def tearDownClass(cls) -> None: ... + def tearDownClass(cls) -> None: + """Hook method for deconstructing the class fixture after running all tests in the class. +""" def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... - def skipTest(self, reason: Any) -> NoReturn: ... - def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... - def debug(self) -> None: ... + def skipTest(self, reason: Any) -> NoReturn: + """Skip this test. +""" + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: + """Return a context manager that will return the enclosed block +of code in a subtest identified by the optional message and +keyword parameters. A failure in the subtest marks the test +case as failed but resumes execution at the end of the enclosed +block, allowing further test code to be executed. +""" + def debug(self) -> None: + """Run the test without collecting errors in a TestResult +""" if sys.version_info < (3, 11): def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... - def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... - def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... - def assertTrue(self, expr: Any, msg: Any = None) -> None: ... - def assertFalse(self, expr: Any, msg: Any = None) -> None: ... - def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... - def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... - def assertIsNone(self, obj: object, msg: Any = None) -> None: ... - def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... - def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... - def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... - def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... - def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: + """Fail if the two objects are unequal as determined by the '==' +operator. +""" + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: + """Fail if the two objects are equal as determined by the '!=' +operator. +""" + def assertTrue(self, expr: Any, msg: Any = None) -> None: + """Check that the expression is true. +""" + def assertFalse(self, expr: Any, msg: Any = None) -> None: + """Check that the expression is false. +""" + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: + """Just like self.assertTrue(a is b), but with a nicer default message. +""" + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: + """Just like self.assertTrue(a is not b), but with a nicer default message. +""" + def assertIsNone(self, obj: object, msg: Any = None) -> None: + """Same as self.assertTrue(obj is None), with a nicer default message. +""" + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: + """Included for symmetry with assertIsNone. 
+""" + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: + """Just like self.assertTrue(a in b), but with a nicer default message. +""" + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: + """Just like self.assertTrue(a not in b), but with a nicer default message. +""" + def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: + """Same as self.assertTrue(isinstance(obj, cls)), with a nicer +default message. +""" + def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: + """Included for symmetry with assertIsInstance. +""" @overload - def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: + """Just like self.assertTrue(a > b), but with a nicer default message. +""" @overload def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... @overload - def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: + """Just like self.assertTrue(a >= b), but with a nicer default message. +""" @overload def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... @overload - def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: + """Just like self.assertTrue(a < b), but with a nicer default message. +""" @overload def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: ... + def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: + """Just like self.assertTrue(a <= b), but with a nicer default message. +""" @overload def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` @@ -122,7 +241,32 @@ class TestCase: callable: Callable[..., object], *args: Any, **kwargs: Any, - ) -> None: ... + ) -> None: + """Fail unless an exception of class expected_exception is raised +by the callable when invoked with specified positional and +keyword arguments. If a different type of exception is +raised, it will not be caught, and the test case will be +deemed to have suffered an error, exactly as for an +unexpected exception. + +If called with the callable and arguments omitted, will return a +context object used like this:: + + with self.assertRaises(SomeException): + do_something() + +An optional keyword argument 'msg' can be provided when assertRaises +is used as a context object. + +The context manager keeps a reference to the exception as +the 'exception' attribute. This allows you to inspect the +exception after the assertion:: + + with self.assertRaises(SomeException) as cm: + do_something() + the_exception = cm.exception + self.assertEqual(the_exception.error_code, 3) +""" @overload def assertRaises( self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... @@ -135,7 +279,18 @@ class TestCase: callable: Callable[..., object], *args: Any, **kwargs: Any, - ) -> None: ... + ) -> None: + """Asserts that the message in a raised exception matches a regex. 
+ +Args: + expected_exception: Exception class expected to be raised. + expected_regex: Regex (re.Pattern object or string) expected + to be found in error message. + args: Function to be called and extra positional args. + kwargs: Extra kwargs. + msg: Optional message used in case of failure. Can only be used + when assertRaisesRegex is used as a context manager. +""" @overload def assertRaisesRegex( self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... @@ -147,7 +302,34 @@ class TestCase: callable: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs, - ) -> None: ... + ) -> None: + """Fail unless a warning of class warnClass is triggered +by the callable when invoked with specified positional and +keyword arguments. If a different type of warning is +triggered, it will not be handled: depending on the other +warning filtering rules in effect, it might be silenced, printed +out, or raised as an exception. + +If called with the callable and arguments omitted, will return a +context object used like this:: + + with self.assertWarns(SomeWarning): + do_something() + +An optional keyword argument 'msg' can be provided when assertWarns +is used as a context object. + +The context manager keeps a reference to the first matching +warning as the 'warning' attribute; similarly, the 'filename' +and 'lineno' attributes give you information about the line +of Python code from which the warning was triggered. +This allows you to inspect the warning after the assertion:: + + with self.assertWarns(SomeWarning) as cm: + do_something() + the_warning = cm.warning + self.assertEqual(the_warning.some_attribute, 147) +""" @overload def assertWarns( self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... @@ -160,21 +342,71 @@ class TestCase: callable: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs, - ) -> None: ... + ) -> None: + """Asserts that the message in a triggered warning matches a regexp. +Basic functioning is similar to assertWarns() with the addition +that only warnings whose messages also match the regular expression +are considered successful matches. + +Args: + expected_warning: Warning class expected to be triggered. + expected_regex: Regex (re.Pattern object or string) expected + to be found in error message. + args: Function to be called and extra positional args. + kwargs: Extra kwargs. + msg: Optional message used in case of failure. Can only be used + when assertWarnsRegex is used as a context manager. +""" @overload def assertWarnsRegex( self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( self, logger: str | logging.Logger | None = None, level: int | str | None = None - ) -> _AssertLogsContext[_LoggingWatcher]: ... + ) -> _AssertLogsContext[_LoggingWatcher]: + """Fail unless a log message of level *level* or higher is emitted +on *logger_name* or its children. If omitted, *level* defaults to +INFO and *logger* defaults to the root logger. + +This method must be used as a context manager, and will yield +a recording object with two attributes: `output` and `records`. +At the end of the context manager, the `output` attribute will +be a list of the matching formatted log messages and the +`records` attribute will be a list of the corresponding LogRecord +objects. 
+ +Example:: + + with self.assertLogs('foo', level='INFO') as cm: + logging.getLogger('foo').info('first message') + logging.getLogger('foo.bar').error('second message') + self.assertEqual(cm.output, ['INFO:foo:first message', + 'ERROR:foo.bar:second message']) +""" if sys.version_info >= (3, 10): def assertNoLogs( self, logger: str | logging.Logger | None = None, level: int | str | None = None - ) -> _AssertLogsContext[None]: ... + ) -> _AssertLogsContext[None]: + """Fail unless no log messages of level *level* or higher are emitted +on *logger_name* or its children. + +This method must be used as a context manager. +""" @overload - def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: + """Fail if the two objects are unequal as determined by their +difference rounded to the given number of decimal places +(default 7) and comparing to zero, or by comparing that the +difference between the two objects is more than the given +delta. + +Note that decimal places (from zero) are usually not the same +as significant digits (measured from the most significant digit). + +If the two objects compare equal then they will automatically +compare almost equal. +""" @overload def assertAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -198,7 +430,17 @@ class TestCase: delta: None = None, ) -> None: ... @overload - def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: + """Fail if the two objects are equal as determined by their +difference rounded to the given number of decimal places +(default 7) and comparing to zero, or by comparing that the +difference between the two objects is less than the given delta. + +Note that decimal places (from zero) are usually not the same +as significant digits (measured from the most significant digit). + +Objects that are equal automatically fail. +""" @overload def assertNotAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -221,42 +463,158 @@ class TestCase: msg: Any = None, delta: None = None, ) -> None: ... - def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... - def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... - def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... - def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... - def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... + def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: + """Fail the test unless the text matches the regular expression. +""" + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: + """Fail the test if the text matches the regular expression. +""" + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: + """Asserts that two iterables have the same elements, the same number of +times, without regard to order. 
+ + self.assertEqual(Counter(list(first)), + Counter(list(second))) + + Example: + - [0, 1, 1] and [1, 0, 1] compare equal. + - [0, 0, 1] and [0, 1] compare unequal. + +""" + def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: + """Add a type specific assertEqual style function to compare a type. + +This method is for use by TestCase subclasses that need to register +their own type equality functions to provide nicer error messages. + +Args: + typeobj: The data type to call this function on when both values + are of the same type in assertEqual(). + function: The callable taking two arguments and an optional + msg= argument that raises self.failureException with a + useful error message when the two arguments are not equal. +""" + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: + """Assert that two multi-line strings are equal. +""" def assertSequenceEqual( self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None - ) -> None: ... - def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... - def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... - def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... + ) -> None: + """An equality assertion for ordered sequences (like lists and tuples). + +For the purposes of this function, a valid ordered sequence type is one +which can be indexed, has a length, and has an equality operator. + +Args: + seq1: The first sequence to compare. + seq2: The second sequence to compare. + seq_type: The expected datatype of the sequences, or None if no + datatype should be enforced. + msg: Optional message to use on failure instead of a list of + differences. +""" + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: + """A list-specific equality assertion. + +Args: + list1: The first list to compare. + list2: The second list to compare. + msg: Optional message to use on failure instead of a list of + differences. + +""" + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: + """A tuple-specific equality assertion. + +Args: + tuple1: The first tuple to compare. + tuple2: The second tuple to compare. + msg: Optional message to use on failure instead of a list of + differences. +""" + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: + """A set-specific equality assertion. + +Args: + set1: The first set to compare. + set2: The second set to compare. + msg: Optional message to use on failure instead of a list of + differences. + +assertSetEqual uses ducktyping to support different types of sets, and +is optimized for sets specifically (parameters must support a +difference method). +""" # assertDictEqual accepts only true dict instances. We can't use that here, since that would make # assertDictEqual incompatible with TypedDict. def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... - def fail(self, msg: Any = None) -> NoReturn: ... + def fail(self, msg: Any = None) -> NoReturn: + """Fail immediately, with the given message. +""" def countTestCases(self) -> int: ... def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... - def shortDescription(self) -> str | None: ... 
- def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def shortDescription(self) -> str | None: + """Returns a one-line description of the test, or None if no +description has been provided. + +The default implementation of this method returns the first line of +the specified test method's docstring. +""" + def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Add a function, with arguments, to be called when the test is +completed. Functions added are called on a LIFO basis and are +called after tearDown on test failure or success. + +Cleanup items are called even if setUp fails (unlike tearDown). +""" if sys.version_info >= (3, 11): - def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... + def enterContext(self, cm: AbstractContextManager[_T]) -> _T: + """Enters the supplied context manager. + +If successful, also adds its __exit__ method as a cleanup +function and returns the result of the __enter__ method. +""" - def doCleanups(self) -> None: ... + def doCleanups(self) -> None: + """Execute all cleanup functions. Normally called for you after +tearDown. +""" @classmethod - def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Same as addCleanup, except the cleanup items are called even if +setUpClass fails (unlike tearDownClass). +""" @classmethod - def doClassCleanups(cls) -> None: ... + def doClassCleanups(cls) -> None: + """Execute all class cleanup functions. Normally called for you after +tearDownClass. +""" if sys.version_info >= (3, 11): @classmethod - def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: ... + def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: + """Same as enterContext, but class-wide. +""" + + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: # undocumented + """Honour the longMessage attribute when generating failure messages. +If longMessage is False this means: +* Use only an explicit message if it is provided +* Otherwise use the standard message for the assert - def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented - def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented +If longMessage is True: +* Use the standard message +* If an explicit message is provided, plus ' : ' and the explicit message +""" + def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: # undocumented + """Get a detailed comparison function for the types of the two args. + +Returns: A callable accepting (first, second, msg=None) that will +raise a failure exception if first != second with a useful human +readable error message for those types. +""" if sys.version_info < (3, 12): failUnlessEqual = assertEqual assertEquals = assertEqual @@ -275,7 +633,9 @@ class TestCase: assertRaisesRegexp = assertRaisesRegex def assertDictContainsSubset( self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None - ) -> None: ... + ) -> None: + """Checks whether dictionary is a superset of subset. +""" if sys.version_info >= (3, 10): # Runtime has *args, **kwargs, but will error if any are supplied @@ -292,6 +652,13 @@ class TestCase: def assertNotEndsWith(self, s: _SB, suffix: _SB | tuple[_SB, ...], msg: Any = None) -> None: ... 
class FunctionTestCase(TestCase): + """A test case that wraps a test function. + +This is useful for slipping pre-existing test functions into the +unittest framework. Optionally, set-up and tidy-up functions can be +supplied. As with TestCase, the tidy-up ('tearDown') function will +always be called if the set-up ('setUp') function ran successfully. +""" def __init__( self, testFunc: Callable[[], object], @@ -304,14 +671,22 @@ class FunctionTestCase(TestCase): def __eq__(self, other: object) -> bool: ... class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): + """A context manager used to implement TestCase.assertRaises* methods. +""" exception: _E def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> bool: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class _AssertWarnsContext(_AssertRaisesBaseContext): + """A context manager used to implement TestCase.assertWarns* methods. +""" warning: WarningMessage filename: str lineno: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi index 81de40c898496..0d0b854541e45 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi @@ -1,3 +1,5 @@ +"""Loading unittests. +""" import sys import unittest.case import unittest.suite @@ -13,23 +15,73 @@ _SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite VALID_MODULE_NAME: Final[Pattern[str]] class TestLoader: + """ +This class is responsible for loading tests according to various criteria +and returning them wrapped in a TestSuite +""" errors: list[type[BaseException]] testMethodPrefix: str sortTestMethodsUsing: _SortComparisonMethod testNamePatterns: list[str] | None suiteClass: _SuiteClass - def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... + def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: + """Return a suite of all test cases contained in testCaseClass +""" if sys.version_info >= (3, 12): - def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: + """Return a suite of all test cases contained in the given module +""" else: - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: + """Return a suite of all test cases contained in the given module +""" - def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... - def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... - def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... 
+ def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: + """Return a suite of all test cases given a string specifier. + +The name may resolve either to a module, a test case class, a +test method within a test case class, or a callable object which +returns a TestCase or TestSuite instance. + +The method optionally resolves the names relative to a given module. +""" + def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: + """Return a suite of all test cases found using the given sequence +of string specifiers. See 'loadTestsFromName()'. +""" + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: + """Return a sorted sequence of method names found within testCaseClass + """ def discover( self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None - ) -> unittest.suite.TestSuite: ... + ) -> unittest.suite.TestSuite: + """Find and return all test modules from the specified start +directory, recursing into subdirectories to find them and return all +tests found within them. Only test files that match the pattern will +be loaded. (Using shell style pattern matching.) + +All test modules must be importable from the top level of the project. +If the start directory is not the top level directory then the top +level directory must be specified separately. + +If a test package name (directory with '__init__.py') matches the +pattern then the package will be checked for a 'load_tests' function. If +this exists then it will be called with (loader, tests, pattern) unless +the package has already had load_tests called from the same discovery +invocation, in which case the package module object is not scanned for +tests - this ensures that when a package uses discover to further +discover child tests that infinite recursion does not happen. + +If load_tests exists then discovery does *not* recurse into the package, +load_tests is responsible for loading all tests in the package. + +The pattern is deliberately not stored as a loader attribute so that +packages can continue discovery themselves. top_level_dir is stored so +load_tests does not need to pass this argument in to loader.discover(). + +Paths are sorted before being imported to ensure reproducible execution +order even on filesystems with non-alphabetical ordering like ext3/4. +""" def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... defaultTestLoader: TestLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi index 23ead1638ecc2..68dcfe5c00004 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi @@ -1,3 +1,5 @@ +"""Unittest main program +""" import sys import unittest.case import unittest.loader @@ -17,6 +19,9 @@ class _TestRunner(Protocol): # not really documented class TestProgram: + """A command-line program that runs a set of tests; this is primarily +for making test modules conveniently executable. 
+""" result: unittest.result.TestResult module: None | str | ModuleType verbosity: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi index f3e58bcd1c009..a060d9752edf4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi @@ -55,10 +55,14 @@ else: FILTER_DIR: bool # controls the way mock objects respond to `dir` function class _SentinelObject: + """A unique, named, sentinel object. +""" name: Any def __init__(self, name: Any) -> None: ... class _Sentinel: + """Access attributes to return a named object, usable as a sentinel. +""" def __getattr__(self, name: str) -> Any: ... sentinel: _Sentinel @@ -70,6 +74,24 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs if sys.version_info >= (3, 12): class _Call(tuple[Any, ...]): + """ +A tuple for holding the results of a call to a mock, either in the form +`(args, kwargs)` or `(name, args, kwargs)`. + +If args or kwargs are empty then a call tuple will compare equal to +a tuple without those values. This makes comparisons less verbose:: + + _Call(('name', (), {})) == ('name',) + _Call(('name', (1,), {})) == ('name', (1,)) + _Call(((), {'a': 'b'})) == ({'a': 'b'},) + +The `_Call` object provides a useful shortcut for comparing with call:: + + _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) + _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + +If the _Call has no name then it will match any name. +""" def __new__( cls, value: _CallValue = (), @@ -96,11 +118,33 @@ if sys.version_info >= (3, 12): def args(self) -> tuple[Any, ...]: ... @property def kwargs(self) -> Mapping[str, Any]: ... - def call_list(self) -> Any: ... + def call_list(self) -> Any: + """For a call object that represents multiple calls, `call_list` +returns a list of all the intermediate calls as well as the +final call. +""" else: @disjoint_base class _Call(tuple[Any, ...]): + """ + A tuple for holding the results of a call to a mock, either in the form + `(args, kwargs)` or `(name, args, kwargs)`. + + If args or kwargs are empty then a call tuple will compare equal to + a tuple without those values. This makes comparisons less verbose:: + + _Call(('name', (), {})) == ('name',) + _Call(('name', (1,), {})) == ('name', (1,)) + _Call(((), {'a': 'b'})) == ({'a': 'b'},) + + The `_Call` object provides a useful shortcut for comparing with call:: + + _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) + _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + + If the _Call has no name then it will match any name. + """ def __new__( cls, value: _CallValue = (), @@ -127,7 +171,11 @@ else: def args(self) -> tuple[Any, ...]: ... @property def kwargs(self) -> Mapping[str, Any]: ... - def call_list(self) -> Any: ... + def call_list(self) -> Any: + """For a call object that represents multiple calls, `call_list` + returns a list of all the intermediate calls as well as the + final call. +""" call: _Call @@ -140,6 +188,8 @@ class Base: # We subclass with "Any" because mocks are explicitly designed to stand in for other types, # something that can't be expressed with our static type system. class NonCallableMock(Base, Any): + """A non-callable version of `Mock` +""" if sys.version_info >= (3, 12): def __new__( cls, @@ -177,22 +227,86 @@ class NonCallableMock(Base, Any): def __getattr__(self, name: str) -> Any: ... def __delattr__(self, name: str) -> None: ... 
def __setattr__(self, name: str, value: Any) -> None: ... - def __dir__(self) -> list[str]: ... - def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... - def assert_not_called(self) -> None: ... - def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def __dir__(self) -> list[str]: + """Filter the output of `dir(mock)` to only useful members. +""" + def assert_called_with(self, *args: Any, **kwargs: Any) -> None: + """assert that the last call was made with the specified arguments. + +Raises an AssertionError if the args and keyword args passed in are +different to the last call to the mock. +""" + def assert_not_called(self) -> None: + """assert that the mock was never called. + """ + def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: + """assert that the mock was called exactly once and that that call was +with the specified arguments. +""" def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... - def assert_called(self) -> None: ... - def assert_called_once(self) -> None: ... - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def assert_called(self) -> None: + """assert that the mock was called at least once + """ + def assert_called_once(self) -> None: + """assert that the mock was called only once. + """ + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: + """Restore the mock object to its initial state. +""" def _extract_mock_name(self) -> str: ... - def _get_call_signature_from_name(self, name: str) -> Any: ... - def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... - def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... + def _get_call_signature_from_name(self, name: str) -> Any: + """ +* If call objects are asserted against a method/function like obj.meth1 +then there could be no name for the call object to lookup. Hence just +return the spec_signature of the method/function being asserted against. +* If the name is not empty then remove () and split by '.' to get +list of names to iterate through the children until a potential +match is found. A child mock is created only during attribute access +so if we get a _SpecState then no attributes of the spec were accessed +and can be safely exited. +""" + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: + """assert the mock has been called with the specified arguments. + +The assert passes if the mock has *ever* been called, unlike +`assert_called_with` and `assert_called_once_with` that only pass if +the call is the most recent one. +""" + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: + """assert the mock has been called with the specified calls. +The `mock_calls` list is checked for the calls. + +If `any_order` is False (the default) then the calls must be +sequential. There can be extra calls before or after the +specified calls. + +If `any_order` is True then the calls can be in any order, but +they must all appear in `mock_calls`. +""" + def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: + """Add a spec to a mock. `spec` can either be an object or a +list of strings. Only attributes on the `spec` can be fetched as +attributes from the mock. + +If `spec_set` is True then only attributes on the spec can be set. 
+""" def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... - def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... - def configure_mock(self, **kwargs: Any) -> None: ... + def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: + """ +Attach a mock as an attribute of this one, replacing its name and +parent. Calls to the attached mock will be recorded in the +`method_calls` and `mock_calls` attributes of this one. +""" + def configure_mock(self, **kwargs: Any) -> None: + """Set attributes on the mock through keyword arguments. + +Attributes plus return values and side effects can be set on child +mocks using standard dot notation and unpacking a dictionary in the +method call: + +>>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} +>>> mock.configure_mock(**attrs) +""" return_value: Any side_effect: Any called: bool @@ -201,12 +315,42 @@ class NonCallableMock(Base, Any): call_args_list: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... - def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... - def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: + """ +Given a call (or simply an (args, kwargs) tuple), return a +comparison key suitable for matching with other calls. +This is a best effort method which relies on the spec's signature, +if available, or falls back on the arguments themselves. +""" + def _get_child_mock(self, **kw: Any) -> NonCallableMock: + """Create the child mocks for attributes and return value. +By default child mocks will be the same type as the parent. +Subclasses of Mock may want to override this to customize the way +child mocks are made. + +For non-callable mocks the callable variant will be used (rather than +any custom subclass). +""" if sys.version_info >= (3, 13): - def _calls_repr(self) -> str: ... + def _calls_repr(self) -> str: + """Renders self.mock_calls as a string. + + Example: " +Calls: [call(1), call(2)]." + + If self.mock_calls is empty, an empty string is returned. The + output will be truncated if very long. + """ else: - def _calls_repr(self, prefix: str = "Calls") -> str: ... + def _calls_repr(self, prefix: str = "Calls") -> str: + """Renders self.mock_calls as a string. + + Example: " +Calls: [call(1), call(2)]." + + If self.mock_calls is empty, an empty string is returned. The + output will be truncated if very long. + """ class CallableMixin(Base): side_effect: Any @@ -226,7 +370,63 @@ class CallableMixin(Base): ) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... -class Mock(CallableMixin, NonCallableMock): ... +class Mock(CallableMixin, NonCallableMock): + """ +Create a new `Mock` object. `Mock` takes several optional arguments +that specify the behaviour of the Mock object: + +* `spec`: This can be either a list of strings or an existing object (a + class or instance) that acts as the specification for the mock object. If + you pass in an object then a list of strings is formed by calling dir on + the object (excluding unsupported magic attributes and methods). Accessing + any attribute not in this list will raise an `AttributeError`. + + If `spec` is an object (rather than a list of strings) then + `mock.__class__` returns the class of the spec object. This allows mocks + to pass `isinstance` tests. + +* `spec_set`: A stricter variant of `spec`. 
If used, attempting to *set* + or get an attribute on the mock that isn't on the object passed as + `spec_set` will raise an `AttributeError`. + +* `side_effect`: A function to be called whenever the Mock is called. See + the `side_effect` attribute. Useful for raising exceptions or + dynamically changing return values. The function is called with the same + arguments as the mock, and unless it returns `DEFAULT`, the return + value of this function is used as the return value. + + If `side_effect` is an iterable then each call to the mock will return + the next value from the iterable. If any of the members of the iterable + are exceptions they will be raised instead of returned. + +* `return_value`: The value returned when the mock is called. By default + this is a new Mock (created on first access). See the + `return_value` attribute. + +* `unsafe`: By default, accessing any attribute whose name starts with + *assert*, *assret*, *asert*, *aseert*, or *assrt* raises an AttributeError. + Additionally, an AttributeError is raised when accessing + attributes that match the name of an assertion method without the prefix + `assert_`, e.g. accessing `called_once` instead of `assert_called_once`. + Passing `unsafe=True` will allow access to these attributes. + +* `wraps`: Item for the mock object to wrap. If `wraps` is not None then + calling the Mock will pass the call through to the wrapped object + (returning the real result). Attribute access on the mock will return a + Mock object that wraps the corresponding attribute of the wrapped object + (so attempting to access an attribute that doesn't exist will raise an + `AttributeError`). + + If the mock has an explicit `return_value` set then calls are not passed + to the wrapped object and the `return_value` is returned instead. + +* `name`: If the mock has a name then it will be used in the repr of the + mock. This can be useful for debugging. The name is propagated to child + mocks. + +Mocks can also be called with arbitrary keyword arguments. These will be +used to set attributes on the mock after it is created. +""" class _patch(Generic[_T]): attribute_name: Any @@ -289,12 +489,20 @@ class _patch(Generic[_T]): target: Any temp_original: Any is_local: bool - def __enter__(self) -> _T: ... + def __enter__(self) -> _T: + """Perform the patch. +""" def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / - ) -> None: ... - def start(self) -> _T: ... - def stop(self) -> None: ... + ) -> None: + """Undo the patch. +""" + def start(self) -> _T: + """Activate a patch, returning any created mock. +""" + def stop(self) -> None: + """Stop an active patch. +""" # This class does not exist at runtime, it's a hack to make this work: # @patch("foo") @@ -309,6 +517,34 @@ class _patch_pass_arg(_patch[_T]): def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... class _patch_dict: + """ +Patch a dictionary, or dictionary like object, and restore the dictionary +to its original state after the test, where the restored dictionary is +a copy of the dictionary as it was before the test. + +`in_dict` can be a dictionary or a mapping like container. If it is a +mapping then it must at least support getting, setting and deleting items +plus iterating over keys. + +`in_dict` can also be a string specifying the name of the dictionary, which +will then be fetched by importing it. + +`values` can be a dictionary of values to set in the dictionary. 
`values` +can also be an iterable of `(key, value)` pairs. + +If `clear` is True then the dictionary will be cleared before the new +values are set. + +`patch.dict` can also be called with arbitrary keyword arguments to set +values in the dictionary:: + + with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()): + ... + +`patch.dict` can be used as a context manager, decorator or class +decorator. When used as a class decorator `patch.dict` honours +`patch.TEST_PREFIX` for choosing which methods to wrap. +""" in_dict: Any values: Any clear: Any @@ -319,8 +555,12 @@ class _patch_dict: def decorate_async_callable(self, f: _AF) -> _AF: ... def decorate_class(self, klass: Any) -> Any: ... - def __enter__(self) -> Any: ... - def __exit__(self, *args: object) -> Any: ... + def __enter__(self) -> Any: + """Patch the dict. +""" + def __exit__(self, *args: object) -> Any: + """Unpatch the dict. +""" start: Any stop: Any @@ -474,20 +714,65 @@ patch: _patcher class MagicMixin(Base): def __init__(self, *args: Any, **kw: Any) -> None: ... -class NonCallableMagicMock(MagicMixin, NonCallableMock): ... -class MagicMock(MagicMixin, Mock): ... +class NonCallableMagicMock(MagicMixin, NonCallableMock): + """A version of `MagicMock` that isn't callable. +""" +class MagicMock(MagicMixin, Mock): + """ +MagicMock is a subclass of Mock with default implementations +of most of the magic methods. You can use MagicMock without having to +configure the magic methods yourself. + +If you use the `spec` or `spec_set` arguments then *only* magic +methods that exist in the spec will be created. + +Attributes and the return value of a `MagicMock` will also be `MagicMocks`. +""" class AsyncMockMixin(Base): def __init__(self, *args: Any, **kwargs: Any) -> None: ... async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... - def assert_awaited(self) -> None: ... - def assert_awaited_once(self) -> None: ... - def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... - def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... - def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... - def assert_not_awaited(self) -> None: ... - def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited(self) -> None: + """ +Assert that the mock was awaited at least once. +""" + def assert_awaited_once(self) -> None: + """ +Assert that the mock was awaited exactly once. +""" + def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: + """ +Assert that the last await was with the specified arguments. +""" + def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: + """ +Assert that the mock was awaited exactly once and with the specified +arguments. +""" + def assert_any_await(self, *args: Any, **kwargs: Any) -> None: + """ +Assert the mock has ever been awaited with the specified arguments. +""" + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: + """ +Assert the mock has been awaited with the specified calls. +The :attr:`await_args_list` list is checked for the awaits. + +If `any_order` is False (the default) then the awaits must be +sequential. There can be extra calls before or after the +specified awaits. + +If `any_order` is True then the awaits can be in any order, but +they must all appear in :attr:`await_args_list`. 
+""" + def assert_not_awaited(self) -> None: + """ +Assert that the mock was never awaited. +""" + def reset_mock(self, *args: Any, **kwargs: Any) -> None: + """ +See :func:`.Mock.reset_mock()` +""" await_count: int await_args: _Call | None await_args_list: _CallList @@ -496,10 +781,53 @@ class AsyncMagicMixin(MagicMixin): def __init__(self, *args: Any, **kw: Any) -> None: ... class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): + """ +Enhance :class:`Mock` with features allowing to mock +an async function. + +The :class:`AsyncMock` object will behave so the object is +recognized as an async function, and the result of a call is an awaitable: + +>>> mock = AsyncMock() +>>> inspect.iscoroutinefunction(mock) +True +>>> inspect.isawaitable(mock()) +True + + +The result of ``mock()`` is an async function which will have the outcome +of ``side_effect`` or ``return_value``: + +- if ``side_effect`` is a function, the async function will return the + result of that function, +- if ``side_effect`` is an exception, the async function will raise the + exception, +- if ``side_effect`` is an iterable, the async function will return the + next value of the iterable, however, if the sequence of result is + exhausted, ``StopIteration`` is raised immediately, +- if ``side_effect`` is not defined, the async function will return the + value defined by ``return_value``, hence, by default, the async function + returns a new :class:`AsyncMock` object. + +If the outcome of ``side_effect`` or ``return_value`` is an async function, +the mock async function obtained when the mock object is called will be this +async function itself (and not an async function returning an async +function). + +The test author can also specify a wrapped object with ``wraps``. In this +case, the :class:`Mock` object behavior is the same as with an +:class:`.Mock` object: the wrapped object may have methods +defined as async function functions. + +Based on Martin Richard's asynctest project. +""" # Improving the `reset_mock` signature. # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal. # But, `NonCallableMock` super-class has the better version. - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: + """ +See :func:`.Mock.reset_mock()` +""" class MagicProxy(Base): name: str @@ -510,6 +838,8 @@ class MagicProxy(Base): # See https://github.com/python/typeshed/issues/14701 class _ANY(Any): + """A helper object that compares equal to everything. +""" def __eq__(self, other: object) -> Literal[True]: ... def __ne__(self, other: object) -> Literal[False]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -526,7 +856,29 @@ if sys.version_info >= (3, 10): *, unsafe: bool = False, **kwargs: Any, - ) -> Any: ... + ) -> Any: + """Create a mock object using another object as a spec. Attributes on the +mock will use the corresponding attribute on the `spec` object as their +spec. + +Functions or methods being mocked will have their arguments checked +to check that they are called with the correct signature. + +If `spec_set` is True then attempting to set attributes that don't exist +on the spec object will raise an `AttributeError`. + +If a class is used as a spec then the return value of the mock (the +instance of the class) will have the same spec. You can use a class as the +spec for an instance object by passing `instance=True`. 
The returned mock +will only be callable if instances of the mock are callable. + +`create_autospec` will raise a `RuntimeError` if passed some common +misspellings of the arguments autospec and spec_set. Pass the argument +`unsafe` with the value True to disable that check. + +`create_autospec` also takes arbitrary keyword arguments that are passed to +the constructor of the created mock. +""" else: def create_autospec( @@ -536,7 +888,25 @@ else: _parent: Any | None = None, _name: Any | None = None, **kwargs: Any, - ) -> Any: ... + ) -> Any: + """Create a mock object using another object as a spec. Attributes on the + mock will use the corresponding attribute on the `spec` object as their + spec. + + Functions or methods being mocked will have their arguments checked + to check that they are called with the correct signature. + + If `spec_set` is True then attempting to set attributes that don't exist + on the spec object will raise an `AttributeError`. + + If a class is used as a spec then the return value of the mock (the + instance of the class) will have the same spec. You can use a class as the + spec for an instance object by passing `instance=True`. The returned mock + will only be callable if instances of the mock are callable. + + `create_autospec` also takes arbitrary keyword arguments that are passed to + the constructor of the created mock. +""" class _SpecState: spec: Any @@ -555,9 +925,28 @@ class _SpecState: instance: Any = False, ) -> None: ... -def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: + """ +A helper function to create a mock to replace the use of `open`. It works +for `open` called directly or used as a context manager. + +The `mock` argument is the mock object to configure. If `None` (the +default) then a `MagicMock` will be created for you, with the API limited +to methods or attributes available on standard file handles. + +`read_data` is a string for the `read`, `readline` and `readlines` of the +file handle to return. This is an empty string by default. +""" class PropertyMock(Mock): + """ +A mock intended to be used as a property, or other descriptor, on a class. +`PropertyMock` provides `__get__` and `__set__` methods so you can specify +a return value when it is fetched. + +Fetching a `PropertyMock` instance from an object calls the mock, with +no args. Setting it calls the mock with the value being set. +""" def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... def __set__(self, obj: Any, val: Any) -> None: ... @@ -567,10 +956,44 @@ if sys.version_info >= (3, 13): def __init__(self, /, *args: Any, timeout: float | None | _SentinelObject = ..., **kwargs: Any) -> None: ... # Same as `NonCallableMock.reset_mock.` - def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... - def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: ... - def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: ... - - class ThreadingMock(ThreadingMixin, MagicMixin, Mock): ... - -def seal(mock: Any) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: + """ +See :func:`.Mock.reset_mock()` +""" + def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: + """Wait until the mock object is called. + +`timeout` - time to wait for in seconds, waits forever otherwise. 
+Defaults to the constructor provided timeout. +Use None to block undefinetively. +""" + def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: + """Wait until the mock object is called with given args. + +Waits for the timeout in seconds provided in the constructor. +""" + + class ThreadingMock(ThreadingMixin, MagicMixin, Mock): + """ +A mock that can be used to wait until on calls happening +in a different thread. + +The constructor can take a `timeout` argument which +controls the timeout in seconds for all `wait` calls of the mock. + +You can change the default timeout of all instances via the +`ThreadingMock.DEFAULT_TIMEOUT` attribute. + +If no timeout is set, it will block undefinetively. +""" + +def seal(mock: Any) -> None: + """Disable the automatic generation of child mocks. + +Given an input Mock, seals it to ensure no further mocks will be generated +when accessing an attribute that was not already defined. + +The operation recursively seals the mock passed in, meaning that +the mock itself, any mocks generated by accessing one of its attributes, +and all assigned mocks without a name or spec will be sealed. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi index 0761baaa2830b..ae8f4cd7645f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi @@ -1,3 +1,5 @@ +"""Test result object +""" import sys import unittest.case from _typeshed import OptExcInfo @@ -15,6 +17,16 @@ STDERR_LINE: Final[str] def failfast(method: _F) -> _F: ... class TestResult: + """Holder for test result information. + +Test results are automatically managed by the TestCase and TestSuite +classes, and do not need to be explicitly manipulated by writers of tests. + +Each instance holds the total number of tests run, and collections of +failures and errors that occurred among those test runs. The collections +contain tuples of (testcase, exceptioninfo), where exceptioninfo is the +formatted traceback of the error that occurred. +""" errors: list[tuple[unittest.case.TestCase, str]] failures: list[tuple[unittest.case.TestCase, str]] skipped: list[tuple[unittest.case.TestCase, str]] @@ -29,19 +41,60 @@ class TestResult: collectedDurations: _DurationsType def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... - def printErrors(self) -> None: ... - def wasSuccessful(self) -> bool: ... - def stop(self) -> None: ... - def startTest(self, test: unittest.case.TestCase) -> None: ... - def stopTest(self, test: unittest.case.TestCase) -> None: ... - def startTestRun(self) -> None: ... - def stopTestRun(self) -> None: ... - def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... - def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... - def addSuccess(self, test: unittest.case.TestCase) -> None: ... - def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: ... - def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... - def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... - def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ... 
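A small sketch of the seal() behaviour documented above: attributes configured before sealing keep working, while access to a new, unconfigured attribute raises instead of lazily creating a child mock. The attribute names are illustrative only:

from unittest import mock

m = mock.MagicMock()
m.configured.return_value = 3   # child mock defined before sealing
mock.seal(m)

assert m.configured() == 3
try:
    m.undefined_attribute        # would create a child mock if not sealed
except AttributeError:
    print("sealed: unknown attribute access raised AttributeError")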
+ def printErrors(self) -> None: + """Called by TestRunner after test run +""" + def wasSuccessful(self) -> bool: + """Tells whether or not this result was a success. +""" + def stop(self) -> None: + """Indicates that the tests should be aborted. +""" + def startTest(self, test: unittest.case.TestCase) -> None: + """Called when the given test is about to be run +""" + def stopTest(self, test: unittest.case.TestCase) -> None: + """Called when the given test has been run +""" + def startTestRun(self) -> None: + """Called once before any tests are executed. + +See startTest for a method called before each test. +""" + def stopTestRun(self) -> None: + """Called once after all tests are executed. + +See stopTest for a method called after each test. +""" + def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: + """Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info(). +""" + def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: + """Called when an error has occurred. 'err' is a tuple of values as +returned by sys.exc_info(). +""" + def addSuccess(self, test: unittest.case.TestCase) -> None: + """Called when a test has completed successfully +""" + def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: + """Called when a test is skipped. +""" + def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: + """Called when an expected failure/error occurred. +""" + def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: + """Called when a test was expected to fail, but succeed. +""" + def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: + """Called at the end of a subtest. +'err' is None if the subtest ended successfully, otherwise it's a +tuple of values as returned by sys.exc_info(). +""" if sys.version_info >= (3, 12): - def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: ... + def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: + """Called when a test finished to run, regardless of its outcome. +*test* is the test case corresponding to the test method. +*elapsed* is the time represented in seconds, and it includes the +execution of cleanup functions. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi index f76771f55e131..f6ab164c5943d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi @@ -1,3 +1,5 @@ +"""Running tests +""" import sys import unittest.case import unittest.result @@ -22,6 +24,8 @@ class _TextTestStream(_SupportsWriteAndFlush, Protocol): # But that's not feasible to do Generically # We can expand the attributes if requested class _WritelnDecorator: + """Used to decorate file-like objects with a handy 'writeln' method +""" def __init__(self, stream: _SupportsWriteAndFlush) -> None: ... def writeln(self, arg: str | None = None) -> None: ... def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__ @@ -35,6 +39,10 @@ class _WritelnDecorator: _StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator) class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): + """A test result class that can print formatted text results to a stream. + +Used by TextTestRunner. 
+""" descriptions: bool # undocumented dots: bool # undocumented separator1: str @@ -43,7 +51,10 @@ class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): stream: _StreamT # undocumented if sys.version_info >= (3, 12): durations: int | None - def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: ... + def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: + """Construct a TextTestResult. Subclasses should accept **kwargs +to ensure compatibility as the interface changes. +""" else: def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ... @@ -51,6 +62,11 @@ class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... class TextTestRunner: + """A test runner class that displays results in textual form. + +It prints out the names of tests as they are run, errors as they +occur, and a summary of the results at the end of the test run. +""" resultclass: _ResultClassType stream: _WritelnDecorator descriptions: bool @@ -74,7 +90,12 @@ class TextTestRunner: *, tb_locals: bool = False, durations: int | None = None, - ) -> None: ... + ) -> None: + """Construct a TextTestRunner. + +Subclasses should accept **kwargs to ensure compatibility as the +interface changes. +""" else: def __init__( self, @@ -87,7 +108,14 @@ class TextTestRunner: warnings: str | None = None, *, tb_locals: bool = False, - ) -> None: ... + ) -> None: + """Construct a TextTestRunner. + + Subclasses should accept **kwargs to ensure compatibility as the + interface changes. + """ def _makeResult(self) -> TextTestResult: ... - def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: ... + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: + """Run the given test case or test suite. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi index 443396164b6fe..56577640030d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi @@ -1,3 +1,5 @@ +"""TestSuite +""" import unittest.case import unittest.result from collections.abc import Iterable, Iterator @@ -7,6 +9,8 @@ from typing_extensions import TypeAlias _TestType: TypeAlias = unittest.case.TestCase | TestSuite class BaseTestSuite: + """A simple test suite that doesn't provide class or module shared fixtures. + """ _tests: list[unittest.case.TestCase] _removed_tests: int def __init__(self, tests: Iterable[_TestType] = ()) -> None: ... @@ -14,11 +18,21 @@ class BaseTestSuite: def addTest(self, test: _TestType) -> None: ... def addTests(self, tests: Iterable[_TestType]) -> None: ... def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... - def debug(self) -> None: ... + def debug(self) -> None: + """Run the tests without collecting errors in a TestResult +""" def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] class TestSuite(BaseTestSuite): + """A test suite is a composite test consisting of a number of TestCases. + +For use, create an instance of TestSuite, then add test case instances. 
+When all tests have been added, the suite can be passed to a test +runner, such as TextTestRunner. It will run the individual test cases +in the order in which they were added, aggregating the results. When +subclassing, do not forget to call the base class constructor. +""" def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi index 31c830e8268a7..c7c108348c568 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi @@ -1,3 +1,5 @@ +"""Various utility functions. +""" from collections.abc import MutableSequence, Sequence from typing import Any, Final, TypeVar from typing_extensions import TypeAlias @@ -16,8 +18,27 @@ def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... def safe_repr(obj: object, short: bool = False) -> str: ... def strclass(cls: type) -> str: ... -def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... -def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: ... -def three_way_cmp(x: Any, y: Any) -> int: ... -def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... -def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... +def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: + """Finds elements in only one or the other of two, sorted input lists. + +Returns a two-element tuple of lists. The first list contains those +elements in the "expected" list but not in the "actual" list, and the +second contains those elements in the "actual" list but not in the +"expected" list. Duplicate elements in either input list are ignored. +""" +def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: + """Same behavior as sorted_list_difference but +for lists of unorderable items (like dicts). + +As it does a linear search per item (remove) it +has O(n*n) performance. +""" +def three_way_cmp(x: Any, y: Any) -> int: + """Return -1 if x < y, 0 if x == y and 1 if x > y +""" +def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: + """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ +""" +def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: + """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi index 2173d7e6efaa5..62807408047fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi @@ -1,3 +1,15 @@ +"""Exception classes raised by urllib. + +The base exception class is URLError, which inherits from OSError. It +doesn't define any behavior of its own, but is the base class for all +exceptions defined in this package. + +HTTPError is an exception class that is also a valid HTTP response +instance. 
It behaves this way because HTTP protocol errors are valid +responses, with a status code, headers, and a body. In some contexts, +an application may want to handle an exception like a regular +response. +""" from email.message import Message from typing import IO from urllib.response import addinfourl @@ -11,6 +23,8 @@ class URLError(OSError): def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): + """Raised when HTTP error occurs, but also acts like non-error return +""" @property def headers(self) -> Message: ... @headers.setter @@ -24,5 +38,7 @@ class HTTPError(URLError, addinfourl): def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... class ContentTooShortError(URLError): + """Exception raised when downloaded size does not match content-length. +""" content: tuple[str, Message] def __init__(self, message: str, content: tuple[str, Message]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi index 364892ecdf698..aa3db84636684 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi @@ -1,3 +1,35 @@ +"""Parse (absolute and relative) URLs. + +urlparse module is based upon the following RFC specifications. + +RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding +and L. Masinter, January 2005. + +RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter +and L.Masinter, December 1999. + +RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T. +Berners-Lee, R. Fielding, and L. Masinter, August 1998. + +RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zawinski, July 1998. + +RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June +1995. + +RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M. +McCahill, December 1994 + +RFC 3986 is considered the current standard and any future changes to +urlparse module should conform with it. The urlparse module is +currently not entirely compliant with this RFC due to defacto +scenarios for parsing, and for backward compatibility purposes, some +parsing quirks from older RFCs are retained. The testcases in +test_urlparse.py provides a good indicator of parsing behavior. + +The WHATWG URL Parser spec should also be considered. We are not compliant with +it either due to existing user code API behavior expectations (Hyrum's Law). +It serves as a useful guide when making changes. +""" import sys from collections.abc import Iterable, Mapping, Sequence from types import GenericAlias @@ -39,14 +71,20 @@ if sys.version_info < (3, 11): MAX_CACHE_SIZE: Final[int] class _ResultMixinStr: + """Standard approach to encoding parsed results from str to bytes +""" __slots__ = () def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... class _ResultMixinBytes: + """Standard approach to decoding parsed results from bytes to str +""" __slots__ = () def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): + """Shared methods for the parsed result objects containing a netloc element +""" __slots__ = () @property def username(self) -> AnyStr | None: ... 
@@ -56,7 +94,11 @@ class _NetlocResultMixinBase(Generic[AnyStr]): def hostname(self) -> AnyStr | None: ... @property def port(self) -> int | None: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """Represent a PEP 585 generic type + +E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). +""" class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): __slots__ = () @@ -65,10 +107,22 @@ class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): __slots__ = () class _DefragResultBase(NamedTuple, Generic[AnyStr]): + """ +DefragResult(url, fragment) + +A 2-tuple that contains the url without fragment identifier and the fragment +identifier as a separate argument. +""" url: AnyStr fragment: AnyStr class _SplitResultBase(NamedTuple, Generic[AnyStr]): + """ +SplitResult(scheme, netloc, path, query, fragment) + +A 5-tuple that contains the different components of a URL. Similar to +ParseResult, but does not split params. +""" scheme: AnyStr netloc: AnyStr path: AnyStr @@ -76,6 +130,11 @@ class _SplitResultBase(NamedTuple, Generic[AnyStr]): fragment: AnyStr class _ParseResultBase(NamedTuple, Generic[AnyStr]): + """ +ParseResult(scheme, netloc, path, params, query, fragment) + +A 6-tuple that contains components of a parsed URL. +""" scheme: AnyStr netloc: AnyStr path: AnyStr @@ -111,7 +170,35 @@ def parse_qs( errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", -) -> dict[AnyStr, list[AnyStr]]: ... +) -> dict[AnyStr, list[AnyStr]]: + """Parse a query given as a string argument. + +Arguments: + +qs: percent-encoded query string to be parsed + +keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. + +strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. + +encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + +max_num_fields: int. If set, then throws a ValueError if there + are more than n fields read by parse_qsl(). + +separator: str. The symbol to use for separating the query arguments. + Defaults to &. + +Returns a dictionary. +""" def parse_qsl( qs: AnyStr | None, keep_blank_values: bool = False, @@ -120,21 +207,116 @@ def parse_qsl( errors: str = "replace", max_num_fields: int | None = None, separator: str = "&", -) -> list[tuple[AnyStr, AnyStr]]: ... +) -> list[tuple[AnyStr, AnyStr]]: + """Parse a query given as a string argument. + +Arguments: + +qs: percent-encoded query string to be parsed + +keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as blank + strings. The default false value indicates that blank values + are to be ignored and treated as if they were not included. + +strict_parsing: flag indicating what to do with parsing errors. If + false (the default), errors are silently ignored. If true, + errors raise a ValueError exception. 
+ +encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. + +max_num_fields: int. If set, then throws a ValueError + if there are more than n fields read by parse_qsl(). + +separator: str. The symbol to use for separating the query arguments. + Defaults to &. + +Returns a list, as G-d intended. +""" @overload -def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: + """quote('abc def') -> 'abc%20def' + +Each part of a URL, e.g. the path info, the query, etc., has a +different set of reserved characters that must be quoted. The +quote function offers a cautious (not minimal) way to quote a +string for most of these parts. + +RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists +the following (un)reserved characters. + +unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +reserved = gen-delims / sub-delims +gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" +sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + / "*" / "+" / "," / ";" / "=" + +Each of the reserved characters is reserved in some component of a URL, +but not necessarily in all of them. + +The quote function %-escapes all characters that are neither in the +unreserved chars ("always safe") nor the additional chars set via the +safe arg. + +The default for the safe arg is '/'. The character is reserved, but in +typical usage the quote function is being called on a path where the +existing slash characters are to be preserved. + +Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. +Now, "~" is included in the set of unreserved characters. + +string and safe may be either str or bytes objects. encoding and errors +must not be specified if string is a bytes object. + +The optional encoding and errors parameters specify how to deal with +non-ASCII characters, as accepted by the str.encode method. +By default, encoding='utf-8' (characters are encoded with UTF-8), and +errors='strict' (unsupported characters raise a UnicodeEncodeError). +""" @overload def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... -def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: + """Like quote(), but accepts a bytes object rather than a str, and does +not perform string-to-bytes encoding. It always returns an ASCII string. +quote_from_bytes(b'abc def?') -> 'abc%20def%3f' +""" @overload -def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: + """Like quote(), but also replace ' ' with '+', as required for quoting +HTML form values. Plus signs in the original string are escaped unless +they are included in safe. It also does not have safe default to '/'. +""" @overload def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... -def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... -def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... -def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... 
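A compact sketch of the quoting and query-string helpers whose docstrings appear above (quote, quote_plus, unquote_plus, parse_qsl, urlencode); the query values are illustrative assumptions:

from urllib.parse import parse_qsl, quote, quote_plus, unquote_plus, urlencode

assert quote("abc def") == "abc%20def"      # '/' is safe by default and stays unescaped
assert quote_plus("a/b c") == "a%2Fb+c"     # quote_plus escapes '/' and uses '+' for spaces
assert unquote_plus("a%2Fb+c") == "a/b c"

query = urlencode({"q": "python urllib", "page": 2})
assert parse_qsl(query) == [("q", "python urllib"), ("page", "2")]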
+def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: + """Replace %xx escapes by their single-character equivalent. The optional +encoding and errors parameters specify how to decode percent-encoded +sequences into Unicode characters, as accepted by the bytes.decode() +method. +By default, percent-encoded sequences are decoded with UTF-8, and invalid +sequences are replaced by a placeholder character. + +unquote('abc%20def') -> 'abc def'. +""" +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: + """unquote_to_bytes('abc%20def') -> b'abc def'. +""" +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: + """Like unquote(), but also replace plus signs by spaces, as required for +unquoting HTML form values. + +unquote_plus('%7e/abc+def') -> '~/abc def' +""" @overload -def urldefrag(url: str) -> DefragResult: ... +def urldefrag(url: str) -> DefragResult: + """Removes any existing fragment from URL. + +Returns a tuple of the defragmented URL and the fragment. If +the URL contained no fragments, the second element is the +empty string. +""" @overload def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... @@ -164,38 +346,146 @@ def urlencode( encoding: str | None = None, errors: str | None = None, quote_via: _QuoteVia = ..., -) -> str: ... -def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... +) -> str: + """Encode a dict or sequence of two-element tuples into a URL query string. + +If any values in the query arg are sequences and doseq is true, each +sequence element is converted to a separate parameter. + +If the query arg is a sequence of two-element tuples, the order of the +parameters in the output will match the order of parameters in the +input. + +The components of a query arg may each be either a string or a bytes type. + +The safe, encoding, and errors parameters are passed down to the function +specified by quote_via (encoding and errors only if a component is a str). +""" +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: + """Join a base URL and a possibly relative URL to form an absolute +interpretation of the latter. +""" @overload -def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: + """Parse a URL into 6 components: +:///;?# + +The result is a named 6-tuple with fields corresponding to the +above. It is either a ParseResult or ParseResultBytes object, +depending on the type of the url parameter. + +The username, password, hostname, and port sub-components of netloc +can also be accessed as attributes of the returned object. + +The scheme argument provides the default value of the scheme +component when no scheme is found in url. + +If allow_fragments is False, no attempt is made to separate the +fragment component from the previous component, which can be either +path or query. + +Note that % escapes are not expanded. +""" @overload def urlparse( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> ParseResultBytes: ... @overload -def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... 
+def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: + """Parse a URL into 5 components: +:///?# + +The result is a named 5-tuple with fields corresponding to the +above. It is either a SplitResult or SplitResultBytes object, +depending on the type of the url parameter. + +The username, password, hostname, and port sub-components of netloc +can also be accessed as attributes of the returned object. + +The scheme argument provides the default value of the scheme +component when no scheme is found in url. + +If allow_fragments is False, no attempt is made to separate the +fragment component from the previous component, which can be either +path or query. + +Note that % escapes are not expanded. +""" if sys.version_info >= (3, 11): @overload def urlsplit( url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True - ) -> SplitResultBytes: ... + ) -> SplitResultBytes: + """Parse a URL into 5 components: +:///?# + +The result is a named 5-tuple with fields corresponding to the +above. It is either a SplitResult or SplitResultBytes object, +depending on the type of the url parameter. + +The username, password, hostname, and port sub-components of netloc +can also be accessed as attributes of the returned object. + +The scheme argument provides the default value of the scheme +component when no scheme is found in url. + +If allow_fragments is False, no attempt is made to separate the +fragment component from the previous component, which can be either +path or query. + +Note that % escapes are not expanded. +""" else: @overload def urlsplit( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True - ) -> SplitResultBytes: ... + ) -> SplitResultBytes: + """Parse a URL into 5 components: + :///?# + + The result is a named 5-tuple with fields corresponding to the + above. It is either a SplitResult or SplitResultBytes object, + depending on the type of the url parameter. + + The username, password, hostname, and port sub-components of netloc + can also be accessed as attributes of the returned object. + + The scheme argument provides the default value of the scheme + component when no scheme is found in url. + + If allow_fragments is False, no attempt is made to separate the + fragment component from the previous component, which can be either + path or query. + + Note that % escapes are not expanded. + """ # Requires an iterable of length 6 @overload -def urlunparse(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] +def urlunparse(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] + """Put a parsed URL back together again. This may result in a +slightly different, but equivalent URL, if the URL that was parsed +originally had redundant delimiters, e.g. a ? with an empty query +(the draft states that these are equivalent). +""" @overload def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... # Requires an iterable of length 5 @overload -def urlunsplit(components: Iterable[None]) -> Literal[b""]: ... # type: ignore[overload-overlap] +def urlunsplit(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] + """Combine the elements of a tuple as returned by urlsplit() into a +complete URL as a string. The data argument can be any five-item iterable. +This may result in a slightly different, but equivalent URL, if the URL that +was parsed originally had unnecessary delimiters (for example, a ? 
with an +empty query; the RFC states that these are equivalent). +""" @overload def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... -def unwrap(url: str) -> str: ... +def unwrap(url: str) -> str: + """Transform a string like '' into 'scheme://host/path'. + +The string is returned unchanged if it's not a wrapped URL. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi index 876b9d3f165cd..76c3571f9295f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi @@ -1,3 +1,71 @@ +"""An extensible library for opening URLs using a variety of protocols + +The simplest way to use this module is to call the urlopen function, +which accepts a string containing a URL or a Request object (described +below). It opens the URL and returns the results as file-like +object; the returned object has some extra methods described below. + +The OpenerDirector manages a collection of Handler objects that do +all the actual work. Each Handler implements a particular protocol or +option. The OpenerDirector is a composite object that invokes the +Handlers needed to open the requested URL. For example, the +HTTPHandler performs HTTP GET and POST requests and deals with +non-error returns. The HTTPRedirectHandler automatically deals with +HTTP 301, 302, 303, 307, and 308 redirect errors, and the +HTTPDigestAuthHandler deals with digest authentication. + +urlopen(url, data=None) -- Basic usage is the same as original +urllib. pass the url and optionally data to post to an HTTP URL, and +get a file-like object back. One difference is that you can also pass +a Request instance instead of URL. Raises a URLError (subclass of +OSError); for HTTP errors, raises an HTTPError, which can also be +treated as a valid response. + +build_opener -- Function that creates a new OpenerDirector instance. +Will install the default handlers. Accepts one or more Handlers as +arguments, either instances or Handler classes that it will +instantiate. If one of the argument is a subclass of the default +handler, the argument will be installed instead of the default. + +install_opener -- Installs a new opener as the default opener. + +objects of interest: + +OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages +the Handler classes, while dealing with requests and responses. + +Request -- An object that encapsulates the state of a request. The +state can be as simple as the URL. It can also include extra HTTP +headers, e.g. a User-Agent. 
+ +BaseHandler -- + +internals: +BaseHandler and parent +_call_chain conventions + +Example usage: + +import urllib.request + +# set up authentication info +authinfo = urllib.request.HTTPBasicAuthHandler() +authinfo.add_password(realm='PDQ Application', + uri='https://mahler:8092/site-updates.py', + user='klem', + passwd='geheim$parole') + +proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"}) + +# build a new opener that adds authentication and caching FTP handlers +opener = urllib.request.build_opener(proxy_support, authinfo, + urllib.request.CacheFTPHandler) + +# install it +urllib.request.install_opener(opener) + +f = urllib.request.urlopen('https://www.python.org/') +""" import ssl import sys from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead @@ -55,7 +123,47 @@ _DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | if sys.version_info >= (3, 13): def urlopen( url: str | Request, data: _DataType | None = None, timeout: float | None = ..., *, context: ssl.SSLContext | None = None - ) -> _UrlopenRet: ... + ) -> _UrlopenRet: + """Open the URL url, which can be either a string or a Request object. + +*data* must be an object specifying additional data to be sent to +the server, or None if no such data is needed. See Request for +details. + +urllib.request module uses HTTP/1.1 and includes a "Connection:close" +header in its HTTP requests. + +The optional *timeout* parameter specifies a timeout in seconds for +blocking operations like the connection attempt (if not specified, the +global default timeout setting will be used). This only works for HTTP, +HTTPS and FTP connections. + +If *context* is specified, it must be a ssl.SSLContext instance describing +the various SSL options. See HTTPSConnection for more details. + + +This function always returns an object which can work as a +context manager and has the properties url, headers, and status. +See urllib.response.addinfourl for more detail on these properties. + +For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse +object slightly modified. In addition to the three new methods above, the +msg attribute contains the same information as the reason attribute --- +the reason phrase returned by the server --- instead of the response +headers as it is specified in the documentation for HTTPResponse. + +For FTP, file, and data URLs, this function returns a +urllib.response.addinfourl object. + +Note that None may be returned if no handler handles the request (though +the default installed global OpenerDirector uses UnknownHandler to ensure +this never happens). + +In addition, if proxy settings are detected (for example, when a *_proxy +environment variable like http_proxy is set), ProxyHandler is default +installed and makes sure the requests are handled through the proxy. + +""" else: def urlopen( @@ -67,32 +175,134 @@ else: capath: str | None = None, cadefault: bool = False, context: ssl.SSLContext | None = None, - ) -> _UrlopenRet: ... + ) -> _UrlopenRet: + """Open the URL url, which can be either a string or a Request object. + + *data* must be an object specifying additional data to be sent to + the server, or None if no such data is needed. See Request for + details. + + urllib.request module uses HTTP/1.1 and includes a "Connection:close" + header in its HTTP requests. 
+ + The optional *timeout* parameter specifies a timeout in seconds for + blocking operations like the connection attempt (if not specified, the + global default timeout setting will be used). This only works for HTTP, + HTTPS and FTP connections. + + If *context* is specified, it must be a ssl.SSLContext instance describing + the various SSL options. See HTTPSConnection for more details. + + The optional *cafile* and *capath* parameters specify a set of trusted CA + certificates for HTTPS requests. cafile should point to a single file + containing a bundle of CA certificates, whereas capath should point to a + directory of hashed certificate files. More information can be found in + ssl.SSLContext.load_verify_locations(). + + The *cadefault* parameter is ignored. + + + This function always returns an object which can work as a + context manager and has the properties url, headers, and status. + See urllib.response.addinfourl for more detail on these properties. + + For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse + object slightly modified. In addition to the three new methods above, the + msg attribute contains the same information as the reason attribute --- + the reason phrase returned by the server --- instead of the response + headers as it is specified in the documentation for HTTPResponse. + + For FTP, file, and data URLs and requests explicitly handled by legacy + URLopener and FancyURLopener classes, this function returns a + urllib.response.addinfourl object. + + Note that None may be returned if no handler handles the request (though + the default installed global OpenerDirector uses UnknownHandler to ensure + this never happens). + + In addition, if proxy settings are detected (for example, when a *_proxy + environment variable like http_proxy is set), ProxyHandler is default + installed and makes sure the requests are handled through the proxy. + + """ def install_opener(opener: OpenerDirector) -> None: ... -def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... +def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: + """Create an opener object from a list of handlers. + +The opener will use several default handlers, including support +for HTTP, FTP and when applicable HTTPS. + +If any of the handlers passed as arguments are subclasses of the +default handlers, the default handlers will not be used. +""" if sys.version_info >= (3, 14): - def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: ... - def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: ... + def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: + """Convert the given file URL to a local file system path. + +The 'file:' scheme prefix must be omitted unless *require_scheme* +is set to true. + +The URL authority may be resolved with gethostbyname() if +*resolve_host* is set to true. +""" + def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: + """Convert the given local file system path to a file URL. + +The 'file:' scheme prefix is omitted unless *add_scheme* +is set to true. +""" else: if sys.platform == "win32": from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname else: - def url2pathname(pathname: str) -> str: ... - def pathname2url(pathname: str) -> str: ... - -def getproxies() -> dict[str, str]: ... -def getproxies_environment() -> dict[str, str]: ... 
-def parse_http_list(s: str) -> list[str]: ... -def parse_keqv_list(l: list[str]) -> dict[str, str]: ... + def url2pathname(pathname: str) -> str: + """OS-specific conversion from a relative URL of the 'file' scheme +to a file system path; not recommended for general use. +""" + def pathname2url(pathname: str) -> str: + """OS-specific conversion from a file system path to a relative URL +of the 'file' scheme; not recommended for general use. +""" + +def getproxies() -> dict[str, str]: + """Return a dictionary of scheme -> proxy server URL mappings. + +Scan the environment for variables named _proxy; +this seems to be the standard convention. +""" +def getproxies_environment() -> dict[str, str]: + """Return a dictionary of scheme -> proxy server URL mappings. + +Scan the environment for variables named _proxy; +this seems to be the standard convention. +""" +def parse_http_list(s: str) -> list[str]: + """Parse lists as described by RFC 2068 Section 2. + +In particular, parse comma-separated lists where the elements of +the list may include quoted-strings. A quoted-string could +contain a comma. A non-quoted string could have quotes in the +middle. Neither commas nor quotes count if they are escaped. +Only double-quotes count, not single-quotes. +""" +def parse_keqv_list(l: list[str]) -> dict[str, str]: + """Parse list of key=value strings where keys are not duplicated. +""" if sys.platform == "win32" or sys.platform == "darwin": def proxy_bypass(host: str) -> Any: ... # undocumented else: - def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: # undocumented + """Test if proxies should not be used for a particular host. + +Checks the proxy dict for the value of no_proxy, which should +be a list of comma separated DNS suffixes, or '*' for all hosts. + +""" class Request: @property @@ -120,7 +330,9 @@ class Request: unverifiable: bool = False, method: str | None = None, ) -> None: ... - def get_method(self) -> str: ... + def get_method(self) -> str: + """Return a string indicating the HTTP request method. +""" def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... @@ -159,7 +371,16 @@ class HTTPRedirectHandler(BaseHandler): inf_msg: ClassVar[str] # undocumented def redirect_request( self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage, newurl: str - ) -> Request | None: ... + ) -> Request | None: + """Return a Request or None in response to a redirect. + +This is called by the http_error_30x methods when a +redirection response is received. If a redirection should +take place, return a new Request to allow http_error_30x to +perform the redirect. Otherwise, raise HTTPError if no-one +else should try to handle this url. Return None if you can't +but another Handler might. +""" def http_error_301(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -185,8 +406,14 @@ class ProxyHandler(BaseHandler): class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... 
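A small usage sketch of the Request/urlopen surface documented above (illustrative only, not part of the stubs; the URL, payload, and header values are assumed):

    import urllib.request

    req = urllib.request.Request(
        "https://example.org/api",
        data=b'{"ping": true}',
        headers={"Content-Type": "application/json"},
    )
    # get_method() infers POST when data is supplied and no explicit method is set.
    assert req.get_method() == "POST"
    req.add_header("User-Agent", "stub-example/1.0")

    # urlopen() accepts either a URL string or a Request and returns a file-like
    # response object (left commented out here to keep the sketch network-free).
    # with urllib.request.urlopen(req, timeout=10) as resp:
    #     body = resp.read()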
def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... - def is_suburi(self, base: str, test: str) -> bool: ... # undocumented - def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: ... # undocumented + def is_suburi(self, base: str, test: str) -> bool: # undocumented + """Check if test is below base in a URI tree + +Both args must be URIs in reduced form. +""" + def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: # undocumented + """Accept authority or URI and extract only the authority and path. +""" class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -230,6 +457,11 @@ class AbstractDigestAuthHandler: def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + """An authentication protocol defined by RFC 2069 + +Digest authentication improves on basic authentication because it +does not transmit passwords in the clear. +""" auth_header: ClassVar[str] # undocumented def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -258,7 +490,11 @@ class AbstractHTTPHandler(BaseHandler): # undocumented def set_http_debuglevel(self, level: int) -> None: ... def do_request_(self, request: Request) -> Request: ... - def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... + def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: + """Return an HTTPResponse object for the request, using http_class. + +http_class must implement the HTTPConnection API from http.client. +""" class HTTPHandler(AbstractHTTPHandler): def http_open(self, req: Request) -> HTTPResponse: ... @@ -287,6 +523,8 @@ class DataHandler(BaseHandler): def data_open(self, req: Request) -> addinfourl: ... class ftpwrapper: # undocumented + """Class used by open_ftp() for cache of open FTP connections. +""" def __init__( self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True ) -> None: ... @@ -313,6 +551,8 @@ class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> NoReturn: ... class HTTPErrorProcessor(BaseHandler): + """Process HTTP error responses. +""" def http_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... def https_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... @@ -321,61 +561,130 @@ def urlretrieve( filename: StrOrBytesPath | None = None, reporthook: Callable[[int, int, int], object] | None = None, data: _DataType = None, -) -> tuple[str, HTTPMessage]: ... -def urlcleanup() -> None: ... +) -> tuple[str, HTTPMessage]: + """ +Retrieve a URL into a temporary location on disk. + +Requires a URL argument. If a filename is passed, it is used as +the temporary file location. The reporthook argument should be +a callable that accepts a block number, a read size, and the +total file size of the URL target. The data argument should be +valid URL encoded data. + +If a filename is passed and the URL points to a local resource, +the result is a copy from local file to new file. + +Returns a tuple containing the path to the newly created +data file as well as the resulting HTTPMessage object. 
+""" +def urlcleanup() -> None: + """Clean up temporary files from urlretrieve calls. +""" if sys.version_info < (3, 14): @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class URLopener: + """Class to open URLs. +This is a class rather than just a subroutine because we may need +more than one set of global protocol-specific options. +Note -- this is a base class for those who don't want the +automatic handling of errors type 302 (relocated) and 401 +(authorization needed). +""" version: ClassVar[str] def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... - def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: + """Use URLopener().open(file) instead of open(file, 'r'). +""" + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: + """Overridable interface to open unknown URL type. +""" def retrieve( self, url: str, filename: str | None = None, reporthook: Callable[[int, int, int], object] | None = None, data: ReadableBuffer | None = None, - ) -> tuple[str, Message | None]: ... - def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented + ) -> tuple[str, Message | None]: + """retrieve(url) returns (filename, headers) for a local object +or (tempfilename, headers) for a remote object. +""" + def addheader(self, *args: tuple[str, str]) -> None: # undocumented + """Add a header to be used by the HTTP interface only +e.g. u.addheader('Accept', 'sound/basic') +""" def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None - ) -> _UrlopenRet: ... # undocumented + ) -> _UrlopenRet: # undocumented + """Handle http errors. + +Derived class can override this, or provide specific handlers +named http_error_DDD where DDD is the 3-digit error code. +""" def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... # undocumented - def open_file(self, url: str) -> addinfourl: ... # undocumented - def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented - def open_local_file(self, url: str) -> addinfourl: ... # undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented + ) -> _UrlopenRet: # undocumented + """Default error handler: close the connection and raise OSError. +""" + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: # undocumented + """Use "data" URL. +""" + def open_file(self, url: str) -> addinfourl: # undocumented + """Use local file or FTP depending on form of URL. +""" + def open_ftp(self, url: str) -> addinfourl: # undocumented + """Use FTP protocol. +""" + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented + """Use HTTP protocol. 
+""" + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented + """Use HTTPS protocol. +""" + def open_local_file(self, url: str) -> addinfourl: # undocumented + """Use local file. +""" + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: # undocumented + """Overridable interface to open unknown URL type. +""" def __del__(self) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class FancyURLopener(URLopener): - def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... + """Derived class with handlers for errors we can handle (perhaps). +""" + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: + """Override this in a GUI environment! +""" def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented def http_error_301( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented + ) -> _UrlopenRet | addinfourl | None: # undocumented + """Error 301 -- also relocated (permanently). +""" def http_error_302( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented + ) -> _UrlopenRet | addinfourl | None: # undocumented + """Error 302 -- relocated (temporarily). +""" def http_error_303( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented + ) -> _UrlopenRet | addinfourl | None: # undocumented + """Error 303 -- also relocated (essentially identical to 302). +""" def http_error_307( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented + ) -> _UrlopenRet | addinfourl | None: # undocumented + """Error 307 -- relocated, but turn POST into error. +""" if sys.version_info >= (3, 11): def http_error_308( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None - ) -> _UrlopenRet | addinfourl | None: ... # undocumented + ) -> _UrlopenRet | addinfourl | None: # undocumented + """Error 308 -- relocated, but turn POST into error. +""" def http_error_401( self, @@ -386,7 +695,10 @@ if sys.version_info < (3, 14): headers: HTTPMessage, data: ReadableBuffer | None = None, retry: bool = False, - ) -> _UrlopenRet | None: ... # undocumented + ) -> _UrlopenRet | None: # undocumented + """Error 401 -- authentication required. +This function supports Basic authentication only. +""" def http_error_407( self, url: str, @@ -396,10 +708,15 @@ if sys.version_info < (3, 14): headers: HTTPMessage, data: ReadableBuffer | None = None, retry: bool = False, - ) -> _UrlopenRet | None: ... # undocumented + ) -> _UrlopenRet | None: # undocumented + """Error 407 -- proxy authentication required. +This function supports Basic authentication only. +""" def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage - ) -> addinfourl: ... # undocumented + ) -> addinfourl: # undocumented + """Default error handling -- don't raise an exception. 
+""" def redirect_internal( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None ) -> _UrlopenRet | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi index 65df9cdff58ff..7888caa979ff3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi @@ -1,3 +1,10 @@ +"""Response classes used by urllib. + +The base class, addbase, defines a minimal file-like interface, +including read() and readline(). The typical response object is an +addinfourl instance, which defines an info() method that returns +headers and a geturl() method that returns the url. +""" import tempfile from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable @@ -8,6 +15,8 @@ from typing import IO, Any __all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(tempfile._TemporaryFileWrapper[bytes]): + """Base class for addinfo and addclosehook. Is a good idea for garbage collection. +""" fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... def __exit__( @@ -21,16 +30,22 @@ class addbase(tempfile._TemporaryFileWrapper[bytes]): def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): + """Class to add a close hook to an open file. +""" closehook: Callable[..., object] hookargs: tuple[Any, ...] def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... class addinfo(addbase): + """class to add an info() method to an open file. +""" headers: Message def __init__(self, fp: IO[bytes], headers: Message) -> None: ... def info(self) -> Message: ... class addinfourl(addinfo): + """class to add info() and geturl() methods to an open file. +""" url: str code: int | None @property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi index 14ceef550dab6..987ef185c9c79 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi @@ -1,20 +1,58 @@ +"""robotparser.py + +Copyright (C) 2000 Bastian Kleineidam + +You can choose between two licenses when using this package: +1) GNU GPLv2 +2) PSF license for Python 2.2 + +The robots.txt Exclusion Protocol is implemented as specified in +http://www.robotstxt.org/norobots-rfc.txt +""" from collections.abc import Iterable from typing import NamedTuple __all__ = ["RobotFileParser"] class RequestRate(NamedTuple): + """RequestRate(requests, seconds) +""" requests: int seconds: int class RobotFileParser: + """This class provides a set of methods to read, parse and answer +questions about a single robots.txt file. + +""" def __init__(self, url: str = "") -> None: ... - def set_url(self, url: str) -> None: ... - def read(self) -> None: ... - def parse(self, lines: Iterable[str]) -> None: ... - def can_fetch(self, useragent: str, url: str) -> bool: ... - def mtime(self) -> int: ... - def modified(self) -> None: ... + def set_url(self, url: str) -> None: + """Sets the URL referring to a robots.txt file. +""" + def read(self) -> None: + """Reads the robots.txt URL and feeds it to the parser. +""" + def parse(self, lines: Iterable[str]) -> None: + """Parse the input lines from a robots.txt file. 
+ +We allow that a user-agent: line is not preceded by +one or more blank lines. +""" + def can_fetch(self, useragent: str, url: str) -> bool: + """using the parsed robots.txt decide if useragent can fetch url +""" + def mtime(self) -> int: + """Returns the time the robots.txt file was last fetched. + +This is useful for long-running web spiders that need to +check for new robots.txt files periodically. + +""" + def modified(self) -> None: + """Sets the time the robots.txt file was last fetched to the +current time. + +""" def crawl_delay(self, useragent: str) -> str | None: ... def request_rate(self, useragent: str) -> RequestRate | None: ... def site_maps(self) -> list[str] | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi index 324053e04337c..d418aeea4eb4c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi @@ -1,3 +1,8 @@ +"""Implementation of the UUencode and UUdecode functions. + +encode(in_file, out_file [,name, mode], *, backtick=False) +decode(in_file [, out_file, mode, quiet]) +""" from typing import BinaryIO from typing_extensions import TypeAlias @@ -9,5 +14,9 @@ class Error(Exception): ... def encode( in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False -) -> None: ... -def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... +) -> None: + """Uuencode file +""" +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: + """Decode uuencoded file +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi index 303fb10eaf537..022179c4ddce3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi @@ -1,3 +1,60 @@ +"""UUID objects (universally unique identifiers) according to RFC 4122/9562. + +This module provides immutable UUID objects (class UUID) and functions for +generating UUIDs corresponding to a specific UUID version as specified in +RFC 4122/9562, e.g., uuid1() for UUID version 1, uuid3() for UUID version 3, +and so on. + +Note that UUID version 2 is deliberately omitted as it is outside the scope +of the RFC. + +If all you want is a unique ID, you should probably call uuid1() or uuid4(). +Note that uuid1() may compromise privacy since it creates a UUID containing +the computer's network address. uuid4() creates a random UUID. 
+ +Typical usage: + + >>> import uuid + + # make a UUID based on the host ID and current time + >>> uuid.uuid1() # doctest: +SKIP + UUID('a8098c1a-f86e-11da-bd1a-00112444be1e') + + # make a UUID using an MD5 hash of a namespace UUID and a name + >>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') + UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e') + + # make a random UUID + >>> uuid.uuid4() # doctest: +SKIP + UUID('16fd2706-8baf-433b-82eb-8c7fada847da') + + # make a UUID using a SHA-1 hash of a namespace UUID and a name + >>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') + UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d') + + # make a UUID from a string of hex digits (braces and hyphens ignored) + >>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') + + # convert a UUID to a string of hex digits in standard form + >>> str(x) + '00010203-0405-0607-0809-0a0b0c0d0e0f' + + # get the raw 16 bytes of the UUID + >>> x.bytes + b'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f' + + # make a UUID from a 16-byte string + >>> uuid.UUID(bytes=x.bytes) + UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') + + # get the Nil UUID + >>> uuid.NIL + UUID('00000000-0000-0000-0000-000000000000') + + # get the Max UUID + >>> uuid.MAX + UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') +""" import builtins import sys from enum import Enum @@ -7,11 +64,73 @@ from typing_extensions import LiteralString, TypeAlias _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] class SafeUUID(Enum): + """An enumeration. +""" safe = 0 unsafe = -1 unknown = None class UUID: + """Instances of the UUID class represent UUIDs as specified in RFC 4122. +UUID objects are immutable, hashable, and usable as dictionary keys. +Converting a UUID to a string with str() yields something in the form +'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts +five possible forms: a similar string of hexadecimal digits, or a tuple +of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and +48-bit values respectively) as an argument named 'fields', or a string +of 16 bytes (with all the integer fields in big-endian order) as an +argument named 'bytes', or a string of 16 bytes (with the first three +fields in little-endian order) as an argument named 'bytes_le', or a +single 128-bit integer as an argument named 'int'. + +UUIDs have these read-only attributes: + + bytes the UUID as a 16-byte string (containing the six + integer fields in big-endian byte order) + + bytes_le the UUID as a 16-byte string (with time_low, time_mid, + and time_hi_version in little-endian byte order) + + fields a tuple of the six integer fields of the UUID, + which are also available as six individual attributes + and two derived attributes. Those attributes are not + always relevant to all UUID versions: + + The 'time_*' attributes are only relevant to version 1. + + The 'clock_seq*' and 'node' attributes are only relevant + to versions 1 and 6. + + The 'time' attribute is only relevant to versions 1, 6 + and 7. 
+ + time_low the first 32 bits of the UUID + time_mid the next 16 bits of the UUID + time_hi_version the next 16 bits of the UUID + clock_seq_hi_variant the next 8 bits of the UUID + clock_seq_low the next 8 bits of the UUID + node the last 48 bits of the UUID + + time the 60-bit timestamp for UUIDv1/v6, + or the 48-bit timestamp for UUIDv7 + clock_seq the 14-bit sequence number + + hex the UUID as a 32-character hexadecimal string + + int the UUID as a 128-bit integer + + urn the UUID as a URN as specified in RFC 4122/9562 + + variant the UUID variant (one of the constants RESERVED_NCS, + RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) + + version the UUID version number (1 through 8, meaningful only + when the variant is RFC_4122) + + is_safe An enum indicating whether the UUID has been generated in + a way that is safe for multiprocessing applications, via + uuid_generate_time_safe(3). +""" __slots__ = ("int", "is_safe", "__weakref__") def __init__( self, @@ -23,7 +142,35 @@ class UUID: version: builtins.int | None = None, *, is_safe: SafeUUID = SafeUUID.unknown, - ) -> None: ... + ) -> None: + """Create a UUID from either a string of 32 hexadecimal digits, +a string of 16 bytes as the 'bytes' argument, a string of 16 bytes +in little-endian order as the 'bytes_le' argument, a tuple of six +integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, +8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as +the 'fields' argument, or a single 128-bit integer as the 'int' +argument. When a string of hex digits is given, curly braces, +hyphens, and a URN prefix are all optional. For example, these +expressions all yield the same UUID: + +UUID('{12345678-1234-5678-1234-567812345678}') +UUID('12345678123456781234567812345678') +UUID('urn:uuid:12345678-1234-5678-1234-567812345678') +UUID(bytes='\\x12\\x34\\x56\\x78'*4) +UUID(bytes_le='\\x78\\x56\\x34\\x12\\x34\\x12\\x78\\x56' + + '\\x12\\x34\\x56\\x78\\x12\\x34\\x56\\x78') +UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) +UUID(int=0x12345678123456781234567812345678) + +Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must +be given. The 'version' argument is optional; if given, the resulting +UUID will have its variant and version set according to RFC 4122, +overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. + +is_safe is an enum exposed as an attribute on the instance. It +indicates whether the UUID has been generated in a way that is safe +for multiprocessing applications, via uuid_generate_time_safe(3). +""" @property def is_safe(self) -> SafeUUID: ... @property @@ -66,27 +213,69 @@ class UUID: def __ge__(self, other: UUID) -> bool: ... def __hash__(self) -> builtins.int: ... -def getnode() -> int: ... -def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: ... +def getnode() -> int: + """Get the hardware address as a 48-bit positive integer. + +The first time this runs, it may launch a separate program, which could +be quite slow. If all attempts to obtain the hardware address fail, we +choose a random 48-bit number with its eighth bit set to 1 as recommended +in RFC 4122. +""" +def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: + """Generate a UUID from a host ID, sequence number, and the current time. +If 'node' is not given, getnode() is used to obtain the hardware +address. If 'clock_seq' is given, it is used as the sequence number; +otherwise a random 14-bit sequence number is chosen. 
+""" if sys.version_info >= (3, 14): - def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: ... - def uuid7() -> UUID: ... - def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: ... + def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: + """Similar to :func:`uuid1` but where fields are ordered differently +for improved DB locality. + +More precisely, given a 60-bit timestamp value as specified for UUIDv1, +for UUIDv6 the first 48 most significant bits are stored first, followed +by the 4-bit version (same position), followed by the remaining 12 bits +of the original 60-bit timestamp. +""" + def uuid7() -> UUID: + """Generate a UUID from a Unix timestamp in milliseconds and random bits. + +UUIDv7 objects feature monotonicity within a millisecond. +""" + def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: + """Generate a UUID from three custom blocks. + +* 'a' is the first 48-bit chunk of the UUID (octets 0-5); +* 'b' is the mid 12-bit chunk (octets 6-7); +* 'c' is the last 62-bit chunk (octets 8-15). + +When a value is not specified, a pseudo-random value is generated. +""" if sys.version_info >= (3, 12): - def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... + def uuid3(namespace: UUID, name: str | bytes) -> UUID: + """Generate a UUID from the MD5 hash of a namespace UUID and a name. +""" else: - def uuid3(namespace: UUID, name: str) -> UUID: ... + def uuid3(namespace: UUID, name: str) -> UUID: + """Generate a UUID from the MD5 hash of a namespace UUID and a name. +""" -def uuid4() -> UUID: ... +def uuid4() -> UUID: + """Generate a random UUID. +""" if sys.version_info >= (3, 12): - def uuid5(namespace: UUID, name: str | bytes) -> UUID: ... + def uuid5(namespace: UUID, name: str | bytes) -> UUID: + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name. +""" else: - def uuid5(namespace: UUID, name: str) -> UUID: ... + def uuid5(namespace: UUID, name: str) -> UUID: + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name. +""" if sys.version_info >= (3, 14): NIL: Final[UUID] @@ -102,4 +291,6 @@ RESERVED_MICROSOFT: Final[LiteralString] RESERVED_FUTURE: Final[LiteralString] if sys.version_info >= (3, 12): - def main() -> None: ... + def main() -> None: + """Run the uuid command line interface. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi index 14db88523dba4..aa09f42e017e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/venv/__init__.pyi @@ -1,3 +1,9 @@ +""" +Virtual environment (venv) package for Python. Based on PEP 405. + +Copyright (C) 2011-2014 Vinay Sajip. +Licensed to the PSF under a contributor agreement. +""" import logging import sys from _typeshed import StrOrBytesPath @@ -10,6 +16,32 @@ logger: logging.Logger CORE_VENV_DEPS: Final[tuple[str, ...]] class EnvBuilder: + """ +This class exists to allow virtual environment creation to be +customized. The constructor parameters determine the builder's +behaviour when called upon to create a virtual environment. + +By default, the builder makes the system (global) site-packages dir +*un*available to the created environment. + +If invoked using the Python -m option, the default is to use copying +on Windows platforms but symlinks elsewhere. If instantiated some +other way, the default is to *not* use symlinks. 
+ +:param system_site_packages: If True, the system (global) site-packages + dir is available to created environments. +:param clear: If True, delete the contents of the environment directory if + it already exists, before environment creation. +:param symlinks: If True, attempt to symlink rather than copy files into + virtual environment. +:param upgrade: If True, upgrade an existing virtual environment. +:param with_pip: If True, ensure pip is installed in the virtual + environment +:param prompt: Alternative terminal prefix for the environment. +:param upgrade_deps: Update the base venv modules to the latest on PyPI +:param scm_ignore_files: Create ignore files for the SCMs specified by the + iterable. +""" system_site_packages: bool clear: bool symlinks: bool @@ -42,22 +74,100 @@ class EnvBuilder: upgrade_deps: bool = False, ) -> None: ... - def create(self, env_dir: StrOrBytesPath) -> None: ... + def create(self, env_dir: StrOrBytesPath) -> None: + """ +Create a virtual environment in a directory. + +:param env_dir: The target directory to create an environment in. + +""" def clear_directory(self, path: StrOrBytesPath) -> None: ... # undocumented - def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... - def create_configuration(self, context: SimpleNamespace) -> None: ... + def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: + """ +Create the directories for the environment. + +Returns a context object which holds paths in the environment, +for use by subsequent logic. +""" + def create_configuration(self, context: SimpleNamespace) -> None: + """ +Create a configuration file indicating where the environment's Python +was copied from, and whether the system site-packages should be made +available in the environment. + +:param context: The information for the environment creation request + being processed. +""" def symlink_or_copy( self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False - ) -> None: ... # undocumented - def setup_python(self, context: SimpleNamespace) -> None: ... - def _setup_pip(self, context: SimpleNamespace) -> None: ... # undocumented - def setup_scripts(self, context: SimpleNamespace) -> None: ... - def post_setup(self, context: SimpleNamespace) -> None: ... - def replace_variables(self, text: str, context: SimpleNamespace) -> str: ... # undocumented - def install_scripts(self, context: SimpleNamespace, path: str) -> None: ... + ) -> None: # undocumented + """ +Try symlinking a file, and if that fails, fall back to copying. +(Unused on Windows, because we can't just copy a failed symlink file: we +switch to a different set of files instead.) +""" + def setup_python(self, context: SimpleNamespace) -> None: + """ +Set up a Python executable in the environment. + +:param context: The information for the environment creation request + being processed. +""" + def _setup_pip(self, context: SimpleNamespace) -> None: # undocumented + """Installs or upgrades pip in a virtual environment +""" + def setup_scripts(self, context: SimpleNamespace) -> None: + """ +Set up scripts into the created environment from a directory. + +This method installs the default scripts into the environment +being created. You can prevent the default installation by overriding +this method if you really need to, or if you need to specify +a different location for the scripts to install. By default, the +'scripts' directory in the venv package is used as the source of +scripts to install. 
+""" + def post_setup(self, context: SimpleNamespace) -> None: + """ +Hook for post-setup modification of the venv. Subclasses may install +additional packages or scripts here, add activation shell scripts, etc. + +:param context: The information for the environment creation request + being processed. +""" + def replace_variables(self, text: str, context: SimpleNamespace) -> str: # undocumented + """ +Replace variable placeholders in script text with context-specific +variables. + +Return the text passed in , but with variables replaced. + +:param text: The text in which to replace placeholder variables. +:param context: The information for the environment creation request + being processed. +""" + def install_scripts(self, context: SimpleNamespace, path: str) -> None: + """ +Install scripts into the created environment from a directory. + +:param context: The information for the environment creation request + being processed. +:param path: Absolute pathname of a directory containing script. + Scripts in the 'common' subdirectory of this directory, + and those in the directory named for the platform + being run on, are installed in the created environment. + Placeholder variables are replaced with environment- + specific values. +""" def upgrade_dependencies(self, context: SimpleNamespace) -> None: ... if sys.version_info >= (3, 13): - def create_git_ignore_file(self, context: SimpleNamespace) -> None: ... + def create_git_ignore_file(self, context: SimpleNamespace) -> None: + """ +Create a .gitignore file in the environment directory. + +The contents of the file cause the entire environment directory to be +ignored by git. +""" if sys.version_info >= (3, 13): def create( @@ -70,7 +180,9 @@ if sys.version_info >= (3, 13): upgrade_deps: bool = False, *, scm_ignore_files: Iterable[str] = ..., - ) -> None: ... + ) -> None: + """Create a virtual environment in a directory. +""" else: def create( @@ -81,6 +193,8 @@ else: with_pip: bool = False, prompt: str | None = None, upgrade_deps: bool = False, - ) -> None: ... + ) -> None: + """Create a virtual environment in a directory. +""" def main(args: Sequence[str] | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi index 49c98cb07540e..4ac34b299a5b6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi @@ -1,3 +1,5 @@ +"""Python part of the warnings subsystem. +""" import re import sys from _warnings import warn as warn, warn_explicit as warn_explicit @@ -36,17 +38,44 @@ def showwarning( lineno: int, file: TextIO | None = None, line: str | None = None, -) -> None: ... +) -> None: + """Hook to write a warning to a file; replace if you like. +""" def formatwarning( message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None -) -> str: ... +) -> str: + """Function to format a warning the standard way. +""" def filterwarnings( action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False -) -> None: ... -def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... -def resetwarnings() -> None: ... +) -> None: + """Insert an entry into the list of warnings filters (at the front). -class _OptionError(Exception): ... 
+'action' -- one of "error", "ignore", "always", "all", "default", "module", + or "once" +'message' -- a regex that the warning message must match +'category' -- a class that the warning must be a subclass of +'module' -- a regex that the module name must match +'lineno' -- an integer line number, 0 matches all warnings +'append' -- if true, append to the list of filters +""" +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: + """Insert a simple entry into the list of warnings filters (at the front). + +A simple filter matches all modules and messages. +'action' -- one of "error", "ignore", "always", "all", "default", "module", + or "once" +'category' -- a class that the warning must be a subclass of +'lineno' -- an integer line number, 0 matches all warnings +'append' -- if true, append to the list of filters +""" +def resetwarnings() -> None: + """Clear the list of warning filters, so that no filters are active. +""" + +class _OptionError(Exception): + """Exception used by option processing helpers. +""" class WarningMessage: message: Warning | str @@ -68,6 +97,23 @@ class WarningMessage: ) -> None: ... class catch_warnings(Generic[_W_co]): + """A context manager that copies and restores the warnings filter upon +exiting the context. + +The 'record' argument specifies whether warnings should be captured by a +custom implementation of warnings.showwarning() and be appended to a list +returned by the context manager. Otherwise None is returned by the context +manager. The objects appended to the list are arguments whose attributes +mirror the arguments to showwarning(). + +The 'module' argument is to specify an alternative module to the module +named 'warnings' and imported under that name. This argument is only useful +when testing the warnings module itself. + +If the 'action' argument is not None, the remaining arguments are passed +to warnings.simplefilter() as if it were called immediately on entering the +context. +""" if sys.version_info >= (3, 11): @overload def __init__( @@ -79,7 +125,11 @@ class catch_warnings(Generic[_W_co]): category: type[Warning] = ..., lineno: int = 0, append: bool = False, - ) -> None: ... + ) -> None: + """Specify whether to record warnings and if an alternative module +should be used other than sys.modules['warnings']. + +""" @overload def __init__( self: catch_warnings[list[WarningMessage]], @@ -104,7 +154,14 @@ class catch_warnings(Generic[_W_co]): ) -> None: ... else: @overload - def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: + """Specify whether to record warnings and if an alternative module + should be used other than sys.modules['warnings']. + + For compatibility with Python 3.0, please consider all arguments to be + keyword-only. + + """ @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None @@ -119,6 +176,47 @@ class catch_warnings(Generic[_W_co]): if sys.version_info >= (3, 13): class deprecated: + """Indicate that a class, function or overload is deprecated. + +When this decorator is applied to an object, the type checker +will generate a diagnostic on usage of the deprecated object. 
+ +Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + +The warning specified by *category* will be emitted at runtime +on use of deprecated objects. For functions, that happens on calls; +for classes, on instantiation and on creation of subclasses. +If the *category* is ``None``, no warning is emitted at runtime. +The *stacklevel* determines where the +warning is emitted. If it is ``1`` (the default), the warning +is emitted at the direct caller of the deprecated object; if it +is higher, it is emitted further up the stack. +Static type checker behavior is not affected by the *category* +and *stacklevel* arguments. + +The deprecation message passed to the decorator is saved in the +``__deprecated__`` attribute on the decorated object. +If applied to an overload, the decorator +must be after the ``@overload`` decorator for the attribute to +exist on the overload as returned by ``get_overloads()``. + +See PEP 702 for details. + +""" message: LiteralString category: type[Warning] | None stacklevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi index fd7dbfade884b..df95e15d65af6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi @@ -1,3 +1,75 @@ +"""Stuff to parse WAVE files. + +Usage. + +Reading WAVE files: + f = wave.open(file, 'r') +where file is either the name of a file or an open file pointer. +The open file pointer must have methods read(), seek(), and close(). +When the setpos() and rewind() methods are not used, the seek() +method is not necessary. + +This returns an instance of a class with the following public methods: + getnchannels() -- returns number of audio channels (1 for + mono, 2 for stereo) + getsampwidth() -- returns sample width in bytes + getframerate() -- returns sampling frequency + getnframes() -- returns number of audio frames + getcomptype() -- returns compression type ('NONE' for linear samples) + getcompname() -- returns human-readable version of + compression type ('not compressed' linear samples) + getparams() -- returns a namedtuple consisting of all of the + above in the above order + getmarkers() -- returns None (for compatibility with the + old aifc module) + getmark(id) -- raises an error since the mark does not + exist (for compatibility with the old aifc module) + readframes(n) -- returns at most n frames of audio + rewind() -- rewind to the beginning of the audio stream + setpos(pos) -- seek to the specified position + tell() -- return the current position + close() -- close the instance (make it unusable) +The position returned by tell() and the position given to setpos() +are compatible and have nothing to do with the actual position in the +file. +The close() method is called automatically when the class instance +is destroyed. + +Writing WAVE files: + f = wave.open(file, 'w') +where file is either the name of a file or an open file pointer. +The open file pointer must have methods write(), tell(), seek(), and +close(). 
+ +This returns an instance of a class with the following public methods: + setnchannels(n) -- set the number of channels + setsampwidth(n) -- set the sample width + setframerate(n) -- set the frame rate + setnframes(n) -- set the number of frames + setcomptype(type, name) + -- set the compression type and the + human-readable compression type + setparams(tuple) + -- set all parameters at once + tell() -- return current position in output file + writeframesraw(data) + -- write audio frames without patching up the + file header + writeframes(data) + -- write audio frames and patch up the file header + close() -- patch up the file header and close the + output file +You should set the parameters before the first writeframesraw or +writeframes. The total number of frames does not need to be set, +but when it is set to the correct value, the header does not have to +be patched up. +It is best to first set all parameters, perhaps possibly the +compression type, and then write audio frames using writeframesraw. +When all frames have been written, either call writeframes(b'') or +close() to patch up the sizes in the header. +The close() method is called automatically when the class instance +is destroyed. +""" import sys from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload @@ -12,6 +84,8 @@ class Error(Exception): ... WAVE_FORMAT_PCM: Final = 0x0001 class _wave_params(NamedTuple): + """_wave_params(nchannels, sampwidth, framerate, nframes, comptype, compname) +""" nchannels: int sampwidth: int framerate: int @@ -20,6 +94,35 @@ class _wave_params(NamedTuple): compname: str class Wave_read: + """Variables used in this class: + +These variables are available to the user though appropriate +methods of this class: +_file -- the open file with methods read(), close(), and seek() + set through the __init__() method +_nchannels -- the number of audio channels + available through the getnchannels() method +_nframes -- the number of audio frames + available through the getnframes() method +_sampwidth -- the number of bytes per audio sample + available through the getsampwidth() method +_framerate -- the sampling frequency + available through the getframerate() method +_comptype -- the AIFF-C compression type ('NONE' if AIFF) + available through the getcomptype() method +_compname -- the human-readable AIFF-C compression type + available through the getcomptype() method +_soundpos -- the position in the audio stream + available through the tell() method, set through the + setpos() method + +These variables are used internally only: +_fmt_chunk_read -- 1 iff the FMT chunk has been read +_data_seek_needed -- 1 iff positioned correctly in audio + file for readframes() +_data_chunk -- instantiation of a chunk class for the DATA chunk +_framesize -- size of one frame in the file +""" def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... @@ -48,6 +151,30 @@ class Wave_read: def readframes(self, nframes: int) -> bytes: ... 
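To make the Wave_read/Wave_write interfaces described above concrete, a minimal round-trip sketch (illustrative only; the file name and audio parameters are made up):

    import wave

    # Write one second of 16-bit mono silence at 16 kHz.
    with wave.open("silence.wav", "wb") as w:
        w.setnchannels(1)
        w.setsampwidth(2)
        w.setframerate(16000)
        w.writeframes(b"\x00\x00" * 16000)

    # Read the header and frames back.
    with wave.open("silence.wav", "rb") as r:
        params = r.getparams()          # _wave_params(nchannels=1, sampwidth=2, ...)
        frames = r.readframes(r.getnframes())
        assert len(frames) == params.nframes * params.sampwidth * params.nchannels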
class Wave_write: + """Variables used in this class: + +These variables are user settable through appropriate methods +of this class: +_file -- the open file with methods write(), close(), tell(), seek() + set through the __init__() method +_comptype -- the AIFF-C compression type ('NONE' in AIFF) + set through the setcomptype() or setparams() method +_compname -- the human-readable AIFF-C compression type + set through the setcomptype() or setparams() method +_nchannels -- the number of audio channels + set through the setnchannels() or setparams() method +_sampwidth -- the number of bytes per audio sample + set through the setsampwidth() or setparams() method +_framerate -- the sampling frequency + set through the setframerate() or setparams() method +_nframes -- the number of audio frames written to the header + set through the setnframes() or setparams() method + +These variables are used internally only: +_datalength -- the size of the audio samples written to the header +_nframeswritten -- the number of frames actually written +_datawritten -- the size of the audio samples actually written +""" def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi index 76ab86b957a13..0fd1505760842 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi @@ -1,3 +1,9 @@ +"""Weak reference support for Python. + +This module is an implementation of PEP 205: + +https://peps.python.org/pep-0205/ +""" from _typeshed import SupportsKeysAndGetItem from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy from _weakrefset import WeakSet as WeakSet @@ -56,16 +62,24 @@ class ProxyType(Generic[_T]): # "weakproxy" class ReferenceType(Generic[_T]): # "weakref" __callback__: Callable[[Self], Any] def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ... - def __call__(self) -> _T | None: ... + def __call__(self) -> _T | None: + """Call self as a function. +""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... - def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... + def __class_getitem__(cls, item: Any, /) -> GenericAlias: + """See PEP 585 +""" ref = ReferenceType # everything below here is implemented in weakref.py class WeakMethod(ref[_CallableT]): + """ +A custom `weakref.ref` subclass which simulates a weak reference to +a bound method, working around the lifetime problem of bound methods. +""" __slots__ = ("_func_ref", "_meth_type", "_alive", "__weakref__") def __new__(cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... @@ -74,6 +88,11 @@ class WeakMethod(ref[_CallableT]): def __hash__(self) -> int: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): + """Mapping class that references values weakly. + +Entries in the dictionary will be discarded when no strong +reference to the value exists anymore +""" @overload def __init__(self) -> None: ... @overload @@ -108,8 +127,26 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... 
# type: ignore[override] - def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... - def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... + def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: + """Return an iterator that yields the weak references to the values. + +The references are not guaranteed to be 'live' at the time +they are used, so the result of calling the references needs +to be checked before being used. This can be used to avoid +creating references that will cause the garbage collector to +keep the values around longer than needed. + +""" + def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: + """Return a list of weak references to the values. + +The references are not guaranteed to be 'live' at the time +they are used, so the result of calling the references needs +to be checked before being used. This can be used to avoid +creating references that will cause the garbage collector to +keep the values around longer than needed. + +""" def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @@ -132,12 +169,29 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): + """Specialized reference that includes a key corresponding to the value. + +This is used in the WeakValueDictionary to avoid having to create +a function object for each key stored in the mapping. A shared +callback object can use the 'key' attribute of a KeyedRef instead +of getting a reference to the key from an enclosing scope. + +""" __slots__ = ("key",) key: _KT def __new__(type, ob: _T, callback: Callable[[Self], Any], key: _KT) -> Self: ... def __init__(self, ob: _T, callback: Callable[[Self], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): + """Mapping class that references keys weakly. + +Entries in the dictionary will be discarded when there is no +longer a strong reference to the key. This can be used to +associate additional data with an object owned by other parts of +an application without adding attributes to those objects. This +can be especially useful with objects that override attribute +accesses. +""" @overload def __init__(self, dict: None = None) -> None: ... @overload @@ -161,7 +215,16 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] - def keyrefs(self) -> list[ref[_KT]]: ... + def keyrefs(self) -> list[ref[_KT]]: + """Return a list of weak references to the keys. + +The references are not guaranteed to be 'live' at the time +they are used, so the result of calling the references needs +to be checked before being used. This can be used to avoid +creating references that will cause the garbage collector to +keep the keys around longer than needed. + +""" # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences @overload def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... @@ -188,11 +251,34 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... 
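A minimal sketch of the WeakValueDictionary behaviour documented above: entries vanish once the last strong reference to a value goes away. The Resource class and the key names here are invented for illustration:

    import weakref

    class Resource:
        def __init__(self, name: str) -> None:
            self.name = name

    cache: "weakref.WeakValueDictionary[str, Resource]" = weakref.WeakValueDictionary()
    obj = Resource("a")
    cache["a"] = obj
    assert cache["a"] is obj    # alive while a strong reference exists
    del obj                     # on CPython the entry is usually dropped right away
    print("a" in cache)         # typically False once the value is collected

WeakKeyDictionary works the same way, but weakly references the keys instead of the values.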
class finalize(Generic[_P, _T]): + """Class for finalization of weakrefable objects + +finalize(obj, func, *args, **kwargs) returns a callable finalizer +object which will be called when obj is garbage collected. The +first time the finalizer is called it evaluates func(*arg, **kwargs) +and returns the result. After this the finalizer is dead, and +calling it just returns None. + +When the program exits any remaining finalizers for which the +atexit attribute is true will be run in reverse order of creation. +By default atexit is true. +""" __slots__ = () def __init__(self, obj: _T, func: Callable[_P, Any], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... - def __call__(self, _: Any = None) -> Any | None: ... - def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... - def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: ... + def __call__(self, _: Any = None) -> Any | None: + """If alive then mark as dead and return func(*args, **kwargs); +otherwise return None +""" + def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: + """If alive then mark as dead and return (obj, func, args, kwargs); +otherwise return None +""" + def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: + """If alive then return (obj, func, args, kwargs); +otherwise return None +""" @property - def alive(self) -> bool: ... + def alive(self) -> bool: + """Whether finalizer is alive +""" atexit: bool diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi index 56c30f8727277..935f0eed1742b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi @@ -1,3 +1,5 @@ +"""Interfaces for launching and remotely controlling web browsers. +""" import sys from abc import abstractmethod from collections.abc import Callable, Sequence @@ -10,13 +12,38 @@ class Error(Exception): ... def register( name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False -) -> None: ... -def get(using: str | None = None) -> BaseBrowser: ... -def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... -def open_new(url: str) -> bool: ... -def open_new_tab(url: str) -> bool: ... +) -> None: + """Register a browser connector. +""" +def get(using: str | None = None) -> BaseBrowser: + """Return a browser launcher instance appropriate for the environment. +""" +def open(url: str, new: int = 0, autoraise: bool = True) -> bool: + """Display url using the default browser. + +If possible, open url in a location determined by new. +- 0: the same browser window (the default). +- 1: a new browser window. +- 2: a new browser page ("tab"). +If possible, autoraise raises the window (the default) or not. + +If opening the browser succeeds, return True. +If there is a problem, return False. +""" +def open_new(url: str) -> bool: + """Open url in a new window of the default browser. + +If not possible, then open url in the only browser window. +""" +def open_new_tab(url: str) -> bool: + """Open url in a new page ("tab") of the default browser. + +If not possible, then the behavior becomes equivalent to open_new(). +""" class BaseBrowser: + """Parent class for all browsers. Do not use directly. 
+""" args: list[str] name: str basename: str @@ -27,12 +54,20 @@ class BaseBrowser: def open_new_tab(self, url: str) -> bool: ... class GenericBrowser(BaseBrowser): + """Class for all browsers started with a command +and without remote functionality. +""" def __init__(self, name: str | Sequence[str]) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... -class BackgroundBrowser(GenericBrowser): ... +class BackgroundBrowser(GenericBrowser): + """Class for all browsers which are to be started in the +background. +""" class UnixBrowser(BaseBrowser): + """Parent class for all Unix browsers with remote functionality. +""" def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool @@ -42,20 +77,35 @@ class UnixBrowser(BaseBrowser): remote_action_newwin: str remote_action_newtab: str -class Mozilla(UnixBrowser): ... +class Mozilla(UnixBrowser): + """Launcher class for Mozilla browsers. +""" if sys.version_info < (3, 12): class Galeon(UnixBrowser): + """Launcher class for Galeon/Epiphany browsers. +""" raise_opts: list[str] class Grail(BaseBrowser): def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... -class Chrome(UnixBrowser): ... -class Opera(UnixBrowser): ... -class Elinks(UnixBrowser): ... +class Chrome(UnixBrowser): + """Launcher class for Google Chrome browser. +""" +class Opera(UnixBrowser): + """Launcher class for Opera browser. +""" +class Elinks(UnixBrowser): + """Launcher class for Elinks browsers. +""" class Konqueror(BaseBrowser): + """Controller for the KDE File Manager (kfm, or Konqueror). + +See the output of ``kfmclient --commands`` +for more information on the Konqueror remote-control interface. +""" def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... 
if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi index e69de29bb2d1d..59ee48fddec23 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/__init__.pyi @@ -0,0 +1,25 @@ +"""wsgiref -- a WSGI (PEP 3333) Reference Library + +Current Contents: + +* util -- Miscellaneous useful functions and wrappers + +* headers -- Manage response headers + +* handlers -- base classes for server/gateway implementations + +* simple_server -- a simple BaseHTTPServer that supports WSGI + +* validate -- validation wrapper that sits between an app and a server + to detect errors in either + +* types -- collection of WSGI-related types for static type checking + +To-Do: + +* cgi_gateway -- Run WSGI apps under CGI (pending a deployment standard) + +* cgi_wrapper -- Run CGI apps under WSGI + +* router -- a simple middleware component that handles URL traversal +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi index ebead540018e1..afc08a8a6963f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi @@ -1,3 +1,5 @@ +"""Base classes for server/gateway implementations +""" from _typeshed import OptExcInfo from _typeshed.wsgi import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment from abc import abstractmethod @@ -10,9 +12,13 @@ from .util import FileWrapper __all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISCGIHandler", "read_environ"] def format_date_time(timestamp: float | None) -> str: ... # undocumented -def read_environ() -> dict[str, str]: ... +def read_environ() -> dict[str, str]: + """Read environment, fixing HTTP variables +""" class BaseHandler: + """Manage the invocation of a WSGI application +""" wsgi_version: tuple[int, int] # undocumented wsgi_multithread: bool wsgi_multiprocess: bool @@ -31,38 +37,139 @@ class BaseHandler: error_status: str error_headers: list[tuple[str, str]] error_body: bytes - def run(self, application: WSGIApplication) -> None: ... - def setup_environ(self) -> None: ... - def finish_response(self) -> None: ... - def get_scheme(self) -> str: ... - def set_content_length(self) -> None: ... - def cleanup_headers(self) -> None: ... + def run(self, application: WSGIApplication) -> None: + """Invoke the application +""" + def setup_environ(self) -> None: + """Set up the environment for one request +""" + def finish_response(self) -> None: + """Send any iterable data, then close self and the iterable + +Subclasses intended for use in asynchronous servers will +want to redefine this method, such that it sets up callbacks +in the event loop to iterate over the data, and to call +'self.close()' once the response is finished. +""" + def get_scheme(self) -> str: + """Return the URL scheme being used +""" + def set_content_length(self) -> None: + """Compute Content-Length or switch to chunked encoding if possible +""" + def cleanup_headers(self) -> None: + """Make any necessary header changes or defaults + +Subclasses can extend this to add other defaults. +""" def start_response( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None - ) -> Callable[[bytes], None]: ... - def send_preamble(self) -> None: ... 
- def write(self, data: bytes) -> None: ... - def sendfile(self) -> bool: ... - def finish_content(self) -> None: ... - def close(self) -> None: ... - def send_headers(self) -> None: ... - def result_is_file(self) -> bool: ... - def client_is_modern(self) -> bool: ... - def log_exception(self, exc_info: OptExcInfo) -> None: ... - def handle_error(self) -> None: ... - def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... + ) -> Callable[[bytes], None]: + """'start_response()' callable as specified by PEP 3333 +""" + def send_preamble(self) -> None: + """Transmit version/status/date/server, via self._write() +""" + def write(self, data: bytes) -> None: + """'write()' callable as specified by PEP 3333 +""" + def sendfile(self) -> bool: + """Platform-specific file transmission + +Override this method in subclasses to support platform-specific +file transmission. It is only called if the application's +return iterable ('self.result') is an instance of +'self.wsgi_file_wrapper'. + +This method should return a true value if it was able to actually +transmit the wrapped file-like object using a platform-specific +approach. It should return a false value if normal iteration +should be used instead. An exception can be raised to indicate +that transmission was attempted, but failed. + +NOTE: this method should call 'self.send_headers()' if +'self.headers_sent' is false and it is going to attempt direct +transmission of the file. +""" + def finish_content(self) -> None: + """Ensure headers and content have both been sent +""" + def close(self) -> None: + """Close the iterable (if needed) and reset all instance vars + +Subclasses may want to also drop the client connection. +""" + def send_headers(self) -> None: + """Transmit headers to the client, via self._write() +""" + def result_is_file(self) -> bool: + """True if 'self.result' is an instance of 'self.wsgi_file_wrapper' +""" + def client_is_modern(self) -> bool: + """True if client can accept status and headers +""" + def log_exception(self, exc_info: OptExcInfo) -> None: + """Log the 'exc_info' tuple in the server log + +Subclasses may override to retarget the output or change its format. +""" + def handle_error(self) -> None: + """Log current error, and send error output to client if possible +""" + def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: + """WSGI mini-app to create error output + +By default, this just uses the 'error_status', 'error_headers', +and 'error_body' attributes to generate an output page. It can +be overridden in a subclass to dynamically generate diagnostics, +choose an appropriate message for the user's preferred language, etc. + +Note, however, that it's not recommended from a security perspective to +spit out diagnostics to any old user; ideally, you should have to do +something special to enable diagnostic output, which is why we don't +include any here! +""" @abstractmethod - def _write(self, data: bytes) -> None: ... + def _write(self, data: bytes) -> None: + """Override in subclass to buffer data for send to client + +It's okay if this method actually transmits the data; BaseHandler +just separates write and flush operations for greater efficiency +when the underlying system actually has such a distinction. +""" @abstractmethod - def _flush(self) -> None: ... 
+ def _flush(self) -> None: + """Override in subclass to force sending of recent '_write()' calls + +It's okay if this method is a no-op (i.e., if '_write()' actually +sends the data. +""" @abstractmethod - def get_stdin(self) -> InputStream: ... + def get_stdin(self) -> InputStream: + """Override in subclass to return suitable 'wsgi.input' +""" @abstractmethod - def get_stderr(self) -> ErrorStream: ... + def get_stderr(self) -> ErrorStream: + """Override in subclass to return suitable 'wsgi.errors' +""" @abstractmethod - def add_cgi_vars(self) -> None: ... + def add_cgi_vars(self) -> None: + """Override in subclass to insert CGI variables in 'self.environ' +""" class SimpleHandler(BaseHandler): + """Handler that's just initialized with streams, environment, etc. + +This handler subclass is intended for synchronous HTTP/1.0 origin servers, +and handles sending the entire response output, given the correct inputs. + +Usage:: + + handler = SimpleHandler( + inp,out,err,env, multithread=False, multiprocess=True + ) + handler.run(app) +""" stdin: InputStream stdout: IO[bytes] stderr: ErrorStream @@ -82,10 +189,48 @@ class SimpleHandler(BaseHandler): def _write(self, data: bytes) -> None: ... def _flush(self) -> None: ... -class BaseCGIHandler(SimpleHandler): ... +class BaseCGIHandler(SimpleHandler): + """CGI-like systems using input/output/error streams and environ mapping + +Usage:: + + handler = BaseCGIHandler(inp,out,err,env) + handler.run(app) + +This handler class is useful for gateway protocols like ReadyExec and +FastCGI, that have usable input/output/error streams and an environment +mapping. It's also the base class for CGIHandler, which just uses +sys.stdin, os.environ, and so on. + +The constructor also takes keyword arguments 'multithread' and +'multiprocess' (defaulting to 'True' and 'False' respectively) to control +the configuration sent to the application. It sets 'origin_server' to +False (to enable CGI-like output), and assumes that 'wsgi.run_once' is +False. +""" class CGIHandler(BaseCGIHandler): + """CGI-based invocation via sys.stdin/stdout/stderr and os.environ + +Usage:: + + CGIHandler().run(app) + +The difference between this class and BaseCGIHandler is that it always +uses 'wsgi.run_once' of 'True', 'wsgi.multithread' of 'False', and +'wsgi.multiprocess' of 'True'. It does not take any initialization +parameters, but always uses 'sys.stdin', 'os.environ', and friends. + +If you need to override any of these parameters, use BaseCGIHandler +instead. +""" def __init__(self) -> None: ... class IISCGIHandler(BaseCGIHandler): + """CGI-based invocation with workaround for IIS path bug + +This handler should be used in preference to CGIHandler when deploying on +Microsoft IIS without having set the config allowPathInfo option (IIS>=7) +or metabase allowPathInfoForScriptMappings (IIS<7). +""" def __init__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi index 9febad4b32775..a59077307138a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi @@ -1,3 +1,9 @@ +"""Manage HTTP Response Headers + +Much of this module is red-handedly pilfered from email.message in the stdlib, +so portions are Copyright (C) 2001 Python Software Foundation, and were +written by Barry Warsaw. 
+""" from re import Pattern from typing import Final, overload from typing_extensions import TypeAlias @@ -7,20 +13,90 @@ _HeaderList: TypeAlias = list[tuple[str, str]] tspecials: Final[Pattern[str]] # undocumented class Headers: + """Manage a collection of HTTP response headers +""" def __init__(self, headers: _HeaderList | None = None) -> None: ... - def __len__(self) -> int: ... - def __setitem__(self, name: str, val: str) -> None: ... - def __delitem__(self, name: str) -> None: ... - def __getitem__(self, name: str) -> str | None: ... - def __contains__(self, name: str) -> bool: ... - def get_all(self, name: str) -> list[str]: ... + def __len__(self) -> int: + """Return the total number of headers, including duplicates. +""" + def __setitem__(self, name: str, val: str) -> None: + """Set the value of a header. +""" + def __delitem__(self, name: str) -> None: + """Delete all occurrences of a header, if present. + +Does *not* raise an exception if the header is missing. +""" + def __getitem__(self, name: str) -> str | None: + """Get the first header value for 'name' + +Return None if the header is missing instead of raising an exception. + +Note that if the header appeared multiple times, the first exactly which +occurrence gets returned is undefined. Use getall() to get all +the values matching a header field name. +""" + def __contains__(self, name: str) -> bool: + """Return true if the message contains the header. +""" + def get_all(self, name: str) -> list[str]: + """Return a list of all the values for the named field. + +These will be sorted in the order they appeared in the original header +list or were added to this instance, and may contain duplicates. Any +fields deleted and re-inserted are always appended to the header list. +If no fields exist with the given name, returns an empty list. +""" @overload - def get(self, name: str, default: str) -> str: ... + def get(self, name: str, default: str) -> str: + """Get the first header value for 'name', or return 'default' +""" @overload def get(self, name: str, default: str | None = None) -> str | None: ... - def keys(self) -> list[str]: ... - def values(self) -> list[str]: ... - def items(self) -> _HeaderList: ... + def keys(self) -> list[str]: + """Return a list of all the header field names. + +These will be sorted in the order they appeared in the original header +list, or were added to this instance, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" + def values(self) -> list[str]: + """Return a list of all header values. + +These will be sorted in the order they appeared in the original header +list, or were added to this instance, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" + def items(self) -> _HeaderList: + """Get all the header fields and values. + +These will be sorted in the order they were in the original header +list, or were added to this instance, and may contain duplicates. +Any fields deleted and re-inserted are always appended to the header +list. +""" def __bytes__(self) -> bytes: ... - def setdefault(self, name: str, value: str) -> str: ... - def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: ... + def setdefault(self, name: str, value: str) -> str: + """Return first matching header value for 'name', or 'value' + +If there is no header named 'name', add a new header with name 'name' +and value 'value'. 
+""" + def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: + """Extended header setting. + +_name is the header field to add. keyword arguments can be used to set +additional parameters for the header field, with underscores converted +to dashes. Normally the parameter will be added as key="value" unless +value is None, in which case only the key will be added. + +Example: + +h.add_header('content-disposition', 'attachment', filename='bud.gif') + +Note that unlike the corresponding 'email.message' method, this does +*not* handle '(charset, language, value)' tuples: all values must be +strings or None. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi index bdf58719c8289..572aae0a4d7c7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi @@ -1,3 +1,14 @@ +"""BaseHTTPServer that implements the Python WSGI protocol (PEP 3333) + +This is both an example of how WSGI can be implemented, and a basis for running +simple web applications on a local machine, such as might be done when testing +or debugging an application. It has not been reviewed for security issues, +however, and we strongly recommend that you use a "real" web server for +production use. + +For example usage, see the 'if __name__=="__main__"' block at the end of the +module. See also the BaseHTTPServer module docs for other API information. +""" from _typeshed.wsgi import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment from http.server import BaseHTTPRequestHandler, HTTPServer from typing import Final, TypeVar, overload @@ -14,6 +25,8 @@ class ServerHandler(SimpleHandler): # undocumented server_software: str class WSGIServer(HTTPServer): + """BaseHTTPServer that implements the Python WSGI protocol +""" application: WSGIApplication | None base_environ: WSGIEnvironment # only available after call to setup_environ() def setup_environ(self) -> None: ... @@ -30,7 +43,9 @@ def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[by _S = TypeVar("_S", bound=WSGIServer) @overload -def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: ... +def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: + """Create a new WSGI server listening on `host` and `port` for `app` +""" @overload def make_server( host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi index 57276fd05ea84..204229e9fd363 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi @@ -1,3 +1,5 @@ +"""WSGI-related types for static type checking +""" from _typeshed import OptExcInfo from collections.abc import Callable, Iterable, Iterator from typing import Any, Protocol @@ -6,6 +8,8 @@ from typing_extensions import TypeAlias __all__ = ["StartResponse", "WSGIEnvironment", "WSGIApplication", "InputStream", "ErrorStream", "FileWrapper"] class StartResponse(Protocol): + """start_response() callable as defined in PEP 3333 +""" def __call__( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., / ) -> Callable[[bytes], object]: ... @@ -14,12 +18,16 @@ WSGIEnvironment: TypeAlias = dict[str, Any] WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] class InputStream(Protocol): + """WSGI input stream as defined in PEP 3333 +""" def read(self, size: int = ..., /) -> bytes: ... def readline(self, size: int = ..., /) -> bytes: ... def readlines(self, hint: int = ..., /) -> list[bytes]: ... def __iter__(self) -> Iterator[bytes]: ... class ErrorStream(Protocol): + """WSGI error stream as defined in PEP 3333 +""" def flush(self) -> object: ... def write(self, s: str, /) -> object: ... def writelines(self, seq: list[str], /) -> object: ... @@ -29,4 +37,6 @@ class _Readable(Protocol): # Optional: def close(self) -> object: ... class FileWrapper(Protocol): + """WSGI file wrapper as defined in PEP 3333 +""" def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi index 3966e17b0d28d..03674798dcaee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi @@ -1,3 +1,5 @@ +"""Miscellaneous WSGI-related Utilities +""" import sys from _typeshed.wsgi import WSGIEnvironment from collections.abc import Callable @@ -8,6 +10,8 @@ if sys.version_info >= (3, 13): __all__ += ["is_hop_by_hop"] class FileWrapper: + """Wrapper to convert file-like objects to iterables +""" filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists @@ -18,9 +22,40 @@ class FileWrapper: def __iter__(self) -> FileWrapper: ... def __next__(self) -> bytes: ... -def guess_scheme(environ: WSGIEnvironment) -> str: ... -def application_uri(environ: WSGIEnvironment) -> str: ... -def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... -def shift_path_info(environ: WSGIEnvironment) -> str | None: ... -def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... -def is_hop_by_hop(header_name: str) -> bool: ... 
+def guess_scheme(environ: WSGIEnvironment) -> str: + """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https' + """ +def application_uri(environ: WSGIEnvironment) -> str: + """Return the application's base URI (no PATH_INFO or QUERY_STRING) +""" +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: + """Return the full request URI, optionally including the query string +""" +def shift_path_info(environ: WSGIEnvironment) -> str | None: + """Shift a name from PATH_INFO to SCRIPT_NAME, returning it + +If there are no remaining path segments in PATH_INFO, return None. +Note: 'environ' is modified in-place; use a copy if you need to keep +the original PATH_INFO or SCRIPT_NAME. + +Note: when PATH_INFO is just a '/', this returns '' and appends a trailing +'/' to SCRIPT_NAME, even though empty path segments are normally ignored, +and SCRIPT_NAME doesn't normally end in a '/'. This is intentional +behavior, to ensure that an application can tell the difference between +'/x' and '/x/' when traversing to objects. +""" +def setup_testing_defaults(environ: WSGIEnvironment) -> None: + """Update 'environ' with trivial defaults for testing purposes + +This adds various parameters required for WSGI, including HTTP_HOST, +SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO, +and all of the wsgi.* variables. It only supplies default values, +and does not replace any existing settings for these variables. + +This routine is intended to make it easier for unit tests of WSGI +servers and applications to set up dummy environments. It should *not* +be used by actual WSGI servers or applications, since the data is fake! +""" +def is_hop_by_hop(header_name: str) -> bool: + """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi index fa8a6bbb8d039..622c2bc9d2767 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi @@ -1,3 +1,109 @@ +""" +Middleware to check for obedience to the WSGI specification. + +Some of the things this checks: + +* Signature of the application and start_response (including that + keyword arguments are not used). + +* Environment checks: + + - Environment is a dictionary (and not a subclass). + + - That all the required keys are in the environment: REQUEST_METHOD, + SERVER_NAME, SERVER_PORT, wsgi.version, wsgi.input, wsgi.errors, + wsgi.multithread, wsgi.multiprocess, wsgi.run_once + + - That HTTP_CONTENT_TYPE and HTTP_CONTENT_LENGTH are not in the + environment (these headers should appear as CONTENT_LENGTH and + CONTENT_TYPE). + + - Warns if QUERY_STRING is missing, as the cgi module acts + unpredictably in that case. + + - That CGI-style variables (that don't contain a .) have + (non-unicode) string values + + - That wsgi.version is a tuple + + - That wsgi.url_scheme is 'http' or 'https' (@@: is this too + restrictive?) + + - Warns if the REQUEST_METHOD is not known (@@: probably too + restrictive). + + - That SCRIPT_NAME and PATH_INFO are empty or start with / + + - That at least one of SCRIPT_NAME or PATH_INFO are set. + + - That CONTENT_LENGTH is a positive integer. + + - That SCRIPT_NAME is not '/' (it should be '', and PATH_INFO should + be '/'). 
+ + - That wsgi.input has the methods read, readline, readlines, and + __iter__ + + - That wsgi.errors has the methods flush, write, writelines + +* The status is a string, contains a space, starts with an integer, + and that integer is in range (> 100). + +* That the headers is a list (not a subclass, not another kind of + sequence). + +* That the items of the headers are tuples of strings. + +* That there is no 'status' header (that is used in CGI, but not in + WSGI). + +* That the headers don't contain newlines or colons, end in _ or -, or + contain characters codes below 037. + +* That Content-Type is given if there is content (CGI often has a + default content type, but WSGI does not). + +* That no Content-Type is given when there is no content (@@: is this + too restrictive?) + +* That the exc_info argument to start_response is a tuple or None. + +* That all calls to the writer are with strings, and no other methods + on the writer are accessed. + +* That wsgi.input is used properly: + + - .read() is called with exactly one argument + + - That it returns a string + + - That readline, readlines, and __iter__ return strings + + - That .close() is not called + + - No other methods are provided + +* That wsgi.errors is used properly: + + - .write() and .writelines() is called with a string + + - That .close() is not called, and no other methods are provided. + +* The response iterator: + + - That it is not a string (it should be a list of a single string; a + string will work, but perform horribly). + + - That .__next__() returns a string + + - That the iterator is not iterated over until start_response has + been called (that can signal either a server or application + error). + + - That .close() is called (doesn't raise exception, only prints to + sys.stderr, because we only know it isn't called when the object + is garbage collected). +""" from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication from collections.abc import Callable, Iterable, Iterator from typing import Any, NoReturn @@ -5,9 +111,21 @@ from typing_extensions import TypeAlias __all__ = ["validator"] -class WSGIWarning(Warning): ... +class WSGIWarning(Warning): + """ +Raised in response to WSGI-spec-related warnings +""" -def validator(application: WSGIApplication) -> WSGIApplication: ... +def validator(application: WSGIApplication) -> WSGIApplication: + """ +When applied between a WSGI server and a WSGI application, this +middleware will check for WSGI compliance on a number of levels. +This middleware does not modify the request or response in any +way, but will raise an AssertionError if anything seems off +(except for a failure to close the application iterator, which +will be printed to stderr -- there's no way to raise an exception +at that point). +""" class InputWrapper: input: InputStream diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi index 78f3ecec8d78b..d688654d8d8e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi @@ -1,3 +1,8 @@ +"""Implements (a subset of) Sun XDR -- eXternal Data Representation. + +See: RFC 1014 + +""" from collections.abc import Callable, Sequence from typing import TypeVar @@ -6,12 +11,23 @@ __all__ = ["Error", "Packer", "Unpacker", "ConversionError"] _T = TypeVar("_T") class Error(Exception): + """Exception class for this module. 
Use: + + except xdrlib.Error as var: + # var has the Error instance for the exception + + Public ivars: + msg -- contains the message + + """ msg: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: + """Pack various data representations into a buffer. +""" def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... @@ -33,6 +49,8 @@ class Packer: def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... class Unpacker: + """Unpacks various data representations from the given buffer. +""" def __init__(self, data: bytes) -> None: ... def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi index 7a240965136e5..6c9a8e08b4616 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi @@ -1,3 +1,20 @@ +"""Core XML support for Python. + +This package contains four sub-packages: + +dom -- The W3C Document Object Model. This supports DOM Level 1 + + Namespaces. + +parsers -- Python wrappers for XML parsers (currently only supports Expat). + +sax -- The Simple API for XML, developed by XML-Dev, led by David + Megginson and ported to Python by Lars Marius Garshol. This + supports the SAX 2 API. + +etree -- The ElementTree XML library. This is a subset of the full + ElementTree XML release. + +""" # At runtime, listing submodules in __all__ without them being imported is # valid, and causes them to be included in a star import. See #6523 __all__ = ["dom", "parsers", "sax", "etree"] # noqa: F822 # pyright: ignore[reportUnsupportedDunderAll] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi index 7b301373f5288..0988324b0a656 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi @@ -2,6 +2,9 @@ from typing import Final from xml.dom.minidom import Node class NodeFilter: + """ +This is the DOM2 NodeFilter interface. It contains only constants. +""" FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi index 5dbb6c536f617..6d8e3df101f16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi @@ -1,8 +1,25 @@ +"""W3C Document Object Model implementation for Python. + +The Python mapping of the Document Object Model is documented in the +Python Library Reference in the section on the xml.dom package. + +This package contains the following modules: + +minidom -- A simple implementation of the Level 1 DOM with namespace + support added (based on the Level 2 specification) and other + minor Level 2 functionality. + +pulldom -- DOM builder supporting on-demand tree-building for selected + subtrees of the document. + +""" from typing import Any, Final, Literal from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation class Node: + """Class giving the NodeType constants. 
+""" __slots__ = () ELEMENT_NODE: Final = 1 ATTRIBUTE_NODE: Final = 2 @@ -36,6 +53,9 @@ INVALID_ACCESS_ERR: Final = 15 VALIDATION_ERR: Final = 16 class DOMException(Exception): + """Abstract base class for DOM exceptions. +Exceptions with specific codes are specializations of this class. +""" code: int def __init__(self, *args: Any, **kw: Any) -> None: ... def _get_code(self) -> int: ... @@ -89,6 +109,8 @@ class ValidationErr(DOMException): code: Literal[16] class UserDataHandler: + """Class giving the operation constants for UserDataHandler.handle(). +""" NODE_CLONED: Final = 1 NODE_IMPORTED: Final = 2 NODE_DELETED: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi index 346a4bf63bd4d..ef09ed10c9c47 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi @@ -1,8 +1,32 @@ +"""Registration facilities for DOM. This module should not be used +directly. Instead, the functions getDOMImplementation and +registerDOMImplementation should be imported from xml.dom. +""" from _typeshed.xml import DOMImplementation from collections.abc import Callable, Iterable well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] -def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... -def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: ... +def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: + """registerDOMImplementation(name, factory) + +Register the factory function with the name. The factory function +should return an object which implements the DOMImplementation +interface. The factory function can either return the same object, +or a new one (e.g. if that implementation supports some +customization). +""" +def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: + """getDOMImplementation(name = None, features = ()) -> DOM implementation. + +Return a suitable DOM implementation. The name is either +well-known, the module name of a DOM implementation, or None. If +it is not None, imports the corresponding module and returns +DOMImplementation object if the import succeeds. + +If name is not given, consider the available implementations to +find one with the required feature set. If no implementation can +be found, raise an ImportError. The features list must be a sequence +of (feature, version) pairs which are passed to hasFeature. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi index 2b9ac88769700..47bcdcbe84b10 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -1,3 +1,8 @@ +"""Facility to use the Expat parser to load a minidom instance +from a string or file. + +This avoids all the overhead of SAX and pulldom to gain performance. +""" from _typeshed import ReadableBuffer, SupportsRead from typing import Any, Final, NoReturn from typing_extensions import TypeAlias @@ -28,15 +33,31 @@ class ElementInfo: def isIdNS(self, euri: str, ename: str, auri: str, aname: str) -> bool: ... 
class ExpatBuilder: + """Document builder that uses Expat to build a ParsedXML.DOM document +instance. +""" document: Document # Created in self.reset() curNode: DocumentFragment | Element | Document # Created in self.reset() def __init__(self, options: Options | None = None) -> None: ... - def createParser(self) -> XMLParserType: ... - def getParser(self) -> XMLParserType: ... - def reset(self) -> None: ... - def install(self, parser: XMLParserType) -> None: ... - def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... - def parseString(self, string: str | ReadableBuffer) -> Document: ... + def createParser(self) -> XMLParserType: + """Create a new parser object. +""" + def getParser(self) -> XMLParserType: + """Return the parser object, creating a new one if needed. +""" + def reset(self) -> None: + """Free all data structures used during DOM construction. +""" + def install(self, parser: XMLParserType) -> None: + """Install the callbacks needed to build the DOM into the parser. +""" + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: + """Parse a document from a file object, returning the document +node. +""" + def parseString(self, string: str | ReadableBuffer) -> Document: + """Parse a document from a string, returning the document node. +""" def start_doctype_decl_handler( self, doctypeName: str, systemId: str | None, publicId: str | None, has_internal_subset: bool ) -> None: ... @@ -67,6 +88,9 @@ class ExpatBuilder: def xml_decl_handler(self, version: str, encoding: str | None, standalone: int) -> None: ... class FilterVisibilityController: + """Wrapper around a DOMBuilderFilter which implements the checks +to make the whatToShow filter attribute work. +""" __slots__ = ("filter",) filter: DOMBuilderFilter def __init__(self, filter: DOMBuilderFilter) -> None: ... @@ -88,29 +112,60 @@ class Skipper(FilterCrutch): def end_element_handler(self, *args: Any) -> None: ... class FragmentBuilder(ExpatBuilder): + """Builder which constructs document fragments given XML source +text and a context node. + +The context node is expected to provide information about the +namespace declarations which are in scope at the start of the +fragment. +""" fragment: DocumentFragment | None originalDocument: Document context: Node def __init__(self, context: Node, options: Options | None = None) -> None: ... def reset(self) -> None: ... - def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: ... # type: ignore[override] - def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: ... # type: ignore[override] + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: # type: ignore[override] + """Parse a document fragment from a file object, returning the +fragment node. +""" + def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: # type: ignore[override] + """Parse a document fragment from a string, returning the +fragment node. +""" def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... class Namespaces: - def createParser(self) -> XMLParserType: ... - def install(self, parser: XMLParserType) -> None: ... - def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: ... + """Mix-in class for builders; adds support for namespaces. +""" + def createParser(self) -> XMLParserType: + """Create a new namespace-handling parser. 
+""" + def install(self, parser: XMLParserType) -> None: + """Insert the namespace-handlers onto the parser. +""" + def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: + """Push this namespace declaration on our storage. +""" def start_element_handler(self, name: str, attributes: list[str]) -> None: ... def end_element_handler(self, name: str) -> None: ... # only exists if __debug__ -class ExpatBuilderNS(Namespaces, ExpatBuilder): ... -class FragmentBuilderNS(Namespaces, FragmentBuilder): ... -class ParseEscape(Exception): ... +class ExpatBuilderNS(Namespaces, ExpatBuilder): + """Document builder that supports namespaces. +""" +class FragmentBuilderNS(Namespaces, FragmentBuilder): + """Fragment builder that supports namespaces. +""" +class ParseEscape(Exception): + """Exception raised to short-circuit parsing in InternalSubsetExtractor. +""" class InternalSubsetExtractor(ExpatBuilder): + """XML processor which can rip out the internal document type subset. +""" subset: str | list[str] | None = None - def getSubset(self) -> str: ... + def getSubset(self) -> str: + """Return the internal subset as a string. +""" def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler( # type: ignore[override] @@ -119,8 +174,27 @@ class InternalSubsetExtractor(ExpatBuilder): def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name: str, attrs: list[str]) -> NoReturn: ... -def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: ... -def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: ... -def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: ... -def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: ... -def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: + """Parse a document, returning the resulting Document node. + +'file' may be either a file name or an open file object. +""" +def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: + """Parse a document from a string, returning the resulting +Document node. +""" +def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: + """Parse a fragment of a document, given the context from which it +was originally extracted. context should be the parent of the +node(s) which are in the fragment. + +'file' may be either a file name or an open file object. +""" +def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: + """Parse a fragment of a document from a string, given the context +from which it was originally extracted. context should be the +parent of the node(s) which are in the fragment. +""" +def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: + """Create a builder based on an Options object. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi index 6fcaee019dc20..fb8de652f49b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi @@ -1,3 +1,8 @@ +"""Python version compatibility support for minidom. + +This module contains internal implementation details and +should not be imported; use xml.dom.minidom instead. +""" from collections.abc import Iterable from typing import Any, Literal, TypeVar @@ -10,13 +15,17 @@ StringTypes: tuple[type[str]] class NodeList(list[_T]): __slots__ = () @property - def length(self) -> int: ... + def length(self) -> int: + """The number of nodes in the NodeList. +""" def item(self, index: int) -> _T | None: ... class EmptyNodeList(tuple[()]): __slots__ = () @property - def length(self) -> Literal[0]: ... + def length(self) -> Literal[0]: + """The number of nodes in the NodeList. +""" def item(self, index: int) -> None: ... def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi index e0431417aa3c0..a05af66b70022 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,3 +1,19 @@ +"""Simple implementation of the Level 1 DOM. + +Namespaces and other minor Level 2 features are also supported. + +parse("foo.xml") + +parseString("") + +Todo: +===== + * convenience methods for getting elements and text. + * more testing + * bring some of the writer and linearizer code into conformance with this + interface + * SAX 2 namespaces +""" import xml.dom from _collections_abc import dict_keys, dict_values from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite @@ -50,8 +66,12 @@ class _UserDataHandler(Protocol): def parse( file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None -) -> Document: ... -def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: ... +) -> Document: + """Parse a file into a DOM by filename or file object. +""" +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: + """Parse a file into a DOM from a string. +""" @overload def getDOMImplementation(features: None = None) -> DOMImplementation: ... @overload @@ -83,11 +103,17 @@ class Node(xml.dom.Node): attributes: NamedNodeMap | None # non-null only for Element @property - def firstChild(self) -> _NodesThatAreChildren | None: ... + def firstChild(self) -> _NodesThatAreChildren | None: + """First child node, or None. +""" @property - def lastChild(self) -> _NodesThatAreChildren | None: ... + def lastChild(self) -> _NodesThatAreChildren | None: + """Last child node, or None. +""" @property - def localName(self) -> str | None: ... # non-null only for Element and Attr + def localName(self) -> str | None: # non-null only for Element and Attr + """Namespace-local name of this node. +""" def __bool__(self) -> Literal[True]: ... @overload def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... 
@@ -165,14 +191,20 @@ class DocumentFragment(Node): previousSibling: None childNodes: NodeList[_DocumentFragmentChildren] @property - def firstChild(self) -> _DocumentFragmentChildren | None: ... + def firstChild(self) -> _DocumentFragmentChildren | None: + """First child node, or None. +""" @property - def lastChild(self) -> _DocumentFragmentChildren | None: ... + def lastChild(self) -> _DocumentFragmentChildren | None: + """Last child node, or None. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" def __init__(self) -> None: ... def insertBefore( # type: ignore[override] self, newChild: _DFChildrenPlusFragment, refChild: _DocumentFragmentChildren | None @@ -199,14 +231,20 @@ class Attr(Node): previousSibling: None childNodes: NodeList[_AttrChildren] @property - def firstChild(self) -> _AttrChildren | None: ... + def firstChild(self) -> _AttrChildren | None: + """First child node, or None. +""" @property - def lastChild(self) -> _AttrChildren | None: ... + def lastChild(self) -> _AttrChildren | None: + """Last child node, or None. +""" namespaceURI: str | None prefix: str | None @property - def localName(self) -> str: ... + def localName(self) -> str: + """Namespace-local name of this attribute. +""" name: str value: str @@ -218,9 +256,13 @@ class Attr(Node): ) -> None: ... def unlink(self) -> None: ... @property - def isId(self) -> bool: ... + def isId(self) -> bool: + """True if this attribute is an ID. +""" @property - def schemaType(self) -> TypeInfo: ... + def schemaType(self) -> TypeInfo: + """Schema type for this attribute. +""" def insertBefore(self, newChild: _AttrChildrenPlusFragment, refChild: _AttrChildren | None) -> _AttrChildrenPlusFragment: ... # type: ignore[override] def appendChild(self, node: _AttrChildrenPlusFragment) -> _AttrChildrenPlusFragment: ... # type: ignore[override] @overload # type: ignore[override] @@ -232,10 +274,19 @@ class Attr(Node): # In the DOM, this interface isn't specific to Attr, but our implementation is # because that's the only place we use it. class NamedNodeMap: + """The attribute list is a transient interface to the underlying +dictionaries. Mutations here will change the underlying element's +dictionary. + +Ordering is imposed artificially and does not reflect the order of +attributes as found in an input document. +""" __slots__ = ("_attrs", "_attrsNS", "_ownerElement") def __init__(self, attrs: dict[str, Attr], attrsNS: dict[_NSName, Attr], ownerElement: Element) -> None: ... @property - def length(self) -> int: ... + def length(self) -> int: + """Number of nodes in the NamedNodeMap. +""" def item(self, index: int) -> Node | None: ... def items(self) -> list[tuple[str, str]]: ... def itemsNS(self) -> list[tuple[_NSName, str]]: ... @@ -291,21 +342,29 @@ class Element(Node): nodeName: str # same as Element.tagName nodeValue: None @property - def attributes(self) -> NamedNodeMap: ... # type: ignore[override] + def attributes(self) -> NamedNodeMap: # type: ignore[override] + """NamedNodeMap of attributes on the element. +""" parentNode: Document | Element | DocumentFragment | None nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: NodeList[_ElementChildren] @property - def firstChild(self) -> _ElementChildren | None: ... + def firstChild(self) -> _ElementChildren | None: + """First child node, or None. 
+""" @property - def lastChild(self) -> _ElementChildren | None: ... + def lastChild(self) -> _ElementChildren | None: + """Last child node, or None. +""" namespaceURI: str | None prefix: str | None @property - def localName(self) -> str: ... + def localName(self) -> str: + """Namespace-local name of this element. +""" schemaType: TypeInfo tagName: str @@ -314,7 +373,15 @@ class Element(Node): self, tagName: str, namespaceURI: str | None = None, prefix: str | None = None, localName: str | None = None ) -> None: ... def unlink(self) -> None: ... - def getAttribute(self, attname: str) -> str: ... + def getAttribute(self, attname: str) -> str: + """Returns the value of the specified attribute. + +Returns the value of the element's attribute named attname as +a string. An empty string is returned if the element does not +have such an attribute. Note that an empty string may also be +returned as an explicitly given attribute value, use the +hasAttribute method to distinguish these two cases. +""" def getAttributeNS(self, namespaceURI: str | None, localName: str) -> str: ... def setAttribute(self, attname: str, value: str) -> None: ... def setAttributeNS(self, namespaceURI: str | None, qualifiedName: str, value: str) -> None: ... @@ -326,11 +393,26 @@ class Element(Node): def removeAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... def removeAttributeNode(self, node: Attr) -> Attr: ... removeAttributeNodeNS = removeAttributeNode - def hasAttribute(self, name: str) -> bool: ... + def hasAttribute(self, name: str) -> bool: + """Checks whether the element has an attribute with the specified name. + +Returns True if the element has an attribute with the specified name. +Otherwise, returns False. +""" def hasAttributeNS(self, namespaceURI: str | None, localName: str) -> bool: ... - def getElementsByTagName(self, name: str) -> NodeList[Element]: ... + def getElementsByTagName(self, name: str) -> NodeList[Element]: + """Returns all descendant elements with the given tag name. + +Returns the list of all descendant elements (not direct children +only) with the specified tag name. +""" def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... - def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: + """Write an XML element to a file-like object + +Write the element to the writer object that must provide +a write method (e.g. a file or StringIO object). +""" def hasAttributes(self) -> bool: ... def setIdAttribute(self, name: str) -> None: ... def setIdAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... @@ -348,13 +430,20 @@ class Element(Node): def removeChild(self, oldChild: _ElementChildrenVar) -> _ElementChildrenVar: ... # type: ignore[override] class Childless: + """Mixin that makes childless-ness easy to implement and avoids +the complexity of the Node methods that deal with children. +""" __slots__ = () attributes: None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" def appendChild(self, node: _NodesThatAreChildren | DocumentFragment) -> NoReturn: ... def hasChildNodes(self) -> Literal[False]: ... 
def insertBefore( @@ -376,14 +465,20 @@ class ProcessingInstruction(Childless, Node): previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" target: str data: str @@ -401,14 +496,18 @@ class CharacterData(Childless, Node): previousSibling: _NodesThatAreChildren | None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" ownerDocument: Document | None data: str def __init__(self) -> None: ... @property - def length(self) -> int: ... + def length(self) -> int: + """Length of the string data. +""" def __len__(self) -> int: ... def substringData(self, offset: int, count: int) -> str: ... def appendData(self, arg: str) -> None: ... @@ -428,23 +527,33 @@ class Text(CharacterData): previousSibling: _DocumentFragmentChildren | _ElementChildren | _AttrChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" data: str def splitText(self, offset: int) -> Self: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def replaceWholeText(self, content: str) -> Self | None: ... @property - def isWhitespaceInElementContent(self) -> bool: ... + def isWhitespaceInElementContent(self) -> bool: + """True iff this text node contains only whitespace and is in element content. +""" @property - def wholeText(self) -> str: ... + def wholeText(self) -> str: + """The text of all logically-adjacent text nodes. +""" class Comment(CharacterData): nodeType: ClassVar[Literal[8]] @@ -457,14 +566,20 @@ class Comment(CharacterData): previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" def __init__(self, data: str) -> None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... @@ -494,9 +609,13 @@ class ReadOnlySequentialNamedNodeMap(Generic[_N]): def setNamedItem(self, node: Node) -> NoReturn: ... def setNamedItemNS(self, node: Node) -> NoReturn: ... @property - def length(self) -> int: ... + def length(self) -> int: + """Number of entries in the NamedNodeMap. +""" class Identified: + """Mix-in class that supports the publicId and systemId attributes. 
+""" __slots__ = ("publicId", "systemId") publicId: str | None systemId: str | None @@ -512,14 +631,20 @@ class DocumentType(Identified, Childless, Node): previousSibling: _DocumentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" name: str | None internalSubset: str | None @@ -541,14 +666,20 @@ class Entity(Identified, Node): previousSibling: None childNodes: NodeList[_EntityChildren] @property - def firstChild(self) -> _EntityChildren | None: ... + def firstChild(self) -> _EntityChildren | None: + """First child node, or None. +""" @property - def lastChild(self) -> _EntityChildren | None: ... + def lastChild(self) -> _EntityChildren | None: + """Last child node, or None. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" actualEncoding: str | None encoding: str | None @@ -572,14 +703,20 @@ class Notation(Identified, Childless, Node): previousSibling: _DocumentFragmentChildren | None childNodes: EmptyNodeList @property - def firstChild(self) -> None: ... + def firstChild(self) -> None: + """The type of the None singleton. +""" @property - def lastChild(self) -> None: ... + def lastChild(self) -> None: + """The type of the None singleton. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" def __init__(self, name: str, publicId: str | None, systemId: str | None) -> None: ... class DOMImplementation(DOMImplementationLS): @@ -589,15 +726,29 @@ class DOMImplementation(DOMImplementationLS): def getInterface(self, feature: str) -> Self | None: ... class ElementInfo: + """Object that represents content-model information for an element. + +This implementation is not expected to be used in practice; DOM +builders should provide implementations which do the right thing +using information available to it. + +""" __slots__ = ("tagName",) tagName: str def __init__(self, name: str) -> None: ... def getAttributeType(self, aname: str) -> TypeInfo: ... def getAttributeTypeNS(self, namespaceURI: str | None, localName: str) -> TypeInfo: ... def isElementContent(self) -> bool: ... - def isEmpty(self) -> bool: ... - def isId(self, aname: str) -> bool: ... - def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: ... + def isEmpty(self) -> bool: + """Returns true iff this element is declared to have an EMPTY +content model. +""" + def isId(self, aname: str) -> bool: + """Returns true iff the named attribute is a DTD-style ID. +""" + def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: + """Returns true iff the identified attribute is a DTD-style ID. +""" _DocumentChildrenPlusFragment = TypeVar("_DocumentChildrenPlusFragment", bound=_DocumentChildren | DocumentFragment) @@ -613,14 +764,20 @@ class Document(Node, DocumentLS): nextSibling: None childNodes: NodeList[_DocumentChildren] @property - def firstChild(self) -> _DocumentChildren | None: ... + def firstChild(self) -> _DocumentChildren | None: + """First child node, or None. +""" @property - def lastChild(self) -> _DocumentChildren | None: ... 
+ def lastChild(self) -> _DocumentChildren | None: + """Last child node, or None. +""" namespaceURI: None prefix: None @property - def localName(self) -> None: ... + def localName(self) -> None: + """Namespace-local name of this node. +""" implementation: DOMImplementation actualEncoding: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi index df7a3ad0eddb0..a7888dfb54b2b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi @@ -69,7 +69,9 @@ class PullDOM(ContentHandler): def startDocument(self) -> None: ... def buildDocument(self, uri: str | None, tagname: str | None) -> Element: ... def endDocument(self) -> None: ... - def clear(self) -> None: ... + def clear(self) -> None: + """clear(): Explicitly release parsing structures +""" class ErrorHandler: def warning(self, exception: BaseException) -> None: ... @@ -90,7 +92,9 @@ class DOMEventStream: def getEvent(self) -> _Event | None: ... def expandNode(self, node: Document) -> None: ... def reset(self) -> None: ... - def clear(self) -> None: ... + def clear(self) -> None: + """clear(): Explicitly release parsing objects +""" class SAX2DOM(PullDOM): def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi index f19f7050b08df..fae3199f173b9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -1,3 +1,5 @@ +"""Implementation of the DOM Level 3 'LS-Load' feature. +""" from _typeshed import SupportsRead from typing import Any, Final, Literal, NoReturn from xml.dom.minidom import Document, Node, _DOMErrorHandler @@ -5,6 +7,11 @@ from xml.dom.minidom import Document, Node, _DOMErrorHandler __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] class Options: + """Features object that has variables set for each DOMBuilder feature. + +The DOMBuilder class uses an instance of this class to pass settings to +the ExpatBuilder class. +""" namespaces: int namespace_declarations: bool validation: bool @@ -58,6 +65,9 @@ class DOMInputSource: baseURI: str | None class DOMBuilderFilter: + """Element filter which can be used to tailor construction of +a DOM instance. +""" FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 @@ -67,6 +77,8 @@ class DOMBuilderFilter: def startContainer(self, element: Node) -> Literal[1, 2, 3, 4]: ... class DocumentLS: + """Mixin to create documents that conform to the load/save spec. +""" async_: bool def abort(self) -> NoReturn: ... def load(self, uri: str) -> NoReturn: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi index d42db1bc0c571..afd3030cb755d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,3 +1,37 @@ +"""Lightweight XML support for Python. + +XML is an inherently hierarchical data format, and the most natural way to +represent it is with a tree. This module has two classes for this purpose: + + 1. ElementTree represents the whole XML document as a tree and + + 2. 
Element represents a single node in this tree. + +Interactions with the whole document (reading and writing to/from files) are +usually done on the ElementTree level. Interactions with a single XML element +and its sub-elements are done on the Element level. + +Element is a flexible container object designed to store hierarchical data +structures in memory. It can be described as a cross between a list and a +dictionary. Each Element has a number of properties associated with it: + + 'tag' - a string containing the element's name. + + 'attributes' - a Python dictionary storing the element's attributes. + + 'text' - a string containing the element's text content. + + 'tail' - an optional string containing text after the element's end tag. + + And a number of child elements stored in a Python sequence. + +To create an element instance, use the Element constructor, +or the SubElement factory function. + +You can also use the ElementTree class to wrap an element structure +and convert it to and from XML. + +""" import sys from _collections_abc import dict_keys from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite @@ -47,7 +81,9 @@ class ParseError(SyntaxError): position: tuple[int, int] # In reality it works based on `.tag` attribute duck typing. -def iselement(element: object) -> TypeGuard[Element]: ... +def iselement(element: object) -> TypeGuard[Element]: + """Return True if *element* appears to be an Element. +""" @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -61,7 +97,19 @@ def canonicalize( qname_aware_attrs: Iterable[str] | None = None, exclude_attrs: Iterable[str] | None = None, exclude_tags: Iterable[str] | None = None, -) -> str: ... +) -> str: + """Convert XML to its C14N 2.0 serialised form. + +If *out* is provided, it must be a file or file-like object that receives +the serialised canonical XML output (text, not bytes) through its ``.write()`` +method. To write to a file, open it in text mode with encoding "utf-8". +If *out* is not provided, this function returns the output as text string. + +Either *xml_data* (an XML string) or *from_file* (a file path or +file-like object) must be provided as input. + +The configuration options are the same as for the ``C14NWriterTarget``. +""" @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -119,30 +167,70 @@ class Element(Generic[_Tag]): def set(self, key: str, value: str, /) -> None: ... def __copy__(self) -> Element[_Tag]: ... # returns the type of self in Python impl, but not in C impl def __deepcopy__(self, memo: Any, /) -> Element: ... # Only exists in C impl - def __delitem__(self, key: SupportsIndex | slice, /) -> None: ... + def __delitem__(self, key: SupportsIndex | slice, /) -> None: + """Delete self[key]. +""" @overload - def __getitem__(self, key: SupportsIndex, /) -> Element: ... + def __getitem__(self, key: SupportsIndex, /) -> Element: + """Return self[key]. +""" @overload def __getitem__(self, key: slice, /) -> list[Element]: ... - def __len__(self) -> int: ... + def __len__(self) -> int: + """Return len(self). +""" # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. def __iter__(self) -> Iterator[Element]: ... @overload - def __setitem__(self, key: SupportsIndex, value: Element[Any], /) -> None: ... + def __setitem__(self, key: SupportsIndex, value: Element[Any], /) -> None: + """Set self[key] to value. 
+""" @overload def __setitem__(self, key: slice, value: Iterable[Element[Any]], /) -> None: ... # Doesn't really exist in earlier versions, where __len__ is called implicitly instead @deprecated("Testing an element's truth value is deprecated.") - def __bool__(self) -> bool: ... + def __bool__(self) -> bool: + """True if self else False +""" def SubElement(parent: Element[Any], tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... -def Comment(text: str | None = None) -> Element[_ElementCallable]: ... -def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: ... +def Comment(text: str | None = None) -> Element[_ElementCallable]: + """Comment element factory. + +This function creates a special element which the standard serializer +serializes as an XML comment. + +*text* is a string containing the comment string. + +""" +def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: + """Processing Instruction element factory. + +This function creates a special element which the standard serializer +serializes as an XML comment. + +*target* is a string containing the processing instruction, *text* is a +string containing the processing instruction contents, if any. + +""" PI = ProcessingInstruction class QName: + """Qualified name wrapper. + +This class can be used to wrap a QName attribute value in order to get +proper namespace handing on output. + +*text_or_uri* is a string containing the QName value either in the form +{uri}local, or if the tag argument is given, the URI part of a QName. + +*tag* is an optional argument which if given, will make the first +argument (text_or_uri) be interpreted as a URI, and this argument (tag) +be interpreted as a local name. + +""" text: str def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... def __lt__(self, other: QName | str) -> bool: ... @@ -155,18 +243,88 @@ class QName: _Root = TypeVar("_Root", Element, Element | None, default=Element | None) class ElementTree(Generic[_Root]): + """An XML element hierarchy. + +This class also provides support for serialization to and from +standard XML. + +*element* is an optional root element node, +*file* is an optional file handle or file name of an XML file whose +contents will be used to initialize the tree with. + +""" def __init__(self, element: Element[Any] | None = None, file: _FileRead | None = None) -> None: ... - def getroot(self) -> _Root: ... - def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... - def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... - def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + def getroot(self) -> _Root: + """Return root element of this tree. +""" + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: + """Load external XML document into element tree. + +*source* is a file name or file object, *parser* is an optional parser +instance that defaults to XMLParser. + +ParseError is raised if the parser fails to parse the document. + +Returns the root element of the given source document. + +""" + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: + """Create and return tree iterator for the root element. + +The iterator loops over all elements in this tree, in document order. + +*tag* is a string with the tag name to iterate over +(default is to return all elements). 
+ +""" + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: + """Find first matching element by tag name or path. + +Same as getroot().find(path), which is Element.find() + +*path* is a string having either an element tag or an XPath, +*namespaces* is an optional mapping from namespace prefix to full name. + +Return the first matching element, or None if no element was found. + +""" @overload - def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: + """Find first matching element by tag name or path. + +Same as getroot().findtext(path), which is Element.findtext() + +*path* is a string having either an element tag or an XPath, +*namespaces* is an optional mapping from namespace prefix to full name. + +Return the first matching element, or None if no element was found. + +""" @overload def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... - def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: + """Find all matching subelements by tag name or path. + +Same as getroot().findall(path), which is Element.findall(). + +*path* is a string having either an element tag or an XPath, +*namespaces* is an optional mapping from namespace prefix to full name. + +Return list containing all matching elements in document order. + +""" @overload - def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: ... # type: ignore[overload-overlap] + def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: # type: ignore[overload-overlap] + """Find all matching subelements by tag name or path. + +Same as getroot().iterfind(path), which is element.iterfind() + +*path* is a string having either an element tag or an XPath, +*namespaces* is an optional mapping from namespace prefix to full name. + +Return an iterable yielding all matching elements in document order. + +""" @overload def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( @@ -178,12 +336,46 @@ class ElementTree(Generic[_Root]): method: Literal["xml", "html", "text", "c14n"] | None = None, *, short_empty_elements: bool = True, - ) -> None: ... + ) -> None: + """Write element tree to a file as XML. + +Arguments: + *file_or_filename* -- file name or a file object opened for writing + + *encoding* -- the output encoding (default: US-ASCII) + + *xml_declaration* -- bool indicating if an XML declaration should be + added to the output. If None, an XML declaration + is added if encoding IS NOT either of: + US-ASCII, UTF-8, or Unicode + + *default_namespace* -- sets the default XML namespace (for "xmlns") + + *method* -- either "xml" (default), "html, "text", or "c14n" + + *short_empty_elements* -- controls the formatting of elements + that contain no content. If True (default) + they are emitted as a single self-closed + tag, otherwise they are emitted as a pair + of start/end tags + +""" def write_c14n(self, file: _FileWriteC14N) -> None: ... HTML_EMPTY: Final[set[str]] -def register_namespace(prefix: str, uri: str) -> None: ... +def register_namespace(prefix: str, uri: str) -> None: + """Register a namespace prefix. 
+ +The registry is global, and any existing mapping for either the +given prefix or the namespace URI will be removed. + +*prefix* is the namespace prefix, *uri* is a namespace uri. Tags and +attributes in this namespace will be serialized with prefix if possible. + +ValueError is raised if prefix is reserved or is invalid. + +""" @overload def tostring( element: Element[Any], @@ -193,7 +385,20 @@ def tostring( xml_declaration: bool | None = None, default_namespace: str | None = None, short_empty_elements: bool = True, -) -> bytes: ... +) -> bytes: + """Generate string representation of XML element. + +All subelements are included. If encoding is "unicode", a string +is returned. Otherwise a bytestring is returned. + +*element* is an Element instance, *encoding* is an optional output +encoding defaulting to US-ASCII, *method* is an optional output which can +be one of "xml" (default), "html", "text" or "c14n", *default_namespace* +sets the default XML namespace (for "xmlns"). + +Returns an (optionally) encoded string containing the XML data. + +""" @overload def tostring( element: Element[Any], @@ -244,9 +449,40 @@ def tostringlist( default_namespace: str | None = None, short_empty_elements: bool = True, ) -> list[Any]: ... -def dump(elem: Element[Any] | ElementTree[Any]) -> None: ... -def indent(tree: Element[Any] | ElementTree[Any], space: str = " ", level: int = 0) -> None: ... -def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: ... +def dump(elem: Element[Any] | ElementTree[Any]) -> None: + """Write element tree or element structure to sys.stdout. + +This function should be used for debugging only. + +*elem* is either an ElementTree, or a single Element. The exact output +format is implementation dependent. In this version, it's written as an +ordinary XML file. + +""" +def indent(tree: Element[Any] | ElementTree[Any], space: str = " ", level: int = 0) -> None: + """Indent an XML document by inserting newlines and indentation space +after elements. + +*tree* is the ElementTree or Element to modify. The (root) element +itself will not be changed, but the tail text of all elements in its +subtree will be adapted. + +*space* is the whitespace to insert for each indentation level, two +space characters by default. + +*level* is the initial indentation level. Setting this to a higher +value than 0 can be used for indenting subtrees that are more deeply +nested inside of a document. +""" +def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: + """Parse XML document into element tree. + +*source* is a filename or file object containing XML data, +*parser* is an optional parser instance defaulting to XMLParser. + +Return an ElementTree instance. + +""" # This class is defined inside the body of iterparse @type_check_only @@ -257,24 +493,77 @@ class _IterParseIterator(Iterator[tuple[str, Element]], Protocol): if sys.version_info >= (3, 11): def __del__(self) -> None: ... -def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: ... +def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: + """Incrementally parse XML document into ElementTree. + +This class also reports what's going on to the user based on the +*events* it is initialized with. 
The supported events are the strings +"start", "end", "start-ns" and "end-ns" (the "ns" events are used to get +detailed namespace information). If *events* is omitted, only +"end" events are reported. + +*source* is a filename or file object containing XML data, *events* is +a list of events to report back, *parser* is an optional parser instance. + +Returns an iterator providing (event, elem) pairs. + +""" _EventQueue: TypeAlias = tuple[str] | tuple[str, tuple[str, str]] | tuple[str, None] class XMLPullParser(Generic[_E]): def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser[_E] | None = None) -> None: ... - def feed(self, data: str | ReadableBuffer) -> None: ... - def close(self) -> None: ... - def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: ... + def feed(self, data: str | ReadableBuffer) -> None: + """Feed encoded data to parser. +""" + def close(self) -> None: + """Finish feeding data to parser. + +Unlike XMLParser, does not return the root element. Use +read_events() to consume elements from XMLPullParser. +""" + def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: + """Return an iterator over currently available (event, elem) pairs. + +Events are consumed from the internal event queue as they are +retrieved from the iterator. +""" def flush(self) -> None: ... -def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... -def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: + """Parse XML document from string constant. + +This function can be used to embed "XML Literals" in Python code. + +*text* is a string containing XML data, *parser* is an +optional parser instance, defaulting to the standard XMLParser. + +Returns an Element instance. + +""" +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: + """Parse XML document from string constant for its IDs. + +*text* is a string containing XML data, *parser* is an +optional parser instance, defaulting to the standard XMLParser. + +Returns an (Element, dict) tuple, in which the +dict maps element id:s to elements. + +""" # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: + """Parse XML document from sequence of string fragments. + +*sequence* is a list of other sequence, *parser* is an optional parser +instance, defaulting to the standard XMLParser. + +Returns an Element instance. + +""" # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -313,6 +602,27 @@ class TreeBuilder: def pi(self, target: str, text: str | None = None, /) -> Element[Any]: ... class C14NWriterTarget: + """ +Canonicalization writer target for the XMLParser. + +Serialises parse events to XML C14N 2.0. + +The *write* function is used for writing out the resulting data stream +as text (not bytes). To write to a file, open it in text mode with encoding +"utf-8" and pass its ``.write`` method. 
+ +Configuration options: + +- *with_comments*: set to true to include comments +- *strip_text*: set to true to strip whitespace before and after text content +- *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}" +- *qname_aware_tags*: a set of qname aware tag names in which prefixes + should be replaced in text content +- *qname_aware_attrs*: a set of qname aware attribute names in which prefixes + should be replaced in text content +- *exclude_attrs*: a set of attribute names that should not be serialised +- *exclude_tags*: a set of tag names that should not be serialised +""" def __init__( self, write: Callable[[str], object], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi index cebdb6a30014b..c5cc45d5d1bca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi @@ -1 +1,9 @@ +"""Python interfaces to XML parsers. + +This package contains one module: + +expat -- Python wrapper for James Clark's Expat parser, with namespace + support. + +""" from xml.parsers import expat as expat diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi index d9b7ea5369998..e805d22e393b9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi @@ -1,3 +1,5 @@ +"""Interface to the Expat non-validating XML parser. +""" from pyexpat import * # This is actually implemented in the C module pyexpat, but considers itself to live here. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi index e22d769ec3403..4ef138228f0ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi @@ -1 +1,3 @@ +"""Constants used to describe error conditions. +""" from pyexpat.errors import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi index d8f44b47c51b0..eef0f347d3318 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi @@ -1 +1,3 @@ +"""Constants used to interpret content model information. +""" from pyexpat.model import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi index 679466fa34d2c..70ad3bdf4be21 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,3 +1,23 @@ +"""Simple API for XML (SAX) implementation for Python. + +This module provides an implementation of the SAX 2 interface; +information about the Java version of the interface can be found at +http://www.megginson.com/SAX/. The Python version of the interface is +documented at <...>. + +This package contains the following modules: + +handler -- Base classes and constants which define the SAX 2 API for + the 'client-side' of SAX for Python. + +saxutils -- Implementation of the convenience classes commonly used to + work with SAX. 
+ +xmlreader -- Base classes and constants which define the SAX 2 API for + the parsers used with SAX for Python. + +expatreader -- Driver that allows use of the Expat parser with SAX. +""" import sys from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable @@ -21,7 +41,14 @@ _Source: TypeAlias = StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[st default_parser_list: Final[list[str]] -def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ... +def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: + """Creates and returns a SAX parser. + +Creates the first parser it is able to instantiate of the ones +given in the iterable created by chaining parser_list and +default_parser_list. The iterables must contain the names of Python +modules containing both a SAX parser and a create_parser function. +""" def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi index e9cc8856a9c8d..cac7eb77e9042 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi @@ -1,19 +1,83 @@ +"""Different kinds of SAX Exceptions +""" from typing import NoReturn from xml.sax.xmlreader import Locator class SAXException(Exception): - def __init__(self, msg: str, exception: Exception | None = None) -> None: ... - def getMessage(self) -> str: ... - def getException(self) -> Exception | None: ... - def __getitem__(self, ix: object) -> NoReturn: ... + """Encapsulate an XML error or warning. This class can contain +basic error or warning information from either the XML parser or +the application: you can subclass it to provide additional +functionality, or to add localization. Note that although you will +receive a SAXException as the argument to the handlers in the +ErrorHandler interface, you are not actually required to raise +the exception; instead, you can simply read the information in +it. +""" + def __init__(self, msg: str, exception: Exception | None = None) -> None: + """Creates an exception. The message is required, but the exception +is optional. +""" + def getMessage(self) -> str: + """Return a message for this exception. +""" + def getException(self) -> Exception | None: + """Return the embedded exception, or None if there was none. +""" + def __getitem__(self, ix: object) -> NoReturn: + """Avoids weird error messages if someone does exception[ix] by +mistake, since Exception has __getitem__ defined. +""" class SAXParseException(SAXException): - def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... - def getColumnNumber(self) -> int | None: ... - def getLineNumber(self) -> int | None: ... - def getPublicId(self) -> str | None: ... - def getSystemId(self) -> str | None: ... - -class SAXNotRecognizedException(SAXException): ... -class SAXNotSupportedException(SAXException): ... -class SAXReaderNotAvailable(SAXNotSupportedException): ... + """Encapsulate an XML parse error or warning. + +This exception will include information for locating the error in +the original XML document. 
Note that although the application will +receive a SAXParseException as the argument to the handlers in the +ErrorHandler interface, the application is not actually required +to raise the exception; instead, it can simply read the +information in it and take a different action. + +Since this exception is a subclass of SAXException, it inherits +the ability to wrap another exception. +""" + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: + """Creates the exception. The exception parameter is allowed to be None. +""" + def getColumnNumber(self) -> int | None: + """The column number of the end of the text where the exception +occurred. +""" + def getLineNumber(self) -> int | None: + """The line number of the end of the text where the exception occurred. +""" + def getPublicId(self) -> str | None: + """Get the public identifier of the entity where the exception occurred. +""" + def getSystemId(self) -> str | None: + """Get the system identifier of the entity where the exception occurred. +""" + +class SAXNotRecognizedException(SAXException): + """Exception class for an unrecognized identifier. + +An XMLReader will raise this exception when it is confronted with an +unrecognized feature or property. SAX applications and extensions may +use this class for similar purposes. +""" +class SAXNotSupportedException(SAXException): + """Exception class for an unsupported operation. + +An XMLReader will raise this exception when a service it cannot +perform is requested (specifically setting a state or value). SAX +applications and extensions may use this class for similar +purposes. +""" +class SAXReaderNotAvailable(SAXNotSupportedException): + """Exception class for a missing driver. + +An XMLReader module (driver) should raise this exception when it +is first imported, e.g. when a support module cannot be imported. +It also may be raised during parsing, e.g. if executing an external +program is not permitted. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi index 3f9573a25f9aa..5422fe5754122 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi @@ -1,3 +1,7 @@ +""" +SAX driver for the pyexpat C module. This driver works with +pyexpat.__version__ == '2.22'. +""" import sys from _typeshed import ReadableBuffer from collections.abc import Mapping @@ -20,6 +24,11 @@ class _ClosedParser: ErrorLineNumber: int class ExpatLocator(xmlreader.Locator): + """Locator for use with the ExpatParser class. + +This uses a weak reference to the parser object to avoid creating +a circular reference between the parser and the content handler. +""" def __init__(self, parser: ExpatParser) -> None: ... def getColumnNumber(self) -> int | None: ... def getLineNumber(self) -> int: ... @@ -27,8 +36,12 @@ class ExpatLocator(xmlreader.Locator): def getSystemId(self) -> str | None: ... class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): + """SAX driver for the pyexpat C module. +""" def __init__(self, namespaceHandling: _BoolType = 0, bufsize: int = 65516) -> None: ... - def parse(self, source: xmlreader.InputSource | _Source) -> None: ... + def parse(self, source: xmlreader.InputSource | _Source) -> None: + """Parse an XML document from a URL or an InputSource. +""" def prepareParser(self, source: xmlreader.InputSource) -> None: ... 
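# --- Editor's aside (illustrative only, not part of the typeshed patch): a
# minimal sketch exercising make_parser() and ExpatParser.parse(), whose
# docstrings are added in this hunk. The handler class and sample XML are
# invented for this example.
import io
import xml.sax

class ElementCounter(xml.sax.ContentHandler):
    def __init__(self) -> None:
        super().__init__()
        self.count = 0

    def startElement(self, name, attrs) -> None:
        # Called once per start tag in non-namespace mode.
        self.count += 1

handler = ElementCounter()
parser = xml.sax.make_parser()          # picks the first available driver (expat)
parser.setContentHandler(handler)
parser.parse(io.BytesIO(b"<root><a/><b/></root>"))  # file-like objects are accepted
print(handler.count)  # 3 start tags: root, a, b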
def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... def getFeature(self, name: str) -> _BoolType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi index 5ecbfa6f1272c..8575ccf43539f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi @@ -1,3 +1,13 @@ +""" +This module contains the core classes of version 2.0 of SAX for Python. +This file provides only default classes with absolutely minimum +functionality, from which drivers and applications can be subclassed. + +Many of these classes are empty and are included only as documentation +of the interfaces. + +$Id$ +""" import sys from typing import Final, NoReturn, Protocol, type_check_only from xml.sax import xmlreader @@ -11,9 +21,24 @@ class _ErrorHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def warning(self, exception: BaseException) -> None: ... class ErrorHandler: - def error(self, exception: BaseException) -> NoReturn: ... - def fatalError(self, exception: BaseException) -> NoReturn: ... - def warning(self, exception: BaseException) -> None: ... + """Basic interface for SAX error handlers. + +If you create an object that implements this interface, then +register the object with your XMLReader, the parser will call the +methods in your object to report all warnings and errors. There +are three levels of errors available: warnings, (possibly) +recoverable errors, and unrecoverable errors. All methods take a +SAXParseException as the only parameter. +""" + def error(self, exception: BaseException) -> NoReturn: + """Handle a recoverable error. +""" + def fatalError(self, exception: BaseException) -> NoReturn: + """Handle a non-recoverable error. +""" + def warning(self, exception: BaseException) -> None: + """Handle a warning. +""" @type_check_only class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -32,19 +57,159 @@ class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def skippedEntity(self, name: str) -> None: ... class ContentHandler: - def setDocumentLocator(self, locator: xmlreader.Locator) -> None: ... - def startDocument(self) -> None: ... - def endDocument(self) -> None: ... - def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... - def endPrefixMapping(self, prefix: str | None) -> None: ... - def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: ... - def endElement(self, name: str) -> None: ... - def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: ... - def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: ... - def characters(self, content: str) -> None: ... - def ignorableWhitespace(self, whitespace: str) -> None: ... - def processingInstruction(self, target: str, data: str) -> None: ... - def skippedEntity(self, name: str) -> None: ... + """Interface for receiving logical document content events. + +This is the main callback interface in SAX, and the one most +important to applications. The order of events in this interface +mirrors the order of the information in the document. +""" + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: + """Called by the parser to give the application a locator for +locating the origin of document events. 
+ +SAX parsers are strongly encouraged (though not absolutely +required) to supply a locator: if it does so, it must supply +the locator to the application by invoking this method before +invoking any of the other methods in the DocumentHandler +interface. + +The locator allows the application to determine the end +position of any document-related event, even if the parser is +not reporting an error. Typically, the application will use +this information for reporting its own errors (such as +character content that does not match an application's +business rules). The information returned by the locator is +probably not sufficient for use with a search engine. + +Note that the locator will return correct information only +during the invocation of the events in this interface. The +application should not attempt to use it at any other time. +""" + def startDocument(self) -> None: + """Receive notification of the beginning of a document. + +The SAX parser will invoke this method only once, before any +other methods in this interface or in DTDHandler (except for +setDocumentLocator). +""" + def endDocument(self) -> None: + """Receive notification of the end of a document. + +The SAX parser will invoke this method only once, and it will +be the last method invoked during the parse. The parser shall +not invoke this method until it has either abandoned parsing +(because of an unrecoverable error) or reached the end of +input. +""" + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: + """Begin the scope of a prefix-URI Namespace mapping. + +The information from this event is not necessary for normal +Namespace processing: the SAX XML reader will automatically +replace prefixes for element and attribute names when the +http://xml.org/sax/features/namespaces feature is true (the +default). + +There are cases, however, when applications need to use +prefixes in character data or in attribute values, where they +cannot safely be expanded automatically; the +start/endPrefixMapping event supplies the information to the +application to expand prefixes in those contexts itself, if +necessary. + +Note that start/endPrefixMapping events are not guaranteed to +be properly nested relative to each-other: all +startPrefixMapping events will occur before the corresponding +startElement event, and all endPrefixMapping events will occur +after the corresponding endElement event, but their order is +not guaranteed. +""" + def endPrefixMapping(self, prefix: str | None) -> None: + """End the scope of a prefix-URI mapping. + +See startPrefixMapping for details. This event will always +occur after the corresponding endElement event, but the order +of endPrefixMapping events is not otherwise guaranteed. +""" + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: + """Signals the start of an element in non-namespace mode. + +The name parameter contains the raw XML 1.0 name of the +element type as a string and the attrs parameter holds an +instance of the Attributes class containing the attributes of +the element. +""" + def endElement(self, name: str) -> None: + """Signals the end of an element in non-namespace mode. + +The name parameter contains the name of the element type, just +as with the startElement event. +""" + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: + """Signals the start of an element in namespace mode. 
+ +The name parameter contains the name of the element type as a +(uri, localname) tuple, the qname parameter the raw XML 1.0 +name used in the source document, and the attrs parameter +holds an instance of the Attributes class containing the +attributes of the element. + +The uri part of the name tuple is None for elements which have +no namespace. +""" + def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: + """Signals the end of an element in namespace mode. + +The name parameter contains the name of the element type, just +as with the startElementNS event. +""" + def characters(self, content: str) -> None: + """Receive notification of character data. + +The Parser will call this method to report each chunk of +character data. SAX parsers may return all contiguous +character data in a single chunk, or they may split it into +several chunks; however, all of the characters in any single +event must come from the same external entity so that the +Locator provides useful information. +""" + def ignorableWhitespace(self, whitespace: str) -> None: + """Receive notification of ignorable whitespace in element content. + +Validating Parsers must use this method to report each chunk +of ignorable whitespace (see the W3C XML 1.0 recommendation, +section 2.10): non-validating parsers may also use this method +if they are capable of parsing and using content models. + +SAX parsers may return all contiguous whitespace in a single +chunk, or they may split it into several chunks; however, all +of the characters in any single event must come from the same +external entity, so that the Locator provides useful +information. +""" + def processingInstruction(self, target: str, data: str) -> None: + """Receive notification of a processing instruction. + +The Parser will invoke this method once for each processing +instruction found: note that processing instructions may occur +before or after the main document element. + +A SAX parser should never report an XML declaration (XML 1.0, +section 2.8) or a text declaration (XML 1.0, section 4.3.1) +using this method. +""" + def skippedEntity(self, name: str) -> None: + """Receive notification of a skipped entity. + +The Parser will invoke this method once for each entity +skipped. Non-validating processors may skip entities if they +have not seen the declarations (because, for example, the +entity was declared in an external DTD subset). All processors +may skip external entities, depending on the values of the +http://xml.org/sax/features/external-general-entities and the +http://xml.org/sax/features/external-parameter-entities +properties. +""" @type_check_only class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -52,15 +217,34 @@ class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... class DTDHandler: - def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: ... - def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: ... + """Handle DTD events. + +This interface specifies only those DTD events required for basic +parsing (unparsed entities and attributes). +""" + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: + """Handle a notation declaration event. 
+""" + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: + """Handle an unparsed entity declaration event. +""" @type_check_only class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... class EntityResolver: - def resolveEntity(self, publicId: str | None, systemId: str) -> str: ... + """Basic interface for resolving entities. If you create an object +implementing this interface, then register the object with your +Parser, the parser will call the method in your object to +resolve all external entities. Note that DefaultHandler implements +this interface with the default behaviour. +""" + def resolveEntity(self, publicId: str | None, systemId: str) -> str: + """Resolve the system identifier of an entity and return either +the system identifier to read from as a string, or an InputSource +to read from. +""" feature_namespaces: Final = "http://xml.org/sax/features/namespaces" feature_namespace_prefixes: Final = "http://xml.org/sax/features/namespace-prefixes" @@ -79,8 +263,46 @@ all_properties: Final[list[str]] if sys.version_info >= (3, 10): class LexicalHandler: - def comment(self, content: str) -> None: ... - def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: ... - def endDTD(self) -> None: ... - def startCDATA(self) -> None: ... - def endCDATA(self) -> None: ... + """Optional SAX2 handler for lexical events. + +This handler is used to obtain lexical information about an XML +document, that is, information about how the document was encoded +(as opposed to what it contains, which is reported to the +ContentHandler), such as comments and CDATA marked section +boundaries. + +To set the LexicalHandler of an XMLReader, use the setProperty +method with the property identifier +'http://xml.org/sax/properties/lexical-handler'. +""" + def comment(self, content: str) -> None: + """Reports a comment anywhere in the document (including the +DTD and outside the document element). + +content is a string that holds the contents of the comment. +""" + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: + """Report the start of the DTD declarations, if the document +has an associated DTD. + +A startEntity event will be reported before declaration events +from the external DTD subset are reported, and this can be +used to infer from which subset DTD declarations derive. + +name is the name of the document element type, public_id the +public identifier of the DTD (or None if none were supplied) +and system_id the system identifier of the external subset (or +None if none were supplied). +""" + def endDTD(self) -> None: + """Signals the end of DTD declarations. +""" + def startCDATA(self) -> None: + """Reports the beginning of a CDATA marked section. + +The contents of the CDATA marked section will be reported +through the characters event. +""" + def endCDATA(self) -> None: + """Reports the end of a CDATA marked section. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi index a29588faae2ae..3d4e6c4f28b99 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi @@ -1,3 +1,6 @@ +"""A library of useful helper classes to the SAX classes, for the +convenience of application and driver writers. 
+""" from _typeshed import SupportsWrite from codecs import StreamReaderWriter, StreamWriter from collections.abc import Mapping @@ -5,9 +8,31 @@ from io import RawIOBase, TextIOBase from typing import Literal, NoReturn from xml.sax import _Source, handler, xmlreader -def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... -def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ... -def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: ... +def escape(data: str, entities: Mapping[str, str] = {}) -> str: + """Escape &, <, and > in a string of data. + +You can escape other strings of data by passing a dictionary as +the optional entities parameter. The keys and values must all be +strings; each key will be replaced with its corresponding value. +""" +def unescape(data: str, entities: Mapping[str, str] = {}) -> str: + """Unescape &, <, and > in a string of data. + +You can unescape other strings of data by passing a dictionary as +the optional entities parameter. The keys and values must all be +strings; each key will be replaced with its corresponding value. +""" +def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: + """Escape and quote an attribute value. + +Escape &, <, and > in a string of data, then quote it for use as +an attribute value. The " character will be escaped as well, if +necessary. + +You can escape other strings of data by passing a dictionary as +the optional entities parameter. The keys and values must all be +strings; each key will be replaced with its corresponding value. +""" class XMLGenerator(handler.ContentHandler): def __init__( @@ -16,7 +41,9 @@ class XMLGenerator(handler.ContentHandler): encoding: str = "iso-8859-1", short_empty_elements: bool = False, ) -> None: ... - def _qname(self, name: tuple[str | None, str]) -> str: ... + def _qname(self, name: tuple[str | None, str]) -> str: + """Builds a qualified name from a (ns_url, localname) pair +""" def startDocument(self) -> None: ... def endDocument(self) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... @@ -30,6 +57,13 @@ class XMLGenerator(handler.ContentHandler): def processingInstruction(self, target: str, data: str) -> None: ... class XMLFilterBase(xmlreader.XMLReader): + """This class is designed to sit between an XMLReader and the +client application's event handlers. By default, it does nothing +but pass requests up to the reader and events on to the handlers +unmodified, but subclasses can override specific methods to modify +the event stream or the configuration requests as they pass +through. +""" def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... # ErrorHandler methods def error(self, exception: BaseException) -> NoReturn: ... @@ -65,4 +99,7 @@ class XMLFilterBase(xmlreader.XMLReader): def getParent(self) -> xmlreader.XMLReader | None: ... def setParent(self, parent: xmlreader.XMLReader) -> None: ... -def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: ... +def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: + """This function takes an InputSource and an optional base URL and +returns a fully resolved InputSource object ready for reading. 
+""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi index e7d04ddeadb80..574171695b685 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -1,3 +1,6 @@ +"""An XML Reader is the SAX 2 name for an XML parser. XML Parsers +should be based on this code. +""" from _typeshed import ReadableBuffer from collections.abc import Mapping from typing import Generic, Literal, TypeVar, overload @@ -6,52 +9,218 @@ from xml.sax import _Source, _SupportsReadClose from xml.sax.handler import _ContentHandlerProtocol, _DTDHandlerProtocol, _EntityResolverProtocol, _ErrorHandlerProtocol class XMLReader: - def parse(self, source: InputSource | _Source) -> None: ... - def getContentHandler(self) -> _ContentHandlerProtocol: ... - def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... - def getDTDHandler(self) -> _DTDHandlerProtocol: ... - def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: ... - def getEntityResolver(self) -> _EntityResolverProtocol: ... - def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: ... - def getErrorHandler(self) -> _ErrorHandlerProtocol: ... - def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: ... - def setLocale(self, locale: str) -> None: ... - def getFeature(self, name: str) -> Literal[0, 1] | bool: ... - def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: ... - def getProperty(self, name: str) -> object: ... - def setProperty(self, name: str, value: object) -> None: ... + """Interface for reading an XML document using callbacks. + +XMLReader is the interface that an XML parser's SAX2 driver must +implement. This interface allows an application to set and query +features and properties in the parser, to register event handlers +for document processing, and to initiate a document parse. + +All SAX interfaces are assumed to be synchronous: the parse +methods must not return until parsing is complete, and readers +must wait for an event-handler callback to return before reporting +the next event. +""" + def parse(self, source: InputSource | _Source) -> None: + """Parse an XML document from a system identifier or an InputSource. +""" + def getContentHandler(self) -> _ContentHandlerProtocol: + """Returns the current ContentHandler. +""" + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: + """Registers a new object to receive document content events. +""" + def getDTDHandler(self) -> _DTDHandlerProtocol: + """Returns the current DTD handler. +""" + def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: + """Register an object to receive basic DTD-related events. +""" + def getEntityResolver(self) -> _EntityResolverProtocol: + """Returns the current EntityResolver. +""" + def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: + """Register an object to resolve external entities. +""" + def getErrorHandler(self) -> _ErrorHandlerProtocol: + """Returns the current ErrorHandler. +""" + def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: + """Register an object to receive error-message events. +""" + def setLocale(self, locale: str) -> None: + """Allow an application to set the locale for errors and warnings. 
+ +SAX parsers are not required to provide localization for errors +and warnings; if they cannot support the requested locale, +however, they must raise a SAX exception. Applications may +request a locale change in the middle of a parse. +""" + def getFeature(self, name: str) -> Literal[0, 1] | bool: + """Looks up and returns the state of a SAX2 feature. +""" + def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: + """Sets the state of a SAX2 feature. +""" + def getProperty(self, name: str) -> object: + """Looks up and returns the value of a SAX2 property. +""" + def setProperty(self, name: str, value: object) -> None: + """Sets the value of a SAX2 property. +""" class IncrementalParser(XMLReader): + """This interface adds three extra methods to the XMLReader +interface that allow XML parsers to support incremental +parsing. Support for this interface is optional, since not all +underlying XML parsers support this functionality. + +When the parser is instantiated it is ready to begin accepting +data from the feed method immediately. After parsing has been +finished with a call to close the reset method must be called to +make the parser ready to accept new data, either from feed or +using the parse method. + +Note that these methods must _not_ be called during parsing, that +is, after parse has been called and before it returns. + +By default, the class also implements the parse method of the XMLReader +interface using the feed, close and reset methods of the +IncrementalParser interface as a convenience to SAX 2.0 driver +writers. +""" def __init__(self, bufsize: int = 65536) -> None: ... def parse(self, source: InputSource | _Source) -> None: ... - def feed(self, data: str | ReadableBuffer) -> None: ... - def prepareParser(self, source: InputSource) -> None: ... - def close(self) -> None: ... - def reset(self) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: + """This method gives the raw XML data in the data parameter to +the parser and makes it parse the data, emitting the +corresponding events. It is allowed for XML constructs to be +split across several calls to feed. + +feed may raise SAXException. +""" + def prepareParser(self, source: InputSource) -> None: + """This method is called by the parse implementation to allow +the SAX 2.0 driver to prepare itself for parsing. +""" + def close(self) -> None: + """This method is called when the entire XML document has been +passed to the parser through the feed method, to notify the +parser that there are no more data. This allows the parser to +do the final checks on the document and empty the internal +data buffer. + +The parser will not be ready to parse another document until +the reset method has been called. + +close may raise SAXException. +""" + def reset(self) -> None: + """This method is called after close has been called to reset +the parser so that it is ready to parse new documents. The +results of calling parse or feed after close without calling +reset are undefined. +""" class Locator: - def getColumnNumber(self) -> int | None: ... - def getLineNumber(self) -> int | None: ... - def getPublicId(self) -> str | None: ... - def getSystemId(self) -> str | None: ... + """Interface for associating a SAX event with a document +location. A locator object will return valid results only during +calls to DocumentHandler methods; at any other time, the +results are unpredictable. +""" + def getColumnNumber(self) -> int | None: + """Return the column number where the current event ends. 
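A sketch of the feed/close protocol described above, using the expat driver returned by xml.sax.make_parser(), which implements IncrementalParser:

    import xml.sax
    from xml.sax.handler import ContentHandler

    class ElementCounter(ContentHandler):
        def __init__(self) -> None:
            super().__init__()
            self.count = 0

        def startElement(self, name, attrs) -> None:
            self.count += 1

    parser = xml.sax.make_parser()          # expat-based IncrementalParser
    handler = ElementCounter()
    parser.setContentHandler(handler)
    for chunk in (b"<root><a/>", b"<b/></root>"):   # data may be split arbitrarily
        parser.feed(chunk)
    parser.close()                          # final checks; call reset() before reuse
    print(handler.count)                    # 3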
+""" + def getLineNumber(self) -> int | None: + """Return the line number where the current event ends. +""" + def getPublicId(self) -> str | None: + """Return the public identifier for the current event. +""" + def getSystemId(self) -> str | None: + """Return the system identifier for the current event. +""" class InputSource: + """Encapsulation of the information needed by the XMLReader to +read entities. + +This class may include information about the public identifier, +system identifier, byte stream (possibly with character encoding +information) and/or the character stream of an entity. + +Applications will create objects of this class for use in the +XMLReader.parse method and for returning from +EntityResolver.resolveEntity. + +An InputSource belongs to the application, the XMLReader is not +allowed to modify InputSource objects passed to it from the +application, although it may make copies and modify those. +""" def __init__(self, system_id: str | None = None) -> None: ... - def setPublicId(self, public_id: str | None) -> None: ... - def getPublicId(self) -> str | None: ... - def setSystemId(self, system_id: str | None) -> None: ... - def getSystemId(self) -> str | None: ... - def setEncoding(self, encoding: str | None) -> None: ... - def getEncoding(self) -> str | None: ... - def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: ... - def getByteStream(self) -> _SupportsReadClose[bytes] | None: ... - def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: ... - def getCharacterStream(self) -> _SupportsReadClose[str] | None: ... + def setPublicId(self, public_id: str | None) -> None: + """Sets the public identifier of this InputSource. +""" + def getPublicId(self) -> str | None: + """Returns the public identifier of this InputSource. +""" + def setSystemId(self, system_id: str | None) -> None: + """Sets the system identifier of this InputSource. +""" + def getSystemId(self) -> str | None: + """Returns the system identifier of this InputSource. +""" + def setEncoding(self, encoding: str | None) -> None: + """Sets the character encoding of this InputSource. + +The encoding must be a string acceptable for an XML encoding +declaration (see section 4.3.3 of the XML recommendation). + +The encoding attribute of the InputSource is ignored if the +InputSource also contains a character stream. +""" + def getEncoding(self) -> str | None: + """Get the character encoding of this InputSource. +""" + def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: + """Set the byte stream (a Python file-like object which does +not perform byte-to-character conversion) for this input +source. + +The SAX parser will ignore this if there is also a character +stream specified, but it will use a byte stream in preference +to opening a URI connection itself. + +If the application knows the character encoding of the byte +stream, it should set it with the setEncoding method. +""" + def getByteStream(self) -> _SupportsReadClose[bytes] | None: + """Get the byte stream for this input source. + +The getEncoding method will return the character encoding for +this byte stream, or None if unknown. +""" + def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: + """Set the character stream for this input source. (The stream +must be a Python 2.0 Unicode-wrapped file-like that performs +conversion to Unicode strings.) 
+ +If there is a character stream specified, the SAX parser will +ignore any byte stream and will not attempt to open a URI +connection to the system identifier. +""" + def getCharacterStream(self) -> _SupportsReadClose[str] | None: + """Get the character stream for this input source. +""" _AttrKey = TypeVar("_AttrKey", default=str) class AttributesImpl(Generic[_AttrKey]): - def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: ... + def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: + """Non-NS-aware implementation. + +attrs should be of the form {name : value}. +""" def getLength(self) -> int: ... def getType(self, name: str) -> str: ... def getValue(self, name: _AttrKey) -> str: ... @@ -75,7 +244,12 @@ class AttributesImpl(Generic[_AttrKey]): _NSName: TypeAlias = tuple[str | None, str] class AttributesNSImpl(AttributesImpl[_NSName]): - def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: ... + def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: + """NS-aware implementation. + +attrs should be of the form {(ns_uri, lname): value, ...}. +qnames of the form {(ns_uri, lname): qname, ...}. +""" def getValue(self, name: _NSName) -> str: ... def getNameByQName(self, name: str) -> _NSName: ... def getQNameByName(self, name: _NSName) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi index 42420ee85848f..f4893c8d8079d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi @@ -1,3 +1,44 @@ +""" +An XML-RPC client interface for Python. + +The marshalling and response parser code can also be used to +implement XML-RPC servers. + +Exported exceptions: + + Error Base class for client errors + ProtocolError Indicates an HTTP protocol error + ResponseError Indicates a broken response package + Fault Indicates an XML-RPC fault package + +Exported classes: + + ServerProxy Represents a logical connection to an XML-RPC server + + MultiCall Executor of boxcared xmlrpc requests + DateTime dateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate a "dateTime.iso8601" + XML-RPC value + Binary binary data wrapper + + Marshaller Generate an XML-RPC params chunk from a Python data structure + Unmarshaller Unmarshal an XML-RPC response from incoming XML event message + Transport Handles an HTTP transaction to an XML-RPC server + SafeTransport Handles an HTTPS transaction to an XML-RPC server + +Exported constants: + + (none) + +Exported functions: + + getparser Create instance of the fastest available parser & attach + to an unmarshalling object + dumps Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + loads Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). +""" import gzip import http.client import time @@ -52,18 +93,26 @@ METHOD_NOT_FOUND: Final[int] # undocumented INVALID_METHOD_PARAMS: Final[int] # undocumented INTERNAL_ERROR: Final[int] # undocumented -class Error(Exception): ... +class Error(Exception): + """Base class for client errors. +""" class ProtocolError(Error): + """Indicates an HTTP protocol error. +""" url: str errcode: int errmsg: str headers: dict[str, str] def __init__(self, url: str, errcode: int, errmsg: str, headers: dict[str, str]) -> None: ... -class ResponseError(Error): ... 
+class ResponseError(Error): + """Indicates a broken response package. +""" class Fault(Error): + """Indicates an XML-RPC fault package. +""" faultCode: int faultString: str def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... @@ -75,6 +124,10 @@ def _iso8601_format(value: datetime) -> str: ... # undocumented def _strftime(value: _XMLDate) -> str: ... # undocumented class DateTime: + """DateTime wrapper for an ISO 8601 string or time tuple or +localtime integer value to generate 'dateTime.iso8601' XML-RPC +value. +""" value: str # undocumented def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -92,6 +145,8 @@ def _datetime(data: Any) -> DateTime: ... # undocumented def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: + """Wrapper for binary data. +""" data: bytes def __init__(self, data: bytes | bytearray | None = None) -> None: ... def decode(self, data: ReadableBuffer) -> None: ... @@ -111,6 +166,14 @@ class ExpatParser: # undocumented _WriteCallback: TypeAlias = Callable[[str], object] class Marshaller: + """Generate an XML-RPC params chunk from a Python data structure. + +Create a Marshaller instance for each set of parameters, and use +the "dumps" method to convert your data (represented as a tuple) +to an XML-RPC params chunk. To write a fault response, pass a +Fault instance instead. You may prefer to use the "dumps" module +function for this purpose. +""" dispatch: dict[type[_Marshallable] | Literal["_arbitrary_instance"], Callable[[Marshaller, Any, _WriteCallback], None]] memo: dict[Any, None] data: None @@ -134,6 +197,13 @@ class Marshaller: def dump_instance(self, value: object, write: _WriteCallback) -> None: ... class Unmarshaller: + """Unmarshal an XML-RPC response, based on incoming XML event +messages (start, data, end). Call close() to get the resulting +data structure. + +Note that this reader is fairly tolerant, and gladly accepts bogus +XML-RPC data without complaining (but not bogus XML). +""" dispatch: dict[str, Callable[[Unmarshaller, str], None]] _type: str | None @@ -177,11 +247,29 @@ class _MultiCallMethod: # undocumented def __call__(self, *args: _Marshallable) -> None: ... class MultiCallIterator: # undocumented + """Iterates over the results of a multicall. Exceptions are +raised in response to xmlrpc faults. +""" results: list[list[_Marshallable]] def __init__(self, results: list[list[_Marshallable]]) -> None: ... def __getitem__(self, i: int) -> _Marshallable: ... class MultiCall: + """server -> an object used to boxcar method calls + +server should be a ServerProxy object. + +Methods can be added to the MultiCall using normal +method call syntax e.g.: + +multicall = MultiCall(server_proxy) +multicall.add(2,3) +multicall.get_address("Guido") + +To execute the multicall, call the MultiCall object e.g.: + +add_result, address = multicall() +""" __server: ServerProxy __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... @@ -193,21 +281,65 @@ FastMarshaller: Marshaller | None FastParser: ExpatParser | None FastUnmarshaller: Unmarshaller | None -def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... 
+def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: + """getparser() -> parser, unmarshaller + +Create an instance of the fastest available parser, and attach it +to an unmarshalling object. Return both objects. +""" def dumps( params: Fault | tuple[_Marshallable, ...], methodname: str | None = None, methodresponse: bool | None = None, encoding: str | None = None, allow_none: bool = False, -) -> str: ... +) -> str: + """data [,options] -> marshalled data + +Convert an argument tuple or a Fault instance to an XML-RPC +request (or response, if the methodresponse option is used). + +In addition to the data object, the following options can be given +as keyword arguments: + + methodname: the method name for a methodCall packet + + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). + + encoding: the packet encoding (default is UTF-8) + +All byte strings in the data structure are assumed to use the +packet encoding. Unicode strings are automatically converted, +where necessary. +""" def loads( data: str | ReadableBuffer, use_datetime: bool = False, use_builtin_types: bool = False -) -> tuple[tuple[_Marshallable, ...], str | None]: ... -def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented -def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented +) -> tuple[tuple[_Marshallable, ...], str | None]: + """data -> unmarshalled data, method name + +Convert an XML-RPC packet to unmarshalled data plus a method +name (None if not present). + +If the XML-RPC packet represents a fault condition, this function +raises a Fault exception. +""" +def gzip_encode(data: ReadableBuffer) -> bytes: # undocumented + """data -> gzip encoded data + +Encode data using the gzip content encoding as described in RFC 1952 +""" +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: # undocumented + """gzip encoded data -> unencoded data + +Decode data using the gzip content encoding as described in RFC 1952 +""" class GzipDecodedResponse(gzip.GzipFile): # undocumented + """a file-like object to decode a response encoded with the gzip +method, as described in RFC 1952. +""" io: BytesIO def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... @@ -219,6 +351,8 @@ class _Method: # undocumented def __call__(self, *args: _Marshallable) -> _Marshallable: ... class Transport: + """Handles an HTTP transaction to an XML-RPC server. +""" user_agent: str accept_gzip_encoding: bool encode_threshold: int | None @@ -250,6 +384,8 @@ class Transport: def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server. +""" def __init__( self, use_datetime: bool = False, @@ -261,6 +397,26 @@ class SafeTransport(Transport): def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... class ServerProxy: + """uri [,options] -> a logical connection to an XML-RPC server + +uri is the connection point on the server, given as +scheme://host/target. + +The standard implementation always supports the "http" scheme. If +SSL socket support is available (Python 2.0), it also supports +"https". + +If the target part and the slash preceding it are both omitted, +"/RPC2" is assumed. 
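The dumps/loads helpers described above can be exercised without any server; a small round trip, assuming nothing beyond the documented signatures:

    from xmlrpc.client import Fault, dumps, loads

    request = dumps((2, 3), methodname="add")      # marshal a methodCall packet
    params, method = loads(request)                # -> ((2, 3), 'add')

    response = dumps(Fault(42, "boom"))            # a Fault marshals to a methodResponse
    try:
        loads(response)
    except Fault as exc:
        print(exc.faultCode, exc.faultString)      # 42 boom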
+ +The following options can be given as keyword arguments: + + transport: a transport factory + encoding: the request encoding (default is UTF-8) + +All 8-bit strings passed to the server proxy are assumed to use +the given encoding. +""" __host: str __handler: str __transport: Transport @@ -283,7 +439,10 @@ class ServerProxy: ) -> None: ... def __getattr__(self, name: str) -> _Method: ... @overload - def __call__(self, attr: Literal["close"]) -> Callable[[], None]: ... + def __call__(self, attr: Literal["close"]) -> Callable[[], None]: + """A workaround to get special attributes on the ServerProxy +without interfering with the magic __getattr__ +""" @overload def __call__(self, attr: Literal["transport"]) -> Transport: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi index 286aaf980fbf5..ca4e7c5b72e57 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi @@ -1,3 +1,105 @@ +"""XML-RPC Servers. + +This module can be used to create simple XML-RPC servers +by creating a server and either installing functions, a +class instance, or by extending the SimpleXMLRPCServer +class. + +It can also be used to handle XML-RPC requests in a CGI +environment using CGIXMLRPCRequestHandler. + +The Doc* classes can be used to create XML-RPC servers that +serve pydoc-style documentation in response to HTTP +GET requests. This documentation is dynamically generated +based on the functions and methods registered with the +server. + +A list of possible usage patterns follows: + +1. Install functions: + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_function(pow) +server.register_function(lambda x,y: x+y, 'add') +server.serve_forever() + +2. Install an instance: + +class MyFuncs: + def __init__(self): + # make all of the sys functions available through sys.func_name + import sys + self.sys = sys + def _listMethods(self): + # implement this method so that system.listMethods + # knows to advertise the sys methods + return list_public_methods(self) + \\ + ['sys.' + method for method in list_public_methods(self.sys)] + def pow(self, x, y): return pow(x, y) + def add(self, x, y) : return x + y + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(MyFuncs()) +server.serve_forever() + +3. Install an instance with custom dispatch method: + +class Math: + def _listMethods(self): + # this method must be present for system.listMethods + # to work + return ['add', 'pow'] + def _methodHelp(self, method): + # this method must be present for system.methodHelp + # to work + if method == 'add': + return "add(2,3) => 5" + elif method == 'pow': + return "pow(x, y[, z]) => number" + else: + # By convention, return empty + # string if no help is available + return "" + def _dispatch(self, method, params): + if method == 'pow': + return pow(*params) + elif method == 'add': + return params[0] + params[1] + else: + raise ValueError('bad method') + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(Math()) +server.serve_forever() + +4. 
Subclass SimpleXMLRPCServer: + +class MathServer(SimpleXMLRPCServer): + def _dispatch(self, method, params): + try: + # We are forcing the 'export_' prefix on methods that are + # callable through XML-RPC to prevent potential security + # problems + func = getattr(self, 'export_' + method) + except AttributeError: + raise Exception('method "%s" is not supported' % method) + else: + return func(*params) + + def export_add(self, x, y): + return x + y + +server = MathServer(("localhost", 8000)) +server.serve_forever() + +5. CGI script: + +server = CGIXMLRPCRequestHandler() +server.register_function(pow) +server.handle_request() +""" import http.server import pydoc import socketserver @@ -39,43 +141,177 @@ _DispatchProtocol: TypeAlias = ( _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN ) -def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... # undocumented -def list_public_methods(obj: Any) -> list[str]: ... # undocumented +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: # undocumented + """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d + +Resolves a dotted attribute name to an object. Raises +an AttributeError if any attribute in the chain starts with a '_'. + +If the optional allow_dotted_names argument is false, dots are not +supported and this function operates similar to getattr(obj, attr). +""" +def list_public_methods(obj: Any) -> list[str]: # undocumented + """Returns a list of attribute strings, found in the specified +object, which represent callable attributes +""" class SimpleXMLRPCDispatcher: # undocumented + """Mix-in class that dispatches XML-RPC requests. + +This class is used to register XML-RPC method handlers +and then to dispatch them. This class doesn't need to be +instanced directly when used by SimpleXMLRPCServer but it +can be instanced when used by the MultiPathXMLRPCServer +""" funcs: dict[str, _DispatchProtocol] instance: Any | None allow_none: bool encoding: str use_builtin_types: bool def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... - def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... - def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... - def register_introspection_functions(self) -> None: ... - def register_multicall_functions(self) -> None: ... + def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: + """Registers an instance to respond to XML-RPC requests. + +Only one instance can be installed at a time. + +If the registered instance has a _dispatch method then that +method will be called with the name of the XML-RPC method and +its parameters as a tuple +e.g. instance._dispatch('add',(2,3)) + +If the registered instance does not have a _dispatch method +then the instance will be searched to find a matching method +and, if found, will be called. Methods beginning with an '_' +are considered private and will not be called by +SimpleXMLRPCServer. + +If a registered function matches an XML-RPC request, then it +will be called instead of the registered instance. + +If the optional allow_dotted_names argument is true and the +instance does not have a _dispatch method, method names +containing dots are supported and resolved, as long as none of +the name segments start with an '_'. 
+ + *** SECURITY WARNING: *** + + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + +""" + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: + """Registers a function to respond to XML-RPC requests. + +The optional name argument can be used to set a Unicode name +for the function. +""" + def register_introspection_functions(self) -> None: + """Registers the XML-RPC introspection methods in the system +namespace. + +see http://xmlrpc.usefulinc.com/doc/reserved.html +""" + def register_multicall_functions(self) -> None: + """Registers the XML-RPC multicall method in the system +namespace. + +see http://www.xmlrpc.com/discuss/msgReader$1208 +""" def _marshaled_dispatch( self, data: str | ReadableBuffer, dispatch_method: Callable[[str, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, path: Any | None = None, - ) -> str: ... # undocumented - def system_listMethods(self) -> list[str]: ... # undocumented - def system_methodSignature(self, method_name: str) -> str: ... # undocumented - def system_methodHelp(self, method_name: str) -> str: ... # undocumented - def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: ... # undocumented - def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: ... # undocumented + ) -> str: # undocumented + """Dispatches an XML-RPC method from marshalled (XML) data. + +XML-RPC methods are dispatched from the marshalled (XML) data +using the _dispatch method and the result is returned as +marshalled data. For backwards compatibility, a dispatch +function can be provided as an argument (see comment in +SimpleXMLRPCRequestHandler.do_POST) but overriding the +existing method through subclassing is the preferred means +of changing method dispatch behavior. +""" + def system_listMethods(self) -> list[str]: # undocumented + """system.listMethods() => ['add', 'subtract', 'multiple'] + +Returns a list of the methods supported by the server. +""" + def system_methodSignature(self, method_name: str) -> str: # undocumented + """system.methodSignature('add') => [double, int, int] + +Returns a list describing the signature of the method. In the +above example, the add method takes two integers as arguments +and returns a double result. + +This server does NOT support system.methodSignature. +""" + def system_methodHelp(self, method_name: str) -> str: # undocumented + """system.methodHelp('add') => "Adds two integers together" + +Returns a string containing documentation for the specified method. +""" + def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: # undocumented + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => [[4], ...] + +Allows the caller to package multiple XML-RPC calls into a single +request. + +See http://www.xmlrpc.com/discuss/msgReader$1208 +""" + def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: # undocumented + """Dispatches the XML-RPC method. + +XML-RPC calls are forwarded to a registered function that +matches the called XML-RPC method name. If no such function +exists then the call is forwarded to the registered instance, +if available. 
+ +If the registered instance has a _dispatch method then that +method will be called with the name of the XML-RPC method and +its parameters as a tuple +e.g. instance._dispatch('add',(2,3)) + +If the registered instance does not have a _dispatch method +then the instance will be searched to find a matching method +and, if found, will be called. + +Methods beginning with an '_' are considered private and will +not be called. +""" class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): + """Simple XML-RPC request handler class. + +Handles all HTTP POST requests and attempts to decode them as +XML-RPC requests. +""" rpc_paths: ClassVar[tuple[str, ...]] encode_threshold: int # undocumented aepattern: Pattern[str] # undocumented def accept_encodings(self) -> dict[str, float]: ... def is_rpc_path_valid(self) -> bool: ... - def do_POST(self) -> None: ... + def do_POST(self) -> None: + """Handles the HTTP POST request. + +Attempts to interpret all HTTP POST requests as XML-RPC calls, +which are forwarded to the server's _dispatch method for handling. +""" def decode_request_content(self, data: bytes) -> bytes | None: ... def report_404(self) -> None: ... class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): + """Simple XML-RPC server. + +Simple XML-RPC server that allows functions and a single instance +to be installed to handle requests. The default implementation +attempts to dispatch XML-RPC calls to the functions or instance +installed in the server. Override the _dispatch method inherited +from SimpleXMLRPCDispatcher to change this behavior. +""" _send_traceback_handler: bool def __init__( self, @@ -89,6 +325,13 @@ class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): ) -> None: ... class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented + """Multipath XML-RPC Server +This specialization of SimpleXMLRPCServer allows the user to create +multiple Dispatcher instances and assign them to different +HTTP request paths. This makes it possible to run two or more +'virtual XML-RPC servers' at the same port. +Make sure that the requestHandler accepts the paths in question. +""" dispatchers: dict[str, SimpleXMLRPCDispatcher] def __init__( self, @@ -104,12 +347,29 @@ class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + """Simple handler for XML-RPC data passed through CGI. +""" def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... - def handle_xmlrpc(self, request_text: str) -> None: ... - def handle_get(self) -> None: ... - def handle_request(self, request_text: str | None = None) -> None: ... + def handle_xmlrpc(self, request_text: str) -> None: + """Handle a single XML-RPC request +""" + def handle_get(self) -> None: + """Handle a single HTTP GET request. + +Default implementation indicates an error because +XML-RPC uses the POST method. +""" + def handle_request(self, request_text: str | None = None) -> None: + """Handle a single XML-RPC request passed through a CGI post method. + +If no XML data is given then it is read from stdin. The resulting +XML-RPC response is printed to stdout along with the correct HTTP +headers. 
+""" class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented + """Class used to generate pydoc HTML document for a server +""" def docroutine( # type: ignore[override] self, object: object, @@ -119,22 +379,65 @@ class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented classes: Mapping[str, str] = {}, methods: Mapping[str, str] = {}, cl: type | None = None, - ) -> str: ... - def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... + ) -> str: + """Produce HTML documentation for a function or method object. +""" + def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: + """Produce HTML documentation for an XML-RPC server. +""" class XMLRPCDocGenerator: # undocumented + """Generates documentation for an XML-RPC server. + +This class is designed as mix-in and should not +be constructed directly. +""" server_name: str server_documentation: str server_title: str - def set_server_title(self, server_title: str) -> None: ... - def set_server_name(self, server_name: str) -> None: ... - def set_server_documentation(self, server_documentation: str) -> None: ... - def generate_html_documentation(self) -> str: ... + def set_server_title(self, server_title: str) -> None: + """Set the HTML title of the generated server documentation +""" + def set_server_name(self, server_name: str) -> None: + """Set the name of the generated HTML server documentation +""" + def set_server_documentation(self, server_documentation: str) -> None: + """Set the documentation string for the entire server. +""" + def generate_html_documentation(self) -> str: + """generate_html_documentation() => html documentation for the server + +Generates HTML documentation for the server using introspection for +installed functions and instances that do not implement the +_dispatch method. Alternatively, instances can choose to implement +the _get_method_argstring(method_name) method to provide the +argument string used in the documentation and the +_methodHelp(method_name) method to provide the help text used +in the documentation. +""" class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): - def do_GET(self) -> None: ... + """XML-RPC and documentation request handler class. + +Handles all HTTP POST requests and attempts to decode them as +XML-RPC requests. + +Handles all HTTP GET requests and interprets them as requests +for documentation. +""" + def do_GET(self) -> None: + """Handles the HTTP GET request. + +Interpret all HTTP GET requests as requests for server +documentation. +""" class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): + """XML-RPC and HTML documentation server. + +Adds the ability to serve server documentation to the capabilities +of SimpleXMLRPCServer. +""" def __init__( self, addr: tuple[str, int], @@ -147,4 +450,7 @@ class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): ) -> None: ... class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): + """Handler for XML-RPC data and documentation requests passed through +CGI +""" def __init__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi index 78a50b85f405a..f64783497f358 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi @@ -1,3 +1,5 @@ +"""This is a template module just for instruction. 
+""" import sys from typing import Any, ClassVar, final @@ -5,12 +7,22 @@ class Str(str): ... @final class Xxo: - def demo(self) -> None: ... + """A class that explicitly stores attributes in an internal dict +""" + def demo(self) -> None: + """demo(o) -> o +""" if sys.version_info >= (3, 11) and sys.platform != "win32": x_exports: int -def foo(i: int, j: int, /) -> Any: ... -def new() -> Xxo: ... +def foo(i: int, j: int, /) -> Any: + """foo(i,j) + +Return the sum of i and j. +""" +def new() -> Xxo: + """new() -> new Xx object +""" if sys.version_info >= (3, 10): class Error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi index c7cf1704b1359..5bbb824fe0c87 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi @@ -16,5 +16,23 @@ def create_archive( main: str | None = None, filter: Callable[[Path], bool] | None = None, compressed: bool = False, -) -> None: ... +) -> None: + """Create an application archive from SOURCE. + +The SOURCE can be the name of a directory, or a filename or a file-like +object referring to an existing archive. + +The content of SOURCE is packed into an application archive in TARGET, +which can be a filename or a file-like object. If SOURCE is a directory, +TARGET can be omitted and will default to the name of SOURCE with .pyz +appended. + +The created application archive will have a shebang line specifying +that it should run with INTERPRETER (there will be no shebang line if +INTERPRETER is None), and a __main__.py which runs MAIN (if MAIN is +not specified, an existing __main__.py will be used). It is an error +to specify MAIN for anything other than a directory source with no +__main__.py, and it is an error to omit MAIN if the directory has no +__main__.py. +""" def get_interpreter(archive: _Path) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi index e573d04dba051..bdceff72da9ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi @@ -1,3 +1,8 @@ +""" +Read and write ZIP files. + +XXX references to utf-8 need further investigation. +""" import io import sys from _typeshed import SizedBuffer, StrOrBytesPath, StrPath @@ -39,7 +44,11 @@ class BadZipFile(Exception): ... BadZipfile = BadZipFile error = BadZipfile -class LargeZipFile(Exception): ... +class LargeZipFile(Exception): + """ +Raised when writing a zipfile, the zipfile requires ZIP64 extensions +and those extensions are disabled. +""" @type_check_only class _ZipStream(Protocol): @@ -61,6 +70,9 @@ class _ClosableZipStream(_ZipStream, Protocol): def close(self) -> object: ... class ZipExtFile(io.BufferedIOBase): + """File-like object for reading an archive member. +Is returned by ZipFile.open(). +""" MAX_N: int MIN_READ_SIZE: int MAX_SEEK_READ: int @@ -90,10 +102,21 @@ class ZipExtFile(io.BufferedIOBase): pwd: bytes | None = None, close_fileobj: Literal[False] = False, ) -> None: ... - def read(self, n: int | None = -1) -> bytes: ... - def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] - def peek(self, n: int = 1) -> bytes: ... - def read1(self, n: int | None) -> bytes: ... # type: ignore[override] + def read(self, n: int | None = -1) -> bytes: + """Read and return up to n bytes. 
+If the argument is omitted, None, or negative, data is read and returned until EOF is reached. +""" + def readline(self, limit: int = -1) -> bytes: # type: ignore[override] + """Read and return a line from the stream. + +If limit is specified, at most limit bytes will be read. +""" + def peek(self, n: int = 1) -> bytes: + """Returns buffered bytes without advancing the position. +""" + def read1(self, n: int | None) -> bytes: # type: ignore[override] + """Read up to n bytes with at most one read() system call. +""" def seek(self, offset: int, whence: int = 0) -> int: ... @type_check_only @@ -119,6 +142,31 @@ class _ZipWritable(Protocol): def write(self, b: bytes, /) -> int: ... class ZipFile: + """Class with methods to open, read, write, close, list zip files. + +z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True, + compresslevel=None) + +file: Either the path to the file, or a file-like object. + If it is a path, the file will be opened and closed by ZipFile. +mode: The mode can be either read 'r', write 'w', exclusive create 'x', + or append 'a'. +compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), + ZIP_BZIP2 (requires bz2), ZIP_LZMA (requires lzma), or + ZIP_ZSTANDARD (requires compression.zstd). +allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. +compresslevel: None (default for the given compression type) or an integer + specifying the level to pass to the compressor. + When using ZIP_STORED or ZIP_LZMA this keyword has no effect. + When using ZIP_DEFLATED integers 0 through 9 are accepted. + When using ZIP_BZIP2 integers 1 through 9 are accepted. + When using ZIP_ZSTANDARD integers -7 though 22 are common, + see the CompressionParameter enum in compression.zstd for + details. + +""" filename: str | None debug: int comment: bytes @@ -143,7 +191,10 @@ class ZipFile: *, strict_timestamps: bool = True, metadata_encoding: str | None = None, - ) -> None: ... + ) -> None: + """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', +or append 'a'. +""" # metadata_encoding is only allowed for read mode @overload def __init__( @@ -192,7 +243,10 @@ class ZipFile: compresslevel: int | None = None, *, strict_timestamps: bool = True, - ) -> None: ... + ) -> None: + """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', + or append 'a'. +""" @overload def __init__( self, @@ -231,47 +285,126 @@ class ZipFile: def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def close(self) -> None: ... - def getinfo(self, name: str) -> ZipInfo: ... - def infolist(self) -> list[ZipInfo]: ... - def namelist(self) -> list[str]: ... + def close(self) -> None: + """Close the file, and for mode 'w', 'x' and 'a' write the ending +records. +""" + def getinfo(self, name: str) -> ZipInfo: + """Return the instance of ZipInfo given 'name'. +""" + def infolist(self) -> list[ZipInfo]: + """Return a list of class ZipInfo instances for files in the +archive. +""" + def namelist(self) -> list[str]: + """Return a list of file names in the archive. +""" def open( self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False - ) -> IO[bytes]: ... - def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... + ) -> IO[bytes]: + """Return file-like object for 'name'. 
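A minimal round trip through the ZipFile methods stubbed here (writestr, namelist, open, read); the file names are illustrative:

    import zipfile

    with zipfile.ZipFile("example.zip", mode="w",
                         compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr("hello.txt", "hello world")    # str data is encoded as UTF-8

    with zipfile.ZipFile("example.zip") as zf:
        print(zf.namelist())                       # ['hello.txt']
        with zf.open("hello.txt") as member:       # ZipExtFile, mode 'r'
            print(member.read().decode())          # hello world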
+ +name is a string for the file name within the ZIP file, or a ZipInfo +object. + +mode should be 'r' to read a file already in the ZIP file, or 'w' to +write to a file newly added to the archive. + +pwd is the password to decrypt files (only used for reading). + +When writing, if the file size is not known in advance but may exceed +2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large +files. If the size is known in advance, it is best to pass a ZipInfo +instance for name, with zinfo.file_size set. +""" + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: + """Extract a member from the archive to the current working directory, +using its full name. Its file information is extracted as accurately +as possible. 'member' may be a filename or a ZipInfo object. You can +specify a different directory using 'path'. You can specify the +password to decrypt the file using 'pwd'. +""" def extractall( self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None - ) -> None: ... - def printdir(self, file: _Writer | None = None) -> None: ... - def setpassword(self, pwd: bytes) -> None: ... - def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... - def testzip(self) -> str | None: ... + ) -> None: + """Extract all members from the archive to the current working +directory. 'path' specifies a different directory to extract to. +'members' is optional and must be a subset of the list returned +by namelist(). You can specify the password to decrypt all files +using 'pwd'. +""" + def printdir(self, file: _Writer | None = None) -> None: + """Print a table of contents for the zip file. +""" + def setpassword(self, pwd: bytes) -> None: + """Set default password for encrypted files. +""" + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: + """Return file bytes for name. 'pwd' is the password to decrypt +encrypted files. +""" + def testzip(self) -> str | None: + """Read all the files and check the CRC. + +Return None if all files could be read successfully, or the name +of the offending file otherwise. +""" def write( self, filename: StrPath, arcname: StrPath | None = None, compress_type: int | None = None, compresslevel: int | None = None, - ) -> None: ... + ) -> None: + """Put the bytes from filename into the archive under the name +arcname. +""" def writestr( self, zinfo_or_arcname: str | ZipInfo, data: SizedBuffer | str, compress_type: int | None = None, compresslevel: int | None = None, - ) -> None: ... + ) -> None: + """Write a file into the archive. The contents is 'data', which +may be either a 'str' or a 'bytes' instance; if it is a 'str', +it is encoded as UTF-8 first. +'zinfo_or_arcname' is either a ZipInfo instance or +the name of the file in the archive. +""" if sys.version_info >= (3, 11): - def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: + """Creates a directory inside the zip archive. +""" - def __del__(self) -> None: ... + def __del__(self) -> None: + """Call the "close()" method in case the user forgot. +""" class PyZipFile(ZipFile): + """Class to create ZIP archives with Python library files and packages. +""" def __init__( self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 ) -> None: ... 
- def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: + """Add all files from "pathname" to the ZIP archive. + +If pathname is a package directory, search the directory and +all package subdirectories recursively for all *.py and enter +the modules into the archive. If pathname is a plain +directory, listdir *.py and enter all modules. Else, pathname +must be a Python *.py file and the module will be put into the +archive. Added modules are always module.pyc. +This method will compile the module.py into module.pyc if +necessary. +If filterfunc(pathname) is given, it is called with every argument. +When it is False, the file or directory is skipped. +""" class ZipInfo: + """Class with attributes describing each file in the ZIP archive. +""" __slots__ = ( "orig_filename", "filename", @@ -318,27 +451,140 @@ class ZipInfo: def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ... @classmethod - def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... - def is_dir(self) -> bool: ... - def FileHeader(self, zip64: bool | None = None) -> bytes: ... + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: + """Construct an appropriate ZipInfo for a file on the filesystem. + +filename should be the path to a file or directory on the filesystem. + +arcname is the name which it will have within the archive (by default, +this will be the same as filename, but without a drive letter and with +leading path separators removed). +""" + def is_dir(self) -> bool: + """Return True if this archive member is a directory. +""" + def FileHeader(self, zip64: bool | None = None) -> bytes: + """Return the per-file header as a bytes object. + +When the optional zip64 arg is None rather than a bool, we will +decide based upon the file_size and compress_size, if known, +False otherwise. +""" if sys.version_info >= (3, 12): from zipfile._path import CompleteDirs as CompleteDirs, Path as Path else: class CompleteDirs(ZipFile): - def resolve_dir(self, name: str) -> str: ... + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + """ + def resolve_dir(self, name: str) -> str: + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ @overload @classmethod - def make(cls, source: ZipFile) -> CompleteDirs: ... + def make(cls, source: ZipFile) -> CompleteDirs: + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. 
+ + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text() + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ root: CompleteDirs at: str - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ @property def name(self) -> str: ... @property @@ -365,7 +611,12 @@ else: write_through: bool = ..., *, pwd: bytes | None = None, - ) -> TextIOWrapper: ... + ) -> TextIOWrapper: + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). + """ @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... @@ -393,7 +644,11 @@ else: def __truediv__(self, add: StrPath) -> Path: ... -def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... +def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: + """Quickly see if a file is a ZIP file by checking the magic number. + +The filename argument may be a file or file-like object too. +""" ZIP64_LIMIT: Final[int] ZIP_FILECOUNT_LIMIT: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi index 4c7b39ec4c6ca..2b60d6d78eca7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi @@ -1,3 +1,11 @@ +""" +A Path-like interface for zipfiles. + +This codebase is shared between zipfile.Path in the stdlib +and zipp in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -13,26 +21,160 @@ if sys.version_info >= (3, 12): __all__ = ["Path"] class InitializedState: + """ +Mix-in to save the initialization state for pickling. +""" def __init__(self, *args: object, **kwargs: object) -> None: ... def __getstate__(self) -> tuple[list[object], dict[object, object]]: ... def __setstate__(self, state: Sequence[tuple[list[object], dict[object, object]]]) -> None: ... class CompleteDirs(InitializedState, ZipFile): - def resolve_dir(self, name: str) -> str: ... + """ +A ZipFile subclass that ensures that implied directories +are always included in the namelist. 
+ +>>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt'])) +['foo/', 'foo/bar/'] +>>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/'])) +['foo/'] +""" + def resolve_dir(self, name: str) -> str: + """ +If the name represents a directory, return that name +as a directory (with the trailing slash). +""" @overload @classmethod - def make(cls, source: ZipFile) -> CompleteDirs: ... + def make(cls, source: ZipFile) -> CompleteDirs: + """ +Given a source (filename or zipfile), return an +appropriate CompleteDirs subclass. +""" @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... if sys.version_info >= (3, 13): @classmethod - def inject(cls, zf: _ZF) -> _ZF: ... + def inject(cls, zf: _ZF) -> _ZF: + """ +Given a writable zip file zf, inject directory entries for +any directories implied by the presence of children. +""" class Path: + """ +A :class:`importlib.resources.abc.Traversable` interface for zip files. + +Implements many of the features users enjoy from +:class:`pathlib.Path`. + +Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + +>>> data = io.BytesIO() +>>> zf = ZipFile(data, 'w') +>>> zf.writestr('a.txt', 'content of a') +>>> zf.writestr('b/c.txt', 'content of c') +>>> zf.writestr('b/d/e.txt', 'content of e') +>>> zf.filename = 'mem/abcde.zip' + +Path accepts the zipfile object itself or a filename + +>>> path = Path(zf) + +From there, several path operations are available. + +Directory iteration (including the zip file itself): + +>>> a, b = path.iterdir() +>>> a +Path('mem/abcde.zip', 'a.txt') +>>> b +Path('mem/abcde.zip', 'b/') + +name property: + +>>> b.name +'b' + +join with divide operator: + +>>> c = b / 'c.txt' +>>> c +Path('mem/abcde.zip', 'b/c.txt') +>>> c.name +'c.txt' + +Read text: + +>>> c.read_text(encoding='utf-8') +'content of c' + +existence: + +>>> c.exists() +True +>>> (b / 'missing.txt').exists() +False + +Coercion to string: + +>>> import os +>>> str(c).replace(os.sep, posixpath.sep) +'mem/abcde.zip/b/c.txt' + +At the root, ``name``, ``filename``, and ``parent`` +resolve to the zipfile. + +>>> str(path) +'mem/abcde.zip/' +>>> path.name +'abcde.zip' +>>> path.filename == pathlib.Path('mem/abcde.zip') +True +>>> str(path.parent) +'mem' + +If the zipfile has no filename, such attributes are not +valid and accessing them will raise an Exception. + +>>> zf.filename = None +>>> path.name +Traceback (most recent call last): +... +TypeError: ... + +>>> path.filename +Traceback (most recent call last): +... +TypeError: ... + +>>> path.parent +Traceback (most recent call last): +... +TypeError: ... + +# workaround python/cpython#106763 +>>> pass +""" root: CompleteDirs at: str - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: + """ +Construct a Path from a ZipFile or filename. + +Note: When the source is an existing ZipFile object, +its type (__class__) will be mutated to a +specialized type. If the caller wishes to retain the +original type, the caller should either create a +separate ZipFile object or pass a filename. +""" @property def name(self) -> str: ... @property @@ -56,7 +198,12 @@ if sys.version_info >= (3, 12): write_through: bool = ..., *, pwd: bytes | None = None, - ) -> TextIOWrapper: ... 
+ ) -> TextIOWrapper: + """ +Open this entry as text or binary following the semantics +of ``pathlib.Path.open()`` by passing arguments through +to io.TextIOWrapper(). +""" @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... def iterdir(self) -> Iterator[Self]: ... @@ -75,9 +222,16 @@ if sys.version_info >= (3, 12): def joinpath(self, *other: StrPath) -> Path: ... def glob(self, pattern: str) -> Iterator[Self]: ... def rglob(self, pattern: str) -> Iterator[Self]: ... - def is_symlink(self) -> Literal[False]: ... + def is_symlink(self) -> Literal[False]: + """ +Return whether this path is a symlink. +""" def relative_to(self, other: Path, *extra: StrPath) -> str: ... def match(self, path_pattern: str) -> bool: ... - def __eq__(self, other: object) -> bool: ... + def __eq__(self, other: object) -> bool: + """ +>>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo' +False +""" def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi index f6a661be8cdf4..920d18fa5b235 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi @@ -4,23 +4,96 @@ from re import Match if sys.version_info >= (3, 13): class Translator: + """ +>>> Translator('xyz') +Traceback (most recent call last): +... +AssertionError: Invalid separators + +>>> Translator('') +Traceback (most recent call last): +... +AssertionError: Invalid separators +""" if sys.platform == "win32": def __init__(self, seps: str = "\\/") -> None: ... else: def __init__(self, seps: str = "/") -> None: ... - def translate(self, pattern: str) -> str: ... - def extend(self, pattern: str) -> str: ... - def match_dirs(self, pattern: str) -> str: ... - def translate_core(self, pattern: str) -> str: ... - def replace(self, match: Match[str]) -> str: ... - def restrict_rglob(self, pattern: str) -> None: ... - def star_not_empty(self, pattern: str) -> str: ... + def translate(self, pattern: str) -> str: + """ +Given a glob pattern, produce a regex that matches it. +""" + def extend(self, pattern: str) -> str: + """ +Extend regex for pattern-wide concerns. + +Apply '(?s:)' to create a non-matching group that +matches newlines (valid on Unix). + +Append '\\z' to imply fullmatch even when match is used. +""" + def match_dirs(self, pattern: str) -> str: + """ +Ensure that zipfile.Path directory names are matched. + +zipfile.Path directory names always end in a slash. +""" + def translate_core(self, pattern: str) -> str: + """ +Given a glob pattern, produce a regex that matches it. + +>>> t = Translator() +>>> t.translate_core('*.txt').replace('\\\\\\\\', '') +'[^/]*\\\\.txt' +>>> t.translate_core('a?txt') +'a[^/]txt' +>>> t.translate_core('**/*').replace('\\\\\\\\', '') +'.*/[^/][^/]*' +""" + def replace(self, match: Match[str]) -> str: + """ +Perform the replacements for a match from :func:`separate`. +""" + def restrict_rglob(self, pattern: str) -> None: + """ +Raise ValueError if ** appears in anything but a full path segment. + +>>> Translator().translate('**foo') +Traceback (most recent call last): +... +ValueError: ** must appear alone in a path segment +""" + def star_not_empty(self, pattern: str) -> str: + """ +Ensure that * will not match an empty segment. +""" else: - def translate(pattern: str) -> str: ... 
+ def translate(pattern: str) -> str: + """ + Given a glob pattern, produce a regex that matches it. + + >>> translate('*.txt') + '[^/]*\\\\.txt' + >>> translate('a?txt') + 'a.txt' + >>> translate('**/*') + '.*/[^/]*' + """ def match_dirs(pattern: str) -> str: ... def translate_core(pattern: str) -> str: ... - def replace(match: Match[str]) -> str: ... + def replace(match: Match[str]) -> str: + """ + Perform the replacements for a match from :func:`separate`. + """ + +def separate(pattern: str) -> Iterator[Match[str]]: + """ +Separate out character sets to avoid translating their contents. -def separate(pattern: str) -> Iterator[Match[str]]: ... +>>> [m.group(0) for m in separate('*.txt')] +['*.txt'] +>>> [m.group(0) for m in separate('a[?]txt')] +['a', '[?]', 'txt'] +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi index 22af3c272759b..759ccea36f51c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi @@ -1,3 +1,14 @@ +"""zipimport provides support for importing Python modules from Zip archives. + +This module exports two objects: +- zipimporter: a class; its constructor takes a path to a Zip archive. +- ZipImportError: exception raised by zipimporter objects. It's a + subclass of ImportError, so it can be caught as ImportError, too. + +It is usually not needed to use the zipimport module explicitly; it is +used by the builtin import mechanism for sys.path items that are paths +to Zip archives. +""" import sys from _typeshed import StrOrBytesPath from importlib.machinery import ModuleSpec @@ -19,6 +30,19 @@ __all__ = ["ZipImportError", "zipimporter"] class ZipImportError(ImportError): ... class zipimporter(_LoaderBasics): + """zipimporter(archivepath) -> zipimporter object + +Create a new zipimporter instance. 'archivepath' must be a path to +a zipfile, or to a specific path inside a zipfile. For example, it can be +'/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a +valid directory inside the archive. + +'ZipImportError is raised if 'archivepath' doesn't point to a valid Zip +archive. + +The 'archive' attribute of zipimporter objects contains the name of the +zipfile targeted. +""" archive: str prefix: str if sys.version_info >= (3, 11): @@ -29,31 +53,128 @@ class zipimporter(_LoaderBasics): if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: + """find_loader(fullname, path=None) -> self, str or None. + + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, a string containing the + full path name if it's possibly a portion of a namespace package, + or None otherwise. The optional 'path' argument is ignored -- it's + there for compatibility with the importer protocol. + + Deprecated since Python 3.10. Use find_spec() instead. + """ @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... 
+ def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: + """find_module(fullname, path=None) -> self or None. + + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, or None if it wasn't. + The optional 'path' argument is ignored -- it's there for compatibility + with the importer protocol. + + Deprecated since Python 3.10. Use find_spec() instead. + """ else: - def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... - def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: + """find_loader(fullname, path=None) -> self, str or None. + + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, a string containing the + full path name if it's possibly a portion of a namespace package, + or None otherwise. The optional 'path' argument is ignored -- it's + there for compatibility with the importer protocol. + """ + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: + """find_module(fullname, path=None) -> self or None. + + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, or None if it wasn't. + The optional 'path' argument is ignored -- it's there for compatibility + with the importer protocol. + """ - def get_code(self, fullname: str) -> CodeType: ... - def get_data(self, pathname: str) -> bytes: ... - def get_filename(self, fullname: str) -> str: ... + def get_code(self, fullname: str) -> CodeType: + """get_code(fullname) -> code object. + +Return the code object for the specified module. Raise ZipImportError +if the module couldn't be imported. +""" + def get_data(self, pathname: str) -> bytes: + """get_data(pathname) -> string with file data. + +Return the data associated with 'pathname'. Raise OSError if +the file wasn't found. +""" + def get_filename(self, fullname: str) -> str: + """get_filename(fullname) -> filename string. + +Return the filename for the specified module or raise ZipImportError +if it couldn't be imported. +""" if sys.version_info >= (3, 14): - def get_resource_reader(self, fullname: str) -> ZipReader: ... # undocumented + def get_resource_reader(self, fullname: str) -> ZipReader: # undocumented + """Return the ResourceReader for a module in a zip file. +""" elif sys.version_info >= (3, 10): - def get_resource_reader(self, fullname: str) -> ZipReader | None: ... # undocumented + def get_resource_reader(self, fullname: str) -> ZipReader | None: # undocumented + """Return the ResourceReader for a module in a zip file. +""" else: - def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented + def get_resource_reader(self, fullname: str) -> ResourceReader | None: # undocumented + """Return the ResourceReader for a package in a zip file. + + If 'fullname' is a package within the zip file, return the + 'ResourceReader' object for the package. Otherwise return None. + """ + + def get_source(self, fullname: str) -> str | None: + """get_source(fullname) -> source string. - def get_source(self, fullname: str) -> str | None: ... 
- def is_package(self, fullname: str) -> bool: ... +Return the source code for the specified module. Raise ZipImportError +if the module couldn't be found, return None if the archive does +contain the module, but has no source for it. +""" + def is_package(self, fullname: str) -> bool: + """is_package(fullname) -> bool. + +Return True if the module specified by fullname is a package. +Raise ZipImportError if the module couldn't be found. +""" if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.15. Use `exec_module()` instead.") - def load_module(self, fullname: str) -> ModuleType: ... - def exec_module(self, module: ModuleType) -> None: ... - def create_module(self, spec: ModuleSpec) -> None: ... - def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... - def invalidate_caches(self) -> None: ... + def load_module(self, fullname: str) -> ModuleType: + """load_module(fullname) -> module. + +Load the module specified by 'fullname'. 'fullname' must be the +fully qualified (dotted) module name. It returns the imported +module, or raises ZipImportError if it could not be imported. + +Deprecated since Python 3.10. Use exec_module() instead. +""" + def exec_module(self, module: ModuleType) -> None: + """Execute the module. +""" + def create_module(self, spec: ModuleSpec) -> None: + """Use default semantics for module creation. +""" + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: + """Create a ModuleSpec for the specified module. + +Returns None if the module cannot be found. +""" + def invalidate_caches(self) -> None: + """Invalidates the cache of file data of the archive path. +""" else: - def load_module(self, fullname: str) -> ModuleType: ... + def load_module(self, fullname: str) -> ModuleType: + """load_module(fullname) -> module. + + Load the module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the imported + module, or raises ZipImportError if it wasn't found. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi index 4e410fdd18ad9..9d165cf2f1545 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi @@ -1,3 +1,17 @@ +"""The functions in this module allow compression and decompression using the +zlib library, which is based on GNU zip. + +adler32(string[, start]) -- Compute an Adler-32 checksum. +compress(data[, level]) -- Compress data, with compression level 0-9 or -1. +compressobj([level[, ...]]) -- Return a compressor object. +crc32(string[, start]) -- Compute a CRC-32 checksum. +decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string. +decompressobj([wbits[, zdict]]) -- Return a decompressor object. + +'wbits' is window buffer size and container format. +Compressor objects support compress() and flush() methods; decompressor +objects support decompress() and flush(). +""" import sys from _typeshed import ReadableBuffer from typing import Any, Final, final, type_check_only @@ -58,17 +72,88 @@ class _Decompress: def flush(self, length: int = 16384, /) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(data: ReadableBuffer, value: int = 1, /) -> int: ... +def adler32(data: ReadableBuffer, value: int = 1, /) -> int: + """Compute an Adler-32 checksum of data. + + value + Starting value of the checksum. + +The returned checksum is an integer. 
+""" if sys.version_info >= (3, 11): - def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: ... + def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: + """Returns a bytes object containing compressed data. + + data + Binary data to be compressed. + level + Compression level, in 0-9 or -1. + wbits + The window buffer size and container format. +""" else: - def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: ... + def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: + """Returns a bytes object containing compressed data. + + data + Binary data to be compressed. + level + Compression level, in 0-9 or -1. +""" def compressobj( level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None -) -> _Compress: ... -def crc32(data: ReadableBuffer, value: int = 0, /) -> int: ... -def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: ... -def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... +) -> _Compress: + """Return a compressor object. + + level + The compression level (an integer in the range 0-9 or -1; default is + currently equivalent to 6). Higher compression levels are slower, + but produce smaller results. + method + The compression algorithm. If given, this must be DEFLATED. + wbits + +9 to +15: The base-two logarithm of the window size. Include a zlib + container. + -9 to -15: Generate a raw stream. + +25 to +31: Include a gzip container. + memLevel + Controls the amount of memory used for internal compression state. + Valid values range from 1 to 9. Higher values result in higher memory + usage, faster compression, and smaller output. + strategy + Used to tune the compression algorithm. Possible values are + Z_DEFAULT_STRATEGY, Z_FILTERED, and Z_HUFFMAN_ONLY. + zdict + The predefined compression dictionary - a sequence of bytes + containing subsequences that are likely to occur in the input data. +""" +def crc32(data: ReadableBuffer, value: int = 0, /) -> int: + """Compute a CRC-32 checksum of data. + + value + Starting value of the checksum. + +The returned checksum is an integer. +""" +def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: + """Returns a bytes object containing the uncompressed data. + + data + Compressed data. + wbits + The window buffer size and container format. + bufsize + The initial output buffer size. +""" +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: + """Return a decompressor object. + + wbits + The window buffer size and container format. + zdict + The predefined compression dictionary. This must be the same + dictionary as used by the compressor that produced the input data. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi index b7433f835f83d..0c08ab748e89f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi @@ -18,18 +18,32 @@ class ZoneInfo(tzinfo): def key(self) -> str: ... def __new__(cls, key: str) -> Self: ... @classmethod - def no_cache(cls, key: str) -> Self: ... + def no_cache(cls, key: str) -> Self: + """Get a new instance of ZoneInfo, bypassing the cache. 
+""" if sys.version_info >= (3, 12): @classmethod - def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: ... + def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: + """Create a ZoneInfo file from a file object. +""" else: @classmethod - def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: ... + def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: + """Create a ZoneInfo file from a file object. +""" @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: ... - def tzname(self, dt: datetime | None, /) -> str | None: ... - def utcoffset(self, dt: datetime | None, /) -> timedelta | None: ... - def dst(self, dt: datetime | None, /) -> timedelta | None: ... + def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: + """Clear the ZoneInfo cache. +""" + def tzname(self, dt: datetime | None, /) -> str | None: + """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime. +""" + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: + """Retrieve a timedelta representing the UTC offset in a zone at the given datetime. +""" + def dst(self, dt: datetime | None, /) -> timedelta | None: + """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime. +""" def __dir__() -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi index e6d2d83caac18..efd01fe61f8ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi @@ -11,4 +11,6 @@ def load_data( fobj: _IOBytes, ) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[str, ...], bytes | None]: ... -class ZoneInfoNotFoundError(KeyError): ... +class ZoneInfoNotFoundError(KeyError): + """Exception raised when a ZoneInfo key is not found. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi index 0ef78d03e5f4a..ce0ef778ccfc0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi @@ -4,10 +4,24 @@ from collections.abc import Sequence # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 -def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... -def find_tzfile(key: str) -> str | None: ... -def available_timezones() -> set[str]: ... +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: + """Reset global TZPATH. +""" +def find_tzfile(key: str) -> str | None: + """Retrieve the path to a TZif file from a key. +""" +def available_timezones() -> set[str]: + """Returns a set containing all available time zones. + +.. caution:: + + This may attempt to open a large number of files, since the best way to + determine if a given file on the time zone search path is to open it + and check for the "magic string" at the beginning. +""" TZPATH: tuple[str, ...] -class InvalidTZPathWarning(RuntimeWarning): ... +class InvalidTZPathWarning(RuntimeWarning): + """Warning raised if an invalid path is specified in PYTHONTZPATH. 
+""" From 0d316a0c5e69b57f786d3847dcc282bb1538310e Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:36:50 +0000 Subject: [PATCH 3/8] Sync Windows docstrings --- .../vendor/typeshed/stdlib/_ctypes.pyi | 28 +- .../vendor/typeshed/stdlib/_msi.pyi | 32 +- .../vendor/typeshed/stdlib/_socket.pyi | 18 +- .../vendor/typeshed/stdlib/_ssl.pyi | 19 +- .../vendor/typeshed/stdlib/_winapi.pyi | 130 ++++- .../stdlib/asyncio/windows_events.pyi | 37 +- .../typeshed/stdlib/asyncio/windows_utils.pyi | 14 +- .../typeshed/stdlib/ctypes/__init__.pyi | 12 +- .../vendor/typeshed/stdlib/ctypes/util.pyi | 4 +- .../stdlib/distutils/_msvccompiler.pyi | 11 + .../stdlib/distutils/command/bdist_msi.pyi | 57 ++- .../vendor/typeshed/stdlib/encodings/mbcs.pyi | 9 + .../vendor/typeshed/stdlib/encodings/oem.pyi | 3 + .../typeshed/stdlib/msilib/__init__.pyi | 40 +- .../vendor/typeshed/stdlib/msvcrt.pyi | 95 +++- .../stdlib/multiprocessing/connection.pyi | 12 +- .../stdlib/multiprocessing/context.pyi | 4 +- .../multiprocessing/popen_spawn_win32.pyi | 3 + .../stdlib/multiprocessing/reduction.pyi | 22 +- .../multiprocessing/resource_sharer.pyi | 6 +- .../ty_vendored/vendor/typeshed/stdlib/nt.pyi | 5 + .../vendor/typeshed/stdlib/os/__init__.pyi | 150 +++++- .../typeshed/stdlib/pathlib/__init__.pyi | 25 +- .../vendor/typeshed/stdlib/shutil.pyi | 11 +- .../vendor/typeshed/stdlib/socket.pyi | 15 +- .../vendor/typeshed/stdlib/sys/__init__.pyi | 38 +- .../vendor/typeshed/stdlib/tarfile.pyi | 6 +- .../typeshed/stdlib/tkinter/__init__.pyi | 56 ++- .../vendor/typeshed/stdlib/tkinter/ttk.pyi | 4 +- .../vendor/typeshed/stdlib/urllib/request.pyi | 8 +- .../vendor/typeshed/stdlib/winreg.pyi | 475 +++++++++++++++++- .../vendor/typeshed/stdlib/winsound.pyi | 43 +- .../typeshed/stdlib/zipfile/_path/glob.pyi | 19 +- 33 files changed, 1265 insertions(+), 146 deletions(-) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi index af9e7512b1cc9..ee939a614db37 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi @@ -31,22 +31,42 @@ if sys.platform == "win32": _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] class COMError(Exception): + """Raised when a COM method call failed. +""" hresult: int text: str | None details: _COMError_Details def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... - def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ... + def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: + """CopyComPointer(src, dst) -> HRESULT value +""" FUNCFLAG_HRESULT: Final = 0x2 FUNCFLAG_STDCALL: Final = 0x0 - def FormatError(code: int = ...) -> str: ... + def FormatError(code: int = ...) -> str: + """FormatError([integer]) -> string + +Convert a win32 error code into a string. If the error code is not +given, the return value of a call to GetLastError() is used. +""" def get_last_error() -> int: ... def set_last_error(value: int) -> int: ... - def LoadLibrary(name: str, load_flags: int = 0, /) -> int: ... - def FreeLibrary(handle: int, /) -> None: ... + def LoadLibrary(name: str, load_flags: int = 0, /) -> int: + """LoadLibrary(name, load_flags) -> handle + +Load an executable (usually a DLL), and return a handle to it. +The handle may be used to locate exported functions in this +module. 
load_flags are as defined for LoadLibraryEx in the +Windows API. +""" + def FreeLibrary(handle: int, /) -> None: + """FreeLibrary(handle) -> void + +Free the handle of an executable previously loaded by LoadLibrary. +""" else: def dlclose(handle: int, /) -> None: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi index edceed51bf9db..81f6282cc3d18 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi @@ -1,3 +1,5 @@ +"""Documentation +""" import sys from typing import Final, type_check_only @@ -51,10 +53,32 @@ if sys.platform == "win32": __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def UuidCreate() -> str: ... - def FCICreate(cabname: str, files: list[str], /) -> None: ... - def OpenDatabase(path: str, persist: int, /) -> _Database: ... - def CreateRecord(count: int, /) -> _Record: ... + def UuidCreate() -> str: + """Return the string representation of a new unique identifier. +""" + def FCICreate(cabname: str, files: list[str], /) -> None: + """Create a new CAB file. + + cabname + the name of the CAB file + files + a list of tuples, each containing the name of the file on disk, + and the name of the file inside the CAB file +""" + def OpenDatabase(path: str, persist: int, /) -> _Database: + """Return a new database object. + + path + the file name of the MSI file + persist + the persistence mode +""" + def CreateRecord(count: int, /) -> _Record: + """Return a new record object. + + count + the number of fields of the record +""" MSICOLINFO_NAMES: Final[int] MSICOLINFO_TYPES: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi index 5fa8cbfd93421..02c3cdab753f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi @@ -880,7 +880,14 @@ operations. A timeout of None indicates that timeouts on socket operations are disabled. """ if sys.platform == "win32": - def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: ... + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: + """ioctl(cmd, option) -> long + +Control the socket with WSAIoctl syscall. Currently supported 'cmd' values are +SIO_RCVALL: 'option' must be one of the socket.RCVALL_* constants. +SIO_KEEPALIVE_VALS: 'option' is a tuple of (onoff, timeout, interval). +SIO_LOOPBACK_FAST_PATH: 'option' is a boolean value, and is disabled by default +""" def listen(self, backlog: int = ..., /) -> None: """listen([backlog]) @@ -1070,7 +1077,14 @@ None, optlen. @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None: ... if sys.platform == "win32": - def share(self, process_id: int, /) -> bytes: ... + def share(self, process_id: int, /) -> bytes: + """share(process_id) -> bytes + +Share the socket with another process. The target process id +must be provided and the resulting bytes object passed to the target +process. There the shared socket can be instantiated by calling +socket.fromshare(). 
+""" def shutdown(self, how: int, /) -> None: """shutdown(flag) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi index 00d3d6e2fbb56..f3058f390057c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi @@ -86,8 +86,23 @@ The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. if sys.platform == "win32": _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] - def enum_certificates(store_name: str) -> _EnumRetType: ... - def enum_crls(store_name: str) -> _EnumRetType: ... + def enum_certificates(store_name: str) -> _EnumRetType: + """Retrieve certificates from Windows' cert store. + +store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide +more cert storages, too. The function returns a list of (bytes, +encoding_type, trust) tuples. The encoding_type flag can be interpreted +with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either +a set of OIDs or the boolean True. +""" + def enum_crls(store_name: str) -> _EnumRetType: + """Retrieve CRLs from Windows' cert store. + +store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide +more cert storages, too. The function returns a list of (bytes, +encoding_type) tuples. The encoding_type flag can be interpreted with +X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. +""" def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: """Lookup NID, short name, long name and OID of an ASN1_OBJECT. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi index d9e2c377b115a..0d8f80a35972d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi @@ -175,7 +175,9 @@ if sys.platform == "win32": if sys.version_info >= (3, 14): COPY_FILE_DIRECTORY: Final = 0x00000080 - def CloseHandle(handle: int, /) -> None: ... + def CloseHandle(handle: int, /) -> None: + """Close handle. +""" @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload @@ -204,7 +206,14 @@ if sys.platform == "win32": security_attributes: int, /, ) -> int: ... - def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: ... + def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: + """Create an anonymous pipe. + + pipe_attrs + Ignored internally, can be None. + +Returns a 2-tuple of handles, to the read and write ends of the pipe. +""" def CreateProcess( application_name: str | None, command_line: str | None, @@ -216,7 +225,19 @@ if sys.platform == "win32": current_directory: str | None, startup_info: Any, /, - ) -> tuple[int, int, int, int]: ... + ) -> tuple[int, int, int, int]: + """Create a new process and its primary thread. + + command_line + Can be str or None + proc_attrs + Ignored internally, can be None. + thread_attrs + Ignored internally, can be None. + +The return value is a tuple of the process handle, thread handle, +process ID, and thread ID. +""" def DuplicateHandle( source_process_handle: int, source_handle: int, @@ -225,16 +246,46 @@ if sys.platform == "win32": inherit_handle: bool, options: int = 0, /, - ) -> int: ... + ) -> int: + """Return a duplicate handle object. + +The duplicate handle refers to the same object as the original +handle. Therefore, any changes to the object are reflected +through both handles. +""" def ExitProcess(ExitCode: int, /) -> NoReturn: ... 
- def GetACP() -> int: ... + def GetACP() -> int: + """Get the current Windows ANSI code page identifier. +""" def GetFileType(handle: int) -> int: ... - def GetCurrentProcess() -> int: ... - def GetExitCodeProcess(process: int, /) -> int: ... + def GetCurrentProcess() -> int: + """Return a handle object for the current process. +""" + def GetExitCodeProcess(process: int, /) -> int: + """Return the termination status of the specified process. +""" def GetLastError() -> int: ... - def GetModuleFileName(module_handle: int, /) -> str: ... - def GetStdHandle(std_handle: int, /) -> int: ... - def GetVersion() -> int: ... + def GetModuleFileName(module_handle: int, /) -> str: + """Return the fully-qualified path for the file that contains module. + +The module must have been loaded by the current process. + +The module parameter should be a handle to the loaded module +whose path is being requested. If this parameter is 0, +GetModuleFileName retrieves the path of the executable file +of the current process. +""" + def GetStdHandle(std_handle: int, /) -> int: + """Return a handle to the specified standard device. + + std_handle + One of STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, or STD_ERROR_HANDLE. + +The integer associated with the handle object is returned. +""" + def GetVersion() -> int: + """Return the version number of the current operating system. +""" def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ... def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): @@ -250,9 +301,17 @@ if sys.platform == "win32": def SetNamedPipeHandleState( named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, / ) -> None: ... - def TerminateProcess(handle: int, exit_code: int, /) -> None: ... + def TerminateProcess(handle: int, exit_code: int, /) -> None: + """Terminate the specified process and all of its threads. +""" def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ... - def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: ... + def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: + """Wait for a single object. + +Wait until the specified object is in the signaled state or +the time-out interval elapses. The timeout value is specified +in milliseconds. +""" def WaitNamedPipe(name: str, timeout: int, /) -> None: ... @overload def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @@ -262,6 +321,8 @@ if sys.platform == "win32": def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: + """OVERLAPPED structure wrapper +""" event: int def GetOverlappedResult(self, wait: bool, /) -> tuple[int, int]: ... def cancel(self) -> None: ... @@ -270,11 +331,40 @@ if sys.platform == "win32": if sys.version_info >= (3, 13): def BatchedWaitForMultipleObjects( handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF - ) -> list[int]: ... + ) -> list[int]: + """Supports a larger number of handles than WaitForMultipleObjects + +Note that the handles may be waited on other threads, which could cause +issues for objects like mutexes that become associated with the thread +that was waiting for them. Objects may also be left signalled, even if +the wait fails. 
+ +It is recommended to use WaitForMultipleObjects whenever possible, and +only switch to BatchedWaitForMultipleObjects for scenarios where you +control all the handles involved, such as your own thread pool or +files, and all wait objects are left unmodified by a wait (for example, +manual reset events, threads, and files/pipes). + +Overlapped handles returned from this module use manual reset events. +""" def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ... def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ... - def GetLongPathName(path: str) -> str: ... - def GetShortPathName(path: str) -> str: ... + def GetLongPathName(path: str) -> str: + """Return the long version of the provided path. + +If the path is already in its long form, returns the same value. + +The path must already be a 'str'. If the type is not known, use +os.fsdecode before calling this function. +""" + def GetShortPathName(path: str) -> str: + """Return the short version of the provided path. + +If the path is already in its short form, returns the same value. + +The path must already be a 'str'. If the type is not known, use +os.fsdecode before calling this function. +""" def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def ReleaseMutex(mutex: int) -> None: ... @@ -282,5 +372,13 @@ if sys.platform == "win32": def SetEvent(event: int) -> None: ... if sys.version_info >= (3, 12): - def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ... + def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: + """Copies a file from one name to a new name. + +This is implemented using the CopyFile2 API, which preserves all stat +and metadata information apart from security attributes. + +progress_routine is reserved for future use, but is currently not +implemented. Its value is ignored. +""" def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi index a32381bfb3e63..92c18dd5d5069 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,3 +1,5 @@ +"""Selector and proactor event loops for Windows. +""" import socket import sys from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer @@ -47,14 +49,22 @@ if sys.platform == "win32": CONNECT_PIPE_MAX_DELAY: float class PipeServer: + """Class representing a pipe server. + +This is much like a bound, listening socket. +""" def __init__(self, address: str) -> None: ... def __del__(self) -> None: ... def closed(self) -> bool: ... def close(self) -> None: ... - class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... + class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): + """Windows version of selector event loop. +""" class ProactorEventLoop(proactor_events.BaseProactorEventLoop): + """Windows version of proactor event loop using IOCP. +""" def __init__(self, proactor: IocpProactor | None = None) -> None: ... 
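As a small, hedged illustration of the relationship described above (ProactorEventLoop is the IOCP-backed loop and has been the Windows default since Python 3.8), the loop can still be constructed explicitly; the coroutine is a placeholder:

    import asyncio
    import sys

    async def main() -> None:
        await asyncio.sleep(0)  # placeholder work

    if sys.platform == "win32":
        # Explicit construction only makes the IOCP-backed loop visible;
        # asyncio.run() would normally select it by default on Windows.
        loop = asyncio.ProactorEventLoop()
        try:
            loop.run_until_complete(main())
        finally:
            loop.close()
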
async def create_pipe_connection( self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str @@ -64,6 +74,8 @@ if sys.platform == "win32": ) -> list[PipeServer]: ... class IocpProactor: + """Proactor implementation using IOCP. +""" def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... @@ -86,7 +98,12 @@ if sys.platform == "win32": def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... - def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: + """Wait for a handle. + +Return a Future object. The result of the future is True if the wait +completed, or False if the wait did not complete (on timeout). +""" def close(self) -> None: ... if sys.version_info >= (3, 11): def recvfrom_into( @@ -105,13 +122,21 @@ if sys.platform == "win32": else: class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[SelectorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... + def get_child_watcher(self) -> NoReturn: + """Get the watcher for child processes. +""" + def set_child_watcher(self, watcher: Any) -> NoReturn: + """Set the watcher for child processes. +""" class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[ProactorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... + def get_child_watcher(self) -> NoReturn: + """Get the watcher for child processes. +""" + def set_child_watcher(self, watcher: Any) -> NoReturn: + """Set the watcher for child processes. +""" if sys.version_info >= (3, 14): _DefaultEventLoopPolicy = _WindowsProactorEventLoopPolicy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi index 5cedd61b5f4a3..b78b9f2a9d7e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi @@ -1,3 +1,5 @@ +"""Various Windows specific bits and pieces. +""" import subprocess import sys from collections.abc import Callable @@ -11,9 +13,15 @@ if sys.platform == "win32": BUFSIZE: Final = 8192 PIPE: Final = subprocess.PIPE STDOUT: Final = subprocess.STDOUT - def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: ... + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: + """Like os.pipe() but with overlapped support and using handles not fds. +""" class PipeHandle: + """Wrapper for an overlapped pipe handle which is vaguely file-object like. + +The IOCP event loop can use these instead of socket objects. +""" def __init__(self, handle: int) -> None: ... def __del__(self) -> None: ... def __enter__(self) -> Self: ... @@ -24,6 +32,10 @@ if sys.platform == "win32": def close(self, *, CloseHandle: Callable[[int], object] = ...) -> None: ... 
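A short usage sketch for the overlapped-pipe helpers above, assuming only what the stubs state: pipe() hands back raw Windows handles (not file descriptors), and PipeHandle wraps one as a closeable object that also works as a context manager:

    import sys

    if sys.platform == "win32":
        from asyncio import windows_utils

        # Both ends are created in overlapped mode (the stub's default) and
        # wrapped so they are closed when the with-block exits.
        read_h, write_h = windows_utils.pipe(overlapped=(True, True))
        with windows_utils.PipeHandle(read_h), windows_utils.PipeHandle(write_h):
            pass
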
class Popen(subprocess.Popen[AnyStr]): + """Replacement for subprocess.Popen using overlapped pipe handles. + +The stdin, stdout, stderr are None or instances of PipeHandle. +""" stdin: PipeHandle | None # type: ignore[assignment] stdout: PipeHandle | None # type: ignore[assignment] stderr: PipeHandle | None # type: ignore[assignment] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi index ae616f3576ca2..db0c4141a5a31 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi @@ -121,8 +121,16 @@ reacquires it afterwards. def __getitem__(self, name_or_ordinal: str) -> _NamedFuncPointer: ... if sys.platform == "win32": - class OleDLL(CDLL): ... - class WinDLL(CDLL): ... + class OleDLL(CDLL): + """This class represents a dll exporting functions using the +Windows stdcall calling convention, and returning HRESULT. +HRESULT error values are automatically raised as OSError +exceptions. +""" + class WinDLL(CDLL): + """This class represents a dll exporting functions using the +Windows stdcall calling convention. +""" class PyDLL(CDLL): """This class represents the Python library itself. It allows diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi index fb80e2836bb75..b2a02f5df4bdc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi @@ -3,7 +3,9 @@ import sys def find_library(name: str) -> str | None: ... if sys.platform == "win32": - def find_msvcrt() -> str | None: ... + def find_msvcrt() -> str | None: + """Return the name of the VC runtime dll +""" if sys.version_info >= (3, 14): def dllist() -> list[str]: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi index bba9373b72dbc..6a673bc8b99ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi @@ -1,3 +1,11 @@ +"""distutils._msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for Microsoft Visual Studio 2015. + +The module is compatible with VS 2015 and later. You can find legacy support +for older versions in distutils.msvc9compiler and distutils.msvccompiler. +""" from _typeshed import Incomplete from distutils.ccompiler import CCompiler from typing import ClassVar, Final @@ -6,6 +14,9 @@ PLAT_SPEC_TO_RUNTIME: Final[dict[str, str]] PLAT_TO_VCVARS: Final[dict[str, str]] class MSVCCompiler(CCompiler): + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class. +""" compiler_type: ClassVar[str] executables: ClassVar[dict[Incomplete, Incomplete]] res_extension: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi index d677f81d14251..f1848cd457e56 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -1,3 +1,6 @@ +""" +Implements the bdist_msi command. 
+""" import sys from _typeshed import Incomplete from typing import ClassVar, Literal @@ -8,12 +11,42 @@ if sys.platform == "win32": from msilib import Control, Dialog class PyDialog(Dialog): - def __init__(self, *args, **kw) -> None: ... - def title(self, title) -> None: ... - def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: ... - def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: ... - def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: ... - def xbutton(self, name, title, next, xpos) -> Control: ... + """Dialog class with a fixed layout: controls at the top, then a ruler, + then a list of buttons: back, next, cancel. Optionally a bitmap at the + left. +""" + def __init__(self, *args, **kw) -> None: + """Dialog(database, name, x, y, w, h, attributes, title, first, + default, cancel, bitmap=true) +""" + def title(self, title) -> None: + """Set the title text of the dialog at the top. +""" + def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: + """Add a back button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated +""" + def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: + """Add a cancel button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated +""" + def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: + """Add a Next button with a given title, the tab-next button, + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated +""" + def xbutton(self, name, title, next, xpos) -> Control: + """Add a button with a given title, the tab-next button, + its name in the Control table, giving its x position; the + y-position is aligned with the other buttons. + + Return the button, so that events can be associated +""" class bdist_msi(Command): description: str @@ -39,7 +72,17 @@ if sys.platform == "win32": db: Incomplete def run(self) -> None: ... def add_files(self) -> None: ... - def add_find_python(self) -> None: ... + def add_find_python(self) -> None: + """Adds code to the installer to compute the location of Python. + + Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the + registry for each version of Python. + + Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, + else from PYTHON.MACHINE.X.Y. + + Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe +""" def add_scripts(self) -> None: ... def add_ui(self) -> None: ... def get_installer_filename(self, fullname): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi index 2c2917d63f6db..17a553fbc1841 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi @@ -1,3 +1,12 @@ +"""Python 'mbcs' Codec for Windows + + +Cloned by Mark Hammond (mhammond@skippinet.com.au) from ascii.py, +which was written by Marc-Andre Lemburg (mal@lemburg.com). + +(c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
+ +""" import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi index 376c12c445f42..62215026427e0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi @@ -1,3 +1,6 @@ +"""Python 'oem' Codec for Windows + +""" import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi index 622f585f5beea..80b5054294496 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi @@ -37,7 +37,9 @@ if sys.platform == "win32": action: str, seqno: int | type[_Unspecified] = ..., cond: str | type[_Unspecified] = ..., - ) -> None: ... + ) -> None: + """Change the sequence number of an action in a sequence list +""" def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... def add_stream(db: _Database, name: str, path: str) -> None: ... def init_database( @@ -80,7 +82,16 @@ if sys.platform == "win32": _logical: str, default: str, componentflags: int | None = None, - ) -> None: ... + ) -> None: + """Create a new directory in the Directory table. There is a current component + at each point in time for the directory, which is either explicitly created + through start_component, or implicitly when files are added for the first + time. Files are added into the current component, and into the cab file. + To create a directory, a base directory object needs to be specified (can be + None), the path to the physical directory, and a logical directory name. + Default specifies the DefaultDir slot in the directory table. componentflags + specifies the default flags that new components get. +""" def start_component( self, component: str | None = None, @@ -88,11 +99,28 @@ if sys.platform == "win32": flags: int | None = None, keyfile: str | None = None, uuid: str | None = None, - ) -> None: ... + ) -> None: + """Add an entry to the Component table, and make this component the current for this + directory. If no component name is given, the directory name is used. If no feature + is given, the current feature is used. If no flags are given, the directory's default + flags are used. If no keyfile is given, the KeyPath is left null in the Component + table. +""" def make_short(self, file: str) -> str: ... - def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... - def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... - def remove_pyc(self) -> None: ... + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: + """Add a file to the current component of the directory, starting a new one + if there is no current component. By default, the file name in the source + and the file table will be identical. If the src file is specified, it is + interpreted relative to the current directory. Optionally, a version and a + language can be specified for the entry in the File table. +""" + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: + """Add a list of files to the current component as specified in the + glob pattern. Individual files can be excluded in the exclude list. 
+""" + def remove_pyc(self) -> None: + """Remove .pyc files on uninstall +""" class Binary: name: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi index 5feca8eab5c1c..26d790eef4d5c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi @@ -13,20 +13,83 @@ if sys.platform == "win32": SEM_NOALIGNMENTFAULTEXCEPT: Final = 0x0004 SEM_NOGPFAULTERRORBOX: Final = 0x0002 SEM_NOOPENFILEERRORBOX: Final = 0x8000 - def locking(fd: int, mode: int, nbytes: int, /) -> None: ... - def setmode(fd: int, mode: int, /) -> int: ... - def open_osfhandle(handle: int, flags: int, /) -> int: ... - def get_osfhandle(fd: int, /) -> int: ... - def kbhit() -> bool: ... - def getch() -> bytes: ... - def getwch() -> str: ... - def getche() -> bytes: ... - def getwche() -> str: ... - def putch(char: bytes | bytearray, /) -> None: ... - def putwch(unicode_char: str, /) -> None: ... - def ungetch(char: bytes | bytearray, /) -> None: ... - def ungetwch(unicode_char: str, /) -> None: ... - def heapmin() -> None: ... - def SetErrorMode(mode: int, /) -> int: ... + def locking(fd: int, mode: int, nbytes: int, /) -> None: + """Lock part of a file based on file descriptor fd from the C runtime. + +Raises OSError on failure. The locked region of the file extends from +the current file position for nbytes bytes, and may continue beyond +the end of the file. mode must be one of the LK_* constants listed +below. Multiple regions in a file may be locked at the same time, but +may not overlap. Adjacent regions are not merged; they must be unlocked +individually. +""" + def setmode(fd: int, mode: int, /) -> int: + """Set the line-end translation mode for the file descriptor fd. + +To set it to text mode, flags should be os.O_TEXT; for binary, it +should be os.O_BINARY. + +Return value is the previous mode. +""" + def open_osfhandle(handle: int, flags: int, /) -> int: + """Create a C runtime file descriptor from the file handle handle. + +The flags parameter should be a bitwise OR of os.O_APPEND, os.O_RDONLY, +and os.O_TEXT. The returned file descriptor may be used as a parameter +to os.fdopen() to create a file object. +""" + def get_osfhandle(fd: int, /) -> int: + """Return the file handle for the file descriptor fd. + +Raises OSError if fd is not recognized. +""" + def kbhit() -> bool: + """Returns a nonzero value if a keypress is waiting to be read. Otherwise, return 0. +""" + def getch() -> bytes: + """Read a keypress and return the resulting character as a byte string. + +Nothing is echoed to the console. This call will block if a keypress is +not already available, but will not wait for Enter to be pressed. If the +pressed key was a special function key, this will return '\\000' or +'\\xe0'; the next call will return the keycode. The Control-C keypress +cannot be read with this function. +""" + def getwch() -> str: + """Wide char variant of getch(), returning a Unicode value. +""" + def getche() -> bytes: + """Similar to getch(), but the keypress will be echoed if possible. +""" + def getwche() -> str: + """Wide char variant of getche(), returning a Unicode value. +""" + def putch(char: bytes | bytearray, /) -> None: + """Print the byte string char to the console without buffering. +""" + def putwch(unicode_char: str, /) -> None: + """Wide char variant of putch(), accepting a Unicode value. +""" + def ungetch(char: bytes | bytearray, /) -> None: + """Opposite of getch. 
+ +Cause the byte string char to be "pushed back" into the +console buffer; it will be the next character read by +getch() or getche(). +""" + def ungetwch(unicode_char: str, /) -> None: + """Wide char variant of ungetch(), accepting a Unicode value. +""" + def heapmin() -> None: + """Minimize the malloc() heap. + +Force the malloc() heap to clean itself up and return unused blocks +to the operating system. On failure, this raises OSError. +""" + def SetErrorMode(mode: int, /) -> int: + """Wrapper around SetErrorMode. +""" if sys.version_info >= (3, 10): - def GetErrorMode() -> int: ... # undocumented + def GetErrorMode() -> int: # undocumented + """Wrapper around GetErrorMode. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi index cdee41d297abb..33ff02214196d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi @@ -69,7 +69,12 @@ a socket handle (Windows). """ if sys.platform == "win32": - class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): ... + class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): + """ +Connection class based on a Windows named pipe. +Overlapped I/O is used, so the handles must have been created +with FILE_FLAG_OVERLAPPED. +""" class Listener: """ @@ -134,4 +139,7 @@ Returns pair of connection objects at either end of a pipe """ else: - def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... + def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: + """ +Returns pair of connection objects at either end of a pipe +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi index 3b8356360ab4e..c347ed1cbc798 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi @@ -72,7 +72,9 @@ can be used to create shared objects. """Returns two connection object connected by a pipe """ else: - def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: ... 
+ def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: + """Returns two connection object connected by a pipe +""" def Barrier( self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi index 481b9eec5a37c..87c44fe9d5fc5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -13,6 +13,9 @@ if sys.platform == "win32": WINENV: Final[bool] class Popen: + """ +Start a subprocess to run the code of a process object +""" finalizer: Finalize method: ClassVar[str] pid: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi index 047471be64d16..ff0ef0627d0f8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi @@ -39,14 +39,26 @@ def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> N if sys.platform == "win32": def duplicate( handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None - ) -> int: ... - def steal_handle(source_pid: int, handle: int) -> int: ... - def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: ... - def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: ... + ) -> int: + """Duplicate a handle. (target_process is a handle not a pid!) +""" + def steal_handle(source_pid: int, handle: int) -> int: + """Steal a handle from process identified by source_pid. +""" + def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: + """Send a handle over a local connection. +""" + def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: + """Receive a handle over a local connection. +""" class DupHandle: + """Picklable wrapper for a handle. +""" def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... - def detach(self) -> int: ... + def detach(self) -> int: + """Get the handle. This should only be called once. +""" else: if sys.version_info < (3, 14): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi index 38185f87d39e8..feae80357d427 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -7,8 +7,12 @@ if sys.platform == "win32": __all__ += ["DupSocket"] class DupSocket: + """Picklable wrapper for a socket. +""" def __init__(self, sock: socket) -> None: ... - def detach(self) -> socket: ... + def detach(self) -> socket: + """Get the socket. This should only be called once. 
+""" else: __all__ += ["DupFd"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi index 0c87444d18f44..a5f1fb6363257 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi @@ -1,3 +1,8 @@ +"""This module provides access to operating system functionality that is +standardized by the C Standard and the POSIX standard (a thinly +disguised Unix interface). Refer to the library manual and +corresponding Unix manual entries for more information on calls. +""" import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 98818110fbd9c..2396cacb4df7f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -865,7 +865,9 @@ See os.stat for more information. Use st_birthtime instead to retrieve the file creation time. \ In the future, this property will contain the last metadata change time.""" ) - def st_ctime(self) -> float: ... + def st_ctime(self) -> float: + """time of last change +""" else: @property def st_ctime(self) -> float: @@ -887,14 +889,22 @@ In the future, this property will contain the last metadata change time.""" """ if sys.platform == "win32": @property - def st_file_attributes(self) -> int: ... + def st_file_attributes(self) -> int: + """Windows file attribute bits +""" @property - def st_reparse_tag(self) -> int: ... + def st_reparse_tag(self) -> int: + """Windows reparse tag +""" if sys.version_info >= (3, 12): @property - def st_birthtime(self) -> float: ... # time of file creation in seconds + def st_birthtime(self) -> float: # time of file creation in seconds + """time of creation +""" @property - def st_birthtime_ns(self) -> int: ... # time of file creation in nanoseconds + def st_birthtime_ns(self) -> int: # time of file creation in nanoseconds + """time of creation in nanoseconds +""" else: @property def st_blocks(self) -> int: # number of blocks allocated for file @@ -1252,8 +1262,12 @@ key, default and the result are bytes. """ else: - def putenv(name: str, value: str, /) -> None: ... - def unsetenv(name: str, /) -> None: ... + def putenv(name: str, value: str, /) -> None: + """Change or add an environment variable. +""" + def unsetenv(name: str, /) -> None: + """Delete an environment variable. +""" _Opener: TypeAlias = Callable[[str, int], int] @@ -1643,8 +1657,12 @@ def set_inheritable(fd: int, inheritable: bool, /) -> None: """ if sys.platform == "win32": - def get_handle_inheritable(handle: int, /) -> bool: ... - def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: ... + def get_handle_inheritable(handle: int, /) -> bool: + """Get the close-on-exe flag of the specified file descriptor. +""" + def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: + """Set the inheritable flag of the specified handle. +""" if sys.platform != "win32": # Unix only @@ -2416,8 +2434,28 @@ otherwise return -SIG, where SIG is the signal that killed it. """ else: - def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: ... - def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: ... + def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: + """Execute the program specified by path in a new process. + + mode + Mode of process creation. + path + Path of executable file. 
+ argv + Tuple or list of strings. +""" + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: + """Execute the program specified by path in a new process. + + mode + Mode of process creation. + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. +""" def system(command: StrOrBytesPath) -> int: """Execute the command in a subshell. @@ -2481,9 +2519,53 @@ if sys.platform == "win32": arguments: str = "", cwd: StrOrBytesPath | None = None, show_cmd: int = 1, - ) -> None: ... + ) -> None: + """Start a file with its associated application. + +When "operation" is not specified or "open", this acts like +double-clicking the file in Explorer, or giving the file name as an +argument to the DOS "start" command: the file is opened with whatever +application (if any) its extension is associated. +When another "operation" is given, it specifies what should be done with +the file. A typical operation is "print". + +"arguments" is passed to the application, but should be omitted if the +file is a document. + +"cwd" is the working directory for the operation. If "filepath" is +relative, it will be resolved against this directory. This argument +should usually be an absolute path. + +"show_cmd" can be used to override the recommended visibility option. +See the Windows ShellExecute documentation for values. + +startfile returns as soon as the associated application is launched. +There is no option to wait for the application to close, and no way +to retrieve the application's exit status. + +The filepath is relative to the current directory. If you want to use +an absolute path, make sure the first character is not a slash ("/"); +the underlying Win32 ShellExecute function doesn't work if it is. +""" else: - def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: ... + def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: + """Start a file with its associated application. + +When "operation" is not specified or "open", this acts like +double-clicking the file in Explorer, or giving the file name as an +argument to the DOS "start" command: the file is opened with whatever +application (if any) its extension is associated. +When another "operation" is given, it specifies what should be done with +the file. A typical operation is "print". + +startfile returns as soon as the associated application is launched. +There is no option to wait for the application to close, and no way +to retrieve the application's exit status. + +The filepath is relative to the current directory. If you want to use +an absolute path, make sure the first character is not a slash ("/"); +the underlying Win32 ShellExecute function doesn't work if it is. +""" else: def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: @@ -2772,7 +2854,11 @@ Return the number of logical CPUs usable by the calling thread of the current process. Return None if indeterminable. """ else: - def process_cpu_count() -> int | None: ... + def process_cpu_count() -> int | None: + """Return the number of logical CPUs in the system. + +Return None if indeterminable. +""" if sys.platform != "win32": # Unix only @@ -2827,7 +2913,16 @@ if sys.platform == "win32": def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... - def add_dll_directory(path: str) -> _AddedDllDirectory: ... 
+ def add_dll_directory(path: str) -> _AddedDllDirectory: + """Add a path to the DLL search path. + +This search path is used when resolving dependencies for imported +extension modules (the module itself is resolved through sys.path), +and also by ctypes. + +Remove the directory by calling close() on the returned object or +using it in a with statement. +""" if sys.platform == "linux": MFD_CLOEXEC: Final[int] @@ -2894,9 +2989,21 @@ if sys.version_info >= (3, 12) and sys.platform == "linux": PIDFD_NONBLOCK: Final = 2048 if sys.version_info >= (3, 12) and sys.platform == "win32": - def listdrives() -> list[str]: ... - def listmounts(volume: str) -> list[str]: ... - def listvolumes() -> list[str]: ... + def listdrives() -> list[str]: + """Return a list containing the names of drives in the system. + +A drive name typically looks like 'C:\\\\'. +""" + def listmounts(volume: str) -> list[str]: + """Return a list containing mount points for a particular volume. + +'volume' should be a GUID path as returned from os.listvolumes. +""" + def listvolumes() -> list[str]: + """Return a list containing the volumes in the system. + +Volumes are typically represented as a GUID path. +""" if sys.version_info >= (3, 10) and sys.platform == "linux": EFD_CLOEXEC: Final[int] @@ -3090,4 +3197,9 @@ Equivalent to os.chmod(fd, mode). if sys.platform != "linux": if sys.version_info >= (3, 13) or sys.platform != "win32": # Added to Windows in 3.13. - def lchmod(path: StrOrBytesPath, mode: int) -> None: ... + def lchmod(path: StrOrBytesPath, mode: int) -> None: + """Change the access permissions of a file, without following symbolic links. + +If path is a symlink, this affects the link itself rather than the target. +Equivalent to chmod(path, mode, follow_symlinks=False)." +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi index ebb6967c14288..eca854db596c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi @@ -546,11 +546,23 @@ the built-in open() function does. if sys.platform == "win32": if sys.version_info >= (3, 13): # raises UnsupportedOperation: - def owner(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] - def group(self: Never, *, follow_symlinks: bool = True) -> str: ... # type: ignore[misc] + def owner(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] + """ +Return the login name of the file owner. +""" + def group(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] + """ +Return the group name of the file gid. +""" else: - def owner(self: Never) -> str: ... # type: ignore[misc] - def group(self: Never) -> str: ... # type: ignore[misc] + def owner(self: Never) -> str: # type: ignore[misc] + """ + Return the login name of the file owner. + """ + def group(self: Never) -> str: # type: ignore[misc] + """ + Return the group name of the file gid. + """ else: if sys.version_info >= (3, 13): def owner(self, *, follow_symlinks: bool = True) -> str: @@ -574,7 +586,10 @@ Return the group name of the file gid. # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms if sys.platform == "win32" and sys.version_info < (3, 12): - def is_mount(self: Never) -> bool: ... 
# type: ignore[misc] + def is_mount(self: Never) -> bool: # type: ignore[misc] + """ + Check if this path is a POSIX mount point + """ else: def is_mount(self) -> bool: """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi index fbe57c3da3d29..96fa1ac74df5e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi @@ -345,7 +345,16 @@ else: if sys.platform == "win32" and sys.version_info < (3, 12): @overload @deprecated("On Windows before Python 3.12, using a PathLike as `cmd` would always fail or return `None`.") - def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: ... + def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ @overload def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi index e6699c2f4724b..b51f3da0582a9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi @@ -1500,10 +1500,21 @@ data and a list containing the descriptors. """ if sys.platform == "win32": - def fromshare(info: bytes) -> socket: ... + def fromshare(info: bytes) -> socket: + """fromshare(info) -> socket object + +Create a socket object from the bytes object returned by +socket.share(pid). +""" if sys.platform == "win32": - def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: + """socketpair([family[, type[, proto]]]) -> (socket object, socket object) +Create a pair of socket objects from the sockets returned by the platform +socketpair() function. +The arguments are the same as for socket() except the default family is AF_UNIX +if defined on the platform; otherwise, the default is AF_INET. +""" else: def socketpair( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index cf33181842696..90f3a284e90aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -721,7 +721,19 @@ if sys.platform == "win32": @property def platform_version(self) -> tuple[int, int, int]: ... - def getwindowsversion() -> _WinVersion: ... + def getwindowsversion() -> _WinVersion: + """Return info about the running version of Windows as a named tuple. + +The members are named: major, minor, build, platform, service_pack, +service_pack_major, service_pack_minor, suite_mask, product_type and +platform_version. For backward compatibility, only the first 5 items +are available by indexing. All elements are numbers, except +service_pack and platform_type which are strings, and platform_version +which is a 3-tuple. Platform is always 2. Product_type may be 1 for a +workstation, 2 for a domain controller, 3 for a server. 
+Platform_version is a 3-tuple containing a version number that is +intended for identifying the OS rather than feature detection. +""" def intern(string: str, /) -> str: """``Intern'' the given string. @@ -843,9 +855,25 @@ if sys.platform == "win32": "Deprecated since Python 3.13; will be removed in Python 3.16. " "Use the `PYTHONLEGACYWINDOWSFSENCODING` environment variable instead." ) - def _enablelegacywindowsfsencoding() -> None: ... + def _enablelegacywindowsfsencoding() -> None: + """Changes the default filesystem encoding to mbcs:replace. + +This is done for consistency with earlier versions of Python. See PEP +529 for more information. + +This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING +environment variable before launching Python. +""" else: - def _enablelegacywindowsfsencoding() -> None: ... + def _enablelegacywindowsfsencoding() -> None: + """Changes the default filesystem encoding to mbcs:replace. + +This is done for consistency with earlier versions of Python. See PEP +529 for more information. + +This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING +environment variable before launching Python. +""" def get_coroutine_origin_tracking_depth() -> int: """Check status of origin tracking for coroutine objects in this thread. @@ -892,7 +920,9 @@ If no stack profiler is activated, this function has no effect. """Activate stack profiler trampoline *backend*. """ else: - def activate_stack_trampoline(backend: str, /) -> NoReturn: ... + def activate_stack_trampoline(backend: str, /) -> NoReturn: + """Activate stack profiler trampoline *backend*. +""" from . import _monitoring diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi index cb2c6f2f9febe..f0a504a7e864f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi @@ -907,7 +907,11 @@ at targetpath. def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented """Make a character or block device called targetpath. """ - def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ def makelink_with_filter( self, tarinfo: TarInfo, targetpath: StrOrBytesPath, filter_function: _FilterFunction, extraction_root: str ) -> None: # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 5db917442d3f9..077b53b4d5cd2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1659,7 +1659,19 @@ corresponding attributes. def wm_attributes(self, option: Literal["-type"], /) -> str: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: ... + def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. 
+When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-disabled"], /) -> bool: ... @overload @@ -1714,7 +1726,19 @@ corresponding attributes. def wm_attributes(self, option: Literal["type"], /) -> str: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: ... + def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["disabled"], /) -> bool: ... @overload @@ -1757,7 +1781,19 @@ corresponding attributes. def wm_attributes(self, option: Literal["-transparent"], value: bool, /) -> Literal[""]: ... elif sys.platform == "win32": @overload - def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: ... + def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-disabled"], value: bool, /) -> Literal[""]: ... @overload @@ -1808,7 +1844,19 @@ corresponding attributes. fullscreen: bool = ..., toolwindow: bool = ..., topmost: bool = ..., - ) -> None: ... + ) -> None: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" else: # X11 @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi index 912618cfc94ea..821f7557eb971 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi @@ -242,7 +242,9 @@ LAYOUTS /, *, padding: _Padding = ..., - ) -> None: ... + ) -> None: + """Create a new element in the current theme of given etype. 
+""" @overload def element_create( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi index 76c3571f9295f..11ebb205854d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi @@ -293,7 +293,13 @@ def parse_keqv_list(l: list[str]) -> dict[str, str]: """ if sys.platform == "win32" or sys.platform == "darwin": - def proxy_bypass(host: str) -> Any: ... # undocumented + def proxy_bypass(host: str) -> Any: # undocumented + """Return True, if host should be bypassed. + +Checks proxy settings gathered from the environment, if specified, +or the registry. + +""" else: def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi index 53457112ee968..8a8b8b01dd3d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi @@ -1,3 +1,41 @@ +"""This module provides access to the Windows registry API. + +Functions: + +CloseKey() - Closes a registry key. +ConnectRegistry() - Establishes a connection to a predefined registry handle + on another computer. +CreateKey() - Creates the specified key, or opens it if it already exists. +DeleteKey() - Deletes the specified key. +DeleteValue() - Removes a named value from the specified registry key. +EnumKey() - Enumerates subkeys of the specified open registry key. +EnumValue() - Enumerates values of the specified open registry key. +ExpandEnvironmentStrings() - Expand the env strings in a REG_EXPAND_SZ + string. +FlushKey() - Writes all the attributes of the specified key to the registry. +LoadKey() - Creates a subkey under HKEY_USER or HKEY_LOCAL_MACHINE and + stores registration information from a specified file into that + subkey. +OpenKey() - Opens the specified key. +OpenKeyEx() - Alias of OpenKey(). +QueryValue() - Retrieves the value associated with the unnamed value for a + specified key in the registry. +QueryValueEx() - Retrieves the type and data for a specified value name + associated with an open registry key. +QueryInfoKey() - Returns information about the specified key. +SaveKey() - Saves the specified key, and all its subkeys a file. +SetValue() - Associates a value with a specified key. +SetValueEx() - Stores data in the value field of an open registry key. + +Special objects: + +HKEYType -- type object for HKEY objects +error -- exception raised for Win32 errors + +Integer constants: +Many constants are defined - see the documentation for each function +to see what constants are used, and where. +""" import sys from _typeshed import ReadableBuffer, Unused from types import TracebackType @@ -6,29 +44,346 @@ from typing_extensions import Self, TypeAlias if sys.platform == "win32": _KeyType: TypeAlias = HKEYType | int - def CloseKey(hkey: _KeyType, /) -> None: ... - def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: ... - def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: ... - def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... - def DeleteKey(key: _KeyType, sub_key: str, /) -> None: ... - def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... - def DeleteValue(key: _KeyType, value: str, /) -> None: ... 
- def EnumKey(key: _KeyType, index: int, /) -> str: ... - def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: ... - def ExpandEnvironmentStrings(string: str, /) -> str: ... - def FlushKey(key: _KeyType, /) -> None: ... - def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: ... - def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... - def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... - def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: ... - def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: ... - def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: ... - def SaveKey(key: _KeyType, file_name: str, /) -> None: ... - def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: ... + def CloseKey(hkey: _KeyType, /) -> None: + """Closes a previously opened registry key. + + hkey + A previously opened key. + +Note that if the key is not closed using this method, it will be +closed when the hkey object is destroyed by Python. +""" + def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: + """Establishes a connection to the registry on another computer. + + computer_name + The name of the remote computer, of the form r"\\\\computername". If + None, the local computer is used. + key + The predefined key to connect to. + +The return value is the handle of the opened key. +If the function fails, an OSError exception is raised. +""" + def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: + """Creates or opens the specified key. + + key + An already open key, or one of the predefined HKEY_* constants. + sub_key + The name of the key this method opens or creates. + +If key is one of the predefined keys, sub_key may be None. In that case, +the handle returned is the same key handle passed in to the function. + +If the key already exists, this function opens the existing key. + +The return value is the handle of the opened key. +If the function fails, an OSError exception is raised. +""" + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: + """Creates or opens the specified key. + + key + An already open key, or one of the predefined HKEY_* constants. + sub_key + The name of the key this method opens or creates. + reserved + A reserved integer, and must be zero. Default is zero. + access + An integer that specifies an access mask that describes the + desired security access for the key. Default is KEY_WRITE. + +If key is one of the predefined keys, sub_key may be None. In that case, +the handle returned is the same key handle passed in to the function. + +If the key already exists, this function opens the existing key + +The return value is the handle of the opened key. +If the function fails, an OSError exception is raised. +""" + def DeleteKey(key: _KeyType, sub_key: str, /) -> None: + """Deletes the specified key. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that must be the name of a subkey of the key identified by + the key parameter. This value must not be None, and the key may not + have subkeys. + +This method can not delete keys with subkeys. + +If the function succeeds, the entire key, including all of its values, +is removed. If the function fails, an OSError exception is raised. 
+""" + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: + """Deletes the specified key (intended for 64-bit OS). + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that must be the name of a subkey of the key identified by + the key parameter. This value must not be None, and the key may not + have subkeys. + access + An integer that specifies an access mask that describes the + desired security access for the key. Default is KEY_WOW64_64KEY. + reserved + A reserved integer, and must be zero. Default is zero. + +While this function is intended to be used for 64-bit OS, it is also + available on 32-bit systems. + +This method can not delete keys with subkeys. + +If the function succeeds, the entire key, including all of its values, +is removed. If the function fails, an OSError exception is raised. +On unsupported Windows versions, NotImplementedError is raised. +""" + def DeleteValue(key: _KeyType, value: str, /) -> None: + """Removes a named value from a registry key. + + key + An already open key, or any one of the predefined HKEY_* constants. + value + A string that identifies the value to remove. +""" + def EnumKey(key: _KeyType, index: int, /) -> str: + """Enumerates subkeys of an open registry key. + + key + An already open key, or any one of the predefined HKEY_* constants. + index + An integer that identifies the index of the key to retrieve. + +The function retrieves the name of one subkey each time it is called. +It is typically called repeatedly until an OSError exception is +raised, indicating no more values are available. +""" + def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: + """Enumerates values of an open registry key. + + key + An already open key, or any one of the predefined HKEY_* constants. + index + An integer that identifies the index of the value to retrieve. + +The function retrieves the name of one subkey each time it is called. +It is typically called repeatedly, until an OSError exception +is raised, indicating no more values. + +The result is a tuple of 3 items: + value_name + A string that identifies the value. + value_data + An object that holds the value data, and whose type depends + on the underlying registry type. + data_type + An integer that identifies the type of the value data. +""" + def ExpandEnvironmentStrings(string: str, /) -> str: + """Expand environment vars. +""" + def FlushKey(key: _KeyType, /) -> None: + """Writes all the attributes of a key to the registry. + + key + An already open key, or any one of the predefined HKEY_* constants. + +It is not necessary to call FlushKey to change a key. Registry changes +are flushed to disk by the registry using its lazy flusher. Registry +changes are also flushed to disk at system shutdown. Unlike +CloseKey(), the FlushKey() method returns only when all the data has +been written to the registry. + +An application should only call FlushKey() if it requires absolute +certainty that registry changes are on disk. If you don't know whether +a FlushKey() call is required, it probably isn't. +""" + def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: + """Insert data into the registry from a file. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub-key to load. + file_name + The name of the file to load registry data from. This file must + have been created with the SaveKey() function. 
Under the file + allocation table (FAT) file system, the filename may not have an + extension. + +Creates a subkey under the specified key and stores registration +information from a specified file into that subkey. + +A call to LoadKey() fails if the calling process does not have the +SE_RESTORE_PRIVILEGE privilege. + +If key is a handle returned by ConnectRegistry(), then the path +specified in fileName is relative to the remote computer. + +The MSDN docs imply key must be in the HKEY_USER or HKEY_LOCAL_MACHINE +tree. +""" + def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: + """Opens the specified key. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub_key to open. + reserved + A reserved integer that must be zero. Default is zero. + access + An integer that specifies an access mask that describes the desired + security access for the key. Default is KEY_READ. + +The result is a new handle to the specified key. +If the function fails, an OSError exception is raised. +""" + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: + """Opens the specified key. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub_key to open. + reserved + A reserved integer that must be zero. Default is zero. + access + An integer that specifies an access mask that describes the desired + security access for the key. Default is KEY_READ. + +The result is a new handle to the specified key. +If the function fails, an OSError exception is raised. +""" + def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: + """Returns information about a key. + + key + An already open key, or any one of the predefined HKEY_* constants. + +The result is a tuple of 3 items: +An integer that identifies the number of sub keys this key has. +An integer that identifies the number of values this key has. +An integer that identifies when the key was last modified (if available) +as 100's of nanoseconds since Jan 1, 1600. +""" + def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: + """Retrieves the unnamed value for a key. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that holds the name of the subkey with which the value + is associated. If this parameter is None or empty, the function + retrieves the value set by the SetValue() method for the key + identified by key. + +Values in the registry have name, type, and data components. This method +retrieves the data for a key's first value that has a NULL name. +But since the underlying API call doesn't return the type, you'll +probably be happier using QueryValueEx; this function is just here for +completeness. +""" + def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: + """Retrieves the type and value of a specified sub-key. + + key + An already open key, or any one of the predefined HKEY_* constants. + name + A string indicating the value to query. + +Behaves mostly like QueryValue(), but also returns the type of the +specified value name associated with the given open registry key. + +The return value is a tuple of the value and the type_id. +""" + def SaveKey(key: _KeyType, file_name: str, /) -> None: + """Saves the specified key, and all its subkeys to the specified file. + + key + An already open key, or any one of the predefined HKEY_* constants. 
+ file_name + The name of the file to save registry data to. This file cannot + already exist. If this filename includes an extension, it cannot be + used on file allocation table (FAT) file systems by the LoadKey(), + ReplaceKey() or RestoreKey() methods. + +If key represents a key on a remote computer, the path described by +file_name is relative to the remote computer. + +The caller of this method must possess the SeBackupPrivilege +security privilege. This function passes NULL for security_attributes +to the API. +""" + def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: + """Associates a value with a specified key. + + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that names the subkey with which the value is associated. + type + An integer that specifies the type of the data. Currently this must + be REG_SZ, meaning only strings are supported. + value + A string that specifies the new value. + +If the key specified by the sub_key parameter does not exist, the +SetValue function creates it. + +Value lengths are limited by available memory. Long values (more than +2048 bytes) should be stored as files with the filenames stored in +the configuration registry to help the registry perform efficiently. + +The key identified by the key parameter must have been opened with +KEY_SET_VALUE access. +""" @overload # type=REG_DWORD|REG_QWORD def SetValueEx( key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, / - ) -> None: ... + ) -> None: + """Stores data in the value field of an open registry key. + + key + An already open key, or any one of the predefined HKEY_* constants. + value_name + A string containing the name of the value to set, or None. + reserved + Can be anything - zero is always passed to the API. + type + An integer that specifies the type of the data, one of: + REG_BINARY -- Binary data in any form. + REG_DWORD -- A 32-bit number. + REG_DWORD_LITTLE_ENDIAN -- A 32-bit number in little-endian format. Equivalent to REG_DWORD + REG_DWORD_BIG_ENDIAN -- A 32-bit number in big-endian format. + REG_EXPAND_SZ -- A null-terminated string that contains unexpanded + references to environment variables (for example, + %PATH%). + REG_LINK -- A Unicode symbolic link. + REG_MULTI_SZ -- A sequence of null-terminated strings, terminated + by two null characters. Note that Python handles + this termination automatically. + REG_NONE -- No defined value type. + REG_QWORD -- A 64-bit number. + REG_QWORD_LITTLE_ENDIAN -- A 64-bit number in little-endian format. Equivalent to REG_QWORD. + REG_RESOURCE_LIST -- A device-driver resource list. + REG_SZ -- A null-terminated string. + value + A string that specifies the new value. + +This method can also set additional value and type information for the +specified key. The key identified by the key parameter must have been +opened with KEY_SET_VALUE access. + +To open the key, use the CreateKeyEx() or OpenKeyEx() methods. + +Value lengths are limited by available memory. Long values (more than +2048 bytes) should be stored as files with the filenames stored in +the configuration registry to help the registry perform efficiently. +""" @overload # type=REG_SZ|REG_EXPAND_SZ def SetValueEx( key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[1, 2], value: str | None, / @@ -55,9 +410,36 @@ if sys.platform == "win32": value: int | str | list[str] | ReadableBuffer | None, /, ) -> None: ... 
- def DisableReflectionKey(key: _KeyType, /) -> None: ... - def EnableReflectionKey(key: _KeyType, /) -> None: ... - def QueryReflectionKey(key: _KeyType, /) -> bool: ... + def DisableReflectionKey(key: _KeyType, /) -> None: + """Disables registry reflection for 32bit processes running on a 64bit OS. + + key + An already open key, or any one of the predefined HKEY_* constants. + +Will generally raise NotImplementedError if executed on a 32bit OS. + +If the key is not on the reflection list, the function succeeds but has +no effect. Disabling reflection for a key does not affect reflection +of any subkeys. +""" + def EnableReflectionKey(key: _KeyType, /) -> None: + """Restores registry reflection for the specified disabled key. + + key + An already open key, or any one of the predefined HKEY_* constants. + +Will generally raise NotImplementedError if executed on a 32bit OS. +Restoring reflection for a key does not affect reflection of any +subkeys. +""" + def QueryReflectionKey(key: _KeyType, /) -> bool: + """Returns the reflection state for the specified key as a bool. + + key + An already open key, or any one of the predefined HKEY_* constants. + +Will generally raise NotImplementedError if executed on a 32bit OS. +""" HKEY_CLASSES_ROOT: Final[int] HKEY_CURRENT_USER: Final[int] @@ -119,14 +501,53 @@ if sys.platform == "win32": # Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason @final class HKEYType: - def __bool__(self) -> bool: ... - def __int__(self) -> int: ... + """PyHKEY Object - A Python object, representing a win32 registry key. + +This object wraps a Windows HKEY object, automatically closing it when +the object is destroyed. To guarantee cleanup, you can call either +the Close() method on the PyHKEY, or the CloseKey() method. + +All functions which accept a handle object also accept an integer -- +however, use of the handle object is encouraged. + +Functions: +Close() - Closes the underlying handle. +Detach() - Returns the integer Win32 handle, detaching it from the object + +Properties: +handle - The integer Win32 handle. + +Operations: +__bool__ - Handles with an open object return true, otherwise false. +__int__ - Converting a handle to an integer returns the Win32 handle. +rich comparison - Handle objects are compared using the handle value. +""" + def __bool__(self) -> bool: + """True if self else False +""" + def __int__(self) -> int: + """int(self) +""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> bool | None: ... - def Close(self) -> None: ... - def Detach(self) -> int: ... + def Close(self) -> None: + """Closes the underlying Windows handle. + +If the handle is already closed, no error is raised. +""" + def Detach(self) -> int: + """Detaches the Windows handle from the handle object. + +The result is the value of the handle before it is detached. If the +handle is already detached, this will return zero. + +After calling this function, the handle is effectively invalidated, +but the handle is not closed. You would call this function when you +need the underlying win32 handle to exist beyond the lifetime of the +handle object. +""" def __hash__(self) -> int: ... @property def handle(self) -> int: ... 
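Editor's note: the winreg docstrings added above spell out the open/query/set/close lifecycle and mention that PyHKEY handles also work as context managers. A minimal, hedged usage sketch of that lifecycle follows (Windows only); the key path "Software\ExampleApp" and the value names "Path"/"InstallDir" are illustrative assumptions, not taken from the stubs.

import winreg

# Read a value: OpenKey + QueryValueEx returns a (data, type_id) tuple.
with winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Environment", 0, winreg.KEY_READ) as key:
    data, type_id = winreg.QueryValueEx(key, "Path")  # "Path" is an illustrative value name
    print(type_id, data)

# Write a value: CreateKeyEx opens or creates the subkey, SetValueEx stores typed data.
with winreg.CreateKeyEx(winreg.HKEY_CURRENT_USER, r"Software\ExampleApp", 0, winreg.KEY_WRITE) as key:
    winreg.SetValueEx(key, "InstallDir", 0, winreg.REG_SZ, r"C:\ExampleApp")

# Clean up the illustrative subkey again (it has no subkeys, so DeleteKey is allowed).
winreg.DeleteKey(winreg.HKEY_CURRENT_USER, r"Software\ExampleApp")

Keys not opened in a with-statement should be closed explicitly with CloseKey(), as the module docstring above notes.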
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi index 39dfa7b8b9c42..c356d2a3338fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi @@ -1,3 +1,21 @@ +"""PlaySound(sound, flags) - play a sound +SND_FILENAME - sound is a wav file name +SND_ALIAS - sound is a registry sound association name +SND_LOOP - Play the sound repeatedly; must also specify SND_ASYNC +SND_MEMORY - sound is a memory image of a wav file +SND_PURGE - stop all instances of the specified sound +SND_ASYNC - PlaySound returns immediately +SND_NODEFAULT - Do not play a default beep if the sound can not be found +SND_NOSTOP - Do not interrupt any sounds currently playing +SND_NOWAIT - Return immediately if the sound driver is busy +SND_APPLICATION - sound is an application-specific alias in the registry. +SND_SENTRY - Triggers a SoundSentry event when the sound is played. +SND_SYNC - Play the sound synchronously, default behavior. +SND_SYSTEM - Assign sound to the audio session for system notification sounds. + +Beep(frequency, duration) - Make a beep through the PC speaker. +MessageBeep(type) - Call Windows MessageBeep. +""" import sys from _typeshed import ReadableBuffer from typing import Final, Literal, overload @@ -29,10 +47,29 @@ if sys.platform == "win32": MB_ICONSTOP: Final = 16 MB_ICONWARNING: Final = 48 - def Beep(frequency: int, duration: int) -> None: ... + def Beep(frequency: int, duration: int) -> None: + """A wrapper around the Windows Beep API. + + frequency + Frequency of the sound in hertz. + Must be in the range 37 through 32,767. + duration + How long the sound should play, in milliseconds. +""" # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload - def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: + """A wrapper around the Windows PlaySound API. + + sound + The sound to play; a filename, data, or None. + flags + Flag values, ored together. See module documentation. +""" @overload def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... - def MessageBeep(type: int = 0) -> None: ... + def MessageBeep(type: int = 0) -> None: + """Call Windows MessageBeep(x). + +x defaults to MB_OK. +""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi index 920d18fa5b235..9f9202dfd3063 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi @@ -81,8 +81,23 @@ else: >>> translate('**/*') '.*/[^/]*' """ - def match_dirs(pattern: str) -> str: ... - def translate_core(pattern: str) -> str: ... + def match_dirs(pattern: str) -> str: + """ + Ensure that zipfile.Path directory names are matched. + + zipfile.Path directory names always end in a slash. + """ + def translate_core(pattern: str) -> str: + """ + Given a glob pattern, produce a regex that matches it. + + >>> translate('*.txt') + '[^/]*\\\\.txt' + >>> translate('a?txt') + 'a.txt' + >>> translate('**/*') + '.*/[^/]*' + """ def replace(match: Match[str]) -> str: """ Perform the replacements for a match from :func:`separate`. 
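Editor's note: the winsound stubs above document Beep(), PlaySound() and MessageBeep() together with the SND_*/MB_* flag values. A short Windows-only sketch of how those calls are typically combined; the .wav path is a placeholder assumption.

import winsound

winsound.Beep(440, 250)                        # 440 Hz tone for 250 ms (frequency must be 37..32767)
winsound.MessageBeep(winsound.MB_ICONWARNING)  # play the configured warning sound

# SND_FILENAME: treat the argument as a file name; SND_ASYNC: return immediately.
winsound.PlaySound(r"C:\Windows\Media\notify.wav",
                   winsound.SND_FILENAME | winsound.SND_ASYNC)

# Passing None stops any waveform sound that is still playing.
winsound.PlaySound(None, 0)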
From 6da37c2ce789d58a80ec7926d726315e36027fee Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:39:24 +0000 Subject: [PATCH 4/8] Sync macOS docstrings --- .../vendor/typeshed/stdlib/nis.pyi | 27 +++++++-- .../vendor/typeshed/stdlib/os/__init__.pyi | 33 +++++++++-- .../vendor/typeshed/stdlib/select.pyi | 56 +++++++++++++++++-- .../vendor/typeshed/stdlib/selectors.pyi | 2 + .../typeshed/stdlib/tkinter/__init__.pyi | 56 +++++++++++++++++-- .../vendor/typeshed/stdlib/webbrowser.pyi | 18 ++++++ .../vendor/typeshed/stdlib/xxlimited.pyi | 4 +- 7 files changed, 177 insertions(+), 19 deletions(-) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi index 10eef2336a834..5cda780e7f673 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi @@ -1,9 +1,28 @@ +"""This module contains functions for accessing NIS maps. +""" import sys if sys.platform != "win32": - def cat(map: str, domain: str = ...) -> dict[str, str]: ... - def get_default_domain() -> str: ... - def maps(domain: str = ...) -> list[str]: ... - def match(key: str, map: str, domain: str = ...) -> str: ... + def cat(map: str, domain: str = ...) -> dict[str, str]: + """cat(map, domain = defaultdomain) +Returns the entire map as a dictionary. Optionally domain can be +specified but it defaults to the system default domain. +""" + def get_default_domain() -> str: + """get_default_domain() -> str +Corresponds to the C library yp_get_default_domain() call, returning +the default NIS domain. +""" + def maps(domain: str = ...) -> list[str]: + """maps(domain = defaultdomain) +Returns an array of all available NIS maps within a domain. If domain +is not specified it defaults to the system default domain. +""" + def match(key: str, map: str, domain: str = ...) -> str: + """match(key, map, domain = defaultdomain) +Corresponds to the C library yp_match() call, returning the value of +key in the given map. Optionally domain can be specified but it +defaults to the system default domain. +""" class error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 2396cacb4df7f..13cd998b7ff45 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -923,12 +923,18 @@ In the future, this property will contain the last metadata change time.""" # On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): @property - def st_gen(self) -> int: ... # file generation number + def st_gen(self) -> int: # file generation number + """generation number +""" @property - def st_birthtime(self) -> float: ... # time of file creation in seconds + def st_birthtime(self) -> float: # time of file creation in seconds + """time of creation +""" if sys.platform == "darwin": @property - def st_flags(self) -> int: ... # user defined flags for file + def st_flags(self) -> int: # user defined flags for file + """user defined flags for file +""" # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 @@ -1583,7 +1589,9 @@ Using non-zero flags requires Linux 4.7 or newer. 
headers: Sequence[ReadableBuffer] = (), trailers: Sequence[ReadableBuffer] = (), flags: int = 0, - ) -> int: ... # FreeBSD and Mac OS X only + ) -> int: # FreeBSD and Mac OS X only + """Copy count bytes from file descriptor in_fd to file descriptor out_fd. +""" def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: """Read from a file descriptor fd into an iterable of buffers. @@ -1762,8 +1770,21 @@ dir_fd and follow_symlinks may not be implemented on your platform. """ if sys.platform != "win32" and sys.platform != "linux": - def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix - def lchflags(path: StrOrBytesPath, flags: int) -> None: ... + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: # some flavors of Unix + """Set file flags. + +If follow_symlinks is False, and the last element of the path is a symbolic + link, chflags will change flags on the symbolic link itself instead of the + file the link points to. +follow_symlinks may not be implemented on your platform. If it is +unavailable, using it will raise a NotImplementedError. +""" + def lchflags(path: StrOrBytesPath, flags: int) -> None: + """Set file flags. + +This function will not follow symbolic links. +Equivalent to chflags(path, flags, follow_symlinks=False). +""" if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi index 83db6d9eb4a14..421671107a0ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi @@ -74,6 +74,21 @@ if sys.platform != "linux" and sys.platform != "win32": # BSD only @final class kevent: + """kevent(ident, filter=KQ_FILTER_READ, flags=KQ_EV_ADD, fflags=0, data=0, udata=0) + +This object is the equivalent of the struct kevent for the C API. + +See the kqueue manpage for more detailed information about the meaning +of the arguments. + +One minor note: while you might hope that udata could store a +reference to a python object, it cannot, because it is impossible to +keep a proper reference count of the object once it's passed into the +kernel. Therefore, I have restricted it to only storing an integer. I +recommend ignoring it and simply using the 'ident' field to key off +of. You could also set up a dictionary on the python side to store a +udata->object mapping. +""" data: Any fflags: int filter: int @@ -94,15 +109,48 @@ if sys.platform != "linux" and sys.platform != "win32": # BSD only @final class kqueue: + """Kqueue syscall wrapper. + +For example, to start watching a socket for input: +>>> kq = kqueue() +>>> sock = socket() +>>> sock.connect((host, port)) +>>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_ADD)], 0) + +To wait one second for it to become writeable: +>>> kq.control(None, 1, 1000) + +To stop listening: +>>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_DELETE)], 0) +""" closed: bool def __init__(self) -> None: ... - def close(self) -> None: ... + def close(self) -> None: + """Close the kqueue control file descriptor. + +Further operations on the kqueue object will raise an exception. +""" def control( self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, / - ) -> list[kevent]: ... - def fileno(self) -> int: ... + ) -> list[kevent]: + """Calls the kernel kevent function. 
+ + changelist + Must be an iterable of kevent objects describing the changes to be made + to the kernel's watch list or None. + maxevents + The maximum number of events that the kernel will return. + timeout + The maximum time to wait in seconds, or else None to wait forever. + This accepts floats for smaller timeouts, too. +""" + def fileno(self) -> int: + """Return the kqueue control file descriptor. +""" @classmethod - def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: ... + def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: + """Create a kqueue object from a given control fd. +""" KQ_EV_ADD: Final[int] KQ_EV_CLEAR: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi index 20ea1708cd967..c53f95de620a1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi @@ -163,6 +163,8 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win if sys.platform != "win32" and sys.platform != "linux": class KqueueSelector(_BaseSelectorImpl): + """Kqueue-based selector. +""" def fileno(self) -> int: ... def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 077b53b4d5cd2..98676ea6a508a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1648,7 +1648,19 @@ corresponding attributes. def wm_attributes(self, option: Literal["-topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["-modified"], /) -> bool: ... + def wm_attributes(self, option: Literal["-modified"], /) -> bool: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-notify"], /) -> bool: ... @overload @@ -1715,7 +1727,19 @@ corresponding attributes. def wm_attributes(self, option: Literal["topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["modified"], /) -> bool: ... + def wm_attributes(self, option: Literal["modified"], /) -> bool: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["notify"], /) -> bool: ... @overload @@ -1772,7 +1796,19 @@ corresponding attributes. 
def wm_attributes(self, option: Literal["-topmost"], value: bool, /) -> Literal[""]: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: ... + def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" @overload def wm_attributes(self, option: Literal["-notify"], value: bool, /) -> Literal[""]: ... @overload @@ -1832,7 +1868,19 @@ corresponding attributes. titlepath: str = ..., topmost: bool = ..., transparent: bool = ..., - ) -> None: ... + ) -> None: + """Return or sets platform specific attributes. + +When called with a single argument return_python_dict=True, +return a dict of the platform specific attributes and their values. +When called without arguments or with a single argument +return_python_dict=False, return a tuple containing intermixed +attribute names with the minus prefix and their values. + +When called with a single string value, return the value for the +specific option. When called with keyword arguments, set the +corresponding attributes. +""" elif sys.platform == "win32": @overload def wm_attributes( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi index 935f0eed1742b..030a17d39f4c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi @@ -117,11 +117,29 @@ if sys.platform == "darwin": if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13.") class MacOSX(BaseBrowser): + """Launcher class for Aqua browsers on Mac OS X + + Optionally specify a browser name on instantiation. Note that this + will not work for Aqua browsers if the user has moved the application + package after installation. + + If no browser is specified, the default browser, as specified in the + Internet System Preferences panel, will be used. + """ def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... else: class MacOSX(BaseBrowser): + """Launcher class for Aqua browsers on Mac OS X + + Optionally specify a browser name on instantiation. Note that this + will not work for Aqua browsers if the user has moved the application + package after installation. + + If no browser is specified, the default browser, as specified in the + Internet System Preferences panel, will be used. + """ def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi index f64783497f358..92cf681944e59 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi @@ -33,4 +33,6 @@ else: class Null: __hash__: ClassVar[None] # type: ignore[assignment] - def roj(b: Any, /) -> None: ... 
+ def roj(b: Any, /) -> None: + """roj(a,b) -> None +""" From 54f5e17b6c4a066ecb2273c9cf37c1937890bbdb Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:39:57 +0000 Subject: [PATCH 5/8] Format codemodded docstrings --- .../vendor/typeshed/stdlib/__future__.pyi | 12 +- .../vendor/typeshed/stdlib/_asyncio.pyi | 224 +- .../vendor/typeshed/stdlib/_bisect.pyi | 96 +- .../vendor/typeshed/stdlib/_blake2.pyi | 42 +- .../vendor/typeshed/stdlib/_bootlocale.pyi | 1 + .../vendor/typeshed/stdlib/_bz2.pyi | 63 +- .../vendor/typeshed/stdlib/_codecs.pyi | 57 +- .../typeshed/stdlib/_collections_abc.pyi | 25 +- .../vendor/typeshed/stdlib/_compression.pyi | 11 +- .../vendor/typeshed/stdlib/_contextvars.pyi | 68 +- .../vendor/typeshed/stdlib/_csv.pyi | 126 +- .../vendor/typeshed/stdlib/_ctypes.pyi | 153 +- .../vendor/typeshed/stdlib/_curses.pyi | 1440 ++--- .../vendor/typeshed/stdlib/_curses_panel.pyi | 64 +- .../vendor/typeshed/stdlib/_dbm.pyi | 32 +- .../vendor/typeshed/stdlib/_decimal.pyi | 33 +- .../typeshed/stdlib/_frozen_importlib.pyi | 160 +- .../stdlib/_frozen_importlib_external.pyi | 306 +- .../vendor/typeshed/stdlib/_gdbm.pyi | 37 +- .../vendor/typeshed/stdlib/_hashlib.pyi | 289 +- .../vendor/typeshed/stdlib/_heapq.pyi | 56 +- .../vendor/typeshed/stdlib/_imp.pyi | 77 +- .../typeshed/stdlib/_interpchannels.pyi | 204 +- .../vendor/typeshed/stdlib/_interpqueues.pyi | 77 +- .../vendor/typeshed/stdlib/_interpreters.pyi | 152 +- .../vendor/typeshed/stdlib/_io.pyi | 722 +-- .../vendor/typeshed/stdlib/_json.pyi | 76 +- .../vendor/typeshed/stdlib/_locale.pyi | 50 +- .../vendor/typeshed/stdlib/_lsprof.pyi | 74 +- .../vendor/typeshed/stdlib/_lzma.pyi | 134 +- .../vendor/typeshed/stdlib/_markupbase.pyi | 10 +- .../vendor/typeshed/stdlib/_msi.pyi | 39 +- .../vendor/typeshed/stdlib/_operator.pyi | 237 +- .../vendor/typeshed/stdlib/_osx_support.pyi | 126 +- .../vendor/typeshed/stdlib/_pickle.pyi | 280 +- .../typeshed/stdlib/_posixsubprocess.pyi | 60 +- .../vendor/typeshed/stdlib/_py_abc.pyi | 33 +- .../vendor/typeshed/stdlib/_pydecimal.pyi | 4 +- .../vendor/typeshed/stdlib/_queue.pyi | 57 +- .../vendor/typeshed/stdlib/_random.pyi | 30 +- .../vendor/typeshed/stdlib/_sitebuiltins.pyi | 17 +- .../vendor/typeshed/stdlib/_socket.pyi | 649 +-- .../vendor/typeshed/stdlib/_sqlite3.pyi | 58 +- .../vendor/typeshed/stdlib/_ssl.pyi | 123 +- .../vendor/typeshed/stdlib/_stat.pyi | 62 +- .../vendor/typeshed/stdlib/_struct.pyi | 105 +- .../vendor/typeshed/stdlib/_thread.pyi | 397 +- .../typeshed/stdlib/_threading_local.pyi | 13 +- .../vendor/typeshed/stdlib/_tkinter.pyi | 50 +- .../vendor/typeshed/stdlib/_tracemalloc.pyi | 63 +- .../vendor/typeshed/stdlib/_warnings.pyi | 36 +- .../vendor/typeshed/stdlib/_weakref.pyi | 18 +- .../vendor/typeshed/stdlib/_weakrefset.pyi | 4 +- .../vendor/typeshed/stdlib/_winapi.pyi | 152 +- .../vendor/typeshed/stdlib/_zstd.pyi | 320 +- .../vendor/typeshed/stdlib/abc.pyi | 144 +- .../vendor/typeshed/stdlib/aifc.pyi | 5 +- .../vendor/typeshed/stdlib/annotationlib.pyi | 183 +- .../vendor/typeshed/stdlib/argparse.pyi | 253 +- .../vendor/typeshed/stdlib/array.pyi | 265 +- .../vendor/typeshed/stdlib/ast.pyi | 1190 ++-- .../vendor/typeshed/stdlib/asynchat.pyi | 9 +- .../typeshed/stdlib/asyncio/__init__.pyi | 4 +- .../typeshed/stdlib/asyncio/base_events.pyi | 422 +- .../typeshed/stdlib/asyncio/base_futures.pyi | 16 +- .../stdlib/asyncio/base_subprocess.pyi | 5 +- .../typeshed/stdlib/asyncio/constants.pyi | 4 +- .../typeshed/stdlib/asyncio/coroutines.pyi | 13 +- 
.../vendor/typeshed/stdlib/asyncio/events.pyi | 618 +-- .../typeshed/stdlib/asyncio/exceptions.pyi | 39 +- .../stdlib/asyncio/format_helpers.pyi | 20 +- .../typeshed/stdlib/asyncio/futures.pyi | 7 +- .../vendor/typeshed/stdlib/asyncio/graph.pyi | 65 +- .../vendor/typeshed/stdlib/asyncio/locks.pyi | 318 +- .../vendor/typeshed/stdlib/asyncio/log.pyi | 4 +- .../vendor/typeshed/stdlib/asyncio/mixins.pyi | 4 +- .../stdlib/asyncio/proactor_events.pyi | 21 +- .../typeshed/stdlib/asyncio/protocols.pyi | 224 +- .../vendor/typeshed/stdlib/asyncio/queues.pyi | 136 +- .../typeshed/stdlib/asyncio/runners.pyi | 110 +- .../stdlib/asyncio/selector_events.pyi | 14 +- .../typeshed/stdlib/asyncio/sslproto.pyi | 210 +- .../typeshed/stdlib/asyncio/staggered.pyi | 76 +- .../typeshed/stdlib/asyncio/streams.pyi | 324 +- .../typeshed/stdlib/asyncio/subprocess.pyi | 8 +- .../typeshed/stdlib/asyncio/taskgroups.pyi | 26 +- .../vendor/typeshed/stdlib/asyncio/tasks.pyi | 448 +- .../typeshed/stdlib/asyncio/threads.pyi | 16 +- .../typeshed/stdlib/asyncio/timeouts.pyi | 67 +- .../vendor/typeshed/stdlib/asyncio/tools.pyi | 32 +- .../typeshed/stdlib/asyncio/transports.pyi | 266 +- .../vendor/typeshed/stdlib/asyncio/trsock.pyi | 9 +- .../typeshed/stdlib/asyncio/unix_events.pyi | 303 +- .../stdlib/asyncio/windows_events.pyi | 41 +- .../typeshed/stdlib/asyncio/windows_utils.pyi | 17 +- .../vendor/typeshed/stdlib/asyncore.pyi | 1 + .../vendor/typeshed/stdlib/atexit.pyi | 23 +- .../vendor/typeshed/stdlib/audioop.pyi | 103 +- .../vendor/typeshed/stdlib/base64.pyi | 235 +- .../vendor/typeshed/stdlib/bdb.pyi | 395 +- .../vendor/typeshed/stdlib/binascii.pyi | 124 +- .../vendor/typeshed/stdlib/binhex.pyi | 7 +- .../vendor/typeshed/stdlib/bisect.pyi | 4 +- .../vendor/typeshed/stdlib/builtins.pyi | 4051 +++++++------- .../vendor/typeshed/stdlib/bz2.pyi | 159 +- .../vendor/typeshed/stdlib/cProfile.pyi | 37 +- .../vendor/typeshed/stdlib/calendar.pyi | 288 +- .../vendor/typeshed/stdlib/cgi.pyi | 85 +- .../vendor/typeshed/stdlib/cgitb.pyi | 26 +- .../vendor/typeshed/stdlib/chunk.pyi | 11 +- .../vendor/typeshed/stdlib/cmath.pyi | 115 +- .../vendor/typeshed/stdlib/cmd.pyi | 113 +- .../vendor/typeshed/stdlib/code.pyi | 253 +- .../vendor/typeshed/stdlib/codecs.pyi | 628 ++- .../vendor/typeshed/stdlib/codeop.pyi | 99 +- .../typeshed/stdlib/collections/__init__.pyi | 572 +- .../typeshed/stdlib/collections/abc.pyi | 1 + .../vendor/typeshed/stdlib/colorsys.pyi | 1 + .../vendor/typeshed/stdlib/compileall.pyi | 189 +- .../stdlib/compression/_common/_streams.pyi | 11 +- .../typeshed/stdlib/compression/bz2.pyi | 1 + .../typeshed/stdlib/compression/gzip.pyi | 1 + .../typeshed/stdlib/compression/lzma.pyi | 1 + .../typeshed/stdlib/compression/zlib.pyi | 1 + .../stdlib/compression/zstd/__init__.pyi | 126 +- .../stdlib/compression/zstd/_zstdfile.pyi | 164 +- .../stdlib/concurrent/futures/__init__.pyi | 4 +- .../stdlib/concurrent/futures/_base.pyi | 371 +- .../stdlib/concurrent/futures/interpreter.pyi | 25 +- .../stdlib/concurrent/futures/process.pyi | 165 +- .../stdlib/concurrent/futures/thread.pyi | 33 +- .../concurrent/interpreters/__init__.pyi | 103 +- .../concurrent/interpreters/_crossinterp.pyi | 21 +- .../concurrent/interpreters/_queues.pyi | 118 +- .../vendor/typeshed/stdlib/configparser.pyi | 295 +- .../vendor/typeshed/stdlib/contextlib.pyi | 344 +- .../vendor/typeshed/stdlib/copy.pyi | 16 +- .../vendor/typeshed/stdlib/copyreg.pyi | 9 +- .../vendor/typeshed/stdlib/crypt.pyi | 30 +- .../vendor/typeshed/stdlib/csv.pyi | 41 +- 
.../typeshed/stdlib/ctypes/__init__.pyi | 111 +- .../vendor/typeshed/stdlib/ctypes/_endian.pyi | 14 +- .../stdlib/ctypes/macholib/__init__.pyi | 1 + .../typeshed/stdlib/ctypes/macholib/dyld.pyi | 18 +- .../typeshed/stdlib/ctypes/macholib/dylib.pyi | 33 +- .../stdlib/ctypes/macholib/framework.pyi | 33 +- .../vendor/typeshed/stdlib/ctypes/util.pyi | 6 +- .../typeshed/stdlib/curses/__init__.pyi | 11 +- .../vendor/typeshed/stdlib/curses/ascii.pyi | 4 +- .../vendor/typeshed/stdlib/curses/panel.pyi | 1 + .../vendor/typeshed/stdlib/curses/textpad.pyi | 64 +- .../vendor/typeshed/stdlib/dataclasses.pyi | 385 +- .../vendor/typeshed/stdlib/datetime.pyi | 352 +- .../vendor/typeshed/stdlib/dbm/__init__.pyi | 59 +- .../vendor/typeshed/stdlib/dbm/dumb.pyi | 37 +- .../vendor/typeshed/stdlib/dbm/gnu.pyi | 4 +- .../vendor/typeshed/stdlib/dbm/ndbm.pyi | 4 +- .../vendor/typeshed/stdlib/dbm/sqlite3.pyi | 18 +- .../vendor/typeshed/stdlib/decimal.pyi | 819 ++- .../vendor/typeshed/stdlib/difflib.pyi | 1225 ++--- .../vendor/typeshed/stdlib/dis.pyi | 285 +- .../typeshed/stdlib/distutils/__init__.pyi | 1 + .../stdlib/distutils/_msvccompiler.pyi | 6 +- .../stdlib/distutils/archive_util.pyi | 3 + .../stdlib/distutils/bcppcompiler.pyi | 1 + .../typeshed/stdlib/distutils/ccompiler.pyi | 39 +- .../vendor/typeshed/stdlib/distutils/cmd.pyi | 30 +- .../stdlib/distutils/command/__init__.pyi | 1 + .../stdlib/distutils/command/bdist.pyi | 4 +- .../stdlib/distutils/command/bdist_dumb.pyi | 1 + .../stdlib/distutils/command/bdist_msi.pyi | 59 +- .../stdlib/distutils/command/bdist_rpm.pyi | 1 + .../distutils/command/bdist_wininst.pyi | 1 + .../stdlib/distutils/command/build.pyi | 1 + .../stdlib/distutils/command/build_clib.pyi | 2 + .../stdlib/distutils/command/build_ext.pyi | 10 +- .../stdlib/distutils/command/build_py.pyi | 24 +- .../distutils/command/build_scripts.pyi | 1 + .../stdlib/distutils/command/check.pyi | 21 +- .../stdlib/distutils/command/clean.pyi | 1 + .../stdlib/distutils/command/config.pyi | 8 + .../stdlib/distutils/command/install.pyi | 73 +- .../stdlib/distutils/command/install_data.pyi | 1 + .../distutils/command/install_egg_info.pyi | 7 +- .../distutils/command/install_headers.pyi | 1 + .../stdlib/distutils/command/install_lib.pyi | 2 + .../distutils/command/install_scripts.pyi | 1 + .../stdlib/distutils/command/register.pyi | 55 +- .../stdlib/distutils/command/sdist.pyi | 15 +- .../stdlib/distutils/command/upload.pyi | 1 + .../typeshed/stdlib/distutils/config.pyi | 13 +- .../vendor/typeshed/stdlib/distutils/core.pyi | 2 + .../stdlib/distutils/cygwinccompiler.pyi | 14 +- .../typeshed/stdlib/distutils/dep_util.pyi | 3 + .../typeshed/stdlib/distutils/dir_util.pyi | 4 + .../vendor/typeshed/stdlib/distutils/dist.pyi | 31 +- .../typeshed/stdlib/distutils/errors.pyi | 65 +- .../typeshed/stdlib/distutils/extension.pyi | 2 + .../stdlib/distutils/fancy_getopt.pyi | 10 +- .../typeshed/stdlib/distutils/file_util.pyi | 3 + .../typeshed/stdlib/distutils/filelist.pyi | 8 + .../vendor/typeshed/stdlib/distutils/log.pyi | 4 +- .../stdlib/distutils/msvccompiler.pyi | 5 +- .../typeshed/stdlib/distutils/spawn.pyi | 2 + .../typeshed/stdlib/distutils/sysconfig.pyi | 20 +- .../typeshed/stdlib/distutils/text_file.pyi | 180 +- .../stdlib/distutils/unixccompiler.pyi | 1 + .../vendor/typeshed/stdlib/distutils/util.pyi | 17 +- .../typeshed/stdlib/distutils/version.pyi | 4 + .../vendor/typeshed/stdlib/doctest.pyi | 1305 ++--- .../vendor/typeshed/stdlib/email/__init__.pyi | 24 +- .../stdlib/email/_header_value_parser.pyi | 435 +- 
.../typeshed/stdlib/email/_policybase.pyi | 449 +- .../typeshed/stdlib/email/base64mime.pyi | 32 +- .../vendor/typeshed/stdlib/email/charset.pyi | 222 +- .../vendor/typeshed/stdlib/email/encoders.pyi | 21 +- .../vendor/typeshed/stdlib/email/errors.pyi | 106 +- .../typeshed/stdlib/email/feedparser.pyi | 34 +- .../typeshed/stdlib/email/generator.pyi | 158 +- .../vendor/typeshed/stdlib/email/header.pyi | 195 +- .../typeshed/stdlib/email/headerregistry.pyi | 250 +- .../typeshed/stdlib/email/iterators.pyi | 27 +- .../vendor/typeshed/stdlib/email/message.pyi | 573 +- .../stdlib/email/mime/application.pyi | 26 +- .../typeshed/stdlib/email/mime/audio.pyi | 40 +- .../typeshed/stdlib/email/mime/base.pyi | 16 +- .../typeshed/stdlib/email/mime/image.pyi | 38 +- .../typeshed/stdlib/email/mime/message.pyi | 20 +- .../typeshed/stdlib/email/mime/multipart.pyi | 32 +- .../stdlib/email/mime/nonmultipart.pyi | 7 +- .../typeshed/stdlib/email/mime/text.pyi | 24 +- .../vendor/typeshed/stdlib/email/parser.pyi | 98 +- .../vendor/typeshed/stdlib/email/policy.pyi | 349 +- .../typeshed/stdlib/email/quoprimime.pyi | 88 +- .../vendor/typeshed/stdlib/email/utils.pyi | 169 +- .../typeshed/stdlib/encodings/__init__.pyi | 14 +- .../typeshed/stdlib/encodings/aliases.pyi | 1 + .../typeshed/stdlib/encodings/ascii.pyi | 1 + .../stdlib/encodings/base64_codec.pyi | 1 + .../typeshed/stdlib/encodings/bz2_codec.pyi | 1 + .../typeshed/stdlib/encodings/charmap.pyi | 1 + .../typeshed/stdlib/encodings/cp037.pyi | 3 +- .../typeshed/stdlib/encodings/cp1006.pyi | 3 +- .../typeshed/stdlib/encodings/cp1026.pyi | 3 +- .../typeshed/stdlib/encodings/cp1125.pyi | 3 +- .../typeshed/stdlib/encodings/cp1140.pyi | 3 +- .../typeshed/stdlib/encodings/cp1250.pyi | 3 +- .../typeshed/stdlib/encodings/cp1251.pyi | 3 +- .../typeshed/stdlib/encodings/cp1252.pyi | 3 +- .../typeshed/stdlib/encodings/cp1253.pyi | 3 +- .../typeshed/stdlib/encodings/cp1254.pyi | 3 +- .../typeshed/stdlib/encodings/cp1255.pyi | 3 +- .../typeshed/stdlib/encodings/cp1256.pyi | 3 +- .../typeshed/stdlib/encodings/cp1257.pyi | 3 +- .../typeshed/stdlib/encodings/cp1258.pyi | 3 +- .../typeshed/stdlib/encodings/cp273.pyi | 3 +- .../typeshed/stdlib/encodings/cp424.pyi | 3 +- .../typeshed/stdlib/encodings/cp437.pyi | 3 +- .../typeshed/stdlib/encodings/cp500.pyi | 3 +- .../typeshed/stdlib/encodings/cp720.pyi | 1 + .../typeshed/stdlib/encodings/cp737.pyi | 3 +- .../typeshed/stdlib/encodings/cp775.pyi | 3 +- .../typeshed/stdlib/encodings/cp850.pyi | 3 +- .../typeshed/stdlib/encodings/cp852.pyi | 3 +- .../typeshed/stdlib/encodings/cp855.pyi | 3 +- .../typeshed/stdlib/encodings/cp856.pyi | 3 +- .../typeshed/stdlib/encodings/cp857.pyi | 3 +- .../typeshed/stdlib/encodings/cp858.pyi | 3 +- .../typeshed/stdlib/encodings/cp860.pyi | 3 +- .../typeshed/stdlib/encodings/cp861.pyi | 3 +- .../typeshed/stdlib/encodings/cp862.pyi | 3 +- .../typeshed/stdlib/encodings/cp863.pyi | 3 +- .../typeshed/stdlib/encodings/cp864.pyi | 3 +- .../typeshed/stdlib/encodings/cp865.pyi | 3 +- .../typeshed/stdlib/encodings/cp866.pyi | 3 +- .../typeshed/stdlib/encodings/cp869.pyi | 3 +- .../typeshed/stdlib/encodings/cp874.pyi | 3 +- .../typeshed/stdlib/encodings/cp875.pyi | 3 +- .../typeshed/stdlib/encodings/hex_codec.pyi | 1 + .../typeshed/stdlib/encodings/hp_roman8.pyi | 1 + .../typeshed/stdlib/encodings/iso8859_1.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_10.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_11.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_13.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_14.pyi 
| 3 +- .../typeshed/stdlib/encodings/iso8859_15.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_16.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_2.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_3.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_4.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_5.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_6.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_7.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_8.pyi | 3 +- .../typeshed/stdlib/encodings/iso8859_9.pyi | 3 +- .../typeshed/stdlib/encodings/koi8_r.pyi | 3 +- .../typeshed/stdlib/encodings/koi8_t.pyi | 4 +- .../typeshed/stdlib/encodings/koi8_u.pyi | 3 +- .../typeshed/stdlib/encodings/kz1048.pyi | 3 +- .../typeshed/stdlib/encodings/latin_1.pyi | 1 + .../typeshed/stdlib/encodings/mac_arabic.pyi | 3 +- .../stdlib/encodings/mac_croatian.pyi | 3 +- .../stdlib/encodings/mac_cyrillic.pyi | 3 +- .../typeshed/stdlib/encodings/mac_farsi.pyi | 3 +- .../typeshed/stdlib/encodings/mac_greek.pyi | 3 +- .../typeshed/stdlib/encodings/mac_iceland.pyi | 3 +- .../typeshed/stdlib/encodings/mac_latin2.pyi | 1 + .../typeshed/stdlib/encodings/mac_roman.pyi | 3 +- .../stdlib/encodings/mac_romanian.pyi | 3 +- .../typeshed/stdlib/encodings/mac_turkish.pyi | 3 +- .../vendor/typeshed/stdlib/encodings/mbcs.pyi | 1 + .../vendor/typeshed/stdlib/encodings/oem.pyi | 3 +- .../typeshed/stdlib/encodings/palmos.pyi | 1 + .../typeshed/stdlib/encodings/ptcp154.pyi | 1 + .../typeshed/stdlib/encodings/punycode.pyi | 40 +- .../stdlib/encodings/quopri_codec.pyi | 1 + .../stdlib/encodings/raw_unicode_escape.pyi | 1 + .../typeshed/stdlib/encodings/rot_13.pyi | 1 + .../typeshed/stdlib/encodings/tis_620.pyi | 3 +- .../typeshed/stdlib/encodings/undefined.pyi | 1 + .../stdlib/encodings/unicode_escape.pyi | 1 + .../typeshed/stdlib/encodings/utf_16.pyi | 1 + .../typeshed/stdlib/encodings/utf_16_be.pyi | 1 + .../typeshed/stdlib/encodings/utf_16_le.pyi | 1 + .../typeshed/stdlib/encodings/utf_32.pyi | 1 + .../typeshed/stdlib/encodings/utf_32_be.pyi | 1 + .../typeshed/stdlib/encodings/utf_32_le.pyi | 1 + .../typeshed/stdlib/encodings/utf_7.pyi | 1 + .../typeshed/stdlib/encodings/utf_8.pyi | 1 + .../typeshed/stdlib/encodings/utf_8_sig.pyi | 1 + .../typeshed/stdlib/encodings/uu_codec.pyi | 1 + .../typeshed/stdlib/encodings/zlib_codec.pyi | 1 + .../typeshed/stdlib/ensurepip/__init__.pyi | 13 +- .../vendor/typeshed/stdlib/enum.pyi | 446 +- .../vendor/typeshed/stdlib/errno.pyi | 1 + .../vendor/typeshed/stdlib/faulthandler.pyi | 38 +- .../vendor/typeshed/stdlib/fcntl.pyi | 117 +- .../vendor/typeshed/stdlib/filecmp.pyi | 120 +- .../vendor/typeshed/stdlib/fileinput.pyi | 108 +- .../vendor/typeshed/stdlib/fnmatch.pyi | 40 +- .../vendor/typeshed/stdlib/formatter.pyi | 5 + .../vendor/typeshed/stdlib/fractions.pyi | 328 +- .../vendor/typeshed/stdlib/ftplib.pyi | 433 +- .../vendor/typeshed/stdlib/functools.pyi | 291 +- .../ty_vendored/vendor/typeshed/stdlib/gc.pyi | 113 +- .../vendor/typeshed/stdlib/genericpath.pyi | 62 +- .../vendor/typeshed/stdlib/getopt.pyi | 68 +- .../vendor/typeshed/stdlib/getpass.pyi | 67 +- .../vendor/typeshed/stdlib/gettext.pyi | 5 +- .../vendor/typeshed/stdlib/glob.pyi | 119 +- .../vendor/typeshed/stdlib/graphlib.pyi | 109 +- .../vendor/typeshed/stdlib/grp.pyi | 41 +- .../vendor/typeshed/stdlib/gzip.pyi | 136 +- .../vendor/typeshed/stdlib/hashlib.pyi | 19 +- .../vendor/typeshed/stdlib/heapq.pyi | 35 +- .../vendor/typeshed/stdlib/hmac.pyi | 83 +- .../vendor/typeshed/stdlib/html/__init__.pyi | 24 +- 
.../vendor/typeshed/stdlib/html/entities.pyi | 4 +- .../vendor/typeshed/stdlib/html/parser.pyi | 61 +- .../vendor/typeshed/stdlib/http/__init__.pyi | 42 +- .../vendor/typeshed/stdlib/http/client.pyi | 210 +- .../vendor/typeshed/stdlib/http/cookiejar.pyi | 230 +- .../vendor/typeshed/stdlib/http/cookies.pyi | 78 +- .../vendor/typeshed/stdlib/http/server.pyi | 458 +- .../vendor/typeshed/stdlib/imaplib.pyi | 531 +- .../vendor/typeshed/stdlib/imghdr.pyi | 8 +- .../vendor/typeshed/stdlib/imp.pyi | 31 +- .../typeshed/stdlib/importlib/__init__.pyi | 27 +- .../vendor/typeshed/stdlib/importlib/_abc.pyi | 38 +- .../typeshed/stdlib/importlib/_bootstrap.pyi | 1 + .../stdlib/importlib/_bootstrap_external.pyi | 1 + .../vendor/typeshed/stdlib/importlib/abc.pyi | 402 +- .../typeshed/stdlib/importlib/machinery.pyi | 7 +- .../stdlib/importlib/metadata/__init__.pyi | 723 +-- .../stdlib/importlib/metadata/_meta.pyi | 28 +- .../typeshed/stdlib/importlib/readers.pyi | 37 +- .../stdlib/importlib/resources/__init__.pyi | 63 +- .../stdlib/importlib/resources/_common.pyi | 44 +- .../importlib/resources/_functional.pyi | 35 +- .../stdlib/importlib/resources/abc.pyi | 106 +- .../stdlib/importlib/resources/simple.pyi | 49 +- .../typeshed/stdlib/importlib/simple.pyi | 1 + .../vendor/typeshed/stdlib/importlib/util.pyi | 78 +- .../vendor/typeshed/stdlib/inspect.pyi | 1037 ++-- .../ty_vendored/vendor/typeshed/stdlib/io.pyi | 99 +- .../vendor/typeshed/stdlib/ipaddress.pyi | 1008 ++-- .../vendor/typeshed/stdlib/itertools.pyi | 327 +- .../vendor/typeshed/stdlib/json/__init__.pyi | 211 +- .../vendor/typeshed/stdlib/json/decoder.pyi | 148 +- .../vendor/typeshed/stdlib/json/encoder.pyi | 185 +- .../vendor/typeshed/stdlib/json/scanner.pyi | 4 +- .../vendor/typeshed/stdlib/json/tool.pyi | 1 + .../vendor/typeshed/stdlib/keyword.pyi | 7 +- .../typeshed/stdlib/lib2to3/btm_matcher.pyi | 19 +- .../typeshed/stdlib/lib2to3/fixer_base.pyi | 17 +- .../stdlib/lib2to3/fixes/fix_apply.pyi | 1 + .../stdlib/lib2to3/fixes/fix_asserts.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_basestring.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_buffer.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_dict.pyi | 1 + .../stdlib/lib2to3/fixes/fix_except.pyi | 1 + .../stdlib/lib2to3/fixes/fix_exec.pyi | 1 + .../stdlib/lib2to3/fixes/fix_execfile.pyi | 1 + .../stdlib/lib2to3/fixes/fix_exitfunc.pyi | 1 + .../stdlib/lib2to3/fixes/fix_filter.pyi | 1 + .../stdlib/lib2to3/fixes/fix_funcattrs.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_future.pyi | 1 + .../stdlib/lib2to3/fixes/fix_getcwdu.pyi | 1 + .../stdlib/lib2to3/fixes/fix_has_key.pyi | 1 + .../stdlib/lib2to3/fixes/fix_idioms.pyi | 1 + .../stdlib/lib2to3/fixes/fix_import.pyi | 1 + .../stdlib/lib2to3/fixes/fix_imports.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_imports2.pyi | 1 + .../stdlib/lib2to3/fixes/fix_input.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_intern.pyi | 1 + .../stdlib/lib2to3/fixes/fix_isinstance.pyi | 1 + .../stdlib/lib2to3/fixes/fix_itertools.pyi | 13 +- .../lib2to3/fixes/fix_itertools_imports.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_long.pyi | 4 +- .../typeshed/stdlib/lib2to3/fixes/fix_map.pyi | 1 + .../stdlib/lib2to3/fixes/fix_metaclass.pyi | 46 +- .../stdlib/lib2to3/fixes/fix_methodattrs.pyi | 4 +- .../typeshed/stdlib/lib2to3/fixes/fix_ne.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_next.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_nonzero.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_numliterals.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_operator.pyi | 1 + .../stdlib/lib2to3/fixes/fix_paren.pyi | 1 + .../stdlib/lib2to3/fixes/fix_print.pyi | 1 + 
.../stdlib/lib2to3/fixes/fix_raise.pyi | 1 + .../stdlib/lib2to3/fixes/fix_raw_input.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_reduce.pyi | 1 + .../stdlib/lib2to3/fixes/fix_reload.pyi | 1 + .../stdlib/lib2to3/fixes/fix_renames.pyi | 1 + .../stdlib/lib2to3/fixes/fix_repr.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_set_literal.pyi | 1 + .../lib2to3/fixes/fix_standarderror.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_sys_exc.pyi | 1 + .../stdlib/lib2to3/fixes/fix_throw.pyi | 1 + .../stdlib/lib2to3/fixes/fix_tuple_params.pyi | 1 + .../stdlib/lib2to3/fixes/fix_types.pyi | 1 + .../stdlib/lib2to3/fixes/fix_unicode.pyi | 1 + .../stdlib/lib2to3/fixes/fix_urllib.pyi | 19 +- .../stdlib/lib2to3/fixes/fix_ws_comma.pyi | 1 + .../stdlib/lib2to3/fixes/fix_xrange.pyi | 4 +- .../stdlib/lib2to3/fixes/fix_xreadlines.pyi | 1 + .../typeshed/stdlib/lib2to3/fixes/fix_zip.pyi | 1 + .../vendor/typeshed/stdlib/lib2to3/main.pyi | 5 +- .../stdlib/lib2to3/pgen2/__init__.pyi | 4 +- .../typeshed/stdlib/lib2to3/pgen2/driver.pyi | 27 +- .../typeshed/stdlib/lib2to3/pgen2/grammar.pyi | 14 +- .../stdlib/lib2to3/pgen2/literals.pyi | 4 +- .../typeshed/stdlib/lib2to3/pgen2/parse.pyi | 27 +- .../typeshed/stdlib/lib2to3/pgen2/token.pyi | 4 +- .../stdlib/lib2to3/pgen2/tokenize.pyi | 2 + .../vendor/typeshed/stdlib/lib2to3/pygram.pyi | 4 +- .../vendor/typeshed/stdlib/lib2to3/pytree.pyi | 69 +- .../typeshed/stdlib/lib2to3/refactor.pyi | 42 +- .../vendor/typeshed/stdlib/linecache.pyi | 43 +- .../vendor/typeshed/stdlib/locale.pyi | 143 +- .../typeshed/stdlib/logging/__init__.pyi | 1293 ++--- .../vendor/typeshed/stdlib/logging/config.pyi | 166 +- .../typeshed/stdlib/logging/handlers.pyi | 691 +-- .../vendor/typeshed/stdlib/lzma.pyi | 208 +- .../vendor/typeshed/stdlib/mailbox.pyi | 624 ++- .../vendor/typeshed/stdlib/mailcap.pyi | 5 +- .../vendor/typeshed/stdlib/marshal.pyi | 180 +- .../vendor/typeshed/stdlib/math.pyi | 334 +- .../vendor/typeshed/stdlib/mimetypes.pyi | 215 +- .../vendor/typeshed/stdlib/mmap.pyi | 63 +- .../vendor/typeshed/stdlib/modulefinder.pyi | 29 +- .../typeshed/stdlib/msilib/__init__.pyi | 51 +- .../vendor/typeshed/stdlib/msvcrt.pyi | 101 +- .../stdlib/multiprocessing/connection.pyi | 102 +- .../stdlib/multiprocessing/context.pyi | 130 +- .../stdlib/multiprocessing/forkserver.pyi | 36 +- .../typeshed/stdlib/multiprocessing/heap.pyi | 5 +- .../stdlib/multiprocessing/managers.pyi | 167 +- .../typeshed/stdlib/multiprocessing/pool.pyi | 57 +- .../multiprocessing/popen_spawn_win32.pyi | 5 +- .../stdlib/multiprocessing/process.pyi | 81 +- .../stdlib/multiprocessing/queues.pyi | 8 +- .../stdlib/multiprocessing/reduction.pyi | 59 +- .../multiprocessing/resource_sharer.pyi | 17 +- .../multiprocessing/resource_tracker.pyi | 17 +- .../stdlib/multiprocessing/shared_memory.pyi | 98 +- .../stdlib/multiprocessing/sharedctypes.pyi | 20 +- .../typeshed/stdlib/multiprocessing/spawn.pyi | 32 +- .../typeshed/stdlib/multiprocessing/util.pyi | 33 +- .../vendor/typeshed/stdlib/netrc.pyi | 11 +- .../vendor/typeshed/stdlib/nis.pyi | 33 +- .../vendor/typeshed/stdlib/nntplib.pyi | 94 +- .../ty_vendored/vendor/typeshed/stdlib/nt.pyi | 1 + .../vendor/typeshed/stdlib/ntpath.pyi | 4 +- .../vendor/typeshed/stdlib/nturl2path.pyi | 19 +- .../vendor/typeshed/stdlib/numbers.pyi | 327 +- .../vendor/typeshed/stdlib/opcode.pyi | 4 +- .../vendor/typeshed/stdlib/operator.pyi | 37 +- .../vendor/typeshed/stdlib/optparse.pyi | 498 +- .../vendor/typeshed/stdlib/os/__init__.pyi | 2405 +++++---- .../vendor/typeshed/stdlib/os/path.pyi | 1 + 
.../vendor/typeshed/stdlib/parser.pyi | 62 +- .../typeshed/stdlib/pathlib/__init__.pyi | 634 +-- .../vendor/typeshed/stdlib/pathlib/types.pyi | 6 +- .../vendor/typeshed/stdlib/pdb.pyi | 634 +-- .../vendor/typeshed/stdlib/pickle.pyi | 232 +- .../vendor/typeshed/stdlib/pickletools.pyi | 515 +- .../vendor/typeshed/stdlib/pipes.pyi | 30 +- .../vendor/typeshed/stdlib/pkgutil.pyi | 284 +- .../vendor/typeshed/stdlib/platform.pyi | 267 +- .../vendor/typeshed/stdlib/plistlib.pyi | 43 +- .../vendor/typeshed/stdlib/poplib.pyi | 221 +- .../vendor/typeshed/stdlib/posix.pyi | 1 + .../vendor/typeshed/stdlib/posixpath.pyi | 95 +- .../vendor/typeshed/stdlib/pprint.pyi | 101 +- .../vendor/typeshed/stdlib/profile.pyi | 92 +- .../vendor/typeshed/stdlib/pstats.pyi | 98 +- .../vendor/typeshed/stdlib/pty.pyi | 47 +- .../vendor/typeshed/stdlib/pwd.pyi | 51 +- .../vendor/typeshed/stdlib/py_compile.pyi | 136 +- .../vendor/typeshed/stdlib/pyclbr.pyi | 26 +- .../vendor/typeshed/stdlib/pydoc.pyi | 384 +- .../typeshed/stdlib/pyexpat/__init__.pyi | 70 +- .../vendor/typeshed/stdlib/pyexpat/errors.pyi | 4 +- .../vendor/typeshed/stdlib/pyexpat/model.pyi | 4 +- .../vendor/typeshed/stdlib/queue.pyi | 159 +- .../vendor/typeshed/stdlib/quopri.pyi | 24 +- .../vendor/typeshed/stdlib/random.pyi | 342 +- .../ty_vendored/vendor/typeshed/stdlib/re.pyi | 246 +- .../vendor/typeshed/stdlib/readline.pyi | 136 +- .../vendor/typeshed/stdlib/reprlib.pyi | 7 +- .../vendor/typeshed/stdlib/resource.pyi | 77 +- .../vendor/typeshed/stdlib/rlcompleter.pyi | 46 +- .../vendor/typeshed/stdlib/runpy.pyi | 50 +- .../vendor/typeshed/stdlib/sched.pyi | 82 +- .../vendor/typeshed/stdlib/secrets.pyi | 47 +- .../vendor/typeshed/stdlib/select.pyi | 191 +- .../vendor/typeshed/stdlib/selectors.pyi | 164 +- .../vendor/typeshed/stdlib/shelve.pyi | 66 +- .../vendor/typeshed/stdlib/shlex.pyi | 46 +- .../vendor/typeshed/stdlib/shutil.pyi | 382 +- .../vendor/typeshed/stdlib/signal.pyi | 201 +- .../vendor/typeshed/stdlib/site.pyi | 116 +- .../vendor/typeshed/stdlib/smtpd.pyi | 5 +- .../vendor/typeshed/stdlib/smtplib.pyi | 488 +- .../vendor/typeshed/stdlib/sndhdr.pyi | 12 +- .../vendor/typeshed/stdlib/socket.pyi | 272 +- .../vendor/typeshed/stdlib/socketserver.pyi | 298 +- .../vendor/typeshed/stdlib/spwd.pyi | 61 +- .../typeshed/stdlib/sqlite3/__init__.pyi | 374 +- .../vendor/typeshed/stdlib/sre_compile.pyi | 4 +- .../vendor/typeshed/stdlib/sre_constants.pyi | 4 +- .../vendor/typeshed/stdlib/sre_parse.pyi | 4 +- .../vendor/typeshed/stdlib/ssl.pyi | 424 +- .../vendor/typeshed/stdlib/stat.pyi | 1 + .../vendor/typeshed/stdlib/statistics.pyi | 987 ++-- .../typeshed/stdlib/string/__init__.pyi | 19 +- .../typeshed/stdlib/string/templatelib.pyi | 34 +- .../vendor/typeshed/stdlib/stringprep.pyi | 1 + .../vendor/typeshed/stdlib/struct.pyi | 1 + .../vendor/typeshed/stdlib/subprocess.pyi | 646 +-- .../vendor/typeshed/stdlib/sunau.pyi | 5 +- .../vendor/typeshed/stdlib/symbol.pyi | 4 +- .../vendor/typeshed/stdlib/symtable.pyi | 180 +- .../vendor/typeshed/stdlib/sys/__init__.pyi | 605 ++- .../vendor/typeshed/stdlib/sysconfig.pyi | 93 +- .../vendor/typeshed/stdlib/syslog.pyi | 25 +- .../vendor/typeshed/stdlib/tabnanny.pyi | 21 +- .../vendor/typeshed/stdlib/tarfile.pyi | 627 +-- .../vendor/typeshed/stdlib/telnetlib.pyi | 57 +- .../vendor/typeshed/stdlib/tempfile.pyi | 261 +- .../vendor/typeshed/stdlib/termios.pyi | 70 +- .../vendor/typeshed/stdlib/textwrap.pyi | 269 +- .../vendor/typeshed/stdlib/threading.pyi | 684 +-- .../vendor/typeshed/stdlib/time.pyi | 317 +- 
.../vendor/typeshed/stdlib/timeit.pyi | 145 +- .../typeshed/stdlib/tkinter/__init__.pyi | 4328 ++++++++------- .../typeshed/stdlib/tkinter/colorchooser.pyi | 31 +- .../vendor/typeshed/stdlib/tkinter/dnd.pyi | 1 + .../typeshed/stdlib/tkinter/filedialog.pyi | 88 +- .../vendor/typeshed/stdlib/tkinter/font.pyi | 68 +- .../typeshed/stdlib/tkinter/messagebox.pyi | 35 +- .../typeshed/stdlib/tkinter/scrolledtext.pyi | 1 + .../typeshed/stdlib/tkinter/simpledialog.pyi | 80 +- .../vendor/typeshed/stdlib/tkinter/tix.pyi | 193 +- .../vendor/typeshed/stdlib/tkinter/ttk.pyi | 1317 ++--- .../vendor/typeshed/stdlib/token.pyi | 4 +- .../vendor/typeshed/stdlib/tokenize.pyi | 100 +- .../vendor/typeshed/stdlib/tomllib.pyi | 25 +- .../vendor/typeshed/stdlib/trace.pyi | 98 +- .../vendor/typeshed/stdlib/traceback.pyi | 489 +- .../vendor/typeshed/stdlib/tracemalloc.pyi | 127 +- .../vendor/typeshed/stdlib/tty.pyi | 19 +- .../vendor/typeshed/stdlib/turtle.pyi | 4793 +++++++++-------- .../vendor/typeshed/stdlib/types.pyi | 483 +- .../vendor/typeshed/stdlib/typing.pyi | 1319 ++--- .../typeshed/stdlib/typing_extensions.pyi | 453 +- .../vendor/typeshed/stdlib/unicodedata.pyi | 163 +- .../typeshed/stdlib/unittest/__init__.pyi | 1 + .../vendor/typeshed/stdlib/unittest/_log.pyi | 8 +- .../typeshed/stdlib/unittest/async_case.pyi | 6 +- .../vendor/typeshed/stdlib/unittest/case.pyi | 655 +-- .../typeshed/stdlib/unittest/loader.pyi | 97 +- .../vendor/typeshed/stdlib/unittest/main.pyi | 9 +- .../vendor/typeshed/stdlib/unittest/mock.pyi | 686 +-- .../typeshed/stdlib/unittest/result.pyi | 89 +- .../typeshed/stdlib/unittest/runner.pyi | 39 +- .../vendor/typeshed/stdlib/unittest/suite.pyi | 25 +- .../vendor/typeshed/stdlib/unittest/util.pyi | 35 +- .../vendor/typeshed/stdlib/urllib/error.pyi | 9 +- .../vendor/typeshed/stdlib/urllib/parse.pyi | 381 +- .../vendor/typeshed/stdlib/urllib/request.pyi | 387 +- .../typeshed/stdlib/urllib/response.pyi | 17 +- .../typeshed/stdlib/urllib/robotparser.pyi | 41 +- .../ty_vendored/vendor/typeshed/stdlib/uu.pyi | 12 +- .../vendor/typeshed/stdlib/uuid.pyi | 214 +- .../vendor/typeshed/stdlib/warnings.pyi | 180 +- .../vendor/typeshed/stdlib/wave.pyi | 105 +- .../vendor/typeshed/stdlib/weakref.pyi | 123 +- .../vendor/typeshed/stdlib/webbrowser.pyi | 106 +- .../vendor/typeshed/stdlib/winreg.pyi | 634 +-- .../vendor/typeshed/stdlib/winsound.pyi | 28 +- .../typeshed/stdlib/wsgiref/handlers.pyi | 233 +- .../typeshed/stdlib/wsgiref/headers.pyi | 108 +- .../typeshed/stdlib/wsgiref/simple_server.pyi | 9 +- .../vendor/typeshed/stdlib/wsgiref/types.pyi | 20 +- .../vendor/typeshed/stdlib/wsgiref/util.pyi | 59 +- .../typeshed/stdlib/wsgiref/validate.pyi | 21 +- .../vendor/typeshed/stdlib/xdrlib.pyi | 10 +- .../vendor/typeshed/stdlib/xml/__init__.pyi | 1 + .../typeshed/stdlib/xml/dom/NodeFilter.pyi | 5 +- .../typeshed/stdlib/xml/dom/__init__.pyi | 14 +- .../vendor/typeshed/stdlib/xml/dom/domreg.pyi | 32 +- .../typeshed/stdlib/xml/dom/expatbuilder.pyi | 121 +- .../typeshed/stdlib/xml/dom/minicompat.pyi | 9 +- .../typeshed/stdlib/xml/dom/minidom.pyi | 258 +- .../typeshed/stdlib/xml/dom/pulldom.pyi | 6 +- .../typeshed/stdlib/xml/dom/xmlbuilder.pyi | 20 +- .../typeshed/stdlib/xml/etree/ElementTree.pyi | 356 +- .../typeshed/stdlib/xml/parsers/__init__.pyi | 1 + .../stdlib/xml/parsers/expat/__init__.pyi | 4 +- .../stdlib/xml/parsers/expat/errors.pyi | 4 +- .../stdlib/xml/parsers/expat/model.pyi | 4 +- .../typeshed/stdlib/xml/sax/__init__.pyi | 12 +- .../typeshed/stdlib/xml/sax/_exceptions.pyi | 107 +- 
.../typeshed/stdlib/xml/sax/expatreader.pyi | 16 +- .../typeshed/stdlib/xml/sax/handler.pyi | 350 +- .../typeshed/stdlib/xml/sax/saxutils.pyi | 54 +- .../typeshed/stdlib/xml/sax/xmlreader.pyi | 292 +- .../vendor/typeshed/stdlib/xmlrpc/client.pyi | 173 +- .../vendor/typeshed/stdlib/xmlrpc/server.pyi | 306 +- .../vendor/typeshed/stdlib/xxlimited.pyi | 22 +- .../vendor/typeshed/stdlib/zipapp.pyi | 33 +- .../typeshed/stdlib/zipfile/__init__.pyi | 399 +- .../stdlib/zipfile/_path/__init__.pyi | 226 +- .../typeshed/stdlib/zipfile/_path/glob.pyi | 148 +- .../vendor/typeshed/stdlib/zipimport.pyi | 160 +- .../vendor/typeshed/stdlib/zlib.pyi | 114 +- .../typeshed/stdlib/zoneinfo/__init__.pyi | 24 +- .../typeshed/stdlib/zoneinfo/_common.pyi | 3 +- .../typeshed/stdlib/zoneinfo/_tzpath.pyi | 21 +- 661 files changed, 44416 insertions(+), 41265 deletions(-) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi index 27d0e6e145d4b..9f0b95bc4a127 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/__future__.pyi @@ -46,6 +46,7 @@ attribute on _Future instances. These values must match the appropriate No feature line is ever to be deleted from this file. """ + from typing_extensions import TypeAlias _VersionInfo: TypeAlias = tuple[int, int, int, str, int] @@ -55,14 +56,15 @@ class _Feature: def getOptionalRelease(self) -> _VersionInfo: """Return first release in which this feature was recognized. -This is a 5-tuple, of the same form as sys.version_info. -""" + This is a 5-tuple, of the same form as sys.version_info. + """ + def getMandatoryRelease(self) -> _VersionInfo | None: """Return release in which this feature will become mandatory. -This is a 5-tuple, of the same form as sys.version_info, or, if -the feature was dropped, or the release date is undetermined, is None. -""" + This is a 5-tuple, of the same form as sys.version_info, or, if + the feature was dropped, or the release date is undetermined, is None. + """ compiler_flag: int absolute_import: _Feature diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi index aff8cf899ba4a..5b11b3445aab1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_asyncio.pyi @@ -1,5 +1,5 @@ -"""Accelerator module for asyncio -""" +"""Accelerator module for asyncio""" + import sys from asyncio.events import AbstractEventLoop from collections.abc import Awaitable, Callable, Coroutine, Generator @@ -26,7 +26,8 @@ class Future(Awaitable[_T]): - This class is not compatible with the wait() and as_completed() methods in the concurrent.futures package. -""" + """ + _state: str @property def _exception(self) -> BaseException | None: ... @@ -38,79 +39,86 @@ class Future(Awaitable[_T]): _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def __del__(self) -> None: - """Called when the instance is about to be destroyed. -""" + """Called when the instance is about to be destroyed.""" + def get_loop(self) -> AbstractEventLoop: - """Return the event loop the Future is bound to. -""" + """Return the event loop the Future is bound to.""" + @property def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... 
def add_done_callback(self, fn: Callable[[Self], object], /, *, context: Context | None = None) -> None: """Add a callback to be run when the future becomes done. -The callback is called with a single argument - the future object. If -the future is already done when this is called, the callback is -scheduled with call_soon. -""" + The callback is called with a single argument - the future object. If + the future is already done when this is called, the callback is + scheduled with call_soon. + """ + def cancel(self, msg: Any | None = None) -> bool: """Cancel the future and schedule callbacks. -If the future is already done or cancelled, return False. Otherwise, -change the future's state to cancelled, schedule the callbacks and -return True. -""" + If the future is already done or cancelled, return False. Otherwise, + change the future's state to cancelled, schedule the callbacks and + return True. + """ + def cancelled(self) -> bool: - """Return True if the future was cancelled. -""" + """Return True if the future was cancelled.""" + def done(self) -> bool: """Return True if the future is done. -Done means either that a result / exception are available, or that the -future was cancelled. -""" + Done means either that a result / exception are available, or that the + future was cancelled. + """ + def result(self) -> _T: """Return the result this future represents. -If the future has been cancelled, raises CancelledError. If the -future's result isn't yet available, raises InvalidStateError. If -the future is done and has an exception set, this exception is raised. -""" + If the future has been cancelled, raises CancelledError. If the + future's result isn't yet available, raises InvalidStateError. If + the future is done and has an exception set, this exception is raised. + """ + def exception(self) -> BaseException | None: """Return the exception that was set on this future. -The exception (or None if no exception was set) is returned only if -the future is done. If the future has been cancelled, raises -CancelledError. If the future isn't done yet, raises -InvalidStateError. -""" + The exception (or None if no exception was set) is returned only if + the future is done. If the future has been cancelled, raises + CancelledError. If the future isn't done yet, raises + InvalidStateError. + """ + def remove_done_callback(self, fn: Callable[[Self], object], /) -> int: """Remove all instances of a callback from the "call when done" list. -Returns the number of callbacks removed. -""" + Returns the number of callbacks removed. + """ + def set_result(self, result: _T, /) -> None: """Mark the future done and set its result. -If the future is already done when this method is called, raises -InvalidStateError. -""" + If the future is already done when this method is called, raises + InvalidStateError. + """ + def set_exception(self, exception: type | BaseException, /) -> None: """Mark the future done and set an exception. -If the future is already done when this method is called, raises -InvalidStateError. -""" + If the future is already done when this method is called, raises + InvalidStateError. + """ + def __iter__(self) -> Generator[Any, None, _T]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __await__(self) -> Generator[Any, None, _T]: - """Return an iterator to be used in await expression. -""" + """Return an iterator to be used in await expression.""" + @property def _loop(self) -> AbstractEventLoop: ... 
def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" if sys.version_info >= (3, 12): _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] @@ -123,8 +131,8 @@ else: # and `asyncio.Task.set_result()` always raises. @disjoint_base class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] - """A coroutine wrapped in a Future. -""" + """A coroutine wrapped in a Future.""" + if sys.version_info >= (3, 12): def __init__( self, @@ -162,115 +170,121 @@ class Task(Future[_T_co]): # type: ignore[type-var] # pyright: ignore[reportIn def get_stack(self, *, limit: int | None = None) -> list[FrameType]: """Return the list of stack frames for this task's coroutine. -If the coroutine is not done, this returns the stack where it is -suspended. If the coroutine has completed successfully or was -cancelled, this returns an empty list. If the coroutine was -terminated by an exception, this returns the list of traceback -frames. + If the coroutine is not done, this returns the stack where it is + suspended. If the coroutine has completed successfully or was + cancelled, this returns an empty list. If the coroutine was + terminated by an exception, this returns the list of traceback + frames. + + The frames are always ordered from oldest to newest. -The frames are always ordered from oldest to newest. + The optional limit gives the maximum number of frames to + return; by default all available frames are returned. Its + meaning differs depending on whether a stack or a traceback is + returned: the newest frames of a stack are returned, but the + oldest frames of a traceback are returned. (This matches the + behavior of the traceback module.) -The optional limit gives the maximum number of frames to -return; by default all available frames are returned. Its -meaning differs depending on whether a stack or a traceback is -returned: the newest frames of a stack are returned, but the -oldest frames of a traceback are returned. (This matches the -behavior of the traceback module.) + For reasons beyond our control, only one stack frame is + returned for a suspended coroutine. + """ -For reasons beyond our control, only one stack frame is -returned for a suspended coroutine. -""" def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: """Print the stack or traceback for this task's coroutine. -This produces output similar to that of the traceback module, -for the frames retrieved by get_stack(). The limit argument -is passed to get_stack(). The file argument is an I/O stream -to which the output is written; by default output is written -to sys.stderr. -""" + This produces output similar to that of the traceback module, + for the frames retrieved by get_stack(). The limit argument + is passed to get_stack(). The file argument is an I/O stream + to which the output is written; by default output is written + to sys.stderr. + """ if sys.version_info >= (3, 11): def cancelling(self) -> int: """Return the count of the task's cancellation requests. -This count is incremented when .cancel() is called -and may be decremented using .uncancel(). -""" + This count is incremented when .cancel() is called + and may be decremented using .uncancel(). + """ + def uncancel(self) -> int: """Decrement the task's count of cancellation requests. -This should be used by tasks that catch CancelledError -and wish to continue indefinitely until they are cancelled again. 
+ This should be used by tasks that catch CancelledError + and wish to continue indefinitely until they are cancelled again. -Returns the remaining number of cancellation requests. -""" + Returns the remaining number of cancellation requests. + """ def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" def get_event_loop() -> AbstractEventLoop: """Return an asyncio event loop. -When called from a coroutine or a callback (e.g. scheduled with -call_soon or similar API), this function will always return the -running event loop. + When called from a coroutine or a callback (e.g. scheduled with + call_soon or similar API), this function will always return the + running event loop. + + If there is no running event loop set, the function will return + the result of `get_event_loop_policy().get_event_loop()` call. + """ -If there is no running event loop set, the function will return -the result of `get_event_loop_policy().get_event_loop()` call. -""" def get_running_loop() -> AbstractEventLoop: """Return the running event loop. Raise a RuntimeError if there is none. -This function is thread-specific. -""" + This function is thread-specific. + """ + def _set_running_loop(loop: AbstractEventLoop | None, /) -> None: """Set the running event loop. -This is a low-level function intended to be used by event loops. -This function is thread-specific. -""" + This is a low-level function intended to be used by event loops. + This function is thread-specific. + """ + def _get_running_loop() -> AbstractEventLoop: """Return the running event loop or None. -This is a low-level function intended to be used by event loops. -This function is thread-specific. -""" + This is a low-level function intended to be used by event loops. + This function is thread-specific. + """ + def _register_task(task: Task[Any]) -> None: """Register a new task in asyncio as executed by loop. -Returns None. -""" + Returns None. + """ + def _unregister_task(task: Task[Any]) -> None: """Unregister a task. -Returns None. -""" + Returns None. + """ + def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: """Enter into task execution or resume suspended task. -Task belongs to loop. + Task belongs to loop. + + Returns None. + """ -Returns None. -""" def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: """Leave task execution or suspend a task. -Task belongs to loop. + Task belongs to loop. -Returns None. -""" + Returns None. + """ if sys.version_info >= (3, 12): def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: - """Return a currently executed task. -""" + """Return a currently executed task.""" if sys.version_info >= (3, 14): def future_discard_from_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: ... def future_add_to_awaited_by(future: Future[Any], waiter: Future[Any], /) -> None: - """Record that `fut` is awaited on by `waiter`. -""" + """Record that `fut` is awaited on by `waiter`.""" + def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: - """Return a set of all tasks for the loop. -""" + """Return a set of all tasks for the loop.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi index 04b301296cbd4..9d4c323ef5ca8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bisect.pyi @@ -5,6 +5,7 @@ having to sort the list after each insertion. 
For long lists of items with expensive comparison operations, this can be an improvement over the more common approach. """ + import sys from _typeshed import SupportsLenAndGetItem, SupportsRichComparisonT from collections.abc import Callable, MutableSequence @@ -24,15 +25,16 @@ if sys.version_info >= (3, 10): ) -> int: """Return the index where to insert item x in list a, assuming a is sorted. -The return value i is such that all e in a[:i] have e < x, and all e in -a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will -insert just before the leftmost x already there. + The return value i is such that all e in a[:i] have e < x, and all e in + a[i:] have e >= x. So if x already appears in the list, a.insert(i, x) will + insert just before the leftmost x already there. -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + + A custom key function can be supplied to customize the sort order. + """ -A custom key function can be supplied to customize the sort order. -""" @overload def bisect_left( a: SupportsLenAndGetItem[_T], @@ -53,15 +55,16 @@ A custom key function can be supplied to customize the sort order. ) -> int: """Return the index where to insert item x in list a, assuming a is sorted. -The return value i is such that all e in a[:i] have e <= x, and all e in -a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will -insert just after the rightmost x already there. + The return value i is such that all e in a[:i] have e <= x, and all e in + a[i:] have e > x. So if x already appears in the list, a.insert(i, x) will + insert just after the rightmost x already there. -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + + A custom key function can be supplied to customize the sort order. + """ -A custom key function can be supplied to customize the sort order. -""" @overload def bisect_right( a: SupportsLenAndGetItem[_T], @@ -82,13 +85,14 @@ A custom key function can be supplied to customize the sort order. ) -> None: """Insert item x in list a, and keep it sorted assuming a is sorted. -If x is already in a, insert it to the left of the leftmost x. + If x is already in a, insert it to the left of the leftmost x. -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + + A custom key function can be supplied to customize the sort order. + """ -A custom key function can be supplied to customize the sort order. -""" @overload def insort_left( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -104,13 +108,14 @@ A custom key function can be supplied to customize the sort order. ) -> None: """Insert item x in list a, and keep it sorted assuming a is sorted. -If x is already in a, insert it to the right of the rightmost x. + If x is already in a, insert it to the right of the rightmost x. -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + + A custom key function can be supplied to customize the sort order. 
+ """ -A custom key function can be supplied to customize the sort order. -""" @overload def insort_right( a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] @@ -122,42 +127,45 @@ else: ) -> int: """Return the index where to insert item x in list a, assuming a is sorted. -The return value i is such that all e in a[:i] have e < x, and all e in -a[i:] have e >= x. So if x already appears in the list, i points just -before the leftmost x already there. + The return value i is such that all e in a[:i] have e < x, and all e in + a[i:] have e >= x. So if x already appears in the list, i points just + before the leftmost x already there. + + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + """ -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. -""" def bisect_right( a: SupportsLenAndGetItem[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> int: """Return the index where to insert item x in list a, assuming a is sorted. -The return value i is such that all e in a[:i] have e <= x, and all e in -a[i:] have e > x. So if x already appears in the list, i points just -beyond the rightmost x already there + The return value i is such that all e in a[:i] have e <= x, and all e in + a[i:] have e > x. So if x already appears in the list, i points just + beyond the rightmost x already there + + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + """ -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. -""" def insort_left( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: """Insert item x in list a, and keep it sorted assuming a is sorted. -If x is already in a, insert it to the left of the leftmost x. + If x is already in a, insert it to the left of the leftmost x. + + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + """ -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. -""" def insort_right( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: """Insert item x in list a, and keep it sorted assuming a is sorted. -If x is already in a, insert it to the right of the rightmost x. + If x is already in a, insert it to the right of the rightmost x. -Optional args lo (default 0) and hi (default len(a)) bound the -slice of a to be searched. -""" + Optional args lo (default 0) and hi (default len(a)) bound the + slice of a to be searched. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi index 3dd4a55422b20..b806cd201c7fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_blake2.pyi @@ -1,5 +1,5 @@ -"""_blake2b provides BLAKE2b for hashlib -""" +"""_blake2b provides BLAKE2b for hashlib""" + import sys from _typeshed import ReadableBuffer from typing import ClassVar, Final, final @@ -16,8 +16,8 @@ BLAKE2S_SALT_SIZE: Final = 8 @final class blake2b: - """Return a new BLAKE2b hash object. -""" + """Return a new BLAKE2b hash object.""" + MAX_DIGEST_SIZE: ClassVar[int] = 64 MAX_KEY_SIZE: ClassVar[int] = 64 PERSON_SIZE: ClassVar[int] = 16 @@ -65,22 +65,21 @@ class blake2b: ) -> Self: ... 
def copy(self) -> Self: - """Return a copy of the hash object. -""" + """Return a copy of the hash object.""" + def digest(self) -> bytes: - """Return the digest value as a bytes object. -""" + """Return the digest value as a bytes object.""" + def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits. -""" + """Return the digest value as a string of hexadecimal digits.""" + def update(self, data: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided bytes-like object. -""" + """Update this hash object's state with the provided bytes-like object.""" @final class blake2s: - """Return a new BLAKE2s hash object. -""" + """Return a new BLAKE2s hash object.""" + MAX_DIGEST_SIZE: ClassVar[int] = 32 MAX_KEY_SIZE: ClassVar[int] = 32 PERSON_SIZE: ClassVar[int] = 8 @@ -128,14 +127,13 @@ class blake2s: ) -> Self: ... def copy(self) -> Self: - """Return a copy of the hash object. -""" + """Return a copy of the hash object.""" + def digest(self) -> bytes: - """Return the digest value as a bytes object. -""" + """Return the digest value as a bytes object.""" + def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits. -""" + """Return the digest value as a string of hexadecimal digits.""" + def update(self, data: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided bytes-like object. -""" + """Update this hash object's state with the provided bytes-like object.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi index d5e032c352a8a..b3f4e5d0c88de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bootlocale.pyi @@ -3,4 +3,5 @@ Don't import directly from third-party code; use the `locale` module instead! """ + def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi index 5aca352012c78..61a76f4f85811 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_bz2.pyi @@ -7,11 +7,12 @@ from typing_extensions import Self class BZ2Compressor: """Create a compressor object for compressing data incrementally. - compresslevel - Compression level, as a number between 1 and 9. + compresslevel + Compression level, as a number between 1 and 9. + + For one-shot compression, use the compress() function instead. + """ -For one-shot compression, use the compress() function instead. -""" if sys.version_info >= (3, 12): def __new__(cls, compresslevel: int = 9, /) -> Self: ... else: @@ -20,50 +21,52 @@ For one-shot compression, use the compress() function instead. def compress(self, data: ReadableBuffer, /) -> bytes: """Provide data to the compressor object. -Returns a chunk of compressed data if possible, or b'' otherwise. + Returns a chunk of compressed data if possible, or b'' otherwise. + + When you have finished providing data to the compressor, call the + flush() method to finish the compression process. + """ -When you have finished providing data to the compressor, call the -flush() method to finish the compression process. -""" def flush(self) -> bytes: """Finish the compression process. -Returns the compressed data left in internal buffers. + Returns the compressed data left in internal buffers. -The compressor object may not be used after this method is called. 
-""" + The compressor object may not be used after this method is called. + """ @final class BZ2Decompressor: """Create a decompressor object for decompressing data incrementally. -For one-shot decompression, use the decompress() function instead. -""" + For one-shot decompression, use the decompress() function instead. + """ + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: """Decompress *data*, returning uncompressed data as bytes. -If *max_length* is nonnegative, returns at most *max_length* bytes of -decompressed data. If this limit is reached and further output can be -produced, *self.needs_input* will be set to ``False``. In this case, the next -call to *decompress()* may provide *data* as b'' to obtain more of the output. + If *max_length* is nonnegative, returns at most *max_length* bytes of + decompressed data. If this limit is reached and further output can be + produced, *self.needs_input* will be set to ``False``. In this case, the next + call to *decompress()* may provide *data* as b'' to obtain more of the output. -If all of the input data was decompressed and returned (either because this -was less than *max_length* bytes, or because *max_length* was negative), -*self.needs_input* will be set to True. + If all of the input data was decompressed and returned (either because this + was less than *max_length* bytes, or because *max_length* was negative), + *self.needs_input* will be set to True. + + Attempting to decompress data after the end of stream is reached raises an + EOFError. Any data found after the end of the stream is ignored and saved in + the unused_data attribute. + """ -Attempting to decompress data after the end of stream is reached raises an -EOFError. Any data found after the end of the stream is ignored and saved in -the unused_data attribute. -""" @property def eof(self) -> bool: - """True if the end-of-stream marker has been reached. -""" + """True if the end-of-stream marker has been reached.""" + @property def needs_input(self) -> bool: - """True if more input is needed before more decompressed data can be produced. -""" + """True if more input is needed before more decompressed data can be produced.""" + @property def unused_data(self) -> bytes: - """Data found after the end of the compressed stream. -""" + """Data found after the end of the compressed stream.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi index a64ade83f139d..7548f98b66a8b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_codecs.pyi @@ -19,31 +19,32 @@ _SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] def register(search_function: _SearchFunction, /) -> None: """Register a codec search function. -Search functions are expected to take one argument, the encoding name in -all lower case letters, and either return None, or a tuple of functions -(encoder, decoder, stream_reader, stream_writer) (or a CodecInfo object). -""" + Search functions are expected to take one argument, the encoding name in + all lower case letters, and either return None, or a tuple of functions + (encoder, decoder, stream_reader, stream_writer) (or a CodecInfo object). + """ if sys.version_info >= (3, 10): def unregister(search_function: _SearchFunction, /) -> None: """Unregister a codec search function and clear the registry's cache. -If the search function is not registered, do nothing. 
-""" + If the search function is not registered, do nothing. + """ def register_error(errors: str, handler: _Handler, /) -> None: """Register the specified error handler under the name errors. -handler must be a callable object, that will be called with an exception -instance containing information about the location of the encoding/decoding -error and must return a (replacement, new position) tuple. -""" + handler must be a callable object, that will be called with an exception + instance containing information about the location of the encoding/decoding + error and must return a (replacement, new position) tuple. + """ + def lookup_error(name: str, /) -> _Handler: """lookup_error(errors) -> handler -Return the error handler for the specified error handling name or raise a -LookupError, if no handler exists under this name. -""" + Return the error handler for the specified error handling name or raise a + LookupError, if no handler exists under this name. + """ # The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 # https://docs.python.org/3/library/codecs.html#binary-transforms @@ -72,12 +73,13 @@ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: """Encodes obj using the codec registered for encoding. -The default encoding is 'utf-8'. errors may be given to set a -different error handling scheme. Default is 'strict' meaning that encoding -errors raise a ValueError. Other possible values are 'ignore', 'replace' -and 'backslashreplace' as well as any other name registered with -codecs.register_error that can handle ValueErrors. -""" + The default encoding is 'utf-8'. errors may be given to set a + different error handling scheme. Default is 'strict' meaning that encoding + errors raise a ValueError. Other possible values are 'ignore', 'replace' + and 'backslashreplace' as well as any other name registered with + codecs.register_error that can handle ValueErrors. + """ + @overload def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[overload-overlap] @overload @@ -86,12 +88,13 @@ def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: # type: ignore[overload-overlap] """Decodes obj using the codec registered for encoding. -Default encoding is 'utf-8'. errors may be given to set a -different error handling scheme. Default is 'strict' meaning that encoding -errors raise a ValueError. Other possible values are 'ignore', 'replace' -and 'backslashreplace' as well as any other name registered with -codecs.register_error that can handle ValueErrors. -""" + Default encoding is 'utf-8'. errors may be given to set a + different error handling scheme. Default is 'strict' meaning that encoding + errors raise a ValueError. Other possible values are 'ignore', 'replace' + and 'backslashreplace' as well as any other name registered with + codecs.register_error that can handle ValueErrors. + """ + @overload def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... @@ -109,8 +112,8 @@ def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "stric @overload def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... 
def lookup(encoding: str, /) -> codecs.CodecInfo: - """Looks up a codec tuple in the Python codec registry and returns a CodecInfo object. -""" + """Looks up a codec tuple in the Python codec registry and returns a CodecInfo object.""" + def charmap_build(map: str, /) -> _CharMap: ... def ascii_decode(data: ReadableBuffer, errors: str | None = None, /) -> tuple[str, int]: ... def ascii_encode(str: str, errors: str | None = None, /) -> tuple[bytes, int]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi index 0792b4ef5502c..0016acdc5549e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_collections_abc.pyi @@ -2,6 +2,7 @@ Unit tests are in test_collections. """ + import sys from abc import abstractmethod from types import MappingProxyType @@ -76,46 +77,38 @@ _VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... def __reversed__(self) -> Iterator[_KT_co]: - """Return a reverse iterator over the dict keys. -""" + """Return a reverse iterator over the dict keys.""" __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: - """Return True if the view and the given iterable have a null intersection. -""" + """Return True if the view and the given iterable have a null intersection.""" if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to -""" + """dictionary that this view refers to""" @final class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented def __reversed__(self) -> Iterator[_VT_co]: - """Return a reverse iterator over the dict values. -""" + """Return a reverse iterator over the dict values.""" if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to -""" + """dictionary that this view refers to""" @final class dict_items(ItemsView[_KT_co, _VT_co]): # undocumented def __eq__(self, value: object, /) -> bool: ... def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: - """Return a reverse iterator over the dict items. -""" + """Return a reverse iterator over the dict items.""" __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: - """Return True if the view and the given iterable have a null intersection. 
-""" + """Return True if the view and the given iterable have a null intersection.""" if sys.version_info >= (3, 10): @property def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: - """dictionary that this view refers to -""" + """dictionary that this view refers to""" if sys.version_info >= (3, 12): @runtime_checkable diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi index 327135f4148c1..12eafa626744d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_compression.pyi @@ -1,5 +1,5 @@ -"""Internal classes used by the gzip, lzma and bz2 modules -""" +"""Internal classes used by the gzip, lzma and bz2 modules""" + # _compression is replaced by compression._common._streams on Python 3.14+ (PEP-784) from _typeshed import Incomplete, WriteableBuffer @@ -16,12 +16,11 @@ class _Reader(Protocol): def seek(self, n: int, /) -> Any: ... class BaseStream(BufferedIOBase): - """Mode-checking helper functions. -""" + """Mode-checking helper functions.""" class DecompressReader(RawIOBase): - """Adapts the decompressor API to a RawIOBase reader API -""" + """Adapts the decompressor API to a RawIOBase reader API""" + def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi index 0abc254f3774a..a46b110a8c128 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_contextvars.pyi @@ -1,5 +1,5 @@ -"""Context Variables -""" +"""Context Variables""" + import sys from collections.abc import Callable, Iterator, Mapping from types import GenericAlias, TracebackType @@ -23,12 +23,13 @@ class ContextVar(Generic[_T]): def get(self) -> _T: """Return a value for the context variable for the current context. -If there is no value for the variable in the current context, the method will: - * return the value of the default argument of the method, if provided; or - * return the default value for the context variable, if it was created - with one; or - * raise a LookupError. -""" + If there is no value for the variable in the current context, the method will: + * return the value of the default argument of the method, if provided; or + * return the default value for the context variable, if it was created + with one; or + * raise a LookupError. + """ + @overload def get(self, default: _T, /) -> _T: ... @overload @@ -36,20 +37,21 @@ If there is no value for the variable in the current context, the method will: def set(self, value: _T, /) -> Token[_T]: """Call to set a new value for the context variable in the current context. -The required value argument is the new value for the context variable. + The required value argument is the new value for the context variable. + + Returns a Token object that can be used to restore the variable to its previous + value via the `ContextVar.reset()` method. + """ -Returns a Token object that can be used to restore the variable to its previous -value via the `ContextVar.reset()` method. -""" def reset(self, token: Token[_T], /) -> None: """Reset the context variable. -The variable is reset to the value it had before the `ContextVar.set()` that -created the token was used. -""" + The variable is reset to the value it had before the `ContextVar.set()` that + created the token was used. 
+ """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @final class Token(Generic[_T]): @@ -60,17 +62,15 @@ class Token(Generic[_T]): MISSING: ClassVar[object] __hash__: ClassVar[None] # type: ignore[assignment] def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" if sys.version_info >= (3, 14): def __enter__(self) -> Self: - """Enter into Token context manager. -""" + """Enter into Token context manager.""" + def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> None: - """Exit from Token context manager, restore the linked ContextVar. -""" + """Exit from Token context manager, restore the linked ContextVar.""" def copy_context() -> Context: ... @@ -83,25 +83,25 @@ class Context(Mapping[ContextVar[Any], Any]): def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: """Return the value for `key` if `key` has the value in the context object. -If `key` does not exist, return `default`. If `default` is not given, -return None. -""" + If `key` does not exist, return `default`. If `default` is not given, + return None. + """ + @overload def get(self, key: ContextVar[_T], default: _T, /) -> _T: ... @overload def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ... def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... def copy(self) -> Context: - """Return a shallow copy of the context object. -""" + """Return a shallow copy of the context object.""" __hash__: ClassVar[None] # type: ignore[assignment] def __getitem__(self, key: ContextVar[_T], /) -> _T: - """Return self[key]. -""" + """Return self[key].""" + def __iter__(self) -> Iterator[ContextVar[Any]]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __eq__(self, value: object, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi index 118bd5669477f..e3adaf6fb07d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_csv.pyi @@ -1,5 +1,5 @@ -"""CSV parsing and writing. -""" +"""CSV parsing and writing.""" + import csv import sys from _typeshed import SupportsWrite @@ -30,8 +30,9 @@ _DialectLike: TypeAlias = str | Dialect | csv.Dialect | type[Dialect | csv.Diale class Dialect: """CSV dialect -The Dialect type records CSV parsing and generation options. -""" + The Dialect type records CSV parsing and generation options. + """ + delimiter: str quotechar: str | None escapechar: str | None @@ -59,55 +60,58 @@ if sys.version_info >= (3, 10): class Reader: """CSV reader -Reader objects are responsible for reading and parsing tabular data -in CSV format. -""" + Reader objects are responsible for reading and parsing tabular data + in CSV format. + """ + @property def dialect(self) -> Dialect: ... line_num: int def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> list[str]: - """Implement next(self). -""" + """Implement next(self).""" # This class calls itself _csv.writer. @disjoint_base class Writer: """CSV writer -Writer objects are responsible for generating tabular data -in CSV format from sequence input. -""" + Writer objects are responsible for generating tabular data + in CSV format from sequence input. 
+ """ + @property def dialect(self) -> Dialect: ... if sys.version_info >= (3, 13): def writerow(self, row: Iterable[Any], /) -> Any: """writerow(iterable) -Construct and write a CSV record from an iterable of fields. Non-string -elements will be converted to string. -""" + Construct and write a CSV record from an iterable of fields. Non-string + elements will be converted to string. + """ + def writerows(self, rows: Iterable[Iterable[Any]], /) -> None: """writerows(iterable of iterables) -Construct and write a series of iterables to a csv file. Non-string -elements will be converted to string. -""" + Construct and write a series of iterables to a csv file. Non-string + elements will be converted to string. + """ else: def writerow(self, row: Iterable[Any]) -> Any: """writerow(iterable) -Construct and write a CSV record from an iterable of fields. Non-string -elements will be converted to string. -""" + Construct and write a CSV record from an iterable of fields. Non-string + elements will be converted to string. + """ + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: """writerows(iterable of iterables) -Construct and write a series of iterables to a csv file. Non-string -elements will be converted to string. -""" + Construct and write a series of iterables to a csv file. Non-string + elements will be converted to string. + """ # For the return types below. # These aliases can be removed when typeshed drops support for 3.9. @@ -145,19 +149,20 @@ def writer( quoting: _QuotingType = 0, strict: bool = False, ) -> _writer: - """ csv_writer = csv.writer(fileobj [, dialect='excel'] - [optional keyword args]) - for row in sequence: - csv_writer.writerow(row) + """csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + for row in sequence: + csv_writer.writerow(row) - [or] + [or] - csv_writer = csv.writer(fileobj [, dialect='excel'] - [optional keyword args]) - csv_writer.writerows(rows) + csv_writer = csv.writer(fileobj [, dialect='excel'] + [optional keyword args]) + csv_writer.writerows(rows) + + The "fileobj" argument can be any object that supports the file API. + """ -The "fileobj" argument can be any object that supports the file API. -""" def reader( iterable: Iterable[str], /, @@ -172,20 +177,21 @@ def reader( quoting: _QuotingType = 0, strict: bool = False, ) -> _reader: - """ csv_reader = reader(iterable [, dialect='excel'] - [optional keyword args]) - for row in csv_reader: - process(row) - -The "iterable" argument can be any object that returns a line -of input for each iteration, such as a file object or a list. The -optional "dialect" parameter is discussed below. The function -also accepts optional keyword arguments which override settings -provided by the dialect. - -The returned object is an iterator. Each iteration returns a row -of the CSV file (which can span multiple input lines). -""" + """csv_reader = reader(iterable [, dialect='excel'] + [optional keyword args]) + for row in csv_reader: + process(row) + + The "iterable" argument can be any object that returns a line + of input for each iteration, such as a file object or a list. The + optional "dialect" parameter is discussed below. The function + also accepts optional keyword arguments which override settings + provided by the dialect. + + The returned object is an iterator. Each iteration returns a row + of the CSV file (which can span multiple input lines). 
+ """ + def register_dialect( name: str, /, @@ -202,27 +208,31 @@ def register_dialect( ) -> None: """Create a mapping from a string name to a dialect class. dialect = csv.register_dialect(name[, dialect[, **fmtparams]]) -""" + """ + def unregister_dialect(name: str) -> None: """Delete the name/dialect mapping associated with a string name. csv.unregister_dialect(name) -""" + """ + def get_dialect(name: str) -> Dialect: """Return the dialect instance associated with name. dialect = csv.get_dialect(name) -""" + """ + def list_dialects() -> list[str]: """Return a list of all known dialect names. names = csv.list_dialects() -""" + """ + def field_size_limit(new_limit: int = ...) -> int: """Sets an upper limit on parsed fields. - csv.field_size_limit([limit]) + csv.field_size_limit([limit]) -Returns old limit. If limit is not given, no new limit is set and -the old limit is returned -""" + Returns old limit. If limit is not given, no new limit is set and + the old limit is returned + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi index ee939a614db37..2d7c2dc307f7a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ctypes.pyi @@ -1,5 +1,5 @@ -"""Create and manipulate C compatible data types in Python. -""" +"""Create and manipulate C compatible data types in Python.""" + import _typeshed import sys from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -31,8 +31,8 @@ if sys.platform == "win32": _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] class COMError(Exception): - """Raised when a COM method call failed. -""" + """Raised when a COM method call failed.""" + hresult: int text: str | None details: _COMError_Details @@ -40,45 +40,43 @@ if sys.platform == "win32": def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: - """CopyComPointer(src, dst) -> HRESULT value -""" - + """CopyComPointer(src, dst) -> HRESULT value""" FUNCFLAG_HRESULT: Final = 0x2 FUNCFLAG_STDCALL: Final = 0x0 def FormatError(code: int = ...) -> str: """FormatError([integer]) -> string -Convert a win32 error code into a string. If the error code is not -given, the return value of a call to GetLastError() is used. -""" + Convert a win32 error code into a string. If the error code is not + given, the return value of a call to GetLastError() is used. + """ + def get_last_error() -> int: ... def set_last_error(value: int) -> int: ... def LoadLibrary(name: str, load_flags: int = 0, /) -> int: """LoadLibrary(name, load_flags) -> handle -Load an executable (usually a DLL), and return a handle to it. -The handle may be used to locate exported functions in this -module. load_flags are as defined for LoadLibraryEx in the -Windows API. -""" + Load an executable (usually a DLL), and return a handle to it. + The handle may be used to locate exported functions in this + module. load_flags are as defined for LoadLibraryEx in the + Windows API. + """ + def FreeLibrary(handle: int, /) -> None: """FreeLibrary(handle) -> void -Free the handle of an executable previously loaded by LoadLibrary. -""" + Free the handle of an executable previously loaded by LoadLibrary. 
+ """ else: def dlclose(handle: int, /) -> None: - """dlclose a library -""" + """dlclose a library""" # The default for flag is RTLD_GLOBAL|RTLD_LOCAL, which is platform dependent. def dlopen(name: StrOrBytesPath, flag: int = ..., /) -> int: - """dlopen(name, flag={RTLD_GLOBAL|RTLD_LOCAL}) open a shared library -""" + """dlopen(name, flag={RTLD_GLOBAL|RTLD_LOCAL}) open a shared library""" + def dlsym(handle: int, name: str, /) -> int: - """find symbol in shared library -""" + """find symbol in shared library""" if sys.version_info >= (3, 13): # This class is not exposed. It calls itself _ctypes.CType_Type. @@ -125,8 +123,8 @@ class _PyCSimpleType(_CTypeBaseType): def __rmul__(self: type[_CT], value: int, /) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _SimpleCData(_CData, Generic[_T], metaclass=_PyCSimpleType): - """XXX to be provided -""" + """XXX to be provided""" + value: _T # The TypeVar can be unsolved here, # but we can't use overloads without creating many, many mypy false-positive errors @@ -154,8 +152,8 @@ class _PyCPointerType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): - """XXX to be provided -""" + """XXX to be provided""" + _type_: type[_CT] contents: _CT @overload @@ -164,34 +162,34 @@ class _Pointer(_PointerLike, _CData, Generic[_CT], metaclass=_PyCPointerType): def __init__(self, arg: _CT) -> None: ... @overload def __getitem__(self, key: int, /) -> Any: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> list[Any]: ... def __setitem__(self, key: int, value: Any, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" if sys.version_info < (3, 14): @overload def POINTER(type: None, /) -> type[c_void_p]: """Create and return a new ctypes pointer type. - type - A ctypes type. + type + A ctypes type. + + Pointer types are cached and reused internally, + so calling this function repeatedly is cheap. + """ -Pointer types are cached and reused internally, -so calling this function repeatedly is cheap. -""" @overload def POINTER(type: type[_CT], /) -> type[_Pointer[_CT]]: ... def pointer(obj: _CT, /) -> _Pointer[_CT]: """Create a new pointer instance, pointing to 'obj'. -The returned object is of the type POINTER(type(obj)). Note that if you -just want to pass a pointer to an object to a foreign function call, you -should use byref(obj) which is much faster. -""" + The returned object is of the type POINTER(type(obj)). Note that if you + just want to pass a pointer to an object to a foreign function call, you + should use byref(obj) which is much faster. + """ # This class is not exposed. It calls itself _ctypes.CArgObject. @final @@ -200,15 +198,14 @@ class _CArgObject: ... if sys.version_info >= (3, 14): def byref(obj: _CData | _CDataType, offset: int = 0, /) -> _CArgObject: - """Return a pointer lookalike to a C instance, only usable as function argument. 
-""" + """Return a pointer lookalike to a C instance, only usable as function argument.""" else: def byref(obj: _CData | _CDataType, offset: int = 0) -> _CArgObject: """byref(C instance[, offset=0]) -> byref-object -Return a pointer lookalike to a C instance, only usable -as function argument -""" + Return a pointer lookalike to a C instance, only usable + as function argument + """ _ECT: TypeAlias = Callable[[_CData | _CDataType | None, CFuncPtr, tuple[_CData | _CDataType, ...]], _CDataType] _PF: TypeAlias = tuple[int] | tuple[int, str | None] | tuple[int, str | None, Any] @@ -227,8 +224,8 @@ class _PyCFuncPtrType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): - """Function Pointer -""" + """Function Pointer""" + restype: type[_CDataType] | Callable[[int], Any] | None argtypes: Sequence[type[_CDataType]] errcheck: _ECT @@ -249,8 +246,7 @@ class CFuncPtr(_PointerLike, _CData, metaclass=_PyCFuncPtrType): ) -> Self: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" _GetT = TypeVar("_GetT") _SetT = TypeVar("_SetT") @@ -292,8 +288,8 @@ class _UnionType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Union(_CData, metaclass=_UnionType): - """Union base class -""" + """Union base class""" + _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -322,8 +318,8 @@ class _PyCStructType(_CTypeBaseType): def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class Structure(_CData, metaclass=_PyCStructType): - """Structure base class -""" + """Structure base class""" + _fields_: ClassVar[Sequence[tuple[str, type[_CDataType]] | tuple[str, type[_CDataType], int]]] _pack_: ClassVar[int] _anonymous_: ClassVar[Sequence[str]] @@ -350,12 +346,13 @@ class _PyCArrayType(_CTypeBaseType): class Array(_CData, Generic[_CT], metaclass=_PyCArrayType): """Abstract base class for arrays. -The recommended way to create concrete array types is by multiplying any -ctypes data type with a non-negative integer. Alternatively, you can subclass -this type and define _length_ and _type_ class variables. Array elements can -be read and written using standard subscript and slice accesses for slice -reads, the resulting object is not itself an Array. -""" + The recommended way to create concrete array types is by multiplying any + ctypes data type with a non-negative integer. Alternatively, you can subclass + this type and define _length_ and _type_ class variables. Array elements can + be read and written using standard subscript and slice accesses for slice + reads, the resulting object is not itself an Array. + """ + @property @abstractmethod def _length_(self) -> int: ... @@ -387,47 +384,47 @@ reads, the resulting object is not itself an Array. def __init__(self, *args: Any) -> None: ... @overload def __getitem__(self, key: int, /) -> Any: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> list[Any]: ... @overload def __setitem__(self, key: int, value: Any, /) -> None: - """Set self[key] to value. 
-""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: Iterable[Any], /) -> None: ... def __iter__(self) -> Iterator[Any]: ... # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" def addressof(obj: _CData | _CDataType, /) -> int: - """Return the address of the C instance internal buffer -""" + """Return the address of the C instance internal buffer""" + def alignment(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: """alignment(C type) -> integer -alignment(C instance) -> integer -Return the alignment requirements of a C instance -""" + alignment(C instance) -> integer + Return the alignment requirements of a C instance + """ + def get_errno() -> int: ... def resize(obj: _CData | _CDataType, size: int, /) -> None: - """Resize the memory buffer of a ctypes instance -""" + """Resize the memory buffer of a ctypes instance""" + def set_errno(value: int, /) -> int: ... def sizeof(obj_or_type: _CData | _CDataType | type[_CData | _CDataType], /) -> int: - """Return the size in bytes of a C instance. -""" + """Return the size in bytes of a C instance.""" + def PyObj_FromPtr(address: int, /) -> Any: ... def Py_DECREF(o: _T, /) -> _T: ... def Py_INCREF(o: _T, /) -> _T: ... def buffer_info(o: _CData | _CDataType | type[_CData | _CDataType], /) -> tuple[str, int, tuple[int, ...]]: - """Return buffer interface information -""" + """Return buffer interface information""" + def call_cdeclfunction(address: int, arguments: tuple[Any, ...], /) -> Any: ... def call_function(address: int, arguments: tuple[Any, ...], /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi index 398da932f4038..b53a33e16e46f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses.pyi @@ -277,481 +277,530 @@ _C_API: Any version: Final[bytes] def baudrate() -> int: - """Return the output speed of the terminal in bits per second. -""" + """Return the output speed of the terminal in bits per second.""" + def beep() -> None: - """Emit a short attention sound. -""" + """Emit a short attention sound.""" + def can_change_color() -> bool: - """Return True if the programmer can change the colors displayed by the terminal. -""" + """Return True if the programmer can change the colors displayed by the terminal.""" + def cbreak(flag: bool = True, /) -> None: """Enter cbreak mode. - flag - If false, the effect is the same as calling nocbreak(). + flag + If false, the effect is the same as calling nocbreak(). + + In cbreak mode (sometimes called "rare" mode) normal tty line buffering is + turned off and characters are available to be read one by one. However, + unlike raw mode, special characters (interrupt, quit, suspend, and flow + control) retain their effects on the tty driver and calling program. + Calling first raw() then cbreak() leaves the terminal in cbreak mode. + """ -In cbreak mode (sometimes called "rare" mode) normal tty line buffering is -turned off and characters are available to be read one by one. However, -unlike raw mode, special characters (interrupt, quit, suspend, and flow -control) retain their effects on the tty driver and calling program. 
-Calling first raw() then cbreak() leaves the terminal in cbreak mode. -""" def color_content(color_number: int, /) -> tuple[int, int, int]: """Return the red, green, and blue (RGB) components of the specified color. - color_number - The number of the color (0 - (COLORS-1)). + color_number + The number of the color (0 - (COLORS-1)). + + A 3-tuple is returned, containing the R, G, B values for the given color, + which will be between 0 (no component) and 1000 (maximum amount of component). + """ -A 3-tuple is returned, containing the R, G, B values for the given color, -which will be between 0 (no component) and 1000 (maximum amount of component). -""" def color_pair(pair_number: int, /) -> int: """Return the attribute value for displaying text in the specified color. - pair_number - The number of the color pair. + pair_number + The number of the color pair. + + This attribute value can be combined with A_STANDOUT, A_REVERSE, and the + other A_* attributes. pair_number() is the counterpart to this function. + """ -This attribute value can be combined with A_STANDOUT, A_REVERSE, and the -other A_* attributes. pair_number() is the counterpart to this function. -""" def curs_set(visibility: int, /) -> int: """Set the cursor state. - visibility - 0 for invisible, 1 for normal visible, or 2 for very visible. + visibility + 0 for invisible, 1 for normal visible, or 2 for very visible. + + If the terminal supports the visibility requested, the previous cursor + state is returned; otherwise, an exception is raised. On many terminals, + the "visible" mode is an underline cursor and the "very visible" mode is + a block cursor. + """ -If the terminal supports the visibility requested, the previous cursor -state is returned; otherwise, an exception is raised. On many terminals, -the "visible" mode is an underline cursor and the "very visible" mode is -a block cursor. -""" def def_prog_mode() -> None: """Save the current terminal mode as the "program" mode. -The "program" mode is the mode when the running program is using curses. + The "program" mode is the mode when the running program is using curses. + + Subsequent calls to reset_prog_mode() will restore this mode. + """ -Subsequent calls to reset_prog_mode() will restore this mode. -""" def def_shell_mode() -> None: """Save the current terminal mode as the "shell" mode. -The "shell" mode is the mode when the running program is not using curses. + The "shell" mode is the mode when the running program is not using curses. + + Subsequent calls to reset_shell_mode() will restore this mode. + """ -Subsequent calls to reset_shell_mode() will restore this mode. -""" def delay_output(ms: int, /) -> None: """Insert a pause in output. - ms - Duration in milliseconds. -""" + ms + Duration in milliseconds. + """ + def doupdate() -> None: - """Update the physical screen to match the virtual screen. -""" + """Update the physical screen to match the virtual screen.""" + def echo(flag: bool = True, /) -> None: """Enter echo mode. - flag - If false, the effect is the same as calling noecho(). + flag + If false, the effect is the same as calling noecho(). + + In echo mode, each character input is echoed to the screen as it is entered. + """ -In echo mode, each character input is echoed to the screen as it is entered. -""" def endwin() -> None: - """De-initialize the library, and return terminal to normal status. -""" + """De-initialize the library, and return terminal to normal status.""" + def erasechar() -> bytes: - """Return the user's current erase character. 
-""" + """Return the user's current erase character.""" + def filter() -> None: ... def flash() -> None: """Flash the screen. -That is, change it to reverse-video and then change it back in a short interval. -""" + That is, change it to reverse-video and then change it back in a short interval. + """ + def flushinp() -> None: """Flush all input buffers. -This throws away any typeahead that has been typed by the user and has not -yet been processed by the program. -""" + This throws away any typeahead that has been typed by the user and has not + yet been processed by the program. + """ + def get_escdelay() -> int: """Gets the curses ESCDELAY setting. -Gets the number of milliseconds to wait after reading an escape character, -to distinguish between an individual escape character entered on the -keyboard from escape sequences sent by cursor and function keys. -""" + Gets the number of milliseconds to wait after reading an escape character, + to distinguish between an individual escape character entered on the + keyboard from escape sequences sent by cursor and function keys. + """ + def get_tabsize() -> int: """Gets the curses TABSIZE setting. -Gets the number of columns used by the curses library when converting a tab -character to spaces as it adds the tab to a window. -""" + Gets the number of columns used by the curses library when converting a tab + character to spaces as it adds the tab to a window. + """ + def getmouse() -> tuple[int, int, int, int, int]: """Retrieve the queued mouse event. -After getch() returns KEY_MOUSE to signal a mouse event, this function -returns a 5-tuple (id, x, y, z, bstate). -""" + After getch() returns KEY_MOUSE to signal a mouse event, this function + returns a 5-tuple (id, x, y, z, bstate). + """ + def getsyx() -> tuple[int, int]: """Return the current coordinates of the virtual screen cursor. -Return a (y, x) tuple. If leaveok is currently true, return (-1, -1). -""" + Return a (y, x) tuple. If leaveok is currently true, return (-1, -1). + """ + def getwin(file: SupportsRead[bytes], /) -> window: """Read window related data stored in the file by an earlier putwin() call. -The routine then creates and initializes a new window using that data, -returning the new window object. -""" + The routine then creates and initializes a new window using that data, + returning the new window object. + """ + def halfdelay(tenths: int, /) -> None: """Enter half-delay mode. - tenths - Maximal blocking delay in tenths of seconds (1 - 255). + tenths + Maximal blocking delay in tenths of seconds (1 - 255). + + Use nocbreak() to leave half-delay mode. + """ -Use nocbreak() to leave half-delay mode. -""" def has_colors() -> bool: - """Return True if the terminal can display colors; otherwise, return False. -""" + """Return True if the terminal can display colors; otherwise, return False.""" if sys.version_info >= (3, 10): def has_extended_color_support() -> bool: """Return True if the module supports extended colors; otherwise, return False. -Extended color support allows more than 256 color-pairs for terminals -that support more than 16 colors (e.g. xterm-256color). -""" + Extended color support allows more than 256 color-pairs for terminals + that support more than 16 colors (e.g. xterm-256color). + """ if sys.version_info >= (3, 14): def assume_default_colors(fg: int, bg: int, /) -> None: """Allow use of default values for colors on terminals supporting this feature. -Assign terminal default foreground/background colors to color number -1. 
-Change the definition of the color-pair 0 to (fg, bg). + Assign terminal default foreground/background colors to color number -1. + Change the definition of the color-pair 0 to (fg, bg). -Use this to support transparency in your application. -""" + Use this to support transparency in your application. + """ def has_ic() -> bool: - """Return True if the terminal has insert- and delete-character capabilities. -""" + """Return True if the terminal has insert- and delete-character capabilities.""" + def has_il() -> bool: - """Return True if the terminal has insert- and delete-line capabilities. -""" + """Return True if the terminal has insert- and delete-line capabilities.""" + def has_key(key: int, /) -> bool: """Return True if the current terminal type recognizes a key with that value. - key - Key number. -""" + key + Key number. + """ + def init_color(color_number: int, r: int, g: int, b: int, /) -> None: """Change the definition of a color. - color_number - The number of the color to be changed (0 - (COLORS-1)). - r - Red component (0 - 1000). - g - Green component (0 - 1000). - b - Blue component (0 - 1000). - -When init_color() is used, all occurrences of that color on the screen -immediately change to the new definition. This function is a no-op on -most terminals; it is active only if can_change_color() returns true. -""" + color_number + The number of the color to be changed (0 - (COLORS-1)). + r + Red component (0 - 1000). + g + Green component (0 - 1000). + b + Blue component (0 - 1000). + + When init_color() is used, all occurrences of that color on the screen + immediately change to the new definition. This function is a no-op on + most terminals; it is active only if can_change_color() returns true. + """ + def init_pair(pair_number: int, fg: int, bg: int, /) -> None: """Change the definition of a color-pair. - pair_number - The number of the color-pair to be changed (1 - (COLOR_PAIRS-1)). - fg - Foreground color number (-1 - (COLORS-1)). - bg - Background color number (-1 - (COLORS-1)). + pair_number + The number of the color-pair to be changed (1 - (COLOR_PAIRS-1)). + fg + Foreground color number (-1 - (COLORS-1)). + bg + Background color number (-1 - (COLORS-1)). + + If the color-pair was previously initialized, the screen is refreshed and + all occurrences of that color-pair are changed to the new definition. + """ -If the color-pair was previously initialized, the screen is refreshed and -all occurrences of that color-pair are changed to the new definition. -""" def initscr() -> window: """Initialize the library. -Return a WindowObject which represents the whole screen. -""" + Return a WindowObject which represents the whole screen. + """ + def intrflush(flag: bool, /) -> None: ... def is_term_resized(nlines: int, ncols: int, /) -> bool: """Return True if resize_term() would modify the window structure, False otherwise. - nlines - Height. - ncols - Width. -""" + nlines + Height. + ncols + Width. + """ + def isendwin() -> bool: - """Return True if endwin() has been called. -""" + """Return True if endwin() has been called.""" + def keyname(key: int, /) -> bytes: """Return the name of specified key. - key - Key number. -""" + key + Key number. + """ + def killchar() -> bytes: - """Return the user's current line kill character. -""" + """Return the user's current line kill character.""" + def longname() -> bytes: """Return the terminfo long name field describing the current terminal. -The maximum length of a verbose description is 128 characters. 
It is defined -only after the call to initscr(). -""" + The maximum length of a verbose description is 128 characters. It is defined + only after the call to initscr(). + """ + def meta(yes: bool, /) -> None: """Enable/disable meta keys. -If yes is True, allow 8-bit characters to be input. If yes is False, -allow only 7-bit characters. -""" + If yes is True, allow 8-bit characters to be input. If yes is False, + allow only 7-bit characters. + """ + def mouseinterval(interval: int, /) -> None: """Set and retrieve the maximum time between press and release in a click. - interval - Time in milliseconds. + interval + Time in milliseconds. + + Set the maximum time that can elapse between press and release events in + order for them to be recognized as a click, and return the previous interval + value. + """ -Set the maximum time that can elapse between press and release events in -order for them to be recognized as a click, and return the previous interval -value. -""" def mousemask(newmask: int, /) -> tuple[int, int]: """Set the mouse events to be reported, and return a tuple (availmask, oldmask). -Return a tuple (availmask, oldmask). availmask indicates which of the -specified mouse events can be reported; on complete failure it returns 0. -oldmask is the previous value of the given window's mouse event mask. -If this function is never called, no mouse events are ever reported. -""" + Return a tuple (availmask, oldmask). availmask indicates which of the + specified mouse events can be reported; on complete failure it returns 0. + oldmask is the previous value of the given window's mouse event mask. + If this function is never called, no mouse events are ever reported. + """ + def napms(ms: int, /) -> int: """Sleep for specified time. - ms - Duration in milliseconds. -""" + ms + Duration in milliseconds. + """ + def newpad(nlines: int, ncols: int, /) -> window: """Create and return a pointer to a new pad data structure. - nlines - Height. - ncols - Width. -""" + nlines + Height. + ncols + Width. + """ + def newwin(nlines: int, ncols: int, begin_y: int = 0, begin_x: int = 0, /) -> window: """newwin(nlines, ncols, [begin_y=0, begin_x=0]) -Return a new window. - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - -By default, the window will extend from the specified position to the lower -right corner of the screen. -""" + Return a new window. + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the window will extend from the specified position to the lower + right corner of the screen. + """ + def nl(flag: bool = True, /) -> None: """Enter newline mode. - flag - If false, the effect is the same as calling nonl(). + flag + If false, the effect is the same as calling nonl(). + + This mode translates the return key into newline on input, and translates + newline into return and line-feed on output. Newline mode is initially on. + """ -This mode translates the return key into newline on input, and translates -newline into return and line-feed on output. Newline mode is initially on. -""" def nocbreak() -> None: """Leave cbreak mode. -Return to normal "cooked" mode with line buffering. -""" + Return to normal "cooked" mode with line buffering. + """ + def noecho() -> None: """Leave echo mode. -Echoing of input characters is turned off. -""" + Echoing of input characters is turned off. + """ + def nonl() -> None: """Leave newline mode. 
-Disable translation of return into newline on input, and disable low-level -translation of newline into newline/return on output. -""" + Disable translation of return into newline on input, and disable low-level + translation of newline into newline/return on output. + """ + def noqiflush() -> None: """Disable queue flushing. -When queue flushing is disabled, normal flush of input and output queues -associated with the INTR, QUIT and SUSP characters will not be done. -""" + When queue flushing is disabled, normal flush of input and output queues + associated with the INTR, QUIT and SUSP characters will not be done. + """ + def noraw() -> None: """Leave raw mode. -Return to normal "cooked" mode with line buffering. -""" + Return to normal "cooked" mode with line buffering. + """ + def pair_content(pair_number: int, /) -> tuple[int, int]: """Return a tuple (fg, bg) containing the colors for the requested color pair. - pair_number - The number of the color pair (0 - (COLOR_PAIRS-1)). -""" + pair_number + The number of the color pair (0 - (COLOR_PAIRS-1)). + """ + def pair_number(attr: int, /) -> int: """Return the number of the color-pair set by the specified attribute value. -color_pair() is the counterpart to this function. -""" + color_pair() is the counterpart to this function. + """ + def putp(string: ReadOnlyBuffer, /) -> None: """Emit the value of a specified terminfo capability for the current terminal. -Note that the output of putp() always goes to standard output. -""" + Note that the output of putp() always goes to standard output. + """ + def qiflush(flag: bool = True, /) -> None: """Enable queue flushing. - flag - If false, the effect is the same as calling noqiflush(). + flag + If false, the effect is the same as calling noqiflush(). + + If queue flushing is enabled, all output in the display driver queue + will be flushed when the INTR, QUIT and SUSP characters are read. + """ -If queue flushing is enabled, all output in the display driver queue -will be flushed when the INTR, QUIT and SUSP characters are read. -""" def raw(flag: bool = True, /) -> None: """Enter raw mode. - flag - If false, the effect is the same as calling noraw(). + flag + If false, the effect is the same as calling noraw(). + + In raw mode, normal line buffering and processing of interrupt, quit, + suspend, and flow control keys are turned off; characters are presented to + curses input functions one by one. + """ -In raw mode, normal line buffering and processing of interrupt, quit, -suspend, and flow control keys are turned off; characters are presented to -curses input functions one by one. -""" def reset_prog_mode() -> None: - """Restore the terminal to "program" mode, as previously saved by def_prog_mode(). -""" + """Restore the terminal to "program" mode, as previously saved by def_prog_mode().""" + def reset_shell_mode() -> None: - """Restore the terminal to "shell" mode, as previously saved by def_shell_mode(). -""" + """Restore the terminal to "shell" mode, as previously saved by def_shell_mode().""" + def resetty() -> None: - """Restore terminal mode. -""" + """Restore terminal mode.""" + def resize_term(nlines: int, ncols: int, /) -> None: """Backend function used by resizeterm(), performing most of the work. - nlines - Height. - ncols - Width. - -When resizing the windows, resize_term() blank-fills the areas that are -extended. The calling application should fill in these areas with appropriate -data. The resize_term() function attempts to resize all windows. 
However, -due to the calling convention of pads, it is not possible to resize these -without additional interaction with the application. -""" + nlines + Height. + ncols + Width. + + When resizing the windows, resize_term() blank-fills the areas that are + extended. The calling application should fill in these areas with appropriate + data. The resize_term() function attempts to resize all windows. However, + due to the calling convention of pads, it is not possible to resize these + without additional interaction with the application. + """ + def resizeterm(nlines: int, ncols: int, /) -> None: """Resize the standard and current windows to the specified dimensions. - nlines - Height. - ncols - Width. + nlines + Height. + ncols + Width. + + Adjusts other bookkeeping data used by the curses library that record the + window dimensions (in particular the SIGWINCH handler). + """ -Adjusts other bookkeeping data used by the curses library that record the -window dimensions (in particular the SIGWINCH handler). -""" def savetty() -> None: - """Save terminal mode. -""" + """Save terminal mode.""" + def set_escdelay(ms: int, /) -> None: """Sets the curses ESCDELAY setting. - ms - length of the delay in milliseconds. + ms + length of the delay in milliseconds. + + Sets the number of milliseconds to wait after reading an escape character, + to distinguish between an individual escape character entered on the + keyboard from escape sequences sent by cursor and function keys. + """ -Sets the number of milliseconds to wait after reading an escape character, -to distinguish between an individual escape character entered on the -keyboard from escape sequences sent by cursor and function keys. -""" def set_tabsize(size: int, /) -> None: """Sets the curses TABSIZE setting. - size - rendered cell width of a tab character. + size + rendered cell width of a tab character. + + Sets the number of columns used by the curses library when converting a tab + character to spaces as it adds the tab to a window. + """ -Sets the number of columns used by the curses library when converting a tab -character to spaces as it adds the tab to a window. -""" def setsyx(y: int, x: int, /) -> None: """Set the virtual screen cursor. - y - Y-coordinate. - x - X-coordinate. + y + Y-coordinate. + x + X-coordinate. + + If y and x are both -1, then leaveok is set. + """ -If y and x are both -1, then leaveok is set. -""" def setupterm(term: str | None = None, fd: int = -1) -> None: """Initialize the terminal. - term - Terminal name. - If omitted, the value of the TERM environment variable will be used. - fd - File descriptor to which any initialization sequences will be sent. - If not supplied, the file descriptor for sys.stdout will be used. -""" + term + Terminal name. + If omitted, the value of the TERM environment variable will be used. + fd + File descriptor to which any initialization sequences will be sent. + If not supplied, the file descriptor for sys.stdout will be used. + """ + def start_color() -> None: """Initializes eight basic colors and global variables COLORS and COLOR_PAIRS. -Must be called if the programmer wants to use colors, and before any other -color manipulation routine is called. It is good practice to call this -routine right after initscr(). + Must be called if the programmer wants to use colors, and before any other + color manipulation routine is called. It is good practice to call this + routine right after initscr(). 
+ + It also restores the colors on the terminal to the values they had when the + terminal was just turned on. + """ -It also restores the colors on the terminal to the values they had when the -terminal was just turned on. -""" def termattrs() -> int: - """Return a logical OR of all video attributes supported by the terminal. -""" + """Return a logical OR of all video attributes supported by the terminal.""" + def termname() -> bytes: - """Return the value of the environment variable TERM, truncated to 14 characters. -""" + """Return the value of the environment variable TERM, truncated to 14 characters.""" + def tigetflag(capname: str, /) -> int: """Return the value of the Boolean capability. - capname - The terminfo capability name. + capname + The terminfo capability name. + + The value -1 is returned if capname is not a Boolean capability, or 0 if + it is canceled or absent from the terminal description. + """ -The value -1 is returned if capname is not a Boolean capability, or 0 if -it is canceled or absent from the terminal description. -""" def tigetnum(capname: str, /) -> int: """Return the value of the numeric capability. - capname - The terminfo capability name. + capname + The terminfo capability name. + + The value -2 is returned if capname is not a numeric capability, or -1 if + it is canceled or absent from the terminal description. + """ -The value -2 is returned if capname is not a numeric capability, or -1 if -it is canceled or absent from the terminal description. -""" def tigetstr(capname: str, /) -> bytes | None: """Return the value of the string capability. - capname - The terminfo capability name. + capname + The terminfo capability name. + + None is returned if capname is not a string capability, or is canceled or + absent from the terminal description. + """ -None is returned if capname is not a string capability, or is canceled or -absent from the terminal description. -""" def tparm( str: ReadOnlyBuffer, i1: int = 0, @@ -767,50 +816,54 @@ def tparm( ) -> bytes: """Instantiate the specified byte string with the supplied parameters. - str - Parameterized byte string obtained from the terminfo database. -""" + str + Parameterized byte string obtained from the terminfo database. + """ + def typeahead(fd: int, /) -> None: """Specify that the file descriptor fd be used for typeahead checking. - fd - File descriptor. + fd + File descriptor. + + If fd is -1, then no typeahead checking is done. + """ -If fd is -1, then no typeahead checking is done. -""" def unctrl(ch: _ChType, /) -> bytes: """Return a string which is a printable representation of the character ch. -Control characters are displayed as a caret followed by the character, -for example as ^C. Printing characters are left as they are. -""" + Control characters are displayed as a caret followed by the character, + for example as ^C. Printing characters are left as they are. + """ + def unget_wch(ch: int | str, /) -> None: - """Push ch so the next get_wch() will return it. -""" + """Push ch so the next get_wch() will return it.""" + def ungetch(ch: _ChType, /) -> None: - """Push ch so the next getch() will return it. -""" + """Push ch so the next getch() will return it.""" + def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: """Push a KEY_MOUSE event onto the input queue. -The following getmouse() will return the given state data. -""" + The following getmouse() will return the given state data. + """ + def update_lines_cols() -> None: ... 
def use_default_colors() -> None: - """Equivalent to assume_default_colors(-1, -1). -""" + """Equivalent to assume_default_colors(-1, -1).""" + def use_env(flag: bool, /) -> None: """Use environment variables LINES and COLUMNS. -If used, this function should be called before initscr() or newterm() are -called. + If used, this function should be called before initscr() or newterm() are + called. -When flag is False, the values of lines and columns specified in the terminfo -database will be used, even if environment variables LINES and COLUMNS (used -by default) are set, or if curses is running in a window (in which case -default behavior would be to use the window size if LINES and COLUMNS are -not set). -""" + When flag is False, the values of lines and columns specified in the terminfo + database will be used, even if environment variables LINES and COLUMNS (used + by default) are set, or if curses is running in a window (in which case + default behavior would be to use the window size if LINES and COLUMNS are + not set). + """ class error(Exception): ... @@ -820,93 +873,98 @@ class window: # undocumented @overload def addch(self, ch: _ChType, attr: int = ...) -> None: """addch([y, x,] ch, [attr=_curses.A_NORMAL]) -Paint the character. - - y - Y-coordinate. - x - X-coordinate. - ch - Character to add. - attr - Attributes for the character. - -Paint character ch at (y, x) with attributes attr, -overwriting any character previously painted at that location. -By default, the character position and attributes are the -current settings for the window object. -""" + Paint the character. + + y + Y-coordinate. + x + X-coordinate. + ch + Character to add. + attr + Attributes for the character. + + Paint character ch at (y, x) with attributes attr, + overwriting any character previously painted at that location. + By default, the character position and attributes are the + current settings for the window object. + """ + @overload def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... @overload def addnstr(self, str: str, n: int, attr: int = ...) -> None: """addnstr([y, x,] str, n, [attr]) -Paint at most n characters of the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to add. - n - Maximal number of characters. - attr - Attributes for characters. - -Paint at most n characters of the string str at (y, x) with -attributes attr, overwriting anything previously on the display. -By default, the character position and attributes are the -current settings for the window object. -""" + Paint at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to add. + n + Maximal number of characters. + attr + Attributes for characters. + + Paint at most n characters of the string str at (y, x) with + attributes attr, overwriting anything previously on the display. + By default, the character position and attributes are the + current settings for the window object. + """ + @overload def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload def addstr(self, str: str, attr: int = ...) -> None: """addstr([y, x,] str, [attr]) -Paint the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to add. - attr - Attributes for characters. - -Paint the string str at (y, x) with attributes attr, -overwriting anything previously on the display. -By default, the character position and attributes are the -current settings for the window object. -""" + Paint the string. + + y + Y-coordinate. + x + X-coordinate. 
+ str + String to add. + attr + Attributes for characters. + + Paint the string str at (y, x) with attributes attr, + overwriting anything previously on the display. + By default, the character position and attributes are the + current settings for the window object. + """ + @overload def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... def attroff(self, attr: int, /) -> None: - """Remove attribute attr from the "background" set. -""" + """Remove attribute attr from the "background" set.""" + def attron(self, attr: int, /) -> None: - """Add attribute attr from the "background" set. -""" + """Add attribute attr from the "background" set.""" + def attrset(self, attr: int, /) -> None: - """Set the "background" set of attributes. -""" + """Set the "background" set of attributes.""" + def bkgd(self, ch: _ChType, attr: int = 0, /) -> None: """Set the background property of the window. - ch - Background character. - attr - Background attributes. -""" + ch + Background character. + attr + Background attributes. + """ + def bkgdset(self, ch: _ChType, attr: int = 0, /) -> None: """Set the window's background. - ch - Background character. - attr - Background attributes. -""" + ch + Background character. + attr + Background attributes. + """ + def border( self, ls: _ChType = ..., @@ -920,64 +978,67 @@ current settings for the window object. ) -> None: """Draw a border around the edges of the window. - ls - Left side. - rs - Right side. - ts - Top side. - bs - Bottom side. - tl - Upper-left corner. - tr - Upper-right corner. - bl - Bottom-left corner. - br - Bottom-right corner. - -Each parameter specifies the character to use for a specific part of the -border. The characters can be specified as integers or as one-character -strings. A 0 value for any parameter will cause the default character to be -used for that parameter. -""" + ls + Left side. + rs + Right side. + ts + Top side. + bs + Bottom side. + tl + Upper-left corner. + tr + Upper-right corner. + bl + Bottom-left corner. + br + Bottom-right corner. + + Each parameter specifies the character to use for a specific part of the + border. The characters can be specified as integers or as one-character + strings. A 0 value for any parameter will cause the default character to be + used for that parameter. + """ + @overload def box(self) -> None: """box([verch=0, horch=0]) -Draw a border around the edges of the window. + Draw a border around the edges of the window. - verch - Left and right side. - horch - Top and bottom side. + verch + Left and right side. + horch + Top and bottom side. + + Similar to border(), but both ls and rs are verch and both ts and bs are + horch. The default corner characters are always used by this function. + """ -Similar to border(), but both ls and rs are verch and both ts and bs are -horch. The default corner characters are always used by this function. -""" @overload def box(self, vertch: _ChType = 0, horch: _ChType = 0) -> None: ... @overload def chgat(self, attr: int) -> None: """chgat([y, x,] [n=-1,] attr) -Set the attributes of characters. - - y - Y-coordinate. - x - X-coordinate. - n - Number of characters. - attr - Attributes for characters. - -Set the attributes of num characters at the current cursor position, or at -position (y, x) if supplied. If no value of num is given or num = -1, the -attribute will be set on all the characters to the end of the line. This -function does not move the cursor. 
The changed line will be touched using -the touchline() method so that the contents will be redisplayed by the next -window refresh. -""" + Set the attributes of characters. + + y + Y-coordinate. + x + X-coordinate. + n + Number of characters. + attr + Attributes for characters. + + Set the attributes of num characters at the current cursor position, or at + position (y, x) if supplied. If no value of num is given or num = -1, the + attribute will be set on all the characters to the end of the line. This + function does not move the cursor. The changed line will be touched using + the touchline() method so that the contents will be redisplayed by the next + window refresh. + """ + @overload def chgat(self, num: int, attr: int) -> None: ... @overload @@ -992,102 +1053,109 @@ window refresh. @overload def delch(self) -> None: """delch([y, x]) -Delete any character at (y, x). + Delete any character at (y, x). + + y + Y-coordinate. + x + X-coordinate. + """ - y - Y-coordinate. - x - X-coordinate. -""" @overload def delch(self, y: int, x: int) -> None: ... def deleteln(self) -> None: ... @overload def derwin(self, begin_y: int, begin_x: int) -> window: """derwin([nlines=0, ncols=0,] begin_y, begin_x) -Create a sub-window (window-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - -derwin() is the same as calling subwin(), except that begin_y and begin_x -are relative to the origin of the window, rather than relative to the entire -screen. -""" + Create a sub-window (window-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + derwin() is the same as calling subwin(), except that begin_y and begin_x + are relative to the origin of the window, rather than relative to the entire + screen. + """ + @overload def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... def echochar(self, ch: _ChType, attr: int = 0, /) -> None: """Add character ch with attribute attr, and refresh. - ch - Character to add. - attr - Attributes for the character. -""" + ch + Character to add. + attr + Attributes for the character. + """ + def enclose(self, y: int, x: int, /) -> bool: """Return True if the screen-relative coordinates are enclosed by the window. - y - Y-coordinate. - x - X-coordinate. -""" + y + Y-coordinate. + x + X-coordinate. + """ + def erase(self) -> None: ... def getbegyx(self) -> tuple[int, int]: ... def getbkgd(self) -> tuple[int, int]: - """Return the window's current background character/attribute pair. -""" + """Return the window's current background character/attribute pair.""" + @overload def getch(self) -> int: """getch([y, x]) -Get a character code from terminal keyboard. + Get a character code from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. - y - Y-coordinate. - x - X-coordinate. + The integer returned does not have to be in ASCII range: function keys, + keypad keys and so on return numbers higher than 256. In no-delay mode, -1 + is returned if there is no input, else getch() waits until a key is pressed. + """ -The integer returned does not have to be in ASCII range: function keys, -keypad keys and so on return numbers higher than 256. In no-delay mode, -1 -is returned if there is no input, else getch() waits until a key is pressed. -""" @overload def getch(self, y: int, x: int) -> int: ... 
@overload def get_wch(self) -> int | str: """get_wch([y, x]) -Get a wide character from terminal keyboard. + Get a wide character from terminal keyboard. - y - Y-coordinate. - x - X-coordinate. + y + Y-coordinate. + x + X-coordinate. + + Return a character for most keys, or an integer for function keys, + keypad keys, and other special keys. + """ -Return a character for most keys, or an integer for function keys, -keypad keys, and other special keys. -""" @overload def get_wch(self, y: int, x: int) -> int | str: ... @overload def getkey(self) -> str: """getkey([y, x]) -Get a character (string) from terminal keyboard. + Get a character (string) from terminal keyboard. + + y + Y-coordinate. + x + X-coordinate. - y - Y-coordinate. - x - X-coordinate. + Returning a string instead of an integer, as getch() does. Function keys, + keypad keys and other special keys return a multibyte string containing the + key name. In no-delay mode, an exception is raised if there is no input. + """ -Returning a string instead of an integer, as getch() does. Function keys, -keypad keys and other special keys return a multibyte string containing the -key name. In no-delay mode, an exception is raised if there is no input. -""" @overload def getkey(self, y: int, x: int) -> str: ... def getmaxyx(self) -> tuple[int, int]: ... @@ -1095,15 +1163,16 @@ key name. In no-delay mode, an exception is raised if there is no input. @overload def getstr(self) -> bytes: """getstr([[y, x,] n=2047]) -Read a string from the user, with primitive line editing capacity. - - y - Y-coordinate. - x - X-coordinate. - n - Maximal number of characters. -""" + Read a string from the user, with primitive line editing capacity. + + y + Y-coordinate. + x + X-coordinate. + n + Maximal number of characters. + """ + @overload def getstr(self, n: int) -> bytes: ... @overload @@ -1114,19 +1183,20 @@ Read a string from the user, with primitive line editing capacity. @overload def hline(self, ch: _ChType, n: int) -> None: """hline([y, x,] ch, n, [attr=_curses.A_NORMAL]) -Display a horizontal line. - - y - Starting Y-coordinate. - x - Starting X-coordinate. - ch - Character to draw. - n - Line length. - attr - Attributes for the characters. -""" + Display a horizontal line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the characters. + """ + @overload def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... def idcok(self, flag: bool) -> None: ... @@ -1135,34 +1205,36 @@ Display a horizontal line. @overload def inch(self) -> int: """inch([y, x]) -Return the character at the given position in the window. + Return the character at the given position in the window. + + y + Y-coordinate. + x + X-coordinate. - y - Y-coordinate. - x - X-coordinate. + The bottom 8 bits are the character proper, and upper bits are the attributes. + """ -The bottom 8 bits are the character proper, and upper bits are the attributes. -""" @overload def inch(self, y: int, x: int) -> int: ... @overload def insch(self, ch: _ChType, attr: int = ...) -> None: """insch([y, x,] ch, [attr=_curses.A_NORMAL]) -Insert a character before the current or specified position. - - y - Y-coordinate. - x - X-coordinate. - ch - Character to insert. - attr - Attributes for the character. - -All characters to the right of the cursor are shifted one position right, with -the rightmost characters on the line being lost. -""" + Insert a character before the current or specified position. + + y + Y-coordinate. 
+ x + X-coordinate. + ch + Character to insert. + attr + Attributes for the character. + + All characters to the right of the cursor are shifted one position right, with + the rightmost characters on the line being lost. + """ + @overload def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... def insdelln(self, nlines: int) -> None: ... @@ -1170,77 +1242,81 @@ the rightmost characters on the line being lost. @overload def insnstr(self, str: str, n: int, attr: int = ...) -> None: """insnstr([y, x,] str, n, [attr]) -Insert at most n characters of the string. - - y - Y-coordinate. - x - X-coordinate. - str - String to insert. - n - Maximal number of characters. - attr - Attributes for characters. - -Insert a character string (as many characters as will fit on the line) -before the character under the cursor, up to n characters. If n is zero -or negative, the entire string is inserted. All characters to the right -of the cursor are shifted right, with the rightmost characters on the line -being lost. The cursor position does not change (after moving to y, x, if -specified). -""" + Insert at most n characters of the string. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + n + Maximal number of characters. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor, up to n characters. If n is zero + or negative, the entire string is inserted. All characters to the right + of the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, if + specified). + """ + @overload def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... @overload def insstr(self, str: str, attr: int = ...) -> None: """insstr([y, x,] str, [attr]) -Insert the string before the current or specified position. - - y - Y-coordinate. - x - X-coordinate. - str - String to insert. - attr - Attributes for characters. - -Insert a character string (as many characters as will fit on the line) -before the character under the cursor. All characters to the right of -the cursor are shifted right, with the rightmost characters on the line -being lost. The cursor position does not change (after moving to y, x, -if specified). -""" + Insert the string before the current or specified position. + + y + Y-coordinate. + x + X-coordinate. + str + String to insert. + attr + Attributes for characters. + + Insert a character string (as many characters as will fit on the line) + before the character under the cursor. All characters to the right of + the cursor are shifted right, with the rightmost characters on the line + being lost. The cursor position does not change (after moving to y, x, + if specified). + """ + @overload def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... @overload def instr(self, n: int = 2047) -> bytes: """instr([y, x,] n=2047) -Return a string of characters, extracted from the window. - - y - Y-coordinate. - x - X-coordinate. - n - Maximal number of characters. - -Return a string of characters, extracted from the window starting at the -current cursor position, or at y, x if specified. Attributes are stripped -from the characters. If n is specified, instr() returns a string at most -n characters long (exclusive of the trailing NUL). -""" + Return a string of characters, extracted from the window. + + y + Y-coordinate. + x + X-coordinate. 
+ n + Maximal number of characters. + + Return a string of characters, extracted from the window starting at the + current cursor position, or at y, x if specified. Attributes are stripped + from the characters. If n is specified, instr() returns a string at most + n characters long (exclusive of the trailing NUL). + """ + @overload def instr(self, y: int, x: int, n: int = 2047) -> bytes: ... def is_linetouched(self, line: int, /) -> bool: """Return True if the specified line was modified, otherwise return False. - line - Line number. + line + Line number. + + Raise a curses.error exception if line is not valid for the given window. + """ -Raise a curses.error exception if line is not valid for the given window. -""" def is_wintouched(self) -> bool: ... def keypad(self, yes: bool, /) -> None: ... def leaveok(self, yes: bool) -> None: ... @@ -1252,28 +1328,30 @@ Raise a curses.error exception if line is not valid for the given window. @overload def noutrefresh(self) -> None: """noutrefresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) -Mark for refresh but wait. + Mark for refresh but wait. + + This function updates the data structure representing the desired state of the + window, but does not force an update of the physical screen. To accomplish + that, call doupdate(). + """ -This function updates the data structure representing the desired state of the -window, but does not force an update of the physical screen. To accomplish -that, call doupdate(). -""" @overload def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... @overload def overlay(self, destwin: window) -> None: """overlay(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol]) -Overlay the window on top of destwin. + Overlay the window on top of destwin. -The windows need not be the same size, only the overlapping region is copied. -This copy is non-destructive, which means that the current background -character does not overwrite the old contents of destwin. + The windows need not be the same size, only the overlapping region is copied. + This copy is non-destructive, which means that the current background + character does not overwrite the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overlay() can be used. sminrow and smincol are the upper-left coordinates + of the source window, and the other variables mark a rectangle in the + destination window. + """ -To get fine-grained control over the copied region, the second form of -overlay() can be used. sminrow and smincol are the upper-left coordinates -of the source window, and the other variables mark a rectangle in the -destination window. -""" @overload def overlay( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int @@ -1281,18 +1359,19 @@ destination window. @overload def overwrite(self, destwin: window) -> None: """overwrite(destwin, [sminrow, smincol, dminrow, dmincol, dmaxrow, - dmaxcol]) -Overwrite the window on top of destwin. - -The windows need not be the same size, in which case only the overlapping -region is copied. This copy is destructive, which means that the current -background character overwrites the old contents of destwin. - -To get fine-grained control over the copied region, the second form of -overwrite() can be used. sminrow and smincol are the upper-left coordinates -of the source window, the other variables mark a rectangle in the destination -window. 
-""" + dmaxcol]) + Overwrite the window on top of destwin. + + The windows need not be the same size, in which case only the overlapping + region is copied. This copy is destructive, which means that the current + background character overwrites the old contents of destwin. + + To get fine-grained control over the copied region, the second form of + overwrite() can be used. sminrow and smincol are the upper-left coordinates + of the source window, the other variables mark a rectangle in the destination + window. + """ + @overload def overwrite( self, destwin: window, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int @@ -1300,97 +1379,104 @@ window. def putwin(self, file: SupportsWrite[bytes], /) -> None: """Write all data associated with the window into the provided file object. -This information can be later retrieved using the getwin() function. -""" + This information can be later retrieved using the getwin() function. + """ + def redrawln(self, beg: int, num: int, /) -> None: """Mark the specified lines corrupted. - beg - Starting line number. - num - The number of lines. + beg + Starting line number. + num + The number of lines. + + They should be completely redrawn on the next refresh() call. + """ -They should be completely redrawn on the next refresh() call. -""" def redrawwin(self) -> None: ... @overload def refresh(self) -> None: """refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) -Update the display immediately. - -Synchronize actual screen with previous drawing/deleting methods. -The 6 optional arguments can only be specified when the window is a pad -created with newpad(). The additional parameters are needed to indicate -what part of the pad and screen are involved. pminrow and pmincol specify -the upper left-hand corner of the rectangle to be displayed in the pad. -sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to -be displayed on the screen. The lower right-hand corner of the rectangle to -be displayed in the pad is calculated from the screen coordinates, since the -rectangles must be the same size. Both rectangles must be entirely contained -within their respective structures. Negative values of pminrow, pmincol, -sminrow, or smincol are treated as if they were zero. -""" + Update the display immediately. + + Synchronize actual screen with previous drawing/deleting methods. + The 6 optional arguments can only be specified when the window is a pad + created with newpad(). The additional parameters are needed to indicate + what part of the pad and screen are involved. pminrow and pmincol specify + the upper left-hand corner of the rectangle to be displayed in the pad. + sminrow, smincol, smaxrow, and smaxcol specify the edges of the rectangle to + be displayed on the screen. The lower right-hand corner of the rectangle to + be displayed in the pad is calculated from the screen coordinates, since the + rectangles must be the same size. Both rectangles must be entirely contained + within their respective structures. Negative values of pminrow, pmincol, + sminrow, or smincol are treated as if they were zero. + """ + @overload def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... def resize(self, nlines: int, ncols: int) -> None: ... def scroll(self, lines: int = 1) -> None: """scroll([lines=1]) -Scroll the screen or scrolling region. + Scroll the screen or scrolling region. + + lines + Number of lines to scroll. 
- lines - Number of lines to scroll. + Scroll upward if the argument is positive and downward if it is negative. + """ -Scroll upward if the argument is positive and downward if it is negative. -""" def scrollok(self, flag: bool) -> None: ... def setscrreg(self, top: int, bottom: int, /) -> None: """Define a software scrolling region. - top - First line number. - bottom - Last line number. + top + First line number. + bottom + Last line number. + + All scrolling actions will take place in this region. + """ -All scrolling actions will take place in this region. -""" def standend(self) -> None: ... def standout(self) -> None: ... @overload def subpad(self, begin_y: int, begin_x: int) -> window: """subwin([nlines=0, ncols=0,] begin_y, begin_x) -Create a sub-window (screen-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - -By default, the sub-window will extend from the specified position to the -lower right corner of the window. -""" + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + @overload def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... @overload def subwin(self, begin_y: int, begin_x: int) -> window: """subwin([nlines=0, ncols=0,] begin_y, begin_x) -Create a sub-window (screen-relative coordinates). - - nlines - Height. - ncols - Width. - begin_y - Top side y-coordinate. - begin_x - Left side x-coordinate. - -By default, the sub-window will extend from the specified position to the -lower right corner of the window. -""" + Create a sub-window (screen-relative coordinates). + + nlines + Height. + ncols + Width. + begin_y + Top side y-coordinate. + begin_x + Left side x-coordinate. + + By default, the sub-window will extend from the specified position to the + lower right corner of the window. + """ + @overload def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> window: ... def syncdown(self) -> None: ... @@ -1399,29 +1485,31 @@ lower right corner of the window. def timeout(self, delay: int) -> None: ... def touchline(self, start: int, count: int, changed: bool = True) -> None: """touchline(start, count, [changed=True]) -Pretend count lines have been changed, starting with line start. + Pretend count lines have been changed, starting with line start. + + If changed is supplied, it specifies whether the affected lines are marked + as having been changed (changed=True) or unchanged (changed=False). + """ -If changed is supplied, it specifies whether the affected lines are marked -as having been changed (changed=True) or unchanged (changed=False). -""" def touchwin(self) -> None: ... def untouchwin(self) -> None: ... @overload def vline(self, ch: _ChType, n: int) -> None: """vline([y, x,] ch, n, [attr=_curses.A_NORMAL]) -Display a vertical line. - - y - Starting Y-coordinate. - x - Starting X-coordinate. - ch - Character to draw. - n - Line length. - attr - Attributes for the character. -""" + Display a vertical line. + + y + Starting Y-coordinate. + x + Starting X-coordinate. + ch + Character to draw. + n + Line length. + attr + Attributes for the character. + """ + @overload def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi index fb695b9fff475..39c877ed5816b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_curses_panel.pyi @@ -9,55 +9,55 @@ class error(Exception): ... @final class panel: def above(self) -> panel: - """Return the panel above the current panel. -""" + """Return the panel above the current panel.""" + def below(self) -> panel: - """Return the panel below the current panel. -""" + """Return the panel below the current panel.""" + def bottom(self) -> None: - """Push the panel to the bottom of the stack. -""" + """Push the panel to the bottom of the stack.""" + def hidden(self) -> bool: - """Return True if the panel is hidden (not visible), False otherwise. -""" + """Return True if the panel is hidden (not visible), False otherwise.""" + def hide(self) -> None: """Hide the panel. -This does not delete the object, it just makes the window on screen invisible. -""" + This does not delete the object, it just makes the window on screen invisible. + """ + def move(self, y: int, x: int, /) -> None: - """Move the panel to the screen coordinates (y, x). -""" + """Move the panel to the screen coordinates (y, x).""" + def replace(self, win: window, /) -> None: - """Change the window associated with the panel to the window win. -""" + """Change the window associated with the panel to the window win.""" + def set_userptr(self, obj: object, /) -> None: - """Set the panel's user pointer to obj. -""" + """Set the panel's user pointer to obj.""" + def show(self) -> None: - """Display the panel (which might have been hidden). -""" + """Display the panel (which might have been hidden).""" + def top(self) -> None: - """Push panel to the top of the stack. -""" + """Push panel to the top of the stack.""" + def userptr(self) -> object: - """Return the user pointer for the panel. -""" + """Return the user pointer for the panel.""" + def window(self) -> window: - """Return the window object associated with the panel. -""" + """Return the window object associated with the panel.""" def bottom_panel() -> panel: - """Return the bottom panel in the panel stack. -""" + """Return the bottom panel in the panel stack.""" + def new_panel(win: window, /) -> panel: - """Return a panel object, associating it with the given window win. -""" + """Return a panel object, associating it with the given window win.""" + def top_panel() -> panel: - """Return the top panel in the panel stack. -""" + """Return the top panel in the panel stack.""" + def update_panels() -> panel: """Updates the virtual screen after changes in the panel stack. -This does not call curses.doupdate(), so you'll have to do this yourself. -""" + This does not call curses.doupdate(), so you'll have to do this yourself. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi index 828fdaaa96bb8..7d6157c503011 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_dbm.pyi @@ -42,23 +42,23 @@ if sys.platform != "win32": def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: """Return a database object. - filename - The filename to open. - flags - How to open the file. "r" for reading, "w" for writing, etc. - mode - If creating a new file, the mode bits for the new file - (e.g. os.O_RDWR). 
-""" + filename + The filename to open. + flags + How to open the file. "r" for reading, "w" for writing, etc. + mode + If creating a new file, the mode bits for the new file + (e.g. os.O_RDWR). + """ else: def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: """Return a database object. - filename - The filename to open. - flags - How to open the file. "r" for reading, "w" for writing, etc. - mode - If creating a new file, the mode bits for the new file - (e.g. os.O_RDWR). -""" + filename + The filename to open. + flags + How to open the file. "r" for reading, "w" for writing, etc. + mode + If creating a new file, the mode bits for the new file + (e.g. os.O_RDWR). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi index e51c433f15889..bda849f6988e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_decimal.pyi @@ -1,5 +1,5 @@ -"""C decimal arithmetic module -""" +"""C decimal arithmetic module""" + import sys from decimal import ( Clamped as Clamped, @@ -47,13 +47,10 @@ if sys.version_info >= (3, 14): IEEE_CONTEXT_MAX_BITS: Final[int] def setcontext(context: Context, /) -> None: - """Set a new default context. + """Set a new default context.""" -""" def getcontext() -> Context: - """Get the current default context. - -""" + """Get the current default context.""" if sys.version_info >= (3, 11): def localcontext( @@ -69,28 +66,28 @@ if sys.version_info >= (3, 11): flags: dict[_TrapType, bool] | None = None, ) -> _ContextManager: """Return a context manager that will set the default context to a copy of ctx -on entry to the with-statement and restore the previous default context when -exiting the with-statement. If no context is specified, a copy of the current -default context is used. + on entry to the with-statement and restore the previous default context when + exiting the with-statement. If no context is specified, a copy of the current + default context is used. -""" + """ else: def localcontext(ctx: Context | None = None) -> _ContextManager: """Return a context manager that will set the default context to a copy of ctx -on entry to the with-statement and restore the previous default context when -exiting the with-statement. If no context is specified, a copy of the current -default context is used. + on entry to the with-statement and restore the previous default context when + exiting the with-statement. If no context is specified, a copy of the current + default context is used. -""" + """ if sys.version_info >= (3, 14): def IEEEContext(bits: int, /) -> Context: """Return a context object initialized to the proper values for one of the -IEEE interchange formats. The argument must be a multiple of 32 and less -than IEEE_CONTEXT_MAX_BITS. + IEEE interchange formats. The argument must be a multiple of 32 and less + than IEEE_CONTEXT_MAX_BITS. -""" + """ DefaultContext: Context BasicContext: Context diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi index 2c8f4b7d146f9..7ca4f5b4774c6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib.pyi @@ -6,6 +6,7 @@ such it requires the injection of specific modules and attributes in order to work. One should use importlib as the public-facing version of this module. 
""" + import importlib.abc import importlib.machinery import sys @@ -26,22 +27,23 @@ def __import__( ) -> ModuleType: """Import a module. -The 'globals' argument is used to infer where the import is occurring from -to handle relative imports. The 'locals' argument is ignored. The -'fromlist' argument specifies what should exist as attributes on the module -being imported (e.g. ``from module import ``). The 'level' -argument represents the package location to import from in a relative -import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). + The 'globals' argument is used to infer where the import is occurring from + to handle relative imports. The 'locals' argument is ignored. The + 'fromlist' argument specifies what should exist as attributes on the module + being imported (e.g. ``from module import ``). The 'level' + argument represents the package location to import from in a relative + import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). + + """ -""" def spec_from_loader( name: str, loader: LoaderProtocol | None, *, origin: str | None = None, is_package: bool | None = None ) -> importlib.machinery.ModuleSpec | None: - """Return a module spec based on various loader methods. -""" + """Return a module spec based on various loader methods.""" + def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: - """Create a module based on the provided spec. -""" + """Create a module based on the provided spec.""" + def _init_module_attrs( spec: importlib.machinery.ModuleSpec, module: types.ModuleType, *, override: bool = False ) -> types.ModuleType: ... @@ -49,39 +51,40 @@ def _init_module_attrs( class ModuleSpec: """The specification for a module, used for loading. -A module's spec is the source for information about the module. For -data associated with the module, including source, use the spec's -loader. + A module's spec is the source for information about the module. For + data associated with the module, including source, use the spec's + loader. -`name` is the absolute name of the module. `loader` is the loader -to use when loading the module. `parent` is the name of the -package the module is in. The parent is derived from the name. + `name` is the absolute name of the module. `loader` is the loader + to use when loading the module. `parent` is the name of the + package the module is in. The parent is derived from the name. -`is_package` determines if the module is considered a package or -not. On modules this is reflected by the `__path__` attribute. + `is_package` determines if the module is considered a package or + not. On modules this is reflected by the `__path__` attribute. -`origin` is the specific location used by the loader from which to -load the module, if that information is available. When filename is -set, origin will match. + `origin` is the specific location used by the loader from which to + load the module, if that information is available. When filename is + set, origin will match. -`has_location` indicates that a spec's "origin" reflects a location. -When this is True, `__file__` attribute of the module is set. + `has_location` indicates that a spec's "origin" reflects a location. + When this is True, `__file__` attribute of the module is set. -`cached` is the location of the cached bytecode file, if any. It -corresponds to the `__cached__` attribute. + `cached` is the location of the cached bytecode file, if any. It + corresponds to the `__cached__` attribute. 
-`submodule_search_locations` is the sequence of path entries to -search when importing submodules. If set, is_package should be -True--and False otherwise. + `submodule_search_locations` is the sequence of path entries to + search when importing submodules. If set, is_package should be + True--and False otherwise. -Packages are simply modules that (may) have submodules. If a spec -has a non-None value in `submodule_search_locations`, the import -system will consider modules loaded from the spec as packages. + Packages are simply modules that (may) have submodules. If a spec + has a non-None value in `submodule_search_locations`, the import + system will consider modules loaded from the spec as packages. -Only finders (see importlib.abc.MetaPathFinder and -importlib.abc.PathEntryFinder) should modify ModuleSpec instances. + Only finders (see importlib.abc.MetaPathFinder and + importlib.abc.PathEntryFinder) should modify ModuleSpec instances. + + """ -""" def __init__( self, name: str, @@ -99,8 +102,7 @@ importlib.abc.PathEntryFinder) should modify ModuleSpec instances. cached: str | None @property def parent(self) -> str | None: - """The name of the module's parent. -""" + """The name of the module's parent.""" has_location: bool def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -108,10 +110,11 @@ importlib.abc.PathEntryFinder) should modify ModuleSpec instances. class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): """Meta path import for built-in modules. -All methods are either class or static methods to avoid the need to -instantiate the class. + All methods are either class or static methods to avoid the need to + instantiate the class. + + """ -""" # MetaPathFinder if sys.version_info < (3, 12): @classmethod @@ -119,11 +122,11 @@ instantiate the class. def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: """Find the built-in module. - If 'path' is ever specified then the search is considered a failure. + If 'path' is ever specified then the search is considered a failure. - This method is deprecated. Use find_spec() instead. + This method is deprecated. Use find_spec() instead. - """ + """ @classmethod def find_spec( @@ -132,23 +135,23 @@ instantiate the class. # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: - """Return False as built-in modules are never packages. -""" + """Return False as built-in modules are never packages.""" + @classmethod def load_module(cls, fullname: str) -> types.ModuleType: """Load the specified module into sys.modules and return it. -This method is deprecated. Use loader.exec_module() instead. + This method is deprecated. Use loader.exec_module() instead. + + """ -""" @classmethod def get_code(cls, fullname: str) -> None: - """Return None as built-in modules do not have code objects. -""" + """Return None as built-in modules do not have code objects.""" + @classmethod def get_source(cls, fullname: str) -> None: - """Return None as built-in modules do not have source code. -""" + """Return None as built-in modules do not have source code.""" # Loader if sys.version_info < (3, 12): @staticmethod @@ -159,35 +162,34 @@ This method is deprecated. Use loader.exec_module() instead. def module_repr(module: types.ModuleType) -> str: """Return repr for the module. - The method is deprecated. The import machinery does the job itself. + The method is deprecated. The import machinery does the job itself. 
- """ + """ if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: - """Create a built-in module -""" + """Create a built-in module""" + @staticmethod def exec_module(module: types.ModuleType) -> None: - """Exec a built-in module -""" + """Exec a built-in module""" else: @classmethod def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: - """Create a built-in module -""" + """Create a built-in module""" + @classmethod def exec_module(cls, module: types.ModuleType) -> None: - """Exec a built-in module -""" + """Exec a built-in module""" class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): """Meta path import for frozen modules. -All methods are either class or static methods to avoid the need to -instantiate the class. + All methods are either class or static methods to avoid the need to + instantiate the class. + + """ -""" # MetaPathFinder if sys.version_info < (3, 12): @classmethod @@ -195,9 +197,9 @@ instantiate the class. def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: """Find a frozen module. - This method is deprecated. Use find_spec() instead. + This method is deprecated. Use find_spec() instead. - """ + """ @classmethod def find_spec( @@ -206,23 +208,23 @@ instantiate the class. # InspectLoader @classmethod def is_package(cls, fullname: str) -> bool: - """Return True if the frozen module is a package. -""" + """Return True if the frozen module is a package.""" + @classmethod def load_module(cls, fullname: str) -> types.ModuleType: """Load a frozen module. -This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. + + """ -""" @classmethod def get_code(cls, fullname: str) -> None: - """Return the code object for the frozen module. -""" + """Return the code object for the frozen module.""" + @classmethod def get_source(cls, fullname: str) -> None: - """Return None as frozen modules do not have source code. -""" + """Return None as frozen modules do not have source code.""" # Loader if sys.version_info < (3, 12): @staticmethod @@ -233,19 +235,17 @@ This method is deprecated. Use exec_module() instead. def module_repr(m: types.ModuleType) -> str: """Return repr for the module. - The method is deprecated. The import machinery does the job itself. + The method is deprecated. The import machinery does the job itself. - """ + """ if sys.version_info >= (3, 10): @staticmethod def create_module(spec: ModuleSpec) -> types.ModuleType | None: - """Set __file__, if able. -""" + """Set __file__, if able.""" else: @classmethod def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: - """Use default semantics for module creation. -""" + """Use default semantics for module creation.""" @staticmethod def exec_module(module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi index b157f74dcf1a6..455af653893ba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_frozen_importlib_external.pyi @@ -6,6 +6,7 @@ such it requires the injection of specific modules and attributes in order to work. One should use importlib as the public-facing version of this module. 
""" + import _ast import _io import importlib.abc @@ -37,35 +38,38 @@ MAGIC_NUMBER: Final[bytes] def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: """Given the path to a .py file, return the path to its .pyc file. -The .py file does not need to exist; this simply returns the path to the -.pyc file calculated as if the .py file were imported. + The .py file does not need to exist; this simply returns the path to the + .pyc file calculated as if the .py file were imported. -The 'optimization' parameter controls the presumed optimization level of -the bytecode file. If 'optimization' is not None, the string representation -of the argument is taken and verified to be alphanumeric (else ValueError -is raised). + The 'optimization' parameter controls the presumed optimization level of + the bytecode file. If 'optimization' is not None, the string representation + of the argument is taken and verified to be alphanumeric (else ValueError + is raised). -The debug_override parameter is deprecated. If debug_override is not None, -a True value is the same as setting 'optimization' to the empty string -while a False value is equivalent to setting 'optimization' to '1'. + The debug_override parameter is deprecated. If debug_override is not None, + a True value is the same as setting 'optimization' to the empty string + while a False value is equivalent to setting 'optimization' to '1'. -If sys.implementation.cache_tag is None then NotImplementedError is raised. + If sys.implementation.cache_tag is None then NotImplementedError is raised. + + """ -""" def source_from_cache(path: StrPath) -> str: """Given the path to a .pyc. file, return the path to its .py file. -The .pyc file does not need to exist; this simply returns the path to -the .py file calculated to correspond to the .pyc file. If path does -not conform to PEP 3147/488 format, ValueError will be raised. If -sys.implementation.cache_tag is None then NotImplementedError is raised. + The .pyc file does not need to exist; this simply returns the path to + the .py file calculated to correspond to the .pyc file. If path does + not conform to PEP 3147/488 format, ValueError will be raised. If + sys.implementation.cache_tag is None then NotImplementedError is raised. + + """ -""" def decode_source(source_bytes: ReadableBuffer) -> str: """Decode bytes representing source code and return the string. -Universal newline support is used in the decoding. -""" + Universal newline support is used in the decoding. + """ + def spec_from_file_location( name: str, location: StrOrBytesPath | None = None, @@ -75,30 +79,31 @@ def spec_from_file_location( ) -> importlib.machinery.ModuleSpec | None: """Return a module spec based on a file location. -To indicate that the module is a package, set -submodule_search_locations to a list of directory paths. An -empty list is sufficient, though its not otherwise useful to the -import system. + To indicate that the module is a package, set + submodule_search_locations to a list of directory paths. An + empty list is sufficient, though its not otherwise useful to the + import system. -The loader must take a spec as its only __init__() arg. + The loader must take a spec as its only __init__() arg. + + """ -""" @deprecated( "Deprecated since Python 3.6. Use site configuration instead. " "Future versions of Python may not enable this finder by default." 
) class WindowsRegistryFinder(importlib.abc.MetaPathFinder): - """Meta path finder for modules declared in the Windows registry. -""" + """Meta path finder for modules declared in the Windows registry.""" + if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: """Find module named in the registry. - This method is deprecated. Use find_spec() instead. + This method is deprecated. Use find_spec() instead. - """ + """ @classmethod def find_spec( @@ -106,42 +111,42 @@ class WindowsRegistryFinder(importlib.abc.MetaPathFinder): ) -> ModuleSpec | None: ... class PathFinder(importlib.abc.MetaPathFinder): - """Meta path finder for sys.path and package __path__ attributes. -""" + """Meta path finder for sys.path and package __path__ attributes.""" + if sys.version_info >= (3, 10): @staticmethod def invalidate_caches() -> None: """Call the invalidate_caches() method on all path entry finders -stored in sys.path_importer_cache (where implemented). -""" + stored in sys.path_importer_cache (where implemented). + """ else: @classmethod def invalidate_caches(cls) -> None: """Call the invalidate_caches() method on all path entry finders - stored in sys.path_importer_caches (where implemented). -""" + stored in sys.path_importer_caches (where implemented). + """ if sys.version_info >= (3, 10): @staticmethod def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: """ -Find distributions. + Find distributions. -Return an iterable of all Distribution instances capable of -loading the metadata for packages matching ``context.name`` -(or all names if ``None`` indicated) along the paths in the list -of directories ``context.path``. -""" + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ else: @classmethod def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: """ - Find distributions. + Find distributions. - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. - """ + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ @classmethod def find_spec( @@ -149,18 +154,18 @@ of directories ``context.path``. ) -> ModuleSpec | None: """Try to find a spec for 'fullname' on sys.path or 'path'. -The search is based on sys.path_hooks and sys.path_importer_cache. -""" + The search is based on sys.path_hooks and sys.path_importer_cache. + """ if sys.version_info < (3, 12): @classmethod @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: """find the module on sys.path or 'path' based on sys.path_hooks and - sys.path_importer_cache. + sys.path_importer_cache. - This method is deprecated. Use find_spec() instead. + This method is deprecated. Use find_spec() instead. 
- """ + """ SOURCE_SUFFIXES: Final[list[str]] DEBUG_BYTECODE_SUFFIXES: Final = [".pyc"] @@ -171,111 +176,120 @@ EXTENSION_SUFFIXES: Final[list[str]] class FileFinder(importlib.abc.PathEntryFinder): """File-based finder. -Interactions with the file system are cached for performance, being -refreshed when the directory the finder is handling has been modified. + Interactions with the file system are cached for performance, being + refreshed when the directory the finder is handling has been modified. + + """ -""" path: str def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: """Initialize with the path to search on and a variable number of -2-tuples containing the loader and the file suffixes the loader -recognizes. -""" + 2-tuples containing the loader and the file suffixes the loader + recognizes. + """ + @classmethod def path_hook( cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] ) -> Callable[[str], importlib.abc.PathEntryFinder]: """A class method which returns a closure to use on sys.path_hook -which will return an instance using the specified loaders and the path -called on the closure. + which will return an instance using the specified loaders and the path + called on the closure. -If the path called on the closure is not a directory, ImportError is -raised. + If the path called on the closure is not a directory, ImportError is + raised. -""" + """ class _LoaderBasics: """Base class of common code needed by both SourceLoader and -SourcelessFileLoader. -""" + SourcelessFileLoader. + """ + def is_package(self, fullname: str) -> bool: """Concrete implementation of InspectLoader.is_package by checking if -the path returned by get_filename has a filename of '__init__.py'. -""" + the path returned by get_filename has a filename of '__init__.py'. + """ + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: - """Use default semantics for module creation. -""" + """Use default semantics for module creation.""" + def exec_module(self, module: types.ModuleType) -> None: - """Execute the module. -""" + """Execute the module.""" + def load_module(self, fullname: str) -> types.ModuleType: - """This method is deprecated. -""" + """This method is deprecated.""" class SourceLoader(_LoaderBasics): def path_mtime(self, path: str) -> float: """Optional method that returns the modification time (an int) for the -specified path (a str). + specified path (a str). + + Raises OSError when the path cannot be handled. + """ -Raises OSError when the path cannot be handled. -""" def set_data(self, path: str, data: bytes) -> None: """Optional method which writes data (bytes) to a file path (a str). -Implementing this method allows for the writing of bytecode files. -""" + Implementing this method allows for the writing of bytecode files. + """ + def get_source(self, fullname: str) -> str | None: - """Concrete implementation of InspectLoader.get_source. -""" + """Concrete implementation of InspectLoader.get_source.""" + def path_stats(self, path: str) -> Mapping[str, Any]: """Optional method returning a metadata dict for the specified -path (a str). + path (a str). -Possible keys: -- 'mtime' (mandatory) is the numeric timestamp of last source - code modification; -- 'size' (optional) is the size in bytes of the source code. + Possible keys: + - 'mtime' (mandatory) is the numeric timestamp of last source + code modification; + - 'size' (optional) is the size in bytes of the source code. 
+ + Implementing this method allows the loader to read bytecode files. + Raises OSError when the path cannot be handled. + """ -Implementing this method allows the loader to read bytecode files. -Raises OSError when the path cannot be handled. -""" def source_to_code( self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath ) -> types.CodeType: """Return the code object compiled from source. -The 'data' argument can be any object type that compile() supports. -""" + The 'data' argument can be any object type that compile() supports. + """ + def get_code(self, fullname: str) -> types.CodeType | None: """Concrete implementation of InspectLoader.get_code. -Reading of bytecode requires path_stats to be implemented. To write -bytecode, set_data must also be implemented. + Reading of bytecode requires path_stats to be implemented. To write + bytecode, set_data must also be implemented. -""" + """ class FileLoader: """Base file loader class which implements the loader protocol methods that -require file system usage. -""" + require file system usage. + """ + name: str path: str def __init__(self, fullname: str, path: str) -> None: """Cache the module name and the path to the file found by the -finder. -""" + finder. + """ + def get_data(self, path: str) -> bytes: - """Return the data from path as raw bytes. -""" + """Return the data from path as raw bytes.""" + def get_filename(self, fullname: str | None = None) -> str: - """Return the path to the source file as found by the finder. -""" + """Return the path to the source file as found by the finder.""" + def load_module(self, fullname: str | None = None) -> types.ModuleType: """Load a module from a file. -This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. -""" + """ if sys.version_info >= (3, 10): def get_resource_reader(self, name: str | None = None) -> importlib.readers.FileReader: ... else: @@ -286,14 +300,14 @@ This method is deprecated. Use exec_module() instead. def contents(self) -> Iterator[str]: ... class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.SourceLoader, SourceLoader): # type: ignore[misc] # incompatible method arguments in base classes - """Concrete implementation of SourceLoader using the file system. -""" + """Concrete implementation of SourceLoader using the file system.""" + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: - """Write bytes data to a file. -""" + """Write bytes data to a file.""" + def path_stats(self, path: str) -> Mapping[str, Any]: - """Return the metadata for the path. -""" + """Return the metadata for the path.""" + def source_to_code( # type: ignore[override] # incompatible with InspectLoader.source_to_code self, data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, @@ -303,39 +317,39 @@ class SourceFileLoader(importlib.abc.FileLoader, FileLoader, importlib.abc.Sourc ) -> types.CodeType: """Return the code object compiled from source. -The 'data' argument can be any object type that compile() supports. -""" + The 'data' argument can be any object type that compile() supports. + """ class SourcelessFileLoader(importlib.abc.FileLoader, FileLoader, _LoaderBasics): - """Loader which handles sourceless file imports. -""" + """Loader which handles sourceless file imports.""" + def get_code(self, fullname: str) -> types.CodeType | None: ... 
def get_source(self, fullname: str) -> None: - """Return None as there is no source code. -""" + """Return None as there is no source code.""" class ExtensionFileLoader(FileLoader, _LoaderBasics, importlib.abc.ExecutionLoader): """Loader for extension modules. -The constructor is designed to work with FileFinder. + The constructor is designed to work with FileFinder. + + """ -""" def __init__(self, name: str, path: str) -> None: ... def get_filename(self, fullname: str | None = None) -> str: - """Return the path to the source file as found by the finder. -""" + """Return the path to the source file as found by the finder.""" + def get_source(self, fullname: str) -> None: - """Return None as extension modules have no source code. -""" + """Return None as extension modules have no source code.""" + def create_module(self, spec: ModuleSpec) -> types.ModuleType: - """Create an uninitialized extension module -""" + """Create an uninitialized extension module""" + def exec_module(self, module: types.ModuleType) -> None: - """Initialize an extension module -""" + """Initialize an extension module""" + def get_code(self, fullname: str) -> None: - """Return None as an extension module cannot create a code object. -""" + """Return None as an extension module cannot create a code object.""" + def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -348,16 +362,17 @@ if sys.version_info >= (3, 11): def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation. -""" + """Use default semantics for module creation.""" + def exec_module(self, module: types.ModuleType) -> None: ... @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") def load_module(self, fullname: str) -> types.ModuleType: """Load a namespace module. -This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. + + """ -""" def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... if sys.version_info < (3, 12): @staticmethod @@ -368,9 +383,9 @@ This method is deprecated. Use exec_module() instead. def module_repr(module: types.ModuleType) -> str: """Return repr for the module. - The method is deprecated. The import machinery does the job itself. + The method is deprecated. The import machinery does the job itself. - """ + """ _NamespaceLoader = NamespaceLoader else: @@ -382,17 +397,18 @@ else: def get_source(self, fullname: str) -> Literal[""]: ... def get_code(self, fullname: str) -> types.CodeType: ... def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation. -""" + """Use default semantics for module creation.""" + def exec_module(self, module: types.ModuleType) -> None: ... if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; will be removed in Python 3.15. Use `exec_module()` instead.") def load_module(self, fullname: str) -> types.ModuleType: """Load a namespace module. - This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. + + """ - """ @staticmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " @@ -401,17 +417,19 @@ else: def module_repr(module: types.ModuleType) -> str: """Return repr for the module. - The method is deprecated. The import machinery does the job itself. 
+ The method is deprecated. The import machinery does the job itself. + + """ - """ def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.NamespaceReader: ... else: def load_module(self, fullname: str) -> types.ModuleType: """Load a namespace module. - This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. + + """ - """ @classmethod @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " @@ -420,12 +438,12 @@ else: def module_repr(cls, module: types.ModuleType) -> str: """Return repr for the module. - The method is deprecated. The import machinery does the job itself. + The method is deprecated. The import machinery does the job itself. - """ + """ if sys.version_info >= (3, 13): class AppleFrameworkLoader(ExtensionFileLoader, importlib.abc.ExecutionLoader): """A loader for modules that have been packaged as frameworks for -compatibility with Apple's iOS App Store policies. -""" + compatibility with Apple's iOS App Store policies. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi index a05b39aaa0b63..58bd1d41d3588 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_gdbm.pyi @@ -9,6 +9,7 @@ values are always immutable bytes-like objects or strings. Printing a GDBM object doesn't print the keys and values, and the items() and values() methods are not supported. """ + import sys from _typeshed import ReadOnlyBuffer, StrOrBytesPath from types import TracebackType @@ -57,26 +58,26 @@ if sys.platform != "win32": def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: """Open a dbm database and return a dbm object. -The filename argument is the name of the database file. + The filename argument is the name of the database file. -The optional flags argument can be 'r' (to open an existing database -for reading only -- default), 'w' (to open an existing database for -reading and writing), 'c' (which creates the database if it doesn't -exist), or 'n' (which always creates a new empty database). + The optional flags argument can be 'r' (to open an existing database + for reading only -- default), 'w' (to open an existing database for + reading and writing), 'c' (which creates the database if it doesn't + exist), or 'n' (which always creates a new empty database). -Some versions of gdbm support additional flags which must be -appended to one of the flags described above. The module constant -'open_flags' is a string of valid additional flags. The 'f' flag -opens the database in fast mode; altered data will not automatically -be written to the disk after every change. This results in faster -writes to the database, but may result in an inconsistent database -if the program crashes while the database is still open. Use the -sync() method to force any unwritten data to be written to the disk. -The 's' flag causes all database operations to be synchronized to -disk. The 'u' flag disables locking of the database file. + Some versions of gdbm support additional flags which must be + appended to one of the flags described above. The module constant + 'open_flags' is a string of valid additional flags. The 'f' flag + opens the database in fast mode; altered data will not automatically + be written to the disk after every change. 
This results in faster + writes to the database, but may result in an inconsistent database + if the program crashes while the database is still open. Use the + sync() method to force any unwritten data to be written to the disk. + The 's' flag causes all database operations to be synchronized to + disk. The 'u' flag disables locking of the database file. -The optional mode argument is the Unix mode of the file, used only -when the database has to be created. It defaults to octal 0o666. -""" + The optional mode argument is the Unix mode of the file, used only + when the database has to be created. It defaults to octal 0o666. + """ else: def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi index 45ef7d3e00087..1dda9af2cbf3e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_hashlib.pyi @@ -1,5 +1,5 @@ -"""OpenSSL interface for hashlib module -""" +"""OpenSSL interface for hashlib module""" + import sys from _typeshed import ReadableBuffer from collections.abc import Callable @@ -28,18 +28,19 @@ class _HashObject(Protocol): class HASH: """A hash is an object used to calculate a checksum of a string of information. -Methods: + Methods: + + update() -- updates the current digest with an additional string + digest() -- return the current digest value + hexdigest() -- return the current digest as a string of hexadecimal digits + copy() -- return a copy of the current hash object -update() -- updates the current digest with an additional string -digest() -- return the current digest value -hexdigest() -- return the current digest as a string of hexadecimal digits -copy() -- return a copy of the current hash object + Attributes: -Attributes: + name -- the hash algorithm being used by this object + digest_size -- number of bytes in this hashes output + """ -name -- the hash algorithm being used by this object -digest_size -- number of bytes in this hashes output -""" @property def digest_size(self) -> int: ... @property @@ -47,17 +48,16 @@ digest_size -- number of bytes in this hashes output @property def name(self) -> str: ... def copy(self) -> Self: - """Return a copy of the hash object. -""" + """Return a copy of the hash object.""" + def digest(self) -> bytes: - """Return the digest value as a bytes object. -""" + """Return the digest value as a bytes object.""" + def hexdigest(self) -> str: - """Return the digest value as a string of hexadecimal digits. -""" + """Return the digest value as a string of hexadecimal digits.""" + def update(self, obj: ReadableBuffer, /) -> None: - """Update this hash object's state with the provided string. -""" + """Update this hash object's state with the provided string.""" if sys.version_info >= (3, 10): class UnsupportedDigestmodError(ValueError): ... @@ -65,41 +65,42 @@ if sys.version_info >= (3, 10): class HASHXOF(HASH): """A hash is an object used to calculate a checksum of a string of information. 
-Methods: + Methods: + + update() -- updates the current digest with an additional string + digest(length) -- return the current digest value + hexdigest(length) -- return the current digest as a string of hexadecimal digits + copy() -- return a copy of the current hash object -update() -- updates the current digest with an additional string -digest(length) -- return the current digest value -hexdigest(length) -- return the current digest as a string of hexadecimal digits -copy() -- return a copy of the current hash object + Attributes: -Attributes: + name -- the hash algorithm being used by this object + digest_size -- number of bytes in this hashes output + """ -name -- the hash algorithm being used by this object -digest_size -- number of bytes in this hashes output -""" def digest(self, length: int) -> bytes: # type: ignore[override] - """Return the digest value as a bytes object. -""" + """Return the digest value as a bytes object.""" + def hexdigest(self, length: int) -> str: # type: ignore[override] - """Return the digest value as a string of hexadecimal digits. -""" + """Return the digest value as a string of hexadecimal digits.""" @final class HMAC: """The object used to calculate HMAC of a message. -Methods: + Methods: -update() -- updates the current digest with an additional string -digest() -- return the current digest value -hexdigest() -- return the current digest as a string of hexadecimal digits -copy() -- return a copy of the current hash object + update() -- updates the current digest with an additional string + digest() -- return the current digest value + hexdigest() -- return the current digest as a string of hexadecimal digits + copy() -- return a copy of the current hash object -Attributes: + Attributes: + + name -- the name, including the hash algorithm used by this object + digest_size -- number of bytes in digest() output + """ -name -- the name, including the hash algorithm used by this object -digest_size -- number of bytes in digest() output -""" @property def digest_size(self) -> int: ... @property @@ -107,179 +108,163 @@ digest_size -- number of bytes in digest() output @property def name(self) -> str: ... def copy(self) -> Self: - """Return a copy ("clone") of the HMAC object. -""" + """Return a copy ("clone") of the HMAC object.""" + def digest(self) -> bytes: - """Return the digest of the bytes passed to the update() method so far. -""" + """Return the digest of the bytes passed to the update() method so far.""" + def hexdigest(self) -> str: """Return hexadecimal digest of the bytes passed to the update() method so far. -This may be used to exchange the value safely in email or other non-binary -environments. -""" + This may be used to exchange the value safely in email or other non-binary + environments. + """ + def update(self, msg: ReadableBuffer) -> None: - """Update the HMAC object with msg. -""" + """Update the HMAC object with msg.""" @overload def compare_digest(a: ReadableBuffer, b: ReadableBuffer, /) -> bool: """Return 'a == b'. -This function uses an approach designed to prevent -timing analysis, making it appropriate for cryptography. + This function uses an approach designed to prevent + timing analysis, making it appropriate for cryptography. -a and b must both be of the same type: either str (ASCII only), -or any bytes-like object. + a and b must both be of the same type: either str (ASCII only), + or any bytes-like object. 
+ + Note: If a and b are of different lengths, or if an error occurs, + a timing attack could theoretically reveal information about the + types and lengths of a and b--but not their values. + """ -Note: If a and b are of different lengths, or if an error occurs, -a timing attack could theoretically reveal information about the -types and lengths of a and b--but not their values. -""" @overload def compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ... def get_fips_mode() -> int: """Determine the OpenSSL FIPS mode of operation. -For OpenSSL 3.0.0 and newer it returns the state of the default provider -in the default OSSL context. It's not quite the same as FIPS_mode() but good -enough for unittests. + For OpenSSL 3.0.0 and newer it returns the state of the default provider + in the default OSSL context. It's not quite the same as FIPS_mode() but good + enough for unittests. + + Effectively any non-zero return value indicates FIPS mode; + values other than 1 may have additional significance. + """ -Effectively any non-zero return value indicates FIPS mode; -values other than 1 may have additional significance. -""" def hmac_new(key: bytes | bytearray, msg: ReadableBuffer = b"", digestmod: _DigestMod = None) -> HMAC: - """Return a new hmac object. -""" + """Return a new hmac object.""" if sys.version_info >= (3, 13): - def new( - name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: + def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: """Return a new hash object using the named algorithm. -An optional string argument may be provided and will be -automatically hashed. + An optional string argument may be provided and will be + automatically hashed. + + The MD5 and SHA1 algorithms are always supported. + """ + + def openssl_md5(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a md5 hash object; optionally initialized with a string""" + + def openssl_sha1(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a sha1 hash object; optionally initialized with a string""" + + def openssl_sha224(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a sha224 hash object; optionally initialized with a string""" + + def openssl_sha256(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a sha256 hash object; optionally initialized with a string""" + + def openssl_sha384(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a sha384 hash object; optionally initialized with a string""" + + def openssl_sha512(data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None) -> HASH: + """Returns a sha512 hash object; optionally initialized with a string""" -The MD5 and SHA1 algorithms are always supported. 
-""" - def openssl_md5( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a md5 hash object; optionally initialized with a string -""" - def openssl_sha1( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha1 hash object; optionally initialized with a string -""" - def openssl_sha224( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha224 hash object; optionally initialized with a string -""" - def openssl_sha256( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha256 hash object; optionally initialized with a string -""" - def openssl_sha384( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha384 hash object; optionally initialized with a string -""" - def openssl_sha512( - data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None - ) -> HASH: - """Returns a sha512 hash object; optionally initialized with a string -""" def openssl_sha3_224( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASH: - """Returns a sha3-224 hash object; optionally initialized with a string -""" + """Returns a sha3-224 hash object; optionally initialized with a string""" + def openssl_sha3_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASH: - """Returns a sha3-256 hash object; optionally initialized with a string -""" + """Returns a sha3-256 hash object; optionally initialized with a string""" + def openssl_sha3_384( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASH: - """Returns a sha3-384 hash object; optionally initialized with a string -""" + """Returns a sha3-384 hash object; optionally initialized with a string""" + def openssl_sha3_512( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASH: - """Returns a sha3-512 hash object; optionally initialized with a string -""" + """Returns a sha3-512 hash object; optionally initialized with a string""" + def openssl_shake_128( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASHXOF: - """Returns a shake-128 variable hash object; optionally initialized with a string -""" + """Returns a shake-128 variable hash object; optionally initialized with a string""" + def openssl_shake_256( data: ReadableBuffer = b"", *, usedforsecurity: bool = True, string: ReadableBuffer | None = None ) -> HASHXOF: - """Returns a shake-256 variable hash object; optionally initialized with a string -""" + """Returns a shake-256 variable hash object; optionally initialized with a string""" else: def new(name: str, string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: """Return a new hash object using the named algorithm. -An optional string argument may be provided and will be -automatically hashed. + An optional string argument may be provided and will be + automatically hashed. + + The MD5 and SHA1 algorithms are always supported. + """ -The MD5 and SHA1 algorithms are always supported. 
-""" def openssl_md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a md5 hash object; optionally initialized with a string -""" + """Returns a md5 hash object; optionally initialized with a string""" + def openssl_sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha1 hash object; optionally initialized with a string -""" + """Returns a sha1 hash object; optionally initialized with a string""" + def openssl_sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha224 hash object; optionally initialized with a string -""" + """Returns a sha224 hash object; optionally initialized with a string""" + def openssl_sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha256 hash object; optionally initialized with a string -""" + """Returns a sha256 hash object; optionally initialized with a string""" + def openssl_sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha384 hash object; optionally initialized with a string -""" + """Returns a sha384 hash object; optionally initialized with a string""" + def openssl_sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha512 hash object; optionally initialized with a string -""" + """Returns a sha512 hash object; optionally initialized with a string""" + def openssl_sha3_224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-224 hash object; optionally initialized with a string -""" + """Returns a sha3-224 hash object; optionally initialized with a string""" + def openssl_sha3_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-256 hash object; optionally initialized with a string -""" + """Returns a sha3-256 hash object; optionally initialized with a string""" + def openssl_sha3_384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-384 hash object; optionally initialized with a string -""" + """Returns a sha3-384 hash object; optionally initialized with a string""" + def openssl_sha3_512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASH: - """Returns a sha3-512 hash object; optionally initialized with a string -""" + """Returns a sha3-512 hash object; optionally initialized with a string""" + def openssl_shake_128(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: - """Returns a shake-128 variable hash object; optionally initialized with a string -""" + """Returns a shake-128 variable hash object; optionally initialized with a string""" + def openssl_shake_256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> HASHXOF: - """Returns a shake-256 variable hash object; optionally initialized with a string -""" + """Returns a shake-256 variable hash object; optionally initialized with a string""" def hmac_digest(key: bytes | bytearray, msg: ReadableBuffer, digest: str) -> bytes: - """Single-shot HMAC. -""" + """Single-shot HMAC.""" + def pbkdf2_hmac( hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None ) -> bytes: - """Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as pseudorandom function. 
-""" -def scrypt( - password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64 -) -> bytes: - """scrypt password-based key derivation function. -""" + """Password based key derivation function 2 (PKCS #5 v2.0) with HMAC as pseudorandom function.""" + +def scrypt(password: ReadableBuffer, *, salt: ReadableBuffer, n: int, r: int, p: int, maxmem: int = 0, dklen: int = 64) -> bytes: + """scrypt password-based key derivation function.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi index 618451d739ec2..07a2e751c5d61 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_heapq.pyi @@ -27,6 +27,7 @@ These two make it possible to view the heap as a regular Python list without surprises: heap[0] is the smallest item, and heap.sort() maintains the heap invariant! """ + import sys from _typeshed import SupportsRichComparisonT as _T # All type variable use in this module requires comparability. from typing import Final @@ -34,48 +35,49 @@ from typing import Final __about__: Final[str] def heapify(heap: list[_T], /) -> None: - """Transform list into a heap, in-place, in O(len(heap)) time. -""" + """Transform list into a heap, in-place, in O(len(heap)) time.""" + def heappop(heap: list[_T], /) -> _T: - """Pop the smallest item off the heap, maintaining the heap invariant. -""" + """Pop the smallest item off the heap, maintaining the heap invariant.""" + def heappush(heap: list[_T], item: _T, /) -> None: - """Push item onto heap, maintaining the heap invariant. -""" + """Push item onto heap, maintaining the heap invariant.""" + def heappushpop(heap: list[_T], item: _T, /) -> _T: """Push item on the heap, then pop and return the smallest item from the heap. -The combined action runs more efficiently than heappush() followed by -a separate call to heappop(). -""" + The combined action runs more efficiently than heappush() followed by + a separate call to heappop(). + """ + def heapreplace(heap: list[_T], item: _T, /) -> _T: """Pop and return the current smallest value, and add the new item. -This is more efficient than heappop() followed by heappush(), and can be -more appropriate when using a fixed-size heap. Note that the value -returned may be larger than item! That constrains reasonable uses of -this routine unless written as part of a conditional replacement: + This is more efficient than heappop() followed by heappush(), and can be + more appropriate when using a fixed-size heap. Note that the value + returned may be larger than item! That constrains reasonable uses of + this routine unless written as part of a conditional replacement: - if item > heap[0]: - item = heapreplace(heap, item) -""" + if item > heap[0]: + item = heapreplace(heap, item) + """ if sys.version_info >= (3, 14): def heapify_max(heap: list[_T], /) -> None: - """Maxheap variant of heapify. -""" + """Maxheap variant of heapify.""" + def heappop_max(heap: list[_T], /) -> _T: - """Maxheap variant of heappop. -""" + """Maxheap variant of heappop.""" + def heappush_max(heap: list[_T], item: _T, /) -> None: - """Push item onto max heap, maintaining the heap invariant. -""" + """Push item onto max heap, maintaining the heap invariant.""" + def heappushpop_max(heap: list[_T], item: _T, /) -> _T: """Maxheap variant of heappushpop. -The combined action runs more efficiently than heappush_max() followed by -a separate call to heappop_max(). 
-""" + The combined action runs more efficiently than heappush_max() followed by + a separate call to heappop_max(). + """ + def heapreplace_max(heap: list[_T], item: _T, /) -> _T: - """Maxheap variant of heapreplace. -""" + """Maxheap variant of heapreplace.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi index 58ee36affd6ec..1f47d2ec56aa4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_imp.pyi @@ -1,5 +1,5 @@ -"""(Extremely) low-level import machinery bits as used by importlib. -""" +"""(Extremely) low-level import machinery bits as used by importlib.""" + import sys import types from _typeshed import ReadableBuffer @@ -12,66 +12,67 @@ if sys.version_info >= (3, 14): def source_hash(key: int, source: ReadableBuffer) -> bytes: ... def create_builtin(spec: ModuleSpec, /) -> types.ModuleType: - """Create an extension module. -""" + """Create an extension module.""" + def create_dynamic(spec: ModuleSpec, file: Any = None, /) -> types.ModuleType: - """Create an extension module. -""" + """Create an extension module.""" + def acquire_lock() -> None: """Acquires the interpreter's import lock for the current thread. -This lock should be used by import hooks to ensure thread-safety when importing -modules. On platforms without threads, this function does nothing. -""" + This lock should be used by import hooks to ensure thread-safety when importing + modules. On platforms without threads, this function does nothing. + """ + def exec_builtin(mod: types.ModuleType, /) -> int: - """Initialize a built-in module. -""" + """Initialize a built-in module.""" + def exec_dynamic(mod: types.ModuleType, /) -> int: - """Initialize an extension module. -""" + """Initialize an extension module.""" + def extension_suffixes() -> list[str]: - """Returns the list of file suffixes used to identify extension modules. -""" + """Returns the list of file suffixes used to identify extension modules.""" + def init_frozen(name: str, /) -> types.ModuleType: - """Initializes a frozen module. -""" + """Initializes a frozen module.""" + def is_builtin(name: str, /) -> int: - """Returns True if the module name corresponds to a built-in module. -""" + """Returns True if the module name corresponds to a built-in module.""" + def is_frozen(name: str, /) -> bool: - """Returns True if the module name corresponds to a frozen module. -""" + """Returns True if the module name corresponds to a frozen module.""" + def is_frozen_package(name: str, /) -> bool: - """Returns True if the module name is of a frozen package. -""" + """Returns True if the module name is of a frozen package.""" + def lock_held() -> bool: """Return True if the import lock is currently held, else False. -On platforms without threads, return False. -""" + On platforms without threads, return False. + """ + def release_lock() -> None: """Release the interpreter's import lock. -On platforms without threads, this function does nothing. -""" + On platforms without threads, this function does nothing. + """ if sys.version_info >= (3, 11): def find_frozen(name: str, /, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: """Return info about the corresponding frozen module (if there is one) or None. 
-The returned info (a 2-tuple): + The returned info (a 2-tuple): + + * data the raw marshalled bytes + * is_package whether or not it is a package + * origname the originally frozen module's name, or None if not + a stdlib module (this will usually be the same as + the module's current name) + """ - * data the raw marshalled bytes - * is_package whether or not it is a package - * origname the originally frozen module's name, or None if not - a stdlib module (this will usually be the same as - the module's current name) -""" def get_frozen_object(name: str, data: ReadableBuffer | None = None, /) -> types.CodeType: - """Create a code object for a frozen module. -""" + """Create a code object for a frozen module.""" else: def get_frozen_object(name: str, /) -> types.CodeType: - """Create a code object for a frozen module. -""" + """Create a code object for a frozen module.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi index ad78f8f718b4f..987ca4566c1a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpchannels.pyi @@ -1,6 +1,7 @@ """This module provides primitive operations to manage Python interpreters. The 'interpreters' module provides a more convenient interface. """ + from _typeshed import structseq from typing import Any, Final, Literal, SupportsIndex, final from typing_extensions import Buffer, Self @@ -14,30 +15,30 @@ class ChannelNotFoundError(ChannelError): ... # Mark as final, since instantiating ChannelID is not supported. @final class ChannelID: - """A channel ID identifies a channel and may be used as an int. -""" + """A channel ID identifies a channel and may be used as an int.""" + @property def end(self) -> Literal["send", "recv", "both"]: - """'send', 'recv', or 'both' -""" + """'send', 'recv', or 'both'""" + @property def send(self) -> Self: - """the 'send' end of the channel -""" + """the 'send' end of the channel""" + @property def recv(self) -> Self: - """the 'recv' end of the channel -""" + """the 'recv' end of the channel""" + def __eq__(self, other: object, /) -> bool: ... def __ge__(self, other: ChannelID, /) -> bool: ... def __gt__(self, other: ChannelID, /) -> bool: ... def __hash__(self) -> int: ... def __index__(self) -> int: - """Return self converted to an integer, if self is suitable for use as an index into a list. -""" + """Return self converted to an integer, if self is suitable for use as an index into a list.""" + def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __le__(self, other: ChannelID, /) -> bool: ... def __lt__(self, other: ChannelID, /) -> bool: ... def __ne__(self, other: object, /) -> bool: ... @@ -46,8 +47,9 @@ class ChannelID: class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, int]): """ChannelInfo -A named tuple of a channel's state. -""" + A named tuple of a channel's state. + """ + __match_args__: Final = ( "open", "closing", @@ -60,159 +62,169 @@ A named tuple of a channel's state. 
) @property def open(self) -> bool: - """both ends are open -""" + """both ends are open""" + @property def closing(self) -> bool: - """send is closed, recv is non-empty -""" + """send is closed, recv is non-empty""" + @property def closed(self) -> bool: - """both ends are closed -""" + """both ends are closed""" + @property def count(self) -> int: # type: ignore[override] - """queued objects -""" + """queued objects""" + @property def num_interp_send(self) -> int: - """interpreters bound to the send end -""" + """interpreters bound to the send end""" + @property def num_interp_send_released(self) -> int: - """interpreters bound to the send end and released -""" + """interpreters bound to the send end and released""" + @property def num_interp_recv(self) -> int: - """interpreters bound to the send end -""" + """interpreters bound to the send end""" + @property def num_interp_recv_released(self) -> int: - """interpreters bound to the send end and released -""" + """interpreters bound to the send end and released""" + @property def num_interp_both(self) -> int: - """interpreters bound to both ends -""" + """interpreters bound to both ends""" + @property def num_interp_both_recv_released(self) -> int: - """interpreters bound to both ends and released_from_the recv end -""" + """interpreters bound to both ends and released_from_the recv end""" + @property def num_interp_both_send_released(self) -> int: - """interpreters bound to both ends and released_from_the send end -""" + """interpreters bound to both ends and released_from_the send end""" + @property def num_interp_both_released(self) -> int: - """interpreters bound to both ends and released_from_both -""" + """interpreters bound to both ends and released_from_both""" + @property def recv_associated(self) -> bool: - """current interpreter is bound to the recv end -""" + """current interpreter is bound to the recv end""" + @property def recv_released(self) -> bool: - """current interpreter *was* bound to the recv end -""" + """current interpreter *was* bound to the recv end""" + @property def send_associated(self) -> bool: - """current interpreter is bound to the send end -""" + """current interpreter is bound to the send end""" + @property def send_released(self) -> bool: - """current interpreter *was* bound to the send end -""" + """current interpreter *was* bound to the send end""" def create(unboundop: Literal[1, 2, 3]) -> ChannelID: """channel_create(unboundop) -> cid -Create a new cross-interpreter channel and return a unique generated ID. -""" + Create a new cross-interpreter channel and return a unique generated ID. + """ + def destroy(cid: SupportsIndex) -> None: """channel_destroy(cid) -Close and finalize the channel. Afterward attempts to use the channel -will behave as though it never existed. -""" + Close and finalize the channel. Afterward attempts to use the channel + will behave as though it never existed. + """ + def list_all() -> list[ChannelID]: """channel_list_all() -> [cid] -Return the list of all IDs for active channels. -""" + Return the list of all IDs for active channels. + """ + def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: """channel_list_interpreters(cid, *, send) -> [id] -Return the list of all interpreter IDs associated with an end of the channel. + Return the list of all interpreter IDs associated with an end of the channel. + + The 'send' argument should be a boolean indicating whether to use the send or + receive end. 
+ """ -The 'send' argument should be a boolean indicating whether to use the send or -receive end. -""" def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: """channel_send(cid, obj, *, blocking=True, timeout=None) -Add the object's data to the channel's queue. -By default this waits for the object to be received. -""" + Add the object's data to the channel's queue. + By default this waits for the object to be received. + """ + def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: """channel_send_buffer(cid, obj, *, blocking=True, timeout=None) -Add the object's buffer to the channel's queue. -By default this waits for the object to be received. -""" + Add the object's buffer to the channel's queue. + By default this waits for the object to be received. + """ + def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: """channel_recv(cid, [default]) -> (obj, unboundop) -Return a new object from the data at the front of the channel's queue. + Return a new object from the data at the front of the channel's queue. + + If there is nothing to receive then raise ChannelEmptyError, unless + a default value is provided. In that case return it. + """ -If there is nothing to receive then raise ChannelEmptyError, unless -a default value is provided. In that case return it. -""" def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: """channel_close(cid, *, send=None, recv=None, force=False) -Close the channel for all interpreters. + Close the channel for all interpreters. -If the channel is empty then the keyword args are ignored and both -ends are immediately closed. Otherwise, if 'force' is True then -all queued items are released and both ends are immediately -closed. + If the channel is empty then the keyword args are ignored and both + ends are immediately closed. Otherwise, if 'force' is True then + all queued items are released and both ends are immediately + closed. -If the channel is not empty *and* 'force' is False then following -happens: + If the channel is not empty *and* 'force' is False then following + happens: - * recv is True (regardless of send): - - raise ChannelNotEmptyError - * recv is None and send is None: - - raise ChannelNotEmptyError - * send is True and recv is not True: - - fully close the 'send' end - - close the 'recv' end to interpreters not already receiving - - fully close it once empty + * recv is True (regardless of send): + - raise ChannelNotEmptyError + * recv is None and send is None: + - raise ChannelNotEmptyError + * send is True and recv is not True: + - fully close the 'send' end + - close the 'recv' end to interpreters not already receiving + - fully close it once empty -Closing an already closed channel results in a ChannelClosedError. + Closing an already closed channel results in a ChannelClosedError. + + Once the channel's ID has no more ref counts in any interpreter + the channel will be destroyed. + """ -Once the channel's ID has no more ref counts in any interpreter -the channel will be destroyed. -""" def get_count(cid: SupportsIndex) -> int: """get_count(cid) -Return the number of items in the channel. -""" + Return the number of items in the channel. + """ + def get_info(cid: SupportsIndex) -> ChannelInfo: """get_info(cid) -Return details about the channel. -""" + Return details about the channel. 
+ """ + def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: """get_channel_defaults(cid) -Return the channel's default values, set when it was created. -""" + Return the channel's default values, set when it was created. + """ + def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: """channel_release(cid, *, send=None, recv=None, force=True) -Close the channel for the current interpreter. 'send' and 'recv' -(bool) may be used to indicate the ends to close. By default both -ends are closed. Closing an already closed end is a noop. -""" + Close the channel for the current interpreter. 'send' and 'recv' + (bool) may be used to indicate the ends to close. By default both + ends are closed. Closing an already closed end is a noop. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi index 42271acfb4aa8..3402f6bbb1247 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpqueues.pyi @@ -1,79 +1,90 @@ """This module provides primitive operations to manage Python interpreters. The 'interpreters' module provides a more convenient interface. """ + from typing import Any, Literal, SupportsIndex from typing_extensions import TypeAlias _UnboundOp: TypeAlias = Literal[1, 2, 3] class QueueError(RuntimeError): - """Indicates that a queue-related error happened. -""" + """Indicates that a queue-related error happened.""" + class QueueNotFoundError(QueueError): ... def bind(qid: SupportsIndex) -> None: """bind(qid) -Take a reference to the identified queue. -The queue is not destroyed until there are no references left. -""" + Take a reference to the identified queue. + The queue is not destroyed until there are no references left. + """ + def create(maxsize: SupportsIndex, fmt: SupportsIndex, unboundop: _UnboundOp) -> int: """create(maxsize, unboundop, fallback) -> qid -Create a new cross-interpreter queue and return its unique generated ID. -It is a new reference as though bind() had been called on the queue. + Create a new cross-interpreter queue and return its unique generated ID. + It is a new reference as though bind() had been called on the queue. + + The caller is responsible for calling destroy() for the new queue + before the runtime is finalized. + """ -The caller is responsible for calling destroy() for the new queue -before the runtime is finalized. -""" def destroy(qid: SupportsIndex) -> None: """destroy(qid) -Clear and destroy the queue. Afterward attempts to use the queue -will behave as though it never existed. -""" + Clear and destroy the queue. Afterward attempts to use the queue + will behave as though it never existed. + """ + def get(qid: SupportsIndex) -> tuple[Any, int, _UnboundOp | None]: """get(qid) -> (obj, unboundop) -Return a new object from the data at the front of the queue. -The unbound op is also returned. + Return a new object from the data at the front of the queue. + The unbound op is also returned. + + If there is nothing to receive then raise QueueEmpty. + """ -If there is nothing to receive then raise QueueEmpty. -""" def get_count(qid: SupportsIndex) -> int: """get_count(qid) -Return the number of items in the queue. -""" + Return the number of items in the queue. + """ + def get_maxsize(qid: SupportsIndex) -> int: """get_maxsize(qid) -Return the maximum number of items in the queue. -""" + Return the maximum number of items in the queue. 
+ """ + def get_queue_defaults(qid: SupportsIndex) -> tuple[int, _UnboundOp]: """get_queue_defaults(qid) -Return the queue's default values, set when it was created. -""" + Return the queue's default values, set when it was created. + """ + def is_full(qid: SupportsIndex) -> bool: """is_full(qid) -Return true if the queue has a maxsize and has reached it. -""" + Return true if the queue has a maxsize and has reached it. + """ + def list_all() -> list[tuple[int, int, _UnboundOp]]: """list_all() -> [(qid, unboundop, fallback)] -Return the list of IDs for all queues. -Each corresponding default unbound op and fallback is also included. -""" + Return the list of IDs for all queues. + Each corresponding default unbound op and fallback is also included. + """ + def put(qid: SupportsIndex, obj: Any, fmt: SupportsIndex, unboundop: _UnboundOp) -> None: """put(qid, obj) -Add the object's data to the queue. -""" + Add the object's data to the queue. + """ + def release(qid: SupportsIndex) -> None: """release(qid) -Release a reference to the queue. -The queue is destroyed once there are no references left. -""" + Release a reference to the queue. + The queue is destroyed once there are no references left. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi index 88e6315a8f742..958ae497f1ac8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_interpreters.pyi @@ -1,6 +1,7 @@ """This module provides primitive operations to manage Python interpreters. The 'interpreters' module provides a more convenient interface. """ + import types from collections.abc import Callable from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload @@ -12,101 +13,110 @@ _Configs: TypeAlias = Literal["default", "isolated", "legacy", "empty", ""] _SharedDict: TypeAlias = dict[str, Any] # many objects can be shared class InterpreterError(Exception): - """A cross-interpreter operation failed -""" + """A cross-interpreter operation failed""" + class InterpreterNotFoundError(InterpreterError): - """An interpreter was not found -""" + """An interpreter was not found""" + class NotShareableError(ValueError): ... @disjoint_base class CrossInterpreterBufferView: def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" def new_config(name: _Configs = "isolated", /, **overides: object) -> types.SimpleNamespace: """new_config(name='isolated', /, **overrides) -> type.SimpleNamespace -Return a representation of a new PyInterpreterConfig. + Return a representation of a new PyInterpreterConfig. -The name determines the initial values of the config. Supported named -configs are: default, isolated, legacy, and empty. + The name determines the initial values of the config. Supported named + configs are: default, isolated, legacy, and empty. + + Any keyword arguments are set on the corresponding config fields, + overriding the initial values. + """ -Any keyword arguments are set on the corresponding config fields, -overriding the initial values. -""" def create(config: types.SimpleNamespace | _Configs | None = "isolated", *, reqrefs: bool = False) -> int: """create([config], *, reqrefs=False) -> ID -Create a new interpreter and return a unique generated ID. + Create a new interpreter and return a unique generated ID. 
-The caller is responsible for destroying the interpreter before exiting, -typically by using _interpreters.destroy(). This can be managed -automatically by passing "reqrefs=True" and then using _incref() and -_decref() appropriately. + The caller is responsible for destroying the interpreter before exiting, + typically by using _interpreters.destroy(). This can be managed + automatically by passing "reqrefs=True" and then using _incref() and + _decref() appropriately. + + "config" must be a valid interpreter config or the name of a + predefined config ("isolated" or "legacy"). The default + is "isolated". + """ -"config" must be a valid interpreter config or the name of a -predefined config ("isolated" or "legacy"). The default -is "isolated". -""" def destroy(id: SupportsIndex, *, restrict: bool = False) -> None: """destroy(id, *, restrict=False) -Destroy the identified interpreter. + Destroy the identified interpreter. + + Attempting to destroy the current interpreter raises InterpreterError. + So does an unrecognized ID. + """ -Attempting to destroy the current interpreter raises InterpreterError. -So does an unrecognized ID. -""" def list_all(*, require_ready: bool = False) -> list[tuple[int, _Whence]]: """list_all() -> [(ID, whence)] -Return a list containing the ID of every existing interpreter. -""" + Return a list containing the ID of every existing interpreter. + """ + def get_current() -> tuple[int, _Whence]: """get_current() -> (ID, whence) -Return the ID of current interpreter. -""" + Return the ID of current interpreter. + """ + def get_main() -> tuple[int, _Whence]: """get_main() -> (ID, whence) -Return the ID of main interpreter. -""" + Return the ID of main interpreter. + """ + def is_running(id: SupportsIndex, *, restrict: bool = False) -> bool: """is_running(id, *, restrict=False) -> bool -Return whether or not the identified interpreter is running. -""" + Return whether or not the identified interpreter is running. + """ + def get_config(id: SupportsIndex, *, restrict: bool = False) -> types.SimpleNamespace: """get_config(id, *, restrict=False) -> types.SimpleNamespace -Return a representation of the config used to initialize the interpreter. -""" + Return a representation of the config used to initialize the interpreter. + """ + def whence(id: SupportsIndex) -> _Whence: """whence(id) -> int -Return an identifier for where the interpreter was created. -""" + Return an identifier for where the interpreter was created. + """ + def exec( id: SupportsIndex, code: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False ) -> None | types.SimpleNamespace: """exec(id, code, shared=None, *, restrict=False) -Execute the provided code in the identified interpreter. -This is equivalent to running the builtin exec() under the target -interpreter, using the __dict__ of its __main__ module as both -globals and locals. + Execute the provided code in the identified interpreter. + This is equivalent to running the builtin exec() under the target + interpreter, using the __dict__ of its __main__ module as both + globals and locals. -"code" may be a string containing the text of a Python script. + "code" may be a string containing the text of a Python script. -Functions (and code objects) are also supported, with some restrictions. -The code/function must not take any arguments or be a closure -(i.e. have cell vars). Methods and other callables are not supported. + Functions (and code objects) are also supported, with some restrictions. 
+ The code/function must not take any arguments or be a closure + (i.e. have cell vars). Methods and other callables are not supported. + + If a function is provided, its code object is used and all its state + is ignored, including its __globals__ dict. + """ -If a function is provided, its code object is used and all its state -is ignored, including its __globals__ dict. -""" def call( id: SupportsIndex, callable: Callable[..., _R], @@ -118,51 +128,57 @@ def call( ) -> tuple[_R, types.SimpleNamespace]: """call(id, callable, args=None, kwargs=None, *, restrict=False) -Call the provided object in the identified interpreter. -Pass the given args and kwargs, if possible. -""" + Call the provided object in the identified interpreter. + Pass the given args and kwargs, if possible. + """ + def run_string( id: SupportsIndex, script: str | types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False ) -> None: """run_string(id, script, shared=None, *, restrict=False) -Execute the provided string in the identified interpreter. + Execute the provided string in the identified interpreter. + + (See _interpreters.exec(). + """ -(See _interpreters.exec(). -""" def run_func( id: SupportsIndex, func: types.CodeType | Callable[[], object], shared: _SharedDict = {}, *, restrict: bool = False ) -> None: """run_func(id, func, shared=None, *, restrict=False) -Execute the body of the provided function in the identified interpreter. -Code objects are also supported. In both cases, closures and args -are not supported. Methods and other callables are not supported either. + Execute the body of the provided function in the identified interpreter. + Code objects are also supported. In both cases, closures and args + are not supported. Methods and other callables are not supported either. + + (See _interpreters.exec(). + """ -(See _interpreters.exec(). -""" def set___main___attrs(id: SupportsIndex, updates: _SharedDict, *, restrict: bool = False) -> None: """set___main___attrs(id, ns, *, restrict=False) -Bind the given attributes in the interpreter's __main__ module. -""" + Bind the given attributes in the interpreter's __main__ module. + """ + def incref(id: SupportsIndex, *, implieslink: bool = False, restrict: bool = False) -> None: ... def decref(id: SupportsIndex, *, restrict: bool = False) -> None: ... def is_shareable(obj: object) -> bool: """is_shareable(obj) -> bool -Return True if the object's data may be shared between interpreters and -False otherwise. -""" + Return True if the object's data may be shared between interpreters and + False otherwise. + """ + @overload def capture_exception(exc: BaseException) -> types.SimpleNamespace: """capture_exception(exc=None) -> types.SimpleNamespace -Return a snapshot of an exception. If "exc" is None -then the current exception, if any, is used (but not cleared). + Return a snapshot of an exception. If "exc" is None + then the current exception, if any, is used (but not cleared). + + The returned snapshot is the same as what _interpreters.exec() returns. + """ -The returned snapshot is the same as what _interpreters.exec() returns. -""" @overload def capture_exception(exc: None = None) -> types.SimpleNamespace | None: ... 
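# --- Illustrative example (not part of the vendored typeshed stubs) ---
# A minimal usage sketch of the low-level `_interpreters` functions annotated in
# the hunks above, assuming a CPython 3.13+ build that ships the private
# `_interpreters` module; the create()/run_string()/destroy() signatures used here
# are the ones declared in the stub, not an independently verified public API.
import _interpreters

interp_id = _interpreters.create()  # returns a unique int ID for a new isolated interpreter
try:
    # run_string() executes the script in the target interpreter, using the
    # __dict__ of its __main__ module as both globals and locals
    _interpreters.run_string(interp_id, "print('hello from a subinterpreter')")
finally:
    _interpreters.destroy(interp_id)  # the caller is responsible for destroying the interpreter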
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi index 2bca099f80b49..e63f1d485689e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_io.pyi @@ -31,6 +31,7 @@ DEFAULT_BUFFER_SIZE An int containing the default buffer size used by the module's buffered I/O classes. """ + import builtins import codecs import sys @@ -54,9 +55,9 @@ open = builtins.open def open_code(path: str) -> IO[bytes]: """Opens the provided file with the intent to import the contents. -This may perform extra validation beyond open(), but is otherwise interchangeable -with calling open(path, 'rb'). -""" + This may perform extra validation beyond open(), but is otherwise interchangeable + with calling open(path, 'rb'). + """ BlockingIOError = builtins.BlockingIOError @@ -65,40 +66,41 @@ if sys.version_info >= (3, 12): class _IOBase: """The abstract base class for all I/O classes. -This class provides dummy implementations for many methods that -derived classes can override selectively; the default implementations -represent a file that cannot be read, written or seeked. + This class provides dummy implementations for many methods that + derived classes can override selectively; the default implementations + represent a file that cannot be read, written or seeked. -Even though IOBase does not declare read, readinto, or write because -their signatures will vary, implementations and clients should -consider those methods part of the interface. Also, implementations -may raise UnsupportedOperation when operations they do not support are -called. + Even though IOBase does not declare read, readinto, or write because + their signatures will vary, implementations and clients should + consider those methods part of the interface. Also, implementations + may raise UnsupportedOperation when operations they do not support are + called. -The basic type used for binary data read from or written to a file is -bytes. Other bytes-like objects are accepted as method arguments too. -In some cases (such as readinto), a writable object is required. Text -I/O classes work with str data. + The basic type used for binary data read from or written to a file is + bytes. Other bytes-like objects are accepted as method arguments too. + In some cases (such as readinto), a writable object is required. Text + I/O classes work with str data. -Note that calling any method (except additional calls to close(), -which are ignored) on a closed stream should raise a ValueError. + Note that calling any method (except additional calls to close(), + which are ignored) on a closed stream should raise a ValueError. -IOBase (and its subclasses) support the iterator protocol, meaning -that an IOBase object can be iterated over yielding the lines in a -stream. + IOBase (and its subclasses) support the iterator protocol, meaning + that an IOBase object can be iterated over yielding the lines in a + stream. -IOBase also supports the :keyword:`with` statement. In this example, -fp is closed after the suite of the with statement is complete: + IOBase also supports the :keyword:`with` statement. In this example, + fp is closed after the suite of the with statement is complete: + + with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') + """ -with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') -""" def __iter__(self) -> Iterator[bytes]: - """Implement iter(self). 
-""" + """Implement iter(self).""" + def __next__(self) -> bytes: - """Implement next(self). -""" + """Implement next(self).""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -106,92 +108,102 @@ with open('spam.txt', 'r') as fp: def close(self) -> None: """Flush and close the IO object. -This method has no effect if the file is already closed. -""" + This method has no effect if the file is already closed. + """ + def fileno(self) -> int: """Return underlying file descriptor if one exists. -Raise OSError if the IO object does not use a file descriptor. -""" + Raise OSError if the IO object does not use a file descriptor. + """ + def flush(self) -> None: """Flush write buffers, if applicable. -This is not implemented for read-only and non-blocking streams. -""" + This is not implemented for read-only and non-blocking streams. + """ + def isatty(self) -> bool: """Return whether this is an 'interactive' stream. -Return False if it can't be determined. -""" + Return False if it can't be determined. + """ + def readable(self) -> bool: """Return whether object was opened for reading. -If False, read() will raise OSError. -""" + If False, read() will raise OSError. + """ read: Callable[..., Any] def readlines(self, hint: int = -1, /) -> list[bytes]: """Return a list of lines from the stream. -hint can be specified to control the number of lines read: no more -lines will be read if the total size (in bytes/characters) of all -lines so far exceeds hint. -""" + hint can be specified to control the number of lines read: no more + lines will be read if the total size (in bytes/characters) of all + lines so far exceeds hint. + """ + def seek(self, offset: int, whence: int = 0, /) -> int: """Change the stream position to the given byte offset. - offset - The stream position, relative to 'whence'. - whence - The relative position to seek from. + offset + The stream position, relative to 'whence'. + whence + The relative position to seek from. -The offset is interpreted relative to the position indicated by whence. -Values for whence are: + The offset is interpreted relative to the position indicated by whence. + Values for whence are: -* os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive -* os.SEEK_CUR or 1 -- current stream position; offset may be negative -* os.SEEK_END or 2 -- end of stream; offset is usually negative + * os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive + * os.SEEK_CUR or 1 -- current stream position; offset may be negative + * os.SEEK_END or 2 -- end of stream; offset is usually negative + + Return the new absolute position. + """ -Return the new absolute position. -""" def seekable(self) -> bool: """Return whether object supports random access. -If False, seek(), tell() and truncate() will raise OSError. -This method may need to do a test seek(). -""" + If False, seek(), tell() and truncate() will raise OSError. + This method may need to do a test seek(). + """ + def tell(self) -> int: - """Return current stream position. -""" + """Return current stream position.""" + def truncate(self, size: int | None = None, /) -> int: """Truncate file to size bytes. -File pointer is left unchanged. Size defaults to the current IO position -as reported by tell(). Return the new size. -""" + File pointer is left unchanged. Size defaults to the current IO position + as reported by tell(). Return the new size. 
+ """ + def writable(self) -> bool: """Return whether object was opened for writing. -If False, write() will raise OSError. -""" + If False, write() will raise OSError. + """ write: Callable[..., Any] def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: """Write a list of lines to stream. -Line separators are not added, so it is usual for each of the -lines provided to have a line separator at the end. -""" + Line separators are not added, so it is usual for each of the + lines provided to have a line separator at the end. + """ + def readline(self, size: int | None = -1, /) -> bytes: """Read and return a line from the stream. -If size is specified, at most size bytes will be read. + If size is specified, at most size bytes will be read. + + The line terminator is always b'\\n' for binary files; for text + files, the newlines argument to open can be used to select the line + terminator(s) recognized. + """ -The line terminator is always b'\\n' for binary files; for text -files, the newlines argument to open can be used to select the line -terminator(s) recognized. -""" def __del__(self) -> None: - """Called when the instance is about to be destroyed. -""" + """Called when the instance is about to be destroyed.""" + @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented @@ -200,40 +212,41 @@ else: class _IOBase: """The abstract base class for all I/O classes. -This class provides dummy implementations for many methods that -derived classes can override selectively; the default implementations -represent a file that cannot be read, written or seeked. + This class provides dummy implementations for many methods that + derived classes can override selectively; the default implementations + represent a file that cannot be read, written or seeked. -Even though IOBase does not declare read, readinto, or write because -their signatures will vary, implementations and clients should -consider those methods part of the interface. Also, implementations -may raise UnsupportedOperation when operations they do not support are -called. + Even though IOBase does not declare read, readinto, or write because + their signatures will vary, implementations and clients should + consider those methods part of the interface. Also, implementations + may raise UnsupportedOperation when operations they do not support are + called. -The basic type used for binary data read from or written to a file is -bytes. Other bytes-like objects are accepted as method arguments too. -In some cases (such as readinto), a writable object is required. Text -I/O classes work with str data. + The basic type used for binary data read from or written to a file is + bytes. Other bytes-like objects are accepted as method arguments too. + In some cases (such as readinto), a writable object is required. Text + I/O classes work with str data. -Note that calling any method (except additional calls to close(), -which are ignored) on a closed stream should raise a ValueError. + Note that calling any method (except additional calls to close(), + which are ignored) on a closed stream should raise a ValueError. -IOBase (and its subclasses) support the iterator protocol, meaning -that an IOBase object can be iterated over yielding the lines in a -stream. + IOBase (and its subclasses) support the iterator protocol, meaning + that an IOBase object can be iterated over yielding the lines in a + stream. -IOBase also supports the :keyword:`with` statement. 
In this example, -fp is closed after the suite of the with statement is complete: + IOBase also supports the :keyword:`with` statement. In this example, + fp is closed after the suite of the with statement is complete: + + with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') + """ -with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') -""" def __iter__(self) -> Iterator[bytes]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> bytes: - """Implement next(self). -""" + """Implement next(self).""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -241,100 +254,109 @@ with open('spam.txt', 'r') as fp: def close(self) -> None: """Flush and close the IO object. -This method has no effect if the file is already closed. -""" + This method has no effect if the file is already closed. + """ + def fileno(self) -> int: """Returns underlying file descriptor if one exists. -OSError is raised if the IO object does not use a file descriptor. -""" + OSError is raised if the IO object does not use a file descriptor. + """ + def flush(self) -> None: """Flush write buffers, if applicable. -This is not implemented for read-only and non-blocking streams. -""" + This is not implemented for read-only and non-blocking streams. + """ + def isatty(self) -> bool: """Return whether this is an 'interactive' stream. -Return False if it can't be determined. -""" + Return False if it can't be determined. + """ + def readable(self) -> bool: """Return whether object was opened for reading. -If False, read() will raise OSError. -""" + If False, read() will raise OSError. + """ read: Callable[..., Any] def readlines(self, hint: int = -1, /) -> list[bytes]: """Return a list of lines from the stream. -hint can be specified to control the number of lines read: no more -lines will be read if the total size (in bytes/characters) of all -lines so far exceeds hint. -""" + hint can be specified to control the number of lines read: no more + lines will be read if the total size (in bytes/characters) of all + lines so far exceeds hint. + """ + def seek(self, offset: int, whence: int = 0, /) -> int: """Change the stream position to the given byte offset. - offset - The stream position, relative to 'whence'. - whence - The relative position to seek from. + offset + The stream position, relative to 'whence'. + whence + The relative position to seek from. -The offset is interpreted relative to the position indicated by whence. -Values for whence are: + The offset is interpreted relative to the position indicated by whence. + Values for whence are: -* os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive -* os.SEEK_CUR or 1 -- current stream position; offset may be negative -* os.SEEK_END or 2 -- end of stream; offset is usually negative + * os.SEEK_SET or 0 -- start of stream (the default); offset should be zero or positive + * os.SEEK_CUR or 1 -- current stream position; offset may be negative + * os.SEEK_END or 2 -- end of stream; offset is usually negative + + Return the new absolute position. + """ -Return the new absolute position. -""" def seekable(self) -> bool: """Return whether object supports random access. -If False, seek(), tell() and truncate() will raise OSError. -This method may need to do a test seek(). -""" + If False, seek(), tell() and truncate() will raise OSError. + This method may need to do a test seek(). 
+ """ + def tell(self) -> int: - """Return current stream position. -""" + """Return current stream position.""" + def truncate(self, size: int | None = None, /) -> int: """Truncate file to size bytes. -File pointer is left unchanged. Size defaults to the current IO -position as reported by tell(). Returns the new size. -""" + File pointer is left unchanged. Size defaults to the current IO + position as reported by tell(). Returns the new size. + """ + def writable(self) -> bool: """Return whether object was opened for writing. -If False, write() will raise OSError. -""" + If False, write() will raise OSError. + """ write: Callable[..., Any] def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: """Write a list of lines to stream. -Line separators are not added, so it is usual for each of the -lines provided to have a line separator at the end. -""" + Line separators are not added, so it is usual for each of the + lines provided to have a line separator at the end. + """ + def readline(self, size: int | None = -1, /) -> bytes: """Read and return a line from the stream. -If size is specified, at most size bytes will be read. + If size is specified, at most size bytes will be read. + + The line terminator is always b'\\n' for binary files; for text + files, the newlines argument to open can be used to select the line + terminator(s) recognized. + """ -The line terminator is always b'\\n' for binary files; for text -files, the newlines argument to open can be used to select the line -terminator(s) recognized. -""" def __del__(self) -> None: ... @property def closed(self) -> bool: ... def _checkClosed(self) -> None: ... # undocumented class _RawIOBase(_IOBase): - """Base class for raw binary I/O. -""" + """Base class for raw binary I/O.""" + def readall(self) -> bytes: - """Read until EOF, using multiple read() call. -""" + """Read until EOF, using multiple read() call.""" # The following methods can return None if the file is in non-blocking mode # and no data is available. def readinto(self, buffer: WriteableBuffer, /) -> int | MaybeNone: ... @@ -344,76 +366,81 @@ class _RawIOBase(_IOBase): class _BufferedIOBase(_IOBase): """Base class for buffered IO objects. -The main difference with RawIOBase is that the read() method -supports omitting the size argument, and does not have a default -implementation that defers to readinto(). + The main difference with RawIOBase is that the read() method + supports omitting the size argument, and does not have a default + implementation that defers to readinto(). -In addition, read(), readinto() and write() may raise -BlockingIOError if the underlying raw stream is in non-blocking -mode and not ready; unlike their raw counterparts, they will never -return None. + In addition, read(), readinto() and write() may raise + BlockingIOError if the underlying raw stream is in non-blocking + mode and not ready; unlike their raw counterparts, they will never + return None. + + A typical implementation should not inherit from a RawIOBase + implementation, but wrap one. + """ -A typical implementation should not inherit from a RawIOBase -implementation, but wrap one. -""" def detach(self) -> RawIOBase: """Disconnect this buffer from its underlying raw stream and return it. -After the raw stream has been detached, the buffer is in an unusable -state. -""" + After the raw stream has been detached, the buffer is in an unusable + state. + """ + def readinto(self, buffer: WriteableBuffer, /) -> int: ... 
def write(self, buffer: ReadableBuffer, /) -> int: """Write buffer b to the IO stream. -Return the number of bytes written, which is always -the length of b in bytes. + Return the number of bytes written, which is always + the length of b in bytes. + + Raise BlockingIOError if the buffer is full and the + underlying raw stream cannot accept more data at the moment. + """ -Raise BlockingIOError if the buffer is full and the -underlying raw stream cannot accept more data at the moment. -""" def readinto1(self, buffer: WriteableBuffer, /) -> int: ... def read(self, size: int | None = -1, /) -> bytes: """Read and return up to n bytes. -If the size argument is omitted, None, or negative, read and -return all data until EOF. + If the size argument is omitted, None, or negative, read and + return all data until EOF. -If the size argument is positive, and the underlying raw stream is -not 'interactive', multiple raw reads may be issued to satisfy -the byte count (unless EOF is reached first). -However, for interactive raw streams (as well as sockets and pipes), -at most one raw read will be issued, and a short result does not -imply that EOF is imminent. + If the size argument is positive, and the underlying raw stream is + not 'interactive', multiple raw reads may be issued to satisfy + the byte count (unless EOF is reached first). + However, for interactive raw streams (as well as sockets and pipes), + at most one raw read will be issued, and a short result does not + imply that EOF is imminent. -Return an empty bytes object on EOF. + Return an empty bytes object on EOF. + + Return None if the underlying raw stream was open in non-blocking + mode and no data is available at the moment. + """ -Return None if the underlying raw stream was open in non-blocking -mode and no data is available at the moment. -""" def read1(self, size: int = -1, /) -> bytes: """Read and return up to size bytes, with at most one read() call to the underlying raw stream. -Return an empty bytes object on EOF. -A short result does not imply that EOF is imminent. -""" + Return an empty bytes object on EOF. + A short result does not imply that EOF is imminent. + """ @disjoint_base class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes """Open a file. -The mode can be 'r' (default), 'w', 'x' or 'a' for reading, -writing, exclusive creation or appending. The file will be created if it -doesn't exist when opened for writing or appending; it will be truncated -when opened for writing. A FileExistsError will be raised if it already -exists when opened for creating. Opening a file for creating implies -writing so this mode behaves in a similar way to 'w'.Add a '+' to the mode -to allow simultaneous reading and writing. A custom opener can be used by -passing a callable as *opener*. The underlying file descriptor for the file -object is then obtained by calling opener with (*name*, *flags*). -*opener* must return an open file descriptor (passing os.open as *opener* -results in functionality similar to passing None). -""" + The mode can be 'r' (default), 'w', 'x' or 'a' for reading, + writing, exclusive creation or appending. The file will be created if it + doesn't exist when opened for writing or appending; it will be truncated + when opened for writing. A FileExistsError will be raised if it already + exists when opened for creating. 
Opening a file for creating implies + writing so this mode behaves in a similar way to 'w'.Add a '+' to the mode + to allow simultaneous reading and writing. A custom opener can be used by + passing a callable as *opener*. The underlying file descriptor for the file + object is then obtained by calling opener with (*name*, *flags*). + *opener* must return an open file descriptor (passing os.open as *opener* + results in functionality similar to passing None). + """ + mode: str # The type of "name" equals the argument passed in to the constructor, # but that can make FileIO incompatible with other I/O types that assume @@ -424,69 +451,72 @@ results in functionality similar to passing None). ) -> None: ... @property def closefd(self) -> bool: - """True if the file descriptor will be closed by close(). -""" + """True if the file descriptor will be closed by close().""" + def seek(self, pos: int, whence: int = 0, /) -> int: """Move to new file position and return the file position. -Argument offset is a byte count. Optional argument whence defaults to -SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values -are SEEK_CUR or 1 (move relative to current position, positive or negative), -and SEEK_END or 2 (move relative to end of file, usually negative, although -many platforms allow seeking beyond the end of a file). + Argument offset is a byte count. Optional argument whence defaults to + SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values + are SEEK_CUR or 1 (move relative to current position, positive or negative), + and SEEK_END or 2 (move relative to end of file, usually negative, although + many platforms allow seeking beyond the end of a file). + + Note that not all file objects are seekable. + """ -Note that not all file objects are seekable. -""" def read(self, size: int | None = -1, /) -> bytes | MaybeNone: """Read at most size bytes, returned as bytes. -If size is less than 0, read all bytes in the file making multiple read calls. -See ``FileIO.readall``. + If size is less than 0, read all bytes in the file making multiple read calls. + See ``FileIO.readall``. -Attempts to make only one system call, retrying only per PEP 475 (EINTR). This -means less data may be returned than requested. + Attempts to make only one system call, retrying only per PEP 475 (EINTR). This + means less data may be returned than requested. -In non-blocking mode, returns None if no data is available. Return an empty -bytes object at EOF. -""" + In non-blocking mode, returns None if no data is available. Return an empty + bytes object at EOF. + """ @disjoint_base class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes - """Buffered I/O implementation using an in-memory bytes buffer. -""" + """Buffered I/O implementation using an in-memory bytes buffer.""" + def __init__(self, initial_bytes: ReadableBuffer = b"") -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> bytes: - """Retrieve the entire contents of the BytesIO object. -""" + """Retrieve the entire contents of the BytesIO object.""" + def getbuffer(self) -> memoryview: - """Get a read-write view over the contents of the BytesIO object. 
-""" + """Get a read-write view over the contents of the BytesIO object.""" + def read1(self, size: int | None = -1, /) -> bytes: """Read at most size bytes, returned as a bytes object. -If the size argument is negative or omitted, read until EOF is reached. -Return an empty bytes object at EOF. -""" + If the size argument is negative or omitted, read until EOF is reached. + Return an empty bytes object at EOF. + """ + def readlines(self, size: int | None = None, /) -> list[bytes]: """List of bytes objects, each a line from the file. -Call readline() repeatedly and return a list of the lines so read. -The optional size argument, if given, is an approximate bound on the -total number of bytes in the lines returned. -""" + Call readline() repeatedly and return a list of the lines so read. + The optional size argument, if given, is an approximate bound on the + total number of bytes in the lines returned. + """ + def seek(self, pos: int, whence: int = 0, /) -> int: """Change stream position. -Seek to byte offset pos relative to position indicated by whence: - 0 Start of stream (the default). pos should be >= 0; - 1 Current position - pos may be negative; - 2 End of stream - pos usually negative. -Returns the new absolute position. -""" + Seek to byte offset pos relative to position indicated by whence: + 0 Start of stream (the default). pos should be >= 0; + 1 Current position - pos may be negative; + 2 End of stream - pos usually negative. + Returns the new absolute position. + """ @type_check_only class _BufferedReaderStream(Protocol): @@ -518,8 +548,8 @@ _BufferedReaderStreamT = TypeVar("_BufferedReaderStreamT", bound=_BufferedReader @disjoint_base class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_BufferedReaderStreamT]): # type: ignore[misc] # incompatible definitions of methods in the base classes - """Create a new buffered reader using the given readable raw IO object. -""" + """Create a new buffered reader using the given readable raw IO object.""" + raw: _BufferedReaderStreamT if sys.version_info >= (3, 14): def __init__(self, raw: _BufferedReaderStreamT, buffer_size: int = 131072) -> None: ... @@ -534,10 +564,11 @@ class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO, Generic[_Buffere class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes """A buffer for a writeable sequential RawIO object. -The constructor creates a BufferedWriter for the given writeable raw -stream. If the buffer_size is not given, it defaults to -DEFAULT_BUFFER_SIZE. -""" + The constructor creates a BufferedWriter for the given writeable raw + stream. If the buffer_size is not given, it defaults to + DEFAULT_BUFFER_SIZE. + """ + raw: RawIOBase if sys.version_info >= (3, 14): def __init__(self, raw: RawIOBase, buffer_size: int = 131072) -> None: ... @@ -552,10 +583,11 @@ DEFAULT_BUFFER_SIZE. class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes """A buffered interface to random access streams. -The constructor creates a reader and writer for a seekable stream, -raw, given in the first argument. If the buffer_size is omitted it -defaults to DEFAULT_BUFFER_SIZE. -""" + The constructor creates a reader and writer for a seekable stream, + raw, given in the first argument. If the buffer_size is omitted it + defaults to DEFAULT_BUFFER_SIZE. 
+ """ + mode: str name: Any raw: RawIOBase @@ -572,14 +604,15 @@ defaults to DEFAULT_BUFFER_SIZE. class BufferedRWPair(BufferedIOBase, _BufferedIOBase, Generic[_BufferedReaderStreamT]): """A buffered reader and writer object together. -A buffered reader object and buffered writer object put together to -form a sequential IO object that can read and write. This is typically -used with a socket or two-way pipe. + A buffered reader object and buffered writer object put together to + form a sequential IO object that can read and write. This is typically + used with a socket or two-way pipe. + + reader and writer are RawIOBase objects that are readable and + writeable respectively. If the buffer_size is omitted it defaults to + DEFAULT_BUFFER_SIZE. + """ -reader and writer are RawIOBase objects that are readable and -writeable respectively. If the buffer_size is omitted it defaults to -DEFAULT_BUFFER_SIZE. -""" if sys.version_info >= (3, 14): def __init__(self, reader: _BufferedReaderStreamT, writer: RawIOBase, buffer_size: int = 131072, /) -> None: ... else: @@ -590,55 +623,61 @@ DEFAULT_BUFFER_SIZE. class _TextIOBase(_IOBase): """Base class for text I/O. -This class provides a character and line based interface to stream -I/O. There is no readinto method because Python's character strings -are immutable. -""" + This class provides a character and line based interface to stream + I/O. There is no readinto method because Python's character strings + are immutable. + """ + encoding: str errors: str | None newlines: str | tuple[str, ...] | None def __iter__(self) -> Iterator[str]: # type: ignore[override] - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> str: # type: ignore[override] - """Implement next(self). -""" + """Implement next(self).""" + def detach(self) -> BinaryIO: """Separate the underlying buffer from the TextIOBase and return it. -After the underlying buffer has been detached, the TextIO is in an unusable state. -""" + After the underlying buffer has been detached, the TextIO is in an unusable state. + """ + def write(self, s: str, /) -> int: """Write string s to stream. -Return the number of characters written -(which is always equal to the length of the string). -""" + Return the number of characters written + (which is always equal to the length of the string). + """ + def writelines(self, lines: Iterable[str], /) -> None: # type: ignore[override] """Write a list of lines to stream. -Line separators are not added, so it is usual for each of the -lines provided to have a line separator at the end. -""" + Line separators are not added, so it is usual for each of the + lines provided to have a line separator at the end. + """ + def readline(self, size: int = -1, /) -> str: # type: ignore[override] """Read until newline or EOF. -Return an empty string if EOF is hit immediately. -If size is specified, at most size characters will be read. -""" + Return an empty string if EOF is hit immediately. + If size is specified, at most size characters will be read. + """ + def readlines(self, hint: int = -1, /) -> list[str]: # type: ignore[override] """Return a list of lines from the stream. -hint can be specified to control the number of lines read: no more -lines will be read if the total size (in bytes/characters) of all -lines so far exceeds hint. -""" + hint can be specified to control the number of lines read: no more + lines will be read if the total size (in bytes/characters) of all + lines so far exceeds hint. 
+ """ + def read(self, size: int | None = -1, /) -> str: """Read at most size characters from stream. -Read from underlying buffer until we have size characters or we hit EOF. -If size is negative or omitted, read until EOF. -""" + Read from underlying buffer until we have size characters or we hit EOF. + If size is negative or omitted, read until EOF. + """ @type_check_only class _WrappedBuffer(Protocol): @@ -669,33 +708,34 @@ _BufferT_co = TypeVar("_BufferT_co", bound=_WrappedBuffer, default=_WrappedBuffe class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # type: ignore[misc] # incompatible definitions of write in the base classes """Character and line based layer over a BufferedIOBase object, buffer. -encoding gives the name of the encoding that the stream will be -decoded or encoded with. It defaults to locale.getencoding(). + encoding gives the name of the encoding that the stream will be + decoded or encoded with. It defaults to locale.getencoding(). -errors determines the strictness of encoding and decoding (see -help(codecs.Codec) or the documentation for codecs.register) and -defaults to "strict". + errors determines the strictness of encoding and decoding (see + help(codecs.Codec) or the documentation for codecs.register) and + defaults to "strict". -newline controls how line endings are handled. It can be None, '', -'\\n', '\\r', and '\\r\\n'. It works as follows: + newline controls how line endings are handled. It can be None, '', + '\\n', '\\r', and '\\r\\n'. It works as follows: -* On input, if newline is None, universal newlines mode is - enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and - these are translated into '\\n' before being returned to the - caller. If it is '', universal newline mode is enabled, but line - endings are returned to the caller untranslated. If it has any of - the other legal values, input lines are only terminated by the given - string, and the line ending is returned to the caller untranslated. + * On input, if newline is None, universal newlines mode is + enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and + these are translated into '\\n' before being returned to the + caller. If it is '', universal newline mode is enabled, but line + endings are returned to the caller untranslated. If it has any of + the other legal values, input lines are only terminated by the given + string, and the line ending is returned to the caller untranslated. -* On output, if newline is None, any '\\n' characters written are - translated to the system default line separator, os.linesep. If - newline is '' or '\\n', no translation takes place. If newline is any - of the other legal values, any '\\n' characters written are translated - to the given string. + * On output, if newline is None, any '\\n' characters written are + translated to the system default line separator, os.linesep. If + newline is '' or '\\n', no translation takes place. If newline is any + of the other legal values, any '\\n' characters written are translated + to the given string. + + If line_buffering is True, a call to flush is implied when a call to + write contains a newline character. + """ -If line_buffering is True, a call to flush is implied when a call to -write contains a newline character. -""" def __init__( self, buffer: _BufferT_co, @@ -723,8 +763,9 @@ write contains a newline character. ) -> None: """Reconfigure the text stream with new parameters. -This also does an implicit stream flush. 
-""" + This also does an implicit stream flush. + """ + def readline(self, size: int = -1, /) -> str: ... # type: ignore[override] # Equals the "buffer" argument passed in to the constructor. def detach(self) -> _BufferT_co: ... # type: ignore[override] @@ -733,70 +774,74 @@ This also does an implicit stream flush. def seek(self, cookie: int, whence: int = 0, /) -> int: """Set the stream position, and return the new stream position. - cookie - Zero or an opaque number returned by tell(). - whence - The relative position to seek from. + cookie + Zero or an opaque number returned by tell(). + whence + The relative position to seek from. -Four operations are supported, given by the following argument -combinations: + Four operations are supported, given by the following argument + combinations: -- seek(0, SEEK_SET): Rewind to the start of the stream. -- seek(cookie, SEEK_SET): Restore a previous position; - 'cookie' must be a number returned by tell(). -- seek(0, SEEK_END): Fast-forward to the end of the stream. -- seek(0, SEEK_CUR): Leave the current stream position unchanged. + - seek(0, SEEK_SET): Rewind to the start of the stream. + - seek(cookie, SEEK_SET): Restore a previous position; + 'cookie' must be a number returned by tell(). + - seek(0, SEEK_END): Fast-forward to the end of the stream. + - seek(0, SEEK_CUR): Leave the current stream position unchanged. + + Any other argument combinations are invalid, + and may raise exceptions. + """ -Any other argument combinations are invalid, -and may raise exceptions. -""" def truncate(self, pos: int | None = None, /) -> int: ... @disjoint_base class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes """Text I/O implementation using an in-memory buffer. -The initial_value argument sets the value of object. The newline -argument is like the one of TextIOWrapper's constructor. -""" + The initial_value argument sets the value of object. The newline + argument is like the one of TextIOWrapper's constructor. + """ + def __init__(self, initial_value: str | None = "", newline: str | None = "\n") -> None: ... # StringIO does not contain a "name" field. This workaround is necessary # to allow StringIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any def getvalue(self) -> str: - """Retrieve the entire contents of the object. -""" + """Retrieve the entire contents of the object.""" + @property def line_buffering(self) -> bool: ... def seek(self, pos: int, whence: int = 0, /) -> int: """Change stream position. -Seek to character offset pos relative to position indicated by whence: - 0 Start of stream (the default). pos should be >= 0; - 1 Current position - pos must be 0; - 2 End of stream - pos must be 0. -Returns the new absolute position. -""" + Seek to character offset pos relative to position indicated by whence: + 0 Start of stream (the default). pos should be >= 0; + 1 Current position - pos must be 0; + 2 End of stream - pos must be 0. + Returns the new absolute position. + """ + def truncate(self, pos: int | None = None, /) -> int: """Truncate size to pos. -The pos argument defaults to the current file position, as -returned by tell(). The current file position is unchanged. -Returns the new absolute position. -""" + The pos argument defaults to the current file position, as + returned by tell(). The current file position is unchanged. + Returns the new absolute position. 
+ """ @disjoint_base class IncrementalNewlineDecoder: """Codec used when reading a file in universal newlines mode. -It wraps another incremental decoder, translating \\r\\n and \\r into \\n. -It also records the types of newlines encountered. When used with -translate=False, it ensures that the newline sequence is returned in -one piece. When used with decoder=None, it expects unicode strings as -decode input and translates newlines without first invoking an external -decoder. -""" + It wraps another incremental decoder, translating \\r\\n and \\r into \\n. + It also records the types of newlines encountered. When used with + translate=False, it ensures that the newline sequence is returned in + one piece. When used with decoder=None, it expects unicode strings as + decode input and translates newlines without first invoking an external + decoder. + """ + def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = "strict") -> None: ... def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... @property @@ -810,15 +855,16 @@ if sys.version_info >= (3, 10): def text_encoding(encoding: None, stacklevel: int = 2, /) -> Literal["locale", "utf-8"]: """A helper function to choose the text encoding. -When encoding is not None, this function returns it. -Otherwise, this function returns the default text encoding -(i.e. "locale" or "utf-8" depends on UTF-8 mode). + When encoding is not None, this function returns it. + Otherwise, this function returns the default text encoding + (i.e. "locale" or "utf-8" depends on UTF-8 mode). -This function emits an EncodingWarning if encoding is None and -sys.flags.warn_default_encoding is true. + This function emits an EncodingWarning if encoding is None and + sys.flags.warn_default_encoding is true. + + This can be used in APIs with an encoding=None parameter. + However, please consider using encoding="utf-8" for new APIs. + """ -This can be used in APIs with an encoding=None parameter. -However, please consider using encoding="utf-8" for new APIs. -""" @overload def text_encoding(encoding: _T, stacklevel: int = 2, /) -> _T: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi index 628f898923d3a..c0ee2275573af 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_json.pyi @@ -1,45 +1,45 @@ -"""json speedups -""" +"""json speedups""" + from collections.abc import Callable from typing import Any, final from typing_extensions import Self @final class make_encoder: - """Encoder(markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan) -""" + """Encoder(markers, default, encoder, indent, key_separator, item_separator, sort_keys, skipkeys, allow_nan)""" + @property def sort_keys(self) -> bool: - """sort_keys -""" + """sort_keys""" + @property def skipkeys(self) -> bool: - """skipkeys -""" + """skipkeys""" + @property def key_separator(self) -> str: - """key_separator -""" + """key_separator""" + @property def indent(self) -> str | None: - """indent -""" + """indent""" + @property def markers(self) -> dict[int, Any] | None: - """markers -""" + """markers""" + @property def default(self) -> Callable[[Any], Any]: - """default -""" + """default""" + @property def encoder(self) -> Callable[[str], str]: - """encoder -""" + """encoder""" + @property def item_separator(self) -> str: - """item_separator -""" + """item_separator""" + def __new__( cls, markers: dict[int, Any] | None, @@ -53,13 +53,12 @@ class make_encoder: allow_nan: bool, ) -> Self: ... def __call__(self, obj: object, _current_indent_level: int) -> Any: - """Call self as a function. -""" + """Call self as a function.""" @final class make_scanner: - """JSON scanner object -""" + """JSON scanner object""" + object_hook: Any object_pairs_hook: Any parse_int: Any @@ -69,28 +68,29 @@ class make_scanner: # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __new__(cls, context: make_scanner) -> Self: ... def __call__(self, string: str, index: int) -> tuple[Any, int]: - """Call self as a function. -""" + """Call self as a function.""" def encode_basestring(s: str, /) -> str: """encode_basestring(string) -> string -Return a JSON representation of a Python string -""" + Return a JSON representation of a Python string + """ + def encode_basestring_ascii(s: str, /) -> str: """encode_basestring_ascii(string) -> string -Return an ASCII-only JSON representation of a Python string -""" + Return an ASCII-only JSON representation of a Python string + """ + def scanstring(string: str, end: int, strict: bool = True) -> tuple[str, int]: """scanstring(string, end, strict=True) -> (string, end) -Scan the string s for a JSON string. End is the index of the -character in s after the quote that started the JSON string. -Unescapes all valid JSON string escape sequences and raises ValueError -on attempt to decode an invalid string. If strict is False then literal -control characters are allowed in the string. + Scan the string s for a JSON string. End is the index of the + character in s after the quote that started the JSON string. + Unescapes all valid JSON string escape sequences and raises ValueError + on attempt to decode an invalid string. If strict is False then literal + control characters are allowed in the string. -Returns a tuple of the decoded string and the index of the character in s -after the end quote. -""" + Returns a tuple of the decoded string and the index of the character in s + after the end quote. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi index f140761483a01..1c8f6ce53f1aa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_locale.pyi @@ -1,5 +1,5 @@ -"""Support for POSIX locales. -""" +"""Support for POSIX locales.""" + import sys from _typeshed import StrPath from typing import Final, Literal, TypedDict, type_check_only @@ -34,23 +34,20 @@ LC_ALL: Final[int] CHAR_MAX: Final = 127 def setlocale(category: int, locale: str | None = None, /) -> str: - """Activates/queries locale processing. -""" + """Activates/queries locale processing.""" + def localeconv() -> _LocaleConv: - """Returns numeric and monetary locale-specific parameters. -""" + """Returns numeric and monetary locale-specific parameters.""" if sys.version_info >= (3, 11): def getencoding() -> str: - """Get the current locale encoding. -""" + """Get the current locale encoding.""" def strcoll(os1: str, os2: str, /) -> int: - """Compares two strings according to the locale. -""" + """Compares two strings according to the locale.""" + def strxfrm(string: str, /) -> str: - """Return a string that can be used as a key for locale-aware comparisons. -""" + """Return a string that can be used as a key for locale-aware comparisons.""" # native gettext functions # https://docs.python.org/3/library/locale.html#access-to-message-catalogs @@ -121,31 +118,30 @@ if sys.platform != "win32": ALT_DIGITS: Final[int] def nl_langinfo(key: int, /) -> str: - """Return the value for the locale information associated with key. -""" - + """Return the value for the locale information associated with key.""" # This is dependent on `libintl.h` which is a part of `gettext` # system dependency. These functions might be missing. # But, we always say that they are present. def gettext(msg: str, /) -> str: """gettext(msg) -> string -Return translation of msg. -""" + Return translation of msg. + """ + def dgettext(domain: str | None, msg: str, /) -> str: """dgettext(domain, msg) -> string -Return translation of msg in domain. -""" + Return translation of msg in domain. + """ + def dcgettext(domain: str | None, msg: str, category: int, /) -> str: - """Return translation of msg in domain and category. -""" + """Return translation of msg in domain and category.""" + def textdomain(domain: str | None, /) -> str: - """Set the C library's textdmain to domain, returning the new domain. -""" + """Set the C library's textdmain to domain, returning the new domain.""" + def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: - """Bind the C library's domain to dir. -""" + """Bind the C library's domain to dir.""" + def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: - """Bind the C library's domain to codeset. -""" + """Bind the C library's domain to codeset.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi index b2875fd9a5f22..ed6d52517bb11 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lsprof.pyi @@ -1,5 +1,5 @@ -"""Fast profiler -""" +"""Fast profiler""" + import sys from _typeshed import structseq from collections.abc import Callable @@ -11,55 +11,57 @@ from typing_extensions import disjoint_base class Profiler: """Build a profiler object using the specified timer function. -The default timer is a fast built-in one based on real time. 
-For custom timer functions returning integers, 'timeunit' can -be a float specifying a scale (that is, how long each integer unit -is, in seconds). -""" + The default timer is a fast built-in one based on real time. + For custom timer functions returning integers, 'timeunit' can + be a float specifying a scale (that is, how long each integer unit + is, in seconds). + """ + def __init__( self, timer: Callable[[], float] | None = None, timeunit: float = 0.0, subcalls: bool = True, builtins: bool = True ) -> None: ... def getstats(self) -> list[profiler_entry]: """list of profiler_entry objects. -getstats() -> list of profiler_entry objects + getstats() -> list of profiler_entry objects + + Return all information collected by the profiler. + Each profiler_entry is a tuple-like object with the + following attributes: -Return all information collected by the profiler. -Each profiler_entry is a tuple-like object with the -following attributes: + code code object + callcount how many times this was called + reccallcount how many times called recursively + totaltime total time in this entry + inlinetime inline time in this entry (not in subcalls) + calls details of the calls - code code object - callcount how many times this was called - reccallcount how many times called recursively - totaltime total time in this entry - inlinetime inline time in this entry (not in subcalls) - calls details of the calls + The calls attribute is either None or a list of + profiler_subentry objects: -The calls attribute is either None or a list of -profiler_subentry objects: + code called code object + callcount how many times this is called + reccallcount how many times this is called recursively + totaltime total time spent in this call + inlinetime inline time (not in further subcalls) + """ - code called code object - callcount how many times this is called - reccallcount how many times this is called recursively - totaltime total time spent in this call - inlinetime inline time (not in further subcalls) -""" def enable(self, subcalls: bool = True, builtins: bool = True) -> None: """Start collecting profiling information. - subcalls - If True, also records for each function - statistics separated according to its current caller. - builtins - If True, records the time spent in - built-in functions separately from their caller. -""" + subcalls + If True, also records for each function + statistics separated according to its current caller. + builtins + If True, records the time spent in + built-in functions separately from their caller. + """ + def disable(self) -> None: - """Stop collecting profiling information. -""" + """Stop collecting profiling information.""" + def clear(self) -> None: - """Clear all profiling information collected so far. -""" + """Clear all profiling information collected so far.""" @final class profiler_entry(structseq[Any], tuple[CodeType | str, int, int, float, float, list[profiler_subentry]]): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi index 1eaea87686c57..dc41fd420f147 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_lzma.pyi @@ -39,23 +39,24 @@ PRESET_EXTREME: Final[int] # v big number class LZMADecompressor: """Create a decompressor object for decompressing data incrementally. - format - Specifies the container format of the input stream. 
If this is - FORMAT_AUTO (the default), the decompressor will automatically detect - whether the input is FORMAT_XZ or FORMAT_ALONE. Streams created with - FORMAT_RAW cannot be autodetected. - memlimit - Limit the amount of memory used by the decompressor. This will cause - decompression to fail if the input cannot be decompressed within the - given limit. - filters - A custom filter chain. This argument is required for FORMAT_RAW, and - not accepted with any other format. When provided, this should be a - sequence of dicts, each indicating the ID and options for a single - filter. - -For one-shot decompression, use the decompress() function instead. -""" + format + Specifies the container format of the input stream. If this is + FORMAT_AUTO (the default), the decompressor will automatically detect + whether the input is FORMAT_XZ or FORMAT_ALONE. Streams created with + FORMAT_RAW cannot be autodetected. + memlimit + Limit the amount of memory used by the decompressor. This will cause + decompression to fail if the input cannot be decompressed within the + given limit. + filters + A custom filter chain. This argument is required for FORMAT_RAW, and + not accepted with any other format. When provided, this should be a + sequence of dicts, each indicating the ID and options for a single + filter. + + For one-shot decompression, use the decompress() function instead. + """ + if sys.version_info >= (3, 12): def __new__(cls, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> Self: ... else: @@ -64,65 +65,66 @@ For one-shot decompression, use the decompress() function instead. def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: """Decompress *data*, returning uncompressed data as bytes. -If *max_length* is nonnegative, returns at most *max_length* bytes of -decompressed data. If this limit is reached and further output can be -produced, *self.needs_input* will be set to ``False``. In this case, the next -call to *decompress()* may provide *data* as b'' to obtain more of the output. + If *max_length* is nonnegative, returns at most *max_length* bytes of + decompressed data. If this limit is reached and further output can be + produced, *self.needs_input* will be set to ``False``. In this case, the next + call to *decompress()* may provide *data* as b'' to obtain more of the output. + + If all of the input data was decompressed and returned (either because this + was less than *max_length* bytes, or because *max_length* was negative), + *self.needs_input* will be set to True. -If all of the input data was decompressed and returned (either because this -was less than *max_length* bytes, or because *max_length* was negative), -*self.needs_input* will be set to True. + Attempting to decompress data after the end of stream is reached raises an + EOFError. Any data found after the end of the stream is ignored and saved in + the unused_data attribute. + """ -Attempting to decompress data after the end of stream is reached raises an -EOFError. Any data found after the end of the stream is ignored and saved in -the unused_data attribute. -""" @property def check(self) -> int: - """ID of the integrity check used by the input stream. -""" + """ID of the integrity check used by the input stream.""" + @property def eof(self) -> bool: - """True if the end-of-stream marker has been reached. -""" + """True if the end-of-stream marker has been reached.""" + @property def unused_data(self) -> bytes: - """Data found after the end of the compressed stream. 
-""" + """Data found after the end of the compressed stream.""" + @property def needs_input(self) -> bool: - """True if more input is needed before more decompressed data can be produced. -""" + """True if more input is needed before more decompressed data can be produced.""" @final class LZMACompressor: """LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None) -Create a compressor object for compressing data incrementally. + Create a compressor object for compressing data incrementally. + + format specifies the container format to use for the output. This can + be FORMAT_XZ (default), FORMAT_ALONE, or FORMAT_RAW. -format specifies the container format to use for the output. This can -be FORMAT_XZ (default), FORMAT_ALONE, or FORMAT_RAW. + check specifies the integrity check to use. For FORMAT_XZ, the default + is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not support integrity + checks; for these formats, check must be omitted, or be CHECK_NONE. -check specifies the integrity check to use. For FORMAT_XZ, the default -is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not support integrity -checks; for these formats, check must be omitted, or be CHECK_NONE. + The settings used by the compressor can be specified either as a + preset compression level (with the 'preset' argument), or in detail + as a custom filter chain (with the 'filters' argument). For FORMAT_XZ + and FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset + level. For FORMAT_RAW, the caller must always specify a filter chain; + the raw compressor does not support preset compression levels. -The settings used by the compressor can be specified either as a -preset compression level (with the 'preset' argument), or in detail -as a custom filter chain (with the 'filters' argument). For FORMAT_XZ -and FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset -level. For FORMAT_RAW, the caller must always specify a filter chain; -the raw compressor does not support preset compression levels. + preset (if provided) should be an integer in the range 0-9, optionally + OR-ed with the constant PRESET_EXTREME. -preset (if provided) should be an integer in the range 0-9, optionally -OR-ed with the constant PRESET_EXTREME. + filters (if provided) should be a sequence of dicts. Each dict should + have an entry for "id" indicating the ID of the filter, plus + additional entries for options to the filter. -filters (if provided) should be a sequence of dicts. Each dict should -have an entry for "id" indicating the ID of the filter, plus -additional entries for options to the filter. + For one-shot compression, use the compress() function instead. + """ -For one-shot compression, use the compress() function instead. -""" if sys.version_info >= (3, 12): def __new__( cls, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None @@ -135,25 +137,25 @@ For one-shot compression, use the compress() function instead. def compress(self, data: ReadableBuffer, /) -> bytes: """Provide data to the compressor object. -Returns a chunk of compressed data if possible, or b'' otherwise. + Returns a chunk of compressed data if possible, or b'' otherwise. + + When you have finished providing data to the compressor, call the + flush() method to finish the compression process. + """ -When you have finished providing data to the compressor, call the -flush() method to finish the compression process. -""" def flush(self) -> bytes: """Finish the compression process. 
-Returns the compressed data left in internal buffers. + Returns the compressed data left in internal buffers. -The compressor object may not be used after this method is called. -""" + The compressor object may not be used after this method is called. + """ class LZMAError(Exception): - """Call to liblzma failed. -""" + """Call to liblzma failed.""" def is_check_supported(check_id: int, /) -> bool: """Test whether the given integrity check is supported. -Always returns True for CHECK_NONE and CHECK_CRC32. -""" + Always returns True for CHECK_NONE and CHECK_CRC32. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi index 6348d2d98d298..89ca2bfe82788 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_markupbase.pyi @@ -4,17 +4,19 @@ This module is used as a foundation for the html.parser module. It has no documented public API and should not be used directly. """ + import sys from typing import Any class ParserBase: """Parser base class which provides some common support methods used -by the SGML/HTML and XHTML parsers. -""" + by the SGML/HTML and XHTML parsers. + """ + def reset(self) -> None: ... def getpos(self) -> tuple[int, int]: - """Return current line number and offset. -""" + """Return current line number and offset.""" + def unknown_decl(self, data: str) -> None: ... def parse_comment(self, i: int, report: bool = True) -> int: ... # undocumented def parse_declaration(self, i: int) -> int: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi index 81f6282cc3d18..353942a5296cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_msi.pyi @@ -1,5 +1,5 @@ -"""Documentation -""" +"""Documentation""" + import sys from typing import Final, type_check_only @@ -54,32 +54,33 @@ if sys.platform == "win32": __init__: None # type: ignore[assignment] def UuidCreate() -> str: - """Return the string representation of a new unique identifier. -""" + """Return the string representation of a new unique identifier.""" + def FCICreate(cabname: str, files: list[str], /) -> None: """Create a new CAB file. - cabname - the name of the CAB file - files - a list of tuples, each containing the name of the file on disk, - and the name of the file inside the CAB file -""" + cabname + the name of the CAB file + files + a list of tuples, each containing the name of the file on disk, + and the name of the file inside the CAB file + """ + def OpenDatabase(path: str, persist: int, /) -> _Database: """Return a new database object. - path - the file name of the MSI file - persist - the persistence mode -""" + path + the file name of the MSI file + persist + the persistence mode + """ + def CreateRecord(count: int, /) -> _Record: """Return a new record object. - count - the number of fields of the record -""" - + count + the number of fields of the record + """ MSICOLINFO_NAMES: Final[int] MSICOLINFO_TYPES: Final[int] MSIDBOPEN_CREATE: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi index 893772a615317..7e893a6e4211d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_operator.pyi @@ -6,6 +6,7 @@ is equivalent to the expression x+y. 
The function names are those used for special methods; variants without leading and trailing '__' are also provided for convenience. """ + import sys from _typeshed import SupportsGetItem from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence @@ -56,122 +57,122 @@ class _SupportsPos(Protocol[_T_co]): # All four comparison functions must have the same signature, or we get false-positive errors def lt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a < b. -""" + """Same as a < b.""" + def le(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a <= b. -""" + """Same as a <= b.""" + def eq(a: object, b: object, /) -> Any: - """Same as a == b. -""" + """Same as a == b.""" + def ne(a: object, b: object, /) -> Any: - """Same as a != b. -""" + """Same as a != b.""" + def ge(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a >= b. -""" + """Same as a >= b.""" + def gt(a: _SupportsComparison, b: _SupportsComparison, /) -> Any: - """Same as a > b. -""" + """Same as a > b.""" + def not_(a: object, /) -> bool: - """Same as not a. -""" + """Same as not a.""" + def truth(a: object, /) -> bool: - """Return True if a is true, False otherwise. -""" + """Return True if a is true, False otherwise.""" + def is_(a: object, b: object, /) -> bool: - """Same as a is b. -""" + """Same as a is b.""" + def is_not(a: object, b: object, /) -> bool: - """Same as a is not b. -""" + """Same as a is not b.""" + def abs(a: SupportsAbs[_T], /) -> _T: - """Same as abs(a). -""" + """Same as abs(a).""" + def add(a: Any, b: Any, /) -> Any: - """Same as a + b. -""" + """Same as a + b.""" + def and_(a: Any, b: Any, /) -> Any: - """Same as a & b. -""" + """Same as a & b.""" + def floordiv(a: Any, b: Any, /) -> Any: - """Same as a // b. -""" + """Same as a // b.""" + def index(a: SupportsIndex, /) -> int: - """Same as a.__index__() -""" + """Same as a.__index__()""" + def inv(a: _SupportsInversion[_T_co], /) -> _T_co: - """Same as ~a. -""" + """Same as ~a.""" + def invert(a: _SupportsInversion[_T_co], /) -> _T_co: - """Same as ~a. -""" + """Same as ~a.""" + def lshift(a: Any, b: Any, /) -> Any: - """Same as a << b. -""" + """Same as a << b.""" + def mod(a: Any, b: Any, /) -> Any: - """Same as a % b. -""" + """Same as a % b.""" + def mul(a: Any, b: Any, /) -> Any: - """Same as a * b. -""" + """Same as a * b.""" + def matmul(a: Any, b: Any, /) -> Any: - """Same as a @ b. -""" + """Same as a @ b.""" + def neg(a: _SupportsNeg[_T_co], /) -> _T_co: - """Same as -a. -""" + """Same as -a.""" + def or_(a: Any, b: Any, /) -> Any: - """Same as a | b. -""" + """Same as a | b.""" + def pos(a: _SupportsPos[_T_co], /) -> _T_co: - """Same as +a. -""" + """Same as +a.""" + def pow(a: Any, b: Any, /) -> Any: - """Same as a ** b. -""" + """Same as a ** b.""" + def rshift(a: Any, b: Any, /) -> Any: - """Same as a >> b. -""" + """Same as a >> b.""" + def sub(a: Any, b: Any, /) -> Any: - """Same as a - b. -""" + """Same as a - b.""" + def truediv(a: Any, b: Any, /) -> Any: - """Same as a / b. -""" + """Same as a / b.""" + def xor(a: Any, b: Any, /) -> Any: - """Same as a ^ b. -""" + """Same as a ^ b.""" + def concat(a: Sequence[_T], b: Sequence[_T], /) -> Sequence[_T]: - """Same as a + b, for a and b sequences. -""" + """Same as a + b, for a and b sequences.""" + def contains(a: Container[object], b: object, /) -> bool: - """Same as b in a (note reversed operands). 
-""" + """Same as b in a (note reversed operands).""" + def countOf(a: Iterable[object], b: object, /) -> int: - """Return the number of items in a which are, or which equal, b. -""" + """Return the number of items in a which are, or which equal, b.""" + @overload def delitem(a: MutableSequence[Any], b: SupportsIndex, /) -> None: - """Same as del a[b]. -""" + """Same as del a[b].""" + @overload def delitem(a: MutableSequence[Any], b: slice, /) -> None: ... @overload def delitem(a: MutableMapping[_K, Any], b: _K, /) -> None: ... @overload def getitem(a: Sequence[_T], b: slice, /) -> Sequence[_T]: - """Same as a[b]. -""" + """Same as a[b].""" + @overload def getitem(a: SupportsGetItem[_K, _V], b: _K, /) -> _V: ... def indexOf(a: Iterable[_T], b: _T, /) -> int: - """Return the first index of b in a. -""" + """Return the first index of b in a.""" + @overload def setitem(a: MutableSequence[_T], b: SupportsIndex, c: _T, /) -> None: - """Same as a[b] = c. -""" + """Same as a[b] = c.""" + @overload def setitem(a: MutableSequence[_T], b: slice, c: Sequence[_T], /) -> None: ... @overload @@ -179,78 +180,76 @@ def setitem(a: MutableMapping[_K, _V], b: _K, c: _V, /) -> None: ... def length_hint(obj: object, default: int = 0, /) -> int: """Return an estimate of the number of items in obj. -This is useful for presizing containers when building from an iterable. + This is useful for presizing containers when building from an iterable. + + If the object supports len(), the result will be exact. + Otherwise, it may over- or under-estimate by an arbitrary amount. + The result will be an integer >= 0. + """ -If the object supports len(), the result will be exact. -Otherwise, it may over- or under-estimate by an arbitrary amount. -The result will be an integer >= 0. -""" def iadd(a: Any, b: Any, /) -> Any: - """Same as a += b. -""" + """Same as a += b.""" + def iand(a: Any, b: Any, /) -> Any: - """Same as a &= b. -""" + """Same as a &= b.""" + def iconcat(a: Any, b: Any, /) -> Any: - """Same as a += b, for a and b sequences. -""" + """Same as a += b, for a and b sequences.""" + def ifloordiv(a: Any, b: Any, /) -> Any: - """Same as a //= b. -""" + """Same as a //= b.""" + def ilshift(a: Any, b: Any, /) -> Any: - """Same as a <<= b. -""" + """Same as a <<= b.""" + def imod(a: Any, b: Any, /) -> Any: - """Same as a %= b. -""" + """Same as a %= b.""" + def imul(a: Any, b: Any, /) -> Any: - """Same as a *= b. -""" + """Same as a *= b.""" + def imatmul(a: Any, b: Any, /) -> Any: - """Same as a @= b. -""" + """Same as a @= b.""" + def ior(a: Any, b: Any, /) -> Any: - """Same as a |= b. -""" + """Same as a |= b.""" + def ipow(a: Any, b: Any, /) -> Any: - """Same as a **= b. -""" + """Same as a **= b.""" + def irshift(a: Any, b: Any, /) -> Any: - """Same as a >>= b. -""" + """Same as a >>= b.""" + def isub(a: Any, b: Any, /) -> Any: - """Same as a -= b. -""" + """Same as a -= b.""" + def itruediv(a: Any, b: Any, /) -> Any: - """Same as a /= b. -""" + """Same as a /= b.""" + def ixor(a: Any, b: Any, /) -> Any: - """Same as a ^= b. -""" + """Same as a ^= b.""" if sys.version_info >= (3, 11): def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: - """Same as obj(*args, **kwargs). -""" + """Same as obj(*args, **kwargs).""" def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: """Return 'a == b'. -This function uses an approach designed to prevent -timing analysis, making it appropriate for cryptography. 
+ This function uses an approach designed to prevent + timing analysis, making it appropriate for cryptography. -a and b must both be of the same type: either str (ASCII only), -or any bytes-like object. + a and b must both be of the same type: either str (ASCII only), + or any bytes-like object. -Note: If a and b are of different lengths, or if an error occurs, -a timing attack could theoretically reveal information about the -types and lengths of a and b--but not their values. -""" + Note: If a and b are of different lengths, or if an error occurs, + a timing attack could theoretically reveal information about the + types and lengths of a and b--but not their values. + """ if sys.version_info >= (3, 14): def is_none(a: object, /) -> TypeIs[None]: - """Same as a is None. -""" + """Same as a is None.""" + def is_not_none(a: _T | None, /) -> TypeIs[_T]: - """Same as a is not None. -""" + """Same as a is not None.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi index caff65eead678..e026e5e01e5e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_osx_support.pyi @@ -1,5 +1,5 @@ -"""Shared OS X support functions. -""" +"""Shared OS X support functions.""" + from collections.abc import Iterable, Sequence from typing import Final, TypeVar @@ -16,87 +16,87 @@ _INITPRE: Final[str] # undocumented def _find_executable(executable: str, path: str | None = None) -> str | None: # undocumented """Tries to find 'executable' in the directories listed in 'path'. -A string listing directories separated by 'os.pathsep'; defaults to -os.environ['PATH']. Returns the complete filename or None if not found. -""" + A string listing directories separated by 'os.pathsep'; defaults to + os.environ['PATH']. Returns the complete filename or None if not found. 
+ """ + def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: # undocumented - """Output from successful command execution or None -""" + """Output from successful command execution or None""" + def _find_build_tool(toolname: str) -> str: # undocumented - """Find a build tool on current path or using xcrun -""" + """Find a build tool on current path or using xcrun""" _SYSTEM_VERSION: Final[str | None] # undocumented def _get_system_version() -> str: # undocumented - """Return the OS X system version as a string -""" + """Return the OS X system version as a string""" + def _remove_original_values(_config_vars: dict[str, str]) -> None: # undocumented - """Remove original unmodified values for testing -""" + """Remove original unmodified values for testing""" + def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: # undocumented - """Save modified and original unmodified value of configuration var -""" + """Save modified and original unmodified value of configuration var""" + def _supports_universal_builds() -> bool: # undocumented - """Returns True if universal builds are supported on this system -""" + """Returns True if universal builds are supported on this system""" + def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Find appropriate C compiler for extension module builds -""" + """Find appropriate C compiler for extension module builds""" + def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove all universal build arguments from config vars -""" + """Remove all universal build arguments from config vars""" + def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove any unsupported archs from config vars -""" + """Remove any unsupported archs from config vars""" + def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Allow override of all archs with ARCHFLAGS env var -""" + """Allow override of all archs with ARCHFLAGS env var""" + def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: # undocumented - """Remove references to any SDKs not available -""" + """Remove references to any SDKs not available""" + def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: """ -This function will strip '-isysroot PATH' and '-arch ARCH' from the -compile flags if the user has specified one them in extra_compile_flags. + This function will strip '-isysroot PATH' and '-arch ARCH' from the + compile flags if the user has specified one them in extra_compile_flags. + + This is needed because '-arch ARCH' adds another architecture to the + build, without a way to remove an architecture. Furthermore GCC will + barf if multiple '-isysroot' arguments are present. + """ -This is needed because '-arch ARCH' adds another architecture to the -build, without a way to remove an architecture. Furthermore GCC will -barf if multiple '-isysroot' arguments are present. -""" def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: """Customize Python build configuration variables. -Called internally from sysconfig with a mutable mapping -containing name/value pairs parsed from the configured -makefile used to build this interpreter. 
Returns -the mapping updated as needed to reflect the environment -in which the interpreter is running; in the case of -a Python from a binary installer, the installed -environment may be very different from the build -environment, i.e. different OS levels, different -built tools, different available CPU architectures. - -This customization is performed whenever -distutils.sysconfig.get_config_vars() is first -called. It may be used in environments where no -compilers are present, i.e. when installing pure -Python dists. Customization of compiler paths -and detection of unavailable archs is deferred -until the first extension module build is -requested (in distutils.sysconfig.customize_compiler). - -Currently called from distutils.sysconfig -""" + Called internally from sysconfig with a mutable mapping + containing name/value pairs parsed from the configured + makefile used to build this interpreter. Returns + the mapping updated as needed to reflect the environment + in which the interpreter is running; in the case of + a Python from a binary installer, the installed + environment may be very different from the build + environment, i.e. different OS levels, different + built tools, different available CPU architectures. + + This customization is performed whenever + distutils.sysconfig.get_config_vars() is first + called. It may be used in environments where no + compilers are present, i.e. when installing pure + Python dists. Customization of compiler paths + and detection of unavailable archs is deferred + until the first extension module build is + requested (in distutils.sysconfig.customize_compiler). + + Currently called from distutils.sysconfig + """ + def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: """Customize compiler path and configuration variables. -This customization is performed when the first -extension module build is requested -in distutils.sysconfig.customize_compiler. -""" -def get_platform_osx( - _config_vars: dict[str, str], osname: _T, release: _K, machine: _V -) -> tuple[str | _T, str | _K, str | _V]: - """Filter values for get_platform() -""" + This customization is performed when the first + extension module build is requested + in distutils.sysconfig.customize_compiler. + """ + +def get_platform_osx(_config_vars: dict[str, str], osname: _T, release: _K, machine: _V) -> tuple[str | _T, str | _K, str | _V]: + """Filter values for get_platform()""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi index ef86bcf806fa4..9867a477a7f80 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pickle.pyi @@ -1,5 +1,5 @@ -"""Optimized C implementation for the Python pickle module. -""" +"""Optimized C implementation for the Python pickle module.""" + from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping from pickle import PickleBuffer as PickleBuffer @@ -31,53 +31,53 @@ def dump( ) -> None: """Write a pickled representation of obj to the open file object file. -This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may -be more efficient. - -The optional *protocol* argument tells the pickler to use the given -protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default -protocol is 5. It was introduced in Python 3.8, and is incompatible -with previous versions. 
- -Specifying a negative protocol version selects the highest protocol -version supported. The higher the protocol used, the more recent the -version of Python needed to read the pickle produced. - -The *file* argument must have a write() method that accepts a single -bytes argument. It can thus be a file object opened for binary -writing, an io.BytesIO instance, or any other custom object that meets -this interface. - -If *fix_imports* is True and protocol is less than 3, pickle will try -to map the new Python 3 names to the old module names used in Python -2, so that the pickle data stream is readable with Python 2. - -If *buffer_callback* is None (the default), buffer views are serialized -into *file* as part of the pickle stream. It is an error if -*buffer_callback* is not None and *protocol* is None or smaller than 5. -""" -def dumps( - obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None -) -> bytes: + This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may + be more efficient. + + The optional *protocol* argument tells the pickler to use the given + protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default + protocol is 5. It was introduced in Python 3.8, and is incompatible + with previous versions. + + Specifying a negative protocol version selects the highest protocol + version supported. The higher the protocol used, the more recent the + version of Python needed to read the pickle produced. + + The *file* argument must have a write() method that accepts a single + bytes argument. It can thus be a file object opened for binary + writing, an io.BytesIO instance, or any other custom object that meets + this interface. + + If *fix_imports* is True and protocol is less than 3, pickle will try + to map the new Python 3 names to the old module names used in Python + 2, so that the pickle data stream is readable with Python 2. + + If *buffer_callback* is None (the default), buffer views are serialized + into *file* as part of the pickle stream. It is an error if + *buffer_callback* is not None and *protocol* is None or smaller than 5. + """ + +def dumps(obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None) -> bytes: """Return the pickled representation of the object as a bytes object. -The optional *protocol* argument tells the pickler to use the given -protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default -protocol is 5. It was introduced in Python 3.8, and is incompatible -with previous versions. + The optional *protocol* argument tells the pickler to use the given + protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default + protocol is 5. It was introduced in Python 3.8, and is incompatible + with previous versions. + + Specifying a negative protocol version selects the highest protocol + version supported. The higher the protocol used, the more recent the + version of Python needed to read the pickle produced. -Specifying a negative protocol version selects the highest protocol -version supported. The higher the protocol used, the more recent the -version of Python needed to read the pickle produced. + If *fix_imports* is True and *protocol* is less than 3, pickle will + try to map the new Python 3 names to the old module names used in + Python 2, so that the pickle data stream is readable with Python 2. 
-If *fix_imports* is True and *protocol* is less than 3, pickle will -try to map the new Python 3 names to the old module names used in -Python 2, so that the pickle data stream is readable with Python 2. + If *buffer_callback* is None (the default), buffer views are serialized + into *file* as part of the pickle stream. It is an error if + *buffer_callback* is not None and *protocol* is None or smaller than 5. + """ -If *buffer_callback* is None (the default), buffer views are serialized -into *file* as part of the pickle stream. It is an error if -*buffer_callback* is not None and *protocol* is None or smaller than 5. -""" def load( file: _ReadableFileobj, *, @@ -88,28 +88,29 @@ def load( ) -> Any: """Read and return an object from the pickle data stored in a file. -This is equivalent to ``Unpickler(file).load()``, but may be more -efficient. - -The protocol version of the pickle is detected automatically, so no -protocol argument is needed. Bytes past the pickled object's -representation are ignored. - -The argument *file* must have two methods, a read() method that takes -an integer argument, and a readline() method that requires no -arguments. Both methods should return bytes. Thus *file* can be a -binary file object opened for reading, an io.BytesIO object, or any -other custom object that meets this interface. - -Optional keyword arguments are *fix_imports*, *encoding* and *errors*, -which are used to control compatibility support for pickle stream -generated by Python 2. If *fix_imports* is True, pickle will try to -map the old Python 2 names to the new names used in Python 3. The -*encoding* and *errors* tell pickle how to decode 8-bit string -instances pickled by Python 2; these default to 'ASCII' and 'strict', -respectively. The *encoding* can be 'bytes' to read these 8-bit -string instances as bytes objects. -""" + This is equivalent to ``Unpickler(file).load()``, but may be more + efficient. + + The protocol version of the pickle is detected automatically, so no + protocol argument is needed. Bytes past the pickled object's + representation are ignored. + + The argument *file* must have two methods, a read() method that takes + an integer argument, and a readline() method that requires no + arguments. Both methods should return bytes. Thus *file* can be a + binary file object opened for reading, an io.BytesIO object, or any + other custom object that meets this interface. + + Optional keyword arguments are *fix_imports*, *encoding* and *errors*, + which are used to control compatibility support for pickle stream + generated by Python 2. If *fix_imports* is True, pickle will try to + map the old Python 2 names to the new names used in Python 3. The + *encoding* and *errors* tell pickle how to decode 8-bit string + instances pickled by Python 2; these default to 'ASCII' and 'strict', + respectively. The *encoding* can be 'bytes' to read these 8-bit + string instances as bytes objects. + """ + def loads( data: ReadableBuffer, /, @@ -121,19 +122,19 @@ def loads( ) -> Any: """Read and return an object from the given pickle data. -The protocol version of the pickle is detected automatically, so no -protocol argument is needed. Bytes past the pickled object's -representation are ignored. + The protocol version of the pickle is detected automatically, so no + protocol argument is needed. Bytes past the pickled object's + representation are ignored. 
-Optional keyword arguments are *fix_imports*, *encoding* and *errors*, -which are used to control compatibility support for pickle stream -generated by Python 2. If *fix_imports* is True, pickle will try to -map the old Python 2 names to the new names used in Python 3. The -*encoding* and *errors* tell pickle how to decode 8-bit string -instances pickled by Python 2; these default to 'ASCII' and 'strict', -respectively. The *encoding* can be 'bytes' to read these 8-bit -string instances as bytes objects. -""" + Optional keyword arguments are *fix_imports*, *encoding* and *errors*, + which are used to control compatibility support for pickle stream + generated by Python 2. If *fix_imports* is True, pickle will try to + map the old Python 2 names to the new names used in Python 3. The + *encoding* and *errors* tell pickle how to decode 8-bit string + instances pickled by Python 2; these default to 'ASCII' and 'strict', + respectively. The *encoding* can be 'bytes' to read these 8-bit + string instances as bytes objects. + """ class PickleError(Exception): ... class PicklingError(PickleError): ... @@ -148,35 +149,36 @@ class PicklerMemoProxy: class Pickler: """This takes a binary file for writing a pickle data stream. -The optional *protocol* argument tells the pickler to use the given -protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default -protocol is 5. It was introduced in Python 3.8, and is incompatible -with previous versions. + The optional *protocol* argument tells the pickler to use the given + protocol; supported protocols are 0, 1, 2, 3, 4 and 5. The default + protocol is 5. It was introduced in Python 3.8, and is incompatible + with previous versions. + + Specifying a negative protocol version selects the highest protocol + version supported. The higher the protocol used, the more recent the + version of Python needed to read the pickle produced. -Specifying a negative protocol version selects the highest protocol -version supported. The higher the protocol used, the more recent the -version of Python needed to read the pickle produced. + The *file* argument must have a write() method that accepts a single + bytes argument. It can thus be a file object opened for binary + writing, an io.BytesIO instance, or any other custom object that meets + this interface. -The *file* argument must have a write() method that accepts a single -bytes argument. It can thus be a file object opened for binary -writing, an io.BytesIO instance, or any other custom object that meets -this interface. + If *fix_imports* is True and protocol is less than 3, pickle will try + to map the new Python 3 names to the old module names used in Python + 2, so that the pickle data stream is readable with Python 2. -If *fix_imports* is True and protocol is less than 3, pickle will try -to map the new Python 3 names to the old module names used in Python -2, so that the pickle data stream is readable with Python 2. + If *buffer_callback* is None (the default), buffer views are + serialized into *file* as part of the pickle stream. -If *buffer_callback* is None (the default), buffer views are -serialized into *file* as part of the pickle stream. + If *buffer_callback* is not None, then it can be called any number + of times with a buffer view. If the callback returns a false value + (such as None), the given buffer is out-of-band; otherwise the + buffer is serialized in-band, i.e. inside the pickle stream. -If *buffer_callback* is not None, then it can be called any number -of times with a buffer view. 
If the callback returns a false value -(such as None), the given buffer is out-of-band; otherwise the -buffer is serialized in-band, i.e. inside the pickle stream. + It is an error if *buffer_callback* is not None and *protocol* + is None or smaller than 5. + """ -It is an error if *buffer_callback* is not None and *protocol* -is None or smaller than 5. -""" fast: bool dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] reducer_override: Callable[[Any], Any] @@ -193,17 +195,16 @@ is None or smaller than 5. @memo.setter def memo(self, value: PicklerMemoProxy | dict[int, tuple[int, Any]]) -> None: ... def dump(self, obj: Any, /) -> None: - """Write a pickled representation of the given object to the open file. -""" + """Write a pickled representation of the given object to the open file.""" + def clear_memo(self) -> None: """Clears the pickler's "memo". -The memo is the data structure that remembers which objects the -pickler has already seen, so that shared or recursive objects are -pickled by reference and not by value. This method is useful when -re-using picklers. -""" - + The memo is the data structure that remembers which objects the + pickler has already seen, so that shared or recursive objects are + pickled by reference and not by value. This method is useful when + re-using picklers. + """ # this method has no default implementation for Python < 3.13 def persistent_id(self, obj: Any, /) -> Any: ... @@ -216,25 +217,26 @@ class UnpicklerMemoProxy: class Unpickler: """This takes a binary file for reading a pickle data stream. -The protocol version of the pickle is detected automatically, so no -protocol argument is needed. Bytes past the pickled object's -representation are ignored. - -The argument *file* must have two methods, a read() method that takes -an integer argument, and a readline() method that requires no -arguments. Both methods should return bytes. Thus *file* can be a -binary file object opened for reading, an io.BytesIO object, or any -other custom object that meets this interface. - -Optional keyword arguments are *fix_imports*, *encoding* and *errors*, -which are used to control compatibility support for pickle stream -generated by Python 2. If *fix_imports* is True, pickle will try to -map the old Python 2 names to the new names used in Python 3. The -*encoding* and *errors* tell pickle how to decode 8-bit string -instances pickled by Python 2; these default to 'ASCII' and 'strict', -respectively. The *encoding* can be 'bytes' to read these 8-bit -string instances as bytes objects. -""" + The protocol version of the pickle is detected automatically, so no + protocol argument is needed. Bytes past the pickled object's + representation are ignored. + + The argument *file* must have two methods, a read() method that takes + an integer argument, and a readline() method that requires no + arguments. Both methods should return bytes. Thus *file* can be a + binary file object opened for reading, an io.BytesIO object, or any + other custom object that meets this interface. + + Optional keyword arguments are *fix_imports*, *encoding* and *errors*, + which are used to control compatibility support for pickle stream + generated by Python 2. If *fix_imports* is True, pickle will try to + map the old Python 2 names to the new names used in Python 3. The + *encoding* and *errors* tell pickle how to decode 8-bit string + instances pickled by Python 2; these default to 'ASCII' and 'strict', + respectively. 
The *encoding* can be 'bytes' to read these 8-bit + string instances as bytes objects. + """ + def __init__( self, file: _ReadableFileobj, @@ -251,20 +253,20 @@ string instances as bytes objects. def load(self) -> Any: """Load a pickle. -Read a pickled object representation from the open file object given -in the constructor, and return the reconstituted object hierarchy -specified therein. -""" + Read a pickled object representation from the open file object given + in the constructor, and return the reconstituted object hierarchy + specified therein. + """ + def find_class(self, module_name: str, global_name: str, /) -> Any: """Return an object from a specified module. -If necessary, the module will be imported. Subclasses may override -this method (e.g. to restrict unpickling of arbitrary classes and -functions). - -This method is called whenever a class or a function object is -needed. Both arguments passed are str objects. -""" + If necessary, the module will be imported. Subclasses may override + this method (e.g. to restrict unpickling of arbitrary classes and + functions). + This method is called whenever a class or a function object is + needed. Both arguments passed are str objects. + """ # this method has no default implementation for Python < 3.13 def persistent_load(self, pid: Any, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi index ba2509ac9aad4..0a1d475b8409c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_posixsubprocess.pyi @@ -1,5 +1,5 @@ -"""A POSIX helper for the subprocess module. -""" +"""A POSIX helper for the subprocess module.""" + import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -34,26 +34,26 @@ if sys.platform != "win32": ) -> int: """Spawn a fresh new child process. -Fork a child process, close parent file descriptors as appropriate in the -child and duplicate the few that are needed before calling exec() in the -child process. + Fork a child process, close parent file descriptors as appropriate in the + child and duplicate the few that are needed before calling exec() in the + child process. -If close_fds is True, close file descriptors 3 and higher, except those listed -in the sorted tuple pass_fds. + If close_fds is True, close file descriptors 3 and higher, except those listed + in the sorted tuple pass_fds. -The preexec_fn, if supplied, will be called immediately before closing file -descriptors and exec. + The preexec_fn, if supplied, will be called immediately before closing file + descriptors and exec. -WARNING: preexec_fn is NOT SAFE if your application uses threads. - It may trigger infrequent, difficult to debug deadlocks. + WARNING: preexec_fn is NOT SAFE if your application uses threads. + It may trigger infrequent, difficult to debug deadlocks. -If an error occurs in the child process before the exec, it is -serialized and written to the errpipe_write fd per subprocess.py. + If an error occurs in the child process before the exec, it is + serialized and written to the errpipe_write fd per subprocess.py. -Returns: the child process's PID. + Returns: the child process's PID. -Raises: Only on an error in the parent process. -""" + Raises: Only on an error in the parent process. + """ else: def fork_exec( args: Sequence[StrOrBytesPath] | None, @@ -83,23 +83,23 @@ Raises: Only on an error in the parent process. 
) -> int: """Spawn a fresh new child process. -Fork a child process, close parent file descriptors as appropriate in the -child and duplicate the few that are needed before calling exec() in the -child process. + Fork a child process, close parent file descriptors as appropriate in the + child and duplicate the few that are needed before calling exec() in the + child process. -If close_fds is True, close file descriptors 3 and higher, except those listed -in the sorted tuple pass_fds. + If close_fds is True, close file descriptors 3 and higher, except those listed + in the sorted tuple pass_fds. -The preexec_fn, if supplied, will be called immediately before closing file -descriptors and exec. + The preexec_fn, if supplied, will be called immediately before closing file + descriptors and exec. -WARNING: preexec_fn is NOT SAFE if your application uses threads. - It may trigger infrequent, difficult to debug deadlocks. + WARNING: preexec_fn is NOT SAFE if your application uses threads. + It may trigger infrequent, difficult to debug deadlocks. -If an error occurs in the child process before the exec, it is -serialized and written to the errpipe_write fd per subprocess.py. + If an error occurs in the child process before the exec, it is + serialized and written to the errpipe_write fd per subprocess.py. -Returns: the child process's PID. + Returns: the child process's PID. -Raises: Only on an error in the parent process. -""" + Raises: Only on an error in the parent process. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi index 471c434effb42..b273d362048a2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_py_abc.pyi @@ -8,29 +8,30 @@ _CacheToken = NewType("_CacheToken", int) def get_cache_token() -> _CacheToken: """Returns the current ABC cache token. -The token is an opaque object (supporting equality testing) identifying the -current version of the ABC cache for virtual subclasses. The token changes -with every call to ``register()`` on any ABC. -""" + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to ``register()`` on any ABC. + """ class ABCMeta(type): """Metaclass for defining Abstract Base Classes (ABCs). -Use this metaclass to create an ABC. An ABC can be subclassed -directly, and then acts as a mix-in class. You can also register -unrelated concrete classes (even built-in classes) and unrelated -ABCs as 'virtual subclasses' -- these and their descendants will -be considered subclasses of the registering ABC by the built-in -issubclass() function, but the registering ABC won't show up in -their MRO (Method Resolution Order) nor will method -implementations defined by the registering ABC be callable (not -even via super()). -""" + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). 
+ """ + def __new__( mcls: type[_typeshed.Self], name: str, bases: tuple[type[Any], ...], namespace: dict[str, Any], / ) -> _typeshed.Self: ... def register(cls, subclass: type[_T]) -> type[_T]: """Register a virtual subclass of an ABC. -Returns the subclass, to allow usage as a class decorator. -""" + Returns the subclass, to allow usage as a class decorator. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi index 77408ac26a399..95499412cc7d1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_pydecimal.pyi @@ -1,5 +1,5 @@ -"""Python decimal arithmetic module -""" +"""Python decimal arithmetic module""" + # This is a slight lie, the implementations aren't exactly identical # However, in all likelihood, the differences are inconsequential import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi index 1bf712af28979..2e78fd1d78391 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_queue.pyi @@ -1,6 +1,7 @@ """C implementation of the Python queue module. This module is an implementation detail, please do not use it directly. """ + from types import GenericAlias from typing import Any, Generic, TypeVar from typing_extensions import disjoint_base @@ -8,49 +9,51 @@ from typing_extensions import disjoint_base _T = TypeVar("_T") class Empty(Exception): - """Exception raised by Queue.get(block=0)/get_nowait(). -""" + """Exception raised by Queue.get(block=0)/get_nowait().""" @disjoint_base class SimpleQueue(Generic[_T]): - """Simple, unbounded, reentrant FIFO queue. -""" + """Simple, unbounded, reentrant FIFO queue.""" + def __init__(self) -> None: ... def empty(self) -> bool: - """Return True if the queue is empty, False otherwise (not reliable!). -""" + """Return True if the queue is empty, False otherwise (not reliable!).""" + def get(self, block: bool = True, timeout: float | None = None) -> _T: """Remove and return an item from the queue. -If optional args 'block' is true and 'timeout' is None (the default), -block if necessary until an item is available. If 'timeout' is -a non-negative number, it blocks at most 'timeout' seconds and raises -the Empty exception if no item was available within that time. -Otherwise ('block' is false), return an item if one is immediately -available, else raise the Empty exception ('timeout' is ignored -in that case). -""" + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until an item is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the Empty exception if no item was available within that time. + Otherwise ('block' is false), return an item if one is immediately + available, else raise the Empty exception ('timeout' is ignored + in that case). + """ + def get_nowait(self) -> _T: """Remove and return an item from the queue without blocking. -Only get an item if one is immediately available. Otherwise -raise the Empty exception. -""" + Only get an item if one is immediately available. Otherwise + raise the Empty exception. + """ + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: """Put the item on the queue. -The optional 'block' and 'timeout' arguments are ignored, as this method -never blocks. 
They are provided for compatibility with the Queue class. -""" + The optional 'block' and 'timeout' arguments are ignored, as this method + never blocks. They are provided for compatibility with the Queue class. + """ + def put_nowait(self, item: _T) -> None: """Put an item into the queue without blocking. -This is exactly equivalent to `put(item)` and is only provided -for compatibility with the Queue class. -""" + This is exactly equivalent to `put(item)` and is only provided + for compatibility with the Queue class. + """ + def qsize(self) -> int: - """Return the approximate size of the queue (not reliable!). -""" + """Return the approximate size of the queue (not reliable!).""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi index 41e15341277c0..243de87f5aa5f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_random.pyi @@ -1,5 +1,5 @@ -"""Module implements the Mersenne Twister random number generator. -""" +"""Module implements the Mersenne Twister random number generator.""" + import sys from typing_extensions import Self, TypeAlias, disjoint_base @@ -8,8 +8,8 @@ _State: TypeAlias = tuple[int, ...] @disjoint_base class Random: - """Random() -> create a random number generator with its own internal state. -""" + """Random() -> create a random number generator with its own internal state.""" + if sys.version_info >= (3, 10): def __init__(self, seed: object = ..., /) -> None: ... else: @@ -18,18 +18,18 @@ class Random: def seed(self, n: object = None, /) -> None: """seed([n]) -> None. -Defaults to use urandom and falls back to a combination -of the current time and the process identifier. -""" + Defaults to use urandom and falls back to a combination + of the current time and the process identifier. + """ + def getstate(self) -> _State: - """getstate() -> tuple containing the current state. -""" + """getstate() -> tuple containing the current state.""" + def setstate(self, state: _State, /) -> None: - """setstate(state) -> None. Restores generator state. -""" + """setstate(state) -> None. Restores generator state.""" + def random(self) -> float: - """random() -> x in the interval [0, 1). -""" + """random() -> x in the interval [0, 1).""" + def getrandbits(self, k: int, /) -> int: - """getrandbits(k) -> x. Generates an int with k random bits. -""" + """getrandbits(k) -> x. Generates an int with k random bits.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi index abd394f971041..98fa3d1ef8ffa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sitebuiltins.pyi @@ -1,6 +1,7 @@ """ The objects used by the site module to add custom builtins. """ + import sys from collections.abc import Iterable from typing import ClassVar, Literal, NoReturn @@ -13,8 +14,9 @@ class Quitter: class _Printer: """interactive prompt objects for printing the license text, a list of -contributors and the copyright notice. -""" + contributors and the copyright notice. + """ + MAXLINES: ClassVar[Literal[23]] def __init__(self, name: str, data: str, files: Iterable[str] = (), dirs: Iterable[str] = ()) -> None: ... def __call__(self) -> None: ... @@ -22,10 +24,11 @@ contributors and the copyright notice. 
class _Helper: """Define the builtin 'help'. -This is a wrapper around pydoc.help that provides a helpful message -when 'help' is typed at the Python interactive prompt. + This is a wrapper around pydoc.help that provides a helpful message + when 'help' is typed at the Python interactive prompt. + + Calling help() at the Python prompt starts an interactive help session. + Calling help(thing) prints help for the python object 'thing'. + """ -Calling help() at the Python prompt starts an interactive help session. -Calling help(thing) prints help for the python object 'thing'. -""" def __call__(self, request: object = ...) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi index 02c3cdab753f0..fd99c122d0100 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_socket.pyi @@ -2,6 +2,7 @@ See the socket module for documentation. """ + import sys from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterable @@ -738,67 +739,66 @@ if sys.platform != "win32" and sys.platform != "darwin": @disjoint_base class socket: """socket(family=AF_INET, type=SOCK_STREAM, proto=0) -> socket object -socket(family=-1, type=-1, proto=-1, fileno=None) -> socket object - -Open a socket of the given type. The family argument specifies the -address family; it defaults to AF_INET. The type argument specifies -whether this is a stream (SOCK_STREAM, this is the default) -or datagram (SOCK_DGRAM) socket. The protocol argument defaults to 0, -specifying the default protocol. Keyword arguments are accepted. -The socket is created as non-inheritable. - -When a fileno is passed in, family, type and proto are auto-detected, -unless they are explicitly set. - -A socket object represents one endpoint of a network connection. - -Methods of socket objects (keyword arguments not allowed): - -_accept() -- accept connection, returning new socket fd and client address -bind(addr) -- bind the socket to a local address -close() -- close the socket -connect(addr) -- connect the socket to a remote address -connect_ex(addr) -- connect, return an error code instead of an exception -dup() -- return a new socket fd duplicated from fileno() -fileno() -- return underlying file descriptor -getpeername() -- return remote address [*] -getsockname() -- return local address -getsockopt(level, optname[, buflen]) -- get socket options -gettimeout() -- return timeout or None -listen([n]) -- start listening for incoming connections -recv(buflen[, flags]) -- receive data -recv_into(buffer[, nbytes[, flags]]) -- receive data (into a buffer) -recvfrom(buflen[, flags]) -- receive data and sender's address -recvfrom_into(buffer[, nbytes, [, flags]) - -- receive data and sender's address (into a buffer) -sendall(data[, flags]) -- send all data -send(data[, flags]) -- send data, may not send all of it -sendto(data[, flags], addr) -- send data to a given address -setblocking(bool) -- set or clear the blocking I/O flag -getblocking() -- return True if socket is blocking, False if non-blocking -setsockopt(level, optname, value[, optlen]) -- set socket options -settimeout(None | float) -- set or clear the timeout -shutdown(how) -- shut down traffic in one or both directions - - [*] not available on all platforms! -""" + socket(family=-1, type=-1, proto=-1, fileno=None) -> socket object + + Open a socket of the given type. 
The family argument specifies the + address family; it defaults to AF_INET. The type argument specifies + whether this is a stream (SOCK_STREAM, this is the default) + or datagram (SOCK_DGRAM) socket. The protocol argument defaults to 0, + specifying the default protocol. Keyword arguments are accepted. + The socket is created as non-inheritable. + + When a fileno is passed in, family, type and proto are auto-detected, + unless they are explicitly set. + + A socket object represents one endpoint of a network connection. + + Methods of socket objects (keyword arguments not allowed): + + _accept() -- accept connection, returning new socket fd and client address + bind(addr) -- bind the socket to a local address + close() -- close the socket + connect(addr) -- connect the socket to a remote address + connect_ex(addr) -- connect, return an error code instead of an exception + dup() -- return a new socket fd duplicated from fileno() + fileno() -- return underlying file descriptor + getpeername() -- return remote address [*] + getsockname() -- return local address + getsockopt(level, optname[, buflen]) -- get socket options + gettimeout() -- return timeout or None + listen([n]) -- start listening for incoming connections + recv(buflen[, flags]) -- receive data + recv_into(buffer[, nbytes[, flags]]) -- receive data (into a buffer) + recvfrom(buflen[, flags]) -- receive data and sender's address + recvfrom_into(buffer[, nbytes, [, flags]) + -- receive data and sender's address (into a buffer) + sendall(data[, flags]) -- send all data + send(data[, flags]) -- send data, may not send all of it + sendto(data[, flags], addr) -- send data to a given address + setblocking(bool) -- set or clear the blocking I/O flag + getblocking() -- return True if socket is blocking, False if non-blocking + setsockopt(level, optname, value[, optlen]) -- set socket options + settimeout(None | float) -- set or clear the timeout + shutdown(how) -- shut down traffic in one or both directions + + [*] not available on all platforms! + """ + @property def family(self) -> int: - """the socket family -""" + """the socket family""" + @property def type(self) -> int: - """the socket type -""" + """the socket type""" + @property def proto(self) -> int: - """the socket protocol -""" + """the socket protocol""" # F811: "Redefinition of unused `timeout`" @property def timeout(self) -> float | None: # noqa: F811 - """the socket timeout -""" + """the socket timeout""" if sys.platform == "win32": def __init__( self, family: int = ..., type: int = ..., proto: int = ..., fileno: SupportsIndex | bytes | None = None @@ -809,209 +809,227 @@ shutdown(how) -- shut down traffic in one or both directions def bind(self, address: _Address, /) -> None: """bind(address) -Bind the socket to a local address. For IP sockets, the address is a -pair (host, port); the host must refer to the local host. For raw packet -sockets the address is a tuple (ifname, proto [,pkttype [,hatype [,addr]]]) -""" + Bind the socket to a local address. For IP sockets, the address is a + pair (host, port); the host must refer to the local host. For raw packet + sockets the address is a tuple (ifname, proto [,pkttype [,hatype [,addr]]]) + """ + def close(self) -> None: """close() -Close the socket. It cannot be used after this call. -""" + Close the socket. It cannot be used after this call. + """ + def connect(self, address: _Address, /) -> None: """connect(address) -Connect the socket to a remote address. For IP sockets, the address -is a pair (host, port). 
-""" + Connect the socket to a remote address. For IP sockets, the address + is a pair (host, port). + """ + def connect_ex(self, address: _Address, /) -> int: """connect_ex(address) -> errno -This is like connect(address), but returns an error code (the errno value) -instead of raising an exception when an error occurs. -""" + This is like connect(address), but returns an error code (the errno value) + instead of raising an exception when an error occurs. + """ + def detach(self) -> int: """detach() -Close the socket object without closing the underlying file descriptor. -The object cannot be used after this call, but the file descriptor -can be reused for other purposes. The file descriptor is returned. -""" + Close the socket object without closing the underlying file descriptor. + The object cannot be used after this call, but the file descriptor + can be reused for other purposes. The file descriptor is returned. + """ + def fileno(self) -> int: """fileno() -> integer -Return the integer file descriptor of the socket. -""" + Return the integer file descriptor of the socket. + """ + def getpeername(self) -> _RetAddress: """getpeername() -> address info -Return the address of the remote endpoint. For IP sockets, the address -info is a pair (hostaddr, port). -""" + Return the address of the remote endpoint. For IP sockets, the address + info is a pair (hostaddr, port). + """ + def getsockname(self) -> _RetAddress: """getsockname() -> address info -Return the address of the local endpoint. The format depends on the -address family. For IPv4 sockets, the address info is a pair -(hostaddr, port). For IPv6 sockets, the address info is a 4-tuple -(hostaddr, port, flowinfo, scope_id). -""" + Return the address of the local endpoint. The format depends on the + address family. For IPv4 sockets, the address info is a pair + (hostaddr, port). For IPv6 sockets, the address info is a 4-tuple + (hostaddr, port, flowinfo, scope_id). + """ + @overload def getsockopt(self, level: int, optname: int, /) -> int: """getsockopt(level, option[, buffersize]) -> value -Get a socket option. See the Unix manual for level and option. -If a nonzero buffersize argument is given, the return value is a -string of that length; otherwise it is an integer. -""" + Get a socket option. See the Unix manual for level and option. + If a nonzero buffersize argument is given, the return value is a + string of that length; otherwise it is an integer. + """ + @overload def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes: ... def getblocking(self) -> bool: """getblocking() -Returns True if socket is in blocking mode, or False if it -is in non-blocking mode. -""" + Returns True if socket is in blocking mode, or False if it + is in non-blocking mode. + """ + def gettimeout(self) -> float | None: """gettimeout() -> timeout -Returns the timeout in seconds (float) associated with socket -operations. A timeout of None indicates that timeouts on socket -operations are disabled. -""" + Returns the timeout in seconds (float) associated with socket + operations. A timeout of None indicates that timeouts on socket + operations are disabled. + """ if sys.platform == "win32": def ioctl(self, control: int, option: int | tuple[int, int, int] | bool, /) -> None: """ioctl(cmd, option) -> long -Control the socket with WSAIoctl syscall. Currently supported 'cmd' values are -SIO_RCVALL: 'option' must be one of the socket.RCVALL_* constants. -SIO_KEEPALIVE_VALS: 'option' is a tuple of (onoff, timeout, interval). 
-SIO_LOOPBACK_FAST_PATH: 'option' is a boolean value, and is disabled by default -""" + Control the socket with WSAIoctl syscall. Currently supported 'cmd' values are + SIO_RCVALL: 'option' must be one of the socket.RCVALL_* constants. + SIO_KEEPALIVE_VALS: 'option' is a tuple of (onoff, timeout, interval). + SIO_LOOPBACK_FAST_PATH: 'option' is a boolean value, and is disabled by default + """ def listen(self, backlog: int = ..., /) -> None: """listen([backlog]) -Enable a server to accept connections. If backlog is specified, it must be -at least 0 (if it is lower, it is set to 0); it specifies the number of -unaccepted connections that the system will allow before refusing new -connections. If not specified, a default reasonable value is chosen. -""" + Enable a server to accept connections. If backlog is specified, it must be + at least 0 (if it is lower, it is set to 0); it specifies the number of + unaccepted connections that the system will allow before refusing new + connections. If not specified, a default reasonable value is chosen. + """ + def recv(self, bufsize: int, flags: int = 0, /) -> bytes: """recv(buffersize[, flags]) -> data -Receive up to buffersize bytes from the socket. For the optional flags -argument, see the Unix manual. When no data is available, block until -at least one byte is available or until the remote end is closed. When -the remote end is closed and all data is read, return the empty string. -""" + Receive up to buffersize bytes from the socket. For the optional flags + argument, see the Unix manual. When no data is available, block until + at least one byte is available or until the remote end is closed. When + the remote end is closed and all data is read, return the empty string. + """ + def recvfrom(self, bufsize: int, flags: int = 0, /) -> tuple[bytes, _RetAddress]: """recvfrom(buffersize[, flags]) -> (data, address info) -Like recv(buffersize, flags) but also return the sender's address info. -""" + Like recv(buffersize, flags) but also return the sender's address info. + """ if sys.platform != "win32": def recvmsg(self, bufsize: int, ancbufsize: int = 0, flags: int = 0, /) -> tuple[bytes, list[_CMSG], int, Any]: """recvmsg(bufsize[, ancbufsize[, flags]]) -> (data, ancdata, msg_flags, address) -Receive normal data (up to bufsize bytes) and ancillary data from the -socket. The ancbufsize argument sets the size in bytes of the -internal buffer used to receive the ancillary data; it defaults to 0, -meaning that no ancillary data will be received. Appropriate buffer -sizes for ancillary data can be calculated using CMSG_SPACE() or -CMSG_LEN(), and items which do not fit into the buffer might be -truncated or discarded. The flags argument defaults to 0 and has the -same meaning as for recv(). - -The return value is a 4-tuple: (data, ancdata, msg_flags, address). -The data item is a bytes object holding the non-ancillary data -received. The ancdata item is a list of zero or more tuples -(cmsg_level, cmsg_type, cmsg_data) representing the ancillary data -(control messages) received: cmsg_level and cmsg_type are integers -specifying the protocol level and protocol-specific type respectively, -and cmsg_data is a bytes object holding the associated data. The -msg_flags item is the bitwise OR of various flags indicating -conditions on the received message; see your system documentation for -details. If the receiving socket is unconnected, address is the -address of the sending socket, if available; otherwise, its value is -unspecified. 
- -If recvmsg() raises an exception after the system call returns, it -will first attempt to close any file descriptors received via the -SCM_RIGHTS mechanism. -""" + Receive normal data (up to bufsize bytes) and ancillary data from the + socket. The ancbufsize argument sets the size in bytes of the + internal buffer used to receive the ancillary data; it defaults to 0, + meaning that no ancillary data will be received. Appropriate buffer + sizes for ancillary data can be calculated using CMSG_SPACE() or + CMSG_LEN(), and items which do not fit into the buffer might be + truncated or discarded. The flags argument defaults to 0 and has the + same meaning as for recv(). + + The return value is a 4-tuple: (data, ancdata, msg_flags, address). + The data item is a bytes object holding the non-ancillary data + received. The ancdata item is a list of zero or more tuples + (cmsg_level, cmsg_type, cmsg_data) representing the ancillary data + (control messages) received: cmsg_level and cmsg_type are integers + specifying the protocol level and protocol-specific type respectively, + and cmsg_data is a bytes object holding the associated data. The + msg_flags item is the bitwise OR of various flags indicating + conditions on the received message; see your system documentation for + details. If the receiving socket is unconnected, address is the + address of the sending socket, if available; otherwise, its value is + unspecified. + + If recvmsg() raises an exception after the system call returns, it + will first attempt to close any file descriptors received via the + SCM_RIGHTS mechanism. + """ + def recvmsg_into( self, buffers: Iterable[WriteableBuffer], ancbufsize: int = 0, flags: int = 0, / ) -> tuple[int, list[_CMSG], int, Any]: """recvmsg_into(buffers[, ancbufsize[, flags]]) -> (nbytes, ancdata, msg_flags, address) -Receive normal data and ancillary data from the socket, scattering the -non-ancillary data into a series of buffers. The buffers argument -must be an iterable of objects that export writable buffers -(e.g. bytearray objects); these will be filled with successive chunks -of the non-ancillary data until it has all been written or there are -no more buffers. The ancbufsize argument sets the size in bytes of -the internal buffer used to receive the ancillary data; it defaults to -0, meaning that no ancillary data will be received. Appropriate -buffer sizes for ancillary data can be calculated using CMSG_SPACE() -or CMSG_LEN(), and items which do not fit into the buffer might be -truncated or discarded. The flags argument defaults to 0 and has the -same meaning as for recv(). - -The return value is a 4-tuple: (nbytes, ancdata, msg_flags, address). -The nbytes item is the total number of bytes of non-ancillary data -written into the buffers. The ancdata item is a list of zero or more -tuples (cmsg_level, cmsg_type, cmsg_data) representing the ancillary -data (control messages) received: cmsg_level and cmsg_type are -integers specifying the protocol level and protocol-specific type -respectively, and cmsg_data is a bytes object holding the associated -data. The msg_flags item is the bitwise OR of various flags -indicating conditions on the received message; see your system -documentation for details. If the receiving socket is unconnected, -address is the address of the sending socket, if available; otherwise, -its value is unspecified. 
- -If recvmsg_into() raises an exception after the system call returns, -it will first attempt to close any file descriptors received via the -SCM_RIGHTS mechanism. -""" + Receive normal data and ancillary data from the socket, scattering the + non-ancillary data into a series of buffers. The buffers argument + must be an iterable of objects that export writable buffers + (e.g. bytearray objects); these will be filled with successive chunks + of the non-ancillary data until it has all been written or there are + no more buffers. The ancbufsize argument sets the size in bytes of + the internal buffer used to receive the ancillary data; it defaults to + 0, meaning that no ancillary data will be received. Appropriate + buffer sizes for ancillary data can be calculated using CMSG_SPACE() + or CMSG_LEN(), and items which do not fit into the buffer might be + truncated or discarded. The flags argument defaults to 0 and has the + same meaning as for recv(). + + The return value is a 4-tuple: (nbytes, ancdata, msg_flags, address). + The nbytes item is the total number of bytes of non-ancillary data + written into the buffers. The ancdata item is a list of zero or more + tuples (cmsg_level, cmsg_type, cmsg_data) representing the ancillary + data (control messages) received: cmsg_level and cmsg_type are + integers specifying the protocol level and protocol-specific type + respectively, and cmsg_data is a bytes object holding the associated + data. The msg_flags item is the bitwise OR of various flags + indicating conditions on the received message; see your system + documentation for details. If the receiving socket is unconnected, + address is the address of the sending socket, if available; otherwise, + its value is unspecified. + + If recvmsg_into() raises an exception after the system call returns, + it will first attempt to close any file descriptors received via the + SCM_RIGHTS mechanism. + """ def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> tuple[int, _RetAddress]: """recvfrom_into(buffer[, nbytes[, flags]]) -> (nbytes, address info) -Like recv_into(buffer[, nbytes[, flags]]) but also return the sender's address info. -""" + Like recv_into(buffer[, nbytes[, flags]]) but also return the sender's address info. + """ + def recv_into(self, buffer: WriteableBuffer, nbytes: int = 0, flags: int = 0) -> int: """recv_into(buffer, [nbytes[, flags]]) -> nbytes_read -A version of recv() that stores its data into a buffer rather than creating -a new string. Receive up to buffersize bytes from the socket. If buffersize -is not specified (or 0), receive up to the size available in the given buffer. + A version of recv() that stores its data into a buffer rather than creating + a new string. Receive up to buffersize bytes from the socket. If buffersize + is not specified (or 0), receive up to the size available in the given buffer. + + See recv() for documentation about the flags. + """ -See recv() for documentation about the flags. -""" def send(self, data: ReadableBuffer, flags: int = 0, /) -> int: """send(data[, flags]) -> count -Send a data string to the socket. For the optional flags -argument, see the Unix manual. Return the number of bytes -sent; this may be less than len(data) if the network is busy. -""" + Send a data string to the socket. For the optional flags + argument, see the Unix manual. Return the number of bytes + sent; this may be less than len(data) if the network is busy. 
+ """ + def sendall(self, data: ReadableBuffer, flags: int = 0, /) -> None: """sendall(data[, flags]) -Send a data string to the socket. For the optional flags -argument, see the Unix manual. This calls send() repeatedly -until all data is sent. If an error occurs, it's impossible -to tell how much data has been sent. -""" + Send a data string to the socket. For the optional flags + argument, see the Unix manual. This calls send() repeatedly + until all data is sent. If an error occurs, it's impossible + to tell how much data has been sent. + """ + @overload def sendto(self, data: ReadableBuffer, address: _Address, /) -> int: """sendto(data[, flags], address) -> count -Like send(data, flags) but allows specifying the destination address. -For IP sockets, the address is a pair (hostaddr, port). -""" + Like send(data, flags) but allows specifying the destination address. + For IP sockets, the address is a pair (hostaddr, port). + """ + @overload def sendto(self, data: ReadableBuffer, flags: int, address: _Address, /) -> int: ... if sys.platform != "win32": @@ -1025,73 +1043,76 @@ For IP sockets, the address is a pair (hostaddr, port). ) -> int: """sendmsg(buffers[, ancdata[, flags[, address]]]) -> count -Send normal and ancillary data to the socket, gathering the -non-ancillary data from a series of buffers and concatenating it into -a single message. The buffers argument specifies the non-ancillary -data as an iterable of bytes-like objects (e.g. bytes objects). -The ancdata argument specifies the ancillary data (control messages) -as an iterable of zero or more tuples (cmsg_level, cmsg_type, -cmsg_data), where cmsg_level and cmsg_type are integers specifying the -protocol level and protocol-specific type respectively, and cmsg_data -is a bytes-like object holding the associated data. The flags -argument defaults to 0 and has the same meaning as for send(). If -address is supplied and not None, it sets a destination address for -the message. The return value is the number of bytes of non-ancillary -data sent. -""" + Send normal and ancillary data to the socket, gathering the + non-ancillary data from a series of buffers and concatenating it into + a single message. The buffers argument specifies the non-ancillary + data as an iterable of bytes-like objects (e.g. bytes objects). + The ancdata argument specifies the ancillary data (control messages) + as an iterable of zero or more tuples (cmsg_level, cmsg_type, + cmsg_data), where cmsg_level and cmsg_type are integers specifying the + protocol level and protocol-specific type respectively, and cmsg_data + is a bytes-like object holding the associated data. The flags + argument defaults to 0 and has the same meaning as for send(). If + address is supplied and not None, it sets a destination address for + the message. The return value is the number of bytes of non-ancillary + data sent. + """ if sys.platform == "linux": def sendmsg_afalg( self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = 0 ) -> int: """sendmsg_afalg([msg], *, op[, iv[, assoclen[, flags=MSG_MORE]]]) -Set operation mode, IV and length of associated data for an AF_ALG -operation socket. -""" + Set operation mode, IV and length of associated data for an AF_ALG + operation socket. + """ def setblocking(self, flag: bool, /) -> None: """setblocking(flag) -Set the socket to blocking (flag is true) or non-blocking (false). -setblocking(True) is equivalent to settimeout(None); -setblocking(False) is equivalent to settimeout(0.0). 
-""" + Set the socket to blocking (flag is true) or non-blocking (false). + setblocking(True) is equivalent to settimeout(None); + setblocking(False) is equivalent to settimeout(0.0). + """ + def settimeout(self, value: float | None, /) -> None: """settimeout(timeout) -Set a timeout on socket operations. 'timeout' can be a float, -giving in seconds, or None. Setting a timeout of None disables -the timeout feature and is equivalent to setblocking(1). -Setting a timeout of zero is the same as setblocking(0). -""" + Set a timeout on socket operations. 'timeout' can be a float, + giving in seconds, or None. Setting a timeout of None disables + the timeout feature and is equivalent to setblocking(1). + Setting a timeout of zero is the same as setblocking(0). + """ + @overload def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer, /) -> None: """setsockopt(level, option, value: int) -setsockopt(level, option, value: buffer) -setsockopt(level, option, None, optlen: int) + setsockopt(level, option, value: buffer) + setsockopt(level, option, None, optlen: int) + + Set a socket option. See the Unix manual for level and option. + The value argument can either be an integer, a string buffer, or + None, optlen. + """ -Set a socket option. See the Unix manual for level and option. -The value argument can either be an integer, a string buffer, or -None, optlen. -""" @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None: ... if sys.platform == "win32": def share(self, process_id: int, /) -> bytes: """share(process_id) -> bytes -Share the socket with another process. The target process id -must be provided and the resulting bytes object passed to the target -process. There the shared socket can be instantiated by calling -socket.fromshare(). -""" + Share the socket with another process. The target process id + must be provided and the resulting bytes object passed to the target + process. There the shared socket can be instantiated by calling + socket.fromshare(). + """ def shutdown(self, how: int, /) -> None: """shutdown(flag) -Shut down the reading side of the socket (flag == SHUT_RD), the writing side -of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR). -""" + Shut down the reading side of the socket (flag == SHUT_RD), the writing side + of the socket (flag == SHUT_WR), or both ends (flag == SHUT_RDWR). + """ SocketType = socket @@ -1100,172 +1121,186 @@ SocketType = socket def close(fd: SupportsIndex, /) -> None: """close(integer) -> None -Close an integer socket file descriptor. This is like os.close(), but for -sockets; on some platforms os.close() won't work for socket file descriptors. -""" + Close an integer socket file descriptor. This is like os.close(), but for + sockets; on some platforms os.close() won't work for socket file descriptors. + """ + def dup(fd: SupportsIndex, /) -> int: """dup(integer) -> integer -Duplicate an integer socket file descriptor. This is like os.dup(), but for -sockets; on some platforms os.dup() won't work for socket file descriptors. -""" + Duplicate an integer socket file descriptor. This is like os.dup(), but for + sockets; on some platforms os.dup() won't work for socket file descriptors. 
+ """ # the 5th tuple item is an address def getaddrinfo( host: bytes | str | None, port: bytes | str | int | None, family: int = ..., type: int = 0, proto: int = 0, flags: int = 0 ) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: """getaddrinfo(host, port [, family, type, proto, flags]) - -> list of (family, type, proto, canonname, sockaddr) + -> list of (family, type, proto, canonname, sockaddr) + + Resolve host and port into addrinfo struct. + """ -Resolve host and port into addrinfo struct. -""" def gethostbyname(hostname: str, /) -> str: """gethostbyname(host) -> address -Return the IP address (a string of the form '255.255.255.255') for a host. -""" + Return the IP address (a string of the form '255.255.255.255') for a host. + """ + def gethostbyname_ex(hostname: str, /) -> tuple[str, list[str], list[str]]: """gethostbyname_ex(host) -> (name, aliaslist, addresslist) -Return the true host name, a list of aliases, and a list of IP addresses, -for a host. The host argument is a string giving a host name or IP number. -""" + Return the true host name, a list of aliases, and a list of IP addresses, + for a host. The host argument is a string giving a host name or IP number. + """ + def gethostname() -> str: """gethostname() -> string -Return the current host name. -""" + Return the current host name. + """ + def gethostbyaddr(ip_address: str, /) -> tuple[str, list[str], list[str]]: """gethostbyaddr(host) -> (name, aliaslist, addresslist) -Return the true host name, a list of aliases, and a list of IP addresses, -for a host. The host argument is a string giving a host name or IP number. -""" + Return the true host name, a list of aliases, and a list of IP addresses, + for a host. The host argument is a string giving a host name or IP number. + """ + def getnameinfo(sockaddr: tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], flags: int, /) -> tuple[str, str]: """getnameinfo(sockaddr, flags) --> (host, port) -Get host and port for a sockaddr. -""" + Get host and port for a sockaddr. + """ + def getprotobyname(protocolname: str, /) -> int: """getprotobyname(name) -> integer -Return the protocol number for the named protocol. (Rarely used.) -""" + Return the protocol number for the named protocol. (Rarely used.) + """ + def getservbyname(servicename: str, protocolname: str = ..., /) -> int: """getservbyname(servicename[, protocolname]) -> integer -Return a port number from a service name and protocol name. -The optional protocol name, if given, should be 'tcp' or 'udp', -otherwise any protocol will match. -""" + Return a port number from a service name and protocol name. + The optional protocol name, if given, should be 'tcp' or 'udp', + otherwise any protocol will match. + """ + def getservbyport(port: int, protocolname: str = ..., /) -> str: """getservbyport(port[, protocolname]) -> string -Return the service name from a port number and protocol name. -The optional protocol name, if given, should be 'tcp' or 'udp', -otherwise any protocol will match. -""" + Return the service name from a port number and protocol name. + The optional protocol name, if given, should be 'tcp' or 'udp', + otherwise any protocol will match. + """ + def ntohl(x: int, /) -> int: # param & ret val are 32-bit ints - """Convert a 32-bit unsigned integer from network to host byte order. 
-""" + """Convert a 32-bit unsigned integer from network to host byte order.""" + def ntohs(x: int, /) -> int: # param & ret val are 16-bit ints - """Convert a 16-bit unsigned integer from network to host byte order. -""" + """Convert a 16-bit unsigned integer from network to host byte order.""" + def htonl(x: int, /) -> int: # param & ret val are 32-bit ints - """Convert a 32-bit unsigned integer from host to network byte order. -""" + """Convert a 32-bit unsigned integer from host to network byte order.""" + def htons(x: int, /) -> int: # param & ret val are 16-bit ints - """Convert a 16-bit unsigned integer from host to network byte order. -""" + """Convert a 16-bit unsigned integer from host to network byte order.""" + def inet_aton(ip_addr: str, /) -> bytes: # ret val 4 bytes in length - """Convert an IP address in string format (123.45.67.89) to the 32-bit packed binary format used in low-level network functions. -""" + """Convert an IP address in string format (123.45.67.89) to the 32-bit packed binary format used in low-level network functions.""" + def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: - """Convert an IP address from 32-bit packed binary format to string format. -""" + """Convert an IP address from 32-bit packed binary format to string format.""" + def inet_pton(address_family: int, ip_string: str, /) -> bytes: """inet_pton(af, ip) -> packed IP address string -Convert an IP address from string format to a packed string suitable -for use with low-level network functions. -""" + Convert an IP address from string format to a packed string suitable + for use with low-level network functions. + """ + def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: """inet_ntop(af, packed_ip) -> string formatted IP address -Convert a packed IP address of the given family to string format. -""" + Convert a packed IP address of the given family to string format. + """ + def getdefaulttimeout() -> float | None: """getdefaulttimeout() -> timeout -Returns the default timeout in seconds (float) for new socket objects. -A value of None indicates that new socket objects have no timeout. -When the socket module is first imported, the default is None. -""" + Returns the default timeout in seconds (float) for new socket objects. + A value of None indicates that new socket objects have no timeout. + When the socket module is first imported, the default is None. + """ # F811: "Redefinition of unused `timeout`" def setdefaulttimeout(timeout: float | None, /) -> None: # noqa: F811 """setdefaulttimeout(timeout) -Set the default timeout in seconds (float) for new socket objects. -A value of None indicates that new socket objects have no timeout. -When the socket module is first imported, the default is None. -""" + Set the default timeout in seconds (float) for new socket objects. + A value of None indicates that new socket objects have no timeout. + When the socket module is first imported, the default is None. + """ if sys.platform != "win32": def sethostname(name: str, /) -> None: """sethostname(name) -Sets the hostname to name. -""" + Sets the hostname to name. + """ + def CMSG_LEN(length: int, /) -> int: """CMSG_LEN(length) -> control message length -Return the total length, without trailing padding, of an ancillary -data item with associated data of the given length. 
This value can -often be used as the buffer size for recvmsg() to receive a single -item of ancillary data, but RFC 3542 requires portable applications to -use CMSG_SPACE() and thus include space for padding, even when the -item will be the last in the buffer. Raises OverflowError if length -is outside the permissible range of values. -""" + Return the total length, without trailing padding, of an ancillary + data item with associated data of the given length. This value can + often be used as the buffer size for recvmsg() to receive a single + item of ancillary data, but RFC 3542 requires portable applications to + use CMSG_SPACE() and thus include space for padding, even when the + item will be the last in the buffer. Raises OverflowError if length + is outside the permissible range of values. + """ + def CMSG_SPACE(length: int, /) -> int: """CMSG_SPACE(length) -> buffer size -Return the buffer size needed for recvmsg() to receive an ancillary -data item with associated data of the given length, along with any -trailing padding. The buffer space needed to receive multiple items -is the sum of the CMSG_SPACE() values for their associated data -lengths. Raises OverflowError if length is outside the permissible -range of values. -""" + Return the buffer size needed for recvmsg() to receive an ancillary + data item with associated data of the given length, along with any + trailing padding. The buffer space needed to receive multiple items + is the sum of the CMSG_SPACE() values for their associated data + lengths. Raises OverflowError if length is outside the permissible + range of values. + """ + def socketpair(family: int = ..., type: int = ..., proto: int = 0, /) -> tuple[socket, socket]: """socketpair([family[, type [, proto]]]) -> (socket object, socket object) -Create a pair of socket objects from the sockets returned by the platform -socketpair() function. -The arguments are the same as for socket() except the default family is -AF_UNIX if defined on the platform; otherwise, the default is AF_INET. -""" + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is + AF_UNIX if defined on the platform; otherwise, the default is AF_INET. + """ def if_nameindex() -> list[tuple[int, str]]: """if_nameindex() -Returns a list of network interface information (index, name) tuples. -""" + Returns a list of network interface information (index, name) tuples. + """ + def if_nametoindex(oname: str, /) -> int: - """Returns the interface index corresponding to the interface name if_name. -""" + """Returns the interface index corresponding to the interface name if_name.""" if sys.version_info >= (3, 14): def if_indextoname(if_index: int, /) -> str: - """Returns the interface name corresponding to the interface index if_index. -""" + """Returns the interface name corresponding to the interface index if_index.""" else: def if_indextoname(index: int, /) -> str: """if_indextoname(if_index) -Returns the interface name corresponding to the interface index if_index. -""" + Returns the interface name corresponding to the interface index if_index. 
+ """ CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi index 67b46423aa359..bae33a446d2a3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_sqlite3.pyi @@ -216,13 +216,12 @@ if sys.version_info >= (3, 11): # Can take or return anything depending on what's in the registry. @overload def adapt(obj: Any, proto: Any, /) -> Any: - """Adapt given object to given protocol. -""" + """Adapt given object to given protocol.""" + @overload def adapt(obj: Any, proto: Any, alt: _T, /) -> Any | _T: ... def complete_statement(statement: str) -> bool: - """Checks if a string contains a complete SQL statement. -""" + """Checks if a string contains a complete SQL statement.""" if sys.version_info >= (3, 12): @overload @@ -239,14 +238,15 @@ if sys.version_info >= (3, 12): ) -> Connection: """Open a connection to the SQLite database file 'database'. -You can use ":memory:" to open a database connection to a database that -resides in RAM instead of on disk. + You can use ":memory:" to open a database connection to a database that + resides in RAM instead of on disk. + + Note: Passing more than 1 positional argument to _sqlite3.connect() is + deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', + 'check_same_thread', 'factory', 'cached_statements' and 'uri' will + become keyword-only parameters in Python 3.15. + """ -Note: Passing more than 1 positional argument to _sqlite3.connect() is -deprecated. Parameters 'timeout', 'detect_types', 'isolation_level', -'check_same_thread', 'factory', 'cached_statements' and 'uri' will -become keyword-only parameters in Python 3.15. -""" @overload def connect( database: StrOrBytesPath, @@ -287,9 +287,10 @@ else: ) -> Connection: """Opens a connection to the SQLite database file database. -You can use ":memory:" to open a database connection to a database that resides -in RAM instead of on disk. -""" + You can use ":memory:" to open a database connection to a database that resides + in RAM instead of on disk. + """ + @overload def connect( database: StrOrBytesPath, @@ -315,39 +316,38 @@ in RAM instead of on disk. ) -> _ConnectionT: ... def enable_callback_tracebacks(enable: bool, /) -> None: - """Enable or disable callback functions throwing errors to stderr. -""" + """Enable or disable callback functions throwing errors to stderr.""" if sys.version_info < (3, 12): # takes a pos-or-keyword argument because there is a C wrapper def enable_shared_cache(do_enable: int) -> None: """Enable or disable shared cache mode for the calling thread. -This method is deprecated and will be removed in Python 3.12. -Shared cache is strongly discouraged by the SQLite 3 documentation. -If shared cache must be used, open the database in URI mode using -the cache=shared query parameter. -""" + This method is deprecated and will be removed in Python 3.12. + Shared cache is strongly discouraged by the SQLite 3 documentation. + If shared cache must be used, open the database in URI mode using + the cache=shared query parameter. + """ if sys.version_info >= (3, 10): def register_adapter(type: type[_T], adapter: _Adapter[_T], /) -> None: - """Register a function to adapt Python objects to SQLite values. -""" + """Register a function to adapt Python objects to SQLite values.""" + def register_converter(typename: str, converter: _Converter, /) -> None: - """Register a function to convert SQLite values to Python objects. 
-""" + """Register a function to convert SQLite values to Python objects.""" else: def register_adapter(type: type[_T], caster: _Adapter[_T], /) -> None: """register_adapter(type, callable) -Registers an adapter with sqlite3's adapter registry. -""" + Registers an adapter with sqlite3's adapter registry. + """ + def register_converter(name: str, converter: _Converter, /) -> None: """register_converter(typename, callable) -Registers a converter with sqlite3. -""" + Registers a converter with sqlite3. + """ if sys.version_info < (3, 10): OptimizedUnicode = str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi index f3058f390057c..ca8fe20333eae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_ssl.pyi @@ -1,6 +1,7 @@ """Implementation module for SSL socket operations. See the socket module for documentation. """ + import sys from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable @@ -53,21 +54,21 @@ class _CertInfo(TypedDict): def RAND_add(string: str | ReadableBuffer, entropy: float, /) -> None: """Mix string into the OpenSSL PRNG state. -entropy (a float) is a lower bound on the entropy contained in -string. See RFC 4086. -""" + entropy (a float) is a lower bound on the entropy contained in + string. See RFC 4086. + """ + def RAND_bytes(n: int, /) -> bytes: - """Generate n cryptographically strong pseudo-random bytes. -""" + """Generate n cryptographically strong pseudo-random bytes.""" if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.6; removed in Python 3.12. Use `ssl.RAND_bytes()` instead.") def RAND_pseudo_bytes(n: int, /) -> tuple[bytes, bool]: """Generate n pseudo-random bytes. -Return a pair (bytes, is_cryptographic). is_cryptographic is True -if the bytes generated are cryptographically strong. -""" + Return a pair (bytes, is_cryptographic). is_cryptographic is True + if the bytes generated are cryptographically strong. + """ if sys.version_info < (3, 10): def RAND_egd(path: str) -> None: ... @@ -75,44 +76,47 @@ if sys.version_info < (3, 10): def RAND_status() -> bool: """Returns True if the OpenSSL PRNG has been seeded with enough data and False if not. -It is necessary to seed the PRNG with RAND_add() on some platforms before -using the ssl() function. -""" + It is necessary to seed the PRNG with RAND_add() on some platforms before + using the ssl() function. + """ + def get_default_verify_paths() -> tuple[str, str, str, str]: """Return search paths and environment vars that are used by SSLContext's set_default_verify_paths() to load default CAs. -The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. -""" + The values are 'cert_file_env', 'cert_file', 'cert_dir_env', 'cert_dir'. + """ if sys.platform == "win32": _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] def enum_certificates(store_name: str) -> _EnumRetType: """Retrieve certificates from Windows' cert store. -store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide -more cert storages, too. The function returns a list of (bytes, -encoding_type, trust) tuples. The encoding_type flag can be interpreted -with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either -a set of OIDs or the boolean True. -""" + store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide + more cert storages, too. The function returns a list of (bytes, + encoding_type, trust) tuples. 
The encoding_type flag can be interpreted + with X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. The trust setting is either + a set of OIDs or the boolean True. + """ + def enum_crls(store_name: str) -> _EnumRetType: """Retrieve CRLs from Windows' cert store. -store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide -more cert storages, too. The function returns a list of (bytes, -encoding_type) tuples. The encoding_type flag can be interpreted with -X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. -""" + store_name may be one of 'CA', 'ROOT' or 'MY'. The system may provide + more cert storages, too. The function returns a list of (bytes, + encoding_type) tuples. The encoding_type flag can be interpreted with + X509_ASN_ENCODING or PKCS_7_ASN_ENCODING. + """ def txt2obj(txt: str, name: bool = False) -> tuple[int, str, str, str]: """Lookup NID, short name, long name and OID of an ASN1_OBJECT. -By default objects are looked up by OID. With name=True short and -long name are also matched. -""" + By default objects are looked up by OID. With name=True short and + long name are also matched. + """ + def nid2obj(nid: int, /) -> tuple[int, str, str, str]: - """Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID. -""" + """Lookup NID, short name, long name and OID of an ASN1_OBJECT by NID.""" + @disjoint_base class _SSLContext: check_hostname: bool @@ -132,22 +136,24 @@ class _SSLContext: def cert_store_stats(self) -> dict[str, int]: """Returns quantities of loaded X.509 certificates. -X.509 certificates with a CA extension and certificate revocation lists -inside the context's cert store. + X.509 certificates with a CA extension and certificate revocation lists + inside the context's cert store. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ -NOTE: Certificates in a capath directory aren't loaded unless they have -been used at least once. -""" @overload def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: """Returns a list of dicts with information of loaded CA certs. -If the optional argument is True, returns a DER-encoded copy of the CA -certificate. + If the optional argument is True, returns a DER-encoded copy of the CA + certificate. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ -NOTE: Certificates in a capath directory aren't loaded unless they have -been used at least once. -""" @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -181,45 +187,46 @@ class MemoryBIO: def read(self, size: int = -1, /) -> bytes: """Read up to size bytes from the memory BIO. -If size is not specified, read the entire buffer. -If the return value is an empty bytes instance, this means either -EOF or that no data is available. Use the "eof" property to -distinguish between the two. -""" + If size is not specified, read the entire buffer. + If the return value is an empty bytes instance, this means either + EOF or that no data is available. Use the "eof" property to + distinguish between the two. + """ + def write(self, b: ReadableBuffer, /) -> int: """Writes the bytes b into the memory BIO. -Returns the number of bytes written. -""" + Returns the number of bytes written. + """ + def write_eof(self) -> None: """Write an EOF marker to the memory BIO. -When all data has been read, the "eof" property will be True. -""" + When all data has been read, the "eof" property will be True. 
+ """ @final class SSLSession: __hash__: ClassVar[None] # type: ignore[assignment] @property def has_ticket(self) -> bool: - """Does the session contain a ticket? -""" + """Does the session contain a ticket?""" + @property def id(self) -> bytes: - """Session ID. -""" + """Session ID.""" + @property def ticket_lifetime_hint(self) -> int: - """Ticket life time hint. -""" + """Ticket life time hint.""" + @property def time(self) -> int: - """Session creation time (seconds since epoch). -""" + """Session creation time (seconds since epoch).""" + @property def timeout(self) -> int: - """Session timeout (delta in seconds). -""" + """Session timeout (delta in seconds).""" # _ssl.Certificate is weird: it can't be instantiated or subclassed. # Instances can only be created via methods of the private _ssl._SSLSocket class, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi index 2688e78a9c8cc..44de15e745e92 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_stat.pyi @@ -68,6 +68,7 @@ ST_CTIME FILE_ATTRIBUTE_*: Windows file attribute constants (only present on Windows) """ + import sys from typing import Final @@ -135,64 +136,73 @@ UF_NOUNLINK: Final = 0x00000010 UF_OPAQUE: Final = 0x00000008 def S_IMODE(mode: int, /) -> int: - """Return the portion of the file's mode that can be set by os.chmod(). -""" + """Return the portion of the file's mode that can be set by os.chmod().""" + def S_IFMT(mode: int, /) -> int: - """Return the portion of the file's mode that describes the file type. -""" + """Return the portion of the file's mode that describes the file type.""" + def S_ISBLK(mode: int, /) -> bool: """S_ISBLK(mode) -> bool -Return True if mode is from a block special device file. -""" + Return True if mode is from a block special device file. + """ + def S_ISCHR(mode: int, /) -> bool: """S_ISCHR(mode) -> bool -Return True if mode is from a character special device file. -""" + Return True if mode is from a character special device file. + """ + def S_ISDIR(mode: int, /) -> bool: """S_ISDIR(mode) -> bool -Return True if mode is from a directory. -""" + Return True if mode is from a directory. + """ + def S_ISDOOR(mode: int, /) -> bool: """S_ISDOOR(mode) -> bool -Return True if mode is from a door. -""" + Return True if mode is from a door. + """ + def S_ISFIFO(mode: int, /) -> bool: """S_ISFIFO(mode) -> bool -Return True if mode is from a FIFO (named pipe). -""" + Return True if mode is from a FIFO (named pipe). + """ + def S_ISLNK(mode: int, /) -> bool: """S_ISLNK(mode) -> bool -Return True if mode is from a symbolic link. -""" + Return True if mode is from a symbolic link. + """ + def S_ISPORT(mode: int, /) -> bool: """S_ISPORT(mode) -> bool -Return True if mode is from an event port. -""" + Return True if mode is from an event port. + """ + def S_ISREG(mode: int, /) -> bool: """S_ISREG(mode) -> bool -Return True if mode is from a regular file. -""" + Return True if mode is from a regular file. + """ + def S_ISSOCK(mode: int, /) -> bool: """S_ISSOCK(mode) -> bool -Return True if mode is from a socket. -""" + Return True if mode is from a socket. + """ + def S_ISWHT(mode: int, /) -> bool: """S_ISWHT(mode) -> bool -Return True if mode is from a whiteout. -""" + Return True if mode is from a whiteout. 
+ """ + def filemode(mode: int, /) -> str: - """Convert a file's mode to a string of the form '-rwxrwxrwx' -""" + """Convert a file's mode to a string of the form '-rwxrwxrwx'""" if sys.platform == "win32": IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi index 8b18d34a85c49..801e999a08089 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_struct.pyi @@ -27,6 +27,7 @@ Whitespace between formats is ignored. The variable struct.error is an exception raised on errors. """ + from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterator from typing import Any @@ -35,94 +36,102 @@ from typing_extensions import disjoint_base def pack(fmt: str | bytes, /, *v: Any) -> bytes: """pack(format, v1, v2, ...) -> bytes -Return a bytes object containing the values v1, v2, ... packed according -to the format string. See help(struct) for more on format strings. -""" + Return a bytes object containing the values v1, v2, ... packed according + to the format string. See help(struct) for more on format strings. + """ + def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: """pack_into(format, buffer, offset, v1, v2, ...) -Pack the values v1, v2, ... according to the format string and write -the packed bytes into the writable buffer buf starting at offset. Note -that the offset is a required argument. See help(struct) for more -on format strings. -""" + Pack the values v1, v2, ... according to the format string and write + the packed bytes into the writable buffer buf starting at offset. Note + that the offset is a required argument. See help(struct) for more + on format strings. + """ + def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: """Return a tuple containing values unpacked according to the format string. -The buffer's size in bytes must be calcsize(format). + The buffer's size in bytes must be calcsize(format). + + See help(struct) for more on format strings. + """ -See help(struct) for more on format strings. -""" def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: """Return a tuple containing values unpacked according to the format string. -The buffer's size, minus offset, must be at least calcsize(format). + The buffer's size, minus offset, must be at least calcsize(format). + + See help(struct) for more on format strings. + """ -See help(struct) for more on format strings. -""" def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: """Return an iterator yielding tuples unpacked from the given bytes. -The bytes are unpacked according to the format string, like -a repeated invocation of unpack_from(). + The bytes are unpacked according to the format string, like + a repeated invocation of unpack_from(). + + Requires that the bytes length be a multiple of the format struct size. + """ -Requires that the bytes length be a multiple of the format struct size. -""" def calcsize(format: str | bytes, /) -> int: - """Return size in bytes of the struct described by the format string. 
-""" + """Return size in bytes of the struct described by the format string.""" + @disjoint_base class Struct: - """Struct(fmt) --> compiled struct object + """Struct(fmt) --> compiled struct object""" -""" @property def format(self) -> str: - """struct format string -""" + """struct format string""" + @property def size(self) -> int: - """struct size in bytes -""" + """struct size in bytes""" + def __init__(self, format: str | bytes) -> None: ... def pack(self, *v: Any) -> bytes: """S.pack(v1, v2, ...) -> bytes -Return a bytes object containing values v1, v2, ... packed according -to the format string S.format. See help(struct) for more on format -strings. -""" + Return a bytes object containing values v1, v2, ... packed according + to the format string S.format. See help(struct) for more on format + strings. + """ + def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: """S.pack_into(buffer, offset, v1, v2, ...) -Pack the values v1, v2, ... according to the format string S.format -and write the packed bytes into the writable buffer buf starting at -offset. Note that the offset is a required argument. See -help(struct) for more on format strings. -""" + Pack the values v1, v2, ... according to the format string S.format + and write the packed bytes into the writable buffer buf starting at + offset. Note that the offset is a required argument. See + help(struct) for more on format strings. + """ + def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: """Return a tuple containing unpacked values. -Unpack according to the format string Struct.format. The buffer's size -in bytes must be Struct.size. + Unpack according to the format string Struct.format. The buffer's size + in bytes must be Struct.size. + + See help(struct) for more on format strings. + """ -See help(struct) for more on format strings. -""" def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: """Return a tuple containing unpacked values. -Values are unpacked according to the format string Struct.format. + Values are unpacked according to the format string Struct.format. -The buffer's size in bytes, starting at position offset, must be -at least Struct.size. + The buffer's size in bytes, starting at position offset, must be + at least Struct.size. + + See help(struct) for more on format strings. + """ -See help(struct) for more on format strings. -""" def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: """Return an iterator yielding tuples. -Tuples are unpacked from the given bytes source, like a repeated -invocation of unpack_from(). + Tuples are unpacked from the given bytes source, like a repeated + invocation of unpack_from(). -Requires that the bytes length be a multiple of the struct size. -""" + Requires that the bytes length be a multiple of the struct size. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi index 32b186fbf488b..8b63cc7ca8bc1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_thread.pyi @@ -1,6 +1,7 @@ """This module provides primitive operations to write multi-threaded programs. The 'threading' module provides a more convenient interface. """ + import signal import sys from _typeshed import structseq @@ -16,49 +17,50 @@ error = RuntimeError def _count() -> int: """Return the number of currently running Python threads, excluding -the main thread. 
The returned number comprises all threads created -through `start_new_thread()` as well as `threading.Thread`, and not -yet finished. + the main thread. The returned number comprises all threads created + through `start_new_thread()` as well as `threading.Thread`, and not + yet finished. + + This function is meant for internal and specialized purposes only. + In most applications `threading.enumerate()` should be used instead. + """ -This function is meant for internal and specialized purposes only. -In most applications `threading.enumerate()` should be used instead. -""" @final class RLock: def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: """Lock the lock. `blocking` indicates whether we should wait -for the lock to be available or not. If `blocking` is False -and another thread holds the lock, the method will return False -immediately. If `blocking` is True and another thread holds -the lock, the method will wait for the lock to be released, -take it and then return True. -(note: the blocking operation is interruptible.) - -In all other cases, the method will return True immediately. -Precisely, if the current thread already holds the lock, its -internal counter is simply incremented. If nobody holds the lock, -the lock is taken and its internal counter initialized to 1. -""" + for the lock to be available or not. If `blocking` is False + and another thread holds the lock, the method will return False + immediately. If `blocking` is True and another thread holds + the lock, the method will wait for the lock to be released, + take it and then return True. + (note: the blocking operation is interruptible.) + + In all other cases, the method will return True immediately. + Precisely, if the current thread already holds the lock, its + internal counter is simply incremented. If nobody holds the lock, + the lock is taken and its internal counter initialized to 1. + """ + def release(self) -> None: """Release the lock, allowing another thread that is blocked waiting for -the lock to acquire the lock. The lock must be in the locked state, -and must be locked by the same thread that unlocks it; otherwise a -`RuntimeError` is raised. - -Do note that if the lock was acquire()d several times in a row by the -current thread, release() needs to be called as many times for the lock -to be available for other threads. -""" + the lock to acquire the lock. The lock must be in the locked state, + and must be locked by the same thread that unlocks it; otherwise a + `RuntimeError` is raised. + + Do note that if the lock was acquire()d several times in a row by the + current thread, release() needs to be called as many times for the lock + to be available for other threads. + """ __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: - """Release the lock. -""" + """Release the lock.""" if sys.version_info >= (3, 14): def locked(self) -> bool: """locked() -Return a boolean indicating whether this object is locked right now. -""" + Return a boolean indicating whether this object is locked right now. + """ if sys.version_info >= (3, 13): @final @@ -74,280 +76,295 @@ if sys.version_info >= (3, 13): ) -> _ThreadHandle: """*For internal use only*: start a new thread. -Like start_new_thread(), this starts a new thread calling the given function. -Unlike start_new_thread(), this returns a handle object with methods to join -or detach the given thread. 
-This function is not for third-party code, please use the -`threading` module instead. During finalization the runtime will not wait for -the thread to exit if daemon is True. If handle is provided it must be a -newly created thread._ThreadHandle instance. -""" + Like start_new_thread(), this starts a new thread calling the given function. + Unlike start_new_thread(), this returns a handle object with methods to join + or detach the given thread. + This function is not for third-party code, please use the + `threading` module instead. During finalization the runtime will not wait for + the thread to exit if daemon is True. If handle is provided it must be a + newly created thread._ThreadHandle instance. + """ + @final class lock: """A lock object is a synchronization primitive. To create a lock, -call threading.Lock(). Methods are: + call threading.Lock(). Methods are: -acquire() -- lock the lock, possibly blocking until it can be obtained -release() -- unlock of the lock -locked() -- test whether the lock is currently locked + acquire() -- lock the lock, possibly blocking until it can be obtained + release() -- unlock of the lock + locked() -- test whether the lock is currently locked + + A lock is not owned by the thread that locked it; another thread may + unlock it. A thread attempting to lock a lock that it has already locked + will block until another thread unlocks it. Deadlocks may ensue. + """ -A lock is not owned by the thread that locked it; another thread may -unlock it. A thread attempting to lock a lock that it has already locked -will block until another thread unlocks it. Deadlocks may ensue. -""" def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: """Lock the lock. Without argument, this blocks if the lock is already -locked (even by the same thread), waiting for another thread to release -the lock, and return True once the lock is acquired. -With an argument, this will only block if the argument is true, -and the return value reflects whether the lock is acquired. -The blocking operation is interruptible. -""" + locked (even by the same thread), waiting for another thread to release + the lock, and return True once the lock is acquired. + With an argument, this will only block if the argument is true, + and the return value reflects whether the lock is acquired. + The blocking operation is interruptible. + """ + def release(self) -> None: """Release the lock, allowing another thread that is blocked waiting for -the lock to acquire the lock. The lock must be in the locked state, -but it needn't be locked by the same thread that unlocks it. -""" + the lock to acquire the lock. The lock must be in the locked state, + but it needn't be locked by the same thread that unlocks it. + """ + def locked(self) -> bool: - """Return whether the lock is in the locked state. -""" + """Return whether the lock is in the locked state.""" + def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: - """An obsolete synonym of acquire(). -""" + """An obsolete synonym of acquire().""" + def release_lock(self) -> None: - """An obsolete synonym of release(). -""" + """An obsolete synonym of release().""" + def locked_lock(self) -> bool: - """An obsolete synonym of locked(). -""" + """An obsolete synonym of locked().""" + def __enter__(self) -> bool: - """Lock the lock. -""" + """Lock the lock.""" + def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: - """Release the lock. 
-""" + """Release the lock.""" LockType = lock else: @final class LockType: """A lock object is a synchronization primitive. To create a lock, -call threading.Lock(). Methods are: + call threading.Lock(). Methods are: -acquire() -- lock the lock, possibly blocking until it can be obtained -release() -- unlock of the lock -locked() -- test whether the lock is currently locked + acquire() -- lock the lock, possibly blocking until it can be obtained + release() -- unlock of the lock + locked() -- test whether the lock is currently locked + + A lock is not owned by the thread that locked it; another thread may + unlock it. A thread attempting to lock a lock that it has already locked + will block until another thread unlocks it. Deadlocks may ensue. + """ -A lock is not owned by the thread that locked it; another thread may -unlock it. A thread attempting to lock a lock that it has already locked -will block until another thread unlocks it. Deadlocks may ensue. -""" def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: """acquire(blocking=True, timeout=-1) -> bool -(acquire_lock() is an obsolete synonym) - -Lock the lock. Without argument, this blocks if the lock is already -locked (even by the same thread), waiting for another thread to release -the lock, and return True once the lock is acquired. -With an argument, this will only block if the argument is true, -and the return value reflects whether the lock is acquired. -The blocking operation is interruptible. -""" + (acquire_lock() is an obsolete synonym) + + Lock the lock. Without argument, this blocks if the lock is already + locked (even by the same thread), waiting for another thread to release + the lock, and return True once the lock is acquired. + With an argument, this will only block if the argument is true, + and the return value reflects whether the lock is acquired. + The blocking operation is interruptible. + """ + def release(self) -> None: """release() -(release_lock() is an obsolete synonym) + (release_lock() is an obsolete synonym) + + Release the lock, allowing another thread that is blocked waiting for + the lock to acquire the lock. The lock must be in the locked state, + but it needn't be locked by the same thread that unlocks it. + """ -Release the lock, allowing another thread that is blocked waiting for -the lock to acquire the lock. The lock must be in the locked state, -but it needn't be locked by the same thread that unlocks it. -""" def locked(self) -> bool: """locked() -> bool -(locked_lock() is an obsolete synonym) + (locked_lock() is an obsolete synonym) + + Return whether the lock is in the locked state. + """ -Return whether the lock is in the locked state. -""" def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: """acquire(blocking=True, timeout=-1) -> bool -(acquire_lock() is an obsolete synonym) - -Lock the lock. Without argument, this blocks if the lock is already -locked (even by the same thread), waiting for another thread to release -the lock, and return True once the lock is acquired. -With an argument, this will only block if the argument is true, -and the return value reflects whether the lock is acquired. -The blocking operation is interruptible. -""" + (acquire_lock() is an obsolete synonym) + + Lock the lock. Without argument, this blocks if the lock is already + locked (even by the same thread), waiting for another thread to release + the lock, and return True once the lock is acquired. 
+ With an argument, this will only block if the argument is true, + and the return value reflects whether the lock is acquired. + The blocking operation is interruptible. + """ + def release_lock(self) -> None: """release() -(release_lock() is an obsolete synonym) + (release_lock() is an obsolete synonym) + + Release the lock, allowing another thread that is blocked waiting for + the lock to acquire the lock. The lock must be in the locked state, + but it needn't be locked by the same thread that unlocks it. + """ -Release the lock, allowing another thread that is blocked waiting for -the lock to acquire the lock. The lock must be in the locked state, -but it needn't be locked by the same thread that unlocks it. -""" def locked_lock(self) -> bool: """locked() -> bool -(locked_lock() is an obsolete synonym) + (locked_lock() is an obsolete synonym) + + Return whether the lock is in the locked state. + """ -Return whether the lock is in the locked state. -""" def __enter__(self) -> bool: """acquire(blocking=True, timeout=-1) -> bool -(acquire_lock() is an obsolete synonym) - -Lock the lock. Without argument, this blocks if the lock is already -locked (even by the same thread), waiting for another thread to release -the lock, and return True once the lock is acquired. -With an argument, this will only block if the argument is true, -and the return value reflects whether the lock is acquired. -The blocking operation is interruptible. -""" + (acquire_lock() is an obsolete synonym) + + Lock the lock. Without argument, this blocks if the lock is already + locked (even by the same thread), waiting for another thread to release + the lock, and return True once the lock is acquired. + With an argument, this will only block if the argument is true, + and the return value reflects whether the lock is acquired. + The blocking operation is interruptible. + """ + def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: """release() -(release_lock() is an obsolete synonym) + (release_lock() is an obsolete synonym) -Release the lock, allowing another thread that is blocked waiting for -the lock to acquire the lock. The lock must be in the locked state, -but it needn't be locked by the same thread that unlocks it. -""" + Release the lock, allowing another thread that is blocked waiting for + the lock to acquire the lock. The lock must be in the locked state, + but it needn't be locked by the same thread that unlocks it. + """ @overload def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: """Start a new thread and return its identifier. -The thread will call the function with positional arguments from the -tuple args and keyword arguments taken from the optional dictionary -kwargs. The thread exits when the function returns; the return value -is ignored. The thread will also exit when the function raises an -unhandled exception; a stack trace will be printed unless the exception -is SystemExit. -""" + The thread will call the function with positional arguments from the + tuple args and keyword arguments taken from the optional dictionary + kwargs. The thread exits when the function returns; the return value + is ignored. The thread will also exit when the function raises an + unhandled exception; a stack trace will be printed unless the exception + is SystemExit. + """ + @overload def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... 
# Obsolete synonym for start_new_thread() @overload def start_new(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: - """An obsolete synonym of start_new_thread(). -""" + """An obsolete synonym of start_new_thread().""" + @overload def start_new(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ... if sys.version_info >= (3, 10): def interrupt_main(signum: signal.Signals = signal.SIGINT, /) -> None: """Simulate the arrival of the given signal in the main thread, -where the corresponding signal handler will be executed. -If *signum* is omitted, SIGINT is assumed. -A subthread can use this function to interrupt the main thread. + where the corresponding signal handler will be executed. + If *signum* is omitted, SIGINT is assumed. + A subthread can use this function to interrupt the main thread. -Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. -""" + Note: the default signal handler for SIGINT raises ``KeyboardInterrupt``. + """ else: def interrupt_main() -> None: """interrupt_main() -Raise a KeyboardInterrupt in the main thread. -A subthread can use this function to interrupt the main thread. -""" + Raise a KeyboardInterrupt in the main thread. + A subthread can use this function to interrupt the main thread. + """ def exit() -> NoReturn: """This is synonymous to ``raise SystemExit''. It will cause the current -thread to exit silently unless the exception is caught. -""" + thread to exit silently unless the exception is caught. + """ + def exit_thread() -> NoReturn: # Obsolete synonym for exit() - """An obsolete synonym of exit(). -""" + """An obsolete synonym of exit().""" + def allocate_lock() -> LockType: """Create a new lock object. See help(type(threading.Lock())) for -information about locks. -""" + information about locks. + """ + def allocate() -> LockType: # Obsolete synonym for allocate_lock() - """An obsolete synonym of allocate_lock(). -""" + """An obsolete synonym of allocate_lock().""" + def get_ident() -> int: """Return a non-zero integer that uniquely identifies the current thread -amongst other threads that exist simultaneously. -This may be used to identify per-thread resources. -Even though on some platforms threads identities may appear to be -allocated consecutive numbers starting at 1, this behavior should not -be relied upon, and the number should be seen purely as a magic cookie. -A thread's identity may be reused for another thread after it exits. -""" + amongst other threads that exist simultaneously. + This may be used to identify per-thread resources. + Even though on some platforms threads identities may appear to be + allocated consecutive numbers starting at 1, this behavior should not + be relied upon, and the number should be seen purely as a magic cookie. + A thread's identity may be reused for another thread after it exits. + """ + def stack_size(size: int = 0, /) -> int: """Return the thread stack size used when creating new threads. The -optional size argument specifies the stack size (in bytes) to be used -for subsequently created threads, and must be 0 (use platform or -configured default) or a positive integer value of at least 32,768 (32k). -If changing the thread stack size is unsupported, a ThreadError -exception is raised. If the specified size is invalid, a ValueError -exception is raised, and the stack size is unmodified. 32k bytes - currently the minimum supported stack size value to guarantee -sufficient stack space for the interpreter itself. 
- -Note that some platforms may have particular restrictions on values for -the stack size, such as requiring a minimum stack size larger than 32 KiB or -requiring allocation in multiples of the system memory page size -- platform documentation should be referred to for more information -(4 KiB pages are common; using multiples of 4096 for the stack size is -the suggested approach in the absence of more specific information). -""" + optional size argument specifies the stack size (in bytes) to be used + for subsequently created threads, and must be 0 (use platform or + configured default) or a positive integer value of at least 32,768 (32k). + If changing the thread stack size is unsupported, a ThreadError + exception is raised. If the specified size is invalid, a ValueError + exception is raised, and the stack size is unmodified. 32k bytes + currently the minimum supported stack size value to guarantee + sufficient stack space for the interpreter itself. + + Note that some platforms may have particular restrictions on values for + the stack size, such as requiring a minimum stack size larger than 32 KiB or + requiring allocation in multiples of the system memory page size + - platform documentation should be referred to for more information + (4 KiB pages are common; using multiples of 4096 for the stack size is + the suggested approach in the absence of more specific information). + """ TIMEOUT_MAX: Final[float] def get_native_id() -> int: # only available on some platforms """Return a non-negative integer identifying the thread as reported -by the OS (kernel). This may be used to uniquely identify a -particular thread within a system. -""" + by the OS (kernel). This may be used to uniquely identify a + particular thread within a system. + """ + @final class _ExceptHookArgs(structseq[Any], tuple[type[BaseException], BaseException | None, TracebackType | None, Thread | None]): """ExceptHookArgs -Type used to pass arguments to threading.excepthook. -""" + Type used to pass arguments to threading.excepthook. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("exc_type", "exc_value", "exc_traceback", "thread") @property def exc_type(self) -> type[BaseException]: - """Exception type -""" + """Exception type""" + @property def exc_value(self) -> BaseException | None: - """Exception value -""" + """Exception value""" + @property def exc_traceback(self) -> TracebackType | None: - """Exception traceback -""" + """Exception traceback""" + @property def thread(self) -> Thread | None: - """Thread -""" + """Thread""" _excepthook: Callable[[_ExceptHookArgs], Any] if sys.version_info >= (3, 12): def daemon_threads_allowed() -> bool: """Return True if daemon threads are allowed in the current interpreter, -and False otherwise. -""" + and False otherwise. + """ if sys.version_info >= (3, 14): def set_name(name: str) -> None: - """Set the name of the current thread. -""" + """Set the name of the current thread.""" @disjoint_base class _local: - """Thread-local data -""" + """Thread-local data""" + def __getattribute__(self, name: str, /) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... def __delattr__(self, name: str, /) -> None: ... 
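The _thread.pyi docstrings in the hunk above describe CPython's low-level lock and thread-spawning primitives. A minimal sketch of how they compose, assuming only the documented allocate_lock(), start_new_thread(), and get_ident() calls (the worker/results names here are illustrative and not part of the stubs):

    import _thread
    import time

    # allocate_lock() returns an unlocked LockType; acquire()/release()
    # behave as the docstrings above describe, and the lock can be used
    # as a context manager via __enter__/__exit__.
    lock = _thread.allocate_lock()
    results = []

    def worker(n):
        ident = _thread.get_ident()      # opaque per-thread identifier
        with lock:
            results.append((ident, n * n))

    for i in range(4):
        _thread.start_new_thread(worker, (i,))

    time.sleep(0.5)                      # crude wait; _thread has no join primitive
    with lock:
        # typically prints [0, 1, 4, 9] once all four threads have run
        print(sorted(r for _, r in results))

In practice the higher-level threading module wraps these primitives; the sketch only exercises the _thread surface annotated in this hunk.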
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi index bae09fa7e458b..fcf88de1bbc57 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_threading_local.pyi @@ -5,6 +5,7 @@ faster one available. You should always import the `local` class from `threading`.) """ + from threading import RLock from typing import Any from typing_extensions import Self, TypeAlias @@ -14,8 +15,8 @@ __all__ = ["local"] _LocalDict: TypeAlias = dict[Any, Any] class _localimpl: - """A class managing thread-local dicts -""" + """A class managing thread-local dicts""" + __slots__ = ("key", "dicts", "localargs", "locallock", "__weakref__") key: str dicts: dict[int, tuple[ReferenceType[Any], _LocalDict]] @@ -24,11 +25,11 @@ class _localimpl: locallock: RLock def get_dict(self) -> _LocalDict: """Return the dict for the current thread. Raises KeyError if none -defined. -""" + defined. + """ + def create_dict(self) -> _LocalDict: - """Create a new dict for the current thread, and return it. -""" + """Create a new dict for the current thread, and return it.""" class local: __slots__ = ("_local__impl", "__dict__") diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi index 3e14cb7f3823b..37ccdc051eade 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tkinter.pyi @@ -20,12 +20,11 @@ from typing_extensions import TypeAlias, deprecated class Tcl_Obj: @property def string(self) -> str: - """the string representation of this object, either as str or bytes -""" + """the string representation of this object, either as str or bytes""" + @property def typename(self) -> str: - """name of the Tcl type -""" + """name of the Tcl type""" __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, value, /): ... def __ge__(self, value, /): ... @@ -97,11 +96,10 @@ class TkappType: def willdispatch(self) -> None: ... if sys.version_info >= (3, 12): def gettrace(self, /) -> _TkinterTraceFunc | None: - """Get the tracing function. -""" + """Get the tracing function.""" + def settrace(self, func: _TkinterTraceFunc | None, /) -> None: - """Set the tracing function. -""" + """Set the tracing function.""" # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS ALL_EVENTS: Final = -3 @@ -136,13 +134,13 @@ if sys.version_info >= (3, 13): ): """ - wantTk - if false, then Tk_Init() doesn't get called - sync - if true, then pass -sync to wish - use - if not None, then pass -use to wish -""" + wantTk + if false, then Tk_Init() doesn't get called + sync + if true, then pass -sync to wish + use + if not None, then pass -use to wish + """ else: def create( @@ -158,19 +156,19 @@ else: ): """ - wantTk - if false, then Tk_Init() doesn't get called - sync - if true, then pass -sync to wish - use - if not None, then pass -use to wish -""" + wantTk + if false, then Tk_Init() doesn't get called + sync + if true, then pass -sync to wish + use + if not None, then pass -use to wish + """ def getbusywaitinterval() -> int: - """Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. 
-""" + """Return the current busy-wait interval between successive calls to Tcl_DoOneEvent in a threaded Python interpreter.""" + def setbusywaitinterval(new_val: int, /) -> None: """Set the busy-wait interval in milliseconds between successive calls to Tcl_DoOneEvent in a threaded Python interpreter. -It should be set to a divisor of the maximum time between frames in an animation. -""" + It should be set to a divisor of the maximum time between frames in an animation. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi index fbbdacb49e8f3..0ecc728570ce2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_tracemalloc.pyi @@ -1,58 +1,65 @@ -"""Debug module to trace memory blocks allocated by Python. -""" +"""Debug module to trace memory blocks allocated by Python.""" + from collections.abc import Sequence from tracemalloc import _FrameTuple, _TraceTuple def _get_object_traceback(obj: object, /) -> Sequence[_FrameTuple] | None: """Get the traceback where the Python object obj was allocated. -Return a tuple of (filename: str, lineno: int) tuples. -Return None if the tracemalloc module is disabled or did not -trace the allocation of the object. -""" + Return a tuple of (filename: str, lineno: int) tuples. + Return None if the tracemalloc module is disabled or did not + trace the allocation of the object. + """ + def _get_traces() -> Sequence[_TraceTuple]: """Get traces of all memory blocks allocated by Python. -Return a list of (size: int, traceback: tuple) tuples. -traceback is a tuple of (filename: str, lineno: int) tuples. + Return a list of (size: int, traceback: tuple) tuples. + traceback is a tuple of (filename: str, lineno: int) tuples. + + Return an empty list if the tracemalloc module is disabled. + """ -Return an empty list if the tracemalloc module is disabled. -""" def clear_traces() -> None: - """Clear traces of memory blocks allocated by Python. -""" + """Clear traces of memory blocks allocated by Python.""" + def get_traceback_limit() -> int: """Get the maximum number of frames stored in the traceback of a trace. -By default, a trace of an allocated memory block only stores -the most recent frame: the limit is 1. -""" + By default, a trace of an allocated memory block only stores + the most recent frame: the limit is 1. + """ + def get_traced_memory() -> tuple[int, int]: """Get the current size and peak size of memory blocks traced by tracemalloc. -Returns a tuple: (current: int, peak: int). -""" + Returns a tuple: (current: int, peak: int). + """ + def get_tracemalloc_memory() -> int: """Get the memory usage in bytes of the tracemalloc module. -This memory is used internally to trace memory allocations. -""" + This memory is used internally to trace memory allocations. + """ + def is_tracing() -> bool: - """Return True if the tracemalloc module is tracing Python memory allocations. -""" + """Return True if the tracemalloc module is tracing Python memory allocations.""" + def reset_peak() -> None: """Set the peak size of memory blocks traced by tracemalloc to the current size. -Do nothing if the tracemalloc module is not tracing memory allocations. -""" + Do nothing if the tracemalloc module is not tracing memory allocations. + """ + def start(nframe: int = 1, /) -> None: """Start tracing Python memory allocations. -Also set the maximum number of frames stored in the traceback of a -trace to nframe. 
-""" + Also set the maximum number of frames stored in the traceback of a + trace to nframe. + """ + def stop() -> None: """Stop tracing Python memory allocations. -Also clear traces of memory blocks allocated by Python. -""" + Also clear traces of memory blocks allocated by Python. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi index 5418a5aa0928c..10ab8833673ba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_warnings.pyi @@ -1,6 +1,7 @@ """_warnings provides basic warning filtering support. It is a helper module to speed up interpreter start-up. """ + import sys from typing import Any, overload @@ -20,19 +21,20 @@ if sys.version_info >= (3, 12): ) -> None: """Issue a warning, or maybe ignore it or raise an exception. - message - Text of the warning message. - category - The Warning category subclass. Defaults to UserWarning. - stacklevel - How far up the call stack to make this warning appear. A value of 2 for - example attributes the warning to the caller of the code calling warn(). - source - If supplied, the destroyed object which emitted a ResourceWarning - skip_file_prefixes - An optional tuple of module filename prefixes indicating frames to skip - during stacklevel computations for stack frame attribution. -""" + message + Text of the warning message. + category + The Warning category subclass. Defaults to UserWarning. + stacklevel + How far up the call stack to make this warning appear. A value of 2 for + example attributes the warning to the caller of the code calling warn(). + source + If supplied, the destroyed object which emitted a ResourceWarning + skip_file_prefixes + An optional tuple of module filename prefixes indicating frames to skip + during stacklevel computations for stack frame attribution. + """ + @overload def warn( message: Warning, @@ -46,8 +48,8 @@ if sys.version_info >= (3, 12): else: @overload def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: - """Issue a warning, or maybe ignore it or raise an exception. -""" + """Issue a warning, or maybe ignore it or raise an exception.""" + @overload def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @@ -62,8 +64,8 @@ def warn_explicit( module_globals: dict[str, Any] | None = None, source: Any | None = None, ) -> None: - """Issue a warning, or maybe ignore it or raise an exception. -""" + """Issue a warning, or maybe ignore it or raise an exception.""" + @overload def warn_explicit( message: Warning, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi index a2e8785813fc7..3d59e1c66c44b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakref.pyi @@ -1,5 +1,5 @@ -"""Weak-reference support module. -""" +"""Weak-reference support module.""" + from collections.abc import Callable from typing import Any, TypeVar, overload from weakref import CallableProxyType as CallableProxyType, ProxyType as ProxyType, ReferenceType as ReferenceType, ref as ref @@ -8,19 +8,19 @@ _C = TypeVar("_C", bound=Callable[..., Any]) _T = TypeVar("_T") def getweakrefcount(object: Any, /) -> int: - """Return the number of weak references to 'object'. 
-""" + """Return the number of weak references to 'object'.""" + def getweakrefs(object: Any, /) -> list[Any]: - """Return a list of all weak reference objects pointing to 'object'. -""" + """Return a list of all weak reference objects pointing to 'object'.""" # Return CallableProxyType if object is callable, ProxyType otherwise @overload def proxy(object: _C, callback: Callable[[_C], Any] | None = None, /) -> CallableProxyType[_C]: """Create a proxy object that weakly references 'object'. -'callback', if given, is called with a reference to the -proxy when 'object' is about to be finalized. -""" + 'callback', if given, is called with a reference to the + proxy when 'object' is about to be finalized. + """ + @overload def proxy(object: _T, callback: Callable[[_T], Any] | None = None, /) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi index 3341561681ffb..7b5f0af1f73e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_weakrefset.pyi @@ -48,5 +48,5 @@ class WeakSet(MutableSet[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi index 0d8f80a35972d..0880f7404b635 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_winapi.pyi @@ -176,8 +176,8 @@ if sys.platform == "win32": COPY_FILE_DIRECTORY: Final = 0x00000080 def CloseHandle(handle: int, /) -> None: - """Close handle. -""" + """Close handle.""" + @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload @@ -209,11 +209,12 @@ if sys.platform == "win32": def CreatePipe(pipe_attrs: Any, size: int, /) -> tuple[int, int]: """Create an anonymous pipe. - pipe_attrs - Ignored internally, can be None. + pipe_attrs + Ignored internally, can be None. + + Returns a 2-tuple of handles, to the read and write ends of the pipe. + """ -Returns a 2-tuple of handles, to the read and write ends of the pipe. -""" def CreateProcess( application_name: str | None, command_line: str | None, @@ -228,16 +229,17 @@ Returns a 2-tuple of handles, to the read and write ends of the pipe. ) -> tuple[int, int, int, int]: """Create a new process and its primary thread. - command_line - Can be str or None - proc_attrs - Ignored internally, can be None. - thread_attrs - Ignored internally, can be None. + command_line + Can be str or None + proc_attrs + Ignored internally, can be None. + thread_attrs + Ignored internally, can be None. + + The return value is a tuple of the process handle, thread handle, + process ID, and thread ID. + """ -The return value is a tuple of the process handle, thread handle, -process ID, and thread ID. -""" def DuplicateHandle( source_process_handle: int, source_handle: int, @@ -249,43 +251,46 @@ process ID, and thread ID. ) -> int: """Return a duplicate handle object. -The duplicate handle refers to the same object as the original -handle. Therefore, any changes to the object are reflected -through both handles. -""" + The duplicate handle refers to the same object as the original + handle. 
Therefore, any changes to the object are reflected + through both handles. + """ + def ExitProcess(ExitCode: int, /) -> NoReturn: ... def GetACP() -> int: - """Get the current Windows ANSI code page identifier. -""" + """Get the current Windows ANSI code page identifier.""" + def GetFileType(handle: int) -> int: ... def GetCurrentProcess() -> int: - """Return a handle object for the current process. -""" + """Return a handle object for the current process.""" + def GetExitCodeProcess(process: int, /) -> int: - """Return the termination status of the specified process. -""" + """Return the termination status of the specified process.""" + def GetLastError() -> int: ... def GetModuleFileName(module_handle: int, /) -> str: """Return the fully-qualified path for the file that contains module. -The module must have been loaded by the current process. + The module must have been loaded by the current process. + + The module parameter should be a handle to the loaded module + whose path is being requested. If this parameter is 0, + GetModuleFileName retrieves the path of the executable file + of the current process. + """ -The module parameter should be a handle to the loaded module -whose path is being requested. If this parameter is 0, -GetModuleFileName retrieves the path of the executable file -of the current process. -""" def GetStdHandle(std_handle: int, /) -> int: """Return a handle to the specified standard device. - std_handle - One of STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, or STD_ERROR_HANDLE. + std_handle + One of STD_INPUT_HANDLE, STD_OUTPUT_HANDLE, or STD_ERROR_HANDLE. + + The integer associated with the handle object is returned. + """ -The integer associated with the handle object is returned. -""" def GetVersion() -> int: - """Return the version number of the current operating system. -""" + """Return the version number of the current operating system.""" + def OpenProcess(desired_access: int, inherit_handle: bool, process_id: int, /) -> int: ... def PeekNamedPipe(handle: int, size: int = 0, /) -> tuple[int, int] | tuple[bytes, int, int]: ... if sys.version_info >= (3, 10): @@ -302,16 +307,17 @@ The integer associated with the handle object is returned. named_pipe: int, mode: int | None, max_collection_count: int | None, collect_data_timeout: int | None, / ) -> None: ... def TerminateProcess(handle: int, exit_code: int, /) -> None: - """Terminate the specified process and all of its threads. -""" + """Terminate the specified process and all of its threads.""" + def WaitForMultipleObjects(handle_seq: Sequence[int], wait_flag: bool, milliseconds: int = 0xFFFFFFFF, /) -> int: ... def WaitForSingleObject(handle: int, milliseconds: int, /) -> int: """Wait for a single object. -Wait until the specified object is in the signaled state or -the time-out interval elapses. The timeout value is specified -in milliseconds. -""" + Wait until the specified object is in the signaled state or + the time-out interval elapses. The timeout value is specified + in milliseconds. + """ + def WaitNamedPipe(name: str, timeout: int, /) -> None: ... @overload def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @@ -321,50 +327,51 @@ in milliseconds. def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: - """OVERLAPPED structure wrapper -""" + """OVERLAPPED structure wrapper""" + event: int def GetOverlappedResult(self, wait: bool, /) -> tuple[int, int]: ... 
def cancel(self) -> None: ... def getbuffer(self) -> bytes | None: ... if sys.version_info >= (3, 13): - def BatchedWaitForMultipleObjects( - handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF - ) -> list[int]: + def BatchedWaitForMultipleObjects(handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF) -> list[int]: """Supports a larger number of handles than WaitForMultipleObjects -Note that the handles may be waited on other threads, which could cause -issues for objects like mutexes that become associated with the thread -that was waiting for them. Objects may also be left signalled, even if -the wait fails. + Note that the handles may be waited on other threads, which could cause + issues for objects like mutexes that become associated with the thread + that was waiting for them. Objects may also be left signalled, even if + the wait fails. -It is recommended to use WaitForMultipleObjects whenever possible, and -only switch to BatchedWaitForMultipleObjects for scenarios where you -control all the handles involved, such as your own thread pool or -files, and all wait objects are left unmodified by a wait (for example, -manual reset events, threads, and files/pipes). + It is recommended to use WaitForMultipleObjects whenever possible, and + only switch to BatchedWaitForMultipleObjects for scenarios where you + control all the handles involved, such as your own thread pool or + files, and all wait objects are left unmodified by a wait (for example, + manual reset events, threads, and files/pipes). + + Overlapped handles returned from this module use manual reset events. + """ -Overlapped handles returned from this module use manual reset events. -""" def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ... def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ... def GetLongPathName(path: str) -> str: """Return the long version of the provided path. -If the path is already in its long form, returns the same value. + If the path is already in its long form, returns the same value. + + The path must already be a 'str'. If the type is not known, use + os.fsdecode before calling this function. + """ -The path must already be a 'str'. If the type is not known, use -os.fsdecode before calling this function. -""" def GetShortPathName(path: str) -> str: """Return the short version of the provided path. -If the path is already in its short form, returns the same value. + If the path is already in its short form, returns the same value. + + The path must already be a 'str'. If the type is not known, use + os.fsdecode before calling this function. + """ -The path must already be a 'str'. If the type is not known, use -os.fsdecode before calling this function. -""" def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ... def ReleaseMutex(mutex: int) -> None: ... @@ -375,10 +382,11 @@ os.fsdecode before calling this function. def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: """Copies a file from one name to a new name. -This is implemented using the CopyFile2 API, which preserves all stat -and metadata information apart from security attributes. + This is implemented using the CopyFile2 API, which preserves all stat + and metadata information apart from security attributes. 
+ + progress_routine is reserved for future use, but is currently not + implemented. Its value is ignored. + """ -progress_routine is reserved for future use, but is currently not -implemented. Its value is ignored. -""" def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi index 2f544bab641f4..3d4413ed11377 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/_zstd.pyi @@ -1,5 +1,5 @@ -"""Implementation module for Zstandard compression. -""" +"""Implementation module for Zstandard compression.""" + from _typeshed import ReadableBuffer from collections.abc import Mapping from compression.zstd import CompressionParameter, DecompressionParameter @@ -46,16 +46,17 @@ _ZstdCompressorFlushFrame: TypeAlias = Literal[2] class ZstdCompressor: """Create a compressor object for compressing data incrementally. - level - The compression level to use. Defaults to COMPRESSION_LEVEL_DEFAULT. - options - A dict object that contains advanced compression parameters. - zstd_dict - A ZstdDict object, a pre-trained Zstandard dictionary. + level + The compression level to use. Defaults to COMPRESSION_LEVEL_DEFAULT. + options + A dict object that contains advanced compression parameters. + zstd_dict + A ZstdDict object, a pre-trained Zstandard dictionary. + + Thread-safe at method level. For one-shot compression, use the compress() + function instead. + """ -Thread-safe at method level. For one-shot compression, use the compress() -function instead. -""" CONTINUE: Final = 0 FLUSH_BLOCK: Final = 1 FLUSH_FRAME: Final = 2 @@ -67,234 +68,249 @@ function instead. ) -> bytes: """Provide data to the compressor object. - mode - Can be these 3 values ZstdCompressor.CONTINUE, - ZstdCompressor.FLUSH_BLOCK, ZstdCompressor.FLUSH_FRAME + mode + Can be these 3 values ZstdCompressor.CONTINUE, + ZstdCompressor.FLUSH_BLOCK, ZstdCompressor.FLUSH_FRAME + + Return a chunk of compressed data if possible, or b'' otherwise. When you have + finished providing data to the compressor, call the flush() method to finish + the compression process. + """ -Return a chunk of compressed data if possible, or b'' otherwise. When you have -finished providing data to the compressor, call the flush() method to finish -the compression process. -""" def flush(self, /, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 2) -> bytes: """Finish the compression process. - mode - Can be these 2 values ZstdCompressor.FLUSH_FRAME, - ZstdCompressor.FLUSH_BLOCK + mode + Can be these 2 values ZstdCompressor.FLUSH_FRAME, + ZstdCompressor.FLUSH_BLOCK + + Flush any remaining data left in internal buffers. Since Zstandard data + consists of one or more independent frames, the compressor object can still + be used after this method is called. + """ -Flush any remaining data left in internal buffers. Since Zstandard data -consists of one or more independent frames, the compressor object can still -be used after this method is called. -""" def set_pledged_input_size(self, size: int | None, /) -> None: """Set the uncompressed content size to be written into the frame header. - size - The size of the uncompressed data to be provided to the compressor. + size + The size of the uncompressed data to be provided to the compressor. 
+ + This method can be used to ensure the header of the frame about to be written + includes the size of the data, unless the CompressionParameter.content_size_flag + is set to False. If last_mode != FLUSH_FRAME, then a RuntimeError is raised. -This method can be used to ensure the header of the frame about to be written -includes the size of the data, unless the CompressionParameter.content_size_flag -is set to False. If last_mode != FLUSH_FRAME, then a RuntimeError is raised. + It is important to ensure that the pledged data size matches the actual data + size. If they do not match the compressed output data may be corrupted and the + final chunk written may be lost. + """ -It is important to ensure that the pledged data size matches the actual data -size. If they do not match the compressed output data may be corrupted and the -final chunk written may be lost. -""" @property def last_mode(self) -> _ZstdCompressorContinue | _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame: """The last mode used to this compressor object, its value can be .CONTINUE, -.FLUSH_BLOCK, .FLUSH_FRAME. Initialized to .FLUSH_FRAME. + .FLUSH_BLOCK, .FLUSH_FRAME. Initialized to .FLUSH_FRAME. -It can be used to get the current state of a compressor, such as, data -flushed, or a frame ended. -""" + It can be used to get the current state of a compressor, such as, data + flushed, or a frame ended. + """ @final class ZstdDecompressor: """Create a decompressor object for decompressing data incrementally. - zstd_dict - A ZstdDict object, a pre-trained Zstandard dictionary. - options - A dict object that contains advanced decompression parameters. + zstd_dict + A ZstdDict object, a pre-trained Zstandard dictionary. + options + A dict object that contains advanced decompression parameters. + + Thread-safe at method level. For one-shot decompression, use the decompress() + function instead. + """ -Thread-safe at method level. For one-shot decompression, use the decompress() -function instead. -""" def __new__(cls, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> Self: ... def decompress(self, /, data: ReadableBuffer, max_length: int = -1) -> bytes: """Decompress *data*, returning uncompressed bytes if possible, or b'' otherwise. - data - A bytes-like object, Zstandard data to be decompressed. - max_length - Maximum size of returned data. When it is negative, the size of - output buffer is unlimited. When it is nonnegative, returns at - most max_length bytes of decompressed data. - -If *max_length* is nonnegative, returns at most *max_length* bytes of -decompressed data. If this limit is reached and further output can be -produced, *self.needs_input* will be set to ``False``. In this case, the next -call to *decompress()* may provide *data* as b'' to obtain more of the output. - -If all of the input data was decompressed and returned (either because this -was less than *max_length* bytes, or because *max_length* was negative), -*self.needs_input* will be set to True. - -Attempting to decompress data after the end of a frame is reached raises an -EOFError. Any data found after the end of the frame is ignored and saved in -the self.unused_data attribute. -""" + data + A bytes-like object, Zstandard data to be decompressed. + max_length + Maximum size of returned data. When it is negative, the size of + output buffer is unlimited. When it is nonnegative, returns at + most max_length bytes of decompressed data. 
+ + If *max_length* is nonnegative, returns at most *max_length* bytes of + decompressed data. If this limit is reached and further output can be + produced, *self.needs_input* will be set to ``False``. In this case, the next + call to *decompress()* may provide *data* as b'' to obtain more of the output. + + If all of the input data was decompressed and returned (either because this + was less than *max_length* bytes, or because *max_length* was negative), + *self.needs_input* will be set to True. + + Attempting to decompress data after the end of a frame is reached raises an + EOFError. Any data found after the end of the frame is ignored and saved in + the self.unused_data attribute. + """ + @property def eof(self) -> bool: """True means the end of the first frame has been reached. If decompress data -after that, an EOFError exception will be raised. -""" + after that, an EOFError exception will be raised. + """ + @property def needs_input(self) -> bool: """If the max_length output limit in .decompress() method has been reached, -and the decompressor has (or may has) unconsumed input data, it will be set -to False. In this case, passing b'' to the .decompress() method may output -further data. -""" + and the decompressor has (or may has) unconsumed input data, it will be set + to False. In this case, passing b'' to the .decompress() method may output + further data. + """ + @property def unused_data(self) -> bytes: """A bytes object of un-consumed input data. -When ZstdDecompressor object stops after a frame is -decompressed, unused input data after the frame. Otherwise this will be b''. -""" + When ZstdDecompressor object stops after a frame is + decompressed, unused input data after the frame. Otherwise this will be b''. + """ @final class ZstdDict: """Represents a Zstandard dictionary. - dict_content - The content of a Zstandard dictionary as a bytes-like object. - is_raw - If true, perform no checks on *dict_content*, useful for some - advanced cases. Otherwise, check that the content represents - a Zstandard dictionary created by the zstd library or CLI. + dict_content + The content of a Zstandard dictionary as a bytes-like object. + is_raw + If true, perform no checks on *dict_content*, useful for some + advanced cases. Otherwise, check that the content represents + a Zstandard dictionary created by the zstd library or CLI. + + The dictionary can be used for compression or decompression, and can be shared + by multiple ZstdCompressor or ZstdDecompressor objects. + """ -The dictionary can be used for compression or decompression, and can be shared -by multiple ZstdCompressor or ZstdDecompressor objects. -""" def __new__(cls, dict_content: bytes, /, *, is_raw: bool = False) -> Self: ... def __len__(self, /) -> int: - """Return len(self). -""" + """Return len(self).""" + @property def as_digested_dict(self) -> tuple[Self, int]: """Load as a digested dictionary to compressor. -Pass this attribute as zstd_dict argument: -compress(dat, zstd_dict=zd.as_digested_dict) + Pass this attribute as zstd_dict argument: + compress(dat, zstd_dict=zd.as_digested_dict) + + 1. Some advanced compression parameters of compressor may be overridden + by parameters of digested dictionary. + 2. ZstdDict has a digested dictionaries cache for each compression level. + It's faster when loading again a digested dictionary with the same + compression level. + 3. No need to use this for decompression. + """ -1. 
Some advanced compression parameters of compressor may be overridden - by parameters of digested dictionary. -2. ZstdDict has a digested dictionaries cache for each compression level. - It's faster when loading again a digested dictionary with the same - compression level. -3. No need to use this for decompression. -""" @property def as_prefix(self) -> tuple[Self, int]: """Load as a prefix to compressor/decompressor. -Pass this attribute as zstd_dict argument: -compress(dat, zstd_dict=zd.as_prefix) + Pass this attribute as zstd_dict argument: + compress(dat, zstd_dict=zd.as_prefix) + + 1. Prefix is compatible with long distance matching, while dictionary is not. + 2. It only works for the first frame, then the compressor/decompressor will + return to no prefix state. + 3. When decompressing, must use the same prefix as when compressing. + """ -1. Prefix is compatible with long distance matching, while dictionary is not. -2. It only works for the first frame, then the compressor/decompressor will - return to no prefix state. -3. When decompressing, must use the same prefix as when compressing. -""" @property def as_undigested_dict(self) -> tuple[Self, int]: """Load as an undigested dictionary to compressor. -Pass this attribute as zstd_dict argument: -compress(dat, zstd_dict=zd.as_undigested_dict) + Pass this attribute as zstd_dict argument: + compress(dat, zstd_dict=zd.as_undigested_dict) + + 1. The advanced compression parameters of compressor will not be overridden. + 2. Loading an undigested dictionary is costly. If load an undigested dictionary + multiple times, consider reusing a compressor object. + 3. No need to use this for decompression. + """ -1. The advanced compression parameters of compressor will not be overridden. -2. Loading an undigested dictionary is costly. If load an undigested dictionary - multiple times, consider reusing a compressor object. -3. No need to use this for decompression. -""" @property def dict_content(self) -> bytes: - """The content of a Zstandard dictionary, as a bytes object. -""" + """The content of a Zstandard dictionary, as a bytes object.""" + @property def dict_id(self) -> int: """The Zstandard dictionary, an int between 0 and 2**32. -A non-zero value represents an ordinary Zstandard dictionary, -conforming to the standardised format. + A non-zero value represents an ordinary Zstandard dictionary, + conforming to the standardised format. -A value of zero indicates a 'raw content' dictionary, -without any restrictions on format or content. -""" + A value of zero indicates a 'raw content' dictionary, + without any restrictions on format or content. + """ class ZstdError(Exception): - """An error occurred in the zstd library. -""" + """An error occurred in the zstd library.""" def finalize_dict( custom_dict_bytes: bytes, samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, compression_level: int, / ) -> bytes: """Finalize a Zstandard dictionary. - custom_dict_bytes - Custom dictionary content. - samples_bytes - Concatenation of samples. - samples_sizes - Tuple of samples' sizes. - dict_size - The size of the dictionary. - compression_level - Optimize for a specific Zstandard compression level, 0 means default. -""" + custom_dict_bytes + Custom dictionary content. + samples_bytes + Concatenation of samples. + samples_sizes + Tuple of samples' sizes. + dict_size + The size of the dictionary. + compression_level + Optimize for a specific Zstandard compression level, 0 means default. 
+ """ + def get_frame_info(frame_buffer: ReadableBuffer) -> tuple[int, int]: """Get Zstandard frame infomation from a frame header. - frame_buffer - A bytes-like object, containing the header of a Zstandard frame. -""" + frame_buffer + A bytes-like object, containing the header of a Zstandard frame. + """ + def get_frame_size(frame_buffer: ReadableBuffer) -> int: """Get the size of a Zstandard frame, including the header and optional checksum. - frame_buffer - A bytes-like object, it should start from the beginning of a frame, - and contains at least one complete frame. -""" + frame_buffer + A bytes-like object, it should start from the beginning of a frame, + and contains at least one complete frame. + """ + def get_param_bounds(parameter: int, is_compress: bool) -> tuple[int, int]: """Get CompressionParameter/DecompressionParameter bounds. - parameter - The parameter to get bounds. - is_compress - True for CompressionParameter, False for DecompressionParameter. -""" + parameter + The parameter to get bounds. + is_compress + True for CompressionParameter, False for DecompressionParameter. + """ + def set_parameter_types(c_parameter_type: type[CompressionParameter], d_parameter_type: type[DecompressionParameter]) -> None: """Set CompressionParameter and DecompressionParameter types for validity check. - c_parameter_type - CompressionParameter IntEnum type object - d_parameter_type - DecompressionParameter IntEnum type object -""" + c_parameter_type + CompressionParameter IntEnum type object + d_parameter_type + DecompressionParameter IntEnum type object + """ + def train_dict(samples_bytes: bytes, samples_sizes: tuple[int, ...], dict_size: int, /) -> bytes: """Train a Zstandard dictionary on sample data. - samples_bytes - Concatenation of samples. - samples_sizes - Tuple of samples' sizes. - dict_size - The size of the dictionary. -""" + samples_bytes + Concatenation of samples. + samples_sizes + Tuple of samples' sizes. + dict_size + The size of the dictionary. + """ zstd_version: Final[str] zstd_version_number: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi index bbc3d793d9683..04202fae9444c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/abc.pyi @@ -1,5 +1,5 @@ -"""Abstract Base Classes (ABCs) according to PEP 3119. -""" +"""Abstract Base Classes (ABCs) according to PEP 3119.""" + import _typeshed import sys from _typeshed import SupportsWrite @@ -16,16 +16,17 @@ _P = ParamSpec("_P") class ABCMeta(type): """Metaclass for defining Abstract Base Classes (ABCs). -Use this metaclass to create an ABC. An ABC can be subclassed -directly, and then acts as a mix-in class. You can also register -unrelated concrete classes (even built-in classes) and unrelated -ABCs as 'virtual subclasses' -- these and their descendants will -be considered subclasses of the registering ABC by the built-in -issubclass() function, but the registering ABC won't show up in -their MRO (Method Resolution Order) nor will method -implementations defined by the registering ABC be callable (not -even via super()). -""" + Use this metaclass to create an ABC. An ABC can be subclassed + directly, and then acts as a mix-in class. 
You can also register + unrelated concrete classes (even built-in classes) and unrelated + ABCs as 'virtual subclasses' -- these and their descendants will + be considered subclasses of the registering ABC by the built-in + issubclass() function, but the registering ABC won't show up in + their MRO (Method Resolution Order) nor will method + implementations defined by the registering ABC be callable (not + even via super()). + """ + __abstractmethods__: frozenset[str] if sys.version_info >= (3, 11): def __new__( @@ -37,50 +38,52 @@ even via super()). ) -> _typeshed.Self: ... def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: - """Override for isinstance(instance, cls). -""" + """Override for isinstance(instance, cls).""" + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: - """Override for issubclass(subclass, cls). -""" + """Override for issubclass(subclass, cls).""" + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: - """Debug helper to print the ABC registry. -""" + """Debug helper to print the ABC registry.""" + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: """Register a virtual subclass of an ABC. -Returns the subclass, to allow usage as a class decorator. -""" + Returns the subclass, to allow usage as a class decorator. + """ def abstractmethod(funcobj: _FuncT) -> _FuncT: """A decorator indicating abstract methods. -Requires that the metaclass is ABCMeta or derived from it. A -class that has a metaclass derived from ABCMeta cannot be -instantiated unless all of its abstract methods are overridden. -The abstract methods can be called using any of the normal -'super' call mechanisms. abstractmethod() may be used to declare -abstract methods for properties and descriptors. + Requires that the metaclass is ABCMeta or derived from it. A + class that has a metaclass derived from ABCMeta cannot be + instantiated unless all of its abstract methods are overridden. + The abstract methods can be called using any of the normal + 'super' call mechanisms. abstractmethod() may be used to declare + abstract methods for properties and descriptors. -Usage: + Usage: + + class C(metaclass=ABCMeta): + @abstractmethod + def my_abstract_method(self, arg1, arg2, argN): + ... + """ - class C(metaclass=ABCMeta): - @abstractmethod - def my_abstract_method(self, arg1, arg2, argN): - ... -""" @deprecated("Deprecated since Python 3.3. Use `@classmethod` stacked on top of `@abstractmethod` instead.") class abstractclassmethod(classmethod[_T, _P, _R_co]): """A decorator indicating abstract classmethods. -Deprecated, use 'classmethod' with 'abstractmethod' instead: + Deprecated, use 'classmethod' with 'abstractmethod' instead: + + class C(ABC): + @classmethod + @abstractmethod + def my_abstract_classmethod(cls, ...): + ... - class C(ABC): - @classmethod - @abstractmethod - def my_abstract_classmethod(cls, ...): - ... + """ -""" __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... @@ -88,15 +91,16 @@ Deprecated, use 'classmethod' with 'abstractmethod' instead: class abstractstaticmethod(staticmethod[_P, _R_co]): """A decorator indicating abstract staticmethods. -Deprecated, use 'staticmethod' with 'abstractmethod' instead: + Deprecated, use 'staticmethod' with 'abstractmethod' instead: + + class C(ABC): + @staticmethod + @abstractmethod + def my_abstract_staticmethod(...): + ... - class C(ABC): - @staticmethod - @abstractmethod - def my_abstract_staticmethod(...): - ... 
+ """ -""" __isabstractmethod__: Literal[True] def __init__(self, callable: Callable[_P, _R_co]) -> None: ... @@ -104,45 +108,47 @@ Deprecated, use 'staticmethod' with 'abstractmethod' instead: class abstractproperty(property): """A decorator indicating abstract properties. -Deprecated, use 'property' with 'abstractmethod' instead: + Deprecated, use 'property' with 'abstractmethod' instead: - class C(ABC): - @property - @abstractmethod - def my_abstract_property(self): - ... + class C(ABC): + @property + @abstractmethod + def my_abstract_property(self): + ... + + """ -""" __isabstractmethod__: Literal[True] class ABC(metaclass=ABCMeta): """Helper class that provides a standard way to create an ABC using -inheritance. -""" + inheritance. + """ + __slots__ = () def get_cache_token() -> object: """Returns the current ABC cache token. -The token is an opaque object (supporting equality testing) identifying the -current version of the ABC cache for virtual subclasses. The token changes -with every call to register() on any ABC. -""" + The token is an opaque object (supporting equality testing) identifying the + current version of the ABC cache for virtual subclasses. The token changes + with every call to register() on any ABC. + """ if sys.version_info >= (3, 10): def update_abstractmethods(cls: type[_T]) -> type[_T]: """Recalculate the set of abstract methods of an abstract class. -If a class has had one of its abstract methods implemented after the -class was created, the method will not be considered implemented until -this function is called. Alternatively, if a new abstract method has been -added to the class, it will only be considered an abstract method of the -class after this function is called. + If a class has had one of its abstract methods implemented after the + class was created, the method will not be considered implemented until + this function is called. Alternatively, if a new abstract method has been + added to the class, it will only be considered an abstract method of the + class after this function is called. -This function should be called before any use is made of the class, -usually in class decorators that add methods to the subject class. + This function should be called before any use is made of the class, + usually in class decorators that add methods to the subject class. -Returns cls, to allow usage as a class decorator. + Returns cls, to allow usage as a class decorator. -If cls is not an instance of ABCMeta, does nothing. -""" + If cls is not an instance of ABCMeta, does nothing. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi index b163547755f2f..4d624b8c6c187 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/aifc.pyi @@ -133,6 +133,7 @@ written, otherwise an AIFF-C file is written. This default can be changed by calling aiff() or aifc() before the first writeframes or writeframesraw. """ + from types import TracebackType from typing import IO, Any, Literal, NamedTuple, overload from typing_extensions import Self, TypeAlias @@ -142,8 +143,8 @@ __all__ = ["Error", "open"] class Error(Exception): ... 
class _aifc_params(NamedTuple): - """_aifc_params(nchannels, sampwidth, framerate, nframes, comptype, compname) -""" + """_aifc_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" + nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi index 0254954591fdd..685bd2ea8687e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/annotationlib.pyi @@ -1,5 +1,5 @@ -"""Helpers for introspecting and wrapping annotations. -""" +"""Helpers for introspecting and wrapping annotations.""" + import sys from typing import Literal @@ -32,15 +32,16 @@ if sys.version_info >= (3, 14): class ForwardRef: """Wrapper that holds a forward reference. -Constructor arguments: -* arg: a string representing the code to be evaluated. -* module: the module where the forward reference was created. - Must be a string, not a module object. -* owner: The owning object (module, class, or function). -* is_argument: Does nothing, retained for compatibility. -* is_class: True if the forward reference was created in class scope. + Constructor arguments: + * arg: a string representing the code to be evaluated. + * module: the module where the forward reference was created. + Must be a string, not a module object. + * owner: The owning object (module, class, or function). + * is_argument: Does nothing, retained for compatibility. + * is_class: True if the forward reference was created in class scope. + + """ -""" __slots__ = ( "__forward_is_argument__", "__forward_is_class__", @@ -73,8 +74,9 @@ Constructor arguments: ) -> str: """Evaluate the forward reference and return the value. -If the forward reference cannot be evaluated, raise an exception. -""" + If the forward reference cannot be evaluated, raise an exception. + """ + @overload def evaluate( self, @@ -116,9 +118,10 @@ If the forward reference cannot be evaluated, raise an exception. @overload def call_evaluate_function(evaluate: EvaluateFunc, format: Literal[Format.STRING], *, owner: object = None) -> str: """Call an evaluate function. Evaluate functions are normally generated for -the value of type aliases and the bounds, constraints, and defaults of -type parameter objects. -""" + the value of type aliases and the bounds, constraints, and defaults of + type parameter objects. + """ + @overload def call_evaluate_function( evaluate: EvaluateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None @@ -126,28 +129,27 @@ type parameter objects. @overload def call_evaluate_function(evaluate: EvaluateFunc, format: Format, *, owner: object = None) -> AnnotationForm: ... @overload - def call_annotate_function( - annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None - ) -> dict[str, str]: + def call_annotate_function(annotate: AnnotateFunc, format: Literal[Format.STRING], *, owner: object = None) -> dict[str, str]: """Call an __annotate__ function. __annotate__ functions are normally -generated by the compiler to defer the evaluation of annotations. They -can be called with any of the format arguments in the Format enum, but -compiler-generated __annotate__ functions only support the VALUE format. -This function provides additional functionality to call __annotate__ -functions with the FORWARDREF and STRING formats. + generated by the compiler to defer the evaluation of annotations. 
They + can be called with any of the format arguments in the Format enum, but + compiler-generated __annotate__ functions only support the VALUE format. + This function provides additional functionality to call __annotate__ + functions with the FORWARDREF and STRING formats. + + *annotate* must be an __annotate__ function, which takes a single argument + and returns a dict of annotations. -*annotate* must be an __annotate__ function, which takes a single argument -and returns a dict of annotations. + *format* must be a member of the Format enum or one of the corresponding + integer values. -*format* must be a member of the Format enum or one of the corresponding -integer values. + *owner* can be the object that owns the annotations (i.e., the module, + class, or function that the __annotate__ function derives from). With the + FORWARDREF format, it is used to provide better evaluation capabilities + on the generated ForwardRef objects. -*owner* can be the object that owns the annotations (i.e., the module, -class, or function that the __annotate__ function derives from). With the -FORWARDREF format, it is used to provide better evaluation capabilities -on the generated ForwardRef objects. + """ -""" @overload def call_annotate_function( annotate: AnnotateFunc, format: Literal[Format.FORWARDREF], *, owner: object = None @@ -157,9 +159,10 @@ on the generated ForwardRef objects. def get_annotate_from_class_namespace(obj: Mapping[str, object]) -> AnnotateFunc | None: """Retrieve the annotate function from a class namespace dictionary. -Return None if the namespace does not contain an annotate function. -This is useful in metaclass ``__new__`` methods to retrieve the annotate function. -""" + Return None if the namespace does not contain an annotate function. + This is useful in metaclass ``__new__`` methods to retrieve the annotate function. + """ + @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -171,54 +174,55 @@ This is useful in metaclass ``__new__`` methods to retrieve the annotate functio ) -> dict[str, str]: """Compute the annotations dict for an object. -obj may be a callable, class, module, or other object with -__annotate__ or __annotations__ attributes. -Passing any other object raises TypeError. - -The *format* parameter controls the format in which annotations are returned, -and must be a member of the Format enum or its integer equivalent. -For the VALUE format, the __annotations__ is tried first; if it -does not exist, the __annotate__ function is called. The -FORWARDREF format uses __annotations__ if it exists and can be -evaluated, and otherwise falls back to calling the __annotate__ function. -The SOURCE format tries __annotate__ first, and falls back to -using __annotations__, stringified using annotations_to_string(). - -This function handles several details for you: - - * If eval_str is true, values of type str will - be un-stringized using eval(). This is intended - for use with stringized annotations - ("from __future__ import annotations"). - * If obj doesn't have an annotations dict, returns an - empty dict. (Functions and methods always have an - annotations dict; classes, modules, and other types of - callables may not.) - * Ignores inherited annotations on classes. If a class - doesn't have its own annotations dict, returns an empty dict. - * All accesses to object members and dict values are done - using getattr() and dict.get() for safety. - * Always, always, always returns a freshly-created dict. 
- -eval_str controls whether or not values of type str are replaced -with the result of calling eval() on those values: - - * If eval_str is true, eval() is called on values of type str. - * If eval_str is false (the default), values of type str are unchanged. - -globals and locals are passed in to eval(); see the documentation -for eval() for more information. If either globals or locals is -None, this function may replace that value with a context-specific -default, contingent on type(obj): - - * If obj is a module, globals defaults to obj.__dict__. - * If obj is a class, globals defaults to - sys.modules[obj.__module__].__dict__ and locals - defaults to the obj class namespace. - * If obj is a callable, globals defaults to obj.__globals__, - although if obj is a wrapped function (using - functools.update_wrapper()) it is first unwrapped. -""" + obj may be a callable, class, module, or other object with + __annotate__ or __annotations__ attributes. + Passing any other object raises TypeError. + + The *format* parameter controls the format in which annotations are returned, + and must be a member of the Format enum or its integer equivalent. + For the VALUE format, the __annotations__ is tried first; if it + does not exist, the __annotate__ function is called. The + FORWARDREF format uses __annotations__ if it exists and can be + evaluated, and otherwise falls back to calling the __annotate__ function. + The SOURCE format tries __annotate__ first, and falls back to + using __annotations__, stringified using annotations_to_string(). + + This function handles several details for you: + + * If eval_str is true, values of type str will + be un-stringized using eval(). This is intended + for use with stringized annotations + ("from __future__ import annotations"). + * If obj doesn't have an annotations dict, returns an + empty dict. (Functions and methods always have an + annotations dict; classes, modules, and other types of + callables may not.) + * Ignores inherited annotations on classes. If a class + doesn't have its own annotations dict, returns an empty dict. + * All accesses to object members and dict values are done + using getattr() and dict.get() for safety. + * Always, always, always returns a freshly-created dict. + + eval_str controls whether or not values of type str are replaced + with the result of calling eval() on those values: + + * If eval_str is true, eval() is called on values of type str. + * If eval_str is false (the default), values of type str are unchanged. + + globals and locals are passed in to eval(); see the documentation + for eval() for more information. If either globals or locals is + None, this function may replace that value with a context-specific + default, contingent on type(obj): + + * If obj is a module, globals defaults to obj.__dict__. + * If obj is a class, globals defaults to + sys.modules[obj.__module__].__dict__ and locals + defaults to the obj class namespace. + * If obj is a callable, globals defaults to obj.__globals__, + although if obj is a wrapped function (using + functools.update_wrapper()) it is first unwrapped. + """ + @overload def get_annotations( obj: Any, @@ -240,13 +244,14 @@ default, contingent on type(obj): def type_repr(value: object) -> str: """Convert a Python value to a format suitable for use with the STRING format. -This is intended as a helper for tools that support the STRING format but do -not have access to the code that originally produced the annotations. It uses -repr() for most objects. 
+ This is intended as a helper for tools that support the STRING format but do + not have access to the code that originally produced the annotations. It uses + repr() for most objects. + + """ -""" def annotations_to_string(annotations: SupportsItems[str, object]) -> dict[str, str]: """Convert an annotation dict containing values to approximately the STRING format. -Always returns a fresh a dictionary. -""" + Always returns a fresh a dictionary. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi index 9132c0a066260..52ab6bbf013b6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/argparse.pyi @@ -60,6 +60,7 @@ All other classes in this module are considered implementation details. considered public as object names -- the API of the formatter objects is still considered an implementation detail.) """ + import sys from _typeshed import SupportsWrite, sentinel from collections.abc import Callable, Generator, Iterable, Sequence @@ -105,9 +106,10 @@ _UNRECOGNIZED_ARGS_ATTR: Final = "_unrecognized_args" # undocumented class ArgumentError(Exception): """An error from creating or using an argument (optional or positional). -The string value of this exception is the message, augmented with -information about the argument that caused it. -""" + The string value of this exception is the message, augmented with + information about the argument that caused it. + """ + argument_name: str | None message: str def __init__(self, argument: Action | None, message: str) -> None: ... @@ -116,11 +118,12 @@ information about the argument that caused it. class _AttributeHolder: """Abstract base class that provides __repr__. -The __repr__ method returns a string in the format:: - ClassName(attr=name, attr=name, ...) -The attributes are determined either by a class-level attribute, -'_kwarg_names', or by inspecting the instance __dict__. -""" + The __repr__ method returns a string in the format:: + ClassName(attr=name, attr=name, ...) + The attributes are determined either by a class-level attribute, + '_kwarg_names', or by inspecting the instance __dict__. + """ + def _get_kwargs(self) -> list[tuple[str, Any]]: ... def _get_args(self) -> list[Any]: ... @@ -166,9 +169,10 @@ class _ActionsContainer: **kwargs: Any, ) -> Action: """ -add_argument(dest, ..., name=value, ...) -add_argument(option_string, option_string, ..., name=value, ...) -""" + add_argument(dest, ..., name=value, ...) + add_argument(option_string, option_string, ..., name=value, ...) + """ + def add_argument_group( self, title: str | None = None, @@ -197,27 +201,28 @@ class _FormatterClass(Protocol): class ArgumentParser(_AttributeHolder, _ActionsContainer): """Object for parsing command line strings into Python objects. 
-Keyword Arguments: - - prog -- The name of the program (default: - ``os.path.basename(sys.argv[0])``) - - usage -- A usage message (default: auto-generated from arguments) - - description -- A description of what the program does - - epilog -- Text following the argument descriptions - - parents -- Parsers whose arguments should be copied into this one - - formatter_class -- HelpFormatter class for printing help messages - - prefix_chars -- Characters that prefix optional arguments - - fromfile_prefix_chars -- Characters that prefix files containing - additional arguments - - argument_default -- The default value for all arguments - - conflict_handler -- String indicating how to handle conflicts - - add_help -- Add a -h/-help option - - allow_abbrev -- Allow long options to be abbreviated unambiguously - - exit_on_error -- Determines whether or not ArgumentParser exits with - error info when an error occurs - - suggest_on_error - Enables suggestions for mistyped argument choices - and subparser names (default: ``False``) - - color - Allow color output in help messages (default: ``False``) -""" + Keyword Arguments: + - prog -- The name of the program (default: + ``os.path.basename(sys.argv[0])``) + - usage -- A usage message (default: auto-generated from arguments) + - description -- A description of what the program does + - epilog -- Text following the argument descriptions + - parents -- Parsers whose arguments should be copied into this one + - formatter_class -- HelpFormatter class for printing help messages + - prefix_chars -- Characters that prefix optional arguments + - fromfile_prefix_chars -- Characters that prefix files containing + additional arguments + - argument_default -- The default value for all arguments + - conflict_handler -- String indicating how to handle conflicts + - add_help -- Add a -h/-help option + - allow_abbrev -- Allow long options to be abbreviated unambiguously + - exit_on_error -- Determines whether or not ArgumentParser exits with + error info when an error occurs + - suggest_on_error - Enables suggestions for mistyped argument choices + and subparser names (default: ``False``) + - color - Allow color output in help messages (default: ``False``) + """ + prog: str usage: str | None epilog: str | None @@ -325,12 +330,13 @@ Keyword Arguments: def error(self, message: str) -> NoReturn: """error(message: string) -Prints a usage message incorporating the message to stderr and -exits. + Prints a usage message incorporating the message to stderr and + exits. + + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ -If you override this in a subclass, it should not return -- it -should either exit or raise an exception. -""" @overload def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... @overload @@ -370,9 +376,10 @@ should either exit or raise an exception. class HelpFormatter: """Formatter for generating usage messages and argument help strings. -Only the name of this class is considered a public API. All the methods -provided by the class are considered an implementation detail. -""" + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + # undocumented _prog: str _indent_increment: int @@ -436,79 +443,83 @@ provided by the class are considered an implementation detail. 
class RawDescriptionHelpFormatter(HelpFormatter): """Help message formatter which retains any formatting in descriptions. -Only the name of this class is considered a public API. All the methods -provided by the class are considered an implementation detail. -""" + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + class RawTextHelpFormatter(RawDescriptionHelpFormatter): """Help message formatter which retains formatting of all help text. -Only the name of this class is considered a public API. All the methods -provided by the class are considered an implementation detail. -""" + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + class ArgumentDefaultsHelpFormatter(HelpFormatter): """Help message formatter which adds default values to argument help. -Only the name of this class is considered a public API. All the methods -provided by the class are considered an implementation detail. -""" + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ + class MetavarTypeHelpFormatter(HelpFormatter): """Help message formatter which uses the argument 'type' as the default -metavar value (instead of the argument 'dest') + metavar value (instead of the argument 'dest') -Only the name of this class is considered a public API. All the methods -provided by the class are considered an implementation detail. -""" + Only the name of this class is considered a public API. All the methods + provided by the class are considered an implementation detail. + """ class Action(_AttributeHolder): """Information about how to convert command line strings to Python objects. -Action objects are used by an ArgumentParser to represent the information -needed to parse a single argument from one or more strings from the -command line. The keyword arguments to the Action constructor are also -all attributes of Action instances. + Action objects are used by an ArgumentParser to represent the information + needed to parse a single argument from one or more strings from the + command line. The keyword arguments to the Action constructor are also + all attributes of Action instances. -Keyword Arguments: + Keyword Arguments: - - option_strings -- A list of command-line option strings which - should be associated with this action. + - option_strings -- A list of command-line option strings which + should be associated with this action. - - dest -- The name of the attribute to hold the created object(s) + - dest -- The name of the attribute to hold the created object(s) - - nargs -- The number of command-line arguments that should be - consumed. By default, one argument will be consumed and a single - value will be produced. Other values include: - - N (an integer) consumes N arguments (and produces a list) - - '?' consumes zero or one arguments - - '*' consumes zero or more arguments (and produces a list) - - '+' consumes one or more arguments (and produces a list) - Note that the difference between the default and nargs=1 is that - with the default, a single value will be produced, while with - nargs=1, a list containing a single value will be produced. + - nargs -- The number of command-line arguments that should be + consumed. By default, one argument will be consumed and a single + value will be produced. 
Other values include: + - N (an integer) consumes N arguments (and produces a list) + - '?' consumes zero or one arguments + - '*' consumes zero or more arguments (and produces a list) + - '+' consumes one or more arguments (and produces a list) + Note that the difference between the default and nargs=1 is that + with the default, a single value will be produced, while with + nargs=1, a list containing a single value will be produced. - - const -- The value to be produced if the option is specified and the - option uses an action that takes no values. + - const -- The value to be produced if the option is specified and the + option uses an action that takes no values. - - default -- The value to be produced if the option is not specified. + - default -- The value to be produced if the option is not specified. - - type -- A callable that accepts a single string argument, and - returns the converted value. The standard Python types str, int, - float, and complex are useful examples of such callables. If None, - str is used. + - type -- A callable that accepts a single string argument, and + returns the converted value. The standard Python types str, int, + float, and complex are useful examples of such callables. If None, + str is used. - - choices -- A container of values that should be allowed. If not None, - after a command-line argument has been converted to the appropriate - type, an exception will be raised if it is not a member of this - collection. + - choices -- A container of values that should be allowed. If not None, + after a command-line argument has been converted to the appropriate + type, an exception will be raised if it is not a member of this + collection. - - required -- True if the action must always be specified at the - command line. This is only meaningful for optional command-line - arguments. + - required -- True if the action must always be specified at the + command line. This is only meaningful for optional command-line + arguments. - - help -- The help string describing the argument. + - help -- The help string describing the argument. + + - metavar -- The name to be used for the option's argument with the + help string. If None, the 'dest' value will be used as the name. + """ - - metavar -- The name to be used for the option's argument with the - help string. If None, the 'dest' value will be used as the name. -""" option_strings: Sequence[str] dest: str nargs: int | str | None @@ -646,9 +657,10 @@ else: class Namespace(_AttributeHolder): """Simple object for storing attributes. -Implements equality by attribute names and values, and provides a simple -string representation. -""" + Implements equality by attribute names and values, and provides a simple + string representation. + """ + def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any, /) -> None: ... @@ -661,19 +673,20 @@ if sys.version_info >= (3, 14): class FileType: """Deprecated factory for creating file object types -Instances of FileType are typically passed as type= arguments to the -ArgumentParser add_argument() method. - -Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. 
- - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. -""" + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. + """ + # undocumented _mode: str _bufsize: int @@ -688,19 +701,20 @@ else: class FileType: """Factory for creating file object types -Instances of FileType are typically passed as type= arguments to the -ArgumentParser add_argument() method. - -Keyword Arguments: - - mode -- A string indicating how the file is to be opened. Accepts the - same values as the builtin open() function. - - bufsize -- The file's desired buffer size. Accepts the same values as - the builtin open() function. - - encoding -- The file's encoding. Accepts the same values as the - builtin open() function. - - errors -- A string indicating how encoding and decoding errors are to - be handled. Accepts the same value as the builtin open() function. -""" + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. + """ + # undocumented _mode: str _bufsize: int @@ -1041,8 +1055,7 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): # undocumented class ArgumentTypeError(Exception): - """An error from trying to convert a command line string to a type. -""" + """An error from trying to convert a command line string to a type.""" # undocumented def _get_action_name(argument: Action | None) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi index 299bbe7535924..a7b41229c92d3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/array.pyi @@ -3,6 +3,7 @@ an array of basic values: characters, integers, floating-point numbers. Arrays are sequence types and behave very much like lists, except that the type of objects stored in them is constrained. """ + import sys from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Iterable, MutableSequence @@ -26,69 +27,70 @@ typecodes: str class array(MutableSequence[_T]): """array(typecode [, initializer]) -> array -Return a new array whose items are restricted by typecode, and -initialized from the optional initializer value, which must be a list, -string or iterable over elements of the appropriate type. - -Arrays represent basic values and behave very much like lists, except -the type of objects stored in them is constrained. 
The type is specified -at object creation time by using a type code, which is a single character. -The following type codes are defined: - - Type code C Type Minimum size in bytes - 'b' signed integer 1 - 'B' unsigned integer 1 - 'u' Unicode character 2 (see note) - 'h' signed integer 2 - 'H' unsigned integer 2 - 'i' signed integer 2 - 'I' unsigned integer 2 - 'l' signed integer 4 - 'L' unsigned integer 4 - 'q' signed integer 8 (see note) - 'Q' unsigned integer 8 (see note) - 'f' floating-point 4 - 'd' floating-point 8 - -NOTE: The 'u' typecode corresponds to Python's unicode character. On -narrow builds this is 2-bytes on wide builds this is 4-bytes. - -NOTE: The 'q' and 'Q' type codes are only available if the platform -C compiler used to build Python supports 'long long', or, on Windows, -'__int64'. - -Methods: - -append() -- append a new item to the end of the array -buffer_info() -- return information giving the current memory info -byteswap() -- byteswap all the items of the array -count() -- return number of occurrences of an object -extend() -- extend array by appending multiple elements from an iterable -fromfile() -- read items from a file object -fromlist() -- append items from the list -frombytes() -- append items from the string -index() -- return index of first occurrence of an object -insert() -- insert a new item into the array at a provided position -pop() -- remove and return item (default last) -remove() -- remove first occurrence of an object -reverse() -- reverse the order of the items in the array -tofile() -- write all items to a file object -tolist() -- return the array converted to an ordinary list -tobytes() -- return the array converted to a string - -Attributes: - -typecode -- the typecode character used to create the array -itemsize -- the length in bytes of one array item -""" + Return a new array whose items are restricted by typecode, and + initialized from the optional initializer value, which must be a list, + string or iterable over elements of the appropriate type. + + Arrays represent basic values and behave very much like lists, except + the type of objects stored in them is constrained. The type is specified + at object creation time by using a type code, which is a single character. + The following type codes are defined: + + Type code C Type Minimum size in bytes + 'b' signed integer 1 + 'B' unsigned integer 1 + 'u' Unicode character 2 (see note) + 'h' signed integer 2 + 'H' unsigned integer 2 + 'i' signed integer 2 + 'I' unsigned integer 2 + 'l' signed integer 4 + 'L' unsigned integer 4 + 'q' signed integer 8 (see note) + 'Q' unsigned integer 8 (see note) + 'f' floating-point 4 + 'd' floating-point 8 + + NOTE: The 'u' typecode corresponds to Python's unicode character. On + narrow builds this is 2-bytes on wide builds this is 4-bytes. + + NOTE: The 'q' and 'Q' type codes are only available if the platform + C compiler used to build Python supports 'long long', or, on Windows, + '__int64'. 
+ + Methods: + + append() -- append a new item to the end of the array + buffer_info() -- return information giving the current memory info + byteswap() -- byteswap all the items of the array + count() -- return number of occurrences of an object + extend() -- extend array by appending multiple elements from an iterable + fromfile() -- read items from a file object + fromlist() -- append items from the list + frombytes() -- append items from the string + index() -- return index of first occurrence of an object + insert() -- insert a new item into the array at a provided position + pop() -- remove and return item (default last) + remove() -- remove first occurrence of an object + reverse() -- reverse the order of the items in the array + tofile() -- write all items to a file object + tolist() -- return the array converted to an ordinary list + tobytes() -- return the array converted to a string + + Attributes: + + typecode -- the typecode character used to create the array + itemsize -- the length in bytes of one array item + """ + @property def typecode(self) -> _TypeCode: - """the typecode character used to create the array -""" + """the typecode character used to create the array""" + @property def itemsize(self) -> int: - """the size, in bytes, of one array item -""" + """the size, in bytes, of one array item""" + @overload def __new__( cls: type[array[int]], typecode: _IntTypeCode, initializer: bytes | bytearray | Iterable[int] = ..., / @@ -119,138 +121,137 @@ itemsize -- the length in bytes of one array item @overload def __new__(cls, typecode: str, initializer: bytes | bytearray = ..., /) -> Self: ... def append(self, v: _T, /) -> None: - """Append new value v to the end of the array. -""" + """Append new value v to the end of the array.""" + def buffer_info(self) -> tuple[int, int]: """Return a tuple (address, length) giving the current memory address and the length in items of the buffer used to hold array's contents. -The length should be multiplied by the itemsize attribute to calculate -the buffer length in bytes. -""" + The length should be multiplied by the itemsize attribute to calculate + the buffer length in bytes. + """ + def byteswap(self) -> None: """Byteswap all items of the array. -If the items in the array are not 1, 2, 4, or 8 bytes in size, RuntimeError is -raised. -""" + If the items in the array are not 1, 2, 4, or 8 bytes in size, RuntimeError is + raised. + """ + def count(self, v: _T, /) -> int: - """Return number of occurrences of v in the array. -""" + """Return number of occurrences of v in the array.""" + def extend(self, bb: Iterable[_T], /) -> None: - """Append items to the end of the array. -""" + """Append items to the end of the array.""" + def frombytes(self, buffer: ReadableBuffer, /) -> None: - """Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method. -""" + """Appends items from the string, interpreting it as an array of machine values, as if it had been read from a file using the fromfile() method.""" + def fromfile(self, f: SupportsRead[bytes], n: int, /) -> None: - """Read n objects from the file object f and append them to the end of the array. -""" + """Read n objects from the file object f and append them to the end of the array.""" + def fromlist(self, list: list[_T], /) -> None: - """Append items to array from list. 
-""" + """Append items to array from list.""" + def fromunicode(self, ustr: str, /) -> None: """Extends this array with data from the unicode string ustr. -The array must be a unicode type array; otherwise a ValueError is raised. -Use array.frombytes(ustr.encode(...)) to append Unicode data to an array of -some other type. -""" + The array must be a unicode type array; otherwise a ValueError is raised. + Use array.frombytes(ustr.encode(...)) to append Unicode data to an array of + some other type. + """ if sys.version_info >= (3, 10): def index(self, v: _T, start: int = 0, stop: int = sys.maxsize, /) -> int: """Return index of first occurrence of v in the array. -Raise ValueError if the value is not present. -""" + Raise ValueError if the value is not present. + """ else: def index(self, v: _T, /) -> int: # type: ignore[override] - """Return index of first occurrence of v in the array. -""" + """Return index of first occurrence of v in the array.""" def insert(self, i: int, v: _T, /) -> None: - """Insert a new item v into the array before position i. -""" + """Insert a new item v into the array before position i.""" + def pop(self, i: int = -1, /) -> _T: """Return the i-th element and delete it from the array. -i defaults to -1. -""" + i defaults to -1. + """ + def remove(self, v: _T, /) -> None: - """Remove the first occurrence of v in the array. -""" + """Remove the first occurrence of v in the array.""" + def tobytes(self) -> bytes: - """Convert the array to an array of machine values and return the bytes representation. -""" + """Convert the array to an array of machine values and return the bytes representation.""" + def tofile(self, f: SupportsWrite[bytes], /) -> None: - """Write all items (as machine values) to the file object f. -""" + """Write all items (as machine values) to the file object f.""" + def tolist(self) -> list[_T]: - """Convert array to an ordinary list with the same items. -""" + """Convert array to an ordinary list with the same items.""" + def tounicode(self) -> str: """Extends this array with data from the unicode string ustr. -Convert the array to a unicode string. The array must be a unicode type array; -otherwise a ValueError is raised. Use array.tobytes().decode() to obtain a -unicode string from an array of some other type. -""" - + Convert the array to a unicode string. The array must be a unicode type array; + otherwise a ValueError is raised. Use array.tobytes().decode() to obtain a + unicode string from an array of some other type. + """ __hash__: ClassVar[None] # type: ignore[assignment] def __contains__(self, value: object, /) -> bool: - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + @overload def __getitem__(self, key: SupportsIndex, /) -> _T: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> array[_T]: ... @overload # type: ignore[override] def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: array[_T], /) -> None: ... def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key]. -""" + """Delete self[key].""" + def __add__(self, value: array[_T], /) -> array[_T]: - """Return self+value. -""" + """Return self+value.""" + def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: array[_T], /) -> bool: ... 
def __gt__(self, value: array[_T], /) -> bool: ... def __iadd__(self, value: array[_T], /) -> Self: # type: ignore[override] - """Implement self+=value. -""" + """Implement self+=value.""" + def __imul__(self, value: int, /) -> Self: - """Implement self*=value. -""" + """Implement self*=value.""" + def __le__(self, value: array[_T], /) -> bool: ... def __lt__(self, value: array[_T], /) -> bool: ... def __mul__(self, value: int, /) -> array[_T]: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: int, /) -> array[_T]: - """Return value*self. -""" + """Return value*self.""" + def __copy__(self) -> array[_T]: - """Return a copy of the array. -""" + """Return a copy of the array.""" + def __deepcopy__(self, unused: Any, /) -> array[_T]: - """Return a copy of the array. -""" + """Return a copy of the array.""" + def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" + def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object. -""" + """Release the buffer object that exposes the underlying memory of the object.""" if sys.version_info >= (3, 12): def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" ArrayType = array diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi index 5cf48716038b4..d9ca52c7d6cc1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ast.pyi @@ -20,6 +20,7 @@ that work tightly with the python syntax (template engines for example). :copyright: Copyright 2008 by Armin Ronacher. :license: Python License. """ + import ast import builtins import os @@ -63,8 +64,7 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__(self) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" else: class AST: @@ -78,11 +78,11 @@ class mod(AST): | Interactive(stmt* body) | Expression(expr body) | FunctionType(expr* argtypes, expr returns) -""" + """ class Module(mod): - """Module(stmt* body, type_ignore* type_ignores) -""" + """Module(stmt* body, type_ignore* type_ignores)""" + if sys.version_info >= (3, 10): __match_args__ = ("body", "type_ignores") body: list[stmt] @@ -94,12 +94,11 @@ class Module(mod): if sys.version_info >= (3, 14): def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Interactive(mod): - """Interactive(stmt* body) -""" + """Interactive(stmt* body)""" + if sys.version_info >= (3, 10): __match_args__ = ("body",) body: list[stmt] @@ -110,12 +109,11 @@ class Interactive(mod): if sys.version_info >= (3, 14): def __replace__(self, *, body: list[stmt] = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class Expression(mod): - """Expression(expr body) -""" + """Expression(expr body)""" + if sys.version_info >= (3, 10): __match_args__ = ("body",) body: expr @@ -123,12 +121,11 @@ class Expression(mod): if sys.version_info >= (3, 14): def __replace__(self, *, body: expr = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class FunctionType(mod): - """FunctionType(expr* argtypes, expr returns) -""" + """FunctionType(expr* argtypes, expr returns)""" + if sys.version_info >= (3, 10): __match_args__ = ("argtypes", "returns") argtypes: list[expr] @@ -143,39 +140,39 @@ class FunctionType(mod): if sys.version_info >= (3, 14): def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class stmt(AST): """stmt = FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) - | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) - | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) - | Return(expr? value) - | Delete(expr* targets) - | Assign(expr* targets, expr value, string? type_comment) - | TypeAlias(expr name, type_param* type_params, expr value) - | AugAssign(expr target, operator op, expr value) - | AnnAssign(expr target, expr annotation, expr? value, int simple) - | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) - | While(expr test, stmt* body, stmt* orelse) - | If(expr test, stmt* body, stmt* orelse) - | With(withitem* items, stmt* body, string? type_comment) - | AsyncWith(withitem* items, stmt* body, string? type_comment) - | Match(expr subject, match_case* cases) - | Raise(expr? exc, expr? cause) - | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) - | Assert(expr test, expr? msg) - | Import(alias* names) - | ImportFrom(identifier? module, alias* names, int? level) - | Global(identifier* names) - | Nonlocal(identifier* names) - | Expr(expr value) - | Pass - | Break - | Continue -""" + | AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) + | ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) + | Return(expr? value) + | Delete(expr* targets) + | Assign(expr* targets, expr value, string? type_comment) + | TypeAlias(expr name, type_param* type_params, expr value) + | AugAssign(expr target, operator op, expr value) + | AnnAssign(expr target, expr annotation, expr? value, int simple) + | For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) + | While(expr test, stmt* body, stmt* orelse) + | If(expr test, stmt* body, stmt* orelse) + | With(withitem* items, stmt* body, string? 
type_comment) + | AsyncWith(withitem* items, stmt* body, string? type_comment) + | Match(expr subject, match_case* cases) + | Raise(expr? exc, expr? cause) + | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) + | Assert(expr test, expr? msg) + | Import(alias* names) + | ImportFrom(identifier? module, alias* names, int? level) + | Global(identifier* names) + | Nonlocal(identifier* names) + | Expr(expr value) + | Pass + | Break + | Continue + """ + lineno: int col_offset: int end_lineno: int | None @@ -184,12 +181,11 @@ class stmt(AST): if sys.version_info >= (3, 14): def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class FunctionDef(stmt): - """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) -""" + """FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" + if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -265,12 +261,11 @@ class FunctionDef(stmt): type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class AsyncFunctionDef(stmt): - """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params) -""" + """AsyncFunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list, expr? returns, string? type_comment, type_param* type_params)""" + if sys.version_info >= (3, 12): __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params") elif sys.version_info >= (3, 10): @@ -346,12 +341,11 @@ class AsyncFunctionDef(stmt): type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class ClassDef(stmt): - """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params) -""" + """ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body, expr* decorator_list, type_param* type_params)""" + if sys.version_info >= (3, 12): __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params") elif sys.version_info >= (3, 10): @@ -408,12 +402,11 @@ class ClassDef(stmt): type_params: list[type_param] = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Return(stmt): - """Return(expr? value) -""" + """Return(expr? 
value)""" + if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None @@ -421,12 +414,11 @@ class Return(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Delete(stmt): - """Delete(expr* targets) -""" + """Delete(expr* targets)""" + if sys.version_info >= (3, 10): __match_args__ = ("targets",) targets: list[expr] @@ -437,12 +429,11 @@ class Delete(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Assign(stmt): - """Assign(expr* targets, expr value, string? type_comment) -""" + """Assign(expr* targets, expr value, string? type_comment)""" + if sys.version_info >= (3, 10): __match_args__ = ("targets", "value", "type_comment") targets: list[expr] @@ -466,13 +457,12 @@ class Assign(stmt): def __replace__( self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 12): class TypeAlias(stmt): - """TypeAlias(expr name, type_param* type_params, expr value) -""" + """TypeAlias(expr name, type_param* type_params, expr value)""" + __match_args__ = ("name", "type_params", "value") name: Name type_params: list[type_param] @@ -500,12 +490,11 @@ if sys.version_info >= (3, 12): value: expr = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class AugAssign(stmt): - """AugAssign(expr target, operator op, expr value) -""" + """AugAssign(expr target, operator op, expr value)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") target: Name | Attribute | Subscript @@ -524,12 +513,11 @@ class AugAssign(stmt): value: expr = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class AnnAssign(stmt): - """AnnAssign(expr target, expr annotation, expr? value, int simple) -""" + """AnnAssign(expr target, expr annotation, expr? value, int simple)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") target: Name | Attribute | Subscript @@ -566,12 +554,11 @@ class AnnAssign(stmt): simple: int = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class For(stmt): - """For(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) -""" + """For(expr target, expr iter, stmt* body, stmt* orelse, string? 
type_comment)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -611,12 +598,11 @@ class For(stmt): type_comment: str | None = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class AsyncFor(stmt): - """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment) -""" + """AsyncFor(expr target, expr iter, stmt* body, stmt* orelse, string? type_comment)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "body", "orelse", "type_comment") target: expr @@ -656,12 +642,11 @@ class AsyncFor(stmt): type_comment: str | None = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class While(stmt): - """While(expr test, stmt* body, stmt* orelse) -""" + """While(expr test, stmt* body, stmt* orelse)""" + if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -678,12 +663,11 @@ class While(stmt): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class If(stmt): - """If(expr test, stmt* body, stmt* orelse) -""" + """If(expr test, stmt* body, stmt* orelse)""" + if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -700,12 +684,11 @@ class If(stmt): def __replace__( self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class With(stmt): - """With(withitem* items, stmt* body, string? type_comment) -""" + """With(withitem* items, stmt* body, string? type_comment)""" + if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -733,12 +716,11 @@ class With(stmt): type_comment: str | None = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class AsyncWith(stmt): - """AsyncWith(withitem* items, stmt* body, string? type_comment) -""" + """AsyncWith(withitem* items, stmt* body, string? type_comment)""" + if sys.version_info >= (3, 10): __match_args__ = ("items", "body", "type_comment") items: list[withitem] @@ -766,12 +748,11 @@ class AsyncWith(stmt): type_comment: str | None = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Raise(stmt): - """Raise(expr? exc, expr? cause) -""" + """Raise(expr? exc, expr? 
cause)""" + if sys.version_info >= (3, 10): __match_args__ = ("exc", "cause") exc: expr | None @@ -780,12 +761,11 @@ class Raise(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Try(stmt): - """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) -""" + """Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" + if sys.version_info >= (3, 10): __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] @@ -821,13 +801,12 @@ class Try(stmt): finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 11): class TryStar(stmt): - """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody) -""" + """TryStar(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)""" + __match_args__ = ("body", "handlers", "orelse", "finalbody") body: list[stmt] handlers: list[ExceptHandler] @@ -862,12 +841,11 @@ if sys.version_info >= (3, 11): finalbody: list[stmt] = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Assert(stmt): - """Assert(expr test, expr? msg) -""" + """Assert(expr test, expr? msg)""" + if sys.version_info >= (3, 10): __match_args__ = ("test", "msg") test: expr @@ -876,12 +854,11 @@ class Assert(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Import(stmt): - """Import(alias* names) -""" + """Import(alias* names)""" + if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[alias] @@ -892,12 +869,11 @@ class Import(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class ImportFrom(stmt): - """ImportFrom(identifier? module, alias* names, int? level) -""" + """ImportFrom(identifier? module, alias* names, int? level)""" + if sys.version_info >= (3, 10): __match_args__ = ("module", "names", "level") module: str | None @@ -922,12 +898,11 @@ class ImportFrom(stmt): def __replace__( self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class Global(stmt): - """Global(identifier* names) -""" + """Global(identifier* names)""" + if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -938,12 +913,11 @@ class Global(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Nonlocal(stmt): - """Nonlocal(identifier* names) -""" + """Nonlocal(identifier* names)""" + if sys.version_info >= (3, 10): __match_args__ = ("names",) names: list[str] @@ -954,12 +928,11 @@ class Nonlocal(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, names: list[str] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Expr(stmt): - """Expr(expr value) -""" + """Expr(expr value)""" + if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr @@ -967,50 +940,49 @@ class Expr(stmt): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Pass(stmt): - """Pass -""" + """Pass""" + class Break(stmt): - """Break -""" + """Break""" + class Continue(stmt): - """Continue -""" + """Continue""" class expr(AST): """expr = BoolOp(boolop op, expr* values) - | NamedExpr(expr target, expr value) - | BinOp(expr left, operator op, expr right) - | UnaryOp(unaryop op, expr operand) - | Lambda(arguments args, expr body) - | IfExp(expr test, expr body, expr orelse) - | Dict(expr?* keys, expr* values) - | Set(expr* elts) - | ListComp(expr elt, comprehension* generators) - | SetComp(expr elt, comprehension* generators) - | DictComp(expr key, expr value, comprehension* generators) - | GeneratorExp(expr elt, comprehension* generators) - | Await(expr value) - | Yield(expr? value) - | YieldFrom(expr value) - | Compare(expr left, cmpop* ops, expr* comparators) - | Call(expr func, expr* args, keyword* keywords) - | FormattedValue(expr value, int conversion, expr? format_spec) - | Interpolation(expr value, constant str, int conversion, expr? format_spec) - | JoinedStr(expr* values) - | TemplateStr(expr* values) - | Constant(constant value, string? kind) - | Attribute(expr value, identifier attr, expr_context ctx) - | Subscript(expr value, expr slice, expr_context ctx) - | Starred(expr value, expr_context ctx) - | Name(identifier id, expr_context ctx) - | List(expr* elts, expr_context ctx) - | Tuple(expr* elts, expr_context ctx) - | Slice(expr? lower, expr? upper, expr? step) -""" + | NamedExpr(expr target, expr value) + | BinOp(expr left, operator op, expr right) + | UnaryOp(unaryop op, expr operand) + | Lambda(arguments args, expr body) + | IfExp(expr test, expr body, expr orelse) + | Dict(expr?* keys, expr* values) + | Set(expr* elts) + | ListComp(expr elt, comprehension* generators) + | SetComp(expr elt, comprehension* generators) + | DictComp(expr key, expr value, comprehension* generators) + | GeneratorExp(expr elt, comprehension* generators) + | Await(expr value) + | Yield(expr? 
value) + | YieldFrom(expr value) + | Compare(expr left, cmpop* ops, expr* comparators) + | Call(expr func, expr* args, keyword* keywords) + | FormattedValue(expr value, int conversion, expr? format_spec) + | Interpolation(expr value, constant str, int conversion, expr? format_spec) + | JoinedStr(expr* values) + | TemplateStr(expr* values) + | Constant(constant value, string? kind) + | Attribute(expr value, identifier attr, expr_context ctx) + | Subscript(expr value, expr slice, expr_context ctx) + | Starred(expr value, expr_context ctx) + | Name(identifier id, expr_context ctx) + | List(expr* elts, expr_context ctx) + | Tuple(expr* elts, expr_context ctx) + | Slice(expr? lower, expr? upper, expr? step) + """ + lineno: int col_offset: int end_lineno: int | None @@ -1019,12 +991,11 @@ class expr(AST): if sys.version_info >= (3, 14): def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class BoolOp(expr): - """BoolOp(boolop op, expr* values) -""" + """BoolOp(boolop op, expr* values)""" + if sys.version_info >= (3, 10): __match_args__ = ("op", "values") op: boolop @@ -1036,12 +1007,11 @@ class BoolOp(expr): if sys.version_info >= (3, 14): def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class NamedExpr(expr): - """NamedExpr(expr target, expr value) -""" + """NamedExpr(expr target, expr value)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "value") target: Name @@ -1050,12 +1020,11 @@ class NamedExpr(expr): if sys.version_info >= (3, 14): def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class BinOp(expr): - """BinOp(expr left, operator op, expr right) -""" + """BinOp(expr left, operator op, expr right)""" + if sys.version_info >= (3, 10): __match_args__ = ("left", "op", "right") left: expr @@ -1064,15 +1033,12 @@ class BinOp(expr): def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class UnaryOp(expr): - """UnaryOp(unaryop op, expr operand) -""" + """UnaryOp(unaryop op, expr operand)""" + if sys.version_info >= (3, 10): __match_args__ = ("op", "operand") op: unaryop @@ -1081,12 +1047,11 @@ class UnaryOp(expr): if sys.version_info >= (3, 14): def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class Lambda(expr): - """Lambda(arguments args, expr body) -""" + """Lambda(arguments args, expr body)""" + if sys.version_info >= (3, 10): __match_args__ = ("args", "body") args: arguments @@ -1095,12 +1060,11 @@ class Lambda(expr): if sys.version_info >= (3, 14): def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class IfExp(expr): - """IfExp(expr test, expr body, expr orelse) -""" + """IfExp(expr test, expr body, expr orelse)""" + if sys.version_info >= (3, 10): __match_args__ = ("test", "body", "orelse") test: expr @@ -1109,15 +1073,12 @@ class IfExp(expr): def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class Dict(expr): - """Dict(expr?* keys, expr* values) -""" + """Dict(expr?* keys, expr* values)""" + if sys.version_info >= (3, 10): __match_args__ = ("keys", "values") keys: list[expr | None] @@ -1128,15 +1089,12 @@ class Dict(expr): def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class Set(expr): - """Set(expr* elts) -""" + """Set(expr* elts)""" + if sys.version_info >= (3, 10): __match_args__ = ("elts",) elts: list[expr] @@ -1147,12 +1105,11 @@ class Set(expr): if sys.version_info >= (3, 14): def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class ListComp(expr): - """ListComp(expr elt, comprehension* generators) -""" + """ListComp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1163,15 +1120,12 @@ class ListComp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class SetComp(expr): - """SetComp(expr elt, comprehension* generators) -""" + """SetComp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1182,15 +1136,12 @@ class SetComp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class DictComp(expr): - """DictComp(expr key, expr value, comprehension* generators) -""" + """DictComp(expr key, expr value, comprehension* generators)""" + if sys.version_info >= (3, 10): __match_args__ = ("key", "value", "generators") key: expr @@ -1207,12 +1158,11 @@ class DictComp(expr): def __replace__( self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class GeneratorExp(expr): - """GeneratorExp(expr elt, comprehension* generators) -""" + """GeneratorExp(expr elt, comprehension* generators)""" + if sys.version_info >= (3, 10): __match_args__ = ("elt", "generators") elt: expr @@ -1223,15 +1173,12 @@ class GeneratorExp(expr): def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class Await(expr): - """Await(expr value) -""" + """Await(expr value)""" + if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr @@ -1239,12 +1186,11 @@ class Await(expr): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Yield(expr): - """Yield(expr? value) -""" + """Yield(expr? value)""" + if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr | None @@ -1252,12 +1198,11 @@ class Yield(expr): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class YieldFrom(expr): - """YieldFrom(expr value) -""" + """YieldFrom(expr value)""" + if sys.version_info >= (3, 10): __match_args__ = ("value",) value: expr @@ -1265,12 +1210,11 @@ class YieldFrom(expr): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Compare(expr): - """Compare(expr left, cmpop* ops, expr* comparators) -""" + """Compare(expr left, cmpop* ops, expr* comparators)""" + if sys.version_info >= (3, 10): __match_args__ = ("left", "ops", "comparators") left: expr @@ -1287,12 +1231,11 @@ class Compare(expr): def __replace__( self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Call(expr): - """Call(expr func, expr* args, keyword* keywords) -""" + """Call(expr func, expr* args, keyword* keywords)""" + if sys.version_info >= (3, 10): __match_args__ = ("func", "args", "keywords") func: expr @@ -1309,12 +1252,11 @@ class Call(expr): def __replace__( self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class FormattedValue(expr): - """FormattedValue(expr value, int conversion, expr? format_spec) -""" + """FormattedValue(expr value, int conversion, expr? format_spec)""" + if sys.version_info >= (3, 10): __match_args__ = ("value", "conversion", "format_spec") value: expr @@ -1326,12 +1268,11 @@ class FormattedValue(expr): def __replace__( self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class JoinedStr(expr): - """JoinedStr(expr* values) -""" + """JoinedStr(expr* values)""" + if sys.version_info >= (3, 10): __match_args__ = ("values",) values: list[expr] @@ -1342,23 +1283,21 @@ class JoinedStr(expr): if sys.version_info >= (3, 14): def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 14): class TemplateStr(expr): - """TemplateStr(expr* values) -""" + """TemplateStr(expr* values)""" + __match_args__ = ("values",) values: list[expr] def __init__(self, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> None: ... def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Interpolation(expr): - """Interpolation(expr value, constant str, int conversion, expr? format_spec) -""" + """Interpolation(expr value, constant str, int conversion, expr? 
format_spec)""" + __match_args__ = ("value", "str", "conversion", "format_spec") value: expr str: builtins.str @@ -1381,8 +1320,7 @@ if sys.version_info >= (3, 14): format_spec: expr | None = ..., **kwargs: Unpack[_Attributes], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 10): from types import EllipsisType @@ -1393,8 +1331,8 @@ else: _ConstantValue: typing_extensions.TypeAlias = str | bytes | bool | int | float | complex | None | ellipsis # noqa: F821 class Constant(expr): - """Constant(constant value, string? kind) -""" + """Constant(constant value, string? kind)""" + if sys.version_info >= (3, 10): __match_args__ = ("value", "kind") value: _ConstantValue @@ -1404,16 +1342,16 @@ class Constant(expr): @property @deprecated("Removed in Python 3.14. Use `value` instead.") def n(self) -> _ConstantValue: - """Deprecated. Use value instead. -""" + """Deprecated. Use value instead.""" + @n.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def n(self, value: _ConstantValue) -> None: ... @property @deprecated("Removed in Python 3.14. Use `value` instead.") def s(self) -> _ConstantValue: - """Deprecated. Use value instead. -""" + """Deprecated. Use value instead.""" + @s.setter @deprecated("Removed in Python 3.14. Use `value` instead.") def s(self, value: _ConstantValue) -> None: ... @@ -1422,12 +1360,11 @@ class Constant(expr): if sys.version_info >= (3, 14): def __replace__(self, *, value: _ConstantValue = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Attribute(expr): - """Attribute(expr value, identifier attr, expr_context ctx) -""" + """Attribute(expr value, identifier attr, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("value", "attr", "ctx") value: expr @@ -1439,12 +1376,11 @@ class Attribute(expr): def __replace__( self, *, value: expr = ..., attr: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Subscript(expr): - """Subscript(expr value, expr slice, expr_context ctx) -""" + """Subscript(expr value, expr slice, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("value", "slice", "ctx") value: expr @@ -1456,12 +1392,11 @@ class Subscript(expr): def __replace__( self, *, value: expr = ..., slice: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Starred(expr): - """Starred(expr value, expr_context ctx) -""" + """Starred(expr value, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("value", "ctx") value: expr @@ -1470,12 +1405,11 @@ class Starred(expr): if sys.version_info >= (3, 14): def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class Name(expr): - """Name(identifier id, expr_context ctx) -""" + """Name(identifier id, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("id", "ctx") id: str @@ -1484,12 +1418,11 @@ class Name(expr): if sys.version_info >= (3, 14): def __replace__(self, *, id: str = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class List(expr): - """List(expr* elts, expr_context ctx) -""" + """List(expr* elts, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1501,12 +1434,11 @@ class List(expr): if sys.version_info >= (3, 14): def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Tuple(expr): - """Tuple(expr* elts, expr_context ctx) -""" + """Tuple(expr* elts, expr_context ctx)""" + if sys.version_info >= (3, 10): __match_args__ = ("elts", "ctx") elts: list[expr] @@ -1519,17 +1451,15 @@ class Tuple(expr): if sys.version_info >= (3, 14): def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" @deprecated("Deprecated since Python 3.9.") class slice(AST): - """Deprecated AST node class. -""" + """Deprecated AST node class.""" class Slice(expr): - """Slice(expr? lower, expr? upper, expr? step) -""" + """Slice(expr? lower, expr? upper, expr? step)""" + if sys.version_info >= (3, 10): __match_args__ = ("lower", "upper", "step") lower: expr | None @@ -1543,157 +1473,150 @@ class Slice(expr): def __replace__( self, *, lower: expr | None = ..., upper: expr | None = ..., step: expr | None = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" @deprecated("Deprecated since Python 3.9. Use `ast.Tuple` instead.") class ExtSlice(slice): - """Deprecated AST node class. Use ast.Tuple instead. -""" + """Deprecated AST node class. Use ast.Tuple instead.""" + def __new__(cls, dims: Iterable[slice] = (), **kwargs: Unpack[_Attributes]) -> Tuple: ... # type: ignore[misc] @deprecated("Deprecated since Python 3.9. Use the index value directly instead.") class Index(slice): - """Deprecated AST node class. Use the index value directly instead. -""" + """Deprecated AST node class. Use the index value directly instead.""" + def __new__(cls, value: expr, **kwargs: Unpack[_Attributes]) -> expr: ... # type: ignore[misc] class expr_context(AST): - """expr_context = Load | Store | Del -""" + """expr_context = Load | Store | Del""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") class AugLoad(expr_context): - """Deprecated AST node class. Unused in Python 3. -""" + """Deprecated AST node class. Unused in Python 3.""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") class AugStore(expr_context): - """Deprecated AST node class. Unused in Python 3. 
-""" + """Deprecated AST node class. Unused in Python 3.""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") class Param(expr_context): - """Deprecated AST node class. Unused in Python 3. -""" + """Deprecated AST node class. Unused in Python 3.""" @deprecated("Deprecated since Python 3.9. Unused in Python 3.") class Suite(mod): - """Deprecated AST node class. Unused in Python 3. -""" + """Deprecated AST node class. Unused in Python 3.""" class Load(expr_context): - """Load -""" + """Load""" + class Store(expr_context): - """Store -""" + """Store""" + class Del(expr_context): - """Del -""" + """Del""" + class boolop(AST): - """boolop = And | Or -""" + """boolop = And | Or""" + class And(boolop): - """And -""" + """And""" + class Or(boolop): - """Or -""" + """Or""" + class operator(AST): - """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv -""" + """operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift | RShift | BitOr | BitXor | BitAnd | FloorDiv""" + class Add(operator): - """Add -""" + """Add""" + class Sub(operator): - """Sub -""" + """Sub""" + class Mult(operator): - """Mult -""" + """Mult""" + class MatMult(operator): - """MatMult -""" + """MatMult""" + class Div(operator): - """Div -""" + """Div""" + class Mod(operator): - """Mod -""" + """Mod""" + class Pow(operator): - """Pow -""" + """Pow""" + class LShift(operator): - """LShift -""" + """LShift""" + class RShift(operator): - """RShift -""" + """RShift""" + class BitOr(operator): - """BitOr -""" + """BitOr""" + class BitXor(operator): - """BitXor -""" + """BitXor""" + class BitAnd(operator): - """BitAnd -""" + """BitAnd""" + class FloorDiv(operator): - """FloorDiv -""" + """FloorDiv""" + class unaryop(AST): - """unaryop = Invert | Not | UAdd | USub -""" + """unaryop = Invert | Not | UAdd | USub""" + class Invert(unaryop): - """Invert -""" + """Invert""" + class Not(unaryop): - """Not -""" + """Not""" + class UAdd(unaryop): - """UAdd -""" + """UAdd""" + class USub(unaryop): - """USub -""" + """USub""" + class cmpop(AST): - """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn -""" + """cmpop = Eq | NotEq | Lt | LtE | Gt | GtE | Is | IsNot | In | NotIn""" + class Eq(cmpop): - """Eq -""" + """Eq""" + class NotEq(cmpop): - """NotEq -""" + """NotEq""" + class Lt(cmpop): - """Lt -""" + """Lt""" + class LtE(cmpop): - """LtE -""" + """LtE""" + class Gt(cmpop): - """Gt -""" + """Gt""" + class GtE(cmpop): - """GtE -""" + """GtE""" + class Is(cmpop): - """Is -""" + """Is""" + class IsNot(cmpop): - """IsNot -""" + """IsNot""" + class In(cmpop): - """In -""" + """In""" + class NotIn(cmpop): - """NotIn -""" + """NotIn""" class comprehension(AST): - """comprehension(expr target, expr iter, expr* ifs, int is_async) -""" + """comprehension(expr target, expr iter, expr* ifs, int is_async)""" + if sys.version_info >= (3, 10): __match_args__ = ("target", "iter", "ifs", "is_async") target: expr @@ -1710,12 +1633,11 @@ class comprehension(AST): if sys.version_info >= (3, 14): def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class excepthandler(AST): - """excepthandler = ExceptHandler(expr? type, identifier? name, stmt* body) -""" + """excepthandler = ExceptHandler(expr? type, identifier? 
name, stmt* body)""" + lineno: int col_offset: int end_lineno: int | None @@ -1726,12 +1648,11 @@ class excepthandler(AST): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ... ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class ExceptHandler(excepthandler): - """ExceptHandler(expr? type, identifier? name, stmt* body) -""" + """ExceptHandler(expr? type, identifier? name, stmt* body)""" + if sys.version_info >= (3, 10): __match_args__ = ("type", "name", "body") type: expr | None @@ -1753,12 +1674,11 @@ class ExceptHandler(excepthandler): def __replace__( self, *, type: expr | None = ..., name: str | None = ..., body: list[stmt] = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class arguments(AST): - """arguments(arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults) -""" + """arguments(arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs, expr* kw_defaults, arg? kwarg, expr* defaults)""" + if sys.version_info >= (3, 10): __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") posonlyargs: list[arg] @@ -1828,12 +1748,11 @@ class arguments(AST): kwarg: arg | None = ..., defaults: list[expr] = ..., ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class arg(AST): - """arg(identifier arg, expr? annotation, string? type_comment) -""" + """arg(identifier arg, expr? annotation, string? type_comment)""" + lineno: int col_offset: int end_lineno: int | None @@ -1851,12 +1770,11 @@ class arg(AST): def __replace__( self, *, arg: str = ..., annotation: expr | None = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class keyword(AST): - """keyword(identifier? arg, expr value) -""" + """keyword(identifier? arg, expr value)""" + lineno: int col_offset: int end_lineno: int | None @@ -1872,12 +1790,11 @@ class keyword(AST): if sys.version_info >= (3, 14): def __replace__(self, *, arg: str | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class alias(AST): - """alias(identifier name, identifier? asname) -""" + """alias(identifier name, identifier? asname)""" + name: str asname: str | None if sys.version_info >= (3, 10): @@ -1894,12 +1811,11 @@ class alias(AST): if sys.version_info >= (3, 14): def __replace__(self, *, name: str = ..., asname: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class withitem(AST): - """withitem(expr context_expr, expr? optional_vars) -""" + """withitem(expr context_expr, expr? 
optional_vars)""" + if sys.version_info >= (3, 10): __match_args__ = ("context_expr", "optional_vars") context_expr: expr @@ -1908,8 +1824,7 @@ class withitem(AST): if sys.version_info >= (3, 14): def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 10): class pattern(AST): @@ -1921,7 +1836,8 @@ if sys.version_info >= (3, 10): | MatchStar(identifier? name) | MatchAs(pattern? pattern, identifier? name) | MatchOr(pattern* patterns) -""" + """ + lineno: int col_offset: int end_lineno: int @@ -1932,12 +1848,11 @@ if sys.version_info >= (3, 10): def __replace__( self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ... ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class match_case(AST): - """match_case(pattern pattern, expr? guard, stmt* body) -""" + """match_case(pattern pattern, expr? guard, stmt* body)""" + __match_args__ = ("pattern", "guard", "body") pattern: ast.pattern guard: expr | None @@ -1952,12 +1867,11 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__(self, *, pattern: ast.pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class Match(stmt): - """Match(expr subject, match_case* cases) -""" + """Match(expr subject, match_case* cases)""" + __match_args__ = ("subject", "cases") subject: expr cases: list[match_case] @@ -1967,39 +1881,34 @@ if sys.version_info >= (3, 10): def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ... if sys.version_info >= (3, 14): - def __replace__( - self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes] - ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + def __replace__(self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]) -> Self: + """Return a copy of the AST node with new values for the specified fields.""" class MatchValue(pattern): - """MatchValue(expr value) -""" + """MatchValue(expr value)""" + __match_args__ = ("value",) value: expr def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchSingleton(pattern): - """MatchSingleton(constant value) -""" + """MatchSingleton(constant value)""" + __match_args__ = ("value",) value: bool | None def __init__(self, value: bool | None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__(self, *, value: bool | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchSequence(pattern): - """MatchSequence(pattern* patterns) -""" + """MatchSequence(pattern* patterns)""" + __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -2009,12 +1918,11 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchMapping(pattern): - """MatchMapping(expr* keys, pattern* patterns, identifier? rest) -""" + """MatchMapping(expr* keys, pattern* patterns, identifier? rest)""" + __match_args__ = ("keys", "patterns", "rest") keys: list[expr] patterns: list[pattern] @@ -2041,12 +1949,11 @@ if sys.version_info >= (3, 10): rest: str | None = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchClass(pattern): - """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns) -""" + """MatchClass(expr cls, pattern* patterns, identifier* kwd_attrs, pattern* kwd_patterns)""" + __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") cls: expr patterns: list[pattern] @@ -2081,24 +1988,22 @@ if sys.version_info >= (3, 10): kwd_patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchStar(pattern): - """MatchStar(identifier? name) -""" + """MatchStar(identifier? name)""" + __match_args__ = ("name",) name: str | None def __init__(self, name: str | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ... if sys.version_info >= (3, 14): def __replace__(self, *, name: str | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchAs(pattern): - """MatchAs(pattern? pattern, identifier? name) -""" + """MatchAs(pattern? pattern, identifier? name)""" + __match_args__ = ("pattern", "name") pattern: ast.pattern | None name: str | None @@ -2110,12 +2015,11 @@ if sys.version_info >= (3, 10): def __replace__( self, *, pattern: ast.pattern | None = ..., name: str | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class MatchOr(pattern): - """MatchOr(pattern* patterns) -""" + """MatchOr(pattern* patterns)""" + __match_args__ = ("patterns",) patterns: list[pattern] if sys.version_info >= (3, 13): @@ -2125,16 +2029,14 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 14): def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. 
-""" + """Return a copy of the AST node with new values for the specified fields.""" class type_ignore(AST): - """type_ignore = TypeIgnore(int lineno, string tag) -""" + """type_ignore = TypeIgnore(int lineno, string tag)""" class TypeIgnore(type_ignore): - """TypeIgnore(int lineno, string tag) -""" + """TypeIgnore(int lineno, string tag)""" + if sys.version_info >= (3, 10): __match_args__ = ("lineno", "tag") lineno: int @@ -2143,15 +2045,15 @@ class TypeIgnore(type_ignore): if sys.version_info >= (3, 14): def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 12): class type_param(AST): """type_param = TypeVar(identifier name, expr? bound, expr? default_value) - | ParamSpec(identifier name, expr? default_value) - | TypeVarTuple(identifier name, expr? default_value) -""" + | ParamSpec(identifier name, expr? default_value) + | TypeVarTuple(identifier name, expr? default_value) + """ + lineno: int col_offset: int end_lineno: int @@ -2160,12 +2062,11 @@ if sys.version_info >= (3, 12): if sys.version_info >= (3, 14): def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class TypeVar(type_param): - """TypeVar(identifier name, expr? bound, expr? default_value) -""" + """TypeVar(identifier name, expr? bound, expr? default_value)""" + if sys.version_info >= (3, 13): __match_args__ = ("name", "bound", "default_value") else: @@ -2189,12 +2090,11 @@ if sys.version_info >= (3, 12): default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]], ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class ParamSpec(type_param): - """ParamSpec(identifier name, expr? default_value) -""" + """ParamSpec(identifier name, expr? default_value)""" + if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -2210,12 +2110,11 @@ if sys.version_info >= (3, 12): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" class TypeVarTuple(type_param): - """TypeVarTuple(identifier name, expr? default_value) -""" + """TypeVarTuple(identifier name, expr? default_value)""" + if sys.version_info >= (3, 13): __match_args__ = ("name", "default_value") else: @@ -2231,8 +2130,7 @@ if sys.version_info >= (3, 12): def __replace__( self, *, name: str = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]] ) -> Self: - """Return a copy of the AST node with new values for the specified fields. -""" + """Return a copy of the AST node with new values for the specified fields.""" if sys.version_info >= (3, 14): @type_check_only @@ -2246,32 +2144,32 @@ else: if sys.version_info < (3, 14): @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Num(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead -""" + """Deprecated AST node class. 
Use ast.Constant instead""" + def __new__(cls, n: complex, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Str(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead -""" + """Deprecated AST node class. Use ast.Constant instead""" + def __new__(cls, s: str, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Bytes(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead -""" + """Deprecated AST node class. Use ast.Constant instead""" + def __new__(cls, s: bytes, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class NameConstant(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead -""" + """Deprecated AST node class. Use ast.Constant instead""" + def __new__(cls, value: _ConstantValue, kind: str | None, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] @deprecated("Removed in Python 3.14. Use `ast.Constant` instead.") class Ellipsis(Constant, metaclass=_ABC): - """Deprecated AST node class. Use ast.Constant instead -""" + """Deprecated AST node class. Use ast.Constant instead""" + def __new__(cls, **kwargs: Unpack[_Attributes]) -> Constant: ... # type: ignore[misc] # pyright: ignore[reportInconsistentConstructor] # everything below here is defined in ast.py @@ -2290,10 +2188,11 @@ if sys.version_info >= (3, 13): optimize: Literal[-1, 0, 1, 2] = -1, ) -> _T: """ -Parse the source into an AST node. -Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). -Pass type_comments=True to get back type comments where the syntax allows. -""" + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + @overload def parse( source: str | ReadableBuffer, @@ -2383,10 +2282,11 @@ else: feature_version: None | int | tuple[int, int] = None, ) -> _T: """ - Parse the source into an AST node. - Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). - Pass type_comments=True to get back type comments where the syntax allows. - """ + Parse the source into an AST node. + Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + Pass type_comments=True to get back type comments where the syntax allows. + """ + @overload def parse( source: str | ReadableBuffer, @@ -2459,13 +2359,13 @@ else: def literal_eval(node_or_string: str | AST) -> Any: """ -Evaluate an expression node or a string containing only a Python -expression. The string or node provided may only consist of the following -Python literal structures: strings, bytes, numbers, tuples, lists, dicts, -sets, booleans, and None. + Evaluate an expression node or a string containing only a Python + expression. The string or node provided may only consist of the following + Python literal structures: strings, bytes, numbers, tuples, lists, dicts, + sets, booleans, and None. -Caution: A complex expression can overflow the C stack and cause a crash. -""" + Caution: A complex expression can overflow the C stack and cause a crash. 
+ """ if sys.version_info >= (3, 13): def dump( @@ -2477,127 +2377,134 @@ if sys.version_info >= (3, 13): show_empty: bool = False, ) -> str: """ -Return a formatted dump of the tree in node. This is mainly useful for -debugging purposes. If annotate_fields is true (by default), -the returned string will show the names and the values for fields. -If annotate_fields is false, the result string will be more compact by -omitting unambiguous field names. Attributes such as line -numbers and column offsets are not dumped by default. If this is wanted, -include_attributes can be set to true. If indent is a non-negative -integer or string, then the tree will be pretty-printed with that indent -level. None (the default) selects the single line representation. -If show_empty is False, then empty lists and fields that are None -will be omitted from the output for better readability. -""" + Return a formatted dump of the tree in node. This is mainly useful for + debugging purposes. If annotate_fields is true (by default), + the returned string will show the names and the values for fields. + If annotate_fields is false, the result string will be more compact by + omitting unambiguous field names. Attributes such as line + numbers and column offsets are not dumped by default. If this is wanted, + include_attributes can be set to true. If indent is a non-negative + integer or string, then the tree will be pretty-printed with that indent + level. None (the default) selects the single line representation. + If show_empty is False, then empty lists and fields that are None + will be omitted from the output for better readability. + """ else: def dump( node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None ) -> str: """ - Return a formatted dump of the tree in node. This is mainly useful for - debugging purposes. If annotate_fields is true (by default), - the returned string will show the names and the values for fields. - If annotate_fields is false, the result string will be more compact by - omitting unambiguous field names. Attributes such as line - numbers and column offsets are not dumped by default. If this is wanted, - include_attributes can be set to true. If indent is a non-negative - integer or string, then the tree will be pretty-printed with that indent - level. None (the default) selects the single line representation. - """ + Return a formatted dump of the tree in node. This is mainly useful for + debugging purposes. If annotate_fields is true (by default), + the returned string will show the names and the values for fields. + If annotate_fields is false, the result string will be more compact by + omitting unambiguous field names. Attributes such as line + numbers and column offsets are not dumped by default. If this is wanted, + include_attributes can be set to true. If indent is a non-negative + integer or string, then the tree will be pretty-printed with that indent + level. None (the default) selects the single line representation. + """ def copy_location(new_node: _T, old_node: AST) -> _T: """ -Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` -attributes) from *old_node* to *new_node* if possible, and return *new_node*. -""" + Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset` + attributes) from *old_node* to *new_node* if possible, and return *new_node*. 
+ """ + def fix_missing_locations(node: _T) -> _T: """ -When you compile a node tree with compile(), the compiler expects lineno and -col_offset attributes for every node that supports them. This is rather -tedious to fill in for generated nodes, so this helper adds these attributes -recursively where not already set, by setting them to the values of the -parent node. It works recursively starting at *node*. -""" + When you compile a node tree with compile(), the compiler expects lineno and + col_offset attributes for every node that supports them. This is rather + tedious to fill in for generated nodes, so this helper adds these attributes + recursively where not already set, by setting them to the values of the + parent node. It works recursively starting at *node*. + """ + def increment_lineno(node: _T, n: int = 1) -> _T: """ -Increment the line number and end line number of each node in the tree -starting at *node* by *n*. This is useful to "move code" to a different -location in a file. -""" + Increment the line number and end line number of each node in the tree + starting at *node* by *n*. This is useful to "move code" to a different + location in a file. + """ + def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: """ -Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` -that is present on *node*. -""" + Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields`` + that is present on *node*. + """ + def iter_child_nodes(node: AST) -> Iterator[AST]: """ -Yield all direct child nodes of *node*, that is, all fields that are nodes -and all items of fields that are lists of nodes. -""" + Yield all direct child nodes of *node*, that is, all fields that are nodes + and all items of fields that are lists of nodes. + """ + def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: """ -Return the docstring for the given node or None if no docstring can -be found. If the node provided does not have docstrings a TypeError -will be raised. + Return the docstring for the given node or None if no docstring can + be found. If the node provided does not have docstrings a TypeError + will be raised. + + If *clean* is `True`, all tabs are expanded to spaces and any whitespace + that can be uniformly removed from the second line onwards is removed. + """ -If *clean* is `True`, all tabs are expanded to spaces and any whitespace -that can be uniformly removed from the second line onwards is removed. -""" def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: """Get source code segment of the *source* that generated *node*. -If some location information (`lineno`, `end_lineno`, `col_offset`, -or `end_col_offset`) is missing, return None. + If some location information (`lineno`, `end_lineno`, `col_offset`, + or `end_col_offset`) is missing, return None. + + If *padded* is `True`, the first line of a multi-line statement will + be padded with spaces to match its original position. + """ -If *padded* is `True`, the first line of a multi-line statement will -be padded with spaces to match its original position. -""" def walk(node: AST) -> Iterator[AST]: """ -Recursively yield all descendant nodes in the tree starting at *node* -(including *node* itself), in no specified order. This is useful if you -only want to modify nodes in place and don't care about the context. 
-""" + Recursively yield all descendant nodes in the tree starting at *node* + (including *node* itself), in no specified order. This is useful if you + only want to modify nodes in place and don't care about the context. + """ if sys.version_info >= (3, 14): def compare(left: AST, right: AST, /, *, compare_attributes: bool = False) -> bool: """Recursively compares two ASTs. -compare_attributes affects whether AST attributes are considered -in the comparison. If compare_attributes is False (default), then -attributes are ignored. Otherwise they must all be equal. This -option is useful to check whether the ASTs are structurally equal but -might differ in whitespace or similar details. -""" + compare_attributes affects whether AST attributes are considered + in the comparison. If compare_attributes is False (default), then + attributes are ignored. Otherwise they must all be equal. This + option is useful to check whether the ASTs are structurally equal but + might differ in whitespace or similar details. + """ class NodeVisitor: """ -A node visitor base class that walks the abstract syntax tree and calls a -visitor function for every node found. This function may return a value -which is forwarded by the `visit` method. - -This class is meant to be subclassed, with the subclass adding visitor -methods. - -Per default the visitor functions for the nodes are ``'visit_'`` + -class name of the node. So a `TryFinally` node visit function would -be `visit_TryFinally`. This behavior can be changed by overriding -the `visit` method. If no visitor function exists for a node -(return value `None`) the `generic_visit` visitor is used instead. - -Don't use the `NodeVisitor` if you want to apply changes to nodes during -traversing. For this a special visitor exists (`NodeTransformer`) that -allows modifications. -""" + A node visitor base class that walks the abstract syntax tree and calls a + visitor function for every node found. This function may return a value + which is forwarded by the `visit` method. + + This class is meant to be subclassed, with the subclass adding visitor + methods. + + Per default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `visit` method. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + + Don't use the `NodeVisitor` if you want to apply changes to nodes during + traversing. For this a special visitor exists (`NodeTransformer`) that + allows modifications. + """ + # All visit methods below can be overwritten by subclasses and return an # arbitrary value, which is passed to the caller. def visit(self, node: AST) -> Any: - """Visit a node. -""" + """Visit a node.""" + def generic_visit(self, node: AST) -> Any: - """Called if no explicit visitor function exists for a node. -""" + """Called if no explicit visitor function exists for a node.""" # The following visit methods are not defined on NodeVisitor, but can # be implemented by subclasses and are called during a visit if defined. def visit_Module(self, node: Module) -> Any: ... @@ -2738,39 +2645,40 @@ allows modifications. class NodeTransformer(NodeVisitor): """ -A :class:`NodeVisitor` subclass that walks the abstract syntax tree and -allows modification of nodes. + A :class:`NodeVisitor` subclass that walks the abstract syntax tree and + allows modification of nodes. 
-The `NodeTransformer` will walk the AST and use the return value of the -visitor methods to replace or remove the old node. If the return value of -the visitor method is ``None``, the node will be removed from its location, -otherwise it is replaced with the return value. The return value may be the -original node in which case no replacement takes place. + The `NodeTransformer` will walk the AST and use the return value of the + visitor methods to replace or remove the old node. If the return value of + the visitor method is ``None``, the node will be removed from its location, + otherwise it is replaced with the return value. The return value may be the + original node in which case no replacement takes place. -Here is an example transformer that rewrites all occurrences of name lookups -(``foo``) to ``data['foo']``:: + Here is an example transformer that rewrites all occurrences of name lookups + (``foo``) to ``data['foo']``:: - class RewriteName(NodeTransformer): + class RewriteName(NodeTransformer): - def visit_Name(self, node): - return Subscript( - value=Name(id='data', ctx=Load()), - slice=Constant(value=node.id), - ctx=node.ctx - ) + def visit_Name(self, node): + return Subscript( + value=Name(id='data', ctx=Load()), + slice=Constant(value=node.id), + ctx=node.ctx + ) -Keep in mind that if the node you're operating on has child nodes you must -either transform the child nodes yourself or call the :meth:`generic_visit` -method for the node first. + Keep in mind that if the node you're operating on has child nodes you must + either transform the child nodes yourself or call the :meth:`generic_visit` + method for the node first. -For nodes that were part of a collection of statements (that applies to all -statement nodes), the visitor may also return a list of nodes rather than -just a single node. + For nodes that were part of a collection of statements (that applies to all + statement nodes), the visitor may also return a list of nodes rather than + just a single node. -Usually you use the transformer like this:: + Usually you use the transformer like this:: + + node = YourTransformer().visit(node) + """ - node = YourTransformer().visit(node) -""" def generic_visit(self, node: AST) -> AST: ... # TODO: Override the visit_* methods with better return types. # The usual return type is AST | None, but Iterable[AST] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi index e012aa2771ebc..45a778d76e172 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asynchat.pyi @@ -18,6 +18,7 @@ command will be accumulated (using your own 'collect_incoming_data' method) up to the terminator, and then control will be returned to you - by calling your self.found_terminator() method. """ + import asyncore from abc import abstractmethod @@ -28,7 +29,8 @@ class simple_producer: class async_chat(asyncore.dispatcher): """This is an abstract class. You must derive from this class, and add the two methods collect_incoming_data() and found_terminator() -""" + """ + ac_in_buffer_size: int ac_out_buffer_size: int @abstractmethod @@ -40,11 +42,12 @@ class async_chat(asyncore.dispatcher): Can be a fixed string of any length, an integer, or None. """ + def get_terminator(self) -> bytes | int | None: ... def push(self, data: bytes) -> None: ... def push_with_producer(self, producer: simple_producer) -> None: ... 
def close_when_done(self) -> None: - """automatically close this channel once the outgoing queue is empty -""" + """automatically close this channel once the outgoing queue is empty""" + def initiate_send(self) -> None: ... def discard_buffers(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi index cb39de1059ff1..a8732a22deec2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/__init__.pyi @@ -1,5 +1,5 @@ -"""The asyncio package, tracking PEP 3156. -""" +"""The asyncio package, tracking PEP 3156.""" + # This condition is so big, it's clearer to keep to platform condition in two blocks # Can't NOQA on a specific line: https://github.com/plinss/flake8-noqa/issues/22 import sys diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi index 34af33ed6366f..82ab82d7c5940 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_events.pyi @@ -12,6 +12,7 @@ Keyword arguments for the callback are not supported; this is a conscious design decision, leaving the door open for keyword arguments to modify the meaning of the API call itself. """ + import ssl import sys from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer @@ -76,142 +77,144 @@ class Server(AbstractServer): async def wait_closed(self) -> None: """Wait until server is closed and all connections are dropped. -- If the server is not closed, wait. -- If it is closed, but there are still active connections, wait. + - If the server is not closed, wait. + - If it is closed, but there are still active connections, wait. -Anyone waiting here will be unblocked once both conditions -(server is closed and all connections have been dropped) -have become true, in either order. + Anyone waiting here will be unblocked once both conditions + (server is closed and all connections have been dropped) + have become true, in either order. -Historical note: In 3.11 and before, this was broken, returning -immediately if the server was already closed, even if there -were still active connections. An attempted fix in 3.12.0 was -still broken, returning immediately if the server was still -open and there were no active connections. Hopefully in 3.12.1 -we have it right. -""" + Historical note: In 3.11 and before, this was broken, returning + immediately if the server was already closed, even if there + were still active connections. An attempted fix in 3.12.0 was + still broken, returning immediately if the server was still + open and there were no active connections. Hopefully in 3.12.1 + we have it right. + """ class BaseEventLoop(AbstractEventLoop): def run_forever(self) -> None: - """Run until stop() is called. -""" + """Run until stop() is called.""" + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: """Run until the Future is done. -If the argument is a coroutine, it is wrapped in a Task. + If the argument is a coroutine, it is wrapped in a Task. -WARNING: It would be disastrous to call run_until_complete() -with the same coroutine twice -- it would wrap it in two -different Tasks and that can't be good. + WARNING: It would be disastrous to call run_until_complete() + with the same coroutine twice -- it would wrap it in two + different Tasks and that can't be good. 
+ + Return the Future's result, or raise its exception. + """ -Return the Future's result, or raise its exception. -""" def stop(self) -> None: """Stop running the event loop. -Every callback already scheduled will still run. This simply informs -run_forever to stop looping after a complete iteration. -""" + Every callback already scheduled will still run. This simply informs + run_forever to stop looping after a complete iteration. + """ + def is_running(self) -> bool: - """Returns True if the event loop is running. -""" + """Returns True if the event loop is running.""" + def is_closed(self) -> bool: - """Returns True if the event loop was closed. -""" + """Returns True if the event loop was closed.""" + def close(self) -> None: """Close the event loop. -This clears the queues and shuts down the executor, -but does not wait for the executor to finish. + This clears the queues and shuts down the executor, + but does not wait for the executor to finish. + + The event loop must not be running. + """ -The event loop must not be running. -""" async def shutdown_asyncgens(self) -> None: - """Shutdown all active asynchronous generators. -""" + """Shutdown all active asynchronous generators.""" # Methods scheduling callbacks. All these return Handles. - def call_soon( - self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None - ) -> Handle: + def call_soon(self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None) -> Handle: """Arrange for a callback to be called as soon as possible. -This operates as a FIFO queue: callbacks are called in the -order in which they are registered. Each callback will be -called exactly once. + This operates as a FIFO queue: callbacks are called in the + order in which they are registered. Each callback will be + called exactly once. + + Any positional arguments after the callback will be passed to + the callback when it is called. + """ -Any positional arguments after the callback will be passed to -the callback when it is called. -""" def call_later( self, delay: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None ) -> TimerHandle: """Arrange for a callback to be called at a given time. -Return a Handle: an opaque object with a cancel() method that -can be used to cancel the call. + Return a Handle: an opaque object with a cancel() method that + can be used to cancel the call. -The delay can be an int or float, expressed in seconds. It is -always relative to the current time. + The delay can be an int or float, expressed in seconds. It is + always relative to the current time. -Each callback will be called exactly once. If two callbacks -are scheduled for exactly the same time, it is undefined which -will be called first. + Each callback will be called exactly once. If two callbacks + are scheduled for exactly the same time, it is undefined which + will be called first. + + Any positional arguments after the callback will be passed to + the callback when it is called. + """ -Any positional arguments after the callback will be passed to -the callback when it is called. -""" def call_at( self, when: float, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None ) -> TimerHandle: """Like call_later(), but uses an absolute time. -Absolute time corresponds to the event loop's time() method. -""" + Absolute time corresponds to the event loop's time() method. 
+ """ + def time(self) -> float: """Return the time according to the event loop's clock. -This is a float expressed in seconds since an epoch, but the -epoch, precision, accuracy and drift are unspecified and may -differ per event loop. -""" + This is a float expressed in seconds since an epoch, but the + epoch, precision, accuracy and drift are unspecified and may + differ per event loop. + """ # Future methods def create_future(self) -> Future[Any]: - """Create a Future object attached to the loop. -""" + """Create a Future object attached to the loop.""" # Tasks methods if sys.version_info >= (3, 11): def create_task(self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None) -> Task[_T]: """Schedule or begin executing a coroutine object. -Return a task object. -""" + Return a task object. + """ else: def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: """Schedule a coroutine object. - Return a task object. - """ + Return a task object. + """ def set_task_factory(self, factory: _TaskFactory | None) -> None: """Set a task factory that will be used by loop.create_task(). -If factory is None the default task factory will be set. + If factory is None the default task factory will be set. + + If factory is a callable, it should have a signature matching + '(loop, coro, **kwargs)', where 'loop' will be a reference to the active + event loop, 'coro' will be a coroutine object, and **kwargs will be + arbitrary keyword arguments that should be passed on to Task. + The callable must return a Task. + """ -If factory is a callable, it should have a signature matching -'(loop, coro, **kwargs)', where 'loop' will be a reference to the active -event loop, 'coro' will be a coroutine object, and **kwargs will be -arbitrary keyword arguments that should be passed on to Task. -The callable must return a Task. -""" def get_task_factory(self) -> _TaskFactory | None: - """Return a task factory, or None if the default one is in use. -""" + """Return a task factory, or None if the default one is in use.""" # Methods for interacting with threads def call_soon_threadsafe( self, callback: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts], context: Context | None = None ) -> Handle: - """Like call_soon(), but thread-safe. -""" + """Like call_soon(), but thread-safe.""" + def run_in_executor(self, executor: Executor | None, func: Callable[[Unpack[_Ts]], _T], *args: Unpack[_Ts]) -> Future[_T]: ... def set_default_executor(self, executor: ThreadPoolExecutor) -> None: ... # type: ignore[override] # Network I/O methods returning Futures. @@ -249,15 +252,16 @@ The callable must return a Task. ) -> tuple[Transport, _ProtocolT]: """Connect to a TCP server. -Create a streaming transport connection to a given internet host and -port: socket family AF_INET or socket.AF_INET6 depending on host (or -family if specified), socket type SOCK_STREAM. protocol_factory must be -a callable returning a protocol instance. + Create a streaming transport connection to a given internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. + + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + (transport, protocol) pair. + """ -This method is a coroutine which will try to establish the connection -in the background. 
When successful, the coroutine returns a -(transport, protocol) pair. -""" @overload async def create_connection( self, @@ -300,15 +304,16 @@ in the background. When successful, the coroutine returns a ) -> tuple[Transport, _ProtocolT]: """Connect to a TCP server. - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. + Create a streaming transport connection to a given internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. + + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + (transport, protocol) pair. + """ - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ @overload async def create_connection( self, @@ -349,15 +354,16 @@ in the background. When successful, the coroutine returns a ) -> tuple[Transport, _ProtocolT]: """Connect to a TCP server. - Create a streaming transport connection to a given internet host and - port: socket family AF_INET or socket.AF_INET6 depending on host (or - family if specified), socket type SOCK_STREAM. protocol_factory must be - a callable returning a protocol instance. + Create a streaming transport connection to a given internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. + + This method is a coroutine which will try to establish the connection + in the background. When successful, the coroutine returns a + (transport, protocol) pair. + """ - This method is a coroutine which will try to establish the connection - in the background. When successful, the coroutine returns a - (transport, protocol) pair. - """ @overload async def create_connection( self, @@ -400,19 +406,20 @@ in the background. When successful, the coroutine returns a ) -> Server: """Create a TCP server. -The host parameter can be a string, in that case the TCP server is -bound to host and port. + The host parameter can be a string, in that case the TCP server is + bound to host and port. -The host parameter can also be a sequence of strings and in that case -the TCP server is bound to all hosts of the sequence. If a host -appears multiple times (possibly indirectly e.g. when hostnames -resolve to the same IP address), the server is only bound once to that -host. + The host parameter can also be a sequence of strings and in that case + the TCP server is bound to all hosts of the sequence. If a host + appears multiple times (possibly indirectly e.g. when hostnames + resolve to the same IP address), the server is only bound once to that + host. -Return a Server object which can be used to stop the service. + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ -This method is a coroutine. -""" @overload async def create_server( self, @@ -453,19 +460,20 @@ This method is a coroutine. ) -> Server: """Create a TCP server. - The host parameter can be a string, in that case the TCP server is - bound to host and port. 
+ The host parameter can be a string, in that case the TCP server is + bound to host and port. - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. + The host parameter can also be a sequence of strings and in that case + the TCP server is bound to all hosts of the sequence. If a host + appears multiple times (possibly indirectly e.g. when hostnames + resolve to the same IP address), the server is only bound once to that + host. - Return a Server object which can be used to stop the service. + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ - This method is a coroutine. - """ @overload async def create_server( self, @@ -504,19 +512,20 @@ This method is a coroutine. ) -> Server: """Create a TCP server. - The host parameter can be a string, in that case the TCP server is - bound to host and port. + The host parameter can be a string, in that case the TCP server is + bound to host and port. - The host parameter can also be a sequence of strings and in that case - the TCP server is bound to all hosts of the sequence. If a host - appears multiple times (possibly indirectly e.g. when hostnames - resolve to the same IP address), the server is only bound once to that - host. + The host parameter can also be a sequence of strings and in that case + the TCP server is bound to all hosts of the sequence. If a host + appears multiple times (possibly indirectly e.g. when hostnames + resolve to the same IP address), the server is only bound once to that + host. - Return a Server object which can be used to stop the service. + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ - This method is a coroutine. - """ @overload async def create_server( self, @@ -549,9 +558,10 @@ This method is a coroutine. ) -> Transport | None: """Upgrade transport to TLS. -Return a new transport that *protocol* should start using -immediately. -""" + Return a new transport that *protocol* should start using + immediately. + """ + async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -574,9 +584,10 @@ immediately. ) -> Transport | None: """Upgrade transport to TLS. - Return a new transport that *protocol* should start using - immediately. - """ + Return a new transport that *protocol* should start using + immediately. + """ + async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], @@ -587,12 +598,12 @@ immediately. ) -> tuple[Transport, _ProtocolT]: """Handle an accepted connection. - This is used by servers that accept connections outside of - asyncio but that use asyncio to handle connections. + This is used by servers that accept connections outside of + asyncio but that use asyncio to handle connections. - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ + This method is a coroutine. When completed, the coroutine + returns a (transport, protocol) pair. + """ async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True @@ -602,26 +613,26 @@ immediately. ) -> int: """Send a file to transport. -Return the total number of bytes which were sent. + Return the total number of bytes which were sent. 
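A sketch of loop.create_server() with a protocol factory, matching the docstring above; the echo protocol and the local address are illustrative assumptions.

import asyncio

class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.Transport) -> None:
        self.transport = transport

    def data_received(self, data: bytes) -> None:
        self.transport.write(data)          # echo the bytes straight back

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Binds once per distinct address even if several hostnames resolve to it;
    # returns a Server object that can be used to stop the service.
    server = await loop.create_server(EchoProtocol, host="127.0.0.1", port=8888)
    async with server:
        await server.serve_forever()

asyncio.run(main())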
-The method uses high-performance os.sendfile if available. + The method uses high-performance os.sendfile if available. -file must be a regular file object opened in binary mode. + file must be a regular file object opened in binary mode. -offset tells from where to start reading the file. If specified, -count is the total number of bytes to transmit as opposed to -sending the file until EOF is reached. File position is updated on -return or also in case of error in which case file.tell() -can be used to figure out the number of bytes -which were sent. + offset tells from where to start reading the file. If specified, + count is the total number of bytes to transmit as opposed to + sending the file until EOF is reached. File position is updated on + return or also in case of error in which case file.tell() + can be used to figure out the number of bytes + which were sent. -fallback set to True makes asyncio to manually read and send -the file when the platform does not support the sendfile syscall -(e.g. Windows or SSL socket on Unix). + fallback set to True makes asyncio to manually read and send + the file when the platform does not support the sendfile syscall + (e.g. Windows or SSL socket on Unix). -Raise SendfileNotAvailableError if the system does not support -sendfile syscall and fallback is False. -""" + Raise SendfileNotAvailableError if the system does not support + sendfile syscall and fallback is False. + """ if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, @@ -636,8 +647,7 @@ sendfile syscall and fallback is False. allow_broadcast: bool | None = None, sock: socket | None = None, ) -> tuple[DatagramTransport, _ProtocolT]: - """Create datagram connection. -""" + """Create datagram connection.""" else: async def create_datagram_endpoint( self, @@ -653,8 +663,7 @@ sendfile syscall and fallback is False. allow_broadcast: bool | None = None, sock: socket | None = None, ) -> tuple[DatagramTransport, _ProtocolT]: - """Create datagram connection. -""" + """Create datagram connection.""" # Pipes and subprocesses. async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -716,57 +725,59 @@ sendfile syscall and fallback is False. def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: """Set handler as the new event loop exception handler. -If handler is None, the default exception handler will -be set. + If handler is None, the default exception handler will + be set. -If handler is a callable object, it should have a -signature matching '(loop, context)', where 'loop' -will be a reference to the active event loop, 'context' -will be a dict object (see `call_exception_handler()` -documentation for details about context). -""" - def get_exception_handler(self) -> _ExceptionHandler | None: - """Return an exception handler, or None if the default one is in use. + If handler is a callable object, it should have a + signature matching '(loop, context)', where 'loop' + will be a reference to the active event loop, 'context' + will be a dict object (see `call_exception_handler()` + documentation for details about context). """ + + def get_exception_handler(self) -> _ExceptionHandler | None: + """Return an exception handler, or None if the default one is in use.""" + def default_exception_handler(self, context: _Context) -> None: """Default exception handler. 
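A sketch of loop.sock_sendfile() with the fallback behaviour described in the docstring above; the file path and the already-connected socket are assumptions supplied by the caller.

import asyncio
import socket

async def send_file(sock: socket.socket, path: str) -> int:
    loop = asyncio.get_running_loop()
    # The socket must be non-blocking; the file must be a regular binary file.
    sock.setblocking(False)
    with open(path, "rb") as f:
        # fallback=True lets asyncio read and send manually when os.sendfile
        # is unavailable (e.g. Windows, or an SSL socket on Unix).
        return await loop.sock_sendfile(sock, f, offset=0, count=None, fallback=True)

# Usage (with an existing connected socket): bytes_sent = await send_file(sock, "payload.bin")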
-This is called when an exception occurs and no exception -handler is set, and can be called by a custom exception -handler that wants to defer to the default behavior. + This is called when an exception occurs and no exception + handler is set, and can be called by a custom exception + handler that wants to defer to the default behavior. -This default handler logs the error message and other -context-dependent information. In debug mode, a truncated -stack trace is also appended showing where the given object -(e.g. a handle or future or task) was created, if any. + This default handler logs the error message and other + context-dependent information. In debug mode, a truncated + stack trace is also appended showing where the given object + (e.g. a handle or future or task) was created, if any. + + The context parameter has the same meaning as in + `call_exception_handler()`. + """ -The context parameter has the same meaning as in -`call_exception_handler()`. -""" def call_exception_handler(self, context: _Context) -> None: """Call the current event loop's exception handler. -The context argument is a dict containing the following keys: - -- 'message': Error message; -- 'exception' (optional): Exception object; -- 'future' (optional): Future instance; -- 'task' (optional): Task instance; -- 'handle' (optional): Handle instance; -- 'protocol' (optional): Protocol instance; -- 'transport' (optional): Transport instance; -- 'socket' (optional): Socket instance; -- 'source_traceback' (optional): Traceback of the source; -- 'handle_traceback' (optional): Traceback of the handle; -- 'asyncgen' (optional): Asynchronous generator that caused - the exception. - -New keys maybe introduced in the future. - -Note: do not overload this method in an event loop subclass. -For custom exception handling, use the -`set_exception_handler()` method. -""" + The context argument is a dict containing the following keys: + + - 'message': Error message; + - 'exception' (optional): Exception object; + - 'future' (optional): Future instance; + - 'task' (optional): Task instance; + - 'handle' (optional): Handle instance; + - 'protocol' (optional): Protocol instance; + - 'transport' (optional): Transport instance; + - 'socket' (optional): Socket instance; + - 'source_traceback' (optional): Traceback of the source; + - 'handle_traceback' (optional): Traceback of the handle; + - 'asyncgen' (optional): Asynchronous generator that caused + the exception. + + New keys maybe introduced in the future. + + Note: do not overload this method in an event loop subclass. + For custom exception handling, use the + `set_exception_handler()` method. + """ # Debug flag management. def get_debug(self) -> bool: ... def set_debug(self, enabled: bool) -> None: ... @@ -774,13 +785,12 @@ For custom exception handling, use the async def shutdown_default_executor(self, timeout: float | None = None) -> None: """Schedule the shutdown of the default executor. -The timeout parameter specifies the amount of time the executor will -be given to finish joining. The default value is None, which means -that the executor will be given an unlimited amount of time. -""" + The timeout parameter specifies the amount of time the executor will + be given to finish joining. The default value is None, which means + that the executor will be given an unlimited amount of time. + """ else: async def shutdown_default_executor(self) -> None: - """Schedule the shutdown of the default executor. 
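A sketch of set_exception_handler() and call_exception_handler() as documented above; the handler name and the demo context dict are illustrative.

import asyncio

def on_loop_error(loop: asyncio.AbstractEventLoop, context: dict) -> None:
    # context may carry 'message', 'exception', 'task', 'handle', ... (see above)
    print("asyncio error:", context.get("message"))
    loop.default_exception_handler(context)    # defer to the default behaviour

async def main() -> None:
    loop = asyncio.get_running_loop()
    loop.set_exception_handler(on_loop_error)
    # Feed a context dict through the currently installed handler.
    loop.call_exception_handler({"message": "demo failure"})

asyncio.run(main())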
-""" + """Schedule the shutdown of the default executor.""" def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi index 200e26c30b1c5..8b1ed8ae122a2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_futures.pyi @@ -15,13 +15,13 @@ _FINISHED: Final = "FINISHED" # undocumented def isfuture(obj: object) -> TypeIs[Future[Any]]: """Check for a Future. -This returns True when obj is a Future instance or is advertising -itself as duck-type compatible by setting _asyncio_future_blocking. -See comment in Future for more details. -""" + This returns True when obj is a Future instance or is advertising + itself as duck-type compatible by setting _asyncio_future_blocking. + See comment in Future for more details. + """ + def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: # undocumented - """helper function for Future.__repr__ -""" + """helper function for Future.__repr__""" + def _future_repr_info(future: futures.Future[Any]) -> list[str]: # undocumented - """helper function for Future.__repr__ -""" + """helper function for Future.__repr__""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi index ec7294f2ace33..989824b6101c6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -55,8 +55,9 @@ class BaseSubprocessTransport(transports.SubprocessTransport): async def _wait(self) -> int: # undocumented """Wait until the process exit and return the process return code. -This method is a coroutine. -""" + This method is a coroutine. + """ + def _try_finish(self) -> None: ... # undocumented def _call_connection_lost(self, exc: BaseException | None) -> None: ... # undocumented def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi index e9593b12fea2d..61aaf51e026fa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/constants.pyi @@ -15,8 +15,8 @@ if sys.version_info >= (3, 12): THREAD_JOIN_TIMEOUT: Final = 300 class _SendfileMode(enum.Enum): - """An enumeration. -""" + """An enumeration.""" + UNSUPPORTED = 1 TRY_NATIVE = 2 FALLBACK = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi index 91d6fc04020e6..d1db48fb9cfad 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/coroutines.pyi @@ -18,14 +18,14 @@ if sys.version_info < (3, 11): def coroutine(func: _FunctionT) -> _FunctionT: """Decorator to mark coroutines. - If the coroutine is not yielded from before it is destroyed, - an error message is logged. - """ + If the coroutine is not yielded from before it is destroyed, + an error message is logged. + """ @overload def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: - """Return True if func is a decorated coroutine function. 
-""" + """Return True if func is a decorated coroutine function.""" + @overload def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... @overload @@ -33,5 +33,4 @@ def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Co @overload def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... def iscoroutine(obj: object) -> TypeIs[Coroutine[Any, Any, Any]]: - """Return True if obj is a coroutine object. -""" + """Return True if obj is a coroutine object.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi index 6fc52e657cd68..10d72b8fa88cb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/events.pyi @@ -1,5 +1,5 @@ -"""Event loop and event loop policy. -""" +"""Event loop and event loop policy.""" + import ssl import sys from _asyncio import ( @@ -75,8 +75,8 @@ class _TaskFactory(Protocol): def __call__(self, loop: AbstractEventLoop, factory: _CoroutineLike[_T], /) -> Future[_T]: ... class Handle: - """Object returned by callback registration methods. -""" + """Object returned by callback registration methods.""" + __slots__ = ("_callback", "_args", "_cancelled", "_loop", "_source_traceback", "_repr", "__weakref__", "_context") _cancelled: bool _args: Sequence[Any] @@ -90,8 +90,8 @@ class Handle: def get_context(self) -> Context: ... class TimerHandle(Handle): - """Object returned by timed callback registration methods. -""" + """Object returned by timed callback registration methods.""" + __slots__ = ["_scheduled", "_when"] def __init__( self, @@ -105,9 +105,10 @@ class TimerHandle(Handle): def when(self) -> float: """Return a scheduled callback time. -The time is an absolute timestamp, using the same time -reference as loop.time(). -""" + The time is an absolute timestamp, using the same time + reference as loop.time(). + """ + def __lt__(self, other: TimerHandle) -> bool: ... def __le__(self, other: TimerHandle) -> bool: ... def __gt__(self, other: TimerHandle) -> bool: ... @@ -115,93 +116,94 @@ reference as loop.time(). def __eq__(self, other: object) -> bool: ... class AbstractServer: - """Abstract server returned by create_server(). -""" + """Abstract server returned by create_server().""" + @abstractmethod def close(self) -> None: - """Stop serving. This leaves existing connections open. -""" + """Stop serving. This leaves existing connections open.""" if sys.version_info >= (3, 13): @abstractmethod def close_clients(self) -> None: - """Close all active connections. -""" + """Close all active connections.""" + @abstractmethod def abort_clients(self) -> None: - """Close all active connections immediately. -""" + """Close all active connections immediately.""" async def __aenter__(self) -> Self: ... async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod def get_loop(self) -> AbstractEventLoop: - """Get the event loop the Server object is attached to. -""" + """Get the event loop the Server object is attached to.""" + @abstractmethod def is_serving(self) -> bool: - """Return True if the server is accepting connections. -""" + """Return True if the server is accepting connections.""" + @abstractmethod async def start_serving(self) -> None: """Start accepting connections. -This method is idempotent, so it can be called when -the server is already being serving. 
-""" + This method is idempotent, so it can be called when + the server is already being serving. + """ + @abstractmethod async def serve_forever(self) -> None: """Start accepting connections until the coroutine is cancelled. -The server is closed when the coroutine is cancelled. -""" + The server is closed when the coroutine is cancelled. + """ + @abstractmethod async def wait_closed(self) -> None: - """Coroutine to wait until service is closed. -""" + """Coroutine to wait until service is closed.""" class AbstractEventLoop: - """Abstract event loop. -""" + """Abstract event loop.""" + slow_callback_duration: float @abstractmethod def run_forever(self) -> None: - """Run the event loop until stop() is called. -""" + """Run the event loop until stop() is called.""" + @abstractmethod def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: """Run the event loop until a Future is done. -Return the Future's result, or raise its exception. -""" + Return the Future's result, or raise its exception. + """ + @abstractmethod def stop(self) -> None: """Stop the event loop as soon as reasonable. -Exactly how soon that is may depend on the implementation, but -no more I/O callbacks should be scheduled. -""" + Exactly how soon that is may depend on the implementation, but + no more I/O callbacks should be scheduled. + """ + @abstractmethod def is_running(self) -> bool: - """Return whether the event loop is currently running. -""" + """Return whether the event loop is currently running.""" + @abstractmethod def is_closed(self) -> bool: - """Returns True if the event loop was closed. -""" + """Returns True if the event loop was closed.""" + @abstractmethod def close(self) -> None: """Close the loop. -The loop should not be running. + The loop should not be running. + + This is idempotent and irreversible. -This is idempotent and irreversible. + No other methods should be called after this one. + """ -No other methods should be called after this one. -""" @abstractmethod async def shutdown_asyncgens(self) -> None: - """Shutdown all active asynchronous generators. -""" + """Shutdown all active asynchronous generators.""" # Methods scheduling callbacks. All these return Handles. # "context" added in 3.9.10/3.10.2 for call_* @abstractmethod @@ -364,55 +366,56 @@ No other methods should be called after this one. ) -> Server: """A coroutine which creates a TCP server bound to host and port. -The return value is a Server object which can be used to stop -the service. + The return value is a Server object which can be used to stop + the service. + + If host is an empty string or None all interfaces are assumed + and a list of multiple sockets will be returned (most likely + one for IPv4 and another one for IPv6). The host parameter can also be + a sequence (e.g. list) of hosts to bind to. -If host is an empty string or None all interfaces are assumed -and a list of multiple sockets will be returned (most likely -one for IPv4 and another one for IPv6). The host parameter can also be -a sequence (e.g. list) of hosts to bind to. + family can be set to either AF_INET or AF_INET6 to force the + socket to use IPv4 or IPv6. If not set it will be determined + from host (defaults to AF_UNSPEC). -family can be set to either AF_INET or AF_INET6 to force the -socket to use IPv4 or IPv6. If not set it will be determined -from host (defaults to AF_UNSPEC). + flags is a bitmask for getaddrinfo(). -flags is a bitmask for getaddrinfo(). + sock can optionally be specified in order to use a preexisting + socket object. 
-sock can optionally be specified in order to use a preexisting -socket object. + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). -backlog is the maximum number of queued connections passed to -listen() (defaults to 100). + ssl can be set to an SSLContext to enable SSL over the + accepted connections. -ssl can be set to an SSLContext to enable SSL over the -accepted connections. + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified will automatically be set to True on + UNIX. -reuse_address tells the kernel to reuse a local socket in -TIME_WAIT state, without waiting for its natural timeout to -expire. If not specified will automatically be set to True on -UNIX. + reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows. -reuse_port tells the kernel to allow this endpoint to be bound to -the same port as other existing endpoints are bound to, so long as -they all set this flag when being created. This option is not -supported on Windows. + keep_alive set to True keeps connections active by enabling the + periodic transmission of messages. -keep_alive set to True keeps connections active by enabling the -periodic transmission of messages. + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for completion of the SSL handshake before aborting the + connection. Default is 60s. -ssl_handshake_timeout is the time in seconds that an SSL server -will wait for completion of the SSL handshake before aborting the -connection. Default is 60s. + ssl_shutdown_timeout is the time in seconds that an SSL server + will wait for completion of the SSL shutdown procedure + before aborting the connection. Default is 30s. -ssl_shutdown_timeout is the time in seconds that an SSL server -will wait for completion of the SSL shutdown procedure -before aborting the connection. Default is 30s. + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ -start_serving set to True (default) causes the created server -to start accepting connections immediately. When set to False, -the user should await Server.start_serving() or Server.serve_forever() -to make the server to start accepting connections. -""" @overload @abstractmethod async def create_server( @@ -455,52 +458,53 @@ to make the server to start accepting connections. ) -> Server: """A coroutine which creates a TCP server bound to host and port. - The return value is a Server object which can be used to stop - the service. + The return value is a Server object which can be used to stop + the service. - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. list) of hosts to bind to. + If host is an empty string or None all interfaces are assumed + and a list of multiple sockets will be returned (most likely + one for IPv4 and another one for IPv6). The host parameter can also be + a sequence (e.g. list) of hosts to bind to. 
- family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). + family can be set to either AF_INET or AF_INET6 to force the + socket to use IPv4 or IPv6. If not set it will be determined + from host (defaults to AF_UNSPEC). - flags is a bitmask for getaddrinfo(). + flags is a bitmask for getaddrinfo(). - sock can optionally be specified in order to use a preexisting - socket object. + sock can optionally be specified in order to use a preexisting + socket object. - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). - ssl can be set to an SSLContext to enable SSL over the - accepted connections. + ssl can be set to an SSLContext to enable SSL over the + accepted connections. - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified will automatically be set to True on + UNIX. - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. + reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows. - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for completion of the SSL handshake before aborting the + connection. Default is 60s. - ssl_shutdown_timeout is the time in seconds that an SSL server - will wait for completion of the SSL shutdown procedure - before aborting the connection. Default is 30s. + ssl_shutdown_timeout is the time in seconds that an SSL server + will wait for completion of the SSL shutdown procedure + before aborting the connection. Default is 30s. + + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ @overload @abstractmethod async def create_server( @@ -541,48 +545,49 @@ to make the server to start accepting connections. ) -> Server: """A coroutine which creates a TCP server bound to host and port. - The return value is a Server object which can be used to stop - the service. + The return value is a Server object which can be used to stop + the service. - If host is an empty string or None all interfaces are assumed - and a list of multiple sockets will be returned (most likely - one for IPv4 and another one for IPv6). The host parameter can also be - a sequence (e.g. 
list) of hosts to bind to. + If host is an empty string or None all interfaces are assumed + and a list of multiple sockets will be returned (most likely + one for IPv4 and another one for IPv6). The host parameter can also be + a sequence (e.g. list) of hosts to bind to. - family can be set to either AF_INET or AF_INET6 to force the - socket to use IPv4 or IPv6. If not set it will be determined - from host (defaults to AF_UNSPEC). + family can be set to either AF_INET or AF_INET6 to force the + socket to use IPv4 or IPv6. If not set it will be determined + from host (defaults to AF_UNSPEC). - flags is a bitmask for getaddrinfo(). + flags is a bitmask for getaddrinfo(). - sock can optionally be specified in order to use a preexisting - socket object. + sock can optionally be specified in order to use a preexisting + socket object. - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). - ssl can be set to an SSLContext to enable SSL over the - accepted connections. + ssl can be set to an SSLContext to enable SSL over the + accepted connections. - reuse_address tells the kernel to reuse a local socket in - TIME_WAIT state, without waiting for its natural timeout to - expire. If not specified will automatically be set to True on - UNIX. + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified will automatically be set to True on + UNIX. - reuse_port tells the kernel to allow this endpoint to be bound to - the same port as other existing endpoints are bound to, so long as - they all set this flag when being created. This option is not - supported on Windows. + reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows. - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for completion of the SSL handshake before aborting the - connection. Default is 60s. + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for completion of the SSL handshake before aborting the + connection. Default is 60s. + + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. - """ @overload @abstractmethod async def create_server( @@ -617,9 +622,10 @@ to make the server to start accepting connections. ) -> Transport | None: """Upgrade a transport to TLS. -Return a new transport that *protocol* should start using -immediately. -""" + Return a new transport that *protocol* should start using + immediately. + """ + async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -634,32 +640,32 @@ immediately. ) -> Server: """A coroutine which creates a UNIX Domain Socket server. -The return value is a Server object, which can be used to stop -the service. 
+ The return value is a Server object, which can be used to stop + the service. -path is a str, representing a file system path to bind the -server socket to. + path is a str, representing a file system path to bind the + server socket to. -sock can optionally be specified in order to use a preexisting -socket object. + sock can optionally be specified in order to use a preexisting + socket object. -backlog is the maximum number of queued connections passed to -listen() (defaults to 100). + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). -ssl can be set to an SSLContext to enable SSL over the -accepted connections. + ssl can be set to an SSLContext to enable SSL over the + accepted connections. -ssl_handshake_timeout is the time in seconds that an SSL server -will wait for the SSL handshake to complete (defaults to 60s). + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for the SSL handshake to complete (defaults to 60s). -ssl_shutdown_timeout is the time in seconds that an SSL server -will wait for the SSL shutdown to finish (defaults to 30s). + ssl_shutdown_timeout is the time in seconds that an SSL server + will wait for the SSL shutdown to finish (defaults to 30s). -start_serving set to True (default) causes the created server -to start accepting connections immediately. When set to False, -the user should await Server.start_serving() or Server.serve_forever() -to make the server to start accepting connections. -""" + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ else: @abstractmethod async def start_tls( @@ -674,9 +680,10 @@ to make the server to start accepting connections. ) -> Transport | None: """Upgrade a transport to TLS. - Return a new transport that *protocol* should start using - immediately. - """ + Return a new transport that *protocol* should start using + immediately. + """ + async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -690,30 +697,29 @@ to make the server to start accepting connections. ) -> Server: """A coroutine which creates a UNIX Domain Socket server. - The return value is a Server object, which can be used to stop - the service. + The return value is a Server object, which can be used to stop + the service. - path is a str, representing a file system path to bind the - server socket to. + path is a str, representing a file system path to bind the + server socket to. - sock can optionally be specified in order to use a preexisting - socket object. + sock can optionally be specified in order to use a preexisting + socket object. - backlog is the maximum number of queued connections passed to - listen() (defaults to 100). + backlog is the maximum number of queued connections passed to + listen() (defaults to 100). - ssl can be set to an SSLContext to enable SSL over the - accepted connections. + ssl can be set to an SSLContext to enable SSL over the + accepted connections. - ssl_handshake_timeout is the time in seconds that an SSL server - will wait for the SSL handshake to complete (defaults to 60s). - - start_serving set to True (default) causes the created server - to start accepting connections immediately. When set to False, - the user should await Server.start_serving() or Server.serve_forever() - to make the server to start accepting connections. 
- """ + ssl_handshake_timeout is the time in seconds that an SSL server + will wait for the SSL handshake to complete (defaults to 60s). + start_serving set to True (default) causes the created server + to start accepting connections immediately. When set to False, + the user should await Server.start_serving() or Server.serve_forever() + to make the server to start accepting connections. + """ if sys.version_info >= (3, 11): async def connect_accepted_socket( self, @@ -726,12 +732,12 @@ to make the server to start accepting connections. ) -> tuple[Transport, _ProtocolT]: """Handle an accepted connection. -This is used by servers that accept connections outside of -asyncio, but use asyncio to handle connections. + This is used by servers that accept connections outside of + asyncio, but use asyncio to handle connections. -This method is a coroutine. When completed, the coroutine -returns a (transport, protocol) pair. -""" + This method is a coroutine. When completed, the coroutine + returns a (transport, protocol) pair. + """ elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, @@ -743,12 +749,12 @@ returns a (transport, protocol) pair. ) -> tuple[Transport, _ProtocolT]: """Handle an accepted connection. - This is used by servers that accept connections outside of - asyncio, but use asyncio to handle connections. + This is used by servers that accept connections outside of + asyncio, but use asyncio to handle connections. - This method is a coroutine. When completed, the coroutine - returns a (transport, protocol) pair. - """ + This method is a coroutine. When completed, the coroutine + returns a (transport, protocol) pair. + """ if sys.version_info >= (3, 11): async def create_unix_connection( self, @@ -783,8 +789,9 @@ returns a (transport, protocol) pair. ) -> int: """Send a file through a transport. -Return an amount of sent bytes. -""" + Return an amount of sent bytes. + """ + @abstractmethod async def create_datagram_endpoint( self, @@ -802,55 +809,55 @@ Return an amount of sent bytes. ) -> tuple[DatagramTransport, _ProtocolT]: """A coroutine which creates a datagram endpoint. -This method will try to establish the endpoint in the background. -When successful, the coroutine returns a (transport, protocol) pair. + This method will try to establish the endpoint in the background. + When successful, the coroutine returns a (transport, protocol) pair. -protocol_factory must be a callable returning a protocol instance. + protocol_factory must be a callable returning a protocol instance. -socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on -host (or family if specified), socket type SOCK_DGRAM. + socket family AF_INET, socket.AF_INET6 or socket.AF_UNIX depending on + host (or family if specified), socket type SOCK_DGRAM. -reuse_address tells the kernel to reuse a local socket in -TIME_WAIT state, without waiting for its natural timeout to -expire. If not specified it will automatically be set to True on -UNIX. + reuse_address tells the kernel to reuse a local socket in + TIME_WAIT state, without waiting for its natural timeout to + expire. If not specified it will automatically be set to True on + UNIX. -reuse_port tells the kernel to allow this endpoint to be bound to -the same port as other existing endpoints are bound to, so long as -they all set this flag when being created. This option is not -supported on Windows and some UNIX's. If the -:py:data:`~socket.SO_REUSEPORT` constant is not defined then this -capability is unsupported. 
+ reuse_port tells the kernel to allow this endpoint to be bound to + the same port as other existing endpoints are bound to, so long as + they all set this flag when being created. This option is not + supported on Windows and some UNIX's. If the + :py:data:`~socket.SO_REUSEPORT` constant is not defined then this + capability is unsupported. -allow_broadcast tells the kernel to allow this endpoint to send -messages to the broadcast address. + allow_broadcast tells the kernel to allow this endpoint to send + messages to the broadcast address. -sock can optionally be specified in order to use a preexisting -socket object. -""" + sock can optionally be specified in order to use a preexisting + socket object. + """ # Pipes and subprocesses. @abstractmethod - async def connect_read_pipe( - self, protocol_factory: Callable[[], _ProtocolT], pipe: Any - ) -> tuple[ReadTransport, _ProtocolT]: + async def connect_read_pipe(self, protocol_factory: Callable[[], _ProtocolT], pipe: Any) -> tuple[ReadTransport, _ProtocolT]: """Register read pipe in event loop. Set the pipe to non-blocking mode. -protocol_factory should instantiate object with Protocol interface. -pipe is a file-like object. -Return pair (transport, protocol), where transport supports the -ReadTransport interface. -""" + protocol_factory should instantiate object with Protocol interface. + pipe is a file-like object. + Return pair (transport, protocol), where transport supports the + ReadTransport interface. + """ + @abstractmethod async def connect_write_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any ) -> tuple[WriteTransport, _ProtocolT]: """Register write pipe in event loop. -protocol_factory should instantiate object with BaseProtocol interface. -Pipe is file-like object already switched to nonblocking. -Return pair (transport, protocol), where transport support -WriteTransport interface. -""" + protocol_factory should instantiate object with BaseProtocol interface. + Pipe is file-like object already switched to nonblocking. + Return pair (transport, protocol), where transport support + WriteTransport interface. + """ + @abstractmethod async def subprocess_shell( self, @@ -930,33 +937,33 @@ WriteTransport interface. def set_debug(self, enabled: bool) -> None: ... @abstractmethod async def shutdown_default_executor(self) -> None: - """Schedule the shutdown of the default executor. -""" + """Schedule the shutdown of the default executor.""" if sys.version_info >= (3, 14): class _AbstractEventLoopPolicy: - """Abstract policy for accessing the event loop. -""" + """Abstract policy for accessing the event loop.""" + @abstractmethod def get_event_loop(self) -> AbstractEventLoop: """Get the event loop for the current context. -Returns an event loop object implementing the AbstractEventLoop interface, -or raises an exception in case no event loop has been set for the -current context and the current policy does not specify to create one. + Returns an event loop object implementing the AbstractEventLoop interface, + or raises an exception in case no event loop has been set for the + current context and the current policy does not specify to create one. + + It should never return None. + """ -It should never return None. -""" @abstractmethod def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop for the current context to loop. 
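A sketch of loop.create_datagram_endpoint() as documented above; the echo protocol and the local address are illustrative assumptions.

import asyncio

class EchoDatagram(asyncio.DatagramProtocol):
    def connection_made(self, transport: asyncio.DatagramTransport) -> None:
        self.transport = transport

    def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None:
        self.transport.sendto(data, addr)       # echo back to the sender

async def main() -> None:
    loop = asyncio.get_running_loop()
    transport, protocol = await loop.create_datagram_endpoint(
        EchoDatagram, local_addr=("127.0.0.1", 9999)
    )
    try:
        await asyncio.sleep(10)                 # serve for a short while
    finally:
        transport.close()

asyncio.run(main())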
-""" + """Set the event loop for the current context to loop.""" + @abstractmethod def new_event_loop(self) -> AbstractEventLoop: """Create and return a new event loop object according to this -policy's rules. If there's need to set this loop as the event loop for -the current context, set_event_loop must be called explicitly. -""" + policy's rules. If there's need to set this loop as the event loop for + the current context, set_event_loop must be called explicitly. + """ else: @type_check_only @@ -987,67 +994,72 @@ if sys.version_info >= (3, 14): class _BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): """Default policy implementation for accessing the event loop. -In this policy, each thread has its own event loop. However, we -only automatically create an event loop by default for the main -thread; other threads by default have no event loop. + In this policy, each thread has its own event loop. However, we + only automatically create an event loop by default for the main + thread; other threads by default have no event loop. + + Other policies may have different rules (e.g. a single global + event loop, or automatically creating an event loop per thread, or + using some other notion of context to which an event loop is + associated). + """ -Other policies may have different rules (e.g. a single global -event loop, or automatically creating an event loop per thread, or -using some other notion of context to which an event loop is -associated). -""" def get_event_loop(self) -> AbstractEventLoop: """Get the event loop for the current context. -Returns an instance of EventLoop or raises an exception. -""" + Returns an instance of EventLoop or raises an exception. + """ + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop. -""" + """Set the event loop.""" + def new_event_loop(self) -> AbstractEventLoop: """Create a new event loop. -You must call set_event_loop() to make this the current event -loop. -""" + You must call set_event_loop() to make this the current event + loop. + """ else: class BaseDefaultEventLoopPolicy(_AbstractEventLoopPolicy, metaclass=ABCMeta): """Default policy implementation for accessing the event loop. -In this policy, each thread has its own event loop. However, we -only automatically create an event loop by default for the main -thread; other threads by default have no event loop. + In this policy, each thread has its own event loop. However, we + only automatically create an event loop by default for the main + thread; other threads by default have no event loop. + + Other policies may have different rules (e.g. a single global + event loop, or automatically creating an event loop per thread, or + using some other notion of context to which an event loop is + associated). + """ -Other policies may have different rules (e.g. a single global -event loop, or automatically creating an event loop per thread, or -using some other notion of context to which an event loop is -associated). -""" def get_event_loop(self) -> AbstractEventLoop: """Get the event loop for the current context. -Returns an instance of EventLoop or raises an exception. -""" + Returns an instance of EventLoop or raises an exception. + """ + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: - """Set the event loop. -""" + """Set the event loop.""" + def new_event_loop(self) -> AbstractEventLoop: """Create a new event loop. -You must call set_event_loop() to make this the current event -loop. 
-""" + You must call set_event_loop() to make this the current event + loop. + """ if sys.version_info >= (3, 14): def _get_event_loop_policy() -> _AbstractEventLoopPolicy: - """Get the current event loop policy. -""" + """Get the current event loop policy.""" + def _set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: """Set the current event loop policy. -If policy is None, the default policy is restored. -""" + If policy is None, the default policy is restored. + """ + @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def get_event_loop_policy() -> _AbstractEventLoopPolicy: ... @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") @@ -1055,38 +1067,36 @@ If policy is None, the default policy is restored. else: def get_event_loop_policy() -> _AbstractEventLoopPolicy: - """Get the current event loop policy. -""" + """Get the current event loop policy.""" + def set_event_loop_policy(policy: _AbstractEventLoopPolicy | None) -> None: """Set the current event loop policy. -If policy is None, the default policy is restored. -""" + If policy is None, the default policy is restored. + """ def set_event_loop(loop: AbstractEventLoop | None) -> None: - """Equivalent to calling get_event_loop_policy().set_event_loop(loop). -""" + """Equivalent to calling get_event_loop_policy().set_event_loop(loop).""" + def new_event_loop() -> AbstractEventLoop: - """Equivalent to calling get_event_loop_policy().new_event_loop(). -""" + """Equivalent to calling get_event_loop_policy().new_event_loop().""" if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher(). -""" + """Equivalent to calling get_event_loop_policy().get_child_watcher().""" + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") def set_child_watcher(watcher: AbstractChildWatcher) -> None: """Equivalent to calling -get_event_loop_policy().set_child_watcher(watcher). -""" - + get_event_loop_policy().set_child_watcher(watcher). + """ else: def get_child_watcher() -> AbstractChildWatcher: - """Equivalent to calling get_event_loop_policy().get_child_watcher(). -""" + """Equivalent to calling get_event_loop_policy().get_child_watcher().""" + def set_child_watcher(watcher: AbstractChildWatcher) -> None: """Equivalent to calling - get_event_loop_policy().set_child_watcher(watcher). -""" + get_event_loop_policy().set_child_watcher(watcher). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi index 98a69a67a1346..bf1330f7b1518 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/exceptions.pyi @@ -1,5 +1,5 @@ -"""asyncio exceptions. -""" +"""asyncio exceptions.""" + import sys # Keep asyncio.__all__ updated with any changes to __all__ here @@ -24,33 +24,32 @@ else: ) class CancelledError(BaseException): - """The Future or Task was cancelled. -""" + """The Future or Task was cancelled.""" if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: class TimeoutError(Exception): - """The operation exceeded the given deadline. -""" + """The operation exceeded the given deadline.""" class InvalidStateError(Exception): - """The operation is not allowed in this state. 
-""" + """The operation is not allowed in this state.""" + class SendfileNotAvailableError(RuntimeError): """Sendfile syscall is not available. -Raised if OS does not support sendfile syscall for given socket or -file type. -""" + Raised if OS does not support sendfile syscall for given socket or + file type. + """ class IncompleteReadError(EOFError): """ -Incomplete read error. Attributes: + Incomplete read error. Attributes: + + - partial: read bytes string before the end of stream was reached + - expected: total number of expected bytes (or None if unknown) + """ -- partial: read bytes string before the end of stream was reached -- expected: total number of expected bytes (or None if unknown) -""" expected: int | None partial: bytes def __init__(self, partial: bytes, expected: int | None) -> None: ... @@ -58,13 +57,13 @@ Incomplete read error. Attributes: class LimitOverrunError(Exception): """Reached the buffer limit while looking for a separator. -Attributes: -- consumed: total number of to be consumed bytes. -""" + Attributes: + - consumed: total number of to be consumed bytes. + """ + consumed: int def __init__(self, message: str, consumed: int) -> None: ... if sys.version_info >= (3, 11): class BrokenBarrierError(RuntimeError): - """Barrier is broken by barrier.abort() call. -""" + """Barrier is broken by barrier.abort() call.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi index fa3e8825ff875..9213dcbd43196 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/format_helpers.pyi @@ -22,12 +22,13 @@ if sys.version_info >= (3, 13): def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False) -> str: """Format function arguments and keyword arguments. -Special case for a single parameter: ('hello',) is formatted as ('hello'). + Special case for a single parameter: ('hello',) is formatted as ('hello'). + + Note that this function only returns argument details when + debug=True is specified, as arguments may contain sensitive + information. + """ -Note that this function only returns argument details when -debug=True is specified, as arguments may contain sensitive -information. -""" def _format_callback( func: object, args: Iterable[Any], kwargs: dict[str, Any], *, debug: bool = False, suffix: str = "" ) -> str: ... @@ -37,11 +38,12 @@ else: def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: """Format function arguments and keyword arguments. - Special case for a single parameter: ('hello',) is formatted as ('hello'). - """ + Special case for a single parameter: ('hello',) is formatted as ('hello'). + """ + def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: """Replacement for traceback.extract_stack() that only does the -necessary work for asyncio debug mode. -""" + necessary work for asyncio debug mode. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi index 435efbd5c4f4e..8acaaea444bf9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/futures.pyi @@ -1,5 +1,5 @@ -"""A Future class similar to the one in PEP 3148. -""" +"""A Future class similar to the one in PEP 3148.""" + import sys from _asyncio import Future as Future from concurrent.futures._base import Future as _ConcurrentFuture @@ -19,5 +19,4 @@ else: _T = TypeVar("_T") def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: - """Wrap concurrent.futures.Future object. -""" + """Wrap concurrent.futures.Future object.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi index 21243d6bba928..2b9fb15f4fc63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/graph.pyi @@ -1,5 +1,5 @@ -"""Introspection utils for tasks call graphs. -""" +"""Introspection utils for tasks call graphs.""" + import sys from _typeshed import SupportsWrite from asyncio import Future @@ -12,14 +12,14 @@ if sys.version_info >= (3, 14): @dataclass(frozen=True, slots=True) class FrameCallGraphEntry: - """FrameCallGraphEntry(frame: frame) -""" + """FrameCallGraphEntry(frame: frame)""" + frame: FrameType @dataclass(frozen=True, slots=True) class FutureCallGraph: - """FutureCallGraph(future: _asyncio.Future, call_stack: tuple['FrameCallGraphEntry', ...], awaited_by: tuple['FutureCallGraph', ...]) -""" + """FutureCallGraph(future: _asyncio.Future, call_stack: tuple['FrameCallGraphEntry', ...], awaited_by: tuple['FutureCallGraph', ...])""" + future: Future[Any] call_stack: tuple[FrameCallGraphEntry, ...] awaited_by: tuple[FutureCallGraph, ...] @@ -28,46 +28,47 @@ if sys.version_info >= (3, 14): def capture_call_graph(future: None = None, /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: """Capture the async call graph for the current task or the provided Future. -The graph is represented with three data structures: + The graph is represented with three data structures: -* FutureCallGraph(future, call_stack, awaited_by) + * FutureCallGraph(future, call_stack, awaited_by) - Where 'future' is an instance of asyncio.Future or asyncio.Task. + Where 'future' is an instance of asyncio.Future or asyncio.Task. - 'call_stack' is a tuple of FrameGraphEntry objects. + 'call_stack' is a tuple of FrameGraphEntry objects. - 'awaited_by' is a tuple of FutureCallGraph objects. + 'awaited_by' is a tuple of FutureCallGraph objects. -* FrameCallGraphEntry(frame) + * FrameCallGraphEntry(frame) - Where 'frame' is a frame object of a regular Python function - in the call stack. + Where 'frame' is a frame object of a regular Python function + in the call stack. -Receives an optional 'future' argument. If not passed, -the current task will be used. If there's no current task, the function -returns None. + Receives an optional 'future' argument. If not passed, + the current task will be used. If there's no current task, the function + returns None. -If "capture_call_graph()" is introspecting *the current task*, the -optional keyword-only 'depth' argument can be used to skip the specified -number of frames from top of the stack. 
+ If "capture_call_graph()" is introspecting *the current task*, the + optional keyword-only 'depth' argument can be used to skip the specified + number of frames from top of the stack. + + If the optional keyword-only 'limit' argument is provided, each call stack + in the resulting graph is truncated to include at most ``abs(limit)`` + entries. If 'limit' is positive, the entries left are the closest to + the invocation point. If 'limit' is negative, the topmost entries are + left. If 'limit' is omitted or None, all entries are present. + If 'limit' is 0, the call stack is not captured at all, only + "awaited by" information is present. + """ -If the optional keyword-only 'limit' argument is provided, each call stack -in the resulting graph is truncated to include at most ``abs(limit)`` -entries. If 'limit' is positive, the entries left are the closest to -the invocation point. If 'limit' is negative, the topmost entries are -left. If 'limit' is omitted or None, all entries are present. -If 'limit' is 0, the call stack is not captured at all, only -"awaited by" information is present. -""" @overload def capture_call_graph(future: Future[Any], /, *, depth: int = 1, limit: int | None = None) -> FutureCallGraph | None: ... def format_call_graph(future: Future[Any] | None = None, /, *, depth: int = 1, limit: int | None = None) -> str: """Return the async call graph as a string for `future`. -If `future` is not provided, format the call graph for the current task. -""" + If `future` is not provided, format the call graph for the current task. + """ + def print_call_graph( future: Future[Any] | None = None, /, *, file: SupportsWrite[str] | None = None, depth: int = 1, limit: int | None = None ) -> None: - """Print the async call graph for the current task or the provided Future. -""" + """Print the async call graph for the current task or the provided Future.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi index b7fdd987a3f12..d38607b4106c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/locks.pyi @@ -1,5 +1,5 @@ -"""Synchronization primitives. -""" +"""Synchronization primitives.""" + import enum import sys from _typeshed import Unused @@ -34,54 +34,55 @@ class _ContextManagerMixin: class Lock(_ContextManagerMixin, _LoopBoundMixin): """Primitive lock objects. -A primitive lock is a synchronization primitive that is not owned -by a particular task when locked. A primitive lock is in one -of two states, 'locked' or 'unlocked'. - -It is created in the unlocked state. It has two basic methods, -acquire() and release(). When the state is unlocked, acquire() -changes the state to locked and returns immediately. When the -state is locked, acquire() blocks until a call to release() in -another task changes it to unlocked, then the acquire() call -resets it to locked and returns. The release() method should only -be called in the locked state; it changes the state to unlocked -and returns immediately. If an attempt is made to release an -unlocked lock, a RuntimeError will be raised. - -When more than one task is blocked in acquire() waiting for -the state to turn to unlocked, only one task proceeds when a -release() call resets the state to unlocked; successive release() -calls will unblock tasks in FIFO order. - -Locks also support the asynchronous context management protocol. -'async with lock' statement should be used. 
- -Usage: - - lock = Lock() - ... - await lock.acquire() - try: + A primitive lock is a synchronization primitive that is not owned + by a particular task when locked. A primitive lock is in one + of two states, 'locked' or 'unlocked'. + + It is created in the unlocked state. It has two basic methods, + acquire() and release(). When the state is unlocked, acquire() + changes the state to locked and returns immediately. When the + state is locked, acquire() blocks until a call to release() in + another task changes it to unlocked, then the acquire() call + resets it to locked and returns. The release() method should only + be called in the locked state; it changes the state to unlocked + and returns immediately. If an attempt is made to release an + unlocked lock, a RuntimeError will be raised. + + When more than one task is blocked in acquire() waiting for + the state to turn to unlocked, only one task proceeds when a + release() call resets the state to unlocked; successive release() + calls will unblock tasks in FIFO order. + + Locks also support the asynchronous context management protocol. + 'async with lock' statement should be used. + + Usage: + + lock = Lock() ... - finally: - lock.release() + await lock.acquire() + try: + ... + finally: + lock.release() -Context manager usage: + Context manager usage: - lock = Lock() - ... - async with lock: - ... + lock = Lock() + ... + async with lock: + ... -Lock objects can be tested for locking state: + Lock objects can be tested for locking state: - if not lock.locked(): - await lock.acquire() - else: - # lock is acquired - ... + if not lock.locked(): + await lock.acquire() + else: + # lock is acquired + ... + + """ -""" _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): def __init__(self) -> None: ... @@ -89,34 +90,36 @@ Lock objects can be tested for locking state: def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: - """Return True if lock is acquired. -""" + """Return True if lock is acquired.""" + async def acquire(self) -> Literal[True]: """Acquire a lock. -This method blocks until the lock is unlocked, then sets it to -locked and returns True. -""" + This method blocks until the lock is unlocked, then sets it to + locked and returns True. + """ + def release(self) -> None: """Release a lock. -When the lock is locked, reset it to unlocked, and return. -If any other tasks are blocked waiting for the lock to become -unlocked, allow exactly one of them to proceed. + When the lock is locked, reset it to unlocked, and return. + If any other tasks are blocked waiting for the lock to become + unlocked, allow exactly one of them to proceed. -When invoked on an unlocked lock, a RuntimeError is raised. + When invoked on an unlocked lock, a RuntimeError is raised. -There is no return value. -""" + There is no return value. + """ class Event(_LoopBoundMixin): """Asynchronous equivalent to threading.Event. -Class implementing event objects. An event manages a flag that can be set -to true with the set() method and reset to false with the clear() method. -The wait() method blocks until the flag is true. The flag is initially -false. -""" + Class implementing event objects. An event manages a flag that can be set + to true with the set() method and reset to false with the clear() method. + The wait() method blocks until the flag is true. The flag is initially + false. + """ + _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self) -> None: ... 
@@ -124,35 +127,38 @@ false. def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def is_set(self) -> bool: - """Return True if and only if the internal flag is true. -""" + """Return True if and only if the internal flag is true.""" + def set(self) -> None: """Set the internal flag to true. All tasks waiting for it to -become true are awakened. Tasks that call wait() once the flag is -true will not block at all. -""" + become true are awakened. Tasks that call wait() once the flag is + true will not block at all. + """ + def clear(self) -> None: """Reset the internal flag to false. Subsequently, tasks calling -wait() will block until set() is called to set the internal flag -to true again. -""" + wait() will block until set() is called to set the internal flag + to true again. + """ + async def wait(self) -> Literal[True]: """Block until the internal flag is true. -If the internal flag is true on entry, return True -immediately. Otherwise, block until another task calls -set() to set the flag to true, then return True. -""" + If the internal flag is true on entry, return True + immediately. Otherwise, block until another task calls + set() to set the flag to true, then return True. + """ class Condition(_ContextManagerMixin, _LoopBoundMixin): """Asynchronous equivalent to threading.Condition. -This class implements condition variable objects. A condition variable -allows one or more tasks to wait until they are notified by another -task. + This class implements condition variable objects. A condition variable + allows one or more tasks to wait until they are notified by another + task. + + A new Lock object is created and used as the underlying lock. + """ -A new Lock object is created and used as the underlying lock. -""" _waiters: deque[Future[Any]] if sys.version_info >= (3, 10): def __init__(self, lock: Lock | None = None) -> None: ... @@ -165,59 +171,63 @@ A new Lock object is created and used as the underlying lock. async def wait(self) -> Literal[True]: """Wait until notified. -If the calling task has not acquired the lock when this -method is called, a RuntimeError is raised. + If the calling task has not acquired the lock when this + method is called, a RuntimeError is raised. + + This method releases the underlying lock, and then blocks + until it is awakened by a notify() or notify_all() call for + the same condition variable in another task. Once + awakened, it re-acquires the lock and returns True. -This method releases the underlying lock, and then blocks -until it is awakened by a notify() or notify_all() call for -the same condition variable in another task. Once -awakened, it re-acquires the lock and returns True. + This method may return spuriously, + which is why the caller should always + re-check the state and be prepared to wait() again. + """ -This method may return spuriously, -which is why the caller should always -re-check the state and be prepared to wait() again. -""" async def wait_for(self, predicate: Callable[[], _T]) -> _T: """Wait until a predicate becomes true. -The predicate should be a callable whose result will be -interpreted as a boolean value. The method will repeatedly -wait() until it evaluates to true. The final predicate value is -the return value. -""" + The predicate should be a callable whose result will be + interpreted as a boolean value. The method will repeatedly + wait() until it evaluates to true. The final predicate value is + the return value. 
+ """ + def notify(self, n: int = 1) -> None: """By default, wake up one task waiting on this condition, if any. -If the calling task has not acquired the lock when this method -is called, a RuntimeError is raised. + If the calling task has not acquired the lock when this method + is called, a RuntimeError is raised. + + This method wakes up n of the tasks waiting for the condition + variable; if fewer than n are waiting, they are all awoken. -This method wakes up n of the tasks waiting for the condition - variable; if fewer than n are waiting, they are all awoken. + Note: an awakened task does not actually return from its + wait() call until it can reacquire the lock. Since notify() does + not release the lock, its caller should. + """ -Note: an awakened task does not actually return from its -wait() call until it can reacquire the lock. Since notify() does -not release the lock, its caller should. -""" def notify_all(self) -> None: """Wake up all tasks waiting on this condition. This method acts -like notify(), but wakes up all waiting tasks instead of one. If the -calling task has not acquired the lock when this method is called, -a RuntimeError is raised. -""" + like notify(), but wakes up all waiting tasks instead of one. If the + calling task has not acquired the lock when this method is called, + a RuntimeError is raised. + """ class Semaphore(_ContextManagerMixin, _LoopBoundMixin): """A Semaphore implementation. -A semaphore manages an internal counter which is decremented by each -acquire() call and incremented by each release() call. The counter -can never go below zero; when acquire() finds that it is zero, it blocks, -waiting until some other thread calls release(). + A semaphore manages an internal counter which is decremented by each + acquire() call and incremented by each release() call. The counter + can never go below zero; when acquire() finds that it is zero, it blocks, + waiting until some other thread calls release(). -Semaphores also support the context management protocol. + Semaphores also support the context management protocol. + + The optional argument gives the initial value for the internal + counter; it defaults to 1. If the value given is less than 0, + ValueError is raised. + """ -The optional argument gives the initial value for the internal -counter; it defaults to 1. If the value given is less than 0, -ValueError is raised. -""" _value: int _waiters: deque[Future[Any]] | None if sys.version_info >= (3, 10): @@ -226,33 +236,34 @@ ValueError is raised. def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: - """Returns True if semaphore cannot be acquired immediately. -""" + """Returns True if semaphore cannot be acquired immediately.""" + async def acquire(self) -> Literal[True]: """Acquire a semaphore. -If the internal counter is larger than zero on entry, -decrement it by one and return True immediately. If it is -zero on entry, block, waiting until some other task has -called release() to make it larger than 0, and then return -True. -""" + If the internal counter is larger than zero on entry, + decrement it by one and return True immediately. If it is + zero on entry, block, waiting until some other task has + called release() to make it larger than 0, and then return + True. + """ + def release(self) -> None: """Release a semaphore, incrementing the internal counter by one. -When it was zero on entry and another task is waiting for it to -become larger than zero again, wake up that task. 
-""" + When it was zero on entry and another task is waiting for it to + become larger than zero again, wake up that task. + """ + def _wake_up_next(self) -> None: - """Wake up the first waiter that isn't done. -""" + """Wake up the first waiter that isn't done.""" class BoundedSemaphore(Semaphore): """A bounded semaphore implementation. -This raises ValueError in release() if it would increase the value -above the initial value. -""" + This raises ValueError in release() if it would increase the value + above the initial value. + """ if sys.version_info >= (3, 11): class _BarrierState(enum.Enum): # undocumented @@ -264,44 +275,47 @@ if sys.version_info >= (3, 11): class Barrier(_LoopBoundMixin): """Asyncio equivalent to threading.Barrier -Implements a Barrier primitive. -Useful for synchronizing a fixed number of tasks at known synchronization -points. Tasks block on 'wait()' and are simultaneously awoken once they -have all made their call. -""" + Implements a Barrier primitive. + Useful for synchronizing a fixed number of tasks at known synchronization + points. Tasks block on 'wait()' and are simultaneously awoken once they + have all made their call. + """ + def __init__(self, parties: int) -> None: - """Create a barrier, initialised to 'parties' tasks. -""" + """Create a barrier, initialised to 'parties' tasks.""" + async def __aenter__(self) -> Self: ... async def __aexit__(self, *args: Unused) -> None: ... async def wait(self) -> int: """Wait for the barrier. -When the specified number of tasks have started waiting, they are all -simultaneously awoken. -Returns an unique and individual index number from 0 to 'parties-1'. -""" + When the specified number of tasks have started waiting, they are all + simultaneously awoken. + Returns an unique and individual index number from 0 to 'parties-1'. + """ + async def abort(self) -> None: """Place the barrier into a 'broken' state. -Useful in case of error. Any currently waiting tasks and tasks -attempting to 'wait()' will have BrokenBarrierError raised. -""" + Useful in case of error. Any currently waiting tasks and tasks + attempting to 'wait()' will have BrokenBarrierError raised. + """ + async def reset(self) -> None: """Reset the barrier to the initial state. -Any tasks currently waiting will get the BrokenBarrier exception -raised. -""" + Any tasks currently waiting will get the BrokenBarrier exception + raised. + """ + @property def parties(self) -> int: - """Return the number of tasks required to trip the barrier. -""" + """Return the number of tasks required to trip the barrier.""" + @property def n_waiting(self) -> int: - """Return the number of tasks currently waiting at the barrier. -""" + """Return the number of tasks currently waiting at the barrier.""" + @property def broken(self) -> bool: - """Return True if the barrier is in a broken state. -""" + """Return True if the barrier is in a broken state.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi index 29b24be2208d8..a544534b38ea7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/log.pyi @@ -1,5 +1,5 @@ -"""Logging configuration. 
-""" +"""Logging configuration.""" + import logging logger: logging.Logger diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi index c6a7430bd5ef5..1950908ee08e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/mixins.pyi @@ -1,5 +1,5 @@ -"""Event loop mixins. -""" +"""Event loop mixins.""" + import sys import threading from typing_extensions import Never diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi index e434594f39ed9..c67cd07286563 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/proactor_events.pyi @@ -3,6 +3,7 @@ A proactor is a "notify-on-completion" multiplexer. Currently a proactor is only implemented on Windows with IOCP. """ + import sys from collections.abc import Mapping from socket import socket @@ -13,8 +14,8 @@ from . import base_events, constants, events, futures, streams, transports __all__ = ("BaseProactorEventLoop",) class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): - """Base class for pipe and socket transports. -""" + """Base class for pipe and socket transports.""" + def __init__( self, loop: events.AbstractEventLoop, @@ -27,8 +28,8 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr def __del__(self) -> None: ... class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): - """Transport for read pipes. -""" + """Transport for read pipes.""" + if sys.version_info >= (3, 10): def __init__( self, @@ -52,16 +53,16 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran ) -> None: ... class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): - """Transport for write pipes. -""" + """Transport for write pipes.""" + class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... + class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - """Transport for duplex pipes. -""" + """Transport for duplex pipes.""" class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - """Transport for connected sockets. -""" + """Transport for connected sockets.""" + _sendfile_compatible: ClassVar[constants._SendfileMode] def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi index e989001725373..bd2e9c1118cf3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/protocols.pyi @@ -1,5 +1,5 @@ -"""Abstract Protocol base classes. -""" +"""Abstract Protocol base classes.""" + from _typeshed import ReadableBuffer from asyncio import transports from typing import Any @@ -10,182 +10,192 @@ __all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", class BaseProtocol: """Common base class for protocol interfaces. -Usually user implements protocols that derived from BaseProtocol -like Protocol or ProcessProtocol. + Usually user implements protocols that derived from BaseProtocol + like Protocol or ProcessProtocol. 
+ + The only case when BaseProtocol should be implemented directly is + write-only transport like write pipe + """ -The only case when BaseProtocol should be implemented directly is -write-only transport like write pipe -""" __slots__ = () def connection_made(self, transport: transports.BaseTransport) -> None: """Called when a connection is made. -The argument is the transport representing the pipe connection. -To receive data, wait for data_received() calls. -When the connection is closed, connection_lost() is called. -""" + The argument is the transport representing the pipe connection. + To receive data, wait for data_received() calls. + When the connection is closed, connection_lost() is called. + """ + def connection_lost(self, exc: Exception | None) -> None: """Called when the connection is lost or closed. -The argument is an exception object or None (the latter -meaning a regular EOF is received or the connection was -aborted or closed). -""" + The argument is an exception object or None (the latter + meaning a regular EOF is received or the connection was + aborted or closed). + """ + def pause_writing(self) -> None: """Called when the transport's buffer goes over the high-water mark. -Pause and resume calls are paired -- pause_writing() is called -once when the buffer goes strictly over the high-water mark -(even if subsequent writes increases the buffer size even -more), and eventually resume_writing() is called once when the -buffer size reaches the low-water mark. - -Note that if the buffer size equals the high-water mark, -pause_writing() is not called -- it must go strictly over. -Conversely, resume_writing() is called when the buffer size is -equal or lower than the low-water mark. These end conditions -are important to ensure that things go as expected when either -mark is zero. - -NOTE: This is the only Protocol callback that is not called -through EventLoop.call_soon() -- if it were, it would have no -effect when it's most needed (when the app keeps writing -without yielding until pause_writing() is called). -""" + Pause and resume calls are paired -- pause_writing() is called + once when the buffer goes strictly over the high-water mark + (even if subsequent writes increases the buffer size even + more), and eventually resume_writing() is called once when the + buffer size reaches the low-water mark. + + Note that if the buffer size equals the high-water mark, + pause_writing() is not called -- it must go strictly over. + Conversely, resume_writing() is called when the buffer size is + equal or lower than the low-water mark. These end conditions + are important to ensure that things go as expected when either + mark is zero. + + NOTE: This is the only Protocol callback that is not called + through EventLoop.call_soon() -- if it were, it would have no + effect when it's most needed (when the app keeps writing + without yielding until pause_writing() is called). + """ + def resume_writing(self) -> None: """Called when the transport's buffer drains below the low-water mark. -See pause_writing() for details. -""" + See pause_writing() for details. + """ class Protocol(BaseProtocol): """Interface for stream protocol. -The user should implement this interface. They can inherit from -this class but don't need to. The implementations here do -nothing (they don't raise exceptions). + The user should implement this interface. They can inherit from + this class but don't need to. The implementations here do + nothing (they don't raise exceptions). 
+ + When the user wants to requests a transport, they pass a protocol + factory to a utility function (e.g., EventLoop.create_connection()). -When the user wants to requests a transport, they pass a protocol -factory to a utility function (e.g., EventLoop.create_connection()). + When the connection is made successfully, connection_made() is + called with a suitable transport object. Then data_received() + will be called 0 or more times with data (bytes) received from the + transport; finally, connection_lost() will be called exactly once + with either an exception object or None as an argument. -When the connection is made successfully, connection_made() is -called with a suitable transport object. Then data_received() -will be called 0 or more times with data (bytes) received from the -transport; finally, connection_lost() will be called exactly once -with either an exception object or None as an argument. + State machine of calls: -State machine of calls: + start -> CM [-> DR*] [-> ER?] -> CL -> end - start -> CM [-> DR*] [-> ER?] -> CL -> end + * CM: connection_made() + * DR: data_received() + * ER: eof_received() + * CL: connection_lost() + """ -* CM: connection_made() -* DR: data_received() -* ER: eof_received() -* CL: connection_lost() -""" # Need annotation or mypy will complain about 'Cannot determine type of "__slots__" in base class' __slots__: tuple[()] = () def data_received(self, data: bytes) -> None: """Called when some data is received. -The argument is a bytes object. -""" + The argument is a bytes object. + """ + def eof_received(self) -> bool | None: """Called when the other end calls write_eof() or equivalent. -If this returns a false value (including None), the transport -will close itself. If it returns a true value, closing the -transport is up to the protocol. -""" + If this returns a false value (including None), the transport + will close itself. If it returns a true value, closing the + transport is up to the protocol. + """ class BufferedProtocol(BaseProtocol): """Interface for stream protocol with manual buffer control. -Event methods, such as `create_server` and `create_connection`, -accept factories that return protocols that implement this interface. + Event methods, such as `create_server` and `create_connection`, + accept factories that return protocols that implement this interface. + + The idea of BufferedProtocol is that it allows to manually allocate + and control the receive buffer. Event loops can then use the buffer + provided by the protocol to avoid unnecessary data copies. This + can result in noticeable performance improvement for protocols that + receive big amounts of data. Sophisticated protocols can allocate + the buffer only once at creation time. -The idea of BufferedProtocol is that it allows to manually allocate -and control the receive buffer. Event loops can then use the buffer -provided by the protocol to avoid unnecessary data copies. This -can result in noticeable performance improvement for protocols that -receive big amounts of data. Sophisticated protocols can allocate -the buffer only once at creation time. + State machine of calls: -State machine of calls: + start -> CM [-> GB [-> BU?]]* [-> ER?] -> CL -> end - start -> CM [-> GB [-> BU?]]* [-> ER?] 
-> CL -> end + * CM: connection_made() + * GB: get_buffer() + * BU: buffer_updated() + * ER: eof_received() + * CL: connection_lost() + """ -* CM: connection_made() -* GB: get_buffer() -* BU: buffer_updated() -* ER: eof_received() -* CL: connection_lost() -""" __slots__ = () def get_buffer(self, sizehint: int) -> ReadableBuffer: """Called to allocate a new receive buffer. -*sizehint* is a recommended minimal size for the returned -buffer. When set to -1, the buffer size can be arbitrary. + *sizehint* is a recommended minimal size for the returned + buffer. When set to -1, the buffer size can be arbitrary. + + Must return an object that implements the + :ref:`buffer protocol `. + It is an error to return a zero-sized buffer. + """ -Must return an object that implements the -:ref:`buffer protocol `. -It is an error to return a zero-sized buffer. -""" def buffer_updated(self, nbytes: int) -> None: """Called when the buffer was updated with the received data. -*nbytes* is the total number of bytes that were written to -the buffer. -""" + *nbytes* is the total number of bytes that were written to + the buffer. + """ + def eof_received(self) -> bool | None: """Called when the other end calls write_eof() or equivalent. -If this returns a false value (including None), the transport -will close itself. If it returns a true value, closing the -transport is up to the protocol. -""" + If this returns a false value (including None), the transport + will close itself. If it returns a true value, closing the + transport is up to the protocol. + """ class DatagramProtocol(BaseProtocol): - """Interface for datagram protocol. -""" + """Interface for datagram protocol.""" + __slots__ = () def connection_made(self, transport: transports.DatagramTransport) -> None: # type: ignore[override] """Called when a connection is made. -The argument is the transport representing the pipe connection. -To receive data, wait for data_received() calls. -When the connection is closed, connection_lost() is called. -""" + The argument is the transport representing the pipe connection. + To receive data, wait for data_received() calls. + When the connection is closed, connection_lost() is called. + """ # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. # See https://github.com/python/typing/issues/566 def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: - """Called when some datagram is received. -""" + """Called when some datagram is received.""" + def error_received(self, exc: Exception) -> None: """Called when a send or receive operation raises an OSError. -(Other than BlockingIOError or InterruptedError.) -""" + (Other than BlockingIOError or InterruptedError.) + """ class SubprocessProtocol(BaseProtocol): - """Interface for protocol for subprocess calls. -""" + """Interface for protocol for subprocess calls.""" + __slots__: tuple[()] = () def pipe_data_received(self, fd: int, data: bytes) -> None: """Called when the subprocess writes data into stdout/stderr pipe. -fd is int file descriptor. -data is bytes object. -""" + fd is int file descriptor. + data is bytes object. + """ + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: """Called when a file descriptor associated with the child process is -closed. + closed. + + fd is the int file descriptor that was closed. 
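To make the Protocol state machine above concrete (CM -> DR* -> ER? -> CL), here is a hedged sketch of a minimal client protocol driven by loop.create_connection(); the host and port are placeholders, not values taken from this patch.

    import asyncio

    class EchoClient(asyncio.Protocol):
        def __init__(self, done: asyncio.Future[None]) -> None:
            self.done = done

        def connection_made(self, transport) -> None:        # CM
            self.transport = transport
            transport.write(b"hello\r\n")

        def data_received(self, data: bytes) -> None:         # DR
            print("received:", data)
            self.transport.close()                            # triggers CL

        def connection_lost(self, exc) -> None:               # CL
            self.done.set_result(None)

    async def main() -> None:
        loop = asyncio.get_running_loop()
        done: asyncio.Future[None] = loop.create_future()
        # "example.com", 80 are placeholders; any TCP server that replies works.
        await loop.create_connection(lambda: EchoClient(done), "example.com", 80)
        await done

    asyncio.run(main())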
+ """ -fd is the int file descriptor that was closed. -""" def process_exited(self) -> None: - """Called when subprocess has exited. -""" + """Called when subprocess has exited.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi index 028d1b7b7e649..691da360709b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/queues.pyi @@ -10,11 +10,10 @@ else: _LoopBoundMixin = object class QueueEmpty(Exception): - """Raised when Queue.get_nowait() is called on an empty Queue. -""" + """Raised when Queue.get_nowait() is called on an empty Queue.""" + class QueueFull(Exception): - """Raised when the Queue.put_nowait() method is called on a full Queue. -""" + """Raised when the Queue.put_nowait() method is called on a full Queue.""" # Keep asyncio.__all__ updated with any changes to __all__ here if sys.version_info >= (3, 13): @@ -27,22 +26,22 @@ _T = TypeVar("_T") if sys.version_info >= (3, 13): class QueueShutDown(Exception): - """Raised when putting on to or getting from a shut-down Queue. -""" + """Raised when putting on to or getting from a shut-down Queue.""" # If Generic[_T] is last and _LoopBoundMixin is object, pyright is unhappy. # We can remove the noqa pragma when dropping 3.9 support. class Queue(Generic[_T], _LoopBoundMixin): # noqa: Y059 """A queue, useful for coordinating producer and consumer coroutines. -If maxsize is less than or equal to zero, the queue size is infinite. If it -is an integer greater than 0, then "await put()" will block when the -queue reaches maxsize, until an item is removed by get(). + If maxsize is less than or equal to zero, the queue size is infinite. If it + is an integer greater than 0, then "await put()" will block when the + queue reaches maxsize, until an item is removed by get(). + + Unlike the standard library Queue, you can reliably know this Queue's size + with qsize(), since your single-threaded asyncio application won't be + interrupted between calling qsize() and doing an operation on the Queue. + """ -Unlike the standard library Queue, you can reliably know this Queue's size -with qsize(), since your single-threaded asyncio application won't be -interrupted between calling qsize() and doing an operation on the Queue. -""" if sys.version_info >= (3, 10): def __init__(self, maxsize: int = 0) -> None: ... else: @@ -53,99 +52,106 @@ interrupted between calling qsize() and doing an operation on the Queue. def _put(self, item: _T) -> None: ... def _format(self) -> str: ... def qsize(self) -> int: - """Number of items in the queue. -""" + """Number of items in the queue.""" + @property def maxsize(self) -> int: - """Number of items allowed in the queue. -""" + """Number of items allowed in the queue.""" + def empty(self) -> bool: - """Return True if the queue is empty, False otherwise. -""" + """Return True if the queue is empty, False otherwise.""" + def full(self) -> bool: """Return True if there are maxsize items in the queue. -Note: if the Queue was initialized with maxsize=0 (the default), -then full() is never True. -""" + Note: if the Queue was initialized with maxsize=0 (the default), + then full() is never True. + """ + async def put(self, item: _T) -> None: """Put an item into the queue. -Put an item into the queue. If the queue is full, wait until a free -slot is available before adding item. + Put an item into the queue. 
If the queue is full, wait until a free + slot is available before adding item. + + Raises QueueShutDown if the queue has been shut down. + """ -Raises QueueShutDown if the queue has been shut down. -""" def put_nowait(self, item: _T) -> None: """Put an item into the queue without blocking. -If no free slot is immediately available, raise QueueFull. + If no free slot is immediately available, raise QueueFull. + + Raises QueueShutDown if the queue has been shut down. + """ -Raises QueueShutDown if the queue has been shut down. -""" async def get(self) -> _T: """Remove and return an item from the queue. -If queue is empty, wait until an item is available. + If queue is empty, wait until an item is available. + + Raises QueueShutDown if the queue has been shut down and is empty, or + if the queue has been shut down immediately. + """ -Raises QueueShutDown if the queue has been shut down and is empty, or -if the queue has been shut down immediately. -""" def get_nowait(self) -> _T: """Remove and return an item from the queue. -Return an item if one is immediately available, else raise QueueEmpty. + Return an item if one is immediately available, else raise QueueEmpty. + + Raises QueueShutDown if the queue has been shut down and is empty, or + if the queue has been shut down immediately. + """ -Raises QueueShutDown if the queue has been shut down and is empty, or -if the queue has been shut down immediately. -""" async def join(self) -> None: """Block until all items in the queue have been gotten and processed. -The count of unfinished tasks goes up whenever an item is added to the -queue. The count goes down whenever a consumer calls task_done() to -indicate that the item was retrieved and all work on it is complete. -When the count of unfinished tasks drops to zero, join() unblocks. -""" + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer calls task_done() to + indicate that the item was retrieved and all work on it is complete. + When the count of unfinished tasks drops to zero, join() unblocks. + """ + def task_done(self) -> None: """Indicate that a formerly enqueued task is complete. -Used by queue consumers. For each get() used to fetch a task, -a subsequent call to task_done() tells the queue that the processing -on the task is complete. + Used by queue consumers. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. + + If a join() is currently blocking, it will resume when all items have + been processed (meaning that a task_done() call was received for every + item that had been put() into the queue). -If a join() is currently blocking, it will resume when all items have -been processed (meaning that a task_done() call was received for every -item that had been put() into the queue). + shutdown(immediate=True) calls task_done() for each remaining item in + the queue. -shutdown(immediate=True) calls task_done() for each remaining item in -the queue. + Raises ValueError if called more times than there were items placed in + the queue. + """ -Raises ValueError if called more times than there were items placed in -the queue. -""" def __class_getitem__(cls, type: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). 
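A small producer/consumer sketch matching the Queue docstrings above (illustrative only): put()/get() coordinate the two tasks, while join()/task_done() provide the completion barrier.

    import asyncio

    async def main() -> None:
        queue: asyncio.Queue[int] = asyncio.Queue(maxsize=2)

        async def producer() -> None:
            for i in range(5):
                await queue.put(i)          # blocks while the queue is full

        async def consumer() -> None:
            while True:
                item = await queue.get()    # blocks while the queue is empty
                print("got", item)
                queue.task_done()

        worker = asyncio.create_task(consumer())
        await producer()
        await queue.join()                  # wait until every item is processed
        worker.cancel()

    asyncio.run(main())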
+ """ if sys.version_info >= (3, 13): def shutdown(self, immediate: bool = False) -> None: """Shut-down the queue, making queue gets and puts raise QueueShutDown. -By default, gets will only raise once the queue is empty. Set -'immediate' to True to make gets raise immediately instead. + By default, gets will only raise once the queue is empty. Set + 'immediate' to True to make gets raise immediately instead. -All blocked callers of put() and get() will be unblocked. If -'immediate', a task is marked as done for each item remaining in -the queue, which may unblock callers of join(). -""" + All blocked callers of put() and get() will be unblocked. If + 'immediate', a task is marked as done for each item remaining in + the queue, which may unblock callers of join(). + """ class PriorityQueue(Queue[SupportsRichComparisonT]): """A subclass of Queue; retrieves entries in priority order (lowest first). -Entries are typically tuples of the form: (priority number, data). -""" + Entries are typically tuples of the form: (priority number, data). + """ + class LifoQueue(Queue[_T]): - """A subclass of Queue that retrieves most recently added entries first. -""" + """A subclass of Queue that retrieves most recently added entries first.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi index 54589f26b42f0..25698e14a64e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/runners.pyi @@ -19,40 +19,40 @@ if sys.version_info >= (3, 11): class Runner: """A context manager that controls event loop life cycle. -The context manager always creates a new event loop, -allows to run async functions inside it, -and properly finalizes the loop at the context manager exit. + The context manager always creates a new event loop, + allows to run async functions inside it, + and properly finalizes the loop at the context manager exit. -If debug is True, the event loop will be run in debug mode. -If loop_factory is passed, it is used for new event loop creation. + If debug is True, the event loop will be run in debug mode. + If loop_factory is passed, it is used for new event loop creation. -asyncio.run(main(), debug=True) + asyncio.run(main(), debug=True) -is a shortcut for + is a shortcut for -with asyncio.Runner(debug=True) as runner: - runner.run(main()) + with asyncio.Runner(debug=True) as runner: + runner.run(main()) -The run() method can be called multiple times within the runner's context. + The run() method can be called multiple times within the runner's context. -This can be useful for interactive console (e.g. IPython), -unittest runners, console tools, -- everywhere when async code -is called from existing sync framework and where the preferred single -asyncio.run() call doesn't work. + This can be useful for interactive console (e.g. IPython), + unittest runners, console tools, -- everywhere when async code + is called from existing sync framework and where the preferred single + asyncio.run() call doesn't work. + + """ -""" def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... def close(self) -> None: - """Shutdown and close event loop. -""" + """Shutdown and close event loop.""" + def get_loop(self) -> AbstractEventLoop: - """Return embedded event loop. 
-""" + """Return embedded event loop.""" + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: - """Run code in the embedded event loop. -""" + """Run code in the embedded event loop.""" if sys.version_info >= (3, 12): def run( @@ -60,55 +60,55 @@ if sys.version_info >= (3, 12): ) -> _T: """Execute the coroutine and return the result. -This function runs the passed coroutine, taking care of -managing the asyncio event loop, finalizing asynchronous -generators and closing the default executor. + This function runs the passed coroutine, taking care of + managing the asyncio event loop, finalizing asynchronous + generators and closing the default executor. -This function cannot be called when another asyncio event loop is -running in the same thread. + This function cannot be called when another asyncio event loop is + running in the same thread. -If debug is True, the event loop will be run in debug mode. -If loop_factory is passed, it is used for new event loop creation. + If debug is True, the event loop will be run in debug mode. + If loop_factory is passed, it is used for new event loop creation. -This function always creates a new event loop and closes it at the end. -It should be used as a main entry point for asyncio programs, and should -ideally only be called once. + This function always creates a new event loop and closes it at the end. + It should be used as a main entry point for asyncio programs, and should + ideally only be called once. -The executor is given a timeout duration of 5 minutes to shutdown. -If the executor hasn't finished within that duration, a warning is -emitted and the executor is closed. + The executor is given a timeout duration of 5 minutes to shutdown. + If the executor hasn't finished within that duration, a warning is + emitted and the executor is closed. -Example: + Example: - async def main(): - await asyncio.sleep(1) - print('hello') + async def main(): + await asyncio.sleep(1) + print('hello') - asyncio.run(main()) -""" + asyncio.run(main()) + """ else: def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: """Execute the coroutine and return the result. - This function runs the passed coroutine, taking care of - managing the asyncio event loop and finalizing asynchronous - generators. + This function runs the passed coroutine, taking care of + managing the asyncio event loop and finalizing asynchronous + generators. - This function cannot be called when another asyncio event loop is - running in the same thread. + This function cannot be called when another asyncio event loop is + running in the same thread. - If debug is True, the event loop will be run in debug mode. + If debug is True, the event loop will be run in debug mode. - This function always creates a new event loop and closes it at the end. - It should be used as a main entry point for asyncio programs, and should - ideally only be called once. + This function always creates a new event loop and closes it at the end. + It should be used as a main entry point for asyncio programs, and should + ideally only be called once. 
- Example: + Example: - async def main(): - await asyncio.sleep(1) - print('hello') + async def main(): + await asyncio.sleep(1) + print('hello') - asyncio.run(main()) - """ + asyncio.run(main()) + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi index c67a9942360c1..358dd1abbc3f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/selector_events.pyi @@ -3,6 +3,7 @@ A selector is a "notify-when-ready" multiplexer. For a subclass which also includes support for signal handling, see the unix_events sub-module. """ + import selectors from socket import socket @@ -13,13 +14,14 @@ __all__ = ("BaseSelectorEventLoop",) class BaseSelectorEventLoop(base_events.BaseEventLoop): """Selector event loop. -See events.EventLoop for API specification. -""" + See events.EventLoop for API specification. + """ + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... async def sock_recv(self, sock: socket, n: int) -> bytes: """Receive data from the socket. -The return value is a bytes object representing the data received. -The maximum amount of data to be received at once is specified by -nbytes. -""" + The return value is a bytes object representing the data received. + The maximum amount of data to be received at once is specified by + nbytes. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi index ec38f27e25617..1a73b0cec2130 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/sslproto.pyi @@ -38,20 +38,21 @@ if sys.version_info < (3, 11): class _SSLPipe: """An SSL "Pipe". - An SSL pipe allows you to communicate with an SSL/TLS protocol instance - through memory buffers. It can be used to implement a security layer for an - existing connection where you don't have access to the connection's file - descriptor, or for some reason you don't want to use it. - - An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, - data is passed through untransformed. In wrapped mode, application level - data is encrypted to SSL record level data and vice versa. The SSL record - level is the lowest level in the SSL protocol suite and is what travels - as-is over the wire. - - An SslPipe initially is in "unwrapped" mode. To start SSL, call - do_handshake(). To shutdown SSL again, call unwrap(). - """ + An SSL pipe allows you to communicate with an SSL/TLS protocol instance + through memory buffers. It can be used to implement a security layer for an + existing connection where you don't have access to the connection's file + descriptor, or for some reason you don't want to use it. + + An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, + data is passed through untransformed. In wrapped mode, application level + data is encrypted to SSL record level data and vice versa. The SSL record + level is the lowest level in the SSL protocol suite and is what travels + as-is over the wire. + + An SslPipe initially is in "unwrapped" mode. To start SSL, call + do_handshake(). To shutdown SSL again, call unwrap(). 
+ """ + max_size: ClassVar[int] _context: ssl.SSLContext @@ -66,91 +67,99 @@ if sys.version_info < (3, 11): _shutdown_cb: Callable[[], None] | None def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: """ - The *context* argument specifies the ssl.SSLContext to use. + The *context* argument specifies the ssl.SSLContext to use. - The *server_side* argument indicates whether this is a server side or - client side transport. + The *server_side* argument indicates whether this is a server side or + client side transport. + + The optional *server_hostname* argument can be used to specify the + hostname you are connecting to. You may only specify this parameter if + the _ssl module supports Server Name Indication (SNI). + """ - The optional *server_hostname* argument can be used to specify the - hostname you are connecting to. You may only specify this parameter if - the _ssl module supports Server Name Indication (SNI). - """ @property def context(self) -> ssl.SSLContext: - """The SSL context passed to the constructor. -""" + """The SSL context passed to the constructor.""" + @property def ssl_object(self) -> ssl.SSLObject | None: """The internal ssl.SSLObject instance. - Return None if the pipe is not wrapped. - """ + Return None if the pipe is not wrapped. + """ + @property def need_ssldata(self) -> bool: """Whether more record level data is needed to complete a handshake - that is currently in progress. -""" + that is currently in progress. + """ + @property def wrapped(self) -> bool: """ - Whether a security layer is currently in effect. + Whether a security layer is currently in effect. + + Return False during handshake. + """ - Return False during handshake. - """ def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: """Start the SSL handshake. - Return a list of ssldata. A ssldata element is a list of buffers + Return a list of ssldata. A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the handshake is complete. The callback will be + called with None if successful, else an exception instance. + """ - The optional *callback* argument can be used to install a callback that - will be called when the handshake is complete. The callback will be - called with None if successful, else an exception instance. - """ def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: """Start the SSL shutdown sequence. - Return a list of ssldata. A ssldata element is a list of buffers + Return a list of ssldata. A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the shutdown is complete. The callback will be + called without arguments. + """ - The optional *callback* argument can be used to install a callback that - will be called when the shutdown is complete. The callback will be - called without arguments. - """ def feed_eof(self) -> None: """Send a potentially "ragged" EOF. - This method will raise an SSL_ERROR_EOF exception if the EOF is - unexpected. - """ + This method will raise an SSL_ERROR_EOF exception if the EOF is + unexpected. + """ + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: """Feed SSL record level data into the pipe. - The data must be a bytes instance. It is OK to send an empty bytes - instance. 
This can be used to get ssldata for a handshake initiated by - this endpoint. + The data must be a bytes instance. It is OK to send an empty bytes + instance. This can be used to get ssldata for a handshake initiated by + this endpoint. - Return a (ssldata, appdata) tuple. The ssldata element is a list of - buffers containing SSL data that needs to be sent to the remote SSL. + Return a (ssldata, appdata) tuple. The ssldata element is a list of + buffers containing SSL data that needs to be sent to the remote SSL. + + The appdata element is a list of buffers containing plaintext data that + needs to be forwarded to the application. The appdata list may contain + an empty buffer indicating an SSL "close_notify" alert. This alert must + be acknowledged by calling shutdown(). + """ - The appdata element is a list of buffers containing plaintext data that - needs to be forwarded to the application. The appdata list may contain - an empty buffer indicating an SSL "close_notify" alert. This alert must - be acknowledged by calling shutdown(). - """ def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: """Feed plaintext data into the pipe. - Return an (ssldata, offset) tuple. The ssldata element is a list of - buffers containing record level data that needs to be sent to the - remote SSL instance. The offset is the number of plaintext bytes that - were processed, which may be less than the length of data. - - NOTE: In case of short writes, this call MUST be retried with the SAME - buffer passed into the *data* argument (i.e. the id() must be the - same). This is an OpenSSL requirement. A further particularity is that - a short write will always have offset == 0, because the _ssl module - does not enable partial writes. And even though the offset is zero, - there will still be encrypted data in ssldata. - """ + Return an (ssldata, offset) tuple. The ssldata element is a list of + buffers containing record level data that needs to be sent to the + remote SSL instance. The offset is the number of plaintext bytes that + were processed, which may be less than the length of data. + + NOTE: In case of short writes, this call MUST be retried with the SAME + buffer passed into the *data* argument (i.e. the id() must be the + same). This is an OpenSSL requirement. A further particularity is that + a short write will always have offset == 0, because the _ssl module + does not enable partial writes. And even though the offset is zero, + there will still be encrypted data in ssldata. + """ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _sendfile_compatible: ClassVar[constants._SendfileMode] @@ -163,44 +172,44 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): _closed: bool def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: - """Get optional transport information. -""" + """Get optional transport information.""" + @property def _protocol_paused(self) -> bool: ... def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape """Write some data bytes to the transport. -This does not block; it buffers the data and arranges for it -to be sent out asynchronously. -""" + This does not block; it buffers the data and arranges for it + to be sent out asynchronously. 
+ """ + def can_write_eof(self) -> Literal[False]: - """Return True if this transport supports write_eof(), False if not. -""" + """Return True if this transport supports write_eof(), False if not.""" if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... def get_read_buffer_limits(self) -> tuple[int, int]: ... def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: """Set the high- and low-water limits for read flow control. -These two values control when to call the upstream transport's -pause_reading() and resume_reading() methods. If specified, -the low-water limit must be less than or equal to the -high-water limit. Neither value can be negative. - -The defaults are implementation-specific. If only the -high-water limit is given, the low-water limit defaults to an -implementation-specific value less than or equal to the -high-water limit. Setting high to zero forces low to zero as -well, and causes pause_reading() to be called whenever the -buffer becomes non-empty. Setting low to zero causes -resume_reading() to be called only once the buffer is empty. -Use of zero for either limit is generally sub-optimal as it -reduces opportunities for doing I/O and computation -concurrently. -""" + These two values control when to call the upstream transport's + pause_reading() and resume_reading() methods. If specified, + the low-water limit must be less than or equal to the + high-water limit. Neither value can be negative. + + The defaults are implementation-specific. If only the + high-water limit is given, the low-water limit defaults to an + implementation-specific value less than or equal to the + high-water limit. Setting high to zero forces low to zero as + well, and causes pause_reading() to be called whenever the + buffer becomes non-empty. Setting low to zero causes + resume_reading() to be called only once the buffer is empty. + Use of zero for either limit is generally sub-optimal as it + reduces opportunities for doing I/O and computation + concurrently. + """ + def get_read_buffer_size(self) -> int: - """Return the current size of the read buffer. -""" + """Return the current size of the read buffer.""" def __del__(self) -> None: ... @@ -215,6 +224,7 @@ class SSLProtocol(_SSLProtocolBase): Implementation of SSL on top of a socket using incoming and outgoing buffers which are ssl.MemoryBIO objects. """ + _server_side: bool _server_hostname: str | None _sslcontext: ssl.SSLContext @@ -269,18 +279,20 @@ class SSLProtocol(_SSLProtocolBase): def connection_lost(self, exc: BaseException | None) -> None: """Called when the low-level connection is lost or closed. -The argument is an exception object or None (the latter -meaning a regular EOF is received or the connection was -aborted or closed). -""" + The argument is an exception object or None (the latter + meaning a regular EOF is received or the connection was + aborted or closed). + """ + def eof_received(self) -> None: """Called when the other end of the low-level stream -is half-closed. + is half-closed. + + If this returns a false value (including None), the transport + will close itself. If it returns a true value, closing the + transport is up to the protocol. + """ -If this returns a false value (including None), the transport -will close itself. If it returns a true value, closing the -transport is up to the protocol. -""" def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... def _start_shutdown(self) -> None: ... 
if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi index beb5df707b4e9..938020b3a0f89 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/staggered.pyi @@ -1,5 +1,5 @@ -"""Support for running coroutines in parallel with staggered start times. -""" +"""Support for running coroutines in parallel with staggered start times.""" + from collections.abc import Awaitable, Callable, Iterable from typing import Any @@ -12,51 +12,51 @@ async def staggered_race( ) -> tuple[Any, int | None, list[Exception | None]]: """Run coroutines with staggered start times and take the first to finish. -This method takes an iterable of coroutine functions. The first one is -started immediately. From then on, whenever the immediately preceding one -fails (raises an exception), or when *delay* seconds has passed, the next -coroutine is started. This continues until one of the coroutines complete -successfully, in which case all others are cancelled, or until all -coroutines fail. + This method takes an iterable of coroutine functions. The first one is + started immediately. From then on, whenever the immediately preceding one + fails (raises an exception), or when *delay* seconds has passed, the next + coroutine is started. This continues until one of the coroutines complete + successfully, in which case all others are cancelled, or until all + coroutines fail. -The coroutines provided should be well-behaved in the following way: + The coroutines provided should be well-behaved in the following way: -* They should only ``return`` if completed successfully. + * They should only ``return`` if completed successfully. -* They should always raise an exception if they did not complete - successfully. In particular, if they handle cancellation, they should - probably reraise, like this:: + * They should always raise an exception if they did not complete + successfully. In particular, if they handle cancellation, they should + probably reraise, like this:: - try: - # do work - except asyncio.CancelledError: - # undo partially completed work - raise + try: + # do work + except asyncio.CancelledError: + # undo partially completed work + raise -Args: - coro_fns: an iterable of coroutine functions, i.e. callables that - return a coroutine object when called. Use ``functools.partial`` or - lambdas to pass arguments. + Args: + coro_fns: an iterable of coroutine functions, i.e. callables that + return a coroutine object when called. Use ``functools.partial`` or + lambdas to pass arguments. - delay: amount of time, in seconds, between starting coroutines. If - ``None``, the coroutines will run sequentially. + delay: amount of time, in seconds, between starting coroutines. If + ``None``, the coroutines will run sequentially. - loop: the event loop to use. + loop: the event loop to use. -Returns: - tuple *(winner_result, winner_index, exceptions)* where + Returns: + tuple *(winner_result, winner_index, exceptions)* where - - *winner_result*: the result of the winning coroutine, or ``None`` - if no coroutines won. + - *winner_result*: the result of the winning coroutine, or ``None`` + if no coroutines won. - - *winner_index*: the index of the winning coroutine in - ``coro_fns``, or ``None`` if no coroutines won. 
If the winning - coroutine may return None on success, *winner_index* can be used - to definitively determine whether any coroutine won. + - *winner_index*: the index of the winning coroutine in + ``coro_fns``, or ``None`` if no coroutines won. If the winning + coroutine may return None on success, *winner_index* can be used + to definitively determine whether any coroutine won. - - *exceptions*: list of exceptions returned by the coroutines. - ``len(exceptions)`` is equal to the number of coroutines actually - started, and the order is the same as in ``coro_fns``. The winning - coroutine's entry is ``None``. + - *exceptions*: list of exceptions returned by the coroutines. + ``len(exceptions)`` is equal to the number of coroutines actually + started, and the order is the same as in ``coro_fns``. The winning + coroutine's entry is ``None``. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi index 02cb9a9b7348a..968a5d08ee14d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/streams.pyi @@ -39,21 +39,22 @@ if sys.version_info >= (3, 10): ) -> tuple[StreamReader, StreamWriter]: """A wrapper for create_connection() returning a (reader, writer) pair. -The reader returned is a StreamReader instance; the writer is a -StreamWriter instance. + The reader returned is a StreamReader instance; the writer is a + StreamWriter instance. -The arguments are all the usual arguments to create_connection() -except protocol_factory; most common are positional host and port, -with various optional keyword arguments following. + The arguments are all the usual arguments to create_connection() + except protocol_factory; most common are positional host and port, + with various optional keyword arguments following. -Additional optional keyword arguments are loop (to set the event loop -instance to use) and limit (to set the buffer limit passed to the -StreamReader). + Additional optional keyword arguments are loop (to set the event loop + instance to use) and limit (to set the buffer limit passed to the + StreamReader). + + (If you want to customize the StreamReader and/or + StreamReaderProtocol classes, just copy the code -- there's + really nothing special here except some convenience.) + """ -(If you want to customize the StreamReader and/or -StreamReaderProtocol classes, just copy the code -- there's -really nothing special here except some convenience.) -""" async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | Sequence[str] | None = None, @@ -65,24 +66,24 @@ really nothing special here except some convenience.) ) -> Server: """Start a socket server, call back for each client connected. -The first parameter, `client_connected_cb`, takes two parameters: -client_reader, client_writer. client_reader is a StreamReader -object, while client_writer is a StreamWriter object. This -parameter can either be a plain callback function or a coroutine; -if it is a coroutine, it will be automatically converted into a -Task. + The first parameter, `client_connected_cb`, takes two parameters: + client_reader, client_writer. client_reader is a StreamReader + object, while client_writer is a StreamWriter object. This + parameter can either be a plain callback function or a coroutine; + if it is a coroutine, it will be automatically converted into a + Task. 
-The rest of the arguments are all the usual arguments to -loop.create_server() except protocol_factory; most common are -positional host and port, with various optional keyword arguments -following. The return value is the same as loop.create_server(). + The rest of the arguments are all the usual arguments to + loop.create_server() except protocol_factory; most common are + positional host and port, with various optional keyword arguments + following. The return value is the same as loop.create_server(). -Additional optional keyword argument is limit (to set the buffer -limit passed to the StreamReader). + Additional optional keyword argument is limit (to set the buffer + limit passed to the StreamReader). -The return value is the same as loop.create_server(), i.e. a -Server object which can be used to stop the service. -""" + The return value is the same as loop.create_server(), i.e. a + Server object which can be used to stop the service. + """ else: async def open_connection( @@ -96,21 +97,22 @@ else: ) -> tuple[StreamReader, StreamWriter]: """A wrapper for create_connection() returning a (reader, writer) pair. - The reader returned is a StreamReader instance; the writer is a - StreamWriter instance. + The reader returned is a StreamReader instance; the writer is a + StreamWriter instance. - The arguments are all the usual arguments to create_connection() - except protocol_factory; most common are positional host and port, - with various optional keyword arguments following. + The arguments are all the usual arguments to create_connection() + except protocol_factory; most common are positional host and port, + with various optional keyword arguments following. - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). + Additional optional keyword arguments are loop (to set the event loop + instance to use) and limit (to set the buffer limit passed to the + StreamReader). + + (If you want to customize the StreamReader and/or + StreamReaderProtocol classes, just copy the code -- there's + really nothing special here except some convenience.) + """ - (If you want to customize the StreamReader and/or - StreamReaderProtocol classes, just copy the code -- there's - really nothing special here except some convenience.) - """ async def start_server( client_connected_cb: _ClientConnectedCallback, host: str | None = None, @@ -123,44 +125,43 @@ else: ) -> Server: """Start a socket server, call back for each client connected. - The first parameter, `client_connected_cb`, takes two parameters: - client_reader, client_writer. client_reader is a StreamReader - object, while client_writer is a StreamWriter object. This - parameter can either be a plain callback function or a coroutine; - if it is a coroutine, it will be automatically converted into a - Task. + The first parameter, `client_connected_cb`, takes two parameters: + client_reader, client_writer. client_reader is a StreamReader + object, while client_writer is a StreamWriter object. This + parameter can either be a plain callback function or a coroutine; + if it is a coroutine, it will be automatically converted into a + Task. - The rest of the arguments are all the usual arguments to - loop.create_server() except protocol_factory; most common are - positional host and port, with various optional keyword arguments - following. The return value is the same as loop.create_server(). 
+ The rest of the arguments are all the usual arguments to + loop.create_server() except protocol_factory; most common are + positional host and port, with various optional keyword arguments + following. The return value is the same as loop.create_server(). - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). + Additional optional keyword arguments are loop (to set the event loop + instance to use) and limit (to set the buffer limit passed to the + StreamReader). - The return value is the same as loop.create_server(), i.e. a - Server object which can be used to stop the service. - """ + The return value is the same as loop.create_server(), i.e. a + Server object which can be used to stop the service. + """ if sys.platform != "win32": if sys.version_info >= (3, 10): async def open_unix_connection( path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: - """Similar to `open_connection` but works with UNIX Domain Sockets. -""" + """Similar to `open_connection` but works with UNIX Domain Sockets.""" + async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> Server: - """Similar to `start_server` but works with UNIX Domain Sockets. -""" + """Similar to `start_server` but works with UNIX Domain Sockets.""" else: async def open_unix_connection( path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: - """Similar to `open_connection` but works with UNIX Domain Sockets. -""" + """Similar to `open_connection` but works with UNIX Domain Sockets.""" + async def start_unix_server( client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, @@ -169,28 +170,29 @@ if sys.platform != "win32": limit: int = 65536, **kwds: Any, ) -> Server: - """Similar to `start_server` but works with UNIX Domain Sockets. -""" + """Similar to `start_server` but works with UNIX Domain Sockets.""" class FlowControlMixin(protocols.Protocol): """Reusable flow control logic for StreamWriter.drain(). -This implements the protocol methods pause_writing(), -resume_writing() and connection_lost(). If the subclass overrides -these it must call the super methods. + This implements the protocol methods pause_writing(), + resume_writing() and connection_lost(). If the subclass overrides + these it must call the super methods. + + StreamWriter.drain() must wait for _drain_helper() coroutine. + """ -StreamWriter.drain() must wait for _drain_helper() coroutine. -""" def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): """Helper class to adapt between Protocol and StreamReader. -(This is a helper class instead of making StreamReader itself a -Protocol subclass, because the StreamReader has other potential -uses, and to prevent the user of the StreamReader to accidentally -call inappropriate methods of the protocol.) -""" + (This is a helper class instead of making StreamReader itself a + Protocol subclass, because the StreamReader has other potential + uses, and to prevent the user of the StreamReader to accidentally + call inappropriate methods of the protocol.) + """ + def __init__( self, stream_reader: StreamReader, @@ -202,12 +204,13 @@ call inappropriate methods of the protocol.) 
class StreamWriter: """Wraps a Transport. -This exposes write(), writelines(), [can_]write_eof(), -get_extra_info() and close(). It adds drain() which returns an -optional Future on which you can wait for flow control. It also -adds a transport property which references the Transport -directly. -""" + This exposes write(), writelines(), [can_]write_eof(), + get_extra_info() and close(). It adds drain() which returns an + optional Future on which you can wait for flow control. It also + adds a transport property which references the Transport + directly. + """ + def __init__( self, transport: transports.WriteTransport, @@ -228,11 +231,11 @@ directly. async def drain(self) -> None: """Flush the write buffer. -The intended use is to write + The intended use is to write - w.write(data) - await w.drain() -""" + w.write(data) + await w.drain() + """ if sys.version_info >= (3, 12): async def start_tls( self, @@ -242,15 +245,12 @@ The intended use is to write ssl_handshake_timeout: float | None = None, ssl_shutdown_timeout: float | None = None, ) -> None: - """Upgrade an existing stream-based connection to TLS. -""" + """Upgrade an existing stream-based connection to TLS.""" elif sys.version_info >= (3, 11): async def start_tls( self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None ) -> None: - """Upgrade an existing stream-based connection to TLS. -""" - + """Upgrade an existing stream-based connection to TLS.""" if sys.version_info >= (3, 13): def __del__(self, warnings: ModuleType = ...) -> None: ... elif sys.version_info >= (3, 11): @@ -263,110 +263,112 @@ class StreamReader: def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: - """Return True if the buffer is empty and 'feed_eof' was called. -""" + """Return True if the buffer is empty and 'feed_eof' was called.""" + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... async def readline(self) -> bytes: """Read chunk of data from the stream until newline (b' -') is found. + ') is found. - On success, return chunk that ends with newline. If only partial - line can be read due to EOF, return incomplete line without - terminating newline. When EOF was reached while no bytes read, empty - bytes object is returned. + On success, return chunk that ends with newline. If only partial + line can be read due to EOF, return incomplete line without + terminating newline. When EOF was reached while no bytes read, empty + bytes object is returned. - If limit is reached, ValueError will be raised. In that case, if - newline was found, complete line including newline will be removed - from internal buffer. Else, internal buffer will be cleared. Limit is - compared against part of the line without newline. + If limit is reached, ValueError will be raised. In that case, if + newline was found, complete line including newline will be removed + from internal buffer. Else, internal buffer will be cleared. Limit is + compared against part of the line without newline. - If stream was paused, this function will automatically resume it if - needed. + If stream was paused, this function will automatically resume it if + needed. """ if sys.version_info >= (3, 13): async def readuntil(self, separator: _ReaduntilBuffer | tuple[_ReaduntilBuffer, ...] = b"\n") -> bytes: """Read data from the stream until ``separator`` is found. -On success, the data and separator will be removed from the -internal buffer (consumed). 
Returned data will include the -separator at the end. - -Configured stream limit is used to check result. Limit sets the -maximal length of data that can be returned, not counting the -separator. - -If an EOF occurs and the complete separator is still not found, -an IncompleteReadError exception will be raised, and the internal -buffer will be reset. The IncompleteReadError.partial attribute -may contain the separator partially. - -If the data cannot be read because of over limit, a -LimitOverrunError exception will be raised, and the data -will be left in the internal buffer, so it can be read again. - -The ``separator`` may also be a tuple of separators. In this -case the return value will be the shortest possible that has any -separator as the suffix. For the purposes of LimitOverrunError, -the shortest possible separator is considered to be the one that -matched. -""" + On success, the data and separator will be removed from the + internal buffer (consumed). Returned data will include the + separator at the end. + + Configured stream limit is used to check result. Limit sets the + maximal length of data that can be returned, not counting the + separator. + + If an EOF occurs and the complete separator is still not found, + an IncompleteReadError exception will be raised, and the internal + buffer will be reset. The IncompleteReadError.partial attribute + may contain the separator partially. + + If the data cannot be read because of over limit, a + LimitOverrunError exception will be raised, and the data + will be left in the internal buffer, so it can be read again. + + The ``separator`` may also be a tuple of separators. In this + case the return value will be the shortest possible that has any + separator as the suffix. For the purposes of LimitOverrunError, + the shortest possible separator is considered to be the one that + matched. + """ else: async def readuntil(self, separator: _ReaduntilBuffer = b"\n") -> bytes: """Read data from the stream until ``separator`` is found. - On success, the data and separator will be removed from the - internal buffer (consumed). Returned data will include the - separator at the end. + On success, the data and separator will be removed from the + internal buffer (consumed). Returned data will include the + separator at the end. - Configured stream limit is used to check result. Limit sets the - maximal length of data that can be returned, not counting the - separator. + Configured stream limit is used to check result. Limit sets the + maximal length of data that can be returned, not counting the + separator. - If an EOF occurs and the complete separator is still not found, - an IncompleteReadError exception will be raised, and the internal - buffer will be reset. The IncompleteReadError.partial attribute - may contain the separator partially. + If an EOF occurs and the complete separator is still not found, + an IncompleteReadError exception will be raised, and the internal + buffer will be reset. The IncompleteReadError.partial attribute + may contain the separator partially. - If the data cannot be read because of over limit, a - LimitOverrunError exception will be raised, and the data - will be left in the internal buffer, so it can be read again. - """ + If the data cannot be read because of over limit, a + LimitOverrunError exception will be raised, and the data + will be left in the internal buffer, so it can be read again. + """ async def read(self, n: int = -1) -> bytes: """Read up to `n` bytes from the stream. 
-If `n` is not provided or set to -1, -read until EOF, then return all read bytes. -If EOF was received and the internal buffer is empty, -return an empty bytes object. + If `n` is not provided or set to -1, + read until EOF, then return all read bytes. + If EOF was received and the internal buffer is empty, + return an empty bytes object. -If `n` is 0, return an empty bytes object immediately. + If `n` is 0, return an empty bytes object immediately. -If `n` is positive, return at most `n` available bytes -as soon as at least 1 byte is available in the internal buffer. -If EOF is received before any byte is read, return an empty -bytes object. + If `n` is positive, return at most `n` available bytes + as soon as at least 1 byte is available in the internal buffer. + If EOF is received before any byte is read, return an empty + bytes object. -Returned value is not limited with limit, configured at stream -creation. + Returned value is not limited with limit, configured at stream + creation. + + If stream was paused, this function will automatically resume it if + needed. + """ -If stream was paused, this function will automatically resume it if -needed. -""" async def readexactly(self, n: int) -> bytes: """Read exactly `n` bytes. -Raise an IncompleteReadError if EOF is reached before `n` bytes can be -read. The IncompleteReadError.partial attribute of the exception will -contain the partial read bytes. + Raise an IncompleteReadError if EOF is reached before `n` bytes can be + read. The IncompleteReadError.partial attribute of the exception will + contain the partial read bytes. + + if n is zero, return empty bytes object. -if n is zero, return empty bytes object. + Returned value is not limited with limit, configured at stream + creation. -Returned value is not limited with limit, configured at stream -creation. + If stream was paused, this function will automatically resume it if + needed. + """ -If stream was paused, this function will automatically resume it if -needed. -""" def __aiter__(self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi index 4b1dc0e066f58..b82f0ba4ea3a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/subprocess.pyi @@ -13,8 +13,8 @@ STDOUT: int DEVNULL: int class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): - """Like StreamReaderProtocol, but for a subprocess. -""" + """Like StreamReaderProtocol, but for a subprocess.""" + stdin: streams.StreamWriter | None stdout: streams.StreamReader | None stderr: streams.StreamReader | None @@ -32,8 +32,8 @@ class Process: @property def returncode(self) -> int | None: ... async def wait(self) -> int: - """Wait until the process exit and return the process return code. -""" + """Wait until the process exit and return the process return code.""" + def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... 
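Not part of the vendored patch itself: a minimal Python sketch of the Process.wait() behaviour the asyncio.subprocess stubs above describe, assuming a POSIX environment where a `sleep` executable is available.

    import asyncio

    async def main() -> None:
        # create_subprocess_exec() returns the Process wrapper typed above.
        proc = await asyncio.create_subprocess_exec("sleep", "1")
        # Process.wait(): block until the child exits, then return its code.
        returncode = await proc.wait()
        print("child exited with", returncode)

    asyncio.run(main())
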
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi index 3076e17d865ad..fe720da9ab77e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/taskgroups.pyi @@ -19,19 +19,20 @@ _T = TypeVar("_T") class TaskGroup: """Asynchronous context manager for managing groups of tasks. -Example use: + Example use: - async with asyncio.TaskGroup() as group: - task1 = group.create_task(some_coroutine(...)) - task2 = group.create_task(other_coroutine(...)) - print("Both tasks have completed now.") + async with asyncio.TaskGroup() as group: + task1 = group.create_task(some_coroutine(...)) + task2 = group.create_task(other_coroutine(...)) + print("Both tasks have completed now.") -All tasks are awaited when the context manager exits. + All tasks are awaited when the context manager exits. + + Any exceptions other than `asyncio.CancelledError` raised within + a task will cancel all remaining tasks and wait for them to exit. + The exceptions are then combined and raised as an `ExceptionGroup`. + """ -Any exceptions other than `asyncio.CancelledError` raised within -a task will cancel all remaining tasks and wait for them to exit. -The exceptions are then combined and raised as an `ExceptionGroup`. -""" _loop: AbstractEventLoop | None _tasks: set[Task[Any]] @@ -40,6 +41,7 @@ The exceptions are then combined and raised as an `ExceptionGroup`. def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: """Create a new task in this group and return it. -Similar to `asyncio.create_task`. -""" + Similar to `asyncio.create_task`. + """ + def _on_task_done(self, task: Task[object]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi index 8b2b6eb0ae56b..61563b419a9cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tasks.pyi @@ -1,5 +1,5 @@ -"""Support for tasks, coroutines and the scheduler. -""" +"""Support for tasks, coroutines and the scheduler.""" + import concurrent.futures import sys from _asyncio import ( @@ -95,69 +95,69 @@ if sys.version_info >= (3, 13): def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> _SyncAndAsyncIterator[Future[_T]]: """Create an iterator of awaitables or their results in completion order. -Run the supplied awaitables concurrently. The returned object can be -iterated to obtain the results of the awaitables as they finish. + Run the supplied awaitables concurrently. The returned object can be + iterated to obtain the results of the awaitables as they finish. -The object returned can be iterated as an asynchronous iterator or a plain -iterator. When asynchronous iteration is used, the originally-supplied -awaitables are yielded if they are tasks or futures. This makes it easy to -correlate previously-scheduled tasks with their results: + The object returned can be iterated as an asynchronous iterator or a plain + iterator. When asynchronous iteration is used, the originally-supplied + awaitables are yielded if they are tasks or futures. 
This makes it easy to + correlate previously-scheduled tasks with their results: - ipv4_connect = create_task(open_connection("127.0.0.1", 80)) - ipv6_connect = create_task(open_connection("::1", 80)) - tasks = [ipv4_connect, ipv6_connect] + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] - async for earliest_connect in as_completed(tasks): - # earliest_connect is done. The result can be obtained by - # awaiting it or calling earliest_connect.result() - reader, writer = await earliest_connect + async for earliest_connect in as_completed(tasks): + # earliest_connect is done. The result can be obtained by + # awaiting it or calling earliest_connect.result() + reader, writer = await earliest_connect - if earliest_connect is ipv6_connect: - print("IPv6 connection established.") - else: - print("IPv4 connection established.") + if earliest_connect is ipv6_connect: + print("IPv6 connection established.") + else: + print("IPv4 connection established.") -During asynchronous iteration, implicitly-created tasks will be yielded for -supplied awaitables that aren't tasks or futures. + During asynchronous iteration, implicitly-created tasks will be yielded for + supplied awaitables that aren't tasks or futures. -When used as a plain iterator, each iteration yields a new coroutine that -returns the result or raises the exception of the next completed awaitable. -This pattern is compatible with Python versions older than 3.13: + When used as a plain iterator, each iteration yields a new coroutine that + returns the result or raises the exception of the next completed awaitable. + This pattern is compatible with Python versions older than 3.13: - ipv4_connect = create_task(open_connection("127.0.0.1", 80)) - ipv6_connect = create_task(open_connection("::1", 80)) - tasks = [ipv4_connect, ipv6_connect] + ipv4_connect = create_task(open_connection("127.0.0.1", 80)) + ipv6_connect = create_task(open_connection("::1", 80)) + tasks = [ipv4_connect, ipv6_connect] - for next_connect in as_completed(tasks): - # next_connect is not one of the original task objects. It must be - # awaited to obtain the result value or raise the exception of the - # awaitable that finishes next. - reader, writer = await next_connect + for next_connect in as_completed(tasks): + # next_connect is not one of the original task objects. It must be + # awaited to obtain the result value or raise the exception of the + # awaitable that finishes next. + reader, writer = await next_connect -A TimeoutError is raised if the timeout occurs before all awaitables are -done. This is raised by the async for loop during asynchronous iteration or -by the coroutines yielded during plain iteration. -""" + A TimeoutError is raised if the timeout occurs before all awaitables are + done. This is raised by the async for loop during asynchronous iteration or + by the coroutines yielded during plain iteration. + """ elif sys.version_info >= (3, 10): def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: """Return an iterator whose values are coroutines. - When waiting for the yielded coroutines you'll get the results (or - exceptions!) of the original Futures (or coroutines), in the order - in which and as soon as they complete. + When waiting for the yielded coroutines you'll get the results (or + exceptions!) 
of the original Futures (or coroutines), in the order + in which and as soon as they complete. - This differs from PEP 3148; the proper way to use this is: + This differs from PEP 3148; the proper way to use this is: - for f in as_completed(fs): - result = await f # The 'await' may raise. - # Use result. + for f in as_completed(fs): + result = await f # The 'await' may raise. + # Use result. - If a timeout is specified, the 'await' will raise - TimeoutError when the timeout occurs before all Futures are done. + If a timeout is specified, the 'await' will raise + TimeoutError when the timeout occurs before all Futures are done. - Note: The futures 'f' are not necessarily members of fs. - """ + Note: The futures 'f' are not necessarily members of fs. + """ else: def as_completed( @@ -165,28 +165,29 @@ else: ) -> Iterator[Future[_T]]: """Return an iterator whose values are coroutines. - When waiting for the yielded coroutines you'll get the results (or - exceptions!) of the original Futures (or coroutines), in the order - in which and as soon as they complete. + When waiting for the yielded coroutines you'll get the results (or + exceptions!) of the original Futures (or coroutines), in the order + in which and as soon as they complete. - This differs from PEP 3148; the proper way to use this is: + This differs from PEP 3148; the proper way to use this is: - for f in as_completed(fs): - result = await f # The 'await' may raise. - # Use result. + for f in as_completed(fs): + result = await f # The 'await' may raise. + # Use result. - If a timeout is specified, the 'await' will raise - TimeoutError when the timeout occurs before all Futures are done. + If a timeout is specified, the 'await' will raise + TimeoutError when the timeout occurs before all Futures are done. - Note: The futures 'f' are not necessarily members of fs. - """ + Note: The futures 'f' are not necessarily members of fs. + """ @overload def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: # type: ignore[overload-overlap] """Wrap a coroutine or an awaitable in a future. -If the argument is a Future, it is returned directly. -""" + If the argument is a Future, it is returned directly. + """ + @overload def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... @@ -201,33 +202,34 @@ if sys.version_info >= (3, 10): def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: # type: ignore[overload-overlap] """Return a future aggregating results from the given coroutines/futures. -Coroutines will be wrapped in a future and scheduled in the event -loop. They will not necessarily be scheduled in the same order as -passed in. - -All futures must share the same event loop. If all the tasks are -done successfully, the returned future's result is the list of -results (in the order of the original sequence, not necessarily -the order of results arrival). If *return_exceptions* is True, -exceptions in the tasks are treated the same as successful -results, and gathered in the result list; otherwise, the first -raised exception will be immediately propagated to the returned -future. - -Cancellation: if the outer Future is cancelled, all children (that -have not completed yet) are also cancelled. If any child is -cancelled, this is treated as if it raised CancelledError -- -the outer Future is *not* cancelled in this case. 
(This is to -prevent the cancellation of one child to cause other children to -be cancelled.) - -If *return_exceptions* is False, cancelling gather() after it -has been marked done won't cancel any submitted awaitables. -For instance, gather can be marked done after propagating an -exception to the caller, therefore, calling ``gather.cancel()`` -after catching an exception (raised by one of the awaitables) from -gather won't cancel any other awaitables. -""" + Coroutines will be wrapped in a future and scheduled in the event + loop. They will not necessarily be scheduled in the same order as + passed in. + + All futures must share the same event loop. If all the tasks are + done successfully, the returned future's result is the list of + results (in the order of the original sequence, not necessarily + the order of results arrival). If *return_exceptions* is True, + exceptions in the tasks are treated the same as successful + results, and gathered in the result list; otherwise, the first + raised exception will be immediately propagated to the returned + future. + + Cancellation: if the outer Future is cancelled, all children (that + have not completed yet) are also cancelled. If any child is + cancelled, this is treated as if it raised CancelledError -- + the outer Future is *not* cancelled in this case. (This is to + prevent the cancellation of one child to cause other children to + be cancelled.) + + If *return_exceptions* is False, cancelling gather() after it + has been marked done won't cancel any submitted awaitables. + For instance, gather can be marked done after propagating an + exception to the caller, therefore, calling ``gather.cancel()`` + after catching an exception (raised by one of the awaitables) from + gather won't cancel any other awaitables. + """ + @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: Literal[False] = False @@ -345,33 +347,34 @@ else: ) -> Future[tuple[_T1]]: """Return a future aggregating results from the given coroutines/futures. - Coroutines will be wrapped in a future and scheduled in the event - loop. They will not necessarily be scheduled in the same order as - passed in. - - All futures must share the same event loop. If all the tasks are - done successfully, the returned future's result is the list of - results (in the order of the original sequence, not necessarily - the order of results arrival). If *return_exceptions* is True, - exceptions in the tasks are treated the same as successful - results, and gathered in the result list; otherwise, the first - raised exception will be immediately propagated to the returned - future. - - Cancellation: if the outer Future is cancelled, all children (that - have not completed yet) are also cancelled. If any child is - cancelled, this is treated as if it raised CancelledError -- - the outer Future is *not* cancelled in this case. (This is to - prevent the cancellation of one child to cause other children to - be cancelled.) - - If *return_exceptions* is False, cancelling gather() after it - has been marked done won't cancel any submitted awaitables. - For instance, gather can be marked done after propagating an - exception to the caller, therefore, calling ``gather.cancel()`` - after catching an exception (raised by one of the awaitables) from - gather won't cancel any other awaitables. - """ + Coroutines will be wrapped in a future and scheduled in the event + loop. 
They will not necessarily be scheduled in the same order as + passed in. + + All futures must share the same event loop. If all the tasks are + done successfully, the returned future's result is the list of + results (in the order of the original sequence, not necessarily + the order of results arrival). If *return_exceptions* is True, + exceptions in the tasks are treated the same as successful + results, and gathered in the result list; otherwise, the first + raised exception will be immediately propagated to the returned + future. + + Cancellation: if the outer Future is cancelled, all children (that + have not completed yet) are also cancelled. If any child is + cancelled, this is treated as if it raised CancelledError -- + the outer Future is *not* cancelled in this case. (This is to + prevent the cancellation of one child to cause other children to + be cancelled.) + + If *return_exceptions* is False, cancelling gather() after it + has been marked done won't cancel any submitted awaitables. + For instance, gather can be marked done after propagating an + exception to the caller, therefore, calling ``gather.cancel()`` + after catching an exception (raised by one of the awaitables) from + gather won't cancel any other awaitables. + """ + @overload def gather( # type: ignore[overload-overlap] coro_or_future1: _FutureLike[_T1], @@ -496,112 +499,114 @@ else: def run_coroutine_threadsafe(coro: Coroutine[Any, Any, _T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: """Submit a coroutine object to a given event loop. -Return a concurrent.futures.Future to access the result. -""" + Return a concurrent.futures.Future to access the result. + """ if sys.version_info >= (3, 10): def shield(arg: _FutureLike[_T]) -> Future[_T]: """Wait for a future, shielding it from cancellation. -The statement + The statement - task = asyncio.create_task(something()) - res = await shield(task) + task = asyncio.create_task(something()) + res = await shield(task) -is exactly equivalent to the statement + is exactly equivalent to the statement - res = await something() + res = await something() -*except* that if the coroutine containing it is cancelled, the -task running in something() is not cancelled. From the POV of -something(), the cancellation did not happen. But its caller is -still cancelled, so the yield-from expression still raises -CancelledError. Note: If something() is cancelled by other means -this will still cancel shield(). + *except* that if the coroutine containing it is cancelled, the + task running in something() is not cancelled. From the POV of + something(), the cancellation did not happen. But its caller is + still cancelled, so the yield-from expression still raises + CancelledError. Note: If something() is cancelled by other means + this will still cancel shield(). -If you want to completely ignore cancellation (not recommended) -you can combine shield() with a try/except clause, as follows: + If you want to completely ignore cancellation (not recommended) + you can combine shield() with a try/except clause, as follows: - task = asyncio.create_task(something()) - try: - res = await shield(task) - except CancelledError: - res = None + task = asyncio.create_task(something()) + try: + res = await shield(task) + except CancelledError: + res = None + + Save a reference to tasks passed to this function, to avoid + a task disappearing mid-execution. The event loop only keeps + weak references to tasks. 
A task that isn't referenced elsewhere + may get garbage collected at any time, even before it's done. + """ -Save a reference to tasks passed to this function, to avoid -a task disappearing mid-execution. The event loop only keeps -weak references to tasks. A task that isn't referenced elsewhere -may get garbage collected at any time, even before it's done. -""" @overload async def sleep(delay: float) -> None: - """Coroutine that completes after a given time (in seconds). -""" + """Coroutine that completes after a given time (in seconds).""" + @overload async def sleep(delay: float, result: _T) -> _T: ... async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: """Wait for the single Future or coroutine to complete, with timeout. -Coroutine will be wrapped in Task. + Coroutine will be wrapped in Task. -Returns result of the Future or coroutine. When a timeout occurs, -it cancels the task and raises TimeoutError. To avoid the task -cancellation, wrap it in shield(). + Returns result of the Future or coroutine. When a timeout occurs, + it cancels the task and raises TimeoutError. To avoid the task + cancellation, wrap it in shield(). -If the wait is cancelled, the task is also cancelled. + If the wait is cancelled, the task is also cancelled. -If the task suppresses the cancellation and returns a value instead, -that value is returned. + If the task suppresses the cancellation and returns a value instead, + that value is returned. -This function is a coroutine. -""" + This function is a coroutine. + """ else: def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: """Wait for a future, shielding it from cancellation. - The statement + The statement - res = await shield(something()) + res = await shield(something()) - is exactly equivalent to the statement + is exactly equivalent to the statement - res = await something() + res = await something() - *except* that if the coroutine containing it is cancelled, the - task running in something() is not cancelled. From the POV of - something(), the cancellation did not happen. But its caller is - still cancelled, so the yield-from expression still raises - CancelledError. Note: If something() is cancelled by other means - this will still cancel shield(). + *except* that if the coroutine containing it is cancelled, the + task running in something() is not cancelled. From the POV of + something(), the cancellation did not happen. But its caller is + still cancelled, so the yield-from expression still raises + CancelledError. Note: If something() is cancelled by other means + this will still cancel shield(). - If you want to completely ignore cancellation (not recommended) - you can combine shield() with a try/except clause, as follows: + If you want to completely ignore cancellation (not recommended) + you can combine shield() with a try/except clause, as follows: + + try: + res = await shield(something()) + except CancelledError: + res = None + """ - try: - res = await shield(something()) - except CancelledError: - res = None - """ @overload async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: - """Coroutine that completes after a given time (in seconds). -""" + """Coroutine that completes after a given time (in seconds).""" + @overload async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... 
async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: """Wait for the single Future or coroutine to complete, with timeout. - Coroutine will be wrapped in Task. + Coroutine will be wrapped in Task. - Returns result of the Future or coroutine. When a timeout occurs, - it cancels the task and raises TimeoutError. To avoid the task - cancellation, wrap it in shield(). + Returns result of the Future or coroutine. When a timeout occurs, + it cancels the task and raises TimeoutError. To avoid the task + cancellation, wrap it in shield(). - If the wait is cancelled, the task is also cancelled. + If the wait is cancelled, the task is also cancelled. - This function is a coroutine. - """ + This function is a coroutine. + """ if sys.version_info >= (3, 11): @overload @@ -610,17 +615,18 @@ if sys.version_info >= (3, 11): ) -> tuple[set[_FT], set[_FT]]: """Wait for the Futures or Tasks given by fs to complete. -The fs iterable must not be empty. + The fs iterable must not be empty. + + Returns two sets of Future: (done, pending). -Returns two sets of Future: (done, pending). + Usage: -Usage: + done, pending = await asyncio.wait(fs) - done, pending = await asyncio.wait(fs) + Note: This does not raise TimeoutError! Futures that aren't done + when the timeout occurs are returned in the second set. + """ -Note: This does not raise TimeoutError! Futures that aren't done -when the timeout occurs are returned in the second set. -""" @overload async def wait( fs: Iterable[Task[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -633,19 +639,20 @@ elif sys.version_info >= (3, 10): ) -> tuple[set[_FT], set[_FT]]: """Wait for the Futures and coroutines given by fs to complete. - The fs iterable must not be empty. + The fs iterable must not be empty. - Coroutines will be wrapped in Tasks. + Coroutines will be wrapped in Tasks. - Returns two sets of Future: (done, pending). + Returns two sets of Future: (done, pending). - Usage: + Usage: - done, pending = await asyncio.wait(fs) + done, pending = await asyncio.wait(fs) + + Note: This does not raise TimeoutError! Futures that aren't done + when the timeout occurs are returned in the second set. + """ - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ @overload async def wait( fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" @@ -662,19 +669,20 @@ else: ) -> tuple[set[_FT], set[_FT]]: """Wait for the Futures and coroutines given by fs to complete. - The fs iterable must not be empty. + The fs iterable must not be empty. - Coroutines will be wrapped in Tasks. + Coroutines will be wrapped in Tasks. - Returns two sets of Future: (done, pending). + Returns two sets of Future: (done, pending). - Usage: + Usage: - done, pending = await asyncio.wait(fs) + done, pending = await asyncio.wait(fs) + + Note: This does not raise TimeoutError! Futures that aren't done + when the timeout occurs are returned in the second set. + """ - Note: This does not raise TimeoutError! Futures that aren't done - when the timeout occurs are returned in the second set. - """ @overload async def wait( fs: Iterable[Awaitable[_T]], @@ -690,29 +698,27 @@ else: _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Coroutine[Any, Any, _T_co] def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: - """Return a set of all tasks for the loop. 
-""" + """Return a set of all tasks for the loop.""" if sys.version_info >= (3, 11): def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: """Schedule the execution of a coroutine object in a spawn task. -Return a Task object. -""" + Return a Task object. + """ else: def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: """Schedule the execution of a coroutine object in a spawn task. - Return a Task object. - """ + Return a Task object. + """ if sys.version_info >= (3, 12): from _asyncio import current_task as current_task else: def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: - """Return a currently executed task. -""" + """Return a currently executed task.""" if sys.version_info >= (3, 14): def eager_task_factory( @@ -760,23 +766,21 @@ if sys.version_info >= (3, 12): context: Context | None = None, ) -> _TaskT_co: ... - def create_eager_task_factory( - custom_task_constructor: _CustomTaskConstructor[_TaskT_co], - ) -> _EagerTaskFactoryType[_TaskT_co]: + def create_eager_task_factory(custom_task_constructor: _CustomTaskConstructor[_TaskT_co]) -> _EagerTaskFactoryType[_TaskT_co]: """Create a function suitable for use as a task factory on an event-loop. -Example usage: + Example usage: - loop.set_task_factory( - asyncio.create_eager_task_factory(my_task_constructor)) + loop.set_task_factory( + asyncio.create_eager_task_factory(my_task_constructor)) -Now, tasks created will be started immediately (rather than being first -scheduled to an event loop). The constructor argument can be any callable -that returns a Task-compatible object and has a signature compatible -with `Task.__init__`; it must have the `eager_start` keyword argument. + Now, tasks created will be started immediately (rather than being first + scheduled to an event loop). The constructor argument can be any callable + that returns a Task-compatible object and has a signature compatible + with `Task.__init__`; it must have the `eager_start` keyword argument. -Most applications will use `Task` for `custom_task_constructor` and in -this case there's no need to call `create_eager_task_factory()` -directly. Instead the global `eager_task_factory` instance can be -used. E.g. `loop.set_task_factory(asyncio.eager_task_factory)`. -""" + Most applications will use `Task` for `custom_task_constructor` and in + this case there's no need to call `create_eager_task_factory()` + directly. Instead the global `eager_task_factory` instance can be + used. E.g. `loop.set_task_factory(asyncio.eager_task_factory)`. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi index 5f0ce23352905..6fd5104a57c29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/threads.pyi @@ -1,5 +1,5 @@ -"""High-level support for working with threads in asyncio -""" +"""High-level support for working with threads in asyncio""" + from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -12,10 +12,10 @@ _R = TypeVar("_R") async def to_thread(func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: """Asynchronously run function *func* in a separate thread. -Any *args and **kwargs supplied for this function are directly passed -to *func*. 
Also, the current :class:`contextvars.Context` is propagated, -allowing context variables from the main thread to be accessed in the -separate thread. + Any *args and **kwargs supplied for this function are directly passed + to *func*. Also, the current :class:`contextvars.Context` is propagated, + allowing context variables from the main thread to be accessed in the + separate thread. -Return a coroutine that can be awaited to get the eventual result of *func*. -""" + Return a coroutine that can be awaited to get the eventual result of *func*. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi index 826010a8b18c7..06d2eac5b0e29 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/timeouts.pyi @@ -9,24 +9,26 @@ __all__ = ("Timeout", "timeout", "timeout_at") class Timeout: """Asynchronous context manager for cancelling overdue coroutines. -Use `timeout()` or `timeout_at()` rather than instantiating this class directly. -""" + Use `timeout()` or `timeout_at()` rather than instantiating this class directly. + """ + def __init__(self, when: float | None) -> None: """Schedule a timeout that will trigger at a given loop time. -- If `when` is `None`, the timeout will never trigger. -- If `when < loop.time()`, the timeout will trigger on the next - iteration of the event loop. -""" + - If `when` is `None`, the timeout will never trigger. + - If `when < loop.time()`, the timeout will trigger on the next + iteration of the event loop. + """ + def when(self) -> float | None: - """Return the current deadline. -""" + """Return the current deadline.""" + def reschedule(self, when: float | None) -> None: - """Reschedule the timeout. -""" + """Reschedule the timeout.""" + def expired(self) -> bool: - """Is timeout expired during execution? -""" + """Is timeout expired during execution?""" + async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -35,35 +37,36 @@ Use `timeout()` or `timeout_at()` rather than instantiating this class directly. def timeout(delay: float | None) -> Timeout: """Timeout async context manager. -Useful in cases when you want to apply timeout logic around block -of code or in cases when asyncio.wait_for is not suitable. For example: + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> async with asyncio.timeout(10): # 10 seconds timeout + ... await long_running_task() ->>> async with asyncio.timeout(10): # 10 seconds timeout -... await long_running_task() + delay - value in seconds or None to disable timeout logic -delay - value in seconds or None to disable timeout logic + long_running_task() is interrupted by raising asyncio.CancelledError, + the top-most affected timeout() context manager converts CancelledError + into TimeoutError. + """ -long_running_task() is interrupted by raising asyncio.CancelledError, -the top-most affected timeout() context manager converts CancelledError -into TimeoutError. -""" def timeout_at(when: float | None) -> Timeout: """Schedule the timeout at absolute time. -Like timeout() but argument gives absolute time in the same clock system -as loop.time(). + Like timeout() but argument gives absolute time in the same clock system + as loop.time(). 
-Please note: it is not POSIX time but a time with -undefined starting base, e.g. the time of the system power on. + Please note: it is not POSIX time but a time with + undefined starting base, e.g. the time of the system power on. ->>> async with asyncio.timeout_at(loop.time() + 10): -... await long_running_task() + >>> async with asyncio.timeout_at(loop.time() + 10): + ... await long_running_task() -when - a deadline when timeout occurs or None to disable timeout logic + when - a deadline when timeout occurs or None to disable timeout logic -long_running_task() is interrupted by raising asyncio.CancelledError, -the top-most affected timeout() context manager converts CancelledError -into TimeoutError. -""" + long_running_task() is interrupted by raising asyncio.CancelledError, + the top-most affected timeout() context manager converts CancelledError + into TimeoutError. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi index 223abc068ce75..e74fd30eb5038 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/tools.pyi @@ -1,5 +1,5 @@ -"""Tools to analyze tasks running in asyncio programs. -""" +"""Tools to analyze tasks running in asyncio programs.""" + import sys from collections.abc import Iterable from enum import Enum @@ -33,8 +33,8 @@ class NodeType(Enum): TASK = 2 class CycleFoundException(Exception): - """Raised when there is a cycle when drawing the call tree. -""" + """Raised when there is a cycle when drawing the call tree.""" + cycles: list[list[int]] id2name: dict[int, str] def __init__(self, cycles: list[list[int]], id2name: dict[int, str]) -> None: ... @@ -42,24 +42,24 @@ class CycleFoundException(Exception): def get_all_awaited_by(pid: SupportsIndex) -> list[_AwaitedInfo]: ... def build_async_tree(result: Iterable[_AwaitedInfo], task_emoji: str = "(T)", cor_emoji: str = "") -> list[list[str]]: """ -Build a list of strings for pretty-print an async call tree. + Build a list of strings for pretty-print an async call tree. + + The call tree is produced by `get_all_async_stacks()`, prefixing tasks + with `task_emoji` and coroutine frames with `cor_emoji`. + """ -The call tree is produced by `get_all_async_stacks()`, prefixing tasks -with `task_emoji` and coroutine frames with `cor_emoji`. -""" def build_task_table(result: Iterable[_AwaitedInfo]) -> list[list[int | str]]: ... if sys.version_info >= (3, 14): def exit_with_permission_help_text() -> None: """ -Prints a message pointing to platform-specific permission help text and exits the program. -This function is called when a PermissionError is encountered while trying -to attach to a process. -""" + Prints a message pointing to platform-specific permission help text and exits the program. + This function is called when a PermissionError is encountered while trying + to attach to a process. + """ def display_awaited_by_tasks_table(pid: SupportsIndex) -> None: - """Build and print a table of all pending tasks under `pid`. -""" + """Build and print a table of all pending tasks under `pid`.""" + def display_awaited_by_tasks_tree(pid: SupportsIndex) -> None: - """Build and print a tree of all pending tasks under `pid`. 
-""" + """Build and print a tree of all pending tasks under `pid`.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi index 464911c5fe3d0..5b2f7d12489c2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/transports.pyi @@ -1,5 +1,5 @@ -"""Abstract Transport class. -""" +"""Abstract Transport class.""" + from asyncio.events import AbstractEventLoop from asyncio.protocols import BaseProtocol from collections.abc import Iterable, Mapping @@ -10,215 +10,225 @@ from typing import Any __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") class BaseTransport: - """Base class for transports. -""" + """Base class for transports.""" + __slots__ = ("_extra",) def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... def get_extra_info(self, name: str, default: Any = None) -> Any: - """Get optional transport information. -""" + """Get optional transport information.""" + def is_closing(self) -> bool: - """Return True if the transport is closing or closed. -""" + """Return True if the transport is closing or closed.""" + def close(self) -> None: """Close the transport. -Buffered data will be flushed asynchronously. No more data -will be received. After all buffered data is flushed, the -protocol's connection_lost() method will (eventually) be -called with None as its argument. -""" + Buffered data will be flushed asynchronously. No more data + will be received. After all buffered data is flushed, the + protocol's connection_lost() method will (eventually) be + called with None as its argument. + """ + def set_protocol(self, protocol: BaseProtocol) -> None: - """Set a new protocol. -""" + """Set a new protocol.""" + def get_protocol(self) -> BaseProtocol: - """Return the current protocol. -""" + """Return the current protocol.""" class ReadTransport(BaseTransport): - """Interface for read-only transports. -""" + """Interface for read-only transports.""" + __slots__ = () def is_reading(self) -> bool: - """Return True if the transport is receiving. -""" + """Return True if the transport is receiving.""" + def pause_reading(self) -> None: """Pause the receiving end. -No data will be passed to the protocol's data_received() -method until resume_reading() is called. -""" + No data will be passed to the protocol's data_received() + method until resume_reading() is called. + """ + def resume_reading(self) -> None: """Resume the receiving end. -Data received will once again be passed to the protocol's -data_received() method. -""" + Data received will once again be passed to the protocol's + data_received() method. + """ class WriteTransport(BaseTransport): - """Interface for write-only transports. -""" + """Interface for write-only transports.""" + __slots__ = () def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: """Set the high- and low-water limits for write flow control. -These two values control when to call the protocol's -pause_writing() and resume_writing() methods. If specified, -the low-water limit must be less than or equal to the -high-water limit. Neither value can be negative. - -The defaults are implementation-specific. If only the -high-water limit is given, the low-water limit defaults to an -implementation-specific value less than or equal to the -high-water limit. 
Setting high to zero forces low to zero as -well, and causes pause_writing() to be called whenever the -buffer becomes non-empty. Setting low to zero causes -resume_writing() to be called only once the buffer is empty. -Use of zero for either limit is generally sub-optimal as it -reduces opportunities for doing I/O and computation -concurrently. -""" + These two values control when to call the protocol's + pause_writing() and resume_writing() methods. If specified, + the low-water limit must be less than or equal to the + high-water limit. Neither value can be negative. + + The defaults are implementation-specific. If only the + high-water limit is given, the low-water limit defaults to an + implementation-specific value less than or equal to the + high-water limit. Setting high to zero forces low to zero as + well, and causes pause_writing() to be called whenever the + buffer becomes non-empty. Setting low to zero causes + resume_writing() to be called only once the buffer is empty. + Use of zero for either limit is generally sub-optimal as it + reduces opportunities for doing I/O and computation + concurrently. + """ + def get_write_buffer_size(self) -> int: - """Return the current size of the write buffer. -""" + """Return the current size of the write buffer.""" + def get_write_buffer_limits(self) -> tuple[int, int]: """Get the high and low watermarks for write flow control. -Return a tuple (low, high) where low and high are -positive number of bytes. -""" + Return a tuple (low, high) where low and high are + positive number of bytes. + """ + def write(self, data: bytes | bytearray | memoryview[Any]) -> None: # any memoryview format or shape """Write some data bytes to the transport. -This does not block; it buffers the data and arranges for it -to be sent out asynchronously. -""" - def writelines( - self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]] - ) -> None: # any memoryview format or shape + This does not block; it buffers the data and arranges for it + to be sent out asynchronously. + """ + + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview[Any]]) -> None: # any memoryview format or shape """Write a list (or any iterable) of data bytes to the transport. -The default implementation concatenates the arguments and -calls write() on the result. -""" + The default implementation concatenates the arguments and + calls write() on the result. + """ + def write_eof(self) -> None: """Close the write end after flushing buffered data. -(This is like typing ^D into a UNIX program reading from stdin.) + (This is like typing ^D into a UNIX program reading from stdin.) + + Data may still be received. + """ -Data may still be received. -""" def can_write_eof(self) -> bool: - """Return True if this transport supports write_eof(), False if not. -""" + """Return True if this transport supports write_eof(), False if not.""" + def abort(self) -> None: """Close the transport immediately. -Buffered data will be lost. No more data will be received. -The protocol's connection_lost() method will (eventually) be -called with None as its argument. -""" + Buffered data will be lost. No more data will be received. + The protocol's connection_lost() method will (eventually) be + called with None as its argument. + """ class Transport(ReadTransport, WriteTransport): """Interface representing a bidirectional transport. 
-There may be several implementations, but typically, the user does -not implement new transports; rather, the platform provides some -useful transports that are implemented using the platform's best -practices. + There may be several implementations, but typically, the user does + not implement new transports; rather, the platform provides some + useful transports that are implemented using the platform's best + practices. -The user never instantiates a transport directly; they call a -utility function, passing it a protocol factory and other -information necessary to create the transport and protocol. (E.g. -EventLoop.create_connection() or EventLoop.create_server().) + The user never instantiates a transport directly; they call a + utility function, passing it a protocol factory and other + information necessary to create the transport and protocol. (E.g. + EventLoop.create_connection() or EventLoop.create_server().) -The utility function will asynchronously create a transport and a -protocol and hook them up by calling the protocol's -connection_made() method, passing it the transport. + The utility function will asynchronously create a transport and a + protocol and hook them up by calling the protocol's + connection_made() method, passing it the transport. + + The implementation here raises NotImplemented for every method + except writelines(), which calls write() in a loop. + """ -The implementation here raises NotImplemented for every method -except writelines(), which calls write() in a loop. -""" __slots__ = () class DatagramTransport(BaseTransport): - """Interface for datagram (UDP) transports. -""" + """Interface for datagram (UDP) transports.""" + __slots__ = () def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: """Send data to the transport. -This does not block; it buffers the data and arranges for it -to be sent out asynchronously. -addr is target socket address. -If addr is None use target address pointed on transport creation. -If data is an empty bytes object a zero-length datagram will be -sent. -""" + This does not block; it buffers the data and arranges for it + to be sent out asynchronously. + addr is target socket address. + If addr is None use target address pointed on transport creation. + If data is an empty bytes object a zero-length datagram will be + sent. + """ + def abort(self) -> None: """Close the transport immediately. -Buffered data will be lost. No more data will be received. -The protocol's connection_lost() method will (eventually) be -called with None as its argument. -""" + Buffered data will be lost. No more data will be received. + The protocol's connection_lost() method will (eventually) be + called with None as its argument. + """ class SubprocessTransport(BaseTransport): __slots__ = () def get_pid(self) -> int: - """Get subprocess id. -""" + """Get subprocess id.""" + def get_returncode(self) -> int | None: """Get subprocess returncode. -See also -http://docs.python.org/3/library/subprocess#subprocess.Popen.returncode -""" + See also + http://docs.python.org/3/library/subprocess#subprocess.Popen.returncode + """ + def get_pipe_transport(self, fd: int) -> BaseTransport | None: - """Get transport for pipe with number fd. -""" + """Get transport for pipe with number fd.""" + def send_signal(self, signal: int) -> None: """Send signal to subprocess. 
-See also: -docs.python.org/3/library/subprocess#subprocess.Popen.send_signal -""" + See also: + docs.python.org/3/library/subprocess#subprocess.Popen.send_signal + """ + def terminate(self) -> None: """Stop the subprocess. -Alias for close() method. + Alias for close() method. + + On Posix OSs the method sends SIGTERM to the subprocess. + On Windows the Win32 API function TerminateProcess() + is called to stop the subprocess. -On Posix OSs the method sends SIGTERM to the subprocess. -On Windows the Win32 API function TerminateProcess() - is called to stop the subprocess. + See also: + http://docs.python.org/3/library/subprocess#subprocess.Popen.terminate + """ -See also: -http://docs.python.org/3/library/subprocess#subprocess.Popen.terminate -""" def kill(self) -> None: """Kill the subprocess. -On Posix OSs the function sends SIGKILL to the subprocess. -On Windows kill() is an alias for terminate(). + On Posix OSs the function sends SIGKILL to the subprocess. + On Windows kill() is an alias for terminate(). -See also: -http://docs.python.org/3/library/subprocess#subprocess.Popen.kill -""" + See also: + http://docs.python.org/3/library/subprocess#subprocess.Popen.kill + """ class _FlowControlMixin(Transport): """All the logic for (write) flow control in a mix-in base class. -The subclass must implement get_write_buffer_size(). It must call -_maybe_pause_protocol() whenever the write buffer size increases, -and _maybe_resume_protocol() whenever it decreases. It may also -override set_write_buffer_limits() (e.g. to specify different -defaults). + The subclass must implement get_write_buffer_size(). It must call + _maybe_pause_protocol() whenever the write buffer size increases, + and _maybe_resume_protocol() whenever it decreases. It may also + override set_write_buffer_limits() (e.g. to specify different + defaults). + + The subclass constructor must call super().__init__(extra). This + will call set_write_buffer_limits(). -The subclass constructor must call super().__init__(extra). This -will call set_write_buffer_limits(). + The user may call set_write_buffer_limits() and + get_write_buffer_size(), and their protocol's pause_writing() and + resume_writing() may be called. + """ -The user may call set_write_buffer_limits() and -get_write_buffer_size(), and their protocol's pause_writing() and -resume_writing() may be called. -""" __slots__ = ("_loop", "_protocol_paused", "_high_water", "_low_water") def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi index e610f7271c3cb..4d08d240165be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/trsock.pyi @@ -16,10 +16,11 @@ _CMSG: TypeAlias = tuple[int, int, bytes] class TransportSocket: """A socket-like wrapper for exposing real transport sockets. -These objects can be safely returned by APIs like -`transport.get_extra_info('socket')`. All potentially disruptive -operations (like "socket.close()") are banned. -""" + These objects can be safely returned by APIs like + `transport.get_extra_info('socket')`. All potentially disruptive + operations (like "socket.close()") are banned. + """ + __slots__ = ("_sock",) def __init__(self, sock: socket.socket) -> None: ... 
@property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi index 4f13579af61dc..679f2e6734780 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/unix_events.pyi @@ -1,5 +1,5 @@ -"""Selector event loop for Unix with signal handling. -""" +"""Selector event loop for Unix with signal handling.""" + import sys import types from _typeshed import StrPath @@ -54,157 +54,169 @@ if sys.version_info < (3, 14): class AbstractChildWatcher: """Abstract base class for monitoring child processes. -Objects derived from this class monitor a collection of subprocesses and -report their termination or interruption by a signal. + Objects derived from this class monitor a collection of subprocesses and + report their termination or interruption by a signal. + + New callbacks are registered with .add_child_handler(). Starting a new + process must be done within a 'with' block to allow the watcher to suspend + its activity until the new process if fully registered (this is needed to + prevent a race condition in some implementations). -New callbacks are registered with .add_child_handler(). Starting a new -process must be done within a 'with' block to allow the watcher to suspend -its activity until the new process if fully registered (this is needed to -prevent a race condition in some implementations). + Example: + with watcher: + proc = subprocess.Popen("sleep 1") + watcher.add_child_handler(proc.pid, callback) -Example: - with watcher: - proc = subprocess.Popen("sleep 1") - watcher.add_child_handler(proc.pid, callback) + Notes: + Implementations of this class must be thread-safe. -Notes: - Implementations of this class must be thread-safe. + Since child watcher objects may catch the SIGCHLD signal and call + waitpid(-1), there should be only one active object per process. + """ - Since child watcher objects may catch the SIGCHLD signal and call - waitpid(-1), there should be only one active object per process. -""" @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: """Register a new child handler. -Arrange for callback(pid, returncode, *args) to be called when -process 'pid' terminates. Specifying another callback for the same -process replaces the previous handler. + Arrange for callback(pid, returncode, *args) to be called when + process 'pid' terminates. Specifying another callback for the same + process replaces the previous handler. + + Note: callback() must be thread-safe. + """ -Note: callback() must be thread-safe. -""" @abstractmethod def remove_child_handler(self, pid: int) -> bool: """Removes the handler for process 'pid'. -The function returns True if the handler was successfully removed, -False if there was nothing to remove. -""" + The function returns True if the handler was successfully removed, + False if there was nothing to remove. + """ + @abstractmethod def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: """Attach the watcher to an event loop. -If the watcher was previously attached to an event loop, then it is -first detached before attaching to the new loop. + If the watcher was previously attached to an event loop, then it is + first detached before attaching to the new loop. + + Note: loop may be None. + """ -Note: loop may be None. -""" @abstractmethod def close(self) -> None: """Close the watcher. 
-This must be called to make sure that any underlying resource is freed. -""" + This must be called to make sure that any underlying resource is freed. + """ + @abstractmethod def __enter__(self) -> Self: """Enter the watcher's context and allow starting new processes -This function must return self -""" + This function must return self + """ + @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None ) -> None: - """Exit the watcher's context -""" + """Exit the watcher's context""" + @abstractmethod def is_active(self) -> bool: """Return ``True`` if the watcher is active and is used by the event loop. -Return True if the watcher is installed and ready to handle process exit -notifications. + Return True if the watcher is installed and ready to handle process exit + notifications. -""" + """ else: class AbstractChildWatcher: """Abstract base class for monitoring child processes. - Objects derived from this class monitor a collection of subprocesses and - report their termination or interruption by a signal. + Objects derived from this class monitor a collection of subprocesses and + report their termination or interruption by a signal. + + New callbacks are registered with .add_child_handler(). Starting a new + process must be done within a 'with' block to allow the watcher to suspend + its activity until the new process if fully registered (this is needed to + prevent a race condition in some implementations). - New callbacks are registered with .add_child_handler(). Starting a new - process must be done within a 'with' block to allow the watcher to suspend - its activity until the new process if fully registered (this is needed to - prevent a race condition in some implementations). + Example: + with watcher: + proc = subprocess.Popen("sleep 1") + watcher.add_child_handler(proc.pid, callback) - Example: - with watcher: - proc = subprocess.Popen("sleep 1") - watcher.add_child_handler(proc.pid, callback) + Notes: + Implementations of this class must be thread-safe. - Notes: - Implementations of this class must be thread-safe. + Since child watcher objects may catch the SIGCHLD signal and call + waitpid(-1), there should be only one active object per process. + """ - Since child watcher objects may catch the SIGCHLD signal and call - waitpid(-1), there should be only one active object per process. - """ @abstractmethod def add_child_handler( self, pid: int, callback: Callable[[int, int, Unpack[_Ts]], object], *args: Unpack[_Ts] ) -> None: """Register a new child handler. - Arrange for callback(pid, returncode, *args) to be called when - process 'pid' terminates. Specifying another callback for the same - process replaces the previous handler. + Arrange for callback(pid, returncode, *args) to be called when + process 'pid' terminates. Specifying another callback for the same + process replaces the previous handler. + + Note: callback() must be thread-safe. + """ - Note: callback() must be thread-safe. - """ @abstractmethod def remove_child_handler(self, pid: int) -> bool: """Removes the handler for process 'pid'. - The function returns True if the handler was successfully removed, - False if there was nothing to remove. -""" + The function returns True if the handler was successfully removed, + False if there was nothing to remove. + """ + @abstractmethod def attach_loop(self, loop: events.AbstractEventLoop | None) -> None: """Attach the watcher to an event loop. 
- If the watcher was previously attached to an event loop, then it is - first detached before attaching to the new loop. + If the watcher was previously attached to an event loop, then it is + first detached before attaching to the new loop. + + Note: loop may be None. + """ - Note: loop may be None. - """ @abstractmethod def close(self) -> None: """Close the watcher. - This must be called to make sure that any underlying resource is freed. - """ + This must be called to make sure that any underlying resource is freed. + """ + @abstractmethod def __enter__(self) -> Self: """Enter the watcher's context and allow starting new processes - This function must return self -""" + This function must return self + """ + @abstractmethod def __exit__( self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None ) -> None: - """Exit the watcher's context -""" + """Exit the watcher's context""" + @abstractmethod def is_active(self) -> bool: """Return ``True`` if the watcher is active and is used by the event loop. - Return True if the watcher is installed and ready to handle process exit - notifications. + Return True if the watcher is installed and ready to handle process exit + notifications. - """ + """ if sys.platform != "win32": if sys.version_info < (3, 14): @@ -220,13 +232,14 @@ if sys.platform != "win32": class SafeChildWatcher(BaseChildWatcher): """'Safe' child watcher implementation. -This implementation avoids disrupting other code spawning processes by -polling explicitly each process in the SIGCHLD handler instead of calling -os.waitpid(-1). + This implementation avoids disrupting other code spawning processes by + polling explicitly each process in the SIGCHLD handler instead of calling + os.waitpid(-1). + + This is a safe solution but it has a significant overhead when handling a + big number of children (O(n) each time SIGCHLD is raised) + """ -This is a safe solution but it has a significant overhead when handling a -big number of children (O(n) each time SIGCHLD is raised) -""" def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -240,13 +253,14 @@ big number of children (O(n) each time SIGCHLD is raised) class FastChildWatcher(BaseChildWatcher): """'Fast' child watcher implementation. -This implementation reaps every terminated processes by calling -os.waitpid(-1) directly, possibly breaking other code spawning processes -and waiting for their termination. + This implementation reaps every terminated processes by calling + os.waitpid(-1) directly, possibly breaking other code spawning processes + and waiting for their termination. + + There is no noticeable overhead when handling a big number of children + (O(1) each time a child terminates). + """ -There is no noticeable overhead when handling a big number of children -(O(1) each time a child terminates). -""" def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -267,13 +281,14 @@ There is no noticeable overhead when handling a big number of children class SafeChildWatcher(BaseChildWatcher): """'Safe' child watcher implementation. - This implementation avoids disrupting other code spawning processes by - polling explicitly each process in the SIGCHLD handler instead of calling - os.waitpid(-1). 
+ This implementation avoids disrupting other code spawning processes by + polling explicitly each process in the SIGCHLD handler instead of calling + os.waitpid(-1). + + This is a safe solution but it has a significant overhead when handling a + big number of children (O(n) each time SIGCHLD is raised) + """ - This is a safe solution but it has a significant overhead when handling a - big number of children (O(n) each time SIGCHLD is raised) - """ def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -286,13 +301,14 @@ There is no noticeable overhead when handling a big number of children class FastChildWatcher(BaseChildWatcher): """'Fast' child watcher implementation. - This implementation reaps every terminated processes by calling - os.waitpid(-1) directly, possibly breaking other code spawning processes - and waiting for their termination. + This implementation reaps every terminated processes by calling + os.waitpid(-1) directly, possibly breaking other code spawning processes + and waiting for their termination. + + There is no noticeable overhead when handling a big number of children + (O(1) each time a child terminates). + """ - There is no noticeable overhead when handling a big number of children - (O(1) each time a child terminates). - """ def __enter__(self) -> Self: ... def __exit__( self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None @@ -305,8 +321,9 @@ There is no noticeable overhead when handling a big number of children class _UnixSelectorEventLoop(BaseSelectorEventLoop): """Unix event loop. -Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. -""" + Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. + """ + if sys.version_info >= (3, 13): async def create_unix_server( self, @@ -324,32 +341,32 @@ Adds signal handling and UNIX Domain Socket support to SelectorEventLoop. if sys.version_info >= (3, 14): class _UnixDefaultEventLoopPolicy(events._BaseDefaultEventLoopPolicy): - """UNIX event loop policy -""" + """UNIX event loop policy""" + else: class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - """UNIX event loop policy with a watcher for child processes. -""" + """UNIX event loop policy with a watcher for child processes.""" + if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") def get_child_watcher(self) -> AbstractChildWatcher: """Get the watcher for child processes. -If not yet set, a ThreadedChildWatcher object is automatically created. -""" + If not yet set, a ThreadedChildWatcher object is automatically created. + """ + @deprecated("Deprecated since Python 3.12; removed in Python 3.14.") def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: - """Set the watcher for child processes. -""" + """Set the watcher for child processes.""" else: def get_child_watcher(self) -> AbstractChildWatcher: """Get the watcher for child processes. - If not yet set, a ThreadedChildWatcher object is automatically created. - """ + If not yet set, a ThreadedChildWatcher object is automatically created. + """ + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: - """Set the watcher for child processes. -""" + """Set the watcher for child processes.""" SelectorEventLoop = _UnixSelectorEventLoop @@ -367,14 +384,15 @@ If not yet set, a ThreadedChildWatcher object is automatically created. 
class MultiLoopChildWatcher(AbstractChildWatcher): """A watcher that doesn't require running loop in the main thread. -This implementation registers a SIGCHLD signal handler on -instantiation (which may conflict with other code that -install own handler for this signal). + This implementation registers a SIGCHLD signal handler on + instantiation (which may conflict with other code that + install own handler for this signal). + + The solution is safe but it has a significant overhead when + handling a big number of processes (*O(n)* each time a + SIGCHLD is received). + """ -The solution is safe but it has a significant overhead when -handling a big number of processes (*O(n)* each time a -SIGCHLD is received). -""" def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -391,14 +409,15 @@ SIGCHLD is received). class MultiLoopChildWatcher(AbstractChildWatcher): """A watcher that doesn't require running loop in the main thread. - This implementation registers a SIGCHLD signal handler on - instantiation (which may conflict with other code that - install own handler for this signal). + This implementation registers a SIGCHLD signal handler on + instantiation (which may conflict with other code that + install own handler for this signal). + + The solution is safe but it has a significant overhead when + handling a big number of processes (*O(n)* each time a + SIGCHLD is received). + """ - The solution is safe but it has a significant overhead when - handling a big number of processes (*O(n)* each time a - SIGCHLD is received). - """ def is_active(self) -> bool: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -415,15 +434,16 @@ SIGCHLD is received). class ThreadedChildWatcher(AbstractChildWatcher): """Threaded child watcher implementation. -The watcher uses a thread per process -for waiting for the process finish. + The watcher uses a thread per process + for waiting for the process finish. -It doesn't require subscription on POSIX signal -but a thread creation is not free. + It doesn't require subscription on POSIX signal + but a thread creation is not free. + + The watcher has O(1) complexity, its performance doesn't depend + on amount of spawn processes. + """ -The watcher has O(1) complexity, its performance doesn't depend -on amount of spawn processes. -""" def is_active(self) -> Literal[True]: ... def close(self) -> None: ... def __enter__(self) -> Self: ... @@ -440,14 +460,15 @@ on amount of spawn processes. class PidfdChildWatcher(AbstractChildWatcher): """Child watcher implementation using Linux's pid file descriptors. -This child watcher polls process file descriptors (pidfds) to await child -process termination. In some respects, PidfdChildWatcher is a "Goldilocks" -child watcher implementation. It doesn't require signals or threads, doesn't -interfere with any processes launched outside the event loop, and scales -linearly with the number of subprocesses launched by the event loop. The -main disadvantage is that pidfds are specific to Linux, and only work on -recent (5.3+) kernels. -""" + This child watcher polls process file descriptors (pidfds) to await child + process termination. In some respects, PidfdChildWatcher is a "Goldilocks" + child watcher implementation. It doesn't require signals or threads, doesn't + interfere with any processes launched outside the event loop, and scales + linearly with the number of subprocesses launched by the event loop. 
The + main disadvantage is that pidfds are specific to Linux, and only work on + recent (5.3+) kernels. + """ + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi index 92c18dd5d5069..99b9ec9565aed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,5 +1,5 @@ -"""Selector and proactor event loops for Windows. -""" +"""Selector and proactor event loops for Windows.""" + import socket import sys from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer @@ -51,20 +51,20 @@ if sys.platform == "win32": class PipeServer: """Class representing a pipe server. -This is much like a bound, listening socket. -""" + This is much like a bound, listening socket. + """ + def __init__(self, address: str) -> None: ... def __del__(self) -> None: ... def closed(self) -> bool: ... def close(self) -> None: ... class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): - """Windows version of selector event loop. -""" + """Windows version of selector event loop.""" class ProactorEventLoop(proactor_events.BaseProactorEventLoop): - """Windows version of proactor event loop using IOCP. -""" + """Windows version of proactor event loop using IOCP.""" + def __init__(self, proactor: IocpProactor | None = None) -> None: ... async def create_pipe_connection( self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str @@ -74,8 +74,8 @@ This is much like a bound, listening socket. ) -> list[PipeServer]: ... class IocpProactor: - """Proactor implementation using IOCP. -""" + """Proactor implementation using IOCP.""" + def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... @@ -101,9 +101,10 @@ This is much like a bound, listening socket. def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: """Wait for a handle. -Return a Future object. The result of the future is True if the wait -completed, or False if the wait did not complete (on timeout). -""" + Return a Future object. The result of the future is True if the wait + completed, or False if the wait did not complete (on timeout). + """ + def close(self) -> None: ... if sys.version_info >= (3, 11): def recvfrom_into( @@ -123,20 +124,18 @@ completed, or False if the wait did not complete (on timeout). class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[SelectorEventLoop]] def get_child_watcher(self) -> NoReturn: - """Get the watcher for child processes. -""" + """Get the watcher for child processes.""" + def set_child_watcher(self, watcher: Any) -> NoReturn: - """Set the watcher for child processes. -""" + """Set the watcher for child processes.""" class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): _loop_factory: ClassVar[type[ProactorEventLoop]] def get_child_watcher(self) -> NoReturn: - """Get the watcher for child processes. -""" + """Get the watcher for child processes.""" + def set_child_watcher(self, watcher: Any) -> NoReturn: - """Set the watcher for child processes. 
-""" + """Set the watcher for child processes.""" if sys.version_info >= (3, 14): _DefaultEventLoopPolicy = _WindowsProactorEventLoopPolicy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi index b78b9f2a9d7e3..1b1d0bcf62c27 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncio/windows_utils.pyi @@ -1,5 +1,5 @@ -"""Various Windows specific bits and pieces. -""" +"""Various Windows specific bits and pieces.""" + import subprocess import sys from collections.abc import Callable @@ -14,14 +14,14 @@ if sys.platform == "win32": PIPE: Final = subprocess.PIPE STDOUT: Final = subprocess.STDOUT def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: - """Like os.pipe() but with overlapped support and using handles not fds. -""" + """Like os.pipe() but with overlapped support and using handles not fds.""" class PipeHandle: """Wrapper for an overlapped pipe handle which is vaguely file-object like. -The IOCP event loop can use these instead of socket objects. -""" + The IOCP event loop can use these instead of socket objects. + """ + def __init__(self, handle: int) -> None: ... def __del__(self) -> None: ... def __enter__(self) -> Self: ... @@ -34,8 +34,9 @@ The IOCP event loop can use these instead of socket objects. class Popen(subprocess.Popen[AnyStr]): """Replacement for subprocess.Popen using overlapped pipe handles. -The stdin, stdout, stderr are None or instances of PipeHandle. -""" + The stdin, stdout, stderr are None or instances of PipeHandle. + """ + stdin: PipeHandle | None # type: ignore[assignment] stdout: PipeHandle | None # type: ignore[assignment] stderr: PipeHandle | None # type: ignore[assignment] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi index a5f931fdca357..96a81edcf2d3d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/asyncore.pyi @@ -18,6 +18,7 @@ control than multi-threaded programming. The module documented here solves many of the difficult problems for you, making the task of building sophisticated high-performance network servers and clients a snap. """ + import sys from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi index 61e9320810a4c..f00719b18777a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/atexit.pyi @@ -3,6 +3,7 @@ upon normal program termination. Two public functions, register and unregister, are defined. """ + from collections.abc import Callable from typing import TypeVar from typing_extensions import ParamSpec @@ -11,16 +12,17 @@ _T = TypeVar("_T") _P = ParamSpec("_P") def _clear() -> None: - """Clear the list of previously registered exit functions. -""" + """Clear the list of previously registered exit functions.""" + def _ncallbacks() -> int: - """Return the number of registered exit functions. -""" + """Return the number of registered exit functions.""" + def _run_exitfuncs() -> None: """Run all registered exit functions. -If a callback raises an exception, it is logged with sys.unraisablehook. 
-""" + If a callback raises an exception, it is logged with sys.unraisablehook. + """ + def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: """Register a function to be executed upon normal program termination @@ -29,10 +31,11 @@ def register(func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> kwargs - optional keyword arguments to pass to func func is returned to facilitate usage as a decorator. -""" + """ + def unregister(func: Callable[..., object], /) -> None: """Unregister an exit function which was previously registered using -atexit.register + atexit.register - func - function to be unregistered -""" + func - function to be unregistered + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi index 08234a5fc3584..fdfbc7d2cd841 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/audioop.pyi @@ -6,65 +6,65 @@ _RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] class error(Exception): ... def add(fragment1: Buffer, fragment2: Buffer, width: int, /) -> bytes: - """Return a fragment which is the addition of the two samples passed as parameters. -""" + """Return a fragment which is the addition of the two samples passed as parameters.""" + def adpcm2lin(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Decode an Intel/DVI ADPCM coded fragment to a linear fragment. -""" + """Decode an Intel/DVI ADPCM coded fragment to a linear fragment.""" + def alaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in a-LAW encoding to linearly encoded sound fragments. -""" + """Convert sound fragments in a-LAW encoding to linearly encoded sound fragments.""" + def avg(fragment: Buffer, width: int, /) -> int: - """Return the average over all samples in the fragment. -""" + """Return the average over all samples in the fragment.""" + def avgpp(fragment: Buffer, width: int, /) -> int: - """Return the average peak-peak value over all samples in the fragment. -""" + """Return the average peak-peak value over all samples in the fragment.""" + def bias(fragment: Buffer, width: int, bias: int, /) -> bytes: - """Return a fragment that is the original fragment with a bias added to each sample. -""" + """Return a fragment that is the original fragment with a bias added to each sample.""" + def byteswap(fragment: Buffer, width: int, /) -> bytes: - """Convert big-endian samples to little-endian and vice versa. -""" + """Convert big-endian samples to little-endian and vice versa.""" + def cross(fragment: Buffer, width: int, /) -> int: - """Return the number of zero crossings in the fragment passed as an argument. -""" + """Return the number of zero crossings in the fragment passed as an argument.""" + def findfactor(fragment: Buffer, reference: Buffer, /) -> float: - """Return a factor F such that rms(add(fragment, mul(reference, -F))) is minimal. -""" + """Return a factor F such that rms(add(fragment, mul(reference, -F))) is minimal.""" + def findfit(fragment: Buffer, reference: Buffer, /) -> tuple[int, float]: - """Try to match reference as well as possible to a portion of fragment. -""" + """Try to match reference as well as possible to a portion of fragment.""" + def findmax(fragment: Buffer, length: int, /) -> int: - """Search fragment for a slice of specified number of samples with maximum energy. 
-""" + """Search fragment for a slice of specified number of samples with maximum energy.""" + def getsample(fragment: Buffer, width: int, index: int, /) -> int: - """Return the value of sample index from the fragment. -""" + """Return the value of sample index from the fragment.""" + def lin2adpcm(fragment: Buffer, width: int, state: _AdpcmState | None, /) -> tuple[bytes, _AdpcmState]: - """Convert samples to 4 bit Intel/DVI ADPCM encoding. -""" + """Convert samples to 4 bit Intel/DVI ADPCM encoding.""" + def lin2alaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to a-LAW encoding. -""" + """Convert samples in the audio fragment to a-LAW encoding.""" + def lin2lin(fragment: Buffer, width: int, newwidth: int, /) -> bytes: - """Convert samples between 1-, 2-, 3- and 4-byte formats. -""" + """Convert samples between 1-, 2-, 3- and 4-byte formats.""" + def lin2ulaw(fragment: Buffer, width: int, /) -> bytes: - """Convert samples in the audio fragment to u-LAW encoding. -""" + """Convert samples in the audio fragment to u-LAW encoding.""" + def max(fragment: Buffer, width: int, /) -> int: - """Return the maximum of the absolute value of all samples in a fragment. -""" + """Return the maximum of the absolute value of all samples in a fragment.""" + def maxpp(fragment: Buffer, width: int, /) -> int: - """Return the maximum peak-peak value in the sound fragment. -""" + """Return the maximum peak-peak value in the sound fragment.""" + def minmax(fragment: Buffer, width: int, /) -> tuple[int, int]: - """Return the minimum and maximum values of all samples in the sound fragment. -""" + """Return the minimum and maximum values of all samples in the sound fragment.""" + def mul(fragment: Buffer, width: int, factor: float, /) -> bytes: - """Return a fragment that has all samples in the original fragment multiplied by the floating-point value factor. -""" + """Return a fragment that has all samples in the original fragment multiplied by the floating-point value factor.""" + def ratecv( fragment: Buffer, width: int, @@ -76,20 +76,19 @@ def ratecv( weightB: int = 0, /, ) -> tuple[bytes, _RatecvState]: - """Convert the frame rate of the input fragment. -""" + """Convert the frame rate of the input fragment.""" + def reverse(fragment: Buffer, width: int, /) -> bytes: - """Reverse the samples in a fragment and returns the modified fragment. -""" + """Reverse the samples in a fragment and returns the modified fragment.""" + def rms(fragment: Buffer, width: int, /) -> int: - """Return the root-mean-square of the fragment, i.e. sqrt(sum(S_i^2)/n). -""" + """Return the root-mean-square of the fragment, i.e. sqrt(sum(S_i^2)/n).""" + def tomono(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Convert a stereo fragment to a mono fragment. -""" + """Convert a stereo fragment to a mono fragment.""" + def tostereo(fragment: Buffer, width: int, lfactor: float, rfactor: float, /) -> bytes: - """Generate a stereo fragment from a mono fragment. -""" + """Generate a stereo fragment from a mono fragment.""" + def ulaw2lin(fragment: Buffer, width: int, /) -> bytes: - """Convert sound fragments in u-LAW encoding to linearly encoded sound fragments. 
-""" + """Convert sound fragments in u-LAW encoding to linearly encoded sound fragments.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi index 96fa7dd7430ed..0fc7e13da0cec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/base64.pyi @@ -1,5 +1,5 @@ -"""Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings -""" +"""Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings""" + import sys from _typeshed import ReadableBuffer from typing import IO @@ -33,181 +33,192 @@ if sys.version_info >= (3, 13): def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: """Encode the bytes-like object s using Base64 and return a bytes object. -Optional altchars should be a byte string of length 2 which specifies an -alternative alphabet for the '+' and '/' characters. This allows an -application to e.g. generate url or filesystem safe Base64 strings. -""" + Optional altchars should be a byte string of length 2 which specifies an + alternative alphabet for the '+' and '/' characters. This allows an + application to e.g. generate url or filesystem safe Base64 strings. + """ + def b64decode(s: str | ReadableBuffer, altchars: str | ReadableBuffer | None = None, validate: bool = False) -> bytes: """Decode the Base64 encoded bytes-like object or ASCII string s. -Optional altchars must be a bytes-like object or ASCII string of length 2 -which specifies the alternative alphabet used instead of the '+' and '/' -characters. + Optional altchars must be a bytes-like object or ASCII string of length 2 + which specifies the alternative alphabet used instead of the '+' and '/' + characters. -The result is returned as a bytes object. A binascii.Error is raised if -s is incorrectly padded. + The result is returned as a bytes object. A binascii.Error is raised if + s is incorrectly padded. -If validate is False (the default), characters that are neither in the -normal base-64 alphabet nor the alternative alphabet are discarded prior -to the padding check. If validate is True, these non-alphabet characters -in the input result in a binascii.Error. -For more information about the strict base64 check, see: + If validate is False (the default), characters that are neither in the + normal base-64 alphabet nor the alternative alphabet are discarded prior + to the padding check. If validate is True, these non-alphabet characters + in the input result in a binascii.Error. + For more information about the strict base64 check, see: + + https://docs.python.org/3.11/library/binascii.html#binascii.a2b_base64 + """ -https://docs.python.org/3.11/library/binascii.html#binascii.a2b_base64 -""" def standard_b64encode(s: ReadableBuffer) -> bytes: """Encode bytes-like object s using the standard Base64 alphabet. -The result is returned as a bytes object. -""" + The result is returned as a bytes object. + """ + def standard_b64decode(s: str | ReadableBuffer) -> bytes: """Decode bytes encoded with the standard Base64 alphabet. -Argument s is a bytes-like object or ASCII string to decode. The result -is returned as a bytes object. A binascii.Error is raised if the input -is incorrectly padded. Characters that are not in the standard alphabet -are discarded prior to the padding check. -""" + Argument s is a bytes-like object or ASCII string to decode. The result + is returned as a bytes object. A binascii.Error is raised if the input + is incorrectly padded. 
Characters that are not in the standard alphabet + are discarded prior to the padding check. + """ + def urlsafe_b64encode(s: ReadableBuffer) -> bytes: """Encode bytes using the URL- and filesystem-safe Base64 alphabet. -Argument s is a bytes-like object to encode. The result is returned as a -bytes object. The alphabet uses '-' instead of '+' and '_' instead of -'/'. -""" + Argument s is a bytes-like object to encode. The result is returned as a + bytes object. The alphabet uses '-' instead of '+' and '_' instead of + '/'. + """ + def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: """Decode bytes using the URL- and filesystem-safe Base64 alphabet. -Argument s is a bytes-like object or ASCII string to decode. The result -is returned as a bytes object. A binascii.Error is raised if the input -is incorrectly padded. Characters that are not in the URL-safe base-64 -alphabet, and are not a plus '+' or slash '/', are discarded prior to the -padding check. + Argument s is a bytes-like object or ASCII string to decode. The result + is returned as a bytes object. A binascii.Error is raised if the input + is incorrectly padded. Characters that are not in the URL-safe base-64 + alphabet, and are not a plus '+' or slash '/', are discarded prior to the + padding check. + + The alphabet uses '-' instead of '+' and '_' instead of '/'. + """ -The alphabet uses '-' instead of '+' and '_' instead of '/'. -""" def b32encode(s: ReadableBuffer) -> bytes: """ -Encode the bytes-like objects using base32 and return a bytes object. -""" -def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: + Encode the bytes-like objects using base32 and return a bytes object. """ -Decode the base32 encoded bytes-like object or ASCII string s. - -Optional casefold is a flag specifying whether a lowercase alphabet is -acceptable as input. For security purposes, the default is False. -RFC 3548 allows for optional mapping of the digit 0 (zero) to the -letter O (oh), and for optional mapping of the digit 1 (one) to -either the letter I (eye) or letter L (el). The optional argument -map01 when not None, specifies which letter the digit 1 should be -mapped to (when map01 is not None, the digit 0 is always mapped to -the letter O). For security purposes the default is None, so that -0 and 1 are not allowed in the input. +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: str | ReadableBuffer | None = None) -> bytes: + """ + Decode the base32 encoded bytes-like object or ASCII string s. + + Optional casefold is a flag specifying whether a lowercase alphabet is + acceptable as input. For security purposes, the default is False. + + RFC 3548 allows for optional mapping of the digit 0 (zero) to the + letter O (oh), and for optional mapping of the digit 1 (one) to + either the letter I (eye) or letter L (el). The optional argument + map01 when not None, specifies which letter the digit 1 should be + mapped to (when map01 is not None, the digit 0 is always mapped to + the letter O). For security purposes the default is None, so that + 0 and 1 are not allowed in the input. + + The result is returned as a bytes object. A binascii.Error is raised if + the input is incorrectly padded or if there are non-alphabet + characters present in the input. + """ -The result is returned as a bytes object. A binascii.Error is raised if -the input is incorrectly padded or if there are non-alphabet -characters present in the input. 
-""" def b16encode(s: ReadableBuffer) -> bytes: - """Encode the bytes-like object s using Base16 and return a bytes object. - """ + """Encode the bytes-like object s using Base16 and return a bytes object.""" + def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: """Decode the Base16 encoded bytes-like object or ASCII string s. -Optional casefold is a flag specifying whether a lowercase alphabet is -acceptable as input. For security purposes, the default is False. + Optional casefold is a flag specifying whether a lowercase alphabet is + acceptable as input. For security purposes, the default is False. -The result is returned as a bytes object. A binascii.Error is raised if -s is incorrectly padded or if there are non-alphabet characters present -in the input. -""" + The result is returned as a bytes object. A binascii.Error is raised if + s is incorrectly padded or if there are non-alphabet characters present + in the input. + """ if sys.version_info >= (3, 10): def b32hexencode(s: ReadableBuffer) -> bytes: """ -Encode the bytes-like objects using base32hex and return a bytes object. -""" + Encode the bytes-like objects using base32hex and return a bytes object. + """ + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: """ -Decode the base32hex encoded bytes-like object or ASCII string s. + Decode the base32hex encoded bytes-like object or ASCII string s. -Optional casefold is a flag specifying whether a lowercase alphabet is -acceptable as input. For security purposes, the default is False. + Optional casefold is a flag specifying whether a lowercase alphabet is + acceptable as input. For security purposes, the default is False. -The result is returned as a bytes object. A binascii.Error is raised if -the input is incorrectly padded or if there are non-alphabet -characters present in the input. -""" + The result is returned as a bytes object. A binascii.Error is raised if + the input is incorrectly padded or if there are non-alphabet + characters present in the input. + """ -def a85encode( - b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False -) -> bytes: +def a85encode(b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False) -> bytes: """Encode bytes-like object b using Ascii85 and return a bytes object. -foldspaces is an optional flag that uses the special short sequence 'y' -instead of 4 consecutive spaces (ASCII 0x20) as supported by 'btoa'. This -feature is not supported by the "standard" Adobe encoding. + foldspaces is an optional flag that uses the special short sequence 'y' + instead of 4 consecutive spaces (ASCII 0x20) as supported by 'btoa'. This + feature is not supported by the "standard" Adobe encoding. -wrapcol controls whether the output should have newline (b'\\n') characters -added to it. If this is non-zero, each output line will be at most this -many characters long, excluding the trailing newline. + wrapcol controls whether the output should have newline (b'\\n') characters + added to it. If this is non-zero, each output line will be at most this + many characters long, excluding the trailing newline. -pad controls whether the input is padded to a multiple of 4 before -encoding. Note that the btoa implementation always pads. + pad controls whether the input is padded to a multiple of 4 before + encoding. Note that the btoa implementation always pads. 
+ + adobe controls whether the encoded byte sequence is framed with <~ and ~>, + which is used by the Adobe implementation. + """ -adobe controls whether the encoded byte sequence is framed with <~ and ~>, -which is used by the Adobe implementation. -""" def a85decode( b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" ) -> bytes: """Decode the Ascii85 encoded bytes-like object or ASCII string b. -foldspaces is a flag that specifies whether the 'y' short sequence should be -accepted as shorthand for 4 consecutive spaces (ASCII 0x20). This feature is -not supported by the "standard" Adobe encoding. + foldspaces is a flag that specifies whether the 'y' short sequence should be + accepted as shorthand for 4 consecutive spaces (ASCII 0x20). This feature is + not supported by the "standard" Adobe encoding. -adobe controls whether the input sequence is in Adobe Ascii85 format (i.e. -is framed with <~ and ~>). + adobe controls whether the input sequence is in Adobe Ascii85 format (i.e. + is framed with <~ and ~>). -ignorechars should be a byte string containing characters to ignore from the -input. This should only contain whitespace characters, and by default -contains all whitespace characters in ASCII. + ignorechars should be a byte string containing characters to ignore from the + input. This should only contain whitespace characters, and by default + contains all whitespace characters in ASCII. + + The result is returned as a bytes object. + """ -The result is returned as a bytes object. -""" def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: """Encode bytes-like object b in base85 format and return a bytes object. -If pad is true, the input is padded with b'\\0' so its length is a multiple of -4 bytes before encoding. -""" + If pad is true, the input is padded with b'\\0' so its length is a multiple of + 4 bytes before encoding. + """ + def b85decode(b: str | ReadableBuffer) -> bytes: """Decode the base85-encoded bytes-like object or ASCII string b -The result is returned as a bytes object. -""" + The result is returned as a bytes object. + """ + def decode(input: IO[bytes], output: IO[bytes]) -> None: - """Decode a file; input and output are binary files. -""" + """Decode a file; input and output are binary files.""" + def encode(input: IO[bytes], output: IO[bytes]) -> None: - """Encode a file; input and output are binary files. -""" + """Encode a file; input and output are binary files.""" + def encodebytes(s: ReadableBuffer) -> bytes: """Encode a bytestring into a bytes object containing multiple lines -of base-64 data. -""" + of base-64 data. + """ + def decodebytes(s: ReadableBuffer) -> bytes: - """Decode a bytestring of base-64 data into a bytes object. -""" + """Decode a bytestring of base-64 data into a bytes object.""" if sys.version_info >= (3, 13): def z85encode(s: ReadableBuffer) -> bytes: - """Encode bytes-like object b in z85 format and return a bytes object. -""" + """Encode bytes-like object b in z85 format and return a bytes object.""" + def z85decode(s: str | ReadableBuffer) -> bytes: """Decode the z85-encoded bytes-like object or ASCII string b -The result is returned as a bytes object. -""" + The result is returned as a bytes object. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi index ecf5505d6718d..e9ff8da0e462d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bdb.pyi @@ -1,5 +1,5 @@ -"""Debugger basics -""" +"""Debugger basics""" + import sys from _typeshed import ExcInfo, TraceFunction, Unused from collections.abc import Callable, Iterable, Iterator, Mapping @@ -20,22 +20,22 @@ _Backend: TypeAlias = Literal["settrace", "monitoring"] GENERATOR_AND_COROUTINE_FLAGS: Final[int] class BdbQuit(Exception): - """Exception to give up completely. -""" + """Exception to give up completely.""" class Bdb: """Generic Python debugger base class. -This class takes care of details of the trace facility; -a derived class should implement user interaction. -The standard debugger class (pdb.Pdb) is an example. + This class takes care of details of the trace facility; + a derived class should implement user interaction. + The standard debugger class (pdb.Pdb) is an example. + + The optional skip argument must be an iterable of glob-style + module name patterns. The debugger will not step into frames + that originate in a module that matches one of these patterns. + Whether a frame is considered to originate in a certain module + is determined by the __name__ in the frame globals. + """ -The optional skip argument must be an iterable of glob-style -module name patterns. The debugger will not step into frames -that originate in a module that matches one of these patterns. -Whether a frame is considered to originate in a certain module -is determined by the __name__ in the frame globals. -""" skip: set[str] | None breaks: dict[str, list[int]] fncache: dict[str, str] @@ -54,14 +54,14 @@ is determined by the __name__ in the frame globals. def canonic(self, filename: str) -> str: """Return canonical form of filename. -For real filenames, the canonical form is a case-normalized (on -case insensitive filesystems) absolute path. 'Filenames' with -angle brackets, such as "", generated in interactive -mode, are returned unchanged. -""" + For real filenames, the canonical form is a case-normalized (on + case insensitive filesystems) absolute path. 'Filenames' with + angle brackets, such as "", generated in interactive + mode, are returned unchanged. + """ + def reset(self) -> None: - """Set values of attributes as ready to start debugging. -""" + """Set values of attributes as ready to start debugging.""" if sys.version_info >= (3, 12): @contextmanager def set_enterframe(self, frame: FrameType) -> Iterator[None]: ... @@ -69,249 +69,265 @@ mode, are returned unchanged. def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: """Dispatch a trace function for debugged frames based on the event. -This function is installed as the trace function for debugged -frames. Its return value is the new trace function, which is -usually itself. The default implementation decides how to -dispatch a frame, depending on the type of event (passed in as a -string) that is about to be executed. - -The event can be one of the following: - line: A new line of code is going to be executed. - call: A function is about to be called or another code block - is entered. - return: A function or other code block is about to return. - exception: An exception has occurred. - c_call: A C function is about to be called. - c_return: A C function has returned. - c_exception: A C function has raised an exception. 
- -For the Python events, specialized functions (see the dispatch_*() -methods) are called. For the C events, no action is taken. - -The arg parameter depends on the previous event. -""" + This function is installed as the trace function for debugged + frames. Its return value is the new trace function, which is + usually itself. The default implementation decides how to + dispatch a frame, depending on the type of event (passed in as a + string) that is about to be executed. + + The event can be one of the following: + line: A new line of code is going to be executed. + call: A function is about to be called or another code block + is entered. + return: A function or other code block is about to return. + exception: An exception has occurred. + c_call: A C function is about to be called. + c_return: A C function has returned. + c_exception: A C function has raised an exception. + + For the Python events, specialized functions (see the dispatch_*() + methods) are called. For the C events, no action is taken. + + The arg parameter depends on the previous event. + """ + def dispatch_line(self, frame: FrameType) -> TraceFunction: """Invoke user function and return trace function for line event. -If the debugger stops on the current line, invoke -self.user_line(). Raise BdbQuit if self.quitting is set. -Return self.trace_dispatch to continue tracing in this scope. -""" + If the debugger stops on the current line, invoke + self.user_line(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. + """ + def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: """Invoke user function and return trace function for call event. -If the debugger stops on this function call, invoke -self.user_call(). Raise BdbQuit if self.quitting is set. -Return self.trace_dispatch to continue tracing in this scope. -""" + If the debugger stops on this function call, invoke + self.user_call(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. + """ + def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: """Invoke user function and return trace function for return event. -If the debugger stops on this function return, invoke -self.user_return(). Raise BdbQuit if self.quitting is set. -Return self.trace_dispatch to continue tracing in this scope. -""" + If the debugger stops on this function return, invoke + self.user_return(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. + """ + def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: """Invoke user function and return trace function for exception event. -If the debugger stops on this exception, invoke -self.user_exception(). Raise BdbQuit if self.quitting is set. -Return self.trace_dispatch to continue tracing in this scope. -""" + If the debugger stops on this exception, invoke + self.user_exception(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. + """ if sys.version_info >= (3, 13): def dispatch_opcode(self, frame: FrameType, arg: Unused) -> Callable[[FrameType, str, Any], TraceFunction]: """Invoke user function and return trace function for opcode event. -If the debugger stops on the current opcode, invoke -self.user_opcode(). Raise BdbQuit if self.quitting is set. -Return self.trace_dispatch to continue tracing in this scope. 
+ If the debugger stops on the current opcode, invoke + self.user_opcode(). Raise BdbQuit if self.quitting is set. + Return self.trace_dispatch to continue tracing in this scope. -Opcode event will always trigger the user callback. For now the only -opcode event is from an inline set_trace() and we want to stop there -unconditionally. -""" + Opcode event will always trigger the user callback. For now the only + opcode event is from an inline set_trace() and we want to stop there + unconditionally. + """ def is_skipped_module(self, module_name: str) -> bool: - """Return True if module_name matches any skip pattern. -""" + """Return True if module_name matches any skip pattern.""" + def stop_here(self, frame: FrameType) -> bool: - """Return True if frame is below the starting frame in the stack. -""" + """Return True if frame is below the starting frame in the stack.""" + def break_here(self, frame: FrameType) -> bool: """Return True if there is an effective breakpoint for this line. -Check for line or function breakpoint and if in effect. -Delete temporary breakpoints if effective() says to. -""" + Check for line or function breakpoint and if in effect. + Delete temporary breakpoints if effective() says to. + """ + def do_clear(self, arg: Any) -> bool | None: """Remove temporary breakpoint. -Must implement in derived classes or get NotImplementedError. -""" - def break_anywhere(self, frame: FrameType) -> bool: - """Return True if there is any breakpoint in that frame + Must implement in derived classes or get NotImplementedError. """ + + def break_anywhere(self, frame: FrameType) -> bool: + """Return True if there is any breakpoint in that frame""" + def user_call(self, frame: FrameType, argument_list: None) -> None: - """Called if we might stop in a function. -""" + """Called if we might stop in a function.""" + def user_line(self, frame: FrameType) -> None: - """Called when we stop or break at a line. -""" + """Called when we stop or break at a line.""" + def user_return(self, frame: FrameType, return_value: Any) -> None: - """Called when a return trap is set here. -""" + """Called when a return trap is set here.""" + def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: - """Called when we stop on an exception. -""" + """Called when we stop on an exception.""" + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: """Stop when the line with the lineno greater than the current one is -reached or when returning from current frame. -""" + reached or when returning from current frame. + """ if sys.version_info >= (3, 13): def user_opcode(self, frame: FrameType) -> None: # undocumented - """Called when we are about to execute an opcode. -""" + """Called when we are about to execute an opcode.""" def set_step(self) -> None: - """Stop after one line of code. -""" + """Stop after one line of code.""" if sys.version_info >= (3, 13): def set_stepinstr(self) -> None: # undocumented - """Stop before the next instruction. -""" + """Stop before the next instruction.""" def set_next(self, frame: FrameType) -> None: - """Stop on the next line in or below the given frame. -""" + """Stop on the next line in or below the given frame.""" + def set_return(self, frame: FrameType) -> None: - """Stop when returning from the given frame. -""" + """Stop when returning from the given frame.""" + def set_trace(self, frame: FrameType | None = None) -> None: """Start debugging from frame. -If frame is not specified, debugging starts from caller's frame. 
-""" + If frame is not specified, debugging starts from caller's frame. + """ + def set_continue(self) -> None: """Stop only at breakpoints or when finished. -If there are no breakpoints, set the system trace function to None. -""" + If there are no breakpoints, set the system trace function to None. + """ + def set_quit(self) -> None: """Set quitting attribute to True. -Raises BdbQuit exception in the next call to a dispatch_*() method. -""" + Raises BdbQuit exception in the next call to a dispatch_*() method. + """ + def set_break( self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None ) -> str | None: """Set a new breakpoint for filename:lineno. -If lineno doesn't exist for the filename, return an error message. -The filename should be in canonical form. -""" + If lineno doesn't exist for the filename, return an error message. + The filename should be in canonical form. + """ + def clear_break(self, filename: str, lineno: int) -> str | None: """Delete breakpoints for filename:lineno. -If no breakpoints were set, return an error message. -""" + If no breakpoints were set, return an error message. + """ + def clear_bpbynumber(self, arg: SupportsInt) -> str | None: """Delete a breakpoint by its index in Breakpoint.bpbynumber. -If arg is invalid, return an error message. -""" + If arg is invalid, return an error message. + """ + def clear_all_file_breaks(self, filename: str) -> str | None: """Delete all breakpoints in filename. -If none were set, return an error message. -""" + If none were set, return an error message. + """ + def clear_all_breaks(self) -> str | None: """Delete all existing breakpoints. -If none were set, return an error message. -""" + If none were set, return an error message. + """ + def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: """Return a breakpoint by its index in Breakpoint.bybpnumber. -For invalid arg values or if the breakpoint doesn't exist, -raise a ValueError. -""" + For invalid arg values or if the breakpoint doesn't exist, + raise a ValueError. + """ + def get_break(self, filename: str, lineno: int) -> bool: - """Return True if there is a breakpoint for filename:lineno. -""" + """Return True if there is a breakpoint for filename:lineno.""" + def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: """Return all breakpoints for filename:lineno. -If no breakpoints are set, return an empty list. -""" + If no breakpoints are set, return an empty list. + """ + def get_file_breaks(self, filename: str) -> list[int]: """Return all lines with breakpoints for filename. -If no breakpoints are set, return an empty list. -""" + If no breakpoints are set, return an empty list. + """ + def get_all_breaks(self) -> dict[str, list[int]]: - """Return all breakpoints that are set. -""" + """Return all breakpoints that are set.""" + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: """Return a list of (frame, lineno) in a stack trace and a size. -List starts with original calling frame, if there is one. -Size may be number of frames above or below f. -""" + List starts with original calling frame, if there is one. + Size may be number of frames above or below f. + """ + def format_stack_entry(self, frame_lineno: tuple[FrameType, int], lprefix: str = ": ") -> str: """Return a string with information about a stack entry. -The stack entry frame_lineno is a (frame, lineno) tuple. 
The -return string contains the canonical filename, the function name -or '<lambda>', the input arguments, the return value, and the -line of code (if it exists). + The stack entry frame_lineno is a (frame, lineno) tuple. The + return string contains the canonical filename, the function name + or '<lambda>', the input arguments, the return value, and the + line of code (if it exists). + + """ -""" - def run( - self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None - ) -> None: + def run(self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: """Debug a statement executed via the exec() function. -globals defaults to __main__.dict; locals defaults to globals. -""" + globals defaults to __main__.dict; locals defaults to globals. + """ + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: """Debug an expression executed via the eval() function. -globals defaults to __main__.dict; locals defaults to globals. -""" + globals defaults to __main__.dict; locals defaults to globals. + """ + def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: - """For backwards-compatibility. Defers to run(). -""" + """For backwards-compatibility. Defers to run().""" + def runcall(self, func: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> _T | None: """Debug a single function call. -Return the result of the function call. -""" + Return the result of the function call. + """ if sys.version_info >= (3, 14): def start_trace(self) -> None: ... def stop_trace(self) -> None: ... def disable_current_event(self) -> None: - """Disable the current event. -""" + """Disable the current event.""" + def restart_events(self) -> None: - """Restart all events. -""" + """Restart all events.""" class Breakpoint: """Breakpoint class. -Implements temporary breakpoints, ignore counts, disabling and -(re)-enabling, and conditionals. + Implements temporary breakpoints, ignore counts, disabling and + (re)-enabling, and conditionals. + + Breakpoints are indexed by number through bpbynumber and by + the (file, line) tuple using bplist. The former points to a + single instance of class Breakpoint. The latter points to a + list of such instances since there may be more than one + breakpoint per line. -Breakpoints are indexed by number through bpbynumber and by -the (file, line) tuple using bplist. The former points to a -single instance of class Breakpoint. The latter points to a -list of such instances since there may be more than one -breakpoint per line. + When creating a breakpoint, its associated filename should be + in canonical form. If funcname is defined, a breakpoint hit will be + counted when the first line of that function is executed. A + conditional breakpoint always counts a hit. + """ -When creating a breakpoint, its associated filename should be -in canonical form. If funcname is defined, a breakpoint hit will be -counted when the first line of that function is executed. A -conditional breakpoint always counts a hit. -""" next: int bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] @@ -336,51 +352,54 @@ conditional breakpoint always counts a hit. def deleteMe(self) -> None: """Delete the breakpoint from the list associated to a file:line. -If it is the last breakpoint in that position, it also deletes -the entry for the file:line.
-""" + If it is the last breakpoint in that position, it also deletes + the entry for the file:line. + """ + def enable(self) -> None: - """Mark the breakpoint as enabled. -""" + """Mark the breakpoint as enabled.""" + def disable(self) -> None: - """Mark the breakpoint as disabled. -""" + """Mark the breakpoint as disabled.""" + def bpprint(self, out: IO[str] | None = None) -> None: """Print the output of bpformat(). -The optional out argument directs where the output is sent -and defaults to standard output. -""" + The optional out argument directs where the output is sent + and defaults to standard output. + """ + def bpformat(self) -> str: """Return a string with information about the breakpoint. -The information includes the breakpoint number, temporary -status, file:line position, break condition, number of times to -ignore, and number of times hit. + The information includes the breakpoint number, temporary + status, file:line position, break condition, number of times to + ignore, and number of times hit. -""" + """ def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: """Return True if break should happen here. -Whether a break should happen depends on the way that b (the breakpoint) -was set. If it was set via line number, check if b.line is the same as -the one in the frame. If it was set via function name, check if this is -the right function and if it is on the first executable line. -""" + Whether a break should happen depends on the way that b (the breakpoint) + was set. If it was set via line number, check if b.line is the same as + the one in the frame. If it was set via function name, check if this is + the right function and if it is on the first executable line. + """ + def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: """Return (active breakpoint, delete temporary flag) or (None, None) as -breakpoint to act upon. + breakpoint to act upon. + + The "active breakpoint" is the first entry in bplist[line, file] (which + must exist) that is enabled, for which checkfuncname is True, and that + has neither a False condition nor a positive ignore count. The flag, + meaning that a temporary breakpoint should be deleted, is False only + when the condiion cannot be evaluated (in which case, ignore count is + ignored). -The "active breakpoint" is the first entry in bplist[line, file] (which -must exist) that is enabled, for which checkfuncname is True, and that -has neither a False condition nor a positive ignore count. The flag, -meaning that a temporary breakpoint should be deleted, is False only -when the condiion cannot be evaluated (in which case, ignore count is -ignored). + If no such entry exists, then (None, None) is returned. + """ -If no such entry exists, then (None, None) is returned. -""" def set_trace() -> None: - """Start debugging with a Bdb instance from the caller's frame. 
-""" + """Start debugging with a Bdb instance from the caller's frame.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi index 3176d98aeac3c..394c9ed001c0d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binascii.pyi @@ -1,5 +1,5 @@ -"""Conversion between binary data and ASCII -""" +"""Conversion between binary data and ASCII""" + import sys from _typeshed import ReadableBuffer from typing_extensions import TypeAlias, deprecated @@ -9,107 +9,107 @@ from typing_extensions import TypeAlias, deprecated _AsciiBuffer: TypeAlias = str | ReadableBuffer def a2b_uu(data: _AsciiBuffer, /) -> bytes: - """Decode a line of uuencoded data. -""" + """Decode a line of uuencoded data.""" + def b2a_uu(data: ReadableBuffer, /, *, backtick: bool = False) -> bytes: - """Uuencode line of data. -""" + """Uuencode line of data.""" if sys.version_info >= (3, 11): def a2b_base64(data: _AsciiBuffer, /, *, strict_mode: bool = False) -> bytes: """Decode a line of base64 data. - strict_mode - When set to True, bytes that are not part of the base64 standard are not allowed. - The same applies to excess data after padding (= / ==). -""" + strict_mode + When set to True, bytes that are not part of the base64 standard are not allowed. + The same applies to excess data after padding (= / ==). + """ else: def a2b_base64(data: _AsciiBuffer, /) -> bytes: - """Decode a line of base64 data. -""" + """Decode a line of base64 data.""" def b2a_base64(data: ReadableBuffer, /, *, newline: bool = True) -> bytes: - """Base64-code line of data. -""" + """Base64-code line of data.""" + def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: - """Decode a string of qp-encoded data. -""" + """Decode a string of qp-encoded data.""" + def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: """Encode a string using quoted-printable encoding. -On encoding, when istext is set, newlines are not encoded, and white -space at end of lines is. When istext is not set, \\r and \\n (CR/LF) -are both encoded. When quotetabs is set, space and tabs are encoded. -""" + On encoding, when istext is set, newlines are not encoded, and white + space at end of lines is. When istext is not set, \\r and \\n (CR/LF) + are both encoded. When quotetabs is set, space and tabs are encoded. + """ if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") def a2b_hqx(data: _AsciiBuffer, /) -> bytes: - """Decode .hqx coding. -""" + """Decode .hqx coding.""" + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") def rledecode_hqx(data: ReadableBuffer, /) -> bytes: - """Decode hexbin RLE-coded string. -""" + """Decode hexbin RLE-coded string.""" + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") def rlecode_hqx(data: ReadableBuffer, /) -> bytes: - """Binhex RLE-code binary data. -""" + """Binhex RLE-code binary data.""" + @deprecated("Deprecated since Python 3.9; removed in Python 3.11.") def b2a_hqx(data: ReadableBuffer, /) -> bytes: - """Encode .hqx data. -""" + """Encode .hqx data.""" def crc_hqx(data: ReadableBuffer, crc: int, /) -> int: - """Compute CRC-CCITT incrementally. -""" + """Compute CRC-CCITT incrementally.""" + def crc32(data: ReadableBuffer, crc: int = 0, /) -> int: - """Compute CRC-32 incrementally. 
-""" + """Compute CRC-32 incrementally.""" + def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: """Hexadecimal representation of binary data. - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - -The return value is a bytes object. This function is also -available as "hexlify()". - -Example: ->>> binascii.b2a_hex(b'\\xb9\\x01\\xef') -b'b901ef' ->>> binascii.hexlify(b'\\xb9\\x01\\xef', ':') -b'b9:01:ef' ->>> binascii.b2a_hex(b'\\xb9\\x01\\xef', b'_', 2) -b'b9_01ef' -""" + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + + The return value is a bytes object. This function is also + available as "hexlify()". + + Example: + >>> binascii.b2a_hex(b'\\xb9\\x01\\xef') + b'b901ef' + >>> binascii.hexlify(b'\\xb9\\x01\\xef', ':') + b'b9:01:ef' + >>> binascii.b2a_hex(b'\\xb9\\x01\\xef', b'_', 2) + b'b9_01ef' + """ + def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = 1) -> bytes: """Hexadecimal representation of binary data. - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + + The return value is a bytes object. This function is also + available as "b2a_hex()". + """ -The return value is a bytes object. This function is also -available as "b2a_hex()". -""" def a2b_hex(hexstr: _AsciiBuffer, /) -> bytes: """Binary data of hexadecimal representation. -hexstr must contain an even number of hex digits (upper or lower case). -This function is also available as "unhexlify()". -""" + hexstr must contain an even number of hex digits (upper or lower case). + This function is also available as "unhexlify()". + """ + def unhexlify(hexstr: _AsciiBuffer, /) -> bytes: """Binary data of hexadecimal representation. -hexstr must contain an even number of hex digits (upper or lower case). -""" + hexstr must contain an even number of hex digits (upper or lower case). + """ class Error(ValueError): ... class Incomplete(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi index 560a3d75c3aed..3129767a79537 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/binhex.pyi @@ -4,6 +4,7 @@ easy interface: binhex(inputfilename, outputfilename) hexbin(inputfilename, outputfilename) """ + from _typeshed import SizedBuffer from typing import IO, Any, Final from typing_extensions import TypeAlias @@ -40,8 +41,7 @@ class BinHex: def close(self) -> None: ... def binhex(inp: str, out: str) -> None: - """binhex(infilename, outfilename): create binhex-encoded copy of a file -""" + """binhex(infilename, outfilename): create binhex-encoded copy of a file""" class HexBin: def __init__(self, ifp: _FileHandleUnion) -> None: ... @@ -51,5 +51,4 @@ class HexBin: def close(self) -> None: ... 
def hexbin(inp: str, out: str) -> None: - """hexbin(infilename, outfilename) - Decode binhexed file -""" + """hexbin(infilename, outfilename) - Decode binhexed file""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi index 774c6cb0c22a5..69bf605572354 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bisect.pyi @@ -1,5 +1,5 @@ -"""Bisection algorithms. -""" +"""Bisection algorithms.""" + from _bisect import * bisect = bisect_right diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi index 7d8535f9107f0..bcacb3857bc8b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi @@ -9,6 +9,7 @@ applications, but can be useful in modules that provide objects with the same name as a built-in value, but in which the built-in of that name is also needed. """ + import _ast import _sitebuiltins import _typeshed @@ -119,9 +120,10 @@ _StepT_co = TypeVar("_StepT_co", covariant=True, default=_StartT_co | _StopT_co) class object: """The base class of the class hierarchy. -When called, it accepts no arguments and returns a new featureless -instance that has no instance attributes and cannot be given any. -""" + When called, it accepts no arguments and returns a new featureless + instance that has no instance attributes and cannot be given any. + """ + __doc__: str | None __dict__: dict[str, Any] __module__: str @@ -160,21 +162,22 @@ instance that has no instance attributes and cannot be given any. class staticmethod(Generic[_P, _R_co]): """Convert a function to be a static method. -A static method does not receive an implicit first argument. -To declare a static method, use this idiom: + A static method does not receive an implicit first argument. + To declare a static method, use this idiom: - class C: - @staticmethod - def f(arg1, arg2, argN): - ... + class C: + @staticmethod + def f(arg1, arg2, argN): + ... -It can be called either on the class (e.g. C.f()) or on an instance -(e.g. C().f()). Both the class and the instance are ignored, and -neither is passed implicitly as the first argument to the method. + It can be called either on the class (e.g. C.f()) or on an instance + (e.g. C().f()). Both the class and the instance are ignored, and + neither is passed implicitly as the first argument to the method. + + Static methods in Python are similar to those found in Java or C++. + For a more advanced concept, see the classmethod builtin. + """ -Static methods in Python are similar to those found in Java or C++. -For a more advanced concept, see the classmethod builtin. -""" @property def __func__(self) -> Callable[_P, _R_co]: ... @property @@ -182,8 +185,8 @@ For a more advanced concept, see the classmethod builtin. def __init__(self, f: Callable[_P, _R_co], /) -> None: ... @overload def __get__(self, instance: None, owner: type, /) -> Callable[_P, _R_co]: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + @overload def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): @@ -192,8 +195,7 @@ For a more advanced concept, see the classmethod builtin. @property def __wrapped__(self) -> Callable[_P, _R_co]: ... 
def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: - """Call self as a function. -""" + """Call self as a function.""" if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: ... __annotate__: AnnotateFunc | None @@ -202,23 +204,24 @@ For a more advanced concept, see the classmethod builtin. class classmethod(Generic[_T, _P, _R_co]): """Convert a function to be a class method. -A class method receives the class as implicit first argument, -just like an instance method receives the instance. -To declare a class method, use this idiom: + A class method receives the class as implicit first argument, + just like an instance method receives the instance. + To declare a class method, use this idiom: - class C: - @classmethod - def f(cls, arg1, arg2, argN): - ... + class C: + @classmethod + def f(cls, arg1, arg2, argN): + ... -It can be called either on the class (e.g. C.f()) or on an instance -(e.g. C().f()). The instance is ignored except for its class. -If a class method is called for a derived class, the derived class -object is passed as the implied first argument. + It can be called either on the class (e.g. C.f()) or on an instance + (e.g. C().f()). The instance is ignored except for its class. + If a class method is called for a derived class, the derived class + object is passed as the implied first argument. + + Class methods are different than C++ or Java static methods. + If you want those, see the staticmethod builtin. + """ -Class methods are different than C++ or Java static methods. -If you want those, see the staticmethod builtin. -""" @property def __func__(self) -> Callable[Concatenate[type[_T], _P], _R_co]: ... @property @@ -226,8 +229,8 @@ If you want those, see the staticmethod builtin. def __init__(self, f: Callable[Concatenate[type[_T], _P], _R_co], /) -> None: ... @overload def __get__(self, instance: _T, owner: type[_T] | None = None, /) -> Callable[_P, _R_co]: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + @overload def __get__(self, instance: None, owner: type[_T], /) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): @@ -242,8 +245,9 @@ If you want those, see the staticmethod builtin. @disjoint_base class type: """type(object) -> the object's type -type(name, bases, dict, **kwds) -> a new type -""" + type(name, bases, dict, **kwds) -> a new type + """ + # object.__base__ is None. Otherwise, it would be a type. @property def __base__(self) -> type | None: ... @@ -278,35 +282,32 @@ type(name, bases, dict, **kwds) -> a new type cls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any ) -> _typeshed.Self: ... def __call__(self, *args: Any, **kwds: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: - """Return a list of immediate subclasses. -""" + """Return a list of immediate subclasses.""" # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> list[type]: - """Return a type's method resolution order. -""" + """Return a type's method resolution order.""" + def __instancecheck__(self, instance: Any, /) -> bool: - """Check if an object is an instance. -""" + """Check if an object is an instance.""" + def __subclasscheck__(self, subclass: type, /) -> bool: - """Check if a class is a subclass. 
-""" + """Check if a class is a subclass.""" + @classmethod def __prepare__(metacls, name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object]: - """Create the namespace for the class statement -""" + """Create the namespace for the class statement""" if sys.version_info >= (3, 10): # `int | str` produces an instance of `UnionType`, but `int | int` produces an instance of `type`, # and `abc.ABC | abc.ABC` produces an instance of `abc.ABCMeta`. def __or__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self: _typeshed.Self, value: Any, /) -> types.UnionType | _typeshed.Self: - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 12): __type_params__: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] __annotations__: dict[str, AnnotationForm] @@ -316,19 +317,20 @@ type(name, bases, dict, **kwds) -> a new type @disjoint_base class super: """super() -> same as super(__class__, ) -super(type) -> unbound super object -super(type, obj) -> bound super object; requires isinstance(obj, type) -super(type, type2) -> bound super object; requires issubclass(type2, type) -Typical use to call a cooperative superclass method: -class C(B): - def meth(self, arg): - super().meth(arg) -This works for class methods too: -class C(B): - @classmethod - def cmeth(cls, arg): - super().cmeth(arg) -""" + super(type) -> unbound super object + super(type, obj) -> bound super object; requires isinstance(obj, type) + super(type, type2) -> bound super object; requires issubclass(type2, type) + Typical use to call a cooperative superclass method: + class C(B): + def meth(self, arg): + super().meth(arg) + This works for class methods too: + class C(B): + @classmethod + def cmeth(cls, arg): + super().cmeth(arg) + """ + @overload def __init__(self, t: Any, obj: Any, /) -> None: ... @overload @@ -343,20 +345,21 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 @disjoint_base class int: """int([x]) -> integer -int(x, base=10) -> integer - -Convert a number or string to an integer, or return 0 if no arguments -are given. If x is a number, return x.__int__(). For floating-point -numbers, this truncates towards zero. - -If x is not a number or if base is given, then x must be a string, -bytes, or bytearray instance representing an integer literal in the -given base. The literal can be preceded by '+' or '-' and be surrounded -by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. -Base 0 means to interpret the base from the string as an integer literal. ->>> int('0b100', base=0) -4 -""" + int(x, base=10) -> integer + + Convert a number or string to an integer, or return 0 if no arguments + are given. If x is a number, return x.__int__(). For floating-point + numbers, this truncates towards zero. + + If x is not a number or if base is given, then x must be a string, + bytes, or bytearray instance representing an integer literal in the + given base. The literal can be preceded by '+' or '-' and be surrounded + by whitespace. The base defaults to 10. Valid bases are 0 and 2-36. + Base 0 means to interpret the base from the string as an integer literal. + >>> int('0b100', base=0) + 4 + """ + @overload def __new__(cls, x: ConvertibleToInt = 0, /) -> Self: ... @overload @@ -364,75 +367,76 @@ Base 0 means to interpret the base from the string as an integer literal. 
def as_integer_ratio(self) -> tuple[int, Literal[1]]: """Return a pair of integers, whose ratio is equal to the original int. -The ratio is in lowest terms and has a positive denominator. + The ratio is in lowest terms and has a positive denominator. + + >>> (10).as_integer_ratio() + (10, 1) + >>> (-10).as_integer_ratio() + (-10, 1) + >>> (0).as_integer_ratio() + (0, 1) + """ ->>> (10).as_integer_ratio() -(10, 1) ->>> (-10).as_integer_ratio() -(-10, 1) ->>> (0).as_integer_ratio() -(0, 1) -""" @property def real(self) -> int: - """the real part of a complex number -""" + """the real part of a complex number""" + @property def imag(self) -> Literal[0]: - """the imaginary part of a complex number -""" + """the imaginary part of a complex number""" + @property def numerator(self) -> int: - """the numerator of a rational number in lowest terms -""" + """the numerator of a rational number in lowest terms""" + @property def denominator(self) -> Literal[1]: - """the denominator of a rational number in lowest terms -""" + """the denominator of a rational number in lowest terms""" + def conjugate(self) -> int: - """Returns self, the complex conjugate of any int. -""" + """Returns self, the complex conjugate of any int.""" + def bit_length(self) -> int: """Number of bits necessary to represent self in binary. ->>> bin(37) -'0b100101' ->>> (37).bit_length() -6 -""" + >>> bin(37) + '0b100101' + >>> (37).bit_length() + 6 + """ if sys.version_info >= (3, 10): def bit_count(self) -> int: """Number of ones in the binary representation of the absolute value of self. -Also known as the population count. - ->>> bin(13) -'0b1101' ->>> (13).bit_count() -3 -""" + Also known as the population count. + >>> bin(13) + '0b1101' + >>> (13).bit_count() + 3 + """ if sys.version_info >= (3, 11): def to_bytes( self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False ) -> bytes: """Return an array of bytes representing an integer. - length - Length of bytes object to use. An OverflowError is raised if the - integer is not representable with the given number of bytes. Default - is length 1. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - sys.byteorder as the byte order value. Default is to use 'big'. - signed - Determines whether two's complement is used to represent the integer. - If signed is False and a negative integer is given, an OverflowError - is raised. -""" + length + Length of bytes object to use. An OverflowError is raised if the + integer is not representable with the given number of bytes. Default + is length 1. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + sys.byteorder as the byte order value. Default is to use 'big'. + signed + Determines whether two's complement is used to represent the integer. + If signed is False and a negative integer is given, an OverflowError + is raised. + """ + @classmethod def from_bytes( cls, @@ -443,38 +447,39 @@ Also known as the population count. ) -> Self: """Return the integer represented by the given array of bytes. 
- bytes - Holds the array of bytes to convert. The argument must either - support the buffer protocol or be an iterable object producing bytes. - Bytes and bytearray are examples of built-in objects that support the - buffer protocol. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - sys.byteorder as the byte order value. Default is to use 'big'. - signed - Indicates whether two's complement is used to represent the integer. -""" + bytes + Holds the array of bytes to convert. The argument must either + support the buffer protocol or be an iterable object producing bytes. + Bytes and bytearray are examples of built-in objects that support the + buffer protocol. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + sys.byteorder as the byte order value. Default is to use 'big'. + signed + Indicates whether two's complement is used to represent the integer. + """ else: def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: """Return an array of bytes representing an integer. - length - Length of bytes object to use. An OverflowError is raised if the - integer is not representable with the given number of bytes. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Determines whether two's complement is used to represent the integer. - If signed is False and a negative integer is given, an OverflowError - is raised. -""" + length + Length of bytes object to use. An OverflowError is raised if the + integer is not representable with the given number of bytes. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + `sys.byteorder' as the byte order value. + signed + Determines whether two's complement is used to represent the integer. + If signed is False and a negative integer is given, an OverflowError + is raised. + """ + @classmethod def from_bytes( cls, @@ -485,72 +490,70 @@ Also known as the population count. ) -> Self: """Return the integer represented by the given array of bytes. - bytes - Holds the array of bytes to convert. The argument must either - support the buffer protocol or be an iterable object producing bytes. - Bytes and bytearray are examples of built-in objects that support the - buffer protocol. - byteorder - The byte order used to represent the integer. If byteorder is 'big', - the most significant byte is at the beginning of the byte array. If - byteorder is 'little', the most significant byte is at the end of the - byte array. 
To request the native byte order of the host system, use - `sys.byteorder' as the byte order value. - signed - Indicates whether two's complement is used to represent the integer. -""" - + bytes + Holds the array of bytes to convert. The argument must either + support the buffer protocol or be an iterable object producing bytes. + Bytes and bytearray are examples of built-in objects that support the + buffer protocol. + byteorder + The byte order used to represent the integer. If byteorder is 'big', + the most significant byte is at the beginning of the byte array. If + byteorder is 'little', the most significant byte is at the end of the + byte array. To request the native byte order of the host system, use + `sys.byteorder' as the byte order value. + signed + Indicates whether two's complement is used to represent the integer. + """ if sys.version_info >= (3, 12): def is_integer(self) -> Literal[True]: - """Returns True. Exists for duck type compatibility with float.is_integer. -""" + """Returns True. Exists for duck type compatibility with float.is_integer.""" def __add__(self, value: int, /) -> int: - """Return self+value. -""" + """Return self+value.""" + def __sub__(self, value: int, /) -> int: - """Return self-value. -""" + """Return self-value.""" + def __mul__(self, value: int, /) -> int: - """Return self*value. -""" + """Return self*value.""" + def __floordiv__(self, value: int, /) -> int: - """Return self//value. -""" + """Return self//value.""" + def __truediv__(self, value: int, /) -> float: - """Return self/value. -""" + """Return self/value.""" + def __mod__(self, value: int, /) -> int: - """Return self%value. -""" + """Return self%value.""" + def __divmod__(self, value: int, /) -> tuple[int, int]: - """Return divmod(self, value). -""" + """Return divmod(self, value).""" + def __radd__(self, value: int, /) -> int: - """Return value+self. -""" + """Return value+self.""" + def __rsub__(self, value: int, /) -> int: - """Return value-self. -""" + """Return value-self.""" + def __rmul__(self, value: int, /) -> int: - """Return value*self. -""" + """Return value*self.""" + def __rfloordiv__(self, value: int, /) -> int: - """Return value//self. -""" + """Return value//self.""" + def __rtruediv__(self, value: int, /) -> float: - """Return value/self. -""" + """Return value/self.""" + def __rmod__(self, value: int, /) -> int: - """Return value%self. -""" + """Return value%self.""" + def __rdivmod__(self, value: int, /) -> tuple[int, int]: - """Return divmod(value, self). -""" + """Return divmod(value, self).""" + @overload def __pow__(self, x: Literal[0], /) -> Literal[1]: - """Return pow(self, value, mod). -""" + """Return pow(self, value, mod).""" + @overload def __pow__(self, value: Literal[0], mod: None, /) -> Literal[1]: ... @overload @@ -564,68 +567,67 @@ Also known as the population count. @overload def __pow__(self, value: int, mod: int, /) -> int: ... def __rpow__(self, value: int, mod: int | None = None, /) -> Any: - """Return pow(value, self, mod). -""" + """Return pow(value, self, mod).""" + def __and__(self, value: int, /) -> int: - """Return self&value. -""" + """Return self&value.""" + def __or__(self, value: int, /) -> int: - """Return self|value. -""" + """Return self|value.""" + def __xor__(self, value: int, /) -> int: - """Return self^value. -""" + """Return self^value.""" + def __lshift__(self, value: int, /) -> int: - """Return self<<value. -""" + """Return self<<value.""" + def __rshift__(self, value: int, /) -> int: - """Return self>>value. -""" + """Return self>>value.""" + def __rand__(self, value: int, /) -> int: - """Return value&self.
-""" + """Return value&self.""" + def __ror__(self, value: int, /) -> int: - """Return value|self. -""" + """Return value|self.""" + def __rxor__(self, value: int, /) -> int: - """Return value^self. -""" + """Return value^self.""" + def __rlshift__(self, value: int, /) -> int: - """Return value< int: - """Return value>>self. -""" + """Return value>>self.""" + def __neg__(self) -> int: - """-self -""" + """-self""" + def __pos__(self) -> int: - """+self -""" + """+self""" + def __invert__(self) -> int: - """~self -""" + """~self""" + def __trunc__(self) -> int: - """Truncating an Integral returns itself. -""" + """Truncating an Integral returns itself.""" + def __ceil__(self) -> int: - """Ceiling of an Integral returns itself. -""" + """Ceiling of an Integral returns itself.""" + def __floor__(self) -> int: - """Flooring an Integral returns itself. -""" + """Flooring an Integral returns itself.""" if sys.version_info >= (3, 14): def __round__(self, ndigits: SupportsIndex | None = None, /) -> int: """Rounding an Integral returns itself. -Rounding with an ndigits argument also returns an integer. -""" + Rounding with an ndigits argument also returns an integer. + """ else: def __round__(self, ndigits: SupportsIndex = ..., /) -> int: """Rounding an Integral returns itself. -Rounding with an ndigits argument also returns an integer. -""" + Rounding with an ndigits argument also returns an integer. + """ def __getnewargs__(self) -> tuple[int]: ... def __eq__(self, value: object, /) -> bool: ... @@ -635,128 +637,129 @@ Rounding with an ndigits argument also returns an integer. def __gt__(self, value: int, /) -> bool: ... def __ge__(self, value: int, /) -> bool: ... def __float__(self) -> float: - """float(self) -""" + """float(self)""" + def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __abs__(self) -> int: - """abs(self) -""" + """abs(self)""" + def __hash__(self) -> int: ... def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def __index__(self) -> int: - """Return self converted to an integer, if self is suitable for use as an index into a list. -""" + """Return self converted to an integer, if self is suitable for use as an index into a list.""" + def __format__(self, format_spec: str, /) -> str: - """Convert to a string according to format_spec. -""" + """Convert to a string according to format_spec.""" @disjoint_base class float: - """Convert a string or number to a floating-point number, if possible. -""" + """Convert a string or number to a floating-point number, if possible.""" + def __new__(cls, x: ConvertibleToFloat = 0, /) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: """Return a pair of integers, whose ratio is exactly equal to the original float. -The ratio is in lowest terms and has a positive denominator. Raise -OverflowError on infinities and a ValueError on NaNs. + The ratio is in lowest terms and has a positive denominator. Raise + OverflowError on infinities and a ValueError on NaNs. + + >>> (10.0).as_integer_ratio() + (10, 1) + >>> (0.0).as_integer_ratio() + (0, 1) + >>> (-.25).as_integer_ratio() + (-1, 4) + """ ->>> (10.0).as_integer_ratio() -(10, 1) ->>> (0.0).as_integer_ratio() -(0, 1) ->>> (-.25).as_integer_ratio() -(-1, 4) -""" def hex(self) -> str: """Return a hexadecimal representation of a floating-point number. 
->>> (-0.1).hex() -'-0x1.999999999999ap-4' ->>> 3.14159.hex() -'0x1.921f9f01b866ep+1' -""" + >>> (-0.1).hex() + '-0x1.999999999999ap-4' + >>> 3.14159.hex() + '0x1.921f9f01b866ep+1' + """ + def is_integer(self) -> bool: - """Return True if the float is an integer. -""" + """Return True if the float is an integer.""" + @classmethod def fromhex(cls, string: str, /) -> Self: """Create a floating-point number from a hexadecimal string. ->>> float.fromhex('0x1.ffffp10') -2047.984375 ->>> float.fromhex('-0x1p-1074') --5e-324 -""" + >>> float.fromhex('0x1.ffffp10') + 2047.984375 + >>> float.fromhex('-0x1p-1074') + -5e-324 + """ + @property def real(self) -> float: - """the real part of a complex number -""" + """the real part of a complex number""" + @property def imag(self) -> float: - """the imaginary part of a complex number -""" + """the imaginary part of a complex number""" + def conjugate(self) -> float: - """Return self, the complex conjugate of any float. -""" + """Return self, the complex conjugate of any float.""" + def __add__(self, value: float, /) -> float: - """Return self+value. -""" + """Return self+value.""" + def __sub__(self, value: float, /) -> float: - """Return self-value. -""" + """Return self-value.""" + def __mul__(self, value: float, /) -> float: - """Return self*value. -""" + """Return self*value.""" + def __floordiv__(self, value: float, /) -> float: - """Return self//value. -""" + """Return self//value.""" + def __truediv__(self, value: float, /) -> float: - """Return self/value. -""" + """Return self/value.""" + def __mod__(self, value: float, /) -> float: - """Return self%value. -""" + """Return self%value.""" + def __divmod__(self, value: float, /) -> tuple[float, float]: - """Return divmod(self, value). -""" + """Return divmod(self, value).""" + @overload def __pow__(self, value: int, mod: None = None, /) -> float: - """Return pow(self, value, mod). -""" + """Return pow(self, value, mod).""" # positive __value -> float; negative __value -> complex # return type must be Any as `float | complex` causes too many false-positive errors @overload def __pow__(self, value: float, mod: None = None, /) -> Any: ... def __radd__(self, value: float, /) -> float: - """Return value+self. -""" + """Return value+self.""" + def __rsub__(self, value: float, /) -> float: - """Return value-self. -""" + """Return value-self.""" + def __rmul__(self, value: float, /) -> float: - """Return value*self. -""" + """Return value*self.""" + def __rfloordiv__(self, value: float, /) -> float: - """Return value//self. -""" + """Return value//self.""" + def __rtruediv__(self, value: float, /) -> float: - """Return value/self. -""" + """Return value/self.""" + def __rmod__(self, value: float, /) -> float: - """Return value%self. -""" + """Return value%self.""" + def __rdivmod__(self, value: float, /) -> tuple[float, float]: - """Return divmod(value, self). -""" + """Return divmod(value, self).""" + @overload def __rpow__(self, value: _PositiveInteger, mod: None = None, /) -> float: - """Return pow(value, self, mod). -""" + """Return pow(value, self, mod).""" + @overload def __rpow__(self, value: _NegativeInteger, mod: None = None, /) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @@ -764,20 +767,21 @@ OverflowError on infinities and a ValueError on NaNs. def __rpow__(self, value: float, mod: None = None, /) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... def __trunc__(self) -> int: - """Return the Integral closest to x between 0 and x. 
-""" + """Return the Integral closest to x between 0 and x.""" + def __ceil__(self) -> int: - """Return the ceiling as an Integral. -""" + """Return the ceiling as an Integral.""" + def __floor__(self) -> int: - """Return the floor as an Integral. -""" + """Return the floor as an Integral.""" + @overload def __round__(self, ndigits: None = None, /) -> int: """Return the Integral closest to x, rounding half toward even. -When an argument is passed, work like built-in round(x, ndigits). -""" + When an argument is passed, work like built-in round(x, ndigits). + """ + @overload def __round__(self, ndigits: SupportsIndex, /) -> float: ... def __eq__(self, value: object, /) -> bool: ... @@ -787,42 +791,41 @@ When an argument is passed, work like built-in round(x, ndigits). def __gt__(self, value: float, /) -> bool: ... def __ge__(self, value: float, /) -> bool: ... def __neg__(self) -> float: - """-self -""" + """-self""" + def __pos__(self) -> float: - """+self -""" + """+self""" + def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __float__(self) -> float: - """float(self) -""" + """float(self)""" + def __abs__(self) -> float: - """abs(self) -""" + """abs(self)""" + def __hash__(self) -> int: ... def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def __format__(self, format_spec: str, /) -> str: - """Formats the float according to format_spec. -""" + """Formats the float according to format_spec.""" if sys.version_info >= (3, 14): @classmethod def from_number(cls, number: float | SupportsIndex | SupportsFloat, /) -> Self: - """Convert real number to a floating-point number. -""" + """Convert real number to a floating-point number.""" @disjoint_base class complex: """Create a complex number from a string or numbers. -If a string is given, parse it as a complex number. -If a single number is given, convert it to a complex number. -If the 'real' or 'imag' arguments are given, create a complex number -with the specified real and imaginary components. -""" + If a string is given, parse it as a complex number. + If a single number is given, convert it to a complex number. + If the 'real' or 'imag' arguments are given, create a complex number + with the specified real and imaginary components. + """ + # Python doesn't currently accept SupportsComplex for the second argument @overload def __new__( @@ -834,72 +837,69 @@ with the specified real and imaginary components. def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... @property def real(self) -> float: - """the real part of a complex number -""" + """the real part of a complex number""" + @property def imag(self) -> float: - """the imaginary part of a complex number -""" + """the imaginary part of a complex number""" + def conjugate(self) -> complex: - """Return the complex conjugate of its argument. (3-4j).conjugate() == 3+4j. -""" + """Return the complex conjugate of its argument. (3-4j).conjugate() == 3+4j.""" + def __add__(self, value: complex, /) -> complex: - """Return self+value. -""" + """Return self+value.""" + def __sub__(self, value: complex, /) -> complex: - """Return self-value. -""" + """Return self-value.""" + def __mul__(self, value: complex, /) -> complex: - """Return self*value. -""" + """Return self*value.""" + def __pow__(self, value: complex, mod: None = None, /) -> complex: - """Return pow(self, value, mod). 
-""" + """Return pow(self, value, mod).""" + def __truediv__(self, value: complex, /) -> complex: - """Return self/value. -""" + """Return self/value.""" + def __radd__(self, value: complex, /) -> complex: - """Return value+self. -""" + """Return value+self.""" + def __rsub__(self, value: complex, /) -> complex: - """Return value-self. -""" + """Return value-self.""" + def __rmul__(self, value: complex, /) -> complex: - """Return value*self. -""" + """Return value*self.""" + def __rpow__(self, value: complex, mod: None = None, /) -> complex: - """Return pow(value, self, mod). -""" + """Return pow(value, self, mod).""" + def __rtruediv__(self, value: complex, /) -> complex: - """Return value/self. -""" + """Return value/self.""" + def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __neg__(self) -> complex: - """-self -""" + """-self""" + def __pos__(self) -> complex: - """+self -""" + """+self""" + def __abs__(self) -> float: - """abs(self) -""" + """abs(self)""" + def __hash__(self) -> int: ... def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def __format__(self, format_spec: str, /) -> str: - """Convert to a string according to format_spec. -""" + """Convert to a string according to format_spec.""" if sys.version_info >= (3, 11): def __complex__(self) -> complex: - """Convert this value to exact type complex. -""" + """Convert this value to exact type complex.""" if sys.version_info >= (3, 14): @classmethod def from_number(cls, number: complex | SupportsComplex | SupportsFloat | SupportsIndex, /) -> Self: - """Convert number to a complex floating-point number. -""" + """Convert number to a complex floating-point number.""" @type_check_only class _FormatMapMapping(Protocol): @@ -912,16 +912,17 @@ class _TranslateTable(Protocol): @disjoint_base class str(Sequence[str]): """str(object='') -> str -str(bytes_or_buffer[, encoding[, errors]]) -> str - -Create a new string object from the given object. If encoding or -errors is specified, then the object must expose a data buffer -that will be decoded using the given encoding and error handler. -Otherwise, returns the result of object.__str__() (if defined) -or repr(object). -encoding defaults to 'utf-8'. -errors defaults to 'strict'. -""" + str(bytes_or_buffer[, encoding[, errors]]) -> str + + Create a new string object from the given object. If encoding or + errors is specified, then the object must expose a data buffer + that will be decoded using the given encoding and error handler. + Otherwise, returns the result of object.__str__() (if defined) + or repr(object). + encoding defaults to 'utf-8'. + errors defaults to 'strict'. + """ + @overload def __new__(cls, object: object = "") -> Self: ... @overload @@ -930,232 +931,256 @@ errors defaults to 'strict'. def capitalize(self: LiteralString) -> LiteralString: """Return a capitalized version of the string. -More specifically, make the first character have upper case and the rest lower -case. -""" + More specifically, make the first character have upper case and the rest lower + case. + """ + @overload def capitalize(self) -> str: ... # type: ignore[misc] @overload def casefold(self: LiteralString) -> LiteralString: - """Return a version of the string suitable for caseless comparisons. -""" + """Return a version of the string suitable for caseless comparisons.""" + @overload def casefold(self) -> str: ... 
# type: ignore[misc] @overload def center(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: """Return a centered string of length width. -Padding is done using the specified fill character (default is a space). -""" + Padding is done using the specified fill character (default is a space). + """ + @overload def center(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] def count(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: """Return the number of non-overlapping occurrences of substring sub in string S[start:end]. -Optional arguments start and end are interpreted as in slice notation. -""" + Optional arguments start and end are interpreted as in slice notation. + """ + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: """Encode the string using the codec registered for encoding. - encoding - The encoding in which to encode the string. - errors - The error handling scheme to use for encoding errors. - The default is 'strict' meaning that encoding errors raise a - UnicodeEncodeError. Other possible values are 'ignore', 'replace' and - 'xmlcharrefreplace' as well as any other name registered with - codecs.register_error that can handle UnicodeEncodeErrors. -""" + encoding + The encoding in which to encode the string. + errors + The error handling scheme to use for encoding errors. + The default is 'strict' meaning that encoding errors raise a + UnicodeEncodeError. Other possible values are 'ignore', 'replace' and + 'xmlcharrefreplace' as well as any other name registered with + codecs.register_error that can handle UnicodeEncodeErrors. + """ + def endswith( self, suffix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> bool: """Return True if the string ends with the specified suffix, False otherwise. - suffix - A string or a tuple of strings to try. - start - Optional start position. Default: start of the string. - end - Optional stop position. Default: end of the string. -""" + suffix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. + """ + @overload def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: """Return a copy where all tab characters are expanded using spaces. -If tabsize is not given, a tab size of 8 characters is assumed. -""" + If tabsize is not given, a tab size of 8 characters is assumed. + """ + @overload def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] def find(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. -Optional arguments start and end are interpreted as in slice notation. -Return -1 on failure. -""" + Optional arguments start and end are interpreted as in slice notation. + Return -1 on failure. + """ + @overload def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: """Return a formatted version of the string, using substitutions from args and kwargs. -The substitutions are identified by braces ('{' and '}'). -""" + The substitutions are identified by braces ('{' and '}'). + """ + @overload def format(self, *args: object, **kwargs: object) -> str: ... 
def format_map(self, mapping: _FormatMapMapping, /) -> str: """Return a formatted version of the string, using substitutions from mapping. -The substitutions are identified by braces ('{' and '}'). -""" + The substitutions are identified by braces ('{' and '}'). + """ + def index(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: """Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end]. -Optional arguments start and end are interpreted as in slice notation. -Raises ValueError when the substring is not found. -""" + Optional arguments start and end are interpreted as in slice notation. + Raises ValueError when the substring is not found. + """ + def isalnum(self) -> bool: """Return True if the string is an alpha-numeric string, False otherwise. -A string is alpha-numeric if all characters in the string are alpha-numeric and -there is at least one character in the string. -""" + A string is alpha-numeric if all characters in the string are alpha-numeric and + there is at least one character in the string. + """ + def isalpha(self) -> bool: """Return True if the string is an alphabetic string, False otherwise. -A string is alphabetic if all characters in the string are alphabetic and there -is at least one character in the string. -""" + A string is alphabetic if all characters in the string are alphabetic and there + is at least one character in the string. + """ + def isascii(self) -> bool: """Return True if all characters in the string are ASCII, False otherwise. -ASCII characters have code points in the range U+0000-U+007F. -Empty string is ASCII too. -""" + ASCII characters have code points in the range U+0000-U+007F. + Empty string is ASCII too. + """ + def isdecimal(self) -> bool: """Return True if the string is a decimal string, False otherwise. -A string is a decimal string if all characters in the string are decimal and -there is at least one character in the string. -""" + A string is a decimal string if all characters in the string are decimal and + there is at least one character in the string. + """ + def isdigit(self) -> bool: """Return True if the string is a digit string, False otherwise. -A string is a digit string if all characters in the string are digits and there -is at least one character in the string. -""" + A string is a digit string if all characters in the string are digits and there + is at least one character in the string. + """ + def isidentifier(self) -> bool: """Return True if the string is a valid Python identifier, False otherwise. -Call keyword.iskeyword(s) to test whether string s is a reserved identifier, -such as "def" or "class". -""" + Call keyword.iskeyword(s) to test whether string s is a reserved identifier, + such as "def" or "class". + """ + def islower(self) -> bool: """Return True if the string is a lowercase string, False otherwise. -A string is lowercase if all cased characters in the string are lowercase and -there is at least one cased character in the string. -""" + A string is lowercase if all cased characters in the string are lowercase and + there is at least one cased character in the string. + """ + def isnumeric(self) -> bool: """Return True if the string is a numeric string, False otherwise. -A string is numeric if all characters in the string are numeric and there is at -least one character in the string. -""" + A string is numeric if all characters in the string are numeric and there is at + least one character in the string. 
+ """ + def isprintable(self) -> bool: """Return True if all characters in the string are printable, False otherwise. -A character is printable if repr() may use it in its output. -""" + A character is printable if repr() may use it in its output. + """ + def isspace(self) -> bool: """Return True if the string is a whitespace string, False otherwise. -A string is whitespace if all characters in the string are whitespace and there -is at least one character in the string. -""" + A string is whitespace if all characters in the string are whitespace and there + is at least one character in the string. + """ + def istitle(self) -> bool: """Return True if the string is a title-cased string, False otherwise. -In a title-cased string, upper- and title-case characters may only -follow uncased characters and lowercase characters only cased ones. -""" + In a title-cased string, upper- and title-case characters may only + follow uncased characters and lowercase characters only cased ones. + """ + def isupper(self) -> bool: """Return True if the string is an uppercase string, False otherwise. -A string is uppercase if all cased characters in the string are uppercase and -there is at least one cased character in the string. -""" + A string is uppercase if all cased characters in the string are uppercase and + there is at least one cased character in the string. + """ + @overload def join(self: LiteralString, iterable: Iterable[LiteralString], /) -> LiteralString: """Concatenate any number of strings. -The string whose method is called is inserted in between each given string. -The result is returned as a new string. + The string whose method is called is inserted in between each given string. + The result is returned as a new string. + + Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs' + """ -Example: '.'.join(['ab', 'pq', 'rs']) -> 'ab.pq.rs' -""" @overload def join(self, iterable: Iterable[str], /) -> str: ... # type: ignore[misc] @overload def ljust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: """Return a left-justified string of length width. -Padding is done using the specified fill character (default is a space). -""" + Padding is done using the specified fill character (default is a space). + """ + @overload def ljust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload def lower(self: LiteralString) -> LiteralString: - """Return a copy of the string converted to lowercase. -""" + """Return a copy of the string converted to lowercase.""" + @overload def lower(self) -> str: ... # type: ignore[misc] @overload def lstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: """Return a copy of the string with leading whitespace removed. -If chars is given and not None, remove characters in chars instead. -""" + If chars is given and not None, remove characters in chars instead. + """ + @overload def lstrip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload def partition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: """Partition the string into three parts using the given separator. -This will search for the separator in the string. If the separator is found, -returns a 3-tuple containing the part before the separator, the separator -itself, and the part after it. + This will search for the separator in the string. 
If the separator is found, + returns a 3-tuple containing the part before the separator, the separator + itself, and the part after it. + + If the separator is not found, returns a 3-tuple containing the original string + and two empty strings. + """ -If the separator is not found, returns a 3-tuple containing the original string -and two empty strings. -""" @overload def partition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] if sys.version_info >= (3, 13): @overload - def replace( - self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1 - ) -> LiteralString: + def replace(self: LiteralString, old: LiteralString, new: LiteralString, /, count: SupportsIndex = -1) -> LiteralString: """Return a copy with all occurrences of substring old replaced by new. - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + + If the optional argument count is given, only the first count occurrences are + replaced. + """ -If the optional argument count is given, only the first count occurrences are -replaced. -""" @overload def replace(self, old: str, new: str, /, count: SupportsIndex = -1) -> str: ... # type: ignore[misc] else: @overload - def replace( - self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, / - ) -> LiteralString: + def replace(self: LiteralString, old: LiteralString, new: LiteralString, count: SupportsIndex = -1, /) -> LiteralString: """Return a copy with all occurrences of substring old replaced by new. - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + + If the optional argument count is given, only the first count occurrences are + replaced. + """ -If the optional argument count is given, only the first count occurrences are -replaced. -""" @overload def replace(self, old: str, new: str, count: SupportsIndex = -1, /) -> str: ... # type: ignore[misc] @@ -1163,109 +1188,119 @@ replaced. def removeprefix(self: LiteralString, prefix: LiteralString, /) -> LiteralString: """Return a str with the given prefix string removed if present. -If the string starts with the prefix string, return string[len(prefix):]. -Otherwise, return a copy of the original string. -""" + If the string starts with the prefix string, return string[len(prefix):]. + Otherwise, return a copy of the original string. + """ + @overload def removeprefix(self, prefix: str, /) -> str: ... # type: ignore[misc] @overload def removesuffix(self: LiteralString, suffix: LiteralString, /) -> LiteralString: """Return a str with the given suffix string removed if present. -If the string ends with the suffix string and that suffix is not empty, -return string[:-len(suffix)]. Otherwise, return a copy of the original -string. -""" + If the string ends with the suffix string and that suffix is not empty, + return string[:-len(suffix)]. Otherwise, return a copy of the original + string. + """ + @overload def removesuffix(self, suffix: str, /) -> str: ... # type: ignore[misc] def rfind(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. 
-Optional arguments start and end are interpreted as in slice notation. -Return -1 on failure. -""" + Optional arguments start and end are interpreted as in slice notation. + Return -1 on failure. + """ + def rindex(self, sub: str, start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> int: """Return the highest index in S where substring sub is found, such that sub is contained within S[start:end]. -Optional arguments start and end are interpreted as in slice notation. -Raises ValueError when the substring is not found. -""" + Optional arguments start and end are interpreted as in slice notation. + Raises ValueError when the substring is not found. + """ + @overload def rjust(self: LiteralString, width: SupportsIndex, fillchar: LiteralString = " ", /) -> LiteralString: """Return a right-justified string of length width. -Padding is done using the specified fill character (default is a space). -""" + Padding is done using the specified fill character (default is a space). + """ + @overload def rjust(self, width: SupportsIndex, fillchar: str = " ", /) -> str: ... # type: ignore[misc] @overload def rpartition(self: LiteralString, sep: LiteralString, /) -> tuple[LiteralString, LiteralString, LiteralString]: """Partition the string into three parts using the given separator. -This will search for the separator in the string, starting at the end. If -the separator is found, returns a 3-tuple containing the part before the -separator, the separator itself, and the part after it. + This will search for the separator in the string, starting at the end. If + the separator is found, returns a 3-tuple containing the part before the + separator, the separator itself, and the part after it. + + If the separator is not found, returns a 3-tuple containing two empty strings + and the original string. + """ -If the separator is not found, returns a 3-tuple containing two empty strings -and the original string. -""" @overload def rpartition(self, sep: str, /) -> tuple[str, str, str]: ... # type: ignore[misc] @overload def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: """Return a list of the substrings in the string, using sep as the separator string. - sep - The separator used to split the string. + sep + The separator used to split the string. - When set to None (the default value), will split on any whitespace - character (including \\n \\r \\t \\f and spaces) and will discard - empty strings from the result. - maxsplit - Maximum number of splits. - -1 (the default value) means no limit. + When set to None (the default value), will split on any whitespace + character (including \\n \\r \\t \\f and spaces) and will discard + empty strings from the result. + maxsplit + Maximum number of splits. + -1 (the default value) means no limit. + + Splitting starts at the end of the string and works to the front. + """ -Splitting starts at the end of the string and works to the front. -""" @overload def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload def rstrip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: """Return a copy of the string with trailing whitespace removed. -If chars is given and not None, remove characters in chars instead. -""" + If chars is given and not None, remove characters in chars instead. + """ + @overload def rstrip(self, chars: str | None = None, /) -> str: ... 
# type: ignore[misc] @overload def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: """Return a list of the substrings in the string, using sep as the separator string. - sep - The separator used to split the string. + sep + The separator used to split the string. - When set to None (the default value), will split on any whitespace - character (including \\n \\r \\t \\f and spaces) and will discard - empty strings from the result. - maxsplit - Maximum number of splits. - -1 (the default value) means no limit. + When set to None (the default value), will split on any whitespace + character (including \\n \\r \\t \\f and spaces) and will discard + empty strings from the result. + maxsplit + Maximum number of splits. + -1 (the default value) means no limit. -Splitting starts at the front of the string and works to the end. + Splitting starts at the front of the string and works to the end. + + Note, str.split() is mainly useful for data that has been intentionally + delimited. With natural text that includes punctuation, consider using + the regular expression module. + """ -Note, str.split() is mainly useful for data that has been intentionally -delimited. With natural text that includes punctuation, consider using -the regular expression module. -""" @overload def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] @overload def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: """Return a list of the lines in the string, breaking at line boundaries. -Line breaks are not included in the resulting list unless keepends is given and -true. -""" + Line breaks are not included in the resulting list unless keepends is given and + true. + """ + @overload def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( @@ -1273,59 +1308,64 @@ true. ) -> bool: """Return True if the string starts with the specified prefix, False otherwise. - prefix - A string or a tuple of strings to try. - start - Optional start position. Default: start of the string. - end - Optional stop position. Default: end of the string. -""" + prefix + A string or a tuple of strings to try. + start + Optional start position. Default: start of the string. + end + Optional stop position. Default: end of the string. + """ + @overload def strip(self: LiteralString, chars: LiteralString | None = None, /) -> LiteralString: """Return a copy of the string with leading and trailing whitespace removed. -If chars is given and not None, remove characters in chars instead. -""" + If chars is given and not None, remove characters in chars instead. + """ + @overload def strip(self, chars: str | None = None, /) -> str: ... # type: ignore[misc] @overload def swapcase(self: LiteralString) -> LiteralString: - """Convert uppercase characters to lowercase and lowercase characters to uppercase. -""" + """Convert uppercase characters to lowercase and lowercase characters to uppercase.""" + @overload def swapcase(self) -> str: ... # type: ignore[misc] @overload def title(self: LiteralString) -> LiteralString: """Return a version of the string where each word is titlecased. -More specifically, words start with uppercased characters and all remaining -cased characters have lower case. -""" + More specifically, words start with uppercased characters and all remaining + cased characters have lower case. + """ + @overload def title(self) -> str: ... 
# type: ignore[misc] def translate(self, table: _TranslateTable, /) -> str: """Replace each character in the string using the given translation table. - table - Translation table, which must be a mapping of Unicode ordinals to - Unicode ordinals, strings, or None. + table + Translation table, which must be a mapping of Unicode ordinals to + Unicode ordinals, strings, or None. + + The table must implement lookup/indexing via __getitem__, for instance a + dictionary or list. If this operation raises LookupError, the character is + left untouched. Characters mapped to None are deleted. + """ -The table must implement lookup/indexing via __getitem__, for instance a -dictionary or list. If this operation raises LookupError, the character is -left untouched. Characters mapped to None are deleted. -""" @overload def upper(self: LiteralString) -> LiteralString: - """Return a copy of the string converted to uppercase. -""" + """Return a copy of the string converted to uppercase.""" + @overload def upper(self) -> str: ... # type: ignore[misc] @overload def zfill(self: LiteralString, width: SupportsIndex, /) -> LiteralString: """Pad a numeric string with zeros on the left, to fill a field of the given width. -The string is never truncated. -""" + The string is never truncated. + """ + @overload def zfill(self, width: SupportsIndex, /) -> str: ... # type: ignore[misc] @staticmethod @@ -1333,14 +1373,15 @@ The string is never truncated. def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T], /) -> dict[int, _T]: """Return a translation table usable for str.translate(). -If there is only one argument, it must be a dictionary mapping Unicode -ordinals (integers) or characters to Unicode ordinals, strings or None. -Character keys will be then converted to ordinals. -If there are two arguments, they must be strings of equal length, and -in the resulting dictionary, each character in x will be mapped to the -character at the same position in y. If there is a third argument, it -must be a string, whose characters will be mapped to None in the result. -""" + If there is only one argument, it must be a dictionary mapping Unicode + ordinals (integers) or characters to Unicode ordinals, strings or None. + Character keys will be then converted to ordinals. + If there are two arguments, they must be strings of equal length, and + in the resulting dictionary, each character in x will be mapped to the + character at the same position in y. If there is a third argument, it + must be a string, whose characters will be mapped to None in the result. + """ + @staticmethod @overload def maketrans(x: str, y: str, /) -> dict[int, int]: ... @@ -1349,73 +1390,73 @@ must be a string, whose characters will be mapped to None in the result. def maketrans(x: str, y: str, z: str, /) -> dict[int, int | None]: ... @overload def __add__(self: LiteralString, value: LiteralString, /) -> LiteralString: - """Return self+value. -""" + """Return self+value.""" + @overload def __add__(self, value: str, /) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, key: str, /) -> bool: # type: ignore[override] - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: str, /) -> bool: ... @overload def __getitem__(self: LiteralString, key: SupportsIndex | slice, /) -> LiteralString: - """Return self[key]. 
-""" + """Return self[key].""" + @overload def __getitem__(self, key: SupportsIndex | slice, /) -> str: ... # type: ignore[misc] def __gt__(self, value: str, /) -> bool: ... def __hash__(self) -> int: ... @overload def __iter__(self: LiteralString) -> Iterator[LiteralString]: - """Implement iter(self). -""" + """Implement iter(self).""" + @overload def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, value: str, /) -> bool: ... def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __lt__(self, value: str, /) -> bool: ... @overload def __mod__(self: LiteralString, value: LiteralString | tuple[LiteralString, ...], /) -> LiteralString: - """Return self%value. -""" + """Return self%value.""" + @overload def __mod__(self, value: Any, /) -> str: ... @overload def __mul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: - """Return self*value. -""" + """Return self*value.""" + @overload def __mul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __ne__(self, value: object, /) -> bool: ... @overload def __rmul__(self: LiteralString, value: SupportsIndex, /) -> LiteralString: - """Return value*self. -""" + """Return value*self.""" + @overload def __rmul__(self, value: SupportsIndex, /) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... def __format__(self, format_spec: str, /) -> str: - """Return a formatted version of the string as described by format_spec. -""" + """Return a formatted version of the string as described by format_spec.""" @disjoint_base class bytes(Sequence[int]): """bytes(iterable_of_ints) -> bytes -bytes(string, encoding[, errors]) -> bytes -bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer -bytes(int) -> bytes object of size given by the parameter initialized with null bytes -bytes() -> empty bytes object - -Construct an immutable array of bytes from: - - an iterable yielding integers in range(256) - - a text string encoded using the specified encoding - - any object implementing the buffer API. - - an integer -""" + bytes(string, encoding[, errors]) -> bytes + bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer + bytes(int) -> bytes object of size given by the parameter initialized with null bytes + bytes() -> empty bytes object + + Construct an immutable array of bytes from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - any object implementing the buffer API. + - an integer + """ + @overload def __new__(cls, o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer, /) -> Self: ... @overload @@ -1425,36 +1466,40 @@ Construct an immutable array of bytes from: def capitalize(self) -> bytes: """B.capitalize() -> copy of B -Return a copy of B with only its first character capitalized (ASCII) -and the rest lower-cased. -""" + Return a copy of B with only its first character capitalized (ASCII) + and the rest lower-cased. + """ + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytes: """Return a centered string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def count( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - start - Optional start position. Default: start of the bytes. 
- end - Optional stop position. Default: end of the bytes. -""" + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + """ + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: """Decode the bytes using the codec registered for encoding. - encoding - The encoding with which to decode the bytes. - errors - The error handling scheme to use for the handling of decoding errors. - The default is 'strict' meaning that decoding errors raise a - UnicodeDecodeError. Other possible values are 'ignore' and 'replace' - as well as any other name registered with codecs.register_error that - can handle UnicodeDecodeErrors. -""" + encoding + The encoding with which to decode the bytes. + errors + The error handling scheme to use for the handling of decoding errors. + The default is 'strict' meaning that decoding errors raise a + UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that + can handle UnicodeDecodeErrors. + """ + def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -1464,243 +1509,272 @@ Padding is done using the specified fill character. ) -> bool: """Return True if the bytes ends with the specified suffix, False otherwise. - suffix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. -""" + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + """ + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: """Return a copy where all tab characters are expanded using spaces. -If tabsize is not given, a tab size of 8 characters is assumed. -""" + If tabsize is not given, a tab size of 8 characters is assumed. + """ + def find( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Return -1 on failure. + """ -Return -1 on failure. -""" def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: """Create a string of hexadecimal numbers from a bytes object. - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - -Example: ->>> value = b'\\xb9\\x01\\xef' ->>> value.hex() -'b901ef' ->>> value.hex(':') -'b9:01:ef' ->>> value.hex(':', 2) -'b9:01ef' ->>> value.hex(':', -2) -'b901:ef' -""" + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. 
+ + Example: + >>> value = b'\\xb9\\x01\\xef' + >>> value.hex() + 'b901ef' + >>> value.hex(':') + 'b9:01:ef' + >>> value.hex(':', 2) + 'b9:01ef' + >>> value.hex(':', -2) + 'b901:ef' + """ + def index( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Raise ValueError if the subsection is not found. + """ -Raise ValueError if the subsection is not found. -""" def isalnum(self) -> bool: """B.isalnum() -> bool -Return True if all characters in B are alphanumeric -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are alphanumeric + and there is at least one character in B, False otherwise. + """ + def isalpha(self) -> bool: """B.isalpha() -> bool -Return True if all characters in B are alphabetic -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are alphabetic + and there is at least one character in B, False otherwise. + """ + def isascii(self) -> bool: """B.isascii() -> bool -Return True if B is empty or all characters in B are ASCII, -False otherwise. -""" + Return True if B is empty or all characters in B are ASCII, + False otherwise. + """ + def isdigit(self) -> bool: """B.isdigit() -> bool -Return True if all characters in B are digits -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are digits + and there is at least one character in B, False otherwise. + """ + def islower(self) -> bool: """B.islower() -> bool -Return True if all cased characters in B are lowercase and there is -at least one cased character in B, False otherwise. -""" + Return True if all cased characters in B are lowercase and there is + at least one cased character in B, False otherwise. + """ + def isspace(self) -> bool: """B.isspace() -> bool -Return True if all characters in B are whitespace -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are whitespace + and there is at least one character in B, False otherwise. + """ + def istitle(self) -> bool: """B.istitle() -> bool -Return True if B is a titlecased string and there is at least one -character in B, i.e. uppercase characters may only follow uncased -characters and lowercase characters only cased ones. Return False -otherwise. -""" + Return True if B is a titlecased string and there is at least one + character in B, i.e. uppercase characters may only follow uncased + characters and lowercase characters only cased ones. Return False + otherwise. + """ + def isupper(self) -> bool: """B.isupper() -> bool -Return True if all cased characters in B are uppercase and there is -at least one cased character in B, False otherwise. -""" + Return True if all cased characters in B are uppercase and there is + at least one cased character in B, False otherwise. + """ + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytes: """Concatenate any number of bytes objects. -The bytes whose method is called is inserted in between each pair. + The bytes whose method is called is inserted in between each pair. 
-The result is returned as a new bytes object. + The result is returned as a new bytes object. + + Example: b'.'.join([b'ab', b'pq', b'rs']) -> b'ab.pq.rs'. + """ -Example: b'.'.join([b'ab', b'pq', b'rs']) -> b'ab.pq.rs'. -""" def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: """Return a left-justified string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def lower(self) -> bytes: """B.lower() -> copy of B -Return a copy of B with all ASCII characters converted to lowercase. -""" + Return a copy of B with all ASCII characters converted to lowercase. + """ + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: """Strip leading bytes contained in the argument. -If the argument is omitted or None, strip leading ASCII whitespace. -""" + If the argument is omitted or None, strip leading ASCII whitespace. + """ + def partition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: """Partition the bytes into three parts using the given separator. -This will search for the separator sep in the bytes. If the separator is found, -returns a 3-tuple containing the part before the separator, the separator -itself, and the part after it. + This will search for the separator sep in the bytes. If the separator is found, + returns a 3-tuple containing the part before the separator, the separator + itself, and the part after it. + + If the separator is not found, returns a 3-tuple containing the original bytes + object and two empty bytes objects. + """ -If the separator is not found, returns a 3-tuple containing the original bytes -object and two empty bytes objects. -""" def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytes: """Return a copy with all occurrences of substring old replaced by new. - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + + If the optional argument count is given, only the first count occurrences are + replaced. + """ -If the optional argument count is given, only the first count occurrences are -replaced. -""" def removeprefix(self, prefix: ReadableBuffer, /) -> bytes: """Return a bytes object with the given prefix string removed if present. -If the bytes starts with the prefix string, return bytes[len(prefix):]. -Otherwise, return a copy of the original bytes. -""" + If the bytes starts with the prefix string, return bytes[len(prefix):]. + Otherwise, return a copy of the original bytes. + """ + def removesuffix(self, suffix: ReadableBuffer, /) -> bytes: """Return a bytes object with the given suffix string removed if present. -If the bytes ends with the suffix string and that suffix is not empty, -return bytes[:-len(prefix)]. Otherwise, return a copy of the original -bytes. -""" + If the bytes ends with the suffix string and that suffix is not empty, + return bytes[:-len(prefix)]. Otherwise, return a copy of the original + bytes. + """ + def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. 
+ start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Return -1 on failure. + """ -Return -1 on failure. -""" def rindex( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Raise ValueError if the subsection is not found. + """ -Raise ValueError if the subsection is not found. -""" def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytes: """Return a right-justified string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytes, bytes, bytes]: """Partition the bytes into three parts using the given separator. -This will search for the separator sep in the bytes, starting at the end. If -the separator is found, returns a 3-tuple containing the part before the -separator, the separator itself, and the part after it. + This will search for the separator sep in the bytes, starting at the end. If + the separator is found, returns a 3-tuple containing the part before the + separator, the separator itself, and the part after it. + + If the separator is not found, returns a 3-tuple containing two empty bytes + objects and the original bytes object. + """ -If the separator is not found, returns a 3-tuple containing two empty bytes -objects and the original bytes object. -""" def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: """Return a list of the sections in the bytes, using sep as the delimiter. - sep - The delimiter according which to split the bytes. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. + sep + The delimiter according which to split the bytes. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + + Splitting is done starting at the end of the bytes and working to the front. + """ -Splitting is done starting at the end of the bytes and working to the front. -""" def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytes: """Strip trailing bytes contained in the argument. -If the argument is omitted or None, strip trailing ASCII whitespace. -""" + If the argument is omitted or None, strip trailing ASCII whitespace. + """ + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: """Return a list of the sections in the bytes, using sep as the delimiter. - sep - The delimiter according which to split the bytes. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. -""" + sep + The delimiter according which to split the bytes. 
+ None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + """ + def splitlines(self, keepends: bool = False) -> list[bytes]: """Return a list of the lines in the bytes, breaking at line boundaries. -Line breaks are not included in the resulting list unless keepends is given and -true. -""" + Line breaks are not included in the resulting list unless keepends is given and + true. + """ + def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -1710,94 +1784,102 @@ true. ) -> bool: """Return True if the bytes starts with the specified prefix, False otherwise. - prefix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. -""" + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + """ + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytes: """Strip leading and trailing bytes contained in the argument. -If the argument is omitted or None, strip leading and trailing ASCII whitespace. -""" + If the argument is omitted or None, strip leading and trailing ASCII whitespace. + """ + def swapcase(self) -> bytes: """B.swapcase() -> copy of B -Return a copy of B with uppercase ASCII characters converted -to lowercase ASCII and vice versa. -""" + Return a copy of B with uppercase ASCII characters converted + to lowercase ASCII and vice versa. + """ + def title(self) -> bytes: """B.title() -> copy of B -Return a titlecased version of B, i.e. ASCII words start with uppercase -characters, all remaining cased characters have lowercase. -""" + Return a titlecased version of B, i.e. ASCII words start with uppercase + characters, all remaining cased characters have lowercase. + """ + def translate(self, table: ReadableBuffer | None, /, delete: ReadableBuffer = b"") -> bytes: """Return a copy with each character mapped by the given translation table. - table - Translation table, which must be a bytes object of length 256. + table + Translation table, which must be a bytes object of length 256. + + All characters occurring in the optional argument delete are removed. + The remaining characters are mapped through the given translation table. + """ -All characters occurring in the optional argument delete are removed. -The remaining characters are mapped through the given translation table. -""" def upper(self) -> bytes: """B.upper() -> copy of B -Return a copy of B with all ASCII characters converted to uppercase. -""" + Return a copy of B with all ASCII characters converted to uppercase. + """ + def zfill(self, width: SupportsIndex, /) -> bytes: """Pad a numeric string with zeros on the left, to fill a field of the given width. -The original string is never truncated. -""" + The original string is never truncated. + """ + @classmethod def fromhex(cls, string: str, /) -> Self: """Create a bytes object from a string of hexadecimal numbers. -Spaces between two numbers are accepted. -Example: bytes.fromhex('B9 01EF') -> b'\\\\xb9\\\\x01\\\\xef'. -""" + Spaces between two numbers are accepted. + Example: bytes.fromhex('B9 01EF') -> b'\\\\xb9\\\\x01\\\\xef'. 
+ """ + @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: """Return a translation table usable for the bytes or bytearray translate method. -The returned table will be one where each byte in frm is mapped to the byte at -the same position in to. + The returned table will be one where each byte in frm is mapped to the byte at + the same position in to. + + The bytes objects frm and to must be of the same length. + """ -The bytes objects frm and to must be of the same length. -""" def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __iter__(self) -> Iterator[int]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __hash__(self) -> int: ... @overload def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> bytes: ... def __add__(self, value: ReadableBuffer, /) -> bytes: - """Return self+value. -""" + """Return self+value.""" + def __mul__(self, value: SupportsIndex, /) -> bytes: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: SupportsIndex, /) -> bytes: - """Return value*self. -""" + """Return value*self.""" + def __mod__(self, value: Any, /) -> bytes: - """Return self%value. -""" + """Return self%value.""" # Incompatible with Sequence.__contains__ def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: bytes, /) -> bool: ... @@ -1807,28 +1889,27 @@ The bytes objects frm and to must be of the same length. def __getnewargs__(self) -> tuple[bytes]: ... if sys.version_info >= (3, 11): def __bytes__(self) -> bytes: - """Convert this value to exact type bytes. -""" + """Convert this value to exact type bytes.""" def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" @disjoint_base class bytearray(MutableSequence[int]): """bytearray(iterable_of_ints) -> bytearray -bytearray(string, encoding[, errors]) -> bytearray -bytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer -bytearray(int) -> bytes array of size given by the parameter initialized with null bytes -bytearray() -> empty bytes array - -Construct a mutable bytearray object from: - - an iterable yielding integers in range(256) - - a text string encoded using the specified encoding - - a bytes or a buffer object - - any object implementing the buffer API. - - an integer -""" + bytearray(string, encoding[, errors]) -> bytearray + bytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer + bytearray(int) -> bytes array of size given by the parameter initialized with null bytes + bytearray() -> empty bytes array + + Construct a mutable bytearray object from: + - an iterable yielding integers in range(256) + - a text string encoded using the specified encoding + - a bytes or a buffer object + - any object implementing the buffer API. + - an integer + """ + @overload def __init__(self) -> None: ... @overload @@ -1838,45 +1919,50 @@ Construct a mutable bytearray object from: def append(self, item: SupportsIndex, /) -> None: """Append a single item to the end of the bytearray. - item - The item to be appended. 
-""" + item + The item to be appended. + """ + def capitalize(self) -> bytearray: """B.capitalize() -> copy of B -Return a copy of B with only its first character capitalized (ASCII) -and the rest lower-cased. -""" + Return a copy of B with only its first character capitalized (ASCII) + and the rest lower-cased. + """ + def center(self, width: SupportsIndex, fillchar: bytes = b" ", /) -> bytearray: """Return a centered string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def count( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. -""" + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + """ + def copy(self) -> bytearray: - """Return a copy of B. -""" + """Return a copy of B.""" + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: """Decode the bytearray using the codec registered for encoding. - encoding - The encoding with which to decode the bytearray. - errors - The error handling scheme to use for the handling of decoding errors. - The default is 'strict' meaning that decoding errors raise a - UnicodeDecodeError. Other possible values are 'ignore' and 'replace' - as well as any other name registered with codecs.register_error that - can handle UnicodeDecodeErrors. -""" + encoding + The encoding with which to decode the bytearray. + errors + The error handling scheme to use for the handling of decoding errors. + The default is 'strict' meaning that decoding errors raise a + UnicodeDecodeError. Other possible values are 'ignore' and 'replace' + as well as any other name registered with codecs.register_error that + can handle UnicodeDecodeErrors. + """ + def endswith( self, suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -1886,272 +1972,305 @@ Padding is done using the specified fill character. ) -> bool: """Return True if the bytearray ends with the specified suffix, False otherwise. - suffix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytearray. - end - Optional stop position. Default: end of the bytearray. -""" + suffix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. + """ + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: """Return a copy where all tab characters are expanded using spaces. -If tabsize is not given, a tab size of 8 characters is assumed. -""" + If tabsize is not given, a tab size of 8 characters is assumed. + """ + def extend(self, iterable_of_ints: Iterable[SupportsIndex], /) -> None: """Append all the items from the iterator or sequence to the end of the bytearray. - iterable_of_ints - The iterable of items to append. -""" + iterable_of_ints + The iterable of items to append. + """ + def find( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - start - Optional start position. Default: start of the bytes. 
- end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Return -1 on failure. + """ -Return -1 on failure. -""" def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: """Create a string of hexadecimal numbers from a bytearray object. - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - -Example: ->>> value = bytearray([0xb9, 0x01, 0xef]) ->>> value.hex() -'b901ef' ->>> value.hex(':') -'b9:01:ef' ->>> value.hex(':', 2) -'b9:01ef' ->>> value.hex(':', -2) -'b901:ef' -""" + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + + Example: + >>> value = bytearray([0xb9, 0x01, 0xef]) + >>> value.hex() + 'b901ef' + >>> value.hex(':') + 'b9:01:ef' + >>> value.hex(':', 2) + 'b9:01ef' + >>> value.hex(':', -2) + 'b901:ef' + """ + def index( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Raise ValueError if the subsection is not found. + """ -Raise ValueError if the subsection is not found. -""" def insert(self, index: SupportsIndex, item: SupportsIndex, /) -> None: """Insert a single item into the bytearray before the given index. - index - The index where the value is to be inserted. - item - The item to be inserted. -""" + index + The index where the value is to be inserted. + item + The item to be inserted. + """ + def isalnum(self) -> bool: """B.isalnum() -> bool -Return True if all characters in B are alphanumeric -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are alphanumeric + and there is at least one character in B, False otherwise. + """ + def isalpha(self) -> bool: """B.isalpha() -> bool -Return True if all characters in B are alphabetic -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are alphabetic + and there is at least one character in B, False otherwise. + """ + def isascii(self) -> bool: """B.isascii() -> bool -Return True if B is empty or all characters in B are ASCII, -False otherwise. -""" + Return True if B is empty or all characters in B are ASCII, + False otherwise. + """ + def isdigit(self) -> bool: """B.isdigit() -> bool -Return True if all characters in B are digits -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are digits + and there is at least one character in B, False otherwise. + """ + def islower(self) -> bool: """B.islower() -> bool -Return True if all cased characters in B are lowercase and there is -at least one cased character in B, False otherwise. -""" + Return True if all cased characters in B are lowercase and there is + at least one cased character in B, False otherwise. 
+ """ + def isspace(self) -> bool: """B.isspace() -> bool -Return True if all characters in B are whitespace -and there is at least one character in B, False otherwise. -""" + Return True if all characters in B are whitespace + and there is at least one character in B, False otherwise. + """ + def istitle(self) -> bool: """B.istitle() -> bool -Return True if B is a titlecased string and there is at least one -character in B, i.e. uppercase characters may only follow uncased -characters and lowercase characters only cased ones. Return False -otherwise. -""" + Return True if B is a titlecased string and there is at least one + character in B, i.e. uppercase characters may only follow uncased + characters and lowercase characters only cased ones. Return False + otherwise. + """ + def isupper(self) -> bool: """B.isupper() -> bool -Return True if all cased characters in B are uppercase and there is -at least one cased character in B, False otherwise. -""" + Return True if all cased characters in B are uppercase and there is + at least one cased character in B, False otherwise. + """ + def join(self, iterable_of_bytes: Iterable[ReadableBuffer], /) -> bytearray: """Concatenate any number of bytes/bytearray objects. -The bytearray whose method is called is inserted in between each pair. + The bytearray whose method is called is inserted in between each pair. + + The result is returned as a new bytearray object. + """ -The result is returned as a new bytearray object. -""" def ljust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: """Return a left-justified string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def lower(self) -> bytearray: """B.lower() -> copy of B -Return a copy of B with all ASCII characters converted to lowercase. -""" + Return a copy of B with all ASCII characters converted to lowercase. + """ + def lstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: """Strip leading bytes contained in the argument. -If the argument is omitted or None, strip leading ASCII whitespace. -""" + If the argument is omitted or None, strip leading ASCII whitespace. + """ + def partition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: """Partition the bytearray into three parts using the given separator. -This will search for the separator sep in the bytearray. If the separator is -found, returns a 3-tuple containing the part before the separator, the -separator itself, and the part after it as new bytearray objects. + This will search for the separator sep in the bytearray. If the separator is + found, returns a 3-tuple containing the part before the separator, the + separator itself, and the part after it as new bytearray objects. + + If the separator is not found, returns a 3-tuple containing the copy of the + original bytearray object and two empty bytearray objects. + """ -If the separator is not found, returns a 3-tuple containing the copy of the -original bytearray object and two empty bytearray objects. -""" def pop(self, index: int = -1, /) -> int: """Remove and return a single item from B. - index - The index from where to remove the item. - -1 (the default value) means remove the last item. + index + The index from where to remove the item. + -1 (the default value) means remove the last item. + + If no index argument is given, will pop the last item. + """ -If no index argument is given, will pop the last item. 
-""" def remove(self, value: int, /) -> None: """Remove the first occurrence of a value in the bytearray. - value - The value to remove. -""" + value + The value to remove. + """ + def removeprefix(self, prefix: ReadableBuffer, /) -> bytearray: """Return a bytearray with the given prefix string removed if present. -If the bytearray starts with the prefix string, return -bytearray[len(prefix):]. Otherwise, return a copy of the original -bytearray. -""" + If the bytearray starts with the prefix string, return + bytearray[len(prefix):]. Otherwise, return a copy of the original + bytearray. + """ + def removesuffix(self, suffix: ReadableBuffer, /) -> bytearray: """Return a bytearray with the given suffix string removed if present. -If the bytearray ends with the suffix string and that suffix is not -empty, return bytearray[:-len(suffix)]. Otherwise, return a copy of -the original bytearray. -""" + If the bytearray ends with the suffix string and that suffix is not + empty, return bytearray[:-len(suffix)]. Otherwise, return a copy of + the original bytearray. + """ + def replace(self, old: ReadableBuffer, new: ReadableBuffer, count: SupportsIndex = -1, /) -> bytearray: """Return a copy with all occurrences of substring old replaced by new. - count - Maximum number of occurrences to replace. - -1 (the default value) means replace all occurrences. + count + Maximum number of occurrences to replace. + -1 (the default value) means replace all occurrences. + + If the optional argument count is given, only the first count occurrences are + replaced. + """ -If the optional argument count is given, only the first count occurrences are -replaced. -""" def rfind( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Return -1 on failure. + """ -Return -1 on failure. -""" def rindex( self, sub: ReadableBuffer | SupportsIndex, start: SupportsIndex | None = None, end: SupportsIndex | None = None, / ) -> int: """Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end]. - start - Optional start position. Default: start of the bytes. - end - Optional stop position. Default: end of the bytes. + start + Optional start position. Default: start of the bytes. + end + Optional stop position. Default: end of the bytes. + + Raise ValueError if the subsection is not found. + """ -Raise ValueError if the subsection is not found. -""" def rjust(self, width: SupportsIndex, fillchar: bytes | bytearray = b" ", /) -> bytearray: """Return a right-justified string of length width. -Padding is done using the specified fill character. -""" + Padding is done using the specified fill character. + """ + def rpartition(self, sep: ReadableBuffer, /) -> tuple[bytearray, bytearray, bytearray]: """Partition the bytearray into three parts using the given separator. -This will search for the separator sep in the bytearray, starting at the end. -If the separator is found, returns a 3-tuple containing the part before the -separator, the separator itself, and the part after it as new bytearray -objects. 
+ This will search for the separator sep in the bytearray, starting at the end. + If the separator is found, returns a 3-tuple containing the part before the + separator, the separator itself, and the part after it as new bytearray + objects. + + If the separator is not found, returns a 3-tuple containing two empty bytearray + objects and the copy of the original bytearray object. + """ -If the separator is not found, returns a 3-tuple containing two empty bytearray -objects and the copy of the original bytearray object. -""" def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: """Return a list of the sections in the bytearray, using sep as the delimiter. - sep - The delimiter according which to split the bytearray. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. + sep + The delimiter according which to split the bytearray. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + + Splitting is done starting at the end of the bytearray and working to the front. + """ -Splitting is done starting at the end of the bytearray and working to the front. -""" def rstrip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: """Strip trailing bytes contained in the argument. -If the argument is omitted or None, strip trailing ASCII whitespace. -""" + If the argument is omitted or None, strip trailing ASCII whitespace. + """ + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: """Return a list of the sections in the bytearray, using sep as the delimiter. - sep - The delimiter according which to split the bytearray. - None (the default value) means split on ASCII whitespace characters - (space, tab, return, newline, formfeed, vertical tab). - maxsplit - Maximum number of splits to do. - -1 (the default value) means no limit. -""" + sep + The delimiter according which to split the bytearray. + None (the default value) means split on ASCII whitespace characters + (space, tab, return, newline, formfeed, vertical tab). + maxsplit + Maximum number of splits to do. + -1 (the default value) means no limit. + """ + def splitlines(self, keepends: bool = False) -> list[bytearray]: """Return a list of the lines in the bytearray, breaking at line boundaries. -Line breaks are not included in the resulting list unless keepends is given and -true. -""" + Line breaks are not included in the resulting list unless keepends is given and + true. + """ + def startswith( self, prefix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -2161,110 +2280,116 @@ true. ) -> bool: """Return True if the bytearray starts with the specified prefix, False otherwise. - prefix - A bytes or a tuple of bytes to try. - start - Optional start position. Default: start of the bytearray. - end - Optional stop position. Default: end of the bytearray. -""" + prefix + A bytes or a tuple of bytes to try. + start + Optional start position. Default: start of the bytearray. + end + Optional stop position. Default: end of the bytearray. + """ + def strip(self, bytes: ReadableBuffer | None = None, /) -> bytearray: """Strip leading and trailing bytes contained in the argument. 
-If the argument is omitted or None, strip leading and trailing ASCII whitespace. -""" + If the argument is omitted or None, strip leading and trailing ASCII whitespace. + """ + def swapcase(self) -> bytearray: """B.swapcase() -> copy of B -Return a copy of B with uppercase ASCII characters converted -to lowercase ASCII and vice versa. -""" + Return a copy of B with uppercase ASCII characters converted + to lowercase ASCII and vice versa. + """ + def title(self) -> bytearray: """B.title() -> copy of B -Return a titlecased version of B, i.e. ASCII words start with uppercase -characters, all remaining cased characters have lowercase. -""" + Return a titlecased version of B, i.e. ASCII words start with uppercase + characters, all remaining cased characters have lowercase. + """ + def translate(self, table: ReadableBuffer | None, /, delete: bytes = b"") -> bytearray: """Return a copy with each character mapped by the given translation table. - table - Translation table, which must be a bytes object of length 256. + table + Translation table, which must be a bytes object of length 256. + + All characters occurring in the optional argument delete are removed. + The remaining characters are mapped through the given translation table. + """ -All characters occurring in the optional argument delete are removed. -The remaining characters are mapped through the given translation table. -""" def upper(self) -> bytearray: """B.upper() -> copy of B -Return a copy of B with all ASCII characters converted to uppercase. -""" + Return a copy of B with all ASCII characters converted to uppercase. + """ + def zfill(self, width: SupportsIndex, /) -> bytearray: """Pad a numeric string with zeros on the left, to fill a field of the given width. -The original string is never truncated. -""" + The original string is never truncated. + """ + @classmethod def fromhex(cls, string: str, /) -> Self: """Create a bytearray object from a string of hexadecimal numbers. -Spaces between two numbers are accepted. -Example: bytearray.fromhex('B9 01EF') -> bytearray(b'\\\\xb9\\\\x01\\\\xef') -""" + Spaces between two numbers are accepted. + Example: bytearray.fromhex('B9 01EF') -> bytearray(b'\\\\xb9\\\\x01\\\\xef') + """ + @staticmethod def maketrans(frm: ReadableBuffer, to: ReadableBuffer, /) -> bytes: """Return a translation table usable for the bytes or bytearray translate method. -The returned table will be one where each byte in frm is mapped to the byte at -the same position in to. + The returned table will be one where each byte in frm is mapped to the byte at + the same position in to. + + The bytes objects frm and to must be of the same length. + """ -The bytes objects frm and to must be of the same length. -""" def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __iter__(self) -> Iterator[int]: - """Implement iter(self). -""" + """Implement iter(self).""" __hash__: ClassVar[None] # type: ignore[assignment] @overload def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> bytearray: ... @overload def __setitem__(self, key: SupportsIndex, value: SupportsIndex, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: Iterable[SupportsIndex] | bytes, /) -> None: ... def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key]. 
-""" + """Delete self[key].""" + def __add__(self, value: ReadableBuffer, /) -> bytearray: - """Return self+value. -""" + """Return self+value.""" # The superclass wants us to accept Iterable[int], but that fails at runtime. def __iadd__(self, value: ReadableBuffer, /) -> Self: # type: ignore[override] - """Implement self+=value. -""" + """Implement self+=value.""" + def __mul__(self, value: SupportsIndex, /) -> bytearray: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: SupportsIndex, /) -> bytearray: - """Return value*self. -""" + """Return value*self.""" + def __imul__(self, value: SupportsIndex, /) -> Self: - """Implement self*=value. -""" + """Implement self*=value.""" + def __mod__(self, value: Any, /) -> bytes: - """Return self%value. -""" + """Return self%value.""" # Incompatible with Sequence.__contains__ def __contains__(self, key: SupportsIndex | ReadableBuffer, /) -> bool: # type: ignore[override] - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __lt__(self, value: ReadableBuffer, /) -> bool: ... @@ -2274,21 +2399,21 @@ The bytes objects frm and to must be of the same length. def __alloc__(self) -> int: """B.__alloc__() -> int -Return the number of bytes actually allocated. -""" + Return the number of bytes actually allocated. + """ + def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" + def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object. -""" + """Release the buffer object that exposes the underlying memory of the object.""" if sys.version_info >= (3, 14): def resize(self, size: int, /) -> None: """Resize the internal buffer of bytearray to len. - size - New size to resize to.. -""" + size + New size to resize to.. + """ _IntegerFormats: TypeAlias = Literal[ "b", "B", "@b", "@B", "h", "H", "@h", "@H", "i", "I", "@i", "@I", "l", "L", "@l", "@L", "q", "Q", "@q", "@Q", "P", "@P" @@ -2296,61 +2421,66 @@ _IntegerFormats: TypeAlias = Literal[ @final class memoryview(Sequence[_I]): - """Create a new memoryview object which references the given object. -""" + """Create a new memoryview object which references the given object.""" + @property def format(self) -> str: """A string containing the format (in struct module style) - for each element in the view. -""" + for each element in the view. + """ + @property def itemsize(self) -> int: - """The size in bytes of each element of the memoryview. -""" + """The size in bytes of each element of the memoryview.""" + @property def shape(self) -> tuple[int, ...] | None: """A tuple of ndim integers giving the shape of the memory - as an N-dimensional array. -""" + as an N-dimensional array. + """ + @property def strides(self) -> tuple[int, ...] | None: """A tuple of ndim integers giving the size in bytes to access - each element for each dimension of the array. -""" + each element for each dimension of the array. + """ + @property def suboffsets(self) -> tuple[int, ...] | None: - """A tuple of integers used internally for PIL-style arrays. -""" + """A tuple of integers used internally for PIL-style arrays.""" + @property def readonly(self) -> bool: - """A bool indicating whether the memory is read only. 
-""" + """A bool indicating whether the memory is read only.""" + @property def ndim(self) -> int: """An integer indicating how many dimensions of a multi-dimensional - array the memory represents. -""" + array the memory represents. + """ + @property def obj(self) -> ReadableBuffer: - """The underlying object of the memoryview. -""" + """The underlying object of the memoryview.""" + @property def c_contiguous(self) -> bool: - """A bool indicating whether the memory is C contiguous. -""" + """A bool indicating whether the memory is C contiguous.""" + @property def f_contiguous(self) -> bool: - """A bool indicating whether the memory is Fortran contiguous. -""" + """A bool indicating whether the memory is Fortran contiguous.""" + @property def contiguous(self) -> bool: - """A bool indicating whether the memory is contiguous. -""" + """A bool indicating whether the memory is contiguous.""" + @property def nbytes(self) -> int: """The amount of space in bytes that the array would use in - a contiguous representation. -""" + a contiguous representation. + """ + def __new__(cls, obj: ReadableBuffer) -> Self: ... def __enter__(self) -> Self: ... def __exit__( @@ -2360,12 +2490,12 @@ class memoryview(Sequence[_I]): exc_tb: TracebackType | None, /, ) -> None: - """Release the underlying buffer exposed by the memoryview object. -""" + """Release the underlying buffer exposed by the memoryview object.""" + @overload def cast(self, format: Literal["c", "@c"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[bytes]: - """Cast a memoryview to a new format or shape. -""" + """Cast a memoryview to a new format or shape.""" + @overload def cast(self, format: Literal["f", "@f", "d", "@d"], shape: list[int] | tuple[int, ...] = ...) -> memoryview[float]: ... @overload @@ -2374,147 +2504,146 @@ class memoryview(Sequence[_I]): def cast(self, format: _IntegerFormats, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... @overload def __getitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], /) -> _I: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> memoryview[_I]: ... def __contains__(self, x: object, /) -> bool: ... def __iter__(self) -> Iterator[_I]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @overload def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: SupportsIndex | tuple[SupportsIndex, ...], value: _I, /) -> None: ... if sys.version_info >= (3, 10): def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: """Return the data in the buffer as a byte string. -Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the -original array is converted to C or Fortran order. For contiguous views, -'A' returns an exact copy of the physical memory. In particular, in-memory -Fortran order is preserved. For non-contiguous views, the data is converted -to C first. order=None is the same as order='C'. -""" + Order can be {'C', 'F', 'A'}. When order is 'C' or 'F', the data of the + original array is converted to C or Fortran order. For contiguous views, + 'A' returns an exact copy of the physical memory. In particular, in-memory + Fortran order is preserved. For non-contiguous views, the data is converted + to C first. 
order=None is the same as order='C'. + """ else: def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: """Return the data in the buffer as a byte string. Order can be {'C', 'F', 'A'}. -When order is 'C' or 'F', the data of the original array is converted to C or -Fortran order. For contiguous views, 'A' returns an exact copy of the physical -memory. In particular, in-memory Fortran order is preserved. For non-contiguous -views, the data is converted to C first. order=None is the same as order='C'. -""" + When order is 'C' or 'F', the data of the original array is converted to C or + Fortran order. For contiguous views, 'A' returns an exact copy of the physical + memory. In particular, in-memory Fortran order is preserved. For non-contiguous + views, the data is converted to C first. order=None is the same as order='C'. + """ def tolist(self) -> list[int]: - """Return the data in the buffer as a list of elements. -""" + """Return the data in the buffer as a list of elements.""" + def toreadonly(self) -> memoryview: - """Return a readonly version of the memoryview. -""" + """Return a readonly version of the memoryview.""" + def release(self) -> None: - """Release the underlying buffer exposed by the memoryview object. -""" + """Release the underlying buffer exposed by the memoryview object.""" + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = 1) -> str: """Return the data in the buffer as a str of hexadecimal numbers. - sep - An optional single character or byte to separate hex bytes. - bytes_per_sep - How many bytes between separators. Positive values count from the - right, negative values count from the left. - -Example: ->>> value = memoryview(b'\\xb9\\x01\\xef') ->>> value.hex() -'b901ef' ->>> value.hex(':') -'b9:01:ef' ->>> value.hex(':', 2) -'b9:01ef' ->>> value.hex(':', -2) -'b901:ef' -""" + sep + An optional single character or byte to separate hex bytes. + bytes_per_sep + How many bytes between separators. Positive values count from the + right, negative values count from the left. + + Example: + >>> value = memoryview(b'\\xb9\\x01\\xef') + >>> value.hex() + 'b901ef' + >>> value.hex(':') + 'b9:01:ef' + >>> value.hex(':', 2) + 'b9:01ef' + >>> value.hex(':', -2) + 'b901:ef' + """ + def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" - def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" + def __release_buffer__(self, buffer: memoryview, /) -> None: + """Release the buffer object that exposes the underlying memory of the object.""" # These are inherited from the Sequence ABC, but don't actually exist on memoryview. # See https://github.com/python/cpython/issues/125420 index: ClassVar[None] # type: ignore[assignment] count: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @final class bool(int): """Returns True when the argument is true, False otherwise. -The builtins True and False are the only two instances of the class bool. -The class bool is a subclass of the class int, and cannot be subclassed. -""" + The builtins True and False are the only two instances of the class bool. + The class bool is a subclass of the class int, and cannot be subclassed. 
+ """ + def __new__(cls, o: object = False, /) -> Self: ... # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload def __and__(self, value: bool, /) -> bool: - """Return self&value. -""" + """Return self&value.""" + @overload def __and__(self, value: int, /) -> int: ... @overload def __or__(self, value: bool, /) -> bool: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: int, /) -> int: ... @overload def __xor__(self, value: bool, /) -> bool: - """Return self^value. -""" + """Return self^value.""" + @overload def __xor__(self, value: int, /) -> int: ... @overload def __rand__(self, value: bool, /) -> bool: - """Return value&self. -""" + """Return value&self.""" + @overload def __rand__(self, value: int, /) -> int: ... @overload def __ror__(self, value: bool, /) -> bool: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: int, /) -> int: ... @overload def __rxor__(self, value: bool, /) -> bool: - """Return value^self. -""" + """Return value^self.""" + @overload def __rxor__(self, value: int, /) -> int: ... def __getnewargs__(self) -> tuple[int]: ... @deprecated("Will throw an error in Python 3.16. Use `not` for logical negation of bools instead.") def __invert__(self) -> int: - """~self -""" + """~self""" @final class slice(Generic[_StartT_co, _StopT_co, _StepT_co]): """slice(stop) -slice(start, stop[, step]) + slice(start, stop[, step]) + + Create a slice object. This is used for extended slicing (e.g. a[0:10:2]). + """ -Create a slice object. This is used for extended slicing (e.g. a[0:10:2]). -""" @property def start(self) -> _StartT_co: ... @property @@ -2554,37 +2683,38 @@ Create a slice object. This is used for extended slicing (e.g. a[0:10:2]). def indices(self, len: SupportsIndex, /) -> tuple[int, int, int]: """S.indices(len) -> (start, stop, stride) -Assuming a sequence of length len, calculate the start and stop -indices, and the stride length of the extended slice described by -S. Out of bounds indices are clipped in a manner consistent with the -handling of normal slices. -""" + Assuming a sequence of length len, calculate the start and stop + indices, and the stride length of the extended slice described by + S. Out of bounds indices are clipped in a manner consistent with the + handling of normal slices. + """ @disjoint_base class tuple(Sequence[_T_co]): """Built-in immutable sequence. -If no argument is given, the constructor returns an empty tuple. -If iterable is specified the tuple is initialized from iterable's items. + If no argument is given, the constructor returns an empty tuple. + If iterable is specified the tuple is initialized from iterable's items. + + If the argument is a tuple, the return value is the same object. + """ -If the argument is a tuple, the return value is the same object. -""" def __new__(cls, iterable: Iterable[_T_co] = (), /) -> Self: ... def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __contains__(self, key: object, /) -> bool: - """Return bool(key in self). -""" + """Return bool(key in self).""" + @overload def __getitem__(self, key: SupportsIndex, /) -> _T_co: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: - """Implement iter(self). 
-""" + """Implement iter(self).""" + def __lt__(self, value: tuple[_T_co, ...], /) -> bool: ... def __le__(self, value: tuple[_T_co, ...], /) -> bool: ... def __gt__(self, value: tuple[_T_co, ...], /) -> bool: ... @@ -2593,27 +2723,27 @@ If the argument is a tuple, the return value is the same object. def __hash__(self) -> int: ... @overload def __add__(self, value: tuple[_T_co, ...], /) -> tuple[_T_co, ...]: - """Return self+value. -""" + """Return self+value.""" + @overload def __add__(self, value: tuple[_T, ...], /) -> tuple[_T_co | _T, ...]: ... def __mul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: SupportsIndex, /) -> tuple[_T_co, ...]: - """Return value*self. -""" + """Return value*self.""" + def count(self, value: Any, /) -> int: - """Return number of occurrences of value. -""" + """Return number of occurrences of value.""" + def index(self, value: Any, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: """Return first index of value. -Raises ValueError if the value is not present. -""" + Raises ValueError if the value is not present. + """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" # Doesn't exist at runtime, but deleting this breaks mypy and pyright. See: # https://github.com/python/typeshed/issues/7580 @@ -2670,45 +2800,47 @@ class function: class list(MutableSequence[_T]): """Built-in mutable sequence. -If no argument is given, the constructor creates a new empty list. -The argument must be an iterable if specified. -""" + If no argument is given, the constructor creates a new empty list. + The argument must be an iterable if specified. + """ + @overload def __init__(self) -> None: ... @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... def copy(self) -> list[_T]: - """Return a shallow copy of the list. -""" + """Return a shallow copy of the list.""" + def append(self, object: _T, /) -> None: - """Append object to the end of the list. -""" + """Append object to the end of the list.""" + def extend(self, iterable: Iterable[_T], /) -> None: - """Extend list by appending elements from the iterable. -""" + """Extend list by appending elements from the iterable.""" + def pop(self, index: SupportsIndex = -1, /) -> _T: """Remove and return item at index (default last). -Raises IndexError if list is empty or index is out of range. -""" + Raises IndexError if list is empty or index is out of range. + """ # Signature of `list.index` should be kept in line with `collections.UserList.index()` # and multiprocessing.managers.ListProxy.index() def index(self, value: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /) -> int: """Return first index of value. -Raises ValueError if the value is not present. -""" + Raises ValueError if the value is not present. + """ + def count(self, value: _T, /) -> int: - """Return number of occurrences of value. -""" + """Return number of occurrences of value.""" + def insert(self, index: SupportsIndex, object: _T, /) -> None: - """Insert object before index. -""" + """Insert object before index.""" + def remove(self, value: _T, /) -> None: """Remove first occurrence of value. -Raises ValueError if the value is not present. -""" + Raises ValueError if the value is not present. 
+ """ # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` # and multiprocessing.managers.ListProxy.sort() # @@ -2718,84 +2850,83 @@ Raises ValueError if the value is not present. def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: """Sort the list in ascending order and return None. -The sort is in-place (i.e. the list itself is modified) and stable (i.e. the -order of two equal elements is maintained). + The sort is in-place (i.e. the list itself is modified) and stable (i.e. the + order of two equal elements is maintained). -If a key function is given, apply it once to each list item and sort them, -ascending or descending, according to their function values. + If a key function is given, apply it once to each list item and sort them, + ascending or descending, according to their function values. + + The reverse flag can be set to sort in descending order. + """ -The reverse flag can be set to sort in descending order. -""" @overload def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __iter__(self) -> Iterator[_T]: - """Implement iter(self). -""" + """Implement iter(self).""" __hash__: ClassVar[None] # type: ignore[assignment] @overload def __getitem__(self, i: SupportsIndex, /) -> _T: - """Return self[index]. -""" + """Return self[index].""" + @overload def __getitem__(self, s: slice, /) -> list[_T]: ... @overload def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: Iterable[_T], /) -> None: ... def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key]. -""" + """Delete self[key].""" # Overloading looks unnecessary, but is needed to work around complex mypy problems @overload def __add__(self, value: list[_T], /) -> list[_T]: - """Return self+value. -""" + """Return self+value.""" + @overload def __add__(self, value: list[_S], /) -> list[_S | _T]: ... def __iadd__(self, value: Iterable[_T], /) -> Self: # type: ignore[misc] - """Implement self+=value. -""" + """Implement self+=value.""" + def __mul__(self, value: SupportsIndex, /) -> list[_T]: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: SupportsIndex, /) -> list[_T]: - """Return value*self. -""" + """Return value*self.""" + def __imul__(self, value: SupportsIndex, /) -> Self: - """Implement self*=value. -""" + """Implement self*=value.""" + def __contains__(self, key: object, /) -> bool: - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __reversed__(self) -> Iterator[_T]: - """Return a reverse iterator over the list. -""" + """Return a reverse iterator over the list.""" + def __gt__(self, value: list[_T], /) -> bool: ... def __ge__(self, value: list[_T], /) -> bool: ... def __lt__(self, value: list[_T], /) -> bool: ... def __le__(self, value: list[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... 
def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @disjoint_base class dict(MutableMapping[_KT, _VT]): """dict() -> new empty dictionary -dict(mapping) -> new dictionary initialized from a mapping object's - (key, value) pairs -dict(iterable) -> new dictionary initialized as if via: - d = {} - for k, v in iterable: - d[k] = v -dict(**kwargs) -> new dictionary initialized with the name=value pairs - in the keyword argument list. For example: dict(one=1, two=2) -""" + dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs + dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v + dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. For example: dict(one=1, two=2) + """ + # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics # Also multiprocessing.managers.SyncManager.dict() @overload @@ -2828,17 +2959,16 @@ dict(**kwargs) -> new dictionary initialized with the name=value pairs def __init__(self: dict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ... def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: - """Return a shallow copy of the dict. -""" + """Return a shallow copy of the dict.""" + def keys(self) -> dict_keys[_KT, _VT]: - """Return a set-like object providing a view on the dict's keys. -""" + """Return a set-like object providing a view on the dict's keys.""" + def values(self) -> dict_values[_KT, _VT]: - """Return an object providing a view on the dict's values. -""" + """Return an object providing a view on the dict's values.""" + def items(self) -> dict_items[_KT, _VT]: - """Return a set-like object providing a view on the dict's items. -""" + """Return a set-like object providing a view on the dict's items.""" # Signature of `dict.fromkeys` should be kept identical to # `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. @@ -2846,16 +2976,16 @@ dict(**kwargs) -> new dictionary initialized with the name=value pairs @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: None = None, /) -> dict[_T, Any | None]: - """Create a new dictionary with keys from iterable and values set to value. -""" + """Create a new dictionary with keys from iterable and values set to value.""" + @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> dict[_T, _S]: ... # Positional-only in dict, but not in MutableMapping @overload # type: ignore[override] def get(self, key: _KT, default: None = None, /) -> _VT | None: - """Return the value for key if key is in the dictionary, else default. -""" + """Return the value for key if key is in the dictionary, else default.""" + @overload def get(self, key: _KT, default: _VT, /) -> _VT: ... @overload @@ -2864,60 +2994,60 @@ dict(**kwargs) -> new dictionary initialized with the name=value pairs def pop(self, key: _KT, /) -> _VT: """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. -If the key is not found, return the default if given; otherwise, -raise a KeyError. -""" + If the key is not found, return the default if given; otherwise, + raise a KeyError. + """ + @overload def pop(self, key: _KT, default: _VT, /) -> _VT: ... @overload def pop(self, key: _KT, default: _T, /) -> _VT | _T: ... 
def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __getitem__(self, key: _KT, /) -> _VT: - """Return self[key]. -""" + """Return self[key].""" + def __setitem__(self, key: _KT, value: _VT, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + def __delitem__(self, key: _KT, /) -> None: - """Delete self[key]. -""" + """Delete self[key].""" + def __iter__(self) -> Iterator[_KT]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __eq__(self, value: object, /) -> bool: ... def __reversed__(self) -> Iterator[_KT]: - """Return a reverse iterator over the dict keys. -""" + """Return a reverse iterator over the dict keys.""" __hash__: ClassVar[None] # type: ignore[assignment] def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" + @overload def __or__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... @overload def __ror__(self, value: dict[_KT, _VT], /) -> dict[_KT, _VT]: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: dict[_T1, _T2], /) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] def __ior__(self, value: SupportsKeysAndGetItem[_KT, _VT], /) -> Self: - """Return self|=value. -""" + """Return self|=value.""" + @overload def __ior__(self, value: Iterable[tuple[_KT, _VT]], /) -> Self: ... @disjoint_base class set(MutableSet[_T]): - """Build an unordered collection of unique elements. -""" + """Build an unordered collection of unique elements.""" + @overload def __init__(self) -> None: ... @overload @@ -2925,88 +3055,91 @@ class set(MutableSet[_T]): def add(self, element: _T, /) -> None: """Add an element to a set. -This has no effect if the element is already present. -""" + This has no effect if the element is already present. + """ + def copy(self) -> set[_T]: - """Return a shallow copy of a set. -""" + """Return a shallow copy of a set.""" + def difference(self, *s: Iterable[Any]) -> set[_T]: - """Return a new set with elements in the set that are not in the others. -""" + """Return a new set with elements in the set that are not in the others.""" + def difference_update(self, *s: Iterable[Any]) -> None: - """Update the set, removing elements found in others. -""" + """Update the set, removing elements found in others.""" + def discard(self, element: _T, /) -> None: """Remove an element from a set if it is a member. -Unlike set.remove(), the discard() method does not raise -an exception when an element is missing from the set. -""" + Unlike set.remove(), the discard() method does not raise + an exception when an element is missing from the set. + """ + def intersection(self, *s: Iterable[Any]) -> set[_T]: - """Return a new set with elements common to the set and all others. -""" + """Return a new set with elements common to the set and all others.""" + def intersection_update(self, *s: Iterable[Any]) -> None: - """Update the set, keeping only elements found in it and all others. -""" + """Update the set, keeping only elements found in it and all others.""" + def isdisjoint(self, s: Iterable[Any], /) -> bool: - """Return True if two sets have a null intersection. 
-""" + """Return True if two sets have a null intersection.""" + def issubset(self, s: Iterable[Any], /) -> bool: - """Report whether another set contains this set. -""" + """Report whether another set contains this set.""" + def issuperset(self, s: Iterable[Any], /) -> bool: - """Report whether this set contains another set. -""" + """Report whether this set contains another set.""" + def remove(self, element: _T, /) -> None: """Remove an element from a set; it must be a member. -If the element is not a member, raise a KeyError. -""" + If the element is not a member, raise a KeyError. + """ + def symmetric_difference(self, s: Iterable[_T], /) -> set[_T]: - """Return a new set with elements in either the set or other but not both. -""" + """Return a new set with elements in either the set or other but not both.""" + def symmetric_difference_update(self, s: Iterable[_T], /) -> None: - """Update the set, keeping only elements found in either set, but not in both. -""" + """Update the set, keeping only elements found in either set, but not in both.""" + def union(self, *s: Iterable[_S]) -> set[_T | _S]: - """Return a new set with elements from the set and all others. -""" + """Return a new set with elements from the set and all others.""" + def update(self, *s: Iterable[_T]) -> None: - """Update the set, adding elements from all others. -""" + """Update the set, adding elements from all others.""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __contains__(self, o: object, /) -> bool: - """x.__contains__(y) <==> y in x. -""" + """x.__contains__(y) <==> y in x.""" + def __iter__(self) -> Iterator[_T]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __and__(self, value: AbstractSet[object], /) -> set[_T]: - """Return self&value. -""" + """Return self&value.""" + def __iand__(self, value: AbstractSet[object], /) -> Self: - """Return self&=value. -""" + """Return self&=value.""" + def __or__(self, value: AbstractSet[_S], /) -> set[_T | _S]: - """Return self|value. -""" + """Return self|value.""" + def __ior__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] - """Return self|=value. -""" + """Return self|=value.""" + def __sub__(self, value: AbstractSet[_T | None], /) -> set[_T]: - """Return self-value. -""" + """Return self-value.""" + def __isub__(self, value: AbstractSet[object], /) -> Self: - """Return self-=value. -""" + """Return self-=value.""" + def __xor__(self, value: AbstractSet[_S], /) -> set[_T | _S]: - """Return self^value. -""" + """Return self^value.""" + def __ixor__(self, value: AbstractSet[_T], /) -> Self: # type: ignore[override,misc] - """Return self^=value. -""" + """Return self^=value.""" + def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... def __ge__(self, value: AbstractSet[object], /) -> bool: ... @@ -3014,62 +3147,61 @@ If the element is not a member, raise a KeyError. def __eq__(self, value: object, /) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @disjoint_base class frozenset(AbstractSet[_T_co]): - """Build an immutable unordered collection of unique elements. -""" + """Build an immutable unordered collection of unique elements.""" + @overload def __new__(cls) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T_co], /) -> Self: ... 
def copy(self) -> frozenset[_T_co]: - """Return a shallow copy of a set. -""" + """Return a shallow copy of a set.""" + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: - """Return a new set with elements in the set that are not in the others. -""" + """Return a new set with elements in the set that are not in the others.""" + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: - """Return a new set with elements common to the set and all others. -""" + """Return a new set with elements common to the set and all others.""" + def isdisjoint(self, s: Iterable[_T_co], /) -> bool: - """Return True if two sets have a null intersection. -""" + """Return True if two sets have a null intersection.""" + def issubset(self, s: Iterable[object], /) -> bool: - """Report whether another set contains this set. -""" + """Report whether another set contains this set.""" + def issuperset(self, s: Iterable[object], /) -> bool: - """Report whether this set contains another set. -""" + """Report whether this set contains another set.""" + def symmetric_difference(self, s: Iterable[_T_co], /) -> frozenset[_T_co]: - """Return a new set with elements in either the set or other but not both. -""" + """Return a new set with elements in either the set or other but not both.""" + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: - """Return a new set with elements from the set and all others. -""" + """Return a new set with elements from the set and all others.""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __contains__(self, o: object, /) -> bool: - """x.__contains__(y) <==> y in x. -""" + """x.__contains__(y) <==> y in x.""" + def __iter__(self) -> Iterator[_T_co]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __and__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: - """Return self&value. -""" + """Return self&value.""" + def __or__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: - """Return self|value. -""" + """Return self|value.""" + def __sub__(self, value: AbstractSet[_T_co], /) -> frozenset[_T_co]: - """Return self-value. -""" + """Return self-value.""" + def __xor__(self, value: AbstractSet[_S], /) -> frozenset[_T_co | _S]: - """Return self^value. -""" + """Return self^value.""" + def __le__(self, value: AbstractSet[object], /) -> bool: ... def __lt__(self, value: AbstractSet[object], /) -> bool: ... def __ge__(self, value: AbstractSet[object], /) -> bool: ... @@ -3077,44 +3209,44 @@ class frozenset(AbstractSet[_T_co]): def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @disjoint_base class enumerate(Generic[_T]): """Return an enumerate object. - iterable - an object supporting iteration + iterable + an object supporting iteration -The enumerate object yields pairs containing a count (from start, which -defaults to zero) and a value yielded by the iterable argument. + The enumerate object yields pairs containing a count (from start, which + defaults to zero) and a value yielded by the iterable argument. + + enumerate is useful for obtaining an indexed list: + (0, seq[0]), (1, seq[1]), (2, seq[2]), ... + """ -enumerate is useful for obtaining an indexed list: - (0, seq[0]), (1, seq[1]), (2, seq[2]), ... -""" def __new__(cls, iterable: Iterable[_T], start: int = 0) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). 
-""" + """Implement iter(self).""" + def __next__(self) -> tuple[int, _T]: - """Implement next(self). -""" + """Implement next(self).""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @final class range(Sequence[int]): """range(stop) -> range object -range(start, stop[, step]) -> range object + range(start, stop[, step]) -> range object + + Return an object that produces a sequence of integers from start (inclusive) + to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1. + start defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3. + These are exactly the valid indices for a list of 4 elements. + When step is given, it specifies the increment (or decrement). + """ -Return an object that produces a sequence of integers from start (inclusive) -to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1. -start defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3. -These are exactly the valid indices for a list of 4 elements. -When step is given, it specifies the increment (or decrement). -""" @property def start(self) -> int: ... @property @@ -3126,68 +3258,69 @@ When step is given, it specifies the increment (or decrement). @overload def __new__(cls, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = 1, /) -> Self: ... def count(self, value: int, /) -> int: - """rangeobject.count(value) -> integer -- return number of occurrences of value -""" + """rangeobject.count(value) -> integer -- return number of occurrences of value""" + def index(self, value: int, /) -> int: # type: ignore[override] """rangeobject.index(value) -> integer -- return index of value. -Raise ValueError if the value is not present. -""" + Raise ValueError if the value is not present. + """ + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... def __contains__(self, key: object, /) -> bool: - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __iter__(self) -> Iterator[int]: - """Implement iter(self). -""" + """Implement iter(self).""" + @overload def __getitem__(self, key: SupportsIndex, /) -> int: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> range: ... def __reversed__(self) -> Iterator[int]: - """Return a reverse iterator. -""" + """Return a reverse iterator.""" @disjoint_base class property: """Property attribute. 
- fget - function to be used for getting an attribute value - fset - function to be used for setting an attribute value - fdel - function to be used for del'ing an attribute - doc - docstring + fget + function to be used for getting an attribute value + fset + function to be used for setting an attribute value + fdel + function to be used for del'ing an attribute + doc + docstring -Typical use is to define a managed attribute x: + Typical use is to define a managed attribute x: -class C(object): - def getx(self): return self._x - def setx(self, value): self._x = value - def delx(self): del self._x - x = property(getx, setx, delx, "I'm the 'x' property.") + class C(object): + def getx(self): return self._x + def setx(self, value): self._x = value + def delx(self): del self._x + x = property(getx, setx, delx, "I'm the 'x' property.") -Decorators make defining new properties or modifying existing ones easy: + Decorators make defining new properties or modifying existing ones easy: + + class C(object): + @property + def x(self): + "I am the 'x' property." + return self._x + @x.setter + def x(self, value): + self._x = value + @x.deleter + def x(self): + del self._x + """ -class C(object): - @property - def x(self): - "I am the 'x' property." - return self._x - @x.setter - def x(self, value): - self._x = value - @x.deleter - def x(self): - del self._x -""" fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], None] | None fdel: Callable[[Any], None] | None @@ -3203,26 +3336,25 @@ class C(object): doc: str | None = None, ) -> None: ... def getter(self, fget: Callable[[Any], Any], /) -> property: - """Descriptor to obtain a copy of the property with a different getter. -""" + """Descriptor to obtain a copy of the property with a different getter.""" + def setter(self, fset: Callable[[Any, Any], None], /) -> property: - """Descriptor to obtain a copy of the property with a different setter. -""" + """Descriptor to obtain a copy of the property with a different setter.""" + def deleter(self, fdel: Callable[[Any], None], /) -> property: - """Descriptor to obtain a copy of the property with a different deleter. -""" + """Descriptor to obtain a copy of the property with a different deleter.""" + @overload def __get__(self, instance: None, owner: type, /) -> Self: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + @overload def __get__(self, instance: Any, owner: type | None = None, /) -> Any: ... def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value. -""" + """Set an attribute of instance to value.""" + def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance. -""" + """Delete an attribute of instance.""" @final @type_check_only @@ -3232,52 +3364,57 @@ class _NotImplementedType(Any): NotImplemented: _NotImplementedType def abs(x: SupportsAbs[_T], /) -> _T: - """Return the absolute value of the argument. -""" + """Return the absolute value of the argument.""" + def all(iterable: Iterable[object], /) -> bool: """Return True if bool(x) is True for all values x in the iterable. -If the iterable is empty, return True. -""" + If the iterable is empty, return True. + """ + def any(iterable: Iterable[object], /) -> bool: """Return True if bool(x) is True for any x in the iterable. -If the iterable is empty, return False. -""" + If the iterable is empty, return False. 
+ """ + def ascii(obj: object, /) -> str: """Return an ASCII-only representation of an object. -As repr(), return a string containing a printable representation of an -object, but escape the non-ASCII characters in the string returned by -repr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar -to that returned by repr() in Python 2. -""" + As repr(), return a string containing a printable representation of an + object, but escape the non-ASCII characters in the string returned by + repr() using \\\\x, \\\\u or \\\\U escapes. This generates a string similar + to that returned by repr() in Python 2. + """ + def bin(number: int | SupportsIndex, /) -> str: """Return the binary representation of an integer. - >>> bin(2796202) - '0b1010101010101010101010' -""" + >>> bin(2796202) + '0b1010101010101010101010' + """ + def breakpoint(*args: Any, **kws: Any) -> None: """Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept -whatever arguments are passed. + whatever arguments are passed. + + By default, this drops you into the pdb debugger. + """ -By default, this drops you into the pdb debugger. -""" def callable(obj: object, /) -> TypeIs[Callable[..., object]]: """Return whether the object is callable (i.e., some kind of function). -Note that classes are callable, as are instances of classes with a -__call__() method. -""" + Note that classes are callable, as are instances of classes with a + __call__() method. + """ + def chr(i: int | SupportsIndex, /) -> str: - """Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff. -""" + """Return a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.""" if sys.version_info >= (3, 10): def aiter(async_iterable: SupportsAiter[_SupportsAnextT_co], /) -> _SupportsAnextT_co: - """Return an AsyncIterator for an AsyncIterable object. -""" + """Return an AsyncIterator for an AsyncIterable object.""" + @type_check_only class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): def __anext__(self) -> _AwaitableT_co: ... @@ -3289,9 +3426,10 @@ if sys.version_info >= (3, 10): def anext(i: _SupportsSynchronousAnext[_AwaitableT], /) -> _AwaitableT: """Return the next item from the async iterator. -If default is given and the async iterator is exhausted, -it is returned instead of raising StopAsyncIteration. -""" + If default is given and the async iterator is exhausted, + it is returned instead of raising StopAsyncIteration. + """ + @overload async def anext(i: SupportsAnext[_T], default: _VT, /) -> _T | _VT: ... @@ -3311,17 +3449,18 @@ def compile( ) -> CodeType: """Compile source into a code object that can be executed by exec() or eval(). -The source code may represent a Python module, statement or expression. -The filename will be used for run-time error messages. -The mode must be 'exec' to compile a module, 'single' to compile a -single (interactive) statement, or 'eval' to compile an expression. -The flags argument, if present, controls which future statements influence -the compilation of the code. -The dont_inherit argument, if true, stops the compilation inheriting -the effects of any future statements in effect in the code calling -compile; if absent or false these statements do influence the compilation, -in addition to any features explicitly specified. -""" + The source code may represent a Python module, statement or expression. + The filename will be used for run-time error messages. 
+ The mode must be 'exec' to compile a module, 'single' to compile a + single (interactive) statement, or 'eval' to compile an expression. + The flags argument, if present, controls which future statements influence + the compilation of the code. + The dont_inherit argument, if true, stops the compilation inheriting + the effects of any future statements in effect in the code calling + compile; if absent or false these statements do influence the compilation, + in addition to any features explicitly specified. + """ + @overload def compile( source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, @@ -3361,26 +3500,28 @@ credits: _sitebuiltins._Printer def delattr(obj: object, name: str, /) -> None: """Deletes the named attribute from the given object. -delattr(x, 'y') is equivalent to ``del x.y`` -""" + delattr(x, 'y') is equivalent to ``del x.y`` + """ + def dir(o: object = ..., /) -> list[str]: """dir([object]) -> list of strings -If called without an argument, return the names in the current scope. -Else, return an alphabetized list of names comprising (some of) the attributes -of the given object, and of attributes reachable from it. -If the object supplies a method named __dir__, it will be used; otherwise -the default dir() logic is used and returns: - for a module object: the module's attributes. - for a class object: its attributes, and recursively the attributes - of its bases. - for any other object: its attributes, its class's attributes, and - recursively the attributes of its class's base classes. -""" + If called without an argument, return the names in the current scope. + Else, return an alphabetized list of names comprising (some of) the attributes + of the given object, and of attributes reachable from it. + If the object supplies a method named __dir__, it will be used; otherwise + the default dir() logic is used and returns: + for a module object: the module's attributes. + for a class object: its attributes, and recursively the attributes + of its bases. + for any other object: its attributes, its class's attributes, and + recursively the attributes of its class's base classes. + """ + @overload def divmod(x: SupportsDivMod[_T_contra, _T_co], y: _T_contra, /) -> _T_co: - """Return the tuple (x//y, x%y). Invariant: div*y + mod == x. -""" + """Return the tuple (x//y, x%y). Invariant: div*y + mod == x.""" + @overload def divmod(x: _T_contra, y: SupportsRDivMod[_T_contra, _T_co], /) -> _T_co: ... @@ -3395,12 +3536,12 @@ if sys.version_info >= (3, 13): ) -> Any: """Evaluate the given source in the context of globals and locals. -The source may be a string representing a Python expression -or a code object as returned by compile(). -The globals must be a dictionary and locals can be any mapping, -defaulting to the current globals and locals. -If only globals is given, locals defaults to it. -""" + The source may be a string representing a Python expression + or a code object as returned by compile(). + The globals must be a dictionary and locals can be any mapping, + defaulting to the current globals and locals. + If only globals is given, locals defaults to it. + """ else: def eval( @@ -3411,12 +3552,12 @@ else: ) -> Any: """Evaluate the given source in the context of globals and locals. -The source may be a string representing a Python expression -or a code object as returned by compile(). -The globals must be a dictionary and locals can be any mapping, -defaulting to the current globals and locals. -If only globals is given, locals defaults to it. 
-""" + The source may be a string representing a Python expression + or a code object as returned by compile(). + The globals must be a dictionary and locals can be any mapping, + defaulting to the current globals and locals. + If only globals is given, locals defaults to it. + """ # Comment above regarding `eval` applies to `exec` as well if sys.version_info >= (3, 13): @@ -3430,14 +3571,14 @@ if sys.version_info >= (3, 13): ) -> None: """Execute the given source in the context of globals and locals. -The source may be a string representing one or more Python statements -or a code object as returned by compile(). -The globals must be a dictionary and locals can be any mapping, -defaulting to the current globals and locals. -If only globals is given, locals defaults to it. -The closure must be a tuple of cellvars, and can only be used -when source is a code object requiring exactly that many cellvars. -""" + The source may be a string representing one or more Python statements + or a code object as returned by compile(). + The globals must be a dictionary and locals can be any mapping, + defaulting to the current globals and locals. + If only globals is given, locals defaults to it. + The closure must be a tuple of cellvars, and can only be used + when source is a code object requiring exactly that many cellvars. + """ elif sys.version_info >= (3, 11): def exec( @@ -3450,14 +3591,14 @@ elif sys.version_info >= (3, 11): ) -> None: """Execute the given source in the context of globals and locals. -The source may be a string representing one or more Python statements -or a code object as returned by compile(). -The globals must be a dictionary and locals can be any mapping, -defaulting to the current globals and locals. -If only globals is given, locals defaults to it. -The closure must be a tuple of cellvars, and can only be used -when source is a code object requiring exactly that many cellvars. -""" + The source may be a string representing one or more Python statements + or a code object as returned by compile(). + The globals must be a dictionary and locals can be any mapping, + defaulting to the current globals and locals. + If only globals is given, locals defaults to it. + The closure must be a tuple of cellvars, and can only be used + when source is a code object requiring exactly that many cellvars. + """ else: def exec( @@ -3468,20 +3609,21 @@ else: ) -> None: """Execute the given source in the context of globals and locals. -The source may be a string representing one or more Python statements -or a code object as returned by compile(). -The globals must be a dictionary and locals can be any mapping, -defaulting to the current globals and locals. -If only globals is given, locals defaults to it. -""" + The source may be a string representing one or more Python statements + or a code object as returned by compile(). + The globals must be a dictionary and locals can be any mapping, + defaulting to the current globals and locals. + If only globals is given, locals defaults to it. + """ exit: _sitebuiltins.Quitter @disjoint_base class filter(Generic[_T]): """Return an iterator yielding those items of iterable for which function(item) -is true. If function is None, return the items that are true. -""" + is true. If function is None, return the items that are true. + """ + @overload def __new__(cls, function: None, iterable: Iterable[_T | None], /) -> Self: ... @overload @@ -3491,30 +3633,30 @@ is true. If function is None, return the items that are true. 
@overload def __new__(cls, function: Callable[[_T], Any], iterable: Iterable[_T], /) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" def format(value: object, format_spec: str = "", /) -> str: """Return type(value).__format__(value, format_spec) -Many built-in types implement format_spec according to the -Format Specification Mini-language. See help('FORMATTING'). + Many built-in types implement format_spec according to the + Format Specification Mini-language. See help('FORMATTING'). + + If type(value) does not supply a method named __format__ + and format_spec is empty, then str(value) is returned. + See also help('SPECIALMETHODS'). + """ -If type(value) does not supply a method named __format__ -and format_spec is empty, then str(value) is returned. -See also help('SPECIALMETHODS'). -""" @overload def getattr(o: object, name: str, /) -> Any: """getattr(object, name[, default]) -> value -Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. -When a default argument is given, it is returned when the attribute doesn't -exist; without it, an exception is raised in that case. -""" + Get a named attribute from an object; getattr(x, 'y') is equivalent to x.y. + When a default argument is given, it is returned when the attribute doesn't + exist; without it, an exception is raised in that case. + """ # While technically covered by the last overload, spelling out the types for None, bool # and basic containers help mypy out in some tricky situations involving type context @@ -3532,44 +3674,49 @@ def getattr(o: object, name: str, default: _T, /) -> Any | _T: ... def globals() -> dict[str, Any]: """Return the dictionary containing the current scope's global variables. -NOTE: Updates to this dictionary *will* affect name lookups in the current -global scope and vice-versa. -""" + NOTE: Updates to this dictionary *will* affect name lookups in the current + global scope and vice-versa. + """ + def hasattr(obj: object, name: str, /) -> bool: """Return whether the object has an attribute with the given name. -This is done by calling getattr(obj, name) and catching AttributeError. -""" + This is done by calling getattr(obj, name) and catching AttributeError. + """ + def hash(obj: object, /) -> int: """Return the hash value for the given object. -Two objects that compare equal must also have the same hash value, but the -reverse is not necessarily true. -""" + Two objects that compare equal must also have the same hash value, but the + reverse is not necessarily true. + """ help: _sitebuiltins._Helper def hex(number: int | SupportsIndex, /) -> str: """Return the hexadecimal representation of an integer. - >>> hex(12648430) - '0xc0ffee' -""" + >>> hex(12648430) + '0xc0ffee' + """ + def id(obj: object, /) -> int: """Return the identity of an object. -This is guaranteed to be unique among simultaneously existing objects. -(CPython uses the object's memory address.) -""" + This is guaranteed to be unique among simultaneously existing objects. + (CPython uses the object's memory address.) + """ + def input(prompt: object = "", /) -> str: """Read a string from standard input. The trailing newline is stripped. -The prompt string, if given, is printed to standard output without a -trailing newline before reading input. + The prompt string, if given, is printed to standard output without a + trailing newline before reading input. 
+ + If the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError. + On *nix systems, readline is used if available. + """ -If the user hits EOF (*nix: Ctrl-D, Windows: Ctrl-Z+Return), raise EOFError. -On *nix systems, readline is used if available. -""" @type_check_only class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, i: int, /) -> _T_co: ... @@ -3577,12 +3724,13 @@ class _GetItemIterable(Protocol[_T_co]): @overload def iter(object: SupportsIter[_SupportsNextT_co], /) -> _SupportsNextT_co: """iter(iterable) -> iterator -iter(callable, sentinel) -> iterator + iter(callable, sentinel) -> iterator + + Get an iterator from an object. In the first form, the argument must + supply its own iterator, or be a sequence. + In the second form, the callable is called until it returns the sentinel. + """ -Get an iterator from an object. In the first form, the argument must -supply its own iterator, or be a sequence. -In the second form, the callable is called until it returns the sentinel. -""" @overload def iter(object: _GetItemIterable[_T], /) -> Iterator[_T]: ... @overload @@ -3598,38 +3746,41 @@ else: def isinstance(obj: object, class_or_tuple: _ClassInfo, /) -> bool: """Return whether an object is an instance of a class or of a subclass thereof. -A tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to -check against. This is equivalent to ``isinstance(x, A) or isinstance(x, B) -or ...`` etc. -""" + A tuple, as in ``isinstance(x, (A, B, ...))``, may be given as the target to + check against. This is equivalent to ``isinstance(x, A) or isinstance(x, B) + or ...`` etc. + """ + def issubclass(cls: type, class_or_tuple: _ClassInfo, /) -> bool: """Return whether 'cls' is derived from another class or is the same class. -A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to -check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B) -or ...``. -""" + A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to + check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B) + or ...``. + """ + def len(obj: Sized, /) -> int: - """Return the number of items in a container. -""" + """Return the number of items in a container.""" license: _sitebuiltins._Printer def locals() -> dict[str, Any]: """Return a dictionary containing the current scope's local variables. -NOTE: Whether or not updates to this dictionary will affect name lookups in -the local scope and vice-versa is *implementation dependent* and not -covered by any backwards compatibility guarantees. -""" + NOTE: Whether or not updates to this dictionary will affect name lookups in + the local scope and vice-versa is *implementation dependent* and not + covered by any backwards compatibility guarantees. + """ + @disjoint_base class map(Generic[_S]): """Make an iterator that computes the function using arguments from -each of the iterables. Stops when the shortest iterable is exhausted. + each of the iterables. Stops when the shortest iterable is exhausted. + + If strict is true and one of the arguments is exhausted before the others, + raise a ValueError. + """ -If strict is true and one of the arguments is exhausted before the others, -raise a ValueError. -""" # 3.14 adds `strict` argument. if sys.version_info >= (3, 14): @overload @@ -3733,24 +3884,24 @@ raise a ValueError. ) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _S: - """Implement next(self). 
-""" + """Implement next(self).""" @overload def max( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: """max(iterable, *[, default=obj, key=func]) -> value -max(arg1, arg2, *args, *[, key=func]) -> value + max(arg1, arg2, *args, *[, key=func]) -> value + + With a single iterable argument, return its biggest item. The + default keyword-only argument specifies an object to return if + the provided iterable is empty. + With two or more positional arguments, return the largest argument. + """ -With a single iterable argument, return its biggest item. The -default keyword-only argument specifies an object to return if -the provided iterable is empty. -With two or more positional arguments, return the largest argument. -""" @overload def max(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -3766,13 +3917,14 @@ def min( arg1: SupportsRichComparisonT, arg2: SupportsRichComparisonT, /, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: """min(iterable, *[, default=obj, key=func]) -> value -min(arg1, arg2, *args, *[, key=func]) -> value + min(arg1, arg2, *args, *[, key=func]) -> value + + With a single iterable argument, return its smallest item. The + default keyword-only argument specifies an object to return if + the provided iterable is empty. + With two or more positional arguments, return the smallest argument. + """ -With a single iterable argument, return its smallest item. The -default keyword-only argument specifies an object to return if -the provided iterable is empty. -With two or more positional arguments, return the smallest argument. -""" @overload def min(arg1: _T, arg2: _T, /, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload @@ -3787,17 +3939,18 @@ def min(iterable: Iterable[_T1], /, *, key: Callable[[_T1], SupportsRichComparis def next(i: SupportsNext[_T], /) -> _T: """next(iterator[, default]) -Return the next item from the iterator. If default is given and the iterator -is exhausted, it is returned instead of raising StopIteration. -""" + Return the next item from the iterator. If default is given and the iterator + is exhausted, it is returned instead of raising StopIteration. + """ + @overload def next(i: SupportsNext[_T], default: _VT, /) -> _T | _VT: ... def oct(number: int | SupportsIndex, /) -> str: """Return the octal representation of an integer. - >>> oct(342391) - '0o1234567' -""" + >>> oct(342391) + '0o1234567' + """ _Opener: TypeAlias = Callable[[str, int], int] @@ -3815,118 +3968,118 @@ def open( ) -> TextIOWrapper: """Open file and return a stream. Raise OSError upon failure. -file is either a text or byte string giving the name (and the path -if the file isn't in the current working directory) of the file to -be opened or an integer file descriptor of the file to be -wrapped. (If a file descriptor is given, it is closed when the -returned I/O object is closed, unless closefd is set to False.) - -mode is an optional string that specifies the mode in which the file -is opened. It defaults to 'r' which means open for reading in text -mode. Other common values are 'w' for writing (truncating the file if -it already exists), 'x' for creating and writing to a new file, and -'a' for appending (which on some Unix systems, means that all writes -append to the end of the file regardless of the current seek position). 
-In text mode, if encoding is not specified the encoding used is platform -dependent: locale.getencoding() is called to get the current locale encoding. -(For reading and writing raw bytes use binary mode and leave encoding -unspecified.) The available modes are: - -========= =============================================================== -Character Meaning ---------- --------------------------------------------------------------- -'r' open for reading (default) -'w' open for writing, truncating the file first -'x' create a new file and open it for writing -'a' open for writing, appending to the end of the file if it exists -'b' binary mode -'t' text mode (default) -'+' open a disk file for updating (reading and writing) -========= =============================================================== - -The default mode is 'rt' (open for reading text). For binary random -access, the mode 'w+b' opens and truncates the file to 0 bytes, while -'r+b' opens the file without truncation. The 'x' mode implies 'w' and -raises an `FileExistsError` if the file already exists. - -Python distinguishes between files opened in binary and text modes, -even when the underlying operating system doesn't. Files opened in -binary mode (appending 'b' to the mode argument) return contents as -bytes objects without any decoding. In text mode (the default, or when -'t' is appended to the mode argument), the contents of the file are -returned as strings, the bytes having been first decoded using a -platform-dependent encoding or using the specified encoding if given. - -buffering is an optional integer used to set the buffering policy. -Pass 0 to switch buffering off (only allowed in binary mode), 1 to select -line buffering (only usable in text mode), and an integer > 1 to indicate -the size of a fixed-size chunk buffer. When no buffering argument is -given, the default buffering policy works as follows: - -* Binary files are buffered in fixed-size chunks; the size of the buffer - is max(min(blocksize, 8 MiB), DEFAULT_BUFFER_SIZE) - when the device block size is available. - On most systems, the buffer will typically be 128 kilobytes long. - -* "Interactive" text files (files for which isatty() returns True) - use line buffering. Other text files use the policy described above - for binary files. - -encoding is the name of the encoding used to decode or encode the -file. This should only be used in text mode. The default encoding is -platform dependent, but any encoding supported by Python can be -passed. See the codecs module for the list of supported encodings. - -errors is an optional string that specifies how encoding errors are to -be handled---this argument should not be used in binary mode. Pass -'strict' to raise a ValueError exception if there is an encoding error -(the default of None has the same effect), or pass 'ignore' to ignore -errors. (Note that ignoring encoding errors can lead to data loss.) -See the documentation for codecs.register or run 'help(codecs.Codec)' -for a list of the permitted encoding error strings. - -newline controls how universal newlines works (it only applies to text -mode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as -follows: - -* On input, if newline is None, universal newlines mode is - enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and - these are translated into '\\n' before being returned to the - caller. If it is '', universal newline mode is enabled, but line - endings are returned to the caller untranslated. 
If it has any of - the other legal values, input lines are only terminated by the given - string, and the line ending is returned to the caller untranslated. - -* On output, if newline is None, any '\\n' characters written are - translated to the system default line separator, os.linesep. If - newline is '' or '\\n', no translation takes place. If newline is any - of the other legal values, any '\\n' characters written are translated - to the given string. - -If closefd is False, the underlying file descriptor will be kept open -when the file is closed. This does not work when a file name is given -and must be True in that case. - -A custom opener can be used by passing a callable as *opener*. The -underlying file descriptor for the file object is then obtained by -calling *opener* with (*file*, *flags*). *opener* must return an open -file descriptor (passing os.open as *opener* results in functionality -similar to passing None). - -open() returns a file object whose type depends on the mode, and -through which the standard file operations such as reading and writing -are performed. When open() is used to open a file in a text mode ('w', -'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open -a file in a binary mode, the returned class varies: in read binary -mode, it returns a BufferedReader; in write binary and append binary -modes, it returns a BufferedWriter, and in read/write mode, it returns -a BufferedRandom. - -It is also possible to use a string or bytearray as a file for both -reading and writing. For strings StringIO can be used like a file -opened in a text mode, and for bytes a BytesIO can be used like a file -opened in a binary mode. -""" + file is either a text or byte string giving the name (and the path + if the file isn't in the current working directory) of the file to + be opened or an integer file descriptor of the file to be + wrapped. (If a file descriptor is given, it is closed when the + returned I/O object is closed, unless closefd is set to False.) + + mode is an optional string that specifies the mode in which the file + is opened. It defaults to 'r' which means open for reading in text + mode. Other common values are 'w' for writing (truncating the file if + it already exists), 'x' for creating and writing to a new file, and + 'a' for appending (which on some Unix systems, means that all writes + append to the end of the file regardless of the current seek position). + In text mode, if encoding is not specified the encoding used is platform + dependent: locale.getencoding() is called to get the current locale encoding. + (For reading and writing raw bytes use binary mode and leave encoding + unspecified.) The available modes are: + + ========= =============================================================== + Character Meaning + --------- --------------------------------------------------------------- + 'r' open for reading (default) + 'w' open for writing, truncating the file first + 'x' create a new file and open it for writing + 'a' open for writing, appending to the end of the file if it exists + 'b' binary mode + 't' text mode (default) + '+' open a disk file for updating (reading and writing) + ========= =============================================================== + + The default mode is 'rt' (open for reading text). For binary random + access, the mode 'w+b' opens and truncates the file to 0 bytes, while + 'r+b' opens the file without truncation. The 'x' mode implies 'w' and + raises an `FileExistsError` if the file already exists. 
+ + Python distinguishes between files opened in binary and text modes, + even when the underlying operating system doesn't. Files opened in + binary mode (appending 'b' to the mode argument) return contents as + bytes objects without any decoding. In text mode (the default, or when + 't' is appended to the mode argument), the contents of the file are + returned as strings, the bytes having been first decoded using a + platform-dependent encoding or using the specified encoding if given. + + buffering is an optional integer used to set the buffering policy. + Pass 0 to switch buffering off (only allowed in binary mode), 1 to select + line buffering (only usable in text mode), and an integer > 1 to indicate + the size of a fixed-size chunk buffer. When no buffering argument is + given, the default buffering policy works as follows: + + * Binary files are buffered in fixed-size chunks; the size of the buffer + is max(min(blocksize, 8 MiB), DEFAULT_BUFFER_SIZE) + when the device block size is available. + On most systems, the buffer will typically be 128 kilobytes long. + + * "Interactive" text files (files for which isatty() returns True) + use line buffering. Other text files use the policy described above + for binary files. + + encoding is the name of the encoding used to decode or encode the + file. This should only be used in text mode. The default encoding is + platform dependent, but any encoding supported by Python can be + passed. See the codecs module for the list of supported encodings. + + errors is an optional string that specifies how encoding errors are to + be handled---this argument should not be used in binary mode. Pass + 'strict' to raise a ValueError exception if there is an encoding error + (the default of None has the same effect), or pass 'ignore' to ignore + errors. (Note that ignoring encoding errors can lead to data loss.) + See the documentation for codecs.register or run 'help(codecs.Codec)' + for a list of the permitted encoding error strings. + + newline controls how universal newlines works (it only applies to text + mode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as + follows: + + * On input, if newline is None, universal newlines mode is + enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and + these are translated into '\\n' before being returned to the + caller. If it is '', universal newline mode is enabled, but line + endings are returned to the caller untranslated. If it has any of + the other legal values, input lines are only terminated by the given + string, and the line ending is returned to the caller untranslated. + + * On output, if newline is None, any '\\n' characters written are + translated to the system default line separator, os.linesep. If + newline is '' or '\\n', no translation takes place. If newline is any + of the other legal values, any '\\n' characters written are translated + to the given string. + + If closefd is False, the underlying file descriptor will be kept open + when the file is closed. This does not work when a file name is given + and must be True in that case. + + A custom opener can be used by passing a callable as *opener*. The + underlying file descriptor for the file object is then obtained by + calling *opener* with (*file*, *flags*). *opener* must return an open + file descriptor (passing os.open as *opener* results in functionality + similar to passing None). 
+ + open() returns a file object whose type depends on the mode, and + through which the standard file operations such as reading and writing + are performed. When open() is used to open a file in a text mode ('w', + 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open + a file in a binary mode, the returned class varies: in read binary + mode, it returns a BufferedReader; in write binary and append binary + modes, it returns a BufferedWriter, and in read/write mode, it returns + a BufferedRandom. + + It is also possible to use a string or bytearray as a file for both + reading and writing. For strings StringIO can be used like a file + opened in a text mode, and for bytes a BytesIO can be used like a file + opened in a binary mode. + """ # Unbuffered binary mode: returns a FileIO @overload @@ -4004,12 +4157,13 @@ def open( def ord(c: str | bytes | bytearray, /) -> int: """Return the ordinal value of a character. -If the argument is a one-character string, return the Unicode code -point of that character. + If the argument is a one-character string, return the Unicode code + point of that character. + + If the argument is a bytes or bytearray object of length 1, return its + single byte value. + """ -If the argument is a bytes or bytearray object of length 1, return its -single byte value. -""" @type_check_only class _SupportsWriteAndFlush(SupportsWrite[_T_contra], SupportsFlush, Protocol[_T_contra]): ... @@ -4023,15 +4177,16 @@ def print( ) -> None: """Prints the values to a stream, or to sys.stdout by default. - sep - string inserted between values, default a space. - end - string appended after the last value, default a newline. - file - a file-like object (stream); defaults to the current sys.stdout. - flush - whether to forcibly flush the stream. -""" + sep + string inserted between values, default a space. + end + string appended after the last value, default a newline. + file + a file-like object (stream); defaults to the current sys.stdout. + flush + whether to forcibly flush the stream. + """ + @overload def print( *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool @@ -4062,9 +4217,10 @@ _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs a def pow(base: int, exp: int, mod: int) -> int: """Equivalent to base**exp with 2 arguments or base**exp % mod with 3 arguments -Some types, such as ints, are able to use a more efficient algorithm when -invoked using the three argument form. -""" + Some types, such as ints, are able to use a more efficient algorithm when + invoked using the three argument form. + """ + @overload def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... @overload @@ -4105,27 +4261,26 @@ quit: _sitebuiltins.Quitter @disjoint_base class reversed(Generic[_T]): - """Return a reverse iterator over the values of the given sequence. -""" + """Return a reverse iterator over the values of the given sequence.""" + @overload def __new__(cls, sequence: Reversible[_T], /) -> Iterator[_T]: ... # type: ignore[misc] @overload def __new__(cls, sequence: SupportsLenAndGetItem[_T], /) -> Iterator[_T]: ... # type: ignore[misc] def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" + def __length_hint__(self) -> int: - """Private method returning an estimate of len(list(it)). 
-""" + """Private method returning an estimate of len(list(it)).""" def repr(obj: object, /) -> str: """Return the canonical string representation of the object. -For many object types, including most builtins, eval(repr(obj)) == obj. -""" + For many object types, including most builtins, eval(repr(obj)) == obj. + """ # See https://github.com/python/typeshed/pull/9141 # and https://github.com/python/typeshed/pull/9151 @@ -4143,9 +4298,10 @@ class _SupportsRound2(Protocol[_T_co]): def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: """Round a number to a given precision in decimal digits. -The return value is an integer if ndigits is omitted or None. Otherwise -the return value has the same type as the number. ndigits may be negative. -""" + The return value is an integer if ndigits is omitted or None. Otherwise + the return value has the same type as the number. ndigits may be negative. + """ + @overload def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... @@ -4154,17 +4310,19 @@ def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... def setattr(obj: object, name: str, value: Any, /) -> None: """Sets the named attribute on the given object to the specified value. -setattr(x, 'y', v) is equivalent to ``x.y = v`` -""" + setattr(x, 'y', v) is equivalent to ``x.y = v`` + """ + @overload def sorted( iterable: Iterable[SupportsRichComparisonT], /, *, key: None = None, reverse: bool = False ) -> list[SupportsRichComparisonT]: """Return a new list containing all items from the iterable in ascending order. -A custom key function can be supplied to customize the sort order, and the -reverse flag can be set to request the result in descending order. -""" + A custom key function can be supplied to customize the sort order, and the + reverse flag can be set to request the result in descending order. + """ + @overload def sorted(iterable: Iterable[_T], /, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... @@ -4184,10 +4342,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: """Return the sum of a 'start' value (default: 0) plus an iterable of numbers -When the iterable is empty, return the start value. -This function is intended specifically for use with numeric values and may -reject non-numeric types. -""" + When the iterable is empty, return the start value. + This function is intended specifically for use with numeric values and may + reject non-numeric types. + """ + @overload def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ... @overload @@ -4199,24 +4358,26 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A def vars(object: type, /) -> types.MappingProxyType[str, Any]: """vars([object]) -> dictionary -Without arguments, equivalent to locals(). -With an argument, equivalent to object.__dict__. -""" + Without arguments, equivalent to locals(). + With an argument, equivalent to object.__dict__. + """ + @overload def vars(object: Any = ..., /) -> dict[str, Any]: ... @disjoint_base class zip(Generic[_T_co]): """The zip object yields n-length tuples, where n is the number of iterables -passed as positional arguments to zip(). The i-th element in every tuple -comes from the i-th iterable argument to zip(). This continues until the -shortest argument is exhausted. + passed as positional arguments to zip(). 
The i-th element in every tuple + comes from the i-th iterable argument to zip(). This continues until the + shortest argument is exhausted. -If strict is true and one of the arguments is exhausted before the others, -raise a ValueError. + If strict is true and one of the arguments is exhausted before the others, + raise a ValueError. + + >>> list(zip('abcdefg', range(3), range(4))) + [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] + """ - >>> list(zip('abcdefg', range(3), range(4))) - [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)] -""" if sys.version_info >= (3, 10): @overload def __new__(cls, *, strict: bool = False) -> zip[Any]: ... @@ -4295,11 +4456,10 @@ raise a ValueError. ) -> zip[tuple[Any, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` # Return type of `__import__` should be kept the same as return type of `importlib.import_module` @@ -4312,25 +4472,26 @@ def __import__( ) -> types.ModuleType: """Import a module. -Because this function is meant for use by the Python -interpreter and not for general use, it is better to use -importlib.import_module() to programmatically import a module. - -The globals argument is only used to determine the context; -they are not modified. The locals argument is unused. The fromlist -should be a list of names to emulate ``from name import ...``, or an -empty list to emulate ``import name``. -When importing a module from a package, note that __import__('A.B', ...) -returns package A when fromlist is empty, but its submodule B when -fromlist is not empty. The level argument is used to determine whether to -perform absolute or relative imports: 0 is absolute, while a positive number -is the number of parent directories to search relative to the current module. -""" + Because this function is meant for use by the Python + interpreter and not for general use, it is better to use + importlib.import_module() to programmatically import a module. + + The globals argument is only used to determine the context; + they are not modified. The locals argument is unused. The fromlist + should be a list of names to emulate ``from name import ...``, or an + empty list to emulate ``import name``. + When importing a module from a package, note that __import__('A.B', ...) + returns package A when fromlist is empty, but its submodule B when + fromlist is not empty. The level argument is used to determine whether to + perform absolute or relative imports: 0 is absolute, while a positive number + is the number of parent directories to search relative to the current module. + """ + def __build_class__(func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: """__build_class__(func, name, /, *bases, [metaclass], **kwds) -> class -Internal helper function used by the class statement. -""" + Internal helper function used by the class statement. + """ if sys.version_info >= (3, 10): from types import EllipsisType @@ -4352,8 +4513,8 @@ else: @disjoint_base class BaseException: - """Common base class for all exceptions -""" + """Common base class for all exceptions""" + args: tuple[Any, ...] __cause__: BaseException | None __context__: BaseException | None @@ -4363,42 +4524,38 @@ class BaseException: def __new__(cls, *args: Any, **kwds: Any) -> Self: ... 
def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... def with_traceback(self, tb: TracebackType | None, /) -> Self: - """Set self.__traceback__ to tb and return self. -""" + """Set self.__traceback__ to tb and return self.""" if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] def add_note(self, note: str, /) -> None: - """Add a note to the exception -""" + """Add a note to the exception""" class GeneratorExit(BaseException): - """Request that a generator exit. -""" + """Request that a generator exit.""" + class KeyboardInterrupt(BaseException): - """Program interrupted by user. -""" + """Program interrupted by user.""" @disjoint_base class SystemExit(BaseException): - """Request to exit from the interpreter. -""" + """Request to exit from the interpreter.""" + code: sys._ExitCode class Exception(BaseException): - """Common base class for all non-exit exceptions. -""" + """Common base class for all non-exit exceptions.""" @disjoint_base class StopIteration(Exception): - """Signal the end from iterator.__next__(). -""" + """Signal the end from iterator.__next__().""" + value: Any @disjoint_base class OSError(Exception): - """Base class for I/O related errors. -""" + """Base class for I/O related errors.""" + errno: int | None strerror: str | None # filename, filename2 are actually str | bytes | None @@ -4413,37 +4570,34 @@ if sys.platform == "win32": WindowsError = OSError class ArithmeticError(Exception): - """Base class for arithmetic errors. -""" + """Base class for arithmetic errors.""" + class AssertionError(Exception): - """Assertion failed. -""" + """Assertion failed.""" if sys.version_info >= (3, 10): @disjoint_base class AttributeError(Exception): - """Attribute not found. -""" + """Attribute not found.""" + def __init__(self, *args: object, name: str | None = None, obj: object = None) -> None: ... name: str | None obj: object else: class AttributeError(Exception): - """Attribute not found. -""" + """Attribute not found.""" class BufferError(Exception): - """Buffer error. -""" + """Buffer error.""" + class EOFError(Exception): - """Read beyond end of file. -""" + """Read beyond end of file.""" @disjoint_base class ImportError(Exception): - """Import can't find module, or can't find name in module. -""" + """Import can't find module, or can't find name in module.""" + def __init__(self, *args: object, name: str | None = None, path: str | None = None) -> None: ... name: str | None path: str | None @@ -4452,39 +4606,36 @@ class ImportError(Exception): name_from: str | None # undocumented class LookupError(Exception): - """Base class for lookup errors. -""" + """Base class for lookup errors.""" + class MemoryError(Exception): - """Out of memory. -""" + """Out of memory.""" if sys.version_info >= (3, 10): @disjoint_base class NameError(Exception): - """Name not found globally. -""" + """Name not found globally.""" + def __init__(self, *args: object, name: str | None = None) -> None: ... name: str | None else: class NameError(Exception): - """Name not found globally. -""" + """Name not found globally.""" class ReferenceError(Exception): - """Weak ref proxy used after referent went away. -""" + """Weak ref proxy used after referent went away.""" + class RuntimeError(Exception): - """Unspecified run-time error. -""" + """Unspecified run-time error.""" + class StopAsyncIteration(Exception): - """Signal the end from iterator.__anext__(). 
-""" + """Signal the end from iterator.__anext__().""" @disjoint_base class SyntaxError(Exception): - """Invalid syntax. -""" + """Invalid syntax.""" + msg: str filename: str | None lineno: int | None @@ -4516,104 +4667,103 @@ class SyntaxError(Exception): class SystemError(Exception): """Internal error in the Python interpreter. -Please report this to the Python maintainer, along with the traceback, -the Python version, and the hardware/OS platform and version. -""" + Please report this to the Python maintainer, along with the traceback, + the Python version, and the hardware/OS platform and version. + """ + class TypeError(Exception): - """Inappropriate argument type. -""" + """Inappropriate argument type.""" + class ValueError(Exception): - """Inappropriate argument value (of correct type). -""" + """Inappropriate argument value (of correct type).""" + class FloatingPointError(ArithmeticError): - """Floating-point operation failed. -""" + """Floating-point operation failed.""" + class OverflowError(ArithmeticError): - """Result too large to be represented. -""" + """Result too large to be represented.""" + class ZeroDivisionError(ArithmeticError): - """Second argument to a division or modulo operation was zero. -""" + """Second argument to a division or modulo operation was zero.""" + class ModuleNotFoundError(ImportError): - """Module not found. -""" + """Module not found.""" + class IndexError(LookupError): - """Sequence index out of range. -""" + """Sequence index out of range.""" + class KeyError(LookupError): - """Mapping key not found. -""" + """Mapping key not found.""" + class UnboundLocalError(NameError): - """Local name referenced but not bound to a value. -""" + """Local name referenced but not bound to a value.""" class BlockingIOError(OSError): - """I/O operation would block. -""" + """I/O operation would block.""" + characters_written: int class ChildProcessError(OSError): - """Child process error. -""" + """Child process error.""" + class ConnectionError(OSError): - """Connection error. -""" + """Connection error.""" + class BrokenPipeError(ConnectionError): - """Broken pipe. -""" + """Broken pipe.""" + class ConnectionAbortedError(ConnectionError): - """Connection aborted. -""" + """Connection aborted.""" + class ConnectionRefusedError(ConnectionError): - """Connection refused. -""" + """Connection refused.""" + class ConnectionResetError(ConnectionError): - """Connection reset. -""" + """Connection reset.""" + class FileExistsError(OSError): - """File already exists. -""" + """File already exists.""" + class FileNotFoundError(OSError): - """File not found. -""" + """File not found.""" + class InterruptedError(OSError): - """Interrupted by signal. -""" + """Interrupted by signal.""" + class IsADirectoryError(OSError): - """Operation doesn't work on directories. -""" + """Operation doesn't work on directories.""" + class NotADirectoryError(OSError): - """Operation only works on directories. -""" + """Operation only works on directories.""" + class PermissionError(OSError): - """Not enough permissions. -""" + """Not enough permissions.""" + class ProcessLookupError(OSError): - """Process not found. -""" + """Process not found.""" + class TimeoutError(OSError): - """Timeout expired. -""" + """Timeout expired.""" + class NotImplementedError(RuntimeError): - """Method or function hasn't been implemented yet. -""" + """Method or function hasn't been implemented yet.""" + class RecursionError(RuntimeError): - """Recursion limit exceeded. 
-""" + """Recursion limit exceeded.""" + class IndentationError(SyntaxError): - """Improper indentation. -""" + """Improper indentation.""" + class TabError(IndentationError): - """Improper mixture of spaces and tabs. -""" + """Improper mixture of spaces and tabs.""" + class UnicodeError(ValueError): - """Unicode related error. -""" + """Unicode related error.""" @disjoint_base class UnicodeDecodeError(UnicodeError): - """Unicode decoding error. -""" + """Unicode decoding error.""" + encoding: str object: bytes start: int @@ -4623,8 +4773,8 @@ class UnicodeDecodeError(UnicodeError): @disjoint_base class UnicodeEncodeError(UnicodeError): - """Unicode encoding error. -""" + """Unicode encoding error.""" + encoding: str object: str start: int @@ -4634,8 +4784,8 @@ class UnicodeEncodeError(UnicodeError): @disjoint_base class UnicodeTranslateError(UnicodeError): - """Unicode translation error. -""" + """Unicode translation error.""" + encoding: None object: str start: int @@ -4644,47 +4794,49 @@ class UnicodeTranslateError(UnicodeError): def __init__(self, object: str, start: int, end: int, reason: str, /) -> None: ... class Warning(Exception): - """Base class for warning categories. -""" + """Base class for warning categories.""" + class UserWarning(Warning): - """Base class for warnings generated by user code. -""" + """Base class for warnings generated by user code.""" + class DeprecationWarning(Warning): - """Base class for warnings about deprecated features. -""" + """Base class for warnings about deprecated features.""" + class SyntaxWarning(Warning): - """Base class for warnings about dubious syntax. -""" + """Base class for warnings about dubious syntax.""" + class RuntimeWarning(Warning): - """Base class for warnings about dubious runtime behavior. -""" + """Base class for warnings about dubious runtime behavior.""" + class FutureWarning(Warning): """Base class for warnings about constructs that will change semantically -in the future. -""" + in the future. + """ + class PendingDeprecationWarning(Warning): """Base class for warnings about features which will be deprecated -in the future. -""" + in the future. + """ + class ImportWarning(Warning): - """Base class for warnings about probable mistakes in module imports -""" + """Base class for warnings about probable mistakes in module imports""" + class UnicodeWarning(Warning): """Base class for warnings about Unicode related problems, mostly -related to conversion problems. -""" + related to conversion problems. + """ + class BytesWarning(Warning): """Base class for warnings about bytes and buffer related problems, mostly -related to conversion from str or comparing to str. -""" + related to conversion from str or comparing to str. + """ + class ResourceWarning(Warning): - """Base class for warnings about resource usage. -""" + """Base class for warnings about resource usage.""" if sys.version_info >= (3, 10): class EncodingWarning(Warning): - """Base class for warnings about encodings. -""" + """Base class for warnings about encodings.""" if sys.version_info >= (3, 11): _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True, default=BaseException) @@ -4695,18 +4847,18 @@ if sys.version_info >= (3, 11): # See `check_exception_group.py` for use-cases and comments. @disjoint_base class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): - """A combination of multiple unrelated exceptions. 
-""" + """A combination of multiple unrelated exceptions.""" + def __new__(cls, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_BaseExceptionT_co], /) -> None: ... @property def message(self) -> str: - """exception message -""" + """exception message""" + @property def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: - """nested exceptions -""" + """nested exceptions""" + @overload def subgroup( self, matcher_value: type[_ExceptionT] | tuple[type[_ExceptionT], ...], / @@ -4737,16 +4889,14 @@ if sys.version_info >= (3, 11): @overload def derive(self, excs: Sequence[_BaseExceptionT], /) -> BaseExceptionGroup[_BaseExceptionT]: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): def __new__(cls, message: str, exceptions: Sequence[_ExceptionT_co], /) -> Self: ... def __init__(self, message: str, exceptions: Sequence[_ExceptionT_co], /) -> None: ... @property def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: - """nested exceptions -""" + """nested exceptions""" # We accept a narrower type, but that's OK. @overload # type: ignore[override] def subgroup( @@ -4767,5 +4917,4 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 13): class PythonFinalizationError(RuntimeError): - """Operation blocked during Python finalization. -""" + """Operation blocked during Python finalization.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi index 0d9b74be7bd07..84b3b4e30ec8b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/bz2.pyi @@ -3,6 +3,7 @@ This module provides a file interface, classes for incremental (de)compression, and functions for one-shot (de)compression. """ + import sys from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer @@ -34,15 +35,16 @@ class _WritableFileobj(Protocol): def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: """Compress a block of data. -compresslevel, if given, must be a number between 1 and 9. + compresslevel, if given, must be a number between 1 and 9. + + For incremental compression, use a BZ2Compressor object instead. + """ -For incremental compression, use a BZ2Compressor object instead. -""" def decompress(data: ReadableBuffer) -> bytes: """Decompress a block of data. -For incremental decompression, use a BZ2Decompressor object instead. -""" + For incremental decompression, use a BZ2Decompressor object instead. + """ _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -60,23 +62,24 @@ def open( ) -> BZ2File: """Open a bzip2-compressed file in binary or text mode. -The filename argument can be an actual filename (a str, bytes, or -PathLike object), or an existing file object to read from or write -to. + The filename argument can be an actual filename (a str, bytes, or + PathLike object), or an existing file object to read from or write + to. -The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or -"ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode. -The default mode is "rb", and the default compresslevel is 9. 
+ The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or + "ab" for binary mode, or "rt", "wt", "xt" or "at" for text mode. + The default mode is "rb", and the default compresslevel is 9. -For binary mode, this function is equivalent to the BZ2File -constructor: BZ2File(filename, mode, compresslevel). In this case, -the encoding, errors and newline arguments must not be provided. + For binary mode, this function is equivalent to the BZ2File + constructor: BZ2File(filename, mode, compresslevel). In this case, + the encoding, errors and newline arguments must not be provided. -For text mode, a BZ2File object is created, and wrapped in an -io.TextIOWrapper instance with the specified encoding, error -handling behavior, and line ending(s). + For text mode, a BZ2File object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error + handling behavior, and line ending(s). + + """ -""" @overload def open( filename: _ReadableFileobj, @@ -135,32 +138,34 @@ def open( class BZ2File(BaseStream, IO[bytes]): """A file object providing transparent bzip2 (de)compression. -A BZ2File can act as a wrapper for an existing file object, or refer -directly to a named file on disk. + A BZ2File can act as a wrapper for an existing file object, or refer + directly to a named file on disk. + + Note that BZ2File provides a *binary* file interface - data read is + returned as bytes, and data to be written should be given as bytes. + """ -Note that BZ2File provides a *binary* file interface - data read is -returned as bytes, and data to be written should be given as bytes. -""" def __enter__(self) -> Self: ... @overload def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: """Open a bzip2-compressed file. -If filename is a str, bytes, or PathLike object, it gives the -name of the file to be opened. Otherwise, it should be a file -object, which will be used to read or write the compressed data. + If filename is a str, bytes, or PathLike object, it gives the + name of the file to be opened. Otherwise, it should be a file + object, which will be used to read or write the compressed data. -mode can be 'r' for reading (default), 'w' for (over)writing, -'x' for creating exclusively, or 'a' for appending. These can -equivalently be given as 'rb', 'wb', 'xb', and 'ab'. + mode can be 'r' for reading (default), 'w' for (over)writing, + 'x' for creating exclusively, or 'a' for appending. These can + equivalently be given as 'rb', 'wb', 'xb', and 'ab'. -If mode is 'w', 'x' or 'a', compresslevel can be a number between 1 -and 9 specifying the level of compression: 1 produces the least -compression, and 9 (default) produces the most compression. + If mode is 'w', 'x' or 'a', compresslevel can be a number between 1 + and 9 specifying the level of compression: 1 produces the least + compression, and 9 (default) produces the most compression. + + If mode is 'r', the input file may be the concatenation of + multiple compressed streams. + """ -If mode is 'r', the input file may be the concatenation of -multiple compressed streams. -""" @overload def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... @overload @@ -170,69 +175,77 @@ multiple compressed streams. def read(self, size: int | None = -1) -> bytes: """Read up to size uncompressed bytes from the file. -If size is negative or omitted, read until EOF is reached. -Returns b'' if the file is already at EOF. 
-""" + If size is negative or omitted, read until EOF is reached. + Returns b'' if the file is already at EOF. + """ + def read1(self, size: int = -1) -> bytes: """Read up to size uncompressed bytes, while trying to avoid -making multiple reads from the underlying stream. Reads up to a -buffer's worth of data if size is negative. + making multiple reads from the underlying stream. Reads up to a + buffer's worth of data if size is negative. + + Returns b'' if the file is at EOF. + """ -Returns b'' if the file is at EOF. -""" def readline(self, size: SupportsIndex = -1) -> bytes: # type: ignore[override] """Read a line of uncompressed bytes from the file. -The terminating newline (if present) is retained. If size is -non-negative, no more than size bytes will be read (in which -case the line may be incomplete). Returns b'' if already at EOF. -""" + The terminating newline (if present) is retained. If size is + non-negative, no more than size bytes will be read (in which + case the line may be incomplete). Returns b'' if already at EOF. + """ + def readinto(self, b: WriteableBuffer) -> int: """Read bytes into b. -Returns the number of bytes read (0 for EOF). -""" + Returns the number of bytes read (0 for EOF). + """ + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: """Read a list of lines of uncompressed bytes from the file. -size can be specified to control the number of lines read: no -further lines will be read once the total size of the lines read -so far equals or exceeds size. -""" + size can be specified to control the number of lines read: no + further lines will be read once the total size of the lines read + so far equals or exceeds size. + """ + def peek(self, n: int = 0) -> bytes: """Return buffered data without advancing the file position. -Always returns at least one byte of data, unless at EOF. -The exact number of bytes returned is unspecified. -""" + Always returns at least one byte of data, unless at EOF. + The exact number of bytes returned is unspecified. + """ + def seek(self, offset: int, whence: int = 0) -> int: """Change the file position. -The new position is specified by offset, relative to the -position indicated by whence. Values for whence are: + The new position is specified by offset, relative to the + position indicated by whence. Values for whence are: - 0: start of stream (default); offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive + 0: start of stream (default); offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive -Returns the new file position. + Returns the new file position. + + Note that seeking is emulated, so depending on the parameters, + this operation may be extremely slow. + """ -Note that seeking is emulated, so depending on the parameters, -this operation may be extremely slow. -""" def write(self, data: ReadableBuffer) -> int: """Write a byte string to the file. -Returns the number of uncompressed bytes written, which is -always the length of data in bytes. Note that due to buffering, -the file on disk may not reflect the data written until close() -is called. -""" + Returns the number of uncompressed bytes written, which is + always the length of data in bytes. Note that due to buffering, + the file on disk may not reflect the data written until close() + is called. + """ + def writelines(self, seq: Iterable[ReadableBuffer]) -> None: """Write a sequence of byte strings to the file. 
-Returns the number of uncompressed bytes written. -seq can be any iterable yielding byte strings. + Returns the number of uncompressed bytes written. + seq can be any iterable yielding byte strings. -Line separators are not added between the written byte strings. -""" + Line separators are not added between the written byte strings. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi index 6364cacf8d47b..008bb72e507c3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cProfile.pyi @@ -1,6 +1,7 @@ """Python interface for the 'lsprof' profiler. Compatible with the 'profile' module. """ + import _lsprof from _typeshed import StrOrBytesPath, Unused from collections.abc import Callable, Mapping @@ -13,22 +14,23 @@ __all__ = ["run", "runctx", "Profile"] def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: """Run statement under profiler optionally saving results in filename -This function takes a single argument that can be passed to the -"exec" statement, and an optional file name. In all cases this -routine attempts to "exec" its first argument and gather profiling -statistics from the execution. If no file name is present, then this -function automatically prints a simple profiling report, sorted by the -standard name string (file/line/function-name) that is presented in -each line. -""" + This function takes a single argument that can be passed to the + "exec" statement, and an optional file name. In all cases this + routine attempts to "exec" its first argument and gather profiling + statistics from the execution. If no file name is present, then this + function automatically prints a simple profiling report, sorted by the + standard name string (file/line/function-name) that is presented in + each line. + """ + def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: """Run statement under profiler, supplying your own globals and locals, -optionally saving results in filename. + optionally saving results in filename. -statement and filename have the same semantics as profile.run -""" + statement and filename have the same semantics as profile.run + """ _T = TypeVar("_T") _P = ParamSpec("_P") @@ -37,12 +39,13 @@ _Label: TypeAlias = tuple[str, int, str] class Profile(_lsprof.Profiler): """Profile(timer=None, timeunit=None, subcalls=True, builtins=True) -Builds a profiler object using the specified timer function. -The default timer is a fast built-in one based on real time. -For custom timer functions returning integers, timeunit can -be a float specifying a scale (i.e. how long each integer unit -is, in seconds). -""" + Builds a profiler object using the specified timer function. + The default timer is a fast built-in one based on real time. + For custom timer functions returning integers, timeunit can + be a float specifying a scale (i.e. how long each integer unit + is, in seconds). + """ + stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... 
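A minimal usage sketch of the cProfile.Profile API whose docstrings are re-indented in the hunk above; the work() function and the "cumulative" sort key are illustrative assumptions and are not drawn from this diff.

    import cProfile

    def work():
        # hypothetical workload, used only for illustration
        return sum(i * i for i in range(10_000))

    profiler = cProfile.Profile()   # default fast built-in timer, per the Profile docstring above
    profiler.enable()
    work()
    profiler.disable()
    profiler.print_stats(sort="cumulative")   # sort accepts a str or int, matching the stub signature
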
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi index 280cd18c82018..3b2aa61ceb873 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/calendar.pyi @@ -5,6 +5,7 @@ default, these calendars have Monday as the first day of the week, and Sunday as the last (the European convention). Use setfirstweekday() to set the first day of the week (0=Monday, 6=Sunday). """ + import datetime import enum import sys @@ -70,181 +71,210 @@ class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... def isleap(year: int) -> bool: - """Return True for leap years, False for non-leap years. -""" + """Return True for leap years, False for non-leap years.""" + def leapdays(y1: int, y2: int) -> int: """Return number of leap years in range [y1, y2). -Assume y1 <= y2. -""" + Assume y1 <= y2. + """ + def weekday(year: int, month: int, day: int) -> int: - """Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31). -""" + """Return weekday (0-6 ~ Mon-Sun) for year, month (1-12), day (1-31).""" + def monthrange(year: int, month: int) -> tuple[int, int]: """Return weekday of first day of month (0-6 ~ Mon-Sun) -and number of days (28-31) for year, month. -""" + and number of days (28-31) for year, month. + """ class Calendar: """ -Base calendar class. This class doesn't do any formatting. It simply -provides data to subclasses. -""" + Base calendar class. This class doesn't do any formatting. It simply + provides data to subclasses. + """ + firstweekday: int def __init__(self, firstweekday: int = 0) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: """ -Return an iterator for one week of weekday numbers starting with the -configured first one. -""" + Return an iterator for one week of weekday numbers starting with the + configured first one. + """ + def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: """ -Return an iterator for one month. The iterator will yield datetime.date -values and will always iterate through complete weeks, so it will yield -dates outside the specified month. -""" + Return an iterator for one month. The iterator will yield datetime.date + values and will always iterate through complete weeks, so it will yield + dates outside the specified month. + """ + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: """ -Like itermonthdates(), but will yield (day number, weekday number) -tuples. For days outside the specified month the day number is 0. -""" + Like itermonthdates(), but will yield (day number, weekday number) + tuples. For days outside the specified month the day number is 0. + """ + def itermonthdays(self, year: int, month: int) -> Iterable[int]: """ -Like itermonthdates(), but will yield day numbers. For days outside -the specified month the day number is 0. -""" + Like itermonthdates(), but will yield day numbers. For days outside + the specified month the day number is 0. + """ + def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: """ -Return a matrix (list of lists) representing a month's calendar. -Each row represents a week; week entries are datetime.date values. -""" + Return a matrix (list of lists) representing a month's calendar. + Each row represents a week; week entries are datetime.date values. 
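A short sketch of the Calendar data methods documented above; the year/month values and the Sunday-first configuration are illustrative only.

import calendar

cal = calendar.Calendar(firstweekday=calendar.SUNDAY)

# monthdatescalendar(): weeks of datetime.date objects, padded to complete weeks.
for week in cal.monthdatescalendar(2025, 10):
    print([d.isoformat() for d in week])

# itermonthdays2(): (day number, weekday number) pairs; padding days have day 0.
print(list(cal.itermonthdays2(2025, 10))[:7])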
+ """ + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: """ -Return a matrix representing a month's calendar. -Each row represents a week; week entries are -(day number, weekday number) tuples. Day numbers outside this month -are zero. -""" + Return a matrix representing a month's calendar. + Each row represents a week; week entries are + (day number, weekday number) tuples. Day numbers outside this month + are zero. + """ + def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: """ -Return a matrix representing a month's calendar. -Each row represents a week; days outside this month are zero. -""" + Return a matrix representing a month's calendar. + Each row represents a week; days outside this month are zero. + """ + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[list[list[datetime.date]]]]: """ -Return the data for the specified year ready for formatting. The return -value is a list of month rows. Each month row contains up to width months. -Each month contains between 4 and 6 weeks and each week contains 1-7 -days. Days are datetime.date objects. -""" + Return the data for the specified year ready for formatting. The return + value is a list of month rows. Each month row contains up to width months. + Each month contains between 4 and 6 weeks and each week contains 1-7 + days. Days are datetime.date objects. + """ + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[list[list[tuple[int, int]]]]]: """ -Return the data for the specified year ready for formatting (similar to -yeardatescalendar()). Entries in the week lists are -(day number, weekday number) tuples. Day numbers outside this month are -zero. -""" + Return the data for the specified year ready for formatting (similar to + yeardatescalendar()). Entries in the week lists are + (day number, weekday number) tuples. Day numbers outside this month are + zero. + """ + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[list[list[int]]]]: """ -Return the data for the specified year ready for formatting (similar to -yeardatescalendar()). Entries in the week lists are day numbers. -Day numbers outside this month are zero. -""" + Return the data for the specified year ready for formatting (similar to + yeardatescalendar()). Entries in the week lists are day numbers. + Day numbers outside this month are zero. + """ + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: """ -Like itermonthdates(), but will yield (year, month, day) tuples. Can be -used for dates outside of datetime.date range. -""" + Like itermonthdates(), but will yield (year, month, day) tuples. Can be + used for dates outside of datetime.date range. + """ + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: """ -Like itermonthdates(), but will yield (year, month, day, day_of_week) tuples. -Can be used for dates outside of datetime.date range. -""" + Like itermonthdates(), but will yield (year, month, day, day_of_week) tuples. + Can be used for dates outside of datetime.date range. + """ class TextCalendar(Calendar): """ -Subclass of Calendar that outputs a calendar as a simple plain text -similar to the UNIX program cal. -""" + Subclass of Calendar that outputs a calendar as a simple plain text + similar to the UNIX program cal. + """ + def prweek(self, theweek: int, width: int) -> None: """ -Print a single week (no newline). -""" + Print a single week (no newline). 
+ """ + def formatday(self, day: int, weekday: int, width: int) -> str: """ -Returns a formatted day. -""" + Returns a formatted day. + """ + def formatweek(self, theweek: int, width: int) -> str: """ -Returns a single week in a string (no newline). -""" + Returns a single week in a string (no newline). + """ + def formatweekday(self, day: int, width: int) -> str: """ -Returns a formatted week day name. -""" + Returns a formatted week day name. + """ + def formatweekheader(self, width: int) -> str: """ -Return a header for a week. -""" + Return a header for a week. + """ + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: """ -Return a formatted month name. -""" + Return a formatted month name. + """ + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: """ -Print a month's calendar. -""" + Print a month's calendar. + """ + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: """ -Return a month's calendar string (multi-line). -""" + Return a month's calendar string (multi-line). + """ + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: """ -Returns a year's calendar as a multi-line string. -""" + Returns a year's calendar as a multi-line string. + """ + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: - """Print a year's calendar. -""" + """Print a year's calendar.""" def firstweekday() -> int: ... def monthcalendar(year: int, month: int) -> list[list[int]]: """ -Return a matrix representing a month's calendar. -Each row represents a week; days outside this month are zero. -""" + Return a matrix representing a month's calendar. + Each row represents a week; days outside this month are zero. + """ + def prweek(theweek: int, width: int) -> None: """ -Print a single week (no newline). -""" + Print a single week (no newline). + """ + def week(theweek: int, width: int) -> str: """ -Returns a single week in a string (no newline). -""" + Returns a single week in a string (no newline). + """ + def weekheader(width: int) -> str: """ -Return a header for a week. -""" + Return a header for a week. + """ + def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: """ -Print a month's calendar. -""" + Print a month's calendar. + """ + def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: """ -Return a month's calendar string (multi-line). -""" + Return a month's calendar string (multi-line). + """ + def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: """ -Returns a year's calendar as a multi-line string. -""" + Returns a year's calendar as a multi-line string. + """ + def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: - """Print a year's calendar. -""" + """Print a year's calendar.""" class HTMLCalendar(Calendar): """ -This calendar returns complete HTML pages. -""" + This calendar returns complete HTML pages. + """ + cssclasses: ClassVar[list[str]] cssclass_noday: ClassVar[str] cssclasses_weekday_head: ClassVar[list[str]] @@ -254,38 +284,45 @@ This calendar returns complete HTML pages. cssclass_year_head: ClassVar[str] def formatday(self, day: int, weekday: int) -> str: """ -Return a day as a table cell. -""" + Return a day as a table cell. + """ + def formatweek(self, theweek: int) -> str: """ -Return a complete week as a table row. -""" + Return a complete week as a table row. 
+ """ + def formatweekday(self, day: int) -> str: """ -Return a weekday name as a table header. -""" + Return a weekday name as a table header. + """ + def formatweekheader(self) -> str: """ -Return a header for a week as a table row. -""" + Return a header for a week as a table row. + """ + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: """ -Return a month name as a table row. -""" + Return a month name as a table row. + """ + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: """ -Return a formatted month as a table. -""" + Return a formatted month as a table. + """ + def formatyear(self, theyear: int, width: int = 3) -> str: """ -Return a formatted year as a table of tables. -""" + Return a formatted year as a table of tables. + """ + def formatyearpage( self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None ) -> bytes: """ -Return a formatted year as a complete HTML page. -""" + Return a formatted year as a complete HTML page. + """ class different_locale: def __init__(self, locale: _LocaleType) -> None: ... @@ -294,16 +331,18 @@ class different_locale: class LocaleTextCalendar(TextCalendar): """ -This class can be passed a locale name in the constructor and will return -month and weekday names in the specified locale. -""" + This class can be passed a locale name in the constructor and will return + month and weekday names in the specified locale. + """ + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... class LocaleHTMLCalendar(HTMLCalendar): """ -This class can be passed a locale name in the constructor and will return -month and weekday names in the specified locale. -""" + This class can be passed a locale name in the constructor and will return + month and weekday names in the specified locale. + """ + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... def formatweekday(self, day: int) -> str: ... def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... @@ -312,14 +351,13 @@ c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: - """Prints multi-column formatting for year calendars -""" + """Prints multi-column formatting for year calendars""" + def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: - """Returns a string formatted from n strings, centered within n columns. -""" + """Returns a string formatted from n strings, centered within n columns.""" + def timegm(tuple: tuple[int, ...] | struct_time) -> int: - """Unrelated but handy function to calculate Unix timestamp from GMT. -""" + """Unrelated but handy function to calculate Unix timestamp from GMT.""" # Data attributes day_name: Sequence[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi index 336fc90f7a14a..1154503deb7b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgi.pyi @@ -8,6 +8,7 @@ of a POST request. POST requests larger than this size will result in a ValueError being raised during parsing. The default value of this variable is 0, meaning the request size is unlimited. 
""" + import os from _typeshed import SupportsContainsAndGetItem, SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type @@ -41,26 +42,27 @@ def parse( ) -> dict[str, list[str]]: """Parse a query in the environment or from a file (default stdin) - Arguments, all optional: + Arguments, all optional: - fp : file pointer; default: sys.stdin.buffer + fp : file pointer; default: sys.stdin.buffer - environ : environment dictionary; default: os.environ + environ : environment dictionary; default: os.environ - keep_blank_values: flag indicating whether blank values in - percent-encoded forms should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. + keep_blank_values: flag indicating whether blank values in + percent-encoded forms should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. - strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. - separator: str. The symbol to use for separating the query arguments. - Defaults to &. + separator: str. The symbol to use for separating the query arguments. + Defaults to &. """ + def parse_multipart( fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" ) -> dict[str, list[Any]]: @@ -76,6 +78,7 @@ def parse_multipart( value is a list of values for that field. For non-file fields, the value is a list of strings. """ + @type_check_only class _Environ(Protocol): def __getitem__(self, k: str, /) -> str: ... @@ -87,6 +90,7 @@ def parse_header(line: str) -> tuple[str, dict[str, str]]: Return the main content-type and a dictionary of options. """ + def test(environ: _Environ = os.environ) -> None: """Robust test CGI script, usable as main program. @@ -94,22 +98,22 @@ def test(environ: _Environ = os.environ) -> None: the script in HTML form. """ + def print_environ(environ: _Environ = os.environ) -> None: - """Dump the shell environment as HTML. -""" + """Dump the shell environment as HTML.""" + def print_form(form: dict[str, Any]) -> None: - """Dump the contents of a form as HTML. -""" + """Dump the contents of a form as HTML.""" + def print_directory() -> None: - """Dump the current directory as HTML. -""" + """Dump the current directory as HTML.""" + def print_environ_usage() -> None: - """Dump a list of environment variables used by CGI as HTML. -""" + """Dump a list of environment variables used by CGI as HTML.""" class MiniFieldStorage: - """Like FieldStorage, for use when no file uploads are possible. -""" + """Like FieldStorage, for use when no file uploads are possible.""" + # The first five "Any" attributes here are always None, but mypy doesn't support that filename: Any list: Any @@ -122,8 +126,7 @@ class MiniFieldStorage: name: Any value: Any def __init__(self, name: Any, value: Any) -> None: - """Constructor from field name and value. 
-""" + """Constructor from field name and value.""" class FieldStorage: """Store a sequence of fields, reading multipart/form-data. @@ -167,6 +170,7 @@ class FieldStorage: directory and unlinking them as soon as they have been opened. """ + FieldStorageClass: _type | None keep_blank_values: int strict_parsing: int @@ -246,30 +250,31 @@ class FieldStorage: if there are more than n fields read by parse_qsl(). """ + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... def __getitem__(self, key: str) -> Any: - """Dictionary style indexing. -""" + """Dictionary style indexing.""" + def getvalue(self, key: str, default: Any = None) -> Any: - """Dictionary style get() method, including 'value' lookup. -""" + """Dictionary style get() method, including 'value' lookup.""" + def getfirst(self, key: str, default: Any = None) -> Any: - """ Return the first value received. -""" + """Return the first value received.""" + def getlist(self, key: str) -> _list[Any]: - """ Return list of received values. -""" + """Return list of received values.""" + def keys(self) -> _list[str]: - """Dictionary style keys() method. -""" + """Dictionary style keys() method.""" + def __contains__(self, key: str) -> bool: - """Dictionary style __contains__ method. -""" + """Dictionary style __contains__ method.""" + def __len__(self) -> int: - """Dictionary style len(x) support. -""" + """Dictionary style len(x) support.""" + def __bool__(self) -> bool: ... def __del__(self) -> None: ... # Returns bytes or str IO depending on an internal flag diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi index 9f02d6defa0f5..64119f136f339 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cgitb.pyi @@ -21,6 +21,7 @@ for you, call cgitb.handler(). The optional argument to handler() is a The default handler displays output as HTML. """ + from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType @@ -29,29 +30,28 @@ from typing import IO, Any, Final __UNDEF__: Final[object] # undocumented sentinel def reset() -> str: # undocumented - """Return a string that resets the CGI and browser to a known state. -""" + """Return a string that resets the CGI and browser to a known state.""" + def small(text: str) -> str: ... # undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... # undocumented def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: # undocumented - """Find the value for a given name in the given environment. -""" + """Find the value for a given name in the given environment.""" + def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] ) -> list[tuple[str, str | None, Any]]: # undocumented - """Scan one logical line of Python and look up values of variables used. -""" + """Scan one logical line of Python and look up values of variables used.""" + def html(einfo: OptExcInfo, context: int = 5) -> str: - """Return a nice HTML document describing a given traceback. -""" + """Return a nice HTML document describing a given traceback.""" + def text(einfo: OptExcInfo, context: int = 5) -> str: - """Return a plain text document describing a given traceback. 
-""" + """Return a plain text document describing a given traceback.""" class Hook: # undocumented - """A hook to replace sys.excepthook that shows tracebacks in HTML. -""" + """A hook to replace sys.excepthook that shows tracebacks in HTML.""" + def __init__( self, display: int = 1, @@ -70,4 +70,4 @@ def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int The optional argument 'display' can be set to 0 to suppress sending the traceback to the browser, and 'logdir' can be set to a directory to cause tracebacks to be written to files there. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi index 2814433192b5c..3f2c3a5b8c3da 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/chunk.pyi @@ -47,6 +47,7 @@ The __init__ method has one required argument, a file-like object specifies whether or not chunks are aligned on 2-byte boundaries. The default is 1, i.e. aligned. """ + from typing import IO class Chunk: @@ -60,11 +61,11 @@ class Chunk: seekable: bool def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... def getname(self) -> bytes: - """Return the name (ID) of the current chunk. -""" + """Return the name (ID) of the current chunk.""" + def getsize(self) -> int: - """Return the size of the current chunk. -""" + """Return the size of the current chunk.""" + def close(self) -> None: ... def isatty(self) -> bool: ... def seek(self, pos: int, whence: int = 0) -> None: @@ -72,12 +73,14 @@ class Chunk: Default position is 0 (start of chunk). If the file is not seekable, this will result in an error. """ + def tell(self) -> int: ... def read(self, size: int = -1) -> bytes: """Read at most size bytes from the chunk. If size is omitted or negative, read until the end of the chunk. """ + def skip(self) -> None: """Skip the rest of the chunk. If you are not interested in the contents of the chunk, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi index 4d9de751052a0..575f2bf95dacb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi @@ -1,6 +1,7 @@ """This module provides access to mathematical functions for complex numbers. """ + from typing import Final, SupportsComplex, SupportsFloat, SupportsIndex from typing_extensions import TypeAlias @@ -15,90 +16,92 @@ tau: Final[float] _C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex def acos(z: _C, /) -> complex: - """Return the arc cosine of z. -""" + """Return the arc cosine of z.""" + def acosh(z: _C, /) -> complex: - """Return the inverse hyperbolic cosine of z. -""" + """Return the inverse hyperbolic cosine of z.""" + def asin(z: _C, /) -> complex: - """Return the arc sine of z. -""" + """Return the arc sine of z.""" + def asinh(z: _C, /) -> complex: - """Return the inverse hyperbolic sine of z. -""" + """Return the inverse hyperbolic sine of z.""" + def atan(z: _C, /) -> complex: - """Return the arc tangent of z. -""" + """Return the arc tangent of z.""" + def atanh(z: _C, /) -> complex: - """Return the inverse hyperbolic tangent of z. -""" + """Return the inverse hyperbolic tangent of z.""" + def cos(z: _C, /) -> complex: - """Return the cosine of z. -""" + """Return the cosine of z.""" + def cosh(z: _C, /) -> complex: - """Return the hyperbolic cosine of z. 
-""" + """Return the hyperbolic cosine of z.""" + def exp(z: _C, /) -> complex: - """Return the exponential value e**z. -""" + """Return the exponential value e**z.""" + def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: """Determine whether two complex numbers are close in value. - rel_tol - maximum difference for being considered "close", relative to the - magnitude of the input values - abs_tol - maximum difference for being considered "close", regardless of the - magnitude of the input values + rel_tol + maximum difference for being considered "close", relative to the + magnitude of the input values + abs_tol + maximum difference for being considered "close", regardless of the + magnitude of the input values -Return True if a is close in value to b, and False otherwise. + Return True if a is close in value to b, and False otherwise. -For the values to be considered close, the difference between them must be -smaller than at least one of the tolerances. + For the values to be considered close, the difference between them must be + smaller than at least one of the tolerances. + + -inf, inf and NaN behave similarly to the IEEE 754 Standard. That is, NaN is + not close to anything, even itself. inf and -inf are only close to themselves. + """ --inf, inf and NaN behave similarly to the IEEE 754 Standard. That is, NaN is -not close to anything, even itself. inf and -inf are only close to themselves. -""" def isinf(z: _C, /) -> bool: - """Checks if the real or imaginary part of z is infinite. -""" + """Checks if the real or imaginary part of z is infinite.""" + def isnan(z: _C, /) -> bool: - """Checks if the real or imaginary part of z not a number (NaN). -""" + """Checks if the real or imaginary part of z not a number (NaN).""" + def log(x: _C, base: _C = ..., /) -> complex: """log(z[, base]) -> the logarithm of z to the given base. -If the base is not specified, returns the natural logarithm (base e) of z. -""" + If the base is not specified, returns the natural logarithm (base e) of z. + """ + def log10(z: _C, /) -> complex: - """Return the base-10 logarithm of z. -""" + """Return the base-10 logarithm of z.""" + def phase(z: _C, /) -> float: - """Return argument, also known as the phase angle, of a complex. -""" + """Return argument, also known as the phase angle, of a complex.""" + def polar(z: _C, /) -> tuple[float, float]: """Convert a complex from rectangular coordinates to polar coordinates. -r is the distance from 0 and phi the phase angle. -""" + r is the distance from 0 and phi the phase angle. + """ + def rect(r: float, phi: float, /) -> complex: - """Convert from polar coordinates to rectangular coordinates. -""" + """Convert from polar coordinates to rectangular coordinates.""" + def sin(z: _C, /) -> complex: - """Return the sine of z. -""" + """Return the sine of z.""" + def sinh(z: _C, /) -> complex: - """Return the hyperbolic sine of z. -""" + """Return the hyperbolic sine of z.""" + def sqrt(z: _C, /) -> complex: - """Return the square root of z. -""" + """Return the square root of z.""" + def tan(z: _C, /) -> complex: - """Return the tangent of z. -""" + """Return the tangent of z.""" + def tanh(z: _C, /) -> complex: - """Return the hyperbolic tangent of z. -""" + """Return the hyperbolic tangent of z.""" + def isfinite(z: _C, /) -> bool: - """Return True if both the real and imaginary parts of z are finite, else False. 
-""" + """Return True if both the real and imaginary parts of z are finite, else False.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi index 14b251712dcb1..a2ffd98d5f5bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmd.pyi @@ -41,6 +41,7 @@ The data members 'self.doc_header', 'self.misc_header', and listings of documented functions, miscellaneous topics, and undocumented functions respectively. """ + from collections.abc import Callable from typing import IO, Any, Final from typing_extensions import LiteralString @@ -53,15 +54,16 @@ IDENTCHARS: Final[LiteralString] # Too big to be `Literal` class Cmd: """A simple framework for writing line-oriented command interpreters. -These are often useful for test harnesses, administrative tools, and -prototypes that will later be wrapped in a more sophisticated interface. + These are often useful for test harnesses, administrative tools, and + prototypes that will later be wrapped in a more sophisticated interface. -A Cmd instance or subclass instance is a line-oriented interpreter -framework. There is no good reason to instantiate Cmd itself; rather, -it's useful as a superclass of an interpreter class you define yourself -in order to inherit Cmd's methods and encapsulate action methods. + A Cmd instance or subclass instance is a line-oriented interpreter + framework. There is no good reason to instantiate Cmd itself; rather, + it's useful as a superclass of an interpreter class you define yourself + in order to inherit Cmd's methods and encapsulate action methods. + + """ -""" prompt: str identchars: str ruler: str @@ -80,91 +82,100 @@ in order to inherit Cmd's methods and encapsulate action methods. def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: """Instantiate a line-oriented interpreter framework. -The optional argument 'completekey' is the readline name of a -completion key; it defaults to the Tab key. If completekey is -not None and the readline module is available, command completion -is done automatically. The optional arguments stdin and stdout -specify alternate input and output file objects; if not specified, -sys.stdin and sys.stdout are used. + The optional argument 'completekey' is the readline name of a + completion key; it defaults to the Tab key. If completekey is + not None and the readline module is available, command completion + is done automatically. The optional arguments stdin and stdout + specify alternate input and output file objects; if not specified, + sys.stdin and sys.stdout are used. -""" + """ old_completer: Callable[[str, int], str | None] | None def cmdloop(self, intro: Any | None = None) -> None: """Repeatedly issue a prompt, accept input, parse an initial prefix -off the received input, and dispatch to action methods, passing them -the remainder of the line as argument. + off the received input, and dispatch to action methods, passing them + the remainder of the line as argument. + + """ -""" def precmd(self, line: str) -> str: """Hook method executed just before the command line is -interpreted, but after the input prompt is generated and issued. + interpreted, but after the input prompt is generated and issued. + + """ -""" def postcmd(self, stop: bool, line: str) -> bool: - """Hook method executed just after a command dispatch is finished. 
-""" + """Hook method executed just after a command dispatch is finished.""" + def preloop(self) -> None: - """Hook method executed once when the cmdloop() method is called. -""" + """Hook method executed once when the cmdloop() method is called.""" + def postloop(self) -> None: """Hook method executed once when the cmdloop() method is about to -return. + return. + + """ -""" def parseline(self, line: str) -> tuple[str | None, str | None, str]: """Parse the line into a command name and a string containing -the arguments. Returns a tuple containing (command, args, line). -'command' and 'args' may be None if the line couldn't be parsed. -""" + the arguments. Returns a tuple containing (command, args, line). + 'command' and 'args' may be None if the line couldn't be parsed. + """ + def onecmd(self, line: str) -> bool: """Interpret the argument as though it had been typed in response -to the prompt. + to the prompt. -This may be overridden, but should not normally need to be; -see the precmd() and postcmd() methods for useful execution hooks. -The return value is a flag indicating whether interpretation of -commands by the interpreter should stop. + This may be overridden, but should not normally need to be; + see the precmd() and postcmd() methods for useful execution hooks. + The return value is a flag indicating whether interpretation of + commands by the interpreter should stop. + + """ -""" def emptyline(self) -> bool: """Called when an empty line is entered in response to the prompt. -If this method is not overridden, it repeats the last nonempty -command entered. + If this method is not overridden, it repeats the last nonempty + command entered. + + """ -""" def default(self, line: str) -> None: """Called on an input line when the command prefix is not recognized. -If this method is not overridden, it prints an error message and -returns. + If this method is not overridden, it prints an error message and + returns. + + """ -""" def completedefault(self, *ignored: Any) -> list[str]: """Method called to complete an input line when no command-specific -complete_*() method is available. + complete_*() method is available. -By default, it returns an empty list. + By default, it returns an empty list. + + """ -""" def completenames(self, text: str, *ignored: Any) -> list[str]: ... completion_matches: list[str] | None def complete(self, text: str, state: int) -> list[str] | None: """Return the next possible completion for 'text'. -If a command has not been entered, then complete against command list. -Otherwise try to call complete_ to get list of completions. -""" + If a command has not been entered, then complete against command list. + Otherwise try to call complete_ to get list of completions. + """ + def get_names(self) -> list[str]: ... # Only the first element of args matters. def complete_help(self, *args: Any) -> list[str]: ... def do_help(self, arg: str) -> bool | None: - """List available commands with "help" or detailed help with "help cmd". -""" + """List available commands with "help" or detailed help with "help cmd".""" + def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: """Display a list of strings as a compact set of columns. -Each column is only as wide as necessary. -Columns are separated by two spaces (one was not legible enough). -""" + Each column is only as wide as necessary. + Columns are separated by two spaces (one was not legible enough). 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi index 36af4c6e1b79d..2a1098ac03a5d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/code.pyi @@ -1,6 +1,5 @@ -"""Utilities needed to emulate Python's interactive interpreter. +"""Utilities needed to emulate Python's interactive interpreter.""" -""" import sys from codeop import CommandCompiler, compile_command as compile_command from collections.abc import Callable @@ -12,108 +11,113 @@ __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_ class InteractiveInterpreter: """Base class for InteractiveConsole. -This class deals with parsing and interpreter state (the user's -namespace); it doesn't deal with input buffering or prompting or -input file naming (the filename is always passed in explicitly). + This class deals with parsing and interpreter state (the user's + namespace); it doesn't deal with input buffering or prompting or + input file naming (the filename is always passed in explicitly). + + """ -""" locals: dict[str, Any] # undocumented compile: CommandCompiler # undocumented def __init__(self, locals: dict[str, Any] | None = None) -> None: """Constructor. -The optional 'locals' argument specifies a mapping to use as the -namespace in which code will be executed; it defaults to a newly -created dictionary with key "__name__" set to "__console__" and -key "__doc__" set to None. + The optional 'locals' argument specifies a mapping to use as the + namespace in which code will be executed; it defaults to a newly + created dictionary with key "__name__" set to "__console__" and + key "__doc__" set to None. + + """ -""" def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: """Compile and run some source in the interpreter. -Arguments are as for compile_command(). + Arguments are as for compile_command(). -One of several things can happen: + One of several things can happen: -1) The input is incorrect; compile_command() raised an -exception (SyntaxError or OverflowError). A syntax traceback -will be printed by calling the showsyntaxerror() method. + 1) The input is incorrect; compile_command() raised an + exception (SyntaxError or OverflowError). A syntax traceback + will be printed by calling the showsyntaxerror() method. -2) The input is incomplete, and more input is required; -compile_command() returned None. Nothing happens. + 2) The input is incomplete, and more input is required; + compile_command() returned None. Nothing happens. -3) The input is complete; compile_command() returned a code -object. The code is executed by calling self.runcode() (which -also handles run-time exceptions, except for SystemExit). + 3) The input is complete; compile_command() returned a code + object. The code is executed by calling self.runcode() (which + also handles run-time exceptions, except for SystemExit). -The return value is True in case 2, False in the other cases (unless -an exception is raised). The return value can be used to -decide whether to use sys.ps1 or sys.ps2 to prompt the next -line. + The return value is True in case 2, False in the other cases (unless + an exception is raised). The return value can be used to + decide whether to use sys.ps1 or sys.ps2 to prompt the next + line. + + """ -""" def runcode(self, code: CodeType) -> None: """Execute a code object. 
-When an exception occurs, self.showtraceback() is called to -display a traceback. All exceptions are caught except -SystemExit, which is reraised. + When an exception occurs, self.showtraceback() is called to + display a traceback. All exceptions are caught except + SystemExit, which is reraised. -A note about KeyboardInterrupt: this exception may occur -elsewhere in this code, and may not always be caught. The -caller should be prepared to deal with it. + A note about KeyboardInterrupt: this exception may occur + elsewhere in this code, and may not always be caught. The + caller should be prepared to deal with it. -""" + """ if sys.version_info >= (3, 13): def showsyntaxerror(self, filename: str | None = None, *, source: str = "") -> None: """Display the syntax error that just occurred. -This doesn't display a stack trace because there isn't one. + This doesn't display a stack trace because there isn't one. -If a filename is given, it is stuffed in the exception instead -of what was there before (because Python's parser always uses -"" when reading from a string). + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "" when reading from a string). -The output is written by self.write(), below. + The output is written by self.write(), below. -""" + """ else: def showsyntaxerror(self, filename: str | None = None) -> None: """Display the syntax error that just occurred. - This doesn't display a stack trace because there isn't one. + This doesn't display a stack trace because there isn't one. - If a filename is given, it is stuffed in the exception instead - of what was there before (because Python's parser always uses - "" when reading from a string). + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "" when reading from a string). - The output is written by self.write(), below. + The output is written by self.write(), below. - """ + """ def showtraceback(self) -> None: """Display the exception that just occurred. -We remove the first stack item because it is our own code. + We remove the first stack item because it is our own code. + + The output is written by self.write(), below. -The output is written by self.write(), below. + """ -""" def write(self, data: str) -> None: """Write a string. -The base implementation writes to sys.stderr; a subclass may -replace this with a different implementation. + The base implementation writes to sys.stderr; a subclass may + replace this with a different implementation. -""" + """ class InteractiveConsole(InteractiveInterpreter): """Closely emulate the behavior of the interactive Python interpreter. -This class builds on InteractiveInterpreter and adds prompting -using the familiar sys.ps1 and sys.ps2, and input buffering. + This class builds on InteractiveInterpreter and adds prompting + using the familiar sys.ps1 and sys.ps2, and input buffering. + + """ -""" buffer: list[str] # undocumented filename: str # undocumented if sys.version_info >= (3, 13): @@ -122,83 +126,86 @@ using the familiar sys.ps1 and sys.ps2, and input buffering. ) -> None: """Constructor. -The optional locals argument will be passed to the -InteractiveInterpreter base class. + The optional locals argument will be passed to the + InteractiveInterpreter base class. -The optional filename argument should specify the (file)name -of the input stream; it will show up in tracebacks. 
+ The optional filename argument should specify the (file)name + of the input stream; it will show up in tracebacks. + + """ -""" def push(self, line: str, filename: str | None = None) -> bool: """Push a line to the interpreter. -The line should not have a trailing newline; it may have -internal newlines. The line is appended to a buffer and the -interpreter's runsource() method is called with the -concatenated contents of the buffer as source. If this -indicates that the command was executed or invalid, the buffer -is reset; otherwise, the command is incomplete, and the buffer -is left as it was after the line was appended. The return -value is 1 if more input is required, 0 if the line was dealt -with in some way (this is the same as runsource()). - -""" + The line should not have a trailing newline; it may have + internal newlines. The line is appended to a buffer and the + interpreter's runsource() method is called with the + concatenated contents of the buffer as source. If this + indicates that the command was executed or invalid, the buffer + is reset; otherwise, the command is incomplete, and the buffer + is left as it was after the line was appended. The return + value is 1 if more input is required, 0 if the line was dealt + with in some way (this is the same as runsource()). + + """ else: def __init__(self, locals: dict[str, Any] | None = None, filename: str = "") -> None: """Constructor. - The optional locals argument will be passed to the - InteractiveInterpreter base class. + The optional locals argument will be passed to the + InteractiveInterpreter base class. - The optional filename argument should specify the (file)name - of the input stream; it will show up in tracebacks. + The optional filename argument should specify the (file)name + of the input stream; it will show up in tracebacks. + + """ - """ def push(self, line: str) -> bool: """Push a line to the interpreter. - The line should not have a trailing newline; it may have - internal newlines. The line is appended to a buffer and the - interpreter's runsource() method is called with the - concatenated contents of the buffer as source. If this - indicates that the command was executed or invalid, the buffer - is reset; otherwise, the command is incomplete, and the buffer - is left as it was after the line was appended. The return - value is 1 if more input is required, 0 if the line was dealt - with in some way (this is the same as runsource()). + The line should not have a trailing newline; it may have + internal newlines. The line is appended to a buffer and the + interpreter's runsource() method is called with the + concatenated contents of the buffer as source. If this + indicates that the command was executed or invalid, the buffer + is reset; otherwise, the command is incomplete, and the buffer + is left as it was after the line was appended. The return + value is 1 if more input is required, 0 if the line was dealt + with in some way (this is the same as runsource()). - """ + """ def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: """Closely emulate the interactive Python console. -The optional banner argument specifies the banner to print -before the first interaction; by default it prints a banner -similar to the one printed by the real Python interpreter, -followed by the current class name in parentheses (so as not -to confuse this with the real interpreter -- since it's so -close!). 
+ The optional banner argument specifies the banner to print + before the first interaction; by default it prints a banner + similar to the one printed by the real Python interpreter, + followed by the current class name in parentheses (so as not + to confuse this with the real interpreter -- since it's so + close!). -The optional exitmsg argument specifies the exit message -printed when exiting. Pass the empty string to suppress -printing an exit message. If exitmsg is not given or None, -a default message is printed. + The optional exitmsg argument specifies the exit message + printed when exiting. Pass the empty string to suppress + printing an exit message. If exitmsg is not given or None, + a default message is printed. + + """ -""" def resetbuffer(self) -> None: - """Reset the input buffer. -""" + """Reset the input buffer.""" + def raw_input(self, prompt: str = "") -> str: """Write a prompt and read a line. -The returned line does not include the trailing newline. -When the user enters the EOF key sequence, EOFError is raised. + The returned line does not include the trailing newline. + When the user enters the EOF key sequence, EOFError is raised. -The base implementation uses the built-in function -input(); a subclass may replace this with a different -implementation. + The base implementation uses the built-in function + input(); a subclass may replace this with a different + implementation. -""" + """ if sys.version_info >= (3, 13): def interact( @@ -210,19 +217,19 @@ if sys.version_info >= (3, 13): ) -> None: """Closely emulate the interactive Python interpreter. -This is a backwards compatible interface to the InteractiveConsole -class. When readfunc is not specified, it attempts to import the -readline module to enable GNU readline if it is available. + This is a backwards compatible interface to the InteractiveConsole + class. When readfunc is not specified, it attempts to import the + readline module to enable GNU readline if it is available. -Arguments (all optional, all default to None): + Arguments (all optional, all default to None): -banner -- passed to InteractiveConsole.interact() -readfunc -- if not None, replaces InteractiveConsole.raw_input() -local -- passed to InteractiveInterpreter.__init__() -exitmsg -- passed to InteractiveConsole.interact() -local_exit -- passed to InteractiveConsole.__init__() + banner -- passed to InteractiveConsole.interact() + readfunc -- if not None, replaces InteractiveConsole.raw_input() + local -- passed to InteractiveInterpreter.__init__() + exitmsg -- passed to InteractiveConsole.interact() + local_exit -- passed to InteractiveConsole.__init__() -""" + """ else: def interact( @@ -233,15 +240,15 @@ else: ) -> None: """Closely emulate the interactive Python interpreter. - This is a backwards compatible interface to the InteractiveConsole - class. When readfunc is not specified, it attempts to import the - readline module to enable GNU readline if it is available. + This is a backwards compatible interface to the InteractiveConsole + class. When readfunc is not specified, it attempts to import the + readline module to enable GNU readline if it is available. 
- Arguments (all optional, all default to None): + Arguments (all optional, all default to None): - banner -- passed to InteractiveConsole.interact() - readfunc -- if not None, replaces InteractiveConsole.raw_input() - local -- passed to InteractiveInterpreter.__init__() - exitmsg -- passed to InteractiveConsole.interact() + banner -- passed to InteractiveConsole.interact() + readfunc -- if not None, replaces InteractiveConsole.raw_input() + local -- passed to InteractiveInterpreter.__init__() + exitmsg -- passed to InteractiveConsole.interact() - """ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi index fc31827455d40..5754989a3f37c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codecs.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import sys import types from _codecs import * @@ -133,8 +134,8 @@ class _BufferedIncrementalDecoder(Protocol): if sys.version_info >= (3, 12): class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): - """Codec details when looking up the codec registry -""" + """Codec details when looking up the codec registry""" + _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -165,8 +166,8 @@ if sys.version_info >= (3, 12): else: @disjoint_base class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): - """Codec details when looking up the codec registry -""" + """Codec details when looking up the codec registry""" + _is_text_encoding: bool @property def encode(self) -> _Encoder: ... @@ -196,124 +197,133 @@ else: def getencoder(encoding: str) -> _Encoder: """Lookup up the codec for the given encoding and return -its encoder function. + its encoder function. -Raises a LookupError in case the encoding cannot be found. + Raises a LookupError in case the encoding cannot be found. + + """ -""" def getdecoder(encoding: str) -> _Decoder: """Lookup up the codec for the given encoding and return -its decoder function. + its decoder function. -Raises a LookupError in case the encoding cannot be found. + Raises a LookupError in case the encoding cannot be found. + + """ -""" def getincrementalencoder(encoding: str) -> _IncrementalEncoder: """Lookup up the codec for the given encoding and return -its IncrementalEncoder class or factory function. + its IncrementalEncoder class or factory function. -Raises a LookupError in case the encoding cannot be found -or the codecs doesn't provide an incremental encoder. + Raises a LookupError in case the encoding cannot be found + or the codecs doesn't provide an incremental encoder. + + """ -""" @overload def getincrementaldecoder(encoding: _BufferedEncoding) -> _BufferedIncrementalDecoder: """Lookup up the codec for the given encoding and return -its IncrementalDecoder class or factory function. + its IncrementalDecoder class or factory function. -Raises a LookupError in case the encoding cannot be found -or the codecs doesn't provide an incremental decoder. + Raises a LookupError in case the encoding cannot be found + or the codecs doesn't provide an incremental decoder. + + """ -""" @overload def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: """Lookup up the codec for the given encoding and return -its StreamReader class or factory function. + its StreamReader class or factory function. 
-Raises a LookupError in case the encoding cannot be found. + Raises a LookupError in case the encoding cannot be found. + + """ -""" def getwriter(encoding: str) -> _StreamWriter: """Lookup up the codec for the given encoding and return -its StreamWriter class or factory function. + its StreamWriter class or factory function. -Raises a LookupError in case the encoding cannot be found. + Raises a LookupError in case the encoding cannot be found. + + """ -""" def open( filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 ) -> StreamReaderWriter: """Open an encoded file using the given mode and return -a wrapped version providing transparent encoding/decoding. + a wrapped version providing transparent encoding/decoding. -Note: The wrapped version will only accept the object format -defined by the codecs, i.e. Unicode objects for most builtin -codecs. Output is also codec dependent and will usually be -Unicode as well. + Note: The wrapped version will only accept the object format + defined by the codecs, i.e. Unicode objects for most builtin + codecs. Output is also codec dependent and will usually be + Unicode as well. -If encoding is not None, then the -underlying encoded files are always opened in binary mode. -The default file mode is 'r', meaning to open the file in read mode. + If encoding is not None, then the + underlying encoded files are always opened in binary mode. + The default file mode is 'r', meaning to open the file in read mode. -encoding specifies the encoding which is to be used for the -file. + encoding specifies the encoding which is to be used for the + file. -errors may be given to define the error handling. It defaults -to 'strict' which causes ValueErrors to be raised in case an -encoding error occurs. + errors may be given to define the error handling. It defaults + to 'strict' which causes ValueErrors to be raised in case an + encoding error occurs. -buffering has the same meaning as for the builtin open() API. -It defaults to -1 which means that the default buffer size will -be used. + buffering has the same meaning as for the builtin open() API. + It defaults to -1 which means that the default buffer size will + be used. + + The returned wrapped file object provides an extra attribute + .encoding which allows querying the used encoding. This + attribute is only available if an encoding was specified as + parameter. + """ -The returned wrapped file object provides an extra attribute -.encoding which allows querying the used encoding. This -attribute is only available if an encoding was specified as -parameter. -""" def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: """Return a wrapped version of file which provides transparent -encoding translation. + encoding translation. -Data written to the wrapped file is decoded according -to the given data_encoding and then encoded to the underlying -file using file_encoding. The intermediate data type -will usually be Unicode but depends on the specified codecs. + Data written to the wrapped file is decoded according + to the given data_encoding and then encoded to the underlying + file using file_encoding. The intermediate data type + will usually be Unicode but depends on the specified codecs. -Bytes read from the file are decoded using file_encoding and then -passed back to the caller encoded using data_encoding. 
+ Bytes read from the file are decoded using file_encoding and then + passed back to the caller encoded using data_encoding. -If file_encoding is not given, it defaults to data_encoding. + If file_encoding is not given, it defaults to data_encoding. -errors may be given to define the error handling. It defaults -to 'strict' which causes ValueErrors to be raised in case an -encoding error occurs. + errors may be given to define the error handling. It defaults + to 'strict' which causes ValueErrors to be raised in case an + encoding error occurs. -The returned wrapped file object provides two extra attributes -.data_encoding and .file_encoding which reflect the given -parameters of the same name. The attributes can be used for -introspection by Python programs. + The returned wrapped file object provides two extra attributes + .data_encoding and .file_encoding which reflect the given + parameters of the same name. The attributes can be used for + introspection by Python programs. + + """ -""" def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: """ -Encoding iterator. + Encoding iterator. -Encodes the input strings from the iterator using an IncrementalEncoder. + Encodes the input strings from the iterator using an IncrementalEncoder. + + errors and kwargs are passed through to the IncrementalEncoder + constructor. + """ -errors and kwargs are passed through to the IncrementalEncoder -constructor. -""" def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: """ -Decoding iterator. + Decoding iterator. -Decodes the input strings from the iterator using an IncrementalDecoder. + Decodes the input strings from the iterator using an IncrementalDecoder. -errors and kwargs are passed through to the IncrementalDecoder -constructor. -""" + errors and kwargs are passed through to the IncrementalDecoder + constructor. + """ BOM: Final[Literal[b"\xff\xfe", b"\xfe\xff"]] # depends on `sys.byteorder` BOM_BE: Final = b"\xfe\xff" @@ -327,172 +337,183 @@ BOM_UTF32_BE: Final = b"\x00\x00\xfe\xff" BOM_UTF32_LE: Final = b"\xff\xfe\x00\x00" def strict_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'strict' error handling, which raises a UnicodeError on coding errors. -""" + """Implements the 'strict' error handling, which raises a UnicodeError on coding errors.""" + def replace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'replace' error handling, which replaces malformed data with a replacement marker. -""" + """Implements the 'replace' error handling, which replaces malformed data with a replacement marker.""" + def ignore_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'ignore' error handling, which ignores malformed data and continues. -""" + """Implements the 'ignore' error handling, which ignores malformed data and continues.""" + def xmlcharrefreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'xmlcharrefreplace' error handling, which replaces an unencodable character with the appropriate XML character reference. 
-""" + """Implements the 'xmlcharrefreplace' error handling, which replaces an unencodable character with the appropriate XML character reference.""" + def backslashreplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'backslashreplace' error handling, which replaces malformed data with a backslashed escape sequence. -""" + """Implements the 'backslashreplace' error handling, which replaces malformed data with a backslashed escape sequence.""" + def namereplace_errors(exception: UnicodeError, /) -> tuple[str | bytes, int]: - """Implements the 'namereplace' error handling, which replaces an unencodable character with a \\N{...} escape sequence. -""" + """Implements the 'namereplace' error handling, which replaces an unencodable character with a \\N{...} escape sequence.""" class Codec: """Defines the interface for stateless encoders/decoders. -The .encode()/.decode() methods may use different error -handling schemes by providing the errors argument. These -string values are predefined: - - 'strict' - raise a ValueError error (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace' - replace with a suitable replacement character; - Python will use the official U+FFFD REPLACEMENT - CHARACTER for the builtin Unicode codecs on - decoding and '?' on encoding. - 'surrogateescape' - replace with private code points U+DCnn. - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference (only for encoding). - 'backslashreplace' - Replace with backslashed escape sequences. - 'namereplace' - Replace with \\N{...} escape sequences - (only for encoding). - -The set of allowed values can be extended via register_error. + The .encode()/.decode() methods may use different error + handling schemes by providing the errors argument. These + string values are predefined: + + 'strict' - raise a ValueError error (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace' - replace with a suitable replacement character; + Python will use the official U+FFFD REPLACEMENT + CHARACTER for the builtin Unicode codecs on + decoding and '?' on encoding. + 'surrogateescape' - replace with private code points U+DCnn. + 'xmlcharrefreplace' - Replace with the appropriate XML + character reference (only for encoding). + 'backslashreplace' - Replace with backslashed escape sequences. + 'namereplace' - Replace with \\N{...} escape sequences + (only for encoding). + + The set of allowed values can be extended via register_error. + + """ -""" # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: """Encodes the object input and returns a tuple (output -object, length consumed). + object, length consumed). -errors defines the error handling to apply. It defaults to -'strict' handling. + errors defines the error handling to apply. It defaults to + 'strict' handling. -The method may not store state in the Codec instance. Use -StreamWriter for codecs which have to keep state in order to -make encoding efficient. + The method may not store state in the Codec instance. Use + StreamWriter for codecs which have to keep state in order to + make encoding efficient. -The encoder must be able to handle zero length input and -return an empty object of the output object type in this -situation. 
+ The encoder must be able to handle zero length input and + return an empty object of the output object type in this + situation. + + """ -""" def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: """Decodes the object input and returns a tuple (output -object, length consumed). + object, length consumed). -input must be an object which provides the bf_getreadbuf -buffer slot. Python strings, buffer objects and memory -mapped files are examples of objects providing this slot. + input must be an object which provides the bf_getreadbuf + buffer slot. Python strings, buffer objects and memory + mapped files are examples of objects providing this slot. -errors defines the error handling to apply. It defaults to -'strict' handling. + errors defines the error handling to apply. It defaults to + 'strict' handling. -The method may not store state in the Codec instance. Use -StreamReader for codecs which have to keep state in order to -make decoding efficient. + The method may not store state in the Codec instance. Use + StreamReader for codecs which have to keep state in order to + make decoding efficient. -The decoder must be able to handle zero length input and -return an empty object of the output object type in this -situation. + The decoder must be able to handle zero length input and + return an empty object of the output object type in this + situation. -""" + """ class IncrementalEncoder: """ -An IncrementalEncoder encodes an input in multiple steps. The input can -be passed piece by piece to the encode() method. The IncrementalEncoder -remembers the state of the encoding process between calls to encode(). -""" + An IncrementalEncoder encodes an input in multiple steps. The input can + be passed piece by piece to the encode() method. The IncrementalEncoder + remembers the state of the encoding process between calls to encode(). + """ + errors: str def __init__(self, errors: str = "strict") -> None: """ -Creates an IncrementalEncoder instance. + Creates an IncrementalEncoder instance. + + The IncrementalEncoder may use different error handling schemes by + providing the errors keyword argument. See the module docstring + for a list of possible values. + """ -The IncrementalEncoder may use different error handling schemes by -providing the errors keyword argument. See the module docstring -for a list of possible values. -""" @abstractmethod def encode(self, input: str, final: bool = False) -> bytes: """ -Encodes input and returns the resulting object. -""" + Encodes input and returns the resulting object. + """ + def reset(self) -> None: """ -Resets the encoder to the initial state. -""" + Resets the encoder to the initial state. + """ # documentation says int but str is needed for the subclass. def getstate(self) -> int | str: """ -Return the current state of the encoder. -""" + Return the current state of the encoder. + """ + def setstate(self, state: int | str) -> None: """ -Set the current state of the encoder. state must have been -returned by getstate(). -""" + Set the current state of the encoder. state must have been + returned by getstate(). + """ class IncrementalDecoder: """ -An IncrementalDecoder decodes an input in multiple steps. The input can -be passed piece by piece to the decode() method. The IncrementalDecoder -remembers the state of the decoding process between calls to decode(). -""" + An IncrementalDecoder decodes an input in multiple steps. The input can + be passed piece by piece to the decode() method. 
The IncrementalDecoder + remembers the state of the decoding process between calls to decode(). + """ + errors: str def __init__(self, errors: str = "strict") -> None: """ -Create an IncrementalDecoder instance. + Create an IncrementalDecoder instance. + + The IncrementalDecoder may use different error handling schemes by + providing the errors keyword argument. See the module docstring + for a list of possible values. + """ -The IncrementalDecoder may use different error handling schemes by -providing the errors keyword argument. See the module docstring -for a list of possible values. -""" @abstractmethod def decode(self, input: ReadableBuffer, final: bool = False) -> str: """ -Decode input and returns the resulting object. -""" + Decode input and returns the resulting object. + """ + def reset(self) -> None: """ -Reset the decoder to the initial state. -""" + Reset the decoder to the initial state. + """ + def getstate(self) -> tuple[bytes, int]: """ -Return the current state of the decoder. + Return the current state of the decoder. + + This must be a (buffered_input, additional_state_info) tuple. + buffered_input must be a bytes object containing bytes that + were passed to decode() that have not yet been converted. + additional_state_info must be a non-negative integer + representing the state of the decoder WITHOUT yet having + processed the contents of buffered_input. In the initial state + and after reset(), getstate() must return (b"", 0). + """ -This must be a (buffered_input, additional_state_info) tuple. -buffered_input must be a bytes object containing bytes that -were passed to decode() that have not yet been converted. -additional_state_info must be a non-negative integer -representing the state of the decoder WITHOUT yet having -processed the contents of buffered_input. In the initial state -and after reset(), getstate() must return (b"", 0). -""" def setstate(self, state: tuple[bytes, int]) -> None: """ -Set the current state of the decoder. + Set the current state of the decoder. -state must have been returned by getstate(). The effect of -setstate((b"", 0)) must be equivalent to reset(). -""" + state must have been returned by getstate(). The effect of + setstate((b"", 0)) must be equivalent to reset(). + """ # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): """ -This subclass of IncrementalEncoder can be used as the baseclass for an -incremental encoder if the encoder must keep some of the output in a -buffer between calls to encode(). -""" + This subclass of IncrementalEncoder can be used as the baseclass for an + incremental encoder if the encoder must keep some of the output in a + buffer between calls to encode(). + """ + buffer: str def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -501,10 +522,11 @@ buffer between calls to encode(). class BufferedIncrementalDecoder(IncrementalDecoder): """ -This subclass of IncrementalDecoder can be used as the baseclass for an -incremental decoder if the decoder must be able to handle incomplete -byte sequences. -""" + This subclass of IncrementalDecoder can be used as the baseclass for an + incremental decoder if the decoder must be able to handle incomplete + byte sequences. + """ + buffer: bytes def __init__(self, errors: str = "strict") -> None: ... @abstractmethod @@ -519,46 +541,48 @@ class StreamWriter(Codec): def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: """Creates a StreamWriter instance. 
-stream must be a file-like object open for writing. + stream must be a file-like object open for writing. -The StreamWriter may use different error handling -schemes by providing the errors keyword argument. These -parameters are predefined: + The StreamWriter may use different error handling + schemes by providing the errors keyword argument. These + parameters are predefined: - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'xmlcharrefreplace' - Replace with the appropriate XML - character reference. - 'backslashreplace' - Replace with backslashed escape - sequences. - 'namereplace' - Replace with \\N{...} escape sequences. + 'strict' - raise a ValueError (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace'- replace with a suitable replacement character + 'xmlcharrefreplace' - Replace with the appropriate XML + character reference. + 'backslashreplace' - Replace with backslashed escape + sequences. + 'namereplace' - Replace with \\N{...} escape sequences. -The set of allowed parameter values can be extended via -register_error. -""" - def write(self, object: str) -> None: - """Writes the object's contents encoded to self.stream. + The set of allowed parameter values can be extended via + register_error. """ + + def write(self, object: str) -> None: + """Writes the object's contents encoded to self.stream.""" + def writelines(self, list: Iterable[str]) -> None: """Writes the concatenated list of strings to the stream -using .write(). -""" + using .write(). + """ + def reset(self) -> None: """Resets the codec buffers used for keeping internal state. -Calling this method should ensure that the data on the -output is put into a clean state, that allows appending -of new fresh data without having to rescan the whole -stream to recover state. + Calling this method should ensure that the data on the + output is put into a clean state, that allows appending + of new fresh data without having to rescan the whole + stream to recover state. + + """ -""" def seek(self, offset: int, whence: int = 0) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: - """Inherit all other methods from the underlying stream. - """ + """Inherit all other methods from the underlying stream.""" class StreamReader(Codec): stream: _ReadableStream @@ -568,118 +592,125 @@ class StreamReader(Codec): def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: """Creates a StreamReader instance. -stream must be a file-like object open for reading. + stream must be a file-like object open for reading. -The StreamReader may use different error handling -schemes by providing the errors keyword argument. These -parameters are predefined: + The StreamReader may use different error handling + schemes by providing the errors keyword argument. 
These + parameters are predefined: - 'strict' - raise a ValueError (or a subclass) - 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character - 'backslashreplace' - Replace with backslashed escape sequences; + 'strict' - raise a ValueError (or a subclass) + 'ignore' - ignore the character and continue with the next + 'replace'- replace with a suitable replacement character + 'backslashreplace' - Replace with backslashed escape sequences; + + The set of allowed parameter values can be extended via + register_error. + """ -The set of allowed parameter values can be extended via -register_error. -""" def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: """Decodes data from the stream self.stream and returns the -resulting object. - -chars indicates the number of decoded code points or bytes to -return. read() will never return more data than requested, -but it might return less, if there is not enough available. - -size indicates the approximate maximum number of decoded -bytes or code points to read for decoding. The decoder -can modify this setting as appropriate. The default value --1 indicates to read and decode as much as possible. size -is intended to prevent having to decode huge files in one -step. - -If firstline is true, and a UnicodeDecodeError happens -after the first line terminator in the input only the first line -will be returned, the rest of the input will be kept until the -next call to read(). - -The method should use a greedy read strategy, meaning that -it should read as much data as is allowed within the -definition of the encoding and the given size, e.g. if -optional encoding endings or state markers are available -on the stream, these should be read too. -""" + resulting object. + + chars indicates the number of decoded code points or bytes to + return. read() will never return more data than requested, + but it might return less, if there is not enough available. + + size indicates the approximate maximum number of decoded + bytes or code points to read for decoding. The decoder + can modify this setting as appropriate. The default value + -1 indicates to read and decode as much as possible. size + is intended to prevent having to decode huge files in one + step. + + If firstline is true, and a UnicodeDecodeError happens + after the first line terminator in the input only the first line + will be returned, the rest of the input will be kept until the + next call to read(). + + The method should use a greedy read strategy, meaning that + it should read as much data as is allowed within the + definition of the encoding and the given size, e.g. if + optional encoding endings or state markers are available + on the stream, these should be read too. + """ + def readline(self, size: int | None = None, keepends: bool = True) -> str: """Read one line from the input stream and return the -decoded data. + decoded data. -size, if given, is passed as size argument to the -read() method. + size, if given, is passed as size argument to the + read() method. + + """ -""" def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: """Read all lines available on the input stream -and return them as a list. + and return them as a list. -Line breaks are implemented using the codec's decoder -method and are included in the list entries. + Line breaks are implemented using the codec's decoder + method and are included in the list entries. 
-sizehint, if given, is ignored since there is no efficient -way of finding the true end-of-line. + sizehint, if given, is ignored since there is no efficient + way of finding the true end-of-line. + + """ -""" def reset(self) -> None: """Resets the codec buffers used for keeping internal state. -Note that no stream repositioning should take place. -This method is primarily intended to be able to recover -from decoding errors. + Note that no stream repositioning should take place. + This method is primarily intended to be able to recover + from decoding errors. + + """ -""" def seek(self, offset: int, whence: int = 0) -> None: """Set the input stream's current position. -Resets the codec buffers used for keeping state. -""" + Resets the codec buffers used for keeping state. + """ + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __iter__(self) -> Self: ... def __next__(self) -> str: - """Return the next decoded line from the input stream. -""" + """Return the next decoded line from the input stream.""" + def __getattr__(self, name: str, getattr: Callable[[Any, str], Any] = ...) -> Any: - """Inherit all other methods from the underlying stream. - """ + """Inherit all other methods from the underlying stream.""" # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): """StreamReaderWriter instances allow wrapping streams which -work in both read and write modes. + work in both read and write modes. -The design is such that one can use the factory functions -returned by the codec.lookup() function to construct the -instance. + The design is such that one can use the factory functions + returned by the codec.lookup() function to construct the + instance. + + """ -""" stream: _Stream def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: """Creates a StreamReaderWriter instance. -stream must be a Stream-like object. + stream must be a Stream-like object. -Reader, Writer must be factory functions or classes -providing the StreamReader, StreamWriter interface resp. + Reader, Writer must be factory functions or classes + providing the StreamReader, StreamWriter interface resp. -Error handling is done in the same way as defined for the -StreamWriter/Readers. + Error handling is done in the same way as defined for the + StreamWriter/Readers. + + """ -""" def read(self, size: int = -1) -> str: ... def readline(self, size: int | None = None) -> str: ... def readlines(self, sizehint: int | None = None) -> list[str]: ... def __next__(self) -> str: - """Return the next decoded line from the input stream. -""" + """Return the next decoded line from the input stream.""" + def __iter__(self) -> Self: ... def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... @@ -688,8 +719,7 @@ StreamWriter/Readers. def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: - """Inherit all other methods from the underlying stream. - """ + """Inherit all other methods from the underlying stream.""" # These methods don't actually exist directly, but they are needed to satisfy the TextIO # interface. 
At runtime, they are delegated through __getattr__. def close(self) -> None: ... @@ -705,18 +735,19 @@ StreamWriter/Readers. class StreamRecoder(BinaryIO): """StreamRecoder instances translate data from one encoding to another. -They use the complete set of APIs returned by the -codecs.lookup() function to implement their task. + They use the complete set of APIs returned by the + codecs.lookup() function to implement their task. -Data written to the StreamRecoder is first decoded into an -intermediate format (depending on the "decode" codec) and then -written to the underlying stream using an instance of the provided -Writer class. + Data written to the StreamRecoder is first decoded into an + intermediate format (depending on the "decode" codec) and then + written to the underlying stream using an instance of the provided + Writer class. -In the other direction, data is read from the underlying stream using -a Reader instance and then encoded and returned to the caller. + In the other direction, data is read from the underlying stream using + a Reader instance and then encoded and returned to the caller. + + """ -""" data_encoding: str file_encoding: str def __init__( @@ -729,37 +760,38 @@ a Reader instance and then encoded and returned to the caller. errors: str = "strict", ) -> None: """Creates a StreamRecoder instance which implements a two-way -conversion: encode and decode work on the frontend (the -data visible to .read() and .write()) while Reader and Writer -work on the backend (the data in stream). + conversion: encode and decode work on the frontend (the + data visible to .read() and .write()) while Reader and Writer + work on the backend (the data in stream). -You can use these objects to do transparent -transcodings from e.g. latin-1 to utf-8 and back. + You can use these objects to do transparent + transcodings from e.g. latin-1 to utf-8 and back. -stream must be a file-like object. + stream must be a file-like object. -encode and decode must adhere to the Codec interface; Reader and -Writer must be factory functions or classes providing the -StreamReader and StreamWriter interfaces resp. + encode and decode must adhere to the Codec interface; Reader and + Writer must be factory functions or classes providing the + StreamReader and StreamWriter interfaces resp. -Error handling is done in the same way as defined for the -StreamWriter/Readers. + Error handling is done in the same way as defined for the + StreamWriter/Readers. + + """ -""" def read(self, size: int = -1) -> bytes: ... def readline(self, size: int | None = None) -> bytes: ... def readlines(self, sizehint: int | None = None) -> list[bytes]: ... def __next__(self) -> bytes: - """Return the next decoded line from the input stream. -""" + """Return the next decoded line from the input stream.""" + def __iter__(self) -> Self: ... # Base class accepts more types than just bytes def write(self, data: bytes) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[bytes]) -> None: ... # type: ignore[override] def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: - """Inherit all other methods from the underlying stream. - """ + """Inherit all other methods from the underlying stream.""" + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def seek(self, offset: int, whence: int = 0) -> None: ... 
# type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi index 95dc4d4cefc85..2ef06796dda4c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/codeop.pyi @@ -31,6 +31,7 @@ Compile(): Instances of this class act like the built-in function compile, but with 'memory' in the sense described above. """ + import sys from types import CodeType @@ -40,50 +41,51 @@ if sys.version_info >= (3, 14): def compile_command(source: str, filename: str = "", symbol: str = "single", flags: int = 0) -> CodeType | None: """Compile a command and determine whether it is incomplete. -Arguments: + Arguments: -source -- the source string; may contain \\n characters -filename -- optional filename from which source was read; default - "" -symbol -- optional grammar start symbol; "single" (default), "exec" - or "eval" + source -- the source string; may contain \\n characters + filename -- optional filename from which source was read; default + "" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" -Return value / exceptions raised: + Return value / exceptions raised: -- Return a code object if the command is complete and valid -- Return None if the command is incomplete -- Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). -""" + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ else: def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: """Compile a command and determine whether it is incomplete. -Arguments: + Arguments: -source -- the source string; may contain \\n characters -filename -- optional filename from which source was read; default - "" -symbol -- optional grammar start symbol; "single" (default), "exec" - or "eval" + source -- the source string; may contain \\n characters + filename -- optional filename from which source was read; default + "" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" -Return value / exceptions raised: + Return value / exceptions raised: -- Return a code object if the command is complete and valid -- Return None if the command is incomplete -- Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). -""" + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ class Compile: """Instances of this class behave much like the built-in compile -function, but if one is used to compile text containing a future -statement, it "remembers" and compiles all subsequent program texts -with the statement in force. -""" + function, but if one is used to compile text containing a future + statement, it "remembers" and compiles all subsequent program texts + with the statement in force. 
+ """ + flags: int if sys.version_info >= (3, 13): def __call__(self, source: str, filename: str, symbol: str, flags: int = 0) -> CodeType: ... @@ -92,28 +94,29 @@ with the statement in force. class CommandCompiler: """Instances of this class have __call__ methods identical in -signature to compile_command; the difference is that if the -instance compiles program text containing a __future__ statement, -the instance 'remembers' and compiles all subsequent program texts -with the statement in force. -""" + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force. + """ + compiler: Compile def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: """Compile a command and determine whether it is incomplete. -Arguments: + Arguments: -source -- the source string; may contain \\n characters -filename -- optional filename from which source was read; - default "" -symbol -- optional grammar start symbol; "single" (default) or - "eval" + source -- the source string; may contain \\n characters + filename -- optional filename from which source was read; + default "" + symbol -- optional grammar start symbol; "single" (default) or + "eval" -Return value / exceptions raised: + Return value / exceptions raised: -- Return a code object if the command is complete and valid -- Return None if the command is incomplete -- Raise SyntaxError, ValueError or OverflowError if the command is a - syntax error (OverflowError and ValueError can be produced by - malformed literals). -""" + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi index 1da0fbd790fef..adb79e5ff4104 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/__init__.pyi @@ -13,6 +13,7 @@ list, set, and tuple. * UserString wrapper around string objects for easier string subclassing """ + import sys from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import SupportsItems, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT @@ -58,26 +59,26 @@ def namedtuple( ) -> type[tuple[Any, ...]]: """Returns a new subclass of tuple with named fields. 
->>> Point = namedtuple('Point', ['x', 'y']) ->>> Point.__doc__ # docstring for the new class -'Point(x, y)' ->>> p = Point(11, y=22) # instantiate with positional args or keywords ->>> p[0] + p[1] # indexable like a plain tuple -33 ->>> x, y = p # unpack like a regular tuple ->>> x, y -(11, 22) ->>> p.x + p.y # fields also accessible by name -33 ->>> d = p._asdict() # convert to a dictionary ->>> d['x'] -11 ->>> Point(**d) # convert from a dictionary -Point(x=11, y=22) ->>> p._replace(x=100) # _replace() is like str.replace() but targets named fields -Point(x=100, y=22) - -""" + >>> Point = namedtuple('Point', ['x', 'y']) + >>> Point.__doc__ # docstring for the new class + 'Point(x, y)' + >>> p = Point(11, y=22) # instantiate with positional args or keywords + >>> p[0] + p[1] # indexable like a plain tuple + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> d = p._asdict() # convert to a dictionary + >>> d['x'] + 11 + >>> Point(**d) # convert from a dictionary + Point(x=11, y=22) + >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields + Point(x=100, y=22) + + """ class UserDict(MutableMapping[_KT, _VT]): data: dict[_KT, _VT] @@ -150,8 +151,8 @@ class UserDict(MutableMapping[_KT, _VT]): def get(self, key: _KT, default: _T) -> _VT | _T: ... class UserList(MutableSequence[_T]): - """A more or less complete user-defined wrapper around list objects. -""" + """A more or less complete user-defined wrapper around list objects.""" + data: list[_T] @overload def __init__(self, initlist: None = None) -> None: ... @@ -272,154 +273,155 @@ class UserString(Sequence[UserString]): @disjoint_base class deque(MutableSequence[_T]): - """A list-like sequence optimized for data accesses near its endpoints. -""" + """A list-like sequence optimized for data accesses near its endpoints.""" + @property def maxlen(self) -> int | None: - """maximum size of a deque or None if unbounded -""" + """maximum size of a deque or None if unbounded""" + @overload def __init__(self, *, maxlen: int | None = None) -> None: ... @overload def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... def append(self, x: _T, /) -> None: - """Add an element to the right side of the deque. -""" + """Add an element to the right side of the deque.""" + def appendleft(self, x: _T, /) -> None: - """Add an element to the left side of the deque. -""" + """Add an element to the left side of the deque.""" + def copy(self) -> Self: - """Return a shallow copy of a deque. -""" + """Return a shallow copy of a deque.""" + def count(self, x: _T, /) -> int: - """Return number of occurrences of value. -""" + """Return number of occurrences of value.""" + def extend(self, iterable: Iterable[_T], /) -> None: - """Extend the right side of the deque with elements from the iterable. -""" + """Extend the right side of the deque with elements from the iterable.""" + def extendleft(self, iterable: Iterable[_T], /) -> None: - """Extend the left side of the deque with elements from the iterable. -""" + """Extend the left side of the deque with elements from the iterable.""" + def insert(self, i: int, x: _T, /) -> None: - """Insert value before index. -""" + """Insert value before index.""" + def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: """Return first index of value. -Raises ValueError if the value is not present. -""" + Raises ValueError if the value is not present. 
+ """ + def pop(self) -> _T: # type: ignore[override] - """Remove and return the rightmost element. -""" + """Remove and return the rightmost element.""" + def popleft(self) -> _T: - """Remove and return the leftmost element. -""" + """Remove and return the leftmost element.""" + def remove(self, value: _T, /) -> None: - """Remove first occurrence of value. -""" + """Remove first occurrence of value.""" + def rotate(self, n: int = 1, /) -> None: - """Rotate the deque n steps to the right. If n is negative, rotates left. -""" + """Rotate the deque n steps to the right. If n is negative, rotates left.""" + def __copy__(self) -> Self: - """Return a shallow copy of a deque. -""" + """Return a shallow copy of a deque.""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" __hash__: ClassVar[None] # type: ignore[assignment] # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores def __getitem__(self, key: SupportsIndex, /) -> _T: # type: ignore[override] - """Return self[key]. -""" + """Return self[key].""" + def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: # type: ignore[override] - """Set self[key] to value. -""" + """Set self[key] to value.""" + def __delitem__(self, key: SupportsIndex, /) -> None: # type: ignore[override] - """Delete self[key]. -""" + """Delete self[key].""" + def __contains__(self, key: object, /) -> bool: - """Return bool(key in self). -""" + """Return bool(key in self).""" + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: - """Return state information for pickling. -""" + """Return state information for pickling.""" + def __iadd__(self, value: Iterable[_T], /) -> Self: - """Implement self+=value. -""" + """Implement self+=value.""" + def __add__(self, value: Self, /) -> Self: - """Return self+value. -""" + """Return self+value.""" + def __mul__(self, value: int, /) -> Self: - """Return self*value. -""" + """Return self*value.""" + def __imul__(self, value: int, /) -> Self: - """Implement self*=value. -""" + """Implement self*=value.""" + def __lt__(self, value: deque[_T], /) -> bool: ... def __le__(self, value: deque[_T], /) -> bool: ... def __gt__(self, value: deque[_T], /) -> bool: ... def __ge__(self, value: deque[_T], /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" class Counter(dict[_T, int], Generic[_T]): """Dict subclass for counting hashable items. Sometimes called a bag -or multiset. Elements are stored as dictionary keys and their counts -are stored as dictionary values. - ->>> c = Counter('abcdeabcdabcaba') # count elements from a string - ->>> c.most_common(3) # three most common elements -[('a', 5), ('b', 4), ('c', 3)] ->>> sorted(c) # list all unique elements -['a', 'b', 'c', 'd', 'e'] ->>> ''.join(sorted(c.elements())) # list elements with repetitions -'aaaaabbbbcccdde' ->>> sum(c.values()) # total of all counts -15 - ->>> c['a'] # count of letter 'a' -5 ->>> for elem in 'shazam': # update counts from an iterable -... 
c[elem] += 1 # by adding 1 to each element's count ->>> c['a'] # now there are seven 'a' -7 ->>> del c['b'] # remove all 'b' ->>> c['b'] # now there are zero 'b' -0 - ->>> d = Counter('simsalabim') # make another counter ->>> c.update(d) # add in the second counter ->>> c['a'] # now there are nine 'a' -9 - ->>> c.clear() # empty the counter ->>> c -Counter() - -Note: If a count is set to zero or reduced to zero, it will remain -in the counter until the entry is deleted or the counter is cleared: - ->>> c = Counter('aaabbc') ->>> c['b'] -= 2 # reduce the count of 'b' by two ->>> c.most_common() # 'b' is still in, but its count is zero -[('a', 3), ('c', 1), ('b', 0)] + or multiset. Elements are stored as dictionary keys and their counts + are stored as dictionary values. + + >>> c = Counter('abcdeabcdabcaba') # count elements from a string + + >>> c.most_common(3) # three most common elements + [('a', 5), ('b', 4), ('c', 3)] + >>> sorted(c) # list all unique elements + ['a', 'b', 'c', 'd', 'e'] + >>> ''.join(sorted(c.elements())) # list elements with repetitions + 'aaaaabbbbcccdde' + >>> sum(c.values()) # total of all counts + 15 + + >>> c['a'] # count of letter 'a' + 5 + >>> for elem in 'shazam': # update counts from an iterable + ... c[elem] += 1 # by adding 1 to each element's count + >>> c['a'] # now there are seven 'a' + 7 + >>> del c['b'] # remove all 'b' + >>> c['b'] # now there are zero 'b' + 0 + + >>> d = Counter('simsalabim') # make another counter + >>> c.update(d) # add in the second counter + >>> c['a'] # now there are nine 'a' + 9 + + >>> c.clear() # empty the counter + >>> c + Counter() + + Note: If a count is set to zero or reduced to zero, it will remain + in the counter until the entry is deleted or the counter is cleared: + + >>> c = Counter('aaabbc') + >>> c['b'] -= 2 # reduce the count of 'b' by two + >>> c.most_common() # 'b' is still in, but its count is zero + [('a', 3), ('c', 1), ('b', 0)] + + """ -""" @overload def __init__(self, iterable: None = None, /) -> None: """Create a new, empty Counter object. And if given, count elements -from an input iterable. Or, initialize the count from another mapping -of elements to their counts. + from an input iterable. Or, initialize the count from another mapping + of elements to their counts. ->>> c = Counter() # a new, empty counter ->>> c = Counter('gallahad') # a new counter from an iterable ->>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping ->>> c = Counter(a=4, b=2) # a new counter from keyword args + >>> c = Counter() # a new, empty counter + >>> c = Counter('gallahad') # a new counter from an iterable + >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping + >>> c = Counter(a=4, b=2) # a new counter from keyword args + + """ -""" @overload def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ... @overload @@ -427,53 +429,56 @@ of elements to their counts. @overload def __init__(self, iterable: Iterable[_T], /) -> None: ... def copy(self) -> Self: - """Return a shallow copy. -""" + """Return a shallow copy.""" + def elements(self) -> Iterator[_T]: """Iterator over elements repeating each as many times as its count. 
->>> c = Counter('ABCABC') ->>> sorted(c.elements()) -['A', 'A', 'B', 'B', 'C', 'C'] + >>> c = Counter('ABCABC') + >>> sorted(c.elements()) + ['A', 'A', 'B', 'B', 'C', 'C'] -Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 ->>> import math ->>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) ->>> math.prod(prime_factors.elements()) -1836 + >>> import math + >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) + >>> math.prod(prime_factors.elements()) + 1836 -Note, if an element's count has been set to zero or is a negative -number, elements() will ignore it. + Note, if an element's count has been set to zero or is a negative + number, elements() will ignore it. + + """ -""" def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: """List the n most common elements and their counts from the most -common to the least. If n is None, then list all element counts. + common to the least. If n is None, then list all element counts. ->>> Counter('abracadabra').most_common(3) -[('a', 5), ('b', 2), ('r', 2)] + >>> Counter('abracadabra').most_common(3) + [('a', 5), ('b', 2), ('r', 2)] + + """ -""" @classmethod def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] @overload def subtract(self, iterable: None = None, /) -> None: """Like dict.update() but subtracts counts instead of replacing them. -Counts can be reduced below zero. Both the inputs and outputs are -allowed to contain zero and negative counts. + Counts can be reduced below zero. Both the inputs and outputs are + allowed to contain zero and negative counts. -Source can be an iterable, a dictionary, or another Counter instance. + Source can be an iterable, a dictionary, or another Counter instance. ->>> c = Counter('which') ->>> c.subtract('witch') # subtract elements from another iterable ->>> c.subtract(Counter('watch')) # subtract elements from another counter ->>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch -0 ->>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch --1 + >>> c = Counter('which') + >>> c.subtract('witch') # subtract elements from another iterable + >>> c.subtract(Counter('watch')) # subtract elements from another counter + >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch + 0 + >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch + -1 + + """ -""" @overload def subtract(self, mapping: Mapping[_T, int], /) -> None: ... @overload @@ -488,123 +493,128 @@ Source can be an iterable, a dictionary, or another Counter instance. def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: """Like dict.update() but add counts instead of replacing them. -Source can be an iterable, a dictionary, or another Counter instance. + Source can be an iterable, a dictionary, or another Counter instance. ->>> c = Counter('which') ->>> c.update('witch') # add elements from another iterable ->>> d = Counter('watch') ->>> c.update(d) # add elements from another counter ->>> c['h'] # four 'h' in which, witch, and watch -4 + >>> c = Counter('which') + >>> c.update('witch') # add elements from another iterable + >>> d = Counter('watch') + >>> c.update(d) # add elements from another counter + >>> c['h'] # four 'h' in which, witch, and watch + 4 + + """ -""" @overload def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ... @overload def update(self, iterable: None = None, /, **kwargs: int) -> None: ... 
def __missing__(self, key: _T) -> int: - """The count of elements not in the Counter is zero. -""" + """The count of elements not in the Counter is zero.""" + def __delitem__(self, elem: object) -> None: - """Like dict.__delitem__() but does not raise KeyError for missing values. -""" + """Like dict.__delitem__() but does not raise KeyError for missing values.""" if sys.version_info >= (3, 10): def __eq__(self, other: object) -> bool: - """True if all counts agree. Missing counts are treated as zero. -""" + """True if all counts agree. Missing counts are treated as zero.""" + def __ne__(self, other: object) -> bool: - """True if any counts disagree. Missing counts are treated as zero. -""" + """True if any counts disagree. Missing counts are treated as zero.""" def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: """Add counts from two counters. ->>> Counter('abbb') + Counter('bcc') -Counter({'b': 4, 'c': 2, 'a': 1}) + >>> Counter('abbb') + Counter('bcc') + Counter({'b': 4, 'c': 2, 'a': 1}) + + """ -""" def __sub__(self, other: Counter[_T]) -> Counter[_T]: """Subtract count, but keep only results with positive counts. ->>> Counter('abbbc') - Counter('bccd') -Counter({'b': 2, 'a': 1}) + >>> Counter('abbbc') - Counter('bccd') + Counter({'b': 2, 'a': 1}) + + """ -""" def __and__(self, other: Counter[_T]) -> Counter[_T]: """Intersection is the minimum of corresponding counts. ->>> Counter('abbb') & Counter('bcc') -Counter({'b': 1}) + >>> Counter('abbb') & Counter('bcc') + Counter({'b': 1}) + + """ -""" def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: # type: ignore[override] """Union is the maximum of value in either of the input counters. ->>> Counter('abbb') | Counter('bcc') -Counter({'b': 3, 'c': 2, 'a': 1}) + >>> Counter('abbb') | Counter('bcc') + Counter({'b': 3, 'c': 2, 'a': 1}) + + """ -""" def __pos__(self) -> Counter[_T]: - """Adds an empty counter, effectively stripping negative and zero counts -""" + """Adds an empty counter, effectively stripping negative and zero counts""" + def __neg__(self) -> Counter[_T]: """Subtracts from an empty counter. Strips positive and zero counts, -and flips the sign on negative counts. + and flips the sign on negative counts. -""" + """ # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. def __iadd__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[misc] """Inplace add from another counter, keeping only positive counts. ->>> c = Counter('abbb') ->>> c += Counter('bcc') ->>> c -Counter({'b': 4, 'c': 2, 'a': 1}) + >>> c = Counter('abbb') + >>> c += Counter('bcc') + >>> c + Counter({'b': 4, 'c': 2, 'a': 1}) + + """ -""" def __isub__(self, other: SupportsItems[_T, int]) -> Self: """Inplace subtract counter, but keep only results with positive counts. ->>> c = Counter('abbbc') ->>> c -= Counter('bccd') ->>> c -Counter({'b': 2, 'a': 1}) + >>> c = Counter('abbbc') + >>> c -= Counter('bccd') + >>> c + Counter({'b': 2, 'a': 1}) + + """ -""" def __iand__(self, other: SupportsItems[_T, int]) -> Self: """Inplace intersection is the minimum of corresponding counts. ->>> c = Counter('abbb') ->>> c &= Counter('bcc') ->>> c -Counter({'b': 1}) + >>> c = Counter('abbb') + >>> c &= Counter('bcc') + >>> c + Counter({'b': 1}) + + """ -""" def __ior__(self, other: SupportsItems[_T, int]) -> Self: # type: ignore[override,misc] """Inplace union is the maximum of value from either counter. 
->>> c = Counter('abbb') ->>> c |= Counter('bcc') ->>> c -Counter({'b': 3, 'c': 2, 'a': 1}) + >>> c = Counter('abbb') + >>> c |= Counter('bcc') + >>> c + Counter({'b': 3, 'c': 2, 'a': 1}) -""" + """ if sys.version_info >= (3, 10): def total(self) -> int: - """Sum of the counts -""" + """Sum of the counts""" + def __le__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a subset of those in other. -""" + """True if all counts in self are a subset of those in other.""" + def __lt__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a proper subset of those in other. -""" + """True if all counts in self are a proper subset of those in other.""" + def __ge__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a superset of those in other. -""" + """True if all counts in self are a superset of those in other.""" + def __gt__(self, other: Counter[Any]) -> bool: - """True if all counts in self are a proper superset of those in other. -""" + """True if all counts in self are a proper superset of those in other.""" # The pure-Python implementations of the "views" classes # These are exposed at runtime in `collections/__init__.py` @@ -638,24 +648,26 @@ class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyrig @disjoint_base class OrderedDict(dict[_KT, _VT]): - """Dictionary that remembers insertion order -""" + """Dictionary that remembers insertion order""" + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: """Remove and return a (key, value) pair from the dictionary. -Pairs are returned in LIFO order if last is true or FIFO order if false. -""" + Pairs are returned in LIFO order if last is true or FIFO order if false. + """ + def move_to_end(self, key: _KT, last: bool = True) -> None: """Move an existing element to the end (or beginning if last is false). -Raise KeyError if the element does not exist. -""" + Raise KeyError if the element does not exist. + """ + def copy(self) -> Self: - """od.copy() -> a shallow copy of od -""" + """od.copy() -> a shallow copy of od""" + def __reversed__(self) -> Iterator[_KT]: - """od.__reversed__() <==> reversed(od) -""" + """od.__reversed__() <==> reversed(od)""" + def keys(self) -> _odict_keys[_KT, _VT]: ... def items(self) -> _odict_items[_KT, _VT]: ... def values(self) -> _odict_values[_KT, _VT]: ... @@ -665,8 +677,8 @@ Raise KeyError if the element does not exist. @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: - """Create a new ordered dictionary with keys from iterable and values set to value. -""" + """Create a new ordered dictionary with keys from iterable and values set to value.""" + @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... @@ -675,8 +687,9 @@ Raise KeyError if the element does not exist. def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: """Insert key with a value of default if key is not in the dictionary. -Return the value for key if key is in the dictionary, else default. -""" + Return the value for key if key is in the dictionary, else default. + """ + @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... # Same as dict.pop, but accepts keyword arguments @@ -684,9 +697,10 @@ Return the value for key if key is in the dictionary, else default. def pop(self, key: _KT) -> _VT: """od.pop(key[,default]) -> v, remove specified key and return the corresponding value. 
-If the key is not found, return the default if given; otherwise, -raise a KeyError. -""" + If the key is not found, return the default if given; otherwise, + raise a KeyError. + """ + @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload @@ -694,14 +708,14 @@ raise a KeyError. def __eq__(self, value: object, /) -> bool: ... @overload def __or__(self, value: dict[_KT, _VT], /) -> Self: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... @overload def __ror__(self, value: dict[_KT, _VT], /) -> Self: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] @@ -709,12 +723,13 @@ raise a KeyError. class defaultdict(dict[_KT, _VT]): """defaultdict(default_factory=None, /, [...]) --> dict with default factory -The default factory is called without arguments to produce -a new value when a key is not present, in __getitem__ only. -A defaultdict compares equal to a dict with the same items. -All remaining arguments are treated the same as if they were -passed to the dict constructor, including keyword arguments. -""" + The default factory is called without arguments to produce + a new value when a key is not present, in __getitem__ only. + A defaultdict compares equal to a dict with the same items. + All remaining arguments are treated the same as if they were + passed to the dict constructor, including keyword arguments. + """ + default_factory: Callable[[], _VT] | None @overload def __init__(self) -> None: ... @@ -751,57 +766,61 @@ passed to the dict constructor, including keyword arguments. ) -> None: ... def __missing__(self, key: _KT, /) -> _VT: """__missing__(key) # Called by __getitem__ for missing key; pseudo-code: - if self.default_factory is None: raise KeyError((key,)) - self[key] = value = self.default_factory() - return value -""" + if self.default_factory is None: raise KeyError((key,)) + self[key] = value = self.default_factory() + return value + """ + def __copy__(self) -> Self: - """D.copy() -> a shallow copy of D. -""" + """D.copy() -> a shallow copy of D.""" + def copy(self) -> Self: - """D.copy() -> a shallow copy of D. -""" + """D.copy() -> a shallow copy of D.""" + @overload def __or__(self, value: dict[_KT, _VT], /) -> Self: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... @overload def __ror__(self, value: dict[_KT, _VT], /) -> Self: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] class ChainMap(MutableMapping[_KT, _VT]): """A ChainMap groups multiple dicts (or other mappings) together -to create a single, updateable view. + to create a single, updateable view. -The underlying mappings are stored in a list. That list is public and can -be accessed or updated using the *maps* attribute. There is no other -state. + The underlying mappings are stored in a list. That list is public and can + be accessed or updated using the *maps* attribute. There is no other + state. -Lookups search the underlying mappings successively until a key is found. -In contrast, writes, updates, and deletions only operate on the first -mapping. + Lookups search the underlying mappings successively until a key is found. 
+ In contrast, writes, updates, and deletions only operate on the first + mapping. + + """ -""" maps: list[MutableMapping[_KT, _VT]] def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: """Initialize a ChainMap by setting *maps* to the given mappings. -If no mappings are provided, a single empty dictionary is used. + If no mappings are provided, a single empty dictionary is used. + + """ -""" def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: """New ChainMap with a new map followed by all previous maps. -If no map is provided, an empty dict is used. -Keyword arguments update the map or new empty dict. -""" + If no map is provided, an empty dict is used. + Keyword arguments update the map or new empty dict. + """ + @property def parents(self) -> Self: - """New ChainMap from maps[1:]. -""" + """New ChainMap from maps[1:].""" + def __setitem__(self, key: _KT, value: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @@ -819,21 +838,20 @@ Keyword arguments update the map or new empty dict. # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: - """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D -""" + """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D""" + @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: - """Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0]. -""" + """Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].""" + @overload def pop(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, default: _T) -> _VT | _T: ... def copy(self) -> Self: - """New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:] -""" + """New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]""" __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, # so the signature should be kept in line with `dict.fromkeys`. @@ -841,21 +859,19 @@ Keyword arguments update the map or new empty dict. @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], /) -> ChainMap[_T, Any | None]: - """Create a new ChainMap with keys from iterable and values set to value. -""" + """Create a new ChainMap with keys from iterable and values set to value.""" else: @classmethod @overload def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: - """Create a ChainMap with a single dict created from the iterable. -""" + """Create a ChainMap with a single dict created from the iterable.""" @classmethod @overload # Special-case None: the user probably wants to add non-None values later. def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: - """Create a new ChainMap with keys from iterable and values set to value. -""" + """Create a new ChainMap with keys from iterable and values set to value.""" + @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ... 
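The collections docstrings above describe runtime behaviour that these stubs only annotate. A minimal sketch of a few of those documented behaviours, assuming nothing beyond the CPython standard library (this code is illustrative only and is not part of the stub files):

# Sketch exercising behaviours documented in the collections docstrings above.
from collections import ChainMap, Counter, OrderedDict, defaultdict, deque

c = Counter("abracadabra")
assert c.most_common(3) == [("a", 5), ("b", 2), ("r", 2)]  # per most_common() docstring
assert (+c) == c                                           # unary plus strips non-positive counts

d = deque([1, 2, 3], maxlen=3)
d.append(4)                    # oldest element falls off the left once maxlen is reached
assert list(d) == [2, 3, 4]
d.rotate(1)                    # rotate right by one step
assert list(d) == [4, 2, 3]

dd = defaultdict(list)
dd["xs"].append(1)             # __missing__ calls default_factory() for absent keys
assert dd["xs"] == [1]

cm = ChainMap({"a": 1}, {"a": 2, "b": 3})
assert cm["a"] == 1            # lookups search the maps left to right
cm["b"] = 99                   # writes only touch the first mapping
assert cm.maps[1]["b"] == 3

od = OrderedDict(a=1, b=2)
od.move_to_end("a")            # reorders keys without changing values
assert list(od) == ["b", "a"]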
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi index f2effbf1d31e5..337264c60ca09 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/collections/abc.pyi @@ -2,5 +2,6 @@ Unit tests are in test_collections. """ + from _collections_abc import * from _collections_abc import __all__ as __all__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi index 56f0b7514db0e..9e674165b9551 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/colorsys.pyi @@ -15,6 +15,7 @@ YIQ: Luminance, Chrominance (used by composite video signals) HLS: Hue, Luminance, Saturation HSV: Hue, Saturation, Value """ + from typing import Final __all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi index 6d3cba13a3caa..e6c1e174f69d1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compileall.pyi @@ -10,6 +10,7 @@ packages -- for now, you'll have to deal with packages separately.) See module py_compile for details of the actual byte-compilation. """ + import sys from _typeshed import StrPath from py_compile import PycInvalidationMode @@ -41,28 +42,29 @@ if sys.version_info >= (3, 10): ) -> bool: """Byte-compile all modules in the given directory tree. -Arguments (only dir is required): - -dir: the directory to byte-compile -maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) -ddir: the directory that will be prepended to the path to the - file as it is compiled into each byte-code file. -force: if True, force compilation, even if timestamps are up-to-date -quiet: full output with False or 0, errors only with 1, - no output with 2 -legacy: if True, produce legacy pyc paths instead of PEP 3147 paths -optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. -workers: maximum number of parallel workers -invalidation_mode: how the up-to-dateness of the pyc will be checked -stripdir: part of path to left-strip from source file path -prependdir: path to prepend to beginning of original file path, applied - after stripdir -limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path -hardlink_dupes: hardlink duplicated pyc files -""" + Arguments (only dir is required): + + dir: the directory to byte-compile + maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) + ddir: the directory that will be prepended to the path to the + file as it is compiled into each byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. 
+ workers: maximum number of parallel workers + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path + hardlink_dupes: hardlink duplicated pyc files + """ + def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -80,26 +82,26 @@ hardlink_dupes: hardlink duplicated pyc files ) -> bool: """Byte-compile one file. -Arguments (only fullname is required): - -fullname: the file to byte-compile -ddir: if given, the directory name compiled in to the - byte-code file. -force: if True, force compilation, even if timestamps are up-to-date -quiet: full output with False or 0, errors only with 1, - no output with 2 -legacy: if True, produce legacy pyc paths instead of PEP 3147 paths -optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. -invalidation_mode: how the up-to-dateness of the pyc will be checked -stripdir: part of path to left-strip from source file path -prependdir: path to prepend to beginning of original file path, applied - after stripdir -limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path. -hardlink_dupes: hardlink duplicated pyc files -""" + Arguments (only fullname is required): + + fullname: the file to byte-compile + ddir: if given, the directory name compiled in to the + byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path. + hardlink_dupes: hardlink duplicated pyc files + """ else: def compile_dir( @@ -121,28 +123,29 @@ else: ) -> bool: """Byte-compile all modules in the given directory tree. - Arguments (only dir is required): - - dir: the directory to byte-compile - maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) - ddir: the directory that will be prepended to the path to the - file as it is compiled into each byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. 
- workers: maximum number of parallel workers - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path - hardlink_dupes: hardlink duplicated pyc files - """ + Arguments (only dir is required): + + dir: the directory to byte-compile + maxlevels: maximum recursion level (default `sys.getrecursionlimit()`) + ddir: the directory that will be prepended to the path to the + file as it is compiled into each byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. + workers: maximum number of parallel workers + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path + hardlink_dupes: hardlink duplicated pyc files + """ + def compile_file( fullname: StrPath, ddir: StrPath | None = None, @@ -160,26 +163,26 @@ else: ) -> bool: """Byte-compile one file. - Arguments (only fullname is required): - - fullname: the file to byte-compile - ddir: if given, the directory name compiled in to the - byte-code file. - force: if True, force compilation, even if timestamps are up-to-date - quiet: full output with False or 0, errors only with 1, - no output with 2 - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: int or list of optimization levels or -1 for level of - the interpreter. Multiple levels leads to multiple compiled - files each with one optimization level. - invalidation_mode: how the up-to-dateness of the pyc will be checked - stripdir: part of path to left-strip from source file path - prependdir: path to prepend to beginning of original file path, applied - after stripdir - limit_sl_dest: ignore symlinks if they are pointing outside of - the defined path. - hardlink_dupes: hardlink duplicated pyc files - """ + Arguments (only fullname is required): + + fullname: the file to byte-compile + ddir: if given, the directory name compiled in to the + byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: int or list of optimization levels or -1 for level of + the interpreter. Multiple levels leads to multiple compiled + files each with one optimization level. + invalidation_mode: how the up-to-dateness of the pyc will be checked + stripdir: part of path to left-strip from source file path + prependdir: path to prepend to beginning of original file path, applied + after stripdir + limit_sl_dest: ignore symlinks if they are pointing outside of + the defined path. + hardlink_dupes: hardlink duplicated pyc files + """ def compile_path( skip_curdir: bool = ..., @@ -192,13 +195,13 @@ def compile_path( ) -> bool: """Byte-compile all module on sys.path. 
-Arguments (all optional): + Arguments (all optional): -skip_curdir: if true, skip current directory (default True) -maxlevels: max recursion level (default 0) -force: as for compile_dir() (default False) -quiet: as for compile_dir() (default 0) -legacy: as for compile_dir() (default False) -optimize: as for compile_dir() (default -1) -invalidation_mode: as for compiler_dir() -""" + skip_curdir: if true, skip current directory (default True) + maxlevels: max recursion level (default 0) + force: as for compile_dir() (default False) + quiet: as for compile_dir() (default 0) + legacy: as for compile_dir() (default False) + optimize: as for compile_dir() (default -1) + invalidation_mode: as for compiler_dir() + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi index c3c85d6d2f0a8..77866c1a1b6b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/_common/_streams.pyi @@ -1,5 +1,5 @@ -"""Internal classes used by compression modules -""" +"""Internal classes used by compression modules""" + from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase @@ -14,12 +14,11 @@ class _Reader(Protocol): def seek(self, n: int, /) -> Any: ... class BaseStream(BufferedIOBase): - """Mode-checking helper functions. -""" + """Mode-checking helper functions.""" class DecompressReader(RawIOBase): - """Adapts the decompressor API to a RawIOBase reader API -""" + """Adapts the decompressor API to a RawIOBase reader API""" + def __init__( self, fp: _Reader, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi index c5653a0753f98..074404e205abf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/bz2.pyi @@ -3,4 +3,5 @@ This module provides a file interface, classes for incremental (de)compression, and functions for one-shot (de)compression. """ + from bz2 import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi index fb1e556c29d47..6cb4250d2fac2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/gzip.pyi @@ -3,4 +3,5 @@ The user of the file doesn't have to worry about the compression, but random access is not allowed. """ + from gzip import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi index 67bca904ab63f..07f407d24467d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/lzma.pyi @@ -7,4 +7,5 @@ one-shot (de)compression. These classes and functions support both the XZ and legacy LZMA container formats, as well as raw compressed data streams. 
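A short usage sketch of the compileall functions whose parameters are documented in the compileall.pyi hunk above; the paths are hypothetical and the keyword choices merely exercise the documented options.

import sys
import compileall
from py_compile import PycInvalidationMode

# Byte-compile a tree with two optimization levels, errors-only output,
# and parallel workers, per the compile_dir() parameters listed above.
ok = compileall.compile_dir(
    "src/mypkg",                      # hypothetical directory
    maxlevels=sys.getrecursionlimit(),
    quiet=1,                          # errors only
    optimize=[1, 2],                  # one compiled file per optimization level
    workers=4,
)

# Byte-compile a single file using hash-based pyc invalidation.
ok = compileall.compile_file(
    "src/mypkg/__init__.py",          # hypothetical file
    quiet=1,
    invalidation_mode=PycInvalidationMode.CHECKED_HASH,
) and ok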
""" + from lzma import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi index 93f1f7ccf5192..87241f3133134 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zlib.pyi @@ -12,4 +12,5 @@ decompressobj([wbits[, zdict]]) -- Return a decompressor object. Compressor objects support compress() and flush() methods; decompressor objects support decompress() and flush(). """ + from zlib import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi index f43fea2d723f3..d47f8eab3b051 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/__init__.pyi @@ -1,5 +1,5 @@ -"""Python bindings to the Zstandard (zstd) compression library (RFC-8878). -""" +"""Python bindings to the Zstandard (zstd) compression library (RFC-8878).""" + import enum from _typeshed import ReadableBuffer from collections.abc import Iterable, Mapping @@ -37,8 +37,8 @@ zstd_version_info: Final[tuple[int, int, int]] COMPRESSION_LEVEL_DEFAULT: Final = _zstd.ZSTD_CLEVEL_DEFAULT class FrameInfo: - """Information about a Zstandard frame. -""" + """Information about a Zstandard frame.""" + __slots__ = ("decompressed_size", "dictionary_id") decompressed_size: int dictionary_id: int @@ -47,71 +47,76 @@ class FrameInfo: def get_frame_info(frame_buffer: ReadableBuffer) -> FrameInfo: """Get Zstandard frame information from a frame header. -*frame_buffer* is a bytes-like object. It should start from the beginning -of a frame, and needs to include at least the frame header (6 to 18 bytes). - -The returned FrameInfo object has two attributes. -'decompressed_size' is the size in bytes of the data in the frame when -decompressed, or None when the decompressed size is unknown. -'dictionary_id' is an int in the range (0, 2**32). The special value 0 -means that the dictionary ID was not recorded in the frame header, -the frame may or may not need a dictionary to be decoded, -and the ID of such a dictionary is not specified. -""" + *frame_buffer* is a bytes-like object. It should start from the beginning + of a frame, and needs to include at least the frame header (6 to 18 bytes). + + The returned FrameInfo object has two attributes. + 'decompressed_size' is the size in bytes of the data in the frame when + decompressed, or None when the decompressed size is unknown. + 'dictionary_id' is an int in the range (0, 2**32). The special value 0 + means that the dictionary ID was not recorded in the frame header, + the frame may or may not need a dictionary to be decoded, + and the ID of such a dictionary is not specified. + """ + def train_dict(samples: Iterable[ReadableBuffer], dict_size: int) -> ZstdDict: """Return a ZstdDict representing a trained Zstandard dictionary. -*samples* is an iterable of samples, where a sample is a bytes-like -object representing a file. + *samples* is an iterable of samples, where a sample is a bytes-like + object representing a file. + + *dict_size* is the dictionary's maximum size, in bytes. + """ -*dict_size* is the dictionary's maximum size, in bytes. -""" def finalize_dict(zstd_dict: ZstdDict, /, samples: Iterable[ReadableBuffer], dict_size: int, level: int) -> ZstdDict: """Return a ZstdDict representing a finalized Zstandard dictionary. 
-Given a custom content as a basis for dictionary, and a set of samples, -finalize *zstd_dict* by adding headers and statistics according to the -Zstandard dictionary format. - -You may compose an effective dictionary content by hand, which is used as -basis dictionary, and use some samples to finalize a dictionary. The basis -dictionary may be a "raw content" dictionary. See *is_raw* in ZstdDict. - -*samples* is an iterable of samples, where a sample is a bytes-like object -representing a file. -*dict_size* is the dictionary's maximum size, in bytes. -*level* is the expected compression level. The statistics for each -compression level differ, so tuning the dictionary to the compression level -can provide improvements. -""" + Given a custom content as a basis for dictionary, and a set of samples, + finalize *zstd_dict* by adding headers and statistics according to the + Zstandard dictionary format. + + You may compose an effective dictionary content by hand, which is used as + basis dictionary, and use some samples to finalize a dictionary. The basis + dictionary may be a "raw content" dictionary. See *is_raw* in ZstdDict. + + *samples* is an iterable of samples, where a sample is a bytes-like object + representing a file. + *dict_size* is the dictionary's maximum size, in bytes. + *level* is the expected compression level. The statistics for each + compression level differ, so tuning the dictionary to the compression level + can provide improvements. + """ + def compress( data: ReadableBuffer, level: int | None = None, options: Mapping[int, int] | None = None, zstd_dict: ZstdDict | None = None ) -> bytes: """Return Zstandard compressed *data* as bytes. -*level* is an int specifying the compression level to use, defaulting to -COMPRESSION_LEVEL_DEFAULT ('3'). -*options* is a dict object that contains advanced compression -parameters. See CompressionParameter for more on options. -*zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See -the function train_dict for how to train a ZstdDict on sample data. + *level* is an int specifying the compression level to use, defaulting to + COMPRESSION_LEVEL_DEFAULT ('3'). + *options* is a dict object that contains advanced compression + parameters. See CompressionParameter for more on options. + *zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See + the function train_dict for how to train a ZstdDict on sample data. + + For incremental compression, use a ZstdCompressor instead. + """ -For incremental compression, use a ZstdCompressor instead. -""" def decompress(data: ReadableBuffer, zstd_dict: ZstdDict | None = None, options: Mapping[int, int] | None = None) -> bytes: """Decompress one or more frames of Zstandard compressed *data*. -*zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See -the function train_dict for how to train a ZstdDict on sample data. -*options* is a dict object that contains advanced compression -parameters. See DecompressionParameter for more on options. + *zstd_dict* is a ZstdDict object, a pre-trained Zstandard dictionary. See + the function train_dict for how to train a ZstdDict on sample data. + *options* is a dict object that contains advanced compression + parameters. See DecompressionParameter for more on options. + + For incremental decompression, use a ZstdDecompressor instead. + """ -For incremental decompression, use a ZstdDecompressor instead. -""" @final class CompressionParameter(enum.IntEnum): - """Compression parameters. 
-""" + """Compression parameters.""" + compression_level = _zstd.ZSTD_c_compressionLevel window_log = _zstd.ZSTD_c_windowLog hash_log = _zstd.ZSTD_c_hashLog @@ -134,28 +139,29 @@ class CompressionParameter(enum.IntEnum): def bounds(self) -> tuple[int, int]: """Return the (lower, upper) int bounds of a compression parameter. -Both the lower and upper bounds are inclusive. -""" + Both the lower and upper bounds are inclusive. + """ @final class DecompressionParameter(enum.IntEnum): - """Decompression parameters. -""" + """Decompression parameters.""" + window_log_max = _zstd.ZSTD_d_windowLogMax def bounds(self) -> tuple[int, int]: """Return the (lower, upper) int bounds of a decompression parameter. -Both the lower and upper bounds are inclusive. -""" + Both the lower and upper bounds are inclusive. + """ @final class Strategy(enum.IntEnum): """Compression strategies, listed from fastest to strongest. -Note that new strategies might be added in the future. -Only the order (from fast to strong) is guaranteed, -the numeric value might change. -""" + Note that new strategies might be added in the future. + Only the order (from fast to strong) is guaranteed, + the numeric value might change. + """ + fast = _zstd.ZSTD_fast dfast = _zstd.ZSTD_dfast greedy = _zstd.ZSTD_greedy diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi index be8e472ac04d1..8ba96fd7e3692 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/compression/zstd/_zstdfile.pyi @@ -26,12 +26,13 @@ class _FileBinaryWrite(SupportsWrite[bytes], Protocol): class ZstdFile(_streams.BaseStream): """A file-like object providing transparent Zstandard (de)compression. -A ZstdFile can act as a wrapper for an existing file object, or refer -directly to a named file on disk. + A ZstdFile can act as a wrapper for an existing file object, or refer + directly to a named file on disk. + + ZstdFile provides a *binary* file interface. Data is read and returned as + bytes, and may only be written to objects that support the Buffer Protocol. + """ -ZstdFile provides a *binary* file interface. Data is read and returned as -bytes, and may only be written to objects that support the Buffer Protocol. -""" FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME @@ -48,22 +49,23 @@ bytes, and may only be written to objects that support the Buffer Protocol. ) -> None: """Open a Zstandard compressed file in binary mode. -*file* can be either an file-like object, or a file name to open. + *file* can be either an file-like object, or a file name to open. + + *mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for + creating exclusively, or 'a' for appending. These can equivalently be + given as 'rb', 'wb', 'xb' and 'ab' respectively. -*mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for -creating exclusively, or 'a' for appending. These can equivalently be -given as 'rb', 'wb', 'xb' and 'ab' respectively. + *level* is an optional int specifying the compression level to use, + or COMPRESSION_LEVEL_DEFAULT if not given. -*level* is an optional int specifying the compression level to use, -or COMPRESSION_LEVEL_DEFAULT if not given. + *options* is an optional dict for advanced compression parameters. + See CompressionParameter and DecompressionParameter for the possible + options. 
-*options* is an optional dict for advanced compression parameters. -See CompressionParameter and DecompressionParameter for the possible -options. + *zstd_dict* is an optional ZstdDict object, a pre-trained Zstandard + dictionary. See train_dict() to train ZstdDict on sample data. + """ -*zstd_dict* is an optional ZstdDict object, a pre-trained Zstandard -dictionary. See train_dict() to train ZstdDict on sample data. -""" @overload def __init__( self, @@ -78,74 +80,83 @@ dictionary. See train_dict() to train ZstdDict on sample data. def write(self, data: ReadableBuffer, /) -> int: """Write a bytes-like object *data* to the file. -Returns the number of uncompressed bytes written, which is -always the length of data in bytes. Note that due to buffering, -the file on disk may not reflect the data written until .flush() -or .close() is called. -""" + Returns the number of uncompressed bytes written, which is + always the length of data in bytes. Note that due to buffering, + the file on disk may not reflect the data written until .flush() + or .close() is called. + """ + def flush(self, mode: _ZstdCompressorFlushBlock | _ZstdCompressorFlushFrame = 1) -> bytes: # type: ignore[override] """Flush remaining data to the underlying stream. -The mode argument can be FLUSH_BLOCK or FLUSH_FRAME. Abuse of this -method will reduce compression ratio, use it only when necessary. + The mode argument can be FLUSH_BLOCK or FLUSH_FRAME. Abuse of this + method will reduce compression ratio, use it only when necessary. -If the program is interrupted afterwards, all data can be recovered. -To ensure saving to disk, also need to use os.fsync(fd). + If the program is interrupted afterwards, all data can be recovered. + To ensure saving to disk, also need to use os.fsync(fd). + + This method does nothing in reading mode. + """ -This method does nothing in reading mode. -""" def read(self, size: int | None = -1) -> bytes: """Read up to size uncompressed bytes from the file. -If size is negative or omitted, read until EOF is reached. -Returns b'' if the file is already at EOF. -""" + If size is negative or omitted, read until EOF is reached. + Returns b'' if the file is already at EOF. + """ + def read1(self, size: int | None = -1) -> bytes: """Read up to size uncompressed bytes, while trying to avoid -making multiple reads from the underlying stream. Reads up to a -buffer's worth of data if size is negative. + making multiple reads from the underlying stream. Reads up to a + buffer's worth of data if size is negative. + + Returns b'' if the file is at EOF. + """ -Returns b'' if the file is at EOF. -""" def readinto(self, b: WriteableBuffer) -> int: """Read bytes into b. -Returns the number of bytes read (0 for EOF). -""" + Returns the number of bytes read (0 for EOF). + """ + def readinto1(self, b: WriteableBuffer) -> int: """Read bytes into b, while trying to avoid making multiple reads -from the underlying stream. + from the underlying stream. + + Returns the number of bytes read (0 for EOF). + """ -Returns the number of bytes read (0 for EOF). -""" def readline(self, size: int | None = -1) -> bytes: """Read a line of uncompressed bytes from the file. -The terminating newline (if present) is retained. If size is -non-negative, no more than size bytes will be read (in which -case the line may be incomplete). Returns b'' if already at EOF. -""" + The terminating newline (if present) is retained. If size is + non-negative, no more than size bytes will be read (in which + case the line may be incomplete). 
Returns b'' if already at EOF. + """ + def seek(self, offset: int, whence: int = 0) -> int: """Change the file position. -The new position is specified by offset, relative to the -position indicated by whence. Possible values for whence are: + The new position is specified by offset, relative to the + position indicated by whence. Possible values for whence are: - 0: start of stream (default): offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive + 0: start of stream (default): offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive -Returns the new file position. + Returns the new file position. + + Note that seeking is emulated, so depending on the arguments, + this operation may be extremely slow. + """ -Note that seeking is emulated, so depending on the arguments, -this operation may be extremely slow. -""" def peek(self, size: int = -1) -> bytes: """Return buffered data without advancing the file position. -Always returns at least one byte of data, unless at EOF. -The exact number of bytes returned is unspecified. -""" + Always returns at least one byte of data, unless at EOF. + The exact number of bytes returned is unspecified. + """ + @property def name(self) -> str | bytes: ... @property @@ -166,31 +177,32 @@ def open( ) -> ZstdFile: """Open a Zstandard compressed file in binary or text mode. -file can be either a file name (given as a str, bytes, or PathLike object), -in which case the named file is opened, or it can be an existing file object -to read from or write to. + file can be either a file name (given as a str, bytes, or PathLike object), + in which case the named file is opened, or it can be an existing file object + to read from or write to. + + The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a', + 'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode. -The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a', -'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode. + The level, options, and zstd_dict parameters specify the settings the same + as ZstdFile. -The level, options, and zstd_dict parameters specify the settings the same -as ZstdFile. + When using read mode (decompression), the options parameter is a dict + representing advanced decompression options. The level parameter is not + supported in this case. When using write mode (compression), only one of + level, an int representing the compression level, or options, a dict + representing advanced compression options, may be passed. In both modes, + zstd_dict is a ZstdDict instance containing a trained Zstandard dictionary. -When using read mode (decompression), the options parameter is a dict -representing advanced decompression options. The level parameter is not -supported in this case. When using write mode (compression), only one of -level, an int representing the compression level, or options, a dict -representing advanced compression options, may be passed. In both modes, -zstd_dict is a ZstdDict instance containing a trained Zstandard dictionary. + For binary mode, this function is equivalent to the ZstdFile constructor: + ZstdFile(filename, mode, ...). In this case, the encoding, errors and + newline parameters must not be provided. -For binary mode, this function is equivalent to the ZstdFile constructor: -ZstdFile(filename, mode, ...). In this case, the encoding, errors and -newline parameters must not be provided. 
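A brief sketch of the compression.zstd one-shot and file-based APIs documented in this hunk, assuming Python 3.14+ (where the module exists); the file name is hypothetical.

from compression import zstd

payload = b"typeshed " * 1000
blob = zstd.compress(payload)                  # one-shot, COMPRESSION_LEVEL_DEFAULT
assert zstd.decompress(blob) == payload

# ZstdFile / open() provide the transparent binary file interface described above.
with zstd.open("sample.zst", "wb") as f:       # hypothetical path; 'wb' = (over)write
    f.write(payload)
with zstd.open("sample.zst") as f:             # default mode is 'rb'
    assert f.read() == payload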
+ For text mode, an ZstdFile object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error handling + behavior, and line ending(s). + """ -For text mode, an ZstdFile object is created, and wrapped in an -io.TextIOWrapper instance with the specified encoding, error handling -behavior, and line ending(s). -""" @overload def open( file: StrOrBytesPath | _FileBinaryWrite, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi index f15c1dc0af774..251a453c6ee66 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/__init__.pyi @@ -1,5 +1,5 @@ -"""Execute computations asynchronously using threads or processes. -""" +"""Execute computations asynchronously using threads or processes.""" + import sys from ._base import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi index 4c4d618dd28a1..a1ab245dd8432 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/_base.pyi @@ -19,34 +19,32 @@ _STATE_TO_DESCRIPTION_MAP: Final[dict[str, str]] LOGGER: Logger class Error(Exception): - """Base class for all future-related exceptions. -""" + """Base class for all future-related exceptions.""" + class CancelledError(Error): - """The Future was cancelled. -""" + """The Future was cancelled.""" if sys.version_info >= (3, 11): from builtins import TimeoutError as TimeoutError else: class TimeoutError(Error): - """The operation exceeded the given deadline. -""" + """The operation exceeded the given deadline.""" class InvalidStateError(Error): - """The operation is not allowed in this state. -""" + """The operation is not allowed in this state.""" + class BrokenExecutor(RuntimeError): """ -Raised when a executor has become non-functional after a severe failure. -""" + Raised when a executor has become non-functional after a severe failure. + """ _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _P = ParamSpec("_P") class Future(Generic[_T]): - """Represents the result of an asynchronous computation. -""" + """Represents the result of an asynchronous computation.""" + _condition: threading.Condition _state: str _result: _T | None @@ -55,113 +53,120 @@ class Future(Generic[_T]): def cancel(self) -> bool: """Cancel the future if possible. -Returns True if the future was cancelled, False otherwise. A future -cannot be cancelled if it is running or has already completed. -""" + Returns True if the future was cancelled, False otherwise. A future + cannot be cancelled if it is running or has already completed. + """ + def cancelled(self) -> bool: - """Return True if the future was cancelled. -""" + """Return True if the future was cancelled.""" + def running(self) -> bool: - """Return True if the future is currently executing. -""" + """Return True if the future is currently executing.""" + def done(self) -> bool: - """Return True if the future was cancelled or finished executing. -""" + """Return True if the future was cancelled or finished executing.""" + def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: """Attaches a callable that will be called when the future finishes. 
-Args: - fn: A callable that will be called with this future as its only - argument when the future completes or is cancelled. The callable - will always be called by a thread in the same process in which - it was added. If the future has already completed or been - cancelled then the callable will be called immediately. These - callables are called in the order that they were added. -""" + Args: + fn: A callable that will be called with this future as its only + argument when the future completes or is cancelled. The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added. + """ + def result(self, timeout: float | None = None) -> _T: """Return the result of the call that the future represents. -Args: - timeout: The number of seconds to wait for the result if the future - isn't done. If None, then there is no limit on the wait time. + Args: + timeout: The number of seconds to wait for the result if the future + isn't done. If None, then there is no limit on the wait time. -Returns: - The result of the call that the future represents. + Returns: + The result of the call that the future represents. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + Exception: If the call raised then that exception will be raised. + """ -Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. - Exception: If the call raised then that exception will be raised. -""" def set_running_or_notify_cancel(self) -> bool: """Mark the future as running or process any cancel notifications. -Should only be used by Executor implementations and unit tests. + Should only be used by Executor implementations and unit tests. + + If the future has been cancelled (cancel() was called and returned + True) then any threads waiting on the future completing (though calls + to as_completed() or wait()) are notified and False is returned. -If the future has been cancelled (cancel() was called and returned -True) then any threads waiting on the future completing (though calls -to as_completed() or wait()) are notified and False is returned. + If the future was not cancelled then it is put in the running state + (future calls to running() will return True) and True is returned. -If the future was not cancelled then it is put in the running state -(future calls to running() will return True) and True is returned. + This method should be called by Executor implementations before + executing the work associated with this future. If this method returns + False then the work should not be executed. -This method should be called by Executor implementations before -executing the work associated with this future. If this method returns -False then the work should not be executed. + Returns: + False if the Future was cancelled, True otherwise. -Returns: - False if the Future was cancelled, True otherwise. + Raises: + RuntimeError: if this method was already called or if set_result() + or set_exception() was called. + """ -Raises: - RuntimeError: if this method was already called or if set_result() - or set_exception() was called. -""" def set_result(self, result: _T) -> None: """Sets the return value of work associated with the future. 
-Should only be used by Executor implementations and unit tests. -""" + Should only be used by Executor implementations and unit tests. + """ + def exception(self, timeout: float | None = None) -> BaseException | None: """Return the exception raised by the call that the future represents. -Args: - timeout: The number of seconds to wait for the exception if the - future isn't done. If None, then there is no limit on the wait - time. + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. -Returns: - The exception raised by the call that the future represents or None - if the call completed without raising. + Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + """ -Raises: - CancelledError: If the future was cancelled. - TimeoutError: If the future didn't finish executing before the given - timeout. -""" def set_exception(self, exception: BaseException | None) -> None: """Sets the result of the future as being the given exception. -Should only be used by Executor implementations and unit tests. -""" + Should only be used by Executor implementations and unit tests. + """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class Executor: - """This is an abstract base class for concrete asynchronous executors. -""" + """This is an abstract base class for concrete asynchronous executors.""" + def submit(self, fn: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: """Submits a callable to be executed with the given arguments. -Schedules the callable to be executed as fn(*args, **kwargs) and returns -a Future instance representing the execution of the callable. + Schedules the callable to be executed as fn(*args, **kwargs) and returns + a Future instance representing the execution of the callable. -Returns: - A Future representing the given call. -""" + Returns: + A Future representing the given call. + """ if sys.version_info >= (3, 14): def map( self, @@ -173,70 +178,71 @@ Returns: ) -> Iterator[_T]: """Returns an iterator equivalent to map(fn, iter). -Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: The size of the chunks the iterable will be broken into - before being passed to a child process. This argument is only - used by ProcessPoolExecutor; it is ignored by - ThreadPoolExecutor. - buffersize: The number of submitted tasks whose results have not - yet been yielded. If the buffer is full, iteration over the - iterables pauses until a result is yielded from the buffer. - If None, all input elements are eagerly collected, and a task is - submitted for each. - -Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - -Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. -""" + Args: + fn: A callable that will take as many arguments as there are + passed iterables. 
+ timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken into + before being passed to a child process. This argument is only + used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. + buffersize: The number of submitted tasks whose results have not + yet been yielded. If the buffer is full, iteration over the + iterables pauses until a result is yielded from the buffer. + If None, all input elements are eagerly collected, and a task is + submitted for each. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ else: def map( self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 ) -> Iterator[_T]: """Returns an iterator equivalent to map(fn, iter). -Args: - fn: A callable that will take as many arguments as there are - passed iterables. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - chunksize: The size of the chunks the iterable will be broken into - before being passed to a child process. This argument is only - used by ProcessPoolExecutor; it is ignored by - ThreadPoolExecutor. - -Returns: - An iterator equivalent to: map(func, *iterables) but the calls may - be evaluated out-of-order. - -Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. - Exception: If fn(*args) raises for any values. -""" + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken into + before being passed to a child process. This argument is only + used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: """Clean-up the resources associated with the Executor. -It is safe to call this method several times. Otherwise, no other -methods can be called after this one. - -Args: - wait: If True then shutdown will not return until all running - futures have finished executing and the resources used by the - executor have been reclaimed. - cancel_futures: If True then shutdown will cancel all pending - futures. Futures that are completed or running will not be - cancelled. -""" + It is safe to call this method several times. Otherwise, no other + methods can be called after this one. + + Args: + wait: If True then shutdown will not return until all running + futures have finished executing and the resources used by the + executor have been reclaimed. + cancel_futures: If True then shutdown will cancel all pending + futures. Futures that are completed or running will not be + cancelled. + """ + def __enter__(self) -> Self: ... 
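To ground the Future and Executor docstrings above, a minimal sketch of the submit/result/map workflow using the standard ThreadPoolExecutor (illustrative values only).

from concurrent.futures import ThreadPoolExecutor

def square(x: int) -> int:
    return x * x

with ThreadPoolExecutor(max_workers=2) as pool:
    fut = pool.submit(square, 7)             # submit() returns a Future
    assert fut.result(timeout=5) == 49       # blocks until done, or raises TimeoutError
    assert fut.exception() is None           # completed without raising
    # map() may evaluate calls out of order but yields results in input order
    assert list(pool.map(square, range(4))) == [0, 1, 4, 9]
# leaving the with-block calls shutdown(wait=True)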
def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -256,59 +262,57 @@ class _AsCompletedFuture(Protocol[_T_co]): def as_completed(fs: Iterable[_AsCompletedFuture[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: """An iterator over the given futures that yields each as it completes. -Args: - fs: The sequence of Futures (possibly created by different Executors) to - iterate over. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. + Args: + fs: The sequence of Futures (possibly created by different Executors) to + iterate over. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. -Returns: - An iterator that yields the given Futures as they complete (finished or - cancelled). If any given Futures are duplicated, they will be returned - once. + Returns: + An iterator that yields the given Futures as they complete (finished or + cancelled). If any given Futures are duplicated, they will be returned + once. -Raises: - TimeoutError: If the entire result iterator could not be generated - before the given timeout. -""" + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + """ class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): - """DoneAndNotDoneFutures(done, not_done) -""" + """DoneAndNotDoneFutures(done, not_done)""" + done: set[Future[_T]] not_done: set[Future[_T]] -def wait( - fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" -) -> DoneAndNotDoneFutures[_T]: +def wait(fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> DoneAndNotDoneFutures[_T]: """Wait for the futures in the given sequence to complete. -Args: - fs: The sequence of Futures (possibly created by different Executors) to - wait upon. - timeout: The maximum number of seconds to wait. If None, then there - is no limit on the wait time. - return_when: Indicates when this function should return. The options - are: - - FIRST_COMPLETED - Return when any future finishes or is - cancelled. - FIRST_EXCEPTION - Return when any future finishes by raising an - exception. If no future raises an exception - then it is equivalent to ALL_COMPLETED. - ALL_COMPLETED - Return when all futures finish or are cancelled. - -Returns: - A named 2-tuple of sets. The first set, named 'done', contains the - futures that completed (is finished or cancelled) before the wait - completed. The second set, named 'not_done', contains uncompleted - futures. Duplicate futures given to *fs* are removed and will be - returned only once. -""" + Args: + fs: The sequence of Futures (possibly created by different Executors) to + wait upon. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + return_when: Indicates when this function should return. The options + are: + + FIRST_COMPLETED - Return when any future finishes or is + cancelled. + FIRST_EXCEPTION - Return when any future finishes by raising an + exception. If no future raises an exception + then it is equivalent to ALL_COMPLETED. + ALL_COMPLETED - Return when all futures finish or are cancelled. + + Returns: + A named 2-tuple of sets. The first set, named 'done', contains the + futures that completed (is finished or cancelled) before the wait + completed. The second set, named 'not_done', contains uncompleted + futures. 
Duplicate futures given to *fs* are removed and will be + returned only once. + """ class _Waiter: - """Provides the event that wait() and as_completed() block on. -""" + """Provides the event that wait() and as_completed() block on.""" + event: threading.Event finished_futures: list[Future[Any]] def add_result(self, future: Future[Any]) -> None: ... @@ -316,25 +320,24 @@ class _Waiter: def add_cancelled(self, future: Future[Any]) -> None: ... class _AsCompletedWaiter(_Waiter): - """Used by as_completed(). -""" + """Used by as_completed().""" + lock: threading.Lock class _FirstCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_COMPLETED). -""" + """Used by wait(return_when=FIRST_COMPLETED).""" class _AllCompletedWaiter(_Waiter): - """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED). -""" + """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" + num_pending_calls: int stop_on_exception: bool lock: threading.Lock def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... class _AcquireFutures: - """A context manager that does an ordered acquire of Future conditions. -""" + """A context manager that does an ordered acquire of Future conditions.""" + futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi index 7cec6e114d3e0..20ff4dd679c37 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/interpreter.pyi @@ -1,5 +1,5 @@ -"""Implements InterpreterPoolExecutor. -""" +"""Implements InterpreterPoolExecutor.""" + import sys from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor @@ -41,8 +41,8 @@ if sys.version_info >= (3, 14): class BrokenInterpreterPool(BrokenThreadPool): """ -Raised when a worker thread in an InterpreterPoolExecutor failed initializing. -""" + Raised when a worker thread in an InterpreterPoolExecutor failed initializing. + """ class InterpreterPoolExecutor(ThreadPoolExecutor): BROKEN: type[BrokenInterpreterPool] @@ -67,14 +67,15 @@ Raised when a worker thread in an InterpreterPoolExecutor failed initializing. ) -> None: """Initializes a new InterpreterPoolExecutor instance. -Args: - max_workers: The maximum number of interpreters that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - initializer: A callable or script used to initialize - each worker interpreter. - initargs: A tuple of arguments to pass to the initializer. -""" + Args: + max_workers: The maximum number of interpreters that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. + initializer: A callable or script used to initialize + each worker interpreter. + initargs: A tuple of arguments to pass to the initializer. 
+ """ + @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi index c1e59fd272110..0264ceba46f4b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/process.pyi @@ -39,6 +39,7 @@ Process #1..n: - reads _CallItems from "Call Q", executes the calls, and puts the resulting _ResultItems in "Result Q" """ + import sys from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, MutableSequence from multiprocessing.connection import Connection @@ -112,8 +113,8 @@ class _CallItem: def __init__(self, work_id: int, fn: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... class _SafeQueue(Queue[Future[Any]]): - """Safe Queue set exception to the future object linked to a job -""" + """Safe Queue set exception to the future object linked to a job""" + pending_work_items: dict[int, _WorkItem[Any]] if sys.version_info < (3, 12): shutdown_lock: Lock @@ -141,17 +142,17 @@ class _SafeQueue(Queue[Future[Any]]): def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: - """ Iterates over zip()ed iterables in chunks. -""" + """Iterates over zip()ed iterables in chunks.""" + def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: """Processes a chunk of an iterable passed to map. -Runs the function passed to map() on a chunk of the -iterable passed to map. + Runs the function passed to map() on a chunk of the + iterable passed to map. -This function is run in a separate process. + This function is run in a separate process. -""" + """ if sys.version_info >= (3, 11): def _sendback_result( @@ -161,15 +162,13 @@ if sys.version_info >= (3, 11): exception: Exception | None = None, exit_pid: int | None = None, ) -> None: - """Safely send back the given result or exception -""" + """Safely send back the given result or exception""" else: def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None ) -> None: - """Safely send back the given result or exception -""" + """Safely send back the given result or exception""" if sys.version_info >= (3, 11): def _process_worker( @@ -181,16 +180,16 @@ if sys.version_info >= (3, 11): ) -> None: """Evaluates calls from call_queue and places the results in result_queue. -This worker is run in a separate process. + This worker is run in a separate process. -Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. - initializer: A callable initializer, or None - initargs: A tuple of args for the initializer -""" + Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. + initializer: A callable initializer, or None + initargs: A tuple of args for the initializer + """ else: def _process_worker( @@ -201,28 +200,29 @@ else: ) -> None: """Evaluates calls from call_queue and places the results in result_queue. - This worker is run in a separate process. + This worker is run in a separate process. 
- Args: - call_queue: A ctx.Queue of _CallItems that will be read and - evaluated by the worker. - result_queue: A ctx.Queue of _ResultItems that will written - to by the worker. - initializer: A callable initializer, or None - initargs: A tuple of args for the initializer - """ + Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. + initializer: A callable initializer, or None + initargs: A tuple of args for the initializer + """ class _ExecutorManagerThread(Thread): """Manages the communication between this process and the worker processes. -The manager is run in a local thread. + The manager is run in a local thread. + + Args: + executor: A reference to the ProcessPoolExecutor that owns + this thread. A weakref will be own by the manager as well as + references to internal objects used to introspect the state of + the executor. + """ -Args: - executor: A reference to the ProcessPoolExecutor that owns - this thread. A weakref will be own by the manager as well as - references to internal objects used to introspect the state of - the executor. -""" thread_wakeup: _ThreadWakeup shutdown_lock: Lock executor_reference: ref[Any] @@ -249,16 +249,16 @@ _system_limited: bool | None def _check_system_limits() -> None: ... def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: """ -Specialized implementation of itertools.chain.from_iterable. -Each item in *iterable* should be a list. This function is -careful not to keep references to yielded objects. -""" + Specialized implementation of itertools.chain.from_iterable. + Each item in *iterable* should be a list. This function is + careful not to keep references to yielded objects. + """ class BrokenProcessPool(BrokenExecutor): """ -Raised when a process in a ProcessPoolExecutor terminated abruptly -while a future was in the running state. -""" + Raised when a process in a ProcessPoolExecutor terminated abruptly + while a future was in the running state. + """ class ProcessPoolExecutor(Executor): _mp_context: BaseContext | None @@ -289,22 +289,23 @@ class ProcessPoolExecutor(Executor): ) -> None: """Initializes a new ProcessPoolExecutor instance. -Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers created - using the multiprocessing.get_context('start method') API. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - max_tasks_per_child: The maximum number of tasks a worker process - can complete before it will exit and be replaced with a fresh - worker process. The default of None means worker process will - live as long as the executor. Requires a non-'fork' mp_context - start method. When given, we default to using 'spawn' if no - mp_context is supplied. -""" + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + mp_context: A multiprocessing context to launch the workers created + using the multiprocessing.get_context('start method') API. This + object should provide SimpleQueue, Queue and Process. 
+ initializer: A callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. + max_tasks_per_child: The maximum number of tasks a worker process + can complete before it will exit and be replaced with a fresh + worker process. The default of None means worker process will + live as long as the executor. Requires a non-'fork' mp_context + start method. When given, we default to using 'spawn' if no + mp_context is supplied. + """ + @overload def __init__( self, @@ -336,15 +337,16 @@ Args: ) -> None: """Initializes a new ProcessPoolExecutor instance. - Args: - max_workers: The maximum number of processes that can be used to - execute the given calls. If None or not given then as many - worker processes will be created as the machine has processors. - mp_context: A multiprocessing context to launch the workers. This - object should provide SimpleQueue, Queue and Process. - initializer: A callable used to initialize worker processes. - initargs: A tuple of arguments to pass to the initializer. - """ + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + mp_context: A multiprocessing context to launch the workers. This + object should provide SimpleQueue, Queue and Process. + initializer: A callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. + """ + @overload def __init__( self, @@ -369,19 +371,20 @@ Args: if sys.version_info >= (3, 14): def kill_workers(self) -> None: """Attempts to kill the executor's workers. -Iterates through all of the current worker processes and kills -each one that is still alive. + Iterates through all of the current worker processes and kills + each one that is still alive. + + After killing workers, the pool will be in a broken state + and no longer usable (for instance, new tasks should not be + submitted). + """ -After killing workers, the pool will be in a broken state -and no longer usable (for instance, new tasks should not be -submitted). -""" def terminate_workers(self) -> None: """Attempts to terminate the executor's workers. -Iterates through all of the current worker processes and terminates -each one that is still alive. + Iterates through all of the current worker processes and terminates + each one that is still alive. -After terminating workers, the pool will be in a broken state -and no longer usable (for instance, new tasks should not be -submitted). -""" + After terminating workers, the pool will be in a broken state + and no longer usable (for instance, new tasks should not be + submitted). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi index 98747afb2c6d5..8bf83dc8b2304 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/futures/thread.pyi @@ -1,5 +1,5 @@ -"""Implements ThreadPoolExecutor. -""" +"""Implements ThreadPoolExecutor.""" + import queue import sys from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet @@ -62,8 +62,8 @@ if sys.version_info >= (3, 14): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def _worker(executor_reference: ref[Any], ctx: WorkerContext, work_queue: queue.SimpleQueue[Any]) -> None: ... @@ -78,8 +78,8 @@ else: def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def _worker( executor_reference: ref[Any], @@ -90,8 +90,8 @@ E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). class BrokenThreadPool(BrokenExecutor): """ -Raised when a worker thread in a ThreadPoolExecutor failed initializing. -""" + Raised when a worker thread in a ThreadPoolExecutor failed initializing. + """ class ThreadPoolExecutor(Executor): if sys.version_info >= (3, 14): @@ -134,14 +134,15 @@ class ThreadPoolExecutor(Executor): ) -> None: """Initializes a new ThreadPoolExecutor instance. -Args: - max_workers: The maximum number of threads that can be used to - execute the given calls. - thread_name_prefix: An optional name prefix to give our threads. - initializer: A callable used to initialize worker threads. - initargs: A tuple of arguments to pass to the initializer. - ctxkwargs: Additional arguments to cls.prepare_context(). -""" + Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. + initializer: A callable used to initialize worker threads. + initargs: A tuple of arguments to pass to the initializer. + ctxkwargs: Additional arguments to cls.prepare_context(). + """ + @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi index f4357e9a9e3e0..3485bb69cd50a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/__init__.pyi @@ -1,5 +1,5 @@ -"""Subinterpreters High Level Module. -""" +"""Subinterpreters High Level Module.""" + import sys import threading import types @@ -42,37 +42,38 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < class ExecutionFailed(InterpreterError): """An unhandled exception happened during execution. -This is raised from Interpreter.exec() and Interpreter.call(). -""" + This is raised from Interpreter.exec() and Interpreter.call(). + """ + excinfo: types.SimpleNamespace def __init__(self, excinfo: types.SimpleNamespace) -> None: ... def create() -> Interpreter: - """Return a new (idle) Python interpreter. -""" + """Return a new (idle) Python interpreter.""" + def list_all() -> list[Interpreter]: - """Return all existing interpreters. -""" + """Return all existing interpreters.""" + def get_current() -> Interpreter: - """Return the currently running interpreter. -""" + """Return the currently running interpreter.""" + def get_main() -> Interpreter: - """Return the main interpreter. -""" + """Return the main interpreter.""" class Interpreter: """A single Python interpreter. 
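A minimal sketch of the ThreadPoolExecutor keyword arguments documented just above (not part of the stub); the initializer runs once in every worker thread before it picks up work:

    import threading
    from concurrent.futures import ThreadPoolExecutor

    def init_worker(tag: str) -> None:
        # Executed in each worker thread; thread names carry the given prefix.
        print(threading.current_thread().name, "initialized with", tag)

    with ThreadPoolExecutor(
        max_workers=4,
        thread_name_prefix="demo",
        initializer=init_worker,
        initargs=("example",),
    ) as pool:
        print(pool.submit(pow, 2, 10).result())  # 1024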
-Attributes: + Attributes: + + "id" - the unique process-global ID number for the interpreter + "whence" - indicates where the interpreter was created -"id" - the unique process-global ID number for the interpreter -"whence" - indicates where the interpreter was created + If the interpreter wasn't created by this module + then any method that modifies the interpreter will fail, + i.e. .close(), .prepare_main(), .exec(), and .call() + """ -If the interpreter wasn't created by this module -then any method that modifies the interpreter will fail, -i.e. .close(), .prepare_main(), .exec(), and .call() -""" def __new__(cls, id: int, /, _whence: _Whence | None = None, _ownsref: bool | None = None) -> Self: ... def __reduce__(self) -> tuple[type[Self], int]: ... def __hash__(self) -> int: ... @@ -84,54 +85,58 @@ i.e. .close(), .prepare_main(), .exec(), and .call() self, ) -> Literal["unknown", "runtime init", "legacy C-API", "C-API", "cross-interpreter C-API", "_interpreters module"]: ... def is_running(self) -> bool: - """Return whether or not the identified interpreter is running. -""" + """Return whether or not the identified interpreter is running.""" + def close(self) -> None: """Finalize and destroy the interpreter. -Attempting to destroy the current interpreter results -in an InterpreterError. -""" + Attempting to destroy the current interpreter results + in an InterpreterError. + """ + def prepare_main( self, ns: _SharedDict | None = None, /, **kwargs: Any ) -> None: # kwargs has same value restrictions as _SharedDict """Bind the given values into the interpreter's __main__. -The values must be shareable. -""" + The values must be shareable. + """ + def exec(self, code: str | types.CodeType | Callable[[], object], /) -> None: """Run the given source code in the interpreter. -This is essentially the same as calling the builtin "exec" -with this interpreter, using the __dict__ of its __main__ -module as both globals and locals. + This is essentially the same as calling the builtin "exec" + with this interpreter, using the __dict__ of its __main__ + module as both globals and locals. -There is no return value. + There is no return value. -If the code raises an unhandled exception then an ExecutionFailed -exception is raised, which summarizes the unhandled exception. -The actual exception is discarded because objects cannot be -shared between interpreters. + If the code raises an unhandled exception then an ExecutionFailed + exception is raised, which summarizes the unhandled exception. + The actual exception is discarded because objects cannot be + shared between interpreters. + + This blocks the current Python thread until done. During + that time, the previous interpreter is allowed to run + in other threads. + """ -This blocks the current Python thread until done. During -that time, the previous interpreter is allowed to run -in other threads. -""" def call(self, callable: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: """Call the object in the interpreter with given args/kwargs. -Nearly all callables, args, kwargs, and return values are -supported. All "shareable" objects are supported, as are -"stateless" functions (meaning non-closures that do not use -any globals). This method will fall back to pickle. + Nearly all callables, args, kwargs, and return values are + supported. All "shareable" objects are supported, as are + "stateless" functions (meaning non-closures that do not use + any globals). This method will fall back to pickle. 
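A minimal sketch of the Interpreter API declared above (not part of the stub; assumes a Python build new enough to ship concurrent.interpreters):

    from concurrent import interpreters

    interp = interpreters.create()      # a new, idle interpreter
    interp.exec("x = 6 * 7")            # runs in that interpreter's __main__
    print(interp.call(len, "spam"))     # 4; call() falls back to pickle if needed
    interp.close()                      # finalize and destroy it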
+ + If the callable raises an exception then the error display + (including full traceback) is sent back between the interpreters + and an ExecutionFailed exception is raised, much like what + happens with Interpreter.exec(). + """ -If the callable raises an exception then the error display -(including full traceback) is sent back between the interpreters -and an ExecutionFailed exception is raised, much like what -happens with Interpreter.exec(). -""" def call_in_thread(self, callable: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> threading.Thread: """Return a new thread that calls the object in the interpreter. -The return value and any raised exception are discarded. -""" + The return value and any raised exception are discarded. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi index 46a0acf6814b8..372ac39270544 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_crossinterp.pyi @@ -1,5 +1,5 @@ -"""Common code between queues and channels. -""" +"""Common code between queues and channels.""" + import sys from collections.abc import Callable from typing import Final, NewType @@ -9,8 +9,8 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < from _interpqueues import _UnboundOp class ItemInterpreterDestroyed(Exception): - """Raised when trying to get an item whose interpreter was destroyed. -""" + """Raised when trying to get an item whose interpreter was destroyed.""" + # Actually a descriptor that behaves similarly to classmethod but prevents # access from instances. classonly = classmethod @@ -18,18 +18,19 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < class UnboundItem: """Represents a cross-interpreter item no longer bound to an interpreter. -An item is unbound when the interpreter that added it to the -cross-interpreter container is destroyed. -""" + An item is unbound when the interpreter that added it to the + cross-interpreter container is destroyed. + """ + __slots__ = () def __new__(cls) -> Never: ... @classonly def singleton(cls, kind: str, module: str, name: str = "UNBOUND") -> Self: """A non-data descriptor that makes a value only visible on the class. -This is like the "classmethod" builtin, but does not show up on -instances of the class. It may be used as a decorator. -""" + This is like the "classmethod" builtin, but does not show up on + instances of the class. It may be used as a decorator. + """ # Sentinel types and alias that don't exist at runtime. _UnboundErrorType = NewType("_UnboundErrorType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi index e4976492efad6..e134d97e217fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/concurrent/interpreters/_queues.pyi @@ -1,5 +1,5 @@ -"""Cross-interpreter Queues High Level Module. 
-""" +"""Cross-interpreter Queues High Level Module.""" + import queue import sys from typing import Final, SupportsIndex @@ -28,34 +28,36 @@ if sys.version_info >= (3, 13): # needed to satisfy pyright checks for Python < class QueueEmpty(QueueError, queue.Empty): """Raised from get_nowait() when the queue is empty. -It is also raised from get() if it times out. -""" + It is also raised from get() if it times out. + """ + class QueueFull(QueueError, queue.Full): """Raised from put_nowait() when the queue is full. -It is also raised from put() if it times out. -""" + It is also raised from put() if it times out. + """ + class ItemInterpreterDestroyed(QueueError, _crossinterp.ItemInterpreterDestroyed): - """Raised from get() and get_nowait(). -""" + """Raised from get() and get_nowait().""" + UNBOUND: Final[UnboundItem] def create(maxsize: int = 0, *, unbounditems: _AnyUnbound = ...) -> Queue: """Return a new cross-interpreter queue. -The queue may be used to pass data safely between interpreters. + The queue may be used to pass data safely between interpreters. + + "unbounditems" sets the default for Queue.put(); see that method for + supported values. The default value is UNBOUND, which replaces + the unbound item. + """ -"unbounditems" sets the default for Queue.put(); see that method for -supported values. The default value is UNBOUND, which replaces -the unbound item. -""" def list_all() -> list[Queue]: - """Return a list of all open queues. -""" + """Return a list of all open queues.""" class Queue: - """A cross-interpreter queue. -""" + """A cross-interpreter queue.""" + def __new__(cls, id: int, /) -> Self: ... def __del__(self) -> None: ... def __hash__(self) -> int: ... @@ -81,39 +83,39 @@ the unbound item. ) -> None: """Add the object to the queue. -If "block" is true, this blocks while the queue is full. - -For most objects, the object received through Queue.get() will -be a new one, equivalent to the original and not sharing any -actual underlying data. The notable exceptions include -cross-interpreter types (like Queue) and memoryview, where the -underlying data is actually shared. Furthermore, some types -can be sent through a queue more efficiently than others. This -group includes various immutable types like int, str, bytes, and -tuple (if the items are likewise efficiently shareable). See interpreters.is_shareable(). - -"unbounditems" controls the behavior of Queue.get() for the given -object if the current interpreter (calling put()) is later -destroyed. - -If "unbounditems" is None (the default) then it uses the -queue's default, set with create_queue(), -which is usually UNBOUND. - -If "unbounditems" is UNBOUND_ERROR then get() will raise an -ItemInterpreterDestroyed exception if the original interpreter -has been destroyed. This does not otherwise affect the queue; -the next call to put() will work like normal, returning the next -item in the queue. - -If "unbounditems" is UNBOUND_REMOVE then the item will be removed -from the queue as soon as the original interpreter is destroyed. -Be aware that this will introduce an imbalance between put() -and get() calls. - -If "unbounditems" is UNBOUND then it is returned by get() in place -of the unbound item. -""" + If "block" is true, this blocks while the queue is full. + + For most objects, the object received through Queue.get() will + be a new one, equivalent to the original and not sharing any + actual underlying data. 
The notable exceptions include + cross-interpreter types (like Queue) and memoryview, where the + underlying data is actually shared. Furthermore, some types + can be sent through a queue more efficiently than others. This + group includes various immutable types like int, str, bytes, and + tuple (if the items are likewise efficiently shareable). See interpreters.is_shareable(). + + "unbounditems" controls the behavior of Queue.get() for the given + object if the current interpreter (calling put()) is later + destroyed. + + If "unbounditems" is None (the default) then it uses the + queue's default, set with create_queue(), + which is usually UNBOUND. + + If "unbounditems" is UNBOUND_ERROR then get() will raise an + ItemInterpreterDestroyed exception if the original interpreter + has been destroyed. This does not otherwise affect the queue; + the next call to put() will work like normal, returning the next + item in the queue. + + If "unbounditems" is UNBOUND_REMOVE then the item will be removed + from the queue as soon as the original interpreter is destroyed. + Be aware that this will introduce an imbalance between put() + and get() calls. + + If "unbounditems" is UNBOUND then it is returned by get() in place + of the unbound item. + """ else: def put( self, @@ -129,18 +131,18 @@ of the unbound item. def get(self, block: bool = True, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: """Return the next object from the queue. -If "block" is true, this blocks while the queue is empty. + If "block" is true, this blocks while the queue is empty. -If the next item's original interpreter has been destroyed -then the "next object" is determined by the value of the -"unbounditems" argument to put(). -""" + If the next item's original interpreter has been destroyed + then the "next object" is determined by the value of the + "unbounditems" argument to put(). + """ else: def get(self, timeout: SupportsIndex | None = None, *, _delay: float = 0.01) -> object: ... def get_nowait(self) -> object: """Return the next object from the channel. -If the queue is empty then raise QueueEmpty. Otherwise this -is the same as get(). -""" + If the queue is empty then raise QueueEmpty. Otherwise this + is the same as get(). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi index a5f2170bac067..18c687b76368f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/configparser.pyi @@ -142,6 +142,7 @@ ConfigParser -- responsible for parsing a list of `space_around_delimiters` is True (the default), delimiters between keys and values are surrounded by spaces. """ + import sys from _typeshed import MaybeNone, StrOrBytesPath, SupportsWrite from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence @@ -272,8 +273,8 @@ DEFAULTSECT: Final = "DEFAULT" MAX_INTERPOLATION_DEPTH: Final = 10 class Interpolation: - """Dummy interpolation that passes the value through with no changes. -""" + """Dummy interpolation that passes the value through with no changes.""" + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, defaults: _Section) -> str: ... def before_set(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... def before_read(self, parser: _Parser, section: _SectionName, option: str, value: str) -> str: ... 
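A hedged sketch of passing data between interpreters with the queue API above (not part of the stub; it assumes the create_queue() constructor that put()'s docstring refers to is exposed on concurrent.interpreters):

    from concurrent import interpreters

    q = interpreters.create_queue()     # assumed spelling of the constructor
    interp = interpreters.create()
    interp.prepare_main(q=q)            # Queue objects are shareable, per put()
    interp.exec("q.put('hello from the subinterpreter')")
    print(q.get(timeout=5))
    interp.close()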
@@ -282,22 +283,23 @@ class Interpolation: class BasicInterpolation(Interpolation): """Interpolation as implemented in the classic ConfigParser. -The option values can contain format strings which refer to other values in -the same section, or values in the special default section. + The option values can contain format strings which refer to other values in + the same section, or values in the special default section. -For example: + For example: - something: %(dir)s/whatever + something: %(dir)s/whatever + + would resolve the "%(dir)s" to the value of dir. All reference + expansions are done late, on demand. If a user needs to use a bare % in + a configuration file, she can escape it by writing %%. Other % usage + is considered a user error and raises `InterpolationSyntaxError`. + """ -would resolve the "%(dir)s" to the value of dir. All reference -expansions are done late, on demand. If a user needs to use a bare % in -a configuration file, she can escape it by writing %%. Other % usage -is considered a user error and raises `InterpolationSyntaxError`. -""" class ExtendedInterpolation(Interpolation): """Advanced variant of interpolation, supports the syntax used by -`zc.buildout`. Enables interpolation between sections. -""" + `zc.buildout`. Enables interpolation between sections. + """ if sys.version_info < (3, 13): @deprecated( @@ -305,13 +307,14 @@ if sys.version_info < (3, 13): ) class LegacyInterpolation(Interpolation): """Deprecated interpolation used in old versions of ConfigParser. - Use BasicInterpolation or ExtendedInterpolation instead. -""" + Use BasicInterpolation or ExtendedInterpolation instead. + """ + def before_get(self, parser: _Parser, section: _SectionName, option: str, value: str, vars: _Section) -> str: ... class RawConfigParser(_Parser): - """ConfigParser that does not do interpolation. -""" + """ConfigParser that does not do interpolation.""" + _SECT_TMPL: ClassVar[str] # undocumented _OPT_TMPL: ClassVar[str] # undocumented _OPT_NV_TMPL: ClassVar[str] # undocumented @@ -433,68 +436,72 @@ class RawConfigParser(_Parser): def __contains__(self, key: object) -> bool: ... def defaults(self) -> _Section: ... def sections(self) -> _SectionNameList: - """Return a list of section names, excluding [DEFAULT] -""" + """Return a list of section names, excluding [DEFAULT]""" + def add_section(self, section: _SectionName) -> None: """Create a new section in the configuration. -Raise DuplicateSectionError if a section by the specified name -already exists. Raise ValueError if name is DEFAULT. -""" + Raise DuplicateSectionError if a section by the specified name + already exists. Raise ValueError if name is DEFAULT. + """ + def has_section(self, section: _SectionName) -> bool: """Indicate whether the named section is present in the configuration. -The DEFAULT section is not acknowledged. -""" + The DEFAULT section is not acknowledged. + """ + def options(self, section: _SectionName) -> list[str]: - """Return a list of option names for the given section name. -""" + """Return a list of option names for the given section name.""" + def has_option(self, section: _SectionName, option: str) -> bool: """Check for the existence of a given option in a given section. -If the specified `section` is None or an empty string, DEFAULT is -assumed. If the specified `section` does not exist, returns False. -""" + If the specified `section` is None or an empty string, DEFAULT is + assumed. If the specified `section` does not exist, returns False. 
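A short sketch of the two interpolation styles described above (not part of the stub):

    from configparser import ConfigParser, ExtendedInterpolation

    basic = ConfigParser()  # BasicInterpolation is the default
    basic.read_string("[paths]\ndir = /tmp\nsomething = %(dir)s/whatever\n")
    assert basic.get("paths", "something") == "/tmp/whatever"

    extended = ConfigParser(interpolation=ExtendedInterpolation())
    extended.read_string("[paths]\ndir = /tmp\nsomething = ${paths:dir}/whatever\n")
    assert extended["paths"]["something"] == "/tmp/whatever"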
+ """ + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: """Read and parse a filename or an iterable of filenames. -Files that cannot be opened are silently ignored; this is -designed so that you can specify an iterable of potential -configuration file locations (e.g. current directory, user's -home directory, systemwide directory), and all existing -configuration files in the iterable will be read. A single -filename may also be given. + Files that cannot be opened are silently ignored; this is + designed so that you can specify an iterable of potential + configuration file locations (e.g. current directory, user's + home directory, systemwide directory), and all existing + configuration files in the iterable will be read. A single + filename may also be given. + + Return list of successfully read files. + """ -Return list of successfully read files. -""" def read_file(self, f: Iterable[str], source: str | None = None) -> None: """Like read() but the argument must be a file-like object. -The `f` argument must be iterable, returning one line at a time. -Optional second argument is the `source` specifying the name of the -file being read. If not given, it is taken from f.name. If `f` has no -`name` attribute, `` is used. -""" + The `f` argument must be iterable, returning one line at a time. + Optional second argument is the `source` specifying the name of the + file being read. If not given, it is taken from f.name. If `f` has no + `name` attribute, `` is used. + """ + def read_string(self, string: str, source: str = "") -> None: - """Read configuration from a given string. -""" + """Read configuration from a given string.""" + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: """Read configuration from a dictionary. -Keys are section names, values are dictionaries with keys and values -that should be present in the section. If the used dictionary type -preserves order, sections and their keys will be added in order. + Keys are section names, values are dictionaries with keys and values + that should be present in the section. If the used dictionary type + preserves order, sections and their keys will be added in order. -All types held in the dictionary are converted to strings during -reading, including section names, option names and keys. + All types held in the dictionary are converted to strings during + reading, including section names, option names and keys. -Optional second argument is the `source` specifying the name of the -dictionary being read. -""" + Optional second argument is the `source` specifying the name of the + dictionary being read. + """ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `parser.read_file()` instead.") def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: - """Deprecated, use read_file instead. -""" + """Deprecated, use read_file instead.""" # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload @@ -530,18 +537,19 @@ dictionary being read. def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | MaybeNone: """Get an option value for a given section. -If `vars` is provided, it must be a dictionary. The option is looked up -in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. 
-If the key is not found and `fallback` is provided, it is used as -a fallback value. `None` can be provided as a `fallback` value. + If `vars` is provided, it must be a dictionary. The option is looked up + in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. + If the key is not found and `fallback` is provided, it is used as + a fallback value. `None` can be provided as a `fallback` value. -If interpolation is enabled and the optional argument `raw` is False, -all interpolations are expanded in the return values. + If interpolation is enabled and the optional argument `raw` is False, + all interpolations are expanded in the return values. -Arguments `raw`, `vars`, and `fallback` are keyword only. + Arguments `raw`, `vars`, and `fallback` are keyword only. + + The section DEFAULT is special. + """ -The section DEFAULT is special. -""" @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T @@ -550,58 +558,61 @@ The section DEFAULT is special. def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: """Return a list of (name, value) tuples for each option in a section. -All % interpolations are expanded in the return values, based on the -defaults passed into the constructor, unless the optional argument -`raw` is true. Additional substitutions may be provided using the -`vars` argument, which must be a dictionary whose contents overrides -any pre-existing defaults. + All % interpolations are expanded in the return values, based on the + defaults passed into the constructor, unless the optional argument + `raw` is true. Additional substitutions may be provided using the + `vars` argument, which must be a dictionary whose contents overrides + any pre-existing defaults. + + The section DEFAULT is special. + """ -The section DEFAULT is special. -""" @overload def items(self, section: _SectionName, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... def set(self, section: _SectionName, option: str, value: str | None = None) -> None: - """Set an option. -""" + """Set an option.""" + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: """Write an .ini-format representation of the configuration state. -If `space_around_delimiters` is True (the default), delimiters -between keys and values are surrounded by spaces. + If `space_around_delimiters` is True (the default), delimiters + between keys and values are surrounded by spaces. + + Please note that comments in the original configuration file are not + preserved when writing the configuration back. + """ -Please note that comments in the original configuration file are not -preserved when writing the configuration back. -""" def remove_option(self, section: _SectionName, option: str) -> bool: - """Remove an option. -""" + """Remove an option.""" + def remove_section(self, section: _SectionName) -> bool: - """Remove a file section. -""" + """Remove a file section.""" + def optionxform(self, optionstr: str) -> str: ... @property def converters(self) -> ConverterMapping: ... class ConfigParser(RawConfigParser): - """ConfigParser implementing interpolation. 
-""" + """ConfigParser implementing interpolation.""" + # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] def get(self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: """Get an option value for a given section. -If `vars` is provided, it must be a dictionary. The option is looked up -in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. -If the key is not found and `fallback` is provided, it is used as -a fallback value. `None` can be provided as a `fallback` value. + If `vars` is provided, it must be a dictionary. The option is looked up + in `vars` (if provided), `section`, and in `DEFAULTSECT` in that order. + If the key is not found and `fallback` is provided, it is used as + a fallback value. `None` can be provided as a `fallback` value. -If interpolation is enabled and the optional argument `raw` is False, -all interpolations are expanded in the return values. + If interpolation is enabled and the optional argument `raw` is False, + all interpolations are expanded in the return values. -Arguments `raw`, `vars`, and `fallback` are keyword only. + Arguments `raw`, `vars`, and `fallback` are keyword only. + + The section DEFAULT is special. + """ -The section DEFAULT is special. -""" @overload def get( self, section: _SectionName, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T @@ -610,15 +621,14 @@ The section DEFAULT is special. if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `ConfigParser` instead.") class SafeConfigParser(ConfigParser): - """ConfigParser alias for backwards compatibility purposes. -""" + """ConfigParser alias for backwards compatibility purposes.""" class SectionProxy(MutableMapping[str, str]): - """A proxy for a single section from a parser. -""" + """A proxy for a single section from a parser.""" + def __init__(self, parser: RawConfigParser, name: str) -> None: - """Creates a view on a section of the specified `name` in `parser`. -""" + """Creates a view on a section of the specified `name` in `parser`.""" + def __getitem__(self, key: str) -> str: ... def __setitem__(self, key: str, value: str) -> None: ... def __delitem__(self, key: str) -> None: ... @@ -643,10 +653,11 @@ class SectionProxy(MutableMapping[str, str]): ) -> str | None: """Get an option value. -Unless `fallback` is provided, `None` will be returned if the option -is not found. + Unless `fallback` is provided, `None` will be returned if the option + is not found. + + """ -""" @overload def get( self, @@ -678,10 +689,11 @@ is not found. class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): """Enables reuse of get*() methods between the parser and section proxies. -If a parser class implements a getter directly, the value for the given -key will be ``None``. The presence of the converter name here enables -section proxies to find and use the implementation on the parser class. -""" + If a parser class implements a getter directly, the value for the given + key will be ``None``. The presence of the converter name here enables + section proxies to find and use the implementation on the parser class. + """ + GETTERCRE: ClassVar[Pattern[Any]] def __init__(self, parser: RawConfigParser) -> None: ... def __getitem__(self, key: str) -> _ConverterCallback: ... @@ -691,24 +703,25 @@ section proxies to find and use the implementation on the parser class. def __len__(self) -> int: ... 
class Error(Exception): - """Base class for ConfigParser exceptions. -""" + """Base class for ConfigParser exceptions.""" + message: str def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): - """Raised when no section matches a requested option. -""" + """Raised when no section matches a requested option.""" + section: _SectionName def __init__(self, section: _SectionName) -> None: ... class DuplicateSectionError(Error): """Raised when a section is repeated in an input source. -Possible repetitions that raise this exception are: multiple creation -using the API or in strict parsers when a section is found more than once -in a single input file, string or dictionary. -""" + Possible repetitions that raise this exception are: multiple creation + using the API or in strict parsers when a section is found more than once + in a single input file, string or dictionary. + """ + section: _SectionName source: str | None lineno: int | None @@ -717,9 +730,10 @@ in a single input file, string or dictionary. class DuplicateOptionError(Error): """Raised by strict parsers when an option is repeated in an input source. -Current implementation raises this exception only when an option is found -more than once in a single file, string or dictionary. -""" + Current implementation raises this exception only when an option is found + more than once in a single file, string or dictionary. + """ + section: _SectionName option: str source: str | None @@ -727,40 +741,40 @@ more than once in a single file, string or dictionary. def __init__(self, section: _SectionName, option: str, source: str | None = None, lineno: int | None = None) -> None: ... class NoOptionError(Error): - """A requested option was not found. -""" + """A requested option was not found.""" + section: _SectionName option: str def __init__(self, option: str, section: _SectionName) -> None: ... class InterpolationError(Error): - """Base class for interpolation-related exceptions. -""" + """Base class for interpolation-related exceptions.""" + section: _SectionName option: str def __init__(self, option: str, section: _SectionName, msg: str) -> None: ... class InterpolationDepthError(InterpolationError): - """Raised when substitutions are nested too deeply. -""" + """Raised when substitutions are nested too deeply.""" + def __init__(self, option: str, section: _SectionName, rawval: object) -> None: ... class InterpolationMissingOptionError(InterpolationError): - """A string substitution required a setting which was not available. -""" + """A string substitution required a setting which was not available.""" + reference: str def __init__(self, option: str, section: _SectionName, rawval: object, reference: str) -> None: ... class InterpolationSyntaxError(InterpolationError): """Raised when the source text contains invalid syntax. -Current implementation raises this exception when the source text into -which substitutions are made does not conform to the required syntax. -""" + Current implementation raises this exception when the source text into + which substitutions are made does not conform to the required syntax. + """ class ParsingError(Error): - """Raised when a configuration file does not follow legal syntax. -""" + """Raised when a configuration file does not follow legal syntax.""" + source: str errors: list[tuple[int, str]] if sys.version_info >= (3, 13): @@ -784,23 +798,23 @@ class ParsingError(Error): @property @deprecated("Deprecated since Python 3.2; removed in Python 3.12. 
Use `source` instead.") def filename(self) -> str: - """Deprecated, use `source'. -""" + """Deprecated, use `source'.""" + @filename.setter @deprecated("Deprecated since Python 3.2; removed in Python 3.12. Use `source` instead.") def filename(self, value: str) -> None: ... class MissingSectionHeaderError(ParsingError): - """Raised when a key-value pair is found before any section header. -""" + """Raised when a key-value pair is found before any section header.""" + lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... if sys.version_info >= (3, 13): class MultilineContinuationError(ParsingError): - """Raised when a key without value is followed by continuation line -""" + """Raised when a key without value is followed by continuation line""" + lineno: int line: str def __init__(self, filename: str, lineno: int, line: str) -> None: ... @@ -808,13 +822,14 @@ if sys.version_info >= (3, 13): if sys.version_info >= (3, 14): class UnnamedSectionDisabledError(Error): """Raised when an attempt to use UNNAMED_SECTION is made with the -feature disabled. -""" + feature disabled. + """ + msg: Final = "Support for UNNAMED_SECTION is disabled." def __init__(self) -> None: ... class InvalidWriteError(Error): """Raised when attempting to write data that the parser would read back differently. -ex: writing a key which begins with the section header pattern would read back as a -new section -""" + ex: writing a key which begins with the section header pattern would read back as a + new section + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi index dca6225851d58..2b05511c33c9f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi @@ -1,5 +1,5 @@ -"""Utilities for with-statement contexts. See PEP 343. -""" +"""Utilities for with-statement contexts. See PEP 343.""" + import abc import sys from _typeshed import FileDescriptorOrPath, Unused @@ -49,55 +49,54 @@ _CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any, Any] | _ExitFunc) # allowlist for use as a Protocol. @runtime_checkable class AbstractContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """An abstract base class for context managers. -""" + """An abstract base class for context managers.""" + __slots__ = () def __enter__(self) -> _T_co: - """Return `self` upon entering the runtime context. -""" + """Return `self` upon entering the runtime context.""" + @abstractmethod def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> _ExitT_co: - """Raise any exception triggered within the runtime context. -""" + """Raise any exception triggered within the runtime context.""" # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @runtime_checkable class AbstractAsyncContextManager(ABC, Protocol[_T_co, _ExitT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """An abstract base class for asynchronous context managers. -""" + """An abstract base class for asynchronous context managers.""" + __slots__ = () async def __aenter__(self) -> _T_co: - """Return `self` upon entering the runtime context. 
-""" + """Return `self` upon entering the runtime context.""" + @abstractmethod async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> _ExitT_co: - """Raise any exception triggered within the runtime context. -""" + """Raise any exception triggered within the runtime context.""" class ContextDecorator: - """A base class or mixin that enables context managers to work as decorators. -""" + """A base class or mixin that enables context managers to work as decorators.""" + def _recreate_cm(self) -> Self: """Return a recreated instance of self. -Allows an otherwise one-shot context manager like -_GeneratorContextManager to support use as -a decorator via implicit recreation. + Allows an otherwise one-shot context manager like + _GeneratorContextManager to support use as + a decorator via implicit recreation. + + This is a private interface just for _GeneratorContextManager. + See issue #11647 for details. + """ -This is a private interface just for _GeneratorContextManager. -See issue #11647 for details. -""" def __call__(self, func: _F) -> _F: ... class _GeneratorContextManagerBase(Generic[_G_co]): - """Shared functionality for @contextmanager and @asynccontextmanager. -""" + """Shared functionality for @contextmanager and @asynccontextmanager.""" + # Ideally this would use ParamSpec, but that requires (*args, **kwargs), which this isn't. see #6676 def __init__(self, func: Callable[..., _G_co], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... gen: _G_co @@ -110,8 +109,8 @@ class _GeneratorContextManager( AbstractContextManager[_T_co, bool | None], ContextDecorator, ): - """Helper for @contextmanager decorator. -""" + """Helper for @contextmanager decorator.""" + def __exit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... @@ -119,40 +118,40 @@ class _GeneratorContextManager( def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: """@contextmanager decorator. -Typical usage: + Typical usage: - @contextmanager - def some_generator(): - - try: - yield - finally: - + @contextmanager + def some_generator(): + + try: + yield + finally: + -This makes this: + This makes this: - with some_generator() as : - + with some_generator() as : + -equivalent to this: + equivalent to this: - - try: - = - - finally: - -""" + + try: + = + + finally: + + """ if sys.version_info >= (3, 10): _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) class AsyncContextDecorator: - """A base class or mixin that enables async context managers to work as decorators. -""" + """A base class or mixin that enables async context managers to work as decorators.""" + def _recreate_cm(self) -> Self: - """Return a recreated instance of self. - """ + """Return a recreated instance of self.""" + def __call__(self, func: _AF) -> _AF: ... class _AsyncGeneratorContextManager( @@ -160,8 +159,8 @@ if sys.version_info >= (3, 10): AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator, ): - """Helper for @asynccontextmanager decorator. -""" + """Helper for @asynccontextmanager decorator.""" + async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... 
@@ -170,8 +169,8 @@ else: class _AsyncGeneratorContextManager( _GeneratorContextManagerBase[AsyncGenerator[_T_co, _SendT_contra]], AbstractAsyncContextManager[_T_co, bool | None] ): - """Helper for @asynccontextmanager decorator. -""" + """Helper for @asynccontextmanager decorator.""" + async def __aexit__( self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... @@ -179,30 +178,31 @@ else: def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: """@asynccontextmanager decorator. -Typical usage: + Typical usage: + + @asynccontextmanager + async def some_async_generator(): + + try: + yield + finally: + + + This makes this: + + async with some_async_generator() as : + + + equivalent to this: - @asynccontextmanager - async def some_async_generator(): try: - yield + = + finally: + """ -This makes this: - - async with some_async_generator() as : - - -equivalent to this: - - - try: - = - - finally: - -""" @type_check_only class _SupportsClose(Protocol): def close(self) -> object: ... @@ -212,20 +212,21 @@ _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) class closing(AbstractContextManager[_SupportsCloseT, None]): """Context to automatically close something at the end of a block. -Code like this: + Code like this: - with closing(.open()) as f: - + with closing(.open()) as f: + -is equivalent to this: + is equivalent to this: - f = .open() - try: - - finally: - f.close() + f = .open() + try: + + finally: + f.close() + + """ -""" def __init__(self, thing: _SupportsCloseT) -> None: ... def __exit__(self, *exc_info: Unused) -> None: ... @@ -238,35 +239,37 @@ if sys.version_info >= (3, 10): class aclosing(AbstractAsyncContextManager[_SupportsAcloseT, None]): """Async context manager for safely finalizing an asynchronously cleaned-up -resource such as an async generator, calling its ``aclose()`` method. + resource such as an async generator, calling its ``aclose()`` method. -Code like this: + Code like this: - async with aclosing(.fetch()) as agen: - + async with aclosing(.fetch()) as agen: + -is equivalent to this: + is equivalent to this: - agen = .fetch() - try: - - finally: - await agen.aclose() + agen = .fetch() + try: + + finally: + await agen.aclose() + + """ -""" def __init__(self, thing: _SupportsAcloseT) -> None: ... async def __aexit__(self, *exc_info: Unused) -> None: ... class suppress(AbstractContextManager[None, bool]): """Context manager to suppress specified exceptions -After the exception is suppressed, execution proceeds with the next -statement following the with statement. + After the exception is suppressed, execution proceeds with the next + statement following the with statement. + + with suppress(FileNotFoundError): + os.remove(somefile) + # Execution still resumes here if the file was already removed + """ - with suppress(FileNotFoundError): - os.remove(somefile) - # Execution still resumes here if the file was already removed -""" def __init__(self, *exceptions: type[BaseException]) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None @@ -281,59 +284,62 @@ class _RedirectStream(AbstractContextManager[_T_io, None]): class redirect_stdout(_RedirectStream[_T_io]): """Context manager for temporarily redirecting stdout to another file. 
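A brief sketch of suppress() and closing() as described above (not part of the stub):

    import os
    from contextlib import closing, suppress

    with suppress(FileNotFoundError):
        os.remove("somefile.tmp")       # a missing file is simply ignored

    class Resource:
        def close(self) -> None:        # anything with .close() works with closing()
            print("closed")

    with closing(Resource()) as res:
        print("using", res)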
-# How to send help() to stderr -with redirect_stdout(sys.stderr): - help(dir) + # How to send help() to stderr + with redirect_stdout(sys.stderr): + help(dir) + + # How to write help() to a file + with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) + """ -# How to write help() to a file -with open('help.txt', 'w') as f: - with redirect_stdout(f): - help(pow) -""" class redirect_stderr(_RedirectStream[_T_io]): - """Context manager for temporarily redirecting stderr to another file. -""" + """Context manager for temporarily redirecting stderr to another file.""" class _BaseExitStack(Generic[_ExitT_co]): - """A base class for ExitStack and AsyncExitStack. -""" + """A base class for ExitStack and AsyncExitStack.""" + def enter_context(self, cm: AbstractContextManager[_T, _ExitT_co]) -> _T: """Enters the supplied context manager. -If successful, also pushes its __exit__ method as a callback and -returns the result of the __enter__ method. -""" + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + def push(self, exit: _CM_EF) -> _CM_EF: """Registers a callback with the standard __exit__ method signature. -Can suppress exceptions the same way __exit__ method can. -Also accepts any object with an __exit__ method (registering a call -to the method instead of the object itself). -""" + Can suppress exceptions the same way __exit__ method can. + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself). + """ + def callback(self, callback: Callable[_P, _T], /, *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: """Registers an arbitrary callback and arguments. -Cannot suppress exceptions. -""" + Cannot suppress exceptions. + """ + def pop_all(self) -> Self: - """Preserve the context stack by transferring it to a new instance. -""" + """Preserve the context stack by transferring it to a new instance.""" # In reality this is a subclass of `AbstractContextManager`; # see #7961 for why we don't do that in the stub class ExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): """Context manager for dynamic management of a stack of exit callbacks. -For example: - with ExitStack() as stack: - files = [stack.enter_context(open(fname)) for fname in filenames] - # All opened files will automatically be closed at the end of - # the with statement, even if attempts to open files later - # in the list raise an exception. -""" + For example: + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception. + """ + def close(self) -> None: - """Immediately unwind the context stack. -""" + """Immediately unwind the context stack.""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -348,40 +354,44 @@ _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any, Any] | _Exit # see #7961 for why we don't do that in the stub class AsyncExitStack(_BaseExitStack[_ExitT_co], metaclass=abc.ABCMeta): """Async context manager for dynamic management of a stack of exit -callbacks. 
- -For example: - async with AsyncExitStack() as stack: - connections = [await stack.enter_async_context(get_connection()) - for i in range(5)] - # All opened connections will automatically be released at the - # end of the async with statement, even if attempts to open a - # connection later in the list raise an exception. -""" + callbacks. + + For example: + async with AsyncExitStack() as stack: + connections = [await stack.enter_async_context(get_connection()) + for i in range(5)] + # All opened connections will automatically be released at the + # end of the async with statement, even if attempts to open a + # connection later in the list raise an exception. + """ + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T, _ExitT_co]) -> _T: """Enters the supplied async context manager. -If successful, also pushes its __aexit__ method as a callback and -returns the result of the __aenter__ method. -""" + If successful, also pushes its __aexit__ method as a callback and + returns the result of the __aenter__ method. + """ + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: """Registers a coroutine function with the standard __aexit__ method -signature. + signature. + + Can suppress exceptions the same way __aexit__ method can. + Also accepts any object with an __aexit__ method (registering a call + to the method instead of the object itself). + """ -Can suppress exceptions the same way __aexit__ method can. -Also accepts any object with an __aexit__ method (registering a call -to the method instead of the object itself). -""" def push_async_callback( self, callback: Callable[_P, Awaitable[_T]], /, *args: _P.args, **kwds: _P.kwargs ) -> Callable[_P, Awaitable[_T]]: """Registers an arbitrary coroutine function and arguments. -Cannot suppress exceptions. -""" + Cannot suppress exceptions. + """ + async def aclose(self) -> None: - """Immediately unwind the context stack. -""" + """Immediately unwind the context stack.""" + async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -391,13 +401,14 @@ if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T, None], AbstractAsyncContextManager[_T, None]): """Context manager that does no additional processing. -Used as a stand-in for a normal context manager, when a particular -block of code is only sometimes used with a normal context manager: + Used as a stand-in for a normal context manager, when a particular + block of code is only sometimes used with a normal context manager: + + cm = optional_cm if condition else nullcontext() + with cm: + # Perform operation, using optional_cm if condition is True + """ -cm = optional_cm if condition else nullcontext() -with cm: - # Perform operation, using optional_cm if condition is True -""" enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @@ -412,13 +423,14 @@ else: class nullcontext(AbstractContextManager[_T, None]): """Context manager that does no additional processing. 
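A sketch of ExitStack managing a variable number of context managers, as in its docstring above (not part of the stub); registered callbacks run in LIFO order on exit:

    from contextlib import ExitStack

    def write_all(names: list[str]) -> None:
        # Every file is closed when the with-block exits, even on error.
        with ExitStack() as stack:
            files = [stack.enter_context(open(n, "w")) for n in names]
            stack.callback(print, "cleanup ran")    # extra exit callback
            for f in files:
                f.write("ok\n")

    write_all(["a.txt", "b.txt"])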
- Used as a stand-in for a normal context manager, when a particular - block of code is only sometimes used with a normal context manager: + Used as a stand-in for a normal context manager, when a particular + block of code is only sometimes used with a normal context manager: + + cm = optional_cm if condition else nullcontext() + with cm: + # Perform operation, using optional_cm if condition is True + """ - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - """ enter_result: _T @overload def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @@ -431,8 +443,8 @@ if sys.version_info >= (3, 11): _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None, None], Generic[_T_fd_or_any_path]): - """Non thread-safe context manager to change the current working directory. -""" + """Non thread-safe context manager to change the current working directory.""" + path: _T_fd_or_any_path def __init__(self, path: _T_fd_or_any_path) -> None: ... def __enter__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi index 07f9b54735f97..2f464f1e1cf33 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copy.pyi @@ -48,6 +48,7 @@ to control pickling: they can define methods called __getinitargs__(), __getstate__() and __setstate__(). See the documentation for module "pickle" for information on these methods. """ + import sys from typing import Any, Protocol, TypeVar, type_check_only @@ -68,13 +69,14 @@ PyStringMap: Any def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: """Deep copy operation on arbitrary Python objects. -See the module's __doc__ string for more info. -""" + See the module's __doc__ string for more info. + """ + def copy(x: _T) -> _T: """Shallow copy operation on arbitrary Python objects. -See the module's __doc__ string for more info. -""" + See the module's __doc__ string for more info. + """ if sys.version_info >= (3, 13): __all__ += ["replace"] @@ -82,9 +84,9 @@ if sys.version_info >= (3, 13): def replace(obj: _SupportsReplace[_RT_co], /, **changes: Any) -> _RT_co: """Return a new object replacing specified fields with new values. -This is especially useful for immutable objects, like named tuples or -frozen dataclasses. -""" + This is especially useful for immutable objects, like named tuples or + frozen dataclasses. + """ class Error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi index 888e90710ea73..003a7a67edbe6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/copyreg.pyi @@ -3,6 +3,7 @@ This is only useful to add pickle support for extension types defined in C, not for instances of user-defined classes. """ + from collections.abc import Callable, Hashable from typing import Any, SupportsInt, TypeVar from typing_extensions import TypeAlias @@ -19,11 +20,11 @@ def pickle( ) -> None: ... def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: - """Register an extension code. -""" + """Register an extension code.""" + def remove_extension(module: Hashable, name: Hashable, code: int) -> None: - """Unregister an extension code. 
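A hedged sketch of copy.replace() on a frozen dataclass (not part of the stub; requires Python 3.13+, where dataclasses gained __replace__), alongside the copy/deepcopy split shown above:

    import copy
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class Point:
        x: int
        y: int

    p = Point(1, 2)
    print(copy.replace(p, y=5))     # Point(x=1, y=5); the original is untouched
    print(copy.deepcopy([p, [3]]))  # containers are copied recursively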
For testing only. -""" + """Unregister an extension code. For testing only.""" + def clear_extension_cache() -> None: ... _DispatchTableType: TypeAlias = dict[type, Callable[[Any], str | _Reduce[Any]]] # imported by multiprocessing.reduction diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi index 0dde139555d77..df7d315f06982 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/crypt.pyi @@ -1,5 +1,5 @@ -"""Wrapper to the POSIX crypt library call and associated functionality. -""" +"""Wrapper to the POSIX crypt library call and associated functionality.""" + import sys from typing import Final, NamedTuple, type_check_only from typing_extensions import disjoint_base @@ -15,14 +15,15 @@ if sys.platform != "win32": if sys.version_info >= (3, 12): class _Method(_MethodBase): """Class representing a salt method per the Modular Crypt Format or the - legacy 2-character crypt method. -""" + legacy 2-character crypt method. + """ + else: @disjoint_base class _Method(_MethodBase): """Class representing a salt method per the Modular Crypt Format or the - legacy 2-character crypt method. -""" + legacy 2-character crypt method. + """ METHOD_CRYPT: Final[_Method] METHOD_MD5: Final[_Method] @@ -33,16 +34,17 @@ if sys.platform != "win32": def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: """Generate a salt for the specified method. - If not specified, the strongest available method will be used. + If not specified, the strongest available method will be used. + + """ - """ def crypt(word: str, salt: str | _Method | None = None) -> str: """Return a string representing the one-way hash of a password, with a salt - prepended. + prepended. - If ``salt`` is not specified or is ``None``, the strongest - available method will be selected and a salt generated. Otherwise, - ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as - returned by ``crypt.mksalt()``. + If ``salt`` is not specified or is ``None``, the strongest + available method will be selected and a salt generated. Otherwise, + ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as + returned by ``crypt.mksalt()``. - """ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi index 198837aa56161..2f4cd6b12417d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/csv.pyi @@ -61,6 +61,7 @@ SETTINGS: and when writing, each quote character embedded in the data is written as two quotes """ + import sys from _csv import ( QUOTE_ALL as QUOTE_ALL, @@ -124,11 +125,12 @@ _T = TypeVar("_T") class Dialect: """Describe a CSV dialect. -This must be subclassed (see csv.excel). Valid attributes are: -delimiter, quotechar, escapechar, doublequote, skipinitialspace, -lineterminator, quoting. + This must be subclassed (see csv.excel). Valid attributes are: + delimiter, quotechar, escapechar, doublequote, skipinitialspace, + lineterminator, quoting. + + """ -""" delimiter: str quotechar: str | None escapechar: str | None @@ -140,14 +142,13 @@ lineterminator, quoting. def __init__(self) -> None: ... class excel(Dialect): - """Describe the usual properties of Excel-generated CSV files. -""" + """Describe the usual properties of Excel-generated CSV files.""" + class excel_tab(excel): - """Describe the usual properties of Excel-generated TAB-delimited files. 
-""" + """Describe the usual properties of Excel-generated TAB-delimited files.""" + class unix_dialect(Dialect): - """Describe the usual properties of Unix-generated CSV files. -""" + """Describe the usual properties of Unix-generated CSV files.""" class DictReader(Generic[_T]): fieldnames: Sequence[_T] | None @@ -198,8 +199,8 @@ class DictReader(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class DictWriter(Generic[_T]): fieldnames: Collection[_T] @@ -230,17 +231,19 @@ class DictWriter(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class Sniffer: """ -"Sniffs" the format of a CSV file (i.e. delimiter, quotechar) -Returns a Dialect object. -""" + "Sniffs" the format of a CSV file (i.e. delimiter, quotechar) + Returns a Dialect object. + """ + preferred: list[str] def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: """ -Returns a dialect (or None) corresponding to the sample -""" + Returns a dialect (or None) corresponding to the sample + """ + def has_header(self, sample: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi index db0c4141a5a31..03c62e5dd7eb9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/__init__.pyi @@ -1,5 +1,5 @@ -"""create and manipulate C data types in Python -""" +"""create and manipulate C data types in Python""" + import sys from _ctypes import ( RTLD_GLOBAL as RTLD_GLOBAL, @@ -50,9 +50,10 @@ if sys.version_info >= (3, 14): def POINTER(cls: str) -> type[Any]: """Create and return a new ctypes pointer type. -Pointer types are cached and reused internally, -so calling this function repeatedly is cheap. -""" + Pointer types are cached and reused internally, + so calling this function repeatedly is cheap. + """ + @overload def POINTER(cls: None) -> type[c_void_p]: ... @overload @@ -60,10 +61,10 @@ so calling this function repeatedly is cheap. def pointer(obj: _CT) -> _Pointer[_CT]: """Create a new pointer instance, pointing to 'obj'. -The returned object is of the type POINTER(type(obj)). Note that if you -just want to pass a pointer to an object to a foreign function call, you -should use byref(obj) which is much faster. -""" + The returned object is of the type POINTER(type(obj)). Note that if you + just want to pass a pointer to an object to a foreign function call, you + should use byref(obj) which is much faster. + """ else: from _ctypes import POINTER as POINTER, pointer as pointer @@ -91,18 +92,19 @@ else: class CDLL: """An instance of this class represents a loaded dll/shared -library, exporting functions using the standard C calling -convention (named 'cdecl' on Windows). + library, exporting functions using the standard C calling + convention (named 'cdecl' on Windows). -The exported functions can be accessed as attributes, or by -indexing with the function name. Examples: + The exported functions can be accessed as attributes, or by + indexing with the function name. 
Examples: -.qsort -> callable object -['qsort'] -> callable object + .qsort -> callable object + ['qsort'] -> callable object + + Calling the functions releases the Python GIL during the call and + reacquires it afterwards. + """ -Calling the functions releases the Python GIL during the call and -reacquires it afterwards. -""" _func_flags_: ClassVar[int] _func_restype_: ClassVar[type[_CDataType]] _name: str @@ -123,20 +125,21 @@ reacquires it afterwards. if sys.platform == "win32": class OleDLL(CDLL): """This class represents a dll exporting functions using the -Windows stdcall calling convention, and returning HRESULT. -HRESULT error values are automatically raised as OSError -exceptions. -""" + Windows stdcall calling convention, and returning HRESULT. + HRESULT error values are automatically raised as OSError + exceptions. + """ + class WinDLL(CDLL): """This class represents a dll exporting functions using the -Windows stdcall calling convention. -""" + Windows stdcall calling convention. + """ class PyDLL(CDLL): """This class represents the Python library itself. It allows -accessing Python API functions. The GIL is not released, and -Python exceptions are handled correctly. -""" + accessing Python API functions. The GIL is not released, and + Python exceptions are handled correctly. + """ class LibraryLoader(Generic[_DLLT]): def __init__(self, dlltype: type[_DLLT]) -> None: ... @@ -146,8 +149,8 @@ class LibraryLoader(Generic[_DLLT]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ cdll: LibraryLoader[CDLL] if sys.platform == "win32": @@ -177,20 +180,20 @@ def CFUNCTYPE( use_last_error: bool = False, ) -> type[_CFunctionType]: """CFUNCTYPE(restype, *argtypes, - use_errno=False, use_last_error=False) -> function prototype. + use_errno=False, use_last_error=False) -> function prototype. -restype: the result type -argtypes: a sequence specifying the argument types + restype: the result type + argtypes: a sequence specifying the argument types -The function prototype can be called in different ways to create a -callable object: + The function prototype can be called in different ways to create a + callable object: -prototype(integer address) -> foreign function -prototype(callable) -> create and return a C callable function from callable -prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method -prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal -prototype((function name, dll object)[, paramflags]) -> foreign function exported by name -""" + prototype(integer address) -> foreign function + prototype(callable) -> create and return a C callable function from callable + prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method + prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal + prototype((function name, dll object)[, paramflags]) -> foreign function exported by name + """ if sys.platform == "win32": def WINFUNCTYPE( @@ -216,17 +219,17 @@ _CastT = TypeVar("_CastT", bound=_CanCastTo) def cast(obj: _CData | _CDataType | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... 
def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: """create_string_buffer(aBytes) -> character array -create_string_buffer(anInteger) -> character array -create_string_buffer(aBytes, anInteger) -> character array -""" + create_string_buffer(anInteger) -> character array + create_string_buffer(aBytes, anInteger) -> character array + """ c_buffer = create_string_buffer def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: """create_unicode_buffer(aString) -> character array -create_unicode_buffer(anInteger) -> character array -create_unicode_buffer(aString, anInteger) -> character array -""" + create_unicode_buffer(anInteger) -> character array + create_unicode_buffer(aString, anInteger) -> character array + """ if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -268,8 +271,8 @@ memset: _MemsetFunctionType def string_at(ptr: _CVoidConstPLike, size: int = -1) -> bytes: """string_at(ptr[, size]) -> string -Return the byte string at void *ptr. -""" + Return the byte string at void *ptr. + """ if sys.platform == "win32": def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... @@ -277,15 +280,15 @@ if sys.platform == "win32": def wstring_at(ptr: _CVoidConstPLike, size: int = -1) -> str: """wstring_at(ptr[, size]) -> string -Return the wide-character string at void *ptr. -""" + Return the wide-character string at void *ptr. + """ if sys.version_info >= (3, 14): def memoryview_at(ptr: _CVoidConstPLike, size: int, readonly: bool = False) -> memoryview: """memoryview_at(ptr, size[, readonly]) -> memoryview -Return a memoryview representing the memory at void *ptr. -""" + Return a memoryview representing the memory at void *ptr. + """ class py_object(_CanCastTo, _SimpleCData[_T]): _type_: ClassVar[Literal["O"]] @@ -293,8 +296,8 @@ class py_object(_CanCastTo, _SimpleCData[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class c_bool(_SimpleCData[bool]): _type_: ClassVar[Literal["?"]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi index 6d9542f1397fd..007fd7d3296c1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/_endian.pyi @@ -4,21 +4,19 @@ from ctypes import Structure, Union # At runtime, the native endianness is an alias for Structure, # while the other is a subclass with a metaclass added in. 
class BigEndianStructure(Structure): - """Structure with big endian byte order -""" + """Structure with big endian byte order""" + __slots__ = () class LittleEndianStructure(Structure): - """Structure base class -""" + """Structure base class""" # Same thing for these: one is an alias of Union at runtime if sys.version_info >= (3, 11): class BigEndianUnion(Union): - """Union with big endian byte order -""" + """Union with big endian byte order""" + __slots__ = () class LittleEndianUnion(Union): - """Union base class -""" + """Union base class""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi index 7f7d32926292f..0d240b1f70c6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/__init__.pyi @@ -5,6 +5,7 @@ See the relevant header files in /usr/include/mach-o And also Apple's documentation. """ + from typing import Final __version__: Final[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi index 6de817768c9af..37be9bd2414bd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dyld.pyi @@ -1,6 +1,7 @@ """ dyld emulation """ + from collections.abc import Mapping from ctypes.macholib.dylib import dylib_info as dylib_info from ctypes.macholib.framework import framework_info as framework_info @@ -9,14 +10,15 @@ __all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"] def dyld_find(name: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: """ -Find a library or framework using dyld semantics -""" + Find a library or framework using dyld semantics + """ + def framework_find(fn: str, executable_path: str | None = None, env: Mapping[str, str] | None = None) -> str: """ -Find a framework using dyld semantics in a very loose manner. + Find a framework using dyld semantics in a very loose manner. 
-Will take input such as: - Python - Python.framework - Python.framework/Versions/Current -""" + Will take input such as: + Python + Python.framework + Python.framework/Versions/Current + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi index 04df3c0802443..58ece6cc99f4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/dylib.pyi @@ -1,6 +1,7 @@ """ Generic dylib path manipulation """ + from typing import TypedDict, type_check_only __all__ = ["dylib_info"] @@ -16,21 +17,21 @@ class _DylibInfo(TypedDict): def dylib_info(filename: str) -> _DylibInfo | None: """ -A dylib name can take one of the following four forms: - Location/Name.SomeVersion_Suffix.dylib - Location/Name.SomeVersion.dylib - Location/Name_Suffix.dylib - Location/Name.dylib + A dylib name can take one of the following four forms: + Location/Name.SomeVersion_Suffix.dylib + Location/Name.SomeVersion.dylib + Location/Name_Suffix.dylib + Location/Name.dylib -returns None if not found or a mapping equivalent to: - dict( - location='Location', - name='Name.SomeVersion_Suffix.dylib', - shortname='Name', - version='SomeVersion', - suffix='Suffix', - ) + returns None if not found or a mapping equivalent to: + dict( + location='Location', + name='Name.SomeVersion_Suffix.dylib', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) -Note that SomeVersion and Suffix are optional and may be None -if not present. -""" + Note that SomeVersion and Suffix are optional and may be None + if not present. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi index 27840d212f2fc..f12f2b3fd152f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/macholib/framework.pyi @@ -1,6 +1,7 @@ """ Generic framework path manipulation """ + from typing import TypedDict, type_check_only __all__ = ["framework_info"] @@ -16,21 +17,21 @@ class _FrameworkInfo(TypedDict): def framework_info(filename: str) -> _FrameworkInfo | None: """ -A framework name can take one of the following four forms: - Location/Name.framework/Versions/SomeVersion/Name_Suffix - Location/Name.framework/Versions/SomeVersion/Name - Location/Name.framework/Name_Suffix - Location/Name.framework/Name + A framework name can take one of the following four forms: + Location/Name.framework/Versions/SomeVersion/Name_Suffix + Location/Name.framework/Versions/SomeVersion/Name + Location/Name.framework/Name_Suffix + Location/Name.framework/Name -returns None if not found, or a mapping equivalent to: - dict( - location='Location', - name='Name.framework/Versions/SomeVersion/Name_Suffix', - shortname='Name', - version='SomeVersion', - suffix='Suffix', - ) + returns None if not found, or a mapping equivalent to: + dict( + location='Location', + name='Name.framework/Versions/SomeVersion/Name_Suffix', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) -Note that SomeVersion and Suffix are optional and may be None -if not present -""" + Note that SomeVersion and Suffix are optional and may be None + if not present + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi index b2a02f5df4bdc..dc1a251365079 
100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ctypes/util.pyi @@ -4,12 +4,10 @@ def find_library(name: str) -> str | None: ... if sys.platform == "win32": def find_msvcrt() -> str | None: - """Return the name of the VC runtime dll -""" + """Return the name of the VC runtime dll""" if sys.version_info >= (3, 14): def dllist() -> list[str]: - """Return a list of loaded shared libraries in the current process. -""" + """Return a list of loaded shared libraries in the current process.""" def test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi index cf0b1e31a0756..a6fcd958e492e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/__init__.pyi @@ -9,6 +9,7 @@ the package, and perhaps a particular module inside it. ... """ + import sys from _curses import * from _curses import window as window @@ -35,11 +36,11 @@ COLOR_PAIRS: Final[int] def wrapper(func: Callable[Concatenate[window, _P], _T], /, *arg: _P.args, **kwds: _P.kwargs) -> _T: """Wrapper function that initializes curses and calls another function, -restoring normal keyboard/screen behavior on error. -The callable object 'func' is then passed the main window 'stdscr' -as its first argument, followed by any other arguments passed to -wrapper(). -""" + restoring normal keyboard/screen behavior on error. + The callable object 'func' is then passed the main window 'stdscr' + as its first argument, followed by any other arguments passed to + wrapper(). + """ # At runtime this class is unexposed and calls itself curses.ncurses_version. # That name would conflict with the actual curses.ncurses_version, which is diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi index 04a9b0d89b0ab..823f98c139b62 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/ascii.pyi @@ -1,5 +1,5 @@ -"""Constants and membership tests for ASCII characters -""" +"""Constants and membership tests for ASCII characters""" + from typing import Final, TypeVar _CharT = TypeVar("_CharT", str, int) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi index 01e73ed12f3c8..067afef730a5b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/panel.pyi @@ -2,4 +2,5 @@ Module for using panels with curses. """ + from _curses_panel import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi index 238044287398d..129178bdefcc2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/curses/textpad.pyi @@ -1,45 +1,45 @@ -"""Simple textbox editing widget with Emacs-like keybindings. -""" +"""Simple textbox editing widget with Emacs-like keybindings.""" + from _curses import window from collections.abc import Callable def rectangle(win: window, uly: int, ulx: int, lry: int, lrx: int) -> None: """Draw a rectangle with corners at the provided upper-left -and lower-right coordinates. -""" + and lower-right coordinates. 
+ """ class Textbox: """Editing widget using the interior of a window object. - Supports the following Emacs-like key bindings: - -Ctrl-A Go to left edge of window. -Ctrl-B Cursor left, wrapping to previous line if appropriate. -Ctrl-D Delete character under cursor. -Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on). -Ctrl-F Cursor right, wrapping to next line when appropriate. -Ctrl-G Terminate, returning the window contents. -Ctrl-H Delete character backward. -Ctrl-J Terminate if the window is 1 line, otherwise insert newline. -Ctrl-K If line is blank, delete it, otherwise clear to end of line. -Ctrl-L Refresh screen. -Ctrl-N Cursor down; move down one line. -Ctrl-O Insert a blank line at cursor location. -Ctrl-P Cursor up; move up one line. - -Move operations do nothing if the cursor is at an edge where the movement -is not possible. The following synonyms are supported where possible: - -KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N -KEY_BACKSPACE = Ctrl-h -""" + Supports the following Emacs-like key bindings: + + Ctrl-A Go to left edge of window. + Ctrl-B Cursor left, wrapping to previous line if appropriate. + Ctrl-D Delete character under cursor. + Ctrl-E Go to right edge (stripspaces off) or end of line (stripspaces on). + Ctrl-F Cursor right, wrapping to next line when appropriate. + Ctrl-G Terminate, returning the window contents. + Ctrl-H Delete character backward. + Ctrl-J Terminate if the window is 1 line, otherwise insert newline. + Ctrl-K If line is blank, delete it, otherwise clear to end of line. + Ctrl-L Refresh screen. + Ctrl-N Cursor down; move down one line. + Ctrl-O Insert a blank line at cursor location. + Ctrl-P Cursor up; move up one line. + + Move operations do nothing if the cursor is at an edge where the movement + is not possible. The following synonyms are supported where possible: + + KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N + KEY_BACKSPACE = Ctrl-h + """ + stripspaces: bool def __init__(self, win: window, insert_mode: bool = False) -> None: ... def edit(self, validate: Callable[[int], int] | None = None) -> str: - """Edit in the widget window and collect the results. -""" + """Edit in the widget window and collect the results.""" + def do_command(self, ch: str | int) -> None: - """Process a single editing command. -""" + """Process a single editing command.""" + def gather(self) -> str: - """Collect and return the contents of the window. -""" + """Collect and return the contents of the window.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi index d090d300d9c9b..6a134c3df68ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dataclasses.pyi @@ -66,44 +66,46 @@ if sys.version_info >= (3, 10): @overload def asdict(obj: DataclassInstance) -> dict[str, Any]: """Return the fields of a dataclass instance as a new dictionary mapping -field names to field values. + field names to field values. -Example usage:: + Example usage:: - @dataclass - class C: - x: int - y: int + @dataclass + class C: + x: int + y: int - c = C(1, 2) - assert asdict(c) == {'x': 1, 'y': 2} + c = C(1, 2) + assert asdict(c) == {'x': 1, 'y': 2} + + If given, 'dict_factory' will be used instead of built-in dict. + The function applies recursively to field values that are + dataclass instances. 
This will also look into built-in containers: + tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. + """ -If given, 'dict_factory' will be used instead of built-in dict. -The function applies recursively to field values that are -dataclass instances. This will also look into built-in containers: -tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. -""" @overload def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload def astuple(obj: DataclassInstance) -> tuple[Any, ...]: """Return the fields of a dataclass instance as a new tuple of field values. -Example usage:: + Example usage:: - @dataclass - class C: - x: int - y: int + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert astuple(c) == (1, 2) - c = C(1, 2) - assert astuple(c) == (1, 2) + If given, 'tuple_factory' will be used instead of built-in tuple. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. + """ -If given, 'tuple_factory' will be used instead of built-in tuple. -The function applies recursively to field values that are -dataclass instances. This will also look into built-in containers: -tuples, lists, and dicts. Other objects are copied with 'copy.deepcopy()'. -""" @overload def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... @@ -126,17 +128,18 @@ if sys.version_info >= (3, 11): ) -> type[_T]: """Add dunder methods based on the fields defined in the class. -Examines PEP 526 __annotations__ to determine fields. - -If init is true, an __init__() method is added to the class. If repr -is true, a __repr__() method is added. If order is true, rich -comparison dunder methods are added. If unsafe_hash is true, a -__hash__() method is added. If frozen is true, fields may not be -assigned to after instance creation. If match_args is true, the -__match_args__ tuple is added. If kw_only is true, then by default -all fields are keyword-only. If slots is true, a new class with a -__slots__ attribute is returned. -""" + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If repr + is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method is added. If frozen is true, fields may not be + assigned to after instance creation. If match_args is true, the + __match_args__ tuple is added. If kw_only is true, then by default + all fields are keyword-only. If slots is true, a new class with a + __slots__ attribute is returned. + """ + @overload def dataclass( cls: None = None, @@ -171,19 +174,20 @@ elif sys.version_info >= (3, 10): slots: bool = False, ) -> type[_T]: """Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. - - Examines PEP 526 __annotations__ to determine fields. - - If init is true, an __init__() method is added to the class. If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. If match_args is true, - the __match_args__ tuple is added. If kw_only is true, then by - default all fields are keyword-only. 
If slots is true, an - __slots__ attribute is added. - """ + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. If match_args is true, + the __match_args__ tuple is added. If kw_only is true, then by + default all fields are keyword-only. If slots is true, an + __slots__ attribute is added. + """ + @overload def dataclass( cls: None = None, @@ -214,16 +218,17 @@ else: frozen: bool = False, ) -> type[_T]: """Returns the same class as was passed in, with dunder methods - added based on the fields defined in the class. + added based on the fields defined in the class. - Examines PEP 526 __annotations__ to determine fields. + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. + """ - If init is true, an __init__() method is added to the class. If - repr is true, a __repr__() method is added. If order is true, rich - comparison dunder methods are added. If unsafe_hash is true, a - __hash__() method function is added. If frozen is true, fields may - not be assigned to after instance creation. - """ @overload def dataclass( cls: None = None, @@ -331,8 +336,8 @@ class Field(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ # NOTE: Actual return type is 'Field[_T]', but we want to help type checkers # to understand the magic that happens at runtime. @@ -352,19 +357,20 @@ if sys.version_info >= (3, 14): ) -> _T: """Return an object to identify dataclass fields. -default is the default value of the field. default_factory is a -0-argument function called to initialize a field's value. If init -is true, the field will be a parameter to the class's __init__() -function. If repr is true, the field will be included in the -object's repr(). If hash is true, the field will be included in the -object's hash(). If compare is true, the field will be used in -comparison functions. metadata, if specified, must be a mapping -which is stored but not otherwise examined by dataclass. If kw_only -is true, the field will become a keyword-only parameter to -__init__(). doc is an optional docstring for this field. - -It is an error to specify both default and default_factory. -""" + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is true, the field will be a parameter to the class's __init__() + function. If repr is true, the field will be included in the + object's repr(). If hash is true, the field will be included in the + object's hash(). If compare is true, the field will be used in + comparison functions. metadata, if specified, must be a mapping + which is stored but not otherwise examined by dataclass. 
If kw_only + is true, the field will become a keyword-only parameter to + __init__(). doc is an optional docstring for this field. + + It is an error to specify both default and default_factory. + """ + @overload def field( *, @@ -407,19 +413,20 @@ elif sys.version_info >= (3, 10): ) -> _T: """Return an object to identify dataclass fields. -default is the default value of the field. default_factory is a -0-argument function called to initialize a field's value. If init -is true, the field will be a parameter to the class's __init__() -function. If repr is true, the field will be included in the -object's repr(). If hash is true, the field will be included in the -object's hash(). If compare is true, the field will be used in -comparison functions. metadata, if specified, must be a mapping -which is stored but not otherwise examined by dataclass. If kw_only -is true, the field will become a keyword-only parameter to -__init__(). - -It is an error to specify both default and default_factory. -""" + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is true, the field will be a parameter to the class's __init__() + function. If repr is true, the field will be included in the + object's repr(). If hash is true, the field will be included in the + object's hash(). If compare is true, the field will be used in + comparison functions. metadata, if specified, must be a mapping + which is stored but not otherwise examined by dataclass. If kw_only + is true, the field will become a keyword-only parameter to + __init__(). + + It is an error to specify both default and default_factory. + """ + @overload def field( *, @@ -459,17 +466,18 @@ else: ) -> _T: """Return an object to identify dataclass fields. - default is the default value of the field. default_factory is a - 0-argument function called to initialize a field's value. If init - is True, the field will be a parameter to the class's __init__() - function. If repr is True, the field will be included in the - object's repr(). If hash is True, the field will be included in - the object's hash(). If compare is True, the field will be used - in comparison functions. metadata, if specified, must be a - mapping which is stored but not otherwise examined by dataclass. + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is True, the field will be a parameter to the class's __init__() + function. If repr is True, the field will be included in the + object's repr(). If hash is True, the field will be included in + the object's hash(). If compare is True, the field will be used + in comparison functions. metadata, if specified, must be a + mapping which is stored but not otherwise examined by dataclass. + + It is an error to specify both default and default_factory. + """ - It is an error to specify both default and default_factory. - """ @overload def field( *, @@ -496,16 +504,17 @@ else: def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: """Return a tuple describing the fields of this dataclass. -Accepts a dataclass or an instance of one. Tuple elements are of -type Field. -""" + Accepts a dataclass or an instance of one. Tuple elements are of + type Field. + """ # HACK: `obj: Never` typing matches if object argument is using `Any` type. 
@overload def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues] """Returns True if obj is a dataclass or an instance of a -dataclass. -""" + dataclass. + """ + @overload def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ... @overload @@ -544,29 +553,29 @@ if sys.version_info >= (3, 14): ) -> type: """Return a new dynamically created dataclass. -The dataclass name will be 'cls_name'. 'fields' is an iterable -of either (name), (name, type) or (name, type, Field) objects. If type is -omitted, use the string 'typing.Any'. Field objects are created by -the equivalent of calling 'field(name, type [, Field-info])'.:: + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'.:: - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) -is equivalent to:: + is equivalent to:: - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) -For the bases and namespace parameters, see the builtin type() function. + For the bases and namespace parameters, see the builtin type() function. -The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, -slots, and weakref_slot are passed to dataclass(). + The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, + slots, and weakref_slot are passed to dataclass(). -If module parameter is defined, the '__module__' attribute of the dataclass is -set to that value. -""" + If module parameter is defined, the '__module__' attribute of the dataclass is + set to that value. + """ elif sys.version_info >= (3, 12): def make_dataclass( @@ -589,29 +598,29 @@ elif sys.version_info >= (3, 12): ) -> type: """Return a new dynamically created dataclass. -The dataclass name will be 'cls_name'. 'fields' is an iterable -of either (name), (name, type) or (name, type, Field) objects. If type is -omitted, use the string 'typing.Any'. Field objects are created by -the equivalent of calling 'field(name, type [, Field-info])'.:: + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'.:: - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) -is equivalent to:: + is equivalent to:: - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) -For the bases and namespace parameters, see the builtin type() function. + For the bases and namespace parameters, see the builtin type() function. -The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, -slots, and weakref_slot are passed to dataclass(). 
+ The parameters init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, + slots, and weakref_slot are passed to dataclass(). -If module parameter is defined, the '__module__' attribute of the dataclass is -set to that value. -""" + If module parameter is defined, the '__module__' attribute of the dataclass is + set to that value. + """ elif sys.version_info >= (3, 11): def make_dataclass( @@ -633,26 +642,26 @@ elif sys.version_info >= (3, 11): ) -> type: """Return a new dynamically created dataclass. - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'.:: + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'.:: - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - is equivalent to:: + is equivalent to:: - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) - For the bases and namespace parameters, see the builtin type() function. + For the bases and namespace parameters, see the builtin type() function. - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ elif sys.version_info >= (3, 10): def make_dataclass( @@ -673,26 +682,26 @@ elif sys.version_info >= (3, 10): ) -> type: """Return a new dynamically created dataclass. - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - is equivalent to: + is equivalent to: - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) - For the bases and namespace parameters, see the builtin type() function. + For the bases and namespace parameters, see the builtin type() function. - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ else: def make_dataclass( @@ -710,38 +719,38 @@ else: ) -> type: """Return a new dynamically created dataclass. - The dataclass name will be 'cls_name'. 'fields' is an iterable - of either (name), (name, type) or (name, type, Field) objects. 
If type is - omitted, use the string 'typing.Any'. Field objects are created by - the equivalent of calling 'field(name, type [, Field-info])'. + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. - C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) - is equivalent to: + is equivalent to: - @dataclass - class C(Base): - x: 'typing.Any' - y: int - z: int = field(init=False) + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) - For the bases and namespace parameters, see the builtin type() function. + For the bases and namespace parameters, see the builtin type() function. - The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to - dataclass(). - """ + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ def replace(obj: _DataclassT, /, **changes: Any) -> _DataclassT: """Return a new object replacing specified fields with new values. -This is especially useful for frozen classes. Example usage:: + This is especially useful for frozen classes. Example usage:: - @dataclass(frozen=True) - class C: - x: int - y: int + @dataclass(frozen=True) + class C: + x: int + y: int - c = C(1, 2) - c1 = replace(c, x=3) - assert c1.x == 3 and c1.y == 2 -""" + c = C(1, 2) + c1 = replace(c, x=3) + assert c1.x == 3 and c1.y == 2 + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi index c3a35b19f358c..4571f2dcf6e65 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/datetime.pyi @@ -3,6 +3,7 @@ See https://data.iana.org/time-zones/tz-link.html for time zone and DST data sources. """ + import sys from abc import abstractmethod from time import struct_time @@ -18,44 +19,43 @@ MINYEAR: Final = 1 MAXYEAR: Final = 9999 class tzinfo: - """Abstract base class for time zone info objects. -""" + """Abstract base class for time zone info objects.""" + @abstractmethod def tzname(self, dt: datetime | None, /) -> str | None: - """datetime -> string name of time zone. -""" + """datetime -> string name of time zone.""" + @abstractmethod def utcoffset(self, dt: datetime | None, /) -> timedelta | None: - """datetime -> timedelta showing offset from UTC, negative values indicating West of UTC -""" + """datetime -> timedelta showing offset from UTC, negative values indicating West of UTC""" + @abstractmethod def dst(self, dt: datetime | None, /) -> timedelta | None: - """datetime -> DST offset as timedelta positive east of UTC. -""" + """datetime -> DST offset as timedelta positive east of UTC.""" + def fromutc(self, dt: datetime, /) -> datetime: - """datetime in UTC -> datetime in local time. -""" + """datetime in UTC -> datetime in local time.""" # Alias required to avoid name conflicts with date(time).tzinfo. _TzInfo: TypeAlias = tzinfo @final class timezone(tzinfo): - """Fixed offset from UTC implementation of tzinfo. -""" + """Fixed offset from UTC implementation of tzinfo.""" + utc: ClassVar[timezone] min: ClassVar[timezone] max: ClassVar[timezone] def __new__(cls, offset: timedelta, name: str = ...) -> Self: ... 
def tzname(self, dt: datetime | None, /) -> str: - """If name is specified when timezone is created, returns the name. Otherwise returns offset as 'UTC(+|-)HH:MM'. -""" + """If name is specified when timezone is created, returns the name. Otherwise returns offset as 'UTC(+|-)HH:MM'.""" + def utcoffset(self, dt: datetime | None, /) -> timedelta: - """Return fixed offset. -""" + """Return fixed offset.""" + def dst(self, dt: datetime | None, /) -> None: - """Return None. -""" + """Return None.""" + def __hash__(self) -> int: ... def __eq__(self, value: object, /) -> bool: ... @@ -76,8 +76,8 @@ class _IsoCalendarDate(tuple[int, int, int]): @disjoint_base class date: - """date(year, month, day) --> date object -""" + """date(year, month, day) --> date object""" + min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] @@ -86,27 +86,29 @@ class date: def fromtimestamp(cls, timestamp: float, /) -> Self: """Create a date from a POSIX timestamp. -The timestamp is a number, e.g. created via time.time(), that is interpreted -as local time. -""" + The timestamp is a number, e.g. created via time.time(), that is interpreted + as local time. + """ + @classmethod def today(cls) -> Self: - """Current date or datetime: same as self.__class__.fromtimestamp(time.time()). -""" + """Current date or datetime: same as self.__class__.fromtimestamp(time.time()).""" + @classmethod def fromordinal(cls, n: int, /) -> Self: - """int -> date corresponding to a proleptic Gregorian ordinal. -""" + """int -> date corresponding to a proleptic Gregorian ordinal.""" + @classmethod def fromisoformat(cls, date_string: str, /) -> Self: - """str -> Construct a date from a string in ISO 8601 format. -""" + """str -> Construct a date from a string in ISO 8601 format.""" + @classmethod def fromisocalendar(cls, year: int, week: int, day: int) -> Self: """int, int, int -> Construct a date from the ISO year, week number and weekday. -This is the inverse of the date.isocalendar() function -""" + This is the inverse of the date.isocalendar() function + """ + @property def year(self) -> int: ... @property @@ -114,62 +116,54 @@ This is the inverse of the date.isocalendar() function @property def day(self) -> int: ... def ctime(self) -> str: - """Return ctime() style string. -""" - + """Return ctime() style string.""" if sys.version_info >= (3, 14): @classmethod def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new date parsed from a string (like time.strptime()). -""" - + """string, format -> new date parsed from a string (like time.strptime()).""" # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): def strftime(self, format: str) -> str: - """format -> strftime() style string. -""" + """format -> strftime() style string.""" else: def strftime(self, format: str, /) -> str: - """format -> strftime() style string. -""" + """format -> strftime() style string.""" def __format__(self, fmt: str, /) -> str: - """Formats self with strftime. -""" + """Formats self with strftime.""" + def isoformat(self) -> str: - """Return string in ISO 8601 format, YYYY-MM-DD. -""" + """Return string in ISO 8601 format, YYYY-MM-DD.""" + def timetuple(self) -> struct_time: - """Return time tuple, compatible with time.localtime(). 
-""" + """Return time tuple, compatible with time.localtime().""" + def toordinal(self) -> int: - """Return proleptic Gregorian ordinal. January 1 of year 1 is day 1. -""" + """Return proleptic Gregorian ordinal. January 1 of year 1 is day 1.""" if sys.version_info >= (3, 13): def __replace__(self, /, *, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: - """The same as replace(). -""" + """The same as replace().""" def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: - """Return date with new specified fields. -""" + """Return date with new specified fields.""" + def __le__(self, value: date, /) -> bool: ... def __lt__(self, value: date, /) -> bool: ... def __ge__(self, value: date, /) -> bool: ... def __gt__(self, value: date, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __add__(self, value: timedelta, /) -> Self: - """Return self+value. -""" + """Return self+value.""" + def __radd__(self, value: timedelta, /) -> Self: - """Return value+self. -""" + """Return value+self.""" + @overload def __sub__(self, value: datetime, /) -> NoReturn: - """Return self-value. -""" + """Return self-value.""" + @overload def __sub__(self, value: Self, /) -> timedelta: ... @overload @@ -177,23 +171,25 @@ This is the inverse of the date.isocalendar() function def __hash__(self) -> int: ... def weekday(self) -> int: """Return the day of the week represented by the date. -Monday == 0 ... Sunday == 6 -""" + Monday == 0 ... Sunday == 6 + """ + def isoweekday(self) -> int: """Return the day of the week represented by the date. -Monday == 1 ... Sunday == 7 -""" + Monday == 1 ... Sunday == 7 + """ + def isocalendar(self) -> _IsoCalendarDate: - """Return a named tuple containing ISO year, week number, and weekday. -""" + """Return a named tuple containing ISO year, week number, and weekday.""" @disjoint_base class time: """time([hour[, minute[, second[, microsecond[, tzinfo]]]]]) --> a time object -All arguments are optional. tzinfo may be None, or an instance of -a tzinfo subclass. The remaining arguments may be ints. -""" + All arguments are optional. tzinfo may be None, or an instance of + a tzinfo subclass. The remaining arguments may be ints. + """ + min: ClassVar[time] max: ClassVar[time] resolution: ClassVar[timedelta] @@ -228,45 +224,39 @@ a tzinfo subclass. The remaining arguments may be ints. def isoformat(self, timespec: str = "auto") -> str: """Return string in ISO 8601 format, [HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. -The optional argument timespec specifies the number of additional terms -of the time to include. Valid options are 'auto', 'hours', 'minutes', -'seconds', 'milliseconds' and 'microseconds'. -""" + The optional argument timespec specifies the number of additional terms + of the time to include. Valid options are 'auto', 'hours', 'minutes', + 'seconds', 'milliseconds' and 'microseconds'. + """ + @classmethod def fromisoformat(cls, time_string: str, /) -> Self: - """string -> time from a string in ISO 8601 format -""" - + """string -> time from a string in ISO 8601 format""" if sys.version_info >= (3, 14): @classmethod def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new time parsed from a string (like time.strptime()). 
-""" - + """string, format -> new time parsed from a string (like time.strptime()).""" # On <3.12, the name of the parameter in the pure-Python implementation # didn't match the name in the C implementation, # meaning it is only *safe* to pass it as a keyword argument on 3.12+ if sys.version_info >= (3, 12): def strftime(self, format: str) -> str: - """format -> strftime() style string. -""" + """format -> strftime() style string.""" else: def strftime(self, format: str, /) -> str: - """format -> strftime() style string. -""" + """format -> strftime() style string.""" def __format__(self, fmt: str, /) -> str: - """Formats self with strftime. -""" + """Formats self with strftime.""" + def utcoffset(self) -> timedelta | None: - """Return self.tzinfo.utcoffset(self). -""" + """Return self.tzinfo.utcoffset(self).""" + def tzname(self) -> str | None: - """Return self.tzinfo.tzname(self). -""" + """Return self.tzinfo.tzname(self).""" + def dst(self) -> timedelta | None: - """Return self.tzinfo.dst(self). -""" + """Return self.tzinfo.dst(self).""" if sys.version_info >= (3, 13): def __replace__( self, @@ -279,8 +269,7 @@ of the time to include. Valid options are 'auto', 'hours', 'minutes', tzinfo: _TzInfo | None = ..., fold: int = ..., ) -> Self: - """The same as replace(). -""" + """The same as replace().""" def replace( self, @@ -292,8 +281,7 @@ of the time to include. Valid options are 'auto', 'hours', 'minutes', *, fold: int = ..., ) -> Self: - """Return time with new specified fields. -""" + """Return time with new specified fields.""" _Date: TypeAlias = date _Time: TypeAlias = time @@ -302,11 +290,12 @@ _Time: TypeAlias = time class timedelta: """Difference between two datetime values. -timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0) + timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0) + + All arguments are optional and default to 0. + Arguments may be integers or floats, and may be positive or negative. + """ -All arguments are optional and default to 0. -Arguments may be integers or floats, and may be positive or negative. -""" min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] @@ -322,81 +311,82 @@ Arguments may be integers or floats, and may be positive or negative. ) -> Self: ... @property def days(self) -> int: - """Number of days. -""" + """Number of days.""" + @property def seconds(self) -> int: - """Number of seconds (>= 0 and less than 1 day). -""" + """Number of seconds (>= 0 and less than 1 day).""" + @property def microseconds(self) -> int: - """Number of microseconds (>= 0 and less than 1 second). -""" + """Number of microseconds (>= 0 and less than 1 second).""" + def total_seconds(self) -> float: - """Total seconds in the duration. -""" + """Total seconds in the duration.""" + def __add__(self, value: timedelta, /) -> timedelta: - """Return self+value. -""" + """Return self+value.""" + def __radd__(self, value: timedelta, /) -> timedelta: - """Return value+self. -""" + """Return value+self.""" + def __sub__(self, value: timedelta, /) -> timedelta: - """Return self-value. -""" + """Return self-value.""" + def __rsub__(self, value: timedelta, /) -> timedelta: - """Return value-self. 
-""" + """Return value-self.""" + def __neg__(self) -> timedelta: - """-self -""" + """-self""" + def __pos__(self) -> timedelta: - """+self -""" + """+self""" + def __abs__(self) -> timedelta: - """abs(self) -""" + """abs(self)""" + def __mul__(self, value: float, /) -> timedelta: - """Return self*value. -""" + """Return self*value.""" + def __rmul__(self, value: float, /) -> timedelta: - """Return value*self. -""" + """Return value*self.""" + @overload def __floordiv__(self, value: timedelta, /) -> int: - """Return self//value. -""" + """Return self//value.""" + @overload def __floordiv__(self, value: int, /) -> timedelta: ... @overload def __truediv__(self, value: timedelta, /) -> float: - """Return self/value. -""" + """Return self/value.""" + @overload def __truediv__(self, value: float, /) -> timedelta: ... def __mod__(self, value: timedelta, /) -> timedelta: - """Return self%value. -""" + """Return self%value.""" + def __divmod__(self, value: timedelta, /) -> tuple[int, timedelta]: - """Return divmod(self, value). -""" + """Return divmod(self, value).""" + def __le__(self, value: timedelta, /) -> bool: ... def __lt__(self, value: timedelta, /) -> bool: ... def __ge__(self, value: timedelta, /) -> bool: ... def __gt__(self, value: timedelta, /) -> bool: ... def __eq__(self, value: object, /) -> bool: ... def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def __hash__(self) -> int: ... @disjoint_base class datetime(date): """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]]) -The year, month and day arguments are required. tzinfo may be None, or an -instance of a tzinfo subclass. The remaining arguments may be ints. -""" + The year, month and day arguments are required. tzinfo may be None, or an + instance of a tzinfo subclass. The remaining arguments may be ints. + """ + min: ClassVar[datetime] max: ClassVar[datetime] def __new__( @@ -430,52 +420,50 @@ instance of a tzinfo subclass. The remaining arguments may be ints. if sys.version_info >= (3, 12): @classmethod def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = None) -> Self: - """timestamp[, tz] -> tz's local time from POSIX timestamp. -""" + """timestamp[, tz] -> tz's local time from POSIX timestamp.""" else: @classmethod def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = None) -> Self: - """timestamp[, tz] -> tz's local time from POSIX timestamp. -""" + """timestamp[, tz] -> tz's local time from POSIX timestamp.""" @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)") def utcfromtimestamp(cls, t: float, /) -> Self: - """Construct a naive UTC datetime from a POSIX timestamp. -""" + """Construct a naive UTC datetime from a POSIX timestamp.""" + @classmethod def now(cls, tz: _TzInfo | None = None) -> Self: """Returns new datetime object representing current time local to tz. - tz - Timezone object. + tz + Timezone object. + + If no tz is specified, uses local timezone. + """ -If no tz is specified, uses local timezone. -""" @classmethod @deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)") def utcnow(cls) -> Self: - """Return a new datetime representing UTC day and time. -""" + """Return a new datetime representing UTC day and time.""" + @classmethod def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) 
-> Self: - """date, time -> datetime with same date and time fields -""" + """date, time -> datetime with same date and time fields""" + def timestamp(self) -> float: - """Return POSIX timestamp as float. -""" + """Return POSIX timestamp as float.""" + def utctimetuple(self) -> struct_time: - """Return UTC time tuple, compatible with time.localtime(). -""" + """Return UTC time tuple, compatible with time.localtime().""" + def date(self) -> _Date: - """Return date object with same year, month and day. -""" + """Return date object with same year, month and day.""" + def time(self) -> _Time: - """Return time object with same time but with tzinfo=None. -""" + """Return time object with same time but with tzinfo=None.""" + def timetz(self) -> _Time: - """Return time object with same time and tzinfo. -""" + """Return time object with same time and tzinfo.""" if sys.version_info >= (3, 13): def __replace__( self, @@ -491,8 +479,7 @@ If no tz is specified, uses local timezone. tzinfo: _TzInfo | None = ..., fold: int = ..., ) -> Self: - """The same as replace(). -""" + """The same as replace().""" def replace( self, @@ -507,31 +494,32 @@ If no tz is specified, uses local timezone. *, fold: int = ..., ) -> Self: - """Return datetime with new specified fields. -""" + """Return datetime with new specified fields.""" + def astimezone(self, tz: _TzInfo | None = None) -> Self: - """tz -> convert to local time in new timezone tz -""" + """tz -> convert to local time in new timezone tz""" + def isoformat(self, sep: str = "T", timespec: str = "auto") -> str: """[sep] -> string in ISO 8601 format, YYYY-MM-DDT[HH[:MM[:SS[.mmm[uuu]]]]][+HH:MM]. -sep is used to separate the year from the time, and defaults to 'T'. -The optional argument timespec specifies the number of additional terms -of the time to include. Valid options are 'auto', 'hours', 'minutes', -'seconds', 'milliseconds' and 'microseconds'. -""" + sep is used to separate the year from the time, and defaults to 'T'. + The optional argument timespec specifies the number of additional terms + of the time to include. Valid options are 'auto', 'hours', 'minutes', + 'seconds', 'milliseconds' and 'microseconds'. + """ + @classmethod def strptime(cls, date_string: str, format: str, /) -> Self: - """string, format -> new datetime parsed from a string (like time.strptime()). -""" + """string, format -> new datetime parsed from a string (like time.strptime()).""" + def utcoffset(self) -> timedelta | None: - """Return self.tzinfo.utcoffset(self). -""" + """Return self.tzinfo.utcoffset(self).""" + def tzname(self) -> str | None: - """Return self.tzinfo.tzname(self). -""" + """Return self.tzinfo.tzname(self).""" + def dst(self) -> timedelta | None: - """Return self.tzinfo.dst(self). -""" + """Return self.tzinfo.dst(self).""" + def __le__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __lt__(self, value: datetime, /) -> bool: ... # type: ignore[override] def __ge__(self, value: datetime, /) -> bool: ... # type: ignore[override] @@ -540,8 +528,8 @@ of the time to include. Valid options are 'auto', 'hours', 'minutes', def __hash__(self) -> int: ... @overload # type: ignore[override] def __sub__(self, value: Self, /) -> timedelta: - """Return self-value. -""" + """Return self-value.""" + @overload def __sub__(self, value: timedelta, /) -> Self: ... 
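The datetime/timedelta docstrings above describe behaviour that can be exercised directly; the following is a minimal, illustrative sketch (the literal date strings are arbitrary example inputs, everything else is stdlib API shown in the stubs above):

from datetime import datetime, timedelta, timezone

dt = datetime.strptime("2024-01-02 03:04", "%Y-%m-%d %H:%M")   # string, format -> datetime
print(dt.strftime("%Y-%m-%dT%H:%M"))                           # 2024-01-02T03:04
later = dt + timedelta(days=1, hours=2)                        # datetime + timedelta -> datetime
print(later.isoformat(sep=" ", timespec="minutes"))            # 2024-01-03 05:04
aware = dt.replace(tzinfo=timezone.utc)                        # attach tzinfo without conversion
print(aware.utcoffset(), aware.tzname())                       # 0:00:00 UTC
print(timedelta(days=1).total_seconds())                       # 86400.0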
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi index 4d37f4298a730..3c41015fcd043 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/__init__.pyi @@ -26,6 +26,7 @@ Future versions may change the order in which implementations are tested for existence, and add interfaces to other dbm-like implementations. """ + import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -128,48 +129,50 @@ if sys.version_info >= (3, 11): def whichdb(filename: StrOrBytesPath) -> str | None: """Guess which db package to use to open a db file. -Return values: + Return values: -- None if the database file can't be read; -- empty string if the file can be read but can't be recognized -- the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. + - None if the database file can't be read; + - empty string if the file can be read but can't be recognized + - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. + + Importing the given module may still fail, and opening the + database using that module may still fail. + """ -Importing the given module may still fail, and opening the -database using that module may still fail. -""" def open(file: StrOrBytesPath, flag: _TFlags = "r", mode: int = 0o666) -> _Database: """Open or create database at path given by *file*. -Optional argument *flag* can be 'r' (default) for read-only access, 'w' -for read-write access of an existing database, 'c' for read-write access -to a new or existing database, and 'n' for read-write access to a new -database. + Optional argument *flag* can be 'r' (default) for read-only access, 'w' + for read-write access of an existing database, 'c' for read-write access + to a new or existing database, and 'n' for read-write access to a new + database. -Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it -only if it doesn't exist; and 'n' always creates a new database. -""" + Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it + only if it doesn't exist; and 'n' always creates a new database. + """ else: def whichdb(filename: str) -> str | None: """Guess which db package to use to open a db file. - Return values: + Return values: + + - None if the database file can't be read; + - empty string if the file can be read but can't be recognized + - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. - - None if the database file can't be read; - - empty string if the file can be read but can't be recognized - - the name of the dbm submodule (e.g. "ndbm" or "gnu") if recognized. + Importing the given module may still fail, and opening the + database using that module may still fail. + """ - Importing the given module may still fail, and opening the - database using that module may still fail. - """ def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: """Open or create database at path given by *file*. - Optional argument *flag* can be 'r' (default) for read-only access, 'w' - for read-write access of an existing database, 'c' for read-write access - to a new or existing database, and 'n' for read-write access to a new - database. + Optional argument *flag* can be 'r' (default) for read-only access, 'w' + for read-write access of an existing database, 'c' for read-write access + to a new or existing database, and 'n' for read-write access to a new + database. 
- Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it - only if it doesn't exist; and 'n' always creates a new database. - """ + Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it + only if it doesn't exist; and 'n' always creates a new database. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi index 8766a13c4bce3..22ef756acac03 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/dumb.pyi @@ -20,6 +20,7 @@ is read when the database is opened, and some updates rewrite the whole index) - support opening for read-only (flag = 'm') """ + import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -56,30 +57,30 @@ if sys.version_info >= (3, 11): def open(file: StrOrBytesPath, flag: str = "c", mode: int = 0o666) -> _Database: """Open the database file, filename, and return corresponding object. -The flag argument, used to control how the database is opened in the -other DBM implementations, supports only the semantics of 'c' and 'n' -values. Other values will default to the semantics of 'c' value: -the database will always opened for update and will be created if it -does not exist. + The flag argument, used to control how the database is opened in the + other DBM implementations, supports only the semantics of 'c' and 'n' + values. Other values will default to the semantics of 'c' value: + the database will always opened for update and will be created if it + does not exist. -The optional mode argument is the UNIX mode of the file, used only when -the database has to be created. It defaults to octal code 0o666 (and -will be modified by the prevailing umask). + The optional mode argument is the UNIX mode of the file, used only when + the database has to be created. It defaults to octal code 0o666 (and + will be modified by the prevailing umask). -""" + """ else: def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: """Open the database file, filename, and return corresponding object. - The flag argument, used to control how the database is opened in the - other DBM implementations, supports only the semantics of 'c' and 'n' - values. Other values will default to the semantics of 'c' value: - the database will always opened for update and will be created if it - does not exist. + The flag argument, used to control how the database is opened in the + other DBM implementations, supports only the semantics of 'c' and 'n' + values. Other values will default to the semantics of 'c' value: + the database will always opened for update and will be created if it + does not exist. - The optional mode argument is the UNIX mode of the file, used only when - the database has to be created. It defaults to octal code 0o666 (and - will be modified by the prevailing umask). + The optional mode argument is the UNIX mode of the file, used only when + the database has to be created. It defaults to octal code 0o666 (and + will be modified by the prevailing umask). - """ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi index 8715bba5f43dc..b07a1defffdf2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/gnu.pyi @@ -1,3 +1,3 @@ -"""Provide the _gdbm module as a dbm submodule. 
-""" +"""Provide the _gdbm module as a dbm submodule.""" + from _gdbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi index 6738efacb5241..23056a29ef2b6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/ndbm.pyi @@ -1,3 +1,3 @@ -"""Provide the _dbm module as a dbm submodule. -""" +"""Provide the _dbm module as a dbm submodule.""" + from _dbm import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi index c2ac90fef2cd1..4f30544592376 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dbm/sqlite3.pyi @@ -29,14 +29,14 @@ class _Database(MutableMapping[bytes, bytes]): def open(filename: StrOrBytesPath, /, flag: Literal["r", "w", "c", "n"] = "r", mode: int = 0o666) -> _Database: """Open a dbm.sqlite3 database and return the dbm object. -The 'filename' parameter is the name of the database file. + The 'filename' parameter is the name of the database file. -The optional 'flag' parameter can be one of ...: - 'r' (default): open an existing database for read only access - 'w': open an existing database for read/write access - 'c': create a database if it does not exist; open for read/write access - 'n': always create a new, empty database; open for read/write access + The optional 'flag' parameter can be one of ...: + 'r' (default): open an existing database for read only access + 'w': open an existing database for read/write access + 'c': create a database if it does not exist; open for read/write access + 'n': always create a new, empty database; open for read/write access -The optional 'mode' parameter is the Unix file access mode of the database; -only used when creating a new database. Default: 0o666. -""" + The optional 'mode' parameter is the Unix file access mode of the database; + only used when creating a new database. Default: 0o666. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi index e5a27d4b4099d..1f6ba755df7a2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/decimal.pyi @@ -97,6 +97,7 @@ NaN 1 >>> """ + import numbers import sys from _decimal import ( @@ -148,8 +149,8 @@ class _ContextManager: def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... class DecimalTuple(NamedTuple): - """DecimalTuple(sign, digits, exponent) -""" + """DecimalTuple(sign, digits, exponent)""" + sign: int digits: tuple[int, ...] exponent: int | Literal["n", "N", "F"] @@ -172,162 +173,166 @@ class FloatOperation(DecimalException, TypeError): ... @disjoint_base class Decimal: """Construct a new Decimal object. 'value' can be an integer, string, tuple, -or another Decimal object. If no value is given, return Decimal('0'). The -context does not affect the conversion and is only passed to determine if -the InvalidOperation trap is active. + or another Decimal object. If no value is given, return Decimal('0'). The + context does not affect the conversion and is only passed to determine if + the InvalidOperation trap is active. + + """ -""" def __new__(cls, value: _DecimalNew = "0", context: Context | None = None) -> Self: ... 
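As an illustrative sketch of the constructor behaviour the Decimal docstring above describes (the input strings and the lenient context are arbitrary examples; the context argument only governs whether InvalidOperation traps during conversion):

from decimal import Context, Decimal

print(Decimal("0.1") + Decimal("0.2"))        # 0.3 exactly, unlike binary floats
print(Decimal((0, (3, 1, 4), -2)))            # tuple form (sign, digits, exponent): 3.14
lenient = Context(traps=[])                   # clear all traps, including InvalidOperation
print(Decimal("garbage", context=lenient))    # NaN instead of raising InvalidOperation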
if sys.version_info >= (3, 14): @classmethod def from_number(cls, number: Decimal | float, /) -> Self: """Class method that converts a real number to a decimal number, exactly. - >>> Decimal.from_number(314) # int - Decimal('314') - >>> Decimal.from_number(0.1) # float - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_number(Decimal('3.14')) # another decimal instance - Decimal('3.14') + >>> Decimal.from_number(314) # int + Decimal('314') + >>> Decimal.from_number(0.1) # float + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_number(Decimal('3.14')) # another decimal instance + Decimal('3.14') -""" + """ @classmethod def from_float(cls, f: float, /) -> Self: """Class method that converts a float to a decimal number, exactly. -Since 0.1 is not exactly representable in binary floating point, -Decimal.from_float(0.1) is not the same as Decimal('0.1'). + Since 0.1 is not exactly representable in binary floating point, + Decimal.from_float(0.1) is not the same as Decimal('0.1'). - >>> Decimal.from_float(0.1) - Decimal('0.1000000000000000055511151231257827021181583404541015625') - >>> Decimal.from_float(float('nan')) - Decimal('NaN') - >>> Decimal.from_float(float('inf')) - Decimal('Infinity') - >>> Decimal.from_float(float('-inf')) - Decimal('-Infinity') + >>> Decimal.from_float(0.1) + Decimal('0.1000000000000000055511151231257827021181583404541015625') + >>> Decimal.from_float(float('nan')) + Decimal('NaN') + >>> Decimal.from_float(float('inf')) + Decimal('Infinity') + >>> Decimal.from_float(float('-inf')) + Decimal('-Infinity') -""" + """ + def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: """Compare self to other. Return a decimal value: - a or b is a NaN ==> Decimal('NaN') - a < b ==> Decimal('-1') - a == b ==> Decimal('0') - a > b ==> Decimal('1') + a or b is a NaN ==> Decimal('NaN') + a < b ==> Decimal('-1') + a == b ==> Decimal('0') + a > b ==> Decimal('1') + + """ -""" def __hash__(self) -> int: ... def as_tuple(self) -> DecimalTuple: - """Return a tuple representation of the number. + """Return a tuple representation of the number.""" -""" def as_integer_ratio(self) -> tuple[int, int]: """Decimal.as_integer_ratio() -> (int, int) -Return a pair of integers, whose ratio is exactly equal to the original -Decimal and with a positive denominator. The ratio is in lowest terms. -Raise OverflowError on infinities and a ValueError on NaNs. + Return a pair of integers, whose ratio is exactly equal to the original + Decimal and with a positive denominator. The ratio is in lowest terms. + Raise OverflowError on infinities and a ValueError on NaNs. + + """ -""" def to_eng_string(self, context: Context | None = None) -> str: """Convert to an engineering-type string. Engineering notation has an exponent -which is a multiple of 3, so there are up to 3 digits left of the decimal -place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). + which is a multiple of 3, so there are up to 3 digits left of the decimal + place. For example, Decimal('123E+1') is converted to Decimal('1.23E+3'). -The value of context.capitals determines whether the exponent sign is lower -or upper case. Otherwise, the context does not affect the operation. + The value of context.capitals determines whether the exponent sign is lower + or upper case. Otherwise, the context does not affect the operation. 
+ + """ -""" def __abs__(self) -> Decimal: - """abs(self) -""" + """abs(self)""" + def __add__(self, value: _Decimal, /) -> Decimal: - """Return self+value. -""" + """Return self+value.""" + def __divmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(self, value). -""" + """Return divmod(self, value).""" + def __eq__(self, value: object, /) -> bool: ... def __floordiv__(self, value: _Decimal, /) -> Decimal: - """Return self//value. -""" + """Return self//value.""" + def __ge__(self, value: _ComparableNum, /) -> bool: ... def __gt__(self, value: _ComparableNum, /) -> bool: ... def __le__(self, value: _ComparableNum, /) -> bool: ... def __lt__(self, value: _ComparableNum, /) -> bool: ... def __mod__(self, value: _Decimal, /) -> Decimal: - """Return self%value. -""" + """Return self%value.""" + def __mul__(self, value: _Decimal, /) -> Decimal: - """Return self*value. -""" + """Return self*value.""" + def __neg__(self) -> Decimal: - """-self -""" + """-self""" + def __pos__(self) -> Decimal: - """+self -""" + """+self""" + def __pow__(self, value: _Decimal, mod: _Decimal | None = None, /) -> Decimal: - """Return pow(self, value, mod). -""" + """Return pow(self, value, mod).""" + def __radd__(self, value: _Decimal, /) -> Decimal: - """Return value+self. -""" + """Return value+self.""" + def __rdivmod__(self, value: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return divmod(value, self). -""" + """Return divmod(value, self).""" + def __rfloordiv__(self, value: _Decimal, /) -> Decimal: - """Return value//self. -""" + """Return value//self.""" + def __rmod__(self, value: _Decimal, /) -> Decimal: - """Return value%self. -""" + """Return value%self.""" + def __rmul__(self, value: _Decimal, /) -> Decimal: - """Return value*self. -""" + """Return value*self.""" + def __rsub__(self, value: _Decimal, /) -> Decimal: - """Return value-self. -""" + """Return value-self.""" + def __rtruediv__(self, value: _Decimal, /) -> Decimal: - """Return value/self. -""" + """Return value/self.""" + def __sub__(self, value: _Decimal, /) -> Decimal: - """Return self-value. -""" + """Return self-value.""" + def __truediv__(self, value: _Decimal, /) -> Decimal: - """Return self/value. -""" + """Return self/value.""" + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: """Return the remainder from dividing self by other. This differs from -self % other in that the sign of the remainder is chosen so as to minimize -its absolute value. More precisely, the return value is self - n * other -where n is the integer nearest to the exact value of self / other, and -if two integers are equally near then the even one is chosen. + self % other in that the sign of the remainder is chosen so as to minimize + its absolute value. More precisely, the return value is self - n * other + where n is the integer nearest to the exact value of self / other, and + if two integers are equally near then the even one is chosen. -If the result is zero then its sign will be the sign of self. + If the result is zero then its sign will be the sign of self. + + """ -""" def __float__(self) -> float: - """float(self) -""" + """float(self)""" + def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __trunc__(self) -> int: ... @property def real(self) -> Decimal: ... @property def imag(self) -> Decimal: ... def conjugate(self) -> Decimal: - """Return self. + """Return self.""" -""" def __complex__(self) -> complex: ... @overload def __round__(self) -> int: ... 
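A short illustrative sketch of the conversion and comparison methods documented above (expected values are taken from, or consistent with, the docstrings themselves):

from decimal import Decimal

print(Decimal.from_float(0.1))             # 0.1000000000000000055511151231257827021181583404541015625
print(Decimal("0.1").as_integer_ratio())   # (1, 10)
print(Decimal("1").compare(Decimal("2")))  # -1
print(Decimal("123E+1").to_eng_string())   # 1.23E+3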
@@ -337,323 +342,353 @@ If the result is zero then its sign will be the sign of self. def __ceil__(self) -> int: ... def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: """Fused multiply-add. Return self*other+third with no rounding of the -intermediate product self*other. + intermediate product self*other. - >>> Decimal(2).fma(3, 5) - Decimal('11') + >>> Decimal(2).fma(3, 5) + Decimal('11') -""" + """ + def __rpow__(self, value: _Decimal, mod: Context | None = None, /) -> Decimal: - """Return pow(value, self, mod). -""" + """Return pow(value, self, mod).""" + def normalize(self, context: Context | None = None) -> Decimal: """Normalize the number by stripping the rightmost trailing zeros and -converting any result equal to Decimal('0') to Decimal('0e0'). Used -for producing canonical values for members of an equivalence class. -For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize -to the equivalent value Decimal('32.1'). + converting any result equal to Decimal('0') to Decimal('0e0'). Used + for producing canonical values for members of an equivalence class. + For example, Decimal('32.100') and Decimal('0.321000e+2') both normalize + to the equivalent value Decimal('32.1'). + + """ -""" def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: """Return a value equal to the first operand after rounding and having the -exponent of the second operand. + exponent of the second operand. - >>> Decimal('1.41421356').quantize(Decimal('1.000')) - Decimal('1.414') + >>> Decimal('1.41421356').quantize(Decimal('1.000')) + Decimal('1.414') -Unlike other operations, if the length of the coefficient after the quantize -operation would be greater than precision, then an InvalidOperation is signaled. -This guarantees that, unless there is an error condition, the quantized exponent -is always equal to that of the right-hand operand. + Unlike other operations, if the length of the coefficient after the quantize + operation would be greater than precision, then an InvalidOperation is signaled. + This guarantees that, unless there is an error condition, the quantized exponent + is always equal to that of the right-hand operand. -Also unlike other operations, quantize never signals Underflow, even if the -result is subnormal and inexact. + Also unlike other operations, quantize never signals Underflow, even if the + result is subnormal and inexact. -If the exponent of the second operand is larger than that of the first, then -rounding may be necessary. In this case, the rounding mode is determined by the -rounding argument if given, else by the given context argument; if neither -argument is given, the rounding mode of the current thread's context is used. + If the exponent of the second operand is larger than that of the first, then + rounding may be necessary. In this case, the rounding mode is determined by the + rounding argument if given, else by the given context argument; if neither + argument is given, the rounding mode of the current thread's context is used. + + """ -""" def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: """Test whether self and other have the same exponent or whether both are NaN. -This operation is unaffected by context and is quiet: no flags are changed -and no rounding is performed. As an exception, the C version may raise -InvalidOperation if the second operand cannot be converted exactly. 
+ This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + + """ -""" def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: """Round to the nearest integer, signaling Inexact or Rounded as appropriate if -rounding occurs. The rounding mode is determined by the rounding parameter -if given, else by the given context. If neither parameter is given, then the -rounding mode of the current default context is used. + rounding occurs. The rounding mode is determined by the rounding parameter + if given, else by the given context. If neither parameter is given, then the + rounding mode of the current default context is used. + + """ -""" def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: """Round to the nearest integer without signaling Inexact or Rounded. The -rounding mode is determined by the rounding parameter if given, else by -the given context. If neither parameter is given, then the rounding mode -of the current default context is used. + rounding mode is determined by the rounding parameter if given, else by + the given context. If neither parameter is given, then the rounding mode + of the current default context is used. + + """ -""" def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: """Identical to the to_integral_value() method. The to_integral() name has been -kept for compatibility with older versions. + kept for compatibility with older versions. + + """ -""" def sqrt(self, context: Context | None = None) -> Decimal: """Return the square root of the argument to full precision. The result is -correctly rounded using the ROUND_HALF_EVEN rounding mode. + correctly rounded using the ROUND_HALF_EVEN rounding mode. + + """ -""" def max(self, other: _Decimal, context: Context | None = None) -> Decimal: """Maximum of self and other. If one operand is a quiet NaN and the other is -numeric, the numeric operand is returned. + numeric, the numeric operand is returned. + + """ -""" def min(self, other: _Decimal, context: Context | None = None) -> Decimal: """Minimum of self and other. If one operand is a quiet NaN and the other is -numeric, the numeric operand is returned. + numeric, the numeric operand is returned. + + """ -""" def adjusted(self) -> int: - """Return the adjusted exponent of the number. Defined as exp + digits - 1. + """Return the adjusted exponent of the number. Defined as exp + digits - 1.""" -""" def canonical(self) -> Decimal: """Return the canonical encoding of the argument. Currently, the encoding -of a Decimal instance is always canonical, so this operation returns its -argument unchanged. + of a Decimal instance is always canonical, so this operation returns its + argument unchanged. + + """ -""" def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Identical to compare, except that all NaNs signal. + """Identical to compare, except that all NaNs signal.""" -""" def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: """Compare two operands using their abstract representation rather than -their numerical value. Similar to the compare() method, but the result -gives a total ordering on Decimal instances. 
Two Decimal instances with -the same numeric value but different representations compare unequal -in this ordering: + their numerical value. Similar to the compare() method, but the result + gives a total ordering on Decimal instances. Two Decimal instances with + the same numeric value but different representations compare unequal + in this ordering: - >>> Decimal('12.0').compare_total(Decimal('12')) - Decimal('-1') + >>> Decimal('12.0').compare_total(Decimal('12')) + Decimal('-1') -Quiet and signaling NaNs are also included in the total ordering. The result -of this function is Decimal('0') if both operands have the same representation, -Decimal('-1') if the first operand is lower in the total order than the second, -and Decimal('1') if the first operand is higher in the total order than the -second operand. See the specification for details of the total order. + Quiet and signaling NaNs are also included in the total ordering. The result + of this function is Decimal('0') if both operands have the same representation, + Decimal('-1') if the first operand is lower in the total order than the second, + and Decimal('1') if the first operand is higher in the total order than the + second operand. See the specification for details of the total order. -This operation is unaffected by context and is quiet: no flags are changed -and no rounding is performed. As an exception, the C version may raise -InvalidOperation if the second operand cannot be converted exactly. + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + + """ -""" def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: """Compare two operands using their abstract representation rather than their -value as in compare_total(), but ignoring the sign of each operand. + value as in compare_total(), but ignoring the sign of each operand. -x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). + x.compare_total_mag(y) is equivalent to x.copy_abs().compare_total(y.copy_abs()). -This operation is unaffected by context and is quiet: no flags are changed -and no rounding is performed. As an exception, the C version may raise -InvalidOperation if the second operand cannot be converted exactly. + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + + """ -""" def copy_abs(self) -> Decimal: """Return the absolute value of the argument. This operation is unaffected by -context and is quiet: no flags are changed and no rounding is performed. + context and is quiet: no flags are changed and no rounding is performed. + + """ -""" def copy_negate(self) -> Decimal: """Return the negation of the argument. This operation is unaffected by context -and is quiet: no flags are changed and no rounding is performed. + and is quiet: no flags are changed and no rounding is performed. + + """ -""" def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: """Return a copy of the first operand with the sign set to be the same as the -sign of the second operand. For example: + sign of the second operand. 
For example: - >>> Decimal('2.3').copy_sign(Decimal('-1.5')) - Decimal('-2.3') + >>> Decimal('2.3').copy_sign(Decimal('-1.5')) + Decimal('-2.3') -This operation is unaffected by context and is quiet: no flags are changed -and no rounding is performed. As an exception, the C version may raise -InvalidOperation if the second operand cannot be converted exactly. + This operation is unaffected by context and is quiet: no flags are changed + and no rounding is performed. As an exception, the C version may raise + InvalidOperation if the second operand cannot be converted exactly. + + """ -""" def exp(self, context: Context | None = None) -> Decimal: """Return the value of the (natural) exponential function e**x at the given -number. The function always uses the ROUND_HALF_EVEN mode and the result -is correctly rounded. + number. The function always uses the ROUND_HALF_EVEN mode and the result + is correctly rounded. + + """ -""" def is_canonical(self) -> bool: """Return True if the argument is canonical and False otherwise. Currently, -a Decimal instance is always canonical, so this operation always returns -True. + a Decimal instance is always canonical, so this operation always returns + True. + + """ -""" def is_finite(self) -> bool: """Return True if the argument is a finite number, and False if the argument -is infinite or a NaN. + is infinite or a NaN. + + """ -""" def is_infinite(self) -> bool: """Return True if the argument is either positive or negative infinity and -False otherwise. + False otherwise. + + """ -""" def is_nan(self) -> bool: """Return True if the argument is a (quiet or signaling) NaN and False -otherwise. + otherwise. + + """ -""" def is_normal(self, context: Context | None = None) -> bool: """Return True if the argument is a normal finite non-zero number with an -adjusted exponent greater than or equal to Emin. Return False if the -argument is zero, subnormal, infinite or a NaN. + adjusted exponent greater than or equal to Emin. Return False if the + argument is zero, subnormal, infinite or a NaN. + + """ -""" def is_qnan(self) -> bool: - """Return True if the argument is a quiet NaN, and False otherwise. + """Return True if the argument is a quiet NaN, and False otherwise.""" -""" def is_signed(self) -> bool: """Return True if the argument has a negative sign and False otherwise. -Note that both zeros and NaNs can carry signs. + Note that both zeros and NaNs can carry signs. + + """ -""" def is_snan(self) -> bool: - """Return True if the argument is a signaling NaN and False otherwise. + """Return True if the argument is a signaling NaN and False otherwise.""" -""" def is_subnormal(self, context: Context | None = None) -> bool: """Return True if the argument is subnormal, and False otherwise. A number is -subnormal if it is non-zero, finite, and has an adjusted exponent less -than Emin. + subnormal if it is non-zero, finite, and has an adjusted exponent less + than Emin. + + """ -""" def is_zero(self) -> bool: """Return True if the argument is a (positive or negative) zero and False -otherwise. + otherwise. + + """ -""" def ln(self, context: Context | None = None) -> Decimal: """Return the natural (base e) logarithm of the operand. The function always -uses the ROUND_HALF_EVEN mode and the result is correctly rounded. + uses the ROUND_HALF_EVEN mode and the result is correctly rounded. + + """ -""" def log10(self, context: Context | None = None) -> Decimal: """Return the base ten logarithm of the operand. 
The function always uses the -ROUND_HALF_EVEN mode and the result is correctly rounded. + ROUND_HALF_EVEN mode and the result is correctly rounded. + + """ -""" def logb(self, context: Context | None = None) -> Decimal: """For a non-zero number, return the adjusted exponent of the operand as a -Decimal instance. If the operand is a zero, then Decimal('-Infinity') is -returned and the DivisionByZero condition is raised. If the operand is -an infinity then Decimal('Infinity') is returned. + Decimal instance. If the operand is a zero, then Decimal('-Infinity') is + returned and the DivisionByZero condition is raised. If the operand is + an infinity then Decimal('Infinity') is returned. + + """ -""" def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'and' of the two (logical) operands. + """Return the digit-wise 'and' of the two (logical) operands.""" -""" def logical_invert(self, context: Context | None = None) -> Decimal: - """Return the digit-wise inversion of the (logical) operand. + """Return the digit-wise inversion of the (logical) operand.""" -""" def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'or' of the two (logical) operands. + """Return the digit-wise 'or' of the two (logical) operands.""" -""" def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: - """Return the digit-wise 'exclusive or' of the two (logical) operands. + """Return the digit-wise 'exclusive or' of the two (logical) operands.""" -""" def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: """Similar to the max() method, but the comparison is done using the absolute -values of the operands. + values of the operands. + + """ -""" def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: """Similar to the min() method, but the comparison is done using the absolute -values of the operands. + values of the operands. + + """ -""" def next_minus(self, context: Context | None = None) -> Decimal: """Return the largest number representable in the given context (or in the -current default context if no context is given) that is smaller than the -given operand. + current default context if no context is given) that is smaller than the + given operand. + + """ -""" def next_plus(self, context: Context | None = None) -> Decimal: """Return the smallest number representable in the given context (or in the -current default context if no context is given) that is larger than the -given operand. + current default context if no context is given) that is larger than the + given operand. + + """ -""" def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: """If the two operands are unequal, return the number closest to the first -operand in the direction of the second operand. If both operands are -numerically equal, return a copy of the first operand with the sign set -to be the same as the sign of the second operand. + operand in the direction of the second operand. If both operands are + numerically equal, return a copy of the first operand with the sign set + to be the same as the sign of the second operand. + + """ -""" def number_class(self, context: Context | None = None) -> str: """Return a string describing the class of the operand. The returned value -is one of the following ten strings: + is one of the following ten strings: - * '-Infinity', indicating that the operand is negative infinity. 
- * '-Normal', indicating that the operand is a negative normal number. - * '-Subnormal', indicating that the operand is negative and subnormal. - * '-Zero', indicating that the operand is a negative zero. - * '+Zero', indicating that the operand is a positive zero. - * '+Subnormal', indicating that the operand is positive and subnormal. - * '+Normal', indicating that the operand is a positive normal number. - * '+Infinity', indicating that the operand is positive infinity. - * 'NaN', indicating that the operand is a quiet NaN (Not a Number). - * 'sNaN', indicating that the operand is a signaling NaN. + * '-Infinity', indicating that the operand is negative infinity. + * '-Normal', indicating that the operand is a negative normal number. + * '-Subnormal', indicating that the operand is negative and subnormal. + * '-Zero', indicating that the operand is a negative zero. + * '+Zero', indicating that the operand is a positive zero. + * '+Subnormal', indicating that the operand is positive and subnormal. + * '+Normal', indicating that the operand is a positive normal number. + * '+Infinity', indicating that the operand is positive infinity. + * 'NaN', indicating that the operand is a quiet NaN (Not a Number). + * 'sNaN', indicating that the operand is a signaling NaN. -""" + """ + def radix(self) -> Decimal: """Return Decimal(10), the radix (base) in which the Decimal class does -all its arithmetic. Included for compatibility with the specification. + all its arithmetic. Included for compatibility with the specification. + + """ -""" def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: """Return the result of rotating the digits of the first operand by an amount -specified by the second operand. The second operand must be an integer in -the range -precision through precision. The absolute value of the second -operand gives the number of places to rotate. If the second operand is -positive then rotation is to the left; otherwise rotation is to the right. -The coefficient of the first operand is padded on the left with zeros to -length precision if necessary. The sign and exponent of the first operand are -unchanged. + specified by the second operand. The second operand must be an integer in + the range -precision through precision. The absolute value of the second + operand gives the number of places to rotate. If the second operand is + positive then rotation is to the left; otherwise rotation is to the right. + The coefficient of the first operand is padded on the left with zeros to + length precision if necessary. The sign and exponent of the first operand are + unchanged. + + """ -""" def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: """Return the first operand with the exponent adjusted the second. Equivalently, -return the first operand multiplied by 10**other. The second operand must be -an integer. + return the first operand multiplied by 10**other. The second operand must be + an integer. + + """ -""" def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: """Return the result of shifting the digits of the first operand by an amount -specified by the second operand. The second operand must be an integer in -the range -precision through precision. The absolute value of the second -operand gives the number of places to shift. If the second operand is -positive, then the shift is to the left; otherwise the shift is to the -right. Digits shifted into the coefficient are zeros. 
The sign and exponent -of the first operand are unchanged. + specified by the second operand. The second operand must be an integer in + the range -precision through precision. The absolute value of the second + operand gives the number of places to shift. If the second operand is + positive, then the shift is to the left; otherwise the shift is to the + right. Digits shifted into the coefficient are zeros. The sign and exponent + of the first operand are unchanged. + + """ -""" def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any, /) -> Self: ... @@ -662,17 +697,18 @@ of the first operand are unchanged. @disjoint_base class Context: """The context affects almost all operations and controls rounding, -Over/Underflow, raising of exceptions and much more. A new context -can be constructed as follows: + Over/Underflow, raising of exceptions and much more. A new context + can be constructed as follows: - >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, - ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, - ... traps=[InvalidOperation, DivisionByZero, Overflow], - ... flags=[]) - >>> + >>> c = Context(prec=28, Emin=-425000000, Emax=425000000, + ... rounding=ROUND_HALF_EVEN, capitals=1, clamp=1, + ... traps=[InvalidOperation, DivisionByZero, Overflow], + ... flags=[]) + >>> -""" + """ + # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, # even settable attributes like `prec` and `rounding`, # but that's inexpressible in the stub. @@ -699,299 +735,248 @@ can be constructed as follows: ) -> None: ... def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... def clear_flags(self) -> None: - """Reset all flags to False. + """Reset all flags to False.""" -""" def clear_traps(self) -> None: - """Set all traps to False. + """Set all traps to False.""" -""" def copy(self) -> Context: - """Return a duplicate of the context with all flags cleared. + """Return a duplicate of the context with all flags cleared.""" -""" def __copy__(self) -> Context: ... # see https://github.com/python/cpython/issues/94107 __hash__: ClassVar[None] # type: ignore[assignment] def Etiny(self) -> int: """Return a value equal to Emin - prec + 1, which is the minimum exponent value -for subnormal results. When underflow occurs, the exponent is set to Etiny. + for subnormal results. When underflow occurs, the exponent is set to Etiny. + + """ -""" def Etop(self) -> int: """Return a value equal to Emax - prec + 1. This is the maximum exponent -if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() -must not be negative. + if the _clamp field of the context is set to 1 (IEEE clamp mode). Etop() + must not be negative. + + """ -""" def create_decimal(self, num: _DecimalNew = "0", /) -> Decimal: """Create a new Decimal instance from num, using self as the context. Unlike the -Decimal constructor, this function observes the context limits. + Decimal constructor, this function observes the context limits. + + """ -""" def create_decimal_from_float(self, f: float, /) -> Decimal: """Create a new Decimal instance from float f. Unlike the Decimal.from_float() -class method, this function observes the context limits. + class method, this function observes the context limits. + + """ -""" def abs(self, x: _Decimal, /) -> Decimal: - """Return the absolute value of x. 
+ """Return the absolute value of x.""" -""" def add(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the sum of x and y. + """Return the sum of x and y.""" -""" def canonical(self, x: Decimal, /) -> Decimal: - """Return a new instance of x. + """Return a new instance of x.""" -""" def compare(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically. + """Compare x and y numerically.""" -""" def compare_signal(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y numerically. All NaNs signal. + """Compare x and y numerically. All NaNs signal.""" -""" def compare_total(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation. + """Compare x and y using their abstract representation.""" -""" def compare_total_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare x and y using their abstract representation, ignoring sign. + """Compare x and y using their abstract representation, ignoring sign.""" -""" def copy_abs(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign set to 0. + """Return a copy of x with the sign set to 0.""" -""" def copy_decimal(self, x: _Decimal, /) -> Decimal: - """Return a copy of Decimal x. + """Return a copy of Decimal x.""" -""" def copy_negate(self, x: _Decimal, /) -> Decimal: - """Return a copy of x with the sign inverted. + """Return a copy of x with the sign inverted.""" -""" def copy_sign(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Copy the sign from y to x. + """Copy the sign from y to x.""" -""" def divide(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y. + """Return x divided by y.""" -""" def divide_int(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return x divided by y, truncated to an integer. + """Return x divided by y, truncated to an integer.""" -""" def divmod(self, x: _Decimal, y: _Decimal, /) -> tuple[Decimal, Decimal]: - """Return quotient and remainder of the division x / y. + """Return quotient and remainder of the division x / y.""" -""" def exp(self, x: _Decimal, /) -> Decimal: - """Return e ** x. + """Return e ** x.""" -""" def fma(self, x: _Decimal, y: _Decimal, z: _Decimal, /) -> Decimal: - """Return x multiplied by y, plus z. + """Return x multiplied by y, plus z.""" -""" def is_canonical(self, x: _Decimal, /) -> bool: - """Return True if x is canonical, False otherwise. + """Return True if x is canonical, False otherwise.""" -""" def is_finite(self, x: _Decimal, /) -> bool: - """Return True if x is finite, False otherwise. + """Return True if x is finite, False otherwise.""" -""" def is_infinite(self, x: _Decimal, /) -> bool: - """Return True if x is infinite, False otherwise. + """Return True if x is infinite, False otherwise.""" -""" def is_nan(self, x: _Decimal, /) -> bool: - """Return True if x is a qNaN or sNaN, False otherwise. + """Return True if x is a qNaN or sNaN, False otherwise.""" -""" def is_normal(self, x: _Decimal, /) -> bool: - """Return True if x is a normal number, False otherwise. + """Return True if x is a normal number, False otherwise.""" -""" def is_qnan(self, x: _Decimal, /) -> bool: - """Return True if x is a quiet NaN, False otherwise. + """Return True if x is a quiet NaN, False otherwise.""" -""" def is_signed(self, x: _Decimal, /) -> bool: - """Return True if x is negative, False otherwise. + """Return True if x is negative, False otherwise.""" -""" def is_snan(self, x: _Decimal, /) -> bool: - """Return True if x is a signaling NaN, False otherwise. 
+ """Return True if x is a signaling NaN, False otherwise.""" -""" def is_subnormal(self, x: _Decimal, /) -> bool: - """Return True if x is subnormal, False otherwise. + """Return True if x is subnormal, False otherwise.""" -""" def is_zero(self, x: _Decimal, /) -> bool: - """Return True if x is a zero, False otherwise. + """Return True if x is a zero, False otherwise.""" -""" def ln(self, x: _Decimal, /) -> Decimal: - """Return the natural (base e) logarithm of x. + """Return the natural (base e) logarithm of x.""" -""" def log10(self, x: _Decimal, /) -> Decimal: - """Return the base 10 logarithm of x. + """Return the base 10 logarithm of x.""" -""" def logb(self, x: _Decimal, /) -> Decimal: - """Return the exponent of the magnitude of the operand's MSD. + """Return the exponent of the magnitude of the operand's MSD.""" -""" def logical_and(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise and of x and y. + """Digit-wise and of x and y.""" -""" def logical_invert(self, x: _Decimal, /) -> Decimal: - """Invert all digits of x. + """Invert all digits of x.""" -""" def logical_or(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise or of x and y. + """Digit-wise or of x and y.""" -""" def logical_xor(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Digit-wise xor of x and y. + """Digit-wise xor of x and y.""" -""" def max(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the maximum. + """Compare the values numerically and return the maximum.""" -""" def max_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored. + """Compare the values numerically with their sign ignored.""" -""" def min(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically and return the minimum. + """Compare the values numerically and return the minimum.""" -""" def min_mag(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Compare the values numerically with their sign ignored. + """Compare the values numerically with their sign ignored.""" -""" def minus(self, x: _Decimal, /) -> Decimal: """Minus corresponds to the unary prefix minus operator in Python, but applies -the context to the result. + the context to the result. + + """ -""" def multiply(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the product of x and y. + """Return the product of x and y.""" -""" def next_minus(self, x: _Decimal, /) -> Decimal: - """Return the largest representable number smaller than x. + """Return the largest representable number smaller than x.""" -""" def next_plus(self, x: _Decimal, /) -> Decimal: - """Return the smallest representable number larger than x. + """Return the smallest representable number larger than x.""" -""" def next_toward(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the number closest to x, in the direction towards y. + """Return the number closest to x, in the direction towards y.""" -""" def normalize(self, x: _Decimal, /) -> Decimal: - """Reduce x to its simplest form. Alias for reduce(x). + """Reduce x to its simplest form. Alias for reduce(x).""" -""" def number_class(self, x: _Decimal, /) -> str: - """Return an indication of the class of x. + """Return an indication of the class of x.""" -""" def plus(self, x: _Decimal, /) -> Decimal: """Plus corresponds to the unary prefix plus operator in Python, but applies -the context to the result. + the context to the result. 
+ + """ -""" def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: """Compute a**b. If 'a' is negative, then 'b' must be integral. The result -will be inexact unless 'a' is integral and the result is finite and can -be expressed exactly in 'precision' digits. In the Python version the -result is always correctly rounded, in the C version the result is almost -always correctly rounded. + will be inexact unless 'a' is integral and the result is finite and can + be expressed exactly in 'precision' digits. In the Python version the + result is always correctly rounded, in the C version the result is almost + always correctly rounded. -If modulo is given, compute (a**b) % modulo. The following restrictions -hold: + If modulo is given, compute (a**b) % modulo. The following restrictions + hold: - * all three arguments must be integral - * 'b' must be nonnegative - * at least one of 'a' or 'b' must be nonzero - * modulo must be nonzero and less than 10**prec in absolute value + * all three arguments must be integral + * 'b' must be nonnegative + * at least one of 'a' or 'b' must be nonzero + * modulo must be nonzero and less than 10**prec in absolute value -""" + """ + def quantize(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a value equal to x (rounded), having the exponent of y. + """Return a value equal to x (rounded), having the exponent of y.""" -""" def radix(self) -> Decimal: - """Return 10. + """Return 10.""" -""" def remainder(self, x: _Decimal, y: _Decimal, /) -> Decimal: """Return the remainder from integer division. The sign of the result, -if non-zero, is the same as that of the original dividend. + if non-zero, is the same as that of the original dividend. + + """ -""" def remainder_near(self, x: _Decimal, y: _Decimal, /) -> Decimal: """Return x - y * n, where n is the integer nearest the exact value of x / y -(if the result is 0 then its sign will be the sign of x). + (if the result is 0 then its sign will be the sign of x). + + """ -""" def rotate(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, rotated by y places. + """Return a copy of x, rotated by y places.""" -""" def same_quantum(self, x: _Decimal, y: _Decimal, /) -> bool: - """Return True if the two operands have the same exponent. + """Return True if the two operands have the same exponent.""" -""" def scaleb(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the first operand after adding the second value to its exp. + """Return the first operand after adding the second value to its exp.""" -""" def shift(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return a copy of x, shifted by y places. + """Return a copy of x, shifted by y places.""" -""" def sqrt(self, x: _Decimal, /) -> Decimal: - """Square root of a non-negative number to context precision. + """Square root of a non-negative number to context precision.""" -""" def subtract(self, x: _Decimal, y: _Decimal, /) -> Decimal: - """Return the difference between x and y. + """Return the difference between x and y.""" -""" def to_eng_string(self, x: _Decimal, /) -> str: - """Convert a number to a string, using engineering notation. + """Convert a number to a string, using engineering notation.""" -""" def to_sci_string(self, x: _Decimal, /) -> str: - """Convert a number to a string using scientific notation. + """Convert a number to a string using scientific notation.""" -""" def to_integral_exact(self, x: _Decimal, /) -> Decimal: - """Round to an integer. 
Signal if the result is rounded or inexact. + """Round to an integer. Signal if the result is rounded or inexact.""" -""" def to_integral_value(self, x: _Decimal, /) -> Decimal: - """Round to an integer. + """Round to an integer.""" -""" def to_integral(self, x: _Decimal, /) -> Decimal: - """Identical to to_integral_value(x). - -""" + """Identical to to_integral_value(x).""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi index aa804b1198227..7f05a7996e58b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/difflib.pyi @@ -25,6 +25,7 @@ Class Differ: Class HtmlDiff: For producing HTML side by side comparison with change highlights. """ + import re import sys from collections.abc import Callable, Iterable, Iterator, Sequence @@ -49,110 +50,112 @@ __all__ = [ _T = TypeVar("_T") class Match(NamedTuple): - """Match(a, b, size) -""" + """Match(a, b, size)""" + a: int b: int size: int class SequenceMatcher(Generic[_T]): """ -SequenceMatcher is a flexible class for comparing pairs of sequences of -any type, so long as the sequence elements are hashable. The basic -algorithm predates, and is a little fancier than, an algorithm -published in the late 1980's by Ratcliff and Obershelp under the -hyperbolic name "gestalt pattern matching". The basic idea is to find -the longest contiguous matching subsequence that contains no "junk" -elements (R-O doesn't address junk). The same idea is then applied -recursively to the pieces of the sequences to the left and to the right -of the matching subsequence. This does not yield minimal edit -sequences, but does tend to yield matches that "look right" to people. - -SequenceMatcher tries to compute a "human-friendly diff" between two -sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -longest *contiguous* & junk-free matching subsequence. That's what -catches peoples' eyes. The Windows(tm) windiff has another interesting -notion, pairing up elements that appear uniquely in each sequence. -That, and the method here, appear to yield more intuitive difference -reports than does diff. This method appears to be the least vulnerable -to syncing up on blocks of "junk lines", though (like blank lines in -ordinary text files, or maybe "
<P>
" lines in HTML files). That may be -because this is the only method of the 3 that has a *concept* of -"junk" . - -Example, comparing two strings, and considering blanks to be "junk": - ->>> s = SequenceMatcher(lambda x: x == " ", -... "private Thread currentThread;", -... "private volatile Thread currentThread;") ->>> - -.ratio() returns a float in [0, 1], measuring the "similarity" of the -sequences. As a rule of thumb, a .ratio() value over 0.6 means the -sequences are close matches: - ->>> print(round(s.ratio(), 2)) -0.87 ->>> - -If you're only interested in where the sequences match, -.get_matching_blocks() is handy: - ->>> for block in s.get_matching_blocks(): -... print("a[%d] and b[%d] match for %d elements" % block) -a[0] and b[0] match for 8 elements -a[8] and b[17] match for 21 elements -a[29] and b[38] match for 0 elements - -Note that the last tuple returned by .get_matching_blocks() is always a -dummy, (len(a), len(b), 0), and this is the only case in which the last -tuple element (number of elements matched) is 0. - -If you want to know how to change the first sequence into the second, -use .get_opcodes(): - ->>> for opcode in s.get_opcodes(): -... print("%6s a[%d:%d] b[%d:%d]" % opcode) - equal a[0:8] b[0:8] -insert a[8:8] b[8:17] - equal a[8:29] b[17:38] - -See the Differ class for a fancy human-friendly file differencer, which -uses SequenceMatcher both to compare sequences of lines, and to compare -sequences of characters within similar (near-matching) lines. - -See also function get_close_matches() in this module, which shows how -simple code building on SequenceMatcher can be used to do useful work. - -Timing: Basic R-O is cubic time worst case and quadratic time expected -case. SequenceMatcher is quadratic time for the worst case and has -expected-case behavior dependent in a complicated way on how many -elements the sequences have in common; best case time is linear. -""" + SequenceMatcher is a flexible class for comparing pairs of sequences of + any type, so long as the sequence elements are hashable. The basic + algorithm predates, and is a little fancier than, an algorithm + published in the late 1980's by Ratcliff and Obershelp under the + hyperbolic name "gestalt pattern matching". The basic idea is to find + the longest contiguous matching subsequence that contains no "junk" + elements (R-O doesn't address junk). The same idea is then applied + recursively to the pieces of the sequences to the left and to the right + of the matching subsequence. This does not yield minimal edit + sequences, but does tend to yield matches that "look right" to people. + + SequenceMatcher tries to compute a "human-friendly diff" between two + sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the + longest *contiguous* & junk-free matching subsequence. That's what + catches peoples' eyes. The Windows(tm) windiff has another interesting + notion, pairing up elements that appear uniquely in each sequence. + That, and the method here, appear to yield more intuitive difference + reports than does diff. This method appears to be the least vulnerable + to syncing up on blocks of "junk lines", though (like blank lines in + ordinary text files, or maybe "
<P>
" lines in HTML files). That may be + because this is the only method of the 3 that has a *concept* of + "junk" . + + Example, comparing two strings, and considering blanks to be "junk": + + >>> s = SequenceMatcher(lambda x: x == " ", + ... "private Thread currentThread;", + ... "private volatile Thread currentThread;") + >>> + + .ratio() returns a float in [0, 1], measuring the "similarity" of the + sequences. As a rule of thumb, a .ratio() value over 0.6 means the + sequences are close matches: + + >>> print(round(s.ratio(), 2)) + 0.87 + >>> + + If you're only interested in where the sequences match, + .get_matching_blocks() is handy: + + >>> for block in s.get_matching_blocks(): + ... print("a[%d] and b[%d] match for %d elements" % block) + a[0] and b[0] match for 8 elements + a[8] and b[17] match for 21 elements + a[29] and b[38] match for 0 elements + + Note that the last tuple returned by .get_matching_blocks() is always a + dummy, (len(a), len(b), 0), and this is the only case in which the last + tuple element (number of elements matched) is 0. + + If you want to know how to change the first sequence into the second, + use .get_opcodes(): + + >>> for opcode in s.get_opcodes(): + ... print("%6s a[%d:%d] b[%d:%d]" % opcode) + equal a[0:8] b[0:8] + insert a[8:8] b[8:17] + equal a[8:29] b[17:38] + + See the Differ class for a fancy human-friendly file differencer, which + uses SequenceMatcher both to compare sequences of lines, and to compare + sequences of characters within similar (near-matching) lines. + + See also function get_close_matches() in this module, which shows how + simple code building on SequenceMatcher can be used to do useful work. + + Timing: Basic R-O is cubic time worst case and quadratic time expected + case. SequenceMatcher is quadratic time for the worst case and has + expected-case behavior dependent in a complicated way on how many + elements the sequences have in common; best case time is linear. + """ + @overload def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: """Construct a SequenceMatcher. -Optional arg isjunk is None (the default), or a one-argument -function that takes a sequence element and returns true iff the -element is junk. None is equivalent to passing "lambda x: 0", i.e. -no elements are considered to be junk. For example, pass - lambda x: x in " \\t" -if you're comparing lines as sequences of characters, and don't -want to synch up on blanks or hard tabs. - -Optional arg a is the first of two sequences to be compared. By -default, an empty string. The elements of a must be hashable. See -also .set_seqs() and .set_seq1(). - -Optional arg b is the second of two sequences to be compared. By -default, an empty string. The elements of b must be hashable. See -also .set_seqs() and .set_seq2(). - -Optional arg autojunk should be set to False to disable the -"automatic junk heuristic" that treats popular elements as junk -(see module documentation for more information). -""" + Optional arg isjunk is None (the default), or a one-argument + function that takes a sequence element and returns true iff the + element is junk. None is equivalent to passing "lambda x: 0", i.e. + no elements are considered to be junk. For example, pass + lambda x: x in " \\t" + if you're comparing lines as sequences of characters, and don't + want to synch up on blanks or hard tabs. + + Optional arg a is the first of two sequences to be compared. By + default, an empty string. 
The elements of a must be hashable. See + also .set_seqs() and .set_seq1(). + + Optional arg b is the second of two sequences to be compared. By + default, an empty string. The elements of b must be hashable. See + also .set_seqs() and .set_seq2(). + + Optional arg autojunk should be set to False to disable the + "automatic junk heuristic" that treats popular elements as junk + (see module documentation for more information). + """ + @overload def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload @@ -166,234 +169,245 @@ Optional arg autojunk should be set to False to disable the def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: """Set the two sequences to be compared. ->>> s = SequenceMatcher() ->>> s.set_seqs("abcd", "bcde") ->>> s.ratio() -0.75 -""" + >>> s = SequenceMatcher() + >>> s.set_seqs("abcd", "bcde") + >>> s.ratio() + 0.75 + """ + def set_seq1(self, a: Sequence[_T]) -> None: """Set the first sequence to be compared. -The second sequence to be compared is not changed. + The second sequence to be compared is not changed. ->>> s = SequenceMatcher(None, "abcd", "bcde") ->>> s.ratio() -0.75 ->>> s.set_seq1("bcde") ->>> s.ratio() -1.0 ->>> + >>> s = SequenceMatcher(None, "abcd", "bcde") + >>> s.ratio() + 0.75 + >>> s.set_seq1("bcde") + >>> s.ratio() + 1.0 + >>> -SequenceMatcher computes and caches detailed information about the -second sequence, so if you want to compare one sequence S against -many sequences, use .set_seq2(S) once and call .set_seq1(x) -repeatedly for each of the other sequences. + SequenceMatcher computes and caches detailed information about the + second sequence, so if you want to compare one sequence S against + many sequences, use .set_seq2(S) once and call .set_seq1(x) + repeatedly for each of the other sequences. + + See also set_seqs() and set_seq2(). + """ -See also set_seqs() and set_seq2(). -""" def set_seq2(self, b: Sequence[_T]) -> None: """Set the second sequence to be compared. -The first sequence to be compared is not changed. + The first sequence to be compared is not changed. ->>> s = SequenceMatcher(None, "abcd", "bcde") ->>> s.ratio() -0.75 ->>> s.set_seq2("abcd") ->>> s.ratio() -1.0 ->>> + >>> s = SequenceMatcher(None, "abcd", "bcde") + >>> s.ratio() + 0.75 + >>> s.set_seq2("abcd") + >>> s.ratio() + 1.0 + >>> -SequenceMatcher computes and caches detailed information about the -second sequence, so if you want to compare one sequence S against -many sequences, use .set_seq2(S) once and call .set_seq1(x) -repeatedly for each of the other sequences. + SequenceMatcher computes and caches detailed information about the + second sequence, so if you want to compare one sequence S against + many sequences, use .set_seq2(S) once and call .set_seq1(x) + repeatedly for each of the other sequences. + + See also set_seqs() and set_seq1(). + """ -See also set_seqs() and set_seq1(). -""" def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: """Find longest matching block in a[alo:ahi] and b[blo:bhi]. -By default it will find the longest match in the entirety of a and b. + By default it will find the longest match in the entirety of a and b. 
-If isjunk is not defined: + If isjunk is not defined: -Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where - alo <= i <= i+k <= ahi - blo <= j <= j+k <= bhi -and for all (i',j',k') meeting those conditions, - k >= k' - i <= i' - and if i == i', j <= j' + Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where + alo <= i <= i+k <= ahi + blo <= j <= j+k <= bhi + and for all (i',j',k') meeting those conditions, + k >= k' + i <= i' + and if i == i', j <= j' -In other words, of all maximal matching blocks, return one that -starts earliest in a, and of all those maximal matching blocks that -start earliest in a, return the one that starts earliest in b. + In other words, of all maximal matching blocks, return one that + starts earliest in a, and of all those maximal matching blocks that + start earliest in a, return the one that starts earliest in b. ->>> s = SequenceMatcher(None, " abcd", "abcd abcd") ->>> s.find_longest_match(0, 5, 0, 9) -Match(a=0, b=4, size=5) + >>> s = SequenceMatcher(None, " abcd", "abcd abcd") + >>> s.find_longest_match(0, 5, 0, 9) + Match(a=0, b=4, size=5) -If isjunk is defined, first the longest matching block is -determined as above, but with the additional restriction that no -junk element appears in the block. Then that block is extended as -far as possible by matching (only) junk elements on both sides. So -the resulting block never matches on junk except as identical junk -happens to be adjacent to an "interesting" match. + If isjunk is defined, first the longest matching block is + determined as above, but with the additional restriction that no + junk element appears in the block. Then that block is extended as + far as possible by matching (only) junk elements on both sides. So + the resulting block never matches on junk except as identical junk + happens to be adjacent to an "interesting" match. -Here's the same example as before, but considering blanks to be -junk. That prevents " abcd" from matching the " abcd" at the tail -end of the second sequence directly. Instead only the "abcd" can -match, and matches the leftmost "abcd" in the second sequence: + Here's the same example as before, but considering blanks to be + junk. That prevents " abcd" from matching the " abcd" at the tail + end of the second sequence directly. Instead only the "abcd" can + match, and matches the leftmost "abcd" in the second sequence: ->>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd") ->>> s.find_longest_match(0, 5, 0, 9) -Match(a=1, b=0, size=4) + >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd") + >>> s.find_longest_match(0, 5, 0, 9) + Match(a=1, b=0, size=4) -If no blocks match, return (alo, blo, 0). + If no blocks match, return (alo, blo, 0). + + >>> s = SequenceMatcher(None, "ab", "c") + >>> s.find_longest_match(0, 2, 0, 1) + Match(a=0, b=0, size=0) + """ ->>> s = SequenceMatcher(None, "ab", "c") ->>> s.find_longest_match(0, 2, 0, 1) -Match(a=0, b=0, size=0) -""" def get_matching_blocks(self) -> list[Match]: """Return list of triples describing matching subsequences. -Each triple is of the form (i, j, n), and means that -a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -i and in j. New in Python 2.5, it's also guaranteed that if -(i, j, n) and (i', j', n') are adjacent triples in the list, and -the second is not the last triple in the list, then i+n != i' or -j+n != j'. IOW, adjacent triples never describe adjacent equal -blocks. + Each triple is of the form (i, j, n), and means that + a[i:i+n] == b[j:j+n]. 
The triples are monotonically increasing in + i and in j. New in Python 2.5, it's also guaranteed that if + (i, j, n) and (i', j', n') are adjacent triples in the list, and + the second is not the last triple in the list, then i+n != i' or + j+n != j'. IOW, adjacent triples never describe adjacent equal + blocks. -The last triple is a dummy, (len(a), len(b), 0), and is the only -triple with n==0. + The last triple is a dummy, (len(a), len(b), 0), and is the only + triple with n==0. + + >>> s = SequenceMatcher(None, "abxcd", "abcd") + >>> list(s.get_matching_blocks()) + [Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)] + """ ->>> s = SequenceMatcher(None, "abxcd", "abcd") ->>> list(s.get_matching_blocks()) -[Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)] -""" def get_opcodes(self) -> list[tuple[Literal["replace", "delete", "insert", "equal"], int, int, int, int]]: """Return list of 5-tuples describing how to turn a into b. -Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -tuple preceding it, and likewise for j1 == the previous j2. - -The tags are strings, with these meanings: - -'replace': a[i1:i2] should be replaced by b[j1:j2] -'delete': a[i1:i2] should be deleted. - Note that j1==j2 in this case. -'insert': b[j1:j2] should be inserted at a[i1:i1]. - Note that i1==i2 in this case. -'equal': a[i1:i2] == b[j1:j2] - ->>> a = "qabxcd" ->>> b = "abycdf" ->>> s = SequenceMatcher(None, a, b) ->>> for tag, i1, i2, j1, j2 in s.get_opcodes(): -... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % -... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) - delete a[0:1] (q) b[0:0] () - equal a[1:3] (ab) b[0:2] (ab) -replace a[3:4] (x) b[2:3] (y) - equal a[4:6] (cd) b[3:5] (cd) - insert a[6:6] () b[5:6] (f) -""" + Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple + has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the + tuple preceding it, and likewise for j1 == the previous j2. + + The tags are strings, with these meanings: + + 'replace': a[i1:i2] should be replaced by b[j1:j2] + 'delete': a[i1:i2] should be deleted. + Note that j1==j2 in this case. + 'insert': b[j1:j2] should be inserted at a[i1:i1]. + Note that i1==i2 in this case. + 'equal': a[i1:i2] == b[j1:j2] + + >>> a = "qabxcd" + >>> b = "abycdf" + >>> s = SequenceMatcher(None, a, b) + >>> for tag, i1, i2, j1, j2 in s.get_opcodes(): + ... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % + ... (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) + delete a[0:1] (q) b[0:0] () + equal a[1:3] (ab) b[0:2] (ab) + replace a[3:4] (x) b[2:3] (y) + equal a[4:6] (cd) b[3:5] (cd) + insert a[6:6] () b[5:6] (f) + """ + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: """Isolate change clusters by eliminating ranges with no changes. -Return a generator of groups with up to n lines of context. -Each group is in the same format as returned by get_opcodes(). 
- ->>> from pprint import pprint ->>> a = list(map(str, range(1,40))) ->>> b = a[:] ->>> b[8:8] = ['i'] # Make an insertion ->>> b[20] += 'x' # Make a replacement ->>> b[23:28] = [] # Make a deletion ->>> b[30] += 'y' # Make another replacement ->>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes())) -[[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)], - [('equal', 16, 19, 17, 20), - ('replace', 19, 20, 20, 21), - ('equal', 20, 22, 21, 23), - ('delete', 22, 27, 23, 23), - ('equal', 27, 30, 23, 26)], - [('equal', 31, 34, 27, 30), - ('replace', 34, 35, 30, 31), - ('equal', 35, 38, 31, 34)]] -""" + Return a generator of groups with up to n lines of context. + Each group is in the same format as returned by get_opcodes(). + + >>> from pprint import pprint + >>> a = list(map(str, range(1,40))) + >>> b = a[:] + >>> b[8:8] = ['i'] # Make an insertion + >>> b[20] += 'x' # Make a replacement + >>> b[23:28] = [] # Make a deletion + >>> b[30] += 'y' # Make another replacement + >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes())) + [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)], + [('equal', 16, 19, 17, 20), + ('replace', 19, 20, 20, 21), + ('equal', 20, 22, 21, 23), + ('delete', 22, 27, 23, 23), + ('equal', 27, 30, 23, 26)], + [('equal', 31, 34, 27, 30), + ('replace', 34, 35, 30, 31), + ('equal', 35, 38, 31, 34)]] + """ + def ratio(self) -> float: """Return a measure of the sequences' similarity (float in [0,1]). -Where T is the total number of elements in both sequences, and -M is the number of matches, this is 2.0*M / T. -Note that this is 1 if the sequences are identical, and 0 if -they have nothing in common. - -.ratio() is expensive to compute if you haven't already computed -.get_matching_blocks() or .get_opcodes(), in which case you may -want to try .quick_ratio() or .real_quick_ratio() first to get an -upper bound. - ->>> s = SequenceMatcher(None, "abcd", "bcde") ->>> s.ratio() -0.75 ->>> s.quick_ratio() -0.75 ->>> s.real_quick_ratio() -1.0 -""" + Where T is the total number of elements in both sequences, and + M is the number of matches, this is 2.0*M / T. + Note that this is 1 if the sequences are identical, and 0 if + they have nothing in common. + + .ratio() is expensive to compute if you haven't already computed + .get_matching_blocks() or .get_opcodes(), in which case you may + want to try .quick_ratio() or .real_quick_ratio() first to get an + upper bound. + + >>> s = SequenceMatcher(None, "abcd", "bcde") + >>> s.ratio() + 0.75 + >>> s.quick_ratio() + 0.75 + >>> s.real_quick_ratio() + 1.0 + """ + def quick_ratio(self) -> float: """Return an upper bound on ratio() relatively quickly. -This isn't defined beyond that it is an upper bound on .ratio(), and -is faster to compute. -""" + This isn't defined beyond that it is an upper bound on .ratio(), and + is faster to compute. + """ + def real_quick_ratio(self) -> float: """Return an upper bound on ratio() very quickly. -This isn't defined beyond that it is an upper bound on .ratio(), and -is faster to compute than either .ratio() or .quick_ratio(). -""" + This isn't defined beyond that it is an upper bound on .ratio(), and + is faster to compute than either .ratio() or .quick_ratio(). + """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). 
+ """ @overload def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: """Use SequenceMatcher to return list of the best "good enough" matches. -word is a sequence for which close matches are desired (typically a -string). + word is a sequence for which close matches are desired (typically a + string). -possibilities is a list of sequences against which to match word -(typically a list of strings). + possibilities is a list of sequences against which to match word + (typically a list of strings). -Optional arg n (default 3) is the maximum number of close matches to -return. n must be > 0. + Optional arg n (default 3) is the maximum number of close matches to + return. n must be > 0. -Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities -that don't score at least that similar to word are ignored. + Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities + that don't score at least that similar to word are ignored. -The best (no more than n) matches among the possibilities are returned -in a list, sorted by similarity score, most similar first. + The best (no more than n) matches among the possibilities are returned + in a list, sorted by similarity score, most similar first. + + >>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"]) + ['apple', 'ape'] + >>> import keyword as _keyword + >>> get_close_matches("wheel", _keyword.kwlist) + ['while'] + >>> get_close_matches("Apple", _keyword.kwlist) + [] + >>> get_close_matches("accept", _keyword.kwlist) + ['except'] + """ ->>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"]) -['apple', 'ape'] ->>> import keyword as _keyword ->>> get_close_matches("wheel", _keyword.kwlist) -['while'] ->>> get_close_matches("Apple", _keyword.kwlist) -[] ->>> get_close_matches("accept", _keyword.kwlist) -['except'] -""" @overload def get_close_matches( word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 @@ -401,179 +415,182 @@ def get_close_matches( class Differ: """ -Differ is a class for comparing sequences of lines of text, and -producing human-readable differences or deltas. Differ uses -SequenceMatcher both to compare sequences of lines, and to compare -sequences of characters within similar (near-matching) lines. - -Each line of a Differ delta begins with a two-letter code: - - '- ' line unique to sequence 1 - '+ ' line unique to sequence 2 - ' ' line common to both sequences - '? ' line not present in either input sequence - -Lines beginning with '? ' attempt to guide the eye to intraline -differences, and were not present in either input sequence. These lines -can be confusing if the sequences contain tab characters. - -Note that Differ makes no claim to produce a *minimal* diff. To the -contrary, minimal diffs are often counter-intuitive, because they synch -up anywhere possible, sometimes accidental matches 100 pages apart. -Restricting synch points to contiguous matches preserves some notion of -locality, at the occasional cost of producing a longer diff. - -Example: Comparing two texts. - -First we set up the texts, sequences of individual single-line strings -ending with newlines (such sequences can also be obtained from the -`readlines()` method of file-like objects): - ->>> text1 = ''' 1. Beautiful is better than ugly. -... 2. Explicit is better than implicit. -... 3. Simple is better than complex. -... 4. Complex is better than complicated. -... 
'''.splitlines(keepends=True) ->>> len(text1) -4 ->>> text1[0][-1] -'\\n' ->>> text2 = ''' 1. Beautiful is better than ugly. -... 3. Simple is better than complex. -... 4. Complicated is better than complex. -... 5. Flat is better than nested. -... '''.splitlines(keepends=True) - -Next we instantiate a Differ object: - ->>> d = Differ() - -Note that when instantiating a Differ object we may pass functions to -filter out line and character 'junk'. See Differ.__init__ for details. - -Finally, we compare the two: - ->>> result = list(d.compare(text1, text2)) - -'result' is a list of strings, so let's pretty-print it: - ->>> from pprint import pprint as _pprint ->>> _pprint(result) -[' 1. Beautiful is better than ugly.\\n', - '- 2. Explicit is better than implicit.\\n', - '- 3. Simple is better than complex.\\n', - '+ 3. Simple is better than complex.\\n', - '? ++\\n', - '- 4. Complex is better than complicated.\\n', - '? ^ ---- ^\\n', - '+ 4. Complicated is better than complex.\\n', - '? ++++ ^ ^\\n', - '+ 5. Flat is better than nested.\\n'] - -As a single multi-line string it looks like this: - ->>> print(''.join(result), end="") - 1. Beautiful is better than ugly. -- 2. Explicit is better than implicit. -- 3. Simple is better than complex. -+ 3. Simple is better than complex. -? ++ -- 4. Complex is better than complicated. -? ^ ---- ^ -+ 4. Complicated is better than complex. -? ++++ ^ ^ -+ 5. Flat is better than nested. -""" + Differ is a class for comparing sequences of lines of text, and + producing human-readable differences or deltas. Differ uses + SequenceMatcher both to compare sequences of lines, and to compare + sequences of characters within similar (near-matching) lines. + + Each line of a Differ delta begins with a two-letter code: + + '- ' line unique to sequence 1 + '+ ' line unique to sequence 2 + ' ' line common to both sequences + '? ' line not present in either input sequence + + Lines beginning with '? ' attempt to guide the eye to intraline + differences, and were not present in either input sequence. These lines + can be confusing if the sequences contain tab characters. + + Note that Differ makes no claim to produce a *minimal* diff. To the + contrary, minimal diffs are often counter-intuitive, because they synch + up anywhere possible, sometimes accidental matches 100 pages apart. + Restricting synch points to contiguous matches preserves some notion of + locality, at the occasional cost of producing a longer diff. + + Example: Comparing two texts. + + First we set up the texts, sequences of individual single-line strings + ending with newlines (such sequences can also be obtained from the + `readlines()` method of file-like objects): + + >>> text1 = ''' 1. Beautiful is better than ugly. + ... 2. Explicit is better than implicit. + ... 3. Simple is better than complex. + ... 4. Complex is better than complicated. + ... '''.splitlines(keepends=True) + >>> len(text1) + 4 + >>> text1[0][-1] + '\\n' + >>> text2 = ''' 1. Beautiful is better than ugly. + ... 3. Simple is better than complex. + ... 4. Complicated is better than complex. + ... 5. Flat is better than nested. + ... '''.splitlines(keepends=True) + + Next we instantiate a Differ object: + + >>> d = Differ() + + Note that when instantiating a Differ object we may pass functions to + filter out line and character 'junk'. See Differ.__init__ for details. 
+ + Finally, we compare the two: + + >>> result = list(d.compare(text1, text2)) + + 'result' is a list of strings, so let's pretty-print it: + + >>> from pprint import pprint as _pprint + >>> _pprint(result) + [' 1. Beautiful is better than ugly.\\n', + '- 2. Explicit is better than implicit.\\n', + '- 3. Simple is better than complex.\\n', + '+ 3. Simple is better than complex.\\n', + '? ++\\n', + '- 4. Complex is better than complicated.\\n', + '? ^ ---- ^\\n', + '+ 4. Complicated is better than complex.\\n', + '? ++++ ^ ^\\n', + '+ 5. Flat is better than nested.\\n'] + + As a single multi-line string it looks like this: + + >>> print(''.join(result), end="") + 1. Beautiful is better than ugly. + - 2. Explicit is better than implicit. + - 3. Simple is better than complex. + + 3. Simple is better than complex. + ? ++ + - 4. Complex is better than complicated. + ? ^ ---- ^ + + 4. Complicated is better than complex. + ? ++++ ^ ^ + + 5. Flat is better than nested. + """ + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: """ -Construct a text differencer, with optional filters. - -The two optional keyword parameters are for filter functions: - -- `linejunk`: A function that should accept a single string argument, - and return true iff the string is junk. The module-level function - `IS_LINE_JUNK` may be used to filter out lines without visible - characters, except for at most one splat ('#'). It is recommended - to leave linejunk None; the underlying SequenceMatcher class has - an adaptive notion of "noise" lines that's better than any static - definition the author has ever been able to craft. - -- `charjunk`: A function that should accept a string of length 1. The - module-level function `IS_CHARACTER_JUNK` may be used to filter out - whitespace characters (a blank or tab; **note**: bad idea to include - newline in this!). Use of IS_CHARACTER_JUNK is recommended. -""" + Construct a text differencer, with optional filters. + + The two optional keyword parameters are for filter functions: + + - `linejunk`: A function that should accept a single string argument, + and return true iff the string is junk. The module-level function + `IS_LINE_JUNK` may be used to filter out lines without visible + characters, except for at most one splat ('#'). It is recommended + to leave linejunk None; the underlying SequenceMatcher class has + an adaptive notion of "noise" lines that's better than any static + definition the author has ever been able to craft. + + - `charjunk`: A function that should accept a string of length 1. The + module-level function `IS_CHARACTER_JUNK` may be used to filter out + whitespace characters (a blank or tab; **note**: bad idea to include + newline in this!). Use of IS_CHARACTER_JUNK is recommended. + """ + def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: """ -Compare two sequences of lines; generate the resulting delta. - -Each sequence must contain individual single-line strings ending with -newlines. Such sequences can be obtained from the `readlines()` method -of file-like objects. The delta generated also consists of newline- -terminated strings, ready to be printed as-is via the writelines() -method of a file-like object. - -Example: - ->>> print(''.join(Differ().compare('one\\ntwo\\nthree\\n'.splitlines(True), -... 'ore\\ntree\\nemu\\n'.splitlines(True))), -... end="") -- one -? ^ -+ ore -? ^ -- two -- three -? 
- -+ tree -+ emu -""" + Compare two sequences of lines; generate the resulting delta. + + Each sequence must contain individual single-line strings ending with + newlines. Such sequences can be obtained from the `readlines()` method + of file-like objects. The delta generated also consists of newline- + terminated strings, ready to be printed as-is via the writelines() + method of a file-like object. + + Example: + + >>> print(''.join(Differ().compare('one\\ntwo\\nthree\\n'.splitlines(True), + ... 'ore\\ntree\\nemu\\n'.splitlines(True))), + ... end="") + - one + ? ^ + + ore + ? ^ + - two + - three + ? - + + tree + + emu + """ if sys.version_info >= (3, 14): def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] | None = None) -> bool: """ -Return True for ignorable line: if `line` is blank or contains a single '#'. + Return True for ignorable line: if `line` is blank or contains a single '#'. -Examples: + Examples: ->>> IS_LINE_JUNK('\\n') -True ->>> IS_LINE_JUNK(' # \\n') -True ->>> IS_LINE_JUNK('hello\\n') -False -""" + >>> IS_LINE_JUNK('\\n') + True + >>> IS_LINE_JUNK(' # \\n') + True + >>> IS_LINE_JUNK('hello\\n') + False + """ else: def IS_LINE_JUNK(line: str, pat: Callable[[str], re.Match[str] | None] = ...) -> bool: """ -Return True for ignorable line: iff `line` is blank or contains a single '#'. + Return True for ignorable line: iff `line` is blank or contains a single '#'. -Examples: + Examples: ->>> IS_LINE_JUNK('\\n') -True ->>> IS_LINE_JUNK(' # \\n') -True ->>> IS_LINE_JUNK('hello\\n') -False -""" + >>> IS_LINE_JUNK('\\n') + True + >>> IS_LINE_JUNK(' # \\n') + True + >>> IS_LINE_JUNK('hello\\n') + False + """ def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: # ws is undocumented """ -Return True for ignorable character: iff `ch` is a space or tab. - -Examples: - ->>> IS_CHARACTER_JUNK(' ') -True ->>> IS_CHARACTER_JUNK('\\t') -True ->>> IS_CHARACTER_JUNK('\\n') -False ->>> IS_CHARACTER_JUNK('x') -False -""" + Return True for ignorable character: iff `ch` is a space or tab. + + Examples: + + >>> IS_CHARACTER_JUNK(' ') + True + >>> IS_CHARACTER_JUNK('\\t') + True + >>> IS_CHARACTER_JUNK('\\n') + False + >>> IS_CHARACTER_JUNK('x') + False + """ + def unified_diff( a: Sequence[str], b: Sequence[str], @@ -585,43 +602,44 @@ def unified_diff( lineterm: str = "\n", ) -> Iterator[str]: """ -Compare two sequences of lines; generate the delta as a unified diff. - -Unified diffs are a compact way of showing line changes and a few -lines of context. The number of context lines is set by 'n' which -defaults to three. - -By default, the diff control lines (those with ---, +++, or @@) are -created with a trailing newline. This is helpful so that inputs -created from file.readlines() result in diffs that are suitable for -file.writelines() since both the inputs and outputs have trailing -newlines. - -For inputs that do not have trailing newlines, set the lineterm -argument to "" so that the output will be uniformly newline free. - -The unidiff format normally has a header for filenames and modification -times. Any or all of these may be specified using strings for -'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -The modification times are normally expressed in the ISO 8601 format. - -Example: - ->>> for line in unified_diff('one two three four'.split(), -... 'zero one tree four'.split(), 'Original', 'Current', -... '2005-01-26 23:30:50', '2010-04-02 10:20:52', -... lineterm=''): -... 
print(line) # doctest: +NORMALIZE_WHITESPACE ---- Original 2005-01-26 23:30:50 -+++ Current 2010-04-02 10:20:52 -@@ -1,4 +1,4 @@ -+zero - one --two --three -+tree - four -""" + Compare two sequences of lines; generate the delta as a unified diff. + + Unified diffs are a compact way of showing line changes and a few + lines of context. The number of context lines is set by 'n' which + defaults to three. + + By default, the diff control lines (those with ---, +++, or @@) are + created with a trailing newline. This is helpful so that inputs + created from file.readlines() result in diffs that are suitable for + file.writelines() since both the inputs and outputs have trailing + newlines. + + For inputs that do not have trailing newlines, set the lineterm + argument to "" so that the output will be uniformly newline free. + + The unidiff format normally has a header for filenames and modification + times. Any or all of these may be specified using strings for + 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. + The modification times are normally expressed in the ISO 8601 format. + + Example: + + >>> for line in unified_diff('one two three four'.split(), + ... 'zero one tree four'.split(), 'Original', 'Current', + ... '2005-01-26 23:30:50', '2010-04-02 10:20:52', + ... lineterm=''): + ... print(line) # doctest: +NORMALIZE_WHITESPACE + --- Original 2005-01-26 23:30:50 + +++ Current 2010-04-02 10:20:52 + @@ -1,4 +1,4 @@ + +zero + one + -two + -three + +tree + four + """ + def context_diff( a: Sequence[str], b: Sequence[str], @@ -633,46 +651,47 @@ def context_diff( lineterm: str = "\n", ) -> Iterator[str]: """ -Compare two sequences of lines; generate the delta as a context diff. - -Context diffs are a compact way of showing line changes and a few -lines of context. The number of context lines is set by 'n' which -defaults to three. - -By default, the diff control lines (those with *** or ---) are -created with a trailing newline. This is helpful so that inputs -created from file.readlines() result in diffs that are suitable for -file.writelines() since both the inputs and outputs have trailing -newlines. - -For inputs that do not have trailing newlines, set the lineterm -argument to "" so that the output will be uniformly newline free. - -The context diff format normally has a header for filenames and -modification times. Any or all of these may be specified using -strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -The modification times are normally expressed in the ISO 8601 format. -If not specified, the strings default to blanks. - -Example: - ->>> print(''.join(context_diff('one\\ntwo\\nthree\\nfour\\n'.splitlines(True), -... 'zero\\none\\ntree\\nfour\\n'.splitlines(True), 'Original', 'Current')), -... end="") -*** Original ---- Current -*************** -*** 1,4 **** - one -! two -! three - four ---- 1,4 ---- -+ zero - one -! tree - four -""" + Compare two sequences of lines; generate the delta as a context diff. + + Context diffs are a compact way of showing line changes and a few + lines of context. The number of context lines is set by 'n' which + defaults to three. + + By default, the diff control lines (those with *** or ---) are + created with a trailing newline. This is helpful so that inputs + created from file.readlines() result in diffs that are suitable for + file.writelines() since both the inputs and outputs have trailing + newlines. 
+ + For inputs that do not have trailing newlines, set the lineterm + argument to "" so that the output will be uniformly newline free. + + The context diff format normally has a header for filenames and + modification times. Any or all of these may be specified using + strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. + The modification times are normally expressed in the ISO 8601 format. + If not specified, the strings default to blanks. + + Example: + + >>> print(''.join(context_diff('one\\ntwo\\nthree\\nfour\\n'.splitlines(True), + ... 'zero\\none\\ntree\\nfour\\n'.splitlines(True), 'Original', 'Current')), + ... end="") + *** Original + --- Current + *************** + *** 1,4 **** + one + ! two + ! three + four + --- 1,4 ---- + + zero + one + ! tree + four + """ + def ndiff( a: Sequence[str], b: Sequence[str], @@ -680,55 +699,56 @@ def ndiff( charjunk: Callable[[str], bool] | None = ..., ) -> Iterator[str]: """ -Compare `a` and `b` (lists of strings); return a `Differ`-style delta. - -Optional keyword parameters `linejunk` and `charjunk` are for filter -functions, or can be None: - -- linejunk: A function that should accept a single string argument and - return true iff the string is junk. The default is None, and is - recommended; the underlying SequenceMatcher class has an adaptive - notion of "noise" lines. - -- charjunk: A function that accepts a character (string of length - 1), and returns true iff the character is junk. The default is - the module-level function IS_CHARACTER_JUNK, which filters out - whitespace characters (a blank or tab; note: it's a bad idea to - include newline in this!). - -Tools/scripts/ndiff.py is a command-line front-end to this function. - -Example: - ->>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), -... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) ->>> print(''.join(diff), end="") -- one -? ^ -+ ore -? ^ -- two -- three -? - -+ tree -+ emu -""" + Compare `a` and `b` (lists of strings); return a `Differ`-style delta. + + Optional keyword parameters `linejunk` and `charjunk` are for filter + functions, or can be None: + + - linejunk: A function that should accept a single string argument and + return true iff the string is junk. The default is None, and is + recommended; the underlying SequenceMatcher class has an adaptive + notion of "noise" lines. + + - charjunk: A function that accepts a character (string of length + 1), and returns true iff the character is junk. The default is + the module-level function IS_CHARACTER_JUNK, which filters out + whitespace characters (a blank or tab; note: it's a bad idea to + include newline in this!). + + Tools/scripts/ndiff.py is a command-line front-end to this function. + + Example: + + >>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), + ... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) + >>> print(''.join(diff), end="") + - one + ? ^ + + ore + ? ^ + - two + - three + ? - + + tree + + emu + """ class HtmlDiff: """For producing HTML side by side comparison with change highlights. -This class can be used to create an HTML table (or a complete HTML file -containing the table) showing a side by side, line by line comparison -of text with inter-line and intra-line change highlights. The table can -be generated in either full or contextual difference mode. + This class can be used to create an HTML table (or a complete HTML file + containing the table) showing a side by side, line by line comparison + of text with inter-line and intra-line change highlights. 
The table can + be generated in either full or contextual difference mode. -The following methods are provided for HTML generation: + The following methods are provided for HTML generation: -make_table -- generates HTML for a single side by side table -make_file -- generates complete HTML file with a single side by side table + make_table -- generates HTML for a single side by side table + make_file -- generates complete HTML file with a single side by side table + + See tools/scripts/diff.py for an example usage of this class. + """ -See tools/scripts/diff.py for an example usage of this class. -""" def __init__( self, tabsize: int = 8, @@ -738,14 +758,15 @@ See tools/scripts/diff.py for an example usage of this class. ) -> None: """HtmlDiff instance initializer -Arguments: -tabsize -- tab stop spacing, defaults to 8. -wrapcolumn -- column number where lines are broken and wrapped, - defaults to None where lines are not wrapped. -linejunk,charjunk -- keyword arguments passed into ndiff() (used by - HtmlDiff() to generate the side by side HTML differences). See - ndiff() documentation for argument default values and descriptions. -""" + Arguments: + tabsize -- tab stop spacing, defaults to 8. + wrapcolumn -- column number where lines are broken and wrapped, + defaults to None where lines are not wrapped. + linejunk,charjunk -- keyword arguments passed into ndiff() (used by + HtmlDiff() to generate the side by side HTML differences). See + ndiff() documentation for argument default values and descriptions. + """ + def make_file( self, fromlines: Sequence[str], @@ -759,20 +780,21 @@ linejunk,charjunk -- keyword arguments passed into ndiff() (used by ) -> str: """Returns HTML file of side by side comparison with change highlights -Arguments: -fromlines -- list of "from" lines -tolines -- list of "to" lines -fromdesc -- "from" file column header string -todesc -- "to" file column header string -context -- set to True for contextual differences (defaults to False - which shows full differences). -numlines -- number of context lines. When context is set True, - controls number of lines displayed before and after the change. - When context is False, controls the number of lines to place - the "next" link anchors before the next change (so click of - "next" link jumps to just before the change). -charset -- charset of the HTML document -""" + Arguments: + fromlines -- list of "from" lines + tolines -- list of "to" lines + fromdesc -- "from" file column header string + todesc -- "to" file column header string + context -- set to True for contextual differences (defaults to False + which shows full differences). + numlines -- number of context lines. When context is set True, + controls number of lines displayed before and after the change. + When context is False, controls the number of lines to place + the "next" link anchors before the next change (so click of + "next" link jumps to just before the change). + charset -- charset of the HTML document + """ + def make_table( self, fromlines: Sequence[str], @@ -784,42 +806,43 @@ charset -- charset of the HTML document ) -> str: """Returns HTML table of side by side comparison with change highlights -Arguments: -fromlines -- list of "from" lines -tolines -- list of "to" lines -fromdesc -- "from" file column header string -todesc -- "to" file column header string -context -- set to True for contextual differences (defaults to False - which shows full differences). -numlines -- number of context lines. 
When context is set True, - controls number of lines displayed before and after the change. - When context is False, controls the number of lines to place - the "next" link anchors before the next change (so click of - "next" link jumps to just before the change). -""" + Arguments: + fromlines -- list of "from" lines + tolines -- list of "to" lines + fromdesc -- "from" file column header string + todesc -- "to" file column header string + context -- set to True for contextual differences (defaults to False + which shows full differences). + numlines -- number of context lines. When context is set True, + controls number of lines displayed before and after the change. + When context is False, controls the number of lines to place + the "next" link anchors before the next change (so click of + "next" link jumps to just before the change). + """ def restore(delta: Iterable[str], which: int) -> Iterator[str]: """ -Generate one of the two sequences that generated a delta. - -Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract -lines originating from file 1 or 2 (parameter `which`), stripping off line -prefixes. - -Examples: - ->>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), -... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) ->>> diff = list(diff) ->>> print(''.join(restore(diff, 1)), end="") -one -two -three ->>> print(''.join(restore(diff, 2)), end="") -ore -tree -emu -""" + Generate one of the two sequences that generated a delta. + + Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract + lines originating from file 1 or 2 (parameter `which`), stripping off line + prefixes. + + Examples: + + >>> diff = ndiff('one\\ntwo\\nthree\\n'.splitlines(keepends=True), + ... 'ore\\ntree\\nemu\\n'.splitlines(keepends=True)) + >>> diff = list(diff) + >>> print(''.join(restore(diff, 1)), end="") + one + two + three + >>> print(''.join(restore(diff, 2)), end="") + ore + tree + emu + """ + def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], a: Iterable[bytes | bytearray], @@ -832,11 +855,11 @@ def diff_bytes( lineterm: bytes | bytearray = b"\n", ) -> Iterator[bytes]: """ -Compare `a` and `b`, two sequences of lines represented as bytes rather -than str. This is a wrapper for `dfunc`, which is typically either -unified_diff() or context_diff(). Inputs are losslessly converted to -strings so that `dfunc` only has to worry about strings, and encoded -back to bytes on return. This is necessary to compare files with -unknown or inconsistent encoding. All other inputs (except `n`) must be -bytes rather than str. -""" + Compare `a` and `b`, two sequences of lines represented as bytes rather + than str. This is a wrapper for `dfunc`, which is typically either + unified_diff() or context_diff(). Inputs are losslessly converted to + strings so that `dfunc` only has to worry about strings, and encoded + back to bytes on return. This is necessary to compare files with + unknown or inconsistent encoding. All other inputs (except `n`) must be + bytes rather than str. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi index cc67e045679e2..6da3dc252cf51 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/dis.pyi @@ -1,5 +1,5 @@ -"""Disassembler of Python byte code into mnemonics. 
-""" +"""Disassembler of Python byte code into mnemonics.""" + import sys import types from collections.abc import Callable, Iterator @@ -47,8 +47,8 @@ _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeTyp if sys.version_info >= (3, 11): class Positions(NamedTuple): - """Positions(lineno, end_lineno, col_offset, end_col_offset) -""" + """Positions(lineno, end_lineno, col_offset, end_col_offset)""" + lineno: int | None = None end_lineno: int | None = None col_offset: int | None = None @@ -56,8 +56,8 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 13): class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, start_offset, starts_line, line_number, label, positions, cache_info) -""" + """_Instruction(opname, opcode, arg, argval, argrepr, offset, start_offset, starts_line, line_number, label, positions, cache_info)""" + opname: str opcode: int arg: int | None @@ -73,8 +73,8 @@ if sys.version_info >= (3, 13): elif sys.version_info >= (3, 11): class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target, positions) -""" + """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target, positions)""" + opname: str opcode: int arg: int | None @@ -87,8 +87,8 @@ elif sys.version_info >= (3, 11): else: class _Instruction(NamedTuple): - """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target) -""" + """_Instruction(opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target)""" + opname: str opcode: int arg: int | None @@ -102,66 +102,69 @@ if sys.version_info >= (3, 12): class Instruction(_Instruction): """Details for a bytecode operation. -Defined fields: - opname - human readable name for operation - opcode - numeric code for operation - arg - numeric argument to operation (if any), otherwise None - argval - resolved arg value (if known), otherwise same as arg - argrepr - human readable description of operation argument - offset - start index of operation within bytecode sequence - start_offset - start index of operation within bytecode sequence including extended args if present; - otherwise equal to Instruction.offset - starts_line - True if this opcode starts a source line, otherwise False - line_number - source line number associated with this opcode (if any), otherwise None - label - A label if this instruction is a jump target, otherwise None - positions - Optional dis.Positions object holding the span of source code - covered by this instruction - cache_info - information about the format and content of the instruction's cache - entries (if any) -""" + Defined fields: + opname - human readable name for operation + opcode - numeric code for operation + arg - numeric argument to operation (if any), otherwise None + argval - resolved arg value (if known), otherwise same as arg + argrepr - human readable description of operation argument + offset - start index of operation within bytecode sequence + start_offset - start index of operation within bytecode sequence including extended args if present; + otherwise equal to Instruction.offset + starts_line - True if this opcode starts a source line, otherwise False + line_number - source line number associated with this opcode (if any), otherwise None + label - A label if this instruction is a jump target, otherwise None + positions - Optional dis.Positions object holding the span of source code + covered by this instruction + cache_info 
- information about the format and content of the instruction's cache + entries (if any) + """ + if sys.version_info < (3, 13): def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: """Format instruction details for inclusion in disassembly output - *lineno_width* sets the width of the line number field (0 omits it) - *mark_as_current* inserts a '-->' marker arrow as part of the line - *offset_width* sets the width of the instruction offset field - """ + *lineno_width* sets the width of the line number field (0 omits it) + *mark_as_current* inserts a '-->' marker arrow as part of the line + *offset_width* sets the width of the instruction offset field + """ if sys.version_info >= (3, 13): @property def oparg(self) -> int: - """Alias for Instruction.arg. -""" + """Alias for Instruction.arg.""" + @property def baseopcode(self) -> int: """Numeric code for the base operation if operation is specialized. -Otherwise equal to Instruction.opcode. -""" + Otherwise equal to Instruction.opcode. + """ + @property def baseopname(self) -> str: """Human readable name for the base operation if operation is specialized. -Otherwise equal to Instruction.opname. -""" + Otherwise equal to Instruction.opname. + """ + @property def cache_offset(self) -> int: - """Start index of the cache entries following the operation. -""" + """Start index of the cache entries following the operation.""" + @property def end_offset(self) -> int: - """End index of the cache entries following the operation. -""" + """End index of the cache entries following the operation.""" + @property def jump_target(self) -> int: """Bytecode index of the jump target if this is a jump operation. -Otherwise return None. -""" + Otherwise return None. + """ + @property def is_jump_target(self) -> bool: - """True if other code jumps to here, otherwise False -""" + """True if other code jumps to here, otherwise False""" if sys.version_info >= (3, 14): @staticmethod def make( @@ -183,34 +186,36 @@ else: class Instruction(_Instruction): """Details for a bytecode operation - Defined fields: - opname - human readable name for operation - opcode - numeric code for operation - arg - numeric argument to operation (if any), otherwise None - argval - resolved arg value (if known), otherwise same as arg - argrepr - human readable description of operation argument - offset - start index of operation within bytecode sequence - starts_line - line started by this opcode (if any), otherwise None - is_jump_target - True if other code jumps to here, otherwise False - positions - Optional dis.Positions object holding the span of source code - covered by this instruction - """ + Defined fields: + opname - human readable name for operation + opcode - numeric code for operation + arg - numeric argument to operation (if any), otherwise None + argval - resolved arg value (if known), otherwise same as arg + argrepr - human readable description of operation argument + offset - start index of operation within bytecode sequence + starts_line - line started by this opcode (if any), otherwise None + is_jump_target - True if other code jumps to here, otherwise False + positions - Optional dis.Positions object holding the span of source code + covered by this instruction + """ + def _disassemble(self, lineno_width: int = 3, mark_as_current: bool = False, offset_width: int = 4) -> str: """Format instruction details for inclusion in disassembly output - *lineno_width* sets the width of the line number field (0 omits it) - 
*mark_as_current* inserts a '-->' marker arrow as part of the line - *offset_width* sets the width of the instruction offset field - """ + *lineno_width* sets the width of the line number field (0 omits it) + *mark_as_current* inserts a '-->' marker arrow as part of the line + *offset_width* sets the width of the instruction offset field + """ class Bytecode: """The bytecode operations of a piece of code -Instantiate this with a function, method, other compiled object, string of -code, or a code object (as returned by compile()). + Instantiate this with a function, method, other compiled object, string of + code, or a code object (as returned by compile()). + + Iterating over this yields the bytecode operations as Instruction instances. + """ -Iterating over this yields the bytecode operations as Instruction instances. -""" codeobj: types.CodeType first_line: int if sys.version_info >= (3, 14): @@ -258,42 +263,40 @@ Iterating over this yields the bytecode operations as Instruction instances. if sys.version_info >= (3, 11): @classmethod def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: - """Construct a Bytecode from the given traceback -""" + """Construct a Bytecode from the given traceback""" else: @classmethod def from_traceback(cls, tb: types.TracebackType) -> Self: - """ Construct a Bytecode from the given traceback -""" + """Construct a Bytecode from the given traceback""" def __iter__(self) -> Iterator[Instruction]: ... def info(self) -> str: - """Return formatted information about the code object. -""" + """Return formatted information about the code object.""" + def dis(self) -> str: - """Return a formatted view of the bytecode operations. -""" + """Return a formatted view of the bytecode operations.""" COMPILER_FLAG_NAMES: Final[dict[int, str]] def findlabels(code: _HaveCodeType) -> list[int]: """Detect all offsets in a byte code which are jump targets. -Return the list of offsets. + Return the list of offsets. + + """ -""" def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: """Find the offsets in a byte code which are start of lines in the source. -Generate pairs (offset, lineno) -lineno will be an integer or None the offset does not have a source line. -""" + Generate pairs (offset, lineno) + lineno will be an integer or None the offset does not have a source line. + """ + def pretty_flags(flags: int) -> str: - """Return pretty representation of code flags. -""" + """Return pretty representation of code flags.""" + def code_info(x: _HaveCodeType | str) -> str: - """Formatted details of methods, functions, or code. -""" + """Formatted details of methods, functions, or code.""" if sys.version_info >= (3, 14): # 3.14 added `show_positions` @@ -309,12 +312,13 @@ if sys.version_info >= (3, 14): ) -> None: """Disassemble classes, methods, functions, and other compiled objects. -With no argument, disassemble the last traceback. + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ -Compiled objects currently include generator objects, async generator -objects, and coroutine objects, all of which store their code object -in a special attribute. -""" def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -325,8 +329,8 @@ in a special attribute. 
show_offsets: bool = False, show_positions: bool = False, ) -> None: - """Disassemble a code object. -""" + """Disassemble a code object.""" + def distb( tb: types.TracebackType | None = None, *, @@ -336,8 +340,7 @@ in a special attribute. show_offsets: bool = False, show_positions: bool = False, ) -> None: - """Disassemble a traceback (default: last traceback). -""" + """Disassemble a traceback (default: last traceback).""" elif sys.version_info >= (3, 13): # 3.13 added `show_offsets` @@ -352,12 +355,13 @@ elif sys.version_info >= (3, 13): ) -> None: """Disassemble classes, methods, functions, and other compiled objects. -With no argument, disassemble the last traceback. + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ -Compiled objects currently include generator objects, async generator -objects, and coroutine objects, all of which store their code object -in a special attribute. -""" def disassemble( co: _HaveCodeType, lasti: int = -1, @@ -367,8 +371,8 @@ in a special attribute. adaptive: bool = False, show_offsets: bool = False, ) -> None: - """Disassemble a code object. -""" + """Disassemble a code object.""" + def distb( tb: types.TracebackType | None = None, *, @@ -377,8 +381,7 @@ in a special attribute. adaptive: bool = False, show_offsets: bool = False, ) -> None: - """Disassemble a traceback (default: last traceback). -""" + """Disassemble a traceback (default: last traceback).""" elif sys.version_info >= (3, 11): # 3.11 added `show_caches` and `adaptive` @@ -392,22 +395,22 @@ elif sys.version_info >= (3, 11): ) -> None: """Disassemble classes, methods, functions, and other compiled objects. - With no argument, disassemble the last traceback. + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ def disassemble( co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: - """Disassemble a code object. -""" + """Disassemble a code object.""" + def distb( tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: - """Disassemble a traceback (default: last traceback). -""" + """Disassemble a traceback (default: last traceback).""" else: def dis( @@ -415,18 +418,18 @@ else: ) -> None: """Disassemble classes, methods, functions, and other compiled objects. - With no argument, disassemble the last traceback. + With no argument, disassemble the last traceback. + + Compiled objects currently include generator objects, async generator + objects, and coroutine objects, all of which store their code object + in a special attribute. + """ - Compiled objects currently include generator objects, async generator - objects, and coroutine objects, all of which store their code object - in a special attribute. - """ def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: - """Disassemble a code object. 
-""" + """Disassemble a code object.""" + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: - """Disassemble a traceback (default: last traceback). -""" + """Disassemble a traceback (default: last traceback).""" if sys.version_info >= (3, 13): # 3.13 made `show_cache` `None` by default @@ -435,14 +438,14 @@ if sys.version_info >= (3, 13): ) -> Iterator[Instruction]: """Iterator for the opcodes in methods, functions or code -Generates a series of Instruction named tuples giving the details of -each operations in the supplied code. + Generates a series of Instruction named tuples giving the details of + each operations in the supplied code. -If *first_line* is not None, it indicates the line number that should -be reported for the first source line in the disassembled code. -Otherwise, the source line information (if any) is taken directly from -the disassembled code object. -""" + If *first_line* is not None, it indicates the line number that should + be reported for the first source line in the disassembled code. + Otherwise, the source line information (if any) is taken directly from + the disassembled code object. + """ elif sys.version_info >= (3, 11): def get_instructions( @@ -450,32 +453,32 @@ elif sys.version_info >= (3, 11): ) -> Iterator[Instruction]: """Iterator for the opcodes in methods, functions or code - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. + Generates a series of Instruction named tuples giving the details of + each operations in the supplied code. - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ + If *first_line* is not None, it indicates the line number that should + be reported for the first source line in the disassembled code. + Otherwise, the source line information (if any) is taken directly from + the disassembled code object. + """ else: def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: """Iterator for the opcodes in methods, functions or code - Generates a series of Instruction named tuples giving the details of - each operations in the supplied code. + Generates a series of Instruction named tuples giving the details of + each operations in the supplied code. - If *first_line* is not None, it indicates the line number that should - be reported for the first source line in the disassembled code. - Otherwise, the source line information (if any) is taken directly from - the disassembled code object. - """ + If *first_line* is not None, it indicates the line number that should + be reported for the first source line in the disassembled code. + Otherwise, the source line information (if any) is taken directly from + the disassembled code object. + """ def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: """Print details of methods, functions, or code to *file*. -If *file* is not provided, the output is printed on stdout. -""" + If *file* is not provided, the output is printed on stdout. 
+ """ disco = disassemble diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi index bce6972bf50f3..f2363986b9478 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/__init__.pyi @@ -7,6 +7,7 @@ used from a setup script as setup (...) """ + # Attempts to improve these stubs are probably not the best use of time: # - distutils is deleted in Python 3.12 and newer # - Most users already do not use stdlib distutils, due to setuptools monkeypatching diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi index 6a673bc8b99ed..970c5623cddf5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/_msvccompiler.pyi @@ -6,6 +6,7 @@ for Microsoft Visual Studio 2015. The module is compatible with VS 2015 and later. You can find legacy support for older versions in distutils.msvc9compiler and distutils.msvccompiler. """ + from _typeshed import Incomplete from distutils.ccompiler import CCompiler from typing import ClassVar, Final @@ -15,8 +16,9 @@ PLAT_TO_VCVARS: Final[dict[str, str]] class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class. -""" + as defined by the CCompiler abstract class. + """ + compiler_type: ClassVar[str] executables: ClassVar[dict[Incomplete, Incomplete]] res_extension: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi index 22b3a7afb4bc3..da8de7406d2cb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/archive_util.pyi @@ -3,6 +3,7 @@ Utility functions for creating archive files (tarballs, zip files, that sort of thing). """ + from _typeshed import StrOrBytesPath, StrPath from typing import Literal, overload @@ -33,6 +34,7 @@ def make_archive( 'owner' and 'group' are used when creating a tar archive. By default, uses the current owner and group. """ + @overload def make_archive( base_name: StrPath, @@ -68,6 +70,7 @@ def make_tarball( Returns the output filename. """ + def make_zipfile(base_name: str, base_dir: str, verbose: bool | Literal[0, 1] = 0, dry_run: bool | Literal[0, 1] = 0) -> str: """Create a zip file from all the files under 'base_dir'. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi index 4add3a267b923..562f079f81223 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/bcppcompiler.pyi @@ -3,6 +3,7 @@ Contains BorlandCCompiler, an implementation of the abstract CCompiler class for the Borland C++ compiler. 
""" + from distutils.ccompiler import CCompiler class BCPPCompiler(CCompiler): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi index 017f0e2c8bbd3..3b6671e662f6c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/ccompiler.pyi @@ -3,6 +3,7 @@ Contains CCompiler, an abstract base class that defines the interface for the Distutils compiler abstraction model. """ + from _typeshed import BytesPath, StrPath, Unused from collections.abc import Callable, Iterable, Sequence from distutils.file_util import _BytesPathT, _StrPathT @@ -21,6 +22,7 @@ def gen_lib_options( directories. Returns a list of command-line options suitable for use with some compiler (depending on the two format strings passed in). """ + def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: """Generate C pre-processor options (-D, -U, -I) as used by at least two types of compilers: the typical Unix compiler and Visual C++. @@ -31,16 +33,18 @@ def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> lis of command-line options suitable for either Unix compilers or Visual C++. """ + def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: """Determine the default compiler to use for the given platform. - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. - The default values are os.name and sys.platform in case the - parameters are not given. + The default values are os.name and sys.platform in case the + parameters are not given. """ + def new_compiler( plat: str | None = None, compiler: str | None = None, @@ -58,6 +62,7 @@ def new_compiler( Microsoft compiler object under Unix -- if you supply a value for 'compiler', 'plat' is ignored. """ + def show_compilers() -> None: """Print list of available compilers (used by the "--help-compiler" options to "build", "build_ext", "build_clib"). @@ -76,6 +81,7 @@ class CCompiler: variability in how individual files are treated, most of those attributes may be varied on a per-compilation or per-link basis. """ + dry_run: bool force: bool verbose: bool @@ -95,6 +101,7 @@ class CCompiler: the order in which they are supplied by successive calls to 'add_include_dir()'. """ + def set_include_dirs(self, dirs: list[str]) -> None: """Set the list of directories that will be searched to 'dirs' (a list of strings). Overrides any preceding calls to @@ -103,6 +110,7 @@ class CCompiler: any list of standard include directories that the compiler may search by default. """ + def add_library(self, libname: str) -> None: """Add 'libname' to the list of libraries that will be included in all links driven by this compiler object. Note that 'libname' @@ -117,33 +125,39 @@ class CCompiler: names; the linker will be instructed to link against libraries as many times as they are mentioned. """ + def set_libraries(self, libnames: list[str]) -> None: """Set the list of libraries to be included in all links driven by this compiler object to 'libnames' (a list of strings). 
This does not affect any standard system libraries that the linker may include by default. """ + def add_library_dir(self, dir: str) -> None: """Add 'dir' to the list of directories that will be searched for libraries specified to 'add_library()' and 'set_libraries()'. The linker will be instructed to search for libraries in the order they are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. """ + def set_library_dirs(self, dirs: list[str]) -> None: """Set the list of library search directories to 'dirs' (a list of strings). This does not affect any standard library search path that the linker may search by default. """ + def add_runtime_library_dir(self, dir: str) -> None: """Add 'dir' to the list of directories that will be searched for shared libraries at runtime. """ + def set_runtime_library_dirs(self, dirs: list[str]) -> None: """Set the list of directories to search for shared libraries at runtime to 'dirs' (a list of strings). This does not affect any standard search path that the runtime linker may search by default. """ + def define_macro(self, name: str, value: str | None = None) -> None: """Define a preprocessor macro for all compilations driven by this compiler object. The optional parameter 'value' should be a @@ -151,6 +165,7 @@ class CCompiler: without an explicit value and the exact outcome depends on the compiler used (XXX true? does ANSI say anything about this?) """ + def undefine_macro(self, name: str) -> None: """Undefine a preprocessor macro for all compilations driven by this compiler object. If the same macro is defined by @@ -160,22 +175,26 @@ class CCompiler: per-compilation basis (ie. in the call to 'compile()'), then that takes precedence. """ + def add_link_object(self, object: str) -> None: """Add 'object' to the list of object files (or analogues, such as explicitly named library files or the output of "resource compilers") to be included in every link driven by this compiler object. """ + def set_link_objects(self, objects: list[str]) -> None: """Set the list of object files (or analogues) to be included in every link to 'objects'. This does not affect any standard object files that the linker may include by default (such as system libraries). """ + def detect_language(self, sources: str | list[str]) -> str | None: """Detect the language of a given file, or list of files. Uses language_map, and language_order to do the job. """ + def find_library_file(self, dirs: list[str], lib: str, debug: bool | Literal[0, 1] = 0) -> str | None: """Search the specified list of directories for a static or shared library file 'lib' and return the full path to that file. If @@ -183,6 +202,7 @@ class CCompiler: the current platform). Return None if 'lib' wasn't found in any of the specified directories. """ + def has_function( self, funcname: str, @@ -195,18 +215,22 @@ class CCompiler: the current platform. The optional arguments can be used to augment the compilation environment. """ + def library_dir_option(self, dir: str) -> str: """Return the compiler option to add 'dir' to the list of directories searched for libraries. """ + def library_option(self, lib: str) -> str: """Return the compiler option to add 'lib' to the list of libraries linked into the shared library or executable. """ + def runtime_library_dir_option(self, dir: str) -> str: """Return the compiler option to add 'dir' to the list of directories searched for runtime libraries. 
""" + def set_executables(self, **args: str) -> None: """Define the executables (and options for them) that will be run to perform the various stages of compilation. The exact set of @@ -224,6 +248,7 @@ class CCompiler: backslashes can override this. See 'distutils.util.split_quoted()'.) """ + def compile( self, sources: Sequence[StrPath], @@ -283,6 +308,7 @@ class CCompiler: Raises CompileError on failure. """ + def create_static_lib( self, objects: list[str], @@ -313,6 +339,7 @@ class CCompiler: Raises LibError on failure. """ + def link( self, target_desc: str, @@ -372,6 +399,7 @@ class CCompiler: Raises LinkError on failure. """ + def link_executable( self, objects: list[str], @@ -433,6 +461,7 @@ class CCompiler: Raises PreprocessError on failure. """ + @overload def executable_filename(self, basename: str, strip_dir: Literal[0, False] = 0, output_dir: StrPath = "") -> str: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi index cb48c0d275bf5..9adbe4377ddb6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cmd.pyi @@ -3,6 +3,7 @@ Provides the Command class, the base class for the command classes in the distutils.command package. """ + from _typeshed import BytesPath, StrOrBytesPath, StrPath, Unused from abc import abstractmethod from collections.abc import Callable, Iterable @@ -49,6 +50,7 @@ class Command: options, is the 'run()' method, which must also be implemented by every command class. """ + dry_run: bool | Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run distribution: Distribution # Any to work around variance issues @@ -59,6 +61,7 @@ class Command: initializer and depends on the actual command being instantiated. """ + @abstractmethod def initialize_options(self) -> None: """Set default values for all the options that this command @@ -70,6 +73,7 @@ class Command: This method must be implemented by all command classes. """ + @abstractmethod def finalize_options(self) -> None: """Set final values for all the options that this command supports. @@ -82,6 +86,7 @@ class Command: This method must be implemented by all command classes. """ + @abstractmethod def run(self) -> None: """A command's raison d'etre: carry out the action it exists to @@ -93,27 +98,32 @@ class Command: This method must be implemented by all command classes. """ + def announce(self, msg: str, level: int = 1) -> None: """If the current verbosity level is of greater than or equal to 'level' print 'msg' to stdout. """ + def debug_print(self, msg: str) -> None: """Print 'msg' to stdout if the global DEBUG (taken from the DISTUTILS_DEBUG environment variable) flag is true. """ + def ensure_string(self, option: str, default: str | None = None) -> None: """Ensure that 'option' is a string; if not defined, set it to 'default'. """ + def ensure_string_list(self, option: str) -> None: """Ensure that 'option' is a list of strings. If 'option' is currently a string, we split it either on /,\\s*/ or /\\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. """ + def ensure_filename(self, option: str) -> None: - """Ensure that 'option' is the name of an existing file. -""" + """Ensure that 'option' is the name of an existing file.""" + def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... 
def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: @@ -138,6 +148,7 @@ class Command: 'command', call its 'ensure_finalized()' method, and return the finalized command object. """ + @overload def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ... @overload @@ -245,6 +256,7 @@ class Command: Distribution, which creates and finalizes the command object if necessary and then invokes its 'run()' method. """ + def get_sub_commands(self) -> list[str]: """Determine the sub-commands that are relevant in the current distribution (ie., that need to be run). This is based on the @@ -252,6 +264,7 @@ class Command: a method that we call to determine if the subcommand needs to be run for the current distribution. Return a list of command names. """ + def warn(self, msg: str) -> None: ... def execute( self, func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], msg: str | None = None, level: int = 1 @@ -270,7 +283,8 @@ class Command: """Copy a file respecting verbose, dry-run and force flags. (The former two default to whatever is in the Distribution object, and the latter defaults to false for commands that don't define it.) -""" + """ + @overload def copy_file( self, @@ -293,15 +307,16 @@ class Command: """Copy an entire directory tree respecting verbose, dry-run, and force flags. """ + @overload def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: - """Move a file respecting dry-run flag. -""" + """Move a file respecting dry-run flag.""" + @overload def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ... def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: - """Spawn an external command respecting dry-run flag. -""" + """Spawn an external command respecting dry-run flag.""" + @overload def make_archive( self, @@ -340,5 +355,6 @@ class Command: and it is true, then the command is unconditionally run -- does no timestamp checks. """ + def ensure_finalized(self) -> None: ... def dump_options(self, header=None, indent: str = "") -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi index 97efecec9fe65..b22cd819f98ba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/__init__.pyi @@ -3,6 +3,7 @@ Package containing implementation of all the standard Distutils commands. """ + import sys from . import ( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi index 14f971394cfcd..36568ce343bb2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'bdist' command (create a built [binary] distribution). """ + from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -10,8 +11,7 @@ from typing import ClassVar from ..cmd import Command def show_formats() -> None: - """Print list of available formats (arguments to "--format" option). 
- """ + """Print list of available formats (arguments to "--format" option).""" class bdist(Command): description: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi index 2cf149b6b06e5..3a44a95549fe7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_dumb.pyi @@ -4,6 +4,7 @@ Implements the Distutils 'bdist_dumb' command (create a "dumb" built distribution -- i.e., just an archive to be unpacked under $prefix or $exec_prefix). """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi index f1848cd457e56..f37d75b54528a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -1,6 +1,7 @@ """ Implements the bdist_msi command. """ + import sys from _typeshed import Incomplete from typing import ClassVar, Literal @@ -12,41 +13,46 @@ if sys.platform == "win32": class PyDialog(Dialog): """Dialog class with a fixed layout: controls at the top, then a ruler, - then a list of buttons: back, next, cancel. Optionally a bitmap at the - left. -""" + then a list of buttons: back, next, cancel. Optionally a bitmap at the + left. + """ + def __init__(self, *args, **kw) -> None: """Dialog(database, name, x, y, w, h, attributes, title, first, - default, cancel, bitmap=true) -""" + default, cancel, bitmap=true) + """ + def title(self, title) -> None: - """Set the title text of the dialog at the top. -""" + """Set the title text of the dialog at the top.""" + def back(self, title, next, name: str = "Back", active: bool | Literal[0, 1] = 1) -> Control: """Add a back button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated + """ - Return the button, so that events can be associated -""" def cancel(self, title, next, name: str = "Cancel", active: bool | Literal[0, 1] = 1) -> Control: """Add a cancel button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated + """ - Return the button, so that events can be associated -""" def next(self, title, next, name: str = "Next", active: bool | Literal[0, 1] = 1) -> Control: """Add a Next button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. + its name in the Control table, possibly initially disabled. + + Return the button, so that events can be associated + """ - Return the button, so that events can be associated -""" def xbutton(self, name, title, next, xpos) -> Control: """Add a button with a given title, the tab-next button, - its name in the Control table, giving its x position; the - y-position is aligned with the other buttons. + its name in the Control table, giving its x position; the + y-position is aligned with the other buttons. 
- Return the button, so that events can be associated -""" + Return the button, so that events can be associated + """ class bdist_msi(Command): description: str @@ -75,14 +81,15 @@ if sys.platform == "win32": def add_find_python(self) -> None: """Adds code to the installer to compute the location of Python. - Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the - registry for each version of Python. + Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the + registry for each version of Python. - Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, - else from PYTHON.MACHINE.X.Y. + Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, + else from PYTHON.MACHINE.X.Y. + + Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe + """ - Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe -""" def add_scripts(self) -> None: ... def add_ui(self) -> None: ... def get_installer_filename(self, fullname): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi index 0b3db82337b71..83432461226d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_rpm.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'bdist_rpm' command (create RPM source and binary distributions). """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi index 52c7e3ada9972..11be78a8f729a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'bdist_wininst' command: create a windows installer exe-program. """ + from _typeshed import StrOrBytesPath from distutils.cmd import Command from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi index 0e4609dd4d5c7..be3edede3e55c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'build' command. """ + from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi index 135365f2add16..dd78603653d31 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_clib.pyi @@ -4,6 +4,7 @@ Implements the Distutils 'build_clib' command, to build a C/C++ library that is included in the module distribution and needed by an extension module. """ + from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -39,6 +40,7 @@ class build_clib(Command): Raise DistutilsSetupError if the structure is invalid anywhere; just returns otherwise. """ + def get_library_names(self): ... def get_source_files(self): ... def build_libraries(self, libraries) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi index 797b1909a847a..9b70452720373 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_ext.pyi @@ -4,6 +4,7 @@ Implements the Distutils 'build_ext' command, for building extension modules (currently limited to C extensions, should accommodate C++ extensions ASAP). """ + from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import ClassVar @@ -54,6 +55,7 @@ class build_ext(Command): Raise DistutilsSetupError if the structure is invalid anywhere; just returns otherwise. """ + def get_source_files(self): ... def get_outputs(self): ... def build_extensions(self) -> None: ... @@ -64,33 +66,39 @@ class build_ext(Command): return a modified 'sources' list with SWIG source files replaced by the generated C (or C++) files. """ + def find_swig(self): """Return the name of the SWIG executable. On Unix, this is just "swig" -- it should be in the PATH. Tries a bit harder on Windows. """ + def get_ext_fullpath(self, ext_name: str) -> str: """Returns the path of the filename for a given extension. The file is located in `build_lib` or directly in the package (inplace option). """ + def get_ext_fullname(self, ext_name: str) -> str: """Returns the fullname of a given extension name. Adds the `package.` prefix -""" + """ + def get_ext_filename(self, ext_name: str) -> str: """Convert the name of an extension (eg. "foo.bar") into the name of the file from which it will be loaded (eg. "foo/bar.so", or "foo\\bar.pyd"). """ + def get_export_symbols(self, ext): """Return the list of symbols that a shared extension has to export. This either uses 'ext.export_symbols' or, if it's not provided, "PyInit_" + module_name. Only relevant on Windows, where the .pyd file (DLL) must export the module "PyInit_" function. """ + def get_libraries(self, ext): """Return the list of libraries to link against when building a shared extension. On most platforms, this is just 'ext.libraries'; diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi index 36ca381b90cc0..c35514d105256 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_py.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'build_py' command. """ + from _typeshed import Incomplete from typing import ClassVar, Literal @@ -27,19 +28,20 @@ class build_py(Command): def finalize_options(self) -> None: ... def run(self) -> None: ... def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples -""" + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir' -""" + """Return filenames for package's data files in 'src_dir'""" + def build_package_data(self) -> None: - """Copy data files into build directory -""" + """Copy data files into build directory""" + def get_package_dir(self, package): """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any). 
-""" + distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any). + """ + def check_package(self, package, package_dir): ... def check_module(self, module, module_file): ... def find_package_modules(self, package, package_dir): ... @@ -52,13 +54,15 @@ class build_py(Command): ".py" file (relative to the distribution root) that implements the module. """ + def find_all_modules(self): """Compute the list of all modules that will be built, whether they are specified one-module-at-a-time ('self.py_modules') or by whole packages ('self.packages'). Return a list of tuples (package, module, module_file), just like 'find_modules()' and 'find_package_modules()' do. -""" + """ + def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... def get_outputs(self, include_bytecode: bool | Literal[0, 1] = 1) -> list[str]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi index d655f16128620..201ef5dd9e2b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/build_scripts.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'build_scripts' command. """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi index 5bc852788a4ec..e3c560ccff01b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/check.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'check' command. """ + from _typeshed import Incomplete from typing import Any, ClassVar, Final, Literal from typing_extensions import TypeAlias @@ -30,8 +31,8 @@ class SilentReporter(_Reporter): HAS_DOCUTILS: Final[bool] class check(Command): - """This command checks the meta-data of the package. - """ + """This command checks the meta-data of the package.""" + description: str user_options: ClassVar[list[tuple[str, str, str]]] boolean_options: ClassVar[list[str]] @@ -39,15 +40,15 @@ class check(Command): metadata: int strict: int def initialize_options(self) -> None: - """Sets default values for options. -""" + """Sets default values for options.""" + def finalize_options(self) -> None: ... def warn(self, msg): - """Counts the number of warnings that occurs. -""" + """Counts the number of warnings that occurs.""" + def run(self) -> None: - """Runs the command. -""" + """Runs the command.""" + def check_metadata(self) -> None: """Ensures that all required elements of meta-data are supplied. @@ -59,6 +60,6 @@ class check(Command): Warns if any are missing. """ + def check_restructuredtext(self) -> None: - """Checks if the long string fields are reST-compliant. -""" + """Checks if the long string fields are reST-compliant.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi index 1cdcc0f853d16..363e24eeaff61 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/clean.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'clean' command. 
""" + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi index bb9994ac2cf87..79b5bac38a316 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/config.pyi @@ -8,6 +8,7 @@ list of standard commands. Also, this is a good place to put common configure-like tasks: "try to compile this C code", or "figure out where this header file lives". """ + from _typeshed import StrOrBytesPath from collections.abc import Sequence from re import Pattern @@ -46,6 +47,7 @@ class config(Command): preprocessor succeeded, false if there were any errors. ('body' probably isn't of much use, but what the heck.) """ + def search_cpp( self, pattern: Pattern[str] | str, @@ -61,12 +63,14 @@ class config(Command): preprocesses an empty file -- which can be useful to determine the symbols the preprocessor and compiler set by default. """ + def try_compile( self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: """Try to compile a source file built from 'body' and 'headers'. Return true on success, false otherwise. """ + def try_link( self, body: str, @@ -80,6 +84,7 @@ class config(Command): 'headers', to executable form. Return true on success, false otherwise. """ + def try_run( self, body: str, @@ -93,6 +98,7 @@ class config(Command): built from 'body' and 'headers'. Return true on success, false otherwise. """ + def check_func( self, func: str, @@ -116,6 +122,7 @@ class config(Command): calls it. 'libraries' and 'library_dirs' are used when linking. """ + def check_lib( self, library: str, @@ -132,6 +139,7 @@ class config(Command): 'other_libraries' will be included in the link, in case 'library' has symbols that depend on other libraries. """ + def check_header( self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi index 20112feb6598b..3d32c66f9b15d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'install' command. """ + import sys from _typeshed import Incomplete from collections.abc import Callable @@ -48,73 +49,75 @@ class install(Command): build_lib: Incomplete record: Incomplete def initialize_options(self) -> None: - """Initializes options. -""" + """Initializes options.""" config_vars: Incomplete install_libbase: Incomplete def finalize_options(self) -> None: - """Finalizes options. -""" + """Finalizes options.""" + def dump_dirs(self, msg) -> None: - """Dumps the list of user options. -""" + """Dumps the list of user options.""" + def finalize_unix(self) -> None: - """Finalizes options for posix platforms. -""" + """Finalizes options for posix platforms.""" + def finalize_other(self) -> None: - """Finalizes options for non-posix platforms -""" + """Finalizes options for non-posix platforms""" + def select_scheme(self, name) -> None: - """Sets the install directories by applying the install schemes. 
-""" + """Sets the install directories by applying the install schemes.""" + def expand_basedirs(self) -> None: """Calls `os.path.expanduser` on install_base, install_platbase and root. -""" + """ + def expand_dirs(self) -> None: - """Calls `os.path.expanduser` on install dirs. -""" + """Calls `os.path.expanduser` on install dirs.""" + def convert_paths(self, *names) -> None: - """Call `convert_path` over `names`. -""" + """Call `convert_path` over `names`.""" path_file: Incomplete extra_dirs: Incomplete def handle_extra_path(self) -> None: - """Set `path_file` and `extra_dirs` using `extra_path`. -""" + """Set `path_file` and `extra_dirs` using `extra_path`.""" + def change_roots(self, *names) -> None: - """Change the install directories pointed by name using root. -""" + """Change the install directories pointed by name using root.""" + def create_home_path(self) -> None: - """Create directories under ~. -""" + """Create directories under ~.""" + def run(self) -> None: - """Runs the command. -""" + """Runs the command.""" + def create_path_file(self) -> None: - """Creates the .pth file -""" + """Creates the .pth file""" + def get_outputs(self): - """Assembles the outputs of all the sub-commands. -""" + """Assembles the outputs of all the sub-commands.""" + def get_inputs(self): - """Returns the inputs of all the sub-commands -""" + """Returns the inputs of all the sub-commands""" + def has_lib(self): """Returns true if the current distribution has any Python modules to install. -""" + """ + def has_headers(self): """Returns true if the current distribution has any headers to install. -""" + """ + def has_scripts(self): """Returns true if the current distribution has any scripts to. install. -""" + """ + def has_data(self): """Returns true if the current distribution has any data to. install. -""" + """ # Any to work around variance issues sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi index 3c41c12e33316..67c031a601cda 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_data.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'install_data' command, for installing platform-independent data files. """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi index 7e09ded1a2d38..6746e210fff9a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_egg_info.pyi @@ -3,14 +3,15 @@ Implements the Distutils 'install_egg_info' command, for installing a package's PKG-INFO metadata. """ + from _typeshed import Incomplete from typing import ClassVar from ..cmd import Command class install_egg_info(Command): - """Install an .egg-info file for the package -""" + """Install an .egg-info file for the package""" + description: ClassVar[str] user_options: ClassVar[list[tuple[str, str, str]]] install_dir: Incomplete @@ -26,12 +27,14 @@ def safe_name(name): Any runs of non-alphanumeric/. characters are replaced with a single '-'. 
""" + def safe_version(version): """Convert an arbitrary string to a standard version string Spaces become dots, and all other non-alphanumeric characters become dashes, with runs of multiple dashes condensed to a single dash. """ + def to_filename(name): """Convert a project or version name to its filename-escaped form diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi index 60a4403d280f5..f0eae22e2653a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_headers.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'install_headers' command, to install C/C++ header files to the Python include directory. """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi index 7cd7a3c05dc91..22ce9de26e229 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_lib.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'install_lib' command (install all Python modules). """ + from _typeshed import Incomplete from typing import ClassVar, Final @@ -32,6 +33,7 @@ class install_lib(Command): were actually run. Not affected by the "dry-run" flag or whether modules have actually been built yet. """ + def get_inputs(self): """Get the list of files that are input to this command, ie. the files that get installed as they are named in the build tree. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi index b79ad99524bbd..702c1c644b105 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/install_scripts.pyi @@ -3,6 +3,7 @@ Implements the Distutils 'install_scripts' command, for installing Python scripts. """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi index 92082edf5c54b..d7491aa079555 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/register.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'register' command (register with the repository). """ + from collections.abc import Callable from typing import Any, ClassVar @@ -17,43 +18,43 @@ class register(PyPIRCCommand): def finalize_options(self) -> None: ... def run(self) -> None: ... def check_metadata(self) -> None: - """Deprecated API. -""" + """Deprecated API.""" + def classifiers(self) -> None: - """ Fetch the list of classifiers from the server. - """ + """Fetch the list of classifiers from the server.""" + def verify_metadata(self) -> None: - """ Send the metadata to the package index server to be checked. - """ + """Send the metadata to the package index server to be checked.""" + def send_metadata(self) -> None: - """ Send the metadata to the package index server. + """Send the metadata to the package index server. - Well, do the following: - 1. 
figure who the user is, and then - 2. send the data as a Basic auth'ed POST. + Well, do the following: + 1. figure who the user is, and then + 2. send the data as a Basic auth'ed POST. - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: - [distutils] - index-servers = - pypi + [distutils] + index-servers = + pypi - [pypi] - username: fred - password: sekrit + [pypi] + username: fred + password: sekrit - Otherwise, to figure who the user is, we offer the user three - choices: + Otherwise, to figure who the user is, we offer the user three + choices: - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. """ + def build_post_data(self, action): ... def post_to_server(self, data, auth=None): - """ Post a query to the server, and return a string response. - """ + """Post a query to the server, and return a string response.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi index 74210597cd9df..e82c0ead81d3a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/sdist.pyi @@ -2,6 +2,7 @@ Implements the Distutils 'sdist' command (create a source distribution). """ + from _typeshed import Incomplete, Unused from collections.abc import Callable from typing import Any, ClassVar @@ -19,7 +20,7 @@ class sdist(Command): """Callable used for the check sub-command. Placed here so user_options can view it -""" + """ user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], Unused]]]] @@ -45,8 +46,8 @@ class sdist(Command): filelist: Incomplete def run(self) -> None: ... def check_metadata(self) -> None: - """Deprecated API. -""" + """Deprecated API.""" + def get_file_list(self) -> None: """Figure out the list of files to include in the source distribution, and put it in 'self.filelist'. This might involve @@ -54,6 +55,7 @@ class sdist(Command): reading the manifest, or just using the default file set -- it all depends on the user's options. """ + def add_defaults(self) -> None: """Add all the default files to self.filelist: - README or README.txt @@ -68,12 +70,14 @@ class sdist(Command): Warns if (README or README.txt) or setup.py are missing; everything else is optional. """ + def read_template(self) -> None: """Read and parse manifest template file named by self.template. (usually "MANIFEST.in") The parsing and processing is done by 'self.filelist', which updates itself accordingly. 
""" + def prune_file_list(self) -> None: """Prune off branches that might slip into the file list as created by 'read_template()', but really don't belong there: @@ -82,16 +86,19 @@ class sdist(Command): previously with --keep-temp, or it aborted) * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories """ + def write_manifest(self) -> None: """Write the file list in 'self.filelist' (presumably as filled in by 'add_defaults()' and 'read_template()') to the manifest file named by 'self.manifest'. """ + def read_manifest(self) -> None: """Read the manifest file (named by 'self.manifest') and use it to fill in 'self.filelist', the list of files to include in the source distribution. """ + def make_release_tree(self, base_dir, files) -> None: """Create the directory tree that will become the source distribution archive. All directories implied by the filenames in @@ -101,6 +108,7 @@ class sdist(Command): directory named after the distribution, containing only the files to be distributed. """ + def make_distribution(self) -> None: """Create the source distribution(s). First, we create the release tree with 'make_release_tree()'; then, we create all required @@ -109,6 +117,7 @@ class sdist(Command): 'self.keep_temp' is true). The list of archive files created is stored so it can be retrieved later by 'get_archive_files()'. """ + def get_archive_files(self): """Return the list of archive files created when the command was run, or None if the command hasn't run yet. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi index 1f250fdfb3106..a34bcb86f82f8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/command/upload.pyi @@ -4,6 +4,7 @@ distutils.command.upload Implements the Distutils 'upload' subcommand (upload package to a package index). """ + from _typeshed import Incomplete from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi index 7611e26e47774..66f8b382f886b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/config.pyi @@ -3,6 +3,7 @@ Provides the PyPIRCCommand class, the base class for the command classes that uses .pypirc in the distutils.command package. """ + from abc import abstractmethod from distutils.cmd import Command from typing import ClassVar @@ -10,8 +11,8 @@ from typing import ClassVar DEFAULT_PYPIRC: str class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ + """Base command that knows how to handle the .pypirc file""" + DEFAULT_REPOSITORY: ClassVar[str] DEFAULT_REALM: ClassVar[str] repository: None @@ -19,11 +20,11 @@ class PyPIRCCommand(Command): user_options: ClassVar[list[tuple[str, str | None, str]]] boolean_options: ClassVar[list[str]] def initialize_options(self) -> None: - """Initialize options. -""" + """Initialize options.""" + def finalize_options(self) -> None: - """Finalizes options. 
-""" + """Finalizes options.""" + @abstractmethod def run(self) -> None: """A command's raison d'etre: carry out the action it exists to diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi index c323fbb1fc30a..dcf1dd00a8683 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/core.pyi @@ -5,6 +5,7 @@ the 'setup' function (which is to be called from the setup script). Also indirectly provides the Distribution and Command classes, although they are really defined in distutils.dist and distutils.cmd. """ + from _typeshed import Incomplete, StrOrBytesPath from collections.abc import Mapping from distutils.cmd import Command as Command @@ -93,6 +94,7 @@ def setup( command-specific options that became attributes of each command object. """ + def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: """Run a setup script in a somewhat controlled environment, and return the Distribution instance that drives things. This is useful diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi index 8cb498c268d66..59765fecc29b8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/cygwinccompiler.pyi @@ -5,6 +5,7 @@ handles the Cygwin port of the GNU C compiler to Windows. It also contains the Mingw32CCompiler class which handles the mingw32 port of GCC (same as cygwin in no-cygwin mode). """ + from distutils.unixccompiler import UnixCCompiler from distutils.version import LooseVersion from re import Pattern @@ -16,11 +17,10 @@ def get_msvcr() -> list[str] | None: """ class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ + """Handles the Cygwin port of the GNU C compiler to Windows.""" + class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. - """ + """Handles the Mingw32 port of the GNU C compiler to Windows.""" CONFIG_H_OK: Final = "ok" CONFIG_H_NOTOK: Final = "not ok" @@ -47,10 +47,10 @@ def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: RE_VERSION: Final[Pattern[bytes]] def get_versions() -> tuple[LooseVersion | None, ...]: - """ Try to find out the versions of gcc, ld and dllwrap. + """Try to find out the versions of gcc, ld and dllwrap. If not possible it returns None for it. """ + def is_cygwingcc() -> bool: - """Try to determine if the gcc that would be used is from cygwin. -""" + """Try to determine if the gcc that would be used is from cygwin.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi index 2bfd910109cc4..ec12c28282667 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dep_util.pyi @@ -4,6 +4,7 @@ Utility functions for simple, timestamp-based dependency of files and groups of files; also, function based entirely on such timestamp dependency analysis. 
""" + from _typeshed import StrOrBytesPath, SupportsLenAndGetItem from collections.abc import Iterable from typing import Literal, TypeVar @@ -17,6 +18,7 @@ def newer(source: StrOrBytesPath, target: StrOrBytesPath) -> bool | Literal[1]: both exist and 'target' is the same age or younger than 'source'. Raise DistutilsFileError if 'source' does not exist. """ + def newer_pairwise( sources: SupportsLenAndGetItem[_SourcesT], targets: SupportsLenAndGetItem[_TargetsT] ) -> tuple[list[_SourcesT], list[_TargetsT]]: @@ -25,6 +27,7 @@ def newer_pairwise( targets) where source is newer than target, according to the semantics of 'newer()'. """ + def newer_group( sources: Iterable[StrOrBytesPath], target: StrOrBytesPath, missing: Literal["error", "ignore", "newer"] = "error" ) -> Literal[0, 1]: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi index fe718ccfa75c8..8f153f618009a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dir_util.pyi @@ -2,6 +2,7 @@ Utility functions for manipulating directories and directory trees. """ + from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal @@ -16,6 +17,7 @@ def mkpath(name: str, mode: int = 0o777, verbose: bool | Literal[0, 1] = 1, dry_ If 'verbose' is true, print a one-line summary of each mkdir to stdout. Return the list of directories actually created. """ + def create_tree( base_dir: StrPath, files: Iterable[StrPath], @@ -32,6 +34,7 @@ def create_tree( will be created if it doesn't already exist. 'mode', 'verbose' and 'dry_run' flags are as for 'mkpath()'. """ + def copy_tree( src: StrPath, dst: str, @@ -61,6 +64,7 @@ def copy_tree( (the default), the destination of the symlink will be copied. 'update' and 'verbose' are the same as for 'copy_file'. """ + def remove_tree(directory: StrOrBytesPath, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0) -> None: """Recursively remove an entire directory tree. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi index 57d3283cc952d..68dc636cc6e2f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/dist.pyi @@ -3,6 +3,7 @@ Provides the Distribution class, which represents the module distribution being built/installed/distributed. """ + from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite from collections.abc import Iterable, MutableMapping from distutils.cmd import Command @@ -39,6 +40,7 @@ class DistributionMetadata: """Dummy class to hold the distribution meta-data: name, version, author, and so forth. """ + def __init__(self, path: StrOrBytesPath | None = None) -> None: ... name: str | None version: str | None @@ -58,14 +60,14 @@ class DistributionMetadata: requires: list[str] | None obsoletes: list[str] | None def read_pkg_file(self, file: IO[str]) -> None: - """Reads the metadata values from a file object. -""" + """Reads the metadata values from a file object.""" + def write_pkg_info(self, base_dir: StrPath) -> None: - """Write the PKG-INFO file into the release tree. - """ + """Write the PKG-INFO file into the release tree.""" + def write_pkg_file(self, file: SupportsWrite[str]) -> None: - """Write the PKG-INFO format data to a file object. 
- """ + """Write the PKG-INFO format data to a file object.""" + def get_name(self) -> str: ... def get_version(self) -> str: ... def get_fullname(self) -> str: ... @@ -104,6 +106,7 @@ class Distribution: necessary to respect the expectations that 'setup' has of Distribution. See the code for 'setup()', in core.py, for details. """ + cmdclass: dict[str, type[Command]] metadata: DistributionMetadata def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: @@ -116,12 +119,14 @@ class Distribution: 'command_obj' attribute to the empty dictionary; this will be filled in with real command objects by 'parse_command_line()'. """ + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: """Get the option dictionary for a given command. If that command's option dictionary hasn't been created yet, then create it and return the new dictionary; otherwise, return the existing option dictionary. """ + def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... global_options: ClassVar[_OptionsList] common_usage: ClassVar[str] @@ -188,21 +193,25 @@ class Distribution: execute commands (currently, this only happens if user asks for help). """ + def finalize_options(self) -> None: """Set final values for all the options on the Distribution instance, analogous to the .finalize_options() method of Command objects. """ + def handle_display_options(self, option_order): """If there were any non-global "display-only" options (--help-commands or the metadata display options) on the command line, display the requested info and return true; else return false. """ + def print_command_list(self, commands, header, max_length) -> None: """Print a subset of the list of all commands -- used by 'print_commands()'. """ + def print_commands(self) -> None: """Print out a help message listing all available commands with a description of each. The list is divided into "standard commands" @@ -211,6 +220,7 @@ class Distribution: descriptions come from the command class attribute 'description'. """ + def get_command_list(self): """Get a list of (command, description) tuples. The list is divided into "standard commands" (listed in @@ -218,9 +228,9 @@ class Distribution: self.cmdclass, but not a standard command). The descriptions come from the command class attribute 'description'. """ + def get_command_packages(self): - """Return a list of packages from which commands are loaded. -""" + """Return a list of packages from which commands are loaded.""" # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst. @overload def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: @@ -229,6 +239,7 @@ class Distribution: object for 'command' is in the cache, then we either create and return it (if 'create' is true) or return None. """ + @overload def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ... @overload @@ -285,6 +296,7 @@ class Distribution: Raises DistutilsModuleError if the expected module could not be found, or if that module does not define the expected class. """ + @overload def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ... @overload @@ -345,6 +357,7 @@ class Distribution: Returns the reinitialized command object. """ + @overload def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ... 
@overload @@ -395,6 +408,7 @@ class Distribution: Uses the list of commands found and cache of command objects created by 'get_command_obj()'. """ + def run_command(self, command: str) -> None: """Do whatever it takes to run a command (including nothing at all, if the command has already been run). Specifically: if we have @@ -403,6 +417,7 @@ class Distribution: doesn't even have a command object yet, create one. Then invoke 'run()' on that command object (or an existing one). """ + def has_pure_modules(self) -> bool: ... def has_ext_modules(self) -> bool: ... def has_c_libraries(self) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi index c208bfa8046a4..c277b0a6074e7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/errors.pyi @@ -8,31 +8,36 @@ usually raised for errors that are obviously the end-user's fault This module is safe to use in "from ... import *" mode; it only exports symbols whose names start with "Distutils" and end with "Error". """ + class DistutilsError(Exception): - """The root of all Distutils evil. -""" + """The root of all Distutils evil.""" + class DistutilsModuleError(DistutilsError): """Unable to load an expected module, or to find an expected class within some module (in particular, command modules and classes). -""" + """ + class DistutilsClassError(DistutilsError): """Some command class (or possibly distribution class, if anyone feels a need to subclass Distribution) is found not to be holding up its end of the bargain, ie. implementing some part of the "command "interface. -""" + """ + class DistutilsGetoptError(DistutilsError): - """The option table provided to 'fancy_getopt()' is bogus. -""" + """The option table provided to 'fancy_getopt()' is bogus.""" + class DistutilsArgError(DistutilsError): """Raised by fancy_getopt in response to getopt.error -- ie. an error in the command line usage. -""" + """ + class DistutilsFileError(DistutilsError): """Any problems in the filesystem: expected file not found, etc. Typically this is for problems that we detect before OSError could be raised. -""" + """ + class DistutilsOptionError(DistutilsError): """Syntactic/semantic errors in command options, such as use of mutually conflicting options, or inconsistent options, @@ -40,47 +45,53 @@ class DistutilsOptionError(DistutilsError): values originating in the setup script, the command line, config files, or what-have-you -- but if we *know* something originated in the setup script, we'll raise DistutilsSetupError instead. -""" + """ + class DistutilsSetupError(DistutilsError): """For errors that can be definitely blamed on the setup script, such as invalid keyword arguments to 'setup()'. -""" + """ + class DistutilsPlatformError(DistutilsError): """We don't know how to do something on the current platform (but we do know how to do it on some platform) -- eg. trying to compile C files on a platform not supported by a CCompiler subclass. -""" + """ + class DistutilsExecError(DistutilsError): """Any problems executing an external program (such as the C compiler, when compiling C files). -""" + """ + class DistutilsInternalError(DistutilsError): """Internal inconsistencies or impossibilities (obviously, this should never be seen if the code is working!). -""" + """ + class DistutilsTemplateError(DistutilsError): - """Syntax error in a file list template. 
-""" + """Syntax error in a file list template.""" + class DistutilsByteCompileError(DistutilsError): - """Byte compile error. -""" + """Byte compile error.""" + class CCompilerError(Exception): - """Some compile/link operation failed. -""" + """Some compile/link operation failed.""" + class PreprocessError(CCompilerError): - """Failure to preprocess one or more C/C++ files. -""" + """Failure to preprocess one or more C/C++ files.""" + class CompileError(CCompilerError): - """Failure to compile one or more C/C++ source files. -""" + """Failure to compile one or more C/C++ source files.""" + class LibError(CCompilerError): """Failure to create a static library from one or more C/C++ object files. -""" + """ + class LinkError(CCompilerError): """Failure to link one or more C/C++ object files into an executable or shared library file. -""" + """ + class UnknownFileError(CCompilerError): - """Attempt to process an unknown file type. -""" + """Attempt to process an unknown file type.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi index e803c1799951b..e0253524f5b26 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/extension.pyi @@ -3,6 +3,7 @@ Provides the Extension class, used to describe C/C++ extension modules in setup scripts. """ + class Extension: """Just a collection of attributes that describes an extension module and everything needed to build it (hopefully in a portable @@ -67,6 +68,7 @@ class Extension: specifies that a build failure in the extension should not abort the build process, but simply not install the failing extension. """ + name: str sources: list[str] include_dirs: list[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi index 809333d123068..7254af3933ef3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -7,6 +7,7 @@ additional features: create a complete usage summary * options set attributes of a passed-in object """ + from collections.abc import Iterable, Mapping from getopt import _SliceableT, _StrSequenceT_co from re import Pattern @@ -31,6 +32,7 @@ class FancyGetopt: --quiet is the "negative alias" of --verbose, then "--quiet" on the command line sets 'verbose' to false """ + def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO: kinda wrong, `getopt(object=object())` is invalid @overload @@ -47,6 +49,7 @@ class FancyGetopt: 'args' is a modified copy of the passed-in 'args' list, which is left untouched. """ + @overload def getopt( self, args: _SliceableT[_StrSequenceT_co] | None, object: Any @@ -56,6 +59,7 @@ class FancyGetopt: previous run of 'getopt()'. Raises RuntimeError if 'getopt()' hasn't been called yet. """ + def generate_help(self, header: str | None = None) -> list[str]: """Generate help text (a list of strings, one per suggested line of output) from the option table for this FancyGetopt object. @@ -79,6 +83,7 @@ def wrap_text(text: str, width: int) -> list[str]: Split 'text' into multiple lines of no more than 'width' characters each, and return the list of strings that results. """ + def translate_longopt(opt: str) -> str: """Convert a long option name to a valid Python identifier by changing "-" to "_". 
@@ -87,8 +92,9 @@ def translate_longopt(opt: str) -> str: class OptionDummy: """Dummy class just used as a place to hold command-line option values as instance attributes. -""" + """ + def __init__(self, options: Iterable[str] = []) -> None: """Create a new OptionDummy instance. The attributes listed in 'options' will be initialized to None. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi index c8905305b6fa0..ba4ded2abf9f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/file_util.pyi @@ -2,6 +2,7 @@ Utility functions for operating on single files. """ + from _typeshed import BytesPath, StrOrBytesPath, StrPath from collections.abc import Iterable from typing import Literal, TypeVar, overload @@ -44,6 +45,7 @@ def copy_file( the output file, and 'copied' is true if the file was copied (or would have been copied, if 'dry_run' true). """ + @overload def copy_file( src: BytesPath, @@ -66,6 +68,7 @@ def move_file( Handles cross-device moves on Unix using 'copy_file()'. What about other systems??? """ + @overload def move_file( src: BytesPath, dst: _BytesPathT, verbose: bool | Literal[0, 1] = 1, dry_run: bool | Literal[0, 1] = 0 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi index b66f520c9b807..5e450f25478c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/filelist.pyi @@ -3,6 +3,7 @@ Provides the FileList class, used for poking about the filesystem and building lists of files. """ + from collections.abc import Iterable from re import Pattern from typing import Literal, overload @@ -22,6 +23,7 @@ class FileList: complete list of files under consideration (ie. without any filtering applied) """ + allfiles: Iterable[str] | None files: list[str] def __init__(self, warn: None = None, debug_print: None = None) -> None: ... @@ -31,6 +33,7 @@ class FileList: """Print 'msg' to stdout if the global DEBUG (taken from the DISTUTILS_DEBUG environment variable) flag is true. """ + def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... def sort(self) -> None: ... @@ -64,6 +67,7 @@ class FileList: Return True if files are found, False otherwise. """ + @overload def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -84,6 +88,7 @@ class FileList: The list 'self.files' is modified in place. Return True if files are found, False otherwise. """ + @overload def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload @@ -100,12 +105,14 @@ def findall(dir: str = ".") -> list[str]: Find all files under 'dir' and return the list of full filenames. Unless dir is '.', return full filenames with dir prepended. """ + def glob_to_re(pattern: str) -> str: """Translate a shell-like glob pattern to a regular expression; return a string containing the regex. Differs from 'fnmatch.translate()' in that '*' does not match "special characters" (which are platform-specific). 
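To make the include/exclude pattern semantics described above concrete, a small sketch; the file names are invented, and the same distutils availability caveat applies:

    from distutils.filelist import FileList, glob_to_re

    print(glob_to_re("*.py"))                    # shell-style glob rendered as a regex string

    fl = FileList()
    fl.files = ["pkg/a.py", "pkg/a.pyc", "docs/notes.txt"]
    fl.exclude_pattern(r"\.pyc$", is_regex=1)    # filters self.files in place; True if anything matched
    fl.sort()
    print(fl.files)                              # ['docs/notes.txt', 'pkg/a.py']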
""" + @overload def translate_pattern( pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 @@ -115,6 +122,7 @@ def translate_pattern( then 'pattern' is directly compiled to a regex (if it's a string) or just returned as-is (assumes it's a regex object). """ + @overload def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi index d24819777b4de..442fc5feb7824 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/log.pyi @@ -1,5 +1,5 @@ -"""A simple log mechanism styled after PEP 282. -""" +"""A simple log mechanism styled after PEP 282.""" + from typing import Any, Final DEBUG: Final = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi index 7560949a5693b..8ab7990270bb0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/msvccompiler.pyi @@ -3,9 +3,10 @@ Contains MSVCCompiler, an implementation of the abstract CCompiler class for the Microsoft Visual Studio. """ + from distutils.ccompiler import CCompiler class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class. -""" + as defined by the CCompiler abstract class. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi index 15b36ed55c10f..c2ac88c4b25d1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/spawn.pyi @@ -5,6 +5,7 @@ specific functions for launching another program in a sub-process. Also provides the 'find_executable()' to search the path for a given executable name. """ + from collections.abc import Iterable from typing import Literal @@ -29,6 +30,7 @@ def spawn( Raise DistutilsExecError if running the program fails in any way; just return on success. """ + def find_executable(executable: str, path: str | None = None) -> str | None: """Tries to find 'executable' in the directories listed in 'path'. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi index 2cf01d35a98f9..c2ba23f1607cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/sysconfig.pyi @@ -8,6 +8,7 @@ available. Written by: Fred L. Drake, Jr. Email: """ + import sys from collections.abc import Mapping from distutils.ccompiler import CCompiler @@ -29,6 +30,7 @@ def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: variable expansions; if 'vars' is the output of 'parse_makefile()', you're fine. Returns a variable-expanded version of 's'. """ + @overload @deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") def get_config_var(name: Literal["SO"]) -> int | str | None: @@ -37,6 +39,7 @@ def get_config_var(name: Literal["SO"]) -> int | str | None: Equivalent to get_config_vars().get(name) """ + @overload def get_config_var(name: str) -> int | str | None: ... 
@overload @@ -50,14 +53,15 @@ def get_config_vars() -> dict[str, str | int]: With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary. """ + @overload def get_config_vars(arg: str, /, *args: str) -> list[str | int]: ... def get_config_h_filename() -> str: - """Return the path of pyconfig.h. -""" + """Return the path of pyconfig.h.""" + def get_makefile_filename() -> str: - """Return the path of the Makefile. -""" + """Return the path of the Makefile.""" + def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = None) -> str: """Return the directory containing installed Python header files. @@ -69,6 +73,7 @@ def get_python_inc(plat_specific: bool | Literal[0, 1] = 0, prefix: str | None = If 'prefix' is supplied, use it instead of sys.base_prefix or sys.base_exec_prefix -- i.e., ignore 'plat_specific'. """ + def get_python_lib( plat_specific: bool | Literal[0, 1] = 0, standard_lib: bool | Literal[0, 1] = 0, prefix: str | None = None ) -> str: @@ -85,6 +90,7 @@ def get_python_lib( If 'prefix' is supplied, use it instead of sys.base_prefix or sys.base_exec_prefix -- i.e., ignore 'plat_specific'. """ + def customize_compiler(compiler: CCompiler) -> None: """Do any platform-specific customization of a CCompiler instance. @@ -95,6 +101,6 @@ def customize_compiler(compiler: CCompiler) -> None: if sys.version_info < (3, 10): def get_python_version() -> str: """Return a string containing the major and minor Python version, - leaving off the patchlevel. Sample return values could be '1.5' - or '2.2'. - """ + leaving off the patchlevel. Sample return values could be '1.5' + or '2.2'. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi index b3897c2310de9..7c9c6c65c699b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/text_file.pyi @@ -4,67 +4,69 @@ provides the TextFile class, which gives an interface to text files that (optionally) takes care of stripping comments, ignoring blank lines, and joining lines with backslashes. """ + from typing import IO, Literal class TextFile: """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. 
- - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not. -""" + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true} + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) 
+ join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. + collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method! In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not. + """ + def __init__( self, filename: str | None = None, @@ -78,44 +80,50 @@ class TextFile: collapse_join: bool | Literal[0, 1] = ..., ) -> None: """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'. -""" + (a string) and 'file' (a file-like object) must be supplied. + They keyword argument options are described above and affect + the values returned by 'readline()'. + """ + def open(self, filename: str) -> None: """Open a new file named 'filename'. This overrides both the - 'filename' and 'file' arguments to the constructor. -""" + 'filename' and 'file' arguments to the constructor. + """ + def close(self) -> None: """Close the current file and forget everything we know about it - (filename, current line number). -""" + (filename, current line number). + """ + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line. -""" + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". If 'line' supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line. + """ + def readline(self) -> str | None: """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not. -""" + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. 
Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'strip_blanks' is + not. + """ + def readlines(self) -> list[str]: """Read and return the list of all logical lines remaining in the - current file. -""" + current file. + """ + def unreadline(self, line: str) -> str: """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead. -""" + checked by future 'readline()' calls. Handy for implementing + a parser with line-at-a-time lookahead. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi index 6964eda86b479..e1a17ecf3a682 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/unixccompiler.pyi @@ -12,6 +12,7 @@ the "typical" Unix-style command-line C compiler: * link static library handled by 'ar' command (possibly with 'ranlib') * link shared library handled by 'cc -shared' """ + from distutils.ccompiler import CCompiler class UnixCCompiler(CCompiler): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi index e41378f729433..8a492c6873464 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/util.pyi @@ -3,6 +3,7 @@ Miscellaneous utility functions -- anything that doesn't fit into one of the other *util.py modules. """ + from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any, Literal @@ -30,6 +31,7 @@ def get_host_platform() -> str: For other non-POSIX platforms, currently just returns 'sys.platform'. """ + def get_platform() -> str: ... def convert_path(pathname: str) -> str: """Return 'pathname' as a name that will work on the native filesystem, @@ -40,12 +42,14 @@ def convert_path(pathname: str) -> str: ValueError on non-Unix-ish systems if 'pathname' either starts or ends with a slash. """ + def change_root(new_root: StrPath, pathname: StrPath) -> str: """Return 'pathname' with 'new_root' prepended. If 'pathname' is relative, this is equivalent to "os.path.join(new_root,pathname)". Otherwise, it requires making 'pathname' relative and then joining the two, which is tricky on DOS/Windows and Mac OS. """ + def check_environ() -> None: """Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, @@ -54,6 +58,7 @@ def check_environ() -> None: PLAT - description of the current platform, including hardware and OS (see 'get_platform()') """ + def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: """Perform shell/Perl-style variable substitution on 'string'. Every occurrence of '$' followed by a name is considered a variable, and @@ -63,6 +68,7 @@ def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: certain values: see 'check_environ()'. Raise ValueError for any variables not found in either 'local_vars' or 'os.environ'. """ + def split_quoted(s: str) -> list[str]: """Split a string up according to Unix shell-like rules for quotes and backslashes. 
In short: words are delimited by spaces, as long as those @@ -73,6 +79,7 @@ def split_quoted(s: str) -> list[str]: characters are stripped from any quoted string. Returns a list of words. """ + def execute( func: Callable[[Unpack[_Ts]], Unused], args: tuple[Unpack[_Ts]], @@ -88,6 +95,7 @@ def execute( "external action" being performed), and an optional message to print. """ + def strtobool(val: str) -> Literal[0, 1]: """Convert a string representation of truth to true (1) or false (0). @@ -95,6 +103,7 @@ def strtobool(val: str) -> Literal[0, 1]: are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if 'val' is anything else. """ + def byte_compile( py_files: list[str], optimize: int = 0, @@ -133,10 +142,12 @@ def byte_compile( generated in indirect mode; unless you know what you're doing, leave it set to None. """ + def rfc822_escape(header: str) -> str: """Return a version of the string escaped for inclusion in an RFC-822 header, by ensuring there are 8 spaces space after each newline. """ + def run_2to3( files: Iterable[str], fixer_names: Iterable[str] | None = None, @@ -148,7 +159,8 @@ def run_2to3( modification is done in-place. To reduce the build time, only files modified since the last invocation of this function should be passed in the files argument. -""" + """ + def copydir_run_2to3( src: StrPath, dest: StrPath, @@ -168,7 +180,8 @@ class Mixin2to3: To configure 2to3, setup scripts may either change the class variables, or inherit from individual commands to override how 2to3 is invoked. -""" + """ + fixer_names: Iterable[str] | None options: Mapping[str, Any] | None explicit: Container[str] | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi index f7c70bddb0a12..e6c38de01efa0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/distutils/version.pyi @@ -16,6 +16,7 @@ Every version number class implements the following interface: of the same class or a string (which will be parsed to an instance of the same class, thus must follow the same rules) """ + from abc import abstractmethod from re import Pattern from typing_extensions import Self @@ -26,6 +27,7 @@ class Version: seem to be the same for all version numbering classes; and route rich comparisons to _cmp. """ + def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self | str) -> bool: ... def __le__(self, other: Self | str) -> bool: ... @@ -75,6 +77,7 @@ class StrictVersion(Version): The rationale for this version numbering system will be explained in the distutils documentation. """ + version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None @@ -114,6 +117,7 @@ class LooseVersion(Version): but may not always give the results you want (for some definition of "want"). """ + component_re: Pattern[str] vstring: str version: tuple[str | int, ...] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi index 3bc81a758b34a..55a37fc365282 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/doctest.pyi @@ -36,6 +36,7 @@ files containing doctests. There are also many ways to override parts of doctest's default behaviors. See the Library Reference Manual for details. 
""" + import sys import types import unittest @@ -92,8 +93,8 @@ if sys.version_info >= (3, 13): else: class TestResults(NamedTuple): - """TestResults(failed, attempted) -""" + """TestResults(failed, attempted)""" + failed: int attempted: int @@ -123,39 +124,40 @@ ELLIPSIS_MARKER: Final = "..." class Example: """ -A single doctest example, consisting of source code and expected -output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. `exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that precede the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. -""" + A single doctest example, consisting of source code and expected + output. `Example` defines the following attributes: + + - source: A single Python statement, always ending with a newline. + The constructor adds a newline if needed. + + - want: The expected output from running the source code (either + from stdout, or a traceback in case of exception). `want` ends + with a newline unless it's empty, in which case it's an empty + string. The constructor adds a newline if needed. + + - exc_msg: The exception message generated by the example, if + the example is expected to generate an exception; or `None` if + it is not expected to generate an exception. This exception + message is compared against the return value of + `traceback.format_exception_only()`. `exc_msg` ends with a + newline unless it's `None`. The constructor adds a newline + if needed. + + - lineno: The line number within the DocTest string containing + this Example where the Example begins. This line number is + zero-based, with respect to the beginning of the DocTest. + + - indent: The example's indentation in the DocTest string. + I.e., the number of space characters that precede the + example's first prompt. + + - options: A dictionary mapping from option flags to True or + False, which is used to override default options for this + example. Any option flags not contained in this dictionary + are left at their default value (as specified by the + DocTestRunner's optionflags). By default, no options are set. + """ + source: str want: str exc_msg: str | None @@ -176,28 +178,29 @@ output. `Example` defines the following attributes: class DocTest: """ -A collection of doctest examples that should be run in a single -namespace. 
Each `DocTest` defines the following attributes: + A collection of doctest examples that should be run in a single + namespace. Each `DocTest` defines the following attributes: - - examples: the list of examples. + - examples: the list of examples. - - globs: The namespace (aka globals) that the examples should - be run in. + - globs: The namespace (aka globals) that the examples should + be run in. - - name: A name identifying the DocTest (typically, the name of - the object whose docstring this DocTest was extracted from). + - name: A name identifying the DocTest (typically, the name of + the object whose docstring this DocTest was extracted from). - - filename: The name of the file that this DocTest was extracted - from, or `None` if the filename is unknown. + - filename: The name of the file that this DocTest was extracted + from, or `None` if the filename is unknown. - - lineno: The line number within filename where this DocTest - begins, or `None` if the line number is unavailable. This - line number is zero-based, with respect to the beginning of - the file. + - lineno: The line number within filename where this DocTest + begins, or `None` if the line number is unavailable. This + line number is zero-based, with respect to the beginning of + the file. + + - docstring: The string that the examples were extracted from, + or `None` if the string is unavailable. + """ - - docstring: The string that the examples were extracted from, - or `None` if the string is unavailable. -""" examples: list[Example] globs: dict[str, Any] name: str @@ -214,72 +217,78 @@ namespace. Each `DocTest` defines the following attributes: docstring: str | None, ) -> None: """ -Create a new DocTest containing the given examples. The -DocTest's globals are initialized with a copy of `globs`. -""" + Create a new DocTest containing the given examples. The + DocTest's globals are initialized with a copy of `globs`. + """ + def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... def __eq__(self, other: object) -> bool: ... class DocTestParser: """ -A class used to parse strings containing doctest examples. -""" + A class used to parse strings containing doctest examples. + """ + def parse(self, string: str, name: str = "") -> list[str | Example]: """ -Divide the given string into examples and intervening text, -and return them as a list of alternating Examples and strings. -Line numbers for the Examples are 0-based. The optional -argument `name` is a name identifying this string, and is only -used for error messages. -""" + Divide the given string into examples and intervening text, + and return them as a list of alternating Examples and strings. + Line numbers for the Examples are 0-based. The optional + argument `name` is a name identifying this string, and is only + used for error messages. + """ + def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: """ -Extract all doctest examples from the given string, and -collect them into a `DocTest` object. + Extract all doctest examples from the given string, and + collect them into a `DocTest` object. + + `globs`, `name`, `filename`, and `lineno` are attributes for + the new `DocTest` object. See the documentation for `DocTest` + for more information. + """ -`globs`, `name`, `filename`, and `lineno` are attributes for -the new `DocTest` object. See the documentation for `DocTest` -for more information. 
-""" def get_examples(self, string: str, name: str = "") -> list[Example]: """ -Extract all doctest examples from the given string, and return -them as a list of `Example` objects. Line numbers are -0-based, because it's most common in doctests that nothing -interesting appears on the same line as opening triple-quote, -and so the first interesting line is called "line 1" then. - -The optional argument `name` is a name identifying this -string, and is only used for error messages. -""" + Extract all doctest examples from the given string, and return + them as a list of `Example` objects. Line numbers are + 0-based, because it's most common in doctests that nothing + interesting appears on the same line as opening triple-quote, + and so the first interesting line is called "line 1" then. + + The optional argument `name` is a name identifying this + string, and is only used for error messages. + """ class DocTestFinder: """ -A class used to extract the DocTests that are relevant to a given -object, from its docstring and the docstrings of its contained -objects. Doctests can currently be extracted from the following -object types: modules, functions, classes, methods, staticmethods, -classmethods, and properties. -""" + A class used to extract the DocTests that are relevant to a given + object, from its docstring and the docstrings of its contained + objects. Doctests can currently be extracted from the following + object types: modules, functions, classes, methods, staticmethods, + classmethods, and properties. + """ + def __init__( self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True ) -> None: """ -Create a new doctest finder. + Create a new doctest finder. -The optional argument `parser` specifies a class or -function that should be used to create new DocTest objects (or -objects that implement the same interface as DocTest). The -signature for this factory function should match the signature -of the DocTest constructor. + The optional argument `parser` specifies a class or + function that should be used to create new DocTest objects (or + objects that implement the same interface as DocTest). The + signature for this factory function should match the signature + of the DocTest constructor. -If the optional argument `recurse` is false, then `find` will -only examine the given object, and not any contained objects. + If the optional argument `recurse` is false, then `find` will + only examine the given object, and not any contained objects. + + If the optional argument `exclude_empty` is false, then `find` + will include tests for objects with empty docstrings. + """ -If the optional argument `exclude_empty` is false, then `find` -will include tests for objects with empty docstrings. -""" def find( self, obj: object, @@ -289,104 +298,105 @@ will include tests for objects with empty docstrings. extraglobs: dict[str, Any] | None = None, ) -> list[DocTest]: """ -Return a list of the DocTests that are defined by the given -object's docstring, or by any of its contained objects' -docstrings. - -The optional parameter `module` is the module that contains -the given object. If the module is not specified or is None, then -the test finder will attempt to automatically determine the -correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. 
- - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - -Contained objects whose module does not match `module` are ignored. - -If `module` is False, no attempt to find the module will be made. -This is obscure, of use mostly in tests: if `module` is False, or -is None but cannot be found automatically, then all objects are -considered to belong to the (non-existent) module, so all contained -objects will (recursively) be searched for doctests. - -The globals for each DocTest is formed by combining `globs` -and `extraglobs` (bindings in `extraglobs` override bindings -in `globs`). A new copy of the globals dictionary is created -for each DocTest. If `globs` is not specified, then it -defaults to the module's `__dict__`, if specified, or {} -otherwise. If `extraglobs` is not specified, then it defaults -to {}. + Return a list of the DocTests that are defined by the given + object's docstring, or by any of its contained objects' + docstrings. + + The optional parameter `module` is the module that contains + the given object. If the module is not specified or is None, then + the test finder will attempt to automatically determine the + correct module. The object's module is used: + + - As a default namespace, if `globs` is not specified. + - To prevent the DocTestFinder from extracting DocTests + from objects that are imported from other modules. + - To find the name of the file containing the object. + - To help find the line number of the object within its + file. + + Contained objects whose module does not match `module` are ignored. + + If `module` is False, no attempt to find the module will be made. + This is obscure, of use mostly in tests: if `module` is False, or + is None but cannot be found automatically, then all objects are + considered to belong to the (non-existent) module, so all contained + objects will (recursively) be searched for doctests. + + The globals for each DocTest is formed by combining `globs` + and `extraglobs` (bindings in `extraglobs` override bindings + in `globs`). A new copy of the globals dictionary is created + for each DocTest. If `globs` is not specified, then it + defaults to the module's `__dict__`, if specified, or {} + otherwise. If `extraglobs` is not specified, then it defaults + to {}. -""" + """ _Out: TypeAlias = Callable[[str], object] class DocTestRunner: """ -A class used to run DocTest test cases, and accumulate statistics. -The `run` method is used to process a single DocTest case. It -returns a TestResults instance. - - >>> save_colorize = _colorize.COLORIZE - >>> _colorize.COLORIZE = False - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> tests.sort(key = lambda test: test.name) - >>> for test in tests: - ... print(test.name, '->', runner.run(test)) - _TestClass -> TestResults(failed=0, attempted=2) - _TestClass.__init__ -> TestResults(failed=0, attempted=2) - _TestClass.get -> TestResults(failed=0, attempted=2) - _TestClass.square -> TestResults(failed=0, attempted=1) - -The `summarize` method prints a summary of all the test cases that -have been run by the runner, and returns an aggregated TestResults -instance: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 test in _TestClass.square - 7 tests in 4 items. - 7 passed. - Test passed. 
- TestResults(failed=0, attempted=7) - -The aggregated number of tried examples and failed examples is also -available via the `tries`, `failures` and `skips` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - >>> runner.skips - 0 - -The comparison between expected outputs and actual outputs is done -by an `OutputChecker`. This comparison may be customized with a -number of option flags; see the documentation for `testmod` for -more information. If the option flags are insufficient, then the -comparison may also be customized by passing a subclass of -`OutputChecker` to the constructor. - -The test runner's display output can be controlled in two ways. -First, an output function (`out`) can be passed to -`TestRunner.run`; this function will be called with strings that -should be displayed. It defaults to `sys.stdout.write`. If -capturing the output is not sufficient, then the display output -can be also customized by subclassing DocTestRunner, and -overriding the methods `report_start`, `report_success`, -`report_unexpected_exception`, and `report_failure`. - - >>> _colorize.COLORIZE = save_colorize -""" + A class used to run DocTest test cases, and accumulate statistics. + The `run` method is used to process a single DocTest case. It + returns a TestResults instance. + + >>> save_colorize = _colorize.COLORIZE + >>> _colorize.COLORIZE = False + + >>> tests = DocTestFinder().find(_TestClass) + >>> runner = DocTestRunner(verbose=False) + >>> tests.sort(key = lambda test: test.name) + >>> for test in tests: + ... print(test.name, '->', runner.run(test)) + _TestClass -> TestResults(failed=0, attempted=2) + _TestClass.__init__ -> TestResults(failed=0, attempted=2) + _TestClass.get -> TestResults(failed=0, attempted=2) + _TestClass.square -> TestResults(failed=0, attempted=1) + + The `summarize` method prints a summary of all the test cases that + have been run by the runner, and returns an aggregated TestResults + instance: + + >>> runner.summarize(verbose=1) + 4 items passed all tests: + 2 tests in _TestClass + 2 tests in _TestClass.__init__ + 2 tests in _TestClass.get + 1 test in _TestClass.square + 7 tests in 4 items. + 7 passed. + Test passed. + TestResults(failed=0, attempted=7) + + The aggregated number of tried examples and failed examples is also + available via the `tries`, `failures` and `skips` attributes: + + >>> runner.tries + 7 + >>> runner.failures + 0 + >>> runner.skips + 0 + + The comparison between expected outputs and actual outputs is done + by an `OutputChecker`. This comparison may be customized with a + number of option flags; see the documentation for `testmod` for + more information. If the option flags are insufficient, then the + comparison may also be customized by passing a subclass of + `OutputChecker` to the constructor. + + The test runner's display output can be controlled in two ways. + First, an output function (`out`) can be passed to + `TestRunner.run`; this function will be called with strings that + should be displayed. It defaults to `sys.stdout.write`. If + capturing the output is not sufficient, then the display output + can be also customized by subclassing DocTestRunner, and + overriding the methods `report_start`, `report_success`, + `report_unexpected_exception`, and `report_failure`. 
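The finder/runner pairing described in the docstring above can be exercised directly; a minimal, self-contained sketch (the sample function is invented):

    import doctest

    def square(x):
        """
        >>> square(3)
        9
        """
        return x * x

    finder = doctest.DocTestFinder()
    runner = doctest.DocTestRunner(verbose=False)
    for test in finder.find(square):
        runner.run(test)
    print(runner.summarize(verbose=False))   # TestResults(failed=0, attempted=1)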
+ + >>> _colorize.COLORIZE = save_colorize + """ + DIVIDER: str optionflags: int original_optionflags: int @@ -397,109 +407,119 @@ overriding the methods `report_start`, `report_success`, test: DocTest def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: """ -Create a new test runner. + Create a new test runner. -Optional keyword arg `checker` is the `OutputChecker` that -should be used to compare the expected outputs and actual -outputs of doctest examples. + Optional keyword arg `checker` is the `OutputChecker` that + should be used to compare the expected outputs and actual + outputs of doctest examples. -Optional keyword arg 'verbose' prints lots of stuff if true, -only failures if false; by default, it's true iff '-v' is in -sys.argv. + Optional keyword arg 'verbose' prints lots of stuff if true, + only failures if false; by default, it's true iff '-v' is in + sys.argv. + + Optional argument `optionflags` can be used to control how the + test runner compares expected output to actual output, and how + it displays failures. See the documentation for `testmod` for + more information. + """ -Optional argument `optionflags` can be used to control how the -test runner compares expected output to actual output, and how -it displays failures. See the documentation for `testmod` for -more information. -""" def report_start(self, out: _Out, test: DocTest, example: Example) -> None: """ -Report that the test runner is about to process the given -example. (Only displays a message if verbose=True) -""" + Report that the test runner is about to process the given + example. (Only displays a message if verbose=True) + """ + def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: """ -Report that the given example ran successfully. (Only -displays a message if verbose=True) -""" + Report that the given example ran successfully. (Only + displays a message if verbose=True) + """ + def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: """ -Report that the given example failed. -""" + Report that the given example failed. + """ + def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: """ -Report that the given example raised an unexpected exception. -""" + Report that the given example raised an unexpected exception. + """ + def run( self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True ) -> TestResults: """ -Run the examples in `test`, and display the results using the -writer function `out`. - -The examples are run in the namespace `test.globs`. If -`clear_globs` is true (the default), then this namespace will -be cleared after the test runs, to help with garbage -collection. If you would like to examine the namespace after -the test completes, then use `clear_globs=False`. - -`compileflags` gives the set of flags that should be used by -the Python compiler when running the examples. If not -specified, then it will default to the set of future-import -flags that apply to `globs`. - -The output of each example is checked using -`DocTestRunner.check_output`, and the results are formatted by -the `DocTestRunner.report_*` methods. -""" + Run the examples in `test`, and display the results using the + writer function `out`. + + The examples are run in the namespace `test.globs`. 
If + `clear_globs` is true (the default), then this namespace will + be cleared after the test runs, to help with garbage + collection. If you would like to examine the namespace after + the test completes, then use `clear_globs=False`. + + `compileflags` gives the set of flags that should be used by + the Python compiler when running the examples. If not + specified, then it will default to the set of future-import + flags that apply to `globs`. + + The output of each example is checked using + `DocTestRunner.check_output`, and the results are formatted by + the `DocTestRunner.report_*` methods. + """ + def summarize(self, verbose: bool | None = None) -> TestResults: """ -Print a summary of all the test cases that have been run by -this DocTestRunner, and return a TestResults instance. + Print a summary of all the test cases that have been run by + this DocTestRunner, and return a TestResults instance. + + The optional `verbose` argument controls how detailed the + summary is. If the verbosity is not specified, then the + DocTestRunner's verbosity is used. + """ -The optional `verbose` argument controls how detailed the -summary is. If the verbosity is not specified, then the -DocTestRunner's verbosity is used. -""" def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: """ -A class used to check whether the actual output from a doctest -example matches the expected output. `OutputChecker` defines two -methods: `check_output`, which compares a given pair of outputs, -and returns true if they match; and `output_difference`, which -returns a string describing the differences between two outputs. -""" + A class used to check whether the actual output from a doctest + example matches the expected output. `OutputChecker` defines two + methods: `check_output`, which compares a given pair of outputs, + and returns true if they match; and `output_difference`, which + returns a string describing the differences between two outputs. + """ + def check_output(self, want: str, got: str, optionflags: int) -> bool: """ -Return True iff the actual output from an example (`got`) -matches the expected output (`want`). These strings are -always considered to match if they are identical; but -depending on what option flags the test runner is using, -several non-exact match types are also possible. See the -documentation for `TestRunner` for more information about -option flags. -""" + Return True iff the actual output from an example (`got`) + matches the expected output (`want`). These strings are + always considered to match if they are identical; but + depending on what option flags the test runner is using, + several non-exact match types are also possible. See the + documentation for `TestRunner` for more information about + option flags. + """ + def output_difference(self, example: Example, got: str, optionflags: int) -> str: """ -Return a string describing the differences between the -expected output for a given example (`example`) and the actual -output (`got`). `optionflags` is the set of option flags used -to compare `want` and `got`. -""" + Return a string describing the differences between the + expected output for a given example (`example`) and the actual + output (`got`). `optionflags` is the set of option flags used + to compare `want` and `got`. + """ class DocTestFailure(Exception): """A DocTest example has failed in debugging mode. 
-The exception instance has variables: + The exception instance has variables: -- test: the DocTest object being run + - test: the DocTest object being run -- example: the Example object that failed + - example: the Example object that failed + + - got: the actual output + """ -- got: the actual output -""" test: DocTest example: Example got: str @@ -508,14 +528,15 @@ The exception instance has variables: class UnexpectedException(Exception): """A DocTest example has encountered an unexpected exception -The exception instance has variables: + The exception instance has variables: -- test: the DocTest object being run + - test: the DocTest object being run -- example: the Example object that failed + - example: the Example object that failed + + - exc_info: the exception info + """ -- exc_info: the exception info -""" test: DocTest example: Example exc_info: ExcInfo @@ -524,93 +545,93 @@ The exception instance has variables: class DebugRunner(DocTestRunner): """Run doc tests but raise an exception as soon as there is a failure. -If an unexpected exception occurs, an UnexpectedException is raised. -It contains the test, the example, and the original exception: + If an unexpected exception occurs, an UnexpectedException is raised. + It contains the test, the example, and the original exception: - >>> runner = DebugRunner(verbose=False) - >>> test = DocTestParser().get_doctest('>>> raise KeyError\\n42', - ... {}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except UnexpectedException as f: - ... failure = f + >>> runner = DebugRunner(verbose=False) + >>> test = DocTestParser().get_doctest('>>> raise KeyError\\n42', + ... {}, 'foo', 'foo.py', 0) + >>> try: + ... runner.run(test) + ... except UnexpectedException as f: + ... failure = f - >>> failure.test is test - True + >>> failure.test is test + True - >>> failure.example.want - '42\\n' + >>> failure.example.want + '42\\n' - >>> exc_info = failure.exc_info - >>> raise exc_info[1] # Already has the traceback - Traceback (most recent call last): - ... - KeyError + >>> exc_info = failure.exc_info + >>> raise exc_info[1] # Already has the traceback + Traceback (most recent call last): + ... + KeyError -We wrap the original exception to give the calling application -access to the test and example information. + We wrap the original exception to give the calling application + access to the test and example information. -If the output doesn't match, then a DocTestFailure is raised: + If the output doesn't match, then a DocTestFailure is raised: - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 1 + ... >>> x + ... 2 + ... ''', {}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except DocTestFailure as f: - ... failure = f + >>> try: + ... runner.run(test) + ... except DocTestFailure as f: + ... 
failure = f -DocTestFailure objects provide access to the test: + DocTestFailure objects provide access to the test: - >>> failure.test is test - True + >>> failure.test is test + True -As well as to the example: + As well as to the example: - >>> failure.example.want - '2\\n' + >>> failure.example.want + '2\\n' -and the actual output: + and the actual output: - >>> failure.got - '1\\n' + >>> failure.got + '1\\n' -If a failure or error occurs, the globals are left intact: + If a failure or error occurs, the globals are left intact: - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 1} + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 1} - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... >>> raise KeyError - ... ''', {}, 'foo', 'foo.py', 0) + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 2 + ... >>> raise KeyError + ... ''', {}, 'foo', 'foo.py', 0) - >>> runner.run(test) - Traceback (most recent call last): - ... - doctest.UnexpectedException: + >>> runner.run(test) + Traceback (most recent call last): + ... + doctest.UnexpectedException: - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 2} + >>> del test.globs['__builtins__'] + >>> test.globs + {'x': 2} -But the globals are cleared if there is no error: + But the globals are cleared if there is no error: - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... ''', {}, 'foo', 'foo.py', 0) + >>> test = DocTestParser().get_doctest(''' + ... >>> x = 2 + ... ''', {}, 'foo', 'foo.py', 0) - >>> runner.run(test) - TestResults(failed=0, attempted=1) + >>> runner.run(test) + TestResults(failed=0, attempted=1) - >>> test.globs - {} + >>> test.globs + {} -""" + """ master: DocTestRunner | None @@ -626,68 +647,69 @@ def testmod( exclude_empty: bool = False, ) -> TestResults: """m=None, name=None, globs=None, verbose=None, report=True, - optionflags=0, extraglobs=None, raise_on_error=False, - exclude_empty=False - -Test examples in docstrings in functions and classes reachable -from module m (or the current module if m is not supplied), starting -with m.__doc__. - -Also test examples reachable from dict m.__test__ if it exists. -m.__test__ maps names to functions, classes and strings; -function and class docstrings are tested even if the name is private; -strings are tested directly, as if they were docstrings. - -Return (#failures, #tests). - -See help(doctest) for an overview. - -Optional keyword arg "name" gives the name of the module; by default -use m.__name__. - -Optional keyword arg "globs" gives a dict to be used as the globals -when executing examples; by default, use m.__dict__. A copy of this -dict is actually used for each docstring, so that each docstring's -examples start with a clean slate. - -Optional keyword arg "extraglobs" gives a dictionary that should be -merged into the globals that are used to execute examples. By -default, no extra globals are used. This is new in 2.4. - -Optional keyword arg "verbose" prints lots of stuff if true, prints -only failures if false; by default, it's true iff "-v" is in sys.argv. - -Optional keyword arg "report" prints a summary at the end when true, -else prints nothing at the end. In verbose mode, the summary is -detailed, else very brief (in fact, empty if all tests passed). - -Optional keyword arg "optionflags" or's together module constants, -and defaults to 0. This is new in 2.3. 
Possible values (see the -docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - SKIP - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - -Optional keyword arg "raise_on_error" raises an exception on the -first unexpected exception or failure. This allows failures to be -post-mortem debugged. - -Advanced tomfoolery: testmod runs methods of a local instance of -class doctest.Tester, then merges the results into (or creates) -global Tester instance doctest.master. Methods of doctest.master -can be called directly too, if you want to do something unusual. -Passing report=0 to testmod is especially useful then, to delay -displaying a summary. Invoke doctest.master.summarize(verbose) -when you're done fiddling. -""" + optionflags=0, extraglobs=None, raise_on_error=False, + exclude_empty=False + + Test examples in docstrings in functions and classes reachable + from module m (or the current module if m is not supplied), starting + with m.__doc__. + + Also test examples reachable from dict m.__test__ if it exists. + m.__test__ maps names to functions, classes and strings; + function and class docstrings are tested even if the name is private; + strings are tested directly, as if they were docstrings. + + Return (#failures, #tests). + + See help(doctest) for an overview. + + Optional keyword arg "name" gives the name of the module; by default + use m.__name__. + + Optional keyword arg "globs" gives a dict to be used as the globals + when executing examples; by default, use m.__dict__. A copy of this + dict is actually used for each docstring, so that each docstring's + examples start with a clean slate. + + Optional keyword arg "extraglobs" gives a dictionary that should be + merged into the globals that are used to execute examples. By + default, no extra globals are used. This is new in 2.4. + + Optional keyword arg "verbose" prints lots of stuff if true, prints + only failures if false; by default, it's true iff "-v" is in sys.argv. + + Optional keyword arg "report" prints a summary at the end when true, + else prints nothing at the end. In verbose mode, the summary is + detailed, else very brief (in fact, empty if all tests passed). + + Optional keyword arg "optionflags" or's together module constants, + and defaults to 0. This is new in 2.3. Possible values (see the + docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + + Optional keyword arg "raise_on_error" raises an exception on the + first unexpected exception or failure. This allows failures to be + post-mortem debugged. + + Advanced tomfoolery: testmod runs methods of a local instance of + class doctest.Tester, then merges the results into (or creates) + global Tester instance doctest.master. Methods of doctest.master + can be called directly too, if you want to do something unusual. + Passing report=0 to testmod is especially useful then, to delay + displaying a summary. Invoke doctest.master.summarize(verbose) + when you're done fiddling. + """ + def testfile( filename: str, module_relative: bool = True, @@ -703,81 +725,82 @@ def testfile( encoding: str | None = None, ) -> TestResults: """ -Test examples in the given file. Return (#failures, #tests). 
- -Optional keyword arg "module_relative" specifies how filenames -should be interpreted: - - - If "module_relative" is True (the default), then "filename" - specifies a module-relative path. By default, this path is - relative to the calling module's directory; but if the - "package" argument is specified, then it is relative to that - package. To ensure os-independence, "filename" should use - "/" characters to separate path segments, and should not - be an absolute path (i.e., it may not begin with "/"). - - - If "module_relative" is False, then "filename" specifies an - os-specific path. The path may be absolute or relative (to - the current working directory). - -Optional keyword arg "name" gives the name of the test; by default -use the file's basename. - -Optional keyword argument "package" is a Python package or the -name of a Python package whose directory should be used as the -base directory for a module relative filename. If no package is -specified, then the calling module's directory is used as the base -directory for module relative filenames. It is an error to -specify "package" if "module_relative" is False. - -Optional keyword arg "globs" gives a dict to be used as the globals -when executing examples; by default, use {}. A copy of this dict -is actually used for each docstring, so that each docstring's -examples start with a clean slate. - -Optional keyword arg "extraglobs" gives a dictionary that should be -merged into the globals that are used to execute examples. By -default, no extra globals are used. - -Optional keyword arg "verbose" prints lots of stuff if true, prints -only failures if false; by default, it's true iff "-v" is in sys.argv. - -Optional keyword arg "report" prints a summary at the end when true, -else prints nothing at the end. In verbose mode, the summary is -detailed, else very brief (in fact, empty if all tests passed). - -Optional keyword arg "optionflags" or's together module constants, -and defaults to 0. Possible values (see the docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - SKIP - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - -Optional keyword arg "raise_on_error" raises an exception on the -first unexpected exception or failure. This allows failures to be -post-mortem debugged. - -Optional keyword arg "parser" specifies a DocTestParser (or -subclass) that should be used to extract tests from the files. - -Optional keyword arg "encoding" specifies an encoding that should -be used to convert the file to unicode. - -Advanced tomfoolery: testmod runs methods of a local instance of -class doctest.Tester, then merges the results into (or creates) -global Tester instance doctest.master. Methods of doctest.master -can be called directly too, if you want to do something unusual. -Passing report=0 to testmod is especially useful then, to delay -displaying a summary. Invoke doctest.master.summarize(verbose) -when you're done fiddling. -""" + Test examples in the given file. Return (#failures, #tests). + + Optional keyword arg "module_relative" specifies how filenames + should be interpreted: + + - If "module_relative" is True (the default), then "filename" + specifies a module-relative path. By default, this path is + relative to the calling module's directory; but if the + "package" argument is specified, then it is relative to that + package. 
To ensure os-independence, "filename" should use + "/" characters to separate path segments, and should not + be an absolute path (i.e., it may not begin with "/"). + + - If "module_relative" is False, then "filename" specifies an + os-specific path. The path may be absolute or relative (to + the current working directory). + + Optional keyword arg "name" gives the name of the test; by default + use the file's basename. + + Optional keyword argument "package" is a Python package or the + name of a Python package whose directory should be used as the + base directory for a module relative filename. If no package is + specified, then the calling module's directory is used as the base + directory for module relative filenames. It is an error to + specify "package" if "module_relative" is False. + + Optional keyword arg "globs" gives a dict to be used as the globals + when executing examples; by default, use {}. A copy of this dict + is actually used for each docstring, so that each docstring's + examples start with a clean slate. + + Optional keyword arg "extraglobs" gives a dictionary that should be + merged into the globals that are used to execute examples. By + default, no extra globals are used. + + Optional keyword arg "verbose" prints lots of stuff if true, prints + only failures if false; by default, it's true iff "-v" is in sys.argv. + + Optional keyword arg "report" prints a summary at the end when true, + else prints nothing at the end. In verbose mode, the summary is + detailed, else very brief (in fact, empty if all tests passed). + + Optional keyword arg "optionflags" or's together module constants, + and defaults to 0. Possible values (see the docs for details): + + DONT_ACCEPT_TRUE_FOR_1 + DONT_ACCEPT_BLANKLINE + NORMALIZE_WHITESPACE + ELLIPSIS + SKIP + IGNORE_EXCEPTION_DETAIL + REPORT_UDIFF + REPORT_CDIFF + REPORT_NDIFF + REPORT_ONLY_FIRST_FAILURE + + Optional keyword arg "raise_on_error" raises an exception on the + first unexpected exception or failure. This allows failures to be + post-mortem debugged. + + Optional keyword arg "parser" specifies a DocTestParser (or + subclass) that should be used to extract tests from the files. + + Optional keyword arg "encoding" specifies an encoding that should + be used to convert the file to unicode. + + Advanced tomfoolery: testmod runs methods of a local instance of + class doctest.Tester, then merges the results into (or creates) + global Tester instance doctest.master. Methods of doctest.master + can be called directly too, if you want to do something unusual. + Passing report=0 to testmod is especially useful then, to delay + displaying a summary. Invoke doctest.master.summarize(verbose) + when you're done fiddling. + """ + def run_docstring_examples( f: object, globs: dict[str, Any], @@ -787,47 +810,48 @@ def run_docstring_examples( optionflags: int = 0, ) -> None: """ -Test examples in the given object's docstring (`f`), using `globs` -as globals. Optional argument `name` is used in failure messages. -If the optional argument `verbose` is true, then generate output -even if there are no failures. - -`compileflags` gives the set of flags that should be used by the -Python compiler when running the examples. If not specified, then -it will default to the set of future-import flags that apply to -`globs`. - -Optional keyword arg `optionflags` specifies options for the -testing and output. See the documentation for `testmod` for more -information. 
-""" + Test examples in the given object's docstring (`f`), using `globs` + as globals. Optional argument `name` is used in failure messages. + If the optional argument `verbose` is true, then generate output + even if there are no failures. + + `compileflags` gives the set of flags that should be used by the + Python compiler when running the examples. If not specified, then + it will default to the set of future-import flags that apply to + `globs`. + + Optional keyword arg `optionflags` specifies options for the + testing and output. See the documentation for `testmod` for more + information. + """ + def set_unittest_reportflags(flags: int) -> int: """Sets the unittest option flags. -The old flag is returned so that a runner could restore the old -value if it wished to: + The old flag is returned so that a runner could restore the old + value if it wished to: - >>> import doctest - >>> old = doctest._unittest_reportflags - >>> doctest.set_unittest_reportflags(REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) == old - True + >>> import doctest + >>> old = doctest._unittest_reportflags + >>> doctest.set_unittest_reportflags(REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) == old + True - >>> doctest._unittest_reportflags == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True + >>> doctest._unittest_reportflags == (REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) + True -Only reporting flags can be set: + Only reporting flags can be set: - >>> doctest.set_unittest_reportflags(ELLIPSIS) - Traceback (most recent call last): - ... - ValueError: ('Only reporting flags allowed', 8) + >>> doctest.set_unittest_reportflags(ELLIPSIS) + Traceback (most recent call last): + ... + ValueError: ('Only reporting flags allowed', 8) - >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True -""" + >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF | + ... REPORT_ONLY_FIRST_FAILURE) + True + """ class DocTestCase(unittest.TestCase): def __init__( @@ -857,39 +881,39 @@ def DocTestSuite( **options: Any, ) -> _DocTestSuite: """ -Convert doctest tests for a module to a unittest test suite. + Convert doctest tests for a module to a unittest test suite. -This converts each documentation string in a module that -contains doctest tests to a unittest test case. If any of the -tests in a doc string fail, then the test case fails. An exception -is raised showing the name of the file containing the test and a -(sometimes approximate) line number. + This converts each documentation string in a module that + contains doctest tests to a unittest test case. If any of the + tests in a doc string fail, then the test case fails. An exception + is raised showing the name of the file containing the test and a + (sometimes approximate) line number. -The `module` argument provides the module to be tested. The argument -can be either a module or a module name. + The `module` argument provides the module to be tested. The argument + can be either a module or a module name. -If no argument is given, the calling module is used. + If no argument is given, the calling module is used. -A number of options may be provided as keyword arguments: + A number of options may be provided as keyword arguments: -setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. + setUp + A set-up function. 
This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. -tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. + tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. -globs - A dictionary containing initial global variables for the tests. + globs + A dictionary containing initial global variables for the tests. -optionflags - A set of doctest option flags expressed as an integer. -""" + optionflags + A set of doctest option flags expressed as an integer. + """ class DocFileCase(DocTestCase): ... @@ -905,134 +929,137 @@ def DocFileTest( def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: """A unittest suite for one or more doctest files. -The path to each doctest file is given as a string; the -interpretation of that string depends on the keyword argument -"module_relative". - -A number of options may be provided as keyword arguments: - -module_relative - If "module_relative" is True, then the given file paths are - interpreted as os-independent module-relative paths. By - default, these paths are relative to the calling module's - directory; but if the "package" argument is specified, then - they are relative to that package. To ensure os-independence, - "filename" should use "/" characters to separate path - segments, and may not be an absolute path (i.e., it may not - begin with "/"). - - If "module_relative" is False, then the given file paths are - interpreted as os-specific paths. These paths may be absolute - or relative (to the current working directory). - -package - A Python package or the name of a Python package whose directory - should be used as the base directory for module relative paths. - If "package" is not specified, then the calling module's - directory is used as the base directory for module relative - filenames. It is an error to specify "package" if - "module_relative" is False. - -setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - -tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - -globs - A dictionary containing initial global variables for the tests. - -optionflags - A set of doctest option flags expressed as an integer. - -parser - A DocTestParser (or subclass) that should be used to extract - tests from the files. - -encoding - An encoding that will be used to convert the files to unicode. -""" + The path to each doctest file is given as a string; the + interpretation of that string depends on the keyword argument + "module_relative". + + A number of options may be provided as keyword arguments: + + module_relative + If "module_relative" is True, then the given file paths are + interpreted as os-independent module-relative paths. 
By + default, these paths are relative to the calling module's + directory; but if the "package" argument is specified, then + they are relative to that package. To ensure os-independence, + "filename" should use "/" characters to separate path + segments, and may not be an absolute path (i.e., it may not + begin with "/"). + + If "module_relative" is False, then the given file paths are + interpreted as os-specific paths. These paths may be absolute + or relative (to the current working directory). + + package + A Python package or the name of a Python package whose directory + should be used as the base directory for module relative paths. + If "package" is not specified, then the calling module's + directory is used as the base directory for module relative + filenames. It is an error to specify "package" if + "module_relative" is False. + + setUp + A set-up function. This is called before running the + tests in each file. The setUp function will be passed a DocTest + object. The setUp function can access the test globals as the + globs attribute of the test passed. + + tearDown + A tear-down function. This is called after running the + tests in each file. The tearDown function will be passed a DocTest + object. The tearDown function can access the test globals as the + globs attribute of the test passed. + + globs + A dictionary containing initial global variables for the tests. + + optionflags + A set of doctest option flags expressed as an integer. + + parser + A DocTestParser (or subclass) that should be used to extract + tests from the files. + + encoding + An encoding that will be used to convert the files to unicode. + """ + def script_from_examples(s: str) -> str: """Extract script from text with examples. -Converts text with examples to a Python script. Example input is -converted to regular code. Example output and all other words -are converted to comments: - ->>> text = ''' -... Here are examples of simple math. -... -... Python has super accurate integer addition -... -... >>> 2 + 2 -... 5 -... -... And very friendly error messages: -... -... >>> 1/0 -... To Infinity -... And -... Beyond -... -... You can use logic if you want: -... -... >>> if 0: -... ... blah -... ... blah -... ... -... -... Ho hum -... ''' - ->>> print(script_from_examples(text)) -# Here are examples of simple math. -# -# Python has super accurate integer addition -# -2 + 2 -# Expected: -## 5 -# -# And very friendly error messages: -# -1/0 -# Expected: -## To Infinity -## And -## Beyond -# -# You can use logic if you want: -# -if 0: - blah - blah -# -# Ho hum - -""" + Converts text with examples to a Python script. Example input is + converted to regular code. Example output and all other words + are converted to comments: + + >>> text = ''' + ... Here are examples of simple math. + ... + ... Python has super accurate integer addition + ... + ... >>> 2 + 2 + ... 5 + ... + ... And very friendly error messages: + ... + ... >>> 1/0 + ... To Infinity + ... And + ... Beyond + ... + ... You can use logic if you want: + ... + ... >>> if 0: + ... ... blah + ... ... blah + ... ... + ... + ... Ho hum + ... ''' + + >>> print(script_from_examples(text)) + # Here are examples of simple math. 
+ # + # Python has super accurate integer addition + # + 2 + 2 + # Expected: + ## 5 + # + # And very friendly error messages: + # + 1/0 + # Expected: + ## To Infinity + ## And + ## Beyond + # + # You can use logic if you want: + # + if 0: + blah + blah + # + # Ho hum + + """ + def testsource(module: None | str | types.ModuleType, name: str) -> str: """Extract the test sources from a doctest docstring as a script. -Provide the module (or dotted name of the module) containing the -test to be debugged and the name (within the module) of the object -with the doc string with tests to be debugged. -""" + Provide the module (or dotted name of the module) containing the + test to be debugged and the name (within the module) of the object + with the doc string with tests to be debugged. + """ + def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: - """Debug a single doctest docstring, in argument `src` -""" + """Debug a single doctest docstring, in argument `src`""" + def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: - """Debug a test script. `src` is the script, as a string. -""" + """Debug a test script. `src` is the script, as a string.""" + def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: """Debug a single doctest docstring. -Provide the module (or dotted name of the module) containing the -test to be debugged and the name (within the module) of the object -with the docstring with tests to be debugged. -""" + Provide the module (or dotted name of the module) containing the + test to be debugged and the name (within the module) of the object + with the docstring with tests to be debugged. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi index 5ed907c877549..11a143b8d0ed0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/__init__.pyi @@ -1,5 +1,5 @@ -"""A package for parsing, handling, and generating email messages. -""" +"""A package for parsing, handling, and generating email messages.""" + from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -38,8 +38,9 @@ _ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 def message_from_string(s: str) -> Message: """Parse a string into a Message object model. -Optional _class and strict are passed to the Parser constructor. -""" + Optional _class and strict are passed to the Parser constructor. + """ + @overload def message_from_string(s: str, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload @@ -48,8 +49,9 @@ def message_from_string(s: str, _class: Callable[[], _MessageT] = ..., *, policy def message_from_bytes(s: bytes | bytearray) -> Message: """Parse a bytes string into a Message object model. -Optional _class and strict are passed to the Parser constructor. -""" + Optional _class and strict are passed to the Parser constructor. + """ + @overload def message_from_bytes(s: bytes | bytearray, _class: Callable[[], _MessageT]) -> _MessageT: ... @overload @@ -60,8 +62,9 @@ def message_from_bytes( def message_from_file(fp: IO[str]) -> Message: """Read a file and parse its contents into a Message object model. -Optional _class and strict are passed to the Parser constructor. -""" + Optional _class and strict are passed to the Parser constructor. 
+ """ + @overload def message_from_file(fp: IO[str], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload @@ -70,8 +73,9 @@ def message_from_file(fp: IO[str], _class: Callable[[], _MessageT] = ..., *, pol def message_from_binary_file(fp: IO[bytes]) -> Message: """Read a binary file and parse its contents into a Message object model. -Optional _class and strict are passed to the Parser constructor. -""" + Optional _class and strict are passed to the Parser constructor. + """ + @overload def message_from_binary_file(fp: IO[bytes], _class: Callable[[], _MessageT]) -> _MessageT: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi index b2e2b5e617135..1c73357b5388f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_header_value_parser.pyi @@ -66,6 +66,7 @@ It is returned in place of lists of (ctext/quoted-pair) and XXX: provide complete list of token types. """ + from collections.abc import Iterable, Iterator from email.errors import HeaderParseError, MessageDefect from email.policy import Policy @@ -91,8 +92,8 @@ SPECIALSNL: Final[set[str]] # Added in Python 3.9.23, 3.10.17, 3.11.12, 3.12.9, 3.13.2 def make_quoted_pairs(value: Any) -> str: - """Escape dquote and backslash for use within a quoted-string. -""" + """Escape dquote and backslash for use within a quoted-string.""" + def quote_string(value: Any) -> str: ... rfc2047_matcher: Final[Pattern[str]] @@ -110,8 +111,8 @@ class TokenList(list[TokenList | Terminal]): def startswith_fws(self) -> bool: ... @property def as_ew_allowed(self) -> bool: - """True if all top level tokens of this part may be RFC2047 encoded. -""" + """True if all top level tokens of this part may be RFC2047 encoded.""" + @property def comments(self) -> list[str]: ... def fold(self, *, policy: Policy) -> str: ... @@ -412,9 +413,9 @@ class ValueTerminal(Terminal): def startswith_fws(self) -> bool: ... class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... + class _InvalidEwError(HeaderParseError): - """Invalid encoded word found while parsing headers. -""" + """Invalid encoded word found while parsing headers.""" DOT: Final[ValueTerminal] ListSeparator: Final[ValueTerminal] @@ -423,357 +424,377 @@ RouteComponentMarker: Final[ValueTerminal] def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: """FWS = 1*WSP -This isn't the RFC definition. We're using fws to represent tokens where -folding can be done, but when we are parsing the *un*folding has already -been done so we don't need to watch out for CRLF. + This isn't the RFC definition. We're using fws to represent tokens where + folding can be done, but when we are parsing the *un*folding has already + been done so we don't need to watch out for CRLF. + + """ -""" def get_encoded_word(value: str, terminal_type: str = "vtext") -> tuple[EncodedWord, str]: - """encoded-word = "=?" charset "?" encoding "?" encoded-text "?=" + """encoded-word = "=?" charset "?" encoding "?" encoded-text "?=" """ - """ def get_unstructured(value: str) -> UnstructuredTokenList: """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - obs-NO-WS-CTL is control characters except WSP/CR/LF. 
+ obs-NO-WS-CTL is control characters except WSP/CR/LF. -So, basically, we have printable runs, plus control characters or nulls in -the obsolete syntax, separated by whitespace. Since RFC 2047 uses the -obsolete syntax in its specification, but requires whitespace on either -side of the encoded words, I can see no reason to need to separate the -non-printable-non-whitespace from the printable runs if they occur, so we -parse this into xtext tokens separated by WSP tokens. + So, basically, we have printable runs, plus control characters or nulls in + the obsolete syntax, separated by whitespace. Since RFC 2047 uses the + obsolete syntax in its specification, but requires whitespace on either + side of the encoded words, I can see no reason to need to separate the + non-printable-non-whitespace from the printable runs if they occur, so we + parse this into xtext tokens separated by WSP tokens. -Because an 'unstructured' value must by definition constitute the entire -value, this 'get' routine does not return a remaining value, only the -parsed TokenList. + Because an 'unstructured' value must by definition constitute the entire + value, this 'get' routine does not return a remaining value, only the + parsed TokenList. + + """ -""" def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: """ctext = -This is not the RFC ctext, since we are handling nested comments in comment -and unquoting quoted-pairs here. We allow anything except the '()' -characters, but if we find any ASCII other than the RFC defined printable -ASCII, a NonPrintableDefect is added to the token's defects list. Since -quoted pairs are converted to their unquoted values, what is returned is -a 'ptext' token. In this case it is a WhiteSpaceTerminal, so it's value -is ' '. + This is not the RFC ctext, since we are handling nested comments in comment + and unquoting quoted-pairs here. We allow anything except the '()' + characters, but if we find any ASCII other than the RFC defined printable + ASCII, a NonPrintableDefect is added to the token's defects list. Since + quoted pairs are converted to their unquoted values, what is returned is + a 'ptext' token. In this case it is a WhiteSpaceTerminal, so it's value + is ' '. + + """ -""" def get_qcontent(value: str) -> tuple[ValueTerminal, str]: """qcontent = qtext / quoted-pair -We allow anything except the DQUOTE character, but if we find any ASCII -other than the RFC defined printable ASCII, a NonPrintableDefect is -added to the token's defects list. Any quoted pairs are converted to their -unquoted values, so what is returned is a 'ptext' token. In this case it -is a ValueTerminal. + We allow anything except the DQUOTE character, but if we find any ASCII + other than the RFC defined printable ASCII, a NonPrintableDefect is + added to the token's defects list. Any quoted pairs are converted to their + unquoted values, so what is returned is a 'ptext' token. In this case it + is a ValueTerminal. + + """ -""" def get_atext(value: str) -> tuple[ValueTerminal, str]: """atext = -We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to -the token's defects list if we find non-atext characters. -""" + We allow any non-ATOM_ENDS in atext, but add an InvalidATextDefect to + the token's defects list if we find non-atext characters. + """ + def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: """bare-quoted-string = DQUOTE *([FWS] qcontent) [FWS] DQUOTE -A quoted-string without the leading or trailing white space. 
Its -value is the text between the quote marks, with whitespace -preserved and quoted pairs decoded. -""" + A quoted-string without the leading or trailing white space. Its + value is the text between the quote marks, with whitespace + preserved and quoted pairs decoded. + """ + def get_comment(value: str) -> tuple[Comment, str]: """comment = "(" *([FWS] ccontent) [FWS] ")" - ccontent = ctext / quoted-pair / comment + ccontent = ctext / quoted-pair / comment + + We handle nested comments here, and quoted-pair in our qp-ctext routine. + """ -We handle nested comments here, and quoted-pair in our qp-ctext routine. -""" def get_cfws(value: str) -> tuple[CFWSList, str]: - """CFWS = (1*([FWS] comment) [FWS]) / FWS + """CFWS = (1*([FWS] comment) [FWS]) / FWS""" - """ def get_quoted_string(value: str) -> tuple[QuotedString, str]: """quoted-string = [CFWS] [CFWS] -'bare-quoted-string' is an intermediate class defined by this -parser and not by the RFC grammar. It is the quoted string -without any attached CFWS. -""" + 'bare-quoted-string' is an intermediate class defined by this + parser and not by the RFC grammar. It is the quoted string + without any attached CFWS. + """ + def get_atom(value: str) -> tuple[Atom, str]: """atom = [CFWS] 1*atext [CFWS] -An atom could be an rfc2047 encoded word. -""" + An atom could be an rfc2047 encoded word. + """ + def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: - """dot-text = 1*atext *("." 1*atext) + """dot-text = 1*atext *("." 1*atext)""" - """ def get_dot_atom(value: str) -> tuple[DotAtom, str]: """dot-atom = [CFWS] dot-atom-text [CFWS] -Any place we can have a dot atom, we could instead have an rfc2047 encoded -word. -""" + Any place we can have a dot atom, we could instead have an rfc2047 encoded + word. + """ + def get_word(value: str) -> tuple[Any, str]: """word = atom / quoted-string -Either atom or quoted-string may start with CFWS. We have to peel off this -CFWS first to determine which type of word to parse. Afterward we splice -the leading CFWS, if any, into the parsed sub-token. + Either atom or quoted-string may start with CFWS. We have to peel off this + CFWS first to determine which type of word to parse. Afterward we splice + the leading CFWS, if any, into the parsed sub-token. -If neither an atom or a quoted-string is found before the next special, a -HeaderParseError is raised. + If neither an atom or a quoted-string is found before the next special, a + HeaderParseError is raised. -The token returned is either an Atom or a QuotedString, as appropriate. -This means the 'word' level of the formal grammar is not represented in the -parse tree; this is because having that extra layer when manipulating the -parse tree is more confusing than it is helpful. + The token returned is either an Atom or a QuotedString, as appropriate. + This means the 'word' level of the formal grammar is not represented in the + parse tree; this is because having that extra layer when manipulating the + parse tree is more confusing than it is helpful. + + """ -""" def get_phrase(value: str) -> tuple[Phrase, str]: """phrase = 1*word / obs-phrase - obs-phrase = word *(word / "." / CFWS) + obs-phrase = word *(word / "." / CFWS) -This means a phrase can be a sequence of words, periods, and CFWS in any -order as long as it starts with at least one word. If anything other than -words is detected, an ObsoleteHeaderDefect is added to the token's defect -list. 
We also accept a phrase that starts with CFWS followed by a dot; -this is registered as an InvalidHeaderDefect, since it is not supported by -even the obsolete grammar. + This means a phrase can be a sequence of words, periods, and CFWS in any + order as long as it starts with at least one word. If anything other than + words is detected, an ObsoleteHeaderDefect is added to the token's defect + list. We also accept a phrase that starts with CFWS followed by a dot; + this is registered as an InvalidHeaderDefect, since it is not supported by + even the obsolete grammar. + + """ -""" def get_local_part(value: str) -> tuple[LocalPart, str]: - """local-part = dot-atom / quoted-string / obs-local-part + """local-part = dot-atom / quoted-string / obs-local-part""" - """ def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: - """obs-local-part = word *("." word) - """ + """obs-local-part = word *("." word)""" + def get_dtext(value: str) -> tuple[ValueTerminal, str]: """dtext = / obs-dtext - obs-dtext = obs-NO-WS-CTL / quoted-pair + obs-dtext = obs-NO-WS-CTL / quoted-pair -We allow anything except the excluded characters, but if we find any -ASCII other than the RFC defined printable ASCII, a NonPrintableDefect is -added to the token's defects list. Quoted pairs are converted to their -unquoted values, so what is returned is a ptext token, in this case a -ValueTerminal. If there were quoted-printables, an ObsoleteHeaderDefect is -added to the returned token's defect list. + We allow anything except the excluded characters, but if we find any + ASCII other than the RFC defined printable ASCII, a NonPrintableDefect is + added to the token's defects list. Quoted pairs are converted to their + unquoted values, so what is returned is a ptext token, in this case a + ValueTerminal. If there were quoted-printables, an ObsoleteHeaderDefect is + added to the returned token's defect list. + + """ -""" def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: - """domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS] + """domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]""" - """ def get_domain(value: str) -> tuple[Domain, str]: """domain = dot-atom / domain-literal / obs-domain -obs-domain = atom *("." atom)) + obs-domain = atom *("." atom)) + + """ -""" def get_addr_spec(value: str) -> tuple[AddrSpec, str]: - """addr-spec = local-part "@" domain + """addr-spec = local-part "@" domain""" - """ def get_obs_route(value: str) -> tuple[ObsRoute, str]: """obs-route = obs-domain-list ":" -obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) + obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) + + Returns an obs-route token with the appropriate sub-tokens (that is, + there is no obs-domain-list in the parse tree). + """ -Returns an obs-route token with the appropriate sub-tokens (that is, -there is no obs-domain-list in the parse tree). -""" def get_angle_addr(value: str) -> tuple[AngleAddr, str]: """angle-addr = [CFWS] "<" addr-spec ">" [CFWS] / obs-angle-addr -obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS] + obs-angle-addr = [CFWS] "<" obs-route addr-spec ">" [CFWS] + + """ -""" def get_display_name(value: str) -> tuple[DisplayName, str]: """display-name = phrase -Because this is simply a name-rule, we don't return a display-name -token containing a phrase, but rather a display-name token with -the content of the phrase. 
+ Because this is simply a name-rule, we don't return a display-name + token containing a phrase, but rather a display-name token with + the content of the phrase. + + """ -""" def get_name_addr(value: str) -> tuple[NameAddr, str]: - """name-addr = [display-name] angle-addr + """name-addr = [display-name] angle-addr""" - """ def get_mailbox(value: str) -> tuple[Mailbox, str]: - """mailbox = name-addr / addr-spec + """mailbox = name-addr / addr-spec""" - """ def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: """Read everything up to one of the chars in endchars. -This is outside the formal grammar. The InvalidMailbox TokenList that is -returned acts like a Mailbox, but the data attributes are None. + This is outside the formal grammar. The InvalidMailbox TokenList that is + returned acts like a Mailbox, but the data attributes are None. + + """ -""" def get_mailbox_list(value: str) -> tuple[MailboxList, str]: """mailbox-list = (mailbox *("," mailbox)) / obs-mbox-list - obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS]) + obs-mbox-list = *([CFWS] ",") mailbox *("," [mailbox / CFWS]) -For this routine we go outside the formal grammar in order to improve error -handling. We recognize the end of the mailbox list only at the end of the -value or at a ';' (the group terminator). This is so that we can turn -invalid mailboxes into InvalidMailbox tokens and continue parsing any -remaining valid mailboxes. We also allow all mailbox entries to be null, -and this condition is handled appropriately at a higher level. + For this routine we go outside the formal grammar in order to improve error + handling. We recognize the end of the mailbox list only at the end of the + value or at a ';' (the group terminator). This is so that we can turn + invalid mailboxes into InvalidMailbox tokens and continue parsing any + remaining valid mailboxes. We also allow all mailbox entries to be null, + and this condition is handled appropriately at a higher level. + + """ -""" def get_group_list(value: str) -> tuple[GroupList, str]: """group-list = mailbox-list / CFWS / obs-group-list -obs-group-list = 1*([CFWS] ",") [CFWS] + obs-group-list = 1*([CFWS] ",") [CFWS] + + """ -""" def get_group(value: str) -> tuple[Group, str]: - """group = display-name ":" [group-list] ";" [CFWS] + """group = display-name ":" [group-list] ";" [CFWS]""" - """ def get_address(value: str) -> tuple[Address, str]: """address = mailbox / group -Note that counter-intuitively, an address can be either a single address or -a list of addresses (a group). This is why the returned Address object has -a 'mailboxes' attribute which treats a single address as a list of length -one. When you need to differentiate between to two cases, extract the single -element, which is either a mailbox or a group token. + Note that counter-intuitively, an address can be either a single address or + a list of addresses (a group). This is why the returned Address object has + a 'mailboxes' attribute which treats a single address as a list of length + one. When you need to differentiate between to two cases, extract the single + element, which is either a mailbox or a group token. 
+ + """ -""" def get_address_list(value: str) -> tuple[AddressList, str]: """address_list = (address *("," address)) / obs-addr-list - obs-addr-list = *([CFWS] ",") address *("," [address / CFWS]) + obs-addr-list = *([CFWS] ",") address *("," [address / CFWS]) -We depart from the formal grammar here by continuing to parse until the end -of the input, assuming the input to be entirely composed of an -address-list. This is always true in email parsing, and allows us -to skip invalid addresses to parse additional valid ones. + We depart from the formal grammar here by continuing to parse until the end + of the input, assuming the input to be entirely composed of an + address-list. This is always true in email parsing, and allows us + to skip invalid addresses to parse additional valid ones. -""" -def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: - """no-fold-literal = "[" *dtext "]" """ + +def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: + """no-fold-literal = "[" *dtext "]" """ + def get_msg_id(value: str) -> tuple[MsgID, str]: """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS] -id-left = dot-atom-text / obs-id-left -id-right = dot-atom-text / no-fold-literal / obs-id-right -no-fold-literal = "[" *dtext "]" -""" -def parse_message_id(value: str) -> MessageID: - """message-id = "Message-ID:" msg-id CRLF + id-left = dot-atom-text / obs-id-left + id-right = dot-atom-text / no-fold-literal / obs-id-right + no-fold-literal = "[" *dtext "]" """ + +def parse_message_id(value: str) -> MessageID: + """message-id = "Message-ID:" msg-id CRLF""" + def parse_mime_version(value: str) -> MIMEVersion: - """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS] + """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]""" - """ def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: """Read everything up to the next ';'. -This is outside the formal grammar. The InvalidParameter TokenList that is -returned acts like a Parameter, but the data attributes are None. + This is outside the formal grammar. The InvalidParameter TokenList that is + returned acts like a Parameter, but the data attributes are None. + + """ -""" def get_ttext(value: str) -> tuple[ValueTerminal, str]: """ttext = -We allow any non-TOKEN_ENDS in ttext, but add defects to the token's -defects list if we find non-ttext characters. We also register defects for -*any* non-printables even though the RFC doesn't exclude all of them, -because we follow the spirit of RFC 5322. + We allow any non-TOKEN_ENDS in ttext, but add defects to the token's + defects list if we find non-ttext characters. We also register defects for + *any* non-printables even though the RFC doesn't exclude all of them, + because we follow the spirit of RFC 5322. + + """ -""" def get_token(value: str) -> tuple[Token, str]: """token = [CFWS] 1*ttext [CFWS] -The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or -tspecials. We also exclude tabs even though the RFC doesn't. + The RFC equivalent of ttext is any US-ASCII chars except space, ctls, or + tspecials. We also exclude tabs even though the RFC doesn't. -The RFC implies the CFWS but is not explicit about it in the BNF. + The RFC implies the CFWS but is not explicit about it in the BNF. + + """ -""" def get_attrtext(value: str) -> tuple[ValueTerminal, str]: """attrtext = 1*(any non-ATTRIBUTE_ENDS character) -We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the -token's defects list if we find non-attrtext characters. 
We also register -defects for *any* non-printables even though the RFC doesn't exclude all of -them, because we follow the spirit of RFC 5322. + We allow any non-ATTRIBUTE_ENDS in attrtext, but add defects to the + token's defects list if we find non-attrtext characters. We also register + defects for *any* non-printables even though the RFC doesn't exclude all of + them, because we follow the spirit of RFC 5322. + + """ -""" def get_attribute(value: str) -> tuple[Attribute, str]: """[CFWS] 1*attrtext [CFWS] -This version of the BNF makes the CFWS explicit, and as usual we use a -value terminal for the actual run of characters. The RFC equivalent of -attrtext is the token characters, with the subtraction of '*', "'", and '%'. -We include tab in the excluded set just as we do for token. + This version of the BNF makes the CFWS explicit, and as usual we use a + value terminal for the actual run of characters. The RFC equivalent of + attrtext is the token characters, with the subtraction of '*', "'", and '%'. + We include tab in the excluded set just as we do for token. + + """ -""" def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: """attrtext = 1*(any non-ATTRIBUTE_ENDS character plus '%') -This is a special parsing routine so that we get a value that -includes % escapes as a single string (which we decode as a single -string later). + This is a special parsing routine so that we get a value that + includes % escapes as a single string (which we decode as a single + string later). + + """ -""" def get_extended_attribute(value: str) -> tuple[Attribute, str]: """[CFWS] 1*extended_attrtext [CFWS] -This is like the non-extended version except we allow % characters, so that -we can pick up an encoded value as a single string. + This is like the non-extended version except we allow % characters, so that + we can pick up an encoded value as a single string. + + """ -""" def get_section(value: str) -> tuple[Section, str]: """'*' digits -The formal BNF is more complicated because leading 0s are not allowed. We -check for that and add a defect. We also assume no CFWS is allowed between -the '*' and the digits, though the RFC is not crystal clear on that. -The caller should already have dealt with leading CFWS. + The formal BNF is more complicated because leading 0s are not allowed. We + check for that and add a defect. We also assume no CFWS is allowed between + the '*' and the digits, though the RFC is not crystal clear on that. + The caller should already have dealt with leading CFWS. + + """ -""" def get_value(value: str) -> tuple[Value, str]: - """quoted-string / attribute + """quoted-string / attribute""" - """ def get_parameter(value: str) -> tuple[Parameter, str]: """attribute [section] ["*"] [CFWS] "=" value -The CFWS is implied by the RFC but not made explicit in the BNF. This -simplified form of the BNF from the RFC is made to conform with the RFC BNF -through some extra checks. We do it this way because it makes both error -recovery and working with the resulting parse tree easier. -""" + The CFWS is implied by the RFC but not made explicit in the BNF. This + simplified form of the BNF from the RFC is made to conform with the RFC BNF + through some extra checks. We do it this way because it makes both error + recovery and working with the resulting parse tree easier. + """ + def parse_mime_parameters(value: str) -> MimeParameters: """parameter *( ";" parameter ) -That BNF is meant to indicate this routine should only be called after -finding and handling the leading ';'. 
There is no corresponding rule in -the formal RFC grammar, but it is more convenient for us for the set of -parameters to be treated as its own TokenList. + That BNF is meant to indicate this routine should only be called after + finding and handling the leading ';'. There is no corresponding rule in + the formal RFC grammar, but it is more convenient for us for the set of + parameters to be treated as its own TokenList. -This is 'parse' routine because it consumes the remaining value, but it -would never be called to parse a full header. Instead it is called to -parse everything after the non-parameter value of a specific MIME header. + This is 'parse' routine because it consumes the remaining value, but it + would never be called to parse a full header. Instead it is called to + parse everything after the non-parameter value of a specific MIME header. + + """ -""" def parse_content_type_header(value: str) -> ContentType: """maintype "/" subtype *( ";" parameter ) -The maintype and substype are tokens. Theoretically they could -be checked against the official IANA list + x-token, but we -don't do that. -""" + The maintype and substype are tokens. Theoretically they could + be checked against the official IANA list + x-token, but we + don't do that. + """ + def parse_content_disposition_header(value: str) -> ContentDisposition: - """disposition-type *( ";" parameter ) + """disposition-type *( ";" parameter )""" - """ def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: - """mechanism - - """ + """mechanism""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi index c7877e8a25c4d..de41124c98bdc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/_policybase.pyi @@ -2,6 +2,7 @@ Allows fine grained feature control of how the package parses and emits data. """ + from abc import ABCMeta, abstractmethod from email.errors import MessageDefect from email.header import Header @@ -24,24 +25,25 @@ class _MessageFactory(Protocol[_MessageT]): class _PolicyBase(Generic[_MessageT_co]): """Policy Object basic framework. -This class is useless unless subclassed. A subclass should define -class attributes with defaults for any values that are to be -managed by the Policy object. The constructor will then allow -non-default values to be set for these attributes at instance -creation time. The instance will be callable, taking these same -attributes keyword arguments, and returning a new instance -identical to the called instance except for those values changed -by the keyword arguments. Instances may be added, yielding new -instances with any non-default values from the right hand -operand overriding those in the left hand operand. That is, + This class is useless unless subclassed. A subclass should define + class attributes with defaults for any values that are to be + managed by the Policy object. The constructor will then allow + non-default values to be set for these attributes at instance + creation time. The instance will be callable, taking these same + attributes keyword arguments, and returning a new instance + identical to the called instance except for those values changed + by the keyword arguments. Instances may be added, yielding new + instances with any non-default values from the right hand + operand overriding those in the left hand operand. 
That is, - A + B == A() + A + B == A() -The repr of an instance can be used to reconstruct the object -if and only if the repr of the values can be used to reconstruct -those values. + The repr of an instance can be used to reconstruct the object + if and only if the repr of the values can be used to reconstruct + those values. + + """ -""" max_line_length: int | None linesep: str cte_type: str @@ -65,9 +67,10 @@ those values. ) -> None: """Create new Policy, possibly overriding some defaults. -See class docstring for a list of overridable attributes. + See class docstring for a list of overridable attributes. + + """ -""" def clone( self, *, @@ -82,268 +85,282 @@ See class docstring for a list of overridable attributes. ) -> Self: """Return a new instance with specified attributes changed. -The new instance has the same attribute values as the current object, -except for the changes passed in as keyword arguments. + The new instance has the same attribute values as the current object, + except for the changes passed in as keyword arguments. + + """ -""" def __add__(self, other: Policy) -> Self: """Non-default values from right operand override those from left. -The object returned is a new instance of the subclass. + The object returned is a new instance of the subclass. -""" + """ class Policy(_PolicyBase[_MessageT_co], metaclass=ABCMeta): """Controls for how messages are interpreted and formatted. -Most of the classes and many of the methods in the email package accept -Policy objects as parameters. A Policy object contains a set of values and -functions that control how input is interpreted and how output is rendered. -For example, the parameter 'raise_on_defect' controls whether or not an RFC -violation results in an error being raised or not, while 'max_line_length' -controls the maximum length of output lines when a Message is serialized. - -Any valid attribute may be overridden when a Policy is created by passing -it as a keyword argument to the constructor. Policy objects are immutable, -but a new Policy object can be created with only certain values changed by -calling the Policy instance with keyword arguments. Policy objects can -also be added, producing a new Policy object in which the non-default -attributes set in the right hand operand overwrite those specified in the -left operand. - -Settable attributes: - -raise_on_defect -- If true, then defects should be raised as errors. - Default: False. - -linesep -- string containing the value to use as separation - between output lines. Default '\\n'. - -cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. - -max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - -mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. This is used when the message is being - serialized by a generator. Default: False. - -message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - -verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. 
- This is a check against custom Header & fold() - implementations. -""" + Most of the classes and many of the methods in the email package accept + Policy objects as parameters. A Policy object contains a set of values and + functions that control how input is interpreted and how output is rendered. + For example, the parameter 'raise_on_defect' controls whether or not an RFC + violation results in an error being raised or not, while 'max_line_length' + controls the maximum length of output lines when a Message is serialized. + + Any valid attribute may be overridden when a Policy is created by passing + it as a keyword argument to the constructor. Policy objects are immutable, + but a new Policy object can be created with only certain values changed by + calling the Policy instance with keyword arguments. Policy objects can + also be added, producing a new Policy object in which the non-default + attributes set in the right hand operand overwrite those specified in the + left operand. + + Settable attributes: + + raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + + linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + + cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + + max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + + mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + + message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. + """ + # Every Message object has a `defects` attribute, so the following # methods will work for any Message object. def handle_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: """Based on policy, either raise defect or call register_defect. - handle_defect(obj, defect) + handle_defect(obj, defect) -defect should be a Defect subclass, but in any case must be an -Exception subclass. obj is the object on which the defect should be -registered if it is not raised. If the raise_on_defect is True, the -defect is raised as an error, otherwise the object and the defect are -passed to register_defect. + defect should be a Defect subclass, but in any case must be an + Exception subclass. obj is the object on which the defect should be + registered if it is not raised. If the raise_on_defect is True, the + defect is raised as an error, otherwise the object and the defect are + passed to register_defect. -This method is intended to be called by parsers that discover defects. -The email package parsers always call it with Defect instances. + This method is intended to be called by parsers that discover defects. + The email package parsers always call it with Defect instances. 
+ + """ -""" def register_defect(self, obj: Message[Any, Any], defect: MessageDefect) -> None: """Record 'defect' on 'obj'. -Called by handle_defect if raise_on_defect is False. This method is -part of the Policy API so that Policy subclasses can implement custom -defect handling. The default implementation calls the append method of -the defects attribute of obj. The objects used by the email package by -default that get passed to this method will always have a defects -attribute with an append method. + Called by handle_defect if raise_on_defect is False. This method is + part of the Policy API so that Policy subclasses can implement custom + defect handling. The default implementation calls the append method of + the defects attribute of obj. The objects used by the email package by + default that get passed to this method will always have a defects + attribute with an append method. + + """ -""" def header_max_count(self, name: str) -> int | None: """Return the maximum allowed number of headers named 'name'. -Called when a header is added to a Message object. If the returned -value is not 0 or None, and there are already a number of headers with -the name 'name' equal to the value returned, a ValueError is raised. + Called when a header is added to a Message object. If the returned + value is not 0 or None, and there are already a number of headers with + the name 'name' equal to the value returned, a ValueError is raised. -Because the default behavior of Message's __setitem__ is to append the -value to the list of headers, it is easy to create duplicate headers -without realizing it. This method allows certain headers to be limited -in the number of instances of that header that may be added to a -Message programmatically. (The limit is not observed by the parser, -which will faithfully produce as many headers as exist in the message -being parsed.) + Because the default behavior of Message's __setitem__ is to append the + value to the list of headers, it is easy to create duplicate headers + without realizing it. This method allows certain headers to be limited + in the number of instances of that header that may be added to a + Message programmatically. (The limit is not observed by the parser, + which will faithfully produce as many headers as exist in the message + being parsed.) + + The default implementation returns None for all header names. + """ -The default implementation returns None for all header names. -""" @abstractmethod def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: """Given a list of linesep terminated strings constituting the lines of -a single header, return the (name, value) tuple that should be stored -in the model. The input lines should retain their terminating linesep -characters. The lines passed in by the email package may contain -surrogateescaped binary data. -""" + a single header, return the (name, value) tuple that should be stored + in the model. The input lines should retain their terminating linesep + characters. The lines passed in by the email package may contain + surrogateescaped binary data. + """ + @abstractmethod def header_store_parse(self, name: str, value: str) -> tuple[str, str]: """Given the header name and the value provided by the application -program, return the (name, value) that should be stored in the model. -""" + program, return the (name, value) that should be stored in the model. 
+ """ + @abstractmethod def header_fetch_parse(self, name: str, value: str) -> str: """Given the header name and the value from the model, return the value -to be returned to the application program that is requesting that -header. The value passed in by the email package may contain -surrogateescaped binary data if the lines were parsed by a BytesParser. -The returned value should not contain any surrogateescaped data. + to be returned to the application program that is requesting that + header. The value passed in by the email package may contain + surrogateescaped binary data if the lines were parsed by a BytesParser. + The returned value should not contain any surrogateescaped data. + + """ -""" @abstractmethod def fold(self, name: str, value: str) -> str: """Given the header name and the value from the model, return a string -containing linesep characters that implement the folding of the header -according to the policy controls. The value passed in by the email -package may contain surrogateescaped binary data if the lines were -parsed by a BytesParser. The returned value should not contain any -surrogateescaped data. + containing linesep characters that implement the folding of the header + according to the policy controls. The value passed in by the email + package may contain surrogateescaped binary data if the lines were + parsed by a BytesParser. The returned value should not contain any + surrogateescaped data. + + """ -""" @abstractmethod def fold_binary(self, name: str, value: str) -> bytes: """Given the header name and the value from the model, return binary -data containing linesep characters that implement the folding of the -header according to the policy controls. The value passed in by the -email package may contain surrogateescaped binary data. + data containing linesep characters that implement the folding of the + header according to the policy controls. The value passed in by the + email package may contain surrogateescaped binary data. -""" + """ class Compat32(Policy[_MessageT_co]): """Controls for how messages are interpreted and formatted. -Most of the classes and many of the methods in the email package accept -Policy objects as parameters. A Policy object contains a set of values and -functions that control how input is interpreted and how output is rendered. -For example, the parameter 'raise_on_defect' controls whether or not an RFC -violation results in an error being raised or not, while 'max_line_length' -controls the maximum length of output lines when a Message is serialized. - -Any valid attribute may be overridden when a Policy is created by passing -it as a keyword argument to the constructor. Policy objects are immutable, -but a new Policy object can be created with only certain values changed by -calling the Policy instance with keyword arguments. Policy objects can -also be added, producing a new Policy object in which the non-default -attributes set in the right hand operand overwrite those specified in the -left operand. - -Settable attributes: - -raise_on_defect -- If true, then defects should be raised as errors. - Default: False. - -linesep -- string containing the value to use as separation - between output lines. Default '\\n'. - -cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. 
- -max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - -mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. This is used when the message is being - serialized by a generator. Default: False. - -message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - -verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. - This is a check against custom Header & fold() - implementations. -This particular policy is the backward compatibility Policy. It -replicates the behavior of the email package version 5.1. -""" + Most of the classes and many of the methods in the email package accept + Policy objects as parameters. A Policy object contains a set of values and + functions that control how input is interpreted and how output is rendered. + For example, the parameter 'raise_on_defect' controls whether or not an RFC + violation results in an error being raised or not, while 'max_line_length' + controls the maximum length of output lines when a Message is serialized. + + Any valid attribute may be overridden when a Policy is created by passing + it as a keyword argument to the constructor. Policy objects are immutable, + but a new Policy object can be created with only certain values changed by + calling the Policy instance with keyword arguments. Policy objects can + also be added, producing a new Policy object in which the non-default + attributes set in the right hand operand overwrite those specified in the + left operand. + + Settable attributes: + + raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + + linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + + cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + + max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + + mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + + message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. + This particular policy is the backward compatibility Policy. It + replicates the behavior of the email package version 5.1. + """ + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: """Given a list of linesep terminated strings constituting the lines of -a single header, return the (name, value) tuple that should be stored -in the model. The input lines should retain their terminating linesep -characters. 
The lines passed in by the email package may contain -surrogateescaped binary data. -The name is parsed as everything up to the ':' and returned unmodified. -The value is determined by stripping leading whitespace off the -remainder of the first line joined with all subsequent lines, and -stripping any trailing carriage return or linefeed characters. + a single header, return the (name, value) tuple that should be stored + in the model. The input lines should retain their terminating linesep + characters. The lines passed in by the email package may contain + surrogateescaped binary data. + The name is parsed as everything up to the ':' and returned unmodified. + The value is determined by stripping leading whitespace off the + remainder of the first line joined with all subsequent lines, and + stripping any trailing carriage return or linefeed characters. + + """ -""" def header_store_parse(self, name: str, value: str) -> tuple[str, str]: """Given the header name and the value provided by the application -program, return the (name, value) that should be stored in the model. -The name and value are returned unmodified. -""" + program, return the (name, value) that should be stored in the model. + The name and value are returned unmodified. + """ + def header_fetch_parse(self, name: str, value: str) -> str | Header: # type: ignore[override] """Given the header name and the value from the model, return the value -to be returned to the application program that is requesting that -header. The value passed in by the email package may contain -surrogateescaped binary data if the lines were parsed by a BytesParser. -The returned value should not contain any surrogateescaped data. + to be returned to the application program that is requesting that + header. The value passed in by the email package may contain + surrogateescaped binary data if the lines were parsed by a BytesParser. + The returned value should not contain any surrogateescaped data. + + If the value contains binary data, it is converted into a Header object + using the unknown-8bit charset. Otherwise it is returned unmodified. + """ -If the value contains binary data, it is converted into a Header object -using the unknown-8bit charset. Otherwise it is returned unmodified. -""" def fold(self, name: str, value: str) -> str: """Given the header name and the value from the model, return a string -containing linesep characters that implement the folding of the header -according to the policy controls. The value passed in by the email -package may contain surrogateescaped binary data if the lines were -parsed by a BytesParser. The returned value should not contain any -surrogateescaped data. + containing linesep characters that implement the folding of the header + according to the policy controls. The value passed in by the email + package may contain surrogateescaped binary data if the lines were + parsed by a BytesParser. The returned value should not contain any + surrogateescaped data. -Headers are folded using the Header folding algorithm, which preserves -existing line breaks in the value, and wraps each resulting line to the -max_line_length. Non-ASCII binary data are CTE encoded using the -unknown-8bit charset. + Headers are folded using the Header folding algorithm, which preserves + existing line breaks in the value, and wraps each resulting line to the + max_line_length. Non-ASCII binary data are CTE encoded using the + unknown-8bit charset. 
+ + """ -""" def fold_binary(self, name: str, value: str) -> bytes: """Given the header name and the value from the model, return binary -data containing linesep characters that implement the folding of the -header according to the policy controls. The value passed in by the -email package may contain surrogateescaped binary data. + data containing linesep characters that implement the folding of the + header according to the policy controls. The value passed in by the + email package may contain surrogateescaped binary data. -Headers are folded using the Header folding algorithm, which preserves -existing line breaks in the value, and wraps each resulting line to the -max_line_length. If cte_type is 7bit, non-ascii binary data is CTE -encoded using the unknown-8bit charset. Otherwise the original source -header is used, with its existing line breaks and/or binary data. + Headers are folded using the Header folding algorithm, which preserves + existing line breaks in the value, and wraps each resulting line to the + max_line_length. If cte_type is 7bit, non-ascii binary data is CTE + encoded using the unknown-8bit charset. Otherwise the original source + header is used, with its existing line breaks and/or binary data. -""" + """ compat32: Compat32[Message[str, str]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi index 43b7cc7aea562..5c606ff488193 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/base64mime.pyi @@ -19,38 +19,40 @@ necessary for proper internationalized headers; it only does dumb encoding and decoding. To deal with the various line wrapping issues, use the email.header module. """ + __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] from _typeshed import ReadableBuffer def header_length(bytearray: str | bytes | bytearray) -> int: - """Return the length of s when it is encoded with base64. -""" + """Return the length of s when it is encoded with base64.""" + def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: """Encode a single header line with Base64 encoding in a given charset. -charset names the character set to use to encode the header. It defaults -to iso-8859-1. Base64 encoding is defined in RFC 2045. -""" + charset names the character set to use to encode the header. It defaults + to iso-8859-1. Base64 encoding is defined in RFC 2045. + """ # First argument should be a buffer that supports slicing and len(). def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: """Encode a string with base64. -Each line will be wrapped at, at most, maxlinelen characters (defaults to -76 characters). + Each line will be wrapped at, at most, maxlinelen characters (defaults to + 76 characters). + + Each line of encoded text will end with eol, which defaults to "\\n". Set + this to "\\r\\n" if you will be using the result of this function directly + in an email. + """ -Each line of encoded text will end with eol, which defaults to "\\n". Set -this to "\\r\\n" if you will be using the result of this function directly -in an email. -""" def decode(string: str | ReadableBuffer) -> bytes: """Decode a raw base64 string, returning a bytes object. 
-This function does not parse a full MIME header value encoded with -base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high -level email.header class for that functionality. -""" + This function does not parse a full MIME header value encoded with + base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high + level email.header class for that functionality. + """ body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi index d78abd8f91d8a..3688abc11668d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/charset.pyi @@ -18,47 +18,48 @@ CODEC_MAP: Final[dict[str, str | None]] # undocumented class Charset: """Map character sets to their email properties. -This class provides information about the requirements imposed on email -for a specific character set. It also provides convenience routines for -converting between character sets, given the availability of the -applicable codecs. Given a character set, it will do its best to provide -information on how to use that character set in an email in an -RFC-compliant way. - -Certain character sets must be encoded with quoted-printable or base64 -when used in email headers or bodies. Certain character sets must be -converted outright, and are not allowed in email. Instances of this -module expose the following information about a character set: - -input_charset: The initial character set specified. Common aliases - are converted to their 'official' email names (e.g. latin_1 - is converted to iso-8859-1). Defaults to 7-bit us-ascii. - -header_encoding: If the character set must be encoded before it can be - used in an email header, this attribute will be set to - charset.QP (for quoted-printable), charset.BASE64 (for - base64 encoding), or charset.SHORTEST for the shortest of - QP or BASE64 encoding. Otherwise, it will be None. - -body_encoding: Same as header_encoding, but describes the encoding for the - mail message's body, which indeed may be different than the - header encoding. charset.SHORTEST is not allowed for - body_encoding. - -output_charset: Some character sets must be converted before they can be - used in email headers or bodies. If the input_charset is - one of them, this attribute will contain the name of the - charset output will be converted to. Otherwise, it will - be None. - -input_codec: The name of the Python codec used to convert the - input_charset to Unicode. If no conversion codec is - necessary, this attribute will be None. - -output_codec: The name of the Python codec used to convert Unicode - to the output_charset. If no conversion codec is necessary, - this attribute will have the same value as the input_codec. -""" + This class provides information about the requirements imposed on email + for a specific character set. It also provides convenience routines for + converting between character sets, given the availability of the + applicable codecs. Given a character set, it will do its best to provide + information on how to use that character set in an email in an + RFC-compliant way. + + Certain character sets must be encoded with quoted-printable or base64 + when used in email headers or bodies. Certain character sets must be + converted outright, and are not allowed in email. 
Instances of this + module expose the following information about a character set: + + input_charset: The initial character set specified. Common aliases + are converted to their 'official' email names (e.g. latin_1 + is converted to iso-8859-1). Defaults to 7-bit us-ascii. + + header_encoding: If the character set must be encoded before it can be + used in an email header, this attribute will be set to + charset.QP (for quoted-printable), charset.BASE64 (for + base64 encoding), or charset.SHORTEST for the shortest of + QP or BASE64 encoding. Otherwise, it will be None. + + body_encoding: Same as header_encoding, but describes the encoding for the + mail message's body, which indeed may be different than the + header encoding. charset.SHORTEST is not allowed for + body_encoding. + + output_charset: Some character sets must be converted before they can be + used in email headers or bodies. If the input_charset is + one of them, this attribute will contain the name of the + charset output will be converted to. Otherwise, it will + be None. + + input_codec: The name of the Python codec used to convert the + input_charset to Unicode. If no conversion codec is + necessary, this attribute will be None. + + output_codec: The name of the Python codec used to convert Unicode + to the output_charset. If no conversion codec is necessary, + this attribute will have the same value as the input_codec. + """ + input_charset: str header_encoding: int body_encoding: int @@ -69,60 +70,65 @@ output_codec: The name of the Python codec used to convert Unicode def get_body_encoding(self) -> str | Callable[[Message], None]: """Return the content-transfer-encoding used for body encoding. -This is either the string 'quoted-printable' or 'base64' depending on -the encoding used, or it is a function in which case you should call -the function with a single argument, the Message object being -encoded. The function should then set the Content-Transfer-Encoding -header itself to whatever is appropriate. + This is either the string 'quoted-printable' or 'base64' depending on + the encoding used, or it is a function in which case you should call + the function with a single argument, the Message object being + encoded. The function should then set the Content-Transfer-Encoding + header itself to whatever is appropriate. + + Returns "quoted-printable" if self.body_encoding is QP. + Returns "base64" if self.body_encoding is BASE64. + Returns conversion function otherwise. + """ -Returns "quoted-printable" if self.body_encoding is QP. -Returns "base64" if self.body_encoding is BASE64. -Returns conversion function otherwise. -""" def get_output_charset(self) -> str | None: """Return the output character set. -This is self.output_charset if that is not None, otherwise it is -self.input_charset. -""" + This is self.output_charset if that is not None, otherwise it is + self.input_charset. + """ + def header_encode(self, string: str) -> str: """Header-encode a string by converting it first to bytes. -The type of encoding (base64 or quoted-printable) will be based on -this charset's `header_encoding`. + The type of encoding (base64 or quoted-printable) will be based on + this charset's `header_encoding`. + + :param string: A unicode string for the header. It must be possible + to encode this string to bytes using the character set's + output codec. + :return: The encoded string, with RFC 2047 chrome. + """ -:param string: A unicode string for the header. 
It must be possible - to encode this string to bytes using the character set's - output codec. -:return: The encoded string, with RFC 2047 chrome. -""" def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str | None]: """Header-encode a string by converting it first to bytes. -This is similar to `header_encode()` except that the string is fit -into maximum line lengths as given by the argument. - -:param string: A unicode string for the header. It must be possible - to encode this string to bytes using the character set's - output codec. -:param maxlengths: Maximum line length iterator. Each element - returned from this iterator will provide the next maximum line - length. This parameter is used as an argument to built-in next() - and should never be exhausted. The maximum line lengths should - not count the RFC 2047 chrome. These line lengths are only a - hint; the splitter does the best it can. -:return: Lines of encoded strings, each with RFC 2047 chrome. -""" + This is similar to `header_encode()` except that the string is fit + into maximum line lengths as given by the argument. + + :param string: A unicode string for the header. It must be possible + to encode this string to bytes using the character set's + output codec. + :param maxlengths: Maximum line length iterator. Each element + returned from this iterator will provide the next maximum line + length. This parameter is used as an argument to built-in next() + and should never be exhausted. The maximum line lengths should + not count the RFC 2047 chrome. These line lengths are only a + hint; the splitter does the best it can. + :return: Lines of encoded strings, each with RFC 2047 chrome. + """ + @overload def body_encode(self, string: None) -> None: """Body-encode a string by converting it first to bytes. -The type of encoding (base64 or quoted-printable) will be based on -self.body_encoding. If body_encoding is None, we assume the -output charset is a 7bit encoding, so re-encoding the decoded -string using the ascii codec produces the correct string version -of the content. -""" + The type of encoding (base64 or quoted-printable) will be based on + self.body_encoding. If body_encoding is None, we assume the + output charset is a 7bit encoding, so re-encoding the decoded + string using the ascii codec produces the correct string version + of the content. + """ + @overload def body_encode(self, string: str | bytes) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -134,36 +140,38 @@ def add_charset( ) -> None: """Add character set properties to the global registry. -charset is the input character set, and must be the canonical name of a -character set. - -Optional header_enc and body_enc is either charset.QP for -quoted-printable, charset.BASE64 for base64 encoding, charset.SHORTEST for -the shortest of qp or base64 encoding, or None for no encoding. SHORTEST -is only valid for header_enc. It describes how message headers and -message bodies in the input charset are to be encoded. Default is no -encoding. - -Optional output_charset is the character set that the output should be -in. Conversions will proceed from input charset, to Unicode, to the -output charset when the method Charset.convert() is called. The default -is to output in the same character set as the input. - -Both input_charset and output_charset must have Unicode codec entries in -the module's charset-to-codec mapping; use add_codec(charset, codecname) -to add codecs the module does not know about. 
See the codecs module's -documentation for more information. -""" + charset is the input character set, and must be the canonical name of a + character set. + + Optional header_enc and body_enc is either charset.QP for + quoted-printable, charset.BASE64 for base64 encoding, charset.SHORTEST for + the shortest of qp or base64 encoding, or None for no encoding. SHORTEST + is only valid for header_enc. It describes how message headers and + message bodies in the input charset are to be encoded. Default is no + encoding. + + Optional output_charset is the character set that the output should be + in. Conversions will proceed from input charset, to Unicode, to the + output charset when the method Charset.convert() is called. The default + is to output in the same character set as the input. + + Both input_charset and output_charset must have Unicode codec entries in + the module's charset-to-codec mapping; use add_codec(charset, codecname) + to add codecs the module does not know about. See the codecs module's + documentation for more information. + """ + def add_alias(alias: str, canonical: str) -> None: """Add a character set alias. -alias is the alias name, e.g. latin-1 -canonical is the character set's canonical name, e.g. iso-8859-1 -""" + alias is the alias name, e.g. latin-1 + canonical is the character set's canonical name, e.g. iso-8859-1 + """ + def add_codec(charset: str, codecname: str) -> None: """Add a codec that map characters in the given charset to/from Unicode. -charset is the canonical name of a character set. codecname is the name -of a Python codec, as appropriate for the second argument to the unicode() -built-in, or to the encode() method of a Unicode string. -""" + charset is the canonical name of a character set. codecname is the name + of a Python codec, as appropriate for the second argument to the unicode() + built-in, or to the encode() method of a Unicode string. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi index 2b1768be7058b..b627d00b8845f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/encoders.pyi @@ -1,5 +1,5 @@ -"""Encodings and related functions. -""" +"""Encodings and related functions.""" + from email.message import Message __all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] @@ -7,16 +7,17 @@ __all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] def encode_base64(msg: Message) -> None: """Encode the message's payload in Base64. -Also, add an appropriate Content-Transfer-Encoding header. -""" + Also, add an appropriate Content-Transfer-Encoding header. + """ + def encode_quopri(msg: Message) -> None: """Encode the message's payload in quoted-printable. -Also, add an appropriate Content-Transfer-Encoding header. -""" + Also, add an appropriate Content-Transfer-Encoding header. + """ + def encode_7or8bit(msg: Message) -> None: - """Set the Content-Transfer-Encoding header to 7bit or 8bit. -""" + """Set the Content-Transfer-Encoding header to 7bit or 8bit.""" + def encode_noop(msg: Message) -> None: - """Do nothing. 
-""" + """Do nothing.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi index d1b98bb98a224..742412a0525bb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/errors.pyi @@ -1,98 +1,92 @@ -"""email package exception classes. -""" +"""email package exception classes.""" + import sys class MessageError(Exception): - """Base class for errors in the email package. -""" + """Base class for errors in the email package.""" + class MessageParseError(MessageError): - """Base class for message parsing errors. -""" + """Base class for message parsing errors.""" + class HeaderParseError(MessageParseError): - """Error while parsing headers. -""" + """Error while parsing headers.""" + class BoundaryError(MessageParseError): - """Couldn't find terminating boundary. -""" + """Couldn't find terminating boundary.""" + class MultipartConversionError(MessageError, TypeError): - """Conversion to a multipart is prohibited. -""" + """Conversion to a multipart is prohibited.""" + class CharsetError(MessageError): - """An illegal charset was given. -""" + """An illegal charset was given.""" # Added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 class HeaderWriteError(MessageError): - """Error while writing headers. -""" + """Error while writing headers.""" class MessageDefect(ValueError): - """Base class for a message defect. -""" + """Base class for a message defect.""" + def __init__(self, line: str | None = None) -> None: ... class NoBoundaryInMultipartDefect(MessageDefect): - """A message claimed to be a multipart but had no boundary parameter. -""" + """A message claimed to be a multipart but had no boundary parameter.""" + class StartBoundaryNotFoundDefect(MessageDefect): - """The claimed start boundary was never found. -""" + """The claimed start boundary was never found.""" + class FirstHeaderLineIsContinuationDefect(MessageDefect): - """A message had a continuation line as its first header line. -""" + """A message had a continuation line as its first header line.""" + class MisplacedEnvelopeHeaderDefect(MessageDefect): - """A 'Unix-from' header was found in the middle of a header block. -""" + """A 'Unix-from' header was found in the middle of a header block.""" + class MultipartInvariantViolationDefect(MessageDefect): - """A message claimed to be a multipart but no subparts were found. -""" + """A message claimed to be a multipart but no subparts were found.""" + class InvalidMultipartContentTransferEncodingDefect(MessageDefect): - """An invalid content transfer encoding was set on the multipart itself. 
-""" + """An invalid content transfer encoding was set on the multipart itself.""" + class UndecodableBytesDefect(MessageDefect): - """Header contained bytes that could not be decoded -""" + """Header contained bytes that could not be decoded""" + class InvalidBase64PaddingDefect(MessageDefect): - """base64 encoded sequence had an incorrect length -""" + """base64 encoded sequence had an incorrect length""" + class InvalidBase64CharactersDefect(MessageDefect): - """base64 encoded sequence had characters not in base64 alphabet -""" + """base64 encoded sequence had characters not in base64 alphabet""" + class InvalidBase64LengthDefect(MessageDefect): - """base64 encoded sequence had invalid length (1 mod 4) -""" + """base64 encoded sequence had invalid length (1 mod 4)""" + class CloseBoundaryNotFoundDefect(MessageDefect): - """A start boundary was found, but not the corresponding close boundary. -""" + """A start boundary was found, but not the corresponding close boundary.""" + class MissingHeaderBodySeparatorDefect(MessageDefect): - """Found line with no leading whitespace and no colon before blank line. -""" + """Found line with no leading whitespace and no colon before blank line.""" MalformedHeaderDefect = MissingHeaderBodySeparatorDefect class HeaderDefect(MessageDefect): - """Base class for a header defect. -""" + """Base class for a header defect.""" + class InvalidHeaderDefect(HeaderDefect): - """Header is not valid, message gives details. -""" + """Header is not valid, message gives details.""" + class HeaderMissingRequiredValue(HeaderDefect): - """A header that must have a value had none -""" + """A header that must have a value had none""" class NonPrintableDefect(HeaderDefect): - """ASCII characters outside the ascii-printable range found -""" + """ASCII characters outside the ascii-printable range found""" + def __init__(self, non_printables: str | None) -> None: ... class ObsoleteHeaderDefect(HeaderDefect): - """Header uses syntax declared obsolete by RFC 5322 -""" + """Header uses syntax declared obsolete by RFC 5322""" + class NonASCIILocalPartDefect(HeaderDefect): - """local_part contains non-ASCII characters -""" + """local_part contains non-ASCII characters""" if sys.version_info >= (3, 10): class InvalidDateDefect(HeaderDefect): - """Header has unparsable or invalid date -""" + """Header has unparsable or invalid date""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi index 8a6b7c881047f..aedb2f6c2c7d9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/feedparser.pyi @@ -14,6 +14,7 @@ exception. Instead, when it finds something unexpected, it adds a 'defect' to the current message. Defects are just instances that live on the message object's .defects attribute. """ + from collections.abc import Callable from email._policybase import _MessageT from email.message import Message @@ -23,38 +24,39 @@ from typing import Generic, overload __all__ = ["FeedParser", "BytesFeedParser"] class FeedParser(Generic[_MessageT]): - """A feed-style parser of email. -""" + """A feed-style parser of email.""" + @overload def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: """_factory is called with no arguments to create a new message obj -The policy keyword specifies a policy object that controls a number of -aspects of the parser's operation. 
The default policy maintains -backward compatibility. + The policy keyword specifies a policy object that controls a number of + aspects of the parser's operation. The default policy maintains + backward compatibility. + + """ -""" @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... def feed(self, data: str) -> None: - """Push more data into the parser. -""" + """Push more data into the parser.""" + def close(self) -> _MessageT: - """Parse all remaining data and return the root message object. -""" + """Parse all remaining data and return the root message object.""" class BytesFeedParser(FeedParser[_MessageT]): - """Like FeedParser, but feed accepts bytes. -""" + """Like FeedParser, but feed accepts bytes.""" + @overload def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy[Message] = ...) -> None: """_factory is called with no arguments to create a new message obj -The policy keyword specifies a policy object that controls a number of -aspects of the parser's operation. The default policy maintains -backward compatibility. + The policy keyword specifies a policy object that controls a number of + aspects of the parser's operation. The default policy maintains + backward compatibility. + + """ -""" @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy[_MessageT] = ...) -> None: ... def feed(self, data: bytes | bytearray) -> None: ... # type: ignore[override] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi index 9adc28b4c7b42..9d1f405ee2bfc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/generator.pyi @@ -1,5 +1,5 @@ -"""Classes to generate plain text from a message object tree. -""" +"""Classes to generate plain text from a message object tree.""" + from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy @@ -14,9 +14,10 @@ _MessageT = TypeVar("_MessageT", bound=Message[Any, Any], default=Any) class Generator(Generic[_MessageT]): """Generates output from a Message object tree. -This basic generator writes the message to the given file object as plain -text. -""" + This basic generator writes the message to the given file object as plain + text. + """ + maxheaderlen: int | None policy: Policy[_MessageT] | None @overload @@ -30,26 +31,27 @@ text. ) -> None: """Create the generator for message flattening. -outfp is the output file-like object for writing the message to. It -must have a write() method. + outfp is the output file-like object for writing the message to. It + must have a write() method. -Optional mangle_from_ is a flag that, when True (the default if policy -is not set), escapes From_ lines in the body of the message by putting -a '>' in front of them. + Optional mangle_from_ is a flag that, when True (the default if policy + is not set), escapes From_ lines in the body of the message by putting + a '>' in front of them. -Optional maxheaderlen specifies the longest length for a non-continued -header. When a header line is longer (in characters, with tabs -expanded to 8 spaces) than maxheaderlen, the header will split as -defined in the Header class. Set maxheaderlen to zero to disable -header wrapping. The default is 78, as recommended (but not required) -by RFC 2822. 
+ Optional maxheaderlen specifies the longest length for a non-continued + header. When a header line is longer (in characters, with tabs + expanded to 8 spaces) than maxheaderlen, the header will split as + defined in the Header class. Set maxheaderlen to zero to disable + header wrapping. The default is 78, as recommended (but not required) + by RFC 2822. -The policy keyword specifies a policy object that controls a number of -aspects of the generator's operation. If no policy is specified, -the policy associated with the Message object passed to the -flatten method is used. + The policy keyword specifies a policy object that controls a number of + aspects of the generator's operation. If no policy is specified, + the policy associated with the Message object passed to the + flatten method is used. + + """ -""" @overload def __init__( self, @@ -62,37 +64,38 @@ flatten method is used. def write(self, s: str) -> None: ... def flatten(self, msg: _MessageT, unixfrom: bool = False, linesep: str | None = None) -> None: """Print the message object tree rooted at msg to the output file -specified when the Generator instance was created. + specified when the Generator instance was created. + + unixfrom is a flag that forces the printing of a Unix From_ delimiter + before the first object in the message tree. If the original message + has no From_ delimiter, a 'standard' one is crafted. By default, this + is False to inhibit the printing of any From_ delimiter. -unixfrom is a flag that forces the printing of a Unix From_ delimiter -before the first object in the message tree. If the original message -has no From_ delimiter, a 'standard' one is crafted. By default, this -is False to inhibit the printing of any From_ delimiter. + Note that for subobjects, no From_ line is printed. -Note that for subobjects, no From_ line is printed. + linesep specifies the characters used to indicate a new line in + the output. The default value is determined by the policy specified + when the Generator instance was created or, if none was specified, + from the policy associated with the msg. -linesep specifies the characters used to indicate a new line in -the output. The default value is determined by the policy specified -when the Generator instance was created or, if none was specified, -from the policy associated with the msg. + """ -""" def clone(self, fp: SupportsWrite[str]) -> Self: - """Clone this generator with the exact same options. -""" + """Clone this generator with the exact same options.""" class BytesGenerator(Generator[_MessageT]): """Generates a bytes version of a Message object tree. -Functionally identical to the base Generator except that the output is -bytes and not string. When surrogates were used in the input to encode -bytes, these are decoded back to bytes for output. If the policy has -cte_type set to 7bit, then the message is transformed such that the -non-ASCII bytes are properly content transfer encoded, using the charset -unknown-8bit. + Functionally identical to the base Generator except that the output is + bytes and not string. When surrogates were used in the input to encode + bytes, these are decoded back to bytes for output. If the policy has + cte_type set to 7bit, then the message is transformed such that the + non-ASCII bytes are properly content transfer encoded, using the charset + unknown-8bit. + + The outfp object must accept bytes in its write method. + """ -The outfp object must accept bytes in its write method. 
-""" @overload def __init__( self: BytesGenerator[Any], # The Policy of the message is used. @@ -104,26 +107,27 @@ The outfp object must accept bytes in its write method. ) -> None: """Create the generator for message flattening. -outfp is the output file-like object for writing the message to. It -must have a write() method. + outfp is the output file-like object for writing the message to. It + must have a write() method. + + Optional mangle_from_ is a flag that, when True (the default if policy + is not set), escapes From_ lines in the body of the message by putting + a '>' in front of them. -Optional mangle_from_ is a flag that, when True (the default if policy -is not set), escapes From_ lines in the body of the message by putting -a '>' in front of them. + Optional maxheaderlen specifies the longest length for a non-continued + header. When a header line is longer (in characters, with tabs + expanded to 8 spaces) than maxheaderlen, the header will split as + defined in the Header class. Set maxheaderlen to zero to disable + header wrapping. The default is 78, as recommended (but not required) + by RFC 2822. -Optional maxheaderlen specifies the longest length for a non-continued -header. When a header line is longer (in characters, with tabs -expanded to 8 spaces) than maxheaderlen, the header will split as -defined in the Header class. Set maxheaderlen to zero to disable -header wrapping. The default is 78, as recommended (but not required) -by RFC 2822. + The policy keyword specifies a policy object that controls a number of + aspects of the generator's operation. If no policy is specified, + the policy associated with the Message object passed to the + flatten method is used. -The policy keyword specifies a policy object that controls a number of -aspects of the generator's operation. If no policy is specified, -the policy associated with the Message object passed to the -flatten method is used. + """ -""" @overload def __init__( self, @@ -137,9 +141,10 @@ flatten method is used. class DecodedGenerator(Generator[_MessageT]): """Generates a text representation of a message. -Like the Generator base class, except that non-text parts are substituted -with a format string representing the part. -""" + Like the Generator base class, except that non-text parts are substituted + with a format string representing the part. + """ + @overload def __init__( self: DecodedGenerator[Any], # The Policy of the message is used. @@ -151,26 +156,27 @@ with a format string representing the part. policy: None = None, ) -> None: """Like Generator.__init__() except that an additional optional -argument is allowed. + argument is allowed. + + Walks through all subparts of a message. If the subpart is of main + type 'text', then it prints the decoded payload of the subpart. -Walks through all subparts of a message. If the subpart is of main -type 'text', then it prints the decoded payload of the subpart. + Otherwise, fmt is a format string that is used instead of the message + payload. fmt is expanded with the following keywords (in + %(keyword)s format): -Otherwise, fmt is a format string that is used instead of the message -payload. 
fmt is expanded with the following keywords (in -%(keyword)s format): + type : Full MIME type of the non-text part + maintype : Main MIME type of the non-text part + subtype : Sub-MIME type of the non-text part + filename : Filename of the non-text part + description: Description associated with the non-text part + encoding : Content transfer encoding of the non-text part -type : Full MIME type of the non-text part -maintype : Main MIME type of the non-text part -subtype : Sub-MIME type of the non-text part -filename : Filename of the non-text part -description: Description associated with the non-text part -encoding : Content transfer encoding of the non-text part + The default value for fmt is None, meaning -The default value for fmt is None, meaning + [Non-text (%(type)s) part of message omitted, filename %(filename)s] + """ -[Non-text (%(type)s) part of message omitted, filename %(filename)s] -""" @overload def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi index f047800948109..c9dc5105f8853 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/header.pyi @@ -1,5 +1,5 @@ -"""Header encoding and decoding functionality. -""" +"""Header encoding and decoding functionality.""" + from collections.abc import Iterable from email.charset import Charset from typing import Any, ClassVar @@ -18,84 +18,86 @@ class Header: ) -> None: """Create a MIME-compliant header that can contain many character sets. -Optional s is the initial header value. If None, the initial header -value is not set. You can later append to the header with .append() -method calls. s may be a byte string or a Unicode string, but see the -.append() documentation for semantics. - -Optional charset serves two purposes: it has the same meaning as the -charset argument to the .append() method. It also sets the default -character set for all subsequent .append() calls that omit the charset -argument. If charset is not provided in the constructor, the us-ascii -charset is used both as s's initial charset and as the default for -subsequent .append() calls. - -The maximum line length can be specified explicitly via maxlinelen. For -splitting the first line to a shorter value (to account for the field -header which isn't included in s, e.g. 'Subject') pass in the name of -the field in header_name. The default maxlinelen is 78 as recommended -by RFC 2822. - -continuation_ws must be RFC 2822 compliant folding whitespace (usually -either a space or a hard tab) which will be prepended to continuation -lines. - -errors is passed through to the .append() call. -""" + Optional s is the initial header value. If None, the initial header + value is not set. You can later append to the header with .append() + method calls. s may be a byte string or a Unicode string, but see the + .append() documentation for semantics. + + Optional charset serves two purposes: it has the same meaning as the + charset argument to the .append() method. It also sets the default + character set for all subsequent .append() calls that omit the charset + argument. If charset is not provided in the constructor, the us-ascii + charset is used both as s's initial charset and as the default for + subsequent .append() calls. + + The maximum line length can be specified explicitly via maxlinelen. 
For + splitting the first line to a shorter value (to account for the field + header which isn't included in s, e.g. 'Subject') pass in the name of + the field in header_name. The default maxlinelen is 78 as recommended + by RFC 2822. + + continuation_ws must be RFC 2822 compliant folding whitespace (usually + either a space or a hard tab) which will be prepended to continuation + lines. + + errors is passed through to the .append() call. + """ + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: """Append a string to the MIME header. -Optional charset, if given, should be a Charset instance or the name -of a character set (which will be converted to a Charset instance). A -value of None (the default) means that the charset given in the -constructor is used. - -s may be a byte string or a Unicode string. If it is a byte string -(i.e. isinstance(s, str) is false), then charset is the encoding of -that byte string, and a UnicodeError will be raised if the string -cannot be decoded with that charset. If s is a Unicode string, then -charset is a hint specifying the character set of the characters in -the string. In either case, when producing an RFC 2822 compliant -header using RFC 2047 rules, the string will be encoded using the -output codec of the charset. If the string cannot be encoded to the -output codec, a UnicodeError will be raised. - -Optional 'errors' is passed as the errors argument to the decode -call if s is a byte string. -""" + Optional charset, if given, should be a Charset instance or the name + of a character set (which will be converted to a Charset instance). A + value of None (the default) means that the charset given in the + constructor is used. + + s may be a byte string or a Unicode string. If it is a byte string + (i.e. isinstance(s, str) is false), then charset is the encoding of + that byte string, and a UnicodeError will be raised if the string + cannot be decoded with that charset. If s is a Unicode string, then + charset is a hint specifying the character set of the characters in + the string. In either case, when producing an RFC 2822 compliant + header using RFC 2047 rules, the string will be encoded using the + output codec of the charset. If the string cannot be encoded to the + output codec, a UnicodeError will be raised. + + Optional 'errors' is passed as the errors argument to the decode + call if s is a byte string. + """ + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: """Encode a message header into an RFC-compliant format. -There are many issues involved in converting a given string for use in -an email header. Only certain character sets are readable in most -email clients, and as header strings can only contain a subset of -7-bit ASCII, care must be taken to properly convert and encode (with -Base64 or quoted-printable) header strings. In addition, there is a -75-character length limit on any given encoded header field, so -line-wrapping must be performed, even with double-byte character sets. - -Optional maxlinelen specifies the maximum length of each generated -line, exclusive of the linesep string. Individual lines may be longer -than maxlinelen if a folding point cannot be found. The first line -will be shorter by the length of the header name plus ": " if a header -name was specified at Header construction time. The default value for -maxlinelen is determined at header construction time. 
- -Optional splitchars is a string containing characters which should be -given extra weight by the splitting algorithm during normal header -wrapping. This is in very rough support of RFC 2822's 'higher level -syntactic breaks': split points preceded by a splitchar are preferred -during line splitting, with the characters preferred in the order in -which they appear in the string. Space and tab may be included in the -string to indicate whether preference should be given to one over the -other as a split point when other split chars do not appear in the line -being split. Splitchars does not affect RFC 2047 encoded lines. - -Optional linesep is a string to be used to separate the lines of -the value. The default value is the most useful for typical -Python applications, but it can be set to \\r\\n to produce RFC-compliant -line separators when needed. -""" + There are many issues involved in converting a given string for use in + an email header. Only certain character sets are readable in most + email clients, and as header strings can only contain a subset of + 7-bit ASCII, care must be taken to properly convert and encode (with + Base64 or quoted-printable) header strings. In addition, there is a + 75-character length limit on any given encoded header field, so + line-wrapping must be performed, even with double-byte character sets. + + Optional maxlinelen specifies the maximum length of each generated + line, exclusive of the linesep string. Individual lines may be longer + than maxlinelen if a folding point cannot be found. The first line + will be shorter by the length of the header name plus ": " if a header + name was specified at Header construction time. The default value for + maxlinelen is determined at header construction time. + + Optional splitchars is a string containing characters which should be + given extra weight by the splitting algorithm during normal header + wrapping. This is in very rough support of RFC 2822's 'higher level + syntactic breaks': split points preceded by a splitchar are preferred + during line splitting, with the characters preferred in the order in + which they appear in the string. Space and tab may be included in the + string to indicate whether preference should be given to one over the + other as a split point when other split chars do not appear in the line + being split. Splitchars does not affect RFC 2047 encoded lines. + + Optional linesep is a string to be used to separate the lines of + the value. The default value is the most useful for typical + Python applications, but it can be set to \\r\\n to produce RFC-compliant + line separators when needed. + """ __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... def __ne__(self, value: object, /) -> bool: ... @@ -106,23 +108,24 @@ line separators when needed. def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: """Decode a message header value without converting charset. -For historical reasons, this function may return either: + For historical reasons, this function may return either: + + 1. A list of length 1 containing a pair (str, None). + 2. A list of (bytes, charset) pairs containing each of the decoded + parts of the header. Charset is None for non-encoded parts of the header, + otherwise a lower-case string containing the name of the character set + specified in the encoded string. -1. A list of length 1 containing a pair (str, None). -2. 
A list of (bytes, charset) pairs containing each of the decoded - parts of the header. Charset is None for non-encoded parts of the header, - otherwise a lower-case string containing the name of the character set - specified in the encoded string. + header may be a string that may or may not contain RFC2047 encoded words, + or it may be a Header object. -header may be a string that may or may not contain RFC2047 encoded words, -or it may be a Header object. + An email.errors.HeaderParseError may be raised when certain decoding error + occurs (e.g. a base64 decoding exception). -An email.errors.HeaderParseError may be raised when certain decoding error -occurs (e.g. a base64 decoding exception). + This function exists for backwards compatibility only. For new code, we + recommend using email.headerregistry.HeaderRegistry instead. + """ -This function exists for backwards compatibility only. For new code, we -recommend using email.headerregistry.HeaderRegistry instead. -""" def make_header( decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], maxlinelen: int | None = None, @@ -131,14 +134,14 @@ def make_header( ) -> Header: """Create a Header from a sequence of pairs as returned by decode_header() -decode_header() takes a header value string and returns a sequence of -pairs of the format (decoded_string, charset) where charset is the string -name of the character set. + decode_header() takes a header value string and returns a sequence of + pairs of the format (decoded_string, charset) where charset is the string + name of the character set. -This function takes one of those sequence of pairs and returns a Header -instance. Optional maxlinelen, header_name, and continuation_ws are as in -the Header constructor. + This function takes one of those sequence of pairs and returns a Header + instance. Optional maxlinelen, header_name, and continuation_ws are as in + the Header constructor. -This function exists for backwards compatibility only, and is not -recommended for use in new code. -""" + This function exists for backwards compatibility only, and is not + recommended for use in new code. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi index 91109914e2d18..c75b8015f7f4c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/headerregistry.pyi @@ -3,6 +3,7 @@ This module provides an implementation of the HeaderRegistry API. The implementation is designed to flexibly follow RFC5322 rules. """ + import types from collections.abc import Iterable, Mapping from datetime import datetime as _datetime @@ -24,35 +25,36 @@ from typing_extensions import Self class BaseHeader(str): """Base class for message headers. -Implements generic behavior and provides tools for subclasses. - -A subclass must define a classmethod named 'parse' that takes an unfolded -value string and a dictionary as its arguments. The dictionary will -contain one key, 'defects', initialized to an empty list. After the call -the dictionary must contain two additional keys: parse_tree, set to the -parse tree obtained from parsing the header, and 'decoded', set to the -string value of the idealized representation of the data from the value. -(That is, encoded words are decoded, and values that have canonical -representations are so represented.) 
- -The defects key is intended to collect parsing defects, which the message -parser will subsequently dispose of as appropriate. The parser should not, -insofar as practical, raise any errors. Defects should be added to the -list instead. The standard header parsers register defects for RFC -compliance issues, for obsolete RFC syntax, and for unrecoverable parsing -errors. - -The parse method may add additional keys to the dictionary. In this case -the subclass must define an 'init' method, which will be passed the -dictionary as its keyword arguments. The method should use (usually by -setting them as the value of similarly named attributes) and remove all the -extra keys added by its parse method, and then use super to call its parent -class with the remaining arguments and keywords. - -The subclass should also make sure that a 'max_count' attribute is defined -that is either None or 1. XXX: need to better define this API. + Implements generic behavior and provides tools for subclasses. + + A subclass must define a classmethod named 'parse' that takes an unfolded + value string and a dictionary as its arguments. The dictionary will + contain one key, 'defects', initialized to an empty list. After the call + the dictionary must contain two additional keys: parse_tree, set to the + parse tree obtained from parsing the header, and 'decoded', set to the + string value of the idealized representation of the data from the value. + (That is, encoded words are decoded, and values that have canonical + representations are so represented.) + + The defects key is intended to collect parsing defects, which the message + parser will subsequently dispose of as appropriate. The parser should not, + insofar as practical, raise any errors. Defects should be added to the + list instead. The standard header parsers register defects for RFC + compliance issues, for obsolete RFC syntax, and for unrecoverable parsing + errors. + + The parse method may add additional keys to the dictionary. In this case + the subclass must define an 'init' method, which will be passed the + dictionary as its keyword arguments. The method should use (usually by + setting them as the value of similarly named attributes) and remove all the + extra keys added by its parse method, and then use super to call its parent + class with the remaining arguments and keywords. + + The subclass should also make sure that a 'max_count' attribute is defined + that is either None or 1. XXX: need to better define this API. + + """ -""" # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) max_count: ClassVar[Literal[1] | None] @property @@ -64,42 +66,43 @@ that is either None or 1. XXX: need to better define this API. def fold(self, *, policy: Policy) -> str: """Fold header according to policy. -The parsed representation of the header is folded according to -RFC5322 rules, as modified by the policy. If the parse tree -contains surrogateescaped bytes, the bytes are CTE encoded using -the charset 'unknown-8bit". + The parsed representation of the header is folded according to + RFC5322 rules, as modified by the policy. If the parse tree + contains surrogateescaped bytes, the bytes are CTE encoded using + the charset 'unknown-8bit". -Any non-ASCII characters in the parse tree are CTE encoded using -charset utf-8. XXX: make this a policy setting. + Any non-ASCII characters in the parse tree are CTE encoded using + charset utf-8. XXX: make this a policy setting. 
-The returned value is an ASCII-only string possibly containing linesep -characters, and ending with a linesep character. The string includes -the header name and the ': ' separator. + The returned value is an ASCII-only string possibly containing linesep + characters, and ending with a linesep character. The string includes + the header name and the ': ' separator. -""" + """ class UnstructuredHeader: max_count: ClassVar[Literal[1] | None] @staticmethod def value_parser(value: str) -> UnstructuredTokenList: """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - obs-NO-WS-CTL is control characters except WSP/CR/LF. + obs-NO-WS-CTL is control characters except WSP/CR/LF. -So, basically, we have printable runs, plus control characters or nulls in -the obsolete syntax, separated by whitespace. Since RFC 2047 uses the -obsolete syntax in its specification, but requires whitespace on either -side of the encoded words, I can see no reason to need to separate the -non-printable-non-whitespace from the printable runs if they occur, so we -parse this into xtext tokens separated by WSP tokens. + So, basically, we have printable runs, plus control characters or nulls in + the obsolete syntax, separated by whitespace. Since RFC 2047 uses the + obsolete syntax in its specification, but requires whitespace on either + side of the encoded words, I can see no reason to need to separate the + non-printable-non-whitespace from the printable runs if they occur, so we + parse this into xtext tokens separated by WSP tokens. -Because an 'unstructured' value must by definition constitute the entire -value, this 'get' routine does not return a remaining value, only the -parsed TokenList. + Because an 'unstructured' value must by definition constitute the entire + value, this 'get' routine does not return a remaining value, only the + parsed TokenList. + + """ -""" @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -109,12 +112,13 @@ class UniqueUnstructuredHeader(UnstructuredHeader): class DateHeader: """Header whose value consists of a single timestamp. -Provides an additional attribute, datetime, which is either an aware -datetime using a timezone, or a naive datetime if the timezone -in the input string is -0000. Also accepts a datetime as input. -The 'value' attribute is the normalized form of the timestamp, -which means it is the output of format_datetime on the datetime. -""" + Provides an additional attribute, datetime, which is either an aware + datetime using a timezone, or a naive datetime if the timezone + in the input string is -0000. Also accepts a datetime as input. + The 'value' attribute is the normalized form of the timestamp, + which means it is the output of format_datetime on the datetime. + """ + max_count: ClassVar[Literal[1] | None] def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... @property @@ -122,23 +126,24 @@ which means it is the output of format_datetime on the datetime. 
@staticmethod def value_parser(value: str) -> UnstructuredTokenList: """unstructured = (*([FWS] vchar) *WSP) / obs-unstruct - obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) - obs-utext = %d0 / obs-NO-WS-CTL / LF / CR + obs-unstruct = *((*LF *CR *(obs-utext) *LF *CR)) / FWS) + obs-utext = %d0 / obs-NO-WS-CTL / LF / CR - obs-NO-WS-CTL is control characters except WSP/CR/LF. + obs-NO-WS-CTL is control characters except WSP/CR/LF. -So, basically, we have printable runs, plus control characters or nulls in -the obsolete syntax, separated by whitespace. Since RFC 2047 uses the -obsolete syntax in its specification, but requires whitespace on either -side of the encoded words, I can see no reason to need to separate the -non-printable-non-whitespace from the printable runs if they occur, so we -parse this into xtext tokens separated by WSP tokens. + So, basically, we have printable runs, plus control characters or nulls in + the obsolete syntax, separated by whitespace. Since RFC 2047 uses the + obsolete syntax in its specification, but requires whitespace on either + side of the encoded words, I can see no reason to need to separate the + non-printable-non-whitespace from the printable runs if they occur, so we + parse this into xtext tokens separated by WSP tokens. -Because an 'unstructured' value must by definition constitute the entire -value, this 'get' routine does not return a remaining value, only the -parsed TokenList. + Because an 'unstructured' value must by definition constitute the entire + value, this 'get' routine does not return a remaining value, only the + parsed TokenList. + + """ -""" @classmethod def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... @@ -187,9 +192,8 @@ class MIMEVersionHeader: def minor(self) -> int | None: ... @staticmethod def value_parser(value: str) -> MIMEVersion: - """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS] + """mime-version = [CFWS] 1*digit [CFWS] "." [CFWS] 1*digit [CFWS]""" - """ @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @@ -212,10 +216,10 @@ class ContentTypeHeader(ParameterizedMIMEHeader): def value_parser(value: str) -> ContentType: """maintype "/" subtype *( ";" parameter ) -The maintype and substype are tokens. Theoretically they could -be checked against the official IANA list + x-token, but we -don't do that. -""" + The maintype and substype are tokens. Theoretically they could + be checked against the official IANA list + x-token, but we + don't do that. + """ class ContentDispositionHeader(ParameterizedMIMEHeader): # init is redefined but has the same signature as parent class, so is omitted from the stub @@ -223,9 +227,7 @@ class ContentDispositionHeader(ParameterizedMIMEHeader): def content_disposition(self) -> str | None: ... @staticmethod def value_parser(value: str) -> ContentDisposition: - """disposition-type *( ";" parameter ) - - """ + """disposition-type *( ";" parameter )""" class ContentTransferEncodingHeader: max_count: ClassVar[Literal[1]] @@ -236,9 +238,7 @@ class ContentTransferEncodingHeader: def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod def value_parser(value: str) -> ContentTransferEncoding: - """mechanism - - """ + """mechanism""" class MessageIDHeader: max_count: ClassVar[Literal[1]] @@ -246,8 +246,7 @@ class MessageIDHeader: def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... 
@staticmethod def value_parser(value: str) -> MessageID: - """message-id = "Message-ID:" msg-id CRLF - """ + """message-id = "Message-ID:" msg-id CRLF""" @type_check_only class _HeaderParser(Protocol): @@ -258,8 +257,8 @@ class _HeaderParser(Protocol): def parse(cls, value: str, kwds: dict[str, Any], /) -> None: ... class HeaderRegistry: - """A header_factory and header registry. -""" + """A header_factory and header registry.""" + registry: dict[str, type[_HeaderParser]] base_class: type[BaseHeader] default_class: type[_HeaderParser] @@ -268,29 +267,29 @@ class HeaderRegistry: ) -> None: """Create a header_factory that works with the Policy API. -base_class is the class that will be the last class in the created -header class's __bases__ list. default_class is the class that will be -used if "name" (see __call__) does not appear in the registry. -use_default_map controls whether or not the default mapping of names to -specialized classes is copied in to the registry when the factory is -created. The default is True. + base_class is the class that will be the last class in the created + header class's __bases__ list. default_class is the class that will be + used if "name" (see __call__) does not appear in the registry. + use_default_map controls whether or not the default mapping of names to + specialized classes is copied in to the registry when the factory is + created. The default is True. + + """ -""" def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: - """Register cls as the specialized class for handling "name" headers. + """Register cls as the specialized class for handling "name" headers.""" - """ def __getitem__(self, name: str) -> type[BaseHeader]: ... def __call__(self, name: str, value: Any) -> BaseHeader: """Create a header instance for header 'name' from 'value'. -Creates a header instance by creating a specialized class for parsing -and representing the specified header by combining the factory -base_class with a specialized class from the registry or the -default_class, and passing the name and value to the constructed -class's constructor. + Creates a header instance by creating a specialized class for parsing + and representing the specified header by combining the factory + base_class with a specialized class from the registry or the + default_class, and passing the name and value to the constructed + class's constructor. -""" + """ class Address: @property @@ -302,26 +301,27 @@ class Address: @property def addr_spec(self) -> str: """The addr_spec (username@domain) portion of the address, quoted -according to RFC 5322 rules, but with no Content Transfer Encoding. -""" + according to RFC 5322 rules, but with no Content Transfer Encoding. + """ + def __init__( self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None ) -> None: """Create an object representing a full email address. -An address can have a 'display_name', a 'username', and a 'domain'. In -addition to specifying the username and domain separately, they may be -specified together by using the addr_spec keyword *instead of* the -username and domain keywords. If an addr_spec string is specified it -must be properly quoted according to RFC 5322 rules; an error will be -raised if it is not. + An address can have a 'display_name', a 'username', and a 'domain'. In + addition to specifying the username and domain separately, they may be + specified together by using the addr_spec keyword *instead of* the + username and domain keywords. 
If an addr_spec string is specified it + must be properly quoted according to RFC 5322 rules; an error will be + raised if it is not. -An Address object has display_name, username, domain, and addr_spec -attributes, all of which are read-only. The addr_spec and the string -value of the object are both quoted according to RFC5322 rules, but -without any Content Transfer Encoding. + An Address object has display_name, username, domain, and addr_spec + attributes, all of which are read-only. The addr_spec and the string + value of the object are both quoted according to RFC5322 rules, but + without any Content Transfer Encoding. -""" + """ __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... @@ -333,17 +333,17 @@ class Group: def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: """Create an object representing an address group. -An address group consists of a display_name followed by colon and a -list of addresses (see Address) terminated by a semi-colon. The Group -is created by specifying a display_name and a possibly empty list of -Address objects. A Group can also be used to represent a single -address that is not in a group, which is convenient when manipulating -lists that are a combination of Groups and individual Addresses. In -this case the display_name should be set to None. In particular, the -string representation of a Group whose display_name is None is the same -as the Address object, if there is one and only one Address object in -the addresses list. + An address group consists of a display_name followed by colon and a + list of addresses (see Address) terminated by a semi-colon. The Group + is created by specifying a display_name and a possibly empty list of + Address objects. A Group can also be used to represent a single + address that is not in a group, which is convenient when manipulating + lists that are a combination of Groups and individual Addresses. In + this case the display_name should be set to None. In particular, the + string representation of a Group whose display_name is None is the same + as the Address object, if there is one and only one Address object in + the addresses list. -""" + """ __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, other: object) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi index 51cbec229ad29..54f56d7c996de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/iterators.pyi @@ -1,5 +1,5 @@ -"""Various types of useful iterators and generators. -""" +"""Various types of useful iterators and generators.""" + from _typeshed import SupportsWrite from collections.abc import Iterator from email.message import Message @@ -9,23 +9,24 @@ __all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: """Iterate over the parts, returning string payloads line-by-line. -Optional decode (default False) is passed through to .get_payload(). -""" + Optional decode (default False) is passed through to .get_payload(). + """ + def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: """Iterate over the subparts with a given MIME type. -Use 'maintype' as the main MIME type to match against; this defaults to -"text". 
Optional 'subtype' is the MIME subtype to match against; if -omitted, only the main type is matched. -""" + Use 'maintype' as the main MIME type to match against; this defaults to + "text". Optional 'subtype' is the MIME subtype to match against; if + omitted, only the main type is matched. + """ + def walk(self: Message) -> Iterator[Message]: """Walk over the message tree, yielding each subpart. -The walk is performed in depth-first order. This method is a -generator. -""" + The walk is performed in depth-first order. This method is a + generator. + """ # We include the seemingly private function because it is documented in the stdlib documentation. def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: - """A handy debugging aid -""" + """A handy debugging aid""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi index 227cac54340b8..308c21ad163c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/message.pyi @@ -1,5 +1,5 @@ -"""Basic message object for the email package object model. -""" +"""Basic message object for the email package object model.""" + from _typeshed import MaybeNone from collections.abc import Generator, Iterator, Sequence from email import _ParamsType, _ParamType @@ -37,18 +37,19 @@ class _SupportsDecodeToPayload(Protocol): class Message(Generic[_HeaderT_co, _HeaderParamT_contra]): """Basic message object. -A message object is defined as something that has a bunch of RFC 2822 -headers and a payload. It may optionally have an envelope header -(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a -multipart or a message/rfc822), then the payload is a list of Message -objects, otherwise it is a string. - -Message objects implement part of the 'mapping' interface, which assumes -there is exactly one occurrence of the header per message. Some headers -do in fact appear multiple times (e.g. Received) and for those headers, -you must use the explicit API to set or get all the headers. Not all of -the mapping methods are implemented. -""" + A message object is defined as something that has a bunch of RFC 2822 + headers and a payload. It may optionally have an envelope header + (a.k.a. Unix-From or From_ header). If the message is a container (i.e. a + multipart or a message/rfc822), then the payload is a list of Message + objects, otherwise it is a string. + + Message objects implement part of the 'mapping' interface, which assumes + there is exactly one occurrence of the header per message. Some headers + do in fact appear multiple times (e.g. Received) and for those headers, + you must use the explicit API to set or get all the headers. Not all of + the mapping methods are implemented. + """ + # The policy attributes and arguments in this class and its subclasses # would ideally use Policy[Self], but this is not possible. policy: Policy[Any] # undocumented @@ -57,40 +58,41 @@ the mapping methods are implemented. defects: list[MessageDefect] def __init__(self, policy: Policy[Any] = ...) -> None: ... def is_multipart(self) -> bool: - """Return True if the message consists of multiple parts. -""" + """Return True if the message consists of multiple parts.""" + def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... 
def attach(self, payload: _PayloadType) -> None: """Add the given payload to the current payload. -The current payload will always be a list of objects after this method -is called. If you want to set the payload to a scalar object, use -set_payload() instead. -""" + The current payload will always be a list of objects after this method + is called. If you want to set the payload to a scalar object, use + set_payload() instead. + """ # `i: int` without a multipart payload results in an error # `| MaybeNone` acts like `| Any`: can be None for cleared or unset payload, but annoying to check @overload # multipart def get_payload(self, i: int, decode: Literal[True]) -> None: """Return a reference to the payload. -The payload will either be a list object or a string. If you mutate -the list object, you modify the message's payload in place. Optional -i returns that index into the payload. + The payload will either be a list object or a string. If you mutate + the list object, you modify the message's payload in place. Optional + i returns that index into the payload. + + Optional decode is a flag indicating whether the payload should be + decoded or not, according to the Content-Transfer-Encoding header + (default is False). -Optional decode is a flag indicating whether the payload should be -decoded or not, according to the Content-Transfer-Encoding header -(default is False). + When True and the message is not a multipart, the payload will be + decoded if this header's value is `quoted-printable' or `base64'. If + some other encoding is used, or the header is missing, or if the + payload has bogus data (i.e. bogus base64 or uuencoded data), the + payload is returned as-is. -When True and the message is not a multipart, the payload will be -decoded if this header's value is `quoted-printable' or `base64'. If -some other encoding is used, or the header is missing, or if the -payload has bogus data (i.e. bogus base64 or uuencoded data), the -payload is returned as-is. + If the message is a multipart and the decode flag is True, then None + is returned. + """ -If the message is a multipart and the decode flag is True, then None -is returned. -""" @overload # multipart def get_payload(self, i: int, decode: Literal[False] = False) -> _PayloadType | MaybeNone: ... @overload # either @@ -103,14 +105,13 @@ is returned. # then an invalid payload could be passed, but this is unlikely # Not[_SupportsEncodeToPayload] @overload - def set_payload( - self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None - ) -> None: + def set_payload(self, payload: _SupportsDecodeToPayload | _PayloadType | _MultipartPayloadType, charset: None = None) -> None: """Set the payload to the given value. -Optional charset sets the message's default character set. See -set_charset() for details. -""" + Optional charset sets the message's default character set. See + set_charset() for details. + """ + @overload def set_payload( self, @@ -120,23 +121,24 @@ set_charset() for details. def set_charset(self, charset: _CharsetType) -> None: """Set the charset of the payload to a given character set. -charset can be a Charset instance, a string naming a character set, or -None. If it is a string it will be converted to a Charset instance. -If charset is None, the charset parameter will be removed from the -Content-Type field. Anything else will generate a TypeError. - -The message will be assumed to be of type text/* encoded with -charset.input_charset. 
It will be converted to charset.output_charset -and encoded properly, if needed, when generating the plain text -representation of the message. MIME headers (MIME-Version, -Content-Type, Content-Transfer-Encoding) will be added as needed. -""" - def get_charset(self) -> _CharsetType: - """Return the Charset instance associated with the message's payload. + charset can be a Charset instance, a string naming a character set, or + None. If it is a string it will be converted to a Charset instance. + If charset is None, the charset parameter will be removed from the + Content-Type field. Anything else will generate a TypeError. + + The message will be assumed to be of type text/* encoded with + charset.input_charset. It will be converted to charset.output_charset + and encoded properly, if needed, when generating the plain text + representation of the message. MIME headers (MIME-Version, + Content-Type, Content-Transfer-Encoding) will be added as needed. """ + + def get_charset(self) -> _CharsetType: + """Return the Charset instance associated with the message's payload.""" + def __len__(self) -> int: - """Return the total number of headers, including duplicates. -""" + """Return the total number of headers, including duplicates.""" + def __contains__(self, name: str) -> bool: ... def __iter__(self) -> Iterator[str]: ... # Same as `get` with `failobj=None`, but with the expectation that it won't return None in most scenarios @@ -146,150 +148,166 @@ Content-Type, Content-Transfer-Encoding) will be added as needed. def __getitem__(self, name: str) -> _HeaderT_co | MaybeNone: """Get a header value. -Return None if the header is missing instead of raising an exception. + Return None if the header is missing instead of raising an exception. + + Note that if the header appeared multiple times, exactly which + occurrence gets returned is undefined. Use get_all() to get all + the values matching a header field name. + """ -Note that if the header appeared multiple times, exactly which -occurrence gets returned is undefined. Use get_all() to get all -the values matching a header field name. -""" def __setitem__(self, name: str, val: _HeaderParamT_contra) -> None: """Set the value of a header. -Note: this does not overwrite an existing header with the same field -name. Use __delitem__() first to delete any existing headers. -""" + Note: this does not overwrite an existing header with the same field + name. Use __delitem__() first to delete any existing headers. + """ + def __delitem__(self, name: str) -> None: """Delete all occurrences of a header, if present. -Does not raise an exception if the header is missing. -""" + Does not raise an exception if the header is missing. + """ + def keys(self) -> list[str]: """Return a list of all the message's header field names. -These will be sorted in the order they appeared in the original -message, or were added to the message, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. -""" + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + def values(self) -> list[_HeaderT_co]: """Return a list of all the message's header values. -These will be sorted in the order they appeared in the original -message, or were added to the message, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. 
-""" + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + def items(self) -> list[tuple[str, _HeaderT_co]]: """Get all the message's header fields and values. -These will be sorted in the order they appeared in the original -message, or were added to the message, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. -""" + These will be sorted in the order they appeared in the original + message, or were added to the message, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + @overload def get(self, name: str, failobj: None = None) -> _HeaderT_co | None: """Get a header value. -Like __getitem__() but return failobj instead of None when the field -is missing. -""" + Like __getitem__() but return failobj instead of None when the field + is missing. + """ + @overload def get(self, name: str, failobj: _T) -> _HeaderT_co | _T: ... @overload def get_all(self, name: str, failobj: None = None) -> list[_HeaderT_co] | None: """Return a list of all the values for the named field. -These will be sorted in the order they appeared in the original -message, and may contain duplicates. Any fields deleted and -re-inserted are always appended to the header list. + These will be sorted in the order they appeared in the original + message, and may contain duplicates. Any fields deleted and + re-inserted are always appended to the header list. + + If no such fields exist, failobj is returned (defaults to None). + """ -If no such fields exist, failobj is returned (defaults to None). -""" @overload def get_all(self, name: str, failobj: _T) -> list[_HeaderT_co] | _T: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: """Extended header setting. -name is the header field to add. keyword arguments can be used to set -additional parameters for the header field, with underscores converted -to dashes. Normally the parameter will be added as key="value" unless -value is None, in which case only the key will be added. If a -parameter value contains non-ASCII characters it can be specified as a -three-tuple of (charset, language, value), in which case it will be -encoded according to RFC2231 rules. Otherwise it will be encoded using -the utf-8 charset and a language of ''. - -Examples: - -msg.add_header('content-disposition', 'attachment', filename='bud.gif') -msg.add_header('content-disposition', 'attachment', - filename=('utf-8', '', 'Fußballer.ppt')) -msg.add_header('content-disposition', 'attachment', - filename='Fußballer.ppt')) -""" + name is the header field to add. keyword arguments can be used to set + additional parameters for the header field, with underscores converted + to dashes. Normally the parameter will be added as key="value" unless + value is None, in which case only the key will be added. If a + parameter value contains non-ASCII characters it can be specified as a + three-tuple of (charset, language, value), in which case it will be + encoded according to RFC2231 rules. Otherwise it will be encoded using + the utf-8 charset and a language of ''. 
+ + Examples: + + msg.add_header('content-disposition', 'attachment', filename='bud.gif') + msg.add_header('content-disposition', 'attachment', + filename=('utf-8', '', 'Fußballer.ppt')) + msg.add_header('content-disposition', 'attachment', + filename='Fußballer.ppt')) + """ + def replace_header(self, _name: str, _value: _HeaderParamT_contra) -> None: """Replace a header. -Replace the first matching header found in the message, retaining -header order and case. If no matching header was found, a KeyError is -raised. -""" + Replace the first matching header found in the message, retaining + header order and case. If no matching header was found, a KeyError is + raised. + """ + def get_content_type(self) -> str: """Return the message's content type. -The returned string is coerced to lower case of the form -'maintype/subtype'. If there was no Content-Type header in the -message, the default type as given by get_default_type() will be -returned. Since according to RFC 2045, messages always have a default -type this will always return a value. + The returned string is coerced to lower case of the form + 'maintype/subtype'. If there was no Content-Type header in the + message, the default type as given by get_default_type() will be + returned. Since according to RFC 2045, messages always have a default + type this will always return a value. + + RFC 2045 defines a message's default type to be text/plain unless it + appears inside a multipart/digest container, in which case it would be + message/rfc822. + """ -RFC 2045 defines a message's default type to be text/plain unless it -appears inside a multipart/digest container, in which case it would be -message/rfc822. -""" def get_content_maintype(self) -> str: """Return the message's main content type. -This is the 'maintype' part of the string returned by -get_content_type(). -""" + This is the 'maintype' part of the string returned by + get_content_type(). + """ + def get_content_subtype(self) -> str: """Returns the message's sub-content type. -This is the 'subtype' part of the string returned by -get_content_type(). -""" + This is the 'subtype' part of the string returned by + get_content_type(). + """ + def get_default_type(self) -> str: """Return the 'default' content type. -Most messages have a default content type of text/plain, except for -messages that are subparts of multipart/digest containers. Such -subparts have a default content type of message/rfc822. -""" + Most messages have a default content type of text/plain, except for + messages that are subparts of multipart/digest containers. Such + subparts have a default content type of message/rfc822. + """ + def set_default_type(self, ctype: str) -> None: """Set the 'default' content type. -ctype should be either "text/plain" or "message/rfc822", although this -is not enforced. The default content type is not stored in the -Content-Type header. -""" + ctype should be either "text/plain" or "message/rfc822", although this + is not enforced. The default content type is not stored in the + Content-Type header. + """ + @overload def get_params( self, failobj: None = None, header: str = "content-type", unquote: bool = True ) -> list[tuple[str, str]] | None: """Return the message's Content-Type parameters, as a list. -The elements of the returned list are 2-tuples of key/value pairs, as -split on the '=' sign. The left hand side of the '=' is the key, -while the right hand side is the value. If there is no '=' sign in -the parameter the value is the empty string. 
The value is as -described in the get_param() method. + The elements of the returned list are 2-tuples of key/value pairs, as + split on the '=' sign. The left hand side of the '=' is the key, + while the right hand side is the value. If there is no '=' sign in + the parameter the value is the empty string. The value is as + described in the get_param() method. + + Optional failobj is the object to return if there is no Content-Type + header. Optional header is the header to search instead of + Content-Type. If unquote is True, the value is unquoted. + """ -Optional failobj is the object to return if there is no Content-Type -header. Optional header is the header to search instead of -Content-Type. If unquote is True, the value is unquoted. -""" @overload def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... @overload @@ -298,147 +316,159 @@ Content-Type. If unquote is True, the value is unquoted. ) -> _ParamType | None: """Return the parameter value if found in the Content-Type header. -Optional failobj is the object to return if there is no Content-Type -header, or the Content-Type header has no such parameter. Optional -header is the header to search instead of Content-Type. + Optional failobj is the object to return if there is no Content-Type + header, or the Content-Type header has no such parameter. Optional + header is the header to search instead of Content-Type. + + Parameter keys are always compared case insensitively. The return + value can either be a string, or a 3-tuple if the parameter was RFC + 2231 encoded. When it's a 3-tuple, the elements of the value are of + the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and + LANGUAGE can be None, in which case you should consider VALUE to be + encoded in the us-ascii charset. You can usually ignore LANGUAGE. + The parameter value (either the returned string, or the VALUE item in + the 3-tuple) is always unquoted, unless unquote is set to False. -Parameter keys are always compared case insensitively. The return -value can either be a string, or a 3-tuple if the parameter was RFC -2231 encoded. When it's a 3-tuple, the elements of the value are of -the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and -LANGUAGE can be None, in which case you should consider VALUE to be -encoded in the us-ascii charset. You can usually ignore LANGUAGE. -The parameter value (either the returned string, or the VALUE item in -the 3-tuple) is always unquoted, unless unquote is set to False. + If your application doesn't care whether the parameter was RFC 2231 + encoded, it can turn the return value into a string as follows: -If your application doesn't care whether the parameter was RFC 2231 -encoded, it can turn the return value into a string as follows: + rawparam = msg.get_param('foo') + param = email.utils.collapse_rfc2231_value(rawparam) - rawparam = msg.get_param('foo') - param = email.utils.collapse_rfc2231_value(rawparam) + """ -""" @overload def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: """Remove the given parameter completely from the Content-Type header. -The header will be re-written in place without the parameter or its -value. All values will be quoted as necessary unless requote is -False. Optional header specifies an alternative to the Content-Type -header. 
-""" + The header will be re-written in place without the parameter or its + value. All values will be quoted as necessary unless requote is + False. Optional header specifies an alternative to the Content-Type + header. + """ + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: """Set the main type and subtype for the Content-Type header. -type must be a string in the form "maintype/subtype", otherwise a -ValueError is raised. + type must be a string in the form "maintype/subtype", otherwise a + ValueError is raised. + + This method replaces the Content-Type header, keeping all the + parameters in place. If requote is False, this leaves the existing + header's quoting as is. Otherwise, the parameters will be quoted (the + default). -This method replaces the Content-Type header, keeping all the -parameters in place. If requote is False, this leaves the existing -header's quoting as is. Otherwise, the parameters will be quoted (the -default). + An alternative header can be specified in the header argument. When + the Content-Type header is set, we'll always also add a MIME-Version + header. + """ -An alternative header can be specified in the header argument. When -the Content-Type header is set, we'll always also add a MIME-Version -header. -""" @overload def get_filename(self, failobj: None = None) -> str | None: """Return the filename associated with the payload if present. -The filename is extracted from the Content-Disposition header's -'filename' parameter, and it is unquoted. If that header is missing -the 'filename' parameter, this method falls back to looking for the -'name' parameter. -""" + The filename is extracted from the Content-Disposition header's + 'filename' parameter, and it is unquoted. If that header is missing + the 'filename' parameter, this method falls back to looking for the + 'name' parameter. + """ + @overload def get_filename(self, failobj: _T) -> str | _T: ... @overload def get_boundary(self, failobj: None = None) -> str | None: """Return the boundary associated with the payload if present. -The boundary is extracted from the Content-Type header's 'boundary' -parameter, and it is unquoted. -""" + The boundary is extracted from the Content-Type header's 'boundary' + parameter, and it is unquoted. + """ + @overload def get_boundary(self, failobj: _T) -> str | _T: ... def set_boundary(self, boundary: str) -> None: """Set the boundary parameter in Content-Type to 'boundary'. -This is subtly different than deleting the Content-Type header and -adding a new one with a new boundary parameter via add_header(). The -main difference is that using the set_boundary() method preserves the -order of the Content-Type header in the original message. + This is subtly different than deleting the Content-Type header and + adding a new one with a new boundary parameter via add_header(). The + main difference is that using the set_boundary() method preserves the + order of the Content-Type header in the original message. + + HeaderParseError is raised if the message has no Content-Type header. + """ -HeaderParseError is raised if the message has no Content-Type header. -""" @overload def get_content_charset(self) -> str | None: """Return the charset parameter of the Content-Type header. -The returned string is always coerced to lower case. If there is no -Content-Type header, or if that header has no charset parameter, -failobj is returned. -""" + The returned string is always coerced to lower case. 
If there is no + Content-Type header, or if that header has no charset parameter, + failobj is returned. + """ + @overload def get_content_charset(self, failobj: _T) -> str | _T: ... @overload def get_charsets(self, failobj: None = None) -> list[str | None]: """Return a list containing the charset(s) used in this message. -The returned list of items describes the Content-Type headers' -charset parameter for this message and all the subparts in its -payload. + The returned list of items describes the Content-Type headers' + charset parameter for this message and all the subparts in its + payload. -Each item will either be a string (the value of the charset parameter -in the Content-Type header of that part) or the value of the -'failobj' parameter (defaults to None), if the part does not have a -main MIME type of "text", or the charset is not defined. + Each item will either be a string (the value of the charset parameter + in the Content-Type header of that part) or the value of the + 'failobj' parameter (defaults to None), if the part does not have a + main MIME type of "text", or the charset is not defined. + + The list will contain one string for each part of the message, plus + one for the container message (i.e. self), so that a non-multipart + message will still return a list of length 1. + """ -The list will contain one string for each part of the message, plus -one for the container message (i.e. self), so that a non-multipart -message will still return a list of length 1. -""" @overload def get_charsets(self, failobj: _T) -> list[str | _T]: ... def walk(self) -> Generator[Self, None, None]: """Walk over the message tree, yielding each subpart. -The walk is performed in depth-first order. This method is a -generator. -""" + The walk is performed in depth-first order. This method is a + generator. + """ + def get_content_disposition(self) -> str | None: """Return the message's content-disposition if it exists, or None. -The return values can be either 'inline', 'attachment' or None -according to the rfc2183. -""" + The return values can be either 'inline', 'attachment' or None + according to the rfc2183. + """ + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy[Any] | None = None) -> str: """Return the entire formatted message as a string. -Optional 'unixfrom', when true, means include the Unix From_ envelope -header. For backward compatibility reasons, if maxheaderlen is -not specified it defaults to 0, so you must override it explicitly -if you want a different maxheaderlen. 'policy' is passed to the -Generator instance used to serialize the message; if it is not -specified the policy associated with the message instance is used. - -If the message object contains binary data that is not encoded -according to RFC standards, the non-compliant data will be replaced by -unicode "unknown character" code points. -""" + Optional 'unixfrom', when true, means include the Unix From_ envelope + header. For backward compatibility reasons, if maxheaderlen is + not specified it defaults to 0, so you must override it explicitly + if you want a different maxheaderlen. 'policy' is passed to the + Generator instance used to serialize the message; if it is not + specified the policy associated with the message instance is used. + + If the message object contains binary data that is not encoded + according to RFC standards, the non-compliant data will be replaced by + unicode "unknown character" code points. 
+ """ + def as_bytes(self, unixfrom: bool = False, policy: Policy[Any] | None = None) -> bytes: """Return the entire formatted message as a bytes object. -Optional 'unixfrom', when true, means include the Unix From_ envelope -header. 'policy' is passed to the BytesGenerator instance used to -serialize the message; if not specified the policy associated with -the message instance is used. -""" - def __bytes__(self) -> bytes: - """Return the entire formatted message as a bytes object. + Optional 'unixfrom', when true, means include the Unix From_ envelope + header. 'policy' is passed to the BytesGenerator instance used to + serialize the message; if not specified the policy associated with + the message instance is used. """ + + def __bytes__(self) -> bytes: + """Return the entire formatted message as a bytes object.""" + def set_param( self, param: str, @@ -451,70 +481,74 @@ the message instance is used. ) -> None: """Set a parameter in the Content-Type header. -If the parameter already exists in the header, its value will be -replaced with the new value. + If the parameter already exists in the header, its value will be + replaced with the new value. -If header is Content-Type and has not yet been defined for this -message, it will be set to "text/plain" and the new parameter and -value will be appended as per RFC 2045. + If header is Content-Type and has not yet been defined for this + message, it will be set to "text/plain" and the new parameter and + value will be appended as per RFC 2045. -An alternate header can be specified in the header argument, and all -parameters will be quoted as necessary unless requote is False. + An alternate header can be specified in the header argument, and all + parameters will be quoted as necessary unless requote is False. -If charset is specified, the parameter will be encoded according to RFC -2231. Optional language specifies the RFC 2231 language, defaulting -to the empty string. Both charset and language should be strings. -""" + If charset is specified, the parameter will be encoded according to RFC + 2231. Optional language specifies the RFC 2231 language, defaulting + to the empty string. Both charset and language should be strings. + """ # The following two methods are undocumented, but a source code comment states that they are public API def set_raw(self, name: str, value: _HeaderParamT_contra) -> None: """Store name and value in the model without modification. -This is an "internal" API, intended only for use by a parser. -""" + This is an "internal" API, intended only for use by a parser. + """ + def raw_items(self) -> Iterator[tuple[str, _HeaderT_co]]: """Return the (name, value) header pairs without modification. -This is an "internal" API, intended only for use by a generator. -""" + This is an "internal" API, intended only for use by a generator. + """ class MIMEPart(Message[_HeaderRegistryT_co, _HeaderRegistryParamT_contra]): def __init__(self, policy: Policy[Any] | None = None) -> None: ... def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT_co] | None: """Return best candidate mime part for display as 'body' of message. -Do a depth first search, starting with self, looking for the first part -matching each of the items in preferencelist, and return the part -corresponding to the first item that has a match, or None if no items -have a match. If 'related' is not included in preferencelist, consider -the root part of any multipart/related encountered as a candidate -match. 
Ignore parts with 'Content-Disposition: attachment'. -""" + Do a depth first search, starting with self, looking for the first part + matching each of the items in preferencelist, and return the part + corresponding to the first item that has a match, or None if no items + have a match. If 'related' is not included in preferencelist, consider + the root part of any multipart/related encountered as a candidate + match. Ignore parts with 'Content-Disposition: attachment'. + """ + def attach(self, payload: Self) -> None: # type: ignore[override] """Add the given payload to the current payload. -The current payload will always be a list of objects after this method -is called. If you want to set the payload to a scalar object, use -set_payload() instead. -""" + The current payload will always be a list of objects after this method + is called. If you want to set the payload to a scalar object, use + set_payload() instead. + """ # The attachments are created via type(self) in the attach method. It's theoretically # possible to sneak other attachment types into a MIMEPart instance, but could cause # cause unforseen consequences. def iter_attachments(self) -> Iterator[Self]: """Return an iterator over the non-main parts of a multipart. -Skip the first of each occurrence of text/plain, text/html, -multipart/related, or multipart/alternative in the multipart (unless -they have a 'Content-Disposition: attachment' header) and include all -remaining subparts in the returned iterator. When applied to a -multipart/related, return all parts except the root part. Return an -empty iterator when applied to a multipart/alternative or a -non-multipart. -""" + Skip the first of each occurrence of text/plain, text/html, + multipart/related, or multipart/alternative in the multipart (unless + they have a 'Content-Disposition: attachment' header) and include all + remaining subparts in the returned iterator. When applied to a + multipart/related, return all parts except the root part. Return an + empty iterator when applied to a multipart/alternative or a + non-multipart. + """ + def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT_co]]: """Return an iterator over all immediate subparts of a multipart. -Return an empty iterator for a non-multipart. -""" + Return an empty iterator for a non-multipart. + """ + def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... def make_related(self, boundary: str | None = None) -> None: ... @@ -528,14 +562,15 @@ Return an empty iterator for a non-multipart. def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy[Any] | None = None) -> str: """Return the entire formatted message as a string. -Optional 'unixfrom', when true, means include the Unix From_ envelope -header. maxheaderlen is retained for backward compatibility with the -base Message class, but defaults to None, meaning that the policy value -for max_line_length controls the header maximum length. 'policy' is -passed to the Generator instance used to serialize the message; if it -is not specified the policy associated with the message instance is -used. -""" + Optional 'unixfrom', when true, means include the Unix From_ envelope + header. 
maxheaderlen is retained for backward compatibility with the + base Message class, but defaults to None, meaning that the policy value + for max_line_length controls the header maximum length. 'policy' is + passed to the Generator instance used to serialize the message; if it + is not specified the policy associated with the message instance is + used. + """ + def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi index 55a88d7d80331..b2249845d7450 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/application.pyi @@ -1,5 +1,5 @@ -"""Class representing application/* type MIME documents. -""" +"""Class representing application/* type MIME documents.""" + from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +8,8 @@ from email.policy import Policy __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): - """Class for generating application/* MIME documents. -""" + """Class for generating application/* MIME documents.""" + def __init__( self, _data: str | bytes | bytearray, @@ -21,15 +21,15 @@ class MIMEApplication(MIMENonMultipart): ) -> None: """Create an application/* type MIME document. -_data contains the bytes for the raw application data. + _data contains the bytes for the raw application data. -_subtype is the MIME content type subtype, defaulting to -'octet-stream'. + _subtype is the MIME content type subtype, defaulting to + 'octet-stream'. -_encoder is a function which will perform the actual encoding for -transport of the application data, defaulting to base64 encoding. + _encoder is a function which will perform the actual encoding for + transport of the application data, defaulting to base64 encoding. -Any additional keyword arguments are passed to the base class -constructor, which turns them into parameters on the Content-Type -header. -""" + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi index 02ec41e39623b..532e4b21b10ef 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/audio.pyi @@ -1,5 +1,5 @@ -"""Class representing audio/* type MIME documents. -""" +"""Class representing audio/* type MIME documents.""" + from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +8,8 @@ from email.policy import Policy __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): - """Class for generating audio/* MIME documents. -""" + """Class for generating audio/* MIME documents.""" + def __init__( self, _audiodata: str | bytes | bytearray, @@ -21,21 +21,21 @@ class MIMEAudio(MIMENonMultipart): ) -> None: """Create an audio/* type MIME document. -_audiodata contains the bytes for the raw audio data. If this data -can be decoded as au, wav, aiff, or aifc, then the -subtype will be automatically included in the Content-Type header. -Otherwise, you can specify the specific audio subtype via the -_subtype parameter. 
If _subtype is not given, and no subtype can be -guessed, a TypeError is raised. + _audiodata contains the bytes for the raw audio data. If this data + can be decoded as au, wav, aiff, or aifc, then the + subtype will be automatically included in the Content-Type header. + Otherwise, you can specify the specific audio subtype via the + _subtype parameter. If _subtype is not given, and no subtype can be + guessed, a TypeError is raised. -_encoder is a function which will perform the actual encoding for -transport of the image data. It takes one argument, which is this -Image instance. It should use get_payload() and set_payload() to -change the payload to the encoded form. It should also add any -Content-Transfer-Encoding or other headers to the message as -necessary. The default encoding is Base64. + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. -Any additional keyword arguments are passed to the base class -constructor, which turns them into parameters on the Content-Type -header. -""" + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi index bc1a615b5883a..9c173a72bae95 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/base.pyi @@ -1,5 +1,5 @@ -"""Base class for MIME specializations. -""" +"""Base class for MIME specializations.""" + import email.message from email import _ParamsType from email.policy import Policy @@ -7,12 +7,12 @@ from email.policy import Policy __all__ = ["MIMEBase"] class MIMEBase(email.message.Message): - """Base class for MIME specializations. -""" + """Base class for MIME specializations.""" + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: """This constructor adds a Content-Type: and a MIME-Version: header. -The Content-Type: header is taken from the _maintype and _subtype -arguments. Additional parameters for this header are taken from the -keyword arguments. -""" + The Content-Type: header is taken from the _maintype and _subtype + arguments. Additional parameters for this header are taken from the + keyword arguments. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi index 20f228cc21c3f..72c5ac48af093 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/image.pyi @@ -1,5 +1,5 @@ -"""Class representing image/* type MIME documents. -""" +"""Class representing image/* type MIME documents.""" + from collections.abc import Callable from email import _ParamsType from email.mime.nonmultipart import MIMENonMultipart @@ -8,8 +8,8 @@ from email.policy import Policy __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): - """Class for generating image/* type MIME documents. 
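# A minimal usage sketch of the MIME classes annotated above; the payload
# bytes and the name parameter are illustrative only.
from email.mime.application import MIMEApplication

part = MIMEApplication(b"\x00\x01\x02", _subtype="octet-stream", name="blob.bin")
assert part.get_content_type() == "application/octet-stream"
assert part["Content-Transfer-Encoding"] == "base64"   # set by the default base64 encoder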
-""" + """Class for generating image/* type MIME documents.""" + def __init__( self, _imagedata: str | bytes | bytearray, @@ -21,20 +21,20 @@ class MIMEImage(MIMENonMultipart): ) -> None: """Create an image/* type MIME document. -_imagedata contains the bytes for the raw image data. If the data -type can be detected (jpeg, png, gif, tiff, rgb, pbm, pgm, ppm, -rast, xbm, bmp, webp, and exr attempted), then the subtype will be -automatically included in the Content-Type header. Otherwise, you can -specify the specific image subtype via the _subtype parameter. + _imagedata contains the bytes for the raw image data. If the data + type can be detected (jpeg, png, gif, tiff, rgb, pbm, pgm, ppm, + rast, xbm, bmp, webp, and exr attempted), then the subtype will be + automatically included in the Content-Type header. Otherwise, you can + specify the specific image subtype via the _subtype parameter. -_encoder is a function which will perform the actual encoding for -transport of the image data. It takes one argument, which is this -Image instance. It should use get_payload() and set_payload() to -change the payload to the encoded form. It should also add any -Content-Transfer-Encoding or other headers to the message as -necessary. The default encoding is Base64. + _encoder is a function which will perform the actual encoding for + transport of the image data. It takes one argument, which is this + Image instance. It should use get_payload() and set_payload() to + change the payload to the encoded form. It should also add any + Content-Transfer-Encoding or other headers to the message as + necessary. The default encoding is Base64. -Any additional keyword arguments are passed to the base class -constructor, which turns them into parameters on the Content-Type -header. -""" + Any additional keyword arguments are passed to the base class + constructor, which turns them into parameters on the Content-Type + header. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi index ca033b0758ca9..eb854b713e20a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/message.pyi @@ -1,5 +1,5 @@ -"""Class representing message/* MIME documents. -""" +"""Class representing message/* MIME documents.""" + from email._policybase import _MessageT from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy @@ -7,15 +7,15 @@ from email.policy import Policy __all__ = ["MIMEMessage"] class MIMEMessage(MIMENonMultipart): - """Class representing message/* MIME documents. -""" + """Class representing message/* MIME documents.""" + def __init__(self, _msg: _MessageT, _subtype: str = "rfc822", *, policy: Policy[_MessageT] | None = None) -> None: """Create a message/* type MIME document. -_msg is a message object and must be an instance of Message, or a -derived class of Message, otherwise a TypeError is raised. + _msg is a message object and must be an instance of Message, or a + derived class of Message, otherwise a TypeError is raised. -Optional _subtype defines the subtype of the contained message. The -default is "rfc822" (this is defined by the MIME standard, even though -the term "rfc822" is technically outdated by RFC 2822). -""" + Optional _subtype defines the subtype of the contained message. 
The + default is "rfc822" (this is defined by the MIME standard, even though + the term "rfc822" is technically outdated by RFC 2822). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi index 3ec6352afdfc4..8ac0e60e8be3f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/multipart.pyi @@ -1,5 +1,5 @@ -"""Base class for MIME multipart/* type messages. -""" +"""Base class for MIME multipart/* type messages.""" + from collections.abc import Sequence from email import _ParamsType from email._policybase import _MessageT @@ -9,8 +9,8 @@ from email.policy import Policy __all__ = ["MIMEMultipart"] class MIMEMultipart(MIMEBase): - """Base class for MIME multipart/* type messages. -""" + """Base class for MIME multipart/* type messages.""" + def __init__( self, _subtype: str = "mixed", @@ -22,19 +22,19 @@ class MIMEMultipart(MIMEBase): ) -> None: """Creates a multipart/* type message. -By default, creates a multipart/mixed message, with proper -Content-Type and MIME-Version headers. + By default, creates a multipart/mixed message, with proper + Content-Type and MIME-Version headers. -_subtype is the subtype of the multipart content type, defaulting to -'mixed'. + _subtype is the subtype of the multipart content type, defaulting to + 'mixed'. -boundary is the multipart boundary string. By default it is -calculated as needed. + boundary is the multipart boundary string. By default it is + calculated as needed. -_subparts is a sequence of initial subparts for the payload. It -must be an iterable object, such as a list. You can always -attach new subparts to the message by using the attach() method. + _subparts is a sequence of initial subparts for the payload. It + must be an iterable object, such as a list. You can always + attach new subparts to the message by using the attach() method. -Additional parameters for the Content-Type header are taken from the -keyword arguments (or passed into the _params argument). -""" + Additional parameters for the Content-Type header are taken from the + keyword arguments (or passed into the _params argument). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi index 4b4de010d3b0b..395e5a1b362f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/nonmultipart.pyi @@ -1,9 +1,8 @@ -"""Base class for MIME type messages that are not multipart. -""" +"""Base class for MIME type messages that are not multipart.""" + from email.mime.base import MIMEBase __all__ = ["MIMENonMultipart"] class MIMENonMultipart(MIMEBase): - """Base class for MIME non-multipart type messages. -""" + """Base class for MIME non-multipart type messages.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi index aaf9542d11b09..08264521d4839 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/mime/text.pyi @@ -1,23 +1,21 @@ -"""Class representing text/* type MIME documents. 
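# A minimal usage sketch of the multipart classes annotated above; the
# subject and body strings are illustrative only.
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

outer = MIMEMultipart()          # multipart/mixed with a MIME-Version header
outer["Subject"] = "Report"
outer.attach(MIMEText("See attachment.", "plain"))
outer.attach(MIMEText("<p>See attachment.</p>", "html"))
assert outer.get_content_type() == "multipart/mixed"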
-""" +"""Class representing text/* type MIME documents.""" + from email._policybase import Policy from email.mime.nonmultipart import MIMENonMultipart __all__ = ["MIMEText"] class MIMEText(MIMENonMultipart): - """Class for generating text/* type MIME documents. -""" - def __init__( - self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None - ) -> None: + """Class for generating text/* type MIME documents.""" + + def __init__(self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None) -> None: """Create a text/* type MIME document. -_text is the string for this message object. + _text is the string for this message object. -_subtype is the MIME sub content type, defaulting to "plain". + _subtype is the MIME sub content type, defaulting to "plain". -_charset is the character set parameter added to the Content-Type -header. This defaults to "us-ascii". Note that as a side-effect, the -Content-Transfer-Encoding header will also be set. -""" + _charset is the character set parameter added to the Content-Type + header. This defaults to "us-ascii". Note that as a side-effect, the + Content-Transfer-Encoding header will also be set. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi index 2be6f8ae654a2..2cbc3f021849d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/parser.pyi @@ -1,5 +1,5 @@ -"""A parser of RFC 2822 and MIME email messages. -""" +"""A parser of RFC 2822 and MIME email messages.""" + from _typeshed import SupportsRead from collections.abc import Callable from email._policybase import _MessageT @@ -16,24 +16,25 @@ class Parser(Generic[_MessageT]): def __init__(self: Parser[Message[str, str]], _class: None = None) -> None: """Parser of RFC 2822 and MIME email messages. -Creates an in-memory object tree representing the email message, which -can then be manipulated and turned over to a Generator to return the -textual representation of the message. + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. + + The string must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceded by a 'Unix-from' header. The + header block is terminated either by the end of the string or by a + blank line. -The string must be formatted as a block of RFC 2822 headers and header -continuation lines, optionally preceded by a 'Unix-from' header. The -header block is terminated either by the end of the string or by a -blank line. + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. -_class is the class to instantiate for new message objects when they -must be created. This class must have a constructor that can take -zero arguments. Default is Message.Message. + The policy keyword specifies a policy object that controls a number of + aspects of the parser's operation. The default policy maintains + backward compatibility. -The policy keyword specifies a policy object that controls a number of -aspects of the parser's operation. The default policy maintains -backward compatibility. 
+ """ -""" @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload @@ -41,19 +42,20 @@ backward compatibility. def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> _MessageT: """Create a message structure from the data in a file. -Reads all the data from the file and returns the root of the message -structure. Optional headersonly is a flag specifying whether to stop -parsing after reading the headers or not. The default is False, -meaning it parses the entire contents of the file. -""" + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + def parsestr(self, text: str, headersonly: bool = False) -> _MessageT: """Create a message structure from a string. -Returns the root of the message structure. Optional headersonly is a -flag specifying whether to stop parsing after reading the headers or -not. The default is False, meaning it parses the entire contents of -the file. -""" + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. + """ class HeaderParser(Parser[_MessageT]): def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> _MessageT: ... @@ -65,19 +67,20 @@ class BytesParser(Generic[_MessageT]): def __init__(self: BytesParser[Message[str, str]], _class: None = None) -> None: """Parser of binary RFC 2822 and MIME email messages. -Creates an in-memory object tree representing the email message, which -can then be manipulated and turned over to a Generator to return the -textual representation of the message. + Creates an in-memory object tree representing the email message, which + can then be manipulated and turned over to a Generator to return the + textual representation of the message. -The input must be formatted as a block of RFC 2822 headers and header -continuation lines, optionally preceded by a 'Unix-from' header. The -header block is terminated either by the end of the input or by a -blank line. + The input must be formatted as a block of RFC 2822 headers and header + continuation lines, optionally preceded by a 'Unix-from' header. The + header block is terminated either by the end of the input or by a + blank line. + + _class is the class to instantiate for new message objects when they + must be created. This class must have a constructor that can take + zero arguments. Default is Message.Message. + """ -_class is the class to instantiate for new message objects when they -must be created. This class must have a constructor that can take -zero arguments. Default is Message.Message. -""" @overload def __init__(self, _class: None = None, *, policy: Policy[_MessageT]) -> None: ... @overload @@ -85,19 +88,20 @@ zero arguments. Default is Message.Message. def parse(self, fp: _WrappedBuffer, headersonly: bool = False) -> _MessageT: """Create a message structure from the data in a binary file. -Reads all the data from the file and returns the root of the message -structure. Optional headersonly is a flag specifying whether to stop -parsing after reading the headers or not. The default is False, -meaning it parses the entire contents of the file. 
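# A minimal usage sketch of the Parser / BytesParser APIs annotated above:
# parsestr builds a message tree from str, parsebytes from bytes. The header
# and body values are illustrative only.
from email.parser import Parser, BytesParser
from email import policy

msg = Parser(policy=policy.default).parsestr("Subject: Hi\n\nHello, world.\n")
assert msg["Subject"] == "Hi"

bmsg = BytesParser(policy=policy.default).parsebytes(b"Subject: Hi\n\nHello, world.\n")
assert bmsg.get_content_type() == "text/plain"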
-""" + Reads all the data from the file and returns the root of the message + structure. Optional headersonly is a flag specifying whether to stop + parsing after reading the headers or not. The default is False, + meaning it parses the entire contents of the file. + """ + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> _MessageT: """Create a message structure from a byte string. -Returns the root of the message structure. Optional headersonly is a -flag specifying whether to stop parsing after reading the headers or -not. The default is False, meaning it parses the entire contents of -the file. -""" + Returns the root of the message structure. Optional headersonly is a + flag specifying whether to stop parsing after reading the headers or + not. The default is False, meaning it parses the entire contents of + the file. + """ class BytesHeaderParser(BytesParser[_MessageT]): def parse(self, fp: _WrappedBuffer, headersonly: bool = True) -> _MessageT: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi index 3dcdb37dc44d3..14c5bda44ca16 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/policy.pyi @@ -1,6 +1,7 @@ """This will be the home for the policy that hooks in the new code that adds all the email6 features. """ + from collections.abc import Callable from email._policybase import Compat32 as Compat32, Policy as Policy, _MessageFactory, _MessageT, compat32 as compat32 from email.contentmanager import ContentManager @@ -13,113 +14,114 @@ __all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", class EmailPolicy(Policy[_MessageT]): """Controls for how messages are interpreted and formatted. -Most of the classes and many of the methods in the email package accept -Policy objects as parameters. A Policy object contains a set of values and -functions that control how input is interpreted and how output is rendered. -For example, the parameter 'raise_on_defect' controls whether or not an RFC -violation results in an error being raised or not, while 'max_line_length' -controls the maximum length of output lines when a Message is serialized. - -Any valid attribute may be overridden when a Policy is created by passing -it as a keyword argument to the constructor. Policy objects are immutable, -but a new Policy object can be created with only certain values changed by -calling the Policy instance with keyword arguments. Policy objects can -also be added, producing a new Policy object in which the non-default -attributes set in the right hand operand overwrite those specified in the -left operand. - -Settable attributes: - -raise_on_defect -- If true, then defects should be raised as errors. - Default: False. - -linesep -- string containing the value to use as separation - between output lines. Default '\\n'. - -cte_type -- Type of allowed content transfer encodings - - 7bit -- ASCII only - 8bit -- Content-Transfer-Encoding: 8bit is allowed - - Default: 8bit. Also controls the disposition of - (RFC invalid) binary data in headers; see the - documentation of the binary_fold method. - -max_line_length -- maximum length of lines, excluding 'linesep', - during serialization. None or 0 means no line - wrapping is done. Default is 78. - -mangle_from_ -- a flag that, when True escapes From_ lines in the - body of the message by putting a '>' in front of - them. 
This is used when the message is being - serialized by a generator. Default: False. - -message_factory -- the class to use to create new message objects. - If the value is None, the default is Message. - -verify_generated_headers - -- if true, the generator verifies that each header - they are properly folded, so that a parser won't - treat it as multiple headers, start-of-body, or - part of another header. - This is a check against custom Header & fold() - implementations. -PROVISIONAL - -The API extensions enabled by this policy are currently provisional. -Refer to the documentation for details. - -This policy adds new header parsing and folding algorithms. Instead of -simple strings, headers are custom objects with custom attributes -depending on the type of the field. The folding algorithm fully -implements RFCs 2047 and 5322. - -In addition to the settable attributes listed above that apply to -all Policies, this policy adds the following additional attributes: - -utf8 -- if False (the default) message headers will be - serialized as ASCII, using encoded words to encode - any non-ASCII characters in the source strings. If - True, the message headers will be serialized using - utf8 and will not contain encoded words (see RFC - 6532 for more on this serialization format). - -refold_source -- if the value for a header in the Message object - came from the parsing of some source, this attribute - indicates whether or not a generator should refold - that value when transforming the message back into - stream form. The possible values are: - - none -- all source values use original folding - long -- source values that have any line that is - longer than max_line_length will be - refolded - all -- all values are refolded. - - The default is 'long'. - -header_factory -- a callable that takes two arguments, 'name' and - 'value', where 'name' is a header field name and - 'value' is an unfolded header field value, and - returns a string-like object that represents that - header. A default header_factory is provided that - understands some of the RFC5322 header field types. - (Currently address fields and date fields have - special treatment, while all other fields are - treated as unstructured. This list will be - completed before the extension is marked stable.) - -content_manager -- an object with at least two methods: get_content - and set_content. When the get_content or - set_content method of a Message object is called, - it calls the corresponding method of this object, - passing it the message object as its first argument, - and any arguments or keywords that were passed to - it as additional arguments. The default - content_manager is - :data:`~email.contentmanager.raw_data_manager`. + Most of the classes and many of the methods in the email package accept + Policy objects as parameters. A Policy object contains a set of values and + functions that control how input is interpreted and how output is rendered. + For example, the parameter 'raise_on_defect' controls whether or not an RFC + violation results in an error being raised or not, while 'max_line_length' + controls the maximum length of output lines when a Message is serialized. + + Any valid attribute may be overridden when a Policy is created by passing + it as a keyword argument to the constructor. Policy objects are immutable, + but a new Policy object can be created with only certain values changed by + calling the Policy instance with keyword arguments. 
Policy objects can + also be added, producing a new Policy object in which the non-default + attributes set in the right hand operand overwrite those specified in the + left operand. + + Settable attributes: + + raise_on_defect -- If true, then defects should be raised as errors. + Default: False. + + linesep -- string containing the value to use as separation + between output lines. Default '\\n'. + + cte_type -- Type of allowed content transfer encodings + + 7bit -- ASCII only + 8bit -- Content-Transfer-Encoding: 8bit is allowed + + Default: 8bit. Also controls the disposition of + (RFC invalid) binary data in headers; see the + documentation of the binary_fold method. + + max_line_length -- maximum length of lines, excluding 'linesep', + during serialization. None or 0 means no line + wrapping is done. Default is 78. + + mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a '>' in front of + them. This is used when the message is being + serialized by a generator. Default: False. + + message_factory -- the class to use to create new message objects. + If the value is None, the default is Message. + + verify_generated_headers + -- if true, the generator verifies that each header + they are properly folded, so that a parser won't + treat it as multiple headers, start-of-body, or + part of another header. + This is a check against custom Header & fold() + implementations. + PROVISIONAL + + The API extensions enabled by this policy are currently provisional. + Refer to the documentation for details. + + This policy adds new header parsing and folding algorithms. Instead of + simple strings, headers are custom objects with custom attributes + depending on the type of the field. The folding algorithm fully + implements RFCs 2047 and 5322. + + In addition to the settable attributes listed above that apply to + all Policies, this policy adds the following additional attributes: + + utf8 -- if False (the default) message headers will be + serialized as ASCII, using encoded words to encode + any non-ASCII characters in the source strings. If + True, the message headers will be serialized using + utf8 and will not contain encoded words (see RFC + 6532 for more on this serialization format). + + refold_source -- if the value for a header in the Message object + came from the parsing of some source, this attribute + indicates whether or not a generator should refold + that value when transforming the message back into + stream form. The possible values are: + + none -- all source values use original folding + long -- source values that have any line that is + longer than max_line_length will be + refolded + all -- all values are refolded. + + The default is 'long'. + + header_factory -- a callable that takes two arguments, 'name' and + 'value', where 'name' is a header field name and + 'value' is an unfolded header field value, and + returns a string-like object that represents that + header. A default header_factory is provided that + understands some of the RFC5322 header field types. + (Currently address fields and date fields have + special treatment, while all other fields are + treated as unstructured. This list will be + completed before the extension is marked stable.) + + content_manager -- an object with at least two methods: get_content + and set_content. 
When the get_content or + set_content method of a Message object is called, + it calls the corresponding method of this object, + passing it the message object as its first argument, + and any arguments or keywords that were passed to + it as additional arguments. The default + content_manager is + :data:`~email.contentmanager.raw_data_manager`. + + """ -""" utf8: bool refold_source: str header_factory: Callable[[str, Any], Any] @@ -160,85 +162,90 @@ content_manager -- an object with at least two methods: get_content ) -> None: ... def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: """Given a list of linesep terminated strings constituting the lines of -a single header, return the (name, value) tuple that should be stored -in the model. The input lines should retain their terminating linesep -characters. The lines passed in by the email package may contain -surrogateescaped binary data. -The name is parsed as everything up to the ':' and returned unmodified. -The value is determined by stripping leading whitespace off the -remainder of the first line joined with all subsequent lines, and -stripping any trailing carriage return or linefeed characters. (This -is the same as Compat32). + a single header, return the (name, value) tuple that should be stored + in the model. The input lines should retain their terminating linesep + characters. The lines passed in by the email package may contain + surrogateescaped binary data. + The name is parsed as everything up to the ':' and returned unmodified. + The value is determined by stripping leading whitespace off the + remainder of the first line joined with all subsequent lines, and + stripping any trailing carriage return or linefeed characters. (This + is the same as Compat32). + + """ -""" def header_store_parse(self, name: str, value: Any) -> tuple[str, Any]: """Given the header name and the value provided by the application -program, return the (name, value) that should be stored in the model. -The name is returned unchanged. If the input value has a 'name' -attribute and it matches the name ignoring case, the value is returned -unchanged. Otherwise the name and value are passed to header_factory -method, and the resulting custom header object is returned as the -value. In this case a ValueError is raised if the input value contains -CR or LF characters. + program, return the (name, value) that should be stored in the model. + The name is returned unchanged. If the input value has a 'name' + attribute and it matches the name ignoring case, the value is returned + unchanged. Otherwise the name and value are passed to header_factory + method, and the resulting custom header object is returned as the + value. In this case a ValueError is raised if the input value contains + CR or LF characters. + + """ -""" def header_fetch_parse(self, name: str, value: str) -> Any: """Given the header name and the value from the model, return the value -to be returned to the application program that is requesting that -header. The value passed in by the email package may contain -surrogateescaped binary data if the lines were parsed by a BytesParser. -The returned value should not contain any surrogateescaped data. + to be returned to the application program that is requesting that + header. The value passed in by the email package may contain + surrogateescaped binary data if the lines were parsed by a BytesParser. + The returned value should not contain any surrogateescaped data. 
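# A minimal usage sketch of the policy combinators described in the class
# docstring above: clone() changes selected attributes, '+' merges two
# policies with the right operand's non-default values winning.
from email import policy

long_lines = policy.default.clone(max_line_length=120)
assert long_lines.max_line_length == 120

strict_smtp = policy.SMTP + policy.strict
assert strict_smtp.raise_on_defect is True and strict_smtp.linesep == "\r\n"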
-If the value has a 'name' attribute, it is returned to unmodified. -Otherwise the name and the value with any linesep characters removed -are passed to the header_factory method, and the resulting custom -header object is returned. Any surrogateescaped bytes get turned -into the unicode unknown-character glyph. + If the value has a 'name' attribute, it is returned to unmodified. + Otherwise the name and the value with any linesep characters removed + are passed to the header_factory method, and the resulting custom + header object is returned. Any surrogateescaped bytes get turned + into the unicode unknown-character glyph. + + """ -""" def fold(self, name: str, value: str) -> Any: """Given the header name and the value from the model, return a string -containing linesep characters that implement the folding of the header -according to the policy controls. The value passed in by the email -package may contain surrogateescaped binary data if the lines were -parsed by a BytesParser. The returned value should not contain any -surrogateescaped data. - -Header folding is controlled by the refold_source policy setting. A -value is considered to be a 'source value' if and only if it does not -have a 'name' attribute (having a 'name' attribute means it is a header -object of some sort). If a source value needs to be refolded according -to the policy, it is converted into a custom header object by passing -the name and the value with any linesep characters removed to the -header_factory method. Folding of a custom header object is done by -calling its fold method with the current policy. - -Source values are split into lines using splitlines. If the value is -not to be refolded, the lines are rejoined using the linesep from the -policy and returned. The exception is lines containing non-ascii -binary data. In that case the value is refolded regardless of the -refold_source setting, which causes the binary data to be CTE encoded -using the unknown-8bit charset. + containing linesep characters that implement the folding of the header + according to the policy controls. The value passed in by the email + package may contain surrogateescaped binary data if the lines were + parsed by a BytesParser. The returned value should not contain any + surrogateescaped data. + + Header folding is controlled by the refold_source policy setting. A + value is considered to be a 'source value' if and only if it does not + have a 'name' attribute (having a 'name' attribute means it is a header + object of some sort). If a source value needs to be refolded according + to the policy, it is converted into a custom header object by passing + the name and the value with any linesep characters removed to the + header_factory method. Folding of a custom header object is done by + calling its fold method with the current policy. + + Source values are split into lines using splitlines. If the value is + not to be refolded, the lines are rejoined using the linesep from the + policy and returned. The exception is lines containing non-ascii + binary data. In that case the value is refolded regardless of the + refold_source setting, which causes the binary data to be CTE encoded + using the unknown-8bit charset. + + """ -""" def fold_binary(self, name: str, value: str) -> bytes: """Given the header name and the value from the model, return binary -data containing linesep characters that implement the folding of the -header according to the policy controls. 
The value passed in by the -email package may contain surrogateescaped binary data. + data containing linesep characters that implement the folding of the + header according to the policy controls. The value passed in by the + email package may contain surrogateescaped binary data. -The same as fold if cte_type is 7bit, except that the returned value is -bytes. + The same as fold if cte_type is 7bit, except that the returned value is + bytes. -If cte_type is 8bit, non-ASCII binary data is converted back into -bytes. Headers with binary data are not refolded, regardless of the -refold_header setting, since there is no way to know whether the binary -data consists of single byte characters or multibyte characters. + If cte_type is 8bit, non-ASCII binary data is converted back into + bytes. Headers with binary data are not refolded, regardless of the + refold_header setting, since there is no way to know whether the binary + data consists of single byte characters or multibyte characters. -If utf8 is true, headers are encoded to utf8, otherwise to ascii with -non-ASCII unicode rendered as encoded words. + If utf8 is true, headers are encoded to utf8, otherwise to ascii with + non-ASCII unicode rendered as encoded words. + + """ -""" def clone( self, *, @@ -257,10 +264,10 @@ non-ASCII unicode rendered as encoded words. ) -> Self: """Return a new instance with specified attributes changed. -The new instance has the same attribute values as the current object, -except for the changes passed in as keyword arguments. + The new instance has the same attribute values as the current object, + except for the changes passed in as keyword arguments. -""" + """ default: EmailPolicy[EmailMessage] SMTP: EmailPolicy[EmailMessage] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi index 1235cb497b4d2..7bdd774117e8c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/quoprimime.pyi @@ -21,6 +21,7 @@ conversion necessary for proper internationalized headers; it only does dumb encoding and decoding. To deal with the various line wrapping issues, use the email.header module. """ + from collections.abc import Iterable __all__ = [ @@ -37,73 +38,78 @@ __all__ = [ ] def header_check(octet: int) -> bool: - """Return True if the octet should be escaped with header quopri. -""" + """Return True if the octet should be escaped with header quopri.""" + def body_check(octet: int) -> bool: - """Return True if the octet should be escaped with body quopri. -""" + """Return True if the octet should be escaped with body quopri.""" + def header_length(bytearray: Iterable[int]) -> int: """Return a header quoted-printable encoding length. -Note that this does not include any RFC 2047 chrome added by -`header_encode()`. + Note that this does not include any RFC 2047 chrome added by + `header_encode()`. + + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for headers. + """ -:param bytearray: An array of bytes (a.k.a. octets). -:return: The length in bytes of the byte array when it is encoded with - quoted-printable for headers. -""" def body_length(bytearray: Iterable[int]) -> int: """Return a body quoted-printable encoding length. -:param bytearray: An array of bytes (a.k.a. octets). 
-:return: The length in bytes of the byte array when it is encoded with - quoted-printable for bodies. -""" + :param bytearray: An array of bytes (a.k.a. octets). + :return: The length in bytes of the byte array when it is encoded with + quoted-printable for bodies. + """ + def unquote(s: str | bytes | bytearray) -> str: - """Turn a string in the form =AB to the ASCII character with value 0xab -""" + """Turn a string in the form =AB to the ASCII character with value 0xab""" + def quote(c: str | bytes | bytearray) -> str: ... def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: """Encode a single header line with quoted-printable (like) encoding. -Defined in RFC 2045, this 'Q' encoding is similar to quoted-printable, but -used specifically for email header fields to allow charsets with mostly 7 -bit characters (and some 8 bit) to remain more or less readable in non-RFC -2045 aware mail clients. + Defined in RFC 2045, this 'Q' encoding is similar to quoted-printable, but + used specifically for email header fields to allow charsets with mostly 7 + bit characters (and some 8 bit) to remain more or less readable in non-RFC + 2045 aware mail clients. + + charset names the character set to use in the RFC 2046 header. It + defaults to iso-8859-1. + """ -charset names the character set to use in the RFC 2046 header. It -defaults to iso-8859-1. -""" def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: """Encode with quoted-printable, wrapping at maxlinelen characters. -Each line of encoded text will end with eol, which defaults to "\\n". Set -this to "\\r\\n" if you will be using the result of this function directly -in an email. + Each line of encoded text will end with eol, which defaults to "\\n". Set + this to "\\r\\n" if you will be using the result of this function directly + in an email. -Each line will be wrapped at, at most, maxlinelen characters before the -eol string (maxlinelen defaults to 76 characters, the maximum value -permitted by RFC 2045). Long lines will have the 'soft line break' -quoted-printable character "=" appended to them, so the decoded text will -be identical to the original text. + Each line will be wrapped at, at most, maxlinelen characters before the + eol string (maxlinelen defaults to 76 characters, the maximum value + permitted by RFC 2045). Long lines will have the 'soft line break' + quoted-printable character "=" appended to them, so the decoded text will + be identical to the original text. -The minimum maxlinelen is 4 to have room for a quoted character ("=XX") -followed by a soft line break. Smaller values will generate a -ValueError. + The minimum maxlinelen is 4 to have room for a quoted character ("=XX") + followed by a soft line break. Smaller values will generate a + ValueError. + + """ -""" def decode(encoded: str, eol: str = "\n") -> str: """Decode a quoted-printable string. -Lines are separated with eol, which defaults to \\n. -""" + Lines are separated with eol, which defaults to \\n. + """ + def header_decode(s: str) -> str: """Decode a string encoded with RFC 2045 MIME header 'Q' encoding. -This function does not parse a full MIME header value encoded with -quoted-printable (like =?iso-8859-1?q?Hello_World?=) -- please use -the high level email.header class for that functionality. -""" + This function does not parse a full MIME header value encoded with + quoted-printable (like =?iso-8859-1?q?Hello_World?=) -- please use + the high level email.header class for that functionality. 
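# A minimal usage sketch of the quoted-printable helpers annotated above;
# the sample strings are illustrative only.
from email import quoprimime

assert quoprimime.unquote("=41") == "A"
assert quoprimime.header_decode("caf=E9") == "caf\xe9"
encoded = quoprimime.header_encode(b"caf\xe9", charset="iso-8859-1")
assert encoded.startswith("=?iso-8859-1?q?")
wrapped = quoprimime.body_encode("x" * 100)    # long runs get '=' soft line breaks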
+ """ body_decode = decode decodestring = decode diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi index 1fea5d713d40f..009e95f2a866f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/email/utils.pyi @@ -1,5 +1,5 @@ -"""Miscellaneous utilities. -""" +"""Miscellaneous utilities.""" + import datetime import sys from _typeshed import Unused @@ -32,59 +32,62 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None def quote(str: str) -> str: """Prepare string to be used in a quoted string. -Turns backslash and double quote characters into quoted pairs. These -are the only characters that need to be quoted inside a quoted string. -Does not add the surrounding double quotes. -""" + Turns backslash and double quote characters into quoted pairs. These + are the only characters that need to be quoted inside a quoted string. + Does not add the surrounding double quotes. + """ + def unquote(str: str) -> str: - """Remove quotes from a string. -""" + """Remove quotes from a string.""" # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: """ -Parse addr into its constituent realname and email address parts. + Parse addr into its constituent realname and email address parts. -Return a tuple of realname and email address, unless the parse fails, in -which case return a 2-tuple of ('', ''). + Return a tuple of realname and email address, unless the parse fails, in + which case return a 2-tuple of ('', ''). + + If strict is True, use a strict parser which rejects malformed inputs. + """ -If strict is True, use a strict parser which rejects malformed inputs. -""" def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: """The inverse of parseaddr(), this takes a 2-tuple of the form -(realname, email_address) and returns the string value suitable -for an RFC 2822 From, To or Cc header. + (realname, email_address) and returns the string value suitable + for an RFC 2822 From, To or Cc header. -If the first element of pair is false, then the second element is -returned unmodified. + If the first element of pair is false, then the second element is + returned unmodified. -The optional charset is the character set that is used to encode -realname in case realname is not ASCII safe. Can be an instance of str or -a Charset-like object which has a header_encode method. Default is -'utf-8'. -""" + The optional charset is the character set that is used to encode + realname in case realname is not ASCII safe. Can be an instance of str or + a Charset-like object which has a header_encode method. Default is + 'utf-8'. + """ # `strict` parameter added in Python 3.9.20, 3.10.15, 3.11.10, 3.12.5 def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. -When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in -its place. + When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in + its place. + + If strict is true, use a strict parser which rejects malformed inputs. + """ -If strict is true, use a strict parser which rejects malformed inputs. -""" @overload def parsedate(data: None) -> None: - """Convert a time string to a time tuple. 
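# A minimal usage sketch of the address helpers annotated above; the names
# and addresses are illustrative only.
from email.utils import parseaddr, formataddr, getaddresses

name, addr = parseaddr("Jane Doe <jane@example.org>")
assert (name, addr) == ("Jane Doe", "jane@example.org")
assert formataddr((name, addr)) == "Jane Doe <jane@example.org>"
pairs = getaddresses(["a@example.org, Bob <b@example.org>"])
# expected: [('', 'a@example.org'), ('Bob', 'b@example.org')]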
-""" + """Convert a time string to a time tuple.""" + @overload def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... @overload def parsedate_tz(data: None) -> None: """Convert a date string to a time tuple. -Accounts for military timezones. -""" + Accounts for military timezones. + """ + @overload def parsedate_tz(data: str) -> _PDTZ | None: ... @@ -98,55 +101,57 @@ else: def parsedate_to_datetime(data: str) -> datetime.datetime: ... def mktime_tz(data: _PDTZ) -> int: - """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp. -""" + """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp.""" + def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: """Returns a date string as specified by RFC 2822, e.g.: -Fri, 09 Nov 2001 01:08:47 -0000 + Fri, 09 Nov 2001 01:08:47 -0000 -Optional timeval if given is a floating-point time value as accepted by -gmtime() and localtime(), otherwise the current time is used. + Optional timeval if given is a floating-point time value as accepted by + gmtime() and localtime(), otherwise the current time is used. -Optional localtime is a flag that when True, interprets timeval, and -returns a date relative to the local timezone instead of UTC, properly -taking daylight savings time into account. + Optional localtime is a flag that when True, interprets timeval, and + returns a date relative to the local timezone instead of UTC, properly + taking daylight savings time into account. + + Optional argument usegmt means that the timezone is written out as + an ascii string, not numeric one (so "GMT" instead of "+0000"). This + is needed for HTTP, and is only used when localtime==False. + """ -Optional argument usegmt means that the timezone is written out as -an ascii string, not numeric one (so "GMT" instead of "+0000"). This -is needed for HTTP, and is only used when localtime==False. -""" def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: """Turn a datetime into a date string as specified in RFC 2822. -If usegmt is True, dt must be an aware datetime with an offset of zero. In -this case 'GMT' will be rendered instead of the normal +0000 required by -RFC2822. This is to support HTTP headers involving date stamps. -""" + If usegmt is True, dt must be an aware datetime with an offset of zero. In + this case 'GMT' will be rendered instead of the normal +0000 required by + RFC2822. This is to support HTTP headers involving date stamps. + """ if sys.version_info >= (3, 14): def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: """Return local time as an aware datetime object. -If called without arguments, return current time. Otherwise *dt* -argument should be a datetime instance, and it is converted to the -local time zone according to the system time zone database. If *dt* is -naive (that is, dt.tzinfo is None), it is assumed to be in local time. + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. -""" + """ elif sys.version_info >= (3, 12): @overload def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: """Return local time as an aware datetime object. -If called without arguments, return current time. 
Otherwise *dt* -argument should be a datetime instance, and it is converted to the -local time zone according to the system time zone database. If *dt* is -naive (that is, dt.tzinfo is None), it is assumed to be in local time. -The isdst parameter is ignored. + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. + The isdst parameter is ignored. + + """ -""" @overload @deprecated("The `isdst` parameter does nothing and will be removed in Python 3.14.") def localtime(dt: datetime.datetime | None = None, isdst: Unused = None) -> datetime.datetime: ... @@ -155,41 +160,43 @@ else: def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: """Return local time as an aware datetime object. - If called without arguments, return current time. Otherwise *dt* - argument should be a datetime instance, and it is converted to the - local time zone according to the system time zone database. If *dt* is - naive (that is, dt.tzinfo is None), it is assumed to be in local time. - In this case, a positive or zero value for *isdst* causes localtime to - presume initially that summer time (for example, Daylight Saving Time) - is or is not (respectively) in effect for the specified time. A - negative value for *isdst* causes the localtime() function to attempt - to divine whether summer time is in effect for the specified time. + If called without arguments, return current time. Otherwise *dt* + argument should be a datetime instance, and it is converted to the + local time zone according to the system time zone database. If *dt* is + naive (that is, dt.tzinfo is None), it is assumed to be in local time. + In this case, a positive or zero value for *isdst* causes localtime to + presume initially that summer time (for example, Daylight Saving Time) + is or is not (respectively) in effect for the specified time. A + negative value for *isdst* causes the localtime() function to attempt + to divine whether summer time is in effect for the specified time. - """ + """ def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: -<142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> + <142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> + + Optional idstring if given is a string used to strengthen the + uniqueness of the message id. Optional domain if given provides the + portion of the message id after the '@'. It defaults to the locally + defined hostname. + """ -Optional idstring if given is a string used to strengthen the -uniqueness of the message id. Optional domain if given provides the -portion of the message id after the '@'. It defaults to the locally -defined hostname. -""" def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: # May return list[str]. See issue #10431 for details. - """Decode string according to RFC 2231 -""" + """Decode string according to RFC 2231""" + def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: """Encode string according to RFC 2231. -If neither charset nor language is given, then s is returned as-is. If -charset is given but not language, the string is encoded using the empty -string for language. 
-""" + If neither charset nor language is given, then s is returned as-is. If + charset is given but not language, the string is encoded using the empty + string for language. + """ + def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: """Decode parameters list according to RFC 2231. -params is a sequence of 2-tuples containing (param name, string value). -""" + params is a sequence of 2-tuples containing (param name, string value). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi index 1dbb623a90395..7d26d92022bf7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/__init__.pyi @@ -27,6 +27,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import sys from codecs import CodecInfo @@ -35,14 +36,15 @@ class CodecRegistryError(LookupError, SystemError): ... def normalize_encoding(encoding: str | bytes) -> str: """Normalize an encoding name. -Normalization works as follows: all non-alphanumeric -characters except the dot used for Python package names are -collapsed and replaced with a single underscore, e.g. ' -;#' -becomes '_'. Leading and trailing underscores are removed. + Normalization works as follows: all non-alphanumeric + characters except the dot used for Python package names are + collapsed and replaced with a single underscore, e.g. ' -;#' + becomes '_'. Leading and trailing underscores are removed. -Note that encoding names should be ASCII only. + Note that encoding names should be ASCII only. + + """ -""" def search_function(encoding: str) -> CodecInfo | None: ... if sys.version_info >= (3, 14) and sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi index 8df8b5b4bcef3..ca13370cc64c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/aliases.pyi @@ -15,4 +15,5 @@ Contents: aliases have also been added. """ + aliases: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi index 839a54a56cfac..62be1000b29fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ascii.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi index d99547405e919..dedf3be9c8d7b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/base64_codec.pyi @@ -4,6 +4,7 @@ This codec de/encodes from bytes to bytes. Written by Marc-Andre Lemburg (mal@lemburg.com). 
""" + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi index 41cf27b9dc9be..cf0f6ff30ed44 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/bz2_codec.pyi @@ -6,6 +6,7 @@ bytes.transform() and bytes.untransform(). Adapted by Raymond Hettinger from zlib_codec.py which was written by Marc-Andre Lemburg (mal@lemburg.com). """ + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi index ed4c6998e29f9..83d14fdf043b1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/charmap.pyi @@ -9,6 +9,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _codecs import _CharMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi index 2d9d546d0aae1..d8415c0d99c41 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp037.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py. +"""Python Character Mapping Codec cp037 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP037.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi index 9911bf2bcadb8..7d4eed710a85f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1006.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1006 generated from 'MAPPINGS/VENDORS/MISC/CP1006.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1006 generated from 'MAPPINGS/VENDORS/MISC/CP1006.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi index e8aa63d4861fc..9dbbf0d295a69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1026.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1026 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP1026.TXT' with gencodec.py. 
+"""Python Character Mapping Codec cp1026 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP1026.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi index 5928bba909ac9..65f02593dd57a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1125.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec for CP1125 +"""Python Character Mapping Codec for CP1125""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi index db39537e8e697..1477632e88765 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1140.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi index 768b5ba52ffec..846276ec73cc0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1250.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi index 3cfd94e72c721..a945fdd5588a8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1251.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1251 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1251.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1251 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1251.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi index 73a62eb34bf25..5e25a55b97fdf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1252.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1252 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1252.TXT' with gencodec.py. 
+"""Python Character Mapping Codec cp1252 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1252.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi index 4112c83862fee..db602cb995629 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1253.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi index 7616527f6613f..3618e631b67fb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1254.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1254 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1254.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi index f6be3fe317870..1a094bc49b1bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1255.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi index bb72d150c919c..470adf7eeb682 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1256.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1256 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1256.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi index 9c314fafa000b..81a6a39c0c1a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1257.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1257 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1257.TXT' with gencodec.py. 
+"""Python Character Mapping Codec cp1257 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1257.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi index 9e062ce722bb5..51322b0474634 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp1258.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py. +"""Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi index f8929b8551420..fb2b9c0234508 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp273.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp273 generated from 'python-mappings/CP273.TXT' with gencodec.py. +"""Python Character Mapping Codec cp273 generated from 'python-mappings/CP273.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi index c286b10e89a69..ef2d7e50a524d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp424.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp424 generated from 'MAPPINGS/VENDORS/MISC/CP424.TXT' with gencodec.py. +"""Python Character Mapping Codec cp424 generated from 'MAPPINGS/VENDORS/MISC/CP424.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi index dde30f54793e6..da6ce75fb45c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp437.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py. +"""Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi index 9f1feafe2627b..9d7ba23c0dd09 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp500.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py. 
+"""Python Character Mapping Codec cp500 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP500.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi index 0da760c221487..12222a8b75e9a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp720.pyi @@ -2,6 +2,7 @@ Vista 6.0.6002 SP2 Multiprocessor Free with the command: python Tools/unicode/genwincodec.py 720 """ + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi index dee32d17102f0..d1be95602f378 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp737.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py. +"""Python Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi index e3510bfd79a37..e6c838d21660a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp775.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py. +"""Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi index cf5950bb1bcc8..cf3d6c9362d6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp850.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP850.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP850.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi index 08c02032beb9d..0caf316eea694 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp852.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi index dfe94baa778c9..449b6570bb78c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp855.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py. 
+"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi index 6960ee53524c5..f9e2b6cd6e5a7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp856.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp856 generated from 'MAPPINGS/VENDORS/MISC/CP856.TXT' with gencodec.py. +"""Python Character Mapping Codec cp856 generated from 'MAPPINGS/VENDORS/MISC/CP856.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi index 03974a0f3fe7a..10d9eedf067e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp857.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi index c00a097e44526..68a9a7186bf6a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp858.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec for CP858, modified from cp850. +"""Python Character Mapping Codec for CP858, modified from cp850.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi index f310db660229a..3cb4ca72b65bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp860.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi index 3ccf87543a751..dfe41216e8cac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp861.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP861.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP861.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi index 49d3c3058038d..0846ae9a4ee63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp862.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py. 
+"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi index bce9e460911e8..2c8e9cd40f4d9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp863.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP863.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP863.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi index b127633844fa8..26402123e7fc1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp864.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP864.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP864.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi index 4fe0d2fb5eed7..ff9d33db6f61a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp865.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP865.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP865.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi index 010e87152bed9..f581f58b95aab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp866.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP866.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP866.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi index 56729dd9a51d5..d047ab42bbc4e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp869.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP869.TXT' with gencodec.py. +"""Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP869.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi index beaedab8cee3c..7bceeabf2180d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp874.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp874 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP874.TXT' with gencodec.py. 
+"""Python Character Mapping Codec cp874 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP874.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi index c4ef03a7d68f3..c14444902d081 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/cp875.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py. +"""Python Character Mapping Codec cp875 generated from 'MAPPINGS/VENDORS/MICSFT/EBCDIC/CP875.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi index 38105cadc8751..34274186368c8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hex_codec.pyi @@ -4,6 +4,7 @@ This codec de/encodes from bytes to bytes. Written by Marc-Andre Lemburg (mal@lemburg.com). """ + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi index e1b72d93939db..eeaea3e14ab56 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/hp_roman8.pyi @@ -8,6 +8,7 @@ Original source: LaserJet IIP Printer User's Manual HP part no (Used with permission) """ + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi index aac4d48233a4c..a557aa1885a05 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_1.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi index 485826075f70c..eb77204555dfe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_10.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py. 
+"""Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi index 14a2435b249e5..ff75663e36a03 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_11.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi index 5784003dee398..b83a44185d724 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_13.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_13 generated from 'MAPPINGS/ISO8859/8859-13.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi index 1a579986fc269..99db89a0ef0b4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_14.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_14 generated from 'MAPPINGS/ISO8859/8859-14.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_14 generated from 'MAPPINGS/ISO8859/8859-14.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi index ab0702687963c..88271ed1d4ac9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_15.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_15 generated from 'MAPPINGS/ISO8859/8859-15.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_15 generated from 'MAPPINGS/ISO8859/8859-15.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi index 86b2cea4a71fc..1f6c8c8db1f4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_16.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_16 generated from 'MAPPINGS/ISO8859/8859-16.TXT' with gencodec.py. 
+"""Python Character Mapping Codec iso8859_16 generated from 'MAPPINGS/ISO8859/8859-16.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi index 7d092ab745e91..c812f8a5c1c36 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_2.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_2 generated from 'MAPPINGS/ISO8859/8859-2.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_2 generated from 'MAPPINGS/ISO8859/8859-2.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi index 3904ff4fa378a..8414a39157763 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_3.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_3 generated from 'MAPPINGS/ISO8859/8859-3.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi index f47898334be55..49291ee183024 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_4.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi index 058a1cc3193eb..636c07388ea7e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_5.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi index cc20e96c929ce..2664a18952f12 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_6.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py. 
+"""Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi index 63d81d0f5aafb..6716b9702261b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_7.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi index 8349fa7c1f97f..308832217ab1f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_8.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi index 63dc869b5033d..9d06803516a07 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/iso8859_9.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py. +"""Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi index 0dde852029a6e..85393efacd982 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_r.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py. 
+"""Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi index a3cde64e83566..5d8feb27ce7ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_t.pyi @@ -1,5 +1,5 @@ -"""Python Character Mapping Codec koi8_t -""" +"""Python Character Mapping Codec koi8_t""" + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi index 9bbec4eee5790..feb2736531031 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/koi8_u.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec koi8_u generated from 'python-mappings/KOI8-U.TXT' with gencodec.py. +"""Python Character Mapping Codec koi8_u generated from 'python-mappings/KOI8-U.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi index 79fb883736d59..1bcefed6c332c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/kz1048.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec kz1048 generated from 'MAPPINGS/VENDORS/MISC/KZ1048.TXT' with gencodec.py. +"""Python Character Mapping Codec kz1048 generated from 'MAPPINGS/VENDORS/MISC/KZ1048.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi index 3b8f1333efdb5..87419a70e2876 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/latin_1.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi index 01017388d52cc..b267b1eb49171 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_arabic.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py. 
+"""Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py.""" -""" import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi index 010aa9bb6994a..b5894c73b3da0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_croatian.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi index 62a00f17b54f7..6314158198fa4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_cyrillic.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi index 291eeaf426cd7..3b73beecb1ac7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_farsi.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_farsi generated from 'MAPPINGS/VENDORS/APPLE/FARSI.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_farsi generated from 'MAPPINGS/VENDORS/APPLE/FARSI.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi index 782d5af6052b1..9b49f6b8f099d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_greek.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi index 0bd4684e85a53..c0a02d06ce606 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_iceland.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' with gencodec.py. 
+"""Python Character Mapping Codec mac_iceland generated from 'MAPPINGS/VENDORS/APPLE/ICELAND.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi index a68a7948f779c..3e0b50e5f8676 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_latin2.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright 2000 Guido van Rossum. """ + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi index b6194f11f0213..387546e419e69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_roman.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi index ac963c8b4078c..c2f955eb496fa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_romanian.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi index bcb971dc59151..4858ecacd4abd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mac_turkish.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec mac_turkish generated from 'MAPPINGS/VENDORS/APPLE/TURKISH.TXT' with gencodec.py. +"""Python Character Mapping Codec mac_turkish generated from 'MAPPINGS/VENDORS/APPLE/TURKISH.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi index 17a553fbc1841..9332b0587d11d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/mbcs.pyi @@ -7,6 +7,7 @@ which was written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
""" + import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi index 62215026427e0..65a0e13a27e0f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/oem.pyi @@ -1,6 +1,5 @@ -"""Python 'oem' Codec for Windows +"""Python 'oem' Codec for Windows""" -""" import codecs import sys from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi index dddb00be487eb..9b1e097e9634f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/palmos.pyi @@ -3,6 +3,7 @@ Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py. """ + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi index 0bae97e2c54dc..a0fca2c6d09e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/ptcp154.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright 2000 Guido van Rossum. """ + import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi index 9cee3cfa76323..69d61ef8e2faa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/punycode.pyi @@ -2,43 +2,45 @@ Written by Martin v. Löwis. """ + import codecs from typing import Literal def segregate(str: str) -> tuple[bytes, list[int]]: - """3.1 Basic code point segregation -""" + """3.1 Basic code point segregation""" + def selective_len(str: str, max: int) -> int: - """Return the length of str, considering only characters below max. -""" + """Return the length of str, considering only characters below max.""" + def selective_find(str: str, char: str, index: int, pos: int) -> tuple[int, int]: """Return a pair (index, pos), indicating the next occurrence of -char in str. index is the position of the character considering -only ordinals up to and including char, and pos is the position in -the full string. index/pos is the starting position in the full -string. -""" + char in str. index is the position of the character considering + only ordinals up to and including char, and pos is the position in + the full string. index/pos is the starting position in the full + string. + """ + def insertion_unsort(str: str, extended: list[int]) -> list[int]: - """3.2 Insertion unsort coding -""" + """3.2 Insertion unsort coding""" + def T(j: int, bias: int) -> int: ... digits: Literal[b"abcdefghijklmnopqrstuvwxyz0123456789"] def generate_generalized_integer(N: int, bias: int) -> bytes: - """3.3 Generalized variable-length integers -""" + """3.3 Generalized variable-length integers""" + def adapt(delta: int, first: bool, numchars: int) -> int: ... def generate_integers(baselen: int, deltas: list[int]) -> bytes: - """3.4 Bias adaptation -""" + """3.4 Bias adaptation""" + def punycode_encode(text: str) -> bytes: ... 
def decode_generalized_number(extended: bytes, extpos: int, bias: int, errors: str) -> tuple[int, int | None]: - """3.3 Generalized variable-length integers -""" + """3.3 Generalized variable-length integers""" + def insertion_sort(base: str, extended: bytes, errors: str) -> str: - """3.2 Insertion sort coding -""" + """3.2 Insertion sort coding""" + def punycode_decode(text: memoryview | bytes | bytearray | str, errors: str) -> str: ... class Codec(codecs.Codec): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi index 015db644f9b15..0670265cba928 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/quopri_codec.pyi @@ -2,6 +2,7 @@ This codec de/encodes from bytes to bytes. """ + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi index 2991147946c35..39f2d1153795f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/raw_unicode_escape.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi index 70d1a9cf93e47..60431b2597784 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/rot_13.pyi @@ -4,6 +4,7 @@ This codec de/encodes from str to str. Written by Marc-Andre Lemburg (mal@lemburg.com). """ + import codecs from _typeshed import SupportsRead, SupportsWrite diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi index 4c1354f8cb7c0..bce7bd673ba40 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/tis_620.pyi @@ -1,6 +1,5 @@ -"""Python Character Mapping Codec tis_620 generated from 'python-mappings/TIS-620.TXT' with gencodec.py. +"""Python Character Mapping Codec tis_620 generated from 'python-mappings/TIS-620.TXT' with gencodec.py.""" -""" import codecs from _codecs import _EncodingMap from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi index 97ae3c0831e75..d1d4f8dad8352 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/undefined.pyi @@ -9,6 +9,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
""" + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi index 4e93b926ba4e4..6273f12b8bed8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/unicode_escape.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi index c5e02c77f31d6..a67441b32fc5b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi index ce5c1cc66a0bd..2c6e718beab06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_be.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi index 35b65f06bfe25..106e801258593 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_16_le.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. 
""" + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi index 48c33cfee86e7..f294f44802e51 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32.pyi @@ -1,6 +1,7 @@ """ Python 'utf-32' Codec """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi index fc525a20df6e9..360d0f69fe189 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_be.pyi @@ -1,6 +1,7 @@ """ Python 'utf-32-be' Codec """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi index 4e2e4df224fea..40149d98d20f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_32_le.pyi @@ -1,6 +1,7 @@ """ Python 'utf-32-le' Codec """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi index d2b4072637814..cc1ac51151ed5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_7.pyi @@ -2,6 +2,7 @@ Written by Brian Quinlan (brian@sweetapp.com). """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi index 3862d0eab3b97..ae6671e94b196 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi index 2a7063a200da1..1e7d176fa4400 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -7,6 +7,7 @@ This work similar to UTF-8 with the following changes: * On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these bytes will be skipped. """ + import codecs from _typeshed import ReadableBuffer diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi index 0c127094b6859..9f59d5921e27b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/uu_codec.pyi @@ -6,6 +6,7 @@ Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were adapted from uu.py which was written by Lance Ellinghouse and modified by Jack Jansen and Fredrik Lundh. 
""" + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi index 25a18bc939ecb..7926d78c0c22c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/encodings/zlib_codec.pyi @@ -4,6 +4,7 @@ This codec de/encodes from bytes to bytes. Written by Marc-Andre Lemburg (mal@lemburg.com). """ + import codecs from _typeshed import ReadableBuffer from typing import ClassVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi index aca7c1a5200f4..c203a2e65b00b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ensurepip/__init__.pyi @@ -2,8 +2,9 @@ __all__ = ["version", "bootstrap"] def version() -> str: """ -Returns a string specifying the bundled version of pip. -""" + Returns a string specifying the bundled version of pip. + """ + def bootstrap( *, root: str | None = None, @@ -14,8 +15,8 @@ def bootstrap( verbosity: int = 0, ) -> None: """ -Bootstrap pip into the current Python installation (or the given root -directory). + Bootstrap pip into the current Python installation (or the given root + directory). -Note that calling this function will alter both sys.path and os.environ. -""" + Note that calling this function will alter both sys.path and os.environ. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi index 1234838b6810a..b9933de380be8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi @@ -58,25 +58,28 @@ _Signature: TypeAlias = Any # TODO: Unable to import Signature from inspect mod if sys.version_info >= (3, 11): class nonmember(Generic[_EnumMemberT]): """ -Protects item from becoming an Enum member during class creation. -""" + Protects item from becoming an Enum member during class creation. + """ + value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... class member(Generic[_EnumMemberT]): """ -Forces item to become an Enum member during class creation. -""" + Forces item to become an Enum member during class creation. + """ + value: _EnumMemberT def __init__(self, value: _EnumMemberT) -> None: ... class _EnumDict(dict[str, Any]): """ -Track enum member order and ensure member names are not reused. + Track enum member order and ensure member names are not reused. + + EnumType will use the names found in self._member_names as the + enumeration member names. + """ -EnumType will use the names found in self._member_names as the -enumeration member names. -""" if sys.version_info >= (3, 13): def __init__(self, cls_name: str | None = None) -> None: ... else: @@ -84,13 +87,13 @@ enumeration member names. def __setitem__(self, key: str, value: Any) -> None: """ -Changes anything not dundered or not a descriptor. + Changes anything not dundered or not a descriptor. -If an enum member name is used twice, an error is raised; duplicate -values are not checked for. + If an enum member name is used twice, an error is raised; duplicate + values are not checked for. -Single underscore (sunder) names are reserved. -""" + Single underscore (sunder) names are reserved. 
+ """ if sys.version_info >= (3, 11): # See comment above `typing.MutableMapping.update` # for why overloads are preferable to a Union here @@ -111,8 +114,9 @@ if sys.version_info >= (3, 13): # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(type): """ -Metaclass for Enum -""" + Metaclass for Enum + """ + if sys.version_info >= (3, 11): def __new__( metacls: type[_typeshed.Self], @@ -133,30 +137,31 @@ Metaclass for Enum def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: """ -Return members in definition order. -""" + Return members in definition order. + """ + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: """ -Return members in reverse definition order. -""" + Return members in reverse definition order. + """ if sys.version_info >= (3, 12): def __contains__(self: type[Any], value: object) -> bool: """Return True if `value` is in `cls`. -`value` is in `cls` if: -1) `value` is a member of `cls`, or -2) `value` is the value of one of the `cls`'s members. -3) `value` is a pseudo-member (flags) -""" + `value` is in `cls` if: + 1) `value` is a member of `cls`, or + 2) `value` is the value of one of the `cls`'s members. + 3) `value` is a pseudo-member (flags) + """ elif sys.version_info >= (3, 11): def __contains__(self: type[Any], member: object) -> bool: """ - Return True if member is a member of this enum - raises TypeError if member is not an enum member + Return True if member is a member of this enum + raises TypeError if member is not an enum member - note: in 3.12 TypeError will no longer be raised, and True will also be - returned if member is the value of a member in this enum - """ + note: in 3.12 TypeError will no longer be raised, and True will also be + returned if member is the value of a member in this enum + """ elif sys.version_info >= (3, 10): def __contains__(self: type[Any], obj: object) -> bool: ... else: @@ -164,56 +169,59 @@ Return members in reverse definition order. def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: """ -Return the member matching `name`. -""" + Return the member matching `name`. + """ + @_builtins_property def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: """ -Returns a mapping of member name->value. + Returns a mapping of member name->value. + + This mapping lists all enum members, including aliases. Note that this + is a read-only view of the internal mapping. + """ -This mapping lists all enum members, including aliases. Note that this -is a read-only view of the internal mapping. -""" def __len__(self) -> int: """ -Return the number of members (no aliases) -""" + Return the number of members (no aliases) + """ + def __bool__(self) -> Literal[True]: """ -classes/types should always be True. -""" + classes/types should always be True. + """ + def __dir__(self) -> list[str]: ... # Overload 1: Value lookup on an already existing enum class (simple case) @overload def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: """ -Either returns an existing member, or creates a new enum class. - -This method is used both when an enum class is given a value to match -to an enumeration member (i.e. Color(3)) and for the functional API -(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + Either returns an existing member, or creates a new enum class. 
-The value lookup branch is chosen if the enum is final. + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='RED GREEN BLUE')). -When used for the functional API: + The value lookup branch is chosen if the enum is final. -`value` will be the name of the new class. + When used for the functional API: -`names` should be either a string of white-space/comma delimited names -(values will start at `start`), or an iterator/mapping of name, value pairs. + `value` will be the name of the new class. -`module` should be set to the module this class is being created in; -if it is not set, an attempt to find that module will be made, but if -it fails the class will not be picklable. + `names` should be either a string of white-space/comma delimited names + (values will start at `start`), or an iterator/mapping of name, value pairs. -`qualname` should be set to the actual location this class can be found -at in its module; by default it is set to the global scope. If this is -not correct, unpickling will fail in some circumstances. + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. -`type`, if set, will be mixed in as the first base class. -""" + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. + `type`, if set, will be mixed in as the first base class. + """ # Overload 2: Functional API for constructing new enum classes. if sys.version_info >= (3, 11): @overload @@ -229,31 +237,31 @@ not correct, unpickling will fail in some circumstances. boundary: FlagBoundary | None = None, ) -> type[Enum]: """ -Either returns an existing member, or creates a new enum class. + Either returns an existing member, or creates a new enum class. -This method is used both when an enum class is given a value to match -to an enumeration member (i.e. Color(3)) and for the functional API -(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='RED GREEN BLUE')). -The value lookup branch is chosen if the enum is final. + The value lookup branch is chosen if the enum is final. -When used for the functional API: + When used for the functional API: -`value` will be the name of the new class. + `value` will be the name of the new class. -`names` should be either a string of white-space/comma delimited names -(values will start at `start`), or an iterator/mapping of name, value pairs. + `names` should be either a string of white-space/comma delimited names + (values will start at `start`), or an iterator/mapping of name, value pairs. -`module` should be set to the module this class is being created in; -if it is not set, an attempt to find that module will be made, but if -it fails the class will not be picklable. + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. -`qualname` should be set to the actual location this class can be found -at in its module; by default it is set to the global scope. 
If this is -not correct, unpickling will fail in some circumstances. + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. -`type`, if set, will be mixed in as the first base class. -""" + `type`, if set, will be mixed in as the first base class. + """ else: @overload def __call__( @@ -267,30 +275,29 @@ not correct, unpickling will fail in some circumstances. start: int = 1, ) -> type[Enum]: """ - Either returns an existing member, or creates a new enum class. - - This method is used both when an enum class is given a value to match - to an enumeration member (i.e. Color(3)) and for the functional API - (i.e. Color = Enum('Color', names='RED GREEN BLUE')). + Either returns an existing member, or creates a new enum class. - When used for the functional API: + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='RED GREEN BLUE')). - `value` will be the name of the new class. + When used for the functional API: - `names` should be either a string of white-space/comma delimited names - (values will start at `start`), or an iterator/mapping of name, value pairs. + `value` will be the name of the new class. - `module` should be set to the module this class is being created in; - if it is not set, an attempt to find that module will be made, but if - it fails the class will not be picklable. + `names` should be either a string of white-space/comma delimited names + (values will start at `start`), or an iterator/mapping of name, value pairs. - `qualname` should be set to the actual location this class can be found - at in its module; by default it is set to the global scope. If this is - not correct, unpickling will fail in some circumstances. + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. - `type`, if set, will be mixed in as the first base class. - """ + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. + `type`, if set, will be mixed in as the first base class. + """ # Overload 3 (py312+ only): Value lookup on an already existing enum class (complex case) # # >>> class Foo(enum.Enum): @@ -302,31 +309,31 @@ not correct, unpickling will fail in some circumstances. @overload def __call__(cls: type[_EnumMemberT], value: Any, *values: Any) -> _EnumMemberT: """ -Either returns an existing member, or creates a new enum class. + Either returns an existing member, or creates a new enum class. -This method is used both when an enum class is given a value to match -to an enumeration member (i.e. Color(3)) and for the functional API -(i.e. Color = Enum('Color', names='RED GREEN BLUE')). + This method is used both when an enum class is given a value to match + to an enumeration member (i.e. Color(3)) and for the functional API + (i.e. Color = Enum('Color', names='RED GREEN BLUE')). -The value lookup branch is chosen if the enum is final. + The value lookup branch is chosen if the enum is final. -When used for the functional API: + When used for the functional API: -`value` will be the name of the new class. 
+ `value` will be the name of the new class. -`names` should be either a string of white-space/comma delimited names -(values will start at `start`), or an iterator/mapping of name, value pairs. + `names` should be either a string of white-space/comma delimited names + (values will start at `start`), or an iterator/mapping of name, value pairs. -`module` should be set to the module this class is being created in; -if it is not set, an attempt to find that module will be made, but if -it fails the class will not be picklable. + `module` should be set to the module this class is being created in; + if it is not set, an attempt to find that module will be made, but if + it fails the class will not be picklable. -`qualname` should be set to the actual location this class can be found -at in its module; by default it is set to the global scope. If this is -not correct, unpickling will fail in some circumstances. + `qualname` should be set to the actual location this class can be found + at in its module; by default it is set to the global scope. If this is + not correct, unpickling will fail in some circumstances. -`type`, if set, will be mixed in as the first base class. -""" + `type`, if set, will be mixed in as the first base class. + """ if sys.version_info >= (3, 14): @property def __signature__(cls) -> _Signature: ... @@ -341,12 +348,13 @@ if sys.version_info >= (3, 11): class property(types.DynamicClassAttribute): """ -This is a descriptor, used to define attributes that act differently -when accessed through an enum member and through an enum class. -Instance access is the same as property(), but access to an attribute -through the enum class will instead look in the class' _member_map_ for -a corresponding enum member. -""" + This is a descriptor, used to define attributes that act differently + when accessed through an enum member and through an enum class. + Instance access is the same as property(), but access to an attribute + through the enum class will instead look in the class' _member_map_ for + a corresponding enum member. + """ + def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... name: str clsname: str @@ -358,51 +366,51 @@ else: class Enum(metaclass=EnumMeta): """ -Create a collection of name/value pairs. + Create a collection of name/value pairs. + + Example enumeration: -Example enumeration: + >>> class Color(Enum): + ... RED = 1 + ... BLUE = 2 + ... GREEN = 3 ->>> class Color(Enum): -... RED = 1 -... BLUE = 2 -... GREEN = 3 + Access them by: -Access them by: + - attribute access: -- attribute access: + >>> Color.RED + - >>> Color.RED - + - value lookup: -- value lookup: + >>> Color(1) + - >>> Color(1) - + - name lookup: -- name lookup: + >>> Color['RED'] + - >>> Color['RED'] - + Enumerations can be iterated over, and know how many members they have: -Enumerations can be iterated over, and know how many members they have: + >>> len(Color) + 3 ->>> len(Color) -3 + >>> list(Color) + [, , ] ->>> list(Color) -[, , ] + Methods can be added to enumerations, and members can have their own + attributes -- see the documentation for details. + """ -Methods can be added to enumerations, and members can have their own -attributes -- see the documentation for details. -""" @_magic_enum_attr def name(self) -> str: - """The name of the Enum member. -""" + """The name of the Enum member.""" + @_magic_enum_attr def value(self) -> Any: - """The value of the Enum member. 
-""" + """The value of the Enum member.""" _name_: str _value_: Any _ignore_: str | list[str] @@ -413,13 +421,13 @@ attributes -- see the documentation for details. @staticmethod def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: """ -Generate the next value when not given. + Generate the next value when not given. -name: the name of the member -start: the initial start value or None -count: the number of existing members -last_values: the list of values assigned -""" + name: the name of the member + start: the initial start value or None + count: the number of existing members + last_values: the list of values assigned + """ # It's not true that `__new__` will accept any argument type, # so ideally we'd use `Any` to indicate that the argument type is inexpressible. # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` @@ -428,13 +436,15 @@ last_values: the list of values assigned def __new__(cls, value: object) -> Self: ... def __dir__(self) -> list[str]: """ -Returns public methods and other interesting attributes. -""" + Returns public methods and other interesting attributes. + """ + def __hash__(self) -> int: ... def __format__(self, format_spec: str) -> str: """ Returns format using actual value type unless __str__ has been overridden. """ + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): def __copy__(self) -> Self: ... @@ -451,19 +461,20 @@ Returns public methods and other interesting attributes. if sys.version_info >= (3, 11): class ReprEnum(Enum): """ -Only changes the repr(), leaving str() and format() to the mixed-in type. -""" + Only changes the repr(), leaving str() and format() to the mixed-in type. + """ if sys.version_info >= (3, 12): class IntEnum(int, ReprEnum): """ -Enum where members are also (and must be) ints -""" + Enum where members are also (and must be) ints + """ + _value_: int @_magic_enum_attr def value(self) -> int: - """The value of the Enum member. -""" + """The value of the Enum member.""" + def __new__(cls, value: int) -> Self: ... else: @@ -475,40 +486,43 @@ else: @disjoint_base class IntEnum(int, _IntEnumBase): """ - Enum where members are also (and must be) ints - """ + Enum where members are also (and must be) ints + """ + _value_: int @_magic_enum_attr def value(self) -> int: - """The value of the Enum member. -""" + """The value of the Enum member.""" + def __new__(cls, value: int) -> Self: ... def unique(enumeration: _EnumerationT) -> _EnumerationT: """ -Class decorator for enumerations ensuring unique member values. -""" + Class decorator for enumerations ensuring unique member values. + """ _auto_null: Any class Flag(Enum): """ -Support for flags -""" + Support for flags + """ + _name_: str | None # type: ignore[assignment] _value_: int @_magic_enum_attr def name(self) -> str | None: # type: ignore[override] - """The name of the Enum member. -""" + """The name of the Enum member.""" + @_magic_enum_attr def value(self) -> int: - """The value of the Enum member. -""" + """The value of the Enum member.""" + def __contains__(self, other: Self) -> bool: """ -Returns True if self has at least the same flags set as other. -""" + Returns True if self has at least the same flags set as other. + """ + def __bool__(self) -> bool: ... def __or__(self, other: Self) -> Self: ... def __and__(self, other: Self) -> Self: ... @@ -517,8 +531,9 @@ Returns True if self has at least the same flags set as other. 
if sys.version_info >= (3, 11): def __iter__(self) -> Iterator[Self]: """ -Returns flags in definition order. -""" + Returns flags in definition order. + """ + def __len__(self) -> int: ... __ror__ = __or__ __rand__ = __and__ @@ -527,24 +542,26 @@ Returns flags in definition order. if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): """ -Enum where members are also (and must be) strings -""" + Enum where members are also (and must be) strings + """ + def __new__(cls, value: str) -> Self: ... _value_: str @_magic_enum_attr def value(self) -> str: - """The value of the Enum member. -""" + """The value of the Enum member.""" + @staticmethod def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: """ -Return the lower-cased version of the member name. -""" + Return the lower-cased version of the member name. + """ class EnumCheck(StrEnum): """ -various conditions to check an enumeration for -""" + various conditions to check an enumeration for + """ + CONTINUOUS = "no skipped integer values" NAMED_FLAGS = "multi-flag aliases may not contain unnamed flags" UNIQUE = "one name per value" @@ -555,19 +572,21 @@ various conditions to check an enumeration for class verify: """ -Check an enumeration for various constraints. (see EnumCheck) -""" + Check an enumeration for various constraints. (see EnumCheck) + """ + def __init__(self, *checks: EnumCheck) -> None: ... def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... class FlagBoundary(StrEnum): """ -control how out of range values are handled -"strict" -> error is raised [default for Flag] -"conform" -> extra bits are discarded -"eject" -> lose flag status -"keep" -> keep flag status and all bits [default for IntFlag] -""" + control how out of range values are handled + "strict" -> error is raised [default for Flag] + "conform" -> extra bits are discarded + "eject" -> lose flag status + "keep" -> keep flag status and all bits [default for IntFlag] + """ + STRICT = "strict" CONFORM = "conform" EJECT = "eject" @@ -580,33 +599,37 @@ control how out of range values are handled def global_str(self: Enum) -> str: """ -use enum_name instead of class.enum_name -""" + use enum_name instead of class.enum_name + """ + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: """ -decorator that makes the repr() of an enum member reference its module -instead of its class; also exports all members to the enum's module's -global namespace -""" + decorator that makes the repr() of an enum member reference its module + instead of its class; also exports all members to the enum's module's + global namespace + """ + def global_enum_repr(self: Enum) -> str: """ -use module.enum_name instead of class.enum_name + use module.enum_name instead of class.enum_name + + the module is the last module in case of a multi-module name + """ -the module is the last module in case of a multi-module name -""" def global_flag_repr(self: Flag) -> str: """ -use module.flag_name instead of class.flag_name + use module.flag_name instead of class.flag_name -the module is the last module in case of a multi-module name -""" + the module is the last module in case of a multi-module name + """ if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. 
class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases """ -Support for integer-based Flags -""" + Support for integer-based Flags + """ + def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -621,8 +644,9 @@ elif sys.version_info >= (3, 11): @disjoint_base class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases """ - Support for integer-based Flags - """ + Support for integer-based Flags + """ + def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -636,8 +660,9 @@ else: @disjoint_base class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases """ - Support for integer-based Flags - """ + Support for integer-based Flags + """ + def __new__(cls, value: int) -> Self: ... def __or__(self, other: int) -> Self: ... def __and__(self, other: int) -> Self: ... @@ -649,8 +674,9 @@ else: class auto: """ -Instances are replaced with an appropriate value in Enum class suites. -""" + Instances are replaced with an appropriate value in Enum class suites. + """ + _value_: Any @_magic_enum_attr def value(self) -> Any: ... @@ -662,8 +688,8 @@ Instances are replaced with an appropriate value in Enum class suites. # Ideally type checkers would special case auto enough to handle this, # but until then this is a slightly inaccurate helping hand. def __or__(self, other: int | Self) -> Self: - """Return self|value. -""" + """Return self|value.""" + def __and__(self, other: int | Self) -> Self: ... def __xor__(self, other: int | Self) -> Self: ... __ror__ = __or__ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi index 2fc54d52889f1..386c055ae2c4e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/errno.pyi @@ -11,6 +11,7 @@ Symbols that are not relevant to the underlying system are not defined. To map error codes to error messages, use the function os.strerror(), e.g. os.strerror(2) could return 'No such file or directory'. """ + import sys from collections.abc import Mapping from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi index a4b630709ac4d..561223b39b94d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/faulthandler.pyi @@ -1,46 +1,40 @@ -"""faulthandler module. -""" +"""faulthandler module.""" + import sys from _typeshed import FileDescriptorLike def cancel_dump_traceback_later() -> None: - """Cancel the previous call to dump_traceback_later(). -""" + """Cancel the previous call to dump_traceback_later().""" + def disable() -> None: - """Disable the fault handler. -""" + """Disable the fault handler.""" + def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: - """Dump the traceback of the current thread, or of all threads if all_threads is True, into file. -""" + """Dump the traceback of the current thread, or of all threads if all_threads is True, into file.""" if sys.version_info >= (3, 14): def dump_c_stack(file: FileDescriptorLike = ...) -> None: - """Dump the C stack of the current thread. 
-""" + """Dump the C stack of the current thread.""" def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: """Dump the traceback of all threads in timeout seconds, -or each timeout seconds if repeat is True. If exit is True, call _exit(1) which is not safe. -""" + or each timeout seconds if repeat is True. If exit is True, call _exit(1) which is not safe. + """ if sys.version_info >= (3, 14): def enable(file: FileDescriptorLike = ..., all_threads: bool = ..., c_stack: bool = True) -> None: - """Enable the fault handler. -""" + """Enable the fault handler.""" else: def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: - """Enable the fault handler. -""" + """Enable the fault handler.""" def is_enabled() -> bool: - """Check if the handler is enabled. -""" + """Check if the handler is enabled.""" if sys.platform != "win32": def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: - """Register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file. -""" + """Register a handler for the signal 'signum': dump the traceback of the current thread, or of all threads if all_threads is True, into file.""" + def unregister(signum: int, /) -> None: - """Unregister the handler of the signal 'signum' registered by register(). -""" + """Unregister the handler of the signal 'signum' registered by register().""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi index 714dd0257a336..561e6585c8975 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fcntl.pyi @@ -3,6 +3,7 @@ descriptors. It is an interface to the fcntl() and ioctl() Unix routines. File descriptors can be obtained with the fileno() method of a file or socket object. """ + import sys from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer from typing import Any, Final, Literal, overload @@ -145,16 +146,17 @@ if sys.platform != "win32": def fcntl(fd: FileDescriptorLike, cmd: int, arg: int = 0, /) -> int: """Perform the operation `cmd` on file descriptor fd. -The values used for `cmd` are operating system dependent, and are available -as constants in the fcntl module, using the same names as used in -the relevant C header files. The argument arg is optional, and -defaults to 0; it may be an int or a string. If arg is given as a string, -the return value of fcntl is a string of that length, containing the -resulting value put in the arg buffer by the operating system. The length -of the arg string is not allowed to exceed 1024 bytes. If the arg given -is an integer or if none is specified, the result value is an integer -corresponding to the return value of the fcntl call in the C code. -""" + The values used for `cmd` are operating system dependent, and are available + as constants in the fcntl module, using the same names as used in + the relevant C header files. The argument arg is optional, and + defaults to 0; it may be an int or a string. If arg is given as a string, + the return value of fcntl is a string of that length, containing the + resulting value put in the arg buffer by the operating system. The length + of the arg string is not allowed to exceed 1024 bytes. 
If the arg given + is an integer or if none is specified, the result value is an integer + corresponding to the return value of the fcntl call in the C code. + """ + @overload def fcntl(fd: FileDescriptorLike, cmd: int, arg: str | ReadOnlyBuffer, /) -> bytes: ... # If arg is an int, return int @@ -162,33 +164,33 @@ corresponding to the return value of the fcntl call in the C code. def ioctl(fd: FileDescriptorLike, request: int, arg: int = 0, mutate_flag: bool = True, /) -> int: """Perform the operation `request` on file descriptor `fd`. -The values used for `request` are operating system dependent, and are available -as constants in the fcntl or termios library modules, using the same names as -used in the relevant C header files. - -The argument `arg` is optional, and defaults to 0; it may be an int or a -buffer containing character data (most likely a string or an array). - -If the argument is a mutable buffer (such as an array) and if the -mutate_flag argument (which is only allowed in this case) is true then the -buffer is (in effect) passed to the operating system and changes made by -the OS will be reflected in the contents of the buffer after the call has -returned. The return value is the integer returned by the ioctl system -call. - -If the argument is a mutable buffer and the mutable_flag argument is false, -the behavior is as if a string had been passed. - -If the argument is an immutable buffer (most likely a string) then a copy -of the buffer is passed to the operating system and the return value is a -string of the same length containing whatever the operating system put in -the buffer. The length of the arg buffer in this case is not allowed to -exceed 1024 bytes. - -If the arg given is an integer or if none is specified, the result value is -an integer corresponding to the return value of the ioctl call in the C -code. -""" + The values used for `request` are operating system dependent, and are available + as constants in the fcntl or termios library modules, using the same names as + used in the relevant C header files. + + The argument `arg` is optional, and defaults to 0; it may be an int or a + buffer containing character data (most likely a string or an array). + + If the argument is a mutable buffer (such as an array) and if the + mutate_flag argument (which is only allowed in this case) is true then the + buffer is (in effect) passed to the operating system and changes made by + the OS will be reflected in the contents of the buffer after the call has + returned. The return value is the integer returned by the ioctl system + call. + + If the argument is a mutable buffer and the mutable_flag argument is false, + the behavior is as if a string had been passed. + + If the argument is an immutable buffer (most likely a string) then a copy + of the buffer is passed to the operating system and the return value is a + string of the same length containing whatever the operating system put in + the buffer. The length of the arg buffer in this case is not allowed to + exceed 1024 bytes. + + If the arg given is an integer or if none is specified, the result value is + an integer corresponding to the return value of the ioctl call in the C + code. + """ # The return type works as follows: # - If arg is a read-write buffer, return int if mutate_flag is True, otherwise bytes # - If arg is a read-only buffer, return bytes (and ignore the value of mutate_flag) @@ -203,30 +205,31 @@ code. 
def flock(fd: FileDescriptorLike, operation: int, /) -> None: """Perform the lock operation `operation` on file descriptor `fd`. -See the Unix manual page for flock(2) for details (On some systems, this -function is emulated using fcntl()). -""" + See the Unix manual page for flock(2) for details (On some systems, this + function is emulated using fcntl()). + """ + def lockf(fd: FileDescriptorLike, cmd: int, len: int = 0, start: int = 0, whence: int = 0, /) -> Any: """A wrapper around the fcntl() locking calls. -`fd` is the file descriptor of the file to lock or unlock, and operation is one -of the following values: + `fd` is the file descriptor of the file to lock or unlock, and operation is one + of the following values: - LOCK_UN - unlock - LOCK_SH - acquire a shared lock - LOCK_EX - acquire an exclusive lock + LOCK_UN - unlock + LOCK_SH - acquire a shared lock + LOCK_EX - acquire an exclusive lock -When operation is LOCK_SH or LOCK_EX, it can also be bitwise ORed with -LOCK_NB to avoid blocking on lock acquisition. If LOCK_NB is used and the -lock cannot be acquired, an OSError will be raised and the exception will -have an errno attribute set to EACCES or EAGAIN (depending on the operating -system -- for portability, check for either value). + When operation is LOCK_SH or LOCK_EX, it can also be bitwise ORed with + LOCK_NB to avoid blocking on lock acquisition. If LOCK_NB is used and the + lock cannot be acquired, an OSError will be raised and the exception will + have an errno attribute set to EACCES or EAGAIN (depending on the operating + system -- for portability, check for either value). -`len` is the number of bytes to lock, with the default meaning to lock to -EOF. `start` is the byte offset, relative to `whence`, to that the lock -starts. `whence` is as with fileobj.seek(), specifically: + `len` is the number of bytes to lock, with the default meaning to lock to + EOF. `start` is the byte offset, relative to `whence`, to that the lock + starts. `whence` is as with fileobj.seek(), specifically: - 0 - relative to the start of the file (SEEK_SET) - 1 - relative to the current buffer position (SEEK_CUR) - 2 - relative to the end of the file (SEEK_END) -""" + 0 - relative to the start of the file (SEEK_SET) + 1 - relative to the current buffer position (SEEK_CUR) + 2 - relative to the end of the file (SEEK_END) + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi index d0ab8bfeda6ae..33e29bcc32775 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/filecmp.pyi @@ -9,6 +9,7 @@ Functions: clear_cache() """ + import sys from _typeshed import GenericPath, StrOrBytesPath from collections.abc import Callable, Iterable, Sequence @@ -23,79 +24,81 @@ BUFSIZE: Final = 8192 def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: """Compare two files. -Arguments: + Arguments: -f1 -- First file name + f1 -- First file name -f2 -- Second file name + f2 -- Second file name -shallow -- treat files as identical if their stat signatures (type, size, - mtime) are identical. Otherwise, files are considered different - if their sizes or contents differ. [default: True] + shallow -- treat files as identical if their stat signatures (type, size, + mtime) are identical. Otherwise, files are considered different + if their sizes or contents differ. 
[default: True] -Return value: + Return value: -True if the files are the same, False otherwise. + True if the files are the same, False otherwise. -This function uses a cache for past comparisons and the results, -with cache entries invalidated if their stat information -changes. The cache may be cleared by calling clear_cache(). + This function uses a cache for past comparisons and the results, + with cache entries invalidated if their stat information + changes. The cache may be cleared by calling clear_cache(). + + """ -""" def cmpfiles( a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True ) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: """Compare common files in two directories. -a, b -- directory names -common -- list of file names found in both directories -shallow -- if true, do comparison based solely on stat() information + a, b -- directory names + common -- list of file names found in both directories + shallow -- if true, do comparison based solely on stat() information -Returns a tuple of three lists: - files that compare equal - files that are different - filenames that aren't regular files. + Returns a tuple of three lists: + files that compare equal + files that are different + filenames that aren't regular files. -""" + """ class dircmp(Generic[AnyStr]): """A class that manages the comparison of 2 directories. -dircmp(a, b, ignore=None, hide=None, *, shallow=True) - A and B are directories. - IGNORE is a list of names to ignore, - defaults to DEFAULT_IGNORES. - HIDE is a list of names to hide, - defaults to [os.curdir, os.pardir]. - SHALLOW specifies whether to just check the stat signature (do not read - the files). - defaults to True. - -High level usage: - x = dircmp(dir1, dir2) - x.report() -> prints a report on the differences between dir1 and dir2 - or - x.report_partial_closure() -> prints report on differences between dir1 - and dir2, and reports on common immediate subdirectories. - x.report_full_closure() -> like report_partial_closure, - but fully recursive. - -Attributes: - left_list, right_list: The files in dir1 and dir2, - filtered by hide and ignore. - common: a list of names in both dir1 and dir2. - left_only, right_only: names only in dir1, dir2. - common_dirs: subdirectories in both dir1 and dir2. - common_files: files in both dir1 and dir2. - common_funny: names in both dir1 and dir2 where the type differs between - dir1 and dir2, or the name is not stat-able. - same_files: list of identical files. - diff_files: list of filenames which differ. - funny_files: list of files which could not be compared. - subdirs: a dictionary of dircmp instances (or MyDirCmp instances if this - object is of type MyDirCmp, a subclass of dircmp), keyed by names - in common_dirs. - """ + dircmp(a, b, ignore=None, hide=None, *, shallow=True) + A and B are directories. + IGNORE is a list of names to ignore, + defaults to DEFAULT_IGNORES. + HIDE is a list of names to hide, + defaults to [os.curdir, os.pardir]. + SHALLOW specifies whether to just check the stat signature (do not read + the files). + defaults to True. + + High level usage: + x = dircmp(dir1, dir2) + x.report() -> prints a report on the differences between dir1 and dir2 + or + x.report_partial_closure() -> prints report on differences between dir1 + and dir2, and reports on common immediate subdirectories. + x.report_full_closure() -> like report_partial_closure, + but fully recursive. 
+ + Attributes: + left_list, right_list: The files in dir1 and dir2, + filtered by hide and ignore. + common: a list of names in both dir1 and dir2. + left_only, right_only: names only in dir1, dir2. + common_dirs: subdirectories in both dir1 and dir2. + common_files: files in both dir1 and dir2. + common_funny: names in both dir1 and dir2 where the type differs between + dir1 and dir2, or the name is not stat-able. + same_files: list of identical files. + diff_files: list of filenames which differ. + funny_files: list of files which could not be compared. + subdirs: a dictionary of dircmp instances (or MyDirCmp instances if this + object is of type MyDirCmp, a subclass of dircmp), keyed by names + in common_dirs. + """ + if sys.version_info >= (3, 13): def __init__( self, @@ -144,9 +147,8 @@ Attributes: def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def clear_cache() -> None: - """Clear the filecmp cache. -""" + """Clear the filecmp cache.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi index 6a689ed6b3a1c..9959c68f10ed5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fileinput.pyi @@ -64,6 +64,7 @@ deleted when the output file is closed. In-place filtering is disabled when standard input is read. XXX The current implementation does not work for MS-DOS 8+3 filesystems. """ + import sys from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable @@ -111,10 +112,11 @@ if sys.version_info >= (3, 10): ) -> FileInput[str]: """Return an instance of the FileInput class, which can be iterated. -The parameters are passed to the constructor of the FileInput class. -The returned instance, in addition to being an iterator, -keeps global state for the functions of this module,. -""" + The parameters are passed to the constructor of the FileInput class. + The returned instance, in addition to being an iterator, + keeps global state for the functions of this module,. + """ + @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -151,10 +153,11 @@ else: ) -> FileInput[str]: """Return an instance of the FileInput class, which can be iterated. - The parameters are passed to the constructor of the FileInput class. - The returned instance, in addition to being an iterator, - keeps global state for the functions of this module,. - """ + The parameters are passed to the constructor of the FileInput class. + The returned instance, in addition to being an iterator, + keeps global state for the functions of this module,. + """ + @overload def input( files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, @@ -175,63 +178,70 @@ else: ) -> FileInput[Any]: ... def close() -> None: - """Close the sequence. -""" + """Close the sequence.""" + def nextfile() -> None: """ -Close the current file so that the next iteration will read the first -line from the next file (if any); lines not read from the file will -not count towards the cumulative line count. The filename is not -changed until after the first line of the next file has been read. -Before the first line has been read, this function has no effect; -it cannot be used to skip the first file. 
After the last line of the -last file has been read, this function has no effect. -""" + Close the current file so that the next iteration will read the first + line from the next file (if any); lines not read from the file will + not count towards the cumulative line count. The filename is not + changed until after the first line of the next file has been read. + Before the first line has been read, this function has no effect; + it cannot be used to skip the first file. After the last line of the + last file has been read, this function has no effect. + """ + def filename() -> str: """ -Return the name of the file currently being read. -Before the first line has been read, returns None. -""" + Return the name of the file currently being read. + Before the first line has been read, returns None. + """ + def lineno() -> int: """ -Return the cumulative line number of the line that has just been read. -Before the first line has been read, returns 0. After the last line -of the last file has been read, returns the line number of that line. -""" + Return the cumulative line number of the line that has just been read. + Before the first line has been read, returns 0. After the last line + of the last file has been read, returns the line number of that line. + """ + def filelineno() -> int: """ -Return the line number in the current file. Before the first line -has been read, returns 0. After the last line of the last file has -been read, returns the line number of that line within the file. -""" + Return the line number in the current file. Before the first line + has been read, returns 0. After the last line of the last file has + been read, returns the line number of that line within the file. + """ + def fileno() -> int: """ -Return the file number of the current file. When no file is currently -opened, returns -1. -""" + Return the file number of the current file. When no file is currently + opened, returns -1. + """ + def isfirstline() -> bool: """ -Returns true the line just read is the first line of its file, -otherwise returns false. -""" + Returns true the line just read is the first line of its file, + otherwise returns false. + """ + def isstdin() -> bool: """ -Returns true if the last line was read from sys.stdin, -otherwise returns false. -""" + Returns true if the last line was read from sys.stdin, + otherwise returns false. + """ class FileInput(Generic[AnyStr]): """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None) -Class FileInput is the implementation of the module; its methods -filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(), -nextfile() and close() correspond to the functions of the same name -in the module. -In addition it has a readline() method which returns the next -input line, and a __getitem__() method which implements the -sequence behavior. The sequence must be accessed in strictly -sequential order; random access and readline() cannot be mixed. -""" + Class FileInput is the implementation of the module; its methods + filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(), + nextfile() and close() correspond to the functions of the same name + in the module. + In addition it has a readline() method which returns the next + input line, and a __getitem__() method which implements the + sequence behavior. The sequence must be accessed in strictly + sequential order; random access and readline() cannot be mixed. 
+ """ + if sys.version_info >= (3, 10): # encoding and errors are added @overload @@ -326,8 +336,8 @@ sequential order; random access and readline() cannot be mixed. def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ if sys.version_info >= (3, 10): def hook_compressed( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi index 7d17b35450b5f..bea649b871061 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fnmatch.pyi @@ -9,6 +9,7 @@ expression. They cache the compiled regular expressions for speed. The function translate(PATTERN) returns a regular expression corresponding to PATTERN. (It does not compile it.) """ + import sys from collections.abc import Iterable from typing import AnyStr @@ -20,34 +21,35 @@ if sys.version_info >= (3, 14): def fnmatch(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN. -Patterns are Unix shell style: + Patterns are Unix shell style: -* matches everything -? matches any single character -[seq] matches any character in seq -[!seq] matches any char not in seq + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + An initial period in FILENAME is not special. + Both FILENAME and PATTERN are first case-normalized + if the operating system requires it. + If you don't want this, use fnmatchcase(FILENAME, PATTERN). + """ -An initial period in FILENAME is not special. -Both FILENAME and PATTERN are first case-normalized -if the operating system requires it. -If you don't want this, use fnmatchcase(FILENAME, PATTERN). -""" def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: """Test whether FILENAME matches PATTERN, including case. -This is a version of fnmatch() which doesn't case-normalize -its arguments. -""" + This is a version of fnmatch() which doesn't case-normalize + its arguments. + """ + def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: - """Construct a list from those elements of the iterable NAMES that match PAT. -""" + """Construct a list from those elements of the iterable NAMES that match PAT.""" + def translate(pat: str) -> str: """Translate a shell PATTERN to a regular expression. -There is no way to quote meta-characters. -""" + There is no way to quote meta-characters. + """ if sys.version_info >= (3, 14): def filterfalse(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: - """Construct a list from those elements of the iterable NAMES that do not match PAT. -""" + """Construct a list from those elements of the iterable NAMES that do not match PAT.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi index a7347732f67a5..56176d6dea426 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/formatter.pyi @@ -17,6 +17,7 @@ implementations all work with abstract devices. The interface makes available mechanisms for setting the properties which formatter objects manage and inserting data into the output. 
""" + from collections.abc import Iterable from typing import IO, Any from typing_extensions import TypeAlias @@ -35,6 +36,7 @@ class NullFormatter: interface but don't need to inherit any implementation. """ + writer: NullWriter | None def __init__(self, writer: NullWriter | None = None) -> None: ... def end_paragraph(self, blankline: int) -> None: ... @@ -63,6 +65,7 @@ class AbstractFormatter: implement a full-featured World Wide Web browser. """ + writer: NullWriter align: str | None align_stack: list[str | None] @@ -106,6 +109,7 @@ class NullWriter: which do not need to inherit any implementation methods. """ + def flush(self) -> None: ... def new_alignment(self, align: str | None) -> None: ... def new_font(self, font: _FontType) -> None: ... @@ -135,6 +139,7 @@ class DumbWriter(NullWriter): of paragraphs. """ + file: IO[str] maxcol: int def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi index ed2177505f362..07e88ca468ca9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/fractions.pyi @@ -1,5 +1,5 @@ -"""Fraction, infinite-precision, rational numbers. -""" +"""Fraction, infinite-precision, rational numbers.""" + import sys from collections.abc import Callable from decimal import Decimal @@ -18,56 +18,58 @@ class _ConvertibleToIntegerRatio(Protocol): class Fraction(Rational): """This class implements rational numbers. -In the two-argument form of the constructor, Fraction(8, 6) will -produce a rational number equivalent to 4/3. Both arguments must -be Rational. The numerator defaults to 0 and the denominator -defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. + In the two-argument form of the constructor, Fraction(8, 6) will + produce a rational number equivalent to 4/3. Both arguments must + be Rational. The numerator defaults to 0 and the denominator + defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. + + Fractions can also be constructed from: -Fractions can also be constructed from: + - numeric strings similar to those accepted by the + float constructor (for example, '-2.3' or '1e10') - - numeric strings similar to those accepted by the - float constructor (for example, '-2.3' or '1e10') + - strings of the form '123/456' - - strings of the form '123/456' + - float and Decimal instances - - float and Decimal instances + - other Rational instances (including integers) - - other Rational instances (including integers) + """ -""" __slots__ = ("_numerator", "_denominator") @overload def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: """Constructs a Rational. -Takes a string like '3/2' or '1.5', another Rational instance, a -numerator/denominator pair, or a float. 
- -Examples --------- - ->>> Fraction(10, -8) -Fraction(-5, 4) ->>> Fraction(Fraction(1, 7), 5) -Fraction(1, 35) ->>> Fraction(Fraction(1, 7), Fraction(2, 3)) -Fraction(3, 14) ->>> Fraction('314') -Fraction(314, 1) ->>> Fraction('-35/4') -Fraction(-35, 4) ->>> Fraction('3.1415') # conversion from numeric string -Fraction(6283, 2000) ->>> Fraction('-47e-2') # string may include a decimal exponent -Fraction(-47, 100) ->>> Fraction(1.47) # direct construction from float (exact conversion) -Fraction(6620291452234629, 4503599627370496) ->>> Fraction(2.25) -Fraction(9, 4) ->>> Fraction(Decimal('1.47')) -Fraction(147, 100) - -""" + Takes a string like '3/2' or '1.5', another Rational instance, a + numerator/denominator pair, or a float. + + Examples + -------- + + >>> Fraction(10, -8) + Fraction(-5, 4) + >>> Fraction(Fraction(1, 7), 5) + Fraction(1, 35) + >>> Fraction(Fraction(1, 7), Fraction(2, 3)) + Fraction(3, 14) + >>> Fraction('314') + Fraction(314, 1) + >>> Fraction('-35/4') + Fraction(-35, 4) + >>> Fraction('3.1415') # conversion from numeric string + Fraction(6283, 2000) + >>> Fraction('-47e-2') # string may include a decimal exponent + Fraction(-47, 100) + >>> Fraction(1.47) # direct construction from float (exact conversion) + Fraction(6620291452234629, 4503599627370496) + >>> Fraction(2.25) + Fraction(9, 4) + >>> Fraction(Decimal('1.47')) + Fraction(147, 100) + + """ + @overload def __new__(cls, numerator: float | Decimal | str) -> Self: ... @@ -76,66 +78,67 @@ Fraction(147, 100) def __new__(cls, numerator: _ConvertibleToIntegerRatio) -> Self: """Constructs a Rational. -Takes a string like '3/2' or '1.5', another Rational instance, a -numerator/denominator pair, or a float. - -Examples --------- - ->>> Fraction(10, -8) -Fraction(-5, 4) ->>> Fraction(Fraction(1, 7), 5) -Fraction(1, 35) ->>> Fraction(Fraction(1, 7), Fraction(2, 3)) -Fraction(3, 14) ->>> Fraction('314') -Fraction(314, 1) ->>> Fraction('-35/4') -Fraction(-35, 4) ->>> Fraction('3.1415') # conversion from numeric string -Fraction(6283, 2000) ->>> Fraction('-47e-2') # string may include a decimal exponent -Fraction(-47, 100) ->>> Fraction(1.47) # direct construction from float (exact conversion) -Fraction(6620291452234629, 4503599627370496) ->>> Fraction(2.25) -Fraction(9, 4) ->>> Fraction(Decimal('1.47')) -Fraction(147, 100) - -""" + Takes a string like '3/2' or '1.5', another Rational instance, a + numerator/denominator pair, or a float. + + Examples + -------- + + >>> Fraction(10, -8) + Fraction(-5, 4) + >>> Fraction(Fraction(1, 7), 5) + Fraction(1, 35) + >>> Fraction(Fraction(1, 7), Fraction(2, 3)) + Fraction(3, 14) + >>> Fraction('314') + Fraction(314, 1) + >>> Fraction('-35/4') + Fraction(-35, 4) + >>> Fraction('3.1415') # conversion from numeric string + Fraction(6283, 2000) + >>> Fraction('-47e-2') # string may include a decimal exponent + Fraction(-47, 100) + >>> Fraction(1.47) # direct construction from float (exact conversion) + Fraction(6620291452234629, 4503599627370496) + >>> Fraction(2.25) + Fraction(9, 4) + >>> Fraction(Decimal('1.47')) + Fraction(147, 100) + + """ @classmethod def from_float(cls, f: float) -> Self: """Converts a finite float to a rational number, exactly. -Beware that Fraction.from_float(0.3) != Fraction(3, 10). + Beware that Fraction.from_float(0.3) != Fraction(3, 10). + + """ -""" @classmethod def from_decimal(cls, dec: Decimal) -> Self: - """Converts a finite Decimal instance to a rational number, exactly. 
-""" + """Converts a finite Decimal instance to a rational number, exactly.""" + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: """Closest Fraction to self with denominator at most max_denominator. ->>> Fraction('3.141592653589793').limit_denominator(10) -Fraction(22, 7) ->>> Fraction('3.141592653589793').limit_denominator(100) -Fraction(311, 99) ->>> Fraction(4321, 8765).limit_denominator(10000) -Fraction(4321, 8765) + >>> Fraction('3.141592653589793').limit_denominator(10) + Fraction(22, 7) + >>> Fraction('3.141592653589793').limit_denominator(100) + Fraction(311, 99) + >>> Fraction(4321, 8765).limit_denominator(10000) + Fraction(4321, 8765) + + """ -""" def as_integer_ratio(self) -> tuple[int, int]: """Return a pair of integers, whose ratio is equal to the original Fraction. -The ratio is in lowest terms and has a positive denominator. -""" + The ratio is in lowest terms and has a positive denominator. + """ if sys.version_info >= (3, 12): def is_integer(self) -> bool: - """Return True if the Fraction is an integer. -""" + """Return True if the Fraction is an integer.""" @property def numerator(a) -> int: ... @@ -143,102 +146,102 @@ The ratio is in lowest terms and has a positive denominator. def denominator(a) -> int: ... @overload def __add__(a, b: int | Fraction) -> Fraction: - """a + b -""" + """a + b""" + @overload def __add__(a, b: float) -> float: ... @overload def __add__(a, b: complex) -> complex: ... @overload def __radd__(b, a: int | Fraction) -> Fraction: - """a + b -""" + """a + b""" + @overload def __radd__(b, a: float) -> float: ... @overload def __radd__(b, a: complex) -> complex: ... @overload def __sub__(a, b: int | Fraction) -> Fraction: - """a - b -""" + """a - b""" + @overload def __sub__(a, b: float) -> float: ... @overload def __sub__(a, b: complex) -> complex: ... @overload def __rsub__(b, a: int | Fraction) -> Fraction: - """a - b -""" + """a - b""" + @overload def __rsub__(b, a: float) -> float: ... @overload def __rsub__(b, a: complex) -> complex: ... @overload def __mul__(a, b: int | Fraction) -> Fraction: - """a * b -""" + """a * b""" + @overload def __mul__(a, b: float) -> float: ... @overload def __mul__(a, b: complex) -> complex: ... @overload def __rmul__(b, a: int | Fraction) -> Fraction: - """a * b -""" + """a * b""" + @overload def __rmul__(b, a: float) -> float: ... @overload def __rmul__(b, a: complex) -> complex: ... @overload def __truediv__(a, b: int | Fraction) -> Fraction: - """a / b -""" + """a / b""" + @overload def __truediv__(a, b: float) -> float: ... @overload def __truediv__(a, b: complex) -> complex: ... @overload def __rtruediv__(b, a: int | Fraction) -> Fraction: - """a / b -""" + """a / b""" + @overload def __rtruediv__(b, a: float) -> float: ... @overload def __rtruediv__(b, a: complex) -> complex: ... @overload def __floordiv__(a, b: int | Fraction) -> int: - """a // b -""" + """a // b""" + @overload def __floordiv__(a, b: float) -> float: ... @overload def __rfloordiv__(b, a: int | Fraction) -> int: - """a // b -""" + """a // b""" + @overload def __rfloordiv__(b, a: float) -> float: ... @overload def __mod__(a, b: int | Fraction) -> Fraction: - """a % b -""" + """a % b""" + @overload def __mod__(a, b: float) -> float: ... @overload def __rmod__(b, a: int | Fraction) -> Fraction: - """a % b -""" + """a % b""" + @overload def __rmod__(b, a: float) -> float: ... 
@overload def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: - """(a // b, a % b) -""" + """(a // b, a % b)""" + @overload def __divmod__(a, b: float) -> tuple[float, Fraction]: ... @overload def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: - """(a // b, a % b) -""" + """(a // b, a % b)""" + @overload def __rdivmod__(a, b: float) -> tuple[float, Fraction]: ... if sys.version_info >= (3, 14): @@ -246,11 +249,12 @@ The ratio is in lowest terms and has a positive denominator. def __pow__(a, b: int, modulo: None = None) -> Fraction: """a ** b -If b is not an integer, the result will be a float or complex -since roots are generally irrational. If b is an integer, the -result will be rational. + If b is not an integer, the result will be a float or complex + since roots are generally irrational. If b is an integer, the + result will be rational. + + """ -""" @overload def __pow__(a, b: float | Fraction, modulo: None = None) -> float: ... @overload @@ -260,11 +264,12 @@ result will be rational. def __pow__(a, b: int) -> Fraction: """a ** b -If b is not an integer, the result will be a float or complex -since roots are generally irrational. If b is an integer, the -result will be rational. + If b is not an integer, the result will be a float or complex + since roots are generally irrational. If b is an integer, the + result will be rational. + + """ -""" @overload def __pow__(a, b: float | Fraction) -> float: ... @overload @@ -272,89 +277,88 @@ result will be rational. if sys.version_info >= (3, 14): @overload def __rpow__(b, a: float | Fraction, modulo: None = None) -> float: - """a ** b -""" + """a ** b""" + @overload def __rpow__(b, a: complex, modulo: None = None) -> complex: ... else: @overload def __rpow__(b, a: float | Fraction) -> float: - """a ** b -""" + """a ** b""" + @overload def __rpow__(b, a: complex) -> complex: ... def __pos__(a) -> Fraction: - """+a: Coerces a subclass instance to Fraction -""" + """+a: Coerces a subclass instance to Fraction""" + def __neg__(a) -> Fraction: - """-a -""" + """-a""" + def __abs__(a) -> Fraction: - """abs(a) -""" + """abs(a)""" + def __trunc__(a) -> int: - """math.trunc(a) -""" + """math.trunc(a)""" + def __floor__(a) -> int: - """math.floor(a) -""" + """math.floor(a)""" + def __ceil__(a) -> int: - """math.ceil(a) -""" + """math.ceil(a)""" + @overload def __round__(self, ndigits: None = None) -> int: """round(self, ndigits) -Rounds half toward even. -""" + Rounds half toward even. + """ + @overload def __round__(self, ndigits: int) -> Fraction: ... def __hash__(self) -> int: # type: ignore[override] - """hash(self) -""" + """hash(self)""" + def __eq__(a, b: object) -> bool: - """a == b -""" + """a == b""" + def __lt__(a, b: _ComparableNum) -> bool: - """a < b -""" + """a < b""" + def __gt__(a, b: _ComparableNum) -> bool: - """a > b -""" + """a > b""" + def __le__(a, b: _ComparableNum) -> bool: - """a <= b -""" + """a <= b""" + def __ge__(a, b: _ComparableNum) -> bool: - """a >= b -""" + """a >= b""" + def __bool__(a) -> bool: - """a != 0 -""" + """a != 0""" + def __copy__(self) -> Self: ... def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: - """int(a) -""" + """int(a)""" # Not actually defined within fractions.py, but provides more useful # overrides @property def real(self) -> Fraction: - """Real numbers are their real component. 
-""" + """Real numbers are their real component.""" + @property def imag(self) -> Literal[0]: - """Real numbers have no imaginary component. -""" + """Real numbers have no imaginary component.""" + def conjugate(self) -> Fraction: - """Conjugate is a no-op for Reals. -""" + """Conjugate is a no-op for Reals.""" if sys.version_info >= (3, 14): @classmethod def from_number(cls, number: float | Rational | _ConvertibleToIntegerRatio) -> Self: """Converts a finite real number to a rational number, exactly. -Beware that Fraction.from_number(0.3) != Fraction(3, 10). + Beware that Fraction.from_number(0.3) != Fraction(3, 10). -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi index 94cde6eac5d15..229f9cce34675 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ftplib.pyi @@ -27,6 +27,7 @@ drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr A nice test that reveals some of the network dialogue would be: python ftplib.py -d localhost -l -p -l """ + import sys from _typeshed import SupportsRead, SupportsReadline from collections.abc import Callable, Iterable, Iterator @@ -55,26 +56,27 @@ all_errors: tuple[type[Exception], ...] class FTP: """An FTP client class. -To create a connection, call the class using these arguments: - host, user, passwd, acct, timeout, source_address, encoding - -The first four arguments are all strings, and have default value ''. -The parameter ´timeout´ must be numeric and defaults to None if not -passed, meaning that no timeout will be set on any ftp socket(s). -If a timeout is passed, then this is now the default timeout for all ftp -socket operations for this instance. -The last parameter is the encoding of filenames, which defaults to utf-8. - -Then use self.connect() with optional host and port argument. - -To download a file, use ftp.retrlines('RETR ' + filename), -or ftp.retrbinary() with slightly different arguments. -To upload a file, use ftp.storlines() or ftp.storbinary(), -which have an open file as argument (see their definitions -below for details). -The download/upload functions first issue appropriate TYPE -and PORT or PASV commands. -""" + To create a connection, call the class using these arguments: + host, user, passwd, acct, timeout, source_address, encoding + + The first four arguments are all strings, and have default value ''. + The parameter ´timeout´ must be numeric and defaults to None if not + passed, meaning that no timeout will be set on any ftp socket(s). + If a timeout is passed, then this is now the default timeout for all ftp + socket operations for this instance. + The last parameter is the encoding of filenames, which defaults to utf-8. + + Then use self.connect() with optional host and port argument. + + To download a file, use ftp.retrlines('RETR ' + filename), + or ftp.retrbinary() with slightly different arguments. + To upload a file, use ftp.storlines() or ftp.storbinary(), + which have an open file as argument (see their definitions + below for details). + The download/upload functions first issue appropriate TYPE + and PORT or PASV commands. + """ + debugging: int host: str port: int @@ -104,43 +106,47 @@ and PORT or PASV commands. encoding: str = "utf-8", ) -> None: """Initialization method (called by class instantiation). -Initialize host to localhost, port to standard ftp port. -Optional arguments are host (for connect()), -and user, passwd, acct (for login()). 
-""" - def connect( - self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None - ) -> str: + Initialize host to localhost, port to standard ftp port. + Optional arguments are host (for connect()), + and user, passwd, acct (for login()). + """ + + def connect(self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None) -> str: """Connect to host. Arguments are: -- host: hostname to connect to (string, default previous host) -- port: port to connect to (integer, default previous port) -- timeout: the timeout to set against the ftp socket(s) -- source_address: a 2-tuple (host, port) for the socket to bind - to as its source address before connecting. -""" + - host: hostname to connect to (string, default previous host) + - port: port to connect to (integer, default previous port) + - timeout: the timeout to set against the ftp socket(s) + - source_address: a 2-tuple (host, port) for the socket to bind + to as its source address before connecting. + """ + def getwelcome(self) -> str: """Get the welcome message from the server. -(this is read and squirreled away by connect()) -""" + (this is read and squirreled away by connect()) + """ + def set_debuglevel(self, level: int) -> None: """Set the debugging level. -The required argument level means: -0: no debugging output (default) -1: print commands and responses but not body text etc. -2: also print raw lines read and sent before stripping CR/LF -""" + The required argument level means: + 0: no debugging output (default) + 1: print commands and responses but not body text etc. + 2: also print raw lines read and sent before stripping CR/LF + """ + def debug(self, level: int) -> None: """Set the debugging level. -The required argument level means: -0: no debugging output (default) -1: print commands and responses but not body text etc. -2: also print raw lines read and sent before stripping CR/LF -""" + The required argument level means: + 0: no debugging output (default) + 1: print commands and responses but not body text etc. + 2: also print raw lines read and sent before stripping CR/LF + """ + def set_pasv(self, val: bool | Literal[0, 1]) -> None: """Use passive or active mode for data transfers. -With a false argument, use the normal PORT mode, -With a true argument, use the PASV command. -""" + With a false argument, use the normal PORT mode, + With a true argument, use the PASV command. + """ + def sanitize(self, s: str) -> str: ... def putline(self, line: str) -> None: ... def putcmd(self, line: str) -> None: ... @@ -148,71 +154,74 @@ With a true argument, use the PASV command. def getmultiline(self) -> str: ... def getresp(self) -> str: ... def voidresp(self) -> str: - """Expect a response beginning with '2'. -""" + """Expect a response beginning with '2'.""" + def abort(self) -> str: """Abort a file transfer. Uses out-of-band data. -This does not follow the procedure from the RFC to send Telnet -IP and Synch; that doesn't seem to work with the servers I've -tried. Instead, just send the ABOR command as OOB data. -""" + This does not follow the procedure from the RFC to send Telnet + IP and Synch; that doesn't seem to work with the servers I've + tried. Instead, just send the ABOR command as OOB data. + """ + def sendcmd(self, cmd: str) -> str: - """Send a command and return the response. -""" + """Send a command and return the response.""" + def voidcmd(self, cmd: str) -> str: - """Send a command and expect a response beginning with '2'. 
-""" + """Send a command and expect a response beginning with '2'.""" + def sendport(self, host: str, port: int) -> str: """Send a PORT command with the current host and the given -port number. -""" + port number. + """ + def sendeprt(self, host: str, port: int) -> str: - """Send an EPRT command with the current host and the given port number. -""" + """Send an EPRT command with the current host and the given port number.""" + def makeport(self) -> socket: - """Create a new socket and send a PORT command for it. -""" + """Create a new socket and send a PORT command for it.""" + def makepasv(self) -> tuple[str, int]: - """Internal: Does the PASV or EPSV handshake -> (address, port) -""" + """Internal: Does the PASV or EPSV handshake -> (address, port)""" + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: - """Login, default anonymous. -""" + """Login, default anonymous.""" # In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: """Initiate a transfer over the data connection. -If the transfer is active, send a port command and the -transfer command, and accept the connection. If the server is -passive, send a pasv command, connect to it, and start the -transfer command. Either way, return the socket for the -connection and the expected size of the transfer. The -expected size may be None if it could not be determined. - -Optional 'rest' argument can be a string that is sent as the -argument to a REST command. This is essentially a server -marker used to tell the server to skip over any data up to the -given marker. -""" + If the transfer is active, send a port command and the + transfer command, and accept the connection. If the server is + passive, send a pasv command, connect to it, and start the + transfer command. Either way, return the socket for the + connection and the expected size of the transfer. The + expected size may be None if it could not be determined. + + Optional 'rest' argument can be a string that is sent as the + argument to a REST command. This is essentially a server + marker used to tell the server to skip over any data up to the + given marker. + """ + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: - """Like ntransfercmd() but returns only the socket. -""" + """Like ntransfercmd() but returns only the socket.""" + def retrbinary( self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None ) -> str: """Retrieve data in binary mode. A new port is created for you. -Args: - cmd: A RETR command. - callback: A single parameter callable to be called on each - block of data read. - blocksize: The maximum number of bytes to read from the - socket at one time. [default: 8192] - rest: Passed to transfercmd(). [default: None] + Args: + cmd: A RETR command. + callback: A single parameter callable to be called on each + block of data read. + blocksize: The maximum number of bytes to read from the + socket at one time. [default: 8192] + rest: Passed to transfercmd(). [default: None] + + Returns: + The response code. + """ -Returns: - The response code. -""" def storbinary( self, cmd: str, @@ -223,129 +232,133 @@ Returns: ) -> str: """Store a file in binary mode. A new port is created for you. -Args: - cmd: A STOR command. - fp: A file-like object with a read(num_bytes) method. 
- blocksize: The maximum data size to read from fp and send over - the connection at once. [default: 8192] - callback: An optional single parameter callable that is called on - each block of data after it is sent. [default: None] - rest: Passed to transfercmd(). [default: None] - -Returns: - The response code. -""" + Args: + cmd: A STOR command. + fp: A file-like object with a read(num_bytes) method. + blocksize: The maximum data size to read from fp and send over + the connection at once. [default: 8192] + callback: An optional single parameter callable that is called on + each block of data after it is sent. [default: None] + rest: Passed to transfercmd(). [default: None] + + Returns: + The response code. + """ + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: """Retrieve data in line mode. A new port is created for you. -Args: - cmd: A RETR, LIST, or NLST command. - callback: An optional single parameter callable that is called - for each line with the trailing CRLF stripped. - [default: print_line()] + Args: + cmd: A RETR, LIST, or NLST command. + callback: An optional single parameter callable that is called + for each line with the trailing CRLF stripped. + [default: print_line()] + + Returns: + The response code. + """ -Returns: - The response code. -""" def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: """Store a file in line mode. A new port is created for you. -Args: - cmd: A STOR command. - fp: A file-like object with a readline() method. - callback: An optional single parameter callable that is called on - each line after it is sent. [default: None] + Args: + cmd: A STOR command. + fp: A file-like object with a readline() method. + callback: An optional single parameter callable that is called on + each line after it is sent. [default: None] + + Returns: + The response code. + """ -Returns: - The response code. -""" def acct(self, password: str) -> str: - """Send new account name. -""" + """Send new account name.""" + def nlst(self, *args: str) -> list[str]: - """Return a list of files in a given directory (default the current). -""" + """Return a list of files in a given directory (default the current).""" # Technically only the last arg can be a Callable but ... def dir(self, *args: str | Callable[[str], object]) -> None: """List a directory in long form. -By default list current directory to stdout. -Optional last argument is callback function; all -non-empty arguments before it are concatenated to the -LIST command. (This *should* only be used for a pathname.) -""" + By default list current directory to stdout. + Optional last argument is callback function; all + non-empty arguments before it are concatenated to the + LIST command. (This *should* only be used for a pathname.) + """ + def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: """List a directory in a standardized format by using MLSD -command (RFC-3659). If path is omitted the current directory -is assumed. "facts" is a list of strings representing the type -of information desired (e.g. ["type", "size", "perm"]). - -Return a generator object yielding a tuple of two elements -for every file found in path. -First element is the file name, the second one is a dictionary -including a variable number of "facts" depending on the server -and whether "facts" argument has been provided. -""" + command (RFC-3659). If path is omitted the current directory + is assumed. 
"facts" is a list of strings representing the type + of information desired (e.g. ["type", "size", "perm"]). + + Return a generator object yielding a tuple of two elements + for every file found in path. + First element is the file name, the second one is a dictionary + including a variable number of "facts" depending on the server + and whether "facts" argument has been provided. + """ + def rename(self, fromname: str, toname: str) -> str: - """Rename a file. -""" + """Rename a file.""" + def delete(self, filename: str) -> str: - """Delete a file. -""" + """Delete a file.""" + def cwd(self, dirname: str) -> str: - """Change to a directory. -""" + """Change to a directory.""" + def size(self, filename: str) -> int | None: - """Retrieve the size of a file. -""" + """Retrieve the size of a file.""" + def mkd(self, dirname: str) -> str: - """Make a directory, return its full pathname. -""" + """Make a directory, return its full pathname.""" + def rmd(self, dirname: str) -> str: - """Remove a directory. -""" + """Remove a directory.""" + def pwd(self) -> str: - """Return current working directory. -""" + """Return current working directory.""" + def quit(self) -> str: - """Quit, and close the connection. -""" + """Quit, and close the connection.""" + def close(self) -> None: - """Close the connection without assuming anything about it. -""" + """Close the connection without assuming anything about it.""" class FTP_TLS(FTP): """A FTP subclass which adds TLS support to FTP as described -in RFC-4217. + in RFC-4217. -Connect as usual to port 21 implicitly securing the FTP control -connection before authenticating. + Connect as usual to port 21 implicitly securing the FTP control + connection before authenticating. -Securing the data connection requires user to explicitly ask -for it by calling prot_p() method. + Securing the data connection requires user to explicitly ask + for it by calling prot_p() method. + + Usage example: + >>> from ftplib import FTP_TLS + >>> ftps = FTP_TLS('ftp.python.org') + >>> ftps.login() # login anonymously previously securing control channel + '230 Guest login ok, access restrictions apply.' + >>> ftps.prot_p() # switch to secure data connection + '200 Protection level set to P' + >>> ftps.retrlines('LIST') # list directory content securely + total 9 + drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . + drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. + drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin + drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc + d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming + drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib + drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub + drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr + -rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg + '226 Transfer complete.' + >>> ftps.quit() + '221 Goodbye.' + >>> + """ -Usage example: ->>> from ftplib import FTP_TLS ->>> ftps = FTP_TLS('ftp.python.org') ->>> ftps.login() # login anonymously previously securing control channel -'230 Guest login ok, access restrictions apply.' ->>> ftps.prot_p() # switch to secure data connection -'200 Protection level set to P' ->>> ftps.retrlines('LIST') # list directory content securely -total 9 -drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . -drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. 
-drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin -drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc -d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming -drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib -drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub -drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr --rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg -'226 Transfer complete.' ->>> ftps.quit() -'221 Goodbye.' ->>> -""" if sys.version_info >= (3, 12): def __init__( self, @@ -380,40 +393,40 @@ drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr context: SSLContext def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... def auth(self) -> str: - """Set up secure control connection by using TLS/SSL. -""" + """Set up secure control connection by using TLS/SSL.""" + def prot_p(self) -> str: - """Set up secure data connection. -""" + """Set up secure data connection.""" + def prot_c(self) -> str: - """Set up clear text data connection. -""" + """Set up clear text data connection.""" + def ccc(self) -> str: - """Switch back to a clear-text control connection. -""" + """Switch back to a clear-text control connection.""" def parse150(resp: str) -> int | None: # undocumented """Parse the '150' response for a RETR request. -Returns the expected transfer size or None; size is not guaranteed to -be present in the 150 message. -""" + Returns the expected transfer size or None; size is not guaranteed to + be present in the 150 message. + """ + def parse227(resp: str) -> tuple[str, int]: # undocumented """Parse the '227' response for a PASV request. -Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)' -Return ('host.addr.as.numbers', port#) tuple. -""" + Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)' + Return ('host.addr.as.numbers', port#) tuple. + """ + def parse229(resp: str, peer: Any) -> tuple[str, int]: # undocumented """Parse the '229' response for an EPSV request. -Raises error_proto if it does not contain '(|||port|)' -Return ('host.addr.as.numbers', port#) tuple. -""" + Raises error_proto if it does not contain '(|||port|)' + Return ('host.addr.as.numbers', port#) tuple. + """ + def parse257(resp: str) -> str: # undocumented """Parse the '257' response for a MKD or PWD request. -This is a response to a MKD or PWD request: a directory name. -Returns the directoryname in the 257 reply. -""" -def ftpcp( - source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" -) -> None: # undocumented - """Copy file from one FTP-instance to another. -""" + This is a response to a MKD or PWD request: a directory name. + Returns the directoryname in the 257 reply. 
+ """ + +def ftpcp(source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I") -> None: # undocumented + """Copy file from one FTP-instance to another.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi index 6a36572440556..cd65e31ae8d8f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/functools.pyi @@ -1,5 +1,5 @@ -"""functools.py - Tools for working with functions and callable objects -""" +"""functools.py - Tools for working with functions and callable objects""" + import sys import types from _typeshed import SupportsAllComparisons, SupportsItems @@ -38,44 +38,44 @@ if sys.version_info >= (3, 14): def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], /, initial: _T) -> _T: """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. -This effectively reduces the iterable to a single value. If initial is present, -it is placed before the items of the iterable in the calculation, and serves as -a default when the iterable is empty. + This effectively reduces the iterable to a single value. If initial is present, + it is placed before the items of the iterable in the calculation, and serves as + a default when the iterable is empty. -For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) -calculates ((((1 + 2) + 3) + 4) + 5). -""" + For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) + calculates ((((1 + 2) + 3) + 4) + 5). + """ else: @overload def reduce(function: Callable[[_T, _S], _T], iterable: Iterable[_S], initial: _T, /) -> _T: """reduce(function, iterable[, initial], /) -> value -Apply a function of two arguments cumulatively to the items of an iterable, from left to right. + Apply a function of two arguments cumulatively to the items of an iterable, from left to right. -This effectively reduces the iterable to a single value. If initial is present, -it is placed before the items of the iterable in the calculation, and serves as -a default when the iterable is empty. + This effectively reduces the iterable to a single value. If initial is present, + it is placed before the items of the iterable in the calculation, and serves as + a default when the iterable is empty. -For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) -calculates ((((1 + 2) + 3) + 4) + 5). -""" + For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) + calculates ((((1 + 2) + 3) + 4) + 5). + """ @overload def reduce(function: Callable[[_T, _T], _T], iterable: Iterable[_T], /) -> _T: """Apply a function of two arguments cumulatively to the items of an iterable, from left to right. -This effectively reduces the iterable to a single value. If initial is present, -it is placed before the items of the iterable in the calculation, and serves as -a default when the iterable is empty. + This effectively reduces the iterable to a single value. If initial is present, + it is placed before the items of the iterable in the calculation, and serves as + a default when the iterable is empty. -For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) -calculates ((((1 + 2) + 3) + 4) + 5). -""" + For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) + calculates ((((1 + 2) + 3) + 4) + 5). 
+ """ class _CacheInfo(NamedTuple): - """CacheInfo(hits, misses, maxsize, currsize) -""" + """CacheInfo(hits, misses, maxsize, currsize)""" + hits: int misses: int maxsize: int | None @@ -90,28 +90,29 @@ class _CacheParameters(TypedDict): class _lru_cache_wrapper(Generic[_T]): """Create a cached callable that wraps another function. -user_function: the function being cached + user_function: the function being cached + + maxsize: 0 for no caching + None for unlimited cache size + n for a bounded cache -maxsize: 0 for no caching - None for unlimited cache size - n for a bounded cache + typed: False cache f(3) and f(3.0) as identical calls + True cache f(3) and f(3.0) as distinct calls -typed: False cache f(3) and f(3.0) as identical calls - True cache f(3) and f(3.0) as distinct calls + cache_info_type: namedtuple class with the fields: + hits misses currsize maxsize + """ -cache_info_type: namedtuple class with the fields: - hits misses currsize maxsize -""" __wrapped__: Callable[..., _T] def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: - """Call self as a function. -""" + """Call self as a function.""" + def cache_info(self) -> _CacheInfo: - """Report cache statistics -""" + """Report cache statistics""" + def cache_clear(self) -> None: - """Clear the cache and cache statistics -""" + """Clear the cache and cache statistics""" + def cache_parameters(self) -> _CacheParameters: ... def __copy__(self) -> _lru_cache_wrapper[_T]: ... def __deepcopy__(self, memo: Any, /) -> _lru_cache_wrapper[_T]: ... @@ -120,23 +121,24 @@ cache_info_type: namedtuple class with the fields: def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: """Least-recently-used cache decorator. -If *maxsize* is set to None, the LRU features are disabled and the cache -can grow without bound. + If *maxsize* is set to None, the LRU features are disabled and the cache + can grow without bound. -If *typed* is True, arguments of different types will be cached separately. -For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as -distinct calls with distinct results. Some types such as str and int may -be cached separately even when typed is false. + If *typed* is True, arguments of different types will be cached separately. + For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as + distinct calls with distinct results. Some types such as str and int may + be cached separately even when typed is false. -Arguments to the cached function must be hashable. + Arguments to the cached function must be hashable. -View the cache statistics named tuple (hits, misses, maxsize, currsize) -with f.cache_info(). Clear the cache and statistics with f.cache_clear(). -Access the underlying function with f.__wrapped__. + View the cache statistics named tuple (hits, misses, maxsize, currsize) + with f.cache_info(). Clear the cache and statistics with f.cache_clear(). + Access the underlying function with f.__wrapped__. -See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU) + See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU) + + """ -""" @overload def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... 
@@ -190,15 +192,16 @@ if sys.version_info >= (3, 14): ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: """Update a wrapper function to look like the wrapped function -wrapper is the function to be updated -wrapped is the original function -assigned is a tuple naming the attributes assigned directly -from the wrapped function to the wrapper function (defaults to -functools.WRAPPER_ASSIGNMENTS) -updated is a tuple naming the attributes of the wrapper that -are updated with the corresponding attribute from the wrapped -function (defaults to functools.WRAPPER_UPDATES) -""" + wrapper is the function to be updated + wrapped is the original function + assigned is a tuple naming the attributes assigned directly + from the wrapped function to the wrapper function (defaults to + functools.WRAPPER_ASSIGNMENTS) + updated is a tuple naming the attributes of the wrapper that + are updated with the corresponding attribute from the wrapped + function (defaults to functools.WRAPPER_UPDATES) + """ + def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotate__", "__type_params__"), @@ -206,12 +209,12 @@ function (defaults to functools.WRAPPER_UPDATES) ) -> _Wrapper[_PWrapped, _RWrapped]: """Decorator factory to apply update_wrapper() to a wrapper function -Returns a decorator that invokes update_wrapper() with the decorated -function as the wrapper argument and the arguments to wraps() as the -remaining arguments. Default arguments are as for update_wrapper(). -This is a convenience function to simplify applying partial() to -update_wrapper(). -""" + Returns a decorator that invokes update_wrapper() with the decorated + function as the wrapper argument and the arguments to wraps() as the + remaining arguments. Default arguments are as for update_wrapper(). + This is a convenience function to simplify applying partial() to + update_wrapper(). + """ elif sys.version_info >= (3, 12): def update_wrapper( @@ -222,15 +225,16 @@ elif sys.version_info >= (3, 12): ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: """Update a wrapper function to look like the wrapped function -wrapper is the function to be updated -wrapped is the original function -assigned is a tuple naming the attributes assigned directly -from the wrapped function to the wrapper function (defaults to -functools.WRAPPER_ASSIGNMENTS) -updated is a tuple naming the attributes of the wrapper that -are updated with the corresponding attribute from the wrapped -function (defaults to functools.WRAPPER_UPDATES) -""" + wrapper is the function to be updated + wrapped is the original function + assigned is a tuple naming the attributes assigned directly + from the wrapped function to the wrapper function (defaults to + functools.WRAPPER_ASSIGNMENTS) + updated is a tuple naming the attributes of the wrapper that + are updated with the corresponding attribute from the wrapped + function (defaults to functools.WRAPPER_UPDATES) + """ + def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), @@ -238,12 +242,12 @@ function (defaults to functools.WRAPPER_UPDATES) ) -> _Wrapper[_PWrapped, _RWrapped]: """Decorator factory to apply update_wrapper() to a wrapper function -Returns a decorator that invokes update_wrapper() with the decorated -function as the wrapper argument and the arguments to wraps() as the -remaining arguments. 
Default arguments are as for update_wrapper(). -This is a convenience function to simplify applying partial() to -update_wrapper(). -""" + Returns a decorator that invokes update_wrapper() with the decorated + function as the wrapper argument and the arguments to wraps() as the + remaining arguments. Default arguments are as for update_wrapper(). + This is a convenience function to simplify applying partial() to + update_wrapper(). + """ else: def update_wrapper( @@ -254,15 +258,16 @@ else: ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWrapper]: """Update a wrapper function to look like the wrapped function - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) - """ + wrapper is the function to be updated + wrapped is the original function + assigned is a tuple naming the attributes assigned directly + from the wrapped function to the wrapper function (defaults to + functools.WRAPPER_ASSIGNMENTS) + updated is a tuple naming the attributes of the wrapper that + are updated with the corresponding attribute from the wrapped + function (defaults to functools.WRAPPER_UPDATES) + """ + def wraps( wrapped: Callable[_PWrapped, _RWrapped], assigned: Iterable[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), @@ -270,57 +275,59 @@ else: ) -> _Wrapper[_PWrapped, _RWrapped]: """Decorator factory to apply update_wrapper() to a wrapper function - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). - """ + Returns a decorator that invokes update_wrapper() with the decorated + function as the wrapper argument and the arguments to wraps() as the + remaining arguments. Default arguments are as for update_wrapper(). + This is a convenience function to simplify applying partial() to + update_wrapper(). + """ def total_ordering(cls: type[_T]) -> type[_T]: - """Class decorator that fills in missing ordering methods -""" + """Class decorator that fills in missing ordering methods""" + def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: """Convert a cmp= function into a key= function. - mycmp - Function that compares two objects. -""" + mycmp + Function that compares two objects. + """ + @disjoint_base class partial(Generic[_T]): """Create a new function with partial application of the given arguments -and keywords. -""" + and keywords. + """ + @property def func(self) -> Callable[..., _T]: - """function object to use in future partial calls -""" + """function object to use in future partial calls""" + @property def args(self) -> tuple[Any, ...]: - """tuple of arguments to future partial calls -""" + """tuple of arguments to future partial calls""" + @property def keywords(self) -> dict[str, Any]: - """dictionary of keyword arguments to future partial calls -""" + """dictionary of keyword arguments to future partial calls""" + def __new__(cls, func: Callable[..., _T], /, *args: Any, **kwargs: Any) -> Self: ... 
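Illustrative note, not part of the patch: the functools.partial attributes typed above (func, args, keywords) behave as sketched below; the helper name int_from_hex is hypothetical and used only for this example.

    from functools import partial

    int_from_hex = partial(int, base=16)
    assert int_from_hex("ff") == 255
    assert int_from_hex.func is int
    assert int_from_hex.args == ()
    assert int_from_hex.keywords == {"base": 16}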
def __call__(self, /, *args: Any, **kwargs: Any) -> _T: - """Call self as a function. -""" + """Call self as a function.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" # With protocols, this could change into a generic protocol that defines __get__ and returns _T _Descriptor: TypeAlias = Any class partialmethod(Generic[_T]): """Method descriptor with partial application of the given arguments -and keywords. + and keywords. + + Supports wrapping existing descriptors and handles non-descriptor + callables as instance methods. + """ -Supports wrapping existing descriptors and handles non-descriptor -callables as instance methods. -""" func: Callable[..., _T] | _Descriptor args: tuple[Any, ...] keywords: dict[str, Any] @@ -341,8 +348,8 @@ callables as instance methods. def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ if sys.version_info >= (3, 11): _RegType: TypeAlias = type[Any] | types.UnionType @@ -370,19 +377,20 @@ class _SingleDispatchCallable(Generic[_T]): def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: """Single-dispatch generic function decorator. -Transforms a function into a generic function, which can have different -behaviours depending upon the type of its first argument. The decorated -function acts as the default implementation, and additional -implementations can be registered using the register() attribute of the -generic function. -""" + Transforms a function into a generic function, which can have different + behaviours depending upon the type of its first argument. The decorated + function acts as the default implementation, and additional + implementations can be registered using the register() attribute of the + generic function. + """ class singledispatchmethod(Generic[_T]): """Single-dispatch generic method descriptor. -Supports wrapping existing descriptors and handles non-descriptor -callables as instance methods. -""" + Supports wrapping existing descriptors and handles non-descriptor + callables as instance methods. + """ + dispatcher: _SingleDispatchCallable[_T] func: Callable[..., _T] def __init__(self, func: Callable[..., _T]) -> None: ... @@ -392,8 +400,9 @@ callables as instance methods. def register(self, cls: _RegType, method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: """generic_method.register(cls, func) -> func -Registers a new implementation for the given *cls* on a *generic_method*. -""" + Registers a new implementation for the given *cls* on a *generic_method*. + """ + @overload def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload @@ -414,12 +423,12 @@ class cached_property(Generic[_T_co]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def cache(user_function: Callable[..., _T], /) -> _lru_cache_wrapper[_T]: - """Simple lightweight unbounded cache. Sometimes called "memoize". -""" + """Simple lightweight unbounded cache. 
Sometimes called "memoize".""" + def _make_key( args: tuple[Hashable, ...], kwds: SupportsItems[Any, Any], @@ -432,22 +441,22 @@ def _make_key( ) -> Hashable: """Make a cache key from optionally typed positional and keyword arguments -The key is constructed in a way that is flat as possible rather than -as a nested structure that would take more memory. + The key is constructed in a way that is flat as possible rather than + as a nested structure that would take more memory. -If there is only a single argument and its data type is known to cache -its hash value, then that argument is returned without a wrapper. This -saves space and improves lookup speed. + If there is only a single argument and its data type is known to cache + its hash value, then that argument is returned without a wrapper. This + saves space and improves lookup speed. -""" + """ if sys.version_info >= (3, 14): @final class _PlaceholderType: """The type of the Placeholder singleton. -Used as a placeholder for partial arguments. -""" + Used as a placeholder for partial arguments. + """ Placeholder: Final[_PlaceholderType] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi index 92c63d5890b45..6b231fa7bc287 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gc.pyi @@ -19,6 +19,7 @@ freeze() -- Freeze all tracked objects and ignore them for future collections. unfreeze() -- Unfreeze all objects in the permanent generation. get_freeze_count() -- Return the number of objects in the permanent generation. """ + from collections.abc import Callable from typing import Any, Final, Literal from typing_extensions import TypeAlias @@ -37,88 +38,94 @@ garbage: list[Any] def collect(generation: int = 2) -> int: """Run the garbage collector. -With no arguments, run a full collection. The optional argument -may be an integer specifying which generation to collect. A ValueError -is raised if the generation number is invalid. + With no arguments, run a full collection. The optional argument + may be an integer specifying which generation to collect. A ValueError + is raised if the generation number is invalid. + + The number of unreachable objects is returned. + """ -The number of unreachable objects is returned. -""" def disable() -> None: - """Disable automatic garbage collection. -""" + """Disable automatic garbage collection.""" + def enable() -> None: - """Enable automatic garbage collection. -""" + """Enable automatic garbage collection.""" + def get_count() -> tuple[int, int, int]: - """Return a three-tuple of the current collection counts. -""" + """Return a three-tuple of the current collection counts.""" + def get_debug() -> int: - """Get the garbage collection debugging flags. -""" + """Get the garbage collection debugging flags.""" + def get_objects(generation: int | None = None) -> list[Any]: """Return a list of objects tracked by the collector (excluding the list returned). - generation - Generation to extract the objects from. + generation + Generation to extract the objects from. + + If generation is not None, return only the objects tracked by the collector + that are in that generation. + """ -If generation is not None, return only the objects tracked by the collector -that are in that generation. -""" def freeze() -> None: """Freeze all current tracked objects and ignore them for future collections. -This can be used before a POSIX fork() call to make the gc copy-on-write friendly. 
-Note: collection before a POSIX fork() call may free pages for future allocation -which can cause copy-on-write. -""" + This can be used before a POSIX fork() call to make the gc copy-on-write friendly. + Note: collection before a POSIX fork() call may free pages for future allocation + which can cause copy-on-write. + """ + def unfreeze() -> None: """Unfreeze all objects in the permanent generation. -Put all objects in the permanent generation back into oldest generation. -""" + Put all objects in the permanent generation back into oldest generation. + """ + def get_freeze_count() -> int: - """Return the number of objects in the permanent generation. -""" + """Return the number of objects in the permanent generation.""" + def get_referents(*objs: Any) -> list[Any]: - """Return the list of objects that are directly referred to by 'objs'. -""" + """Return the list of objects that are directly referred to by 'objs'.""" + def get_referrers(*objs: Any) -> list[Any]: - """Return the list of objects that directly refer to any of 'objs'. -""" + """Return the list of objects that directly refer to any of 'objs'.""" + def get_stats() -> list[dict[str, Any]]: - """Return a list of dictionaries containing per-generation statistics. -""" + """Return a list of dictionaries containing per-generation statistics.""" + def get_threshold() -> tuple[int, int, int]: - """Return the current collection thresholds. -""" + """Return the current collection thresholds.""" + def is_tracked(obj: Any, /) -> bool: """Returns true if the object is tracked by the garbage collector. -Simple atomic objects will return false. -""" + Simple atomic objects will return false. + """ + def is_finalized(obj: Any, /) -> bool: - """Returns true if the object has been already finalized by the GC. -""" + """Returns true if the object has been already finalized by the GC.""" + def isenabled() -> bool: - """Returns true if automatic garbage collection is enabled. -""" + """Returns true if automatic garbage collection is enabled.""" + def set_debug(flags: int, /) -> None: """Set the garbage collection debugging flags. - flags - An integer that can have the following bits turned on: - DEBUG_STATS - Print statistics during collection. - DEBUG_COLLECTABLE - Print collectable objects found. - DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects - found. - DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them. - DEBUG_LEAK - Debug leaking programs (everything but STATS). + flags + An integer that can have the following bits turned on: + DEBUG_STATS - Print statistics during collection. + DEBUG_COLLECTABLE - Print collectable objects found. + DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects + found. + DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them. + DEBUG_LEAK - Debug leaking programs (everything but STATS). + + Debugging information is written to sys.stderr. + """ -Debugging information is written to sys.stderr. -""" def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ..., /) -> None: """set_threshold(threshold0, [threshold1, [threshold2]]) -Set the collection thresholds (the collection frequency). + Set the collection thresholds (the collection frequency). -Setting 'threshold0' to zero disables collection. -""" + Setting 'threshold0' to zero disables collection. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi index b5089fe749894..af647a69b4e18 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/genericpath.pyi @@ -3,6 +3,7 @@ Path operations common to more than one OS Do not use directly. The OS specific modules import the appropriate functions from this module themselves. """ + import os import sys from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRichComparisonT @@ -34,8 +35,8 @@ if sys.version_info >= (3, 13): # type. But because this only works when T is str, we need Sequence[T] instead. @overload def commonprefix(m: Sequence[LiteralString]) -> LiteralString: - """Given a list of pathnames, returns the longest common leading component -""" + """Given a list of pathnames, returns the longest common leading component""" + @overload def commonprefix(m: Sequence[StrPath]) -> str: ... @overload @@ -45,59 +46,58 @@ def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[Support @overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... def exists(path: FileDescriptorOrPath) -> bool: - """Test whether a path exists. Returns False for broken symbolic links -""" + """Test whether a path exists. Returns False for broken symbolic links""" + def getsize(filename: FileDescriptorOrPath) -> int: - """Return the size of a file, reported by os.stat(). -""" + """Return the size of a file, reported by os.stat().""" + def isfile(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a regular file -""" + """Test whether a path is a regular file""" + def isdir(s: FileDescriptorOrPath) -> bool: - """Return true if the pathname refers to an existing directory. -""" + """Return true if the pathname refers to an existing directory.""" if sys.version_info >= (3, 12): def islink(path: StrOrBytesPath) -> bool: - """Test whether a path is a symbolic link -""" + """Test whether a path is a symbolic link""" # These return float if os.stat_float_times() == True, # but int is a subclass of float. def getatime(filename: FileDescriptorOrPath) -> float: - """Return the last access time of a file, reported by os.stat(). -""" + """Return the last access time of a file, reported by os.stat().""" + def getmtime(filename: FileDescriptorOrPath) -> float: - """Return the last modification time of a file, reported by os.stat(). -""" + """Return the last modification time of a file, reported by os.stat().""" + def getctime(filename: FileDescriptorOrPath) -> float: - """Return the metadata change time of a file, reported by os.stat(). -""" + """Return the metadata change time of a file, reported by os.stat().""" + def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: """Test whether two pathnames reference the same actual file or directory -This is determined by the device number and i-node number and -raises an exception if an os.stat() call on either pathname fails. -""" + This is determined by the device number and i-node number and + raises an exception if an os.stat() call on either pathname fails. 
+ """ + def sameopenfile(fp1: int, fp2: int) -> bool: - """Test whether two open file objects reference the same file -""" + """Test whether two open file objects reference the same file""" + def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: - """Test whether two stat buffers reference the same file -""" + """Test whether two stat buffers reference the same file""" if sys.version_info >= (3, 13): def isjunction(path: StrOrBytesPath) -> bool: """Test whether a path is a junction -Junctions are not supported on the current platform -""" + Junctions are not supported on the current platform + """ + def isdevdrive(path: StrOrBytesPath) -> bool: """Determines whether the specified path is on a Windows Dev Drive. -Dev Drives are not supported on the current platform -""" + Dev Drives are not supported on the current platform + """ + def lexists(path: StrOrBytesPath) -> bool: - """Test whether a path exists. Returns True for broken symbolic links -""" + """Test whether a path exists. Returns True for broken symbolic links""" # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.4 _AllowMissingType = NewType("_AllowMissingType", object) diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi index 5530bfc9f81be..a6e2a8f73dbc9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getopt.pyi @@ -13,6 +13,7 @@ to be intermixed. GetoptError -- exception (class) raised with 'opt' attribute, which is the option involved with the exception. """ + from collections.abc import Iterable, Sequence from typing import Protocol, TypeVar, overload, type_check_only @@ -32,47 +33,48 @@ def getopt( ) -> tuple[list[tuple[str, str]], _StrSequenceT_co]: """getopt(args, options[, long_options]) -> opts, args -Parses command line options and parameter list. args is the -argument list to be parsed, without the leading reference to the -running program. Typically, this means "sys.argv[1:]". shortopts -is the string of option letters that the script wants to -recognize, with options that require an argument followed by a -colon and options that accept an optional argument followed by -two colons (i.e., the same format that Unix getopt() uses). If -specified, longopts is a list of strings with the names of the -long options which should be supported. The leading '--' -characters should not be included in the option name. Options -which require an argument should be followed by an equal sign -('='). Options which accept an optional argument should be -followed by an equal sign and question mark ('=?'). - -The return value consists of two elements: the first is a list of -(option, value) pairs; the second is the list of program arguments -left after the option list was stripped (this is a trailing slice -of the first argument). Each option-and-value pair returned has -the option as its first element, prefixed with a hyphen (e.g., -'-x'), and the option argument as its second element, or an empty -string if the option has no argument. The options occur in the -list in the same order in which they were found, thus allowing -multiple occurrences. Long and short options may be mixed. + Parses command line options and parameter list. args is the + argument list to be parsed, without the leading reference to the + running program. Typically, this means "sys.argv[1:]". 
shortopts + is the string of option letters that the script wants to + recognize, with options that require an argument followed by a + colon and options that accept an optional argument followed by + two colons (i.e., the same format that Unix getopt() uses). If + specified, longopts is a list of strings with the names of the + long options which should be supported. The leading '--' + characters should not be included in the option name. Options + which require an argument should be followed by an equal sign + ('='). Options which accept an optional argument should be + followed by an equal sign and question mark ('=?'). + + The return value consists of two elements: the first is a list of + (option, value) pairs; the second is the list of program arguments + left after the option list was stripped (this is a trailing slice + of the first argument). Each option-and-value pair returned has + the option as its first element, prefixed with a hyphen (e.g., + '-x'), and the option argument as its second element, or an empty + string if the option has no argument. The options occur in the + list in the same order in which they were found, thus allowing + multiple occurrences. Long and short options may be mixed. + + """ -""" def gnu_getopt( args: Sequence[str], shortopts: str, longopts: Iterable[str] | str = [] ) -> tuple[list[tuple[str, str]], list[str]]: """getopt(args, options[, long_options]) -> opts, args -This function works like getopt(), except that GNU style scanning -mode is used by default. This means that option and non-option -arguments may be intermixed. The getopt() function stops -processing options as soon as a non-option argument is -encountered. + This function works like getopt(), except that GNU style scanning + mode is used by default. This means that option and non-option + arguments may be intermixed. The getopt() function stops + processing options as soon as a non-option argument is + encountered. -If the first character of the option string is '+', or if the -environment variable POSIXLY_CORRECT is set, then option -processing stops as soon as a non-option argument is encountered. + If the first character of the option string is '+', or if the + environment variable POSIXLY_CORRECT is set, then option + processing stops as soon as a non-option argument is encountered. -""" + """ class GetoptError(Exception): msg: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi index 3ad62219f048c..3242fd5779a82 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/getpass.pyi @@ -10,6 +10,7 @@ GetPassWarning - This UserWarning is issued when getpass() cannot prevent On Windows, the msvcrt module will be used. """ + import sys from typing import TextIO @@ -19,48 +20,48 @@ if sys.version_info >= (3, 14): def getpass(prompt: str = "Password: ", stream: TextIO | None = None, *, echo_char: str | None = None) -> str: """Prompt for a password, with echo turned off. -Args: - prompt: Written on stream to ask for the input. Default: 'Password: ' - stream: A writable file object to display the prompt. Defaults to - the tty. If no tty is available defaults to sys.stderr. - echo_char: A single ASCII character to mask input (e.g., '*'). - If None, input is hidden. -Returns: - The seKr3t input. -Raises: - EOFError: If our input tty or stdin was closed. - GetPassWarning: When we were unable to turn echo off on the input. 
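The getopt stub above spells out the short-option/long-option syntax and the (opts, args) return shape; a small, self-contained sketch of the conventional parsing loop (the option names `-o/--output` and `-v/--verbose` are made up for illustration):

import getopt
import sys

def main(argv: list[str]) -> None:
    # "o:" takes an argument (trailing colon), "v" does not;
    # "output=" / "verbose" are the long equivalents.
    try:
        opts, args = getopt.getopt(argv, "o:v", ["output=", "verbose"])
    except getopt.GetoptError as err:
        print(err.msg, file=sys.stderr)
        sys.exit(2)
    for opt, value in opts:
        if opt in ("-o", "--output"):
            print("output file:", value)
        elif opt in ("-v", "--verbose"):
            print("verbose mode on")
    print("positional arguments:", args)

if __name__ == "__main__":
    main(sys.argv[1:])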
- -Always restores terminal settings before returning. -""" + Args: + prompt: Written on stream to ask for the input. Default: 'Password: ' + stream: A writable file object to display the prompt. Defaults to + the tty. If no tty is available defaults to sys.stderr. + echo_char: A single ASCII character to mask input (e.g., '*'). + If None, input is hidden. + Returns: + The seKr3t input. + Raises: + EOFError: If our input tty or stdin was closed. + GetPassWarning: When we were unable to turn echo off on the input. + + Always restores terminal settings before returning. + """ else: def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: """Prompt for a password, with echo turned off. -Args: - prompt: Written on stream to ask for the input. Default: 'Password: ' - stream: A writable file object to display the prompt. Defaults to - the tty. If no tty is available defaults to sys.stderr. -Returns: - The seKr3t input. -Raises: - EOFError: If our input tty or stdin was closed. - GetPassWarning: When we were unable to turn echo off on the input. - -Always restores terminal settings before returning. -""" + Args: + prompt: Written on stream to ask for the input. Default: 'Password: ' + stream: A writable file object to display the prompt. Defaults to + the tty. If no tty is available defaults to sys.stderr. + Returns: + The seKr3t input. + Raises: + EOFError: If our input tty or stdin was closed. + GetPassWarning: When we were unable to turn echo off on the input. + + Always restores terminal settings before returning. + """ def getuser() -> str: """Get the username from the environment or password database. -First try various environment variables, then the password -database. This works on Windows as long as USERNAME is set. -Any failure to find a username raises OSError. + First try various environment variables, then the password + database. This works on Windows as long as USERNAME is set. + Any failure to find a username raises OSError. -.. versionchanged:: 3.13 - Previously, various exceptions beyond just :exc:`OSError` - were raised. -""" + .. versionchanged:: 3.13 + Previously, various exceptions beyond just :exc:`OSError` + were raised. + """ class GetPassWarning(UserWarning): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi index 3a5cc0e5071b7..9b61fdad980ab 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gettext.pyi @@ -9,6 +9,7 @@ languages. L10N refers to the adaptation of your program, once internationalized, to the local language and cultural habits. """ + import io import sys from _typeshed import StrPath @@ -199,5 +200,5 @@ Catalog = translation def c2py(plural: str) -> Callable[[int], int]: """Gets a C expression as used in PO files for plural forms and returns a -Python function that implements an equivalent expression. -""" + Python function that implements an equivalent expression. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi index a28283efb09aa..d3a6642a4c2fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/glob.pyi @@ -1,5 +1,5 @@ -"""Filename globbing utility. 
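For the getpass functions documented above, a brief interactive sketch (the `echo_char` form is only available on the 3.14+ branch shown above, so it is left as a comment):

import getpass

user = getpass.getuser()          # environment variables first, then the password database
secret = getpass.getpass(f"Password for {user}: ")
# On 3.14+ a mask character can be echoed instead of hiding input entirely:
# secret = getpass.getpass("Token: ", echo_char="*")
print("received", len(secret), "characters")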
-""" +"""Filename globbing utility.""" + import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, Sequence @@ -36,17 +36,18 @@ if sys.version_info >= (3, 11): ) -> list[AnyStr]: """Return a list of paths matching a pathname pattern. -The pattern may contain simple shell-style wildcards a la -fnmatch. Unlike fnmatch, filenames starting with a -dot are special cases that are not matched by '*' and '?' -patterns by default. + The pattern may contain simple shell-style wildcards a la + fnmatch. Unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns by default. + + If `include_hidden` is true, the patterns '*', '?', '**' will match hidden + directories. -If `include_hidden` is true, the patterns '*', '?', '**' will match hidden -directories. + If `recursive` is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ -If `recursive` is true, the pattern '**' will match any files and -zero or more directories and subdirectories. -""" def iglob( pathname: AnyStr, *, @@ -57,14 +58,14 @@ zero or more directories and subdirectories. ) -> Iterator[AnyStr]: """Return an iterator which yields the paths matching a pathname pattern. -The pattern may contain simple shell-style wildcards a la -fnmatch. However, unlike fnmatch, filenames starting with a -dot are special cases that are not matched by '*' and '?' -patterns. + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. -If recursive is true, the pattern '**' will match any files and -zero or more directories and subdirectories. -""" + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ elif sys.version_info >= (3, 10): def glob( @@ -72,70 +73,70 @@ elif sys.version_info >= (3, 10): ) -> list[AnyStr]: """Return a list of paths matching a pathname pattern. - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ def iglob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False ) -> Iterator[AnyStr]: """Return an iterator which yields the paths matching a pathname pattern. - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. 
+ """ else: def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: """Return a list of paths matching a pathname pattern. - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: """Return an iterator which yields the paths matching a pathname pattern. - The pattern may contain simple shell-style wildcards a la - fnmatch. However, unlike fnmatch, filenames starting with a - dot are special cases that are not matched by '*' and '?' - patterns. + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. - If recursive is true, the pattern '**' will match any files and - zero or more directories and subdirectories. - """ + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ def escape(pathname: AnyStr) -> AnyStr: - """Escape all special characters. - """ + """Escape all special characters.""" + def has_magic(s: str | bytes) -> bool: ... # undocumented if sys.version_info >= (3, 13): - def translate( - pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None - ) -> str: + def translate(pat: str, *, recursive: bool = False, include_hidden: bool = False, seps: Sequence[str] | None = None) -> str: """Translate a pathname with shell wildcards to a regular expression. -If `recursive` is true, the pattern segment '**' will match any number of -path segments. + If `recursive` is true, the pattern segment '**' will match any number of + path segments. -If `include_hidden` is true, wildcards can match path segments beginning -with a dot ('.'). + If `include_hidden` is true, wildcards can match path segments beginning + with a dot ('.'). -If a sequence of separator characters is given to `seps`, they will be -used to split the pattern into segments and match path separators. If not -given, os.path.sep and os.path.altsep (where available) are used. -""" + If a sequence of separator characters is given to `seps`, they will be + used to split the pattern into segments and match path separators. If not + given, os.path.sep and os.path.altsep (where available) are used. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi index f673d641451ab..793c8f0975775 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/graphlib.pyi @@ -11,8 +11,8 @@ if sys.version_info >= (3, 11): from types import GenericAlias class TopologicalSorter(Generic[_T]): - """Provides functionality to topologically sort a graph of hashable nodes -""" + """Provides functionality to topologically sort a graph of hashable nodes""" + @overload def __init__(self, graph: None = None) -> None: ... 
@overload @@ -20,85 +20,90 @@ class TopologicalSorter(Generic[_T]): def add(self, node: _T, *predecessors: _T) -> None: """Add a new node and its predecessors to the graph. -Both the *node* and all elements in *predecessors* must be hashable. + Both the *node* and all elements in *predecessors* must be hashable. + + If called multiple times with the same node argument, the set of dependencies + will be the union of all dependencies passed in. -If called multiple times with the same node argument, the set of dependencies -will be the union of all dependencies passed in. + It is possible to add a node with no dependencies (*predecessors* is not provided) + as well as provide a dependency twice. If a node that has not been provided before + is included among *predecessors* it will be automatically added to the graph with + no predecessors of its own. -It is possible to add a node with no dependencies (*predecessors* is not provided) -as well as provide a dependency twice. If a node that has not been provided before -is included among *predecessors* it will be automatically added to the graph with -no predecessors of its own. + Raises ValueError if called after "prepare". + """ -Raises ValueError if called after "prepare". -""" def prepare(self) -> None: """Mark the graph as finished and check for cycles in the graph. -If any cycle is detected, "CycleError" will be raised, but "get_ready" can -still be used to obtain as many nodes as possible until cycles block more -progress. After a call to this function, the graph cannot be modified and -therefore no more nodes can be added using "add". + If any cycle is detected, "CycleError" will be raised, but "get_ready" can + still be used to obtain as many nodes as possible until cycles block more + progress. After a call to this function, the graph cannot be modified and + therefore no more nodes can be added using "add". + + Raise ValueError if nodes have already been passed out of the sorter. -Raise ValueError if nodes have already been passed out of the sorter. + """ -""" def is_active(self) -> bool: """Return ``True`` if more progress can be made and ``False`` otherwise. -Progress can be made if cycles do not block the resolution and either there -are still nodes ready that haven't yet been returned by "get_ready" or the -number of nodes marked "done" is less than the number that have been returned -by "get_ready". + Progress can be made if cycles do not block the resolution and either there + are still nodes ready that haven't yet been returned by "get_ready" or the + number of nodes marked "done" is less than the number that have been returned + by "get_ready". + + Raises ValueError if called without calling "prepare" previously. + """ -Raises ValueError if called without calling "prepare" previously. -""" def __bool__(self) -> bool: ... def done(self, *nodes: _T) -> None: """Marks a set of nodes returned by "get_ready" as processed. -This method unblocks any successor of each node in *nodes* for being returned -in the future by a call to "get_ready". + This method unblocks any successor of each node in *nodes* for being returned + in the future by a call to "get_ready". + + Raises ValueError if any node in *nodes* has already been marked as + processed by a previous call to this method, if a node was not added to the + graph by using "add" or if called without calling "prepare" previously or if + node has not yet been returned by "get_ready". 
+ """ -Raises ValueError if any node in *nodes* has already been marked as -processed by a previous call to this method, if a node was not added to the -graph by using "add" or if called without calling "prepare" previously or if -node has not yet been returned by "get_ready". -""" def get_ready(self) -> tuple[_T, ...]: """Return a tuple of all the nodes that are ready. -Initially it returns all nodes with no predecessors; once those are marked -as processed by calling "done", further calls will return all new nodes that -have all their predecessors already processed. Once no more progress can be made, -empty tuples are returned. + Initially it returns all nodes with no predecessors; once those are marked + as processed by calling "done", further calls will return all new nodes that + have all their predecessors already processed. Once no more progress can be made, + empty tuples are returned. + + Raises ValueError if called without calling "prepare" previously. + """ -Raises ValueError if called without calling "prepare" previously. -""" def static_order(self) -> Iterable[_T]: """Returns an iterable of nodes in a topological order. -The particular order that is returned may depend on the specific -order in which the items were inserted in the graph. + The particular order that is returned may depend on the specific + order in which the items were inserted in the graph. -Using this method does not require to call "prepare" or "done". If any -cycle is detected, :exc:`CycleError` will be raised. -""" + Using this method does not require to call "prepare" or "done". If any + cycle is detected, :exc:`CycleError` will be raised. + """ if sys.version_info >= (3, 11): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class CycleError(ValueError): """Subclass of ValueError raised by TopologicalSorter.prepare if cycles -exist in the working graph. - -If multiple cycles exist, only one undefined choice among them will be reported -and included in the exception. The detected cycle can be accessed via the second -element in the *args* attribute of the exception instance and consists in a list -of nodes, such that each node is, in the graph, an immediate predecessor of the -next node in the list. In the reported list, the first and the last node will be -the same, to make it clear that it is cyclic. -""" + exist in the working graph. + + If multiple cycles exist, only one undefined choice among them will be reported + and included in the exception. The detected cycle can be accessed via the second + element in the *args* attribute of the exception instance and consists in a list + of nodes, such that each node is, in the graph, an immediate predecessor of the + next node in the list. In the reported list, the first and the last node will be + the same, to make it clear that it is cyclic. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi index 07f3a73897194..2364e81f8904a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/grp.pyi @@ -13,6 +13,7 @@ users are not explicitly listed as members of the groups they are in according to the password database. Check both databases to get complete membership information.) 
""" + import sys from _typeshed import structseq from typing import Any, Final, final @@ -22,43 +23,45 @@ if sys.platform != "win32": class struct_group(structseq[Any], tuple[str, str | None, int, list[str]]): """grp.struct_group: Results from getgr*() routines. -This object may be accessed either as a tuple of - (gr_name,gr_passwd,gr_gid,gr_mem) -or via the object attributes as named in the above tuple. -""" + This object may be accessed either as a tuple of + (gr_name,gr_passwd,gr_gid,gr_mem) + or via the object attributes as named in the above tuple. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("gr_name", "gr_passwd", "gr_gid", "gr_mem") @property def gr_name(self) -> str: - """group name -""" + """group name""" + @property def gr_passwd(self) -> str | None: - """password -""" + """password""" + @property def gr_gid(self) -> int: - """group id -""" + """group id""" + @property def gr_mem(self) -> list[str]: - """group members -""" + """group members""" def getgrall() -> list[struct_group]: """Return a list of all available group entries, in arbitrary order. -An entry whose name starts with '+' or '-' represents an instruction -to use YP/NIS and may not be accessible via getgrnam or getgrgid. -""" + An entry whose name starts with '+' or '-' represents an instruction + to use YP/NIS and may not be accessible via getgrnam or getgrgid. + """ + def getgrgid(id: int) -> struct_group: """Return the group database entry for the given numeric group ID. -If id is not valid, raise KeyError. -""" + If id is not valid, raise KeyError. + """ + def getgrnam(name: str) -> struct_group: """Return the group database entry for the given group name. -If name is not valid, raise KeyError. -""" + If name is not valid, raise KeyError. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi index 6b91b4a27d6c1..ac3fe1a3c0908 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/gzip.pyi @@ -3,6 +3,7 @@ The user of the file doesn't have to worry about the compression, but random access is not allowed. """ + import sys import zlib from _typeshed import ReadableBuffer, SizedBuffer, StrOrBytesPath, WriteableBuffer @@ -59,22 +60,23 @@ def open( ) -> GzipFile: """Open a gzip-compressed file in binary or text mode. -The filename argument can be an actual filename (a str or bytes object), or -an existing file object to read from or write to. + The filename argument can be an actual filename (a str or bytes object), or + an existing file object to read from or write to. -The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for -binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is -"rb", and the default compresslevel is 9. + The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for + binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is + "rb", and the default compresslevel is 9. -For binary mode, this function is equivalent to the GzipFile constructor: -GzipFile(filename, mode, compresslevel). In this case, the encoding, errors -and newline arguments must not be provided. + For binary mode, this function is equivalent to the GzipFile constructor: + GzipFile(filename, mode, compresslevel). In this case, the encoding, errors + and newline arguments must not be provided. 
-For text mode, a GzipFile object is created, and wrapped in an -io.TextIOWrapper instance with the specified encoding, error handling -behavior, and line ending(s). + For text mode, a GzipFile object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error handling + behavior, and line ending(s). + + """ -""" @overload def open( filename: StrOrBytesPath | _WritableFileobj, @@ -105,9 +107,10 @@ def open( class _PaddedFile: """Minimal read-only file object that prepends a string to the contents -of an actual file. Shouldn't be used outside of gzip.py, as it lacks -essential functionality. -""" + of an actual file. Shouldn't be used outside of gzip.py, as it lacks + essential functionality. + """ + file: _ReadableFileobj def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... def read(self, size: int) -> bytes: ... @@ -116,17 +119,17 @@ essential functionality. def seekable(self) -> bool: ... class BadGzipFile(OSError): - """Exception raised in some cases for invalid gzip files. -""" + """Exception raised in some cases for invalid gzip files.""" class GzipFile(BaseStream): """The GzipFile class simulates most of the methods of a file object with -the exception of the truncate() method. + the exception of the truncate() method. -This class only supports opening files in binary mode. If you need to open a -compressed file in text mode, use the gzip.open() function. + This class only supports opening files in binary mode. If you need to open a + compressed file in text mode, use the gzip.open() function. + + """ -""" myfileobj: FileIO | None mode: object name: str @@ -143,37 +146,38 @@ compressed file in text mode, use the gzip.open() function. ) -> None: """Constructor for the GzipFile class. -At least one of fileobj and filename must be given a -non-trivial value. + At least one of fileobj and filename must be given a + non-trivial value. -The new class instance is based on fileobj, which can be a regular -file, an io.BytesIO object, or any other object which simulates a file. -It defaults to None, in which case filename is opened to provide -a file object. + The new class instance is based on fileobj, which can be a regular + file, an io.BytesIO object, or any other object which simulates a file. + It defaults to None, in which case filename is opened to provide + a file object. -When fileobj is not None, the filename argument is only used to be -included in the gzip file header, which may include the original -filename of the uncompressed file. It defaults to the filename of -fileobj, if discernible; otherwise, it defaults to the empty string, -and in this case the original filename is not included in the header. + When fileobj is not None, the filename argument is only used to be + included in the gzip file header, which may include the original + filename of the uncompressed file. It defaults to the filename of + fileobj, if discernible; otherwise, it defaults to the empty string, + and in this case the original filename is not included in the header. -The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or -'xb' depending on whether the file will be read or written. The default -is the mode of fileobj if discernible; otherwise, the default is 'rb'. -A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and -'wb', 'a' and 'ab', and 'x' and 'xb'. + The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', 'wb', 'x', or + 'xb' depending on whether the file will be read or written. 
The default + is the mode of fileobj if discernible; otherwise, the default is 'rb'. + A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and + 'wb', 'a' and 'ab', and 'x' and 'xb'. -The compresslevel argument is an integer from 0 to 9 controlling the -level of compression; 1 is fastest and produces the least compression, -and 9 is slowest and produces the most compression. 0 is no compression -at all. The default is 9. + The compresslevel argument is an integer from 0 to 9 controlling the + level of compression; 1 is fastest and produces the least compression, + and 9 is slowest and produces the most compression. 0 is no compression + at all. The default is 9. -The optional mtime argument is the timestamp requested by gzip. The time -is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. -If mtime is omitted or None, the current time is used. Use mtime = 0 -to generate a compressed stream that does not depend on creation time. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. + + """ -""" @overload def __init__( self, @@ -217,29 +221,31 @@ to generate a compressed stream that does not depend on creation time. @property def mtime(self) -> int | None: - """Last modification time read from stream, or None -""" + """Last modification time read from stream, or None""" crc: int def write(self, data: ReadableBuffer) -> int: ... def read(self, size: int | None = -1) -> bytes: ... def read1(self, size: int = -1) -> bytes: """Implements BufferedIOBase.read1() -Reads up to a buffer's worth of data if size is negative. -""" + Reads up to a buffer's worth of data if size is negative. + """ + def peek(self, n: int) -> bytes: ... def close(self) -> None: ... def flush(self, zlib_mode: int = 2) -> None: ... def fileno(self) -> int: """Invoke the underlying file object's fileno() method. -This will raise AttributeError if the underlying file object -doesn't support fileno(). -""" + This will raise AttributeError if the underlying file object + doesn't support fileno(). + """ + def rewind(self) -> None: """Return the uncompressed stream file position indicator to the -beginning of the file -""" + beginning of the file + """ + def seek(self, offset: int, whence: int = 0) -> int: ... def readline(self, size: int | None = -1) -> bytes: ... @@ -254,21 +260,21 @@ if sys.version_info >= (3, 14): def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float = 0) -> bytes: """Compress data in one shot and return the compressed string. -compresslevel sets the compression level in range of 0-9. -mtime can be used to set the modification time. -The modification time is set to 0 by default, for reproducibility. -""" + compresslevel sets the compression level in range of 0-9. + mtime can be used to set the modification time. + The modification time is set to 0 by default, for reproducibility. + """ else: def compress(data: SizedBuffer, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: """Compress data in one shot and return the compressed string. -compresslevel sets the compression level in range of 0-9. -mtime can be used to set the modification time. The modification time is -set to the current time by default. -""" + compresslevel sets the compression level in range of 0-9. + mtime can be used to set the modification time. 
The modification time is + set to the current time by default. + """ def decompress(data: ReadableBuffer) -> bytes: """Decompress a gzip compressed string in one shot. -Return the decompressed string. -""" + Return the decompressed string. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi index 1c1a2ce69d80f..faeafece16a43 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hashlib.pyi @@ -48,6 +48,7 @@ More condensed: 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2' """ + import sys from _blake2 import blake2b as blake2b, blake2s as blake2s from _hashlib import ( @@ -118,8 +119,8 @@ else: def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> HASH: """new(name, data=b'') - Return a new hashing object using the named algorithm; -optionally initialized with data (which must be a bytes-like object). -""" + optionally initialized with data (which must be a bytes-like object). + """ algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] @@ -139,14 +140,14 @@ if sys.version_info >= (3, 11): ) -> HASH: """Hash the contents of a file-like object. Returns a digest object. -*fileobj* must be a file-like object opened for reading in binary mode. -It accepts file objects from open(), io.BytesIO(), and SocketIO objects. -The function may bypass Python's I/O and use the file descriptor *fileno* -directly. + *fileobj* must be a file-like object opened for reading in binary mode. + It accepts file objects from open(), io.BytesIO(), and SocketIO objects. + The function may bypass Python's I/O and use the file descriptor *fileno* + directly. -*digest* must either be a hash algorithm name as a *str*, a hash -constructor, or a callable that returns a hash object. -""" + *digest* must either be a hash algorithm name as a *str*, a hash + constructor, or a callable that returns a hash object. + """ # Legacy typing-only alias _Hash = HASH diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi index 2c7943f1b2538..0eb884cb035a6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/heapq.pyi @@ -29,6 +29,7 @@ These two make it possible to view the heap as a regular Python list without surprises: heap[0] is the smallest item, and heap.sort() maintains the heap invariant! """ + from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Generator, Iterable @@ -45,30 +46,32 @@ def merge( ) -> Generator[_S]: """Merge multiple sorted inputs into a single sorted output. -Similar to sorted(itertools.chain(*iterables)) but returns a generator, -does not pull the data into memory all at once, and assumes that each of -the input streams is already sorted (smallest to largest). + Similar to sorted(itertools.chain(*iterables)) but returns a generator, + does not pull the data into memory all at once, and assumes that each of + the input streams is already sorted (smallest to largest). ->>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25])) -[0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25] + >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25])) + [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25] -If *key* is not None, applies a key function to each element to determine -its sort order. 
+ If *key* is not None, applies a key function to each element to determine + its sort order. ->>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len)) -['dog', 'cat', 'fish', 'horse', 'kangaroo'] + >>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len)) + ['dog', 'cat', 'fish', 'horse', 'kangaroo'] + + """ -""" def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: """Find the n largest elements in a dataset. -Equivalent to: sorted(iterable, key=key, reverse=True)[:n] -""" + Equivalent to: sorted(iterable, key=key, reverse=True)[:n] + """ + def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: """Find the n smallest elements in a dataset. -Equivalent to: sorted(iterable, key=key)[:n] -""" + Equivalent to: sorted(iterable, key=key)[:n] + """ + def _heapify_max(heap: list[Any], /) -> None: # undocumented - """Maxheap variant of heapify. -""" + """Maxheap variant of heapify.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi index 20ce107d93525..4aa8aea3cd0c6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/hmac.pyi @@ -2,6 +2,7 @@ Implements the HMAC algorithm as described by RFC 2104. """ + from _hashlib import _HashObject, compare_digest as compare_digest from _typeshed import ReadableBuffer, SizedBuffer from collections.abc import Callable @@ -22,28 +23,30 @@ digest_size: None def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: """Create a new hashing object and return it. -key: bytes or buffer, The starting key for the hash. -msg: bytes or buffer, Initial input for the hash, or None. -digestmod: A hash name suitable for hashlib.new(). *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. + key: bytes or buffer, The starting key for the hash. + msg: bytes or buffer, Initial input for the hash, or None. + digestmod: A hash name suitable for hashlib.new(). *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. - Required as of 3.8, despite its position after the optional - msg argument. Passing it as a keyword argument is - recommended, though not required for legacy API reasons. + Required as of 3.8, despite its position after the optional + msg argument. Passing it as a keyword argument is + recommended, though not required for legacy API reasons. + + You can now feed arbitrary bytes into the object using its update() + method, and can ask for the hash value at any time by calling its digest() + or hexdigest() methods. + """ -You can now feed arbitrary bytes into the object using its update() -method, and can ask for the hash value at any time by calling its digest() -or hexdigest() methods. -""" @overload def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... class HMAC: """RFC 2104 HMAC class. Also complies with RFC 4231. -This supports the API for Cryptographic Hash Functions (PEP 247). -""" + This supports the API for Cryptographic Hash Functions (PEP 247). + """ + __slots__ = ("_hmac", "_inner", "_outer", "block_size", "digest_size") digest_size: int block_size: int @@ -52,41 +55,43 @@ This supports the API for Cryptographic Hash Functions (PEP 247). 
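The heapq.merge docstring above already carries its own doctest; restated as a runnable sketch together with nlargest()/nsmallest() and their key= parameter:

import heapq

# merge() lazily interleaves already-sorted inputs without loading them all at once.
runs = [[1, 3, 5, 7], [0, 2, 4, 8], [5, 10, 15, 20]]
print(list(heapq.merge(*runs)))               # [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20]

# nlargest/nsmallest behave like sorting then slicing, but are cheaper for small n.
words = ["dog", "horse", "cat", "kangaroo", "fish"]
print(heapq.nlargest(2, words, key=len))      # ['kangaroo', 'horse']
print(heapq.nsmallest(2, words, key=len))     # ['dog', 'cat']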
def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: """Create a new HMAC object. -key: bytes or buffer, key for the keyed hash object. -msg: bytes or buffer, Initial input for the hash or None. -digestmod: A hash name suitable for hashlib.new(). *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. + key: bytes or buffer, key for the keyed hash object. + msg: bytes or buffer, Initial input for the hash or None. + digestmod: A hash name suitable for hashlib.new(). *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. + + Required as of 3.8, despite its position after the optional + msg argument. Passing it as a keyword argument is + recommended, though not required for legacy API reasons. + """ - Required as of 3.8, despite its position after the optional - msg argument. Passing it as a keyword argument is - recommended, though not required for legacy API reasons. -""" def update(self, msg: ReadableBuffer) -> None: - """Feed data from msg into this hashing object. -""" + """Feed data from msg into this hashing object.""" + def digest(self) -> bytes: """Return the hash value of this hashing object. -This returns the hmac value as bytes. The object is -not altered in any way by this function; you can continue -updating the object after calling this function. -""" - def hexdigest(self) -> str: - """Like digest(), but returns a string of hexadecimal digits instead. + This returns the hmac value as bytes. The object is + not altered in any way by this function; you can continue + updating the object after calling this function. """ + + def hexdigest(self) -> str: + """Like digest(), but returns a string of hexadecimal digits instead.""" + def copy(self) -> HMAC: """Return a separate copy of this hashing object. -An update to this copy won't affect the original object. -""" + An update to this copy won't affect the original object. + """ def digest(key: SizedBuffer, msg: ReadableBuffer, digest: _DigestMod) -> bytes: """Fast inline implementation of HMAC. -key: bytes or buffer, The key for the keyed hash object. -msg: bytes or buffer, Input message. -digest: A hash name suitable for hashlib.new() for best performance. *OR* - A hashlib constructor returning a new hash object. *OR* - A module supporting PEP 247. -""" + key: bytes or buffer, The key for the keyed hash object. + msg: bytes or buffer, Input message. + digest: A hash name suitable for hashlib.new() for best performance. *OR* + A hashlib constructor returning a new hash object. *OR* + A module supporting PEP 247. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi index f17c0ae4020b9..af5799ee0c726 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/__init__.pyi @@ -1,22 +1,24 @@ """ General functions for HTML manipulation. """ + from typing import AnyStr __all__ = ["escape", "unescape"] def escape(s: AnyStr, quote: bool = True) -> AnyStr: """ -Replace special characters "&", "<" and ">" to HTML-safe sequences. -If the optional flag quote is true (the default), the quotation mark -characters, both double quote (") and single quote (') characters are also -translated. -""" + Replace special characters "&", "<" and ">" to HTML-safe sequences. 
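For the hmac API above (new(), the one-shot digest(), and compare_digest), a short sketch with a placeholder key and message:

import hashlib
import hmac

key = b"server-side-secret"                   # placeholder key, for illustration only
msg = b'{"amount": 42}'

mac = hmac.new(key, msg, digestmod=hashlib.sha256)
print(mac.hexdigest())

# digest() is the fast one-shot path; compare_digest avoids timing leaks.
expected = hmac.digest(key, msg, hashlib.sha256)
assert hmac.compare_digest(mac.digest(), expected)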
+ If the optional flag quote is true (the default), the quotation mark + characters, both double quote (") and single quote (') characters are also + translated. + """ + def unescape(s: AnyStr) -> AnyStr: """ -Convert all named and numeric character references (e.g. >, >, -&x3e;) in the string s to the corresponding unicode characters. -This function uses the rules defined by the HTML 5 standard -for both valid and invalid character references, and the list of -HTML 5 named character references defined in html.entities.html5. -""" + Convert all named and numeric character references (e.g. >, >, + &x3e;) in the string s to the corresponding unicode characters. + This function uses the rules defined by the HTML 5 standard + for both valid and invalid character references, and the list of + HTML 5 named character references defined in html.entities.html5. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi index eaea3c72f04ce..0f7b8b04bbeca 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/entities.pyi @@ -1,5 +1,5 @@ -"""HTML character entity references. -""" +"""HTML character entity references.""" + from typing import Final __all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi index 9941559b19496..085b51186db67 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/html/parser.pyi @@ -1,5 +1,5 @@ -"""A parser for HTML and XHTML. -""" +"""A parser for HTML and XHTML.""" + from _markupbase import ParserBase from re import Pattern from typing import Final @@ -9,24 +9,25 @@ __all__ = ["HTMLParser"] class HTMLParser(ParserBase): """Find tags and other markup and call handler functions. -Usage: - p = HTMLParser() - p.feed(data) - ... - p.close() + Usage: + p = HTMLParser() + p.feed(data) + ... + p.close() + + Start tags are handled by calling self.handle_starttag() or + self.handle_startendtag(); end tags by self.handle_endtag(). The + data between tags is passed from the parser to the derived class + by calling self.handle_data() with the data as argument (the data + may be split up in arbitrary chunks). If convert_charrefs is + True the character references are converted automatically to the + corresponding Unicode character (and self.handle_data() is no + longer split in chunks), otherwise they are passed by calling + self.handle_entityref() or self.handle_charref() with the string + containing respectively the named or numeric reference as the + argument. + """ -Start tags are handled by calling self.handle_starttag() or -self.handle_startendtag(); end tags by self.handle_endtag(). The -data between tags is passed from the parser to the derived class -by calling self.handle_data() with the data as argument (the data -may be split up in arbitrary chunks). If convert_charrefs is -True the character references are converted automatically to the -corresponding Unicode character (and self.handle_data() is no -longer split in chunks), otherwise they are passed by calling -self.handle_entityref() or self.handle_charref() with the string -containing respectively the named or numeric reference as the -argument. 
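The html.escape()/unescape() pair documented above round-trips markup-significant characters; a two-line sketch:

import html

raw = '<a href="x.py?a=1&b=2">click</a>'
quoted = html.escape(raw)                     # &, <, >, ", ' become character references
print(quoted)
assert html.unescape(quoted) == raw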
-""" CDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] # Added in Python 3.9.23, 3.10.18, 3.11.13, 3.12.11, 3.13.6 RCDATA_CONTENT_ELEMENTS: Final[tuple[str, ...]] @@ -34,21 +35,23 @@ argument. def __init__(self, *, convert_charrefs: bool = True) -> None: """Initialize and reset this instance. -If convert_charrefs is True (the default), all character references -are automatically converted to the corresponding Unicode characters. -""" + If convert_charrefs is True (the default), all character references + are automatically converted to the corresponding Unicode characters. + """ + def feed(self, data: str) -> None: """Feed data to the parser. -Call this as often as you want, with as little or as much text -as you want (may include '\\n'). -""" + Call this as often as you want, with as little or as much text + as you want (may include '\\n'). + """ + def close(self) -> None: - """Handle any buffered data. -""" + """Handle any buffered data.""" + def get_starttag_text(self) -> str | None: - """Return full source of start tag: '<...>'. -""" + """Return full source of start tag: '<...>'.""" + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi index a99aea1b903f2..1b65f162193f9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/__init__.pyi @@ -12,22 +12,23 @@ else: class HTTPStatus(IntEnum): """HTTP status codes and reason phrases -Status codes from the following RFCs are all observed: - - * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - * RFC 7725: An HTTP Status Code to Report Legal Obstacles - * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) - * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) - * RFC 8297: An HTTP Status Code for Indicating Hints - * RFC 8470: Using Early Data in HTTP -""" + Status codes from the following RFCs are all observed: + + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + @property def phrase(self) -> str: ... 
@property @@ -125,11 +126,12 @@ if sys.version_info >= (3, 11): class HTTPMethod(StrEnum): """HTTP methods and descriptions -Methods from the following RFCs are all observed: + Methods from the following RFCs are all observed: + + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 5789: PATCH Method for HTTP + """ - * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 - * RFC 5789: PATCH Method for HTTP -""" @property def description(self) -> str: ... CONNECT = "CONNECT" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi index acae10bb7e216..4fe786738c744 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/client.pyi @@ -67,6 +67,7 @@ Unread-response _CS_IDLE Req-started-unread-response _CS_REQ_STARTED Req-sent-unread-response _CS_REQ_SENT """ + import email.message import io import ssl @@ -191,18 +192,18 @@ class HTTPMessage(email.message.Message[str, str]): def getallmatchingheaders(self, name: str) -> list[str]: # undocumented """Find all header lines matching a given header name. -Look through the list of headers and find all lines matching a given -header name (and their continuation lines). A list of the lines is -returned, without interpretation. If the header does not occur, an -empty list is returned. If the header occurs multiple times, all -occurrences are returned. Case is not important in the header name. + Look through the list of headers and find all lines matching a given + header name (and their continuation lines). A list of the lines is + returned, without interpretation. If the header does not occur, an + empty list is returned. If the header occurs multiple times, all + occurrences are returned. Case is not important in the header name. -""" + """ @overload def parse_headers(fp: SupportsReadline[bytes], _class: Callable[[], _MessageT]) -> _MessageT: - """Parses only RFC2822 headers from a file pointer. -""" + """Parses only RFC2822 headers from a file pointer.""" + @overload def parse_headers(fp: SupportsReadline[bytes]) -> HTTPMessage: ... @@ -226,38 +227,41 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incomp def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... def peek(self, n: int = -1) -> bytes: ... def read(self, amt: int | None = None) -> bytes: - """Read and return the response body, or up to the next amt bytes. -""" + """Read and return the response body, or up to the next amt bytes.""" + def read1(self, n: int = -1) -> bytes: """Read with at most one underlying system call. If at least one -byte is buffered, return that instead. -""" + byte is buffered, return that instead. + """ + def readinto(self, b: WriteableBuffer) -> int: """Read up to len(b) bytes into bytearray b and return the number -of bytes read. -""" + of bytes read. + """ + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] @overload def getheader(self, name: str) -> str | None: """Returns the value of the header matching *name*. -If there are multiple matching headers, the values are -combined into a single string separated by commas and spaces. + If there are multiple matching headers, the values are + combined into a single string separated by commas and spaces. -If no matching header is found, returns *default* or None if -the *default* is not specified. 
+ If no matching header is found, returns *default* or None if + the *default* is not specified. -If the headers are unknown, raises http.client.ResponseNotReady. + If the headers are unknown, raises http.client.ResponseNotReady. + + """ -""" @overload def getheader(self, name: str, default: _T) -> str | _T: ... def getheaders(self) -> list[tuple[str, str]]: - """Return list of (header, value) tuples. -""" + """Return list of (header, value) tuples.""" + def isclosed(self) -> bool: - """True if the connection is closed. -""" + """True if the connection is closed.""" + def __iter__(self) -> Iterator[bytes]: ... def __enter__(self) -> Self: ... def __exit__( @@ -265,40 +269,43 @@ If the headers are unknown, raises http.client.ResponseNotReady. ) -> None: ... def info(self) -> email.message.Message: """Returns an instance of the class mimetools.Message containing -meta-information associated with the URL. + meta-information associated with the URL. -When the method is HTTP, these headers are those returned by -the server at the head of the retrieved HTML page (including -Content-Length and Content-Type). + When the method is HTTP, these headers are those returned by + the server at the head of the retrieved HTML page (including + Content-Length and Content-Type). -When the method is FTP, a Content-Length header will be -present if (as is now usual) the server passed back a file -length in response to the FTP retrieval request. A -Content-Type header will be present if the MIME type can be -guessed. + When the method is FTP, a Content-Length header will be + present if (as is now usual) the server passed back a file + length in response to the FTP retrieval request. A + Content-Type header will be present if the MIME type can be + guessed. -When the method is local-file, returned headers will include -a Date representing the file's last-modified time, a -Content-Length giving file size, and a Content-Type -containing a guess at the file's type. See also the -description of the mimetools module. + When the method is local-file, returned headers will include + a Date representing the file's last-modified time, a + Content-Length giving file size, and a Content-Type + containing a guess at the file's type. See also the + description of the mimetools module. + + """ -""" def geturl(self) -> str: """Return the real URL of the page. -In some cases, the HTTP server redirects a client to another -URL. The urlopen() function handles this transparently, but in -some cases the caller needs to know which URL the client was -redirected to. The geturl() method can be used to get at this -redirected URL. + In some cases, the HTTP server redirects a client to another + URL. The urlopen() function handles this transparently, but in + some cases the caller needs to know which URL the client was + redirected to. The geturl() method can be used to get at this + redirected URL. + + """ -""" def getcode(self) -> int: """Return the HTTP status code that was sent with the response, -or None if the URL is not an HTTP URL. + or None if the URL is not an HTTP URL. + + """ -""" def begin(self) -> None: ... class HTTPConnection: @@ -327,89 +334,94 @@ class HTTPConnection: *, encode_chunked: bool = False, ) -> None: - """Send a complete request to the server. -""" + """Send a complete request to the server.""" + def getresponse(self) -> HTTPResponse: """Get the response from the server. 
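The request()/getresponse() pair stubbed above is the usual high-level entry point of http.client; a minimal round trip might look like the following sketch (host, path, and header values are placeholders).

import http.client

conn = http.client.HTTPConnection("example.org", 80, timeout=10)
try:
    conn.request("GET", "/", headers={"Accept": "text/html"})
    resp = conn.getresponse()
    print(resp.status, resp.reason)
    print(resp.getheader("Content-Type", "unknown"))
    body = resp.read()  # read the full body before reusing the connection
    print(len(body), "bytes")
finally:
    conn.close()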
-If the HTTPConnection is in the correct state, returns an -instance of HTTPResponse or of whatever object is returned by -the response_class variable. + If the HTTPConnection is in the correct state, returns an + instance of HTTPResponse or of whatever object is returned by + the response_class variable. + + If a request has not been sent or if a previous response has + not be handled, ResponseNotReady is raised. If the HTTP + response indicates that the connection should be closed, then + it will be closed before the response is returned. When the + connection is closed, the underlying socket is closed. + """ -If a request has not been sent or if a previous response has -not be handled, ResponseNotReady is raised. If the HTTP -response indicates that the connection should be closed, then -it will be closed before the response is returned. When the -connection is closed, the underlying socket is closed. -""" def set_debuglevel(self, level: int) -> None: ... if sys.version_info >= (3, 12): def get_proxy_response_headers(self) -> HTTPMessage | None: """ -Returns a dictionary with the headers of the response -received from the proxy server to the CONNECT request -sent to set the tunnel. + Returns a dictionary with the headers of the response + received from the proxy server to the CONNECT request + sent to set the tunnel. -If the CONNECT request was not sent, the method returns None. -""" + If the CONNECT request was not sent, the method returns None. + """ def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: """Set up host and port for HTTP CONNECT tunnelling. -In a connection that uses HTTP CONNECT tunnelling, the host passed to -the constructor is used as a proxy server that relays all communication -to the endpoint passed to `set_tunnel`. This done by sending an HTTP -CONNECT request to the proxy server when the connection is established. + In a connection that uses HTTP CONNECT tunnelling, the host passed to + the constructor is used as a proxy server that relays all communication + to the endpoint passed to `set_tunnel`. This done by sending an HTTP + CONNECT request to the proxy server when the connection is established. -This method must be called before the HTTP connection has been -established. + This method must be called before the HTTP connection has been + established. -The headers argument should be a mapping of extra HTTP headers to send -with the CONNECT request. + The headers argument should be a mapping of extra HTTP headers to send + with the CONNECT request. + + As HTTP/1.1 is used for HTTP CONNECT tunnelling request, as per the RFC + (https://tools.ietf.org/html/rfc7231#section-4.3.6), a HTTP Host: + header must be provided, matching the authority-form of the request + target provided as the destination for the CONNECT request. If a + HTTP Host: header is not provided via the headers argument, one + is generated and transmitted automatically. + """ -As HTTP/1.1 is used for HTTP CONNECT tunnelling request, as per the RFC -(https://tools.ietf.org/html/rfc7231#section-4.3.6), a HTTP Host: -header must be provided, matching the authority-form of the request -target provided as the destination for the CONNECT request. If a -HTTP Host: header is not provided via the headers argument, one -is generated and transmitted automatically. -""" def connect(self) -> None: - """Connect to the host and port specified in __init__. 
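set_tunnel() is the hook for the HTTP CONNECT proxying described in the docstring above; a sketch, assuming a forward proxy at a placeholder host and port:

import http.client

# Tunnel an HTTPS request through a forward proxy via HTTP CONNECT.
# "proxy.internal" and port 3128 are placeholder values.
conn = http.client.HTTPSConnection("proxy.internal", 3128)
conn.set_tunnel("www.python.org", 443)  # must be called before connecting
conn.request("HEAD", "/")
resp = conn.getresponse()
print(resp.status, resp.reason)
conn.close()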
-""" + """Connect to the host and port specified in __init__.""" + def close(self) -> None: - """Close the connection to the HTTP server. -""" + """Close the connection to the HTTP server.""" + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: """Send a request to the server. -'method' specifies an HTTP request method, e.g. 'GET'. -'url' specifies the object being requested, e.g. '/index.html'. -'skip_host' if True does not add automatically a 'Host:' header -'skip_accept_encoding' if True does not add automatically an - 'Accept-Encoding:' header -""" + 'method' specifies an HTTP request method, e.g. 'GET'. + 'url' specifies the object being requested, e.g. '/index.html'. + 'skip_host' if True does not add automatically a 'Host:' header + 'skip_accept_encoding' if True does not add automatically an + 'Accept-Encoding:' header + """ + def putheader(self, header: str | bytes, *values: _HeaderValue) -> None: """Send a request header line to the server. -For example: h.putheader('Accept', 'text/html') -""" + For example: h.putheader('Accept', 'text/html') + """ + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: """Indicate that the last header line has been sent to the server. -This method sends the request to the server. The optional message_body -argument can be used to pass a message body associated with the -request. -""" + This method sends the request to the server. The optional message_body + argument can be used to pass a message body associated with the + request. + """ + def send(self, data: _DataType | str) -> None: """Send 'data' to the server. -``data`` can be a string object, a bytes object, an array object, a -file-like object that supports a .read() method, or an iterable object. -""" + ``data`` can be a string object, a bytes object, an array object, a + file-like object that supports a .read() method, or an iterable object. + """ class HTTPSConnection(HTTPConnection): - """This class allows communication via SSL. -""" + """This class allows communication via SSL.""" + # Can be `None` if `.connect()` was not called: sock: ssl.SSLSocket | MaybeNone if sys.version_info >= (3, 12): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi index 083be4b39f145..a2f876c853e06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookiejar.pyi @@ -24,6 +24,7 @@ http://wwwsearch.sf.net/): MSIECookieJar """ + import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -50,9 +51,10 @@ class LoadError(OSError): ... class CookieJar: """Collection of HTTP cookies. -You may not need to know about this class: try -urllib.request.build_opener(HTTPCookieProcessor).open(url). -""" + You may not need to know about this class: try + urllib.request.build_opener(HTTPCookieProcessor).open(url). + """ + non_word_re: ClassVar[Pattern[str]] # undocumented quote_re: ClassVar[Pattern[str]] # undocumented strict_domain_re: ClassVar[Pattern[str]] # undocumented @@ -63,166 +65,173 @@ urllib.request.build_opener(HTTPCookieProcessor).open(url). def add_cookie_header(self, request: Request) -> None: """Add correct Cookie: header to request (urllib.request.Request object). -The Cookie2 header is also added unless policy.hide_cookie2 is true. + The Cookie2 header is also added unless policy.hide_cookie2 is true. 
+ + """ -""" def extract_cookies(self, response: HTTPResponse, request: Request) -> None: - """Extract cookies from response, where allowable given the request. -""" + """Extract cookies from response, where allowable given the request.""" + def set_policy(self, policy: CookiePolicy) -> None: ... def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: - """Return sequence of Cookie objects extracted from response object. -""" + """Return sequence of Cookie objects extracted from response object.""" + def set_cookie(self, cookie: Cookie) -> None: - """Set a cookie, without checking whether or not it should be set. -""" + """Set a cookie, without checking whether or not it should be set.""" + def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: - """Set a cookie if policy says it's OK to do so. -""" + """Set a cookie if policy says it's OK to do so.""" + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: """Clear some cookies. -Invoking this method without arguments will clear all cookies. If -given a single argument, only cookies belonging to that domain will be -removed. If given two arguments, cookies belonging to the specified -path within that domain are removed. If given three arguments, then -the cookie with the specified name, path and domain is removed. + Invoking this method without arguments will clear all cookies. If + given a single argument, only cookies belonging to that domain will be + removed. If given two arguments, cookies belonging to the specified + path within that domain are removed. If given three arguments, then + the cookie with the specified name, path and domain is removed. -Raises KeyError if no matching cookie exists. + Raises KeyError if no matching cookie exists. + + """ -""" def clear_session_cookies(self) -> None: """Discard all session cookies. -Note that the .save() method won't save session cookies anyway, unless -you ask otherwise by passing a true ignore_discard argument. + Note that the .save() method won't save session cookies anyway, unless + you ask otherwise by passing a true ignore_discard argument. + + """ -""" def clear_expired_cookies(self) -> None: # undocumented """Discard all expired cookies. -You probably don't need to call this method: expired cookies are never -sent back to the server (provided you're using DefaultCookiePolicy), -this method is called by CookieJar itself every so often, and the -.save() method won't save expired cookies anyway (unless you ask -otherwise by passing a true ignore_expires argument). + You probably don't need to call this method: expired cookies are never + sent back to the server (provided you're using DefaultCookiePolicy), + this method is called by CookieJar itself every so often, and the + .save() method won't save expired cookies anyway (unless you ask + otherwise by passing a true ignore_expires argument). + + """ -""" def __iter__(self) -> Iterator[Cookie]: ... def __len__(self) -> int: - """Return number of contained cookies. -""" + """Return number of contained cookies.""" class FileCookieJar(CookieJar): - """CookieJar that can be loaded from and saved to a file. -""" + """CookieJar that can be loaded from and saved to a file.""" + filename: str | None delayload: bool def __init__(self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: """ -Cookies are NOT loaded from the named file until either the .load() or -.revert() method is called. 
+ Cookies are NOT loaded from the named file until either the .load() or + .revert() method is called. + + """ -""" def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: - """Save cookies to a file. -""" + """Save cookies to a file.""" + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: - """Load cookies from a file. -""" + """Load cookies from a file.""" + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: """Clear all cookies and reload cookies from a saved file. -Raises LoadError (or OSError) if reversion is not successful; the -object's state will not be altered if this happens. + Raises LoadError (or OSError) if reversion is not successful; the + object's state will not be altered if this happens. -""" + """ class MozillaCookieJar(FileCookieJar): """ -WARNING: you may want to backup your browser's cookies file if you use -this class to save cookies. I *think* it works, but there have been -bugs in the past! + WARNING: you may want to backup your browser's cookies file if you use + this class to save cookies. I *think* it works, but there have been + bugs in the past! -This class differs from CookieJar only in the format it uses to save and -load cookies to and from a file. This class uses the Mozilla/Netscape -'cookies.txt' format. curl and lynx use this file format, too. + This class differs from CookieJar only in the format it uses to save and + load cookies to and from a file. This class uses the Mozilla/Netscape + 'cookies.txt' format. curl and lynx use this file format, too. -Don't expect cookies saved while the browser is running to be noticed by -the browser (in fact, Mozilla on unix will overwrite your saved cookies if -you change them on disk while it's running; on Windows, you probably can't -save at all while the browser is running). + Don't expect cookies saved while the browser is running to be noticed by + the browser (in fact, Mozilla on unix will overwrite your saved cookies if + you change them on disk while it's running; on Windows, you probably can't + save at all while the browser is running). -Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to -Netscape cookies on saving. + Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to + Netscape cookies on saving. -In particular, the cookie version and port number information is lost, -together with information about whether or not Path, Port and Discard were -specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the -domain as set in the HTTP header started with a dot (yes, I'm aware some -domains in Netscape files start with a dot and some don't -- trust me, you -really don't want to know any more about this). + In particular, the cookie version and port number information is lost, + together with information about whether or not Path, Port and Discard were + specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the + domain as set in the HTTP header started with a dot (yes, I'm aware some + domains in Netscape files start with a dot and some don't -- trust me, you + really don't want to know any more about this). -Note that though Mozilla and Netscape use the same format, they use -slightly different headers. The class saves cookies using the Netscape -header by default (Mozilla can cope with that). 
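FileCookieJar subclasses such as MozillaCookieJar add load()/save()/revert() on top of CookieJar; a typical persistence sketch (the cookies.txt path is a placeholder):

from http.cookiejar import MozillaCookieJar

jar = MozillaCookieJar("cookies.txt")
try:
    jar.load(ignore_discard=True, ignore_expires=True)
except FileNotFoundError:
    pass  # first run: nothing saved yet
# ... use the jar with an opener, then persist it again:
jar.save(ignore_discard=True, ignore_expires=True)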
+ Note that though Mozilla and Netscape use the same format, they use + slightly different headers. The class saves cookies using the Netscape + header by default (Mozilla can cope with that). + + """ -""" if sys.version_info < (3, 10): header: ClassVar[str] # undocumented class LWPCookieJar(FileCookieJar): """ -The LWPCookieJar saves a sequence of "Set-Cookie3" lines. -"Set-Cookie3" is the format used by the libwww-perl library, not known -to be compatible with any browser, but which is easy to read and -doesn't lose information about RFC 2965 cookies. + The LWPCookieJar saves a sequence of "Set-Cookie3" lines. + "Set-Cookie3" is the format used by the libwww-perl library, not known + to be compatible with any browser, but which is easy to read and + doesn't lose information about RFC 2965 cookies. -Additional methods + Additional methods -as_lwp_str(ignore_discard=True, ignore_expired=True) + as_lwp_str(ignore_discard=True, ignore_expired=True) + + """ -""" def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: # undocumented """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. -ignore_discard and ignore_expires: see docstring for FileCookieJar.save + ignore_discard and ignore_expires: see docstring for FileCookieJar.save -""" + """ class CookiePolicy: """Defines which cookies get accepted from and returned to server. -May also modify cookies, though this is probably a bad idea. + May also modify cookies, though this is probably a bad idea. -The subclass DefaultCookiePolicy defines the standard rules for Netscape -and RFC 2965 cookies -- override that if you want a customized policy. + The subclass DefaultCookiePolicy defines the standard rules for Netscape + and RFC 2965 cookies -- override that if you want a customized policy. + + """ -""" netscape: bool rfc2965: bool hide_cookie2: bool def set_ok(self, cookie: Cookie, request: Request) -> bool: """Return true if (and only if) cookie should be accepted from server. -Currently, pre-expired cookies never get this far -- the CookieJar -class deletes such cookies itself. + Currently, pre-expired cookies never get this far -- the CookieJar + class deletes such cookies itself. + + """ -""" def return_ok(self, cookie: Cookie, request: Request) -> bool: - """Return true if (and only if) cookie should be returned to server. -""" + """Return true if (and only if) cookie should be returned to server.""" + def domain_return_ok(self, domain: str, request: Request) -> bool: - """Return false if cookies should not be returned, given cookie domain. - """ + """Return false if cookies should not be returned, given cookie domain.""" + def path_return_ok(self, path: str, request: Request) -> bool: - """Return false if cookies should not be returned, given cookie path. - """ + """Return false if cookies should not be returned, given cookie path.""" class DefaultCookiePolicy(CookiePolicy): - """Implements the standard rules for accepting and returning cookies. -""" + """Implements the standard rules for accepting and returning cookies.""" + rfc2109_as_netscape: bool strict_domain: bool strict_rfc2965_unverifiable: bool @@ -251,21 +260,21 @@ class DefaultCookiePolicy(CookiePolicy): strict_ns_set_path: bool = False, secure_protocols: Sequence[str] = ("https", "wss"), ) -> None: - """Constructor arguments should be passed as keyword arguments only. 
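DefaultCookiePolicy takes its configuration as keyword arguments, as the constructor docstring requires; a sketch that blocks a couple of placeholder domains (DomainStrict is one of the class-level strictness constants):

from http.cookiejar import CookieJar, DefaultCookiePolicy

policy = DefaultCookiePolicy(
    blocked_domains=["ads.example", ".tracker.example"],
    strict_ns_domain=DefaultCookiePolicy.DomainStrict,
)
jar = CookieJar(policy=policy)
print(policy.is_blocked("ads.example"))  # True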
-""" + """Constructor arguments should be passed as keyword arguments only.""" + def blocked_domains(self) -> tuple[str, ...]: - """Return the sequence of blocked domains (as a tuple). -""" + """Return the sequence of blocked domains (as a tuple).""" + def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: - """Set the sequence of blocked domains. -""" + """Set the sequence of blocked domains.""" + def is_blocked(self, domain: str) -> bool: ... def allowed_domains(self) -> tuple[str, ...] | None: - """Return None, or the sequence of allowed domains (as a tuple). -""" + """Return None, or the sequence of allowed domains (as a tuple).""" + def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: - """Set the sequence of allowed domains, or None. -""" + """Set the sequence of allowed domains, or None.""" + def is_not_allowed(self, domain: str) -> bool: ... def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented def set_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented @@ -283,20 +292,21 @@ class DefaultCookiePolicy(CookiePolicy): class Cookie: """HTTP Cookie. -This class represents both Netscape and RFC 2965 cookies. + This class represents both Netscape and RFC 2965 cookies. -This is deliberately a very simple class. It just holds attributes. It's -possible to construct Cookie instances that don't comply with the cookie -standards. CookieJar.make_cookies is the factory function for Cookie -objects -- it deals with cookie parsing, supplying defaults, and -normalising to the representation used in this class. CookiePolicy is -responsible for checking them to see whether they should be accepted from -and returned to the server. + This is deliberately a very simple class. It just holds attributes. It's + possible to construct Cookie instances that don't comply with the cookie + standards. CookieJar.make_cookies is the factory function for Cookie + objects -- it deals with cookie parsing, supplying defaults, and + normalising to the representation used in this class. CookiePolicy is + responsible for checking them to see whether they should be accepted from + and returned to the server. -Note that the port may be present in the headers, but unspecified ("Port" -rather than"Port=80", for example); if this is the case, port is None. + Note that the port may be present in the headers, but unspecified ("Port" + rather than"Port=80", for example); if this is the case, port is None. + + """ -""" version: int | None name: str value: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi index b07dad10db629..e07ddcccae82f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/cookies.pyi @@ -87,6 +87,7 @@ the value to a string, when the values are set dictionary-style. Finis. """ + from collections.abc import Iterable, Mapping from types import GenericAlias from typing import Any, Generic, TypeVar, overload @@ -101,10 +102,11 @@ _T = TypeVar("_T") def _quote(str: None) -> None: """Quote a string for use in a cookie header. -If the string does not need to be double-quoted, then just return the -string. Otherwise, surround the string in doublequotes and quote -(with a \\) special characters. -""" + If the string does not need to be double-quoted, then just return the + string. 
Otherwise, surround the string in doublequotes and quote + (with a \\) special characters. + """ + @overload def _quote(str: str) -> str: ... @overload @@ -117,11 +119,12 @@ class CookieError(Exception): ... class Morsel(dict[str, Any], Generic[_T]): """A class to hold ONE (key, value) pair. -In a cookie, each such pair may have several attributes, so this class is -used to keep the attributes associated with the appropriate key,value pair. -This class also includes a coded_value attribute, which is used to hold -the network representation of the value. -""" + In a cookie, each such pair may have several attributes, so this class is + used to keep the attributes associated with the appropriate key,value pair. + This class also includes a coded_value attribute, which is used to hold + the network representation of the value. + """ + @property def value(self) -> str: ... @property @@ -146,47 +149,48 @@ the network representation of the value. def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): - """A container class for a set of Morsels. -""" + """A container class for a set of Morsels.""" + def __init__(self, input: _DataType | None = None) -> None: ... def value_decode(self, val: str) -> tuple[_T, str]: """real_value, coded_value = value_decode(STRING) -Called prior to setting a cookie's value from the network -representation. The VALUE is the value read from HTTP -header. -Override this function to modify the behavior of cookies. -""" + Called prior to setting a cookie's value from the network + representation. The VALUE is the value read from HTTP + header. + Override this function to modify the behavior of cookies. + """ + def value_encode(self, val: _T) -> tuple[_T, str]: """real_value, coded_value = value_encode(VALUE) -Called prior to setting a cookie's value from the dictionary -representation. The VALUE is the value being assigned. -Override this function to modify the behavior of cookies. -""" + Called prior to setting a cookie's value from the dictionary + representation. The VALUE is the value being assigned. + Override this function to modify the behavior of cookies. + """ + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: - """Return a string suitable for HTTP. -""" + """Return a string suitable for HTTP.""" __str__ = output def js_output(self, attrs: list[str] | None = None) -> str: - """Return a string suitable for JavaScript. -""" + """Return a string suitable for JavaScript.""" + def load(self, rawdata: _DataType) -> None: """Load cookies from a string (presumably HTTP_COOKIE) or -from a dictionary. Loading cookies from a dictionary 'd' -is equivalent to calling: - map(Cookie.__setitem__, d.keys(), d.values()) -""" + from a dictionary. Loading cookies from a dictionary 'd' + is equivalent to calling: + map(Cookie.__setitem__, d.keys(), d.values()) + """ + def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: - """Dictionary style assignment. -""" + """Dictionary style assignment.""" class SimpleCookie(BaseCookie[str]): """ -SimpleCookie supports strings as cookie values. When setting -the value using the dictionary assignment notation, SimpleCookie -calls the builtin str() to convert the value to a string. Values -received from HTTP are kept as strings. 
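SimpleCookie (with its Morsel values) from http.cookies parses Cookie headers and renders Set-Cookie lines; a short sketch:

from http.cookies import SimpleCookie

jar = SimpleCookie()
jar.load("session=abc123; theme=dark")
print(jar["session"].value)  # abc123

jar["token"] = "xyz"
jar["token"]["path"] = "/"
jar["token"]["httponly"] = True
print(jar.output())  # one "Set-Cookie: ..." line per morsel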
-""" + SimpleCookie supports strings as cookie values. When setting + the value using the dictionary assignment notation, SimpleCookie + calls the builtin str() to convert the value to a string. Values + received from HTTP are kept as strings. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi index cd23a69ebc198..f558baca6b5e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/http/server.pyi @@ -30,6 +30,7 @@ XXX To do: - log user-agent header and other interesting goodies - send error log to separate file """ + import _socket import email.message import io @@ -91,113 +92,113 @@ if sys.version_info >= (3, 14): alpn_protocols: Iterable[str] | None = None, ) -> None: ... def server_activate(self) -> None: - """Wrap the socket in SSLSocket. -""" + """Wrap the socket in SSLSocket.""" class ThreadingHTTPSServer(socketserver.ThreadingMixIn, HTTPSServer): ... class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): """HTTP request handler base class. -The following explanation of HTTP serves to guide you through the -code as well as to expose any misunderstandings I may have about -HTTP (so you don't need to read the code to figure out I'm wrong -:-). + The following explanation of HTTP serves to guide you through the + code as well as to expose any misunderstandings I may have about + HTTP (so you don't need to read the code to figure out I'm wrong + :-). -HTTP (HyperText Transfer Protocol) is an extensible protocol on -top of a reliable stream transport (e.g. TCP/IP). The protocol -recognizes three parts to a request: + HTTP (HyperText Transfer Protocol) is an extensible protocol on + top of a reliable stream transport (e.g. TCP/IP). The protocol + recognizes three parts to a request: -1. One line identifying the request type and path -2. An optional set of RFC-822-style headers -3. An optional data part + 1. One line identifying the request type and path + 2. An optional set of RFC-822-style headers + 3. An optional data part -The headers and data are separated by a blank line. + The headers and data are separated by a blank line. -The first line of the request has the form + The first line of the request has the form - + -where is a (case-sensitive) keyword such as GET or POST, - is a string containing path information for the request, -and should be the string "HTTP/1.0" or "HTTP/1.1". - is encoded using the URL encoding scheme (using %xx to signify -the ASCII character with hex code xx). + where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, + and should be the string "HTTP/1.0" or "HTTP/1.1". + is encoded using the URL encoding scheme (using %xx to signify + the ASCII character with hex code xx). -The specification specifies that lines are separated by CRLF but -for compatibility with the widest range of clients recommends -servers also handle LF. Similarly, whitespace in the request line -is treated sensibly (allowing multiple spaces between components -and allowing trailing whitespace). + The specification specifies that lines are separated by CRLF but + for compatibility with the widest range of clients recommends + servers also handle LF. Similarly, whitespace in the request line + is treated sensibly (allowing multiple spaces between components + and allowing trailing whitespace). 
-Similarly, for output, lines ought to be separated by CRLF pairs -but most clients grok LF characters just fine. + Similarly, for output, lines ought to be separated by CRLF pairs + but most clients grok LF characters just fine. -If the first line of the request has the form + If the first line of the request has the form - + -(i.e. is left out) then this is assumed to be an HTTP -0.9 request; this form has no optional headers and data part and -the reply consists of just the data. + (i.e. is left out) then this is assumed to be an HTTP + 0.9 request; this form has no optional headers and data part and + the reply consists of just the data. -The reply form of the HTTP 1.x protocol again has three parts: + The reply form of the HTTP 1.x protocol again has three parts: -1. One line giving the response code -2. An optional set of RFC-822-style headers -3. The data + 1. One line giving the response code + 2. An optional set of RFC-822-style headers + 3. The data -Again, the headers and data are separated by a blank line. + Again, the headers and data are separated by a blank line. -The response code line has the form + The response code line has the form - + -where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), - is a 3-digit response code indicating success or -failure of the request, and is an optional -human-readable string explaining what the response code means. + where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or + failure of the request, and is an optional + human-readable string explaining what the response code means. -This server parses the request and the headers, and then calls a -function specific to the request type (). Specifically, -a request SPAM will be handled by a method do_SPAM(). If no -such method exists the server sends an error response to the -client. If it exists, it is called with no arguments: + This server parses the request and the headers, and then calls a + function specific to the request type (). Specifically, + a request SPAM will be handled by a method do_SPAM(). If no + such method exists the server sends an error response to the + client. If it exists, it is called with no arguments: -do_SPAM() + do_SPAM() -Note that the request name is case sensitive (i.e. SPAM and spam -are different requests). + Note that the request name is case sensitive (i.e. SPAM and spam + are different requests). -The various request details are stored in instance variables: + The various request details are stored in instance variables: -- client_address is the client IP address in the form (host, -port); + - client_address is the client IP address in the form (host, + port); -- command, path and version are the broken-down request line; + - command, path and version are the broken-down request line; -- headers is an instance of email.message.Message (or a derived -class) containing the header information; + - headers is an instance of email.message.Message (or a derived + class) containing the header information; -- rfile is a file object open for reading positioned at the -start of the optional input data part; + - rfile is a file object open for reading positioned at the + start of the optional input data part; -- wfile is a file object open for writing. + - wfile is a file object open for writing. -IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! -The first thing to be written must be the response line. 
Then -follow 0 or more header lines, then a blank line, and then the -actual data (if any). The meaning of the header lines depends on -the command executed by the server; in most cases, when data is -returned, there should be at least one header line of the form + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form -Content-type: / + Content-type: / -where and should be registered MIME types, -e.g. "text/html" or "text/plain". + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ -""" client_address: tuple[str, int] close_connection: bool requestline: str @@ -218,131 +219,139 @@ e.g. "text/html" or "text/plain". def handle_one_request(self) -> None: """Handle a single HTTP request. -You normally don't need to override this method; see the class -__doc__ string for information on how to handle specific HTTP -commands such as GET and POST. + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. + + """ -""" def handle_expect_100(self) -> bool: """Decide what to do with an "Expect: 100-continue" header. -If the client is expecting a 100 Continue response, we must -respond with either a 100 Continue or a final response before -waiting for the request body. The default is to always respond -with a 100 Continue. You can behave differently (for example, -reject unauthorized requests) by overriding this method. + If the client is expecting a 100 Continue response, we must + respond with either a 100 Continue or a final response before + waiting for the request body. The default is to always respond + with a 100 Continue. You can behave differently (for example, + reject unauthorized requests) by overriding this method. -This method should either return True (possibly after sending -a 100 Continue response) or send an error response and return -False. + This method should either return True (possibly after sending + a 100 Continue response) or send an error response and return + False. + + """ -""" def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: """Send and log an error reply. -Arguments are -* code: an HTTP error code - 3 digits -* message: a simple optional 1 line reason phrase. - *( HTAB / SP / VCHAR / %x80-FF ) - defaults to short entry matching the response code -* explain: a detailed message defaults to the long entry - matching the response code. + Arguments are + * code: an HTTP error code + 3 digits + * message: a simple optional 1 line reason phrase. + *( HTAB / SP / VCHAR / %x80-FF ) + defaults to short entry matching the response code + * explain: a detailed message defaults to the long entry + matching the response code. -This sends an error response (so it must be called before any -output has been generated), logs the error, and finally sends -a piece of HTML explaining the error to the user. + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ -""" def send_response(self, code: int, message: str | None = None) -> None: """Add the response header to the headers buffer and log the -response code. 
+ response code. -Also send two standard headers with the server software -version and the current date. + Also send two standard headers with the server software + version and the current date. + + """ -""" def send_header(self, keyword: str, value: str) -> None: - """Send a MIME header to the headers buffer. -""" + """Send a MIME header to the headers buffer.""" + def send_response_only(self, code: int, message: str | None = None) -> None: - """Send the response header only. -""" + """Send the response header only.""" + def end_headers(self) -> None: - """Send the blank line ending the MIME headers. -""" + """Send the blank line ending the MIME headers.""" + def flush_headers(self) -> None: ... def log_request(self, code: int | str = "-", size: int | str = "-") -> None: """Log an accepted request. -This is called by send_response(). + This is called by send_response(). + + """ -""" def log_error(self, format: str, *args: Any) -> None: """Log an error. -This is called when a request cannot be fulfilled. By -default it passes the message on to log_message(). + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). -Arguments are the same as for log_message(). + Arguments are the same as for log_message(). -XXX This should go to the separate error log. + XXX This should go to the separate error log. + + """ -""" def log_message(self, format: str, *args: Any) -> None: """Log an arbitrary message. -This is used by all other logging functions. Override -it if you have specific logging wishes. + This is used by all other logging functions. Override + it if you have specific logging wishes. -The first argument, FORMAT, is a format string for the -message to be logged. If the format string contains -any % escapes requiring parameters, they should be -specified as subsequent arguments (it's just like -printf!). + The first argument, FORMAT, is a format string for the + message to be logged. If the format string contains + any % escapes requiring parameters, they should be + specified as subsequent arguments (it's just like + printf!). -The client ip and current date/time are prefixed to -every message. + The client ip and current date/time are prefixed to + every message. -Unicode control characters are replaced with escaped hex -before writing the output to stderr. + Unicode control characters are replaced with escaped hex + before writing the output to stderr. + + """ -""" def version_string(self) -> str: - """Return the server software version string. -""" + """Return the server software version string.""" + def date_time_string(self, timestamp: float | None = None) -> str: - """Return the current date and time formatted for a message header. -""" + """Return the current date and time formatted for a message header.""" + def log_date_time_string(self) -> str: - """Return the current time formatted for logging. -""" + """Return the current time formatted for logging.""" + def address_string(self) -> str: - """Return the client address. -""" + """Return the client address.""" + def parse_request(self) -> bool: # undocumented """Parse a request (internal). -The request should be stored in self.raw_requestline; the results -are in self.command, self.path, self.request_version and -self.headers. + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. -Return True for success, False for failure; on failure, any relevant -error response has already been sent back. 
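A minimal BaseHTTPRequestHandler subclass tying together send_response()/send_header()/end_headers()/send_error() and the log_message() hook described above (port and path are placeholders):

from http.server import BaseHTTPRequestHandler, HTTPServer

class HelloHandler(BaseHTTPRequestHandler):
    def do_GET(self) -> None:
        if self.path != "/hello":
            self.send_error(404, explain=f"{self.path} is not served here")
            return
        body = b"hello, world\n"
        self.send_response(200)
        self.send_header("Content-Type", "text/plain; charset=utf-8")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def log_message(self, format: str, *args) -> None:
        # Route request logging somewhere other than stderr if desired.
        pass

if __name__ == "__main__":
    HTTPServer(("127.0.0.1", 8000), HelloHandler).serve_forever()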
+ Return True for success, False for failure; on failure, any relevant + error response has already been sent back. -""" + """ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): """Simple HTTP request handler with GET and HEAD commands. -This serves files from the current directory and any of its -subdirectories. The MIME type for files is determined by -calling the .guess_type() method. + This serves files from the current directory and any of its + subdirectories. The MIME type for files is determined by + calling the .guess_type() method. -The GET and HEAD requests are identical except that the HEAD -request omits the actual contents of the file. + The GET and HEAD requests are identical except that the HEAD + request omits the actual contents of the file. + + """ -""" extensions_map: dict[str, str] if sys.version_info >= (3, 12): index_pages: ClassVar[tuple[str, ...]] @@ -356,152 +365,159 @@ request omits the actual contents of the file. directory: StrPath | None = None, ) -> None: ... def do_GET(self) -> None: - """Serve a GET request. -""" + """Serve a GET request.""" + def do_HEAD(self) -> None: - """Serve a HEAD request. -""" + """Serve a HEAD request.""" + def send_head(self) -> io.BytesIO | BinaryIO | None: # undocumented """Common code for GET and HEAD commands. -This sends the response code and MIME headers. + This sends the response code and MIME headers. -Return value is either a file object (which has to be copied -to the outputfile by the caller unless the command was HEAD, -and must be closed by the caller under all circumstances), or -None, in which case the caller has nothing further to do. + Return value is either a file object (which has to be copied + to the outputfile by the caller unless the command was HEAD, + and must be closed by the caller under all circumstances), or + None, in which case the caller has nothing further to do. + + """ -""" def list_directory(self, path: StrPath) -> io.BytesIO | None: # undocumented """Helper to produce a directory listing (absent index.html). -Return value is either a file object, or None (indicating an -error). In either case, the headers are sent, making the -interface the same as for send_head(). + Return value is either a file object, or None (indicating an + error). In either case, the headers are sent, making the + interface the same as for send_head(). + + """ -""" def translate_path(self, path: str) -> str: # undocumented """Translate a /-separated PATH to the local filename syntax. -Components that mean special things to the local file system -(e.g. drive or directory names) are ignored. (XXX They should -probably be diagnosed.) + Components that mean special things to the local file system + (e.g. drive or directory names) are ignored. (XXX They should + probably be diagnosed.) + + """ -""" def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: # undocumented """Copy all data between two file objects. -The SOURCE argument is a file object open for reading -(or anything with a read() method) and the DESTINATION -argument is a file object open for writing (or -anything with a write() method). + The SOURCE argument is a file object open for reading + (or anything with a read() method) and the DESTINATION + argument is a file object open for writing (or + anything with a write() method). -The only reason for overriding this would be to change -the block size or perhaps to replace newlines by CRLF --- note however that this the default server uses this -to copy binary data as well. 
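SimpleHTTPRequestHandler serves files relative to its directory argument; a sketch using ThreadingHTTPServer and a placeholder directory:

from functools import partial
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer

# Serve ./public read-only on port 8000; "public" is a placeholder path.
handler = partial(SimpleHTTPRequestHandler, directory="public")
with ThreadingHTTPServer(("127.0.0.1", 8000), handler) as httpd:
    httpd.serve_forever()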
+ The only reason for overriding this would be to change + the block size or perhaps to replace newlines by CRLF + -- note however that this the default server uses this + to copy binary data as well. + + """ -""" def guess_type(self, path: StrPath) -> str: # undocumented """Guess the type of a file. -Argument is a PATH (a filename). + Argument is a PATH (a filename). -Return value is a string of the form type/subtype, -usable for a MIME Content-type header. + Return value is a string of the form type/subtype, + usable for a MIME Content-type header. -The default implementation looks the file's extension -up in the table self.extensions_map, using application/octet-stream -as a default; however it would be permissible (if -slow) to look inside the data to make a better guess. + The default implementation looks the file's extension + up in the table self.extensions_map, using application/octet-stream + as a default; however it would be permissible (if + slow) to look inside the data to make a better guess. -""" + """ def executable(path: StrPath) -> bool: # undocumented - """Test for executable file. -""" + """Test for executable file.""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): """Complete HTTP server with GET, HEAD and POST commands. -GET and HEAD also support running CGI scripts. + GET and HEAD also support running CGI scripts. -The POST command is *only* implemented for CGI scripts. + The POST command is *only* implemented for CGI scripts. + + """ -""" cgi_directories: list[str] have_fork: bool # undocumented def do_POST(self) -> None: """Serve a POST request. -This is only implemented for CGI scripts. + This is only implemented for CGI scripts. + + """ -""" def is_cgi(self) -> bool: # undocumented """Test whether self.path corresponds to a CGI script. -Returns True and updates the cgi_info attribute to the tuple -(dir, rest) if self.path requires running a CGI script. -Returns False otherwise. + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. -If any exception is raised, the caller should assume that -self.path was rejected as invalid and act accordingly. + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. -The default implementation tests whether the normalized url -path begins with one of the strings in self.cgi_directories -(and the next character is a '/' or the end of the string). + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). + + """ -""" def is_executable(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is an executable file. -""" + """Test whether argument path is an executable file.""" + def is_python(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is a Python script. -""" + """Test whether argument path is a Python script.""" + def run_cgi(self) -> None: # undocumented - """Execute a CGI script. -""" + """Execute a CGI script.""" else: class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): """Complete HTTP server with GET, HEAD and POST commands. - GET and HEAD also support running CGI scripts. + GET and HEAD also support running CGI scripts. - The POST command is *only* implemented for CGI scripts. 
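guess_type() consults the class-level extensions_map table (before falling back to the mimetypes registry), so a subclass can extend it; a small sketch with a made-up mapping:

from http.server import SimpleHTTPRequestHandler

class StubAwareHandler(SimpleHTTPRequestHandler):
    # Extend the MIME table that guess_type() checks first; ".pyi" mapping is illustrative.
    extensions_map = {**SimpleHTTPRequestHandler.extensions_map, ".pyi": "text/x-python"}

print(StubAwareHandler.extensions_map[".pyi"])  # text/x-python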
+ The POST command is *only* implemented for CGI scripts. + + """ - """ cgi_directories: list[str] have_fork: bool # undocumented def do_POST(self) -> None: """Serve a POST request. - This is only implemented for CGI scripts. + This is only implemented for CGI scripts. + + """ - """ def is_cgi(self) -> bool: # undocumented """Test whether self.path corresponds to a CGI script. - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. + Returns True and updates the cgi_info attribute to the tuple + (dir, rest) if self.path requires running a CGI script. + Returns False otherwise. - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. + If any exception is raised, the caller should assume that + self.path was rejected as invalid and act accordingly. - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). + The default implementation tests whether the normalized url + path begins with one of the strings in self.cgi_directories + (and the next character is a '/' or the end of the string). + + """ - """ def is_executable(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is an executable file. -""" + """Test whether argument path is an executable file.""" + def is_python(self, path: StrPath) -> bool: # undocumented - """Test whether argument path is a Python script. -""" + """Test whether argument path is a Python script.""" + def run_cgi(self) -> None: # undocumented - """Execute a CGI script. -""" + """Execute a CGI script.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi index 34362ab1928ee..dca7377e76f22 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imaplib.pyi @@ -9,6 +9,7 @@ Public functions: Internaldate2tuple ParseFlags Time2Internaldate """ + import subprocess import sys import time @@ -36,50 +37,51 @@ Commands: dict[str, tuple[str, ...]] class IMAP4: """IMAP4 client class. -Instantiate with: IMAP4([host[, port[, timeout=None]]]) - - host - host's name (default: localhost); - port - port number (default: standard IMAP4 port). - timeout - socket timeout (default: None) - If timeout is not given or is None, - the global default socket timeout is used - -All IMAP4rev1 commands are supported by methods of the same -name (in lowercase). - -All arguments to commands are converted to strings, except for -AUTHENTICATE, and the last argument to APPEND which is passed as -an IMAP4 literal. If necessary (the string contains any -non-printing characters or white-space and isn't enclosed with -either parentheses or double quotes) each string is quoted. -However, the 'password' argument to the LOGIN command is always -quoted. If you want to avoid having an argument string quoted -(eg: the 'flags' argument to STORE) then enclose the string in -parentheses (eg: "(\\Deleted)"). - -Each command returns a tuple: (type, [data, ...]) where 'type' -is usually 'OK' or 'NO', and 'data' is either the text from the -tagged response, or untagged results from command. Each 'data' -is either a string, or a tuple. If a tuple, then the first part -is the header of the response, and the second part contains -the data (ie: 'literal' value). 
- -Errors raise the exception class .error(""). -IMAP4 server errors raise .abort(""), -which is a sub-class of 'error'. Mailbox status changes -from READ-WRITE to READ-ONLY raise the exception class -.readonly(""), which is a sub-class of 'abort'. - -"error" exceptions imply a program error. -"abort" exceptions imply the connection should be reset, and - the command re-tried. -"readonly" exceptions imply the command should be re-tried. - -Note: to use this module, you must read the RFCs pertaining to the -IMAP4 protocol, as the semantics of the arguments to each IMAP4 -command are left to the invoker, not to mention the results. Also, -most IMAP servers implement a sub-set of the commands available here. -""" + Instantiate with: IMAP4([host[, port[, timeout=None]]]) + + host - host's name (default: localhost); + port - port number (default: standard IMAP4 port). + timeout - socket timeout (default: None) + If timeout is not given or is None, + the global default socket timeout is used + + All IMAP4rev1 commands are supported by methods of the same + name (in lowercase). + + All arguments to commands are converted to strings, except for + AUTHENTICATE, and the last argument to APPEND which is passed as + an IMAP4 literal. If necessary (the string contains any + non-printing characters or white-space and isn't enclosed with + either parentheses or double quotes) each string is quoted. + However, the 'password' argument to the LOGIN command is always + quoted. If you want to avoid having an argument string quoted + (eg: the 'flags' argument to STORE) then enclose the string in + parentheses (eg: "(\\Deleted)"). + + Each command returns a tuple: (type, [data, ...]) where 'type' + is usually 'OK' or 'NO', and 'data' is either the text from the + tagged response, or untagged results from command. Each 'data' + is either a string, or a tuple. If a tuple, then the first part + is the header of the response, and the second part contains + the data (ie: 'literal' value). + + Errors raise the exception class .error(""). + IMAP4 server errors raise .abort(""), + which is a sub-class of 'error'. Mailbox status changes + from READ-WRITE to READ-ONLY raise the exception class + .readonly(""), which is a sub-class of 'abort'. + + "error" exceptions imply a program error. + "abort" exceptions imply the connection should be reset, and + the command re-tried. + "readonly" exceptions imply the command should be re-tried. + + Note: to use this module, you must read the RFCs pertaining to the + IMAP4 protocol, as the semantics of the arguments to each IMAP4 + command are left to the invoker, not to mention the results. Also, + most IMAP servers implement a sub-set of the commands available here. + """ + class error(Exception): ... class abort(error): ... class readonly(abort): ... @@ -100,10 +102,10 @@ most IMAP servers implement a sub-set of the commands available here. def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: """Setup connection to remote server on "host:port" - (default: localhost:standard IMAP4 port). -This connection will be used by the routines: - read, readline, send, shutdown. -""" + (default: localhost:standard IMAP4 port). + This connection will be used by the routines: + read, readline, send, shutdown. 
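The IMAP4 command methods all return (typ, data) tuples as described in the class docstring; a typical session sketch (server, credentials, and mailbox are placeholders; IMAP4_SSL is the TLS variant from the same module):

import imaplib

with imaplib.IMAP4_SSL("imap.example.org") as imap:
    imap.login("user@example.org", "app-password")
    typ, data = imap.select("INBOX")
    print(typ, data)  # e.g. OK [b'42']  (message count)
    typ, nums = imap.search(None, "UNSEEN")
    for num in nums[0].split():
        typ, msg_data = imap.fetch(num, "(RFC822)")
        print(num, len(msg_data[0][1]), "bytes")
    imap.logout()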
+ """ if sys.version_info >= (3, 14): @property @deprecated("IMAP4.file is unsupported, can cause errors, and may be removed.") @@ -116,327 +118,371 @@ This connection will be used by the routines: port: int sock: _socket def read(self, size: int) -> bytes: - """Read 'size' bytes from remote. -""" + """Read 'size' bytes from remote.""" + def readline(self) -> bytes: - """Read line from remote. -""" + """Read line from remote.""" + def send(self, data: ReadableBuffer) -> None: - """Send data to remote. -""" + """Send data to remote.""" + def shutdown(self) -> None: - """Close I/O established in "open". -""" + """Close I/O established in "open".""" + def socket(self) -> _socket: """Return socket instance used to connect to IMAP4 server. -socket = .socket() -""" + socket = .socket() + """ + def recent(self) -> _CommandResults: """Return most recent 'RECENT' responses if any exist, -else prompt server for an update using the 'NOOP' command. + else prompt server for an update using the 'NOOP' command. -(typ, [data]) = .recent() + (typ, [data]) = .recent() + + 'data' is None if no new messages, + else list of RECENT responses, most recent last. + """ -'data' is None if no new messages, -else list of RECENT responses, most recent last. -""" def response(self, code: str) -> _CommandResults: """Return data for response 'code' if received, or None. -Old value for response 'code' is cleared. + Old value for response 'code' is cleared. + + (code, [data]) = .response(code) + """ -(code, [data]) = .response(code) -""" def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: """Append message to named mailbox. -(typ, [data]) = .append(mailbox, flags, date_time, message) + (typ, [data]) = .append(mailbox, flags, date_time, message) + + All args except 'message' can be None. + """ - All args except 'message' can be None. -""" def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: """Authenticate command - requires response processing. -'mechanism' specifies which authentication mechanism is to -be used - it must appear in .capabilities in the -form AUTH=. + 'mechanism' specifies which authentication mechanism is to + be used - it must appear in .capabilities in the + form AUTH=. -'authobject' must be a callable object: + 'authobject' must be a callable object: - data = authobject(response) + data = authobject(response) + + It will be called to process server continuation responses; the + response argument it is passed will be a bytes. It should return bytes + data that will be base64 encoded and sent to the server. It should + return None if the client abort response '*' should be sent instead. + """ -It will be called to process server continuation responses; the -response argument it is passed will be a bytes. It should return bytes -data that will be base64 encoded and sent to the server. It should -return None if the client abort response '*' should be sent instead. -""" def capability(self) -> _CommandResults: """(typ, [data]) = .capability() -Fetch capabilities list from server. -""" + Fetch capabilities list from server. + """ + def check(self) -> _CommandResults: """Checkpoint mailbox on server. -(typ, [data]) = .check() -""" + (typ, [data]) = .check() + """ + def close(self) -> _CommandResults: """Close currently selected mailbox. -Deleted messages are removed from writable mailbox. -This is the recommended command before 'LOGOUT'. + Deleted messages are removed from writable mailbox. 
+ This is the recommended command before 'LOGOUT'. + + (typ, [data]) = .close() + """ -(typ, [data]) = .close() -""" def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: """Copy 'message_set' messages onto end of 'new_mailbox'. -(typ, [data]) = .copy(message_set, new_mailbox) -""" + (typ, [data]) = .copy(message_set, new_mailbox) + """ + def create(self, mailbox: str) -> _CommandResults: """Create new mailbox. -(typ, [data]) = .create(mailbox) -""" + (typ, [data]) = .create(mailbox) + """ + def delete(self, mailbox: str) -> _CommandResults: """Delete old mailbox. -(typ, [data]) = .delete(mailbox) -""" + (typ, [data]) = .delete(mailbox) + """ + def deleteacl(self, mailbox: str, who: str) -> _CommandResults: """Delete the ACLs (remove any rights) set for who on mailbox. -(typ, [data]) = .deleteacl(mailbox, who) -""" + (typ, [data]) = .deleteacl(mailbox, who) + """ + def enable(self, capability: str) -> _CommandResults: """Send an RFC5161 enable string to the server. -(typ, [data]) = .enable(capability) -""" + (typ, [data]) = .enable(capability) + """ + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def expunge(self) -> _CommandResults: """Permanently remove deleted items from selected mailbox. -Generates 'EXPUNGE' response for each deleted message. + Generates 'EXPUNGE' response for each deleted message. -(typ, [data]) = .expunge() + (typ, [data]) = .expunge() + + 'data' is list of 'EXPUNGE'd message numbers in order received. + """ -'data' is list of 'EXPUNGE'd message numbers in order received. -""" def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: """Fetch (parts of) messages. -(typ, [data, ...]) = .fetch(message_set, message_parts) + (typ, [data, ...]) = .fetch(message_set, message_parts) -'message_parts' should be a string of selected parts -enclosed in parentheses, eg: "(UID BODY[TEXT])". + 'message_parts' should be a string of selected parts + enclosed in parentheses, eg: "(UID BODY[TEXT])". + + 'data' are tuples of message part envelope and data. + """ -'data' are tuples of message part envelope and data. -""" def getacl(self, mailbox: str) -> _CommandResults: """Get the ACLs for a mailbox. -(typ, [data]) = .getacl(mailbox) -""" + (typ, [data]) = .getacl(mailbox) + """ + def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: """(typ, [data]) = .getannotation(mailbox, entry, attribute) -Retrieve ANNOTATIONs. -""" + Retrieve ANNOTATIONs. + """ + def getquota(self, root: str) -> _CommandResults: """Get the quota root's resource usage and limits. -Part of the IMAP4 QUOTA extension defined in rfc2087. + Part of the IMAP4 QUOTA extension defined in rfc2087. + + (typ, [data]) = .getquota(root) + """ -(typ, [data]) = .getquota(root) -""" def getquotaroot(self, mailbox: str) -> _CommandResults: """Get the list of quota roots for the named mailbox. -(typ, [[QUOTAROOT responses...], [QUOTA responses]]) = .getquotaroot(mailbox) -""" + (typ, [[QUOTAROOT responses...], [QUOTA responses]]) = .getquotaroot(mailbox) + """ if sys.version_info >= (3, 14): def idle(self, duration: float | None = None) -> Idler: """Return an iterable IDLE context manager producing untagged responses. -If the argument is not None, limit iteration to 'duration' seconds. + If the argument is not None, limit iteration to 'duration' seconds. 
-with M.idle(duration=29 * 60) as idler: - for typ, data in idler: - print(typ, data) + with M.idle(duration=29 * 60) as idler: + for typ, data in idler: + print(typ, data) -Note: 'duration' requires a socket connection (not IMAP4_stream). -""" + Note: 'duration' requires a socket connection (not IMAP4_stream). + """ def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: """List mailbox names in directory matching pattern. -(typ, [data]) = .list(directory='""', pattern='*') + (typ, [data]) = .list(directory='""', pattern='*') + + 'data' is list of LIST responses. + """ -'data' is list of LIST responses. -""" def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: """Identify client using plaintext password. -(typ, [data]) = .login(user, password) + (typ, [data]) = .login(user, password) + + NB: 'password' will be quoted. + """ -NB: 'password' will be quoted. -""" def login_cram_md5(self, user: str, password: str) -> _CommandResults: """Force use of CRAM-MD5 authentication. -(typ, [data]) = .login_cram_md5(user, password) -""" + (typ, [data]) = .login_cram_md5(user, password) + """ + def logout(self) -> tuple[str, _AnyResponseData]: """Shutdown connection to server. -(typ, [data]) = .logout() + (typ, [data]) = .logout() + + Returns server 'BYE' response. + """ -Returns server 'BYE' response. -""" def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: """List 'subscribed' mailbox names in directory matching pattern. -(typ, [data, ...]) = .lsub(directory='""', pattern='*') + (typ, [data, ...]) = .lsub(directory='""', pattern='*') + + 'data' are tuples of message part envelope and data. + """ -'data' are tuples of message part envelope and data. -""" def myrights(self, mailbox: str) -> _CommandResults: """Show my ACLs for a mailbox (i.e. the rights that I have on mailbox). -(typ, [data]) = .myrights(mailbox) -""" + (typ, [data]) = .myrights(mailbox) + """ + def namespace(self) -> _CommandResults: """Returns IMAP namespaces ala rfc2342 -(typ, [data, ...]) = .namespace() -""" + (typ, [data, ...]) = .namespace() + """ + def noop(self) -> tuple[str, _list[bytes]]: """Send NOOP command. -(typ, [data]) = .noop() -""" + (typ, [data]) = .noop() + """ + def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: """Fetch truncated part of a message. -(typ, [data, ...]) = .partial(message_num, message_part, start, length) + (typ, [data, ...]) = .partial(message_num, message_part, start, length) + + 'data' is tuple of message part envelope and data. + """ -'data' is tuple of message part envelope and data. -""" def proxyauth(self, user: str) -> _CommandResults: """Assume authentication as "user". -Allows an authorised administrator to proxy into any user's -mailbox. + Allows an authorised administrator to proxy into any user's + mailbox. + + (typ, [data]) = .proxyauth(user) + """ -(typ, [data]) = .proxyauth(user) -""" def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: """Rename old mailbox name to new. -(typ, [data]) = .rename(oldmailbox, newmailbox) -""" + (typ, [data]) = .rename(oldmailbox, newmailbox) + """ + def search(self, charset: str | None, *criteria: str) -> _CommandResults: """Search mailbox for matching messages. -(typ, [data]) = .search(charset, criterion, ...) + (typ, [data]) = .search(charset, criterion, ...) + + 'data' is space separated list of matching message numbers. + If UTF8 is enabled, charset MUST be None. 
+ """ -'data' is space separated list of matching message numbers. -If UTF8 is enabled, charset MUST be None. -""" def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: """Select a mailbox. -Flush all untagged responses. + Flush all untagged responses. -(typ, [data]) = .select(mailbox='INBOX', readonly=False) + (typ, [data]) = .select(mailbox='INBOX', readonly=False) -'data' is count of messages in mailbox ('EXISTS' response). + 'data' is count of messages in mailbox ('EXISTS' response). + + Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so + other responses should be obtained via .response('FLAGS') etc. + """ -Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so -other responses should be obtained via .response('FLAGS') etc. -""" def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: """Set a mailbox acl. -(typ, [data]) = .setacl(mailbox, who, what) -""" + (typ, [data]) = .setacl(mailbox, who, what) + """ + def setannotation(self, *args: str) -> _CommandResults: """(typ, [data]) = .setannotation(mailbox[, entry, attribute]+) -Set ANNOTATIONs. -""" + Set ANNOTATIONs. + """ + def setquota(self, root: str, limits: str) -> _CommandResults: """Set the quota root's resource limits. -(typ, [data]) = .setquota(root, limits) -""" + (typ, [data]) = .setquota(root, limits) + """ + def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: """IMAP4rev1 extension SORT command. -(typ, [data]) = .sort(sort_criteria, charset, search_criteria, ...) -""" + (typ, [data]) = .sort(sort_criteria, charset, search_criteria, ...) + """ + def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... def status(self, mailbox: str, names: str) -> _CommandResults: """Request named status conditions for mailbox. -(typ, [data]) = .status(mailbox, names) -""" + (typ, [data]) = .status(mailbox, names) + """ + def store(self, message_set: str, command: str, flags: str) -> _CommandResults: """Alters flag dispositions for messages in mailbox. -(typ, [data]) = .store(message_set, command, flags) -""" + (typ, [data]) = .store(message_set, command, flags) + """ + def subscribe(self, mailbox: str) -> _CommandResults: """Subscribe to new mailbox. -(typ, [data]) = .subscribe(mailbox) -""" + (typ, [data]) = .subscribe(mailbox) + """ + def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: """IMAPrev1 extension THREAD command. -(type, [data]) = .thread(threading_algorithm, charset, search_criteria, ...) -""" + (type, [data]) = .thread(threading_algorithm, charset, search_criteria, ...) + """ + def uid(self, command: str, *args: str) -> _CommandResults: """Execute "command arg ..." with messages identified by UID, - rather than message number. + rather than message number. -(typ, [data]) = .uid(command, arg1, arg2, ...) + (typ, [data]) = .uid(command, arg1, arg2, ...) + + Returns response appropriate to 'command'. + """ -Returns response appropriate to 'command'. -""" def unsubscribe(self, mailbox: str) -> _CommandResults: """Unsubscribe from old mailbox. -(typ, [data]) = .unsubscribe(mailbox) -""" + (typ, [data]) = .unsubscribe(mailbox) + """ + def unselect(self) -> _CommandResults: """Free server's resources associated with the selected mailbox -and returns the server to the authenticated state. 
-This command performs the same actions as CLOSE, except -that no messages are permanently removed from the currently -selected mailbox. + and returns the server to the authenticated state. + This command performs the same actions as CLOSE, except + that no messages are permanently removed from the currently + selected mailbox. + + (typ, [data]) = .unselect() + """ -(typ, [data]) = .unselect() -""" def xatom(self, name: str, *args: str) -> _CommandResults: """Allow simple extension commands - notified by server in CAPABILITY response. + notified by server in CAPABILITY response. -Assumes command is legal in current state. + Assumes command is legal in current state. -(typ, [data]) = .xatom(name, arg, ...) + (typ, [data]) = .xatom(name, arg, ...) + + Returns response appropriate to extension command 'name'. + """ -Returns response appropriate to extension command 'name'. -""" def print_log(self) -> None: ... if sys.version_info >= (3, 14): class Idler: """Iterable IDLE context manager: start IDLE & produce untagged responses. -An object of this type is returned by the IMAP4.idle() method. + An object of this type is returned by the IMAP4.idle() method. + + Note: The name and structure of this class are subject to change. + """ -Note: The name and structure of this class are subject to change. -""" def __init__(self, imap: IMAP4, duration: float | None = None) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, exc_type: object, exc_val: Unused, exc_tb: Unused) -> Literal[False]: ... @@ -445,29 +491,30 @@ Note: The name and structure of this class are subject to change. def burst(self, interval: float = 0.1) -> Generator[tuple[str, float | None]]: """Yield a burst of responses no more than 'interval' seconds apart. -with M.idle() as idler: - # get a response and any others following by < 0.1 seconds - batch = list(idler.burst()) - print(f'processing {len(batch)} responses...') - print(batch) + with M.idle() as idler: + # get a response and any others following by < 0.1 seconds + batch = list(idler.burst()) + print(f'processing {len(batch)} responses...') + print(batch) -Note: This generator requires a socket connection (not IMAP4_stream). -""" + Note: This generator requires a socket connection (not IMAP4_stream). + """ class IMAP4_SSL(IMAP4): """IMAP4 client class over SSL connection -Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) + Instantiate with: IMAP4_SSL([host[, port[, ssl_context[, timeout=None]]]]) - host - host's name (default: localhost); - port - port number (default: standard IMAP4 SSL port); - ssl_context - a SSLContext object that contains your certificate chain - and private key (default: None) - timeout - socket timeout (default: None) If timeout is not given or is None, - the global default socket timeout is used + host - host's name (default: localhost); + port - port number (default: standard IMAP4 SSL port); + ssl_context - a SSLContext object that contains your certificate chain + and private key (default: None) + timeout - socket timeout (default: None) If timeout is not given or is None, + the global default socket timeout is used + + for more documentation see the docstring of the parent class IMAP4. + """ -for more documentation see the docstring of the parent class IMAP4. -""" if sys.version_info < (3, 12): keyfile: str certfile: str @@ -495,21 +542,23 @@ for more documentation see the docstring of the parent class IMAP4. 
def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: """Setup connection to remote server on "host:port". - (default: localhost:standard IMAP4 SSL port). -This connection will be used by the routines: - read, readline, send, shutdown. -""" + (default: localhost:standard IMAP4 SSL port). + This connection will be used by the routines: + read, readline, send, shutdown. + """ + def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): """IMAP4 client class over a stream -Instantiate with: IMAP4_stream(command) + Instantiate with: IMAP4_stream(command) - "command" - a string that can be passed to subprocess.Popen() + "command" - a string that can be passed to subprocess.Popen() + + for more documentation see the docstring of the parent class IMAP4. + """ -for more documentation see the docstring of the parent class IMAP4. -""" command: str def __init__(self, command: str) -> None: ... if sys.version_info >= (3, 14): @@ -523,14 +572,15 @@ for more documentation see the docstring of the parent class IMAP4. readfile: IO[Any] def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: """Setup a stream connection. -This connection will be used by the routines: - read, readline, send, shutdown. -""" + This connection will be used by the routines: + read, readline, send, shutdown. + """ class _Authenticator: """Private class to provide en/decoding -for base64-based authentication conversation. -""" + for base64-based authentication conversation. + """ + mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... @@ -540,23 +590,24 @@ for base64-based authentication conversation. def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: """Parse an IMAP4 INTERNALDATE string. -Return corresponding local time. The return value is a -time.struct_time tuple or None if the string has wrong format. -""" + Return corresponding local time. The return value is a + time.struct_time tuple or None if the string has wrong format. + """ + def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: - """Convert integer to A-P string representation. -""" + """Convert integer to A-P string representation.""" + def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: - """Convert IMAP4 flags response to python tuple. -""" + """Convert IMAP4 flags response to python tuple.""" + def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: """Convert date_time to IMAP4 INTERNALDATE representation. -Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. The -date_time argument can be a number (int or float) representing -seconds since epoch (as returned by time.time()), a 9-tuple -representing local time, an instance of time.struct_time (as -returned by time.localtime()), an aware datetime instance or a -double-quoted string. In the last case, it is assumed to already -be in the correct format. -""" + Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. The + date_time argument can be a number (int or float) representing + seconds since epoch (as returned by time.time()), a 9-tuple + representing local time, an instance of time.struct_time (as + returned by time.localtime()), an aware datetime instance or a + double-quoted string. In the last case, it is assumed to already + be in the correct format. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi index bcd398f0b4b88..3b6536960e8d7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imghdr.pyi @@ -1,5 +1,5 @@ -"""Recognize image file formats based on their first few bytes. -""" +"""Recognize image file formats based on their first few bytes.""" + from _typeshed import StrPath from collections.abc import Callable from typing import Any, BinaryIO, Protocol, overload, type_check_only @@ -14,8 +14,8 @@ class _ReadableBinary(Protocol): @overload def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: - """Return the type of image contained in a file or byte stream. -""" + """Return the type of image contained in a file or byte stream.""" + @overload def what(file: Any, h: bytes) -> str | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi index a5e9ec829d1ac..119a9a497dfb3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/imp.pyi @@ -5,6 +5,7 @@ In most cases it is preferred you consider using the importlib module's functionality over this module. """ + import types from _imp import ( acquire_lock as acquire_lock, @@ -41,14 +42,16 @@ def new_module(name: str) -> types.ModuleType: The module is not entered into sys.modules. """ + def get_magic() -> bytes: """**DEPRECATED** Return the magic number for .pyc files. """ + def get_tag() -> str: - """Return the magic tag for .pyc files. -""" + """Return the magic tag for .pyc files.""" + def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: """**DEPRECATED** @@ -63,6 +66,7 @@ def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: If sys.implementation.cache_tag is None then NotImplementedError is raised. """ + def source_from_cache(path: StrPath) -> str: """**DEPRECATED** @@ -74,9 +78,9 @@ def source_from_cache(path: StrPath) -> str: sys.implementation.cache_tag is None then NotImplementedError is raised. """ + def get_suffixes() -> list[tuple[str, str, int]]: - """**DEPRECATED** -""" + """**DEPRECATED**""" class NullImporter: """**DEPRECATED** @@ -84,10 +88,10 @@ class NullImporter: Null import object. """ + def __init__(self, path: StrPath) -> None: ... def find_module(self, fullname: Any) -> None: - """Always returns None. -""" + """Always returns None.""" # Technically, a text file has to support a slightly different set of operations than a binary file, # but we ignore that here. @@ -103,11 +107,11 @@ class _FileLike(Protocol): # PathLike doesn't work for the pathname argument here def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: - """**DEPRECATED** -""" + """**DEPRECATED**""" + def load_package(name: str, path: StrPath) -> types.ModuleType: - """**DEPRECATED** -""" + """**DEPRECATED**""" + def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: """**DEPRECATED** @@ -131,6 +135,7 @@ def find_module( submodule name and the package's __path__. 
""" + def reload(module: types.ModuleType) -> types.ModuleType: """**DEPRECATED** @@ -139,14 +144,16 @@ def reload(module: types.ModuleType) -> types.ModuleType: The module must have been successfully imported before. """ + def init_builtin(name: str) -> types.ModuleType | None: """**DEPRECATED** Load and return a built-in module by name, or None is such module doesn't exist """ + def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: # file argument is ignored """**DEPRECATED** - Load an extension module. - """ + Load an extension module. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi index 22ee90bae3b65..11d7bf3c480fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/__init__.pyi @@ -1,5 +1,5 @@ -"""A pure Python implementation of import. -""" +"""A pure Python implementation of import.""" + import sys from importlib._bootstrap import __import__ as __import__ from importlib.abc import Loader @@ -12,30 +12,31 @@ __all__ = ["__import__", "import_module", "invalidate_caches", "reload"] def import_module(name: str, package: str | None = None) -> ModuleType: """Import a module. -The 'package' argument is required when performing a relative import. It -specifies the package to use as the anchor point from which to resolve the -relative import to an absolute import. + The 'package' argument is required when performing a relative import. It + specifies the package to use as the anchor point from which to resolve the + relative import to an absolute import. -""" + """ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `importlib.util.find_spec()` instead.") def find_loader(name: str, path: str | None = None) -> Loader | None: """Return the loader for the specified module. - This is a backward-compatible wrapper around find_spec(). + This is a backward-compatible wrapper around find_spec(). - This function is deprecated in favor of importlib.util.find_spec(). + This function is deprecated in favor of importlib.util.find_spec(). - """ + """ def invalidate_caches() -> None: """Call the invalidate_caches() method on all meta path finders stored in -sys.meta_path (where implemented). -""" + sys.meta_path (where implemented). + """ + def reload(module: ModuleType) -> ModuleType: """Reload the module and return it. -The module must have been successfully imported before. + The module must have been successfully imported before. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi index 20a094cbde421..c85e8004cd37c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_abc.pyi @@ -1,5 +1,5 @@ -"""Subset of importlib.abc used to reduce importlib.util imports. -""" +"""Subset of importlib.abc used to reduce importlib.util imports.""" + import sys import types from abc import ABCMeta @@ -8,21 +8,21 @@ from typing_extensions import deprecated if sys.version_info >= (3, 10): class Loader(metaclass=ABCMeta): - """Abstract base class for import loaders. -""" + """Abstract base class for import loaders.""" + def load_module(self, fullname: str) -> types.ModuleType: """Return the loaded module. -The module must be added to sys.modules and have import-related -attributes set properly. 
The fullname is a str. + The module must be added to sys.modules and have import-related + attributes set properly. The fullname is a str. -ImportError is raised on failure. + ImportError is raised on failure. -This method is deprecated in favor of loader.exec_module(). If -exec_module() exists then it is used to provide a backwards-compatible -functionality for this method. + This method is deprecated in favor of loader.exec_module(). If + exec_module() exists then it is used to provide a backwards-compatible + functionality for this method. -""" + """ if sys.version_info < (3, 12): @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " @@ -31,20 +31,20 @@ functionality for this method. def module_repr(self, module: types.ModuleType) -> str: """Return a module's repr. - Used by the module type when the method does not raise - NotImplementedError. + Used by the module type when the method does not raise + NotImplementedError. - This method is deprecated. + This method is deprecated. - """ + """ def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: """Return a module to initialize and into which to load. -This method should raise ImportError if anything prevents it -from creating a new module. It may return None to indicate -that the spec should create the new module. -""" + This method should raise ImportError if anything prevents it + from creating a new module. It may return None to indicate + that the spec should create the new module. + """ # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi index 908b1e66af4ee..116884f228c01 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap.pyi @@ -6,5 +6,6 @@ such it requires the injection of specific modules and attributes in order to work. One should use importlib as the public-facing version of this module. """ + from _frozen_importlib import * from _frozen_importlib import __import__ as __import__, _init_module_attrs as _init_module_attrs diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi index cd0334025260b..a4d2aeccd2744 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/_bootstrap_external.pyi @@ -6,5 +6,6 @@ such it requires the injection of specific modules and attributes in order to work. One should use importlib as the public-facing version of this module. """ + from _frozen_importlib_external import * from _frozen_importlib_external import _NamespaceLoader as _NamespaceLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi index cc518d8832242..ae2f291d558f3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/abc.pyi @@ -1,5 +1,5 @@ -"""Abstract base classes related to import. 
-""" +"""Abstract base classes related to import.""" + import _ast import sys import types @@ -31,37 +31,39 @@ if sys.version_info >= (3, 10): from importlib._abc import Loader as Loader else: class Loader(metaclass=ABCMeta): - """Abstract base class for import loaders. -""" + """Abstract base class for import loaders.""" + def load_module(self, fullname: str) -> types.ModuleType: """Return the loaded module. - The module must be added to sys.modules and have import-related - attributes set properly. The fullname is a str. + The module must be added to sys.modules and have import-related + attributes set properly. The fullname is a str. - ImportError is raised on failure. + ImportError is raised on failure. - This method is deprecated in favor of loader.exec_module(). If - exec_module() exists then it is used to provide a backwards-compatible - functionality for this method. + This method is deprecated in favor of loader.exec_module(). If + exec_module() exists then it is used to provide a backwards-compatible + functionality for this method. + + """ - """ def module_repr(self, module: types.ModuleType) -> str: """Return a module's repr. - Used by the module type when the method does not raise - NotImplementedError. + Used by the module type when the method does not raise + NotImplementedError. - This method is deprecated. + This method is deprecated. + + """ - """ def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: """Return a module to initialize and into which to load. - This method should raise ImportError if anything prevents it - from creating a new module. It may return None to indicate - that the spec should create the new module. - """ + This method should raise ImportError if anything prevents it + from creating a new module. It may return None to indicate + that the spec should create the new module. + """ # Not defined on the actual class for backwards-compatibility reasons, # but expected in new code. def exec_module(self, module: types.ModuleType) -> None: ... @@ -71,329 +73,345 @@ if sys.version_info < (3, 12): class Finder(metaclass=ABCMeta): """Legacy abstract base class for import finders. - It may be subclassed for compatibility with legacy third party - reimplementations of the import system. Otherwise, finder - implementations should derive from the more specific MetaPathFinder - or PathEntryFinder ABCs. + It may be subclassed for compatibility with legacy third party + reimplementations of the import system. Otherwise, finder + implementations should derive from the more specific MetaPathFinder + or PathEntryFinder ABCs. - Deprecated since Python 3.3 - """ + Deprecated since Python 3.3 + """ @deprecated("Deprecated since Python 3.7. Use `importlib.resources.abc.TraversableResources` instead.") class ResourceLoader(Loader): """Abstract base class for loaders which can return data from their -back-end storage to facilitate reading data to perform an import. + back-end storage to facilitate reading data to perform an import. + + This ABC represents one of the optional protocols specified by PEP 302. -This ABC represents one of the optional protocols specified by PEP 302. + For directly loading resources, use TraversableResources instead. This class + primarily exists for backwards compatibility with other ABCs in this module. -For directly loading resources, use TraversableResources instead. This class -primarily exists for backwards compatibility with other ABCs in this module. 
+ """ -""" @abstractmethod def get_data(self, path: str) -> bytes: """Abstract method which when implemented should return the bytes for -the specified path. The path must be a str. -""" + the specified path. The path must be a str. + """ class InspectLoader(Loader): """Abstract base class for loaders which support inspection about the -modules they can load. + modules they can load. + + This ABC represents one of the optional protocols specified by PEP 302. -This ABC represents one of the optional protocols specified by PEP 302. + """ -""" def is_package(self, fullname: str) -> bool: """Optional method which when implemented should return whether the -module is a package. The fullname is a str. Returns a bool. + module is a package. The fullname is a str. Returns a bool. + + Raises ImportError if the module cannot be found. + """ -Raises ImportError if the module cannot be found. -""" def get_code(self, fullname: str) -> types.CodeType | None: """Method which returns the code object for the module. -The fullname is a str. Returns a types.CodeType if possible, else -returns None if a code object does not make sense -(e.g. built-in module). Raises ImportError if the module cannot be -found. -""" + The fullname is a str. Returns a types.CodeType if possible, else + returns None if a code object does not make sense + (e.g. built-in module). Raises ImportError if the module cannot be + found. + """ + @abstractmethod def get_source(self, fullname: str) -> str | None: """Abstract method which should return the source code for the -module. The fullname is a str. Returns a str. + module. The fullname is a str. Returns a str. + + Raises ImportError if the module cannot be found. + """ -Raises ImportError if the module cannot be found. -""" def exec_module(self, module: types.ModuleType) -> None: - """Execute the module. -""" + """Execute the module.""" + @staticmethod def source_to_code( data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: bytes | StrPath = "" ) -> types.CodeType: """Compile 'data' into a code object. -The 'data' argument can be anything that compile() can handle. The'path' -argument should be where the data was retrieved (when applicable). -""" + The 'data' argument can be anything that compile() can handle. The'path' + argument should be where the data was retrieved (when applicable). + """ class ExecutionLoader(InspectLoader): """Abstract base class for loaders that wish to support the execution of -modules as scripts. + modules as scripts. + + This ABC represents one of the optional protocols specified in PEP 302. -This ABC represents one of the optional protocols specified in PEP 302. + """ -""" @abstractmethod def get_filename(self, fullname: str) -> str: """Abstract method which should return the value that __file__ is to be -set to. + set to. -Raises ImportError if the module cannot be found. -""" + Raises ImportError if the module cannot be found. + """ class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): # type: ignore[misc] # incompatible definitions of source_to_code in the base classes """Abstract base class for loading source code (and optionally any -corresponding bytecode). + corresponding bytecode). + + To support loading from source code, the abstractmethods inherited from + ResourceLoader and ExecutionLoader need to be implemented. To also support + loading from bytecode, the optional methods specified directly by this ABC + is required. 
-To support loading from source code, the abstractmethods inherited from -ResourceLoader and ExecutionLoader need to be implemented. To also support -loading from bytecode, the optional methods specified directly by this ABC -is required. + Inherited abstractmethods not implemented in this ABC: -Inherited abstractmethods not implemented in this ABC: + * ResourceLoader.get_data + * ExecutionLoader.get_filename - * ResourceLoader.get_data - * ExecutionLoader.get_filename + """ -""" @deprecated("Deprecated since Python 3.3. Use `importlib.resources.abc.SourceLoader.path_stats` instead.") def path_mtime(self, path: str) -> float: - """Return the (int) modification time for the path (str). -""" + """Return the (int) modification time for the path (str).""" + def set_data(self, path: str, data: bytes) -> None: """Write the bytes to the path (if possible). -Accepts a str path and data as bytes. + Accepts a str path and data as bytes. + + Any needed intermediary directories are to be created. If for some + reason the file cannot be written because of permissions, fail + silently. + """ -Any needed intermediary directories are to be created. If for some -reason the file cannot be written because of permissions, fail -silently. -""" def get_source(self, fullname: str) -> str | None: - """Concrete implementation of InspectLoader.get_source. -""" + """Concrete implementation of InspectLoader.get_source.""" + def path_stats(self, path: str) -> Mapping[str, Any]: """Return a metadata dict for the source pointed to by the path (str). -Possible keys: -- 'mtime' (mandatory) is the numeric timestamp of last source - code modification; -- 'size' (optional) is the size in bytes of the source code. -""" + Possible keys: + - 'mtime' (mandatory) is the numeric timestamp of last source + code modification; + - 'size' (optional) is the size in bytes of the source code. + """ # The base classes differ starting in 3.10: if sys.version_info >= (3, 10): # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(metaclass=ABCMeta): - """Abstract base class for import finders on sys.meta_path. -""" + """Abstract base class for import finders on sys.meta_path.""" + if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `MetaPathFinder.find_spec()` instead.") def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: """Return a loader for the module. - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. + If no module is found, return None. The fullname is a str and + the path is a list of strings or None. - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() exists then backwards-compatible + functionality is provided for this method. - """ + """ def invalidate_caches(self) -> None: """An optional method for clearing the finder's cache, if any. -This method is used by importlib.invalidate_caches(). -""" + This method is used by importlib.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(metaclass=ABCMeta): - """Abstract base class for path entry finders used by PathFinder. 
-""" + """Abstract base class for path entry finders used by PathFinder.""" + if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `PathEntryFinder.find_spec()` instead.") def find_module(self, fullname: str) -> Loader | None: """Try to find a loader for the specified module by delegating to - self.find_loader(). + self.find_loader(). - This method is deprecated in favor of finder.find_spec(). + This method is deprecated in favor of finder.find_spec(). + + """ - """ @deprecated("Deprecated since Python 3.4; removed in Python 3.12. Use `find_spec()` instead.") def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: """Return (loader, namespace portion) for the path entry. - The fullname is a str. The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. + The fullname is a str. The namespace portion is a sequence of + path entries contributing to part of a namespace package. The + sequence may be empty. If loader is not None, the portion will + be ignored. - The portion will be discarded if another path entry finder - locates the module as a normal module or package. + The portion will be discarded if another path entry finder + locates the module as a normal module or package. - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() is provided than backwards-compatible + functionality is provided. + """ def invalidate_caches(self) -> None: """An optional method for clearing the finder's cache, if any. -This method is used by PathFinder.invalidate_caches(). -""" + This method is used by PathFinder.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... else: # Please keep in sync with _typeshed.importlib.MetaPathFinderProtocol class MetaPathFinder(Finder): - """Abstract base class for import finders on sys.meta_path. -""" + """Abstract base class for import finders on sys.meta_path.""" + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: """Return a loader for the module. - If no module is found, return None. The fullname is a str and - the path is a list of strings or None. + If no module is found, return None. The fullname is a str and + the path is a list of strings or None. - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() exists then backwards-compatible - functionality is provided for this method. + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() exists then backwards-compatible + functionality is provided for this method. + + """ - """ def invalidate_caches(self) -> None: """An optional method for clearing the finder's cache, if any. - This method is used by importlib.invalidate_caches(). - """ + This method is used by importlib.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec( self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ..., / ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): - """Abstract base class for path entry finders used by PathFinder. 
-""" + """Abstract base class for path entry finders used by PathFinder.""" + def find_module(self, fullname: str) -> Loader | None: """Try to find a loader for the specified module by delegating to - self.find_loader(). + self.find_loader(). - This method is deprecated in favor of finder.find_spec(). + This method is deprecated in favor of finder.find_spec(). + + """ - """ def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: """Return (loader, namespace portion) for the path entry. - The fullname is a str. The namespace portion is a sequence of - path entries contributing to part of a namespace package. The - sequence may be empty. If loader is not None, the portion will - be ignored. + The fullname is a str. The namespace portion is a sequence of + path entries contributing to part of a namespace package. The + sequence may be empty. If loader is not None, the portion will + be ignored. - The portion will be discarded if another path entry finder - locates the module as a normal module or package. + The portion will be discarded if another path entry finder + locates the module as a normal module or package. + + This method is deprecated since Python 3.4 in favor of + finder.find_spec(). If find_spec() is provided than backwards-compatible + functionality is provided. + """ - This method is deprecated since Python 3.4 in favor of - finder.find_spec(). If find_spec() is provided than backwards-compatible - functionality is provided. - """ def invalidate_caches(self) -> None: """An optional method for clearing the finder's cache, if any. - This method is used by PathFinder.invalidate_caches(). - """ + This method is used by PathFinder.invalidate_caches(). + """ # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader, metaclass=ABCMeta): """Abstract base class partially implementing the ResourceLoader and -ExecutionLoader ABCs. -""" + ExecutionLoader ABCs. + """ + name: str path: str def __init__(self, fullname: str, path: str) -> None: """Cache the module name and the path to the file found by the -finder. -""" + finder. + """ + def get_data(self, path: str) -> bytes: - """Return the data from path as raw bytes. -""" + """Return the data from path as raw bytes.""" + def get_filename(self, fullname: str | None = None) -> str: - """Return the path to the source file as found by the finder. -""" + """Return the path to the source file as found by the finder.""" + def load_module(self, fullname: str | None = None) -> types.ModuleType: """Load a module from a file. -This method is deprecated. Use exec_module() instead. + This method is deprecated. Use exec_module() instead. -""" + """ if sys.version_info < (3, 11): class ResourceReader(metaclass=ABCMeta): - """Abstract base class for loaders to provide resource reading support. -""" + """Abstract base class for loaders to provide resource reading support.""" + @abstractmethod def open_resource(self, resource: str) -> IO[bytes]: """Return an opened, file-like object for binary reading. - The 'resource' argument is expected to represent only a file name. - If the resource cannot be found, FileNotFoundError is raised. - """ + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. 
+ """ + @abstractmethod def resource_path(self, resource: str) -> str: """Return the file system path to the specified resource. - The 'resource' argument is expected to represent only a file name. - If the resource does not exist on the file system, raise - FileNotFoundError. - """ + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ if sys.version_info >= (3, 10): @abstractmethod def is_resource(self, path: str) -> bool: """Return True if the named 'path' is a resource. - Files are resources, directories are not. - """ + Files are resources, directories are not. + """ else: @abstractmethod def is_resource(self, name: str) -> bool: - """Return True if the named 'name' is consider a resource. -""" + """Return True if the named 'name' is consider a resource.""" @abstractmethod def contents(self) -> Iterator[str]: - """Return an iterable of entries in `package`. -""" + """Return an iterable of entries in `package`.""" @runtime_checkable class Traversable(Protocol): """ - An object with a subset of pathlib.Path methods suitable for - traversing directories and opening files. - """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + """ + @abstractmethod def is_dir(self) -> bool: """ - Return True if self is a dir - """ + Return True if self is a dir + """ + @abstractmethod def is_file(self) -> bool: """ - Return True if self is a file - """ + Return True if self is a file + """ + @abstractmethod def iterdir(self) -> Iterator[Traversable]: """ - Yield Traversable objects in self - """ + Yield Traversable objects in self + """ if sys.version_info >= (3, 11): @abstractmethod def joinpath(self, *descendants: str) -> Traversable: ... @@ -401,9 +419,8 @@ if sys.version_info < (3, 11): @abstractmethod def joinpath(self, child: str, /) -> Traversable: """ - Return Traversable child in self - """ - + Return Traversable child in self + """ # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @@ -411,12 +428,13 @@ if sys.version_info < (3, 11): @abstractmethod def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: """ - mode may be 'r' or 'rb' to open as text or binary. Return a handle - suitable for reading (same as pathlib.Path.open). + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ - When opening as text, accepts encoding parameters such as those - accepted by io.TextIOWrapper. - """ @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... @@ -424,40 +442,42 @@ if sys.version_info < (3, 11): @abstractmethod def name(self) -> str: """ - The base name of this object without any parent references. - """ + The base name of this object without any parent references. 
+ """ if sys.version_info >= (3, 10): def __truediv__(self, child: str, /) -> Traversable: """ - Return Traversable child in self - """ + Return Traversable child in self + """ else: @abstractmethod def __truediv__(self, child: str, /) -> Traversable: """ - Return Traversable child in self - """ + Return Traversable child in self + """ @abstractmethod def read_bytes(self) -> bytes: """ - Read contents of self as bytes - """ + Read contents of self as bytes + """ + @abstractmethod def read_text(self, encoding: str | None = None) -> str: """ - Read contents of self as text - """ + Read contents of self as text + """ class TraversableResources(ResourceReader): """ - The required interface for providing traversable - resources. - """ + The required interface for providing traversable + resources. + """ + @abstractmethod def files(self) -> Traversable: - """Return a Traversable object for the loaded package. -""" + """Return a Traversable object for the loaded package.""" + def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi index 802c90838782a..9cbc94326faf6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/machinery.pyi @@ -1,5 +1,5 @@ -"""The machinery of importlib: finders, loaders, hooks, etc. -""" +"""The machinery of importlib: finders, loaders, hooks, etc.""" + import sys from importlib._bootstrap import BuiltinImporter as BuiltinImporter, FrozenImporter as FrozenImporter, ModuleSpec as ModuleSpec from importlib._bootstrap_external import ( @@ -22,8 +22,7 @@ if sys.version_info >= (3, 14): from importlib._bootstrap_external import AppleFrameworkLoader as AppleFrameworkLoader def all_suffixes() -> list[str]: - """Returns a list of all recognized module suffixes for this process -""" + """Returns a list of all recognized module suffixes for this process""" if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi index edf7f9bfa5061..f2e832714a6f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -37,44 +37,43 @@ if sys.version_info >= (3, 10): from importlib.metadata._meta import PackageMetadata as PackageMetadata, SimplePath def packages_distributions() -> Mapping[str, list[str]]: """ -Return a mapping of top-level packages to their -distributions. - ->>> import collections.abc ->>> pkgs = packages_distributions() ->>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) -True -""" + Return a mapping of top-level packages to their + distributions. + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ _SimplePath: TypeAlias = SimplePath else: _SimplePath: TypeAlias = Path class PackageNotFoundError(ModuleNotFoundError): - """The package was not found. 
-""" + """The package was not found.""" + @property def name(self) -> str: # type: ignore[override] - """module name -""" + """module name""" if sys.version_info >= (3, 13): _EntryPointBase = object elif sys.version_info >= (3, 11): class DeprecatedTuple: """ - Provide subscript item access for backward compatibility. + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ - >>> recwarn = getfixture('recwarn') - >>> ep = EntryPoint(name='name', value='value', group='group') - >>> ep[:] - ('name', 'value', 'group') - >>> ep[0] - 'name' - >>> len(recwarn) - 1 - """ def __getitem__(self, item: int) -> str: ... _EntryPointBase = DeprecatedTuple @@ -88,19 +87,20 @@ if sys.version_info >= (3, 11): class EntryPoint(_EntryPointBase): """An entry point as defined by Python packaging conventions. -See `the packaging docs on entry points -`_ -for more information. - ->>> ep = EntryPoint( -... name=None, group=None, value='package.module:attr [extra1, extra2]') ->>> ep.module -'package.module' ->>> ep.attr -'attr' ->>> ep.extras -['extra1', 'extra2'] -""" + See `the packaging docs on entry points + `_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ + pattern: ClassVar[Pattern[str]] name: str value: str @@ -109,9 +109,10 @@ for more information. def __init__(self, name: str, value: str, group: str) -> None: ... def load(self) -> Any: # Callable[[], Any] or an importable module """Load the entry point from its definition. If only a module -is indicated by the value, return that module. Otherwise, -return the named object. -""" + is indicated by the value, return that module. Otherwise, + return the named object. + """ + @property def extras(self) -> list[str]: ... @property @@ -130,58 +131,61 @@ return the named object. extras: list[str] = ..., ) -> bool: # undocumented """ -EntryPoint matches the given parameters. - ->>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') ->>> ep.matches(group='foo') -True ->>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') -True ->>> ep.matches(group='foo', name='other') -False ->>> ep.matches() -True ->>> ep.matches(extras=['extra1', 'extra2']) -True ->>> ep.matches(module='bing') -True ->>> ep.matches(attr='bong') -True -""" + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ + def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... def __lt__(self, other: object) -> bool: ... if sys.version_info < (3, 12): def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really """ - Supply iter so one may construct dicts of EntryPoints by name. - """ + Supply iter so one may construct dicts of EntryPoints by name. + """ else: @disjoint_base class EntryPoint(_EntryPointBase): """An entry point as defined by Python packaging conventions. 
- See `the packaging docs on entry points - `_ - for more information. - - >>> ep = EntryPoint( - ... name=None, group=None, value='package.module:attr [extra1, extra2]') - >>> ep.module - 'package.module' - >>> ep.attr - 'attr' - >>> ep.extras - ['extra1', 'extra2'] - """ + See `the packaging docs on entry points + `_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ + pattern: ClassVar[Pattern[str]] def load(self) -> Any: # Callable[[], Any] or an importable module """Load the entry point from its definition. If only a module - is indicated by the value, return that module. Otherwise, - return the named object. - """ + is indicated by the value, return that module. Otherwise, + return the named object. + """ + @property def extras(self) -> list[str]: ... @property @@ -201,41 +205,43 @@ else: extras: list[str] = ..., ) -> bool: # undocumented """ - EntryPoint matches the given parameters. - - >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') - >>> ep.matches(group='foo') - True - >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') - True - >>> ep.matches(group='foo', name='other') - False - >>> ep.matches() - True - >>> ep.matches(extras=['extra1', 'extra2']) - True - >>> ep.matches(module='bing') - True - >>> ep.matches(attr='bong') - True - """ + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ def __hash__(self) -> int: ... def __iter__(self) -> Iterator[Any]: # result of iter((str, Self)), really """ - Supply iter so one may construct dicts of EntryPoints by name. - """ + Supply iter so one may construct dicts of EntryPoints by name. + """ if sys.version_info >= (3, 12): class EntryPoints(tuple[EntryPoint, ...]): """ -An immutable collection of selectable EntryPoint objects. -""" + An immutable collection of selectable EntryPoint objects. + """ + __slots__ = () def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] """ -Get the EntryPoint in self matching name. -""" + Get the EntryPoint in self matching name. + """ + def select( self, *, @@ -247,64 +253,69 @@ Get the EntryPoint in self matching name. extras: list[str] = ..., ) -> EntryPoints: """ -Select entry points from self that match the -given parameters (typically group and/or name). -""" + Select entry points from self that match the + given parameters (typically group and/or name). + """ + @property def names(self) -> set[str]: """ -Return the set of all names of all entry points. -""" + Return the set of all names of all entry points. + """ + @property def groups(self) -> set[str]: """ -Return the set of all groups of all entry points. -""" + Return the set of all groups of all entry points. + """ elif sys.version_info >= (3, 10): class DeprecatedList(list[_T]): """ - Allow an otherwise immutable object to implement mutability - for compatibility. 
- - >>> recwarn = getfixture('recwarn') - >>> dl = DeprecatedList(range(3)) - >>> dl[0] = 1 - >>> dl.append(3) - >>> del dl[3] - >>> dl.reverse() - >>> dl.sort() - >>> dl.extend([4]) - >>> dl.pop(-1) - 4 - >>> dl.remove(1) - >>> dl += [5] - >>> dl + [6] - [1, 2, 5, 6] - >>> dl + (6,) - [1, 2, 5, 6] - >>> dl.insert(0, 0) - >>> dl - [0, 1, 2, 5] - >>> dl == [0, 1, 2, 5] - True - >>> dl == (0, 1, 2, 5) - True - >>> len(recwarn) - 1 - """ + Allow an otherwise immutable object to implement mutability + for compatibility. + + >>> recwarn = getfixture('recwarn') + >>> dl = DeprecatedList(range(3)) + >>> dl[0] = 1 + >>> dl.append(3) + >>> del dl[3] + >>> dl.reverse() + >>> dl.sort() + >>> dl.extend([4]) + >>> dl.pop(-1) + 4 + >>> dl.remove(1) + >>> dl += [5] + >>> dl + [6] + [1, 2, 5, 6] + >>> dl + (6,) + [1, 2, 5, 6] + >>> dl.insert(0, 0) + >>> dl + [0, 1, 2, 5] + >>> dl == [0, 1, 2, 5] + True + >>> dl == (0, 1, 2, 5) + True + >>> len(recwarn) + 1 + """ + __slots__ = () class EntryPoints(DeprecatedList[EntryPoint]): # use as list is deprecated since 3.10 """ - An immutable collection of selectable EntryPoint objects. - """ + An immutable collection of selectable EntryPoint objects. + """ + # int argument is deprecated since 3.10 __slots__ = () def __getitem__(self, name: int | str) -> EntryPoint: # type: ignore[override] """ - Get the EntryPoint in self matching name. - """ + Get the EntryPoint in self matching name. + """ + def select( self, *, @@ -316,47 +327,50 @@ elif sys.version_info >= (3, 10): extras: list[str] = ..., ) -> EntryPoints: """ - Select entry points from self that match the - given parameters (typically group and/or name). - """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + @property def names(self) -> set[str]: """ - Return the set of all names of all entry points. - """ + Return the set of all names of all entry points. + """ + @property def groups(self) -> set[str]: """ - Return the set of all groups of all entry points. + Return the set of all groups of all entry points. - For coverage while SelectableGroups is present. - >>> EntryPoints().groups - set() - """ + For coverage while SelectableGroups is present. + >>> EntryPoints().groups + set() + """ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): class Deprecated(Generic[_KT, _VT]): """ - Compatibility add-in for mapping to indicate that - mapping behavior is deprecated. - - >>> recwarn = getfixture('recwarn') - >>> class DeprecatedDict(Deprecated, dict): pass - >>> dd = DeprecatedDict(foo='bar') - >>> dd.get('baz', None) - >>> dd['foo'] - 'bar' - >>> list(dd) - ['foo'] - >>> list(dd.keys()) - ['foo'] - >>> 'foo' in dd - True - >>> list(dd.values()) - ['bar'] - >>> len(recwarn) - 1 - """ + Compatibility add-in for mapping to indicate that + mapping behavior is deprecated. + + >>> recwarn = getfixture('recwarn') + >>> class DeprecatedDict(Deprecated, dict): pass + >>> dd = DeprecatedDict(foo='bar') + >>> dd.get('baz', None) + >>> dd['foo'] + 'bar' + >>> list(dd) + ['foo'] + >>> list(dd.keys()) + ['foo'] + >>> 'foo' in dd + True + >>> list(dd.values()) + ['bar'] + >>> len(recwarn) + 1 + """ + def __getitem__(self, name: _KT) -> _VT: ... @overload def get(self, name: _KT, default: None = None) -> _VT | None: ... @@ -372,9 +386,10 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. 
Use `select` instead.") class SelectableGroups(Deprecated[str, EntryPoints], dict[str, EntryPoints]): # use as dict is deprecated since 3.10 """ - A backward- and forward-compatible result from - entry_points that fully implements the dict interface. - """ + A backward- and forward-compatible result from + entry_points that fully implements the dict interface. + """ + @classmethod def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property @@ -382,10 +397,11 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): @property def names(self) -> set[str]: """ - for coverage: - >>> SelectableGroups().names - set() - """ + for coverage: + >>> SelectableGroups().names + set() + """ + @overload def select(self) -> Self: ... @overload @@ -401,13 +417,12 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12): ) -> EntryPoints: ... class PackagePath(pathlib.PurePosixPath): - """A reference to a path in a package -""" + """A reference to a path in a package""" + def read_text(self, encoding: str = "utf-8") -> str: ... def read_binary(self) -> bytes: ... def locate(self) -> PathLike[str]: - """Return a path-like object for this path -""" + """Return a path-like object for this path""" # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: hash: FileHash | None size: int | None @@ -426,65 +441,70 @@ else: class Distribution(_distribution_parent): """ -An abstract Python distribution package. - -Custom providers may derive from this class and define -the abstract methods to provide a concrete implementation -for their environment. Some providers may opt to override -the default implementation of some properties to bypass -the file-reading mechanism. -""" + An abstract Python distribution package. + + Custom providers may derive from this class and define + the abstract methods to provide a concrete implementation + for their environment. Some providers may opt to override + the default implementation of some properties to bypass + the file-reading mechanism. + """ + @abc.abstractmethod def read_text(self, filename: str) -> str | None: """Attempt to load metadata file given by the name. -Python distribution metadata is organized by blobs of text -typically represented as "files" in the metadata directory -(e.g. package-1.0.dist-info). These files include things -like: + Python distribution metadata is organized by blobs of text + typically represented as "files" in the metadata directory + (e.g. package-1.0.dist-info). These files include things + like: + + - METADATA: The distribution metadata including fields + like Name and Version and Description. + - entry_points.txt: A series of entry points as defined in + `the entry points spec `_. + - RECORD: A record of files according to + `this recording spec `_. -- METADATA: The distribution metadata including fields - like Name and Version and Description. -- entry_points.txt: A series of entry points as defined in - `the entry points spec `_. -- RECORD: A record of files according to - `this recording spec `_. + A package may provide any set of files, including those + not listed here or none at all. -A package may provide any set of files, including those -not listed here or none at all. + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ -:param filename: The name of the file in the distribution info. -:return: The text if found, otherwise None. 
-""" @abc.abstractmethod def locate_file(self, path: StrPath) -> _SimplePath: """ -Given a path to a file in this distribution, return a SimplePath -to it. -""" + Given a path to a file in this distribution, return a SimplePath + to it. + """ + @classmethod def from_name(cls, name: str) -> Distribution: """Return the Distribution for the given package name. -:param name: The name of the distribution package to search for. -:return: The Distribution instance (or subclass thereof) for the named - package, if found. -:raises PackageNotFoundError: When the named package's distribution - metadata cannot be found. -:raises ValueError: When an invalid value is supplied for name. -""" + :param name: The name of the distribution package to search for. + :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + :raises ValueError: When an invalid value is supplied for name. + """ + @overload @classmethod def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: """Return an iterable of Distribution objects for all packages. -Pass a ``context`` or pass keyword arguments for constructing -a context. + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for packages matching + the context. + """ -:context: A ``DistributionFinder.Context`` object. -:return: Iterable of Distribution objects for packages matching - the context. -""" @overload @classmethod def discover( @@ -494,135 +514,137 @@ a context. def at(path: StrPath) -> PathDistribution: """Return a Distribution for the indicated metadata path. -:param path: a string or path-like object -:return: a concrete Distribution instance for the path -""" - + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ if sys.version_info >= (3, 10): @property def metadata(self) -> PackageMetadata: """Return the parsed metadata for this Distribution. -The returned object will have keys that name the various bits of -metadata per the -`Core metadata specifications `_. + The returned object will have keys that name the various bits of + metadata per the + `Core metadata specifications `_. + + Custom providers may provide the METADATA file or override this + property. + """ -Custom providers may provide the METADATA file or override this -property. -""" @property def entry_points(self) -> EntryPoints: """ -Return EntryPoints for this distribution. + Return EntryPoints for this distribution. -Custom providers may provide the ``entry_points.txt`` file -or override this property. -""" + Custom providers may provide the ``entry_points.txt`` file + or override this property. + """ else: @property def metadata(self) -> Message: """Return the parsed metadata for this Distribution. - The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. - """ + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + @property def entry_points(self) -> list[EntryPoint]: ... @property def version(self) -> str: - """Return the 'Version' metadata for the distribution package. -""" + """Return the 'Version' metadata for the distribution package.""" + @property def files(self) -> list[PackagePath] | None: """Files in this distribution. 
-:return: List of PackagePath for this distribution or None + :return: List of PackagePath for this distribution or None -Result is `None` if the metadata file that enumerates files -(i.e. RECORD for dist-info, or installed-files.txt or -SOURCES.txt for egg-info) is missing. -Result may be empty if the metadata exists but is empty. + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info, or installed-files.txt or + SOURCES.txt for egg-info) is missing. + Result may be empty if the metadata exists but is empty. + + Custom providers are recommended to provide a "RECORD" file (in + ``read_text``) or override this property to allow for callers to be + able to resolve filenames provided by the package. + """ -Custom providers are recommended to provide a "RECORD" file (in -``read_text``) or override this property to allow for callers to be -able to resolve filenames provided by the package. -""" @property def requires(self) -> list[str] | None: - """Generated requirements specified for this Distribution -""" + """Generated requirements specified for this Distribution""" if sys.version_info >= (3, 10): @property def name(self) -> str: - """Return the 'Name' metadata for the distribution package. -""" + """Return the 'Name' metadata for the distribution package.""" if sys.version_info >= (3, 13): @property def origin(self) -> types.SimpleNamespace | None: ... class DistributionFinder(MetaPathFinder): """ -A MetaPathFinder capable of discovering installed distributions. + A MetaPathFinder capable of discovering installed distributions. + + Custom providers should implement this interface in order to + supply metadata. + """ -Custom providers should implement this interface in order to -supply metadata. -""" class Context: """ -Keyword arguments presented by the caller to -``distributions()`` or ``Distribution.discover()`` -to narrow the scope of a search for distributions -in all DistributionFinders. - -Each DistributionFinder may expect any parameters -and should attempt to honor the canonical -parameters defined below when appropriate. - -This mechanism gives a custom provider a means to -solicit additional details from the caller beyond -"name" and "path" when searching distributions. -For example, imagine a provider that exposes suites -of packages in either a "public" or "private" ``realm``. -A caller may wish to query only for distributions in -a particular realm and could call -``distributions(realm="private")`` to signal to the -custom provider to only include distributions from that -realm. -""" + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + + This mechanism gives a custom provider a means to + solicit additional details from the caller beyond + "name" and "path" when searching distributions. + For example, imagine a provider that exposes suites + of packages in either a "public" or "private" ``realm``. + A caller may wish to query only for distributions in + a particular realm and could call + ``distributions(realm="private")`` to signal to the + custom provider to only include distributions from that + realm. + """ + name: str | None def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... 
@property def path(self) -> list[str]: """ -The sequence of directory path that a distribution finder -should search. + The sequence of directory path that a distribution finder + should search. -Typically refers to Python installed package paths such as -"site-packages" directories and defaults to ``sys.path``. -""" + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. + """ @abc.abstractmethod def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: """ -Find distributions. + Find distributions. -Return an iterable of all Distribution instances capable of -loading the metadata for packages matching the ``context``, -a DistributionFinder.Context instance. -""" + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ class MetadataPathFinder(DistributionFinder): @classmethod def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: """ -Find distributions. + Find distributions. -Return an iterable of all Distribution instances capable of -loading the metadata for packages matching ``context.name`` -(or all names if ``None`` indicated) along the paths in the list -of directories ``context.path``. -""" + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ if sys.version_info >= (3, 11): @classmethod def invalidate_caches(cls) -> None: ... @@ -635,43 +657,47 @@ class PathDistribution(Distribution): def __init__(self, path: _SimplePath) -> None: """Construct a distribution. -:param path: SimplePath indicating the metadata directory. -""" + :param path: SimplePath indicating the metadata directory. + """ + def read_text(self, filename: StrPath) -> str | None: """Attempt to load metadata file given by the name. -Python distribution metadata is organized by blobs of text -typically represented as "files" in the metadata directory -(e.g. package-1.0.dist-info). These files include things -like: + Python distribution metadata is organized by blobs of text + typically represented as "files" in the metadata directory + (e.g. package-1.0.dist-info). These files include things + like: -- METADATA: The distribution metadata including fields - like Name and Version and Description. -- entry_points.txt: A series of entry points as defined in - `the entry points spec `_. -- RECORD: A record of files according to - `this recording spec `_. + - METADATA: The distribution metadata including fields + like Name and Version and Description. + - entry_points.txt: A series of entry points as defined in + `the entry points spec `_. + - RECORD: A record of files according to + `this recording spec `_. -A package may provide any set of files, including those -not listed here or none at all. + A package may provide any set of files, including those + not listed here or none at all. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ -:param filename: The name of the file in the distribution info. -:return: The text if found, otherwise None. -""" def locate_file(self, path: StrPath) -> _SimplePath: ... 
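A minimal usage sketch of the EntryPoint and Distribution APIs whose stubs are annotated above, assuming a reasonably recent Python (3.10+); the distribution name "pip" and the entry point value "json:loads [fast]" are illustrative choices, not taken from the patch.

    # Sketch only: exercises EntryPoint parsing/loading and Distribution lookup
    # as described in the docstrings above. "pip" is an assumed installed package.
    from importlib.metadata import Distribution, EntryPoint, PackageNotFoundError

    # EntryPoint value syntax is "module:attr [extras]" per the docstrings above.
    ep = EntryPoint(name="demo", value="json:loads [fast]", group="demo.group")
    assert ep.module == "json"
    assert ep.attr == "loads"
    assert ep.extras == ["fast"]

    loads = ep.load()                      # imports "json" and returns json.loads
    assert loads("[1, 2, 3]") == [1, 2, 3]

    # Distribution.from_name() raises PackageNotFoundError for unknown names.
    try:
        dist = Distribution.from_name("pip")
        print(dist.version)                # the "Version" metadata field
        print(dist.requires)               # list[str] | None of requirement strings
        print((dist.files or [])[:3])      # PackagePath entries, or None without RECORD
    except PackageNotFoundError:
        print("pip is not installed in this environment")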
def distribution(distribution_name: str) -> Distribution: """Get the ``Distribution`` instance for the named package. -:param distribution_name: The name of the distribution package as a string. -:return: A ``Distribution`` instance (or subclass thereof). -""" + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + @overload def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: """Get all ``Distribution`` instances in the current environment. -:return: An iterable of ``Distribution`` instances. -""" + :return: An iterable of ``Distribution`` instances. + """ + @overload def distributions( *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any @@ -681,17 +707,17 @@ if sys.version_info >= (3, 10): def metadata(distribution_name: str) -> PackageMetadata: """Get the metadata for the named package. -:param distribution_name: The name of the distribution package to query. -:return: A PackageMetadata containing the parsed metadata. -""" + :param distribution_name: The name of the distribution package to query. + :return: A PackageMetadata containing the parsed metadata. + """ else: def metadata(distribution_name: str) -> Message: """Get the metadata for the named package. - :param distribution_name: The name of the distribution package to query. - :return: An email.Message containing the parsed metadata. - """ + :param distribution_name: The name of the distribution package to query. + :return: An email.Message containing the parsed metadata. + """ if sys.version_info >= (3, 12): def entry_points( @@ -699,32 +725,33 @@ if sys.version_info >= (3, 12): ) -> EntryPoints: """Return EntryPoint objects for all installed packages. -Pass selection parameters (group or name) to filter the -result to entry points matching those properties (see -EntryPoints.select()). + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). -:return: EntryPoints for all installed packages. -""" + :return: EntryPoints for all installed packages. + """ elif sys.version_info >= (3, 10): @overload def entry_points() -> SelectableGroups: """Return EntryPoint objects for all installed packages. - Pass selection parameters (group or name) to filter the - result to entry points matching those properties (see - EntryPoints.select()). + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). - For compatibility, returns ``SelectableGroups`` object unless - selection parameters are supplied. In the future, this function - will return ``EntryPoints`` instead of ``SelectableGroups`` - even when no selection parameters are supplied. + For compatibility, returns ``SelectableGroups`` object unless + selection parameters are supplied. In the future, this function + will return ``EntryPoints`` instead of ``SelectableGroups`` + even when no selection parameters are supplied. - For maximum future compatibility, pass selection parameters - or invoke ``.select`` with parameters on the result. + For maximum future compatibility, pass selection parameters + or invoke ``.select`` with parameters on the result. + + :return: EntryPoints or SelectableGroups for all installed packages. + """ - :return: EntryPoints or SelectableGroups for all installed packages. 
- """ @overload def entry_points( *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... @@ -734,26 +761,28 @@ else: def entry_points() -> dict[str, list[EntryPoint]]: """Return EntryPoint objects for all installed packages. - :return: EntryPoint objects for all installed packages. - """ + :return: EntryPoint objects for all installed packages. + """ def version(distribution_name: str) -> str: """Get the version string for the named package. -:param distribution_name: The name of the distribution package to query. -:return: The version string for the package as defined in the package's - "Version" metadata key. -""" + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + def files(distribution_name: str) -> list[PackagePath] | None: """Return a list of files for the named package. -:param distribution_name: The name of the distribution package to query. -:return: List of files composing the distribution. -""" + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + def requires(distribution_name: str) -> list[str] | None: """ -Return a list of requirements for the named package. + Return a list of requirements for the named package. -:return: An iterable of requirements, suitable for - packaging.requirement.Requirement. -""" + :return: An iterable of requirements, suitable for + packaging.requirement.Requirement. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi index 194b203053cd1..530e2463fcc2e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -16,27 +16,29 @@ class PackageMetadata(Protocol): @property def json(self) -> dict[str, str | list[str]]: """ -A JSON-compatible form of the metadata. -""" + A JSON-compatible form of the metadata. + """ + @overload def get_all(self, name: str, failobj: None = None) -> list[Any] | None: - """Helper for @overload to raise when called. -""" + """Helper for @overload to raise when called.""" + @overload def get_all(self, name: str, failobj: _T) -> list[Any] | _T: ... if sys.version_info >= (3, 12): @overload def get(self, name: str, failobj: None = None) -> str | None: - """Helper for @overload to raise when called. -""" + """Helper for @overload to raise when called.""" + @overload def get(self, name: str, failobj: _T) -> _T | str: ... if sys.version_info >= (3, 13): class SimplePath(Protocol): """ -A minimal subset of pathlib.Path required by Distribution. -""" + A minimal subset of pathlib.Path required by Distribution. + """ + def joinpath(self, other: StrPath, /) -> SimplePath: ... def __truediv__(self, other: StrPath, /) -> SimplePath: ... # Incorrect at runtime @@ -49,8 +51,9 @@ A minimal subset of pathlib.Path required by Distribution. elif sys.version_info >= (3, 12): class SimplePath(Protocol[_T_co]): """ - A minimal subset of pathlib.Path required by PathDistribution. - """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + # At runtime this is defined as taking `str | _T`, but that causes trouble. # See #11436. def joinpath(self, other: str, /) -> _T_co: ... 
@@ -63,8 +66,9 @@ elif sys.version_info >= (3, 12): else: class SimplePath(Protocol): """ - A minimal subset of pathlib.Path required by PathDistribution. - """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + # Actually takes only self at runtime, but that's clearly wrong def joinpath(self, other: Any, /) -> SimplePath: ... # Not defined as a property at runtime, but it should be diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi index 0c91f06d784a0..029111a3c7f3c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/readers.pyi @@ -4,6 +4,7 @@ Compatibility shim for .resources.readers as found on Python 3.10. Consumers that can rely on Python 3.11 should use the other module directly. """ + # On py311+, things are actually defined in importlib.resources.readers, # and re-exported here, # but doing it this way leads to less code duplication for us @@ -40,10 +41,11 @@ if sys.version_info >= (3, 10): def __init__(self, loader: FileLoader) -> None: ... def resource_path(self, resource: StrPath) -> str: """ -Return the file system path to prevent -`resources.path()` from creating a temporary -copy. -""" + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + def files(self) -> pathlib.Path: ... class ZipReader(abc.TraversableResources): @@ -53,18 +55,20 @@ copy. def open_resource(self, resource: str) -> BufferedReader: ... def is_resource(self, path: StrPath) -> bool: """ -Workaround for `zipfile.Path.is_file` returning true -for non-existent paths. -""" + Workaround for `zipfile.Path.is_file` returning true + for non-existent paths. + """ + def files(self) -> zipfile.Path: ... class MultiplexedPath(abc.Traversable): """ -Given a series of Traversable objects, implement a merged -version of the interface across all objects. Useful for -namespace packages which may be multihomed at a single -name. -""" + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. + """ + def __init__(self, *paths: abc.Traversable) -> None: ... def iterdir(self) -> Iterator[abc.Traversable]: ... def read_bytes(self) -> NoReturn: ... @@ -91,8 +95,9 @@ name. def __init__(self, namespace_path: Iterable[str]) -> None: ... def resource_path(self, resource: str) -> str: """ -Return the file system path to prevent -`resources.path()` from creating a temporary -copy. -""" + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + def files(self) -> MultiplexedPath: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi index 15bf92bb1baea..bc87745ca6582 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/__init__.pyi @@ -6,6 +6,7 @@ and importlib_resources in PyPI. See https://github.com/python/importlib_metadata/wiki/Development-Methodology for more detail. 
""" + import os import sys from collections.abc import Iterator @@ -67,68 +68,70 @@ if sys.version_info >= (3, 13): else: def open_binary(package: Package, resource: Resource) -> BinaryIO: - """Return a file-like object opened for binary reading of the resource. -""" + """Return a file-like object opened for binary reading of the resource.""" + def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: - """Return a file-like object opened for text reading of the resource. -""" + """Return a file-like object opened for text reading of the resource.""" + def read_binary(package: Package, resource: Resource) -> bytes: - """Return the binary contents of the resource. -""" + """Return the binary contents of the resource.""" + def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: """Return the decoded string of the resource. - The decoding-related arguments have the same semantics as those of - bytes.decode(). - """ + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + def path(package: Package, resource: Resource) -> AbstractContextManager[Path, Literal[False]]: """A context manager providing a file path object to the resource. - If the resource does not already exist on its own on the file system, - a temporary file will be created. If the file was created, the file - will be deleted upon exiting the context manager (no exception is - raised if the file was deleted prior to the context manager - exiting). - """ + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + def is_resource(package: Package, name: str) -> bool: """True if `name` is a resource inside `package`. - Directories are *not* resources. - """ + Directories are *not* resources. + """ if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") def contents(package: Package) -> Iterator[str]: """Return an iterable of entries in `package`. - Note that not all entries are resources. Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ else: def contents(package: Package) -> Iterator[str]: """Return an iterable of entries in 'package'. - Note that not all entries are resources. Specifically, directories are - not considered resources. Use `is_resource()` on each entry returned here - to check if it is a resource or not. - """ + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ if sys.version_info >= (3, 11): from importlib.resources._common import as_file as as_file else: def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: """ - Given a Traversable object, return that object as a - path on the local file system in a context manager. - """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. 
+ """ if sys.version_info >= (3, 11): from importlib.resources._common import files as files else: def files(package: Package) -> Traversable: """ - Get a Traversable resource from a package - """ + Get a Traversable resource from a package + """ if sys.version_info >= (3, 11): from importlib.resources.abc import ResourceReader as ResourceReader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi index bb29887137deb..c4250d4e27ae1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_common.pyi @@ -19,21 +19,23 @@ if sys.version_info >= (3, 11): func: Callable[[Anchor | None], Traversable], ) -> Callable[[Anchor | None, Anchor | None], Traversable]: """ -Replace 'package' parameter as 'anchor' and warn about the change. + Replace 'package' parameter as 'anchor' and warn about the change. -Other errors should fall through. + Other errors should fall through. ->>> files('a', 'b') -Traceback (most recent call last): -TypeError: files() takes from 0 to 1 positional arguments but 2 were given + >>> files('a', 'b') + Traceback (most recent call last): + TypeError: files() takes from 0 to 1 positional arguments but 2 were given + + Remove this compatibility in Python 3.14. + """ -Remove this compatibility in Python 3.14. -""" @overload def files(anchor: Anchor | None = None) -> Traversable: """ -Get a Traversable resource for an anchor. -""" + Get a Traversable resource for an anchor. + """ + @overload @deprecated("Deprecated since Python 3.12; will be removed in Python 3.15. Use `anchor` parameter instead.") def files(package: Anchor | None = None) -> Traversable: ... @@ -41,14 +43,13 @@ Get a Traversable resource for an anchor. else: def files(package: Package) -> Traversable: """ - Get a Traversable resource from a package - """ + Get a Traversable resource from a package + """ def get_resource_reader(package: types.ModuleType) -> ResourceReader | None: """ -Return the package's loader if it's a ResourceReader. -""" - + Return the package's loader if it's a ResourceReader. + """ if sys.version_info >= (3, 12): def resolve(cand: Anchor | None) -> types.ModuleType: ... @@ -59,16 +60,17 @@ Return the package's loader if it's a ResourceReader. def get_package(package: Package) -> types.ModuleType: """Take a package name or module object and return the module. - Raise an exception if the resolved module is not a package. - """ + Raise an exception if the resolved module is not a package. + """ def from_package(package: types.ModuleType) -> Traversable: """ -Return a Traversable object for the given package. + Return a Traversable object for the given package. + + """ -""" def as_file(path: Traversable) -> AbstractContextManager[Path, Literal[False]]: """ -Given a Traversable object, return that object as a -path on the local file system in a context manager. -""" + Given a Traversable object, return that object as a + path on the local file system in a context manager. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi index b03d8bb1b8099..9bea507086bc0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/_functional.pyi @@ -1,5 +1,5 @@ -"""Simplified function-based API for importlib.resources -""" +"""Simplified function-based API for importlib.resources""" + import sys # Even though this file is 3.13+ only, Pyright will complain in stubtest for older versions. @@ -14,39 +14,40 @@ if sys.version_info >= (3, 13): from typing_extensions import Unpack, deprecated def open_binary(anchor: Anchor, *path_names: StrPath) -> BinaryIO: - """Open for binary reading the *resource* within *package*. -""" + """Open for binary reading the *resource* within *package*.""" + @overload def open_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" ) -> TextIOWrapper: - """Open for text reading the *resource* within *package*. -""" + """Open for text reading the *resource* within *package*.""" + @overload def open_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> TextIOWrapper: ... def read_binary(anchor: Anchor, *path_names: StrPath) -> bytes: - """Read and return contents of *resource* within *package* as bytes. -""" + """Read and return contents of *resource* within *package* as bytes.""" + @overload def read_text( anchor: Anchor, *path_names: Unpack[tuple[StrPath]], encoding: str | None = "utf-8", errors: str | None = "strict" ) -> str: - """Read and return contents of *resource* within *package* as str. -""" + """Read and return contents of *resource* within *package* as str.""" + @overload def read_text(anchor: Anchor, *path_names: StrPath, encoding: str | None, errors: str | None = "strict") -> str: ... def path(anchor: Anchor, *path_names: StrPath) -> AbstractContextManager[Path, Literal[False]]: - """Return the path to the *resource* as an actual file system path. -""" + """Return the path to the *resource* as an actual file system path.""" + def is_resource(anchor: Anchor, *path_names: StrPath) -> bool: """Return ``True`` if there is a resource named *name* in the package, -Otherwise returns ``False``. -""" + Otherwise returns ``False``. + """ + @deprecated("Deprecated since Python 3.11. Use `files(anchor).iterdir()`.") def contents(anchor: Anchor, *path_names: StrPath) -> Iterator[str]: """Return an iterable over the named resources within the package. -The iterable returns :class:`str` resources (e.g. files). -The iterable does not recurse into subdirectories. -""" + The iterable returns :class:`str` resources (e.g. files). + The iterable does not recurse into subdirectories. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi index 93de4dcb2effa..af8faaea598a3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/abc.pyi @@ -6,68 +6,73 @@ from typing import IO, Any, Literal, Protocol, overload, runtime_checkable if sys.version_info >= (3, 11): class ResourceReader(metaclass=ABCMeta): - """Abstract base class for loaders to provide resource reading support. 
-""" + """Abstract base class for loaders to provide resource reading support.""" + @abstractmethod def open_resource(self, resource: str) -> IO[bytes]: """Return an opened, file-like object for binary reading. -The 'resource' argument is expected to represent only a file name. -If the resource cannot be found, FileNotFoundError is raised. -""" + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. + """ + @abstractmethod def resource_path(self, resource: str) -> str: """Return the file system path to the specified resource. -The 'resource' argument is expected to represent only a file name. -If the resource does not exist on the file system, raise -FileNotFoundError. -""" + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + @abstractmethod def is_resource(self, path: str) -> bool: """Return True if the named 'path' is a resource. -Files are resources, directories are not. -""" + Files are resources, directories are not. + """ + @abstractmethod def contents(self) -> Iterator[str]: - """Return an iterable of entries in `package`. -""" + """Return an iterable of entries in `package`.""" @runtime_checkable class Traversable(Protocol): """ -An object with a subset of pathlib.Path methods suitable for -traversing directories and opening files. + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + Any exceptions that occur when accessing the backing resource + may propagate unaltered. + """ -Any exceptions that occur when accessing the backing resource -may propagate unaltered. -""" @abstractmethod def is_dir(self) -> bool: """ -Return True if self is a directory -""" + Return True if self is a directory + """ + @abstractmethod def is_file(self) -> bool: """ -Return True if self is a file -""" + Return True if self is a file + """ + @abstractmethod def iterdir(self) -> Iterator[Traversable]: """ -Yield Traversable objects in self -""" + Yield Traversable objects in self + """ + @abstractmethod def joinpath(self, *descendants: str) -> Traversable: """ -Return Traversable resolved with any descendants applied. - -Each descendant should be a path segment relative to self -and each may contain multiple levels separated by -``posixpath.sep`` (``/``). -""" + Return Traversable resolved with any descendants applied. + Each descendant should be a path segment relative to self + and each may contain multiple levels separated by + ``posixpath.sep`` (``/``). + """ # The documentation and runtime protocol allows *args, **kwargs arguments, # but this would mean that all implementers would have to support them, # which is not the case. @@ -75,12 +80,13 @@ and each may contain multiple levels separated by @abstractmethod def open(self, mode: Literal["r"] = "r", *, encoding: str | None = None, errors: str | None = None) -> IO[str]: """ -mode may be 'r' or 'rb' to open as text or binary. Return a handle -suitable for reading (same as pathlib.Path.open). + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ -When opening as text, accepts encoding parameters such as those -accepted by io.TextIOWrapper. -""" @overload @abstractmethod def open(self, mode: Literal["rb"]) -> IO[bytes]: ... 
@@ -88,32 +94,36 @@ accepted by io.TextIOWrapper. @abstractmethod def name(self) -> str: """ -The base name of this object without any parent references. -""" + The base name of this object without any parent references. + """ + def __truediv__(self, child: str, /) -> Traversable: """ -Return Traversable child in self -""" + Return Traversable child in self + """ + @abstractmethod def read_bytes(self) -> bytes: """ -Read contents of self as bytes -""" + Read contents of self as bytes + """ + @abstractmethod def read_text(self, encoding: str | None = None) -> str: """ -Read contents of self as text -""" + Read contents of self as text + """ class TraversableResources(ResourceReader): """ -The required interface for providing traversable -resources. -""" + The required interface for providing traversable + resources. + """ + @abstractmethod def files(self) -> Traversable: - """Return a Traversable object for the loaded package. -""" + """Return a Traversable object for the loaded package.""" + def open_resource(self, resource: str) -> BufferedReader: ... def resource_path(self, resource: Any) -> str: ... def is_resource(self, path: str) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi index c67da99ac38cf..27ded93f6efcb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/resources/simple.pyi @@ -1,6 +1,7 @@ """ Interface adapters for low-level readers. """ + import abc import sys from collections.abc import Iterator @@ -13,38 +14,44 @@ if sys.version_info >= (3, 11): class SimpleReader(abc.ABC): """ -The minimum, low-level interface required from a resource -provider. -""" + The minimum, low-level interface required from a resource + provider. + """ + @property @abc.abstractmethod def package(self) -> str: """ -The name of the package for which this reader loads resources. -""" + The name of the package for which this reader loads resources. + """ + @abc.abstractmethod def children(self) -> list[SimpleReader]: """ -Obtain an iterable of SimpleReader for available -child containers (e.g. directories). -""" + Obtain an iterable of SimpleReader for available + child containers (e.g. directories). + """ + @abc.abstractmethod def resources(self) -> list[str]: """ -Obtain available named resources for this virtual package. -""" + Obtain available named resources for this virtual package. + """ + @abc.abstractmethod def open_binary(self, resource: str) -> BinaryIO: """ -Obtain a File-like for a named resource. -""" + Obtain a File-like for a named resource. + """ + @property def name(self) -> str: ... class ResourceHandle(Traversable, metaclass=abc.ABCMeta): """ -Handle to a named resource in a ResourceReader. -""" + Handle to a named resource in a ResourceReader. + """ + parent: ResourceContainer def __init__(self, parent: ResourceContainer, name: str) -> None: ... def is_file(self) -> Literal[True]: ... @@ -67,8 +74,9 @@ Handle to a named resource in a ResourceReader. class ResourceContainer(Traversable, metaclass=abc.ABCMeta): """ -Traversable container for a package's resources via its reader. -""" + Traversable container for a package's resources via its reader. + """ + reader: SimpleReader def __init__(self, reader: SimpleReader) -> None: ... def is_dir(self) -> Literal[True]: ... @@ -80,8 +88,9 @@ Traversable container for a package's resources via its reader. 
class TraversableReader(TraversableResources, SimpleReader, metaclass=abc.ABCMeta): """ -A TraversableResources based on SimpleReader. Resource providers -may derive from this class to provide the TraversableResources -interface by supplying the SimpleReader interface. -""" + A TraversableResources based on SimpleReader. Resource providers + may derive from this class to provide the TraversableResources + interface by supplying the SimpleReader interface. + """ + def files(self) -> ResourceContainer: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi index 03ee7324ea648..4c6f2308448f0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/simple.pyi @@ -4,6 +4,7 @@ Compatibility shim for .resources.simple as found on Python 3.10. Consumers that can rely on Python 3.11 should use the other module directly. """ + import sys if sys.version_info >= (3, 11): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi index eb55331f9069a..73acc3a83a030 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/importlib/util.pyi @@ -1,5 +1,5 @@ -"""Utility code for constructing importers, etc. -""" +"""Utility code for constructing importers, etc.""" + import importlib.machinery import sys import types @@ -26,21 +26,22 @@ if sys.version_info < (3, 12): def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: """Decorator to handle selecting the proper module for loaders. - The decorated function is passed the module to use instead of the module - name. The module passed in to the function is either from sys.modules if - it already exists or is a new module. If the module is new, then __name__ - is set the first argument to the method, __loader__ is set to self, and - __package__ is set accordingly (if self.is_package() is defined) will be set - before it is passed to the decorated function (if self.is_package() does - not work for the module it will be set post-load). + The decorated function is passed the module to use instead of the module + name. The module passed in to the function is either from sys.modules if + it already exists or is a new module. If the module is new, then __name__ + is set the first argument to the method, __loader__ is set to self, and + __package__ is set accordingly (if self.is_package() is defined) will be set + before it is passed to the decorated function (if self.is_package() does + not work for the module it will be set post-load). - If an exception is raised and the decorator created the module it is - subsequently removed from sys.modules. + If an exception is raised and the decorator created the module it is + subsequently removed from sys.modules. - The decorator assumes that the decorated function takes the module name as - the second argument. + The decorator assumes that the decorated function takes the module name as + the second argument. + + """ - """ @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." @@ -48,9 +49,10 @@ if sys.version_info < (3, 12): def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: """Set __loader__ on the returned module. - This function is deprecated. 
+ This function is deprecated. + + """ - """ @deprecated( "Deprecated since Python 3.4; removed in Python 3.12. " "`__name__`, `__package__` and `__loader__` are now set automatically." @@ -58,46 +60,44 @@ if sys.version_info < (3, 12): def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: """Set __package__ on the returned module. - This function is deprecated. + This function is deprecated. - """ + """ def resolve_name(name: str, package: str | None) -> str: - """Resolve a relative module name to an absolute one. -""" + """Resolve a relative module name to an absolute one.""" + def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: """Return the spec for the specified module. -First, sys.modules is checked to see if the module was already imported. If -so, then sys.modules[name].__spec__ is returned. If that happens to be -set to None, then ValueError is raised. If the module is not in -sys.modules, then sys.meta_path is searched for a suitable spec with the -value of 'path' given to the finders. None is returned if no spec could -be found. + First, sys.modules is checked to see if the module was already imported. If + so, then sys.modules[name].__spec__ is returned. If that happens to be + set to None, then ValueError is raised. If the module is not in + sys.modules, then sys.meta_path is searched for a suitable spec with the + value of 'path' given to the finders. None is returned if no spec could + be found. -If the name is for submodule (contains a dot), the parent module is -automatically imported. + If the name is for submodule (contains a dot), the parent module is + automatically imported. -The name and package arguments work the same as importlib.import_module(). -In other words, relative module names (with leading dots) work. + The name and package arguments work the same as importlib.import_module(). + In other words, relative module names (with leading dots) work. -""" + """ class LazyLoader(Loader): - """A loader that creates a module which defers loading until attribute access. -""" + """A loader that creates a module which defers loading until attribute access.""" + def __init__(self, loader: Loader) -> None: ... @classmethod def factory(cls, loader: Loader) -> Callable[..., LazyLoader]: - """Construct a callable which returns the eager loader made lazy. -""" + """Construct a callable which returns the eager loader made lazy.""" + def exec_module(self, module: types.ModuleType) -> None: - """Make the module load lazily. -""" + """Make the module load lazily.""" def source_hash(source_bytes: ReadableBuffer) -> bytes: - """Return the hash of *source_bytes* as used in hash-based pyc files. -""" + """Return the hash of *source_bytes* as used in hash-based pyc files.""" if sys.version_info >= (3, 14): __all__ = [ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi index 6c77116a35483..08d3d9d299832 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/inspect.pyi @@ -25,6 +25,7 @@ Here are some of the useful functions provided by this module: signature() - get a Signature object for the callable """ + import dis import enum import sys @@ -184,8 +185,8 @@ _V_contra = TypeVar("_V_contra", contravariant=True) class EndOfBlock(Exception): ... class BlockFinder: - """Provide a tokeneater() method to detect the end of a code block. 
-""" + """Provide a tokeneater() method to detect the end of a code block.""" + indent: int islambda: bool started: bool @@ -220,8 +221,9 @@ _GetMembersReturn: TypeAlias = list[tuple[str, _T]] @overload def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: """Return all members of an object as (name, value) pairs sorted by name. -Optionally, only return members that satisfy a given predicate. -""" + Optionally, only return members that satisfy a given predicate. + """ + @overload def getmembers(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload @@ -231,71 +233,71 @@ if sys.version_info >= (3, 11): @overload def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturn[_T]: """Return all members of an object as (name, value) pairs sorted by name -without triggering dynamic lookup via the descriptor protocol, -__getattr__ or __getattribute__. Optionally, only return members that -satisfy a given predicate. - -Note: this function may not be able to retrieve all members - that getmembers can fetch (like dynamically created attributes) - and may find members that getmembers can't (like descriptors - that raise AttributeError). It can also return descriptor objects - instead of instance members in some cases. -""" + without triggering dynamic lookup via the descriptor protocol, + __getattr__ or __getattribute__. Optionally, only return members that + satisfy a given predicate. + + Note: this function may not be able to retrieve all members + that getmembers can fetch (like dynamically created attributes) + and may find members that getmembers can't (like descriptors + that raise AttributeError). It can also return descriptor objects + instead of instance members in some cases. + """ + @overload def getmembers_static(object: object, predicate: _GetMembersPredicateTypeIs[_T]) -> _GetMembersReturn[_T]: ... @overload def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn[Any]: ... def getmodulename(path: StrPath) -> str | None: - """Return the module name for a given file, or None. -""" + """Return the module name for a given file, or None.""" + def ismodule(object: object) -> TypeIs[ModuleType]: - """Return true if the object is a module. -""" + """Return true if the object is a module.""" + def isclass(object: object) -> TypeIs[type[Any]]: - """Return true if the object is a class. -""" + """Return true if the object is a class.""" + def ismethod(object: object) -> TypeIs[MethodType]: - """Return true if the object is an instance method. -""" + """Return true if the object is an instance method.""" if sys.version_info >= (3, 14): # Not TypeIs because it does not return True for all modules def ispackage(object: object) -> TypeGuard[ModuleType]: - """Return true if the object is a package. -""" + """Return true if the object is a package.""" def isfunction(object: object) -> TypeIs[FunctionType]: """Return true if the object is a user-defined function. 
-Function objects provide these attributes: - __doc__ documentation string - __name__ name with which this function was defined - __qualname__ qualified name of this function - __module__ name of the module the function was defined in or None - __code__ code object containing compiled function bytecode - __defaults__ tuple of any default values for arguments - __globals__ global namespace in which this function was defined - __annotations__ dict of parameter annotations - __kwdefaults__ dict of keyword only parameters with defaults - __dict__ namespace which is supporting arbitrary function attributes - __closure__ a tuple of cells or None - __type_params__ tuple of type parameters -""" + Function objects provide these attributes: + __doc__ documentation string + __name__ name with which this function was defined + __qualname__ qualified name of this function + __module__ name of the module the function was defined in or None + __code__ code object containing compiled function bytecode + __defaults__ tuple of any default values for arguments + __globals__ global namespace in which this function was defined + __annotations__ dict of parameter annotations + __kwdefaults__ dict of keyword only parameters with defaults + __dict__ namespace which is supporting arbitrary function attributes + __closure__ a tuple of cells or None + __type_params__ tuple of type parameters + """ if sys.version_info >= (3, 12): def markcoroutinefunction(func: _F) -> _F: """ -Decorator to ensure callable is recognised as a coroutine function. -""" + Decorator to ensure callable is recognised as a coroutine function. + """ @overload def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: """Return true if the object is a user-defined generator function. -Generator function objects provide the same attributes as functions. -See help(isfunction) for a list of attributes. -""" + Generator function objects provide the same attributes as functions. + See help(isfunction) for a list of attributes. + """ + @overload def isgeneratorfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... @overload @@ -304,9 +306,10 @@ def isgeneratorfunction(obj: object) -> TypeGuard[Callable[..., GeneratorType[An def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: """Return true if the object is a coroutine function. -Coroutine functions are normally defined with "async def" syntax, but may -be marked via markcoroutinefunction. -""" + Coroutine functions are normally defined with "async def" syntax, but may + be marked via markcoroutinefunction. + """ + @overload def iscoroutinefunction(obj: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... @overload @@ -316,33 +319,35 @@ def iscoroutinefunction(obj: object) -> TypeGuard[Callable[..., CoroutineType[An def isgenerator(object: object) -> TypeIs[GeneratorType[Any, Any, Any]]: """Return true if the object is a generator. 
-Generator objects provide these attributes: - gi_code code object - gi_frame frame object or possibly None once the generator has - been exhausted - gi_running set to 1 when generator is executing, 0 otherwise - gi_yieldfrom object being iterated by yield from or None - - __iter__() defined to support iteration over container - close() raises a new GeneratorExit exception inside the - generator to terminate the iteration - send() resumes the generator and "sends" a value that becomes - the result of the current yield-expression - throw() used to raise an exception inside the generator -""" + Generator objects provide these attributes: + gi_code code object + gi_frame frame object or possibly None once the generator has + been exhausted + gi_running set to 1 when generator is executing, 0 otherwise + gi_yieldfrom object being iterated by yield from or None + + __iter__() defined to support iteration over container + close() raises a new GeneratorExit exception inside the + generator to terminate the iteration + send() resumes the generator and "sends" a value that becomes + the result of the current yield-expression + throw() used to raise an exception inside the generator + """ + def iscoroutine(object: object) -> TypeIs[CoroutineType[Any, Any, Any]]: - """Return true if the object is a coroutine. -""" + """Return true if the object is a coroutine.""" + def isawaitable(object: object) -> TypeIs[Awaitable[Any]]: - """Return true if object can be passed to an ``await`` expression. -""" + """Return true if object can be passed to an ``await`` expression.""" + @overload def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: """Return true if the object is an asynchronous generator function. -Asynchronous generator functions are defined with "async def" -syntax and have "yield" expressions in their body. -""" + Asynchronous generator functions are defined with "async def" + syntax and have "yield" expressions in their body. + """ + @overload def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... @overload @@ -356,77 +361,79 @@ class _SupportsDelete(Protocol[_T_contra]): def __delete__(self, instance: _T_contra, /) -> None: ... def isasyncgen(object: object) -> TypeIs[AsyncGeneratorType[Any, Any]]: - """Return true if the object is an asynchronous generator. -""" + """Return true if the object is an asynchronous generator.""" + def istraceback(object: object) -> TypeIs[TracebackType]: """Return true if the object is a traceback. -Traceback objects provide these attributes: - tb_frame frame object at this level - tb_lasti index of last attempted instruction in bytecode - tb_lineno current line number in Python source code - tb_next next inner traceback object (called by this level) -""" + Traceback objects provide these attributes: + tb_frame frame object at this level + tb_lasti index of last attempted instruction in bytecode + tb_lineno current line number in Python source code + tb_next next inner traceback object (called by this level) + """ + def isframe(object: object) -> TypeIs[FrameType]: """Return true if the object is a frame object. 
-Frame objects provide these attributes: - f_back next outer frame object (this frame's caller) - f_builtins built-in namespace seen by this frame - f_code code object being executed in this frame - f_globals global namespace seen by this frame - f_lasti index of last attempted instruction in bytecode - f_lineno current line number in Python source code - f_locals local namespace seen by this frame - f_trace tracing function for this frame, or None - f_trace_lines is a tracing event triggered for each source line? - f_trace_opcodes are per-opcode events being requested? - - clear() used to clear all references to local variables -""" + Frame objects provide these attributes: + f_back next outer frame object (this frame's caller) + f_builtins built-in namespace seen by this frame + f_code code object being executed in this frame + f_globals global namespace seen by this frame + f_lasti index of last attempted instruction in bytecode + f_lineno current line number in Python source code + f_locals local namespace seen by this frame + f_trace tracing function for this frame, or None + f_trace_lines is a tracing event triggered for each source line? + f_trace_opcodes are per-opcode events being requested? + + clear() used to clear all references to local variables + """ + def iscode(object: object) -> TypeIs[CodeType]: """Return true if the object is a code object. -Code objects provide these attributes: - co_argcount number of arguments (not including *, ** args - or keyword only arguments) - co_code string of raw compiled bytecode - co_cellvars tuple of names of cell variables - co_consts tuple of constants used in the bytecode - co_filename name of file in which this code object was created - co_firstlineno number of first line in Python source code - co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg - | 16=nested | 32=generator | 64=nofree | 128=coroutine - | 256=iterable_coroutine | 512=async_generator - | 0x4000000=has_docstring - co_freevars tuple of names of free variables - co_posonlyargcount number of positional only arguments - co_kwonlyargcount number of keyword only arguments (not including ** arg) - co_lnotab encoded mapping of line numbers to bytecode indices - co_name name with which this code object was defined - co_names tuple of names other than arguments and function locals - co_nlocals number of local variables - co_stacksize virtual machine stack space required - co_varnames tuple of names of arguments and local variables - co_qualname fully qualified function name - - co_lines() returns an iterator that yields successive bytecode ranges - co_positions() returns an iterator of source code positions for each bytecode instruction - replace() returns a copy of the code object with a new values -""" + Code objects provide these attributes: + co_argcount number of arguments (not including *, ** args + or keyword only arguments) + co_code string of raw compiled bytecode + co_cellvars tuple of names of cell variables + co_consts tuple of constants used in the bytecode + co_filename name of file in which this code object was created + co_firstlineno number of first line in Python source code + co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg + | 16=nested | 32=generator | 64=nofree | 128=coroutine + | 256=iterable_coroutine | 512=async_generator + | 0x4000000=has_docstring + co_freevars tuple of names of free variables + co_posonlyargcount number of positional only arguments + co_kwonlyargcount number of keyword only arguments (not including ** arg) + 
co_lnotab encoded mapping of line numbers to bytecode indices + co_name name with which this code object was defined + co_names tuple of names other than arguments and function locals + co_nlocals number of local variables + co_stacksize virtual machine stack space required + co_varnames tuple of names of arguments and local variables + co_qualname fully qualified function name + + co_lines() returns an iterator that yields successive bytecode ranges + co_positions() returns an iterator of source code positions for each bytecode instruction + replace() returns a copy of the code object with a new values + """ + def isbuiltin(object: object) -> TypeIs[BuiltinFunctionType]: """Return true if the object is a built-in function or method. -Built-in functions and methods provide these attributes: - __doc__ documentation string - __name__ original name of this function or method - __self__ instance to which a method is bound, or None -""" + Built-in functions and methods provide these attributes: + __doc__ documentation string + __name__ original name of this function or method + __self__ instance to which a method is bound, or None + """ if sys.version_info >= (3, 11): def ismethodwrapper(object: object) -> TypeIs[MethodWrapperType]: - """Return true if the object is a method wrapper. -""" + """Return true if the object is a method wrapper.""" def isroutine( object: object, @@ -440,48 +447,51 @@ def isroutine( | MethodDescriptorType | ClassMethodDescriptorType ]: - """Return true if the object is any kind of function or method. -""" + """Return true if the object is any kind of function or method.""" + def ismethoddescriptor(object: object) -> TypeIs[MethodDescriptorType]: """Return true if the object is a method descriptor. -But not if ismethod() or isclass() or isfunction() are true. + But not if ismethod() or isclass() or isfunction() are true. -This is new in Python 2.2, and, for example, is true of int.__add__. -An object passing this test has a __get__ attribute, but not a -__set__ attribute or a __delete__ attribute. Beyond that, the set -of attributes varies; __name__ is usually sensible, and __doc__ -often is. + This is new in Python 2.2, and, for example, is true of int.__add__. + An object passing this test has a __get__ attribute, but not a + __set__ attribute or a __delete__ attribute. Beyond that, the set + of attributes varies; __name__ is usually sensible, and __doc__ + often is. + + Methods implemented via descriptors that also pass one of the other + tests return false from the ismethoddescriptor() test, simply because + the other tests promise more -- you can, e.g., count on having the + __func__ attribute (etc) when an object passes ismethod(). + """ -Methods implemented via descriptors that also pass one of the other -tests return false from the ismethoddescriptor() test, simply because -the other tests promise more -- you can, e.g., count on having the -__func__ attribute (etc) when an object passes ismethod(). -""" def ismemberdescriptor(object: object) -> TypeIs[MemberDescriptorType]: """Return true if the object is a member descriptor. -Member descriptors are specialized descriptors defined in extension -modules. -""" + Member descriptors are specialized descriptors defined in extension + modules. + """ + def isabstract(object: object) -> bool: - """Return true if the object is an abstract base class (ABC). 
-""" + """Return true if the object is an abstract base class (ABC).""" + def isgetsetdescriptor(object: object) -> TypeIs[GetSetDescriptorType]: """Return true if the object is a getset descriptor. -getset descriptors are specialized descriptors defined in extension -modules. -""" + getset descriptors are specialized descriptors defined in extension + modules. + """ + def isdatadescriptor(object: object) -> TypeIs[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: """Return true if the object is a data descriptor. -Data descriptors have a __set__ or a __delete__ attribute. Examples are -properties (defined in Python) and getsets and members (defined in C). -Typically, data descriptors will also have __name__ and __doc__ attributes -(properties, getsets, and members have both of these attributes), but this -is not guaranteed. -""" + Data descriptors have a __set__ or a __delete__ attribute. Examples are + properties (defined in Python) and getsets and members (defined in C). + Typically, data descriptors will also have __name__ and __doc__ attributes + (properties, getsets, and members have both of these attributes), but this + is not guaranteed. + """ # # Retrieving source code @@ -493,24 +503,25 @@ _SourceObjectType: TypeAlias = ( def findsource(object: _SourceObjectType) -> tuple[list[str], int]: """Return the entire source file and starting line number for an object. -The argument may be a module, class, method, function, traceback, frame, -or code object. The source code is returned as a list of all the lines -in the file and the line number indexes a line in that list. An OSError -is raised if the source code cannot be retrieved. -""" + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of all the lines + in the file and the line number indexes a line in that list. An OSError + is raised if the source code cannot be retrieved. + """ + def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: """Return an absolute path to the source or compiled file for an object. -The idea is for each object to have a unique origin, so this routine -normalizes the result as much as possible. -""" + The idea is for each object to have a unique origin, so this routine + normalizes the result as much as possible. + """ # Special-case the two most common input types here # to avoid the annoyingly vague `Sequence[str]` return type @overload def getblock(lines: list[str]) -> list[str]: - """Extract the block of code at the top of the given list of lines. -""" + """Extract the block of code at the top of the given list of lines.""" + @overload def getblock(lines: tuple[str, ...]) -> tuple[str, ...]: ... @overload @@ -518,50 +529,55 @@ def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> str | None: """Get the documentation string for an object. -All tabs are expanded to spaces. To clean up docstrings that are -indented to line up with blocks of code, any whitespace than can be -uniformly removed from the second line onwards is removed. -""" + All tabs are expanded to spaces. To clean up docstrings that are + indented to line up with blocks of code, any whitespace than can be + uniformly removed from the second line onwards is removed. + """ + def getcomments(object: object) -> str | None: """Get lines of comments immediately preceding an object's source code. -Returns None when source can't be found. -""" + Returns None when source can't be found. 
+ """ + def getfile(object: _SourceObjectType) -> str: - """Work out which source or compiled file an object was defined in. -""" + """Work out which source or compiled file an object was defined in.""" + def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: - """Return the module an object was defined in, or None if not found. -""" + """Return the module an object was defined in, or None if not found.""" + def getsourcefile(object: _SourceObjectType) -> str | None: """Return the filename that can be used to locate an object's source. -Return None if no way can be identified to get the source. -""" + Return None if no way can be identified to get the source. + """ + def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: """Return a list of source lines and starting line number for an object. -The argument may be a module, class, method, function, traceback, frame, -or code object. The source code is returned as a list of the lines -corresponding to the object and the line number indicates where in the -original source file the first line of code was found. An OSError is -raised if the source code cannot be retrieved. -""" + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of the lines + corresponding to the object and the line number indicates where in the + original source file the first line of code was found. An OSError is + raised if the source code cannot be retrieved. + """ + def getsource(object: _SourceObjectType) -> str: """Return the text of the source code for an object. -The argument may be a module, class, method, function, traceback, frame, -or code object. The source code is returned as a single string. An -OSError is raised if the source code cannot be retrieved. -""" + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a single string. An + OSError is raised if the source code cannot be retrieved. + """ + def cleandoc(doc: str) -> str: """Clean up indentation from docstrings. -Any whitespace that can be uniformly removed from the second line -onwards is removed. -""" + Any whitespace that can be uniformly removed from the second line + onwards is removed. + """ + def indentsize(line: str) -> int: - """Return the indent size, in spaces, at the start of a line of text. -""" + """Return the indent size, in spaces, at the start of a line of text.""" _IntrospectableCallable: TypeAlias = Callable[..., Any] @@ -578,8 +594,7 @@ if sys.version_info >= (3, 14): eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 ) -> Signature: - """Get a signature object for the passed callable. -""" + """Get a signature object for the passed callable.""" elif sys.version_info >= (3, 10): def signature( @@ -590,50 +605,48 @@ elif sys.version_info >= (3, 10): locals: Mapping[str, Any] | None = None, eval_str: bool = False, ) -> Signature: - """Get a signature object for the passed callable. -""" + """Get a signature object for the passed callable.""" else: def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: - """Get a signature object for the passed callable. -""" + """Get a signature object for the passed callable.""" class _void: - """A private marker - used in Parameter & Signature. -""" + """A private marker - used in Parameter & Signature.""" + class _empty: - """Marker object for Signature.empty and Parameter.empty. 
-""" + """Marker object for Signature.empty and Parameter.empty.""" class Signature: """A Signature object represents the overall signature of a function. -It stores a Parameter object for each parameter accepted by the -function, as well as information specific to the function itself. - -A Signature object has the following public attributes and methods: - -* parameters : OrderedDict - An ordered mapping of parameters' names to the corresponding - Parameter objects (keyword-only arguments are in the same order - as listed in `code.co_varnames`). -* return_annotation : object - The annotation for the return type of the function if specified. - If the function has no annotation for its return type, this - attribute is set to `Signature.empty`. -* bind(*args, **kwargs) -> BoundArguments - Creates a mapping from positional and keyword arguments to - parameters. -* bind_partial(*args, **kwargs) -> BoundArguments - Creates a partial mapping from positional and keyword arguments - to parameters (simulating 'functools.partial' behavior.) -""" + It stores a Parameter object for each parameter accepted by the + function, as well as information specific to the function itself. + + A Signature object has the following public attributes and methods: + + * parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in `code.co_varnames`). + * return_annotation : object + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is set to `Signature.empty`. + * bind(*args, **kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. + * bind_partial(*args, **kwargs) -> BoundArguments + Creates a partial mapping from positional and keyword arguments + to parameters (simulating 'functools.partial' behavior.) + """ + __slots__ = ("_return_annotation", "_parameters") def __init__( self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True ) -> None: """Constructs Signature from the given list of Parameter -objects and 'return_annotation'. All arguments are optional. -""" + objects and 'return_annotation'. All arguments are optional. + """ empty = _empty @property def parameters(self) -> types.MappingProxyType[str, Parameter]: ... @@ -641,19 +654,21 @@ objects and 'return_annotation'. All arguments are optional. def return_annotation(self) -> Any: ... def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: """Get a BoundArguments object, that maps the passed `args` -and `kwargs` to the function's signature. Raises `TypeError` -if the passed arguments can not be bound. -""" + and `kwargs` to the function's signature. Raises `TypeError` + if the passed arguments can not be bound. + """ + def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: """Get a BoundArguments object, that partially maps the -passed `args` and `kwargs` to the function's signature. -Raises `TypeError` if the passed arguments can not be bound. -""" + passed `args` and `kwargs` to the function's signature. + Raises `TypeError` if the passed arguments can not be bound. + """ + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: """Creates a customized copy of the Signature. -Pass 'parameters' and/or 'return_annotation' arguments -to override them in the new copy. 
-""" + Pass 'parameters' and/or 'return_annotation' arguments + to override them in the new copy. + """ __replace__ = replace if sys.version_info >= (3, 14): @classmethod @@ -667,8 +682,7 @@ to override them in the new copy. eval_str: bool = False, annotation_format: Format = Format.VALUE, # noqa: Y011 ) -> Self: - """Constructs Signature for the given callable object. -""" + """Constructs Signature for the given callable object.""" elif sys.version_info >= (3, 10): @classmethod def from_callable( @@ -680,36 +694,34 @@ to override them in the new copy. locals: Mapping[str, Any] | None = None, eval_str: bool = False, ) -> Self: - """Constructs Signature for the given callable object. -""" + """Constructs Signature for the given callable object.""" else: @classmethod def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: - """Constructs Signature for the given callable object. -""" + """Constructs Signature for the given callable object.""" if sys.version_info >= (3, 14): def format(self, *, max_width: int | None = None, quote_annotation_strings: bool = True) -> str: """Create a string representation of the Signature object. -If *max_width* integer is passed, -signature will try to fit into the *max_width*. -If signature is longer than *max_width*, -all parameters will be on separate lines. + If *max_width* integer is passed, + signature will try to fit into the *max_width*. + If signature is longer than *max_width*, + all parameters will be on separate lines. -If *quote_annotation_strings* is False, annotations -in the signature are displayed without opening and closing quotation -marks. This is useful when the signature was created with the -STRING format or when ``from __future__ import annotations`` was used. -""" + If *quote_annotation_strings* is False, annotations + in the signature are displayed without opening and closing quotation + marks. This is useful when the signature was created with the + STRING format or when ``from __future__ import annotations`` was used. + """ elif sys.version_info >= (3, 13): def format(self, *, max_width: int | None = None) -> str: """Create a string representation of the Signature object. -If *max_width* integer is passed, -signature will try to fit into the *max_width*. -If signature is longer than *max_width*, -all parameters will be on separate lines. -""" + If *max_width* integer is passed, + signature will try to fit into the *max_width*. + If signature is longer than *max_width*, + all parameters will be on separate lines. + """ def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @@ -726,53 +738,53 @@ elif sys.version_info >= (3, 10): ) -> dict[str, AnnotationForm]: # values are type expressions """Compute the annotations dict for an object. -obj may be a callable, class, or module. -Passing in an object of any other type raises TypeError. - -Returns a dict. get_annotations() returns a new dict every time -it's called; calling it twice on the same object will return two -different but equivalent dicts. - -This function handles several details for you: - - * If eval_str is true, values of type str will - be un-stringized using eval(). This is intended - for use with stringized annotations - ("from __future__ import annotations"). - * If obj doesn't have an annotations dict, returns an - empty dict. (Functions and methods always have an - annotations dict; classes, modules, and other types of - callables may not.) - * Ignores inherited annotations on classes. 
If a class - doesn't have its own annotations dict, returns an empty dict. - * All accesses to object members and dict values are done - using getattr() and dict.get() for safety. - * Always, always, always returns a freshly-created dict. - -eval_str controls whether or not values of type str are replaced -with the result of calling eval() on those values: - - * If eval_str is true, eval() is called on values of type str. - * If eval_str is false (the default), values of type str are unchanged. - -globals and locals are passed in to eval(); see the documentation -for eval() for more information. If either globals or locals is -None, this function may replace that value with a context-specific -default, contingent on type(obj): - - * If obj is a module, globals defaults to obj.__dict__. - * If obj is a class, globals defaults to - sys.modules[obj.__module__].__dict__ and locals - defaults to the obj class namespace. - * If obj is a callable, globals defaults to obj.__globals__, - although if obj is a wrapped function (using - functools.update_wrapper()) it is first unwrapped. -""" + obj may be a callable, class, or module. + Passing in an object of any other type raises TypeError. + + Returns a dict. get_annotations() returns a new dict every time + it's called; calling it twice on the same object will return two + different but equivalent dicts. + + This function handles several details for you: + + * If eval_str is true, values of type str will + be un-stringized using eval(). This is intended + for use with stringized annotations + ("from __future__ import annotations"). + * If obj doesn't have an annotations dict, returns an + empty dict. (Functions and methods always have an + annotations dict; classes, modules, and other types of + callables may not.) + * Ignores inherited annotations on classes. If a class + doesn't have its own annotations dict, returns an empty dict. + * All accesses to object members and dict values are done + using getattr() and dict.get() for safety. + * Always, always, always returns a freshly-created dict. + + eval_str controls whether or not values of type str are replaced + with the result of calling eval() on those values: + + * If eval_str is true, eval() is called on values of type str. + * If eval_str is false (the default), values of type str are unchanged. + + globals and locals are passed in to eval(); see the documentation + for eval() for more information. If either globals or locals is + None, this function may replace that value with a context-specific + default, contingent on type(obj): + + * If obj is a module, globals defaults to obj.__dict__. + * If obj is a class, globals defaults to + sys.modules[obj.__module__].__dict__ and locals + defaults to the obj class namespace. + * If obj is a callable, globals defaults to obj.__globals__, + although if obj is a wrapped function (using + functools.update_wrapper()) it is first unwrapped. + """ # The name is the same as the enum's name in CPython class _ParameterKind(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + POSITIONAL_ONLY = 0 POSITIONAL_OR_KEYWORD = 1 VAR_POSITIONAL = 2 @@ -793,42 +805,44 @@ if sys.version_info >= (3, 12): ) -> Literal["AGEN_CREATED", "AGEN_RUNNING", "AGEN_SUSPENDED", "AGEN_CLOSED"]: """Get current state of an asynchronous generator object. -Possible states are: - AGEN_CREATED: Waiting to start execution. - AGEN_RUNNING: Currently being executed by the interpreter. - AGEN_SUSPENDED: Currently suspended at a yield expression. 
- AGEN_CLOSED: Execution has completed. -""" + Possible states are: + AGEN_CREATED: Waiting to start execution. + AGEN_RUNNING: Currently being executed by the interpreter. + AGEN_SUSPENDED: Currently suspended at a yield expression. + AGEN_CLOSED: Execution has completed. + """ + def getasyncgenlocals(agen: AsyncGeneratorType[Any, Any]) -> dict[str, Any]: """ -Get the mapping of asynchronous generator local variables to their current -values. + Get the mapping of asynchronous generator local variables to their current + values. -A dict is returned, with the keys the local variable names and values the -bound values. -""" + A dict is returned, with the keys the local variable names and values the + bound values. + """ class Parameter: """Represents a parameter in a function signature. -Has the following public attributes: - -* name : str - The name of the parameter as a string. -* default : object - The default value for the parameter if specified. If the - parameter has no default value, this attribute is set to - `Parameter.empty`. -* annotation - The annotation for the parameter if specified. If the - parameter has no annotation, this attribute is set to - `Parameter.empty`. -* kind : str - Describes how argument values are bound to the parameter. - Possible values: `Parameter.POSITIONAL_ONLY`, - `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, - `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. -""" + Has the following public attributes: + + * name : str + The name of the parameter as a string. + * default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is set to + `Parameter.empty`. + * annotation + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is set to + `Parameter.empty`. + * kind : str + Describes how argument values are bound to the parameter. + Possible values: `Parameter.POSITIONAL_ONLY`, + `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, + `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. + """ + __slots__ = ("_name", "_kind", "_default", "_annotation") def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... empty = _empty @@ -854,8 +868,7 @@ Has the following public attributes: default: Any = ..., annotation: Any = ..., ) -> Self: - """Creates a customized copy of the Parameter. -""" + """Creates a customized copy of the Parameter.""" if sys.version_info >= (3, 13): __replace__ = replace @@ -864,20 +877,21 @@ Has the following public attributes: class BoundArguments: """Result of `Signature.bind` call. Holds the mapping of arguments -to the function's parameters. - -Has the following public attributes: - -* arguments : dict - An ordered mutable mapping of parameters' names to arguments' values. - Does not contain arguments' default values. -* signature : Signature - The Signature object that created this instance. -* args : tuple - Tuple of positional arguments values. -* kwargs : dict - Dict of keyword arguments values. -""" + to the function's parameters. + + Has the following public attributes: + + * arguments : dict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. + * signature : Signature + The Signature object that created this instance. + * args : tuple + Tuple of positional arguments values. + * kwargs : dict + Dict of keyword arguments values. 
+ """ + __slots__ = ("arguments", "_signature", "__weakref__") arguments: OrderedDict[str, Any] @property @@ -890,12 +904,13 @@ Has the following public attributes: def apply_defaults(self) -> None: """Set default values for missing arguments. -For variable-positional arguments (*args) the default is an -empty tuple. + For variable-positional arguments (*args) the default is an + empty tuple. + + For variable-keyword arguments (**kwargs) the default is an + empty dict. + """ -For variable-keyword arguments (**kwargs) the default is an -empty dict. -""" def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -908,20 +923,20 @@ _ClassTreeItem: TypeAlias = list[tuple[type, ...]] | list[_ClassTreeItem] def getclasstree(classes: list[type], unique: bool = False) -> _ClassTreeItem: """Arrange the given list of classes into a hierarchy of nested lists. -Where a nested list appears, it contains classes derived from the class -whose entry immediately precedes the list. Each entry is a 2-tuple -containing a class and a tuple of its base classes. If the 'unique' -argument is true, exactly one entry appears in the returned structure -for each class in the given list. Otherwise, classes using multiple -inheritance and their descendants will appear multiple times. -""" + Where a nested list appears, it contains classes derived from the class + whose entry immediately precedes the list. Each entry is a 2-tuple + containing a class and a tuple of its base classes. If the 'unique' + argument is true, exactly one entry appears in the returned structure + for each class in the given list. Otherwise, classes using multiple + inheritance and their descendants will appear multiple times. + """ + def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> _ClassTreeItem: - """Recursive helper function for getclasstree(). -""" + """Recursive helper function for getclasstree().""" class Arguments(NamedTuple): - """Arguments(args, varargs, varkw) -""" + """Arguments(args, varargs, varkw)""" + args: list[str] varargs: str | None varkw: str | None @@ -929,17 +944,17 @@ class Arguments(NamedTuple): def getargs(co: CodeType) -> Arguments: """Get information about the arguments accepted by a code object. -Three things are returned: (args, varargs, varkw), where -'args' is the list of argument names. Keyword-only arguments are -appended. 'varargs' and 'varkw' are the names of the * and ** -arguments or None. -""" + Three things are returned: (args, varargs, varkw), where + 'args' is the list of argument names. Keyword-only arguments are + appended. 'varargs' and 'varkw' are the names of the * and ** + arguments or None. + """ if sys.version_info < (3, 11): @deprecated("Deprecated since Python 3.0; removed in Python 3.11.") class ArgSpec(NamedTuple): - """ArgSpec(args, varargs, keywords, defaults) -""" + """ArgSpec(args, varargs, keywords, defaults)""" + args: list[str] varargs: str | None keywords: str | None @@ -949,27 +964,27 @@ if sys.version_info < (3, 11): def getargspec(func: object) -> ArgSpec: """Get the names and default values of a function's parameters. - A tuple of four things is returned: (args, varargs, keywords, defaults). - 'args' is a list of the argument names, including keyword-only argument names. - 'varargs' and 'keywords' are the names of the * and ** parameters or None. - 'defaults' is an n-tuple of the default values of the last n parameters. 
+ A tuple of four things is returned: (args, varargs, keywords, defaults). + 'args' is a list of the argument names, including keyword-only argument names. + 'varargs' and 'keywords' are the names of the * and ** parameters or None. + 'defaults' is an n-tuple of the default values of the last n parameters. - This function is deprecated, as it does not support annotations or - keyword-only parameters and will raise ValueError if either is present - on the supplied callable. + This function is deprecated, as it does not support annotations or + keyword-only parameters and will raise ValueError if either is present + on the supplied callable. - For a more structured introspection API, use inspect.signature() instead. + For a more structured introspection API, use inspect.signature() instead. - Alternatively, use getfullargspec() for an API with a similar namedtuple - based interface, but full support for annotations and keyword-only - parameters. + Alternatively, use getfullargspec() for an API with a similar namedtuple + based interface, but full support for annotations and keyword-only + parameters. - Deprecated since Python 3.5, use `inspect.getfullargspec()`. - """ + Deprecated since Python 3.5, use `inspect.getfullargspec()`. + """ class FullArgSpec(NamedTuple): - """FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations) -""" + """FullArgSpec(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations)""" + args: list[str] varargs: str | None varkw: str | None @@ -981,23 +996,23 @@ class FullArgSpec(NamedTuple): def getfullargspec(func: object) -> FullArgSpec: """Get the names and default values of a callable object's parameters. -A tuple of seven things is returned: -(args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations). -'args' is a list of the parameter names. -'varargs' and 'varkw' are the names of the * and ** parameters or None. -'defaults' is an n-tuple of the default values of the last n parameters. -'kwonlyargs' is a list of keyword-only parameter names. -'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults. -'annotations' is a dictionary mapping parameter names to annotations. - -Notable differences from inspect.signature(): - - the "self" parameter is always reported, even for bound methods - - wrapper chains defined by __wrapped__ *not* unwrapped automatically -""" + A tuple of seven things is returned: + (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations). + 'args' is a list of the parameter names. + 'varargs' and 'varkw' are the names of the * and ** parameters or None. + 'defaults' is an n-tuple of the default values of the last n parameters. + 'kwonlyargs' is a list of keyword-only parameter names. + 'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults. + 'annotations' is a dictionary mapping parameter names to annotations. + + Notable differences from inspect.signature(): + - the "self" parameter is always reported, even for bound methods + - wrapper chains defined by __wrapped__ *not* unwrapped automatically + """ class ArgInfo(NamedTuple): - """ArgInfo(args, varargs, keywords, locals) -""" + """ArgInfo(args, varargs, keywords, locals)""" + args: list[str] varargs: str | None keywords: str | None @@ -1006,11 +1021,11 @@ class ArgInfo(NamedTuple): def getargvalues(frame: FrameType) -> ArgInfo: """Get information about arguments passed into a particular frame. -A tuple of four things is returned: (args, varargs, varkw, locals). 
-'args' is a list of the argument names. -'varargs' and 'varkw' are the names of the * and ** arguments or None. -'locals' is the locals dictionary of the given frame. -""" + A tuple of four things is returned: (args, varargs, varkw, locals). + 'args' is a list of the argument names. + 'varargs' and 'varkw' are the names of the * and ** arguments or None. + 'locals' is the locals dictionary of the given frame. + """ if sys.version_info >= (3, 14): def formatannotation(annotation: object, base_module: str | None = None, *, quote_annotation_strings: bool = True) -> str: ... @@ -1041,15 +1056,15 @@ if sys.version_info < (3, 11): ) -> str: """Format an argument spec from the values returned by getfullargspec. - The first seven arguments are (args, varargs, varkw, defaults, - kwonlyargs, kwonlydefaults, annotations). The other five arguments - are the corresponding optional formatting functions that are called to - turn names and values into strings. The last argument is an optional - function to format the sequence of arguments. + The first seven arguments are (args, varargs, varkw, defaults, + kwonlyargs, kwonlydefaults, annotations). The other five arguments + are the corresponding optional formatting functions that are called to + turn names and values into strings. The last argument is an optional + function to format the sequence of arguments. - Deprecated since Python 3.5: use the `signature` function and `Signature` - objects. - """ + Deprecated since Python 3.5: use the `signature` function and `Signature` + objects. + """ def formatargvalues( args: list[str], @@ -1063,25 +1078,26 @@ def formatargvalues( ) -> str: """Format an argument spec from the 4 values returned by getargvalues. -The first four arguments are (args, varargs, varkw, locals). The -next four arguments are the corresponding optional formatting functions -that are called to turn names and values into strings. The ninth -argument is an optional function to format the sequence of arguments. -""" + The first four arguments are (args, varargs, varkw, locals). The + next four arguments are the corresponding optional formatting functions + that are called to turn names and values into strings. The ninth + argument is an optional function to format the sequence of arguments. + """ + def getmro(cls: type) -> tuple[type, ...]: - """Return tuple of base classes (including cls) in method resolution order. -""" + """Return tuple of base classes (including cls) in method resolution order.""" + def getcallargs(func: Callable[_P, Any], /, *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: """Get the mapping of arguments to values. -A dict is returned, with keys the function argument names (including the -names of the * and ** arguments, if any), and values the respective bound -values from 'positional' and 'named'. -""" + A dict is returned, with keys the function argument names (including the + names of the * and ** arguments, if any), and values the respective bound + values from 'positional' and 'named'. + """ class ClosureVars(NamedTuple): - """ClosureVars(nonlocals, globals, builtins, unbound) -""" + """ClosureVars(nonlocals, globals, builtins, unbound)""" + nonlocals: Mapping[str, Any] globals: Mapping[str, Any] builtins: Mapping[str, Any] @@ -1089,28 +1105,29 @@ class ClosureVars(NamedTuple): def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: """ -Get the mapping of free variables to their current values. + Get the mapping of free variables to their current values. 
+ + Returns a named tuple of dicts mapping the current nonlocal, global + and builtin references as seen by the body of the function. A final + set of unbound names that could not be resolved is also provided. + """ -Returns a named tuple of dicts mapping the current nonlocal, global -and builtin references as seen by the body of the function. A final -set of unbound names that could not be resolved is also provided. -""" def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: """Get the object wrapped by *func*. -Follows the chain of :attr:`__wrapped__` attributes returning the last -object in the chain. + Follows the chain of :attr:`__wrapped__` attributes returning the last + object in the chain. -*stop* is an optional callback accepting an object in the wrapper chain -as its sole argument that allows the unwrapping to be terminated early if -the callback returns a true value. If the callback never returns a true -value, the last object in the chain is returned as usual. For example, -:func:`signature` uses this to stop unwrapping if any object in the -chain has a ``__signature__`` attribute defined. + *stop* is an optional callback accepting an object in the wrapper chain + as its sole argument that allows the unwrapping to be terminated early if + the callback returns a true value. If the callback never returns a true + value, the last object in the chain is returned as usual. For example, + :func:`signature` uses this to stop unwrapping if any object in the + chain has a ``__signature__`` attribute defined. -:exc:`ValueError` is raised if a cycle is encountered. + :exc:`ValueError` is raised if a cycle is encountered. - """ + """ # # The interpreter stack @@ -1118,8 +1135,8 @@ chain has a ``__signature__`` attribute defined. if sys.version_info >= (3, 11): class _Traceback(NamedTuple): - """_Traceback(filename, lineno, function, code_context, index) -""" + """_Traceback(filename, lineno, function, code_context, index)""" + filename: str lineno: int function: str @@ -1127,8 +1144,8 @@ if sys.version_info >= (3, 11): index: int | None # type: ignore[assignment] class _FrameInfo(NamedTuple): - """_FrameInfo(frame, filename, lineno, function, code_context, index) -""" + """_FrameInfo(frame, filename, lineno, function, code_context, index)""" + frame: FrameType filename: str lineno: int @@ -1196,8 +1213,8 @@ if sys.version_info >= (3, 11): else: class Traceback(NamedTuple): - """Traceback(filename, lineno, function, code_context, index) -""" + """Traceback(filename, lineno, function, code_context, index)""" + filename: str lineno: int function: str @@ -1205,8 +1222,8 @@ else: index: int | None # type: ignore[assignment] class FrameInfo(NamedTuple): - """FrameInfo(frame, filename, lineno, function, code_context, index) -""" + """FrameInfo(frame, filename, lineno, function, code_context, index)""" + frame: FrameType filename: str lineno: int @@ -1217,36 +1234,38 @@ else: def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: """Get information about a frame or traceback object. -A tuple of five things is returned: the filename, the line number of -the current line, the function name, a list of lines of context from -the source code, and the index of the current line within that list. -The optional second argument specifies the number of lines of context -to return, which are centered around the current line. 
-""" + A tuple of five things is returned: the filename, the line number of + the current line, the function name, a list of lines of context from + the source code, and the index of the current line within that list. + The optional second argument specifies the number of lines of context + to return, which are centered around the current line. + """ + def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: """Get a list of records for a frame and all higher (calling) frames. -Each record contains a frame object, filename, line number, function -name, a list of lines of context, and index within the context. -""" + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context. + """ + def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: """Get a list of records for a traceback's frame and all lower frames. -Each record contains a frame object, filename, line number, function -name, a list of lines of context, and index within the context. -""" + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context. + """ + def getlineno(frame: FrameType) -> int: - """Get the line number from a frame object, allowing for optimization. -""" + """Get the line number from a frame object, allowing for optimization.""" + def currentframe() -> FrameType | None: - """Return the frame of the caller or None if this is not possible. -""" + """Return the frame of the caller or None if this is not possible.""" + def stack(context: int = 1) -> list[FrameInfo]: - """Return a list of records for the stack above the caller's frame. -""" + """Return a list of records for the stack above the caller's frame.""" + def trace(context: int = 1) -> list[FrameInfo]: - """Return a list of records for the stack below the current exception. -""" + """Return a list of records for the stack below the current exception.""" # # Fetching attributes statically @@ -1254,15 +1273,15 @@ def trace(context: int = 1) -> list[FrameInfo]: def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: """Retrieve attributes without triggering dynamic lookup via the -descriptor protocol, __getattr__ or __getattribute__. - -Note: this function may not be able to retrieve all attributes -that getattr can fetch (like dynamically created attributes) -and may find attributes that getattr can't (like descriptors -that raise AttributeError). It can also return descriptor objects -instead of instance members in some cases. See the -documentation for details. -""" + descriptor protocol, __getattr__ or __getattribute__. + + Note: this function may not be able to retrieve all attributes + that getattr can fetch (like dynamically created attributes) + and may find attributes that getattr can't (like descriptors + that raise AttributeError). It can also return descriptor objects + instead of instance members in some cases. See the + documentation for details. + """ # # Current State of Generators and Coroutines @@ -1278,12 +1297,12 @@ def getgeneratorstate( ) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: """Get current state of a generator-iterator. -Possible states are: - GEN_CREATED: Waiting to start execution. - GEN_RUNNING: Currently being executed by the interpreter. - GEN_SUSPENDED: Currently suspended at a yield expression. - GEN_CLOSED: Execution has completed. 
-""" + Possible states are: + GEN_CREATED: Waiting to start execution. + GEN_RUNNING: Currently being executed by the interpreter. + GEN_SUSPENDED: Currently suspended at a yield expression. + GEN_CLOSED: Execution has completed. + """ CORO_CREATED: Final = "CORO_CREATED" CORO_RUNNING: Final = "CORO_RUNNING" @@ -1295,34 +1314,36 @@ def getcoroutinestate( ) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: """Get current state of a coroutine object. -Possible states are: - CORO_CREATED: Waiting to start execution. - CORO_RUNNING: Currently being executed by the interpreter. - CORO_SUSPENDED: Currently suspended at an await expression. - CORO_CLOSED: Execution has completed. -""" + Possible states are: + CORO_CREATED: Waiting to start execution. + CORO_RUNNING: Currently being executed by the interpreter. + CORO_SUSPENDED: Currently suspended at an await expression. + CORO_CLOSED: Execution has completed. + """ + def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: """ -Get the mapping of generator local variables to their current values. + Get the mapping of generator local variables to their current values. + + A dict is returned, with the keys the local variable names and values the + bound values. + """ -A dict is returned, with the keys the local variable names and values the -bound values. -""" def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: """ -Get the mapping of coroutine local variables to their current values. + Get the mapping of coroutine local variables to their current values. -A dict is returned, with the keys the local variable names and values the -bound values. -""" + A dict is returned, with the keys the local variable names and values the + bound values. + """ # Create private type alias to avoid conflict with symbol of same # name created in Attribute class. _Object: TypeAlias = object class Attribute(NamedTuple): - """Attribute(name, kind, defining_class, object) -""" + """Attribute(name, kind, defining_class, object)""" + name: str kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type @@ -1331,29 +1352,29 @@ class Attribute(NamedTuple): def classify_class_attrs(cls: type) -> list[Attribute]: """Return list of attribute-descriptor tuples. -For each name in dir(cls), the return list contains a 4-tuple -with these elements: + For each name in dir(cls), the return list contains a 4-tuple + with these elements: - 0. The name (a string). + 0. The name (a string). - 1. The kind of attribute this is, one of these strings: - 'class method' created via classmethod() - 'static method' created via staticmethod() - 'property' created via property() - 'method' any other flavor of method or descriptor - 'data' not a method + 1. The kind of attribute this is, one of these strings: + 'class method' created via classmethod() + 'static method' created via staticmethod() + 'property' created via property() + 'method' any other flavor of method or descriptor + 'data' not a method - 2. The class which defined this attribute (a class). + 2. The class which defined this attribute (a class). - 3. The object as obtained by calling getattr; if this fails, or if the - resulting object does not live anywhere in the class' mro (including - metaclasses) then the object is looked up in the defining class's - dict (found by walking the mro). + 3. 
The object as obtained by calling getattr; if this fails, or if the + resulting object does not live anywhere in the class' mro (including + metaclasses) then the object is looked up in the defining class's + dict (found by walking the mro). -If one of the items in dir(cls) is stored in the metaclass it will now -be discovered and not have None be listed as the class in which it was -defined. Any items whose home class cannot be discovered are skipped. -""" + If one of the items in dir(cls) is stored in the metaclass it will now + be discovered and not have None be listed as the class in which it was + defined. Any items whose home class cannot be discovered are skipped. + """ class ClassFoundException(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi index 6bb344429a755..94d8ed0257f05 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/io.pyi @@ -32,6 +32,7 @@ DEFAULT_BUFFER_SIZE I/O classes. open() uses the file's blksize (as obtained by os.stat) if possible. """ + import abc import sys from _io import ( @@ -94,83 +95,87 @@ SEEK_CUR: Final = 1 SEEK_END: Final = 2 class UnsupportedOperation(OSError, ValueError): ... + class IOBase(_IOBase, metaclass=abc.ABCMeta): """The abstract base class for all I/O classes. -This class provides dummy implementations for many methods that -derived classes can override selectively; the default implementations -represent a file that cannot be read, written or seeked. + This class provides dummy implementations for many methods that + derived classes can override selectively; the default implementations + represent a file that cannot be read, written or seeked. -Even though IOBase does not declare read, readinto, or write because -their signatures will vary, implementations and clients should -consider those methods part of the interface. Also, implementations -may raise UnsupportedOperation when operations they do not support are -called. + Even though IOBase does not declare read, readinto, or write because + their signatures will vary, implementations and clients should + consider those methods part of the interface. Also, implementations + may raise UnsupportedOperation when operations they do not support are + called. -The basic type used for binary data read from or written to a file is -bytes. Other bytes-like objects are accepted as method arguments too. -In some cases (such as readinto), a writable object is required. Text -I/O classes work with str data. + The basic type used for binary data read from or written to a file is + bytes. Other bytes-like objects are accepted as method arguments too. + In some cases (such as readinto), a writable object is required. Text + I/O classes work with str data. -Note that calling any method (except additional calls to close(), -which are ignored) on a closed stream should raise a ValueError. + Note that calling any method (except additional calls to close(), + which are ignored) on a closed stream should raise a ValueError. -IOBase (and its subclasses) support the iterator protocol, meaning -that an IOBase object can be iterated over yielding the lines in a -stream. + IOBase (and its subclasses) support the iterator protocol, meaning + that an IOBase object can be iterated over yielding the lines in a + stream. -IOBase also supports the :keyword:`with` statement. 
In this example, -fp is closed after the suite of the with statement is complete: + IOBase also supports the :keyword:`with` statement. In this example, + fp is closed after the suite of the with statement is complete: + + with open('spam.txt', 'r') as fp: + fp.write('Spam and eggs!') + """ -with open('spam.txt', 'r') as fp: - fp.write('Spam and eggs!') -""" class RawIOBase(_RawIOBase, IOBase): - """Base class for raw binary I/O. -""" + """Base class for raw binary I/O.""" + class BufferedIOBase(_BufferedIOBase, IOBase): """Base class for buffered IO objects. -The main difference with RawIOBase is that the read() method -supports omitting the size argument, and does not have a default -implementation that defers to readinto(). + The main difference with RawIOBase is that the read() method + supports omitting the size argument, and does not have a default + implementation that defers to readinto(). -In addition, read(), readinto() and write() may raise -BlockingIOError if the underlying raw stream is in non-blocking -mode and not ready; unlike their raw counterparts, they will never -return None. + In addition, read(), readinto() and write() may raise + BlockingIOError if the underlying raw stream is in non-blocking + mode and not ready; unlike their raw counterparts, they will never + return None. + + A typical implementation should not inherit from a RawIOBase + implementation, but wrap one. + """ -A typical implementation should not inherit from a RawIOBase -implementation, but wrap one. -""" class TextIOBase(_TextIOBase, IOBase): """Base class for text I/O. -This class provides a character and line based interface to stream -I/O. There is no readinto method because Python's character strings -are immutable. -""" + This class provides a character and line based interface to stream + I/O. There is no readinto method because Python's character strings + are immutable. + """ if sys.version_info >= (3, 14): class Reader(Protocol[_T_co]): """Protocol for simple I/O reader instances. -This protocol only supports blocking I/O. -""" + This protocol only supports blocking I/O. + """ + __slots__ = () def read(self, size: int = ..., /) -> _T_co: """Read data from the input stream and return it. -If *size* is specified, at most *size* items (bytes/characters) will be -read. -""" + If *size* is specified, at most *size* items (bytes/characters) will be + read. + """ class Writer(Protocol[_T_contra]): """Protocol for simple I/O writer instances. -This protocol only supports blocking I/O. -""" + This protocol only supports blocking I/O. + """ + __slots__ = () def write(self, data: _T_contra, /) -> int: - """Write *data* to the output stream and return the number of items written. -""" + """Write *data* to the output stream and return the number of items written.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi index 458aa23d6b6d2..f8397ff5b33ed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ipaddress.pyi @@ -4,6 +4,7 @@ This library is used to create/poke/manipulate IPv4 and IPv6 addresses and networks. 
""" + import sys from collections.abc import Iterable, Iterator from typing import Any, Final, Generic, Literal, TypeVar, overload @@ -22,82 +23,84 @@ _RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Int def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: """Take an IP string/int and return an object of the correct type. -Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. -Returns: - An IPv4Address or IPv6Address object. + Returns: + An IPv4Address or IPv6Address object. -Raises: - ValueError: if the *address* passed isn't either a v4 or a v6 - address + Raises: + ValueError: if the *address* passed isn't either a v4 or a v6 + address + + """ -""" def ip_network( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True ) -> IPv4Network | IPv6Network: """Take an IP string/int and return an object of the correct type. -Args: - address: A string or integer, the IP network. Either IPv4 or - IPv6 networks may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. + Args: + address: A string or integer, the IP network. Either IPv4 or + IPv6 networks may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. -Returns: - An IPv4Network or IPv6Network object. + Returns: + An IPv4Network or IPv6Network object. -Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. Or if the network has host bits set. + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if the network has host bits set. + + """ -""" def ip_interface( address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], ) -> IPv4Interface | IPv6Interface: """Take an IP string/int and return an object of the correct type. -Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. -Returns: - An IPv4Interface or IPv6Interface object. + Returns: + An IPv4Interface or IPv6Interface object. -Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. -Notes: - The IPv?Interface classes describe an Address on a particular - Network, so they're basically a combination of both the Address - and Network classes. + Notes: + The IPv?Interface classes describe an Address on a particular + Network, so they're basically a combination of both the Address + and Network classes. -""" + """ class _IPAddressBase: - """The mother class. -""" + """The mother class.""" + __slots__ = () @property def compressed(self) -> str: - """Return the shorthand version of the IP address as a string. -""" + """Return the shorthand version of the IP address as a string.""" + @property def exploded(self) -> str: - """Return the longhand version of the IP address as a string. 
-""" + """Return the longhand version of the IP address as a string.""" + @property def reverse_pointer(self) -> str: """The name of the reverse DNS pointer for the IP address, e.g.: ->>> ipaddress.ip_address("127.0.0.1").reverse_pointer -'1.0.0.127.in-addr.arpa' ->>> ipaddress.ip_address("2001:db8::1").reverse_pointer -'1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' + >>> ipaddress.ip_address("127.0.0.1").reverse_pointer + '1.0.0.127.in-addr.arpa' + >>> ipaddress.ip_address("2001:db8::1").reverse_pointer + '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' -""" + """ if sys.version_info < (3, 14): @property def version(self) -> int: ... @@ -105,9 +108,10 @@ class _IPAddressBase: class _BaseAddress(_IPAddressBase): """A generic IP object. -This IP class contains the version independent methods which are -used by single IP addresses. -""" + This IP class contains the version independent methods which are + used by single IP addresses. + """ + __slots__ = () def __add__(self, other: int) -> Self: ... def __hash__(self) -> int: ... @@ -116,44 +120,44 @@ used by single IP addresses. def __format__(self, fmt: str) -> str: """Returns an IP address as a formatted string. -Supported presentation types are: -'s': returns the IP address as a string (default) -'b': converts to binary and returns a zero-padded string -'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string -'n': the same as 'b' for IPv4 and 'x' for IPv6 + Supported presentation types are: + 's': returns the IP address as a string (default) + 'b': converts to binary and returns a zero-padded string + 'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string + 'n': the same as 'b' for IPv4 and 'x' for IPv6 + + For binary and hex presentation types, the alternate form specifier + '#' and the grouping option '_' are supported. + """ -For binary and hex presentation types, the alternate form specifier -'#' and the grouping option '_' are supported. -""" def __eq__(self, other: object) -> bool: ... def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): def __ge__(self, other: Self) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __gt__(self, other: Self) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __le__(self, other: Self) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" else: def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" class _BaseNetwork(_IPAddressBase, Generic[_A]): """A generic IP network object. 
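An illustrative sketch of the ip_address / ip_network / ip_interface factory functions documented above (plain stdlib usage, not part of the stubs):

    import ipaddress

    addr = ipaddress.ip_address("192.0.2.1")          # IPv4Address('192.0.2.1')
    net = ipaddress.ip_network("192.0.2.0/24")        # IPv4Network('192.0.2.0/24')
    iface = ipaddress.ip_interface("192.0.2.1/24")    # an address paired with its network
    assert addr in net and iface.network == net
    assert ipaddress.ip_address(3221225985) == addr   # integers < 2**32 parse as IPv4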
-This IP class contains the version independent methods which are -used by networks. -""" + This IP class contains the version independent methods which are + used by networks. + """ + network_address: _A netmask: _A def __contains__(self, other: Any) -> bool: ... @@ -164,226 +168,236 @@ used by networks. def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): def __ge__(self, other: Self) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __gt__(self, other: Self) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __le__(self, other: Self) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" else: def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" def address_exclude(self, other: Self) -> Iterator[Self]: """Remove an address from a larger block. -For example: + For example: - addr1 = ip_network('192.0.2.0/28') - addr2 = ip_network('192.0.2.1/32') - list(addr1.address_exclude(addr2)) = - [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), - IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] + addr1 = ip_network('192.0.2.0/28') + addr2 = ip_network('192.0.2.1/32') + list(addr1.address_exclude(addr2)) = + [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'), + IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')] -or IPv6: + or IPv6: - addr1 = ip_network('2001:db8::1/32') - addr2 = ip_network('2001:db8::1/128') - list(addr1.address_exclude(addr2)) = - [ip_network('2001:db8::1/128'), - ip_network('2001:db8::2/127'), - ip_network('2001:db8::4/126'), - ip_network('2001:db8::8/125'), - ... - ip_network('2001:db8:8000::/33')] + addr1 = ip_network('2001:db8::1/32') + addr2 = ip_network('2001:db8::1/128') + list(addr1.address_exclude(addr2)) = + [ip_network('2001:db8::1/128'), + ip_network('2001:db8::2/127'), + ip_network('2001:db8::4/126'), + ip_network('2001:db8::8/125'), + ... + ip_network('2001:db8:8000::/33')] -Args: - other: An IPv4Network or IPv6Network object of the same type. + Args: + other: An IPv4Network or IPv6Network object of the same type. -Returns: - An iterator of the IPv(4|6)Network objects which is self - minus other. + Returns: + An iterator of the IPv(4|6)Network objects which is self + minus other. -Raises: - TypeError: If self and other are of differing address - versions, or if other is not a network object. - ValueError: If other is not completely contained by self. + Raises: + TypeError: If self and other are of differing address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. 
+ + """ -""" @property def broadcast_address(self) -> _A: ... def compare_networks(self, other: Self) -> int: """Compare two IP objects. -This is only concerned about the comparison of the integer -representation of the network addresses. This means that the -host bits aren't considered at all in this method. If you want -to compare host bits, you can easily enough do a -'HostA._ip < HostB._ip' - -Args: - other: An IP object. - -Returns: - If the IP versions of self and other are the same, returns: - - -1 if self < other: - eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') - IPv6Network('2001:db8::1000/124') < - IPv6Network('2001:db8::2000/124') - 0 if self == other - eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') - IPv6Network('2001:db8::1000/124') == - IPv6Network('2001:db8::1000/124') - 1 if self > other - eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') - IPv6Network('2001:db8::2000/124') > - IPv6Network('2001:db8::1000/124') - - Raises: - TypeError if the IP versions are different. + This is only concerned about the comparison of the integer + representation of the network addresses. This means that the + host bits aren't considered at all in this method. If you want + to compare host bits, you can easily enough do a + 'HostA._ip < HostB._ip' + + Args: + other: An IP object. + + Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25') + IPv6Network('2001:db8::1000/124') < + IPv6Network('2001:db8::2000/124') + 0 if self == other + eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24') + IPv6Network('2001:db8::1000/124') == + IPv6Network('2001:db8::1000/124') + 1 if self > other + eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25') + IPv6Network('2001:db8::2000/124') > + IPv6Network('2001:db8::1000/124') + + Raises: + TypeError if the IP versions are different. + + """ -""" def hosts(self) -> Iterator[_A] | list[_A]: """Generate Iterator over usable hosts in a network. -This is like __iter__ except it doesn't return the network -or broadcast addresses. + This is like __iter__ except it doesn't return the network + or broadcast addresses. + + """ -""" @property def is_global(self) -> bool: """Test if this address is allocated for public networks. -Returns: - A boolean, True if the address is not reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. + Returns: + A boolean, True if the address is not reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ -""" @property def is_link_local(self) -> bool: """Test if the address is reserved for link-local. -Returns: - A boolean, True if the address is reserved per RFC 4291. + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ -""" @property def is_loopback(self) -> bool: """Test if the address is a loopback address. -Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ -""" @property def is_multicast(self) -> bool: """Test if the address is reserved for multicast use. -Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. 
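A short sketch of the network-splitting helpers documented above, mirroring the address_exclude docstring example (illustrative only):

    import ipaddress

    addr1 = ipaddress.ip_network("192.0.2.0/28")
    addr2 = ipaddress.ip_network("192.0.2.1/32")
    print(sorted(addr1.address_exclude(addr2)))   # sorted for display; yield order is an implementation detail
    # [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
    #  IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
    print(list(ipaddress.ip_network("192.0.2.0/29").hosts()))
    # six usable hosts 192.0.2.1 .. 192.0.2.6 (network and broadcast excluded)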
+ + """ -""" @property def is_private(self) -> bool: """Test if this network belongs to a private range. -Returns: - A boolean, True if the network is reserved per - iana-ipv4-special-registry or iana-ipv6-special-registry. + Returns: + A boolean, True if the network is reserved per + iana-ipv4-special-registry or iana-ipv6-special-registry. + + """ -""" @property def is_reserved(self) -> bool: """Test if the address is otherwise IETF reserved. -Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ -""" @property def is_unspecified(self) -> bool: """Test if the address is unspecified. -Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ -""" @property def num_addresses(self) -> int: - """Number of hosts in the current subnet. -""" + """Number of hosts in the current subnet.""" + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: - """Tell if self is partly contained in other. -""" + """Tell if self is partly contained in other.""" + @property def prefixlen(self) -> int: ... def subnet_of(self, other: Self) -> bool: - """Return True if this network is a subnet of other. -""" + """Return True if this network is a subnet of other.""" + def supernet_of(self, other: Self) -> bool: - """Return True if this network is a supernet of other. -""" + """Return True if this network is a supernet of other.""" + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: """The subnets which join to make the current subnet. -In the case that self contains only one IP -(self._prefixlen == 32 for IPv4 or self._prefixlen == 128 -for IPv6), yield an iterator with just ourself. + In the case that self contains only one IP + (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 + for IPv6), yield an iterator with just ourself. -Args: - prefixlen_diff: An integer, the amount the prefix length - should be increased by. This should not be set if - new_prefix is also set. - new_prefix: The desired new prefix length. This must be a - larger number (smaller prefix) than the existing prefix. - This should not be set if prefixlen_diff is also set. + Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. + This should not be set if prefixlen_diff is also set. -Returns: - An iterator of IPv(4|6) objects. + Returns: + An iterator of IPv(4|6) objects. -Raises: - ValueError: The prefixlen_diff is too small or too large. - OR - prefixlen_diff and new_prefix are both set or new_prefix - is a smaller number than the current prefix (smaller - number means a larger network) + Raises: + ValueError: The prefixlen_diff is too small or too large. + OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + + """ -""" def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: """The supernet containing the current network. -Args: - prefixlen_diff: An integer, the amount the prefix length of - the network should be decreased by. 
For example, given a - /24 network and a prefixlen_diff of 3, a supernet with a - /21 netmask is returned. + Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. -Returns: - An IPv4 network object. + Returns: + An IPv4 network object. -Raises: - ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have - a negative prefix length. - OR - If prefixlen_diff and new_prefix are both set or new_prefix is a - larger number than the current prefix (larger number means a - smaller network) + Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have + a negative prefix length. + OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + + """ -""" @property def with_hostmask(self) -> str: ... @property @@ -396,10 +410,11 @@ Raises: class _BaseV4: """Base IPv4 object. -The following methods are used by IPv4 objects in both single IP -addresses and networks. + The following methods are used by IPv4 objects in both single IP + addresses and networks. + + """ -""" __slots__ = () if sys.version_info >= (3, 14): version: Final = 4 @@ -411,158 +426,166 @@ addresses and networks. def max_prefixlen(self) -> Literal[32]: ... class IPv4Address(_BaseV4, _BaseAddress): - """Represent and manipulate single IPv4 Addresses. -""" + """Represent and manipulate single IPv4 Addresses.""" + __slots__ = ("_ip", "__weakref__") def __init__(self, address: object) -> None: """ -Args: - address: A string or integer representing the IP + Args: + address: A string or integer representing the IP - Additionally, an integer can be passed, so - IPv4Address('192.0.2.1') == IPv4Address(3221225985). - or, more generally - IPv4Address(int(IPv4Address('192.0.2.1'))) == - IPv4Address('192.0.2.1') + Additionally, an integer can be passed, so + IPv4Address('192.0.2.1') == IPv4Address(3221225985). + or, more generally + IPv4Address(int(IPv4Address('192.0.2.1'))) == + IPv4Address('192.0.2.1') -Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + + """ -""" @property def is_global(self) -> bool: """``True`` if the address is defined as globally reachable by -iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ -(for IPv6) with the following exception: + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ + (for IPv6) with the following exception: -For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the -semantics of the underlying IPv4 addresses and the following condition holds -(see :attr:`IPv6Address.ipv4_mapped`):: + For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: - address.is_global == address.ipv4_mapped.is_global + address.is_global == address.ipv4_mapped.is_global + + ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` + IPv4 range where they are both ``False``. + """ -``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` -IPv4 range where they are both ``False``. -""" @property def is_link_local(self) -> bool: """Test if the address is reserved for link-local. 
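An illustrative sketch of subnets()/supernet() and the containment helpers documented above (stdlib usage, not part of the stubs):

    import ipaddress

    net = ipaddress.ip_network("192.0.2.0/24")
    print(list(net.subnets(new_prefix=26)))
    # [IPv4Network('192.0.2.0/26'), IPv4Network('192.0.2.64/26'),
    #  IPv4Network('192.0.2.128/26'), IPv4Network('192.0.2.192/26')]
    print(net.supernet(prefixlen_diff=3))                          # IPv4Network('192.0.0.0/21')
    print(ipaddress.ip_network("192.0.2.64/26").subnet_of(net))    # True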
-Returns: - A boolean, True if the address is link-local per RFC 3927. + Returns: + A boolean, True if the address is link-local per RFC 3927. + + """ -""" @property def is_loopback(self) -> bool: """Test if the address is a loopback address. -Returns: - A boolean, True if the address is a loopback per RFC 3330. + Returns: + A boolean, True if the address is a loopback per RFC 3330. + + """ -""" @property def is_multicast(self) -> bool: """Test if the address is reserved for multicast use. -Returns: - A boolean, True if the address is multicast. - See RFC 3171 for details. + Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + + """ -""" @property def is_private(self) -> bool: """``True`` if the address is defined as not globally reachable by -iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ -(for IPv6) with the following exceptions: + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ + (for IPv6) with the following exceptions: -* ``is_private`` is ``False`` for ``100.64.0.0/10`` -* For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: + * ``is_private`` is ``False`` for ``100.64.0.0/10`` + * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: - address.is_private == address.ipv4_mapped.is_private + address.is_private == address.ipv4_mapped.is_private + + ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` + IPv4 range where they are both ``False``. + """ -``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` -IPv4 range where they are both ``False``. -""" @property def is_reserved(self) -> bool: """Test if the address is otherwise IETF reserved. -Returns: - A boolean, True if the address is within the - reserved IPv4 Network range. + Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + + """ -""" @property def is_unspecified(self) -> bool: """Test if the address is unspecified. -Returns: - A boolean, True if this is the unspecified address as defined in - RFC 5735 3. + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. + + """ -""" @property def packed(self) -> bytes: - """The binary representation of this address. -""" + """The binary representation of this address.""" if sys.version_info >= (3, 13): @property def ipv6_mapped(self) -> IPv6Address: """Return the IPv4-mapped IPv6 address. -Returns: - The IPv4-mapped IPv6 address per RFC 4291. + Returns: + The IPv4-mapped IPv6 address per RFC 4291. -""" + """ class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): """This class represents and manipulates 32-bit IPv4 network + addresses.. 
-Attributes: [examples for IPv4Network('192.0.2.0/27')] - .network_address: IPv4Address('192.0.2.0') - .hostmask: IPv4Address('0.0.0.31') - .broadcast_address: IPv4Address('192.0.2.32') - .netmask: IPv4Address('255.255.255.224') - .prefixlen: 27 + Attributes: [examples for IPv4Network('192.0.2.0/27')] + .network_address: IPv4Address('192.0.2.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast_address: IPv4Address('192.0.2.32') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + + """ -""" def __init__(self, address: object, strict: bool = True) -> None: """Instantiate a new IPv4 network object. -Args: - address: A string or integer representing the IP [& network]. - '192.0.2.0/24' - '192.0.2.0/255.255.255.0' - '192.0.2.0/0.0.0.255' - are all functionally the same in IPv4. Similarly, - '192.0.2.1' - '192.0.2.1/255.255.255.255' - '192.0.2.1/32' - are also functionally equivalent. That is to say, failing to - provide a subnetmask will create an object with a mask of /32. - - If the mask (portion after the / in the argument) is given in - dotted quad form, it is treated as a netmask if it starts with a - non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it - starts with a zero field (e.g. 0.255.255.255 == /8), with the - single exception of an all-zero mask which is treated as a - netmask == /0. If no mask is given, a default of /32 is used. - - Additionally, an integer can be passed, so - IPv4Network('192.0.2.1') == IPv4Network(3221225985) - or, more generally - IPv4Interface(int(IPv4Interface('192.0.2.1'))) == - IPv4Interface('192.0.2.1') - -Raises: - AddressValueError: If ipaddress isn't a valid IPv4 address. - NetmaskValueError: If the netmask isn't valid for - an IPv4 address. - ValueError: If strict is True and a network address is not - supplied. -""" + Args: + address: A string or integer representing the IP [& network]. + '192.0.2.0/24' + '192.0.2.0/255.255.255.0' + '192.0.2.0/0.0.0.255' + are all functionally the same in IPv4. Similarly, + '192.0.2.1' + '192.0.2.1/255.255.255.255' + '192.0.2.1/32' + are also functionally equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.0.2.1') == IPv4Network(3221225985) + or, more generally + IPv4Interface(int(IPv4Interface('192.0.2.1'))) == + IPv4Interface('192.0.2.1') + + Raises: + AddressValueError: If ipaddress isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict is True and a network address is not + supplied. + """ class IPv4Interface(IPv4Address): netmask: IPv4Address @@ -583,10 +606,11 @@ class IPv4Interface(IPv4Address): class _BaseV6: """Base IPv6 object. -The following methods are used by IPv6 objects in both single IP -addresses and networks. + The following methods are used by IPv6 objects in both single IP + addresses and networks. + + """ -""" __slots__ = () if sys.version_info >= (3, 14): version: Final = 6 @@ -598,212 +622,227 @@ addresses and networks. def max_prefixlen(self) -> Literal[128]: ... 
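The equivalent constructor spellings described in the IPv4Network docstring above, as a runnable sketch (illustrative only):

    import ipaddress

    a = ipaddress.IPv4Network("192.0.2.0/24")
    b = ipaddress.IPv4Network("192.0.2.0/255.255.255.0")   # netmask form
    c = ipaddress.IPv4Network("192.0.2.0/0.0.0.255")       # hostmask form
    assert a == b == c
    d = ipaddress.IPv4Network("192.0.2.1/24", strict=False) # host bits masked off -> 192.0.2.0/24
    assert d == a
    # With the default strict=True, IPv4Network("192.0.2.1/24") raises ValueError instead.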
class IPv6Address(_BaseV6, _BaseAddress): - """Represent and manipulate single IPv6 Addresses. -""" + """Represent and manipulate single IPv6 Addresses.""" + __slots__ = ("_ip", "_scope_id", "__weakref__") def __init__(self, address: object) -> None: """Instantiate a new IPv6 address object. -Args: - address: A string or integer representing the IP + Args: + address: A string or integer representing the IP - Additionally, an integer can be passed, so - IPv6Address('2001:db8::') == - IPv6Address(42540766411282592856903984951653826560) - or, more generally - IPv6Address(int(IPv6Address('2001:db8::'))) == - IPv6Address('2001:db8::') + Additionally, an integer can be passed, so + IPv6Address('2001:db8::') == + IPv6Address(42540766411282592856903984951653826560) + or, more generally + IPv6Address(int(IPv6Address('2001:db8::'))) == + IPv6Address('2001:db8::') -Raises: - AddressValueError: If address isn't a valid IPv6 address. + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + """ -""" @property def is_global(self) -> bool: """``True`` if the address is defined as globally reachable by -iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ -(for IPv6) with the following exception: + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ + (for IPv6) with the following exception: -For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the -semantics of the underlying IPv4 addresses and the following condition holds -(see :attr:`IPv6Address.ipv4_mapped`):: + For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: - address.is_global == address.ipv4_mapped.is_global + address.is_global == address.ipv4_mapped.is_global + + ``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` + IPv4 range where they are both ``False``. + """ -``is_global`` has value opposite to :attr:`is_private`, except for the ``100.64.0.0/10`` -IPv4 range where they are both ``False``. -""" @property def is_link_local(self) -> bool: """Test if the address is reserved for link-local. -Returns: - A boolean, True if the address is reserved per RFC 4291. + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ -""" @property def is_loopback(self) -> bool: """Test if the address is a loopback address. -Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. + + """ -""" @property def is_multicast(self) -> bool: """Test if the address is reserved for multicast use. -Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. 
+ + """ -""" @property def is_private(self) -> bool: """``True`` if the address is defined as not globally reachable by -iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ -(for IPv6) with the following exceptions: + iana-ipv4-special-registry_ (for IPv4) or iana-ipv6-special-registry_ + (for IPv6) with the following exceptions: -* ``is_private`` is ``False`` for ``100.64.0.0/10`` -* For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the - semantics of the underlying IPv4 addresses and the following condition holds - (see :attr:`IPv6Address.ipv4_mapped`):: + * ``is_private`` is ``False`` for ``100.64.0.0/10`` + * For IPv4-mapped IPv6-addresses the ``is_private`` value is determined by the + semantics of the underlying IPv4 addresses and the following condition holds + (see :attr:`IPv6Address.ipv4_mapped`):: - address.is_private == address.ipv4_mapped.is_private + address.is_private == address.ipv4_mapped.is_private + + ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` + IPv4 range where they are both ``False``. + """ -``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` -IPv4 range where they are both ``False``. -""" @property def is_reserved(self) -> bool: """Test if the address is otherwise IETF reserved. -Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ -""" @property def is_unspecified(self) -> bool: """Test if the address is unspecified. -Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ -""" @property def packed(self) -> bytes: - """The binary representation of this address. -""" + """The binary representation of this address.""" + @property def ipv4_mapped(self) -> IPv4Address | None: """Return the IPv4 mapped address. -Returns: - If the IPv6 address is a v4 mapped address, return the - IPv4 mapped address. Return None otherwise. + Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + + """ -""" @property def is_site_local(self) -> bool: """Test if the address is reserved for site-local. -Note that the site-local address space has been deprecated by RFC 3879. -Use is_private to test if this address is in the space of unique local -addresses as defined by RFC 4193. + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. -Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + + """ -""" @property def sixtofour(self) -> IPv4Address | None: """Return the IPv4 6to4 embedded address. -Returns: - The IPv4 6to4-embedded address if present or None if the - address doesn't appear to contain a 6to4 embedded address. + Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + + """ -""" @property def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: """Tuple of embedded teredo IPs. 
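A brief sketch of the IPv6Address convenience properties documented in this hunk (ipv4_mapped, sixtofour, scope_id); illustrative stdlib usage, scoped-address parsing needs Python 3.9+:

    import ipaddress

    print(ipaddress.IPv6Address("::ffff:192.0.2.1").ipv4_mapped)   # IPv4Address('192.0.2.1')
    print(ipaddress.IPv6Address("2002:c000:201::1").sixtofour)     # IPv4Address('192.0.2.1')
    print(ipaddress.IPv6Address("fe80::1%eth0").scope_id)          # 'eth0'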
-Returns: - Tuple of the (server, client) IPs or None if the address - doesn't appear to be a teredo address (doesn't start with - 2001::/32) + Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + + """ -""" @property def scope_id(self) -> str | None: """Identifier of a particular zone of the address's scope. -See RFC 4007 for details. + See RFC 4007 for details. -Returns: - A string identifying the zone of the address if specified, else None. + Returns: + A string identifying the zone of the address if specified, else None. + + """ -""" def __hash__(self) -> int: ... def __eq__(self, other: object) -> bool: ... class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): """This class represents and manipulates 128-bit IPv6 networks. -Attributes: [examples for IPv6('2001:db8::1000/124')] - .network_address: IPv6Address('2001:db8::1000') - .hostmask: IPv6Address('::f') - .broadcast_address: IPv6Address('2001:db8::100f') - .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') - .prefixlen: 124 + Attributes: [examples for IPv6('2001:db8::1000/124')] + .network_address: IPv6Address('2001:db8::1000') + .hostmask: IPv6Address('::f') + .broadcast_address: IPv6Address('2001:db8::100f') + .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0') + .prefixlen: 124 + + """ -""" def __init__(self, address: object, strict: bool = True) -> None: """Instantiate a new IPv6 Network object. -Args: - address: A string or integer representing the IPv6 network or the - IP and prefix/netmask. - '2001:db8::/128' - '2001:db8:0000:0000:0000:0000:0000:0000/128' - '2001:db8::' - are all functionally the same in IPv6. That is to say, - failing to provide a subnetmask will create an object with - a mask of /128. - - Additionally, an integer can be passed, so - IPv6Network('2001:db8::') == - IPv6Network(42540766411282592856903984951653826560) - or, more generally - IPv6Network(int(IPv6Network('2001:db8::'))) == - IPv6Network('2001:db8::') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 2001:db8::1000/124 and not an - IP address on a network, eg, 2001:db8::1/124. - -Raises: - AddressValueError: If address isn't a valid IPv6 address. - NetmaskValueError: If the netmask isn't valid for - an IPv6 address. - ValueError: If strict was True and a network address was not - supplied. -""" + Args: + address: A string or integer representing the IPv6 network or the + IP and prefix/netmask. + '2001:db8::/128' + '2001:db8:0000:0000:0000:0000:0000:0000/128' + '2001:db8::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:db8::') == + IPv6Network(42540766411282592856903984951653826560) + or, more generally + IPv6Network(int(IPv6Network('2001:db8::'))) == + IPv6Network('2001:db8::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 2001:db8::1000/124 and not an + IP address on a network, eg, 2001:db8::1/124. + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. + """ + @property def is_site_local(self) -> bool: """Test if the address is reserved for site-local. -Note that the site-local address space has been deprecated by RFC 3879. 
-Use is_private to test if this address is in the space of unique local -addresses as defined by RFC 4193. + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. -Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. -""" + """ class IPv6Interface(IPv6Address): netmask: IPv6Address @@ -824,56 +863,58 @@ class IPv6Interface(IPv6Address): def v4_int_to_packed(address: int) -> bytes: """Represent an address as 4 packed bytes in network (big-endian) order. -Args: - address: An integer representation of an IPv4 IP address. + Args: + address: An integer representation of an IPv4 IP address. -Returns: - The integer address packed as 4 bytes in network (big-endian) order. + Returns: + The integer address packed as 4 bytes in network (big-endian) order. -Raises: - ValueError: If the integer is negative or too large to be an - IPv4 IP address. + Raises: + ValueError: If the integer is negative or too large to be an + IPv4 IP address. + + """ -""" def v6_int_to_packed(address: int) -> bytes: """Represent an address as 16 packed bytes in network (big-endian) order. -Args: - address: An integer representation of an IPv6 IP address. + Args: + address: An integer representation of an IPv6 IP address. -Returns: - The integer address packed as 16 bytes in network (big-endian) order. + Returns: + The integer address packed as 16 bytes in network (big-endian) order. -""" + """ # Third overload is technically incorrect, but convenient when first and last are return values of ip_address() @overload def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: """Summarize a network range given the first and last IP addresses. -Example: - >>> list(summarize_address_range(IPv4Address('192.0.2.0'), - ... IPv4Address('192.0.2.130'))) - ... #doctest: +NORMALIZE_WHITESPACE - [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), - IPv4Network('192.0.2.130/32')] + Example: + >>> list(summarize_address_range(IPv4Address('192.0.2.0'), + ... IPv4Address('192.0.2.130'))) + ... #doctest: +NORMALIZE_WHITESPACE + [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), + IPv4Network('192.0.2.130/32')] -Args: - first: the first IPv4Address or IPv6Address in the range. - last: the last IPv4Address or IPv6Address in the range. + Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. -Returns: - An iterator of the summarized IPv(4|6) network objects. + Returns: + An iterator of the summarized IPv(4|6) network objects. -Raise: - TypeError: - If the first and last objects are not IP addresses. - If the first and last objects are not the same version. - ValueError: - If the last object is not greater than the first. - If the version of the first address is not 4 or 6. + Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version of the first address is not 4 or 6. + + """ -""" @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... @overload @@ -883,48 +924,49 @@ def summarize_address_range( def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: """Collapse a list of IP objects. 
-Example: - collapse_addresses([IPv4Network('192.0.2.0/25'), - IPv4Network('192.0.2.128/25')]) -> - [IPv4Network('192.0.2.0/24')] + Example: + collapse_addresses([IPv4Network('192.0.2.0/25'), + IPv4Network('192.0.2.128/25')]) -> + [IPv4Network('192.0.2.0/24')] -Args: - addresses: An iterable of IPv4Network or IPv6Network objects. + Args: + addresses: An iterable of IPv4Network or IPv6Network objects. -Returns: - An iterator of the collapsed IPv(4|6)Network objects. + Returns: + An iterator of the collapsed IPv(4|6)Network objects. -Raises: - TypeError: If passed a list of mixed version objects. + Raises: + TypeError: If passed a list of mixed version objects. + + """ -""" @overload def get_mixed_type_key(obj: _A) -> tuple[int, _A]: """Return a key suitable for sorting between networks and addresses. -Address and Network objects are not sortable by default; they're -fundamentally different so the expression + Address and Network objects are not sortable by default; they're + fundamentally different so the expression - IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') + IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') -doesn't make any sense. There are some times however, where you may wish -to have ipaddress sort these for you anyway. If you need to do this, you -can use this function as the key= argument to sorted(). + doesn't make any sense. There are some times however, where you may wish + to have ipaddress sort these for you anyway. If you need to do this, you + can use this function as the key= argument to sorted(). -Args: - obj: either a Network or Address object. -Returns: - appropriate key. + Args: + obj: either a Network or Address object. + Returns: + appropriate key. + + """ -""" @overload def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... @overload def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... class AddressValueError(ValueError): - """A Value Error related to the address. -""" + """A Value Error related to the address.""" + class NetmaskValueError(ValueError): - """A Value Error related to the netmask. -""" + """A Value Error related to the netmask.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi index e34674de9ef01..4c1eaf164d8b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/itertools.pyi @@ -28,6 +28,7 @@ permutations(p[, r]) combinations(p, r) combinations_with_replacement(p, r) """ + import sys from _typeshed import MaybeNone from collections.abc import Callable, Iterable, Iterator @@ -61,13 +62,14 @@ _Predicate: TypeAlias = Callable[[_T], object] class count(Generic[_N]): """Return a count object whose .__next__() method returns consecutive values. -Equivalent to: - def count(firstval=0, step=1): - x = firstval - while 1: - yield x - x += step -""" + Equivalent to: + def count(firstval=0, step=1): + x = firstval + while 1: + yield x + x += step + """ + @overload def __new__(cls) -> count[int]: ... @overload @@ -75,205 +77,201 @@ Equivalent to: @overload def __new__(cls, *, step: _N) -> count[_N]: ... def __next__(self) -> _N: - """Implement next(self). -""" + """Implement next(self).""" + def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" @disjoint_base class cycle(Generic[_T]): - """Return elements from the iterable until it is exhausted. Then repeat the sequence indefinitely. 
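An illustrative sketch of the module-level ipaddress helpers documented above (summarize_address_range, collapse_addresses, get_mixed_type_key); it mirrors the docstring examples and is not part of the stubs:

    import ipaddress

    first, last = ipaddress.ip_address("192.0.2.0"), ipaddress.ip_address("192.0.2.130")
    print(list(ipaddress.summarize_address_range(first, last)))
    # [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), IPv4Network('192.0.2.130/32')]
    halves = [ipaddress.ip_network("192.0.2.0/25"), ipaddress.ip_network("192.0.2.128/25")]
    print(list(ipaddress.collapse_addresses(halves)))        # [IPv4Network('192.0.2.0/24')]
    mixed = [ipaddress.ip_address("192.0.2.1"), ipaddress.ip_network("192.0.2.0/24")]
    print(sorted(mixed, key=ipaddress.get_mixed_type_key))   # mixed types sort without a TypeError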
-""" + """Return elements from the iterable until it is exhausted. Then repeat the sequence indefinitely.""" + def __new__(cls, iterable: Iterable[_T], /) -> Self: ... def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" + def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" @disjoint_base class repeat(Generic[_T]): """repeat(object [,times]) -> create an iterator which returns the object -for the specified number of times. If not specified, returns the object -endlessly. -""" + for the specified number of times. If not specified, returns the object + endlessly. + """ + @overload def __new__(cls, object: _T) -> Self: ... @overload def __new__(cls, object: _T, times: int) -> Self: ... def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" + def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __length_hint__(self) -> int: - """Private method returning an estimate of len(list(it)). -""" + """Private method returning an estimate of len(list(it)).""" @disjoint_base class accumulate(Generic[_T]): - """Return series of accumulated sums (or other binary function results). -""" + """Return series of accumulated sums (or other binary function results).""" + @overload def __new__(cls, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> Self: ... @overload def __new__(cls, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class chain(Generic[_T]): """Return a chain object whose .__next__() method returns elements from the -first iterable until it is exhausted, then elements from the next -iterable, until all of the iterables are exhausted. -""" + first iterable until it is exhausted, then elements from the next + iterable, until all of the iterables are exhausted. + """ + def __new__(cls, *iterables: Iterable[_T]) -> Self: ... def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" + def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable def from_iterable(cls: type[Any], iterable: Iterable[Iterable[_S]], /) -> chain[_S]: - """Alternative chain() constructor taking a single iterable argument that evaluates lazily. -""" + """Alternative chain() constructor taking a single iterable argument that evaluates lazily.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @disjoint_base class compress(Generic[_T]): """Return data elements corresponding to true selector elements. -Forms a shorter iterator from selected data elements using the selectors to -choose the data elements. -""" + Forms a shorter iterator from selected data elements using the selectors to + choose the data elements. + """ + def __new__(cls, data: Iterable[_T], selectors: Iterable[Any]) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class dropwhile(Generic[_T]): """Drop items from the iterable while predicate(item) is true. 
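A quick sketch of the basic itertools iterators documented above (count, accumulate, chain, compress); illustrative usage only:

    import itertools as it

    print(list(it.islice(it.count(10, 2), 4)))               # [10, 12, 14, 16]
    print(list(it.accumulate([1, 2, 3, 4])))                 # running sums: [1, 3, 6, 10]
    print(list(it.chain.from_iterable(["ab", "cd"])))        # ['a', 'b', 'c', 'd']
    print(list(it.compress("ABCDEF", [1, 0, 1, 0, 1, 1])))   # ['A', 'C', 'E', 'F']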
-Afterwards, return every element until the iterable is exhausted. -""" + Afterwards, return every element until the iterable is exhausted. + """ + def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class filterfalse(Generic[_T]): """Return those items of iterable for which function(item) is false. -If function is None, return the items that are false. -""" + If function is None, return the items that are false. + """ + def __new__(cls, function: _Predicate[_T] | None, iterable: Iterable[_T], /) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class groupby(Generic[_T_co, _S_co]): """make an iterator that returns consecutive keys and groups from the iterable - iterable - Elements to divide into groups according to the key function. - key - A function for computing the group category for each element. - If the key function is not specified or is None, the element itself - is used for grouping. -""" + iterable + Elements to divide into groups according to the key function. + key + A function for computing the group category for each element. + If the key function is not specified or is None, the element itself + is used for grouping. + """ + @overload def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> tuple[_T_co, Iterator[_S_co]]: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class islice(Generic[_T]): """islice(iterable, stop) --> islice object -islice(iterable, start, stop[, step]) --> islice object - -Return an iterator whose next() method returns selected values from an -iterable. If start is specified, will skip all preceding elements; -otherwise, start defaults to zero. Step defaults to one. If -specified as another value, step determines how many values are -skipped between successive calls. Works like a slice() on a list -but returns an iterator. -""" + islice(iterable, start, stop[, step]) --> islice object + + Return an iterator whose next() method returns selected values from an + iterable. If start is specified, will skip all preceding elements; + otherwise, start defaults to zero. Step defaults to one. If + specified as another value, step determines how many values are + skipped between successive calls. Works like a slice() on a list + but returns an iterator. + """ + @overload def __new__(cls, iterable: Iterable[_T], stop: int | None, /) -> Self: ... @overload def __new__(cls, iterable: Iterable[_T], start: int | None, stop: int | None, step: int | None = ..., /) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class starmap(Generic[_T_co]): - """Return an iterator whose values are returned from the function evaluated with an argument tuple taken from the given sequence. 
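An illustrative sketch of the predicate-driven iterators and groupby documented above (not part of the stubs):

    import itertools as it

    print(list(it.dropwhile(lambda x: x < 3, [1, 2, 3, 4, 1])))   # [3, 4, 1]
    print(list(it.filterfalse(lambda x: x % 2, range(6))))        # [0, 2, 4]
    for key, group in it.groupby("aaabbc"):                       # consecutive runs, keyed by the element itself
        print(key, list(group))   # a ['a', 'a', 'a'] / b ['b', 'b'] / c ['c']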
-""" + """Return an iterator whose values are returned from the function evaluated with an argument tuple taken from the given sequence.""" + def __new__(cls, function: Callable[..., _T], iterable: Iterable[Iterable[Any]], /) -> starmap[_T]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class takewhile(Generic[_T]): - """Return successive entries from an iterable as long as the predicate evaluates to true for each entry. -""" + """Return successive entries from an iterable as long as the predicate evaluates to true for each entry.""" + def __new__(cls, predicate: _Predicate[_T], iterable: Iterable[_T], /) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T: - """Implement next(self). -""" + """Implement next(self).""" def tee(iterable: Iterable[_T], n: int = 2, /) -> tuple[Iterator[_T], ...]: - """Returns a tuple of n independent iterators. -""" + """Returns a tuple of n independent iterators.""" + @disjoint_base class zip_longest(Generic[_T_co]): """Return a zip_longest object whose .__next__() method returns a tuple where -the i-th element comes from the i-th iterable argument. The .__next__() -method continues until the longest iterable in the argument sequence -is exhausted and then it raises StopIteration. When the shorter iterables -are exhausted, the fillvalue is substituted in their place. The fillvalue -defaults to None or can be specified by a keyword argument. -""" + the i-th element comes from the i-th iterable argument. The .__next__() + method continues until the longest iterable in the argument sequence + is exhausted and then it raises StopIteration. When the shorter iterables + are exhausted, the fillvalue is substituted in their place. The fillvalue + defaults to None or can be specified by a keyword argument. + """ + # one iterable (fillvalue doesn't matter) @overload def __new__(cls, iter1: Iterable[_T1], /, *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ... @@ -349,28 +347,28 @@ defaults to None or can be specified by a keyword argument. fillvalue: _T, ) -> zip_longest[tuple[_T, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class product(Generic[_T_co]): """Cartesian product of input iterables. Equivalent to nested for-loops. -For example, product(A, B) returns the same as: ((x,y) for x in A for y in B). -The leftmost iterators are in the outermost for-loop, so the output tuples -cycle in a manner similar to an odometer (with the rightmost element changing -on every iteration). + For example, product(A, B) returns the same as: ((x,y) for x in A for y in B). + The leftmost iterators are in the outermost for-loop, so the output tuples + cycle in a manner similar to an odometer (with the rightmost element changing + on every iteration). -To compute the product of an iterable with itself, specify the number -of repetitions with the optional repeat keyword argument. For example, -product(A, repeat=4) means the same as product(A, A, A, A). + To compute the product of an iterable with itself, specify the number + of repetitions with the optional repeat keyword argument. For example, + product(A, repeat=4) means the same as product(A, A, A, A). 
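A short sketch of starmap, takewhile and tee as documented above (illustrative usage only):

    import itertools as it

    print(list(it.starmap(pow, [(2, 5), (3, 2)])))             # [32, 9]
    print(list(it.takewhile(lambda x: x < 3, [1, 2, 3, 1])))   # [1, 2]
    a, b = it.tee(range(3))                                    # two independent iterators
    print(list(a), list(b))                                    # [0, 1, 2] [0, 1, 2]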
+ + product('ab', range(3)) --> ('a',0) ('a',1) ('a',2) ('b',0) ('b',1) ('b',2) + product((0,1), (0,1), (0,1)) --> (0,0,0) (0,0,1) (0,1,0) (0,1,1) (1,0,0) ... + """ -product('ab', range(3)) --> ('a',0) ('a',1) ('a',2) ('b',0) ('b',1) ('b',2) -product((0,1), (0,1), (0,1)) --> (0,0,0) (0,0,1) (0,1,0) (0,1,1) (1,0,0) ... -""" @overload def __new__(cls, iter1: Iterable[_T1], /) -> product[tuple[_T1]]: ... @overload @@ -453,18 +451,18 @@ product((0,1), (0,1), (0,1)) --> (0,0,0) (0,0,1) (0,1,0) (0,1,1) (1,0,0) ... @overload def __new__(cls, *iterables: Iterable[_T1], repeat: int = 1) -> product[tuple[_T1, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class permutations(Generic[_T_co]): """Return successive r-length permutations of elements in the iterable. -permutations(range(3), 2) --> (0,1), (0,2), (1,0), (1,2), (2,0), (2,1) -""" + permutations(range(3), 2) --> (0,1), (0,2), (1,0), (1,2), (2,0), (2,1) + """ + @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> permutations[tuple[_T, _T]]: ... @overload @@ -476,18 +474,18 @@ permutations(range(3), 2) --> (0,1), (0,2), (1,0), (1,2), (2,0), (2,1) @overload def __new__(cls, iterable: Iterable[_T], r: int | None = ...) -> permutations[tuple[_T, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class combinations(Generic[_T_co]): """Return successive r-length combinations of elements in the iterable. -combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3) -""" + combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3) + """ + @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... @overload @@ -499,18 +497,18 @@ combinations(range(4), 3) --> (0,1,2), (0,1,3), (0,2,3), (1,2,3) @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" @disjoint_base class combinations_with_replacement(Generic[_T_co]): """Return successive r-length combinations of elements in the iterable allowing individual elements to have successive repeats. -combinations_with_replacement('ABC', 2) --> ('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C') -""" + combinations_with_replacement('ABC', 2) --> ('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C') + """ + @overload def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations_with_replacement[tuple[_T, _T]]: ... @overload @@ -522,55 +520,54 @@ combinations_with_replacement('ABC', 2) --> ('A','A'), ('A','B'), ('A','C'), ('B @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations_with_replacement[tuple[_T, ...]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" if sys.version_info >= (3, 10): @disjoint_base class pairwise(Generic[_T_co]): """Return an iterator of overlapping pairs taken from the input iterator. - s -> (s0,s1), (s1,s2), (s2, s3), ... -""" + s -> (s0,s1), (s1,s2), (s2, s3), ... 
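An illustrative sketch of zip_longest and the combinatoric iterators, matching the examples in the docstrings above:

    import itertools as it

    print(list(it.zip_longest("AB", "xyz", fillvalue="-")))   # [('A', 'x'), ('B', 'y'), ('-', 'z')]
    print(list(it.product("ab", range(2))))                   # [('a', 0), ('a', 1), ('b', 0), ('b', 1)]
    print(list(it.permutations(range(3), 2)))                 # [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
    print(list(it.combinations(range(4), 3)))                 # [(0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3)]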
+ """ + def __new__(cls, iterable: Iterable[_T], /) -> pairwise[tuple[_T, _T]]: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _T_co: - """Implement next(self). -""" + """Implement next(self).""" if sys.version_info >= (3, 12): @disjoint_base class batched(Generic[_T_co]): """Batch data into tuples of length n. The last batch may be shorter than n. -Loops over the input iterable and accumulates data into tuples -up to size n. The input is consumed lazily, just enough to -fill a batch. The result is yielded as soon as a batch is full -or when the input iterable is exhausted. + Loops over the input iterable and accumulates data into tuples + up to size n. The input is consumed lazily, just enough to + fill a batch. The result is yielded as soon as a batch is full + or when the input iterable is exhausted. - >>> for batch in batched('ABCDEFG', 3): - ... print(batch) - ... - ('A', 'B', 'C') - ('D', 'E', 'F') - ('G',) + >>> for batch in batched('ABCDEFG', 3): + ... print(batch) + ... + ('A', 'B', 'C') + ('D', 'E', 'F') + ('G',) + + If "strict" is True, raises a ValueError if the final batch is shorter + than n. + """ -If "strict" is True, raises a ValueError if the final batch is shorter -than n. -""" if sys.version_info >= (3, 13): def __new__(cls, iterable: Iterable[_T_co], n: int, *, strict: bool = False) -> Self: ... else: def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> tuple[_T_co, ...]: - """Implement next(self). -""" + """Implement next(self).""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi index 8d9912dd25f0e..b911c1315a5c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/__init__.pyi @@ -95,6 +95,7 @@ Using json from the shell to validate and pretty-print:: $ echo '{ 1.2:3.4}' | python -m json Expecting property name enclosed in double quotes: line 1 column 3 (char 2) """ + from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable from typing import Any @@ -120,44 +121,45 @@ def dumps( ) -> str: """Serialize ``obj`` to a JSON formatted ``str``. -If ``skipkeys`` is true then ``dict`` keys that are not basic types -(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped -instead of raising a ``TypeError``. + If ``skipkeys`` is true then ``dict`` keys that are not basic types + (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped + instead of raising a ``TypeError``. -If ``ensure_ascii`` is false, then the return value can contain non-ASCII -characters if they appear in strings contained in ``obj``. Otherwise, all -such characters are escaped in JSON strings. + If ``ensure_ascii`` is false, then the return value can contain non-ASCII + characters if they appear in strings contained in ``obj``. Otherwise, all + such characters are escaped in JSON strings. -If ``check_circular`` is false, then the circular reference check -for container types will be skipped and a circular reference will -result in an ``RecursionError`` (or worse). + If ``check_circular`` is false, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``RecursionError`` (or worse). 
-If ``allow_nan`` is false, then it will be a ``ValueError`` to -serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in -strict compliance of the JSON specification, instead of using the -JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + If ``allow_nan`` is false, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in + strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). -If ``indent`` is a non-negative integer, then JSON array elements and -object members will be pretty-printed with that indent level. An indent -level of 0 will only insert newlines. ``None`` is the most compact -representation. + If ``indent`` is a non-negative integer, then JSON array elements and + object members will be pretty-printed with that indent level. An indent + level of 0 will only insert newlines. ``None`` is the most compact + representation. -If specified, ``separators`` should be an ``(item_separator, key_separator)`` -tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and -``(',', ': ')`` otherwise. To get the most compact JSON representation, -you should specify ``(',', ':')`` to eliminate whitespace. + If specified, ``separators`` should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and + ``(',', ': ')`` otherwise. To get the most compact JSON representation, + you should specify ``(',', ':')`` to eliminate whitespace. -``default(obj)`` is a function that should return a serializable version -of obj or raise TypeError. The default simply raises TypeError. + ``default(obj)`` is a function that should return a serializable version + of obj or raise TypeError. The default simply raises TypeError. -If *sort_keys* is true (default: ``False``), then the output of -dictionaries will be sorted by key. + If *sort_keys* is true (default: ``False``), then the output of + dictionaries will be sorted by key. -To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the -``.default()`` method to serialize additional types), specify it with -the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + + """ -""" def dump( obj: Any, fp: SupportsWrite[str], @@ -174,46 +176,47 @@ def dump( **kwds: Any, ) -> None: """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a -``.write()``-supporting file-like object). + ``.write()``-supporting file-like object). -If ``skipkeys`` is true then ``dict`` keys that are not basic types -(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped -instead of raising a ``TypeError``. + If ``skipkeys`` is true then ``dict`` keys that are not basic types + (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped + instead of raising a ``TypeError``. -If ``ensure_ascii`` is false, then the strings written to ``fp`` can -contain non-ASCII characters if they appear in strings contained in -``obj``. Otherwise, all such characters are escaped in JSON strings. + If ``ensure_ascii`` is false, then the strings written to ``fp`` can + contain non-ASCII characters if they appear in strings contained in + ``obj``. Otherwise, all such characters are escaped in JSON strings. 
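# Small sketch of the dumps() keyword arguments described above:
import datetime
import json

json.dumps({"b": 1, "a": 2}, sort_keys=True, separators=(",", ":"))
# '{"a":2,"b":1}' -- most compact form, keys sorted

json.dumps({"café": 1}, ensure_ascii=False)   # '{"café": 1}' instead of '{"caf\\u00e9": 1}'

json.dumps({"when": datetime.date(2024, 1, 1)}, default=str)
# '{"when": "2024-01-01"}' -- default() supplies a serializable stand-in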
-If ``check_circular`` is false, then the circular reference check -for container types will be skipped and a circular reference will -result in an ``RecursionError`` (or worse). + If ``check_circular`` is false, then the circular reference check + for container types will be skipped and a circular reference will + result in an ``RecursionError`` (or worse). -If ``allow_nan`` is false, then it will be a ``ValueError`` to -serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) -in strict compliance of the JSON specification, instead of using the -JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). + If ``allow_nan`` is false, then it will be a ``ValueError`` to + serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) + in strict compliance of the JSON specification, instead of using the + JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). -If ``indent`` is a non-negative integer, then JSON array elements and -object members will be pretty-printed with that indent level. An indent -level of 0 will only insert newlines. ``None`` is the most compact -representation. + If ``indent`` is a non-negative integer, then JSON array elements and + object members will be pretty-printed with that indent level. An indent + level of 0 will only insert newlines. ``None`` is the most compact + representation. -If specified, ``separators`` should be an ``(item_separator, key_separator)`` -tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and -``(',', ': ')`` otherwise. To get the most compact JSON representation, -you should specify ``(',', ':')`` to eliminate whitespace. + If specified, ``separators`` should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and + ``(',', ': ')`` otherwise. To get the most compact JSON representation, + you should specify ``(',', ':')`` to eliminate whitespace. -``default(obj)`` is a function that should return a serializable version -of obj or raise TypeError. The default simply raises TypeError. + ``default(obj)`` is a function that should return a serializable version + of obj or raise TypeError. The default simply raises TypeError. -If *sort_keys* is true (default: ``False``), then the output of -dictionaries will be sorted by key. + If *sort_keys* is true (default: ``False``), then the output of + dictionaries will be sorted by key. -To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the -``.default()`` method to serialize additional types), specify it with -the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the + ``.default()`` method to serialize additional types), specify it with + the ``cls`` kwarg; otherwise ``JSONEncoder`` is used. + + """ -""" def loads( s: str | bytes | bytearray, *, @@ -226,37 +229,38 @@ def loads( **kwds: Any, ) -> Any: """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance -containing a JSON document) to a Python object. - -``object_hook`` is an optional function that will be called with the -result of any object literal decode (a ``dict``). The return value of -``object_hook`` will be used instead of the ``dict``. This feature -can be used to implement custom decoders (e.g. JSON-RPC class hinting). - -``object_pairs_hook`` is an optional function that will be called with the -result of any object literal decoded with an ordered list of pairs. The -return value of ``object_pairs_hook`` will be used instead of the ``dict``. 
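# Sketch of dump(): same keywords as dumps(), but it writes to any object with
# a .write() method (a real file, io.StringIO, a wrapped socket, ...):
import io
import json

buf = io.StringIO()
json.dump({"x": [1, 2, 3]}, buf, indent=2)
buf.getvalue()   # '{\n  "x": [\n    1,\n    2,\n    3\n  ]\n}'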
-This feature can be used to implement custom decoders. If ``object_hook`` -is also defined, the ``object_pairs_hook`` takes priority. - -``parse_float``, if specified, will be called with the string -of every JSON float to be decoded. By default this is equivalent to -float(num_str). This can be used to use another datatype or parser -for JSON floats (e.g. decimal.Decimal). - -``parse_int``, if specified, will be called with the string -of every JSON int to be decoded. By default this is equivalent to -int(num_str). This can be used to use another datatype or parser -for JSON integers (e.g. float). - -``parse_constant``, if specified, will be called with one of the -following strings: -Infinity, Infinity, NaN. -This can be used to raise an exception if invalid JSON numbers -are encountered. - -To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` -kwarg; otherwise ``JSONDecoder`` is used. -""" + containing a JSON document) to a Python object. + + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). + + ``object_pairs_hook`` is an optional function that will be called with the + result of any object literal decoded with an ordered list of pairs. The + return value of ``object_pairs_hook`` will be used instead of the ``dict``. + This feature can be used to implement custom decoders. If ``object_hook`` + is also defined, the ``object_pairs_hook`` takes priority. + + ``parse_float``, if specified, will be called with the string + of every JSON float to be decoded. By default this is equivalent to + float(num_str). This can be used to use another datatype or parser + for JSON floats (e.g. decimal.Decimal). + + ``parse_int``, if specified, will be called with the string + of every JSON int to be decoded. By default this is equivalent to + int(num_str). This can be used to use another datatype or parser + for JSON integers (e.g. float). + + ``parse_constant``, if specified, will be called with one of the + following strings: -Infinity, Infinity, NaN. + This can be used to raise an exception if invalid JSON numbers + are encountered. + + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg; otherwise ``JSONDecoder`` is used. + """ + def load( fp: SupportsRead[str | bytes], *, @@ -269,20 +273,21 @@ def load( **kwds: Any, ) -> Any: """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing -a JSON document) to a Python object. + a JSON document) to a Python object. -``object_hook`` is an optional function that will be called with the -result of any object literal decode (a ``dict``). The return value of -``object_hook`` will be used instead of the ``dict``. This feature -can be used to implement custom decoders (e.g. JSON-RPC class hinting). + ``object_hook`` is an optional function that will be called with the + result of any object literal decode (a ``dict``). The return value of + ``object_hook`` will be used instead of the ``dict``. This feature + can be used to implement custom decoders (e.g. JSON-RPC class hinting). -``object_pairs_hook`` is an optional function that will be called with the -result of any object literal decoded with an ordered list of pairs. The -return value of ``object_pairs_hook`` will be used instead of the ``dict``. -This feature can be used to implement custom decoders. 
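# Sketch of the loads() hooks described above:
import json
from decimal import Decimal

json.loads('{"price": 1.10}', parse_float=Decimal)
# {'price': Decimal('1.10')} -- every JSON float string goes through parse_float

json.loads('{"a": 1}', object_hook=lambda d: {k.upper(): v for k, v in d.items()})
# {'A': 1} -- object_hook replaces each decoded dict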
If ``object_hook`` -is also defined, the ``object_pairs_hook`` takes priority. + ``object_pairs_hook`` is an optional function that will be called with the + result of any object literal decoded with an ordered list of pairs. The + return value of ``object_pairs_hook`` will be used instead of the ``dict``. + This feature can be used to implement custom decoders. If ``object_hook`` + is also defined, the ``object_pairs_hook`` takes priority. + + To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` + kwarg; otherwise ``JSONDecoder`` is used. + """ -To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` -kwarg; otherwise ``JSONDecoder`` is used. -""" def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi index 41e4cd16e41d2..9f579adab15e5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/decoder.pyi @@ -1,5 +1,5 @@ -"""Implementation of JSONDecoder -""" +"""Implementation of JSONDecoder""" + from collections.abc import Callable from typing import Any @@ -8,13 +8,14 @@ __all__ = ["JSONDecoder", "JSONDecodeError"] class JSONDecodeError(ValueError): """Subclass of ValueError with the following additional properties: -msg: The unformatted error message -doc: The JSON document being parsed -pos: The start index of doc where parsing failed -lineno: The line corresponding to pos -colno: The column corresponding to pos + msg: The unformatted error message + doc: The JSON document being parsed + pos: The start index of doc where parsing failed + lineno: The line corresponding to pos + colno: The column corresponding to pos + + """ -""" msg: str doc: str pos: int @@ -25,32 +26,33 @@ colno: The column corresponding to pos class JSONDecoder: """Simple JSON decoder -Performs the following translations in decoding by default: - -+---------------+-------------------+ -| JSON | Python | -+===============+===================+ -| object | dict | -+---------------+-------------------+ -| array | list | -+---------------+-------------------+ -| string | str | -+---------------+-------------------+ -| number (int) | int | -+---------------+-------------------+ -| number (real) | float | -+---------------+-------------------+ -| true | True | -+---------------+-------------------+ -| false | False | -+---------------+-------------------+ -| null | None | -+---------------+-------------------+ - -It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as -their corresponding ``float`` values, which is outside the JSON spec. - -""" + Performs the following translations in decoding by default: + + +---------------+-------------------+ + | JSON | Python | + +===============+===================+ + | object | dict | + +---------------+-------------------+ + | array | list | + +---------------+-------------------+ + | string | str | + +---------------+-------------------+ + | number (int) | int | + +---------------+-------------------+ + | number (real) | float | + +---------------+-------------------+ + | true | True | + +---------------+-------------------+ + | false | False | + +---------------+-------------------+ + | null | None | + +---------------+-------------------+ + + It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as + their corresponding ``float`` values, which is outside the JSON spec. 
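# Sketch of the JSONDecodeError attributes listed above:
import json

try:
    json.loads('{"a": }')
except json.JSONDecodeError as exc:
    exc.msg, exc.lineno, exc.colno, exc.pos   # ('Expecting value', 1, 7, 6)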
+ + """ + object_hook: Callable[[dict[str, Any]], Any] parse_float: Callable[[str], Any] parse_int: Callable[[str], Any] @@ -68,48 +70,50 @@ their corresponding ``float`` values, which is outside the JSON spec. object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, ) -> None: """``object_hook``, if specified, will be called with the result -of every JSON object decoded and its return value will be used in -place of the given ``dict``. This can be used to provide custom -deserializations (e.g. to support JSON-RPC class hinting). - -``object_pairs_hook``, if specified will be called with the result of -every JSON object decoded with an ordered list of pairs. The return -value of ``object_pairs_hook`` will be used instead of the ``dict``. -This feature can be used to implement custom decoders. -If ``object_hook`` is also defined, the ``object_pairs_hook`` takes -priority. - -``parse_float``, if specified, will be called with the string -of every JSON float to be decoded. By default this is equivalent to -float(num_str). This can be used to use another datatype or parser -for JSON floats (e.g. decimal.Decimal). - -``parse_int``, if specified, will be called with the string -of every JSON int to be decoded. By default this is equivalent to -int(num_str). This can be used to use another datatype or parser -for JSON integers (e.g. float). - -``parse_constant``, if specified, will be called with one of the -following strings: -Infinity, Infinity, NaN. -This can be used to raise an exception if invalid JSON numbers -are encountered. - -If ``strict`` is false (true is the default), then control -characters will be allowed inside strings. Control characters in -this context are those with character codes in the 0-31 range, -including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``. -""" + of every JSON object decoded and its return value will be used in + place of the given ``dict``. This can be used to provide custom + deserializations (e.g. to support JSON-RPC class hinting). + + ``object_pairs_hook``, if specified will be called with the result of + every JSON object decoded with an ordered list of pairs. The return + value of ``object_pairs_hook`` will be used instead of the ``dict``. + This feature can be used to implement custom decoders. + If ``object_hook`` is also defined, the ``object_pairs_hook`` takes + priority. + + ``parse_float``, if specified, will be called with the string + of every JSON float to be decoded. By default this is equivalent to + float(num_str). This can be used to use another datatype or parser + for JSON floats (e.g. decimal.Decimal). + + ``parse_int``, if specified, will be called with the string + of every JSON int to be decoded. By default this is equivalent to + int(num_str). This can be used to use another datatype or parser + for JSON integers (e.g. float). + + ``parse_constant``, if specified, will be called with one of the + following strings: -Infinity, Infinity, NaN. + This can be used to raise an exception if invalid JSON numbers + are encountered. + + If ``strict`` is false (true is the default), then control + characters will be allowed inside strings. Control characters in + this context are those with character codes in the 0-31 range, + including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``. + """ + def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: # _w is undocumented """Return the Python representation of ``s`` (a ``str`` instance -containing a JSON document). + containing a JSON document). 
+ + """ -""" def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: """Decode a JSON document from ``s`` (a ``str`` beginning with -a JSON document) and return a 2-tuple of the Python -representation and the index in ``s`` where the document ended. + a JSON document) and return a 2-tuple of the Python + representation and the index in ``s`` where the document ended. -This can be used to decode a JSON document from a string that may -have extraneous data at the end. + This can be used to decode a JSON document from a string that may + have extraneous data at the end. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi index 205cefa0fb604..b28d343d59356 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/encoder.pyi @@ -1,5 +1,5 @@ -"""Implementation of JSONEncoder -""" +"""Implementation of JSONEncoder""" + from collections.abc import Callable, Iterator from re import Pattern from typing import Any, Final @@ -11,53 +11,53 @@ ESCAPE_DCT: Final[dict[str, str]] # undocumented INFINITY: Final[float] # undocumented def py_encode_basestring(s: str) -> str: # undocumented - """Return a JSON representation of a Python string + """Return a JSON representation of a Python string""" - """ def py_encode_basestring_ascii(s: str) -> str: # undocumented - """Return an ASCII-only JSON representation of a Python string + """Return an ASCII-only JSON representation of a Python string""" - """ def encode_basestring(s: str, /) -> str: # undocumented """encode_basestring(string) -> string -Return a JSON representation of a Python string -""" + Return a JSON representation of a Python string + """ + def encode_basestring_ascii(s: str, /) -> str: # undocumented """encode_basestring_ascii(string) -> string -Return an ASCII-only JSON representation of a Python string -""" + Return an ASCII-only JSON representation of a Python string + """ class JSONEncoder: """Extensible JSON encoder for Python data structures. -Supports the following objects and types by default: - -+-------------------+---------------+ -| Python | JSON | -+===================+===============+ -| dict | object | -+-------------------+---------------+ -| list, tuple | array | -+-------------------+---------------+ -| str | string | -+-------------------+---------------+ -| int, float | number | -+-------------------+---------------+ -| True | true | -+-------------------+---------------+ -| False | false | -+-------------------+---------------+ -| None | null | -+-------------------+---------------+ - -To extend this to recognize other objects, subclass and implement a -``.default()`` method with another method that returns a serializable -object for ``o`` if possible, otherwise it should call the superclass -implementation (to raise ``TypeError``). 
- -""" + Supports the following objects and types by default: + + +-------------------+---------------+ + | Python | JSON | + +===================+===============+ + | dict | object | + +-------------------+---------------+ + | list, tuple | array | + +-------------------+---------------+ + | str | string | + +-------------------+---------------+ + | int, float | number | + +-------------------+---------------+ + | True | true | + +-------------------+---------------+ + | False | false | + +-------------------+---------------+ + | None | null | + +-------------------+---------------+ + + To extend this to recognize other objects, subclass and implement a + ``.default()`` method with another method that returns a serializable + object for ``o`` if possible, otherwise it should call the superclass + implementation (to raise ``TypeError``). + + """ + item_separator: str key_separator: str @@ -81,77 +81,80 @@ implementation (to raise ``TypeError``). ) -> None: """Constructor for JSONEncoder, with sensible defaults. -If skipkeys is false, then it is a TypeError to attempt -encoding of keys that are not str, int, float, bool or None. -If skipkeys is True, such items are simply skipped. + If skipkeys is false, then it is a TypeError to attempt + encoding of keys that are not str, int, float, bool or None. + If skipkeys is True, such items are simply skipped. + + If ensure_ascii is true, the output is guaranteed to be str + objects with all incoming non-ASCII characters escaped. If + ensure_ascii is false, the output can contain non-ASCII characters. -If ensure_ascii is true, the output is guaranteed to be str -objects with all incoming non-ASCII characters escaped. If -ensure_ascii is false, the output can contain non-ASCII characters. + If check_circular is true, then lists, dicts, and custom encoded + objects will be checked for circular references during encoding to + prevent an infinite recursion (which would cause an RecursionError). + Otherwise, no such check takes place. -If check_circular is true, then lists, dicts, and custom encoded -objects will be checked for circular references during encoding to -prevent an infinite recursion (which would cause an RecursionError). -Otherwise, no such check takes place. + If allow_nan is true, then NaN, Infinity, and -Infinity will be + encoded as such. This behavior is not JSON specification compliant, + but is consistent with most JavaScript based encoders and decoders. + Otherwise, it will be a ValueError to encode such floats. -If allow_nan is true, then NaN, Infinity, and -Infinity will be -encoded as such. This behavior is not JSON specification compliant, -but is consistent with most JavaScript based encoders and decoders. -Otherwise, it will be a ValueError to encode such floats. + If sort_keys is true, then the output of dictionaries will be + sorted by key; this is useful for regression tests to ensure + that JSON serializations can be compared on a day-to-day basis. -If sort_keys is true, then the output of dictionaries will be -sorted by key; this is useful for regression tests to ensure -that JSON serializations can be compared on a day-to-day basis. + If indent is a non-negative integer, then JSON array + elements and object members will be pretty-printed with that + indent level. An indent level of 0 will only insert newlines. + None is the most compact representation. -If indent is a non-negative integer, then JSON array -elements and object members will be pretty-printed with that -indent level. 
An indent level of 0 will only insert newlines. -None is the most compact representation. + If specified, separators should be an (item_separator, key_separator) + tuple. The default is (', ', ': ') if *indent* is ``None`` and + (',', ': ') otherwise. To get the most compact JSON representation, + you should specify (',', ':') to eliminate whitespace. -If specified, separators should be an (item_separator, key_separator) -tuple. The default is (', ', ': ') if *indent* is ``None`` and -(',', ': ') otherwise. To get the most compact JSON representation, -you should specify (',', ':') to eliminate whitespace. + If specified, default is a function that gets called for objects + that can't otherwise be serialized. It should return a JSON encodable + version of the object or raise a ``TypeError``. -If specified, default is a function that gets called for objects -that can't otherwise be serialized. It should return a JSON encodable -version of the object or raise a ``TypeError``. + """ -""" def default(self, o: Any) -> Any: """Implement this method in a subclass such that it returns -a serializable object for ``o``, or calls the base implementation -(to raise a ``TypeError``). - -For example, to support arbitrary iterators, you could -implement default like this:: - - def default(self, o): - try: - iterable = iter(o) - except TypeError: - pass - else: - return list(iterable) - # Let the base class default method raise the TypeError - return super().default(o) - -""" + a serializable object for ``o``, or calls the base implementation + (to raise a ``TypeError``). + + For example, to support arbitrary iterators, you could + implement default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + # Let the base class default method raise the TypeError + return super().default(o) + + """ + def encode(self, o: Any) -> str: """Return a JSON string representation of a Python data structure. ->>> from json.encoder import JSONEncoder ->>> JSONEncoder().encode({"foo": ["bar", "baz"]}) -'{"foo": ["bar", "baz"]}' + >>> from json.encoder import JSONEncoder + >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) + '{"foo": ["bar", "baz"]}' + + """ -""" def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: """Encode the given object and yield each string -representation as available. + representation as available. -For example:: + For example:: - for chunk in JSONEncoder().iterencode(bigobject): - mysocket.write(chunk) + for chunk in JSONEncoder().iterencode(bigobject): + mysocket.write(chunk) -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi index 176199758b490..ea8f53c47c291 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/scanner.pyi @@ -1,5 +1,5 @@ -"""JSON token scanner -""" +"""JSON token scanner""" + from _json import make_scanner as make_scanner from re import Pattern from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi index 1603efe4b546f..1ca4f2ec301b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/json/tool.pyi @@ -3,4 +3,5 @@ See `json.__main__` for a usage example (invocation as `python -m json.tool` is supported for backwards compatibility). """ + def main() -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi index 6f3207b54f81a..dde44c3fe1c5b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/keyword.pyi @@ -9,22 +9,21 @@ the python source tree and run: Alternatively, you can run 'make regen-keyword'. """ + from collections.abc import Sequence from typing import Final __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] def iskeyword(s: str, /) -> bool: - """x.__contains__(y) <==> y in x. -""" + """x.__contains__(y) <==> y in x.""" # a list at runtime, but you're not meant to mutate it; # type it as a sequence kwlist: Final[Sequence[str]] def issoftkeyword(s: str, /) -> bool: - """x.__contains__(y) <==> y in x. -""" + """x.__contains__(y) <==> y in x.""" # a list at runtime, but you're not meant to mutate it; # type it as a sequence diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi index 104c54bb9f99c..5f2b10c0dd9f7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/btm_matcher.pyi @@ -5,6 +5,7 @@ created. The linear automaton traverses the linear paths from the leaves to the root of the AST and returns a set of nodes for further matching. This reduces significantly the number of candidate nodes. """ + from _typeshed import Incomplete, SupportsGetItem from collections import defaultdict from collections.abc import Iterable @@ -13,8 +14,8 @@ from .fixer_base import BaseFix from .pytree import Leaf, Node class BMNode: - """Class for a node of the Aho-Corasick automaton used in matching -""" + """Class for a node of the Aho-Corasick automaton used in matching""" + count: Incomplete transition_table: Incomplete fixers: Incomplete @@ -25,7 +26,8 @@ class BMNode: class BottomMatcher: """The main matcher class. After instantiating the patterns should be added using the add_fixer method -""" + """ + match: Incomplete root: Incomplete nodes: Incomplete @@ -37,10 +39,11 @@ class BottomMatcher: to the matcher(a common Aho-Corasick automaton). The fixer is appended on the matching states and called when they are reached -""" + """ + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: - """Recursively adds a linear pattern to the AC automaton -""" + """Recursively adds a linear pattern to the AC automaton""" + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: """The main interface with the bottom matcher. The tree is traversed from the bottom using the constructed @@ -57,8 +60,8 @@ class BottomMatcher: Returns: A dictionary of node matches with fixers as the keys """ + def print_ac(self) -> None: - """Prints a graphviz diagram of the BM automaton(for debugging) -""" + """Prints a graphviz diagram of the BM automaton(for debugging)""" def type_repr(type_num: int) -> str | int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi index 2983afc568251..95baa2bef0869 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixer_base.pyi @@ -1,5 +1,5 @@ -"""Base class for fixers (optional, but recommended). 
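# Sketch of the keyword-module predicates stubbed above (soft-keyword results
# assume Python 3.10+, where match/case were introduced):
import keyword

keyword.iskeyword("for")        # True
keyword.iskeyword("match")      # False -- soft keywords are not in kwlist
keyword.issoftkeyword("match")  # True
"case" in keyword.softkwlist    # True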
-""" +"""Base class for fixers (optional, but recommended).""" + from _typeshed import Incomplete, StrPath from abc import ABCMeta, abstractmethod from collections.abc import MutableMapping @@ -17,6 +17,7 @@ class BaseFix: For example, the class name for a fixer named 'has_key' should be FixHasKey. """ + PATTERN: ClassVar[str | None] pattern: Incomplete | None pattern_tree: Incomplete | None @@ -39,17 +40,20 @@ class BaseFix: that could be used to customize the fixer through the command line. log: a list to append warnings and other messages to. """ + def compile_pattern(self) -> None: """Compiles self.PATTERN into self.pattern. Subclass may override if it doesn't want to use self.{pattern,PATTERN} in .match(). """ + def set_filename(self, filename: StrPath) -> None: """Set the filename. The main refactoring tool should call this. """ + def match(self, node: _N) -> Literal[False] | dict[str, _N]: """Returns match for a given parse tree node. @@ -59,6 +63,7 @@ class BaseFix: Subclass may override. """ + @abstractmethod def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: """Returns the transformation for a given parse tree node. @@ -74,6 +79,7 @@ class BaseFix: Subclass *must* override. """ + def new_name(self, template: str = "xxx_todo_changeme") -> str: """Return a string suitable for use as an identifier @@ -88,6 +94,7 @@ class BaseFix: First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ + def warning(self, node: Base, reason: str) -> None: """Used for warning the user about possible uncertainty in the translation. @@ -95,6 +102,7 @@ class BaseFix: First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ + def start_tree(self, tree: Node, filename: StrPath) -> None: """Some fixers need to maintain tree-wide state. This method is called once, at the start of tree fix-up. @@ -102,6 +110,7 @@ class BaseFix: tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ + def finish_tree(self, tree: Node, filename: StrPath) -> None: """Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. @@ -111,8 +120,8 @@ class BaseFix: """ class ConditionalFix(BaseFix, metaclass=ABCMeta): - """ Base class for fixers which not execute if an import is found. -""" + """Base class for fixers which not execute if an import is found.""" + skip_on: ClassVar[str | None] def start_tree(self, tree: Node, filename: StrPath, /) -> None: ... def should_skip(self, node: Base) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi index 9124c31fdbacd..2ce199a27a5e8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi @@ -2,6 +2,7 @@ This converts apply(func, v, k) into (func)(*v, **k). """ + from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi index 18fd9de57eb24..ce79d93f4ecb9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -1,5 +1,5 @@ -"""Fixer that replaces deprecated unittest method names. -""" +"""Fixer that replaces deprecated unittest method names.""" + from typing import ClassVar, Final, Literal from ..fixer_base import BaseFix diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi index 41be9dc244ded..49c66c877dff8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -1,5 +1,5 @@ -"""Fixer for basestring -> str. -""" +"""Fixer for basestring -> str.""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi index dfc572b3ce6e0..bc798b5ac7313 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -1,5 +1,5 @@ -"""Fixer that changes buffer(...) into memoryview(...). -""" +"""Fixer that changes buffer(...) into memoryview(...).""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi index d5aac66aca903..3cc91c55696a4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi @@ -23,6 +23,7 @@ original d.iterkeys() was also redundant we don't fix this. And there are (rare) contexts where it makes a difference (e.g. when passing it as an argument to a function that introspects the argument). """ + from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi index c5f07a3ac742f..5d12137d55b4d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_except.pyi @@ -19,6 +19,7 @@ The following cases will be converted: except E as t: T = t.args """ + from collections.abc import Generator, Iterable from typing import ClassVar, Literal, TypeVar diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi index 04a663e397eca..b8a77be440265 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi @@ -5,6 +5,7 @@ exec() function. exec code in ns1, ns2 -> exec(code, ns1, ns2) """ + from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi index e4b82b3e44ffe..9d77b2d8db601 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -3,6 +3,7 @@ This converts usages of the execfile function into calls to the built-in exec() function. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi index b49c19caa2cb8..b0cd78d8fef06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -1,6 +1,7 @@ """ Convert use of sys.exitfunc to use the atexit module. """ + from _typeshed import Incomplete, StrPath from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi index d621ace99f3cc..ef5907daacb52 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi @@ -9,6 +9,7 @@ filter(F, X) to return a string if X is a string and a tuple if X is a tuple. That would require type inference, which we don't do. Let Python 2.6 figure it out. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi index 61883779c514a..f9d236bd0f365 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -1,5 +1,5 @@ -"""Fix function attribute names (f.func_x -> f.__x__). -""" +"""Fix function attribute names (f.func_x -> f.__x__).""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi index aa0c4ae23925f..bf23287d81027 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_future.pyi @@ -2,6 +2,7 @@ from __future__ import foo is replaced with an empty line. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi index d55be458e39d2..fb20d477180ef 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -1,6 +1,7 @@ """ Fixer that changes os.getcwdu() to os.getcwd(). """ + from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi index 65a8d792c5213..1323b5209ddd6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -25,6 +25,7 @@ CAVEATS: this is currently not done. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi index 8705524ac96b8..a654a002d46bc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -25,6 +25,7 @@ into v = sorted(EXPR) foo(v) """ + from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi index 9133c010df63f..87577fd281fda 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_import.pyi @@ -9,6 +9,7 @@ And this import: Becomes: from . import spam """ + from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi index 1b189d8afe03b..4aa8734f15a57 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi @@ -1,5 +1,5 @@ -"""Fix incompatible imports and module references. -""" +"""Fix incompatible imports and module references.""" + from _typeshed import StrPath from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi index e5d66c4e24a8f..dba1376beda7c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -1,6 +1,7 @@ """Fix incompatible imports and module references that must be fixed after fix_imports. """ + from typing import Final from . import fix_imports diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi index 46ef83f93265c..1105e93218103 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_input.pyi @@ -1,5 +1,5 @@ -"""Fixer that changes input(...) into eval(input(...)). -""" +"""Fixer that changes input(...) 
into eval(input(...)).""" + from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi index 1776f31b6a528..4808fe189d1b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi @@ -2,6 +2,7 @@ intern(s) -> sys.intern(s) """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi index ba1e08af281a5..7d236eef8de49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -5,6 +5,7 @@ tokens as a leftover of the long -> int / unicode -> str conversion. eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) -> isinstance(x, int) """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi index 1cf2f5627184e..2bba3174b7b50 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -1,11 +1,12 @@ -""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and - itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) +"""Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and +itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) - imports from itertools are fixed in fix_itertools_import.py +imports from itertools are fixed in fix_itertools_import.py + +If itertools is imported as something else (ie: import itertools as it; +it.izip(spam, eggs)) method calls will not get fixed. +""" - If itertools is imported as something else (ie: import itertools as it; - it.izip(spam, eggs)) method calls will not get fixed. - """ from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi index 8991a1454bf2c..554f71d74f00c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -1,5 +1,5 @@ -""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) -""" +"""Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse)""" + from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi index 6e880f043f1e7..5b4e0002ec8c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_long.pyi @@ -1,5 +1,5 @@ -"""Fixer that turns 'long' into 'int' everywhere. 
-""" +"""Fixer that turns 'long' into 'int' everywhere.""" + from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi index d6625ce9d357d..866c5dc8b3ded 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_map.pyi @@ -15,6 +15,7 @@ map(F, X, Y, ...) to go on until the longest argument is exhausted, substituting None for missing values -- like zip(), it now stops as soon as the shortest argument is exhausted. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi index e48cd179e941d..70ced90db3798 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -1,20 +1,21 @@ """Fixer for __metaclass__ = X -> (metaclass=X) methods. - The various forms of classef (inherits nothing, inherits once, inherits - many) don't parse the same in the CST so we look at ALL classes for - a __metaclass__ and if we find one normalize the inherits to all be - an arglist. +The various forms of classef (inherits nothing, inherits once, inherits +many) don't parse the same in the CST so we look at ALL classes for +a __metaclass__ and if we find one normalize the inherits to all be +an arglist. - For one-liner classes ('class X: pass') there is no indent/dedent so - we normalize those into having a suite. +For one-liner classes ('class X: pass') there is no indent/dedent so +we normalize those into having a suite. - Moving the __metaclass__ into the classdef can also cause the class - body to be empty so there is some special casing for that as well. +Moving the __metaclass__ into the classdef can also cause the class +body to be empty so there is some special casing for that as well. - This fixer also tries very hard to keep original indenting and spacing - in all those corner cases. +This fixer also tries very hard to keep original indenting and spacing +in all those corner cases. """ + from collections.abc import Generator from typing import ClassVar, Literal @@ -22,25 +23,28 @@ from .. import fixer_base from ..pytree import Base def has_metaclass(parent): - """ we have to check the cls_node without changing it. - There are two possibilities: - 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') - 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """we have to check the cls_node without changing it. + There are two possibilities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') """ + def fixup_parse_tree(cls_node) -> None: - """ one-line classes don't get a suite in the parse tree so we add - one to normalize the tree + """one-line classes don't get a suite in the parse tree so we add + one to normalize the tree """ + def fixup_simple_stmt(parent, i, stmt_node) -> None: - """ if there is a semi-colon all the parts count as part of the same - simple_stmt. We just want the __metaclass__ part so we move - everything after the semi-colon into its own simple_stmt node + """if there is a semi-colon all the parts count as part of the same + simple_stmt. 
We just want the __metaclass__ part so we move + everything after the semi-colon into its own simple_stmt node """ + def remove_trailing_newline(node) -> None: ... def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... def fixup_indent(suite) -> None: - """ If an INDENT is followed by a thing with a prefix then nuke the prefix - Otherwise we get in trouble when removing __metaclass__ at suite start + """If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start """ class FixMetaclass(fixer_base.BaseFix): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi index 640a1cc38ced3..b886514c168e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -1,5 +1,5 @@ -"""Fix bound method attributes (method.im_? -> method.__?__). -""" +"""Fix bound method attributes (method.im_? -> method.__?__).""" + from typing import ClassVar, Final, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi index 2a93bf7d29f15..0e851740281ef 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi @@ -1,5 +1,5 @@ -"""Fixer that turns <> into !=. -""" +"""Fixer that turns <> into !=.""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi index c5b23da8913d5..87a4d61e79ebf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_next.pyi @@ -1,5 +1,5 @@ -"""Fixer for it.next() -> next(it), per PEP 3114. -""" +"""Fixer for it.next() -> next(it), per PEP 3114.""" + from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi index fcce615738ad1..60bd7dffbaf19 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -1,5 +1,5 @@ -"""Fixer for __nonzero__ -> __bool__ methods. -""" +"""Fixer for __nonzero__ -> __bool__ methods.""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi index a96101bc9d8dc..71dc9016199bd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -1,5 +1,5 @@ -"""Fixer that turns 1L into 1, 0755 into 0o755. -""" +"""Fixer that turns 1L into 1, 0755 into 0o755.""" + from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi index f97d15ee3d7e7..08750a96891a2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi @@ -8,6 +8,7 @@ operator.isNumberType(obj) -> isinstance(obj, numbers.Number) operator.repeat(obj, n) -> operator.mul(obj, n) operator.irepeat(obj, n) -> operator.imul(obj, n) """ + from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi index ee44f1c79aee4..ba7f01a849ccd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi @@ -2,6 +2,7 @@ This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi index 597172431ccb0..1d6a8c661a20e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_print.pyi @@ -9,6 +9,7 @@ Change: No changes are applied if print_function is imported from __future__ """ + from _typeshed import Incomplete from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi index 0867c8497e22c..d5fa6461e6cc8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi @@ -20,6 +20,7 @@ CAVEATS: any client code would have to be changed as well, we don't automate this. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi index 567d3f46b81d7..717649bf41e55 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -1,5 +1,5 @@ -"""Fixer that changes raw_input(...) into input(...). -""" +"""Fixer that changes raw_input(...) into input(...).""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi index 2205476c64c7e..24a992749ec95 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -3,6 +3,7 @@ Makes sure reduce() is imported from the functools module if reduce is used in that module. 
""" + from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi index e2bbb8a3da65e..23f98533310b1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi @@ -2,6 +2,7 @@ reload(s) -> importlib.reload(s) """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi index e675428ccaf55..ff1d30d77b589 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi @@ -3,6 +3,7 @@ Fixes: * sys.maxint -> sys.maxsize """ + from collections.abc import Generator from typing import ClassVar, Final, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi index 4f4d0439a0372..84569e39856b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi @@ -1,5 +1,5 @@ -"""Fixer that transforms `xyzzy` into repr(xyzzy). -""" +"""Fixer that transforms `xyzzy` into repr(xyzzy).""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi index c06fbc683f836..eb550907dee2d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -1,6 +1,7 @@ """ Optional fixer to transform set() calls to set literals. """ + from lib2to3 import fixer_base from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi index f2842de99afa8..adb75ffcc3e4a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -1,5 +1,5 @@ -"""Fixer for StandardError -> Exception. -""" +"""Fixer for StandardError -> Exception.""" + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi index f09393c84f46b..7797fbae94bcd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -4,6 +4,7 @@ sys.exc_type -> sys.exc_info()[0] sys.exc_value -> sys.exc_info()[1] sys.exc_traceback -> sys.exc_info()[2] """ + from typing import ClassVar, Literal from .. 
import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi index a1f1e6aaa6057..63291df410cd4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi @@ -6,6 +6,7 @@ g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) g.throw("foo"[, V[, T]]) will warn about string exceptions. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi index e98aa87078d2a..263a340f477fd 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -16,6 +16,7 @@ It will also support lambdas: # The parens are a syntax error in Python 3 lambda (x): x + y -> lambda x: x + y """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi index 9d8f687cae2a9..ea9f88f8fed62 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_types.pyi @@ -15,6 +15,7 @@ There should be another fixer that handles at least the following constants: type('') -> str """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi index d4a63f023455a..2a4d1e9ced15d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -7,6 +7,7 @@ * Change u"..." into "...". """ + from _typeshed import StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi index f1dd7cc4a99ba..214350c28e523 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -1,7 +1,8 @@ """Fix changes imports of urllib which are now incompatible. - This is rather similar to fix_imports, but because of the more - complex nature of the fixing for urllib, it has its own fixer. +This is rather similar to fix_imports, but because of the more +complex nature of the fixing for urllib, it has its own fixer. """ + from collections.abc import Generator from typing import Final, Literal @@ -15,15 +16,17 @@ class FixUrllib(FixImports): def build_pattern(self): ... def transform_import(self, node, results) -> None: """Transform for the basic import case. Replaces the old - import name with a comma separated list of its - replacements. + import name with a comma separated list of its + replacements. """ + def transform_member(self, node, results): """Transform for imports of specific module elements. Replaces - the module to be imported from with the appropriate new - module. + the module to be imported from with the appropriate new + module. 
""" + def transform_dot(self, node, results) -> None: - """Transform for calls to module members in code. -""" + """Transform for calls to module members in code.""" + def transform(self, node, results) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi index be919631b00de..43db27641fa01 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -4,6 +4,7 @@ This also changes '{a :b}' into '{a: b}', but does not touch other uses of colons. It does not touch other uses of whitespace. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi index 04be7d3590ec3..aa87720f33b63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -1,5 +1,5 @@ -"""Fixer that changes xrange(...) into range(...). -""" +"""Fixer that changes xrange(...) into range(...).""" + from _typeshed import Incomplete, StrPath from typing import ClassVar, Literal diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi index 87505bec7d7d6..d48c2ef460e82 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -2,6 +2,7 @@ This fixer will also convert g(f.xreadlines) into g(f.__iter__). """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi index 82f3dc0c02bee..06a30dd1be292 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi @@ -6,6 +6,7 @@ top-level namespace. We avoid the transformation if the zip() call is directly contained in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. """ + from typing import ClassVar, Literal from .. import fixer_base diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi index 4f3b31db89210..1a176b656c3d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/main.pyi @@ -1,6 +1,7 @@ """ Main program for 2to3. """ + from _typeshed import FileDescriptorOrPath from collections.abc import Container, Iterable, Iterator, Mapping, Sequence from logging import _ExcInfoType @@ -9,8 +10,7 @@ from typing import AnyStr, Literal from . import refactor as refactor def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: - """Return a unified diff of two strings. -""" + """Return a unified diff of two strings.""" class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): """ @@ -21,6 +21,7 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): have an extra file suffix appended to their name for use in situations where you do not want to replace the input files. 
""" + nobackups: bool show_diffs: bool def __init__( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi index 18e1656194b96..8a246f1044397 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -1,5 +1,5 @@ -"""The pgen2 package. -""" +"""The pgen2 package.""" + from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi index 426cd416fa705..957375a0bc620 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -3,6 +3,7 @@ This provides a high-level interface to parse a file into a syntax tree. """ + from _typeshed import StrPath from collections.abc import Iterable from logging import Logger @@ -19,26 +20,22 @@ class Driver: logger: Logger convert: _Convert def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... - def parse_tokens( - self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False - ) -> _NL: - """Parse a series of tokens and return the syntax tree. -""" + def parse_tokens(self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False) -> _NL: + """Parse a series of tokens and return the syntax tree.""" + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree. -""" + """Parse a stream and return the syntax tree.""" + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: - """Parse a stream and return the syntax tree. -""" + """Parse a stream and return the syntax tree.""" + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: - """Parse a file and return the syntax tree. -""" + """Parse a file and return the syntax tree.""" + def parse_string(self, text: str, debug: bool = False) -> _NL: - """Parse a string and return the syntax tree. -""" + """Parse a string and return the syntax tree.""" def load_grammar( gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None ) -> Grammar: - """Load the grammar (maybe from a pickle). -""" + """Load the grammar (maybe from a pickle).""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi index 33a215e8e43a9..abfd18b546326 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -8,6 +8,7 @@ token module; the Python tokenize module reports all operators as the fallback token code OP, but the parser needs the actual token code. """ + from _typeshed import StrPath from typing_extensions import Self, TypeAlias @@ -68,6 +69,7 @@ class Grammar: tokens -- a dict mapping token numbers to arc labels. """ + symbol2number: dict[str, int] number2symbol: dict[int, str] states: list[_DFA] @@ -78,18 +80,18 @@ class Grammar: symbol2label: dict[str, int] start: int def dump(self, filename: StrPath) -> None: - """Dump the grammar tables to a pickle file. 
-""" + """Dump the grammar tables to a pickle file.""" + def load(self, filename: StrPath) -> None: - """Load the grammar tables from a pickle file. -""" + """Load the grammar tables from a pickle file.""" + def copy(self) -> Self: """ Copy the grammar. """ + def report(self) -> None: - """Dump the grammar tables to standard output, for debugging. -""" + """Dump the grammar tables to standard output, for debugging.""" opmap_raw: str opmap: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi index 1d0be4b33d493..9b991d20e4fd1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/literals.pyi @@ -1,5 +1,5 @@ -"""Safely evaluate Python string literals without using eval(). -""" +"""Safely evaluate Python string literals without using eval().""" + from re import Match simple_escapes: dict[str, str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi index 494e0996c09c5..a1841da5f6ea1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -6,6 +6,7 @@ See Parser/parser.c in the Python distribution for additional info on how this parsing engine works. """ + from _typeshed import Incomplete from collections.abc import Sequence from typing_extensions import TypeAlias @@ -17,8 +18,8 @@ from .grammar import _DFAS, Grammar _Context: TypeAlias = Sequence[Incomplete] class ParseError(Exception): - """Exception to signal the parser is stuck. -""" + """Exception to signal the parser is stuck.""" + msg: str type: int value: str | None @@ -54,6 +55,7 @@ class Parser: reinitialized by calling setup()). """ + grammar: Grammar convert: _Convert stack: list[tuple[_DFAS, int, _RawNode]] @@ -88,6 +90,7 @@ class Parser: up to the converter function. """ + def setup(self, start: int | None = None) -> None: """Prepare for parsing. @@ -101,18 +104,18 @@ class Parser: state determined by the (implicit or explicit) start symbol. """ + def addtoken(self, type: int, value: str | None, context: _Context) -> bool: - """Add a token; return True iff this is the end of the program. -""" + """Add a token; return True iff this is the end of the program.""" + def classify(self, type: int, value: str | None, context: _Context) -> int: - """Turn a token into a label. (Internal) -""" + """Turn a token into a label. (Internal)""" + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: - """Shift a token. (Internal) -""" + """Shift a token. (Internal)""" + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: - """Push a nonterminal. (Internal) -""" + """Push a nonterminal. (Internal)""" + def pop(self) -> None: - """Pop a nonterminal. (Internal) -""" + """Pop a nonterminal. (Internal)""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi index bf8c78a97760d..e465beca208e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/token.pyi @@ -1,5 +1,5 @@ -"""Token constants (from "token.h"). 
-""" +"""Token constants (from "token.h").""" + from typing import Final ENDMARKER: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index a46433b312451..ec35feb9ad494 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -22,6 +22,7 @@ are the same, except instead of generating tokens, tokeneater is a callback function to which the 5 fields described above are passed as 5 arguments, each time a new token is found. """ + from collections.abc import Callable, Iterable, Iterator from typing_extensions import TypeAlias @@ -146,6 +147,7 @@ def untokenize(iterable: Iterable[_TokenInfo]) -> str: t2 = [tok[:2] for tokin generate_tokens(readline)] assert t1 == t2 """ + def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: """ The generate_tokens() generator requires one argument, readline, which diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi index 9add5c89d95fe..1f43c10e7bcf4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,5 +1,5 @@ -"""Export the Python grammar and symbols. -""" +"""Export the Python grammar and symbols.""" + from .pgen2.grammar import Grammar class Symbols: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi index 18b945242f0be..1c2a2b4d97c1d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/pytree.pyi @@ -6,6 +6,7 @@ even the comments and whitespace between tokens. There's also a pattern matching implementation here. """ + from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused from abc import abstractmethod from collections.abc import Iterable, Iterator, MutableSequence @@ -33,6 +34,7 @@ class Base: A node may be a subnode of at most one parent. """ + type: int parent: Node | None prefix: str @@ -56,6 +58,7 @@ class Base: Nodes should be considered equal if they have the same structure, ignoring the prefix string and other context information. """ + @abstractmethod def clone(self) -> Self: """ @@ -63,6 +66,7 @@ class Base: This must be implemented by the concrete subclass. """ + @abstractmethod def post_order(self) -> Iterator[Self]: """ @@ -70,6 +74,7 @@ class Base: This must be implemented by the concrete subclass. """ + @abstractmethod def pre_order(self) -> Iterator[Self]: """ @@ -77,30 +82,34 @@ class Base: This must be implemented by the concrete subclass. """ + def replace(self, new: _NL | list[_NL]) -> None: - """Replace this node with a new one in the parent. -""" + """Replace this node with a new one in the parent.""" + def get_lineno(self) -> int: - """Return the line number which generated the invocant node. -""" + """Return the line number which generated the invocant node.""" + def changed(self) -> None: ... def remove(self) -> int | None: """ Remove the node from the tree. Returns the position of the node in its parent's children before it was removed. """ + @property def next_sibling(self) -> _NL | None: """ The node immediately following the invocant in their parent's children list. 
If the invocant does not have a next sibling, it is None """ + @property def prev_sibling(self) -> _NL | None: """ The node immediately preceding the invocant in their parent's children list. If the invocant does not have a previous sibling, it is None. """ + def leaves(self) -> Iterator[Leaf]: ... def depth(self) -> int: ... def get_suffix(self) -> str: @@ -110,8 +119,8 @@ class Base: """ class Node(Base): - """Concrete implementation for interior nodes. -""" + """Concrete implementation for interior nodes.""" + fixers_applied: MutableSequence[BaseFix] | None # Is Unbound until set in refactor.RefactoringTool future_features: frozenset[Incomplete] @@ -133,33 +142,37 @@ class Node(Base): As a side effect, the parent pointers of the children are updated. """ + def _eq(self, other: Base) -> bool: - """Compare two nodes for equality. -""" + """Compare two nodes for equality.""" + def clone(self) -> Node: - """Return a cloned (deep) copy of self. -""" + """Return a cloned (deep) copy of self.""" + def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree. -""" + """Return a post-order iterator for the tree.""" + def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree. -""" + """Return a pre-order iterator for the tree.""" + def set_child(self, i: int, child: _NL) -> None: """ Equivalent to 'node.children[i] = child'. This method also sets the child's parent attribute appropriately. """ + def insert_child(self, i: int, child: _NL) -> None: """ Equivalent to 'node.children.insert(i, child)'. This method also sets the child's parent attribute appropriately. """ + def append_child(self, child: _NL) -> None: """ Equivalent to 'node.children.append(child)'. This method also sets the child's parent attribute appropriately. """ + def __unicode__(self) -> str: """ Return a pretty string representation. @@ -168,8 +181,8 @@ class Node(Base): """ class Leaf(Base): - """Concrete implementation for leaf nodes. -""" + """Concrete implementation for leaf nodes.""" + lineno: int column: int value: str @@ -188,18 +201,19 @@ class Leaf(Base): Takes a type constant (a token number < 256), a string value, and an optional context keyword argument. """ + def _eq(self, other: Base) -> bool: - """Compare two nodes for equality. -""" + """Compare two nodes for equality.""" + def clone(self) -> Leaf: - """Return a cloned (deep) copy of self. -""" + """Return a cloned (deep) copy of self.""" + def post_order(self) -> Iterator[Self]: - """Return a post-order iterator for the tree. -""" + """Return a post-order iterator for the tree.""" + def pre_order(self) -> Iterator[Self]: - """Return a pre-order iterator for the tree. -""" + """Return a pre-order iterator for the tree.""" + def __unicode__(self) -> str: """ Return a pretty string representation. @@ -230,6 +244,7 @@ class BasePattern: - NodePattern matches a single node (usually non-leaf); - WildcardPattern matches a sequence of nodes of variable length. """ + type: int content: str | None name: str | None @@ -239,6 +254,7 @@ class BasePattern: Returns either self or another node with the same effect. """ + def match(self, node: _NL, results: _Results | None = None) -> bool: """ Does this pattern exactly match a node? @@ -250,12 +266,14 @@ class BasePattern: Default implementation for non-wildcard patterns. """ + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: """ Does this pattern exactly match a sequence of nodes? 
Default implementation for non-wildcard patterns. """ + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: """ Generator yielding all matches for this pattern. @@ -308,6 +326,7 @@ class WildcardPattern(BasePattern): except it always uses non-greedy matching. """ + min: int max: int def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: @@ -359,4 +378,4 @@ def generate_matches( (count, results) tuples where: count: the entire sequence of patterns matches nodes[:count]; results: dict containing named submatches. - """ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi index 0d53fe6534513..cd788ee2dc624 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lib2to3/refactor.pyi @@ -4,6 +4,7 @@ Used as a main program, this can refactor any number of files and/or recursively descend down directories. Imported as a module, this provides infrastructure to write your own refactoring tool. """ + from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem from collections.abc import Container, Generator, Iterable, Mapping from logging import Logger, _ExcInfoType @@ -18,16 +19,15 @@ from .pgen2.grammar import Grammar from .pytree import Node def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: - """Return a sorted list of all available fix names in the given package. -""" + """Return a sorted list of all available fix names in the given package.""" + def get_fixers_from_package(pkg_name: str) -> list[str]: """ Return the fully qualified names for fixers in the package pkg_name. """ class FixerError(Exception): - """A fixer could not be loaded. -""" + """A fixer could not be loaded.""" class RefactoringTool: CLASS_PREFIX: ClassVar[str] @@ -58,6 +58,7 @@ class RefactoringTool: options: a dict with configuration. explicit: a list of fixers to run even if they are explicit. """ + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: """Inspects the options to load the requested patterns and handlers. @@ -66,13 +67,14 @@ class RefactoringTool: want a pre-order AST traversal, and post_order is the list that want post-order traversal. """ + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: - """Called when an error occurs. -""" + """Called when an error occurs.""" + @overload def log_message(self, msg: object) -> None: - """Hook to log a message. -""" + """Hook to log a message.""" + @overload def log_message(self, msg: str, *args: object) -> None: ... @overload @@ -82,10 +84,11 @@ class RefactoringTool: def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: """Called with the old version, new version, and filename of a refactored file. -""" + """ + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: - """Refactor a list of files and directories. -""" + """Refactor a list of files and directories.""" + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: """Descends down a directory and refactor every Python file found. @@ -93,13 +96,15 @@ class RefactoringTool: Files and subdirectories starting with '.' are skipped. 
""" + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: """ Do our best to decode a Python source file correctly. """ + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: - """Refactors a file. -""" + """Refactors a file.""" + def refactor_string(self, data: str, name: str) -> Node | None: """Refactor a given input string. @@ -111,6 +116,7 @@ class RefactoringTool: An AST corresponding to the refactored input stream; None if there were errors during the parse. """ + def refactor_stdin(self, doctests_only: bool = False) -> None: ... def refactor_tree(self, tree: Node, name: str) -> bool: """Refactors a parse tree (modifying the tree in place). @@ -127,6 +133,7 @@ class RefactoringTool: Returns: True if the tree was modified, False otherwise. """ + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: """Traverse an AST, applying a set of fixers to each node. @@ -139,12 +146,14 @@ class RefactoringTool: Returns: None """ + def processed_file( self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None ) -> None: """ Called when a file has been refactored and there may be changes. """ + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: """Writes a string to a file. @@ -166,6 +175,7 @@ class RefactoringTool: since, like most parsers, it is not geared towards preserving the original source.) """ + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: """Refactors one doctest. @@ -174,6 +184,7 @@ class RefactoringTool: with "..." (identically indented). """ + def summarize(self) -> None: ... def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: """Parses a block into a tree. @@ -181,11 +192,12 @@ class RefactoringTool: This is necessary to get correct line number / offset information in the parser diagnostics and embedded into the parse tree. """ + def wrap_toks( self, block: Iterable[str], lineno: int, indent: int ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: - """Wraps a tokenize stream to systematically modify start/end. -""" + """Wraps a tokenize stream to systematically modify start/end.""" + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: """Generates lines as expected by tokenize from a list of lines. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi index aca0d8d35698f..368ce1b044990 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/linecache.pyi @@ -4,6 +4,7 @@ This is intended to read lines from modules imported -- hence if a filename is not found, it will look down the module search path for a file by that name. """ + from collections.abc import Callable from typing import Any from typing_extensions import TypeAlias @@ -19,34 +20,38 @@ cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: """Get a line for a Python source file from the cache. -Update the cache if it doesn't contain an entry for this file already. -""" + Update the cache if it doesn't contain an entry for this file already. 
+ """ + def clearcache() -> None: - """Clear the cache entirely. -""" + """Clear the cache entirely.""" + def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: """Get the lines for a Python source file from the cache. -Update the cache if it doesn't contain an entry for this file already. -""" + Update the cache if it doesn't contain an entry for this file already. + """ + def checkcache(filename: str | None = None) -> None: """Discard cache entries that are out of date. -(This is not checked upon each call!) -""" + (This is not checked upon each call!) + """ + def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: """Update a cache entry and return its list of lines. -If something's wrong, print a message, discard the cache entry, -and return an empty list. -""" + If something's wrong, print a message, discard the cache entry, + and return an empty list. + """ + def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: """Seed the cache for filename with module_globals. -The module loader will be asked for the source only when getlines is -called, not immediately. + The module loader will be asked for the source only when getlines is + called, not immediately. -If there is an entry in the cache already, it is not altered. + If there is an entry in the cache already, it is not altered. -:return: True if a lazy load is registered in the cache, - otherwise False. To register such a load a module loader with a - get_source method must be found, the filename must be a cacheable - filename, and the filename must not be already cached. -""" + :return: True if a lazy load is registered in the cache, + otherwise False. To register such a load a module loader with a + get_source method must be found, the filename must be a cacheable + filename, and the filename must not be already cached. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi index bc61693520760..ed836048f435e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/locale.pyi @@ -9,6 +9,7 @@ maps them to values suitable for passing to the C lib's setlocale() function. It also includes default encodings for all supported locale names. """ + import sys from _locale import ( CHAR_MAX as CHAR_MAX, @@ -140,124 +141,124 @@ if sys.platform != "win32": class Error(Exception): ... -def getdefaultlocale( - envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") -) -> tuple[_str | None, _str | None]: +def getdefaultlocale(envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE")) -> tuple[_str | None, _str | None]: """Tries to determine the default locale settings and returns -them as tuple (language code, encoding). + them as tuple (language code, encoding). -According to POSIX, a program which has not called -setlocale(LC_ALL, "") runs using the portable 'C' locale. -Calling setlocale(LC_ALL, "") lets it use the default locale as -defined by the LANG variable. Since we don't want to interfere -with the current locale setting we thus emulate the behavior -in the way described above. + According to POSIX, a program which has not called + setlocale(LC_ALL, "") runs using the portable 'C' locale. + Calling setlocale(LC_ALL, "") lets it use the default locale as + defined by the LANG variable. 
Since we don't want to interfere + with the current locale setting we thus emulate the behavior + in the way described above. -To maintain compatibility with other platforms, not only the -LANG variable is tested, but a list of variables given as -envvars parameter. The first found to be defined will be -used. envvars defaults to the search path used in GNU gettext; -it must always contain the variable name 'LANG'. + To maintain compatibility with other platforms, not only the + LANG variable is tested, but a list of variables given as + envvars parameter. The first found to be defined will be + used. envvars defaults to the search path used in GNU gettext; + it must always contain the variable name 'LANG'. -Except for the code 'C', the language code corresponds to RFC -1766. code and encoding can be None in case the values cannot -be determined. + Except for the code 'C', the language code corresponds to RFC + 1766. code and encoding can be None in case the values cannot + be determined. + + """ -""" def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: """Returns the current setting for the given locale category as -tuple (language code, encoding). + tuple (language code, encoding). -category may be one of the LC_* value except LC_ALL. It -defaults to LC_CTYPE. + category may be one of the LC_* value except LC_ALL. It + defaults to LC_CTYPE. -Except for the code 'C', the language code corresponds to RFC -1766. code and encoding can be None in case the values cannot -be determined. + Except for the code 'C', the language code corresponds to RFC + 1766. code and encoding can be None in case the values cannot + be determined. + + """ -""" def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: """Set the locale for the given category. The locale can be -a string, an iterable of two strings (language code and encoding), -or None. + a string, an iterable of two strings (language code and encoding), + or None. -Iterables are converted to strings using the locale aliasing -engine. Locale strings are passed directly to the C lib. + Iterables are converted to strings using the locale aliasing + engine. Locale strings are passed directly to the C lib. -category may be given as one of the LC_* values. + category may be given as one of the LC_* values. + + """ -""" def getpreferredencoding(do_setlocale: bool = True) -> _str: """Return the charset that the user is likely using, -according to the system configuration. -""" + according to the system configuration. + """ + def normalize(localename: _str) -> _str: """Returns a normalized locale code for the given locale -name. + name. -The returned locale code is formatted for use with -setlocale(). + The returned locale code is formatted for use with + setlocale(). -If normalization fails, the original name is returned -unchanged. + If normalization fails, the original name is returned + unchanged. -If the given encoding is not known, the function defaults to -the default encoding for the locale code just like setlocale() -does. + If the given encoding is not known, the function defaults to + the default encoding for the locale code just like setlocale() + does. -""" + """ if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `locale.setlocale(locale.LC_ALL, '')` instead.") def resetlocale(category: int = ...) -> None: - """ Sets the locale for category to the default setting. 
+ """Sets the locale for category to the default setting. - The default setting is determined by calling - getdefaultlocale(). category defaults to LC_ALL. + The default setting is determined by calling + getdefaultlocale(). category defaults to LC_ALL. - """ + """ else: def resetlocale(category: int = ...) -> None: - """ Sets the locale for category to the default setting. + """Sets the locale for category to the default setting. - The default setting is determined by calling - getdefaultlocale(). category defaults to LC_ALL. + The default setting is determined by calling + getdefaultlocale(). category defaults to LC_ALL. - """ + """ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.7; removed in Python 3.12. Use `locale.format_string()` instead.") - def format( - percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any - ) -> _str: - """Deprecated, use format_string instead. -""" + def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: + """Deprecated, use format_string instead.""" def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: """Formats a string in the same way that the % formatting would use, -but takes the current locale into account. + but takes the current locale into account. + + Grouping is applied if the third parameter is true. + Conversion uses monetary thousands separator and grouping strings if + forth parameter monetary is true. + """ -Grouping is applied if the third parameter is true. -Conversion uses monetary thousands separator and grouping strings if -forth parameter monetary is true. -""" def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: """Formats val according to the currency settings -in the current locale. -""" + in the current locale. + """ + def delocalize(string: _str) -> _str: - """Parses a string as a normalized number according to the locale settings. -""" + """Parses a string as a normalized number according to the locale settings.""" + def atof(string: _str, func: Callable[[_str], float] = ...) -> float: - """Parses a string as a float according to the locale settings. -""" + """Parses a string as a float according to the locale settings.""" + def atoi(string: _str) -> int: - """Converts a string to an integer according to the locale settings. -""" + """Converts a string to an integer according to the locale settings.""" + def str(val: float) -> _str: - """Convert float to string, taking the locale into account. -""" + """Convert float to string, taking the locale into account.""" locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi index 7638e1c12ebd1..657c65a8b6934 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/__init__.pyi @@ -6,6 +6,7 @@ Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! """ + import sys import threading from _typeshed import StrPath, SupportsWrite @@ -100,63 +101,66 @@ _nameToLevel: dict[str, int] class Filterer: """ -A base class for loggers and handlers which allows them to share -common code. 
-""" + A base class for loggers and handlers which allows them to share + common code. + """ + filters: list[_FilterType] def addFilter(self, filter: _FilterType) -> None: """ -Add the specified filter to this handler. -""" + Add the specified filter to this handler. + """ + def removeFilter(self, filter: _FilterType) -> None: """ -Remove the specified filter from this handler. -""" + Remove the specified filter from this handler. + """ if sys.version_info >= (3, 12): def filter(self, record: LogRecord) -> bool | LogRecord: """ -Determine if a record is loggable by consulting all the filters. + Determine if a record is loggable by consulting all the filters. -The default is to allow the record to be logged; any filter can veto -this by returning a false value. -If a filter attached to a handler returns a log record instance, -then that instance is used in place of the original log record in -any further processing of the event by that handler. -If a filter returns any other true value, the original log record -is used in any further processing of the event by that handler. + The default is to allow the record to be logged; any filter can veto + this by returning a false value. + If a filter attached to a handler returns a log record instance, + then that instance is used in place of the original log record in + any further processing of the event by that handler. + If a filter returns any other true value, the original log record + is used in any further processing of the event by that handler. -If none of the filters return false values, this method returns -a log record. -If any of the filters return a false value, this method returns -a false value. + If none of the filters return false values, this method returns + a log record. + If any of the filters return a false value, this method returns + a false value. -.. versionchanged:: 3.2 + .. versionchanged:: 3.2 - Allow filters to be just callables. + Allow filters to be just callables. -.. versionchanged:: 3.12 - Allow filters to return a LogRecord instead of - modifying it in place. -""" + .. versionchanged:: 3.12 + Allow filters to return a LogRecord instead of + modifying it in place. + """ else: def filter(self, record: LogRecord) -> bool: """ - Determine if a record is loggable by consulting all the filters. + Determine if a record is loggable by consulting all the filters. - The default is to allow the record to be logged; any filter can veto - this and the record is then dropped. Returns a zero value if a record - is to be dropped, else non-zero. + The default is to allow the record to be logged; any filter can veto + this and the record is then dropped. Returns a zero value if a record + is to be dropped, else non-zero. - .. versionchanged:: 3.2 + .. versionchanged:: 3.2 - Allow filters to be just callables. - """ + Allow filters to be just callables. + """ class Manager: # undocumented """ -There is [under normal circumstances] just one Manager instance, which -holds the hierarchy of loggers. -""" + There is [under normal circumstances] just one Manager instance, which + holds the hierarchy of loggers. + """ + root: RootLogger disable: int emittedNoHandlerWarning: bool @@ -165,44 +169,48 @@ holds the hierarchy of loggers. logRecordFactory: Callable[..., LogRecord] | None def __init__(self, rootnode: RootLogger) -> None: """ -Initialize the manager with the root node of the logger hierarchy. -""" + Initialize the manager with the root node of the logger hierarchy. 
+ """ + def getLogger(self, name: str) -> Logger: """ -Get a logger with the specified name (channel name), creating it -if it doesn't yet exist. This name is a dot-separated hierarchical -name, such as "a", "a.b", "a.b.c" or similar. + Get a logger with the specified name (channel name), creating it + if it doesn't yet exist. This name is a dot-separated hierarchical + name, such as "a", "a.b", "a.b.c" or similar. + + If a PlaceHolder existed for the specified name [i.e. the logger + didn't exist but a child of it did], replace it with the created + logger and fix up the parent/child references which pointed to the + placeholder to now point to the logger. + """ -If a PlaceHolder existed for the specified name [i.e. the logger -didn't exist but a child of it did], replace it with the created -logger and fix up the parent/child references which pointed to the -placeholder to now point to the logger. -""" def setLoggerClass(self, klass: type[Logger]) -> None: """ -Set the class to be used when instantiating a logger with this Manager. -""" + Set the class to be used when instantiating a logger with this Manager. + """ + def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: """ -Set the factory to be used when instantiating a log record with this -Manager. -""" + Set the factory to be used when instantiating a log record with this + Manager. + """ class Logger(Filterer): """ -Instances of the Logger class represent a single logging channel. A -"logging channel" indicates an area of an application. Exactly how an -"area" is defined is up to the application developer. Since an -application can have any number of areas, logging channels are identified -by a unique string. Application areas can be nested (e.g. an area -of "input processing" might include sub-areas "read CSV files", "read -XLS files" and "read Gnumeric files"). To cater for this natural nesting, -channel names are organized into a namespace hierarchy where levels are -separated by periods, much like the Java or Python package namespace. So -in the instance given above, channel names might be "input" for the upper -level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels. -There is no arbitrary limit to the depth of nesting. -""" + Instances of the Logger class represent a single logging channel. A + "logging channel" indicates an area of an application. Exactly how an + "area" is defined is up to the application developer. Since an + application can have any number of areas, logging channels are identified + by a unique string. Application areas can be nested (e.g. an area + of "input processing" might include sub-areas "read CSV files", "read + XLS files" and "read Gnumeric files"). To cater for this natural nesting, + channel names are organized into a namespace hierarchy where levels are + separated by periods, much like the Java or Python package namespace. So + in the instance given above, channel names might be "input" for the upper + level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels. + There is no arbitrary limit to the depth of nesting. + """ + name: str # undocumented level: int # undocumented parent: Logger | None # undocumented @@ -213,38 +221,42 @@ There is no arbitrary limit to the depth of nesting. manager: Manager # undocumented def __init__(self, name: str, level: _Level = 0) -> None: """ -Initialize the logger with a name and an optional level. -""" + Initialize the logger with a name and an optional level. 
+ """ + def setLevel(self, level: _Level) -> None: """ -Set the logging level of this logger. level must be an int or a str. -""" + Set the logging level of this logger. level must be an int or a str. + """ + def isEnabledFor(self, level: int) -> bool: """ -Is this logger enabled for level 'level'? -""" + Is this logger enabled for level 'level'? + """ + def getEffectiveLevel(self) -> int: """ -Get the effective level for this logger. + Get the effective level for this logger. + + Loop through this logger and its parents in the logger hierarchy, + looking for a non-zero logging level. Return the first one found. + """ -Loop through this logger and its parents in the logger hierarchy, -looking for a non-zero logging level. Return the first one found. -""" def getChild(self, suffix: str) -> Self: # see python/typing#980 """ -Get a logger which is a descendant to this one. + Get a logger which is a descendant to this one. -This is a convenience method, such that + This is a convenience method, such that -logging.getLogger('abc').getChild('def.ghi') + logging.getLogger('abc').getChild('def.ghi') -is the same as + is the same as -logging.getLogger('abc.def.ghi') + logging.getLogger('abc.def.ghi') -It's useful, for example, when the parent logger is named using -__name__ rather than a literal string. -""" + It's useful, for example, when the parent logger is named using + __name__ rather than a literal string. + """ if sys.version_info >= (3, 12): def getChildren(self) -> set[Logger]: ... @@ -258,13 +270,14 @@ __name__ rather than a literal string. extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with severity 'DEBUG'. + Log 'msg % args' with severity 'DEBUG'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) + """ -logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) -""" def info( self, msg: object, @@ -275,13 +288,14 @@ logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with severity 'INFO'. + Log 'msg % args' with severity 'INFO'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.info("Houston, we have a %s", "notable problem", exc_info=True) + """ -logger.info("Houston, we have a %s", "notable problem", exc_info=True) -""" def warning( self, msg: object, @@ -292,13 +306,14 @@ logger.info("Houston, we have a %s", "notable problem", exc_info=True) extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with severity 'WARNING'. + Log 'msg % args' with severity 'WARNING'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) + """ -logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) -""" @deprecated("Deprecated since Python 3.3. 
Use `Logger.warning()` instead.") def warn( self, @@ -319,13 +334,14 @@ logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with severity 'ERROR'. + Log 'msg % args' with severity 'ERROR'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.error("Houston, we have a %s", "major problem", exc_info=True) + """ -logger.error("Houston, we have a %s", "major problem", exc_info=True) -""" def exception( self, msg: object, @@ -336,8 +352,9 @@ logger.error("Houston, we have a %s", "major problem", exc_info=True) extra: Mapping[str, object] | None = None, ) -> None: """ -Convenience method for logging an ERROR with exception information. -""" + Convenience method for logging an ERROR with exception information. + """ + def critical( self, msg: object, @@ -348,13 +365,14 @@ Convenience method for logging an ERROR with exception information. extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with severity 'CRITICAL'. + Log 'msg % args' with severity 'CRITICAL'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.critical("Houston, we have a %s", "major disaster", exc_info=True) + """ -logger.critical("Houston, we have a %s", "major disaster", exc_info=True) -""" def log( self, level: int, @@ -366,13 +384,14 @@ logger.critical("Houston, we have a %s", "major disaster", exc_info=True) extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with the integer severity 'level'. + Log 'msg % args' with the integer severity 'level'. -To pass exception information, use the keyword argument exc_info with -a true value, e.g. + To pass exception information, use the keyword argument exc_info with + a true value, e.g. + + logger.log(level, "We have a %s", "mysterious problem", exc_info=True) + """ -logger.log(level, "We have a %s", "mysterious problem", exc_info=True) -""" def _log( self, level: int, @@ -384,30 +403,34 @@ logger.log(level, "We have a %s", "mysterious problem", exc_info=True) stacklevel: int = 1, ) -> None: # undocumented """ -Low-level logging routine which creates a LogRecord and then calls -all the handlers of this logger to handle the record. -""" + Low-level logging routine which creates a LogRecord and then calls + all the handlers of this logger to handle the record. + """ fatal = critical def addHandler(self, hdlr: Handler) -> None: """ -Add the specified handler to this logger. -""" + Add the specified handler to this logger. + """ + def removeHandler(self, hdlr: Handler) -> None: """ -Remove the specified handler from this logger. -""" + Remove the specified handler from this logger. + """ + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: """ -Find the stack frame of the caller so that we can note the source -file name, line number and function name. -""" + Find the stack frame of the caller so that we can note the source + file name, line number and function name. + """ + def handle(self, record: LogRecord) -> None: """ -Call the handlers for the specified record. + Call the handlers for the specified record. 
+ + This method is used for unpickled records received from a socket, as + well as those created locally. Logger-level filtering is applied. + """ -This method is used for unpickled records received from a socket, as -well as those created locally. Logger-level filtering is applied. -""" def makeRecord( self, name: str, @@ -422,29 +445,31 @@ well as those created locally. Logger-level filtering is applied. sinfo: str | None = None, ) -> LogRecord: """ -A factory method which can be overridden in subclasses to create -specialized LogRecords. -""" + A factory method which can be overridden in subclasses to create + specialized LogRecords. + """ + def hasHandlers(self) -> bool: """ -See if this logger has any handlers configured. + See if this logger has any handlers configured. + + Loop through all handlers for this logger and its parents in the + logger hierarchy. Return True if a handler was found, else False. + Stop searching up the hierarchy whenever a logger with the "propagate" + attribute set to zero is found - that will be the last logger which + is checked for the existence of handlers. + """ -Loop through all handlers for this logger and its parents in the -logger hierarchy. Return True if a handler was found, else False. -Stop searching up the hierarchy whenever a logger with the "propagate" -attribute set to zero is found - that will be the last logger which -is checked for the existence of handlers. -""" def callHandlers(self, record: LogRecord) -> None: # undocumented """ -Pass a record to all relevant handlers. + Pass a record to all relevant handlers. -Loop through all handlers for this logger and its parents in the -logger hierarchy. If no handler was found, output a one-off error -message to sys.stderr. Stop searching up the hierarchy whenever a -logger with the "propagate" attribute set to zero is found - that -will be the last logger whose handlers are called. -""" + Loop through all handlers for this logger and its parents in the + logger hierarchy. If no handler was found, output a one-off error + message to sys.stderr. Stop searching up the hierarchy whenever a + logger with the "propagate" attribute set to zero is found - that + will be the last logger whose handlers are called. + """ CRITICAL: Final = 50 FATAL: Final = CRITICAL @@ -457,153 +482,167 @@ NOTSET: Final = 0 class Handler(Filterer): """ -Handler instances dispatch logging events to specific destinations. + Handler instances dispatch logging events to specific destinations. + + The base handler class. Acts as a placeholder which defines the Handler + interface. Handlers can optionally use Formatter instances to format + records as desired. By default, no formatter is specified; in this case, + the 'raw' message as determined by record.message is logged. + """ -The base handler class. Acts as a placeholder which defines the Handler -interface. Handlers can optionally use Formatter instances to format -records as desired. By default, no formatter is specified; in this case, -the 'raw' message as determined by record.message is logged. -""" level: int # undocumented formatter: Formatter | None # undocumented lock: threading.Lock | None # undocumented name: str | None # undocumented def __init__(self, level: _Level = 0) -> None: """ -Initializes the instance - basically setting the formatter to None -and the filter list to empty. -""" + Initializes the instance - basically setting the formatter to None + and the filter list to empty. + """ + def get_name(self) -> str: ... 
# undocumented def set_name(self, name: str) -> None: ... # undocumented def createLock(self) -> None: """ -Acquire a thread lock for serializing access to the underlying I/O. -""" + Acquire a thread lock for serializing access to the underlying I/O. + """ + def acquire(self) -> None: """ -Acquire the I/O thread lock. -""" + Acquire the I/O thread lock. + """ + def release(self) -> None: """ -Release the I/O thread lock. -""" + Release the I/O thread lock. + """ + def setLevel(self, level: _Level) -> None: """ -Set the logging level of this handler. level must be an int or a str. -""" + Set the logging level of this handler. level must be an int or a str. + """ + def setFormatter(self, fmt: Formatter | None) -> None: """ -Set the formatter for this handler. -""" + Set the formatter for this handler. + """ + def flush(self) -> None: """ -Ensure all logging output has been flushed. + Ensure all logging output has been flushed. + + This version does nothing and is intended to be implemented by + subclasses. + """ -This version does nothing and is intended to be implemented by -subclasses. -""" def close(self) -> None: """ -Tidy up any resources used by the handler. + Tidy up any resources used by the handler. + + This version removes the handler from an internal map of handlers, + _handlers, which is used for handler lookup by name. Subclasses + should ensure that this gets called from overridden close() + methods. + """ -This version removes the handler from an internal map of handlers, -_handlers, which is used for handler lookup by name. Subclasses -should ensure that this gets called from overridden close() -methods. -""" def handle(self, record: LogRecord) -> bool: """ -Conditionally emit the specified logging record. + Conditionally emit the specified logging record. -Emission depends on filters which may have been added to the handler. -Wrap the actual emission of the record with acquisition/release of -the I/O thread lock. + Emission depends on filters which may have been added to the handler. + Wrap the actual emission of the record with acquisition/release of + the I/O thread lock. + + Returns an instance of the log record that was emitted + if it passed all filters, otherwise a false value is returned. + """ -Returns an instance of the log record that was emitted -if it passed all filters, otherwise a false value is returned. -""" def handleError(self, record: LogRecord) -> None: """ -Handle errors which occur during an emit() call. + Handle errors which occur during an emit() call. + + This method should be called from handlers when an exception is + encountered during an emit() call. If raiseExceptions is false, + exceptions get silently ignored. This is what is mostly wanted + for a logging system - most users will not care about errors in + the logging system, they are more interested in application errors. + You could, however, replace this with a custom handler if you wish. + The record which was being processed is passed in to this method. + """ -This method should be called from handlers when an exception is -encountered during an emit() call. If raiseExceptions is false, -exceptions get silently ignored. This is what is mostly wanted -for a logging system - most users will not care about errors in -the logging system, they are more interested in application errors. -You could, however, replace this with a custom handler if you wish. -The record which was being processed is passed in to this method. 
-""" def format(self, record: LogRecord) -> str: """ -Format the specified record. + Format the specified record. + + If a formatter is set, use it. Otherwise, use the default formatter + for the module. + """ -If a formatter is set, use it. Otherwise, use the default formatter -for the module. -""" def emit(self, record: LogRecord) -> None: """ -Do whatever it takes to actually log the specified logging record. + Do whatever it takes to actually log the specified logging record. -This version is intended to be implemented by subclasses and so -raises a NotImplementedError. -""" + This version is intended to be implemented by subclasses and so + raises a NotImplementedError. + """ if sys.version_info >= (3, 12): def getHandlerByName(name: str) -> Handler | None: """ -Get a handler with the specified *name*, or None if there isn't one with -that name. -""" + Get a handler with the specified *name*, or None if there isn't one with + that name. + """ + def getHandlerNames() -> frozenset[str]: """ -Return all known handler names as an immutable set. -""" + Return all known handler names as an immutable set. + """ class Formatter: """ -Formatter instances are used to convert a LogRecord to text. - -Formatters need to know how a LogRecord is constructed. They are -responsible for converting a LogRecord to (usually) a string which can -be interpreted by either a human or an external system. The base Formatter -allows a formatting string to be specified. If none is supplied, the -style-dependent default value, "%(message)s", "{message}", or -"${message}", is used. - -The Formatter can be initialized with a format string which makes use of -knowledge of the LogRecord attributes - e.g. the default value mentioned -above makes use of the fact that the user's message and arguments are pre- -formatted into a LogRecord's message attribute. Currently, the useful -attributes in a LogRecord are described by: - -%(name)s Name of the logger (logging channel) -%(levelno)s Numeric logging level for the message (DEBUG, INFO, - WARNING, ERROR, CRITICAL) -%(levelname)s Text logging level for the message ("DEBUG", "INFO", - "WARNING", "ERROR", "CRITICAL") -%(pathname)s Full pathname of the source file where the logging - call was issued (if available) -%(filename)s Filename portion of pathname -%(module)s Module (name portion of filename) -%(lineno)d Source line number where the logging call was issued - (if available) -%(funcName)s Function name -%(created)f Time when the LogRecord was created (time.time_ns() / 1e9 - return value) -%(asctime)s Textual time when the LogRecord was created -%(msecs)d Millisecond portion of the creation time -%(relativeCreated)d Time in milliseconds when the LogRecord was created, - relative to the time the logging module was loaded - (typically at application startup time) -%(thread)d Thread ID (if available) -%(threadName)s Thread name (if available) -%(taskName)s Task name (if available) -%(process)d Process ID (if available) -%(processName)s Process name (if available) -%(message)s The result of record.getMessage(), computed just as - the record is emitted -""" + Formatter instances are used to convert a LogRecord to text. + + Formatters need to know how a LogRecord is constructed. They are + responsible for converting a LogRecord to (usually) a string which can + be interpreted by either a human or an external system. The base Formatter + allows a formatting string to be specified. 
If none is supplied, the + style-dependent default value, "%(message)s", "{message}", or + "${message}", is used. + + The Formatter can be initialized with a format string which makes use of + knowledge of the LogRecord attributes - e.g. the default value mentioned + above makes use of the fact that the user's message and arguments are pre- + formatted into a LogRecord's message attribute. Currently, the useful + attributes in a LogRecord are described by: + + %(name)s Name of the logger (logging channel) + %(levelno)s Numeric logging level for the message (DEBUG, INFO, + WARNING, ERROR, CRITICAL) + %(levelname)s Text logging level for the message ("DEBUG", "INFO", + "WARNING", "ERROR", "CRITICAL") + %(pathname)s Full pathname of the source file where the logging + call was issued (if available) + %(filename)s Filename portion of pathname + %(module)s Module (name portion of filename) + %(lineno)d Source line number where the logging call was issued + (if available) + %(funcName)s Function name + %(created)f Time when the LogRecord was created (time.time_ns() / 1e9 + return value) + %(asctime)s Textual time when the LogRecord was created + %(msecs)d Millisecond portion of the creation time + %(relativeCreated)d Time in milliseconds when the LogRecord was created, + relative to the time the logging module was loaded + (typically at application startup time) + %(thread)d Thread ID (if available) + %(threadName)s Thread name (if available) + %(taskName)s Task name (if available) + %(process)d Process ID (if available) + %(processName)s Process name (if available) + %(message)s The result of record.getMessage(), computed just as + the record is emitted + """ + converter: Callable[[float | None], struct_time] _fmt: str | None # undocumented datefmt: str | None # undocumented @@ -622,168 +661,178 @@ attributes in a LogRecord are described by: defaults: Mapping[str, Any] | None = None, ) -> None: """ -Initialize the formatter with specified format strings. + Initialize the formatter with specified format strings. -Initialize the formatter either with the specified format string, or a -default as described above. Allow for specialized date formatting with -the optional datefmt argument. If datefmt is omitted, you get an -ISO8601-like (or RFC 3339-like) format. + Initialize the formatter either with the specified format string, or a + default as described above. Allow for specialized date formatting with + the optional datefmt argument. If datefmt is omitted, you get an + ISO8601-like (or RFC 3339-like) format. -Use a style parameter of '%', '{' or '$' to specify that you want to -use one of %-formatting, :meth:`str.format` (``{}``) formatting or -:class:`string.Template` formatting in your format string. + Use a style parameter of '%', '{' or '$' to specify that you want to + use one of %-formatting, :meth:`str.format` (``{}``) formatting or + :class:`string.Template` formatting in your format string. -.. versionchanged:: 3.2 - Added the ``style`` parameter. -""" + .. versionchanged:: 3.2 + Added the ``style`` parameter. + """ else: def __init__( self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True ) -> None: """ - Initialize the formatter with specified format strings. + Initialize the formatter with specified format strings. - Initialize the formatter either with the specified format string, or a - default as described above. Allow for specialized date formatting with - the optional datefmt argument. 
If datefmt is omitted, you get an - ISO8601-like (or RFC 3339-like) format. + Initialize the formatter either with the specified format string, or a + default as described above. Allow for specialized date formatting with + the optional datefmt argument. If datefmt is omitted, you get an + ISO8601-like (or RFC 3339-like) format. - Use a style parameter of '%', '{' or '$' to specify that you want to - use one of %-formatting, :meth:`str.format` (``{}``) formatting or - :class:`string.Template` formatting in your format string. + Use a style parameter of '%', '{' or '$' to specify that you want to + use one of %-formatting, :meth:`str.format` (``{}``) formatting or + :class:`string.Template` formatting in your format string. - .. versionchanged:: 3.2 - Added the ``style`` parameter. - """ + .. versionchanged:: 3.2 + Added the ``style`` parameter. + """ def format(self, record: LogRecord) -> str: """ -Format the specified record as text. + Format the specified record as text. + + The record's attribute dictionary is used as the operand to a + string formatting operation which yields the returned string. + Before formatting the dictionary, a couple of preparatory steps + are carried out. The message attribute of the record is computed + using LogRecord.getMessage(). If the formatting string uses the + time (as determined by a call to usesTime(), formatTime() is + called to format the event time. If there is exception information, + it is formatted using formatException() and appended to the message. + """ -The record's attribute dictionary is used as the operand to a -string formatting operation which yields the returned string. -Before formatting the dictionary, a couple of preparatory steps -are carried out. The message attribute of the record is computed -using LogRecord.getMessage(). If the formatting string uses the -time (as determined by a call to usesTime(), formatTime() is -called to format the event time. If there is exception information, -it is formatted using formatException() and appended to the message. -""" def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: """ -Return the creation time of the specified LogRecord as formatted text. - -This method should be called from format() by a formatter which -wants to make use of a formatted time. This method can be overridden -in formatters to provide for any specific requirement, but the -basic behaviour is as follows: if datefmt (a string) is specified, -it is used with time.strftime() to format the creation time of the -record. Otherwise, an ISO8601-like (or RFC 3339-like) format is used. -The resulting string is returned. This function uses a user-configurable -function to convert the creation time to a tuple. By default, -time.localtime() is used; to change this for a particular formatter -instance, set the 'converter' attribute to a function with the same -signature as time.localtime() or time.gmtime(). To change it for all -formatters, for example if you want all logging times to be shown in GMT, -set the 'converter' attribute in the Formatter class. -""" + Return the creation time of the specified LogRecord as formatted text. + + This method should be called from format() by a formatter which + wants to make use of a formatted time. This method can be overridden + in formatters to provide for any specific requirement, but the + basic behaviour is as follows: if datefmt (a string) is specified, + it is used with time.strftime() to format the creation time of the + record. 
Otherwise, an ISO8601-like (or RFC 3339-like) format is used. + The resulting string is returned. This function uses a user-configurable + function to convert the creation time to a tuple. By default, + time.localtime() is used; to change this for a particular formatter + instance, set the 'converter' attribute to a function with the same + signature as time.localtime() or time.gmtime(). To change it for all + formatters, for example if you want all logging times to be shown in GMT, + set the 'converter' attribute in the Formatter class. + """ + def formatException(self, ei: _SysExcInfoType) -> str: """ -Format and return the specified exception information as a string. + Format and return the specified exception information as a string. + + This default implementation just uses + traceback.print_exception() + """ -This default implementation just uses -traceback.print_exception() -""" def formatMessage(self, record: LogRecord) -> str: ... # undocumented def formatStack(self, stack_info: str) -> str: """ -This method is provided as an extension point for specialized -formatting of stack information. + This method is provided as an extension point for specialized + formatting of stack information. -The input data is a string as returned from a call to -:func:`traceback.print_stack`, but with the last trailing newline -removed. + The input data is a string as returned from a call to + :func:`traceback.print_stack`, but with the last trailing newline + removed. + + The base implementation just returns the value passed in. + """ -The base implementation just returns the value passed in. -""" def usesTime(self) -> bool: # undocumented """ -Check if the format uses the creation time of the record. -""" + Check if the format uses the creation time of the record. + """ class BufferingFormatter: """ -A formatter suitable for formatting a number of records. -""" + A formatter suitable for formatting a number of records. + """ + linefmt: Formatter def __init__(self, linefmt: Formatter | None = None) -> None: """ -Optionally specify a formatter which will be used to format each -individual record. -""" + Optionally specify a formatter which will be used to format each + individual record. + """ + def formatHeader(self, records: Sequence[LogRecord]) -> str: """ -Return the header string for the specified records. -""" + Return the header string for the specified records. + """ + def formatFooter(self, records: Sequence[LogRecord]) -> str: """ -Return the footer string for the specified records. -""" + Return the footer string for the specified records. + """ + def format(self, records: Sequence[LogRecord]) -> str: """ -Format the specified records and return the result as a string. -""" + Format the specified records and return the result as a string. + """ class Filter: """ -Filter instances are used to perform arbitrary filtering of LogRecords. + Filter instances are used to perform arbitrary filtering of LogRecords. + + Loggers and Handlers can optionally use Filter instances to filter + records as desired. The base filter class only allows events which are + below a certain point in the logger hierarchy. For example, a filter + initialized with "A.B" will allow events logged by loggers "A.B", + "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If + initialized with the empty string, all events are passed. + """ -Loggers and Handlers can optionally use Filter instances to filter -records as desired. 
The base filter class only allows events which are -below a certain point in the logger hierarchy. For example, a filter -initialized with "A.B" will allow events logged by loggers "A.B", -"A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If -initialized with the empty string, all events are passed. -""" name: str # undocumented nlen: int # undocumented def __init__(self, name: str = "") -> None: """ -Initialize a filter. + Initialize a filter. -Initialize with the name of the logger which, together with its -children, will have its events allowed through the filter. If no -name is specified, allow every event. -""" + Initialize with the name of the logger which, together with its + children, will have its events allowed through the filter. If no + name is specified, allow every event. + """ if sys.version_info >= (3, 12): def filter(self, record: LogRecord) -> bool | LogRecord: """ -Determine if the specified record is to be logged. + Determine if the specified record is to be logged. -Returns True if the record should be logged, or False otherwise. -If deemed appropriate, the record may be modified in-place. -""" + Returns True if the record should be logged, or False otherwise. + If deemed appropriate, the record may be modified in-place. + """ else: def filter(self, record: LogRecord) -> bool: """ - Determine if the specified record is to be logged. + Determine if the specified record is to be logged. - Returns True if the record should be logged, or False otherwise. - If deemed appropriate, the record may be modified in-place. - """ + Returns True if the record should be logged, or False otherwise. + If deemed appropriate, the record may be modified in-place. + """ class LogRecord: """ -A LogRecord instance represents an event being logged. + A LogRecord instance represents an event being logged. + + LogRecord instances are created every time something is logged. They + contain all the information pertinent to the event being logged. The + main information passed in is in msg and args, which are combined + using str(msg) % args to create the message field of the record. The + record also includes information such as when the record was created, + the source line where the logging call was made, and any exception + information to be logged. + """ -LogRecord instances are created every time something is logged. They -contain all the information pertinent to the event being logged. The -main information passed in is in msg and args, which are combined -using str(msg) % args to create the message field of the record. The -record also includes information such as when the record was created, -the source line where the logging call was made, and any exception -information to be logged. -""" # args can be set to None by logging.handlers.QueueHandler # (see https://bugs.python.org/issue44473) args: _ArgsType | None @@ -825,15 +874,16 @@ information to be logged. sinfo: str | None = None, ) -> None: """ -Initialize a logging record with interesting information. -""" + Initialize a logging record with interesting information. + """ + def getMessage(self) -> str: """ -Return the message for this LogRecord. + Return the message for this LogRecord. -Return the message for this LogRecord after merging any user-supplied -arguments with the message. -""" + Return the message for this LogRecord after merging any user-supplied + arguments with the message. 
+ """ # Allows setting contextual information on LogRecord objects as per the docs, see #7833 def __setattr__(self, name: str, value: Any, /) -> None: ... @@ -841,59 +891,59 @@ _L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) class LoggerAdapter(Generic[_L]): """ -An adapter for loggers which makes it easier to specify contextual -information in logging output. -""" + An adapter for loggers which makes it easier to specify contextual + information in logging output. + """ + logger: _L manager: Manager # undocumented if sys.version_info >= (3, 13): def __init__(self, logger: _L, extra: Mapping[str, object] | None = None, merge_extra: bool = False) -> None: """ -Initialize the adapter with a logger and a dict-like object which -provides contextual information. This constructor signature allows -easy stacking of LoggerAdapters, if so desired. + Initialize the adapter with a logger and a dict-like object which + provides contextual information. This constructor signature allows + easy stacking of LoggerAdapters, if so desired. -You can effectively pass keyword arguments as shown in the -following example: + You can effectively pass keyword arguments as shown in the + following example: -adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) -By default, LoggerAdapter objects will drop the "extra" argument -passed on the individual log calls to use its own instead. + By default, LoggerAdapter objects will drop the "extra" argument + passed on the individual log calls to use its own instead. -Initializing it with merge_extra=True will instead merge both -maps when logging, the individual call extra taking precedence -over the LoggerAdapter instance extra + Initializing it with merge_extra=True will instead merge both + maps when logging, the individual call extra taking precedence + over the LoggerAdapter instance extra -.. versionchanged:: 3.13 - The *merge_extra* argument was added. -""" + .. versionchanged:: 3.13 + The *merge_extra* argument was added. + """ elif sys.version_info >= (3, 10): def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: """ - Initialize the adapter with a logger and a dict-like object which - provides contextual information. This constructor signature allows - easy stacking of LoggerAdapters, if so desired. + Initialize the adapter with a logger and a dict-like object which + provides contextual information. This constructor signature allows + easy stacking of LoggerAdapters, if so desired. - You can effectively pass keyword arguments as shown in the - following example: + You can effectively pass keyword arguments as shown in the + following example: - adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) - """ + adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + """ else: def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: """ - Initialize the adapter with a logger and a dict-like object which - provides contextual information. This constructor signature allows - easy stacking of LoggerAdapters, if so desired. - - You can effectively pass keyword arguments as shown in the - following example: + Initialize the adapter with a logger and a dict-like object which + provides contextual information. This constructor signature allows + easy stacking of LoggerAdapters, if so desired. 
- adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) - """ + You can effectively pass keyword arguments as shown in the + following example: + adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2")) + """ if sys.version_info >= (3, 10): extra: Mapping[str, object] | None else: @@ -904,14 +954,15 @@ over the LoggerAdapter instance extra def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: """ -Process the logging message and keyword arguments passed in to -a logging call to insert contextual information. You can either -manipulate the message itself, the keyword args or both. Return -the message and kwargs modified (or not) to suit your needs. + Process the logging message and keyword arguments passed in to + a logging call to insert contextual information. You can either + manipulate the message itself, the keyword args or both. Return + the message and kwargs modified (or not) to suit your needs. + + Normally, you'll only need to override this one method in a + LoggerAdapter subclass for your specific needs. + """ -Normally, you'll only need to override this one method in a -LoggerAdapter subclass for your specific needs. -""" def debug( self, msg: object, @@ -923,8 +974,9 @@ LoggerAdapter subclass for your specific needs. **kwargs: object, ) -> None: """ -Delegate a debug call to the underlying logger. -""" + Delegate a debug call to the underlying logger. + """ + def info( self, msg: object, @@ -936,8 +988,9 @@ Delegate a debug call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate an info call to the underlying logger. -""" + Delegate an info call to the underlying logger. + """ + def warning( self, msg: object, @@ -949,8 +1002,9 @@ Delegate an info call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate a warning call to the underlying logger. -""" + Delegate a warning call to the underlying logger. + """ + @deprecated("Deprecated since Python 3.3. Use `LoggerAdapter.warning()` instead.") def warn( self, @@ -973,8 +1027,9 @@ Delegate a warning call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate an error call to the underlying logger. -""" + Delegate an error call to the underlying logger. + """ + def exception( self, msg: object, @@ -986,8 +1041,9 @@ Delegate an error call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate an exception call to the underlying logger. -""" + Delegate an exception call to the underlying logger. + """ + def critical( self, msg: object, @@ -999,8 +1055,9 @@ Delegate an exception call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate a critical call to the underlying logger. -""" + Delegate a critical call to the underlying logger. + """ + def log( self, level: int, @@ -1013,25 +1070,29 @@ Delegate a critical call to the underlying logger. **kwargs: object, ) -> None: """ -Delegate a log call to the underlying logger, after adding -contextual information from this adapter instance. -""" + Delegate a log call to the underlying logger, after adding + contextual information from this adapter instance. + """ + def isEnabledFor(self, level: int) -> bool: """ -Is this logger enabled for level 'level'? -""" + Is this logger enabled for level 'level'? + """ + def getEffectiveLevel(self) -> int: """ -Get the effective level for the underlying logger. -""" + Get the effective level for the underlying logger. + """ + def setLevel(self, level: _Level) -> None: """ -Set the specified level on the underlying logger. 
-""" + Set the specified level on the underlying logger. + """ + def hasHandlers(self) -> bool: """ -See if the underlying logger has any handlers. -""" + See if the underlying logger has any handlers. + """ if sys.version_info >= (3, 11): def _log( self, @@ -1044,8 +1105,8 @@ See if the underlying logger has any handlers. stack_info: bool = False, ) -> None: # undocumented """ -Low-level log implementation, proxied to allow nested logger adapters. -""" + Low-level log implementation, proxied to allow nested logger adapters. + """ else: def _log( self, @@ -1057,8 +1118,8 @@ Low-level log implementation, proxied to allow nested logger adapters. stack_info: bool = False, ) -> None: # undocumented """ - Low-level log implementation, proxied to allow nested logger adapters. - """ + Low-level log implementation, proxied to allow nested logger adapters. + """ @property def name(self) -> str: ... # undocumented @@ -1066,23 +1127,26 @@ Low-level log implementation, proxied to allow nested logger adapters. def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def getLogger(name: str | None = None) -> Logger: """ -Return a logger with the specified name, creating it if necessary. + Return a logger with the specified name, creating it if necessary. + + If no name is specified, return the root logger. + """ -If no name is specified, return the root logger. -""" def getLoggerClass() -> type[Logger]: """ -Return the class to be used when instantiating a logger. -""" + Return the class to be used when instantiating a logger. + """ + def getLogRecordFactory() -> Callable[..., LogRecord]: """ -Return the factory to be used when instantiating a log record. -""" + Return the factory to be used when instantiating a log record. + """ + def debug( msg: object, *args: object, @@ -1092,10 +1156,11 @@ def debug( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'DEBUG' on the root logger. If the logger has -no handlers, call basicConfig() to add a console handler with a pre-defined -format. -""" + Log a message with severity 'DEBUG' on the root logger. If the logger has + no handlers, call basicConfig() to add a console handler with a pre-defined + format. + """ + def info( msg: object, *args: object, @@ -1105,10 +1170,11 @@ def info( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'INFO' on the root logger. If the logger has -no handlers, call basicConfig() to add a console handler with a pre-defined -format. -""" + Log a message with severity 'INFO' on the root logger. If the logger has + no handlers, call basicConfig() to add a console handler with a pre-defined + format. + """ + def warning( msg: object, *args: object, @@ -1118,10 +1184,11 @@ def warning( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'WARNING' on the root logger. If the logger has -no handlers, call basicConfig() to add a console handler with a pre-defined -format. -""" + Log a message with severity 'WARNING' on the root logger. If the logger has + no handlers, call basicConfig() to add a console handler with a pre-defined + format. + """ + @deprecated("Deprecated since Python 3.3. 
Use `warning()` instead.") def warn( msg: object, @@ -1140,10 +1207,11 @@ def error( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'ERROR' on the root logger. If the logger has -no handlers, call basicConfig() to add a console handler with a pre-defined -format. -""" + Log a message with severity 'ERROR' on the root logger. If the logger has + no handlers, call basicConfig() to add a console handler with a pre-defined + format. + """ + def critical( msg: object, *args: object, @@ -1153,10 +1221,11 @@ def critical( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'CRITICAL' on the root logger. If the logger -has no handlers, call basicConfig() to add a console handler with a -pre-defined format. -""" + Log a message with severity 'CRITICAL' on the root logger. If the logger + has no handlers, call basicConfig() to add a console handler with a + pre-defined format. + """ + def exception( msg: object, *args: object, @@ -1166,10 +1235,11 @@ def exception( extra: Mapping[str, object] | None = None, ) -> None: """ -Log a message with severity 'ERROR' on the root logger, with exception -information. If the logger has no handlers, basicConfig() is called to add -a console handler with a pre-defined format. -""" + Log a message with severity 'ERROR' on the root logger, with exception + information. If the logger has no handlers, basicConfig() is called to add + a console handler with a pre-defined format. + """ + def log( level: int, msg: object, @@ -1180,42 +1250,45 @@ def log( extra: Mapping[str, object] | None = None, ) -> None: """ -Log 'msg % args' with the integer severity 'level' on the root logger. If -the logger has no handlers, call basicConfig() to add a console handler -with a pre-defined format. -""" + Log 'msg % args' with the integer severity 'level' on the root logger. If + the logger has no handlers, call basicConfig() to add a console handler + with a pre-defined format. + """ fatal = critical def disable(level: int = 50) -> None: """ -Disable all logging calls of severity 'level' and below. -""" + Disable all logging calls of severity 'level' and below. + """ + def addLevelName(level: int, levelName: str) -> None: """ -Associate 'levelName' with 'level'. + Associate 'levelName' with 'level'. + + This is used when converting levels to text during message formatting. + """ -This is used when converting levels to text during message formatting. -""" @overload def getLevelName(level: int) -> str: """ -Return the textual or numeric representation of logging level 'level'. + Return the textual or numeric representation of logging level 'level'. -If the level is one of the predefined levels (CRITICAL, ERROR, WARNING, -INFO, DEBUG) then you get the corresponding string. If you have -associated levels with names using addLevelName then the name you have -associated with 'level' is returned. + If the level is one of the predefined levels (CRITICAL, ERROR, WARNING, + INFO, DEBUG) then you get the corresponding string. If you have + associated levels with names using addLevelName then the name you have + associated with 'level' is returned. -If a numeric value corresponding to one of the defined levels is passed -in, the corresponding string representation is returned. + If a numeric value corresponding to one of the defined levels is passed + in, the corresponding string representation is returned. -If a string representation of the level is passed in, the corresponding -numeric value is returned. 
+ If a string representation of the level is passed in, the corresponding + numeric value is returned. + + If no matching numeric or string value is passed in, the string + 'Level %s' % level is returned. + """ -If no matching numeric or string value is passed in, the string -'Level %s' % level is returned. -""" @overload @deprecated("The str -> int case is considered a mistake.") def getLevelName(level: str) -> Any: ... @@ -1225,11 +1298,12 @@ if sys.version_info >= (3, 11): def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: """ -Make a LogRecord whose attributes are defined by the specified dictionary, -This function is useful for converting a logging event received over -a socket connection (which is sent as a dictionary) into a LogRecord -instance. -""" + Make a LogRecord whose attributes are defined by the specified dictionary, + This function is useful for converting a logging event received over + a socket connection (which is sent as a dictionary) into a LogRecord + instance. + """ + def basicConfig( *, filename: StrPath | None = ..., @@ -1245,98 +1319,102 @@ def basicConfig( errors: str | None = ..., ) -> None: """ -Do basic configuration for the logging system. - -This function does nothing if the root logger already has handlers -configured, unless the keyword argument *force* is set to ``True``. -It is a convenience method intended for use by simple scripts -to do one-shot configuration of the logging package. - -The default behaviour is to create a StreamHandler which writes to -sys.stderr, set a formatter using the BASIC_FORMAT format string, and -add the handler to the root logger. - -A number of optional keyword arguments may be specified, which can alter -the default behaviour. - -filename Specifies that a FileHandler be created, using the specified - filename, rather than a StreamHandler. -filemode Specifies the mode to open the file, if filename is specified - (if filemode is unspecified, it defaults to 'a'). -format Use the specified format string for the handler. -datefmt Use the specified date/time format. -style If a format string is specified, use this to specify the - type of format string (possible values '%', '{', '$', for - %-formatting, :meth:`str.format` and :class:`string.Template` - - defaults to '%'). -level Set the root logger level to the specified level. -stream Use the specified stream to initialize the StreamHandler. Note - that this argument is incompatible with 'filename' - if both - are present, 'stream' is ignored. -handlers If specified, this should be an iterable of already created - handlers, which will be added to the root logger. Any handler - in the list which does not have a formatter assigned will be - assigned the formatter created in this function. -force If this keyword is specified as true, any existing handlers - attached to the root logger are removed and closed, before - carrying out the configuration as specified by the other - arguments. -encoding If specified together with a filename, this encoding is passed to - the created FileHandler, causing it to be used when the file is - opened. -errors If specified together with a filename, this value is passed to the - created FileHandler, causing it to be used when the file is - opened in text mode. If not specified, the default value is - `backslashreplace`. - -Note that you could specify a stream created using open(filename, mode) -rather than passing the filename and mode in. 
However, it should be -remembered that StreamHandler does not close its stream (since it may be -using sys.stdout or sys.stderr), whereas FileHandler closes its stream -when the handler is closed. - -.. versionchanged:: 3.2 - Added the ``style`` parameter. - -.. versionchanged:: 3.3 - Added the ``handlers`` parameter. A ``ValueError`` is now thrown for - incompatible arguments (e.g. ``handlers`` specified together with - ``filename``/``filemode``, or ``filename``/``filemode`` specified - together with ``stream``, or ``handlers`` specified together with - ``stream``. - -.. versionchanged:: 3.8 - Added the ``force`` parameter. - -.. versionchanged:: 3.9 - Added the ``encoding`` and ``errors`` parameters. -""" + Do basic configuration for the logging system. + + This function does nothing if the root logger already has handlers + configured, unless the keyword argument *force* is set to ``True``. + It is a convenience method intended for use by simple scripts + to do one-shot configuration of the logging package. + + The default behaviour is to create a StreamHandler which writes to + sys.stderr, set a formatter using the BASIC_FORMAT format string, and + add the handler to the root logger. + + A number of optional keyword arguments may be specified, which can alter + the default behaviour. + + filename Specifies that a FileHandler be created, using the specified + filename, rather than a StreamHandler. + filemode Specifies the mode to open the file, if filename is specified + (if filemode is unspecified, it defaults to 'a'). + format Use the specified format string for the handler. + datefmt Use the specified date/time format. + style If a format string is specified, use this to specify the + type of format string (possible values '%', '{', '$', for + %-formatting, :meth:`str.format` and :class:`string.Template` + - defaults to '%'). + level Set the root logger level to the specified level. + stream Use the specified stream to initialize the StreamHandler. Note + that this argument is incompatible with 'filename' - if both + are present, 'stream' is ignored. + handlers If specified, this should be an iterable of already created + handlers, which will be added to the root logger. Any handler + in the list which does not have a formatter assigned will be + assigned the formatter created in this function. + force If this keyword is specified as true, any existing handlers + attached to the root logger are removed and closed, before + carrying out the configuration as specified by the other + arguments. + encoding If specified together with a filename, this encoding is passed to + the created FileHandler, causing it to be used when the file is + opened. + errors If specified together with a filename, this value is passed to the + created FileHandler, causing it to be used when the file is + opened in text mode. If not specified, the default value is + `backslashreplace`. + + Note that you could specify a stream created using open(filename, mode) + rather than passing the filename and mode in. However, it should be + remembered that StreamHandler does not close its stream (since it may be + using sys.stdout or sys.stderr), whereas FileHandler closes its stream + when the handler is closed. + + .. versionchanged:: 3.2 + Added the ``style`` parameter. + + .. versionchanged:: 3.3 + Added the ``handlers`` parameter. A ``ValueError`` is now thrown for + incompatible arguments (e.g. 
``handlers`` specified together with + ``filename``/``filemode``, or ``filename``/``filemode`` specified + together with ``stream``, or ``handlers`` specified together with + ``stream``. + + .. versionchanged:: 3.8 + Added the ``force`` parameter. + + .. versionchanged:: 3.9 + Added the ``encoding`` and ``errors`` parameters. + """ + def shutdown(handlerList: Sequence[Any] = ...) -> None: # handlerList is undocumented """ -Perform any cleanup actions in the logging system (e.g. flushing -buffers). + Perform any cleanup actions in the logging system (e.g. flushing + buffers). + + Should be called at application exit. + """ -Should be called at application exit. -""" def setLoggerClass(klass: type[Logger]) -> None: """ -Set the class to be used when instantiating a logger. The class should -define __init__() such that only a name argument is required, and the -__init__() should call Logger.__init__() -""" + Set the class to be used when instantiating a logger. The class should + define __init__() such that only a name argument is required, and the + __init__() should call Logger.__init__() + """ + def captureWarnings(capture: bool) -> None: """ -If capture is true, redirect all warnings to the logging package. -If capture is False, ensure that warnings are not redirected to logging -but to their original destinations. -""" + If capture is true, redirect all warnings to the logging package. + If capture is False, ensure that warnings are not redirected to logging + but to their original destinations. + """ + def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: """ -Set the factory to be used when instantiating a log record. + Set the factory to be used when instantiating a log record. -:param factory: A callable which will be called to instantiate -a log record. -""" + :param factory: A callable which will be called to instantiate + a log record. + """ lastResort: Handler | None @@ -1344,40 +1422,43 @@ _StreamT = TypeVar("_StreamT", bound=SupportsWrite[str]) class StreamHandler(Handler, Generic[_StreamT]): """ -A handler class which writes logging records, appropriately formatted, -to a stream. Note that this class does not close the stream, as -sys.stdout or sys.stderr may be used. -""" + A handler class which writes logging records, appropriately formatted, + to a stream. Note that this class does not close the stream, as + sys.stdout or sys.stderr may be used. + """ + stream: _StreamT # undocumented terminator: str @overload def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: """ -Initialize the handler. + Initialize the handler. + + If stream is not specified, sys.stderr is used. + """ -If stream is not specified, sys.stderr is used. -""" @overload def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780 def setStream(self, stream: _StreamT) -> _StreamT | None: """ -Sets the StreamHandler's stream to the specified value, -if it is different. + Sets the StreamHandler's stream to the specified value, + if it is different. -Returns the old stream, if the stream was changed, or None -if it wasn't. -""" + Returns the old stream, if the stream was changed, or None + if it wasn't. + """ if sys.version_info >= (3, 11): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). 
+ """ class FileHandler(StreamHandler[TextIOWrapper]): """ -A handler class which writes formatted logging records to disk files. -""" + A handler class which writes formatted logging records to disk files. + """ + baseFilename: str # undocumented mode: str # undocumented encoding: str | None # undocumented @@ -1387,53 +1468,57 @@ A handler class which writes formatted logging records to disk files. self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: """ -Open the specified file and use it as the stream for logging. -""" + Open the specified file and use it as the stream for logging. + """ + def _open(self) -> TextIOWrapper: # undocumented """ -Open the current base file with the (original) mode and encoding. -Return the resulting stream. -""" + Open the current base file with the (original) mode and encoding. + Return the resulting stream. + """ class NullHandler(Handler): """ -This handler does nothing. It's intended to be used to avoid the -"No handlers could be found for logger XXX" one-off warning. This is -important for library code, which may contain code to log events. If a user -of the library does not configure logging, the one-off warning might be -produced; to avoid this, the library developer simply needs to instantiate -a NullHandler and add it to the top-level logger of the library module or -package. -""" + This handler does nothing. It's intended to be used to avoid the + "No handlers could be found for logger XXX" one-off warning. This is + important for library code, which may contain code to log events. If a user + of the library does not configure logging, the one-off warning might be + produced; to avoid this, the library developer simply needs to instantiate + a NullHandler and add it to the top-level logger of the library module or + package. + """ class PlaceHolder: # undocumented """ -PlaceHolder instances are used in the Manager logger hierarchy to take -the place of nodes for which no loggers have been defined. This class is -intended for internal use only and not as part of the public API. -""" + PlaceHolder instances are used in the Manager logger hierarchy to take + the place of nodes for which no loggers have been defined. This class is + intended for internal use only and not as part of the public API. + """ + loggerMap: dict[Logger, None] def __init__(self, alogger: Logger) -> None: """ -Initialize with the specified logger being a child of this placeholder. -""" + Initialize with the specified logger being a child of this placeholder. + """ + def append(self, alogger: Logger) -> None: """ -Add the specified logger as a child of this placeholder. -""" + Add the specified logger as a child of this placeholder. + """ # Below aren't in module docs but still visible class RootLogger(Logger): """ -A root logger is not that different to any other logger, except that -it must have a logging level and there is only one instance of it in -the hierarchy. -""" + A root logger is not that different to any other logger, except that + it must have a logging level and there is only one instance of it in + the hierarchy. + """ + def __init__(self, level: int) -> None: """ -Initialize the logger with the name "root". -""" + Initialize the logger with the name "root". + """ root: RootLogger @@ -1450,8 +1535,8 @@ class PercentStyle: # undocumented def usesTime(self) -> bool: ... 
def validate(self) -> None: - """Validate the input format, ensure it matches the correct style -""" + """Validate the input format, ensure it matches the correct style""" + def format(self, record: Any) -> str: ... class StrFormatStyle(PercentStyle): # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi index 81292bd302b1b..7c083b255cce2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/config.pyi @@ -7,6 +7,7 @@ Copyright (C) 2001-2022 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! """ + import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence @@ -73,8 +74,7 @@ class _DictConfigArgs(TypedDict, total=False): # Also accept a TypedDict type, to allow callers to use TypedDict # types, and for somewhat stricter type checking of dict literals. def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: - """Configure logging using a dictionary. -""" + """Configure logging using a dictionary.""" if sys.version_info >= (3, 10): def fileConfig( @@ -84,13 +84,13 @@ if sys.version_info >= (3, 10): encoding: str | None = None, ) -> None: """ -Read the logging configuration from a ConfigParser-format file. + Read the logging configuration from a ConfigParser-format file. -This can be called several times from an application, allowing an end user -the ability to select from various pre-canned configurations (if the -developer provides a mechanism to present the choices and load the chosen -configuration). -""" + This can be called several times from an application, allowing an end user + the ability to select from various pre-canned configurations (if the + developer provides a mechanism to present the choices and load the chosen + configuration). + """ else: def fileConfig( @@ -99,55 +99,56 @@ else: disable_existing_loggers: bool = True, ) -> None: """ - Read the logging configuration from a ConfigParser-format file. + Read the logging configuration from a ConfigParser-format file. - This can be called several times from an application, allowing an end user - the ability to select from various pre-canned configurations (if the - developer provides a mechanism to present the choices and load the chosen - configuration). - """ + This can be called several times from an application, allowing an end user + the ability to select from various pre-canned configurations (if the + developer provides a mechanism to present the choices and load the chosen + configuration). + """ def valid_ident(s: str) -> Literal[True]: ... # undocumented def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: """ -Start up a socket server on the specified port, and listen for new -configurations. - -These will be sent as a file suitable for processing by fileConfig(). -Returns a Thread object on which you can call start() to start the server, -and which you can join() when appropriate. To stop the server, call -stopListening(). - -Use the ``verify`` argument to verify any bytes received across the wire -from a client. 
If specified, it should be a callable which receives a -single argument - the bytes of configuration data received across the -network - and it should return either ``None``, to indicate that the -passed in bytes could not be verified and should be discarded, or a -byte string which is then passed to the configuration machinery as -normal. Note that you can return transformed bytes, e.g. by decrypting -the bytes passed in. -""" + Start up a socket server on the specified port, and listen for new + configurations. + + These will be sent as a file suitable for processing by fileConfig(). + Returns a Thread object on which you can call start() to start the server, + and which you can join() when appropriate. To stop the server, call + stopListening(). + + Use the ``verify`` argument to verify any bytes received across the wire + from a client. If specified, it should be a callable which receives a + single argument - the bytes of configuration data received across the + network - and it should return either ``None``, to indicate that the + passed in bytes could not be verified and should be discarded, or a + byte string which is then passed to the configuration machinery as + normal. Note that you can return transformed bytes, e.g. by decrypting + the bytes passed in. + """ + def stopListening() -> None: """ -Stop the listening server which was created with a call to listen(). -""" + Stop the listening server which was created with a call to listen(). + """ class ConvertingMixin: # undocumented - """For ConvertingXXX's, this mixin class provides common functions -""" + """For ConvertingXXX's, this mixin class provides common functions""" + def convert_with_key(self, key: Any, value: Any, replace: bool = True) -> Any: ... def convert(self, value: Any) -> Any: ... class ConvertingDict(dict[Hashable, Any], ConvertingMixin): # undocumented - """A converting dictionary wrapper. -""" + """A converting dictionary wrapper.""" + def __getitem__(self, key: Hashable) -> Any: ... def get(self, key: Hashable, default: Any = None) -> Any: ... def pop(self, key: Hashable, default: Any = None) -> Any: ... class ConvertingList(list[Any], ConvertingMixin): # undocumented - """A converting list wrapper. -""" + """A converting list wrapper.""" + @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -156,8 +157,8 @@ class ConvertingList(list[Any], ConvertingMixin): # undocumented if sys.version_info >= (3, 12): class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented - """A converting tuple wrapper. -""" + """A converting tuple wrapper.""" + @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -166,8 +167,8 @@ if sys.version_info >= (3, 12): else: @disjoint_base class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented - """A converting tuple wrapper. -""" + """A converting tuple wrapper.""" + @overload def __getitem__(self, key: SupportsIndex) -> Any: ... @overload @@ -175,8 +176,9 @@ else: class BaseConfigurator: """ -The configurator base class which defines some useful defaults. -""" + The configurator base class which defines some useful defaults. + """ + CONVERT_PATTERN: Pattern[str] WORD_PATTERN: Pattern[str] DOT_PATTERN: Pattern[str] @@ -190,62 +192,64 @@ The configurator base class which defines some useful defaults. def __init__(self, config: _DictConfigArgs | dict[str, Any]) -> None: ... def resolve(self, s: str) -> Any: """ -Resolve strings to objects using standard import and attribute -syntax. 
-""" + Resolve strings to objects using standard import and attribute + syntax. + """ + def ext_convert(self, value: str) -> Any: - """Default converter for the ext:// protocol. -""" + """Default converter for the ext:// protocol.""" + def cfg_convert(self, value: str) -> Any: - """Default converter for the cfg:// protocol. -""" + """Default converter for the cfg:// protocol.""" + def convert(self, value: Any) -> Any: """ -Convert values to an appropriate type. dicts, lists and tuples are -replaced by their converting alternatives. Strings are checked to -see if they have a conversion format and are converted if they do. -""" + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. + """ + def configure_custom(self, config: dict[str, Any]) -> Any: - """Configure an object with a user-supplied factory. -""" + """Configure an object with a user-supplied factory.""" + def as_tuple(self, value: list[Any] | tuple[Any, ...]) -> tuple[Any, ...]: - """Utility function which converts lists to tuples. -""" + """Utility function which converts lists to tuples.""" class DictConfigurator(BaseConfigurator): """ -Configure logging using a dictionary-like object to describe the -configuration. -""" + Configure logging using a dictionary-like object to describe the + configuration. + """ + def configure(self) -> None: # undocumented - """Do the configuration. -""" + """Do the configuration.""" + def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: # undocumented - """Configure a formatter from a dictionary. -""" + """Configure a formatter from a dictionary.""" + def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: # undocumented - """Configure a filter from a dictionary. -""" + """Configure a filter from a dictionary.""" + def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: # undocumented - """Add filters to a filterer from a list of names. -""" + """Add filters to a filterer from a list of names.""" + def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: # undocumented - """Configure a handler from a dictionary. -""" + """Configure a handler from a dictionary.""" + def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: # undocumented - """Add handlers to a logger from a list of names. -""" + """Add handlers to a logger from a list of names.""" + def common_logger_config( self, logger: Logger, config: _LoggerConfiguration, incremental: bool = False ) -> None: # undocumented """ -Perform configuration which is common to root and non-root loggers. -""" + Perform configuration which is common to root and non-root loggers. + """ + def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented - """Configure a non-root logger from a dictionary. -""" + """Configure a non-root logger from a dictionary.""" + def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: # undocumented - """Configure a root logger from a dictionary. 
-""" + """Configure a root logger from a dictionary.""" dictConfigClass = DictConfigurator diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi index f203dd418d93e..cbf27e4b69c8d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/logging/handlers.pyi @@ -6,6 +6,7 @@ Copyright (C) 2001-2021 Vinay Sajip. All Rights Reserved. To use, simply 'import logging.handlers' and log away! """ + import datetime import http.client import ssl @@ -31,23 +32,24 @@ SYSLOG_TCP_PORT: Final = 514 class WatchedFileHandler(FileHandler): """ -A handler for logging to a file, which watches the file -to see if it has changed while in use. This can happen because of -usage of programs such as newsyslog and logrotate which perform -log file rotation. This handler, intended for use under Unix, -watches the file to see if it has changed since the last emit. -(A file has changed if its device or inode have changed.) -If it has changed, the old file stream is closed, and the file -opened to get a new stream. - -This handler is not appropriate for use under Windows, because -under Windows open files cannot be moved or renamed - logging -opens the files with exclusive locks - and so there is no need -for such a handler. - -This handler is based on a suggestion and patch by Chad J. -Schroeder. -""" + A handler for logging to a file, which watches the file + to see if it has changed while in use. This can happen because of + usage of programs such as newsyslog and logrotate which perform + log file rotation. This handler, intended for use under Unix, + watches the file to see if it has changed since the last emit. + (A file has changed if its device or inode have changed.) + If it has changed, the old file stream is closed, and the file + opened to get a new stream. + + This handler is not appropriate for use under Windows, because + under Windows open files cannot be moved or renamed - logging + opens the files with exclusive locks - and so there is no need + for such a handler. + + This handler is based on a suggestion and patch by Chad J. + Schroeder. + """ + dev: int # undocumented ino: int # undocumented def __init__( @@ -56,60 +58,64 @@ Schroeder. def _statstream(self) -> None: ... # undocumented def reopenIfNeeded(self) -> None: """ -Reopen log file if needed. + Reopen log file if needed. -Checks if the underlying file has changed, and if it -has, close the old stream and reopen the file to get the -current stream. -""" + Checks if the underlying file has changed, and if it + has, close the old stream and reopen the file to get the + current stream. + """ class BaseRotatingHandler(FileHandler): """ -Base class for handlers that rotate log files at a certain point. -Not meant to be instantiated directly. Instead, use RotatingFileHandler -or TimedRotatingFileHandler. -""" + Base class for handlers that rotate log files at a certain point. + Not meant to be instantiated directly. Instead, use RotatingFileHandler + or TimedRotatingFileHandler. 
+ """ + namer: Callable[[str], str] | None rotator: Callable[[str, str], None] | None def __init__( self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: """ -Use the specified filename for streamed logging -""" + Use the specified filename for streamed logging + """ + def rotation_filename(self, default_name: str) -> str: """ -Modify the filename of a log file when rotating. + Modify the filename of a log file when rotating. -This is provided so that a custom filename can be provided. + This is provided so that a custom filename can be provided. -The default implementation calls the 'namer' attribute of the -handler, if it's callable, passing the default name to -it. If the attribute isn't callable (the default is None), the name -is returned unchanged. + The default implementation calls the 'namer' attribute of the + handler, if it's callable, passing the default name to + it. If the attribute isn't callable (the default is None), the name + is returned unchanged. + + :param default_name: The default name for the log file. + """ -:param default_name: The default name for the log file. -""" def rotate(self, source: str, dest: str) -> None: """ -When rotating, rotate the current log. + When rotating, rotate the current log. -The default implementation calls the 'rotator' attribute of the -handler, if it's callable, passing the source and dest arguments to -it. If the attribute isn't callable (the default is None), the source -is simply renamed to the destination. + The default implementation calls the 'rotator' attribute of the + handler, if it's callable, passing the source and dest arguments to + it. If the attribute isn't callable (the default is None), the source + is simply renamed to the destination. -:param source: The source filename. This is normally the base - filename, e.g. 'test.log' -:param dest: The destination filename. This is normally - what the source is rotated to, e.g. 'test.log.1'. -""" + :param source: The source filename. This is normally the base + filename, e.g. 'test.log' + :param dest: The destination filename. This is normally + what the source is rotated to, e.g. 'test.log.1'. + """ class RotatingFileHandler(BaseRotatingHandler): """ -Handler for logging to a set of files, which switches from one file -to the next when the current file reaches a certain size. -""" + Handler for logging to a set of files, which switches from one file + to the next when the current file reaches a certain size. + """ + maxBytes: int # undocumented backupCount: int # undocumented def __init__( @@ -123,45 +129,48 @@ to the next when the current file reaches a certain size. errors: str | None = None, ) -> None: """ -Open the specified file and use it as the stream for logging. + Open the specified file and use it as the stream for logging. -By default, the file grows indefinitely. You can specify particular -values of maxBytes and backupCount to allow the file to rollover at -a predetermined size. + By default, the file grows indefinitely. You can specify particular + values of maxBytes and backupCount to allow the file to rollover at + a predetermined size. -Rollover occurs whenever the current log file is nearly maxBytes in -length. If backupCount is >= 1, the system will successively create -new files with the same pathname as the base file, but with extensions -".1", ".2" etc. appended to it. 
For example, with a backupCount of 5 -and a base file name of "app.log", you would get "app.log", -"app.log.1", "app.log.2", ... through to "app.log.5". The file being -written to is always "app.log" - when it gets filled up, it is closed -and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. -exist, then they are renamed to "app.log.2", "app.log.3" etc. -respectively. + Rollover occurs whenever the current log file is nearly maxBytes in + length. If backupCount is >= 1, the system will successively create + new files with the same pathname as the base file, but with extensions + ".1", ".2" etc. appended to it. For example, with a backupCount of 5 + and a base file name of "app.log", you would get "app.log", + "app.log.1", "app.log.2", ... through to "app.log.5". The file being + written to is always "app.log" - when it gets filled up, it is closed + and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. + exist, then they are renamed to "app.log.2", "app.log.3" etc. + respectively. + + If maxBytes is zero, rollover never occurs. + """ -If maxBytes is zero, rollover never occurs. -""" def doRollover(self) -> None: """ -Do a rollover, as described in __init__(). -""" + Do a rollover, as described in __init__(). + """ + def shouldRollover(self, record: LogRecord) -> int: # undocumented """ -Determine if rollover should occur. + Determine if rollover should occur. -Basically, see if the supplied record would cause the file to exceed -the size limit we have. -""" + Basically, see if the supplied record would cause the file to exceed + the size limit we have. + """ class TimedRotatingFileHandler(BaseRotatingHandler): """ -Handler for logging to a file, rotating the log file at certain timed -intervals. + Handler for logging to a file, rotating the log file at certain timed + intervals. + + If backupCount is > 0, when rollover is done, no more than backupCount + files are kept - the oldest ones are deleted. + """ -If backupCount is > 0, when rollover is done, no more than backupCount -files are kept - the oldest ones are deleted. -""" when: str # undocumented backupCount: int # undocumented utc: bool # undocumented @@ -185,42 +194,46 @@ files are kept - the oldest ones are deleted. ) -> None: ... def doRollover(self) -> None: """ -do a rollover; in this case, a date/time stamp is appended to the filename -when the rollover happens. However, you want the file to be named for the -start of the interval, not the current time. If there is a backup count, -then we have to get a list of matching filenames, sort them and remove -the one with the oldest suffix. -""" + do a rollover; in this case, a date/time stamp is appended to the filename + when the rollover happens. However, you want the file to be named for the + start of the interval, not the current time. If there is a backup count, + then we have to get a list of matching filenames, sort them and remove + the one with the oldest suffix. + """ + def shouldRollover(self, record: LogRecord) -> int: # undocumented """ -Determine if rollover should occur. + Determine if rollover should occur. + + record is not used, as we are just comparing times, but it is needed so + the method signatures are the same + """ -record is not used, as we are just comparing times, but it is needed so -the method signatures are the same -""" def computeRollover(self, currentTime: int) -> int: # undocumented """ -Work out the rollover time based on the specified time. -""" + Work out the rollover time based on the specified time. 
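The size-based and time-based rollover rules spelled out in these docstrings can be wired up in a few lines. A minimal sketch; file names, sizes and counts are arbitrary.

import logging
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler

log = logging.getLogger("rollover-example")
log.setLevel(logging.DEBUG)

# Roll over near 1 MB, keeping app.log.1 .. app.log.5 (the oldest is dropped).
log.addHandler(RotatingFileHandler("app.log", maxBytes=1_000_000, backupCount=5))

# Roll over at midnight, appending a date/time stamp and keeping 7 backups.
log.addHandler(TimedRotatingFileHandler("daily.log", when="midnight", backupCount=7))

log.info("written to app.log and daily.log until the next rollover")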
+ """ + def getFilesToDelete(self) -> list[str]: # undocumented """ -Determine the files to delete when rolling over. + Determine the files to delete when rolling over. -More specific than the earlier method, which just used glob.glob(). -""" + More specific than the earlier method, which just used glob.glob(). + """ class SocketHandler(Handler): """ -A handler class which writes logging records, in pickle format, to -a streaming socket. The socket is kept open across logging calls. -If the peer resets it, an attempt is made to reconnect on the next call. -The pickle which is sent is that of the LogRecord's attribute dictionary -(__dict__), so that the receiver does not need to have the logging module -installed in order to process the logging event. - -To unpickle the record at the receiving end into a LogRecord, use the -makeLogRecord function. -""" + A handler class which writes logging records, in pickle format, to + a streaming socket. The socket is kept open across logging calls. + If the peer resets it, an attempt is made to reconnect on the next call. + The pickle which is sent is that of the LogRecord's attribute dictionary + (__dict__), so that the receiver does not need to have the logging module + installed in order to process the logging event. + + To unpickle the record at the receiving end into a LogRecord, use the + makeLogRecord function. + """ + host: str # undocumented port: int | None # undocumented address: tuple[str, int] | str # undocumented @@ -232,61 +245,67 @@ makeLogRecord function. retryMax: float # undocumented def __init__(self, host: str, port: int | None) -> None: """ -Initializes the handler with a specific host address and port. + Initializes the handler with a specific host address and port. + + When the attribute *closeOnError* is set to True - if a socket error + occurs, the socket is silently closed and then reopened on the next + logging call. + """ -When the attribute *closeOnError* is set to True - if a socket error -occurs, the socket is silently closed and then reopened on the next -logging call. -""" def makeSocket(self, timeout: float = 1) -> socket: # timeout is undocumented """ -A factory method which allows subclasses to define the precise -type of socket they want. -""" + A factory method which allows subclasses to define the precise + type of socket they want. + """ + def makePickle(self, record: LogRecord) -> bytes: """ -Pickles the record in binary format with a length prefix, and -returns it ready for transmission across the socket. -""" + Pickles the record in binary format with a length prefix, and + returns it ready for transmission across the socket. + """ + def send(self, s: ReadableBuffer) -> None: """ -Send a pickled string to the socket. + Send a pickled string to the socket. + + This function allows for partial sends which can happen when the + network is busy. + """ -This function allows for partial sends which can happen when the -network is busy. -""" def createSocket(self) -> None: """ -Try to create a socket, using an exponential backoff with -a max retry time. Thanks to Robert Olson for the original patch -(SF #815911) which has been slightly refactored. -""" + Try to create a socket, using an exponential backoff with + a max retry time. Thanks to Robert Olson for the original patch + (SF #815911) which has been slightly refactored. + """ class DatagramHandler(SocketHandler): """ -A handler class which writes logging records, in pickle format, to -a datagram socket. 
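The SocketHandler protocol above (length-prefixed pickles of the record __dict__, rebuilt with makeLogRecord on the far side) looks like this from the sending process. A sketch only; host and port are placeholders and a listening server is assumed to exist.

import logging
from logging.handlers import SocketHandler, DEFAULT_TCP_LOGGING_PORT

net_log = logging.getLogger("net-example")
# makePickle() serialises each record; createSocket() reconnects with
# exponential backoff if the peer resets the connection.
net_log.addHandler(SocketHandler("localhost", DEFAULT_TCP_LOGGING_PORT))
net_log.error("pickled and streamed to the listening server")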
The pickle which is sent is that of the LogRecord's -attribute dictionary (__dict__), so that the receiver does not need to -have the logging module installed in order to process the logging event. + A handler class which writes logging records, in pickle format, to + a datagram socket. The pickle which is sent is that of the LogRecord's + attribute dictionary (__dict__), so that the receiver does not need to + have the logging module installed in order to process the logging event. -To unpickle the record at the receiving end into a LogRecord, use the -makeLogRecord function. + To unpickle the record at the receiving end into a LogRecord, use the + makeLogRecord function. + + """ -""" def makeSocket(self) -> socket: # type: ignore[override] """ -The factory method of SocketHandler is here overridden to create -a UDP socket (SOCK_DGRAM). -""" + The factory method of SocketHandler is here overridden to create + a UDP socket (SOCK_DGRAM). + """ class SysLogHandler(Handler): """ -A handler class which sends formatted logging records to a syslog -server. Based on Sam Rushing's syslog module: -http://www.nightmare.com/squirl/python-ext/misc/syslog.py -Contributed by Nicolas Untz (after which minor refactoring changes -have been made). -""" + A handler class which sends formatted logging records to a syslog + server. Based on Sam Rushing's syslog module: + http://www.nightmare.com/squirl/python-ext/misc/syslog.py + Contributed by Nicolas Untz (after which minor refactoring changes + have been made). + """ + LOG_EMERG: int LOG_ALERT: int LOG_CRIT: int @@ -339,100 +358,104 @@ have been made). timeout: float | None = None, ) -> None: """ -Initialize a handler. - -If address is specified as a string, a UNIX socket is used. To log to a -local syslogd, "SysLogHandler(address="/dev/log")" can be used. -If facility is not specified, LOG_USER is used. If socktype is -specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific -socket type will be used. For Unix sockets, you can also specify a -socktype of None, in which case socket.SOCK_DGRAM will be used, falling -back to socket.SOCK_STREAM. -""" + Initialize a handler. + + If address is specified as a string, a UNIX socket is used. To log to a + local syslogd, "SysLogHandler(address="/dev/log")" can be used. + If facility is not specified, LOG_USER is used. If socktype is + specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific + socket type will be used. For Unix sockets, you can also specify a + socktype of None, in which case socket.SOCK_DGRAM will be used, falling + back to socket.SOCK_STREAM. + """ else: def __init__( self, address: tuple[str, int] | str = ("localhost", 514), facility: str | int = 1, socktype: SocketKind | None = None ) -> None: """ -Initialize a handler. - -If address is specified as a string, a UNIX socket is used. To log to a -local syslogd, "SysLogHandler(address="/dev/log")" can be used. -If facility is not specified, LOG_USER is used. If socktype is -specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific -socket type will be used. For Unix sockets, you can also specify a -socktype of None, in which case socket.SOCK_DGRAM will be used, falling -back to socket.SOCK_STREAM. -""" + Initialize a handler. + + If address is specified as a string, a UNIX socket is used. To log to a + local syslogd, "SysLogHandler(address="/dev/log")" can be used. + If facility is not specified, LOG_USER is used. 
If socktype is + specified as socket.SOCK_DGRAM or socket.SOCK_STREAM, that specific + socket type will be used. For Unix sockets, you can also specify a + socktype of None, in which case socket.SOCK_DGRAM will be used, falling + back to socket.SOCK_STREAM. + """ if sys.version_info >= (3, 11): def createSocket(self) -> None: """ -Try to create a socket and, if it's not a datagram socket, connect it -to the other end. This method is called during handler initialization, -but it's not regarded as an error if the other end isn't listening yet ---- the method will be called again when emitting an event, -if there is no socket at that point. -""" + Try to create a socket and, if it's not a datagram socket, connect it + to the other end. This method is called during handler initialization, + but it's not regarded as an error if the other end isn't listening yet + --- the method will be called again when emitting an event, + if there is no socket at that point. + """ def encodePriority(self, facility: int | str, priority: int | str) -> int: """ -Encode the facility and priority. You can pass in strings or -integers - if strings are passed, the facility_names and -priority_names mapping dictionaries are used to convert them to -integers. -""" + Encode the facility and priority. You can pass in strings or + integers - if strings are passed, the facility_names and + priority_names mapping dictionaries are used to convert them to + integers. + """ + def mapPriority(self, levelName: str) -> str: """ -Map a logging level name to a key in the priority_names map. -This is useful in two scenarios: when custom levels are being -used, and in the case where you can't do a straightforward -mapping by lowercasing the logging level name because of locale- -specific issues (see SF #1524081). -""" + Map a logging level name to a key in the priority_names map. + This is useful in two scenarios: when custom levels are being + used, and in the case where you can't do a straightforward + mapping by lowercasing the logging level name because of locale- + specific issues (see SF #1524081). + """ class NTEventLogHandler(Handler): """ -A handler class which sends events to the NT Event Log. Adds a -registry entry for the specified application name. If no dllname is -provided, win32service.pyd (which contains some basic message -placeholders) is used. Note that use of these placeholders will make -your event logs big, as the entire message source is held in the log. -If you want slimmer logs, you have to pass in the name of your own DLL -which contains the message definitions you want to use in the event log. -""" + A handler class which sends events to the NT Event Log. Adds a + registry entry for the specified application name. If no dllname is + provided, win32service.pyd (which contains some basic message + placeholders) is used. Note that use of these placeholders will make + your event logs big, as the entire message source is held in the log. + If you want slimmer logs, you have to pass in the name of your own DLL + which contains the message definitions you want to use in the event log. + """ + def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... def getEventCategory(self, record: LogRecord) -> int: """ -Return the event category for the record. + Return the event category for the record. -Override this if you want to specify your own categories. This version -returns 0. -""" + Override this if you want to specify your own categories. This version + returns 0. 
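The Unix-socket and facility handling that SysLogHandler's docstrings describe can be exercised against a local syslog daemon. A sketch assuming /dev/log exists; on other platforms a (host, port) tuple would be used instead.

import logging
from logging.handlers import SysLogHandler

handler = SysLogHandler(address="/dev/log", facility=SysLogHandler.LOG_DAEMON)
log = logging.getLogger("syslog-example")
log.addHandler(handler)
# encodePriority() combines LOG_DAEMON with the mapped level name.
log.warning("sent to the local syslog daemon")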
+ """ # TODO: correct return value? def getEventType(self, record: LogRecord) -> int: """ -Return the event type for the record. + Return the event type for the record. + + Override this if you want to specify your own types. This version does + a mapping using the handler's typemap attribute, which is set up in + __init__() to a dictionary which contains mappings for DEBUG, INFO, + WARNING, ERROR and CRITICAL. If you are using your own levels you will + either need to override this method or place a suitable dictionary in + the handler's typemap attribute. + """ -Override this if you want to specify your own types. This version does -a mapping using the handler's typemap attribute, which is set up in -__init__() to a dictionary which contains mappings for DEBUG, INFO, -WARNING, ERROR and CRITICAL. If you are using your own levels you will -either need to override this method or place a suitable dictionary in -the handler's typemap attribute. -""" def getMessageID(self, record: LogRecord) -> int: """ -Return the message ID for the event record. If you are using your -own messages, you could do this by having the msg passed to the -logger being an ID rather than a formatting string. Then, in here, -you could use a dictionary lookup to get the message ID. This -version returns 1, which is the base message ID in win32service.pyd. -""" + Return the message ID for the event record. If you are using your + own messages, you could do this by having the msg passed to the + logger being an ID rather than a formatting string. Then, in here, + you could use a dictionary lookup to get the message ID. This + version returns 1, which is the base message ID in win32service.pyd. + """ class SMTPHandler(Handler): """ -A handler class which sends an SMTP email for each logging event. -""" + A handler class which sends an SMTP email for each logging event. + """ + mailhost: str # undocumented mailport: int | None # undocumented username: str | None # undocumented @@ -454,82 +477,88 @@ A handler class which sends an SMTP email for each logging event. timeout: float = 5.0, ) -> None: """ -Initialize the handler. - -Initialize the instance with the from and to addresses and subject -line of the email. To specify a non-standard SMTP port, use the -(host, port) tuple format for the mailhost argument. To specify -authentication credentials, supply a (username, password) tuple -for the credentials argument. To specify the use of a secure -protocol (TLS), pass in a tuple for the secure argument. This will -only be used when authentication credentials are supplied. The tuple -will be either an empty tuple, or a single-value tuple with the name -of a keyfile, or a 2-value tuple with the names of the keyfile and -certificate file. (This tuple is passed to the -`ssl.SSLContext.load_cert_chain` method). -A timeout in seconds can be specified for the SMTP connection (the -default is one second). -""" + Initialize the handler. + + Initialize the instance with the from and to addresses and subject + line of the email. To specify a non-standard SMTP port, use the + (host, port) tuple format for the mailhost argument. To specify + authentication credentials, supply a (username, password) tuple + for the credentials argument. To specify the use of a secure + protocol (TLS), pass in a tuple for the secure argument. This will + only be used when authentication credentials are supplied. 
The tuple + will be either an empty tuple, or a single-value tuple with the name + of a keyfile, or a 2-value tuple with the names of the keyfile and + certificate file. (This tuple is passed to the + `ssl.SSLContext.load_cert_chain` method). + A timeout in seconds can be specified for the SMTP connection (the + default is one second). + """ + def getSubject(self, record: LogRecord) -> str: """ -Determine the subject for the email. + Determine the subject for the email. -If you want to specify a subject line which is record-dependent, -override this method. -""" + If you want to specify a subject line which is record-dependent, + override this method. + """ class BufferingHandler(Handler): """ -A handler class which buffers logging records in memory. Whenever each -record is added to the buffer, a check is made to see if the buffer should -be flushed. If it should, then flush() is expected to do what's needed. - """ + A handler class which buffers logging records in memory. Whenever each + record is added to the buffer, a check is made to see if the buffer should + be flushed. If it should, then flush() is expected to do what's needed. + """ + capacity: int # undocumented buffer: list[LogRecord] # undocumented def __init__(self, capacity: int) -> None: """ -Initialize the handler with the buffer size. -""" + Initialize the handler with the buffer size. + """ + def shouldFlush(self, record: LogRecord) -> bool: """ -Should the handler flush its buffer? + Should the handler flush its buffer? -Returns true if the buffer is up to capacity. This method can be -overridden to implement custom flushing strategies. -""" + Returns true if the buffer is up to capacity. This method can be + overridden to implement custom flushing strategies. + """ class MemoryHandler(BufferingHandler): """ -A handler class which buffers logging records in memory, periodically -flushing them to a target handler. Flushing occurs whenever the buffer -is full, or when an event of a certain severity or greater is seen. -""" + A handler class which buffers logging records in memory, periodically + flushing them to a target handler. Flushing occurs whenever the buffer + is full, or when an event of a certain severity or greater is seen. + """ + flushLevel: int # undocumented target: Handler | None # undocumented flushOnClose: bool # undocumented def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: """ -Initialize the handler with the buffer size, the level at which -flushing should occur and an optional target. + Initialize the handler with the buffer size, the level at which + flushing should occur and an optional target. -Note that without a target being set either here or via setTarget(), -a MemoryHandler is no use to anyone! + Note that without a target being set either here or via setTarget(), + a MemoryHandler is no use to anyone! + + The ``flushOnClose`` argument is ``True`` for backward compatibility + reasons - the old behaviour is that when the handler is closed, the + buffer is flushed, even if the flush level hasn't been exceeded nor the + capacity exceeded. To prevent this, set ``flushOnClose`` to ``False``. + """ -The ``flushOnClose`` argument is ``True`` for backward compatibility -reasons - the old behaviour is that when the handler is closed, the -buffer is flushed, even if the flush level hasn't been exceeded nor the -capacity exceeded. To prevent this, set ``flushOnClose`` to ``False``. 
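The buffering behaviour described for BufferingHandler and MemoryHandler amounts to: hold records until capacity is reached or a record at flushLevel or above arrives, then hand everything to the target. A minimal sketch; the file name and capacity are arbitrary.

import logging
from logging.handlers import MemoryHandler

target = logging.FileHandler("buffered.log")
buffered = MemoryHandler(capacity=100, flushLevel=logging.ERROR, target=target)

log = logging.getLogger("memory-example")
log.addHandler(buffered)
log.info("held in memory for now")
log.error("ERROR meets flushLevel, so the whole buffer is flushed to buffered.log")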
-""" def setTarget(self, target: Handler | None) -> None: """ -Set the target handler for this handler. -""" + Set the target handler for this handler. + """ class HTTPHandler(Handler): """ -A class which sends records to a web server, using either GET or -POST semantics. -""" + A class which sends records to a web server, using either GET or + POST semantics. + """ + host: str # undocumented url: str # undocumented method: str # undocumented @@ -546,22 +575,24 @@ POST semantics. context: ssl.SSLContext | None = None, ) -> None: """ -Initialize the instance with the host, the request URL, and the method -("GET" or "POST") -""" + Initialize the instance with the host, the request URL, and the method + ("GET" or "POST") + """ + def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: """ -Default implementation of mapping the log record into a dict -that is sent as the CGI data. Overwrite in your class. -Contributed by Franz Glasner. -""" + Default implementation of mapping the log record into a dict + that is sent as the CGI data. Overwrite in your class. + Contributed by Franz Glasner. + """ + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: # undocumented """ -get a HTTP[S]Connection. + get a HTTP[S]Connection. -Override when a custom connection is required, for example if -there is a proxy. -""" + Override when a custom connection is required, for example if + there is a proxy. + """ @type_check_only class _QueueLike(Protocol[_T]): @@ -570,115 +601,125 @@ class _QueueLike(Protocol[_T]): class QueueHandler(Handler): """ -This handler sends events to a queue. Typically, it would be used together -with a multiprocessing Queue to centralise logging to file in one process -(in a multi-process application), so as to avoid file write contention -between processes. + This handler sends events to a queue. Typically, it would be used together + with a multiprocessing Queue to centralise logging to file in one process + (in a multi-process application), so as to avoid file write contention + between processes. + + This code is new in Python 3.2, but this class can be copy pasted into + user code for use with earlier Python versions. + """ -This code is new in Python 3.2, but this class can be copy pasted into -user code for use with earlier Python versions. -""" queue: _QueueLike[Any] def __init__(self, queue: _QueueLike[Any]) -> None: """ -Initialise an instance, using the passed queue. -""" + Initialise an instance, using the passed queue. + """ + def prepare(self, record: LogRecord) -> Any: """ -Prepare a record for queuing. The object returned by this method is -enqueued. + Prepare a record for queuing. The object returned by this method is + enqueued. -The base implementation formats the record to merge the message and -arguments, and removes unpickleable items from the record in-place. -Specifically, it overwrites the record's `msg` and -`message` attributes with the merged message (obtained by -calling the handler's `format` method), and sets the `args`, -`exc_info` and `exc_text` attributes to None. + The base implementation formats the record to merge the message and + arguments, and removes unpickleable items from the record in-place. + Specifically, it overwrites the record's `msg` and + `message` attributes with the merged message (obtained by + calling the handler's `format` method), and sets the `args`, + `exc_info` and `exc_text` attributes to None. 
+ + You might want to override this method if you want to convert + the record to a dict or JSON string, or send a modified copy + of the record while leaving the original intact. + """ -You might want to override this method if you want to convert -the record to a dict or JSON string, or send a modified copy -of the record while leaving the original intact. -""" def enqueue(self, record: LogRecord) -> None: """ -Enqueue a record. + Enqueue a record. -The base implementation uses put_nowait. You may want to override -this method if you want to use blocking, timeouts or custom queue -implementations. -""" + The base implementation uses put_nowait. You may want to override + this method if you want to use blocking, timeouts or custom queue + implementations. + """ if sys.version_info >= (3, 12): listener: QueueListener | None class QueueListener: """ -This class implements an internal threaded listener which watches for -LogRecords being added to a queue, removes them and passes them to a -list of handlers for processing. -""" + This class implements an internal threaded listener which watches for + LogRecords being added to a queue, removes them and passes them to a + list of handlers for processing. + """ + handlers: tuple[Handler, ...] # undocumented respect_handler_level: bool # undocumented queue: _QueueLike[Any] # undocumented _thread: Thread | None # undocumented def __init__(self, queue: _QueueLike[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: """ -Initialise an instance with the specified queue and -handlers. -""" + Initialise an instance with the specified queue and + handlers. + """ + def dequeue(self, block: bool) -> LogRecord: """ -Dequeue a record and return it, optionally blocking. + Dequeue a record and return it, optionally blocking. + + The base implementation uses get. You may want to override this method + if you want to use timeouts or work with custom queue implementations. + """ -The base implementation uses get. You may want to override this method -if you want to use timeouts or work with custom queue implementations. -""" def prepare(self, record: LogRecord) -> Any: """ -Prepare a record for handling. + Prepare a record for handling. + + This method just returns the passed-in record. You may want to + override this method if you need to do any custom marshalling or + manipulation of the record before passing it to the handlers. + """ -This method just returns the passed-in record. You may want to -override this method if you need to do any custom marshalling or -manipulation of the record before passing it to the handlers. -""" def start(self) -> None: """ -Start the listener. + Start the listener. + + This starts up a background thread to monitor the queue for + LogRecords to process. + """ -This starts up a background thread to monitor the queue for -LogRecords to process. -""" def stop(self) -> None: """ -Stop the listener. + Stop the listener. + + This asks the thread to terminate, and then waits for it to do so. + Note that if you don't call this before your application exits, there + may be some records still left on the queue, which won't be processed. + """ -This asks the thread to terminate, and then waits for it to do so. -Note that if you don't call this before your application exits, there -may be some records still left on the queue, which won't be processed. -""" def enqueue_sentinel(self) -> None: """ -This is used to enqueue the sentinel record. + This is used to enqueue the sentinel record. 
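QueueHandler and QueueListener are designed to be used as a pair: producers enqueue records cheaply, and one listener thread drains the queue into the real handlers. A sketch of that pattern with an in-process queue.

import logging
import queue
from logging.handlers import QueueHandler, QueueListener

q: queue.Queue = queue.Queue()

log = logging.getLogger("queue-example")
log.addHandler(QueueHandler(q))                     # enqueue() uses put_nowait()

listener = QueueListener(q, logging.StreamHandler(), respect_handler_level=True)
listener.start()                                    # background thread: dequeue() -> handle()
log.warning("handled on the listener thread")
listener.stop()                                     # enqueues the sentinel, then joins the thread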
-The base implementation uses put_nowait. You may want to override this -method if you want to use timeouts or work with custom queue -implementations. -""" - def handle(self, record: LogRecord) -> None: + The base implementation uses put_nowait. You may want to override this + method if you want to use timeouts or work with custom queue + implementations. """ -Handle a record. -This just loops through the handlers offering them the record -to handle. -""" + def handle(self, record: LogRecord) -> None: + """ + Handle a record. + This just loops through the handlers offering them the record + to handle. + """ if sys.version_info >= (3, 14): def __enter__(self) -> Self: """ -For use as a context manager. Starts the listener. -""" + For use as a context manager. Starts the listener. + """ + def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None ) -> None: """ -For use as a context manager. Stops the listener. -""" + For use as a context manager. Stops the listener. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi index 0a76e80017d86..65b1f1898f251 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/lzma.pyi @@ -7,6 +7,7 @@ one-shot (de)compression. These classes and functions support both the XZ and legacy LZMA container formats, as well as raw compressed data streams. """ + import sys from _lzma import ( CHECK_CRC32 as CHECK_CRC32, @@ -100,12 +101,13 @@ _PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] class LZMAFile(BaseStream, IO[bytes]): # type: ignore[misc] # incompatible definitions of writelines in the base classes """A file object providing transparent LZMA (de)compression. -An LZMAFile can act as a wrapper for an existing file object, or -refer directly to a named file on disk. + An LZMAFile can act as a wrapper for an existing file object, or + refer directly to a named file on disk. + + Note that LZMAFile provides a *binary* file interface - data read + is returned as bytes, and data to be written must be given as bytes. + """ -Note that LZMAFile provides a *binary* file interface - data read -is returned as bytes, and data to be written must be given as bytes. -""" def __init__( self, filename: _PathOrFile | None = None, @@ -118,96 +120,102 @@ is returned as bytes, and data to be written must be given as bytes. ) -> None: """Open an LZMA-compressed file in binary mode. -filename can be either an actual file name (given as a str, -bytes, or PathLike object), in which case the named file is -opened, or it can be an existing file object to read from or -write to. - -mode can be "r" for reading (default), "w" for (over)writing, -"x" for creating exclusively, or "a" for appending. These can -equivalently be given as "rb", "wb", "xb" and "ab" respectively. - -format specifies the container format to use for the file. -If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the -default is FORMAT_XZ. - -check specifies the integrity check to use. This argument can -only be used when opening a file for writing. For FORMAT_XZ, -the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not -support integrity checks - for these formats, check must be -omitted, or be CHECK_NONE. - -When opening a file for reading, the *preset* argument is not -meaningful, and should be omitted. The *filters* argument should -also be omitted, except when format is FORMAT_RAW (in which case -it is required). 
- -When opening a file for writing, the settings used by the -compressor can be specified either as a preset compression -level (with the *preset* argument), or in detail as a custom -filter chain (with the *filters* argument). For FORMAT_XZ and -FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset -level. For FORMAT_RAW, the caller must always specify a filter -chain; the raw compressor does not support preset compression -levels. - -preset (if provided) should be an integer in the range 0-9, -optionally OR-ed with the constant PRESET_EXTREME. - -filters (if provided) should be a sequence of dicts. Each dict -should have an entry for "id" indicating ID of the filter, plus -additional entries for options to the filter. -""" + filename can be either an actual file name (given as a str, + bytes, or PathLike object), in which case the named file is + opened, or it can be an existing file object to read from or + write to. + + mode can be "r" for reading (default), "w" for (over)writing, + "x" for creating exclusively, or "a" for appending. These can + equivalently be given as "rb", "wb", "xb" and "ab" respectively. + + format specifies the container format to use for the file. + If mode is "r", this defaults to FORMAT_AUTO. Otherwise, the + default is FORMAT_XZ. + + check specifies the integrity check to use. This argument can + only be used when opening a file for writing. For FORMAT_XZ, + the default is CHECK_CRC64. FORMAT_ALONE and FORMAT_RAW do not + support integrity checks - for these formats, check must be + omitted, or be CHECK_NONE. + + When opening a file for reading, the *preset* argument is not + meaningful, and should be omitted. The *filters* argument should + also be omitted, except when format is FORMAT_RAW (in which case + it is required). + + When opening a file for writing, the settings used by the + compressor can be specified either as a preset compression + level (with the *preset* argument), or in detail as a custom + filter chain (with the *filters* argument). For FORMAT_XZ and + FORMAT_ALONE, the default is to use the PRESET_DEFAULT preset + level. For FORMAT_RAW, the caller must always specify a filter + chain; the raw compressor does not support preset compression + levels. + + preset (if provided) should be an integer in the range 0-9, + optionally OR-ed with the constant PRESET_EXTREME. + + filters (if provided) should be a sequence of dicts. Each dict + should have an entry for "id" indicating ID of the filter, plus + additional entries for options to the filter. + """ + def __enter__(self) -> Self: ... def peek(self, size: int = -1) -> bytes: """Return buffered data without advancing the file position. -Always returns at least one byte of data, unless at EOF. -The exact number of bytes returned is unspecified. -""" + Always returns at least one byte of data, unless at EOF. + The exact number of bytes returned is unspecified. + """ + def read(self, size: int | None = -1) -> bytes: """Read up to size uncompressed bytes from the file. -If size is negative or omitted, read until EOF is reached. -Returns b"" if the file is already at EOF. -""" + If size is negative or omitted, read until EOF is reached. + Returns b"" if the file is already at EOF. + """ + def read1(self, size: int = -1) -> bytes: """Read up to size uncompressed bytes, while trying to avoid -making multiple reads from the underlying stream. Reads up to a -buffer's worth of data if size is negative. + making multiple reads from the underlying stream. 
Reads up to a + buffer's worth of data if size is negative. + + Returns b"" if the file is at EOF. + """ -Returns b"" if the file is at EOF. -""" def readline(self, size: int | None = -1) -> bytes: """Read a line of uncompressed bytes from the file. -The terminating newline (if present) is retained. If size is -non-negative, no more than size bytes will be read (in which -case the line may be incomplete). Returns b'' if already at EOF. -""" + The terminating newline (if present) is retained. If size is + non-negative, no more than size bytes will be read (in which + case the line may be incomplete). Returns b'' if already at EOF. + """ + def write(self, data: ReadableBuffer) -> int: """Write a bytes object to the file. -Returns the number of uncompressed bytes written, which is -always the length of data in bytes. Note that due to buffering, -the file on disk may not reflect the data written until close() -is called. -""" + Returns the number of uncompressed bytes written, which is + always the length of data in bytes. Note that due to buffering, + the file on disk may not reflect the data written until close() + is called. + """ + def seek(self, offset: int, whence: int = 0) -> int: """Change the file position. -The new position is specified by offset, relative to the -position indicated by whence. Possible values for whence are: + The new position is specified by offset, relative to the + position indicated by whence. Possible values for whence are: - 0: start of stream (default): offset must not be negative - 1: current stream position - 2: end of stream; offset must not be positive + 0: start of stream (default): offset must not be negative + 1: current stream position + 2: end of stream; offset must not be positive -Returns the new file position. + Returns the new file position. -Note that seeking is emulated, so depending on the parameters, -this operation may be extremely slow. -""" + Note that seeking is emulated, so depending on the parameters, + this operation may be extremely slow. + """ @overload def open( @@ -224,27 +232,28 @@ def open( ) -> LZMAFile: """Open an LZMA-compressed file in binary or text mode. -filename can be either an actual file name (given as a str, bytes, -or PathLike object), in which case the named file is opened, or it -can be an existing file object to read from or write to. + filename can be either an actual file name (given as a str, bytes, + or PathLike object), in which case the named file is opened, or it + can be an existing file object to read from or write to. -The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb", -"a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text -mode. + The mode argument can be "r", "rb" (default), "w", "wb", "x", "xb", + "a", or "ab" for binary mode, or "rt", "wt", "xt", or "at" for text + mode. -The format, check, preset and filters arguments specify the -compression settings, as for LZMACompressor, LZMADecompressor and -LZMAFile. + The format, check, preset and filters arguments specify the + compression settings, as for LZMACompressor, LZMADecompressor and + LZMAFile. -For binary mode, this function is equivalent to the LZMAFile -constructor: LZMAFile(filename, mode, ...). In this case, the -encoding, errors and newline arguments must not be provided. + For binary mode, this function is equivalent to the LZMAFile + constructor: LZMAFile(filename, mode, ...). In this case, the + encoding, errors and newline arguments must not be provided. 
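The mode handling that lzma.open() describes (binary modes returning an LZMAFile, text modes adding an io.TextIOWrapper) round-trips like this. A small sketch; the file name is a placeholder.

import lzma

with lzma.open("data.xz", "wt", encoding="utf-8") as f:   # text mode: TextIOWrapper over LZMAFile
    f.write("hello\n")

with lzma.open("data.xz", "rb") as f:                      # binary mode: LZMAFile directly
    assert f.read() == b"hello\n"                           # read() returns uncompressed bytes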
-For text mode, an LZMAFile object is created, and wrapped in an -io.TextIOWrapper instance with the specified encoding, error -handling behavior, and line ending(s). + For text mode, an LZMAFile object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error + handling behavior, and line ending(s). + + """ -""" @overload def open( filename: _PathOrFile, @@ -302,18 +311,17 @@ def compress( ) -> bytes: """Compress a block of data. -Refer to LZMACompressor's docstring for a description of the -optional arguments *format*, *check*, *preset* and *filters*. + Refer to LZMACompressor's docstring for a description of the + optional arguments *format*, *check*, *preset* and *filters*. -For incremental compression, use an LZMACompressor instead. -""" -def decompress( - data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None -) -> bytes: + For incremental compression, use an LZMACompressor instead. + """ + +def decompress(data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None) -> bytes: """Decompress a block of data. -Refer to LZMADecompressor's docstring for a description of the -optional arguments *format*, *check* and *filters*. + Refer to LZMADecompressor's docstring for a description of the + optional arguments *format*, *check* and *filters*. -For incremental decompression, use an LZMADecompressor instead. -""" + For incremental decompression, use an LZMADecompressor instead. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi index 0cd45a9c989a5..01570c1c2ed67 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailbox.pyi @@ -1,5 +1,5 @@ -"""Read/write support for Maildir, mbox, MH, Babyl, and MMDF mailboxes. -""" +"""Read/write support for Maildir, mbox, MH, Babyl, and MMDF mailboxes.""" + import email.message import io import sys @@ -49,507 +49,489 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): - """A group of messages in a particular place. -""" + """A group of messages in a particular place.""" + _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: - """Initialize a Mailbox instance. -""" + """Initialize a Mailbox instance.""" + @overload def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: - """Add message and return assigned key. -""" + """Add message and return assigned key.""" + @abstractmethod def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist. -""" + """Remove the keyed message; raise KeyError if it doesn't exist.""" + def __delitem__(self, key: str) -> None: ... def discard(self, key: str) -> None: - """If the keyed message exists, remove it. -""" + """If the keyed message exists, remove it.""" + @abstractmethod def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist. -""" + """Replace the keyed message; raise KeyError if it doesn't exist.""" + @overload def get(self, key: str, default: None = None) -> _MessageT | None: - """Return the keyed message, or default if it doesn't exist. 
-""" + """Return the keyed message, or default if it doesn't exist.""" + @overload def get(self, key: str, default: _T) -> _MessageT | _T: ... def __getitem__(self, key: str) -> _MessageT: - """Return the keyed message; raise KeyError if it doesn't exist. -""" + """Return the keyed message; raise KeyError if it doesn't exist.""" + @abstractmethod def get_message(self, key: str) -> _MessageT: - """Return a Message representation or raise a KeyError. -""" + """Return a Message representation or raise a KeyError.""" + def get_string(self, key: str) -> str: """Return a string representation or raise a KeyError. -Uses email.message.Message to create a 7bit clean string -representation of the message. -""" + Uses email.message.Message to create a 7bit clean string + representation of the message. + """ + @abstractmethod def get_bytes(self, key: str) -> bytes: - """Return a byte string representation or raise a KeyError. -""" + """Return a byte string representation or raise a KeyError.""" # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here @abstractmethod def get_file(self, key: str) -> Any: - """Return a file-like representation or raise a KeyError. -""" + """Return a file-like representation or raise a KeyError.""" + @abstractmethod def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys. -""" + """Return an iterator over keys.""" + def keys(self) -> list[str]: - """Return a list of keys. -""" + """Return a list of keys.""" + def itervalues(self) -> Iterator[_MessageT]: - """Return an iterator over all messages. -""" + """Return an iterator over all messages.""" + def __iter__(self) -> Iterator[_MessageT]: ... def values(self) -> list[_MessageT]: - """Return a list of messages. Memory intensive. -""" + """Return a list of messages. Memory intensive.""" + def iteritems(self) -> Iterator[tuple[str, _MessageT]]: - """Return an iterator over (key, message) tuples. -""" + """Return an iterator over (key, message) tuples.""" + def items(self) -> list[tuple[str, _MessageT]]: - """Return a list of (key, message) tuples. Memory intensive. -""" + """Return a list of (key, message) tuples. Memory intensive.""" + @abstractmethod def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise. -""" + """Return True if the keyed message exists, False otherwise.""" + @abstractmethod def __len__(self) -> int: - """Return a count of messages in the mailbox. -""" + """Return a count of messages in the mailbox.""" + def clear(self) -> None: - """Delete all messages. -""" + """Delete all messages.""" + @overload def pop(self, key: str, default: None = None) -> _MessageT | None: - """Delete the keyed message and return it, or default. -""" + """Delete the keyed message and return it, or default.""" + @overload def pop(self, key: str, default: _T) -> _MessageT | _T: ... def popitem(self) -> tuple[str, _MessageT]: - """Delete an arbitrary (key, message) pair and return it. -""" + """Delete an arbitrary (key, message) pair and return it.""" + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: - """Change the messages that correspond to certain keys. -""" + """Change the messages that correspond to certain keys.""" + @abstractmethod def flush(self) -> None: - """Write any pending changes to the disk. -""" + """Write any pending changes to the disk.""" + @abstractmethod def lock(self) -> None: - """Lock the mailbox. 
-""" + """Lock the mailbox.""" + @abstractmethod def unlock(self) -> None: - """Unlock the mailbox if it is locked. -""" + """Unlock the mailbox if it is locked.""" + @abstractmethod def close(self) -> None: - """Flush and close the mailbox. -""" + """Flush and close the mailbox.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class Maildir(Mailbox[MaildirMessage]): - """A qmail-style Maildir mailbox. -""" + """A qmail-style Maildir mailbox.""" + colon: str - def __init__( - self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True - ) -> None: - """Initialize a Maildir instance. -""" + def __init__(self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True) -> None: + """Initialize a Maildir instance.""" + def add(self, message: _MessageData) -> str: - """Add message and return assigned key. -""" + """Add message and return assigned key.""" + def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist. -""" + """Remove the keyed message; raise KeyError if it doesn't exist.""" + def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist. -""" + """Replace the keyed message; raise KeyError if it doesn't exist.""" + def get_message(self, key: str) -> MaildirMessage: - """Return a Message representation or raise a KeyError. -""" + """Return a Message representation or raise a KeyError.""" + def get_bytes(self, key: str) -> bytes: - """Return a bytes representation or raise a KeyError. -""" + """Return a bytes representation or raise a KeyError.""" + def get_file(self, key: str) -> _ProxyFile[bytes]: - """Return a file-like representation or raise a KeyError. -""" + """Return a file-like representation or raise a KeyError.""" if sys.version_info >= (3, 13): def get_info(self, key: str) -> str: - """Get the keyed message's "info" as a string. -""" + """Get the keyed message's "info" as a string.""" + def set_info(self, key: str, info: str) -> None: - """Set the keyed message's "info" string. -""" + """Set the keyed message's "info" string.""" + def get_flags(self, key: str) -> str: - """Return as a string the standard flags that are set on the keyed message. -""" + """Return as a string the standard flags that are set on the keyed message.""" + def set_flags(self, key: str, flags: str) -> None: - """Set the given flags and unset all others on the keyed message. -""" + """Set the given flags and unset all others on the keyed message.""" + def add_flag(self, key: str, flag: str) -> None: - """Set the given flag(s) without changing others on the keyed message. -""" + """Set the given flag(s) without changing others on the keyed message.""" + def remove_flag(self, key: str, flag: str) -> None: - """Unset the given string flag(s) without changing others on the keyed message. -""" + """Unset the given string flag(s) without changing others on the keyed message.""" def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys. -""" + """Return an iterator over keys.""" + def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise. 
-""" + """Return True if the keyed message exists, False otherwise.""" + def __len__(self) -> int: - """Return a count of messages in the mailbox. -""" + """Return a count of messages in the mailbox.""" + def flush(self) -> None: - """Write any pending changes to disk. -""" + """Write any pending changes to disk.""" + def lock(self) -> None: - """Lock the mailbox. -""" + """Lock the mailbox.""" + def unlock(self) -> None: - """Unlock the mailbox if it is locked. -""" + """Unlock the mailbox if it is locked.""" + def close(self) -> None: - """Flush and close the mailbox. -""" + """Flush and close the mailbox.""" + def list_folders(self) -> list[str]: - """Return a list of folder names. -""" + """Return a list of folder names.""" + def get_folder(self, folder: str) -> Maildir: - """Return a Maildir instance for the named folder. -""" + """Return a Maildir instance for the named folder.""" + def add_folder(self, folder: str) -> Maildir: - """Create a folder and return a Maildir instance representing it. -""" + """Create a folder and return a Maildir instance representing it.""" + def remove_folder(self, folder: str) -> None: - """Delete the named folder, which must be empty. -""" + """Delete the named folder, which must be empty.""" + def clean(self) -> None: - """Delete old files in "tmp". -""" + """Delete old files in "tmp".""" + def next(self) -> str | None: - """Return the next message in a one-time iteration. -""" + """Return the next message in a one-time iteration.""" class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): - """A single-file mailbox. -""" + """A single-file mailbox.""" + def add(self, message: _MessageData) -> str: - """Add message and return assigned key. -""" + """Add message and return assigned key.""" + def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist. -""" + """Remove the keyed message; raise KeyError if it doesn't exist.""" + def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist. -""" + """Replace the keyed message; raise KeyError if it doesn't exist.""" + def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys. -""" + """Return an iterator over keys.""" + def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise. -""" + """Return True if the keyed message exists, False otherwise.""" + def __len__(self) -> int: - """Return a count of messages in the mailbox. -""" + """Return a count of messages in the mailbox.""" + def lock(self) -> None: - """Lock the mailbox. -""" + """Lock the mailbox.""" + def unlock(self) -> None: - """Unlock the mailbox if it is locked. -""" + """Unlock the mailbox if it is locked.""" + def flush(self) -> None: - """Write any pending changes to disk. -""" + """Write any pending changes to disk.""" + def close(self) -> None: - """Flush and close the mailbox. -""" + """Flush and close the mailbox.""" class _mboxMMDF(_singlefileMailbox[_MessageT]): - """An mbox or MMDF mailbox. -""" + """An mbox or MMDF mailbox.""" + def get_message(self, key: str) -> _MessageT: - """Return a Message representation or raise a KeyError. -""" + """Return a Message representation or raise a KeyError.""" + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: - """Return a file-like representation or raise a KeyError. 
-""" + """Return a file-like representation or raise a KeyError.""" + def get_bytes(self, key: str, from_: bool = False) -> bytes: - """Return a string representation or raise a KeyError. -""" + """Return a string representation or raise a KeyError.""" + def get_string(self, key: str, from_: bool = False) -> str: - """Return a string representation or raise a KeyError. -""" + """Return a string representation or raise a KeyError.""" class mbox(_mboxMMDF[mboxMessage]): - """A classic mbox mailbox. -""" + """A classic mbox mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: - """Initialize an mbox mailbox. -""" + """Initialize an mbox mailbox.""" class MMDF(_mboxMMDF[MMDFMessage]): - """An MMDF mailbox. -""" + """An MMDF mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: - """Initialize an MMDF mailbox. -""" + """Initialize an MMDF mailbox.""" class MH(Mailbox[MHMessage]): - """An MH mailbox. -""" + """An MH mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: - """Initialize an MH instance. -""" + """Initialize an MH instance.""" + def add(self, message: _MessageData) -> str: - """Add message and return assigned key. -""" + """Add message and return assigned key.""" + def remove(self, key: str) -> None: - """Remove the keyed message; raise KeyError if it doesn't exist. -""" + """Remove the keyed message; raise KeyError if it doesn't exist.""" + def __setitem__(self, key: str, message: _MessageData) -> None: - """Replace the keyed message; raise KeyError if it doesn't exist. -""" + """Replace the keyed message; raise KeyError if it doesn't exist.""" + def get_message(self, key: str) -> MHMessage: - """Return a Message representation or raise a KeyError. -""" + """Return a Message representation or raise a KeyError.""" + def get_bytes(self, key: str) -> bytes: - """Return a bytes representation or raise a KeyError. -""" + """Return a bytes representation or raise a KeyError.""" + def get_file(self, key: str) -> _ProxyFile[bytes]: - """Return a file-like representation or raise a KeyError. -""" + """Return a file-like representation or raise a KeyError.""" + def iterkeys(self) -> Iterator[str]: - """Return an iterator over keys. -""" + """Return an iterator over keys.""" + def __contains__(self, key: str) -> bool: - """Return True if the keyed message exists, False otherwise. -""" + """Return True if the keyed message exists, False otherwise.""" + def __len__(self) -> int: - """Return a count of messages in the mailbox. -""" + """Return a count of messages in the mailbox.""" + def flush(self) -> None: - """Write any pending changes to the disk. -""" + """Write any pending changes to the disk.""" + def lock(self) -> None: - """Lock the mailbox. -""" + """Lock the mailbox.""" + def unlock(self) -> None: - """Unlock the mailbox if it is locked. -""" + """Unlock the mailbox if it is locked.""" + def close(self) -> None: - """Flush and close the mailbox. -""" + """Flush and close the mailbox.""" + def list_folders(self) -> list[str]: - """Return a list of folder names. -""" + """Return a list of folder names.""" + def get_folder(self, folder: StrPath) -> MH: - """Return an MH instance for the named folder. 
-""" + """Return an MH instance for the named folder.""" + def add_folder(self, folder: StrPath) -> MH: - """Create a folder and return an MH instance representing it. -""" + """Create a folder and return an MH instance representing it.""" + def remove_folder(self, folder: StrPath) -> None: - """Delete the named folder, which must be empty. -""" + """Delete the named folder, which must be empty.""" + def get_sequences(self) -> dict[str, list[int]]: - """Return a name-to-key-list dictionary to define each sequence. -""" + """Return a name-to-key-list dictionary to define each sequence.""" + def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: - """Set sequences using the given name-to-key-list dictionary. -""" + """Set sequences using the given name-to-key-list dictionary.""" + def pack(self) -> None: - """Re-name messages to eliminate numbering gaps. Invalidates keys. -""" + """Re-name messages to eliminate numbering gaps. Invalidates keys.""" class Babyl(_singlefileMailbox[BabylMessage]): - """An Rmail-style Babyl mailbox. -""" + """An Rmail-style Babyl mailbox.""" + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: - """Initialize a Babyl mailbox. -""" + """Initialize a Babyl mailbox.""" + def get_message(self, key: str) -> BabylMessage: - """Return a Message representation or raise a KeyError. -""" + """Return a Message representation or raise a KeyError.""" + def get_bytes(self, key: str) -> bytes: - """Return a string representation or raise a KeyError. -""" + """Return a string representation or raise a KeyError.""" + def get_file(self, key: str) -> IO[bytes]: - """Return a file-like representation or raise a KeyError. -""" + """Return a file-like representation or raise a KeyError.""" + def get_labels(self) -> list[str]: - """Return a list of user-defined labels in the mailbox. -""" + """Return a list of user-defined labels in the mailbox.""" class Message(email.message.Message): - """Message with mailbox-format-specific properties. -""" + """Message with mailbox-format-specific properties.""" + def __init__(self, message: _MessageData | None = None) -> None: - """Initialize a Message instance. -""" + """Initialize a Message instance.""" class MaildirMessage(Message): - """Message with Maildir-specific properties. -""" + """Message with Maildir-specific properties.""" + def get_subdir(self) -> str: - """Return 'new' or 'cur'. -""" + """Return 'new' or 'cur'.""" + def set_subdir(self, subdir: Literal["new", "cur"]) -> None: - """Set subdir to 'new' or 'cur'. -""" + """Set subdir to 'new' or 'cur'.""" + def get_flags(self) -> str: - """Return as a string the flags that are set. -""" + """Return as a string the flags that are set.""" + def set_flags(self, flags: Iterable[str]) -> None: - """Set the given flags and unset all others. -""" + """Set the given flags and unset all others.""" + def add_flag(self, flag: str) -> None: - """Set the given flag(s) without changing others. -""" + """Set the given flag(s) without changing others.""" + def remove_flag(self, flag: str) -> None: - """Unset the given string flag(s) without changing others. -""" + """Unset the given string flag(s) without changing others.""" + def get_date(self) -> int: - """Return delivery date of message, in seconds since the epoch. -""" + """Return delivery date of message, in seconds since the epoch.""" + def set_date(self, date: float) -> None: - """Set delivery date of message, in seconds since the epoch. 
-""" + """Set delivery date of message, in seconds since the epoch.""" + def get_info(self) -> str: - """Get the message's "info" as a string. -""" + """Get the message's "info" as a string.""" + def set_info(self, info: str) -> None: - """Set the message's "info" string. -""" + """Set the message's "info" string.""" class _mboxMMDFMessage(Message): - """Message with mbox- or MMDF-specific properties. -""" + """Message with mbox- or MMDF-specific properties.""" + def get_from(self) -> str: - """Return contents of "From " line. -""" + """Return contents of "From " line.""" + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: - """Set "From " line, formatting and appending time_ if specified. -""" + """Set "From " line, formatting and appending time_ if specified.""" + def get_flags(self) -> str: - """Return as a string the flags that are set. -""" + """Return as a string the flags that are set.""" + def set_flags(self, flags: Iterable[str]) -> None: - """Set the given flags and unset all others. -""" + """Set the given flags and unset all others.""" + def add_flag(self, flag: str) -> None: - """Set the given flag(s) without changing others. -""" + """Set the given flag(s) without changing others.""" + def remove_flag(self, flag: str) -> None: - """Unset the given string flag(s) without changing others. -""" + """Unset the given string flag(s) without changing others.""" class mboxMessage(_mboxMMDFMessage): - """Message with mbox-specific properties. -""" + """Message with mbox-specific properties.""" class MHMessage(Message): - """Message with MH-specific properties. -""" + """Message with MH-specific properties.""" + def get_sequences(self) -> list[str]: - """Return a list of sequences that include the message. -""" + """Return a list of sequences that include the message.""" + def set_sequences(self, sequences: Iterable[str]) -> None: - """Set the list of sequences that include the message. -""" + """Set the list of sequences that include the message.""" + def add_sequence(self, sequence: str) -> None: - """Add sequence to list of sequences including the message. -""" + """Add sequence to list of sequences including the message.""" + def remove_sequence(self, sequence: str) -> None: - """Remove sequence from the list of sequences including the message. -""" + """Remove sequence from the list of sequences including the message.""" class BabylMessage(Message): - """Message with Babyl-specific properties. -""" + """Message with Babyl-specific properties.""" + def get_labels(self) -> list[str]: - """Return a list of labels on the message. -""" + """Return a list of labels on the message.""" + def set_labels(self, labels: Iterable[str]) -> None: - """Set the list of labels on the message. -""" + """Set the list of labels on the message.""" + def add_label(self, label: str) -> None: - """Add label to list of labels on the message. -""" + """Add label to list of labels on the message.""" + def remove_label(self, label: str) -> None: - """Remove label from the list of labels on the message. -""" + """Remove label from the list of labels on the message.""" + def get_visible(self) -> Message: - """Return a Message representation of visible headers. -""" + """Return a Message representation of visible headers.""" + def set_visible(self, visible: _MessageData) -> None: - """Set the Message representation of visible headers. 
-""" + """Set the Message representation of visible headers.""" + def update_visible(self) -> None: - """Update and/or sensibly generate a set of visible headers. -""" + """Update and/or sensibly generate a set of visible headers.""" class MMDFMessage(_mboxMMDFMessage): - """Message with MMDF-specific properties. -""" + """Message with MMDF-specific properties.""" class _ProxyFile(Generic[AnyStr]): - """A read-only wrapper of a file. -""" + """A read-only wrapper of a file.""" + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: - """Initialize a _ProxyFile. -""" + """Initialize a _ProxyFile.""" + def read(self, size: int | None = None) -> AnyStr: - """Read bytes. -""" + """Read bytes.""" + def read1(self, size: int | None = None) -> AnyStr: - """Read bytes. -""" + """Read bytes.""" + def readline(self, size: int | None = None) -> AnyStr: - """Read a line. -""" + """Read a line.""" + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: - """Read multiple lines. -""" + """Read multiple lines.""" + def __iter__(self) -> Iterator[AnyStr]: - """Iterate over lines. -""" + """Iterate over lines.""" + def tell(self) -> int: - """Return the position. -""" + """Return the position.""" + def seek(self, offset: int, whence: int = 0) -> None: - """Change position. -""" + """Change position.""" + def close(self) -> None: - """Close the file. -""" + """Close the file.""" + def __enter__(self) -> Self: - """Context management protocol support. -""" + """Context management protocol support.""" + def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -560,28 +542,26 @@ class _ProxyFile(Generic[AnyStr]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class _PartialFile(_ProxyFile[AnyStr]): - """A read-only wrapper of part of a file. -""" + """A read-only wrapper of part of a file.""" + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: - """Initialize a _PartialFile. -""" + """Initialize a _PartialFile.""" class Error(Exception): - """Raised for module-specific errors. -""" + """Raised for module-specific errors.""" + class NoSuchMailboxError(Error): - """The specified mailbox does not exist and won't be created. -""" + """The specified mailbox does not exist and won't be created.""" + class NotEmptyError(Error): - """The specified mailbox is not empty and deletion was requested. -""" + """The specified mailbox is not empty and deletion was requested.""" + class ExternalClashError(Error): - """Another process caused an action to fail. -""" + """Another process caused an action to fail.""" + class FormatError(Error): - """A file appears to have an invalid format. -""" + """A file appears to have an invalid format.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi index 685f2dacba371..6848c9929c9c2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mailcap.pyi @@ -1,5 +1,5 @@ -"""Mailcap file handling. See RFC 1524. -""" +"""Mailcap file handling. 
See RFC 1524.""" + from collections.abc import Mapping, Sequence from typing_extensions import TypeAlias @@ -18,6 +18,7 @@ def findmatch( entry to use. """ + def getcaps() -> dict[str, list[_Cap]]: """Return a dictionary containing the mailcap database. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi index 49f5f12e4fbdf..760ed43735f69 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/marshal.pyi @@ -26,6 +26,7 @@ load() -- read value from a file dumps() -- marshal value as a bytes object loads() -- read value from a bytes-like object """ + import builtins import sys import types @@ -60,134 +61,139 @@ if sys.version_info >= (3, 14): def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 5, /, *, allow_code: bool = True) -> None: """Write the value on the open file. - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. - allow_code - Allow to write code objects. - -If the value has (or contains an object that has) an unsupported type, a -ValueError exception is raised - but garbage data will also be written -to the file. The object will not be properly read back by load(). -""" + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + allow_code + Allow to write code objects. + + If the value has (or contains an object that has) an unsupported type, a + ValueError exception is raised - but garbage data will also be written + to the file. The object will not be properly read back by load(). + """ + def dumps(value: _Marshallable, version: int = 5, /, *, allow_code: bool = True) -> bytes: """Return the bytes object that would be written to a file by dump(value, file). - value - Must be a supported type. - version - Indicates the data format that dumps should use. - allow_code - Allow to write code objects. + value + Must be a supported type. + version + Indicates the data format that dumps should use. + allow_code + Allow to write code objects. -Raise a ValueError exception if value has (or contains an object that has) an -unsupported type. -""" + Raise a ValueError exception if value has (or contains an object that has) an + unsupported type. + """ elif sys.version_info >= (3, 13): def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /, *, allow_code: bool = True) -> None: """Write the value on the open file. - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. - allow_code - Allow to write code objects. - -If the value has (or contains an object that has) an unsupported type, a -ValueError exception is raised - but garbage data will also be written -to the file. The object will not be properly read back by load(). -""" + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + allow_code + Allow to write code objects. + + If the value has (or contains an object that has) an unsupported type, a + ValueError exception is raised - but garbage data will also be written + to the file. The object will not be properly read back by load(). 
+ """ + def dumps(value: _Marshallable, version: int = 4, /, *, allow_code: bool = True) -> bytes: """Return the bytes object that would be written to a file by dump(value, file). - value - Must be a supported type. - version - Indicates the data format that dumps should use. - allow_code - Allow to write code objects. + value + Must be a supported type. + version + Indicates the data format that dumps should use. + allow_code + Allow to write code objects. -Raise a ValueError exception if value has (or contains an object that has) an -unsupported type. -""" + Raise a ValueError exception if value has (or contains an object that has) an + unsupported type. + """ else: def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: """Write the value on the open file. - value - Must be a supported type. - file - Must be a writeable binary file. - version - Indicates the data format that dump should use. + value + Must be a supported type. + file + Must be a writeable binary file. + version + Indicates the data format that dump should use. + + If the value has (or contains an object that has) an unsupported type, a + ValueError exception is raised - but garbage data will also be written + to the file. The object will not be properly read back by load(). + """ -If the value has (or contains an object that has) an unsupported type, a -ValueError exception is raised - but garbage data will also be written -to the file. The object will not be properly read back by load(). -""" def dumps(value: _Marshallable, version: int = 4, /) -> bytes: """Return the bytes object that would be written to a file by dump(value, file). - value - Must be a supported type. - version - Indicates the data format that dumps should use. + value + Must be a supported type. + version + Indicates the data format that dumps should use. -Raise a ValueError exception if value has (or contains an object that has) an -unsupported type. -""" + Raise a ValueError exception if value has (or contains an object that has) an + unsupported type. + """ if sys.version_info >= (3, 13): def load(file: SupportsRead[bytes], /, *, allow_code: bool = True) -> Any: """Read one value from the open file and return it. - file - Must be readable binary file. - allow_code - Allow to load code objects. + file + Must be readable binary file. + allow_code + Allow to load code objects. -If no valid value is read (e.g. because the data has a different Python -version's incompatible marshal format), raise EOFError, ValueError or -TypeError. + If no valid value is read (e.g. because the data has a different Python + version's incompatible marshal format), raise EOFError, ValueError or + TypeError. + + Note: If an object containing an unsupported type was marshalled with + dump(), load() will substitute None for the unmarshallable type. + """ -Note: If an object containing an unsupported type was marshalled with -dump(), load() will substitute None for the unmarshallable type. -""" def loads(bytes: ReadableBuffer, /, *, allow_code: bool = True) -> Any: """Convert the bytes-like object to a value. - allow_code - Allow to load code objects. + allow_code + Allow to load code objects. -If no valid value is found, raise EOFError, ValueError or TypeError. Extra -bytes in the input are ignored. -""" + If no valid value is found, raise EOFError, ValueError or TypeError. Extra + bytes in the input are ignored. + """ else: def load(file: SupportsRead[bytes], /) -> Any: """Read one value from the open file and return it. 
- file - Must be readable binary file. + file + Must be readable binary file. -If no valid value is read (e.g. because the data has a different Python -version's incompatible marshal format), raise EOFError, ValueError or -TypeError. + If no valid value is read (e.g. because the data has a different Python + version's incompatible marshal format), raise EOFError, ValueError or + TypeError. + + Note: If an object containing an unsupported type was marshalled with + dump(), load() will substitute None for the unmarshallable type. + """ -Note: If an object containing an unsupported type was marshalled with -dump(), load() will substitute None for the unmarshallable type. -""" def loads(bytes: ReadableBuffer, /) -> Any: """Convert the bytes-like object to a value. -If no valid value is found, raise EOFError, ValueError or TypeError. Extra -bytes in the input are ignored. -""" + If no valid value is found, raise EOFError, ValueError or TypeError. Extra + bytes in the input are ignored. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi index 1b9df2bcb830f..e8ee56217c8c5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/math.pyi @@ -1,6 +1,7 @@ """This module provides access to the mathematical functions defined by the C standard. """ + import sys from _typeshed import SupportsMul, SupportsRMul from collections.abc import Iterable @@ -21,37 +22,39 @@ tau: Final[float] def acos(x: _SupportsFloatOrIndex, /) -> float: """Return the arc cosine (measured in radians) of x. -The result is between 0 and pi. -""" + The result is between 0 and pi. + """ + def acosh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic cosine of x. -""" + """Return the inverse hyperbolic cosine of x.""" + def asin(x: _SupportsFloatOrIndex, /) -> float: """Return the arc sine (measured in radians) of x. -The result is between -pi/2 and pi/2. -""" + The result is between -pi/2 and pi/2. + """ + def asinh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic sine of x. -""" + """Return the inverse hyperbolic sine of x.""" + def atan(x: _SupportsFloatOrIndex, /) -> float: """Return the arc tangent (measured in radians) of x. -The result is between -pi/2 and pi/2. -""" + The result is between -pi/2 and pi/2. + """ + def atan2(y: _SupportsFloatOrIndex, x: _SupportsFloatOrIndex, /) -> float: """Return the arc tangent (measured in radians) of y/x. -Unlike atan(y/x), the signs of both x and y are considered. -""" + Unlike atan(y/x), the signs of both x and y are considered. + """ + def atanh(x: _SupportsFloatOrIndex, /) -> float: - """Return the inverse hyperbolic tangent of x. -""" + """Return the inverse hyperbolic tangent of x.""" if sys.version_info >= (3, 11): def cbrt(x: _SupportsFloatOrIndex, /) -> float: - """Return the cube root of x. -""" + """Return the cube root of x.""" @type_check_only class _SupportsCeil(Protocol[_T_co]): @@ -61,73 +64,76 @@ class _SupportsCeil(Protocol[_T_co]): def ceil(x: _SupportsCeil[_T], /) -> _T: """Return the ceiling of x as an Integral. -This is the smallest integer >= x. -""" + This is the smallest integer >= x. + """ + @overload def ceil(x: _SupportsFloatOrIndex, /) -> int: ... def comb(n: SupportsIndex, k: SupportsIndex, /) -> int: """Number of ways to choose k items from n items without repetition and without order. -Evaluates to n! / (k! * (n - k)!) when k <= n and evaluates -to zero when k > n. + Evaluates to n! 
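A hedged usage sketch for the marshal functions stubbed above (illustrative only; it exercises only built-in types the module documents as supported):

import io
import marshal

payload = {"answer": 42, "values": (1.5, b"raw", [True, None])}
blob = marshal.dumps(payload)           # the bytes dump(value, file) would have written
assert marshal.loads(blob) == payload   # round-trips the supported built-in types

buf = io.BytesIO()                      # dump()/load() use a binary file object instead
marshal.dump(payload, buf)
buf.seek(0)
assert marshal.load(buf) == payload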
/ (k! * (n - k)!) when k <= n and evaluates + to zero when k > n. -Also called the binomial coefficient because it is equivalent -to the coefficient of k-th term in polynomial expansion of the -expression (1 + x)**n. + Also called the binomial coefficient because it is equivalent + to the coefficient of k-th term in polynomial expansion of the + expression (1 + x)**n. + + Raises TypeError if either of the arguments are not integers. + Raises ValueError if either of the arguments are negative. + """ -Raises TypeError if either of the arguments are not integers. -Raises ValueError if either of the arguments are negative. -""" def copysign(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: """Return a float with the magnitude (absolute value) of x but the sign of y. -On platforms that support signed zeros, copysign(1.0, -0.0) -returns -1.0. -""" + On platforms that support signed zeros, copysign(1.0, -0.0) + returns -1.0. + """ + def cos(x: _SupportsFloatOrIndex, /) -> float: - """Return the cosine of x (measured in radians). -""" + """Return the cosine of x (measured in radians).""" + def cosh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic cosine of x. -""" + """Return the hyperbolic cosine of x.""" + def degrees(x: _SupportsFloatOrIndex, /) -> float: - """Convert angle x from radians to degrees. -""" + """Convert angle x from radians to degrees.""" + def dist(p: Iterable[_SupportsFloatOrIndex], q: Iterable[_SupportsFloatOrIndex], /) -> float: """Return the Euclidean distance between two points p and q. -The points should be specified as sequences (or iterables) of -coordinates. Both inputs must have the same dimension. + The points should be specified as sequences (or iterables) of + coordinates. Both inputs must have the same dimension. + + Roughly equivalent to: + sqrt(sum((px - qx) ** 2.0 for px, qx in zip(p, q))) + """ -Roughly equivalent to: - sqrt(sum((px - qx) ** 2.0 for px, qx in zip(p, q))) -""" def erf(x: _SupportsFloatOrIndex, /) -> float: - """Error function at x. -""" + """Error function at x.""" + def erfc(x: _SupportsFloatOrIndex, /) -> float: - """Complementary error function at x. -""" + """Complementary error function at x.""" + def exp(x: _SupportsFloatOrIndex, /) -> float: - """Return e raised to the power of x. -""" + """Return e raised to the power of x.""" if sys.version_info >= (3, 11): def exp2(x: _SupportsFloatOrIndex, /) -> float: - """Return 2 raised to the power of x. -""" + """Return 2 raised to the power of x.""" def expm1(x: _SupportsFloatOrIndex, /) -> float: """Return exp(x)-1. -This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x. -""" + This function avoids the loss of precision involved in the direct evaluation of exp(x)-1 for small x. + """ + def fabs(x: _SupportsFloatOrIndex, /) -> float: - """Return the absolute value of the float x. -""" + """Return the absolute value of the float x.""" + def factorial(x: SupportsIndex, /) -> int: - """Find n!. -""" + """Find n!.""" + @type_check_only class _SupportsFloor(Protocol[_T_co]): def __floor__(self) -> _T_co: ... @@ -136,46 +142,51 @@ class _SupportsFloor(Protocol[_T_co]): def floor(x: _SupportsFloor[_T], /) -> _T: """Return the floor of x as an Integral. -This is the largest integer <= x. -""" + This is the largest integer <= x. + """ + @overload def floor(x: _SupportsFloatOrIndex, /) -> int: ... def fmod(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: """Return fmod(x, y), according to platform C. 
-x % y may differ. -""" + x % y may differ. + """ + def frexp(x: _SupportsFloatOrIndex, /) -> tuple[float, int]: """Return the mantissa and exponent of x, as pair (m, e). -m is a float and e is an int, such that x = m * 2.**e. -If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0. -""" + m is a float and e is an int, such that x = m * 2.**e. + If x is 0, m and e are both 0. Else 0.5 <= abs(m) < 1.0. + """ + def fsum(seq: Iterable[_SupportsFloatOrIndex], /) -> float: """Return an accurate floating-point sum of values in the iterable seq. -Assumes IEEE-754 floating-point arithmetic. -""" + Assumes IEEE-754 floating-point arithmetic. + """ + def gamma(x: _SupportsFloatOrIndex, /) -> float: - """Gamma function at x. -""" + """Gamma function at x.""" + def gcd(*integers: SupportsIndex) -> int: - """Greatest Common Divisor. -""" + """Greatest Common Divisor.""" + def hypot(*coordinates: _SupportsFloatOrIndex) -> float: """Multidimensional Euclidean distance from the origin to a point. -Roughly equivalent to: - sqrt(sum(x**2 for x in coordinates)) + Roughly equivalent to: + sqrt(sum(x**2 for x in coordinates)) -For a two dimensional point (x, y), gives the hypotenuse -using the Pythagorean theorem: sqrt(x*x + y*y). + For a two dimensional point (x, y), gives the hypotenuse + using the Pythagorean theorem: sqrt(x*x + y*y). -For example, the hypotenuse of a 3/4/5 right triangle is: + For example, the hypotenuse of a 3/4/5 right triangle is: + + >>> hypot(3.0, 4.0) + 5.0 + """ - >>> hypot(3.0, 4.0) - 5.0 -""" def isclose( a: _SupportsFloatOrIndex, b: _SupportsFloatOrIndex, @@ -185,98 +196,101 @@ def isclose( ) -> bool: """Determine whether two floating-point numbers are close in value. - rel_tol - maximum difference for being considered "close", relative to the - magnitude of the input values - abs_tol - maximum difference for being considered "close", regardless of the - magnitude of the input values + rel_tol + maximum difference for being considered "close", relative to the + magnitude of the input values + abs_tol + maximum difference for being considered "close", regardless of the + magnitude of the input values -Return True if a is close in value to b, and False otherwise. + Return True if a is close in value to b, and False otherwise. -For the values to be considered close, the difference between them -must be smaller than at least one of the tolerances. + For the values to be considered close, the difference between them + must be smaller than at least one of the tolerances. + + -inf, inf and NaN behave similarly to the IEEE 754 Standard. That + is, NaN is not close to anything, even itself. inf and -inf are + only close to themselves. + """ --inf, inf and NaN behave similarly to the IEEE 754 Standard. That -is, NaN is not close to anything, even itself. inf and -inf are -only close to themselves. -""" def isinf(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is a positive or negative infinity, and False otherwise. -""" + """Return True if x is a positive or negative infinity, and False otherwise.""" + def isfinite(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is neither an infinity nor a NaN, and False otherwise. -""" + """Return True if x is neither an infinity nor a NaN, and False otherwise.""" + def isnan(x: _SupportsFloatOrIndex, /) -> bool: - """Return True if x is a NaN (not a number), and False otherwise. 
-""" + """Return True if x is a NaN (not a number), and False otherwise.""" + def isqrt(n: SupportsIndex, /) -> int: - """Return the integer part of the square root of the input. -""" + """Return the integer part of the square root of the input.""" + def lcm(*integers: SupportsIndex) -> int: - """Least Common Multiple. -""" + """Least Common Multiple.""" + def ldexp(x: _SupportsFloatOrIndex, i: int, /) -> float: """Return x * (2**i). -This is essentially the inverse of frexp(). -""" + This is essentially the inverse of frexp(). + """ + def lgamma(x: _SupportsFloatOrIndex, /) -> float: - """Natural logarithm of absolute value of Gamma function at x. -""" + """Natural logarithm of absolute value of Gamma function at x.""" + def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: """log(x, [base=math.e]) -Return the logarithm of x to the given base. + Return the logarithm of x to the given base. + + If the base is not specified, returns the natural logarithm (base e) of x. + """ -If the base is not specified, returns the natural logarithm (base e) of x. -""" def log10(x: _SupportsFloatOrIndex, /) -> float: - """Return the base 10 logarithm of x. -""" + """Return the base 10 logarithm of x.""" + def log1p(x: _SupportsFloatOrIndex, /) -> float: """Return the natural logarithm of 1+x (base e). -The result is computed in a way which is accurate for x near zero. -""" + The result is computed in a way which is accurate for x near zero. + """ + def log2(x: _SupportsFloatOrIndex, /) -> float: - """Return the base 2 logarithm of x. -""" + """Return the base 2 logarithm of x.""" + def modf(x: _SupportsFloatOrIndex, /) -> tuple[float, float]: """Return the fractional and integer parts of x. -Both results carry the sign of x and are floats. -""" + Both results carry the sign of x and are floats. + """ if sys.version_info >= (3, 12): def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /, *, steps: SupportsIndex | None = None) -> float: """Return the floating-point value the given number of steps after x towards y. -If steps is not specified or is None, it defaults to 1. + If steps is not specified or is None, it defaults to 1. -Raises a TypeError, if x or y is not a double, or if steps is not an integer. -Raises ValueError if steps is negative. -""" + Raises a TypeError, if x or y is not a double, or if steps is not an integer. + Raises ValueError if steps is negative. + """ else: def nextafter(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return the next floating-point value after x towards y. -""" + """Return the next floating-point value after x towards y.""" def perm(n: SupportsIndex, k: SupportsIndex | None = None, /) -> int: """Number of ways to choose k items from n items without repetition and with order. -Evaluates to n! / (n - k)! when k <= n and evaluates -to zero when k > n. + Evaluates to n! / (n - k)! when k <= n and evaluates + to zero when k > n. -If k is not specified or is None, then k defaults to n -and the function returns n!. + If k is not specified or is None, then k defaults to n + and the function returns n!. + + Raises TypeError if either of the arguments are not integers. + Raises ValueError if either of the arguments are negative. + """ -Raises TypeError if either of the arguments are not integers. -Raises ValueError if either of the arguments are negative. -""" def pow(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: - """Return x**y (x to the power of y). 
-""" + """Return x**y (x to the power of y).""" _PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] _NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] @@ -299,54 +313,54 @@ _SupportsProdNoDefaultT = TypeVar("_SupportsProdNoDefaultT", bound=_SupportsProd def prod(iterable: Iterable[bool | _LiteralInteger], /, *, start: int = 1) -> int: # type: ignore[overload-overlap] """Calculate the product of all the elements in the input iterable. -The default start value for the product is 1. + The default start value for the product is 1. + + When the iterable is empty, return the start value. This function is + intended specifically for use with numeric values and may reject + non-numeric types. + """ -When the iterable is empty, return the start value. This function is -intended specifically for use with numeric values and may reject -non-numeric types. -""" @overload def prod(iterable: Iterable[_SupportsProdNoDefaultT], /) -> _SupportsProdNoDefaultT | Literal[1]: ... @overload def prod(iterable: Iterable[_MultiplicableT1], /, *, start: _MultiplicableT2) -> _MultiplicableT1 | _MultiplicableT2: ... def radians(x: _SupportsFloatOrIndex, /) -> float: - """Convert angle x from degrees to radians. -""" + """Convert angle x from degrees to radians.""" + def remainder(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, /) -> float: """Difference between x and the closest integer multiple of y. -Return x - n*y where n*y is the closest integer multiple of y. -In the case where x is exactly halfway between two multiples of -y, the nearest even value of n is used. The result is always exact. -""" + Return x - n*y where n*y is the closest integer multiple of y. + In the case where x is exactly halfway between two multiples of + y, the nearest even value of n is used. The result is always exact. + """ + def sin(x: _SupportsFloatOrIndex, /) -> float: - """Return the sine of x (measured in radians). -""" + """Return the sine of x (measured in radians).""" + def sinh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic sine of x. -""" + """Return the hyperbolic sine of x.""" if sys.version_info >= (3, 12): def sumprod(p: Iterable[float], q: Iterable[float], /) -> float: """Return the sum of products of values from two iterables p and q. -Roughly equivalent to: + Roughly equivalent to: - sum(map(operator.mul, p, q, strict=True)) + sum(map(operator.mul, p, q, strict=True)) -For float and mixed int/float inputs, the intermediate products -and sums are computed with extended precision. -""" + For float and mixed int/float inputs, the intermediate products + and sums are computed with extended precision. + """ def sqrt(x: _SupportsFloatOrIndex, /) -> float: - """Return the square root of x. -""" + """Return the square root of x.""" + def tan(x: _SupportsFloatOrIndex, /) -> float: - """Return the tangent of x (measured in radians). -""" + """Return the tangent of x (measured in radians).""" + def tanh(x: _SupportsFloatOrIndex, /) -> float: - """Return the hyperbolic tangent of x. -""" + """Return the hyperbolic tangent of x.""" # Is different from `_typeshed.SupportsTrunc`, which is not generic @type_check_only @@ -356,15 +370,15 @@ class _SupportsTrunc(Protocol[_T_co]): def trunc(x: _SupportsTrunc[_T], /) -> _T: """Truncates the Real x to the nearest Integral toward 0. -Uses the __trunc__ magic method. -""" + Uses the __trunc__ magic method. 
+ """ + def ulp(x: _SupportsFloatOrIndex, /) -> float: - """Return the value of the least significant bit of the float x. -""" + """Return the value of the least significant bit of the float x.""" if sys.version_info >= (3, 13): def fma(x: _SupportsFloatOrIndex, y: _SupportsFloatOrIndex, z: _SupportsFloatOrIndex, /) -> float: """Fused multiply-add operation. -Compute (x * y) + z with a single round. -""" + Compute (x * y) + z with a single round. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi index 5b5450897fd4b..4e54a0eb8538f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mimetypes.pyi @@ -22,6 +22,7 @@ init([files]) -- parse a list of files, default knownfiles (on Windows, the default values are taken from the registry) read_mime_types(file) -- parse one file, return a dictionary or None """ + import sys from _typeshed import StrPath from collections.abc import Sequence @@ -49,67 +50,70 @@ if sys.version_info >= (3, 13): def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: """Guess the type of a file based on its URL. -Return value is a tuple (type, encoding) where type is None if the -type can't be guessed (no or unknown suffix) or a string of the -form type/subtype, usable for a MIME Content-type header; and -encoding is None for no encoding or the name of the program used -to encode (e.g. compress or gzip). The mappings are table -driven. Encoding suffixes are case sensitive; type suffixes are -first tried case sensitive, then case insensitive. + Return value is a tuple (type, encoding) where type is None if the + type can't be guessed (no or unknown suffix) or a string of the + form type/subtype, usable for a MIME Content-type header; and + encoding is None for no encoding or the name of the program used + to encode (e.g. compress or gzip). The mappings are table + driven. Encoding suffixes are case sensitive; type suffixes are + first tried case sensitive, then case insensitive. -The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped -to ".tar.gz". (This is table-driven too, using the dictionary -suffix_map). + The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped + to ".tar.gz". (This is table-driven too, using the dictionary + suffix_map). + + Optional 'strict' argument when false adds a bunch of commonly found, but + non-standard types. + """ -Optional 'strict' argument when false adds a bunch of commonly found, but -non-standard types. -""" def guess_all_extensions(type: str, strict: bool = True) -> list[str]: """Guess the extensions for a file based on its MIME type. -Return value is a list of strings giving the possible filename -extensions, including the leading dot ('.'). The extension is not -guaranteed to have been associated with any particular data -stream, but would be mapped to the MIME type 'type' by -guess_type(). If no extension can be guessed for 'type', None -is returned. + Return value is a list of strings giving the possible filename + extensions, including the leading dot ('.'). The extension is not + guaranteed to have been associated with any particular data + stream, but would be mapped to the MIME type 'type' by + guess_type(). If no extension can be guessed for 'type', None + is returned. + + Optional 'strict' argument when false adds a bunch of commonly found, + but non-standard types. 
+ """ -Optional 'strict' argument when false adds a bunch of commonly found, -but non-standard types. -""" def guess_extension(type: str, strict: bool = True) -> str | None: """Guess the extension for a file based on its MIME type. -Return value is a string giving a filename extension, including the -leading dot ('.'). The extension is not guaranteed to have been -associated with any particular data stream, but would be mapped to the -MIME type 'type' by guess_type(). If no extension can be guessed for -'type', None is returned. + Return value is a string giving a filename extension, including the + leading dot ('.'). The extension is not guaranteed to have been + associated with any particular data stream, but would be mapped to the + MIME type 'type' by guess_type(). If no extension can be guessed for + 'type', None is returned. + + Optional 'strict' argument when false adds a bunch of commonly found, + but non-standard types. + """ -Optional 'strict' argument when false adds a bunch of commonly found, -but non-standard types. -""" def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... def add_type(type: str, ext: str, strict: bool = True) -> None: """Add a mapping between a type and an extension. -When the extension is already known, the new -type will replace the old one. When the type -is already known the extension will be added -to the list of known extensions. + When the extension is already known, the new + type will replace the old one. When the type + is already known the extension will be added + to the list of known extensions. -If strict is true, information will be added to -list of standard types, else to the list of non-standard -types. -""" + If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + """ if sys.version_info >= (3, 13): def guess_file_type(path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: """Guess the type of a file based on its path. -Similar to guess_type(), but takes file path instead of URL. -""" + Similar to guess_type(), but takes file path instead of URL. + """ inited: bool knownfiles: list[str] @@ -121,10 +125,11 @@ common_types: dict[str, str] class MimeTypes: """MIME-types datastore. -This datastore can handle information from mime.types-style files -and supports basic determination of MIME type from a filename or -URL, and can guess a reasonable extension given a MIME type. -""" + This datastore can handle information from mime.types-style files + and supports basic determination of MIME type from a filename or + URL, and can guess a reasonable extension given a MIME type. + """ + suffix_map: dict[str, str] encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] @@ -133,87 +138,93 @@ URL, and can guess a reasonable extension given a MIME type. def add_type(self, type: str, ext: str, strict: bool = True) -> None: """Add a mapping between a type and an extension. -When the extension is already known, the new -type will replace the old one. When the type -is already known the extension will be added -to the list of known extensions. + When the extension is already known, the new + type will replace the old one. When the type + is already known the extension will be added + to the list of known extensions. -If strict is true, information will be added to -list of standard types, else to the list of non-standard -types. 
+ If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + + Valid extensions are empty or start with a '.'. + """ -Valid extensions are empty or start with a '.'. -""" def guess_extension(self, type: str, strict: bool = True) -> str | None: """Guess the extension for a file based on its MIME type. -Return value is a string giving a filename extension, -including the leading dot ('.'). The extension is not -guaranteed to have been associated with any particular data -stream, but would be mapped to the MIME type 'type' by -guess_type(). If no extension can be guessed for 'type', None -is returned. + Return value is a string giving a filename extension, + including the leading dot ('.'). The extension is not + guaranteed to have been associated with any particular data + stream, but would be mapped to the MIME type 'type' by + guess_type(). If no extension can be guessed for 'type', None + is returned. + + Optional 'strict' argument when false adds a bunch of commonly found, + but non-standard types. + """ -Optional 'strict' argument when false adds a bunch of commonly found, -but non-standard types. -""" def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: """Guess the type of a file which is either a URL or a path-like object. -Return value is a tuple (type, encoding) where type is None if -the type can't be guessed (no or unknown suffix) or a string -of the form type/subtype, usable for a MIME Content-type -header; and encoding is None for no encoding or the name of -the program used to encode (e.g. compress or gzip). The -mappings are table driven. Encoding suffixes are case -sensitive; type suffixes are first tried case sensitive, then -case insensitive. - -The suffixes .tgz, .taz and .tz (case sensitive!) are all -mapped to '.tar.gz'. (This is table-driven too, using the -dictionary suffix_map.) - -Optional 'strict' argument when False adds a bunch of commonly found, -but non-standard types. -""" + Return value is a tuple (type, encoding) where type is None if + the type can't be guessed (no or unknown suffix) or a string + of the form type/subtype, usable for a MIME Content-type + header; and encoding is None for no encoding or the name of + the program used to encode (e.g. compress or gzip). The + mappings are table driven. Encoding suffixes are case + sensitive; type suffixes are first tried case sensitive, then + case insensitive. + + The suffixes .tgz, .taz and .tz (case sensitive!) are all + mapped to '.tar.gz'. (This is table-driven too, using the + dictionary suffix_map.) + + Optional 'strict' argument when False adds a bunch of commonly found, + but non-standard types. + """ + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: """Guess the extensions for a file based on its MIME type. -Return value is a list of strings giving the possible filename -extensions, including the leading dot ('.'). The extension is not -guaranteed to have been associated with any particular data stream, -but would be mapped to the MIME type 'type' by guess_type(). + Return value is a list of strings giving the possible filename + extensions, including the leading dot ('.'). The extension is not + guaranteed to have been associated with any particular data stream, + but would be mapped to the MIME type 'type' by guess_type(). + + Optional 'strict' argument when false adds a bunch of commonly found, + but non-standard types. 
+ """ -Optional 'strict' argument when false adds a bunch of commonly found, -but non-standard types. -""" def read(self, filename: str, strict: bool = True) -> None: """ -Read a single mime.types-format file, specified by pathname. + Read a single mime.types-format file, specified by pathname. + + If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + """ -If strict is true, information will be added to -list of standard types, else to the list of non-standard -types. -""" def readfp(self, fp: IO[str], strict: bool = True) -> None: """ -Read a single mime.types-format file. + Read a single mime.types-format file. + + If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + """ -If strict is true, information will be added to -list of standard types, else to the list of non-standard -types. -""" def read_windows_registry(self, strict: bool = True) -> None: """ -Load the MIME types database from Windows registry. + Load the MIME types database from Windows registry. -If strict is true, information will be added to -list of standard types, else to the list of non-standard -types. -""" + If strict is true, information will be added to + list of standard types, else to the list of non-standard + types. + """ if sys.version_info >= (3, 13): def guess_file_type(self, path: StrPath, *, strict: bool = True) -> tuple[str | None, str | None]: """Guess the type of a file based on its path. -Similar to guess_type(), but takes file path instead of URL. -""" + Similar to guess_type(), but takes file path instead of URL. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi index b56a4ba347f68..42214fb31ac63 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/mmap.pyi @@ -35,26 +35,27 @@ PAGESIZE: Final[int] class mmap: """Windows: mmap(fileno, length[, tagname[, access[, offset]]]) -Maps length bytes from the file specified by the file handle fileno, -and returns a mmap object. If length is larger than the current size -of the file, the file is extended to contain length bytes. If length -is 0, the maximum length of the map is the current size of the file, -except that if the file is empty Windows raises an exception (you cannot -create an empty mapping on Windows). - -Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]]) - -Maps length bytes from the file specified by the file descriptor fileno, -and returns a mmap object. If length is 0, the maximum length of the map -will be the current size of the file when mmap is called. -flags specifies the nature of the mapping. MAP_PRIVATE creates a -private copy-on-write mapping, so changes to the contents of the mmap -object will be private to this process, and MAP_SHARED creates a mapping -that's shared with all other processes mapping the same areas of the file. -The default value is MAP_SHARED. - -To map anonymous memory, pass -1 as the fileno (both versions). -""" + Maps length bytes from the file specified by the file handle fileno, + and returns a mmap object. If length is larger than the current size + of the file, the file is extended to contain length bytes. If length + is 0, the maximum length of the map is the current size of the file, + except that if the file is empty Windows raises an exception (you cannot + create an empty mapping on Windows). 
+ + Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]]) + + Maps length bytes from the file specified by the file descriptor fileno, + and returns a mmap object. If length is 0, the maximum length of the map + will be the current size of the file when mmap is called. + flags specifies the nature of the mapping. MAP_PRIVATE creates a + private copy-on-write mapping, so changes to the contents of the mmap + object will be private to this process, and MAP_SHARED creates a mapping + that's shared with all other processes mapping the same areas of the file. + The default value is MAP_SHARED. + + To map anonymous memory, pass -1 as the fileno (both versions). + """ + if sys.platform == "win32": def __new__(self, fileno: int, length: int, tagname: str | None = None, access: int = 0, offset: int = 0) -> Self: ... else: @@ -90,8 +91,7 @@ To map anonymous memory, pass -1 as the fileno (both versions). def tell(self) -> int: ... def write_byte(self, byte: int) -> None: ... def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" closed: bool if sys.platform != "win32": def madvise(self, option: int, start: int = 0, length: int = ...) -> None: ... @@ -102,17 +102,17 @@ To map anonymous memory, pass -1 as the fileno (both versions). def write(self, bytes: ReadableBuffer) -> int: ... @overload def __getitem__(self, key: int, /) -> int: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> bytes: ... def __delitem__(self, key: int | slice, /) -> NoReturn: - """Delete self[key]. -""" + """Delete self[key].""" + @overload def __setitem__(self, key: int, value: int, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: ReadableBuffer, /) -> None: ... # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, @@ -124,11 +124,10 @@ To map anonymous memory, pass -1 as the fileno (both versions). def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" + def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object. -""" + """Release the buffer object that exposes the underlying memory of the object.""" if sys.version_info >= (3, 13): def seekable(self) -> Literal[True]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi index 3659ceed9c4e4..692cb04850ad9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/modulefinder.pyi @@ -1,5 +1,5 @@ -"""Find modules used by a script, using introspection. -""" +"""Find modules used by a script, using introspection.""" + import sys from collections.abc import Container, Iterable, Iterator, Sequence from types import CodeType @@ -64,22 +64,25 @@ class ModuleFinder: ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented def report(self) -> None: """Print a report to stdout, listing the found modules with their -paths, as well as modules that are missing, or seem to be missing. -""" + paths, as well as modules that are missing, or seem to be missing. 
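A short sketch of the mmap constructor described above, using an anonymous mapping (fileno=-1) so it behaves the same on Windows and Unix (illustrative only):

import mmap

with mmap.mmap(-1, 16) as m:     # -1 maps anonymous memory, per the docstring
    m.write(b"hello")            # write() advances the current position
    m.seek(0)
    assert m.read(5) == b"hello"
    m[0:5] = b"HELLO"            # slice assignment; the lengths must match
    assert m[:5] == b"HELLO" and len(m) == 16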
+ """ + def any_missing(self) -> list[str]: # undocumented """Return a list of modules that appear to be missing. Use -any_missing_maybe() if you want to know which modules are -certain to be missing, and which *may* be missing. -""" + any_missing_maybe() if you want to know which modules are + certain to be missing, and which *may* be missing. + """ + def any_missing_maybe(self) -> tuple[list[str], list[str]]: # undocumented """Return two lists, one with modules that are certainly missing -and one with modules that *may* be missing. The latter names could -either be submodules *or* just global names in the package. + and one with modules that *may* be missing. The latter names could + either be submodules *or* just global names in the package. + + The reason it can't always be determined is that it's impossible to + tell which names are imported when "from module import *" is done + with an extension module, short of actually importing it. + """ -The reason it can't always be determined is that it's impossible to -tell which names are imported when "from module import *" is done -with an extension module, short of actually importing it. -""" def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented def test() -> ModuleFinder | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi index 80b5054294496..0983860e54e6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msilib/__init__.pyi @@ -38,8 +38,8 @@ if sys.platform == "win32": seqno: int | type[_Unspecified] = ..., cond: str | type[_Unspecified] = ..., ) -> None: - """Change the sequence number of an action in a sequence list -""" + """Change the sequence number of an action in a sequence list""" + def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... def add_stream(db: _Database, name: str, path: str) -> None: ... def init_database( @@ -84,14 +84,15 @@ if sys.platform == "win32": componentflags: int | None = None, ) -> None: """Create a new directory in the Directory table. There is a current component - at each point in time for the directory, which is either explicitly created - through start_component, or implicitly when files are added for the first - time. Files are added into the current component, and into the cab file. - To create a directory, a base directory object needs to be specified (can be - None), the path to the physical directory, and a logical directory name. - Default specifies the DefaultDir slot in the directory table. componentflags - specifies the default flags that new components get. -""" + at each point in time for the directory, which is either explicitly created + through start_component, or implicitly when files are added for the first + time. Files are added into the current component, and into the cab file. + To create a directory, a base directory object needs to be specified (can be + None), the path to the physical directory, and a logical directory name. + Default specifies the DefaultDir slot in the directory table. componentflags + specifies the default flags that new components get. + """ + def start_component( self, component: str | None = None, @@ -101,26 +102,28 @@ if sys.platform == "win32": uuid: str | None = None, ) -> None: """Add an entry to the Component table, and make this component the current for this - directory. 
If no component name is given, the directory name is used. If no feature - is given, the current feature is used. If no flags are given, the directory's default - flags are used. If no keyfile is given, the KeyPath is left null in the Component - table. -""" + directory. If no component name is given, the directory name is used. If no feature + is given, the current feature is used. If no flags are given, the directory's default + flags are used. If no keyfile is given, the KeyPath is left null in the Component + table. + """ + def make_short(self, file: str) -> str: ... def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: """Add a file to the current component of the directory, starting a new one - if there is no current component. By default, the file name in the source - and the file table will be identical. If the src file is specified, it is - interpreted relative to the current directory. Optionally, a version and a - language can be specified for the entry in the File table. -""" + if there is no current component. By default, the file name in the source + and the file table will be identical. If the src file is specified, it is + interpreted relative to the current directory. Optionally, a version and a + language can be specified for the entry in the File table. + """ + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: """Add a list of files to the current component as specified in the - glob pattern. Individual files can be excluded in the exclude list. -""" + glob pattern. Individual files can be excluded in the exclude list. + """ + def remove_pyc(self) -> None: - """Remove .pyc files on uninstall -""" + """Remove .pyc files on uninstall""" class Binary: name: str diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi index 26d790eef4d5c..2a11014103a27 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/msvcrt.pyi @@ -16,80 +16,85 @@ if sys.platform == "win32": def locking(fd: int, mode: int, nbytes: int, /) -> None: """Lock part of a file based on file descriptor fd from the C runtime. -Raises OSError on failure. The locked region of the file extends from -the current file position for nbytes bytes, and may continue beyond -the end of the file. mode must be one of the LK_* constants listed -below. Multiple regions in a file may be locked at the same time, but -may not overlap. Adjacent regions are not merged; they must be unlocked -individually. -""" + Raises OSError on failure. The locked region of the file extends from + the current file position for nbytes bytes, and may continue beyond + the end of the file. mode must be one of the LK_* constants listed + below. Multiple regions in a file may be locked at the same time, but + may not overlap. Adjacent regions are not merged; they must be unlocked + individually. + """ + def setmode(fd: int, mode: int, /) -> int: """Set the line-end translation mode for the file descriptor fd. -To set it to text mode, flags should be os.O_TEXT; for binary, it -should be os.O_BINARY. + To set it to text mode, flags should be os.O_TEXT; for binary, it + should be os.O_BINARY. + + Return value is the previous mode. + """ -Return value is the previous mode. -""" def open_osfhandle(handle: int, flags: int, /) -> int: """Create a C runtime file descriptor from the file handle handle. 
-The flags parameter should be a bitwise OR of os.O_APPEND, os.O_RDONLY, -and os.O_TEXT. The returned file descriptor may be used as a parameter -to os.fdopen() to create a file object. -""" + The flags parameter should be a bitwise OR of os.O_APPEND, os.O_RDONLY, + and os.O_TEXT. The returned file descriptor may be used as a parameter + to os.fdopen() to create a file object. + """ + def get_osfhandle(fd: int, /) -> int: """Return the file handle for the file descriptor fd. -Raises OSError if fd is not recognized. -""" + Raises OSError if fd is not recognized. + """ + def kbhit() -> bool: - """Returns a nonzero value if a keypress is waiting to be read. Otherwise, return 0. -""" + """Returns a nonzero value if a keypress is waiting to be read. Otherwise, return 0.""" + def getch() -> bytes: """Read a keypress and return the resulting character as a byte string. -Nothing is echoed to the console. This call will block if a keypress is -not already available, but will not wait for Enter to be pressed. If the -pressed key was a special function key, this will return '\\000' or -'\\xe0'; the next call will return the keycode. The Control-C keypress -cannot be read with this function. -""" + Nothing is echoed to the console. This call will block if a keypress is + not already available, but will not wait for Enter to be pressed. If the + pressed key was a special function key, this will return '\\000' or + '\\xe0'; the next call will return the keycode. The Control-C keypress + cannot be read with this function. + """ + def getwch() -> str: - """Wide char variant of getch(), returning a Unicode value. -""" + """Wide char variant of getch(), returning a Unicode value.""" + def getche() -> bytes: - """Similar to getch(), but the keypress will be echoed if possible. -""" + """Similar to getch(), but the keypress will be echoed if possible.""" + def getwche() -> str: - """Wide char variant of getche(), returning a Unicode value. -""" + """Wide char variant of getche(), returning a Unicode value.""" + def putch(char: bytes | bytearray, /) -> None: - """Print the byte string char to the console without buffering. -""" + """Print the byte string char to the console without buffering.""" + def putwch(unicode_char: str, /) -> None: - """Wide char variant of putch(), accepting a Unicode value. -""" + """Wide char variant of putch(), accepting a Unicode value.""" + def ungetch(char: bytes | bytearray, /) -> None: """Opposite of getch. -Cause the byte string char to be "pushed back" into the -console buffer; it will be the next character read by -getch() or getche(). -""" + Cause the byte string char to be "pushed back" into the + console buffer; it will be the next character read by + getch() or getche(). + """ + def ungetwch(unicode_char: str, /) -> None: - """Wide char variant of ungetch(), accepting a Unicode value. -""" + """Wide char variant of ungetch(), accepting a Unicode value.""" + def heapmin() -> None: """Minimize the malloc() heap. -Force the malloc() heap to clean itself up and return unused blocks -to the operating system. On failure, this raises OSError. -""" + Force the malloc() heap to clean itself up and return unused blocks + to the operating system. On failure, this raises OSError. + """ + def SetErrorMode(mode: int, /) -> int: - """Wrapper around SetErrorMode. -""" + """Wrapper around SetErrorMode.""" if sys.version_info >= (3, 10): def GetErrorMode() -> int: # undocumented - """Wrapper around GetErrorMode. 
-""" + """Wrapper around GetErrorMode.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi index 33ff02214196d..3fe3d082c30dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/connection.pyi @@ -19,43 +19,45 @@ class _ConnectionBase(Generic[_SendT_contra, _RecvT_co]): def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property def closed(self) -> bool: # undocumented - """True if the connection is closed -""" + """True if the connection is closed""" + @property def readable(self) -> bool: # undocumented - """True if the connection is readable -""" + """True if the connection is readable""" + @property def writable(self) -> bool: # undocumented - """True if the connection is writable -""" + """True if the connection is writable""" + def fileno(self) -> int: - """File descriptor or handle of the connection -""" + """File descriptor or handle of the connection""" + def close(self) -> None: - """Close the connection -""" + """Close the connection""" + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: - """Send the bytes data from a bytes-like object -""" + """Send the bytes data from a bytes-like object""" + def send(self, obj: _SendT_contra) -> None: - """Send a (picklable) object -""" + """Send a (picklable) object""" + def recv_bytes(self, maxlength: int | None = None) -> bytes: """ -Receive bytes data as a bytes object. -""" + Receive bytes data as a bytes object. + """ + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: """ -Receive bytes data into a writeable bytes-like object. -Return the number of bytes read. -""" + Receive bytes data into a writeable bytes-like object. + Return the number of bytes read. + """ + def recv(self) -> _RecvT_co: - """Receive a (picklable) object -""" + """Receive a (picklable) object""" + def poll(self, timeout: float | None = 0.0) -> bool: - """Whether there is any input available to be read -""" + """Whether there is any input available to be read""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None @@ -64,38 +66,41 @@ Return the number of bytes read. class Connection(_ConnectionBase[_SendT_contra, _RecvT_co]): """ -Connection class based on an arbitrary file descriptor (Unix only), or -a socket handle (Windows). -""" + Connection class based on an arbitrary file descriptor (Unix only), or + a socket handle (Windows). + """ if sys.platform == "win32": class PipeConnection(_ConnectionBase[_SendT_contra, _RecvT_co]): """ -Connection class based on a Windows named pipe. -Overlapped I/O is used, so the handles must have been created -with FILE_FLAG_OVERLAPPED. -""" + Connection class based on a Windows named pipe. + Overlapped I/O is used, so the handles must have been created + with FILE_FLAG_OVERLAPPED. + """ class Listener: """ -Returns a listener object. + Returns a listener object. + + This is a wrapper for a bound socket which is 'listening' for + connections, or for a Windows named pipe. + """ -This is a wrapper for a bound socket which is 'listening' for -connections, or for a Windows named pipe. 
-""" def __init__( self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... def accept(self) -> Connection[Incomplete, Incomplete]: """ -Accept a connection on the bound socket or named pipe of `self`. + Accept a connection on the bound socket or named pipe of `self`. + + Returns a `Connection` object. + """ -Returns a `Connection` object. -""" def close(self) -> None: """ -Close the bound socket or named pipe of `self`. -""" + Close the bound socket or named pipe of `self`. + """ + @property def address(self) -> _Address: ... @property @@ -117,14 +122,15 @@ def wait( object_list: Iterable[Connection[_SendT_contra, _RecvT_co] | socket.socket | int], timeout: float | None = None ) -> list[Connection[_SendT_contra, _RecvT_co] | socket.socket | int]: """ -Wait till an object in object_list is ready/readable. + Wait till an object in object_list is ready/readable. + + Returns list of those objects in object_list which are ready/readable. + """ -Returns list of those objects in object_list which are ready/readable. -""" def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection[Any, Any]: """ -Returns a connection to the address of a `Listener` -""" + Returns a connection to the address of a `Listener` + """ # N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. # _ConnectionBase is the common base class of Connection and PipeConnection @@ -135,11 +141,11 @@ Returns a connection to the address of a `Listener` if sys.platform != "win32": def Pipe(duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: """ -Returns pair of connection objects at either end of a pipe -""" + Returns pair of connection objects at either end of a pipe + """ else: def Pipe(duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: """ -Returns pair of connection objects at either end of a pipe -""" + Returns pair of connection objects at either end of a pipe + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi index c347ed1cbc798..21be42237d796 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/context.pyi @@ -39,28 +39,30 @@ class BaseContext: @staticmethod def current_process() -> BaseProcess: """ -Return process object representing the current process -""" + Return process object representing the current process + """ + @staticmethod def parent_process() -> BaseProcess | None: """ -Return process object representing the parent process -""" + Return process object representing the parent process + """ + @staticmethod def active_children() -> list[BaseProcess]: """ -Return list of process objects corresponding to live child processes -""" + Return list of process objects corresponding to live child processes + """ + def cpu_count(self) -> int: - """Returns the number of CPUs in the system -""" + """Returns the number of CPUs in the system""" + def Manager(self) -> SyncManager: """Returns a manager associated with a running server process -The managers methods such as `Lock()`, `Condition()` and `Queue()` -can be used to create shared objects. -""" - + The managers methods such as `Lock()`, `Condition()` and `Queue()` + can be used to create shared objects. + """ # N.B. Keep this in sync with multiprocessing.connection.Pipe. 
# _ConnectionBase is the common base class of Connection and PipeConnection # and can be used in cross-platform code. @@ -69,45 +71,43 @@ can be used to create shared objects. # However, TypeVars scoped entirely within a return annotation is unspecified in the spec. if sys.platform != "win32": def Pipe(self, duplex: bool = True) -> tuple[Connection[Any, Any], Connection[Any, Any]]: - """Returns two connection object connected by a pipe -""" + """Returns two connection object connected by a pipe""" else: def Pipe(self, duplex: bool = True) -> tuple[PipeConnection[Any, Any], PipeConnection[Any, Any]]: - """Returns two connection object connected by a pipe -""" + """Returns two connection object connected by a pipe""" def Barrier( self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None ) -> synchronize.Barrier: - """Returns a barrier object -""" + """Returns a barrier object""" + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: - """Returns a bounded semaphore object -""" + """Returns a bounded semaphore object""" + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: - """Returns a condition object -""" + """Returns a condition object""" + def Event(self) -> synchronize.Event: - """Returns an event object -""" + """Returns an event object""" + def Lock(self) -> synchronize.Lock: - """Returns a non-recursive lock object -""" + """Returns a non-recursive lock object""" + def RLock(self) -> synchronize.RLock: - """Returns a recursive lock object -""" + """Returns a recursive lock object""" + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: - """Returns a semaphore object -""" + """Returns a semaphore object""" + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: - """Returns a queue object -""" + """Returns a queue object""" + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: - """Returns a queue object -""" + """Returns a queue object""" + def SimpleQueue(self) -> queues.SimpleQueue[Any]: - """Returns a queue object -""" + """Returns a queue object""" + def Pool( self, processes: int | None = None, @@ -115,26 +115,26 @@ can be used to create shared objects. initargs: Iterable[Any] = (), maxtasksperchild: int | None = None, ) -> _Pool: - """Returns a process pool object -""" + """Returns a process pool object""" + @overload def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: - """Returns a shared object -""" + """Returns a shared object""" + @overload def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... @overload def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: - """Returns a shared array -""" + """Returns a shared array""" + @overload def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload def Value( self, typecode_or_type: type[_SimpleCData[_T]], *args: Any, lock: Literal[True] | _LockLike = True ) -> Synchronized[_T]: - """Returns a synchronized shared object -""" + """Returns a synchronized shared object""" + @overload def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> Synchronized[_CT]: ... @overload @@ -147,8 +147,8 @@ can be used to create shared objects. 
def Array( self, typecode_or_type: type[_SimpleCData[_T]], size_or_initializer: int | Sequence[Any], *, lock: Literal[False] ) -> SynchronizedArray[_T]: - """Returns a synchronized shared array -""" + """Returns a synchronized shared array""" + @overload def Array( self, typecode_or_type: type[c_char], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True @@ -171,28 +171,32 @@ can be used to create shared objects. ) -> Any: ... def freeze_support(self) -> None: """Check whether this is a fake forked process in a frozen executable. -If so then run code specified by commandline and exit. -""" + If so then run code specified by commandline and exit. + """ + def get_logger(self) -> Logger: """Return package logger -- if it does not already exist then -it is created. -""" + it is created. + """ + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: - """Turn on logging and add a handler which prints to stderr -""" + """Turn on logging and add a handler which prints to stderr""" + def allow_connection_pickling(self) -> None: """Install support for sending connections and sockets -between processes -""" + between processes + """ + def set_executable(self, executable: str) -> None: """Sets the path to a python.exe or pythonw.exe binary used to run -child processes instead of sys.executable when using the 'spawn' -start method. Useful for people embedding Python. -""" + child processes instead of sys.executable when using the 'spawn' + start method. Useful for people embedding Python. + """ + def set_forkserver_preload(self, module_names: list[str]) -> None: """Set list of module names to try to load in forkserver process. -This is really just a hint. -""" + This is really just a hint. + """ if sys.platform != "win32": @overload def get_context(self, method: None = None) -> DefaultContext: ... @@ -220,8 +224,9 @@ This is really just a hint. @property def reducer(self) -> str: """Controls how objects will be reduced to a form that can be -shared with other processes. -""" + shared with other processes. + """ + @reducer.setter def reducer(self, reduction: str) -> None: ... def _check_available(self) -> None: ... @@ -236,8 +241,7 @@ class DefaultContext(BaseContext): def __init__(self, context: BaseContext) -> None: ... def get_start_method(self, allow_none: bool = False) -> str: ... def get_all_start_methods(self) -> list[str]: - """Returns a list of the supported start methods, default first. -""" + """Returns a list of the supported start methods, default first.""" _default_context: DefaultContext diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi index 3d499b902e26d..00208b1103cb0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -11,29 +11,31 @@ SIGNED_STRUCT: Final[Struct] class ForkServer: def set_forkserver_preload(self, modules_names: list[str]) -> None: - """Set list of module names to try to load in forkserver process. -""" + """Set list of module names to try to load in forkserver process.""" + def get_inherited_fds(self) -> list[int] | None: """Return list of fds inherited from parent process. -This returns None if the current process was not started by fork -server. -""" + This returns None if the current process was not started by fork + server. 
+ """ + def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: """Request forkserver to create a child process. -Returns a pair of fds (status_r, data_w). The calling process can read -the child process's pid and (eventually) its returncode from status_r. -The calling process should write to data_w the pickled preparation and -process data. -""" + Returns a pair of fds (status_r, data_w). The calling process can read + the child process's pid and (eventually) its returncode from status_r. + The calling process should write to data_w the pickled preparation and + process data. + """ + def ensure_running(self) -> None: """Make sure that a fork server is running. -This can be called from any process. Note that usually a child -process will just reuse the forkserver started by its parent, so -ensure_running() will do nothing. -""" + This can be called from any process. Note that usually a child + process will just reuse the forkserver started by its parent, so + ensure_running() will do nothing. + """ if sys.version_info >= (3, 14): def main( @@ -45,8 +47,7 @@ if sys.version_info >= (3, 14): *, authkey_r: int | None = None, ) -> None: - """Run forkserver. -""" + """Run forkserver.""" else: def main( @@ -56,8 +57,7 @@ else: main_path: str | None = None, sys_path: Unused = None, ) -> None: - """Run forkserver. -""" + """Run forkserver.""" def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi index d6a89156870bb..894d2dfc622b3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/heap.pyi @@ -9,8 +9,9 @@ __all__ = ["BufferWrapper"] class Arena: """ -A shared memory area backed by a temporary file (POSIX). -""" + A shared memory area backed by a temporary file (POSIX). + """ + size: int buffer: mmap if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi index 8dc0d9ce92538..c4c8182c1ad27 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/managers.pyi @@ -39,8 +39,9 @@ _Namespace: TypeAlias = Namespace class Token: """ -Type to uniquely identify a shared object -""" + Type to uniquely identify a shared object + """ + __slots__ = ("typeid", "address", "id") typeid: str | bytes | None address: _Address | None @@ -51,8 +52,9 @@ Type to uniquely identify a shared object class BaseProxy: """ -A base for proxies of shared objects -""" + A base for proxies of shared objects + """ + _address_to_local: dict[_Address, Any] _mutex: Any def __init__( @@ -68,12 +70,14 @@ A base for proxies of shared objects def __deepcopy__(self, memo: Any | None) -> Any: ... def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: """ -Try to call a method of the referent and return a copy of the result -""" + Try to call a method of the referent and return a copy of the result + """ + def _getvalue(self) -> Any: """ -Get a copy of the value of the referent -""" + Get a copy of the value of the referent + """ + def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... 
class ValueProxy(BaseProxy, Generic[_T]): @@ -83,8 +87,8 @@ class ValueProxy(BaseProxy, Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ if sys.version_info >= (3, 13): class _BaseDictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -115,8 +119,8 @@ if sys.version_info >= (3, 13): def __class_getitem__(cls, args: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ else: class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): @@ -226,8 +230,8 @@ class ListProxy(BaseListProxy[_T]): def __class_getitem__(cls, args: Any, /) -> Any: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ # Send is (kind, result) # Receive is (id, methodname, args, kwds) @@ -236,8 +240,9 @@ _ServerConnection: TypeAlias = Connection[tuple[str, Any], tuple[str, str, Itera # Returned by BaseManager.get_server() class Server: """ -Server class which runs in a process controlled by a manager object -""" + Server class which runs in a process controlled by a manager object + """ + address: _Address | None id_to_obj: dict[str, tuple[Any, set[str], dict[str, str]]] fallback_mapping: dict[str, Callable[[_ServerConnection, str, Any], Any]] @@ -252,59 +257,68 @@ Server class which runs in a process controlled by a manager object ) -> None: ... def serve_forever(self) -> None: """ -Run the server forever -""" + Run the server forever + """ + def accepter(self) -> None: ... if sys.version_info >= (3, 10): def handle_request(self, conn: _ServerConnection) -> None: """ -Handle a new connection -""" + Handle a new connection + """ else: def handle_request(self, c: _ServerConnection) -> None: """ - Handle a new connection - """ + Handle a new connection + """ def serve_client(self, conn: _ServerConnection) -> None: """ -Handle requests from the proxies in a particular process/thread -""" + Handle requests from the proxies in a particular process/thread + """ + def fallback_getvalue(self, conn: _ServerConnection, ident: str, obj: _T) -> _T: ... def fallback_str(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def fallback_repr(self, conn: _ServerConnection, ident: str, obj: Any) -> str: ... def dummy(self, c: _ServerConnection) -> None: ... 
def debug_info(self, c: _ServerConnection) -> str: """ -Return some info --- useful to spot problems with refcounting -""" + Return some info --- useful to spot problems with refcounting + """ + def number_of_objects(self, c: _ServerConnection) -> int: """ -Number of shared objects -""" + Number of shared objects + """ + def shutdown(self, c: _ServerConnection) -> None: """ -Shutdown this process -""" + Shutdown this process + """ + def create(self, c: _ServerConnection, typeid: str, /, *args: Any, **kwds: Any) -> tuple[str, tuple[str, ...]]: """ -Create a new shared object and return its id -""" + Create a new shared object and return its id + """ + def get_methods(self, c: _ServerConnection, token: Token) -> set[str]: """ -Return the methods of the shared object indicated by token -""" + Return the methods of the shared object indicated by token + """ + def accept_connection(self, c: _ServerConnection, name: str) -> None: """ -Spawn a new thread to serve this connection -""" + Spawn a new thread to serve this connection + """ + def incref(self, c: _ServerConnection, ident: str) -> None: ... def decref(self, c: _ServerConnection, ident: str) -> None: ... class BaseManager: """ -Base class for managers -""" + Base class for managers + """ + if sys.version_info >= (3, 11): def __init__( self, @@ -326,21 +340,24 @@ Base class for managers def get_server(self) -> Server: """ -Return server object with serve_forever() method and address attribute -""" + Return server object with serve_forever() method and address attribute + """ + def connect(self) -> None: """ -Connect manager object to the server process -""" + Connect manager object to the server process + """ + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: """ -Spawn a server process for this manager object -""" + Spawn a server process for this manager object + """ shutdown: _Finalize # only available after start() was called def join(self, timeout: float | None = None) -> None: # undocumented """ -Join the manager process (if it has been spawned) -""" + Join the manager process (if it has been spawned) + """ + @property def address(self) -> _Address | None: ... @classmethod @@ -354,8 +371,9 @@ Join the manager process (if it has been spawned) create_method: bool = True, ) -> None: """ -Register a typeid with the manager type -""" + Register a typeid with the manager type + """ + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -363,14 +381,15 @@ Register a typeid with the manager type class SyncManager(BaseManager): """ -Subclass of `BaseManager` which supports a number of shared object types. + Subclass of `BaseManager` which supports a number of shared object types. + + The types registered are those intended for the synchronization + of threads, plus `dict`, `list` and `Namespace`. -The types registered are those intended for the synchronization -of threads, plus `dict`, `list` and `Namespace`. + The `multiprocessing.Manager()` function creates started instances of + this class. + """ -The `multiprocessing.Manager()` function creates started instances of -this class. -""" def Barrier( self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None ) -> threading.Barrier: ... @@ -424,35 +443,39 @@ class RemoteError(Exception): ... 
class SharedMemoryServer(Server): def track_segment(self, c: _ServerConnection, segment_name: str) -> None: - """Adds the supplied shared memory block name to Server's tracker. -""" + """Adds the supplied shared memory block name to Server's tracker.""" + def release_segment(self, c: _ServerConnection, segment_name: str) -> None: """Calls unlink() on the shared memory block with the supplied name -and removes it from the tracker instance inside the Server. -""" + and removes it from the tracker instance inside the Server. + """ + def list_segments(self, c: _ServerConnection) -> list[str]: """Returns a list of names of shared memory blocks that the Server -is currently tracking. -""" + is currently tracking. + """ class SharedMemoryManager(BaseManager): """Like SyncManager but uses SharedMemoryServer instead of Server. -It provides methods for creating and returning SharedMemory instances -and for creating a list-like object (ShareableList) backed by shared -memory. It also provides methods that create and return Proxy Objects -that support synchronization across processes (i.e. multi-process-safe -locks and semaphores). -""" + It provides methods for creating and returning SharedMemory instances + and for creating a list-like object (ShareableList) backed by shared + memory. It also provides methods that create and return Proxy Objects + that support synchronization across processes (i.e. multi-process-safe + locks and semaphores). + """ + def get_server(self) -> SharedMemoryServer: - """Better than monkeypatching for now; merge into Server ultimately -""" + """Better than monkeypatching for now; merge into Server ultimately""" + def SharedMemory(self, size: int) -> _SharedMemory: """Returns a new SharedMemory instance with the specified size in -bytes, to be tracked by the manager. -""" + bytes, to be tracked by the manager. + """ + def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: """Returns a new ShareableList instance populated with the values -from the input sequence, to be tracked by the manager. -""" + from the input sequence, to be tracked by the manager. + """ + def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi index e31ca99591fbf..c8c6ff3d3104e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/pool.pyi @@ -20,8 +20,8 @@ class ApplyResult(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ # alias created during issue #17805 AsyncResult = ApplyResult @@ -46,8 +46,9 @@ class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool: """ -Class which supports an async version of applying functions to arguments. -""" + Class which supports an async version of applying functions to arguments. + """ + def __init__( self, processes: int | None = None, @@ -60,9 +61,10 @@ Class which supports an async version of applying functions to arguments. def Process(ctx: DefaultContext, *args: Any, **kwds: Any) -> Process: ... def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: """ -Equivalent of `func(*args, **kwds)`. -Pool must be running. -""" + Equivalent of `func(*args, **kwds)`. 
+ Pool must be running. + """ + def apply_async( self, func: Callable[..., _T], @@ -72,13 +74,15 @@ Pool must be running. error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[_T]: """ -Asynchronous version of `apply()` method. -""" + Asynchronous version of `apply()` method. + """ + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: """ -Apply `func` to each element in `iterable`, collecting the results -in a list that is returned. -""" + Apply `func` to each element in `iterable`, collecting the results + in a list that is returned. + """ + def map_async( self, func: Callable[[_S], _T], @@ -88,22 +92,26 @@ in a list that is returned. error_callback: Callable[[BaseException], object] | None = None, ) -> MapResult[_T]: """ -Asynchronous version of `map()` method. -""" + Asynchronous version of `map()` method. + """ + def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: """ -Equivalent of `map()` -- can be MUCH slower than `Pool.map()`. -""" + Equivalent of `map()` -- can be MUCH slower than `Pool.map()`. + """ + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: """ -Like `imap()` method but ordering of results is arbitrary. -""" + Like `imap()` method but ordering of results is arbitrary. + """ + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: """ -Like `map()` method but the elements of the `iterable` are expected to -be iterables as well and will be unpacked as arguments. Hence -`func` and (a, b) becomes func(a, b). -""" + Like `map()` method but the elements of the `iterable` are expected to + be iterables as well and will be unpacked as arguments. Hence + `func` and (a, b) becomes func(a, b). + """ + def starmap_async( self, func: Callable[..., _T], @@ -113,8 +121,9 @@ be iterables as well and will be unpacked as arguments. Hence error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[list[_T]]: """ -Asynchronous version of `starmap()` method. -""" + Asynchronous version of `starmap()` method. + """ + def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... 
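The pool.pyi annotations above cover apply(), map(), imap() and their *_async variants. As a brief orientation on how that annotated API is typically driven, here is a minimal sketch; the square worker and the pool size are illustrative assumptions, not taken from the stubs:

import multiprocessing as mp

def square(x: int) -> int:
    # CPU-bound worker; Pool needs a picklable, module-level callable.
    return x * x

if __name__ == "__main__":
    # Pool is usable as a context manager; map() blocks and returns list[int],
    # while apply_async() returns an AsyncResult[int] whose get() may time out.
    with mp.Pool(processes=2) as pool:
        squares = pool.map(square, range(5))       # [0, 1, 4, 9, 16]
        pending = pool.apply_async(square, (10,))
        print(squares, pending.get(timeout=5))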
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi index 87c44fe9d5fc5..632a7657fd616 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -14,8 +14,9 @@ if sys.platform == "win32": class Popen: """ -Start a subprocess to run the code of a process object -""" + Start a subprocess to run the code of a process object + """ + finalizer: Finalize method: ClassVar[str] pid: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi index d4d0f6bf235bd..f740eb50c0eb4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/process.pyi @@ -5,10 +5,11 @@ __all__ = ["BaseProcess", "current_process", "active_children", "parent_process" class BaseProcess: """ -Process objects represent activity that is run in a separate process + Process objects represent activity that is run in a separate process + + The class is analogous to `threading.Thread` + """ -The class is analogous to `threading.Thread` -""" name: str daemon: bool authkey: bytes @@ -25,66 +26,78 @@ The class is analogous to `threading.Thread` ) -> None: ... def run(self) -> None: """ -Method to be run in sub-process; can be overridden in sub-class -""" + Method to be run in sub-process; can be overridden in sub-class + """ + def start(self) -> None: """ -Start child process -""" + Start child process + """ + def terminate(self) -> None: """ -Terminate process; sends SIGTERM signal or uses TerminateProcess() -""" + Terminate process; sends SIGTERM signal or uses TerminateProcess() + """ + def kill(self) -> None: """ -Terminate process; sends SIGKILL signal or uses TerminateProcess() -""" + Terminate process; sends SIGKILL signal or uses TerminateProcess() + """ + def close(self) -> None: """ -Close the Process object. + Close the Process object. + + This method releases resources held by the Process object. It is + an error to call this method if the child process is still running. + """ -This method releases resources held by the Process object. It is -an error to call this method if the child process is still running. -""" def join(self, timeout: float | None = None) -> None: """ -Wait until child process terminates -""" + Wait until child process terminates + """ + def is_alive(self) -> bool: """ -Return whether process is alive -""" + Return whether process is alive + """ + @property def exitcode(self) -> int | None: """ -Return exit code of process or `None` if it has yet to stop -""" + Return exit code of process or `None` if it has yet to stop + """ + @property def ident(self) -> int | None: """ -Return identifier (PID) of process or `None` if it has yet to start -""" + Return identifier (PID) of process or `None` if it has yet to start + """ + @property def pid(self) -> int | None: """ -Return identifier (PID) of process or `None` if it has yet to start -""" + Return identifier (PID) of process or `None` if it has yet to start + """ + @property def sentinel(self) -> int: """ -Return a file descriptor (Unix) or handle (Windows) suitable for -waiting for process termination. -""" + Return a file descriptor (Unix) or handle (Windows) suitable for + waiting for process termination. 
+ """ def current_process() -> BaseProcess: """ -Return process object representing the current process -""" + Return process object representing the current process + """ + def active_children() -> list[BaseProcess]: """ -Return list of process objects corresponding to live child processes -""" + Return list of process objects corresponding to live child processes + """ + def parent_process() -> BaseProcess | None: """ -Return process object representing the parent process -""" + Return process object representing the parent process + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi index d21cc96dd3bf8..dfdeab7538166 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/queues.pyi @@ -24,8 +24,8 @@ class Queue(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class JoinableQueue(Queue[_T]): def task_done(self) -> None: ... @@ -40,5 +40,5 @@ class SimpleQueue(Generic[_T]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi index ff0ef0627d0f8..325d472f9a599 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/reduction.pyi @@ -18,14 +18,14 @@ else: HAVE_SEND_HANDLE: Final[bool] class ForkingPickler(pickle.Pickler): - """Pickler subclass used by multiprocessing. -""" + """Pickler subclass used by multiprocessing.""" + dispatch_table: _DispatchTableType def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: - """Register a reduce function for a type. -""" + """Register a reduce function for a type.""" + @classmethod def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... loads = pickle.loads @@ -33,52 +33,48 @@ class ForkingPickler(pickle.Pickler): register = ForkingPickler.register def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: - """Replacement for pickle.dump() using ForkingPickler. -""" + """Replacement for pickle.dump() using ForkingPickler.""" if sys.platform == "win32": def duplicate( handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None ) -> int: - """Duplicate a handle. (target_process is a handle not a pid!) -""" + """Duplicate a handle. (target_process is a handle not a pid!)""" + def steal_handle(source_pid: int, handle: int) -> int: - """Steal a handle from process identified by source_pid. -""" + """Steal a handle from process identified by source_pid.""" + def send_handle(conn: connection.PipeConnection[DupHandle, Any], handle: int, destination_pid: int) -> None: - """Send a handle over a local connection. 
-""" + """Send a handle over a local connection.""" + def recv_handle(conn: connection.PipeConnection[Any, DupHandle]) -> int: - """Receive a handle over a local connection. -""" + """Receive a handle over a local connection.""" class DupHandle: - """Picklable wrapper for a handle. -""" + """Picklable wrapper for a handle.""" + def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... def detach(self) -> int: - """Get the handle. This should only be called once. -""" + """Get the handle. This should only be called once.""" else: if sys.version_info < (3, 14): ACKNOWLEDGE: Final[bool] def recvfds(sock: socket, size: int) -> list[int]: - """Receive an array of fds over an AF_UNIX socket. -""" + """Receive an array of fds over an AF_UNIX socket.""" + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: - """Send a handle over a local connection. -""" + """Send a handle over a local connection.""" + def recv_handle(conn: HasFileno) -> int: - """Receive a handle over a local connection. -""" + """Receive a handle over a local connection.""" + def sendfds(sock: socket, fds: list[int]) -> None: - """Send an array of fds over an AF_UNIX socket. -""" + """Send an array of fds over an AF_UNIX socket.""" + def DupFd(fd: int) -> Any: # Return type is really hard to get right - """Return a wrapper for an fd. -""" + """Return a wrapper for an fd.""" # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -100,9 +96,10 @@ else: class AbstractReducer(metaclass=ABCMeta): """Abstract base class for use in implementing a Reduction class -suitable for use in replacing the standard reduction mechanism -used in multiprocessing. -""" + suitable for use in replacing the standard reduction mechanism + used in multiprocessing. + """ + ForkingPickler = _ForkingPickler register = _register dump = _dump diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi index feae80357d427..fd8a166e6637c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -7,24 +7,21 @@ if sys.platform == "win32": __all__ += ["DupSocket"] class DupSocket: - """Picklable wrapper for a socket. -""" + """Picklable wrapper for a socket.""" + def __init__(self, sock: socket) -> None: ... def detach(self) -> socket: - """Get the socket. This should only be called once. -""" + """Get the socket. This should only be called once.""" else: __all__ += ["DupFd"] class DupFd: - """Wrapper for fd which can be used at any time. -""" + """Wrapper for fd which can be used at any time.""" + def __init__(self, fd: int) -> None: ... def detach(self) -> int: - """Get the fd. This should only be called once. -""" + """Get the fd. This should only be called once.""" def stop(timeout: float | None = None) -> None: - """Stop the background thread and clear registered resources. 
-""" + """Stop the background thread and clear registered resources.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi index a9ffd3d68eb26..0f8cc7817d484 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -9,15 +9,15 @@ class ResourceTracker: def ensure_running(self) -> None: """Make sure that resource tracker process is running. -This can be run from any process. Usually a child process will use -the resource created by its parent. -""" + This can be run from any process. Usually a child process will use + the resource created by its parent. + """ + def register(self, name: Sized, rtype: str) -> None: - """Register name of resource with resource tracker. -""" + """Register name of resource with resource tracker.""" + def unregister(self, name: Sized, rtype: str) -> None: - """Unregister name of resource with resource tracker. -""" + """Unregister name of resource with resource tracker.""" if sys.version_info >= (3, 12): def __del__(self) -> None: ... @@ -28,5 +28,4 @@ unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd def main(fd: FileDescriptorOrPath) -> None: - """Run resource tracker. -""" + """Run resource tracker.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi index 1e0e3a17d2db9..1d7059758de5a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -3,6 +3,7 @@ The API of this package is currently provisional. Refer to the documentation for details. """ + import sys from collections.abc import Iterable from types import GenericAlias @@ -15,20 +16,21 @@ _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: """Creates a new shared memory block or attaches to an existing -shared memory block. - -Every shared memory block is assigned a unique name. This enables -one process to create a shared memory block with a particular name -so that a different process can attach to that same shared memory -block using that same name. - -As a resource for sharing data across processes, shared memory blocks -may outlive the original process that created them. When one process -no longer needs access to a shared memory block that might still be -needed by other processes, the close() method should be called. -When a shared memory block is no longer needed by any process, the -unlink() method should be called to ensure proper cleanup. -""" + shared memory block. + + Every shared memory block is assigned a unique name. This enables + one process to create a shared memory block with a particular name + so that a different process can attach to that same shared memory + block using that same name. + + As a resource for sharing data across processes, shared memory blocks + may outlive the original process that created them. When one process + no longer needs access to a shared memory block that might still be + needed by other processes, the close() method should be called. + When a shared memory block is no longer needed by any process, the + unlink() method should be called to ensure proper cleanup. 
+ """ + if sys.version_info >= (3, 13): def __init__(self, name: str | None = None, create: bool = False, size: int = 0, *, track: bool = True) -> None: ... else: @@ -36,45 +38,48 @@ unlink() method should be called to ensure proper cleanup. @property def buf(self) -> memoryview | None: - """A memoryview of contents of the shared memory block. -""" + """A memoryview of contents of the shared memory block.""" + @property def name(self) -> str: - """Unique name that identifies the shared memory block. -""" + """Unique name that identifies the shared memory block.""" + @property def size(self) -> int: - """Size in bytes. -""" + """Size in bytes.""" + def close(self) -> None: """Closes access to the shared memory from this instance but does -not destroy the shared memory block. -""" + not destroy the shared memory block. + """ + def unlink(self) -> None: """Requests that the underlying shared memory block be destroyed. -Unlink should be called once (and only once) across all handles -which have access to the shared memory block, even if these -handles belong to different processes. Closing and unlinking may -happen in any order, but trying to access data inside a shared -memory block after unlinking may result in memory errors, -depending on platform. + Unlink should be called once (and only once) across all handles + which have access to the shared memory block, even if these + handles belong to different processes. Closing and unlinking may + happen in any order, but trying to access data inside a shared + memory block after unlinking may result in memory errors, + depending on platform. + + This method has no effect on Windows, where the only way to + delete a shared memory block is to close all handles. + """ -This method has no effect on Windows, where the only way to -delete a shared memory block is to close all handles. -""" def __del__(self) -> None: ... class ShareableList(Generic[_SLT]): """Pattern for a mutable list-like object shareable via a shared -memory block. It differs from the built-in list type in that these -lists can not change their overall length (i.e. no append, insert, -etc.) + memory block. It differs from the built-in list type in that these + lists can not change their overall length (i.e. no append, insert, + etc.) + + Because values are packed into a memoryview as bytes, the struct + packing format for any storable value must require no more than 8 + characters to describe its format. + """ -Because values are packed into a memoryview as bytes, the struct -packing format for any storable value must require no more than 8 -characters to describe its format. -""" shm: SharedMemory @overload def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... @@ -86,17 +91,18 @@ characters to describe its format. def __len__(self) -> int: ... @property def format(self) -> str: - """The struct packing format used by all currently stored items. -""" + """The struct packing format used by all currently stored items.""" + def count(self, value: _SLT) -> int: - """L.count(value) -> integer -- return number of occurrences of value. -""" + """L.count(value) -> integer -- return number of occurrences of value.""" + def index(self, value: _SLT) -> int: """L.index(value) -> integer -- return first index of value. -Raises ValueError if the value is not present. -""" + Raises ValueError if the value is not present. + """ + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi index f561480e57aeb..7349cb2711eb6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -15,22 +15,25 @@ _CT = TypeVar("_CT", bound=_CData) @overload def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: """ -Returns a ctypes object allocated from shared memory -""" + Returns a ctypes object allocated from shared memory + """ + @overload def RawValue(typecode_or_type: str, *args: Any) -> Any: ... @overload def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: """ -Returns a ctypes array allocated from shared memory -""" + Returns a ctypes array allocated from shared memory + """ + @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: """ -Return a synchronization wrapper for a Value -""" + Return a synchronization wrapper for a Value + """ + @overload def Value( typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None @@ -48,8 +51,9 @@ def Array( typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None ) -> _CT: """ -Return a synchronization wrapper for a RawArray -""" + Return a synchronization wrapper for a RawArray + """ + @overload def Array( typecode_or_type: type[c_char], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi index 58aa3cf225d22..99f1fc3278473 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/spawn.pyi @@ -19,35 +19,39 @@ def set_executable(exe: str) -> None: ... def get_executable() -> str: ... def is_forking(argv: Sequence[str]) -> bool: """ -Return whether commandline indicates we are forking -""" + Return whether commandline indicates we are forking + """ + def freeze_support() -> None: """ -Run code for process object if this in not the main process -""" + Run code for process object if this in not the main process + """ + def get_command_line(**kwds: Any) -> list[str]: """ -Returns prefix of command line used for spawning a child process -""" + Returns prefix of command line used for spawning a child process + """ + def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: """ -Run code specified by data received over pipe -""" + Run code specified by data received over pipe + """ # undocumented def _main(fd: int, parent_sentinel: int) -> int: ... 
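The spawn.pyi entries above annotate freeze_support() and the spawn start-up helpers. A small hedged sketch of the usual main-module guard those docstrings describe, assuming a hypothetical worker() task rather than anything defined in the stubs:

import multiprocessing as mp

def worker() -> None:
    # Hypothetical child task; any picklable module-level callable works.
    print("child pid:", mp.current_process().pid)

if __name__ == "__main__":
    # freeze_support() should be the first multiprocessing call in the main
    # module of a frozen Windows executable; it is a no-op otherwise.
    mp.freeze_support()
    p = mp.Process(target=worker)
    p.start()
    p.join()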
def get_preparation_data(name: str) -> dict[str, Any]: """ -Return info about parent needed by child to unpickle process object -""" + Return info about parent needed by child to unpickle process object + """ old_main_modules: list[ModuleType] def prepare(data: Mapping[str, Any]) -> None: """ -Try to get current process ready to unpickle process object -""" + Try to get current process ready to unpickle process object + """ + def import_main_path(main_path: str) -> None: """ -Set sys.modules['__main__'] to module at main_path -""" + Set sys.modules['__main__'] to module at main_path + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi index 06eee50fa945b..c3592f8bc98c0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/multiprocessing/util.pyi @@ -50,12 +50,14 @@ if sys.version_info >= (3, 14): def sub_warning(msg: object, *args: object) -> None: ... def get_logger() -> Logger: """ -Returns logger used by multiprocessing -""" + Returns logger used by multiprocessing + """ + def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: """ -Turn on logging and add a handler which prints to stderr -""" + Turn on logging and add a handler which prints to stderr + """ + def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... abstract_sockets_supported: Final[bool] @@ -65,8 +67,9 @@ def register_after_fork(obj: _T, func: Callable[[_T], object]) -> None: ... class Finalize(Generic[_R_co]): """ -Class which supports object finalization using weakrefs -""" + Class which supports object finalization using weakrefs + """ + # "args" and "kwargs" are passed as arguments to "callback". @overload def __init__( @@ -99,21 +102,23 @@ Class which supports object finalization using weakrefs getpid: Callable[[], int] = ..., ) -> _R_co: """ -Run the callback unless it has already been called or cancelled -""" + Run the callback unless it has already been called or cancelled + """ + def cancel(self) -> None: """ -Cancel finalization of the object -""" + Cancel finalization of the object + """ + def still_active(self) -> bool: """ -Return whether this finalizer is still waiting to invoke callback -""" + Return whether this finalizer is still waiting to invoke callback + """ def is_exiting() -> bool: """ -Returns true if the process is shutting down -""" + Returns true if the process is shutting down + """ class ForkAwareThreadLock: acquire: Callable[[bool, float], bool] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi index 82d19d60bec26..e7aa5955b8b03 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/netrc.pyi @@ -1,5 +1,5 @@ -"""An object-oriented interface to .netrc files. -""" +"""An object-oriented interface to .netrc files.""" + import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -7,8 +7,8 @@ from typing_extensions import TypeAlias __all__ = ["netrc", "NetrcParseError"] class NetrcParseError(Exception): - """Exception raised on syntax errors in the .netrc file. -""" + """Exception raised on syntax errors in the .netrc file.""" + filename: str | None lineno: int | None msg: str @@ -25,5 +25,4 @@ class netrc: macros: dict[str, list[str]] def __init__(self, file: StrOrBytesPath | None = None) -> None: ... 
def authenticators(self, host: str) -> _NetrcTuple | None: - """Return a (user, account, password) tuple for given host. -""" + """Return a (user, account, password) tuple for given host.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi index 5cda780e7f673..699a0ff4e0b96 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nis.pyi @@ -1,28 +1,31 @@ -"""This module contains functions for accessing NIS maps. -""" +"""This module contains functions for accessing NIS maps.""" + import sys if sys.platform != "win32": def cat(map: str, domain: str = ...) -> dict[str, str]: """cat(map, domain = defaultdomain) -Returns the entire map as a dictionary. Optionally domain can be -specified but it defaults to the system default domain. -""" + Returns the entire map as a dictionary. Optionally domain can be + specified but it defaults to the system default domain. + """ + def get_default_domain() -> str: """get_default_domain() -> str -Corresponds to the C library yp_get_default_domain() call, returning -the default NIS domain. -""" + Corresponds to the C library yp_get_default_domain() call, returning + the default NIS domain. + """ + def maps(domain: str = ...) -> list[str]: """maps(domain = defaultdomain) -Returns an array of all available NIS maps within a domain. If domain -is not specified it defaults to the system default domain. -""" + Returns an array of all available NIS maps within a domain. If domain + is not specified it defaults to the system default domain. + """ + def match(key: str, map: str, domain: str = ...) -> str: """match(key, map, domain = defaultdomain) -Corresponds to the C library yp_match() call, returning the value of -key in the given map. Optionally domain can be specified but it -defaults to the system default domain. -""" + Corresponds to the C library yp_match() call, returning the value of + key in the given map. Optionally domain can be specified but it + defaults to the system default domain. + """ class error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi index 6823e240a79a1..67e82dec00e00 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nntplib.pyi @@ -26,6 +26,7 @@ For descriptions of all methods, read the comments in the code below. Note that all arguments and return values representing article numbers are strings, not numbers, since they are rarely used for calculations. 
""" + import datetime import socket import ssl @@ -50,40 +51,39 @@ __all__ = [ _File: TypeAlias = IO[bytes] | bytes | str | None class NNTPError(Exception): - """Base class for all nntplib exceptions -""" + """Base class for all nntplib exceptions""" + response: str class NNTPReplyError(NNTPError): - """Unexpected [123]xx reply -""" + """Unexpected [123]xx reply""" + class NNTPTemporaryError(NNTPError): - """4xx errors -""" + """4xx errors""" + class NNTPPermanentError(NNTPError): - """5xx errors -""" + """5xx errors""" + class NNTPProtocolError(NNTPError): - """Response does not begin with [1-5] -""" + """Response does not begin with [1-5]""" + class NNTPDataError(NNTPError): - """Error in response data -""" + """Error in response data""" NNTP_PORT: Final = 119 NNTP_SSL_PORT: Final = 563 class GroupInfo(NamedTuple): - """GroupInfo(group, last, first, flag) -""" + """GroupInfo(group, last, first, flag)""" + group: str last: str first: str flag: str class ArticleInfo(NamedTuple): - """ArticleInfo(number, message_id, lines) -""" + """ArticleInfo(number, message_id, lines)""" + number: int message_id: str lines: list[bytes] @@ -91,7 +91,7 @@ class ArticleInfo(NamedTuple): def decode_header(header_str: str) -> str: """Takes a unicode string representing a munged header value and decodes it as a (possibly non-ASCII) readable value. -""" + """ class NNTP: encoding: str @@ -135,6 +135,7 @@ class NNTP: unexpected NNTPPermanentErrors, you might need to set readermode. """ + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... def getwelcome(self) -> str: @@ -142,24 +143,28 @@ class NNTP: (this is read and squirreled away by __init__()). If the response code is 200, posting is allowed; if it 201, posting is not allowed. -""" + """ + def getcapabilities(self) -> dict[str, _list[str]]: """Get the server capabilities, as read by __init__(). If the CAPABILITIES command is not supported, an empty dict is returned. -""" + """ + def set_debuglevel(self, level: int) -> None: """Set the debugging level. Argument 'level' means: 0: no debugging output (default) 1: print commands and responses but not body text etc. 2: also print raw lines read and sent before stripping CR/LF -""" + """ + def debug(self, level: int) -> None: """Set the debugging level. Argument 'level' means: 0: no debugging output (default) 1: print commands and responses but not body text etc. 2: also print raw lines read and sent before stripping CR/LF -""" + """ + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: """Process a CAPABILITIES command. Not supported by all servers. Return: @@ -167,6 +172,7 @@ class NNTP: - caps: a dictionary mapping capability names to lists of tokens (for example {'VERSION': ['2'], 'OVER': [], LIST: ['ACTIVE', 'HEADERS'] }) """ + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: """Process a NEWGROUPS command. Arguments: - date: a date or datetime object @@ -174,6 +180,7 @@ class NNTP: - resp: server response if successful - list: list of newsgroup names """ + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: """Process a NEWNEWS command. Arguments: - group: group name or '*' @@ -182,6 +189,7 @@ class NNTP: - resp: server response if successful - list: list of message ids """ + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: """Process a LIST or LIST ACTIVE command. 
Arguments: - group_pattern: a pattern indicating which groups to query @@ -190,6 +198,7 @@ class NNTP: - resp: server response if successful - list: list of (group, last, first, flag) (strings) """ + def description(self, group: str) -> str: """Get a description for a single group. If more than one group matches ('group' is a pattern), return the first. If no @@ -201,10 +210,11 @@ class NNTP: NOTE: This neither checks for a wildcard in 'group' nor does it check whether the group actually exists. -""" + """ + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: - """Get descriptions for a range of groups. -""" + """Get descriptions for a range of groups.""" + def group(self, name: str) -> tuple[str, int, int, int, str]: """Process a GROUP command. Argument: - group: the group name @@ -215,6 +225,7 @@ class NNTP: - last: last article number - name: the group name """ + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: """Process a HELP command. Argument: - file: Filename string or file object to store the result in @@ -223,6 +234,7 @@ class NNTP: - list: list of strings returned by the server in response to the HELP command """ + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: """Process a STAT command. Argument: - message_spec: article number or message id (if not specified, @@ -232,12 +244,13 @@ class NNTP: - art_num: the article number - message_id: the message id """ + def next(self) -> tuple[str, int, str]: - """Process a NEXT command. No arguments. Return as for STAT. -""" + """Process a NEXT command. No arguments. Return as for STAT.""" + def last(self) -> tuple[str, int, str]: - """Process a LAST command. No arguments. Return as for STAT. -""" + """Process a LAST command. No arguments. Return as for STAT.""" + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: """Process a HEAD command. Argument: - message_spec: article number or message id @@ -246,6 +259,7 @@ class NNTP: - resp: server response if successful - ArticleInfo: (article number, message id, list of header lines) """ + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: """Process a BODY command. Argument: - message_spec: article number or message id @@ -254,6 +268,7 @@ class NNTP: - resp: server response if successful - ArticleInfo: (article number, message id, list of body lines) """ + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: """Process an ARTICLE command. Argument: - message_spec: article number or message id @@ -262,10 +277,12 @@ class NNTP: - resp: server response if successful - ArticleInfo: (article number, message id, list of article lines) """ + def slave(self) -> str: """Process a SLAVE command. Returns: - resp: server response if successful """ + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: """Process an XHDR command (optional server extension). Arguments: - hdr: the header type (e.g. 
'subject') @@ -275,6 +292,7 @@ class NNTP: - resp: server response if successful - list: list of (nr, value) strings """ + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: """Process an XOVER command (optional server extension) Arguments: - start: start of range @@ -284,6 +302,7 @@ class NNTP: - resp: server response if successful - list: list of dicts containing the response fields """ + def over( self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: @@ -303,18 +322,21 @@ class NNTP: NOTE: the "message id" form isn't supported by XOVER """ + def date(self) -> tuple[str, datetime.datetime]: """Process the DATE command. Returns: - resp: server response if successful - date: datetime object """ + def post(self, data: bytes | Iterable[bytes]) -> str: """Process a POST command. Arguments: - data: bytes object, iterable or file containing the article Returns: - resp: server response if successful -""" + """ + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: """Process an IHAVE command. Arguments: - message_id: message-id of the article @@ -322,16 +344,18 @@ class NNTP: Returns: - resp: server response if successful Note that if the server refuses the article an exception is raised. -""" + """ + def quit(self) -> str: """Process a QUIT command and close the socket. Returns: - resp: server response if successful -""" + """ + def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... def starttls(self, context: ssl.SSLContext | None = None) -> None: """Process a STARTTLS command. Arguments: - - context: SSL context to use for the encrypted connection - """ + - context: SSL context to use for the encrypted connection + """ class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -348,5 +372,5 @@ class NNTP_SSL(NNTP): timeout: float = ..., ) -> None: """This works identically to NNTP.__init__, except for the change - in default port and the `ssl_context` argument for SSL connections. - """ + in default port and the `ssl_context` argument for SSL connections. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi index a5f1fb6363257..c10b791bd4683 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nt.pyi @@ -3,6 +3,7 @@ standardized by the C Standard and the POSIX standard (a thinly disguised Unix interface). Refer to the library manual and corresponding Unix manual entries for more information on calls. """ + import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi index 78c016e732467..337fbe46c83d4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ntpath.pyi @@ -3,6 +3,7 @@ Instead of importing this module directly, import os and refer to this module as os.path. """ + import sys from _typeshed import BytesPath, StrOrBytesPath, StrPath from genericpath import ( @@ -126,5 +127,4 @@ else: if sys.version_info >= (3, 13): def isreserved(path: StrOrBytesPath) -> bool: - """Return true if the pathname is reserved by the system. 
-""" + """Return true if the pathname is reserved by the system.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi index c82b1d40798f0..b4341638fe5cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/nturl2path.pyi @@ -3,6 +3,7 @@ This module only exists to provide OS-specific code for urllib.requests, thus do not use directly. """ + import sys from typing_extensions import deprecated @@ -10,20 +11,22 @@ if sys.version_info >= (3, 14): @deprecated("The `nturl2path` module is deprecated since Python 3.14.") def url2pathname(url: str) -> str: """OS-specific conversion from a relative URL of the 'file' scheme -to a file system path; not recommended for general use. -""" + to a file system path; not recommended for general use. + """ + @deprecated("The `nturl2path` module is deprecated since Python 3.14.") def pathname2url(p: str) -> str: """OS-specific conversion from a file system path to a relative URL -of the 'file' scheme; not recommended for general use. -""" + of the 'file' scheme; not recommended for general use. + """ else: def url2pathname(url: str) -> str: """OS-specific conversion from a relative URL of the 'file' scheme -to a file system path; not recommended for general use. -""" + to a file system path; not recommended for general use. + """ + def pathname2url(p: str) -> str: """OS-specific conversion from a file system path to a relative URL -of the 'file' scheme; not recommended for general use. -""" + of the 'file' scheme; not recommended for general use. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi index c635e9b6746b7..dcd77c4188c55 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/numbers.pyi @@ -2,6 +2,7 @@ TODO: Fill out more detailed documentation on the operators. """ + # Note: these stubs are incomplete. The more complex type # signatures are currently omitted. # @@ -67,107 +68,109 @@ class _IntegralLike(_RealLike, Protocol): class Number(metaclass=ABCMeta): """All numbers inherit from this class. -If you just want to check if an argument x is a number, without -caring what kind, use isinstance(x, Number). -""" + If you just want to check if an argument x is a number, without + caring what kind, use isinstance(x, Number). + """ + __slots__ = () @abstractmethod def __hash__(self) -> int: - """The type of the None singleton. -""" + """The type of the None singleton.""" # See comment at the top of the file # for why some of these return types are purposefully vague class Complex(Number, _ComplexLike): """Complex defines the operations that work on the builtin complex type. -In short, those are: a conversion to complex, .real, .imag, +, -, -*, /, **, abs(), .conjugate, ==, and !=. + In short, those are: a conversion to complex, .real, .imag, +, -, + *, /, **, abs(), .conjugate, ==, and !=. + + If it is given heterogeneous arguments, and doesn't have special + knowledge about them, it should fall back to the builtin complex + type as described below. + """ -If it is given heterogeneous arguments, and doesn't have special -knowledge about them, it should fall back to the builtin complex -type as described below. -""" __slots__ = () @abstractmethod def __complex__(self) -> complex: - """Return a builtin complex instance. Called for complex(self). 
-""" + """Return a builtin complex instance. Called for complex(self).""" + def __bool__(self) -> bool: - """True if self != 0. Called for bool(self). -""" + """True if self != 0. Called for bool(self).""" + @property @abstractmethod def real(self) -> _RealLike: """Retrieve the real component of this number. -This should subclass Real. -""" + This should subclass Real. + """ + @property @abstractmethod def imag(self) -> _RealLike: """Retrieve the imaginary component of this number. -This should subclass Real. -""" + This should subclass Real. + """ + @abstractmethod def __add__(self, other) -> _ComplexLike: - """self + other -""" + """self + other""" + @abstractmethod def __radd__(self, other) -> _ComplexLike: - """other + self -""" + """other + self""" + @abstractmethod def __neg__(self) -> _ComplexLike: - """-self -""" + """-self""" + @abstractmethod def __pos__(self) -> _ComplexLike: - """+self -""" + """+self""" + def __sub__(self, other) -> _ComplexLike: - """self - other -""" + """self - other""" + def __rsub__(self, other) -> _ComplexLike: - """other - self -""" + """other - self""" + @abstractmethod def __mul__(self, other) -> _ComplexLike: - """self * other -""" + """self * other""" + @abstractmethod def __rmul__(self, other) -> _ComplexLike: - """other * self -""" + """other * self""" + @abstractmethod def __truediv__(self, other) -> _ComplexLike: - """self / other: Should promote to float when necessary. -""" + """self / other: Should promote to float when necessary.""" + @abstractmethod def __rtruediv__(self, other) -> _ComplexLike: - """other / self -""" + """other / self""" + @abstractmethod def __pow__(self, exponent) -> _ComplexLike: - """self ** exponent; should promote to float or complex when necessary. -""" + """self ** exponent; should promote to float or complex when necessary.""" + @abstractmethod def __rpow__(self, base) -> _ComplexLike: - """base ** self -""" + """base ** self""" + @abstractmethod def __abs__(self) -> _RealLike: - """Returns the Real distance from 0. Called for abs(self). -""" + """Returns the Real distance from 0. Called for abs(self).""" + @abstractmethod def conjugate(self) -> _ComplexLike: - """(x+y*i).conjugate() returns (x-y*i). -""" + """(x+y*i).conjugate() returns (x-y*i).""" + @abstractmethod def __eq__(self, other: object) -> bool: - """self == other -""" + """self == other""" __hash__: ClassVar[None] # type: ignore[assignment] # See comment at the top of the file @@ -175,240 +178,248 @@ This should subclass Real. class Real(Complex, _RealLike): """To Complex, Real adds the operations that work on real numbers. -In short, those are: a conversion to float, trunc(), divmod, -%, <, <=, >, and >=. + In short, those are: a conversion to float, trunc(), divmod, + %, <, <=, >, and >=. + + Real also provides defaults for the derived operations. + """ -Real also provides defaults for the derived operations. -""" __slots__ = () @abstractmethod def __float__(self) -> float: """Any Real can be converted to a native float object. -Called for float(self). -""" + Called for float(self). + """ + @abstractmethod def __trunc__(self) -> _IntegralLike: """trunc(self): Truncates self to an Integral. -Returns an Integral i such that: - * i > 0 iff self > 0; - * abs(i) <= abs(self); - * for any Integral j satisfying the first two conditions, - abs(i) >= abs(j) [i.e. i has "maximal" abs among those]. -i.e. "truncate towards 0". 
-""" + Returns an Integral i such that: + * i > 0 iff self > 0; + * abs(i) <= abs(self); + * for any Integral j satisfying the first two conditions, + abs(i) >= abs(j) [i.e. i has "maximal" abs among those]. + i.e. "truncate towards 0". + """ + @abstractmethod def __floor__(self) -> _IntegralLike: - """Finds the greatest Integral <= self. -""" + """Finds the greatest Integral <= self.""" + @abstractmethod def __ceil__(self) -> _IntegralLike: - """Finds the least Integral >= self. -""" + """Finds the least Integral >= self.""" + @abstractmethod @overload def __round__(self, ndigits: None = None) -> _IntegralLike: """Rounds self to ndigits decimal places, defaulting to 0. -If ndigits is omitted or None, returns an Integral, otherwise -returns a Real. Rounds half toward even. -""" + If ndigits is omitted or None, returns an Integral, otherwise + returns a Real. Rounds half toward even. + """ + @abstractmethod @overload def __round__(self, ndigits: int) -> _RealLike: ... def __divmod__(self, other) -> tuple[_RealLike, _RealLike]: """divmod(self, other): The pair (self // other, self % other). -Sometimes this can be computed faster than the pair of -operations. -""" + Sometimes this can be computed faster than the pair of + operations. + """ + def __rdivmod__(self, other) -> tuple[_RealLike, _RealLike]: """divmod(other, self): The pair (other // self, other % self). -Sometimes this can be computed faster than the pair of -operations. -""" + Sometimes this can be computed faster than the pair of + operations. + """ + @abstractmethod def __floordiv__(self, other) -> _RealLike: - """self // other: The floor() of self/other. -""" + """self // other: The floor() of self/other.""" + @abstractmethod def __rfloordiv__(self, other) -> _RealLike: - """other // self: The floor() of other/self. -""" + """other // self: The floor() of other/self.""" + @abstractmethod def __mod__(self, other) -> _RealLike: - """self % other -""" + """self % other""" + @abstractmethod def __rmod__(self, other) -> _RealLike: - """other % self -""" + """other % self""" + @abstractmethod def __lt__(self, other) -> bool: """self < other -< on Reals defines a total ordering, except perhaps for NaN. -""" + < on Reals defines a total ordering, except perhaps for NaN. + """ + @abstractmethod def __le__(self, other) -> bool: - """self <= other -""" + """self <= other""" + def __complex__(self) -> complex: - """complex(self) == complex(float(self), 0) -""" + """complex(self) == complex(float(self), 0)""" + @property def real(self) -> _RealLike: - """Real numbers are their real component. -""" + """Real numbers are their real component.""" + @property def imag(self) -> Literal[0]: - """Real numbers have no imaginary component. -""" + """Real numbers have no imaginary component.""" + def conjugate(self) -> _RealLike: - """Conjugate is a no-op for Reals. -""" + """Conjugate is a no-op for Reals.""" # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod def __pos__(self) -> _RealLike: - """+self -""" + """+self""" + @abstractmethod def __neg__(self) -> _RealLike: - """-self -""" + """-self""" # See comment at the top of the file # for why some of these return types are purposefully vague class Rational(Real): - """.numerator and .denominator should be in lowest terms. 
-""" + """.numerator and .denominator should be in lowest terms.""" + __slots__ = () @property @abstractmethod def numerator(self) -> _IntegralLike: - """The numerator of a rational number in lowest terms. -""" + """The numerator of a rational number in lowest terms.""" + @property @abstractmethod def denominator(self) -> _IntegralLike: """The denominator of a rational number in lowest terms. -This denominator should be positive. -""" + This denominator should be positive. + """ + def __float__(self) -> float: """float(self) = self.numerator / self.denominator -It's important that this conversion use the integer's "true" -division rather than casting one side to float before dividing -so that ratios of huge integers convert without overflowing. + It's important that this conversion use the integer's "true" + division rather than casting one side to float before dividing + so that ratios of huge integers convert without overflowing. -""" + """ # See comment at the top of the file # for why some of these return types are purposefully vague class Integral(Rational, _IntegralLike): """Integral adds methods that work on integral numbers. -In short, these are conversion to int, pow with modulus, and the -bit-string operations. -""" + In short, these are conversion to int, pow with modulus, and the + bit-string operations. + """ + __slots__ = () @abstractmethod def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __index__(self) -> int: - """Called whenever an index is needed, such as in slicing -""" + """Called whenever an index is needed, such as in slicing""" + @abstractmethod def __pow__(self, exponent, modulus=None) -> _IntegralLike: """self ** exponent % modulus, but maybe faster. -Accept the modulus argument if you want to support the -3-argument version of pow(). Raise a TypeError if exponent < 0 -or any argument isn't Integral. Otherwise, just implement the -2-argument version described in Complex. -""" + Accept the modulus argument if you want to support the + 3-argument version of pow(). Raise a TypeError if exponent < 0 + or any argument isn't Integral. Otherwise, just implement the + 2-argument version described in Complex. + """ + @abstractmethod def __lshift__(self, other) -> _IntegralLike: - """self << other -""" + """self << other""" + @abstractmethod def __rlshift__(self, other) -> _IntegralLike: - """other << self -""" + """other << self""" + @abstractmethod def __rshift__(self, other) -> _IntegralLike: - """self >> other -""" + """self >> other""" + @abstractmethod def __rrshift__(self, other) -> _IntegralLike: - """other >> self -""" + """other >> self""" + @abstractmethod def __and__(self, other) -> _IntegralLike: - """self & other -""" + """self & other""" + @abstractmethod def __rand__(self, other) -> _IntegralLike: - """other & self -""" + """other & self""" + @abstractmethod def __xor__(self, other) -> _IntegralLike: - """self ^ other -""" + """self ^ other""" + @abstractmethod def __rxor__(self, other) -> _IntegralLike: - """other ^ self -""" + """other ^ self""" + @abstractmethod def __or__(self, other) -> _IntegralLike: - """self | other -""" + """self | other""" + @abstractmethod def __ror__(self, other) -> _IntegralLike: - """other | self -""" + """other | self""" + @abstractmethod def __invert__(self) -> _IntegralLike: - """~self -""" + """~self""" + def __float__(self) -> float: - """float(self) == float(int(self)) -""" + """float(self) == float(int(self))""" + @property def numerator(self) -> _IntegralLike: - """Integers are their own numerators. 
-""" + """Integers are their own numerators.""" + @property def denominator(self) -> Literal[1]: - """Integers have a denominator of 1. -""" + """Integers have a denominator of 1.""" # Not actually overridden at runtime, # but we override these in the stub to give them more precise return types: @abstractmethod def __pos__(self) -> _IntegralLike: - """+self -""" + """+self""" + @abstractmethod def __neg__(self) -> _IntegralLike: - """-self -""" + """-self""" + @abstractmethod def __abs__(self) -> _IntegralLike: - """Returns the Real distance from 0. Called for abs(self). -""" + """Returns the Real distance from 0. Called for abs(self).""" + @abstractmethod @overload def __round__(self, ndigits: None = None) -> _IntegralLike: """Rounds self to ndigits decimal places, defaulting to 0. -If ndigits is omitted or None, returns an Integral, otherwise -returns a Real. Rounds half toward even. -""" + If ndigits is omitted or None, returns an Integral, otherwise + returns a Real. Rounds half toward even. + """ + @abstractmethod @overload def __round__(self, ndigits: int) -> _IntegralLike: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi index 8873005c7697e..080a968911290 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/opcode.pyi @@ -2,6 +2,7 @@ opcode module - potentially shared between dis and other modules which operate on bytecodes (e.g. peephole optimizers). """ + import sys from typing import Final, Literal @@ -49,5 +50,4 @@ HAVE_ARGUMENT: Final = 43 EXTENDED_ARG: Final = 69 def stack_effect(opcode: int, oparg: int | None = None, /, *, jump: bool | None = None) -> int: - """Compute the stack effect of the opcode. -""" + """Compute the stack effect of the opcode.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi index d18e1cd698985..000e90a72e303 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/operator.pyi @@ -6,6 +6,7 @@ is equivalent to the expression x+y. The function names are those used for special methods; variants without leading and trailing '__' are also provided for convenience. """ + import sys from _operator import ( abs as abs, @@ -191,11 +192,12 @@ if sys.version_info >= (3, 11): @final class attrgetter(Generic[_T_co]): """Return a callable object that fetches the given attribute(s) from its operand. -After f = attrgetter('name'), the call f(r) returns r.name. -After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date). -After h = attrgetter('name.first', 'name.last'), the call h(r) returns -(r.name.first, r.name.last). -""" + After f = attrgetter('name'), the call f(r) returns r.name. + After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date). + After h = attrgetter('name.first', 'name.last'), the call h(r) returns + (r.name.first, r.name.last). + """ + @overload def __new__(cls, attr: str, /) -> attrgetter[Any]: ... @overload @@ -207,15 +209,15 @@ After h = attrgetter('name.first', 'name.last'), the call h(r) returns @overload def __new__(cls, attr: str, /, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... def __call__(self, obj: Any, /) -> _T_co: - """Call self as a function. -""" + """Call self as a function.""" @final class itemgetter(Generic[_T_co]): """Return a callable object that fetches the given item(s) from its operand. 
-After f = itemgetter(2), the call f(r) returns r[2]. -After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) -""" + After f = itemgetter(2), the call f(r) returns r[2]. + After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) + """ + @overload def __new__(cls, item: _T, /) -> itemgetter[_T]: ... @overload @@ -230,17 +232,16 @@ After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) # # These issues are best demonstrated by the `itertools.check_itertools_recipes.unique_justseen` test. def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: - """Call self as a function. -""" + """Call self as a function.""" @final class methodcaller: """Return a callable object that calls the given method on its operand. -After f = methodcaller('name'), the call f(r) returns r.name(). -After g = methodcaller('name', 'date', foo=1), the call g(r) returns -r.name('date', foo=1). -""" + After f = methodcaller('name'), the call f(r) returns r.name(). + After g = methodcaller('name', 'date', foo=1), the call g(r) returns + r.name('date', foo=1). + """ + def __new__(cls, name: str, /, *args: Any, **kwargs: Any) -> Self: ... def __call__(self, obj: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi index 67c46a9bff19e..f2cd353e92b49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/optparse.pyi @@ -20,6 +20,7 @@ Simple usage example: (options, args) = parser.parse_args() """ + import builtins from _typeshed import MaybeNone, SupportsWrite from abc import abstractmethod @@ -60,77 +61,82 @@ class OptParseError(Exception): class BadOptionError(OptParseError): """ -Raised if an invalid option is seen on the command line. -""" + Raised if an invalid option is seen on the command line. + """ + opt_str: str def __init__(self, opt_str: str) -> None: ... class AmbiguousOptionError(BadOptionError): """ -Raised if an ambiguous option is seen on the command line. -""" + Raised if an ambiguous option is seen on the command line. + """ + possibilities: Iterable[str] def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... class OptionError(OptParseError): """ -Raised if an Option instance is created with invalid or -inconsistent arguments. -""" + Raised if an Option instance is created with invalid or + inconsistent arguments. + """ + option_id: str def __init__(self, msg: str, option: Option) -> None: ... class OptionConflictError(OptionError): """ -Raised if conflicting options are added to an OptionParser. -""" + Raised if conflicting options are added to an OptionParser. + """ + class OptionValueError(OptParseError): """ -Raised if an invalid option value is encountered on the command -line. -""" + Raised if an invalid option value is encountered on the command + line. + """ class HelpFormatter: """ -Abstract base class for formatting option help. OptionParser -instances should use one of the HelpFormatter subclasses for -formatting help; by default IndentedHelpFormatter is used. 
- -Instance attributes: - parser : OptionParser - the controlling OptionParser instance - indent_increment : int - the number of columns to indent per nesting level - max_help_position : int - the maximum starting column for option help text - help_position : int - the calculated starting column for option help text; - initially the same as the maximum - width : int - total number of columns for output (pass None to constructor for - this value to be taken from the $COLUMNS environment variable) - level : int - current indentation level - current_indent : int - current indentation level (in columns) - help_width : int - number of columns available for option help text (calculated) - default_tag : str - text to replace with each option's default value, "%default" - by default. Set to false value to disable default value expansion. - option_strings : { Option : str } - maps Option instances to the snippet of help text explaining - the syntax of that option, e.g. "-h, --help" or - "-fFILE, --file=FILE" - _short_opt_fmt : str - format string controlling how short options with values are - printed in help text. Must be either "%s%s" ("-fFILE") or - "%s %s" ("-f FILE"), because those are the two syntaxes that - Optik supports. - _long_opt_fmt : str - similar but for long options; must be either "%s %s" ("--file FILE") - or "%s=%s" ("--file=FILE"). -""" + Abstract base class for formatting option help. OptionParser + instances should use one of the HelpFormatter subclasses for + formatting help; by default IndentedHelpFormatter is used. + + Instance attributes: + parser : OptionParser + the controlling OptionParser instance + indent_increment : int + the number of columns to indent per nesting level + max_help_position : int + the maximum starting column for option help text + help_position : int + the calculated starting column for option help text; + initially the same as the maximum + width : int + total number of columns for output (pass None to constructor for + this value to be taken from the $COLUMNS environment variable) + level : int + current indentation level + current_indent : int + current indentation level (in columns) + help_width : int + number of columns available for option help text (calculated) + default_tag : str + text to replace with each option's default value, "%default" + by default. Set to false value to disable default value expansion. + option_strings : { Option : str } + maps Option instances to the snippet of help text explaining + the syntax of that option, e.g. "-h, --help" or + "-fFILE, --file=FILE" + _short_opt_fmt : str + format string controlling how short options with values are + printed in help text. Must be either "%s%s" ("-fFILE") or + "%s %s" ("-f FILE"), because those are the two syntaxes that + Optik supports. + _long_opt_fmt : str + similar but for long options; must be either "%s %s" ("--file FILE") + or "%s=%s" ("--file=FILE"). + """ + NO_DEFAULT_VALUE: str _long_opt_fmt: str _short_opt_fmt: str @@ -156,8 +162,8 @@ Instance attributes: def format_heading(self, heading: str) -> str: ... def format_option(self, option: Option) -> str: ... def format_option_strings(self, option: Option) -> str: - """Return a comma-separated list of option strings & metavariables. -""" + """Return a comma-separated list of option strings & metavariables.""" + @abstractmethod def format_usage(self, usage: str) -> str: ... def indent(self) -> None: ... @@ -167,8 +173,8 @@ Instance attributes: def store_option_strings(self, parser: OptionParser) -> None: ... 
class IndentedHelpFormatter(HelpFormatter): - """Format help with indented section bodies. - """ + """Format help with indented section bodies.""" + def __init__( self, indent_increment: int = 2, @@ -180,8 +186,8 @@ class IndentedHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... class TitledHelpFormatter(HelpFormatter): - """Format help with underlined section headers. - """ + """Format help with underlined section headers.""" + def __init__( self, indent_increment: int = 0, @@ -194,23 +200,24 @@ class TitledHelpFormatter(HelpFormatter): class Option: """ -Instance attributes: - _short_opts : [string] - _long_opts : [string] - - action : string - type : string - dest : string - default : any - nargs : int - const : any - choices : [string] - callback : function - callback_args : (any*) - callback_kwargs : { string : any } - help : string - metavar : string -""" + Instance attributes: + _short_opts : [string] + _long_opts : [string] + + action : string + type : string + dest : string + default : any + nargs : int + const : any + choices : [string] + callback : function + callback_args : (any*) + callback_kwargs : { string : any } + help : string + metavar : string + """ + ACTIONS: tuple[str, ...] ALWAYS_TYPED_ACTIONS: tuple[str, ...] ATTRS: list[str] @@ -276,34 +283,35 @@ make_option = Option class OptionContainer: """ -Abstract base class. - -Class attributes: - standard_option_list : [Option] - list of standard options that will be accepted by all instances - of this parser class (intended to be overridden by subclasses). - -Instance attributes: - option_list : [Option] - the list of Option objects contained by this OptionContainer - _short_opt : { string : Option } - dictionary mapping short option strings, eg. "-f" or "-X", - to the Option instances that implement them. If an Option - has multiple short option strings, it will appear in this - dictionary multiple times. [1] - _long_opt : { string : Option } - dictionary mapping long option strings, eg. "--file" or - "--exclude", to the Option instances that implement them. - Again, a given Option can occur multiple times in this - dictionary. [1] - defaults : { string : any } - dictionary mapping option destination names to default - values for each destination [1] - -[1] These mappings are common to (shared by) all components of the - controlling OptionParser, where they are initially created. + Abstract base class. + + Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + + Instance attributes: + option_list : [Option] + the list of Option objects contained by this OptionContainer + _short_opt : { string : Option } + dictionary mapping short option strings, eg. "-f" or "-X", + to the Option instances that implement them. If an Option + has multiple short option strings, it will appear in this + dictionary multiple times. [1] + _long_opt : { string : Option } + dictionary mapping long option strings, eg. "--file" or + "--exclude", to the Option instances that implement them. + Again, a given Option can occur multiple times in this + dictionary. [1] + defaults : { string : any } + dictionary mapping option destination names to default + values for each destination [1] + + [1] These mappings are common to (shared by) all components of the + controlling OptionParser, where they are initially created. 
+ + """ -""" _long_opt: dict[str, Option] _short_opt: dict[str, Option] conflict_handler: str @@ -319,8 +327,9 @@ Instance attributes: @overload def add_option(self, opt: Option, /) -> Option: """add_option(Option) -add_option(opt_str, ..., kwarg=val, ...) -""" + add_option(opt_str, ..., kwarg=val, ...) + """ + @overload def add_option( self, @@ -343,8 +352,8 @@ add_option(opt_str, ..., kwarg=val, ...) ) -> Option: ... def add_options(self, option_list: Iterable[Option]) -> None: ... def destroy(self) -> None: - """see OptionParser.destroy(). -""" + """see OptionParser.destroy().""" + def format_option_help(self, formatter: HelpFormatter) -> str: ... def format_description(self, formatter: HelpFormatter) -> str: ... def format_help(self, formatter: HelpFormatter) -> str: ... @@ -368,17 +377,19 @@ class Values: def _update(self, dict: Mapping[str, object], mode: Literal["careful", "loose"]) -> None: ... def _update_careful(self, dict: Mapping[str, object]) -> None: """ -Update the option values from an arbitrary dictionary, but only -use keys from dict that already have a corresponding attribute -in self. Any keys in dict without a corresponding attribute -are silently ignored. -""" + Update the option values from an arbitrary dictionary, but only + use keys from dict that already have a corresponding attribute + in self. Any keys in dict without a corresponding attribute + are silently ignored. + """ + def _update_loose(self, dict: Mapping[str, object]) -> None: """ -Update the option values from an arbitrary dictionary, -using all keys from the dictionary regardless of whether -they have a corresponding attribute in self or not. -""" + Update the option values from an arbitrary dictionary, + using all keys from the dictionary regardless of whether + they have a corresponding attribute in self or not. + """ + def ensure_value(self, attr: str, value: object) -> Any: ... # return type cannot be known statically def read_file(self, filename: str, mode: Literal["careful", "loose"] = "careful") -> None: ... def read_module(self, modname: str, mode: Literal["careful", "loose"] = "careful") -> None: ... @@ -392,72 +403,73 @@ they have a corresponding attribute in self or not. class OptionParser(OptionContainer): """ -Class attributes: - standard_option_list : [Option] - list of standard options that will be accepted by all instances - of this parser class (intended to be overridden by subclasses). - -Instance attributes: - usage : string - a usage string for your program. Before it is displayed - to the user, "%prog" will be expanded to the name of - your program (self.prog or os.path.basename(sys.argv[0])). - prog : string - the name of the current program (to override - os.path.basename(sys.argv[0])). - description : string - A paragraph of text giving a brief overview of your program. - optparse reformats this paragraph to fit the current terminal - width and prints it when the user requests help (after usage, - but before the list of options). - epilog : string - paragraph of help text to print after option help - - option_groups : [OptionGroup] - list of option groups in this parser (option groups are - irrelevant for parsing the command-line, but very useful - for generating help) - - allow_interspersed_args : bool = true - if true, positional arguments may be interspersed with options. 
- Assuming -a and -b each take a single argument, the command-line - -ablah foo bar -bboo baz - will be interpreted the same as - -ablah -bboo -- foo bar baz - If this flag were false, that command line would be interpreted as - -ablah -- foo bar -bboo baz - -- ie. we stop processing options as soon as we see the first - non-option argument. (This is the tradition followed by - Python's getopt module, Perl's Getopt::Std, and other argument- - parsing libraries, but it is generally annoying to users.) - - process_default_values : bool = true - if true, option default values are processed similarly to option - values from the command line: that is, they are passed to the - type-checking function for the option's type (as long as the - default value is a string). (This really only matters if you - have defined custom types; see SF bug #955889.) Set it to false - to restore the behaviour of Optik 1.4.1 and earlier. - - rargs : [string] - the argument list currently being parsed. Only set when - parse_args() is active, and continually trimmed down as - we consume arguments. Mainly there for the benefit of - callback options. - largs : [string] - the list of leftover arguments that we have skipped while - parsing options. If allow_interspersed_args is false, this - list is always empty. - values : Values - the set of option values currently being accumulated. Only - set when parse_args() is active. Also mainly for callbacks. - -Because of the 'rargs', 'largs', and 'values' attributes, -OptionParser is not thread-safe. If, for some perverse reason, you -need to parse command-line arguments simultaneously in different -threads, use different OptionParser instances. + Class attributes: + standard_option_list : [Option] + list of standard options that will be accepted by all instances + of this parser class (intended to be overridden by subclasses). + + Instance attributes: + usage : string + a usage string for your program. Before it is displayed + to the user, "%prog" will be expanded to the name of + your program (self.prog or os.path.basename(sys.argv[0])). + prog : string + the name of the current program (to override + os.path.basename(sys.argv[0])). + description : string + A paragraph of text giving a brief overview of your program. + optparse reformats this paragraph to fit the current terminal + width and prints it when the user requests help (after usage, + but before the list of options). + epilog : string + paragraph of help text to print after option help + + option_groups : [OptionGroup] + list of option groups in this parser (option groups are + irrelevant for parsing the command-line, but very useful + for generating help) + + allow_interspersed_args : bool = true + if true, positional arguments may be interspersed with options. + Assuming -a and -b each take a single argument, the command-line + -ablah foo bar -bboo baz + will be interpreted the same as + -ablah -bboo -- foo bar baz + If this flag were false, that command line would be interpreted as + -ablah -- foo bar -bboo baz + -- ie. we stop processing options as soon as we see the first + non-option argument. (This is the tradition followed by + Python's getopt module, Perl's Getopt::Std, and other argument- + parsing libraries, but it is generally annoying to users.) 
+ + process_default_values : bool = true + if true, option default values are processed similarly to option + values from the command line: that is, they are passed to the + type-checking function for the option's type (as long as the + default value is a string). (This really only matters if you + have defined custom types; see SF bug #955889.) Set it to false + to restore the behaviour of Optik 1.4.1 and earlier. + + rargs : [string] + the argument list currently being parsed. Only set when + parse_args() is active, and continually trimmed down as + we consume arguments. Mainly there for the benefit of + callback options. + largs : [string] + the list of leftover arguments that we have skipped while + parsing options. If allow_interspersed_args is false, this + list is always empty. + values : Values + the set of option values currently being accumulated. Only + set when parse_args() is active. Also mainly for callbacks. + + Because of the 'rargs', 'largs', and 'values' attributes, + OptionParser is not thread-safe. If, for some perverse reason, you + need to parse command-line arguments simultaneously in different + threads, use different OptionParser instances. + + """ -""" allow_interspersed_args: bool epilog: str | None formatter: HelpFormatter @@ -493,21 +505,23 @@ threads, use different OptionParser instances. def _match_long_opt(self, opt: str) -> str: """_match_long_opt(opt : string) -> string -Determine which long option string 'opt' matches, ie. which one -it is an unambiguous abbreviation for. Raises BadOptionError if -'opt' doesn't unambiguously match any long option string. -""" + Determine which long option string 'opt' matches, ie. which one + it is an unambiguous abbreviation for. Raises BadOptionError if + 'opt' doesn't unambiguously match any long option string. + """ + def _populate_option_list(self, option_list: Iterable[Option] | None, add_help: bool = True) -> None: ... def _process_args(self, largs: list[str], rargs: list[str], values: Values) -> None: """_process_args(largs : [string], - rargs : [string], - values : Values) + rargs : [string], + values : Values) + + Process command-line arguments and populate 'values', consuming + options and arguments from 'rargs'. If 'allow_interspersed_args' is + false, stop at the first non-option argument. If true, accumulate any + interspersed non-option arguments in 'largs'. + """ -Process command-line arguments and populate 'values', consuming -options and arguments from 'rargs'. If 'allow_interspersed_args' is -false, stop at the first non-option argument. If true, accumulate any -interspersed non-option arguments in 'largs'. -""" def _process_long_opt(self, rargs: list[str], values: Values) -> None: ... def _process_short_opts(self, rargs: list[str], values: Values) -> None: ... @overload @@ -516,35 +530,39 @@ interspersed non-option arguments in 'largs'. def add_option_group(self, title: str, /, description: str | None = None) -> OptionGroup: ... def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: """ -check_values(values : Values, args : [string]) --> (values : Values, args : [string]) - -Check that the supplied option values and leftover arguments are -valid. Returns the option values and leftover arguments -(possibly adjusted, possibly completely new -- whatever you -like). Default implementation just returns the passed-in -values; subclasses may override as desired. 
-""" + check_values(values : Values, args : [string]) + -> (values : Values, args : [string]) + + Check that the supplied option values and leftover arguments are + valid. Returns the option values and leftover arguments + (possibly adjusted, possibly completely new -- whatever you + like). Default implementation just returns the passed-in + values; subclasses may override as desired. + """ + def disable_interspersed_args(self) -> None: """Set parsing to stop on the first non-option. Use this if -you have a command processor which runs another command that -has options of its own and you want to make sure these options -don't get confused. -""" + you have a command processor which runs another command that + has options of its own and you want to make sure these options + don't get confused. + """ + def enable_interspersed_args(self) -> None: """Set parsing to not stop on the first non-option, allowing -interspersing switches with command arguments. This is the -default behavior. See also disable_interspersed_args() and the -class documentation description of the attribute -allow_interspersed_args. -""" + interspersing switches with command arguments. This is the + default behavior. See also disable_interspersed_args() and the + class documentation description of the attribute + allow_interspersed_args. + """ + def error(self, msg: str) -> NoReturn: """error(msg : string) -Print a usage message incorporating 'msg' to stderr and exit. -If you override this in a subclass, it should not return -- it -should either exit or raise an exception. -""" + Print a usage message incorporating 'msg' to stderr and exit. + If you override this in a subclass, it should not return -- it + should either exit or raise an exception. + """ + def exit(self, status: int = 0, msg: str | None = None) -> NoReturn: ... def expand_prog_name(self, s: str) -> str: ... def format_epilog(self, formatter: HelpFormatter) -> str: ... @@ -557,41 +575,45 @@ should either exit or raise an exception. def get_version(self) -> str: ... def parse_args(self, args: list[str] | None = None, values: Values | None = None) -> tuple[Values, list[str]]: """ -parse_args(args : [string] = sys.argv[1:], - values : Values = None) --> (values : Values, args : [string]) - -Parse the command-line options found in 'args' (default: -sys.argv[1:]). Any errors result in a call to 'error()', which -by default prints the usage message to stderr and calls -sys.exit() with an error message. On success returns a pair -(values, args) where 'values' is a Values instance (with all -your option values) and 'args' is the list of arguments left -over after parsing options. -""" + parse_args(args : [string] = sys.argv[1:], + values : Values = None) + -> (values : Values, args : [string]) + + Parse the command-line options found in 'args' (default: + sys.argv[1:]). Any errors result in a call to 'error()', which + by default prints the usage message to stderr and calls + sys.exit() with an error message. On success returns a pair + (values, args) where 'values' is a Values instance (with all + your option values) and 'args' is the list of arguments left + over after parsing options. + """ + def print_usage(self, file: SupportsWrite[str] | None = None) -> None: """print_usage(file : file = stdout) -Print the usage message for the current program (self.usage) to -'file' (default stdout). Any occurrence of the string "%prog" in -self.usage is replaced with the name of the current program -(basename of sys.argv[0]). 
Does nothing if self.usage is empty -or not defined. -""" + Print the usage message for the current program (self.usage) to + 'file' (default stdout). Any occurrence of the string "%prog" in + self.usage is replaced with the name of the current program + (basename of sys.argv[0]). Does nothing if self.usage is empty + or not defined. + """ + def print_help(self, file: SupportsWrite[str] | None = None) -> None: """print_help(file : file = stdout) -Print an extended help message, listing all options and any -help text provided with them, to 'file' (default stdout). -""" + Print an extended help message, listing all options and any + help text provided with them, to 'file' (default stdout). + """ + def print_version(self, file: SupportsWrite[str] | None = None) -> None: """print_version(file : file = stdout) -Print the version message for this program (self.version) to -'file' (default stdout). As with print_usage(), any occurrence -of "%prog" in self.version is replaced by the current program's -name. Does nothing if self.version is empty or undefined. -""" + Print the version message for this program (self.version) to + 'file' (default stdout). As with print_usage(), any occurrence + of "%prog" in self.version is replaced by the current program's + name. Does nothing if self.version is empty or undefined. + """ + def set_default(self, dest: str, value: Any) -> None: ... # default value can be "any" type def set_defaults(self, **kwargs: Any) -> None: ... # default values can be "any" type def set_process_default_values(self, process: bool) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 13cd998b7ff45..88f6a919a15fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -20,6 +20,7 @@ only use functions that are defined by all platforms (e.g., unlink and opendir), and leave all pathname manipulation to os.path (e.g., split and join). """ + import sys from _typeshed import ( AnyStr_co, @@ -799,15 +800,16 @@ TMP_MAX: Final[int] # Undocumented, but used by tempfile class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): """stat_result: Result from stat, fstat, or lstat. -This object may be accessed either as a tuple of - (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) -or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on. + This object may be accessed either as a tuple of + (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) + or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on. -Posix/windows: If your platform supports st_blksize, st_blocks, st_rdev, -or st_flags, they are available as attributes only. + Posix/windows: If your platform supports st_blksize, st_blocks, st_rdev, + or st_flags, they are available as attributes only. + + See os.stat for more information. + """ -See os.stat for more information. -""" # The constructor of this class takes an iterable of variable length (though it must be at least 10). # # However, this class behaves like a tuple of 10 elements, @@ -823,40 +825,39 @@ See os.stat for more information. 
@property def st_mode(self) -> int: # protection bits, - """protection bits -""" + """protection bits""" + @property def st_ino(self) -> int: # inode number, - """inode -""" + """inode""" + @property def st_dev(self) -> int: # device, - """device -""" + """device""" + @property def st_nlink(self) -> int: # number of hard links, - """number of hard links -""" + """number of hard links""" + @property def st_uid(self) -> int: # user id of owner, - """user ID of owner -""" + """user ID of owner""" + @property def st_gid(self) -> int: # group id of owner, - """group ID of owner -""" + """group ID of owner""" + @property def st_size(self) -> int: # size of file, in bytes, - """total size, in bytes -""" + """total size, in bytes""" + @property def st_atime(self) -> float: # time of most recent access, - """time of last access -""" + """time of last access""" + @property def st_mtime(self) -> float: # time of most recent content modification, - """time of last modification -""" + """time of last modification""" # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) if sys.version_info >= (3, 12) and sys.platform == "win32": @property @@ -866,75 +867,66 @@ Use st_birthtime instead to retrieve the file creation time. \ In the future, this property will contain the last metadata change time.""" ) def st_ctime(self) -> float: - """time of last change -""" + """time of last change""" else: @property def st_ctime(self) -> float: - """time of last change -""" + """time of last change""" @property def st_atime_ns(self) -> int: # time of most recent access, in nanoseconds - """time of last access in nanoseconds -""" + """time of last access in nanoseconds""" + @property def st_mtime_ns(self) -> int: # time of most recent content modification in nanoseconds - """time of last modification in nanoseconds -""" + """time of last modification in nanoseconds""" # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds @property def st_ctime_ns(self) -> int: - """time of last change in nanoseconds -""" + """time of last change in nanoseconds""" if sys.platform == "win32": @property def st_file_attributes(self) -> int: - """Windows file attribute bits -""" + """Windows file attribute bits""" + @property def st_reparse_tag(self) -> int: - """Windows reparse tag -""" + """Windows reparse tag""" if sys.version_info >= (3, 12): @property def st_birthtime(self) -> float: # time of file creation in seconds - """time of creation -""" + """time of creation""" + @property def st_birthtime_ns(self) -> int: # time of file creation in nanoseconds - """time of creation in nanoseconds -""" + """time of creation in nanoseconds""" else: @property def st_blocks(self) -> int: # number of blocks allocated for file - """number of blocks allocated -""" + """number of blocks allocated""" + @property def st_blksize(self) -> int: # filesystem blocksize - """blocksize for filesystem I/O -""" + """blocksize for filesystem I/O""" + @property def st_rdev(self) -> int: # type of device if an inode device - """device type (if inode device) -""" + """device type (if inode device)""" if sys.platform != "linux": # These properties are available on MacOS, but not Ubuntu. 
# On other Unix systems (such as FreeBSD), the following attributes may be # available (but may be only filled out if root tries to use them): @property def st_gen(self) -> int: # file generation number - """generation number -""" + """generation number""" + @property def st_birthtime(self) -> float: # time of file creation in seconds - """time of creation -""" + """time of creation""" if sys.platform == "darwin": @property def st_flags(self) -> int: # user defined flags for file - """user defined flags for file -""" + """user defined flags for file""" # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 @@ -943,13 +935,12 @@ In the future, this property will contain the last metadata change time.""" # on the allowlist for use as a Protocol starting in 3.14. @runtime_checkable class PathLike(ABC, Protocol[AnyStr_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """Abstract base class for implementing the file system path protocol. -""" + """Abstract base class for implementing the file system path protocol.""" + __slots__ = () @abstractmethod def __fspath__(self) -> AnyStr_co: - """Return the file system path representation of the object. -""" + """Return the file system path representation of the object.""" @overload def listdir(path: StrPath | None = None) -> list[str]: @@ -966,6 +957,7 @@ On some platforms, path may also be specified as an open file descriptor;\\ The list is in arbitrary order. It does not include the special entries '.' and '..' even if they are present in the directory. """ + @overload def listdir(path: BytesPath) -> list[bytes]: ... @overload @@ -977,48 +969,47 @@ class DirEntry(Generic[AnyStr]): @property def name(self) -> AnyStr: - """the entry's base filename, relative to scandir() "path" argument -""" + """the entry's base filename, relative to scandir() "path" argument""" + @property def path(self) -> AnyStr: - """the entry's full path name; equivalent to os.path.join(scandir_path, entry.name) -""" + """the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)""" + def inode(self) -> int: - """Return inode of the entry; cached per entry. -""" + """Return inode of the entry; cached per entry.""" + def is_dir(self, *, follow_symlinks: bool = True) -> bool: - """Return True if the entry is a directory; cached per entry. -""" + """Return True if the entry is a directory; cached per entry.""" + def is_file(self, *, follow_symlinks: bool = True) -> bool: - """Return True if the entry is a file; cached per entry. -""" + """Return True if the entry is a file; cached per entry.""" + def is_symlink(self) -> bool: - """Return True if the entry is a symbolic link; cached per entry. -""" + """Return True if the entry is a symbolic link; cached per entry.""" + def stat(self, *, follow_symlinks: bool = True) -> stat_result: - """Return stat_result object for the entry; cached per entry. -""" + """Return stat_result object for the entry; cached per entry.""" + def __fspath__(self) -> AnyStr: - """Returns the path for the entry. -""" + """Returns the path for the entry.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" if sys.version_info >= (3, 12): def is_junction(self) -> bool: - """Return True if the entry is a junction; cached per entry. 
-""" + """Return True if the entry is a junction; cached per entry.""" @final class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): """statvfs_result: Result from statvfs or fstatvfs. -This object may be accessed either as a tuple of - (bsize, frsize, blocks, bfree, bavail, files, ffree, favail, flag, namemax), -or via the attributes f_bsize, f_frsize, f_blocks, f_bfree, and so on. + This object may be accessed either as a tuple of + (bsize, frsize, blocks, bfree, bavail, files, ffree, favail, flag, namemax), + or via the attributes f_bsize, f_frsize, f_blocks, f_bfree, and so on. + + See os.statvfs for more information. + """ -See os.statvfs for more information. -""" if sys.version_info >= (3, 10): __match_args__: Final = ( "f_bsize", @@ -1059,195 +1050,199 @@ See os.statvfs for more information. # ----- os function stubs ----- def fsencode(filename: StrOrBytesPath) -> bytes: """Encode filename (an os.PathLike, bytes, or str) to the filesystem -encoding with 'surrogateescape' error handler, return bytes unchanged. -On Windows, use 'strict' error handler if the file system encoding is -'mbcs' (which is the default encoding). -""" + encoding with 'surrogateescape' error handler, return bytes unchanged. + On Windows, use 'strict' error handler if the file system encoding is + 'mbcs' (which is the default encoding). + """ + def fsdecode(filename: StrOrBytesPath) -> str: """Decode filename (an os.PathLike, bytes, or str) from the filesystem -encoding with 'surrogateescape' error handler, return str unchanged. On -Windows, use 'strict' error handler if the file system encoding is -'mbcs' (which is the default encoding). -""" + encoding with 'surrogateescape' error handler, return str unchanged. On + Windows, use 'strict' error handler if the file system encoding is + 'mbcs' (which is the default encoding). + """ + @overload def fspath(path: str) -> str: """Return the file system path representation of the object. -If the object is str or bytes, then allow it to pass through as-is. If the -object defines __fspath__(), then return the result of that method. All other -types raise a TypeError. -""" + If the object is str or bytes, then allow it to pass through as-is. If the + object defines __fspath__(), then return the result of that method. All other + types raise a TypeError. + """ + @overload def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[AnyStr]) -> AnyStr: ... def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: """Returns the sequence of directories that will be searched for the -named executable (similar to a shell) when launching a process. + named executable (similar to a shell) when launching a process. + + *env* must be an environment variable dict or None. If *env* is None, + os.environ will be used. + """ -*env* must be an environment variable dict or None. If *env* is None, -os.environ will be used. -""" def getlogin() -> str: - """Return the actual login name. -""" + """Return the actual login name.""" + def getpid() -> int: - """Return the current process id. -""" + """Return the current process id.""" + def getppid() -> int: """Return the parent's process id. -If the parent process has already exited, Windows machines will still -return its id; others systems will return the id of the 'init' process (1). -""" + If the parent process has already exited, Windows machines will still + return its id; others systems will return the id of the 'init' process (1). 
+ """ + def strerror(code: int, /) -> str: - """Translate an error code to a message string. -""" + """Translate an error code to a message string.""" + def umask(mask: int, /) -> int: - """Set the current numeric umask and return the previous umask. -""" + """Set the current numeric umask and return the previous umask.""" + @final class uname_result(structseq[str], tuple[str, str, str, str, str]): """uname_result: Result from os.uname(). -This object may be accessed either as a tuple of - (sysname, nodename, release, version, machine), -or via the attributes sysname, nodename, release, version, and machine. + This object may be accessed either as a tuple of + (sysname, nodename, release, version, machine), + or via the attributes sysname, nodename, release, version, and machine. + + See os.uname for more information. + """ -See os.uname for more information. -""" if sys.version_info >= (3, 10): __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") @property def sysname(self) -> str: - """operating system name -""" + """operating system name""" + @property def nodename(self) -> str: - """name of machine on network (implementation-defined) -""" + """name of machine on network (implementation-defined)""" + @property def release(self) -> str: - """operating system release -""" + """operating system release""" + @property def version(self) -> str: - """operating system version -""" + """operating system version""" + @property def machine(self) -> str: - """hardware identifier -""" + """hardware identifier""" if sys.platform != "win32": def ctermid() -> str: - """Return the name of the controlling terminal for this process. -""" + """Return the name of the controlling terminal for this process.""" + def getegid() -> int: - """Return the current process's effective group id. -""" + """Return the current process's effective group id.""" + def geteuid() -> int: - """Return the current process's effective user id. -""" + """Return the current process's effective user id.""" + def getgid() -> int: - """Return the current process's group id. -""" + """Return the current process's group id.""" + def getgrouplist(user: str, group: int, /) -> list[int]: """Returns a list of groups to which a user belongs. - user - username to lookup - group - base group id of the user -""" + user + username to lookup + group + base group id of the user + """ + def getgroups() -> list[int]: # Unix only, behaves differently on Mac - """Return list of supplemental group IDs for the process. -""" + """Return list of supplemental group IDs for the process.""" + def initgroups(username: str, gid: int, /) -> None: """Initialize the group access list. -Call the system initgroups() to initialize the group access list with all of -the groups of which the specified username is a member, plus the specified -group id. -""" + Call the system initgroups() to initialize the group access list with all of + the groups of which the specified username is a member, plus the specified + group id. + """ + def getpgid(pid: int) -> int: - """Call the system call getpgid(), and return the result. -""" + """Call the system call getpgid(), and return the result.""" + def getpgrp() -> int: - """Return the current process group id. -""" + """Return the current process group id.""" + def getpriority(which: int, who: int) -> int: - """Return program scheduling priority. -""" + """Return program scheduling priority.""" + def setpriority(which: int, who: int, priority: int) -> None: - """Set program scheduling priority. 
-""" + """Set program scheduling priority.""" if sys.platform != "darwin": def getresuid() -> tuple[int, int, int]: - """Return a tuple of the current process's real, effective, and saved user ids. -""" + """Return a tuple of the current process's real, effective, and saved user ids.""" + def getresgid() -> tuple[int, int, int]: - """Return a tuple of the current process's real, effective, and saved group ids. -""" + """Return a tuple of the current process's real, effective, and saved group ids.""" def getuid() -> int: - """Return the current process's user id. -""" + """Return the current process's user id.""" + def setegid(egid: int, /) -> None: - """Set the current process's effective group id. -""" + """Set the current process's effective group id.""" + def seteuid(euid: int, /) -> None: - """Set the current process's effective user id. -""" + """Set the current process's effective user id.""" + def setgid(gid: int, /) -> None: - """Set the current process's group id. -""" + """Set the current process's group id.""" + def setgroups(groups: Sequence[int], /) -> None: - """Set the groups of the current process to list. -""" + """Set the groups of the current process to list.""" + def setpgrp() -> None: - """Make the current process the leader of its process group. -""" + """Make the current process the leader of its process group.""" + def setpgid(pid: int, pgrp: int, /) -> None: - """Call the system call setpgid(pid, pgrp). -""" + """Call the system call setpgid(pid, pgrp).""" + def setregid(rgid: int, egid: int, /) -> None: - """Set the current process's real and effective group ids. -""" + """Set the current process's real and effective group ids.""" if sys.platform != "darwin": def setresgid(rgid: int, egid: int, sgid: int, /) -> None: - """Set the current process's real, effective, and saved group ids. -""" + """Set the current process's real, effective, and saved group ids.""" + def setresuid(ruid: int, euid: int, suid: int, /) -> None: - """Set the current process's real, effective, and saved user ids. -""" + """Set the current process's real, effective, and saved user ids.""" def setreuid(ruid: int, euid: int, /) -> None: - """Set the current process's real and effective user ids. -""" + """Set the current process's real and effective user ids.""" + def getsid(pid: int, /) -> int: - """Call the system call getsid(pid) and return the result. -""" + """Call the system call getsid(pid) and return the result.""" + def setsid() -> None: - """Call the system call setsid(). -""" + """Call the system call setsid().""" + def setuid(uid: int, /) -> None: - """Set the current process's user id. -""" + """Set the current process's user id.""" + def uname() -> uname_result: """Return an object identifying the current operating system. -The object behaves like a named tuple with the following fields: - (sysname, nodename, release, version, machine) -""" + The object behaves like a named tuple with the following fields: + (sysname, nodename, release, version, machine) + """ @overload def getenv(key: str) -> str | None: """Get an environment variable, return None if it doesn't exist. -The optional second argument can specify an alternate default. -key, default and the result are str. -""" + The optional second argument can specify an alternate default. + key, default and the result are str. + """ + @overload def getenv(key: str, default: _T) -> str | _T: ... 
@@ -1255,25 +1250,24 @@ if sys.platform != "win32": @overload def getenvb(key: bytes) -> bytes | None: """Get an environment variable, return None if it doesn't exist. -The optional second argument can specify an alternate default. -key, default and the result are bytes. -""" + The optional second argument can specify an alternate default. + key, default and the result are bytes. + """ + @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... def putenv(name: StrOrBytesPath, value: StrOrBytesPath, /) -> None: - """Change or add an environment variable. -""" + """Change or add an environment variable.""" + def unsetenv(name: StrOrBytesPath, /) -> None: - """Delete an environment variable. -""" + """Delete an environment variable.""" else: def putenv(name: str, value: str, /) -> None: - """Change or add an environment variable. -""" + """Change or add an environment variable.""" + def unsetenv(name: str, /) -> None: - """Delete an environment variable. -""" + """Delete an environment variable.""" _Opener: TypeAlias = Callable[[str, int], int] @@ -1355,219 +1349,231 @@ def fdopen( opener: _Opener | None = ..., ) -> IO[Any]: ... def close(fd: int) -> None: - """Close a file descriptor. -""" + """Close a file descriptor.""" + def closerange(fd_low: int, fd_high: int, /) -> None: - """Closes all file descriptors in [fd_low, fd_high), ignoring errors. -""" + """Closes all file descriptors in [fd_low, fd_high), ignoring errors.""" + def device_encoding(fd: int) -> str | None: """Return a string describing the encoding of a terminal's file descriptor. -The file descriptor must be attached to a terminal. -If the device is not a terminal, return None. -""" + The file descriptor must be attached to a terminal. + If the device is not a terminal, return None. + """ + def dup(fd: int, /) -> int: - """Return a duplicate of a file descriptor. -""" + """Return a duplicate of a file descriptor.""" + def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: - """Duplicate file descriptor. -""" + """Duplicate file descriptor.""" + def fstat(fd: int) -> stat_result: """Perform a stat system call on the given file descriptor. -Like stat(), but for an open file descriptor. -Equivalent to os.stat(fd). -""" + Like stat(), but for an open file descriptor. + Equivalent to os.stat(fd). + """ + def ftruncate(fd: int, length: int, /) -> None: - """Truncate a file, specified by file descriptor, to a specific length. -""" + """Truncate a file, specified by file descriptor, to a specific length.""" + def fsync(fd: FileDescriptorLike) -> None: - """Force write of fd to disk. -""" + """Force write of fd to disk.""" + def isatty(fd: int, /) -> bool: """Return True if the fd is connected to a terminal. -Return True if the file descriptor is an open file descriptor -connected to the slave end of a terminal. -""" + Return True if the file descriptor is an open file descriptor + connected to the slave end of a terminal. + """ if sys.platform != "win32" and sys.version_info >= (3, 11): def login_tty(fd: int, /) -> None: """Prepare the tty of which fd is a file descriptor for a new login session. -Make the calling process a session leader; make the tty the -controlling tty, the stdin, the stdout, and the stderr of the -calling process; close fd. -""" + Make the calling process a session leader; make the tty the + controlling tty, the stdin, the stdout, and the stderr of the + calling process; close fd. 
+ """ if sys.version_info >= (3, 11): def lseek(fd: int, position: int, whence: int, /) -> int: """Set the position of a file descriptor. Return the new position. - fd - An open file descriptor, as returned by os.open(). - position - Position, interpreted relative to 'whence'. - whence - The relative position to seek from. Valid values are: - - SEEK_SET: seek from the start of the file. - - SEEK_CUR: seek from the current file position. - - SEEK_END: seek from the end of the file. - -The return value is the number of bytes relative to the beginning of the file. -""" + fd + An open file descriptor, as returned by os.open(). + position + Position, interpreted relative to 'whence'. + whence + The relative position to seek from. Valid values are: + - SEEK_SET: seek from the start of the file. + - SEEK_CUR: seek from the current file position. + - SEEK_END: seek from the end of the file. + + The return value is the number of bytes relative to the beginning of the file. + """ else: def lseek(fd: int, position: int, how: int, /) -> int: """Set the position of a file descriptor. Return the new position. -Return the new cursor position in number of bytes -relative to the beginning of the file. -""" + Return the new cursor position in number of bytes + relative to the beginning of the file. + """ def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: """Open a file for low level IO. Returns a file descriptor (integer). -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ + def pipe() -> tuple[int, int]: """Create a pipe. -Returns a tuple of two file descriptors: - (read_fd, write_fd) -""" + Returns a tuple of two file descriptors: + (read_fd, write_fd) + """ + def read(fd: int, length: int, /) -> bytes: - """Read from a file descriptor. Returns a bytes object. -""" + """Read from a file descriptor. Returns a bytes object.""" if sys.version_info >= (3, 12) or sys.platform != "win32": def get_blocking(fd: int, /) -> bool: """Get the blocking mode of the file descriptor. -Return False if the O_NONBLOCK flag is set, True if the flag is cleared. -""" + Return False if the O_NONBLOCK flag is set, True if the flag is cleared. + """ + def set_blocking(fd: int, blocking: bool, /) -> None: """Set the blocking mode of the specified file descriptor. -Set the O_NONBLOCK flag if blocking is False, -clear the O_NONBLOCK flag otherwise. -""" + Set the O_NONBLOCK flag if blocking is False, + clear the O_NONBLOCK flag otherwise. + """ if sys.platform != "win32": def fchown(fd: int, uid: int, gid: int) -> None: """Change the owner and group id of the file specified by file descriptor. -Equivalent to os.chown(fd, uid, gid). -""" + Equivalent to os.chown(fd, uid, gid). + """ + def fpathconf(fd: int, name: str | int, /) -> int: """Return the configuration limit name for the file descriptor fd. -If there is no limit, return -1. -""" + If there is no limit, return -1. + """ + def fstatvfs(fd: int, /) -> statvfs_result: """Perform an fstatvfs system call on the given fd. 
-Equivalent to statvfs(fd). -""" + Equivalent to statvfs(fd). + """ + def lockf(fd: int, command: int, length: int, /) -> None: """Apply, test or remove a POSIX lock on an open file descriptor. - fd - An open file descriptor. - command - One of F_LOCK, F_TLOCK, F_ULOCK or F_TEST. - length - The number of bytes to lock, starting at the current position. -""" + fd + An open file descriptor. + command + One of F_LOCK, F_TLOCK, F_ULOCK or F_TEST. + length + The number of bytes to lock, starting at the current position. + """ + def openpty() -> tuple[int, int]: # some flavors of Unix """Open a pseudo-terminal. -Return a tuple of (master_fd, slave_fd) containing open file descriptors -for both the master and slave ends. -""" + Return a tuple of (master_fd, slave_fd) containing open file descriptors + for both the master and slave ends. + """ if sys.platform != "darwin": def fdatasync(fd: FileDescriptorLike) -> None: - """Force write of fd to disk without forcing update of metadata. -""" + """Force write of fd to disk without forcing update of metadata.""" + def pipe2(flags: int, /) -> tuple[int, int]: # some flavors of Unix """Create a pipe with flags set atomically. -Returns a tuple of two file descriptors: - (read_fd, write_fd) + Returns a tuple of two file descriptors: + (read_fd, write_fd) + + flags can be constructed by ORing together one or more of these values: + O_NONBLOCK, O_CLOEXEC. + """ -flags can be constructed by ORing together one or more of these values: -O_NONBLOCK, O_CLOEXEC. -""" def posix_fallocate(fd: int, offset: int, length: int, /) -> None: """Ensure a file has allocated at least a particular number of bytes on disk. -Ensure that the file specified by fd encompasses a range of bytes -starting at offset bytes from the beginning and continuing for length bytes. -""" + Ensure that the file specified by fd encompasses a range of bytes + starting at offset bytes from the beginning and continuing for length bytes. + """ + def posix_fadvise(fd: int, offset: int, length: int, advice: int, /) -> None: """Announce an intention to access data in a specific pattern. -Announce an intention to access data in a specific pattern, thus allowing -the kernel to make optimizations. -The advice applies to the region of the file specified by fd starting at -offset and continuing for length bytes. -advice is one of POSIX_FADV_NORMAL, POSIX_FADV_SEQUENTIAL, -POSIX_FADV_RANDOM, POSIX_FADV_NOREUSE, POSIX_FADV_WILLNEED, or -POSIX_FADV_DONTNEED. -""" + Announce an intention to access data in a specific pattern, thus allowing + the kernel to make optimizations. + The advice applies to the region of the file specified by fd starting at + offset and continuing for length bytes. + advice is one of POSIX_FADV_NORMAL, POSIX_FADV_SEQUENTIAL, + POSIX_FADV_RANDOM, POSIX_FADV_NOREUSE, POSIX_FADV_WILLNEED, or + POSIX_FADV_DONTNEED. + """ def pread(fd: int, length: int, offset: int, /) -> bytes: """Read a number of bytes from a file descriptor starting at a particular offset. -Read length bytes from file descriptor fd, starting at offset bytes from -the beginning of the file. The file offset remains unchanged. -""" + Read length bytes from file descriptor fd, starting at offset bytes from + the beginning of the file. The file offset remains unchanged. + """ + def pwrite(fd: int, buffer: ReadableBuffer, offset: int, /) -> int: """Write bytes to a file descriptor starting at a particular offset. -Write buffer to fd, starting at offset bytes from the beginning of -the file. Returns the number of bytes written. 
Does not change the -current file offset. -""" + Write buffer to fd, starting at offset bytes from the beginning of + the file. Returns the number of bytes written. Does not change the + current file offset. + """ # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not def preadv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], offset: int, flags: int = 0, /) -> int: """Reads from a file descriptor into a number of mutable bytes-like objects. -Combines the functionality of readv() and pread(). As readv(), it will -transfer data into each buffer until it is full and then move on to the next -buffer in the sequence to hold the rest of the data. Its fourth argument, -specifies the file offset at which the input operation is to be performed. It -will return the total number of bytes read (which can be less than the total -capacity of all the objects). + Combines the functionality of readv() and pread(). As readv(), it will + transfer data into each buffer until it is full and then move on to the next + buffer in the sequence to hold the rest of the data. Its fourth argument, + specifies the file offset at which the input operation is to be performed. It + will return the total number of bytes read (which can be less than the total + capacity of all the objects). -The flags argument contains a bitwise OR of zero or more of the following flags: + The flags argument contains a bitwise OR of zero or more of the following flags: -- RWF_HIPRI -- RWF_NOWAIT + - RWF_HIPRI + - RWF_NOWAIT + + Using non-zero flags requires Linux 4.6 or newer. + """ -Using non-zero flags requires Linux 4.6 or newer. -""" def pwritev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], offset: int, flags: int = 0, /) -> int: """Writes the contents of bytes-like objects to a file descriptor at a given offset. -Combines the functionality of writev() and pwrite(). All buffers must be a sequence -of bytes-like objects. Buffers are processed in array order. Entire contents of first -buffer is written before proceeding to second, and so on. The operating system may -set a limit (sysconf() value SC_IOV_MAX) on the number of buffers that can be used. -This function writes the contents of each object to the file descriptor and returns -the total number of bytes written. + Combines the functionality of writev() and pwrite(). All buffers must be a sequence + of bytes-like objects. Buffers are processed in array order. Entire contents of first + buffer is written before proceeding to second, and so on. The operating system may + set a limit (sysconf() value SC_IOV_MAX) on the number of buffers that can be used. + This function writes the contents of each object to the file descriptor and returns + the total number of bytes written. -The flags argument contains a bitwise OR of zero or more of the following flags: + The flags argument contains a bitwise OR of zero or more of the following flags: -- RWF_DSYNC -- RWF_SYNC -- RWF_APPEND + - RWF_DSYNC + - RWF_SYNC + - RWF_APPEND -Using non-zero flags requires Linux 4.7 or newer. -""" + Using non-zero flags requires Linux 4.7 or newer. + """ if sys.platform != "darwin": if sys.version_info >= (3, 10): RWF_APPEND: Final[int] # docs say available on 3.7+, stubtest says otherwise @@ -1578,8 +1584,7 @@ Using non-zero flags requires Linux 4.7 or newer. if sys.platform == "linux": def sendfile(out_fd: FileDescriptor, in_fd: FileDescriptor, offset: int | None, count: int) -> int: - """Copy count bytes from file descriptor in_fd to file descriptor out_fd. 
-""" + """Copy count bytes from file descriptor in_fd to file descriptor out_fd.""" else: def sendfile( out_fd: FileDescriptor, @@ -1590,209 +1595,207 @@ Using non-zero flags requires Linux 4.7 or newer. trailers: Sequence[ReadableBuffer] = (), flags: int = 0, ) -> int: # FreeBSD and Mac OS X only - """Copy count bytes from file descriptor in_fd to file descriptor out_fd. -""" + """Copy count bytes from file descriptor in_fd to file descriptor out_fd.""" def readv(fd: int, buffers: SupportsLenAndGetItem[WriteableBuffer], /) -> int: """Read from a file descriptor fd into an iterable of buffers. -The buffers should be mutable buffers accepting bytes. -readv will transfer data into each buffer until it is full -and then move on to the next buffer in the sequence to hold -the rest of the data. + The buffers should be mutable buffers accepting bytes. + readv will transfer data into each buffer until it is full + and then move on to the next buffer in the sequence to hold + the rest of the data. + + readv returns the total number of bytes read, + which may be less than the total capacity of all the buffers. + """ -readv returns the total number of bytes read, -which may be less than the total capacity of all the buffers. -""" def writev(fd: int, buffers: SupportsLenAndGetItem[ReadableBuffer], /) -> int: """Iterate over buffers, and write the contents of each to a file descriptor. -Returns the total number of bytes written. -buffers must be a sequence of bytes-like objects. -""" + Returns the total number of bytes written. + buffers must be a sequence of bytes-like objects. + """ if sys.version_info >= (3, 14): def readinto(fd: int, buffer: ReadableBuffer, /) -> int: """Read into a buffer object from a file descriptor. -The buffer should be mutable and bytes-like. On success, returns the number of -bytes read. Less bytes may be read than the size of the buffer. The underlying -system call will be retried when interrupted by a signal, unless the signal -handler raises an exception. Other errors will not be retried and an error will -be raised. + The buffer should be mutable and bytes-like. On success, returns the number of + bytes read. Less bytes may be read than the size of the buffer. The underlying + system call will be retried when interrupted by a signal, unless the signal + handler raises an exception. Other errors will not be retried and an error will + be raised. -Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 -(which can be used to check for errors without reading data). Never returns -negative. -""" + Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 + (which can be used to check for errors without reading data). Never returns + negative. + """ @final class terminal_size(structseq[int], tuple[int, int]): - """A tuple of (columns, lines) for holding terminal window size -""" + """A tuple of (columns, lines) for holding terminal window size""" + if sys.version_info >= (3, 10): __match_args__: Final = ("columns", "lines") @property def columns(self) -> int: - """width of the terminal window in characters -""" + """width of the terminal window in characters""" + @property def lines(self) -> int: - """height of the terminal window in characters -""" + """height of the terminal window in characters""" def get_terminal_size(fd: int = ..., /) -> terminal_size: """Return the size of the terminal window as (columns, lines). -The optional argument fd (default standard output) specifies -which file descriptor should be queried. 
+ The optional argument fd (default standard output) specifies + which file descriptor should be queried. -If the file descriptor is not connected to a terminal, an OSError -is thrown. + If the file descriptor is not connected to a terminal, an OSError + is thrown. -This function will only be defined if an implementation is -available for this system. + This function will only be defined if an implementation is + available for this system. + + shutil.get_terminal_size is the high-level function which should + normally be used, os.get_terminal_size is the low-level implementation. + """ -shutil.get_terminal_size is the high-level function which should -normally be used, os.get_terminal_size is the low-level implementation. -""" def get_inheritable(fd: int, /) -> bool: - """Get the close-on-exe flag of the specified file descriptor. -""" + """Get the close-on-exe flag of the specified file descriptor.""" + def set_inheritable(fd: int, inheritable: bool, /) -> None: - """Set the inheritable flag of the specified file descriptor. -""" + """Set the inheritable flag of the specified file descriptor.""" if sys.platform == "win32": def get_handle_inheritable(handle: int, /) -> bool: - """Get the close-on-exe flag of the specified file descriptor. -""" + """Get the close-on-exe flag of the specified file descriptor.""" + def set_handle_inheritable(handle: int, inheritable: bool, /) -> None: - """Set the inheritable flag of the specified handle. -""" + """Set the inheritable flag of the specified handle.""" if sys.platform != "win32": # Unix only def tcgetpgrp(fd: int, /) -> int: - """Return the process group associated with the terminal specified by fd. -""" + """Return the process group associated with the terminal specified by fd.""" + def tcsetpgrp(fd: int, pgid: int, /) -> None: - """Set the process group associated with the terminal specified by fd. -""" + """Set the process group associated with the terminal specified by fd.""" + def ttyname(fd: int, /) -> str: """Return the name of the terminal device connected to 'fd'. - fd - Integer file descriptor handle. -""" + fd + Integer file descriptor handle. + """ def write(fd: int, data: ReadableBuffer, /) -> int: - """Write a bytes object to a file descriptor. -""" + """Write a bytes object to a file descriptor.""" + def access( path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True ) -> bool: """Use the real uid/gid to test for access to a path. - path - Path to be tested; can be string, bytes, or a path-like object. - mode - Operating-system mode bitfield. Can be F_OK to test existence, - or the inclusive-OR of R_OK, W_OK, and X_OK. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that - directory. - effective_ids - If True, access will use the effective uid/gid instead of - the real uid/gid. - follow_symlinks - If False, and the last element of the path is a symbolic link, - access will examine the symbolic link itself instead of the file - the link points to. - -dir_fd, effective_ids, and follow_symlinks may not be implemented - on your platform. If they are unavailable, using them will raise a - NotImplementedError. + path + Path to be tested; can be string, bytes, or a path-like object. + mode + Operating-system mode bitfield. Can be F_OK to test existence, + or the inclusive-OR of R_OK, W_OK, and X_OK. 
+ dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that + directory. + effective_ids + If True, access will use the effective uid/gid instead of + the real uid/gid. + follow_symlinks + If False, and the last element of the path is a symbolic link, + access will examine the symbolic link itself instead of the file + the link points to. + + dir_fd, effective_ids, and follow_symlinks may not be implemented + on your platform. If they are unavailable, using them will raise a + NotImplementedError. + + Note that most operations will use the effective uid/gid, therefore this + routine can be used in a suid/sgid environment to test if the invoking user + has the specified access to the path. + """ -Note that most operations will use the effective uid/gid, therefore this - routine can be used in a suid/sgid environment to test if the invoking user - has the specified access to the path. -""" def chdir(path: FileDescriptorOrPath) -> None: """Change the current working directory to the specified path. -path may always be specified as a string. -On some platforms, path may also be specified as an open file descriptor. -If this functionality is unavailable, using it raises an exception. -""" + path may always be specified as a string. + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + """ if sys.platform != "win32": def fchdir(fd: FileDescriptorLike) -> None: """Change to the directory of the given file descriptor. -fd must be opened on a directory, not a file. -Equivalent to os.chdir(fd). -""" + fd must be opened on a directory, not a file. + Equivalent to os.chdir(fd). + """ def getcwd() -> str: - """Return a unicode string representing the current working directory. -""" + """Return a unicode string representing the current working directory.""" + def getcwdb() -> bytes: - """Return a bytes string representing the current working directory. -""" + """Return a bytes string representing the current working directory.""" + def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: """Change the access permissions of a file. - path - Path to be modified. May always be specified as a str, bytes, or a path-like object. - On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - mode - Operating-system mode bitfield. - Be careful when using number literals for *mode*. The conventional UNIX notation for - numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in - Python. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that - directory. - follow_symlinks - If False, and the last element of the path is a symbolic link, - chmod will modify the symbolic link itself instead of the file - the link points to. - -It is an error to use dir_fd or follow_symlinks when specifying path as - an open file descriptor. -dir_fd and follow_symlinks may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. -""" + path + Path to be modified. May always be specified as a str, bytes, or a path-like object. + On some platforms, path may also be specified as an open file descriptor. 
+ If this functionality is unavailable, using it raises an exception. + mode + Operating-system mode bitfield. + Be careful when using number literals for *mode*. The conventional UNIX notation for + numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in + Python. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that + directory. + follow_symlinks + If False, and the last element of the path is a symbolic link, + chmod will modify the symbolic link itself instead of the file + the link points to. + + It is an error to use dir_fd or follow_symlinks when specifying path as + an open file descriptor. + dir_fd and follow_symlinks may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. + """ if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: # some flavors of Unix """Set file flags. -If follow_symlinks is False, and the last element of the path is a symbolic - link, chflags will change flags on the symbolic link itself instead of the - file the link points to. -follow_symlinks may not be implemented on your platform. If it is -unavailable, using it will raise a NotImplementedError. -""" + If follow_symlinks is False, and the last element of the path is a symbolic + link, chflags will change flags on the symbolic link itself instead of the + file the link points to. + follow_symlinks may not be implemented on your platform. If it is + unavailable, using it will raise a NotImplementedError. + """ + def lchflags(path: StrOrBytesPath, flags: int) -> None: """Set file flags. -This function will not follow symbolic links. -Equivalent to chflags(path, flags, follow_symlinks=False). -""" + This function will not follow symbolic links. + Equivalent to chflags(path, flags, follow_symlinks=False). + """ if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: - """Change root directory to path. -""" - def chown( - path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True - ) -> None: + """Change root directory to path.""" + + def chown(path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: """Change the owner and group id of path to the numeric uid and gid.\\ path @@ -1819,12 +1822,13 @@ It is an error to use dir_fd or follow_symlinks when specifying path as dir_fd and follow_symlinks may not be implemented on your platform. If they are unavailable, using them will raise a NotImplementedError. """ + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: """Change the owner and group id of path to the numeric uid and gid. -This function will not follow symbolic links. -Equivalent to os.chown(path, uid, gid, follow_symlinks=False). -""" + This function will not follow symbolic links. + Equivalent to os.chown(path, uid, gid, follow_symlinks=False). + """ def link( src: StrOrBytesPath, @@ -1836,159 +1840,167 @@ def link( ) -> None: """Create a hard link to a file. -If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. 
-If follow_symlinks is False, and the last element of src is a symbolic - link, link will create a link to the symbolic link itself instead of the - file the link points to. -src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your - platform. If they are unavailable, using them will raise a - NotImplementedError. -""" + If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. + If follow_symlinks is False, and the last element of src is a symbolic + link, link will create a link to the symbolic link itself instead of the + file the link points to. + src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your + platform. If they are unavailable, using them will raise a + NotImplementedError. + """ + def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: """Perform a stat system call on the given path, without following symbolic links. -Like stat(), but do not follow symbolic links. -Equivalent to stat(path, follow_symlinks=False). -""" + Like stat(), but do not follow symbolic links. + Equivalent to stat(path, follow_symlinks=False). + """ + def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: """Create a directory. -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. -The mode argument is ignored on Windows. Where it is used, the current umask -value is first masked out. -""" + The mode argument is ignored on Windows. Where it is used, the current umask + value is first masked out. + """ if sys.platform != "win32": def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: # Unix only """Create a "fifo" (a POSIX named pipe). -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: """makedirs(name [, mode=0o777][, exist_ok=False]) -Super-mkdir; create a leaf directory and all intermediate ones. Works like -mkdir, except that any intermediate path segment (not just the rightmost) -will be created if it does not exist. If the target directory already -exists, raise an OSError if exist_ok is False. Otherwise no exception is -raised. This is recursive. + Super-mkdir; create a leaf directory and all intermediate ones. Works like + mkdir, except that any intermediate path segment (not just the rightmost) + will be created if it does not exist. 
If the target directory already + exists, raise an OSError if exist_ok is False. Otherwise no exception is + raised. This is recursive. -""" + """ if sys.platform != "win32": def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: """Create a node in the file system. -Create a node in the file system (file, device special file or named pipe) -at path. mode specifies both the permissions to use and the -type of node to be created, being combined (bitwise OR) with one of -S_IFREG, S_IFCHR, S_IFBLK, and S_IFIFO. If S_IFCHR or S_IFBLK is set on mode, -device defines the newly created device special file (probably using -os.makedev()). Otherwise device is ignored. + Create a node in the file system (file, device special file or named pipe) + at path. mode specifies both the permissions to use and the + type of node to be created, being combined (bitwise OR) with one of + S_IFREG, S_IFCHR, S_IFBLK, and S_IFIFO. If S_IFCHR or S_IFBLK is set on mode, + device defines the newly created device special file (probably using + os.makedev()). Otherwise device is ignored. + + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" def major(device: int, /) -> int: - """Extracts a device major number from a raw device number. -""" + """Extracts a device major number from a raw device number.""" + def minor(device: int, /) -> int: - """Extracts a device minor number from a raw device number. -""" + """Extracts a device minor number from a raw device number.""" + def makedev(major: int, minor: int, /) -> int: - """Composes a raw device number from the major and minor device numbers. -""" + """Composes a raw device number from the major and minor device numbers.""" + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: # Unix only """Return the configuration limit name for the file or directory path. -If there is no limit, return -1. -On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. -""" + If there is no limit, return -1. + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + """ def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: """Return a string representing the path to which the symbolic link points. -If dir_fd is not None, it should be a file descriptor open to a directory, -and path should be relative; path will then be relative to that directory. + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + + dir_fd may not be implemented on your platform. If it is unavailable, + using it will raise a NotImplementedError. + """ -dir_fd may not be implemented on your platform. If it is unavailable, -using it will raise a NotImplementedError. 
-""" def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: """Remove a file (same as unlink()). -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ + def removedirs(name: StrOrBytesPath) -> None: """removedirs(name) -Super-rmdir; remove a leaf directory and all empty intermediate -ones. Works like rmdir except that, if the leaf directory is -successfully removed, directories corresponding to rightmost path -segments will be pruned away until either the whole path is -consumed or an error occurs. Errors during this latter phase are -ignored -- they generally mean that a directory was not empty. + Super-rmdir; remove a leaf directory and all empty intermediate + ones. Works like rmdir except that, if the leaf directory is + successfully removed, directories corresponding to rightmost path + segments will be pruned away until either the whole path is + consumed or an error occurs. Errors during this latter phase are + ignored -- they generally mean that a directory was not empty. + + """ -""" def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: """Rename a file or directory. -If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. -src_dir_fd and dst_dir_fd, may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. -""" + If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. + src_dir_fd and dst_dir_fd, may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. + """ + def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: """renames(old, new) -Super-rename; create directories as necessary and delete any left -empty. Works like rename, except creation of any intermediate -directories needed to make the new pathname good is attempted -first. After the rename, directories corresponding to rightmost -path segments of the old name will be pruned until either the -whole path is consumed or a nonempty directory is found. + Super-rename; create directories as necessary and delete any left + empty. Works like rename, except creation of any intermediate + directories needed to make the new pathname good is attempted + first. After the rename, directories corresponding to rightmost + path segments of the old name will be pruned until either the + whole path is consumed or a nonempty directory is found. -Note: this function can fail with the new directory structure made -if you lack permissions needed to unlink the leaf directory or -file. + Note: this function can fail with the new directory structure made + if you lack permissions needed to unlink the leaf directory or + file. 
-""" -def replace( - src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None -) -> None: + """ + +def replace(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: """Rename a file or directory, overwriting the destination. -If either src_dir_fd or dst_dir_fd is not None, it should be a file - descriptor open to a directory, and the respective path string (src or dst) - should be relative; the path will then be relative to that directory. -src_dir_fd and dst_dir_fd, may not be implemented on your platform. - If they are unavailable, using them will raise a NotImplementedError. -""" + If either src_dir_fd or dst_dir_fd is not None, it should be a file + descriptor open to a directory, and the respective path string (src or dst) + should be relative; the path will then be relative to that directory. + src_dir_fd and dst_dir_fd, may not be implemented on your platform. + If they are unavailable, using them will raise a NotImplementedError. + """ + def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: """Remove a directory. -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ + @final @type_check_only class _ScandirIterator(Generic[AnyStr]): @@ -2003,12 +2015,13 @@ class _ScandirIterator(Generic[AnyStr]): def scandir(path: None = None) -> _ScandirIterator[str]: """Return an iterator of DirEntry objects for given path. -path can be specified as either str, bytes, or a path-like object. If path -is bytes, the names of yielded DirEntry objects will also be bytes; in -all other circumstances they will be str. + path can be specified as either str, bytes, or a path-like object. If path + is bytes, the names of yielded DirEntry objects will also be bytes; in + all other circumstances they will be str. + + If path is None, uses the path='.'. + """ -If path is None, uses the path='.'. -""" @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload @@ -2016,70 +2029,69 @@ def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: """Perform a stat system call on the given path. - path - Path to be examined; can be string, bytes, a path-like object or - open-file-descriptor int. - dir_fd - If not None, it should be a file descriptor open to a directory, - and path should be a relative string; path will then be relative to - that directory. - follow_symlinks - If False, and the last element of the path is a symbolic link, - stat will examine the symbolic link itself instead of the file - the link points to. - -dir_fd and follow_symlinks may not be implemented - on your platform. If they are unavailable, using them will raise a - NotImplementedError. - -It's an error to use dir_fd or follow_symlinks when specifying path as - an open file descriptor. 
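As a quick illustration of the scandir() iterator described above (a sketch; the directory "." and the size printout are arbitrary choices):

import os

# DirEntry objects expose name/path plus cached is_file()/is_dir()/stat() data,
# so listing file sizes avoids one extra stat() call per entry on most platforms.
with os.scandir(".") as it:
    for entry in it:
        if entry.is_file(follow_symlinks=False):
            print(entry.name, entry.stat().st_size)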
-""" + path + Path to be examined; can be string, bytes, a path-like object or + open-file-descriptor int. + dir_fd + If not None, it should be a file descriptor open to a directory, + and path should be a relative string; path will then be relative to + that directory. + follow_symlinks + If False, and the last element of the path is a symbolic link, + stat will examine the symbolic link itself instead of the file + the link points to. + + dir_fd and follow_symlinks may not be implemented + on your platform. If they are unavailable, using them will raise a + NotImplementedError. + + It's an error to use dir_fd or follow_symlinks when specifying path as + an open file descriptor. + """ if sys.platform != "win32": def statvfs(path: FileDescriptorOrPath) -> statvfs_result: # Unix only """Perform a statvfs system call on the given path. -path may always be specified as a string. -On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. -""" + path may always be specified as a string. + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + """ -def symlink( - src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None -) -> None: +def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None) -> None: """Create a symbolic link pointing to src named dst. -target_is_directory is required on Windows if the target is to be - interpreted as a directory. (On Windows, symlink requires - Windows 6.0 or greater, and raises a NotImplementedError otherwise.) - target_is_directory is ignored on non-Windows platforms. + target_is_directory is required on Windows if the target is to be + interpreted as a directory. (On Windows, symlink requires + Windows 6.0 or greater, and raises a NotImplementedError otherwise.) + target_is_directory is ignored on non-Windows platforms. -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. - If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ if sys.platform != "win32": def sync() -> None: # Unix only - """Force write of everything to disk. -""" + """Force write of everything to disk.""" def truncate(path: FileDescriptorOrPath, length: int) -> None: # Unix only up to version 3.4 """Truncate a file, specified by path, to a specific length. -On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. -""" + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + """ + def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: """Remove a file (same as remove()). -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -dir_fd may not be implemented on your platform. 
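A short Unix-flavoured sketch contrasting stat() with and without follow_symlinks, using throwaway file names (creating symlinks may require extra privileges on Windows):

import os
import stat

with open("target.txt", "w"):
    pass
os.symlink("target.txt", "link.txt")

st_target = os.stat("link.txt")                       # follows the link
st_link = os.stat("link.txt", follow_symlinks=False)  # examines the link itself
print(stat.S_ISREG(st_target.st_mode))  # True
print(stat.S_ISLNK(st_link.st_mode))    # True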
- If it is unavailable, using it will raise a NotImplementedError. -""" + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + dir_fd may not be implemented on your platform. + If it is unavailable, using it will raise a NotImplementedError. + """ + def utime( path: FileDescriptorOrPath, times: tuple[int, int] | tuple[float, float] | None = None, @@ -2090,28 +2102,28 @@ def utime( ) -> None: """Set the access and modified time of path. -path may always be specified as a string. -On some platforms, path may also be specified as an open file descriptor. - If this functionality is unavailable, using it raises an exception. - -If times is not None, it must be a tuple (atime, mtime); - atime and mtime should be expressed as float seconds since the epoch. -If ns is specified, it must be a tuple (atime_ns, mtime_ns); - atime_ns and mtime_ns should be expressed as integer nanoseconds - since the epoch. -If times is None and ns is unspecified, utime uses the current time. -Specifying tuples for both times and ns is an error. - -If dir_fd is not None, it should be a file descriptor open to a directory, - and path should be relative; path will then be relative to that directory. -If follow_symlinks is False, and the last element of the path is a symbolic - link, utime will modify the symbolic link itself instead of the file the - link points to. -It is an error to use dir_fd or follow_symlinks when specifying path - as an open file descriptor. -dir_fd and follow_symlinks may not be available on your platform. - If they are unavailable, using them will raise a NotImplementedError. -""" + path may always be specified as a string. + On some platforms, path may also be specified as an open file descriptor. + If this functionality is unavailable, using it raises an exception. + + If times is not None, it must be a tuple (atime, mtime); + atime and mtime should be expressed as float seconds since the epoch. + If ns is specified, it must be a tuple (atime_ns, mtime_ns); + atime_ns and mtime_ns should be expressed as integer nanoseconds + since the epoch. + If times is None and ns is unspecified, utime uses the current time. + Specifying tuples for both times and ns is an error. + + If dir_fd is not None, it should be a file descriptor open to a directory, + and path should be relative; path will then be relative to that directory. + If follow_symlinks is False, and the last element of the path is a symbolic + link, utime will modify the symbolic link itself instead of the file the + link points to. + It is an error to use dir_fd or follow_symlinks when specifying path + as an open file descriptor. + dir_fd and follow_symlinks may not be available on your platform. + If they are unavailable, using them will raise a NotImplementedError. + """ _OnError: TypeAlias = Callable[[OSError], object] @@ -2120,63 +2132,63 @@ def walk( ) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: """Directory tree generator. -For each directory in the directory tree rooted at top (including top -itself, but excluding '.' and '..'), yields a 3-tuple - - dirpath, dirnames, filenames - -dirpath is a string, the path to the directory. dirnames is a list of -the names of the subdirectories in dirpath (including symlinks to directories, -and excluding '.' and '..'). -filenames is a list of the names of the non-directory files in dirpath. -Note that the names in the lists are just names, with no path components. 
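A minimal sketch of the two ways utime() above accepts timestamps (float seconds via times, integer nanoseconds via the keyword-only ns); the file name is illustrative:

import os
import time

with open("example.txt", "w"):
    pass

hour_ago = time.time() - 3600
# times takes (atime, mtime) as float seconds since the epoch...
os.utime("example.txt", times=(hour_ago, hour_ago))
# ...while ns takes (atime_ns, mtime_ns) as integer nanoseconds.
os.utime("example.txt", ns=(int(hour_ago * 1e9), int(hour_ago * 1e9)))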
-To get a full path (which begins with top) to a file or directory in -dirpath, do os.path.join(dirpath, name). - -If optional arg 'topdown' is true or not specified, the triple for a -directory is generated before the triples for any of its subdirectories -(directories are generated top down). If topdown is false, the triple -for a directory is generated after the triples for all of its -subdirectories (directories are generated bottom up). - -When topdown is true, the caller can modify the dirnames list in-place -(e.g., via del or slice assignment), and walk will only recurse into the -subdirectories whose names remain in dirnames; this can be used to prune the -search, or to impose a specific order of visiting. Modifying dirnames when -topdown is false has no effect on the behavior of os.walk(), since the -directories in dirnames have already been generated by the time dirnames -itself is generated. No matter the value of topdown, the list of -subdirectories is retrieved before the tuples for the directory and its -subdirectories are generated. - -By default errors from the os.scandir() call are ignored. If -optional arg 'onerror' is specified, it should be a function; it -will be called with one argument, an OSError instance. It can -report the error to continue with the walk, or raise the exception -to abort the walk. Note that the filename is available as the -filename attribute of the exception object. - -By default, os.walk does not follow symbolic links to subdirectories on -systems that support them. In order to get this functionality, set the -optional argument 'followlinks' to true. - -Caution: if you pass a relative pathname for top, don't change the -current working directory between resumptions of walk. walk never -changes the current directory, and assumes that the client doesn't -either. - -Example: - -import os -from os.path import join, getsize -for root, dirs, files in os.walk('python/Lib/xml'): - print(root, "consumes ") - print(sum(getsize(join(root, name)) for name in files), end=" ") - print("bytes in", len(files), "non-directory files") - if '__pycache__' in dirs: - dirs.remove('__pycache__') # don't visit __pycache__ directories - -""" + For each directory in the directory tree rooted at top (including top + itself, but excluding '.' and '..'), yields a 3-tuple + + dirpath, dirnames, filenames + + dirpath is a string, the path to the directory. dirnames is a list of + the names of the subdirectories in dirpath (including symlinks to directories, + and excluding '.' and '..'). + filenames is a list of the names of the non-directory files in dirpath. + Note that the names in the lists are just names, with no path components. + To get a full path (which begins with top) to a file or directory in + dirpath, do os.path.join(dirpath, name). + + If optional arg 'topdown' is true or not specified, the triple for a + directory is generated before the triples for any of its subdirectories + (directories are generated top down). If topdown is false, the triple + for a directory is generated after the triples for all of its + subdirectories (directories are generated bottom up). + + When topdown is true, the caller can modify the dirnames list in-place + (e.g., via del or slice assignment), and walk will only recurse into the + subdirectories whose names remain in dirnames; this can be used to prune the + search, or to impose a specific order of visiting. 
Modifying dirnames when + topdown is false has no effect on the behavior of os.walk(), since the + directories in dirnames have already been generated by the time dirnames + itself is generated. No matter the value of topdown, the list of + subdirectories is retrieved before the tuples for the directory and its + subdirectories are generated. + + By default errors from the os.scandir() call are ignored. If + optional arg 'onerror' is specified, it should be a function; it + will be called with one argument, an OSError instance. It can + report the error to continue with the walk, or raise the exception + to abort the walk. Note that the filename is available as the + filename attribute of the exception object. + + By default, os.walk does not follow symbolic links to subdirectories on + systems that support them. In order to get this functionality, set the + optional argument 'followlinks' to true. + + Caution: if you pass a relative pathname for top, don't change the + current working directory between resumptions of walk. walk never + changes the current directory, and assumes that the client doesn't + either. + + Example: + + import os + from os.path import join, getsize + for root, dirs, files in os.walk('python/Lib/xml'): + print(root, "consumes ") + print(sum(getsize(join(root, name)) for name in files), end=" ") + print("bytes in", len(files), "non-directory files") + if '__pycache__' in dirs: + dirs.remove('__pycache__') # don't visit __pycache__ directories + + """ if sys.platform != "win32": @overload @@ -2190,36 +2202,37 @@ if sys.platform != "win32": ) -> Iterator[tuple[str, list[str], list[str], int]]: """Directory tree generator. -This behaves exactly like walk(), except that it yields a 4-tuple + This behaves exactly like walk(), except that it yields a 4-tuple - dirpath, dirnames, filenames, dirfd + dirpath, dirnames, filenames, dirfd -`dirpath`, `dirnames` and `filenames` are identical to walk() output, -and `dirfd` is a file descriptor referring to the directory `dirpath`. + `dirpath`, `dirnames` and `filenames` are identical to walk() output, + and `dirfd` is a file descriptor referring to the directory `dirpath`. -The advantage of fwalk() over walk() is that it's safe against symlink -races (when follow_symlinks is False). + The advantage of fwalk() over walk() is that it's safe against symlink + races (when follow_symlinks is False). + + If dir_fd is not None, it should be a file descriptor open to a directory, + and top should be relative; top will then be relative to that directory. + (dir_fd is always supported for fwalk.) + + Caution: + Since fwalk() yields file descriptors, those are only valid until the + next iteration step, so you should dup() them if you want to keep them + for a longer period. + + Example: + + import os + for root, dirs, files, rootfd in os.fwalk('python/Lib/xml'): + print(root, "consumes", end="") + print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files), + end="") + print("bytes in", len(files), "non-directory files") + if '__pycache__' in dirs: + dirs.remove('__pycache__') # don't visit __pycache__ directories + """ -If dir_fd is not None, it should be a file descriptor open to a directory, - and top should be relative; top will then be relative to that directory. - (dir_fd is always supported for fwalk.) - -Caution: -Since fwalk() yields file descriptors, those are only valid until the -next iteration step, so you should dup() them if you want to keep them -for a longer period. 
- -Example: - -import os -for root, dirs, files, rootfd in os.fwalk('python/Lib/xml'): - print(root, "consumes", end="") - print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files), - end="") - print("bytes in", len(files), "non-directory files") - if '__pycache__' in dirs: - dirs.remove('__pycache__') # don't visit __pycache__ directories -""" @overload def fwalk( top: BytesPath, @@ -2233,28 +2246,31 @@ for root, dirs, files, rootfd in os.fwalk('python/Lib/xml'): def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: """Return the value of extended attribute attribute on path. -path may be either a string, a path-like object, or an open file descriptor. -If follow_symlinks is False, and the last element of the path is a symbolic - link, getxattr will examine the symbolic link itself instead of the file - the link points to. -""" + path may be either a string, a path-like object, or an open file descriptor. + If follow_symlinks is False, and the last element of the path is a symbolic + link, getxattr will examine the symbolic link itself instead of the file + the link points to. + """ + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: """Return a list of extended attributes on path. -path may be either None, a string, a path-like object, or an open file descriptor. -if path is None, listxattr will examine the current directory. -If follow_symlinks is False, and the last element of the path is a symbolic - link, listxattr will examine the symbolic link itself instead of the file - the link points to. -""" + path may be either None, a string, a path-like object, or an open file descriptor. + if path is None, listxattr will examine the current directory. + If follow_symlinks is False, and the last element of the path is a symbolic + link, listxattr will examine the symbolic link itself instead of the file + the link points to. + """ + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: """Remove extended attribute attribute on path. -path may be either a string, a path-like object, or an open file descriptor. -If follow_symlinks is False, and the last element of the path is a symbolic - link, removexattr will modify the symbolic link itself instead of the file - the link points to. -""" + path may be either a string, a path-like object, or an open file descriptor. + If follow_symlinks is False, and the last element of the path is a symbolic + link, removexattr will modify the symbolic link itself instead of the file + the link points to. + """ + def setxattr( path: FileDescriptorOrPath, attribute: StrOrBytesPath, @@ -2265,51 +2281,49 @@ If follow_symlinks is False, and the last element of the path is a symbolic ) -> None: """Set extended attribute attribute on path to value. -path may be either a string, a path-like object, or an open file descriptor. -If follow_symlinks is False, and the last element of the path is a symbolic - link, setxattr will modify the symbolic link itself instead of the file - the link points to. -""" + path may be either a string, a path-like object, or an open file descriptor. + If follow_symlinks is False, and the last element of the path is a symbolic + link, setxattr will modify the symbolic link itself instead of the file + the link points to. + """ def abort() -> NoReturn: """Abort the interpreter immediately. 
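A brief sketch of the extended-attribute functions above; it assumes a Linux filesystem with xattr support (e.g. ext4), and the attribute name and value are invented for the demo:

import os

with open("example.txt", "w"):
    pass

os.setxattr("example.txt", "user.comment", b"demo value")
print(os.listxattr("example.txt"))                  # ['user.comment', ...]
print(os.getxattr("example.txt", "user.comment"))   # b'demo value'
os.removexattr("example.txt", "user.comment")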
-This function 'dumps core' or otherwise fails in the hardest way possible -on the hosting operating system. This function never returns. -""" + This function 'dumps core' or otherwise fails in the hardest way possible + on the hosting operating system. This function never returns. + """ # These are defined as execl(file, *args) but the first *arg is mandatory. def execl(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: """execl(file, *args) -Execute the executable file with argument list args, replacing the -current process. -""" + Execute the executable file with argument list args, replacing the + current process. + """ + def execlp(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]]]]) -> NoReturn: """execlp(file, *args) -Execute the executable file (which is searched for along $PATH) -with argument list args, replacing the current process. -""" + Execute the executable file (which is searched for along $PATH) + with argument list args, replacing the current process. + """ # These are: execle(file, *args, env) but env is pulled from the last element of the args. -def execle( - file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] -) -> NoReturn: +def execle(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]]) -> NoReturn: """execle(file, *args, env) -Execute the executable file with argument list args and -environment env, replacing the current process. -""" -def execlpe( - file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]] -) -> NoReturn: + Execute the executable file with argument list args and + environment env, replacing the current process. + """ + +def execlpe(file: StrOrBytesPath, *args: Unpack[tuple[StrOrBytesPath, Unpack[tuple[StrOrBytesPath, ...]], _ExecEnv]]) -> NoReturn: """execlpe(file, *args, env) -Execute the executable file (which is searched for along $PATH) -with argument list args and environment env, replacing the current -process. -""" + Execute the executable file (which is searched for along $PATH) + with argument list args and environment env, replacing the current + process. + """ # The docs say `args: tuple or list of strings` # The implementation enforces tuple or list so we can't use Sequence. @@ -2335,64 +2349,68 @@ _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] def execv(path: StrOrBytesPath, argv: _ExecVArgs, /) -> NoReturn: """Execute an executable path with arguments, replacing current process. - path - Path of executable file. - argv - Tuple or list of strings. -""" + path + Path of executable file. + argv + Tuple or list of strings. + """ + def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: """Execute an executable path with arguments, replacing current process. - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. -""" + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + """ + def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: """execvp(file, args) -Execute the executable file (which is searched for along $PATH) -with argument list args, replacing the current process. -args may be a list or tuple of strings. 
-""" + Execute the executable file (which is searched for along $PATH) + with argument list args, replacing the current process. + args may be a list or tuple of strings. + """ + def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: """execvpe(file, args, env) -Execute the executable file (which is searched for along $PATH) -with argument list args and environment env, replacing the -current process. -args may be a list or tuple of strings. -""" + Execute the executable file (which is searched for along $PATH) + with argument list args and environment env, replacing the + current process. + args may be a list or tuple of strings. + """ + def _exit(status: int) -> NoReturn: - """Exit to the system with specified status, without normal exit processing. -""" + """Exit to the system with specified status, without normal exit processing.""" + def kill(pid: int, signal: int, /) -> None: - """Kill a process with a signal. -""" + """Kill a process with a signal.""" if sys.platform != "win32": # Unix only def fork() -> int: """Fork a child process. -Return 0 to child process and PID of child to parent process. -""" + Return 0 to child process and PID of child to parent process. + """ + def forkpty() -> tuple[int, int]: # some flavors of Unix """Fork a new process with a new pseudo-terminal as controlling tty. -Returns a tuple of (pid, master_fd). -Like fork(), return pid of 0 to the child process, -and pid of child to the parent process. -To both, return fd of newly opened pseudo-terminal. -""" + Returns a tuple of (pid, master_fd). + Like fork(), return pid of 0 to the child process, + and pid of child to the parent process. + To both, return fd of newly opened pseudo-terminal. + """ + def killpg(pgid: int, signal: int, /) -> None: - """Kill a process group with a signal. -""" + """Kill a process group with a signal.""" + def nice(increment: int, /) -> int: - """Add increment to the priority of process and return the new priority. -""" + """Add increment to the priority of process and return the new priority.""" if sys.platform != "darwin" and sys.platform != "linux": def plock(op: int, /) -> None: ... @@ -2420,117 +2438,121 @@ def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: """spawnl(mode, file, *args) -> integer -Execute file with arguments from args in a subprocess. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file with arguments from args in a subprocess. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise sig """spawnle(mode, file, *args, env) -> integer -Execute file with arguments from args in a subprocess with the -supplied environment. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file with arguments from args in a subprocess with the + supplied environment. + If mode == P_NOWAIT return the pid of the process. 
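A classic Unix fork/exec sketch tying together fork(), execvp(), _exit() and waitpid() from this section; the echo command is only an example:

import os

pid = os.fork()
if pid == 0:
    # Child: replace this process image; execvp() searches $PATH for "echo".
    try:
        os.execvp("echo", ["echo", "hello from the child"])
    except OSError:
        os._exit(127)  # exec failed: exit immediately, skipping normal cleanup
else:
    # Parent: reap the child and decode its wait status.
    _, status = os.waitpid(pid, 0)
    print("child exit code:", os.waitstatus_to_exitcode(status))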
+ If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ if sys.platform != "win32": def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: """spawnv(mode, file, args) -> integer -Execute file with arguments from args in a subprocess. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file with arguments from args in a subprocess. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: """spawnve(mode, file, args, env) -> integer -Execute file with arguments from args in a subprocess with the -specified environment. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file with arguments from args in a subprocess with the + specified environment. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ else: def spawnv(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, /) -> int: """Execute the program specified by path in a new process. - mode - Mode of process creation. - path - Path of executable file. - argv - Tuple or list of strings. -""" + mode + Mode of process creation. + path + Path of executable file. + argv + Tuple or list of strings. + """ + def spawnve(mode: int, path: StrOrBytesPath, argv: _ExecVArgs, env: _ExecEnv, /) -> int: """Execute the program specified by path in a new process. - mode - Mode of process creation. - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. -""" + mode + Mode of process creation. + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + """ def system(command: StrOrBytesPath) -> int: - """Execute the command in a subshell. -""" + """Execute the command in a subshell.""" + @final class times_result(structseq[float], tuple[float, float, float, float, float]): """times_result: Result from os.times(). -This object may be accessed either as a tuple of - (user, system, children_user, children_system, elapsed), -or via the attributes user, system, children_user, children_system, -and elapsed. + This object may be accessed either as a tuple of + (user, system, children_user, children_system, elapsed), + or via the attributes user, system, children_user, children_system, + and elapsed. + + See os.times for more information. + """ -See os.times for more information. 
-""" if sys.version_info >= (3, 10): __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") @property def user(self) -> float: - """user time -""" + """user time""" + @property def system(self) -> float: - """system time -""" + """system time""" + @property def children_user(self) -> float: - """user time of children -""" + """user time of children""" + @property def children_system(self) -> float: - """system time of children -""" + """system time of children""" + @property def elapsed(self) -> float: - """elapsed time since an arbitrary point in the past -""" + """elapsed time since an arbitrary point in the past""" def times() -> times_result: """Return a collection containing process timing information. -The object returned behaves like a named tuple with these fields: - (utime, stime, cutime, cstime, elapsed_time) -All fields are floating-point numbers. -""" + The object returned behaves like a named tuple with these fields: + (utime, stime, cutime, cstime, elapsed_time) + All fields are floating-point numbers. + """ + def waitpid(pid: int, options: int, /) -> tuple[int, int]: """Wait for completion of a given child process. -Returns a tuple of information regarding the child process: - (pid, status) + Returns a tuple of information regarding the child process: + (pid, status) -The options argument is ignored on Windows. -""" + The options argument is ignored on Windows. + """ if sys.platform == "win32": if sys.version_info >= (3, 10): @@ -2543,106 +2565,111 @@ if sys.platform == "win32": ) -> None: """Start a file with its associated application. -When "operation" is not specified or "open", this acts like -double-clicking the file in Explorer, or giving the file name as an -argument to the DOS "start" command: the file is opened with whatever -application (if any) its extension is associated. -When another "operation" is given, it specifies what should be done with -the file. A typical operation is "print". + When "operation" is not specified or "open", this acts like + double-clicking the file in Explorer, or giving the file name as an + argument to the DOS "start" command: the file is opened with whatever + application (if any) its extension is associated. + When another "operation" is given, it specifies what should be done with + the file. A typical operation is "print". -"arguments" is passed to the application, but should be omitted if the -file is a document. + "arguments" is passed to the application, but should be omitted if the + file is a document. -"cwd" is the working directory for the operation. If "filepath" is -relative, it will be resolved against this directory. This argument -should usually be an absolute path. + "cwd" is the working directory for the operation. If "filepath" is + relative, it will be resolved against this directory. This argument + should usually be an absolute path. -"show_cmd" can be used to override the recommended visibility option. -See the Windows ShellExecute documentation for values. + "show_cmd" can be used to override the recommended visibility option. + See the Windows ShellExecute documentation for values. -startfile returns as soon as the associated application is launched. -There is no option to wait for the application to close, and no way -to retrieve the application's exit status. + startfile returns as soon as the associated application is launched. + There is no option to wait for the application to close, and no way + to retrieve the application's exit status. 
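For illustration, times_result behaves like a named tuple, so the fields documented above can be read by index or by attribute:

import os

t = os.times()
print(t.user, t.system, t.elapsed)
print(t[0] == t.user)  # True: tuple access and attribute access agree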
-The filepath is relative to the current directory. If you want to use -an absolute path, make sure the first character is not a slash ("/"); -the underlying Win32 ShellExecute function doesn't work if it is. -""" + The filepath is relative to the current directory. If you want to use + an absolute path, make sure the first character is not a slash ("/"); + the underlying Win32 ShellExecute function doesn't work if it is. + """ else: def startfile(filepath: StrOrBytesPath, operation: str = ...) -> None: """Start a file with its associated application. -When "operation" is not specified or "open", this acts like -double-clicking the file in Explorer, or giving the file name as an -argument to the DOS "start" command: the file is opened with whatever -application (if any) its extension is associated. -When another "operation" is given, it specifies what should be done with -the file. A typical operation is "print". + When "operation" is not specified or "open", this acts like + double-clicking the file in Explorer, or giving the file name as an + argument to the DOS "start" command: the file is opened with whatever + application (if any) its extension is associated. + When another "operation" is given, it specifies what should be done with + the file. A typical operation is "print". -startfile returns as soon as the associated application is launched. -There is no option to wait for the application to close, and no way -to retrieve the application's exit status. + startfile returns as soon as the associated application is launched. + There is no option to wait for the application to close, and no way + to retrieve the application's exit status. -The filepath is relative to the current directory. If you want to use -an absolute path, make sure the first character is not a slash ("/"); -the underlying Win32 ShellExecute function doesn't work if it is. -""" + The filepath is relative to the current directory. If you want to use + an absolute path, make sure the first character is not a slash ("/"); + the underlying Win32 ShellExecute function doesn't work if it is. + """ else: def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: """spawnlp(mode, file, *args) -> integer -Execute file (which is looked for along $PATH) with arguments from -args in a subprocess with the supplied environment. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file (which is looked for along $PATH) with arguments from + args in a subprocess with the supplied environment. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: # Imprecise signature """spawnlpe(mode, file, *args, env) -> integer -Execute file (which is looked for along $PATH) with arguments from -args in a subprocess with the supplied environment. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file (which is looked for along $PATH) with arguments from + args in a subprocess with the supplied environment. + If mode == P_NOWAIT return the pid of the process. 
+ If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: """spawnvp(mode, file, args) -> integer -Execute file (which is looked for along $PATH) with arguments from -args in a subprocess. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file (which is looked for along $PATH) with arguments from + args in a subprocess. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: """spawnvpe(mode, file, args, env) -> integer -Execute file (which is looked for along $PATH) with arguments from -args in a subprocess with the supplied environment. -If mode == P_NOWAIT return the pid of the process. -If mode == P_WAIT return the process's exit code if it exits normally; -otherwise return -SIG, where SIG is the signal that killed it. -""" + Execute file (which is looked for along $PATH) with arguments from + args in a subprocess with the supplied environment. + If mode == P_NOWAIT return the pid of the process. + If mode == P_WAIT return the process's exit code if it exits normally; + otherwise return -SIG, where SIG is the signal that killed it. + """ + def wait() -> tuple[int, int]: # Unix only """Wait for completion of a child process. -Returns a tuple of information about the child process: - (pid, status) -""" + Returns a tuple of information about the child process: + (pid, status) + """ # Added to MacOS in 3.13 if sys.platform != "darwin" or sys.version_info >= (3, 13): @final class waitid_result(structseq[int], tuple[int, int, int, int, int]): """waitid_result: Result from waitid. -This object may be accessed either as a tuple of - (si_pid, si_uid, si_signo, si_status, si_code), -or via the attributes si_pid, si_uid, and so on. + This object may be accessed either as a tuple of + (si_pid, si_uid, si_signo, si_status, si_code), + or via the attributes si_pid, si_uid, and so on. + + See os.waitid for more information. + """ -See os.waitid for more information. -""" if sys.version_info >= (3, 10): __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") @@ -2660,59 +2687,61 @@ See os.waitid for more information. def waitid(idtype: int, ident: int, options: int, /) -> waitid_result | None: """Returns the result of waiting for a process or processes. - idtype - Must be one of be P_PID, P_PGID or P_ALL. - id - The id to wait on. - options - Constructed from the ORing of one or more of WEXITED, WSTOPPED - or WCONTINUED and additionally may be ORed with WNOHANG or WNOWAIT. - -Returns either waitid_result or None if WNOHANG is specified and there are -no children in a waitable state. -""" + idtype + Must be one of be P_PID, P_PGID or P_ALL. + id + The id to wait on. + options + Constructed from the ORing of one or more of WEXITED, WSTOPPED + or WCONTINUED and additionally may be ORed with WNOHANG or WNOWAIT. + Returns either waitid_result or None if WNOHANG is specified and there are + no children in a waitable state. 
+ """ from resource import struct_rusage def wait3(options: int) -> tuple[int, int, struct_rusage]: """Wait for completion of a child process. -Returns a tuple of information about the child process: - (pid, status, rusage) -""" + Returns a tuple of information about the child process: + (pid, status, rusage) + """ + def wait4(pid: int, options: int) -> tuple[int, int, struct_rusage]: """Wait for completion of a specific child process. -Returns a tuple of information about the child process: - (pid, status, rusage) -""" + Returns a tuple of information about the child process: + (pid, status, rusage) + """ + def WCOREDUMP(status: int, /) -> bool: - """Return True if the process returning status was dumped to a core file. -""" + """Return True if the process returning status was dumped to a core file.""" + def WIFCONTINUED(status: int) -> bool: """Return True if a particular process was continued from a job control stop. -Return True if the process returning status was continued from a -job control stop. -""" + Return True if the process returning status was continued from a + job control stop. + """ + def WIFSTOPPED(status: int) -> bool: - """Return True if the process returning status was stopped. -""" + """Return True if the process returning status was stopped.""" + def WIFSIGNALED(status: int) -> bool: - """Return True if the process returning status was terminated by a signal. -""" + """Return True if the process returning status was terminated by a signal.""" + def WIFEXITED(status: int) -> bool: - """Return True if the process returning status exited via the exit() system call. -""" + """Return True if the process returning status exited via the exit() system call.""" + def WEXITSTATUS(status: int) -> int: - """Return the process return code from status. -""" + """Return the process return code from status.""" + def WSTOPSIG(status: int) -> int: - """Return the signal that stopped the process that provided the status value. -""" + """Return the signal that stopped the process that provided the status value.""" + def WTERMSIG(status: int) -> int: - """Return the signal that terminated the process that provided the status value. -""" + """Return the signal that terminated the process that provided the status value.""" + def posix_spawn( path: StrOrBytesPath, argv: _ExecVArgs, @@ -2729,27 +2758,28 @@ job control stop. ) -> int: """Execute the program specified by path in a new process. - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - file_actions - A sequence of file action tuples. - setpgroup - The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids - If the value is `true` the POSIX_SPAWN_RESETIDS will be activated. - setsid - If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. - setsigmask - The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. - setsigdef - The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. - scheduler - A tuple with the scheduler policy (optional) and parameters. -""" + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + file_actions + A sequence of file action tuples. + setpgroup + The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. + resetids + If the value is `true` the POSIX_SPAWN_RESETIDS will be activated. + setsid + If the value is `true` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. 
+ setsigmask + The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. + setsigdef + The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. + scheduler + A tuple with the scheduler policy (optional) and parameters. + """ + def posix_spawnp( path: StrOrBytesPath, argv: _ExecVArgs, @@ -2766,27 +2796,27 @@ job control stop. ) -> int: """Execute the program specified by path in a new process. - path - Path of executable file. - argv - Tuple or list of strings. - env - Dictionary of strings mapping to strings. - file_actions - A sequence of file action tuples. - setpgroup - The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. - resetids - If the value is `True` the POSIX_SPAWN_RESETIDS will be activated. - setsid - If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. - setsigmask - The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. - setsigdef - The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. - scheduler - A tuple with the scheduler policy (optional) and parameters. -""" + path + Path of executable file. + argv + Tuple or list of strings. + env + Dictionary of strings mapping to strings. + file_actions + A sequence of file action tuples. + setpgroup + The pgroup to use with the POSIX_SPAWN_SETPGROUP flag. + resetids + If the value is `True` the POSIX_SPAWN_RESETIDS will be activated. + setsid + If the value is `True` the POSIX_SPAWN_SETSID or POSIX_SPAWN_SETSID_NP will be activated. + setsigmask + The sigmask to use with the POSIX_SPAWN_SETSIGMASK flag. + setsigdef + The sigmask to use with the POSIX_SPAWN_SETSIGDEF flag. + scheduler + A tuple with the scheduler policy (optional) and parameters. + """ POSIX_SPAWN_OPEN: Final = 0 POSIX_SPAWN_CLOSE: Final = 1 POSIX_SPAWN_DUP2: Final = 2 @@ -2796,72 +2826,77 @@ if sys.platform != "win32": class sched_param(structseq[int], tuple[int]): """Currently has only one field: sched_priority - sched_priority - A scheduling parameter. -""" + sched_priority + A scheduling parameter. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("sched_priority",) def __new__(cls, sched_priority: int) -> Self: ... @property def sched_priority(self) -> int: - """the scheduling priority -""" + """the scheduling priority""" def sched_get_priority_min(policy: int) -> int: # some flavors of Unix - """Get the minimum scheduling priority for policy. -""" + """Get the minimum scheduling priority for policy.""" + def sched_get_priority_max(policy: int) -> int: # some flavors of Unix - """Get the maximum scheduling priority for policy. -""" + """Get the maximum scheduling priority for policy.""" + def sched_yield() -> None: # some flavors of Unix - """Voluntarily relinquish the CPU. -""" + """Voluntarily relinquish the CPU.""" if sys.platform != "darwin": def sched_setscheduler(pid: int, policy: int, param: sched_param, /) -> None: # some flavors of Unix """Set the scheduling policy for the process identified by pid. -If pid is 0, the calling process is changed. -param is an instance of sched_param. -""" + If pid is 0, the calling process is changed. + param is an instance of sched_param. + """ + def sched_getscheduler(pid: int, /) -> int: # some flavors of Unix """Get the scheduling policy for the process identified by pid. -Passing 0 for pid returns the scheduling policy for the calling process. -""" + Passing 0 for pid returns the scheduling policy for the calling process. 
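A hedged sketch of posix_spawn() with a file-actions list, using the POSIX_SPAWN_OPEN constant defined above; the program path, output file name and permissions are illustrative:

import os

# Each file-action tuple begins with POSIX_SPAWN_OPEN/CLOSE/DUP2; this one
# redirects the child's stdout (fd 1) into a file before the exec.
actions = [
    (os.POSIX_SPAWN_OPEN, 1, "spawn-output.txt",
     os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o644),
]
pid = os.posix_spawn("/bin/echo", ["echo", "hello"], os.environ, file_actions=actions)
_, status = os.waitpid(pid, 0)
print(os.waitstatus_to_exitcode(status))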
+ """ + def sched_rr_get_interval(pid: int, /) -> float: # some flavors of Unix """Return the round-robin quantum for the process identified by pid, in seconds. -Value returned is a float. -""" + Value returned is a float. + """ + def sched_setparam(pid: int, param: sched_param, /) -> None: # some flavors of Unix """Set scheduling parameters for the process identified by pid. -If pid is 0, sets parameters for the calling process. -param should be an instance of sched_param. -""" + If pid is 0, sets parameters for the calling process. + param should be an instance of sched_param. + """ + def sched_getparam(pid: int, /) -> sched_param: # some flavors of Unix """Returns scheduling parameters for the process identified by pid. -If pid is 0, returns parameters for the calling process. -Return value is an instance of sched_param. -""" + If pid is 0, returns parameters for the calling process. + Return value is an instance of sched_param. + """ + def sched_setaffinity(pid: int, mask: Iterable[int], /) -> None: # some flavors of Unix """Set the CPU affinity of the process identified by pid to mask. -mask should be an iterable of integers identifying CPUs. -""" + mask should be an iterable of integers identifying CPUs. + """ + def sched_getaffinity(pid: int, /) -> set[int]: # some flavors of Unix """Return the affinity of the process identified by pid (or the current process if zero). -The affinity is returned as a set of CPU identifiers. -""" + The affinity is returned as a set of CPU identifiers. + """ def cpu_count() -> int | None: """Return the number of logical CPUs in the system. -Return None if indeterminable. -""" + Return None if indeterminable. + """ if sys.version_info >= (3, 13): # Documented to return `int | None`, but falls back to `len(sched_getaffinity(0))` when @@ -2869,42 +2904,40 @@ if sys.version_info >= (3, 13): if sys.platform != "win32" and sys.platform != "darwin": def process_cpu_count() -> int: """ -Get the number of CPUs of the current process. + Get the number of CPUs of the current process. -Return the number of logical CPUs usable by the calling thread of the -current process. Return None if indeterminable. -""" + Return the number of logical CPUs usable by the calling thread of the + current process. Return None if indeterminable. + """ else: def process_cpu_count() -> int | None: """Return the number of logical CPUs in the system. -Return None if indeterminable. -""" + Return None if indeterminable. + """ if sys.platform != "win32": # Unix only def confstr(name: str | int, /) -> str | None: - """Return a string-valued system configuration variable. -""" + """Return a string-valued system configuration variable.""" + def getloadavg() -> tuple[float, float, float]: """Return average recent system load information. -Return the number of processes in the system run queue averaged over -the last 1, 5, and 15 minutes as a tuple of three floats. -Raises OSError if the load average was unobtainable. -""" + Return the number of processes in the system run queue averaged over + the last 1, 5, and 15 minutes as a tuple of three floats. + Raises OSError if the load average was unobtainable. + """ + def sysconf(name: str | int, /) -> int: - """Return an integer-valued system configuration variable. -""" + """Return an integer-valued system configuration variable.""" if sys.platform == "linux": def getrandom(size: int, flags: int = 0) -> bytes: - """Obtain a series of random bytes. 
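A short sketch contrasting the CPU-count and load functions above (Unix-oriented; sched_getaffinity() exists only on some platforms, hence the hasattr() guard):

import os

print(os.cpu_count())          # logical CPUs in the system, or None
if hasattr(os, "sched_getaffinity"):
    # May be smaller than cpu_count(), e.g. inside a container or cpuset.
    print(len(os.sched_getaffinity(0)))
print(os.getloadavg())         # (1-, 5-, 15-minute load averages), Unix only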
-""" + """Obtain a series of random bytes.""" def urandom(size: int, /) -> bytes: - """Return a bytes object containing random bytes suitable for cryptographic use. -""" + """Return a bytes object containing random bytes suitable for cryptographic use.""" if sys.platform != "win32": def register_at_fork( @@ -2915,16 +2948,16 @@ if sys.platform != "win32": ) -> None: """Register callables to be called when forking a new process. - before - A callable to be called in the parent before the fork() syscall. - after_in_child - A callable to be called in the child after fork(). - after_in_parent - A callable to be called in the parent after fork(). + before + A callable to be called in the parent before the fork() syscall. + after_in_child + A callable to be called in the child after fork(). + after_in_parent + A callable to be called in the parent after fork(). -'before' callbacks are called in reverse order. -'after_in_child' and 'after_in_parent' callbacks are called in order. -""" + 'before' callbacks are called in reverse order. + 'after_in_child' and 'after_in_parent' callbacks are called in order. + """ if sys.platform == "win32": class _AddedDllDirectory: @@ -2937,13 +2970,13 @@ if sys.platform == "win32": def add_dll_directory(path: str) -> _AddedDllDirectory: """Add a path to the DLL search path. -This search path is used when resolving dependencies for imported -extension modules (the module itself is resolved through sys.path), -and also by ctypes. + This search path is used when resolving dependencies for imported + extension modules (the module itself is resolved through sys.path), + and also by ctypes. -Remove the directory by calling close() on the returned object or -using it in a with statement. -""" + Remove the directory by calling close() on the returned object or + using it in a with statement. + """ if sys.platform == "linux": MFD_CLOEXEC: Final[int] @@ -2967,44 +3000,44 @@ if sys.platform == "linux": def copy_file_range(src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ...) -> int: """Copy count bytes from one file descriptor to another. - src - Source file descriptor. - dst - Destination file descriptor. - count - Number of bytes to copy. - offset_src - Starting offset in src. - offset_dst - Starting offset in dst. - -If offset_src is None, then src is read from the current position; -respectively for offset_dst. -""" + src + Source file descriptor. + dst + Destination file descriptor. + count + Number of bytes to copy. + offset_src + Starting offset in src. + offset_dst + Starting offset in dst. + + If offset_src is None, then src is read from the current position; + respectively for offset_dst. + """ def waitstatus_to_exitcode(status: int) -> int: """Convert a wait status to an exit code. -On Unix: + On Unix: -* If WIFEXITED(status) is true, return WEXITSTATUS(status). -* If WIFSIGNALED(status) is true, return -WTERMSIG(status). -* Otherwise, raise a ValueError. + * If WIFEXITED(status) is true, return WEXITSTATUS(status). + * If WIFSIGNALED(status) is true, return -WTERMSIG(status). + * Otherwise, raise a ValueError. -On Windows, return status shifted right by 8 bits. + On Windows, return status shifted right by 8 bits. -On Unix, if the process is being traced or if waitpid() was called with -WUNTRACED option, the caller must first check if WIFSTOPPED(status) is true. -This function must not be called if WIFSTOPPED(status) is true. 
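A Linux-only sketch of copy_file_range() between two open descriptors; the file names and the 4 KiB count are placeholders:

import os

src = os.open("input.bin", os.O_RDONLY)   # assumed to already exist
dst = os.open("output.bin", os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
try:
    # The copy happens in the kernel, without staging the data in user space.
    copied = os.copy_file_range(src, dst, 4096)
    print(copied, "bytes copied")
finally:
    os.close(src)
    os.close(dst)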
-""" + On Unix, if the process is being traced or if waitpid() was called with + WUNTRACED option, the caller must first check if WIFSTOPPED(status) is true. + This function must not be called if WIFSTOPPED(status) is true. + """ if sys.platform == "linux": def pidfd_open(pid: int, flags: int = ...) -> int: """Return a file descriptor referring to the process *pid*. -The descriptor can be used to perform process management without races and -signals. -""" + The descriptor can be used to perform process management without races and + signals. + """ if sys.version_info >= (3, 12) and sys.platform == "linux": PIDFD_NONBLOCK: Final = 2048 @@ -3013,18 +3046,20 @@ if sys.version_info >= (3, 12) and sys.platform == "win32": def listdrives() -> list[str]: """Return a list containing the names of drives in the system. -A drive name typically looks like 'C:\\\\'. -""" + A drive name typically looks like 'C:\\\\'. + """ + def listmounts(volume: str) -> list[str]: """Return a list containing mount points for a particular volume. -'volume' should be a GUID path as returned from os.listvolumes. -""" + 'volume' should be a GUID path as returned from os.listvolumes. + """ + def listvolumes() -> list[str]: """Return a list containing the volumes in the system. -Volumes are typically represented as a GUID path. -""" + Volumes are typically represented as a GUID path. + """ if sys.version_info >= (3, 10) and sys.platform == "linux": EFD_CLOEXEC: Final[int] @@ -3034,14 +3069,14 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": SPLICE_F_MOVE: Final[int] SPLICE_F_NONBLOCK: Final[int] def eventfd(initval: int, flags: int = 524288) -> FileDescriptor: - """Creates and returns an event notification file descriptor. -""" + """Creates and returns an event notification file descriptor.""" + def eventfd_read(fd: FileDescriptor) -> int: - """Read eventfd value -""" + """Read eventfd value""" + def eventfd_write(fd: FileDescriptor, value: int) -> None: - """Write eventfd value. -""" + """Write eventfd value.""" + def splice( src: FileDescriptor, dst: FileDescriptor, @@ -3052,23 +3087,23 @@ if sys.version_info >= (3, 10) and sys.platform == "linux": ) -> int: """Transfer count bytes from one pipe to a descriptor or vice versa. - src - Source file descriptor. - dst - Destination file descriptor. - count - Number of bytes to copy. - offset_src - Starting offset in src. - offset_dst - Starting offset in dst. - flags - Flags to modify the semantics of the call. - -If offset_src is None, then src is read from the current position; -respectively for offset_dst. The offset associated to the file -descriptor that refers to a pipe must be None. -""" + src + Source file descriptor. + dst + Destination file descriptor. + count + Number of bytes to copy. + offset_src + Starting offset in src. + offset_dst + Starting offset in dst. + flags + Flags to modify the semantics of the call. + + If offset_src is None, then src is read from the current position; + respectively for offset_dst. The offset associated to the file + descriptor that refers to a pipe must be None. + """ if sys.version_info >= (3, 12) and sys.platform == "linux": CLONE_FILES: Final[int] @@ -3088,51 +3123,55 @@ if sys.version_info >= (3, 12) and sys.platform == "linux": def unshare(flags: int) -> None: """Disassociate parts of a process (or thread) execution context. - flags - Namespaces to be unshared. -""" + flags + Namespaces to be unshared. 
+ """ + def setns(fd: FileDescriptorLike, nstype: int = 0) -> None: """Move the calling thread into different namespaces. - fd - A file descriptor to a namespace. - nstype - Type of namespace. -""" + fd + A file descriptor to a namespace. + nstype + Type of namespace. + """ if sys.version_info >= (3, 13) and sys.platform != "win32": def posix_openpt(oflag: int, /) -> int: """Open and return a file descriptor for a master pseudo-terminal device. -Performs a posix_openpt() C function call. The oflag argument is used to -set file status flags and file access modes as specified in the manual page -of posix_openpt() of your system. -""" + Performs a posix_openpt() C function call. The oflag argument is used to + set file status flags and file access modes as specified in the manual page + of posix_openpt() of your system. + """ + def grantpt(fd: FileDescriptorLike, /) -> None: """Grant access to the slave pseudo-terminal device. - fd - File descriptor of a master pseudo-terminal device. + fd + File descriptor of a master pseudo-terminal device. + + Performs a grantpt() C function call. + """ -Performs a grantpt() C function call. -""" def unlockpt(fd: FileDescriptorLike, /) -> None: """Unlock a pseudo-terminal master/slave pair. - fd - File descriptor of a master pseudo-terminal device. + fd + File descriptor of a master pseudo-terminal device. + + Performs an unlockpt() C function call. + """ -Performs an unlockpt() C function call. -""" def ptsname(fd: FileDescriptorLike, /) -> str: """Return the name of the slave pseudo-terminal device. - fd - File descriptor of a master pseudo-terminal device. + fd + File descriptor of a master pseudo-terminal device. -If the ptsname_r() C function is available, it is called; -otherwise, performs a ptsname() C function call. -""" + If the ptsname_r() C function is available, it is called; + otherwise, performs a ptsname() C function call. + """ if sys.version_info >= (3, 13) and sys.platform == "linux": TFD_TIMER_ABSTIME: Final = 1 @@ -3144,76 +3183,80 @@ if sys.version_info >= (3, 13) and sys.platform == "linux": def timerfd_create(clockid: int, /, *, flags: int = 0) -> int: """Create and return a timer file descriptor. - clockid - A valid clock ID constant as timer file descriptor. + clockid + A valid clock ID constant as timer file descriptor. - time.CLOCK_REALTIME - time.CLOCK_MONOTONIC - time.CLOCK_BOOTTIME - flags - 0 or a bit mask of os.TFD_NONBLOCK or os.TFD_CLOEXEC. + time.CLOCK_REALTIME + time.CLOCK_MONOTONIC + time.CLOCK_BOOTTIME + flags + 0 or a bit mask of os.TFD_NONBLOCK or os.TFD_CLOEXEC. - os.TFD_NONBLOCK - If *TFD_NONBLOCK* is set as a flag, read doesn't blocks. - If *TFD_NONBLOCK* is not set as a flag, read block until the timer fires. + os.TFD_NONBLOCK + If *TFD_NONBLOCK* is set as a flag, read doesn't blocks. + If *TFD_NONBLOCK* is not set as a flag, read block until the timer fires. + + os.TFD_CLOEXEC + If *TFD_CLOEXEC* is set as a flag, enable the close-on-exec flag + """ - os.TFD_CLOEXEC - If *TFD_CLOEXEC* is set as a flag, enable the close-on-exec flag -""" def timerfd_settime( fd: FileDescriptor, /, *, flags: int = 0, initial: float = 0.0, interval: float = 0.0 ) -> tuple[float, float]: """Alter a timer file descriptor's internal timer in seconds. - fd - A timer file descriptor. - flags - 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. - initial - The initial expiration time, in seconds. - interval - The timer's interval, in seconds. -""" + fd + A timer file descriptor. 
+ flags + 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. + initial + The initial expiration time, in seconds. + interval + The timer's interval, in seconds. + """ + def timerfd_settime_ns(fd: FileDescriptor, /, *, flags: int = 0, initial: int = 0, interval: int = 0) -> tuple[int, int]: """Alter a timer file descriptor's internal timer in nanoseconds. - fd - A timer file descriptor. - flags - 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. - initial - initial expiration timing in seconds. - interval - interval for the timer in seconds. -""" + fd + A timer file descriptor. + flags + 0 or a bit mask of TFD_TIMER_ABSTIME or TFD_TIMER_CANCEL_ON_SET. + initial + initial expiration timing in seconds. + interval + interval for the timer in seconds. + """ + def timerfd_gettime(fd: FileDescriptor, /) -> tuple[float, float]: """Return a tuple of a timer file descriptor's (interval, next expiration) in float seconds. - fd - A timer file descriptor. -""" + fd + A timer file descriptor. + """ + def timerfd_gettime_ns(fd: FileDescriptor, /) -> tuple[int, int]: """Return a tuple of a timer file descriptor's (interval, next expiration) in nanoseconds. - fd - A timer file descriptor. -""" + fd + A timer file descriptor. + """ if sys.version_info >= (3, 13) or sys.platform != "win32": # Added to Windows in 3.13. def fchmod(fd: int, mode: int) -> None: """Change the access permissions of the file given by file descriptor fd. - fd - The file descriptor of the file to be modified. - mode - Operating-system mode bitfield. - Be careful when using number literals for *mode*. The conventional UNIX notation for - numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in - Python. + fd + The file descriptor of the file to be modified. + mode + Operating-system mode bitfield. + Be careful when using number literals for *mode*. The conventional UNIX notation for + numeric modes uses an octal base, which needs to be indicated with a ``0o`` prefix in + Python. -Equivalent to os.chmod(fd, mode). -""" + Equivalent to os.chmod(fd, mode). + """ if sys.platform != "linux": if sys.version_info >= (3, 13) or sys.platform != "win32": @@ -3221,6 +3264,6 @@ if sys.platform != "linux": def lchmod(path: StrOrBytesPath, mode: int) -> None: """Change the access permissions of a file, without following symbolic links. -If path is a symlink, this affects the link itself rather than the target. -Equivalent to chmod(path, mode, follow_symlinks=False)." -""" + If path is a symlink, this affects the link itself rather than the target. + Equivalent to chmod(path, mode, follow_symlinks=False)." + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi index 907017dc0532d..4ba2953ca212d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/path.pyi @@ -9,6 +9,7 @@ platform, and is an alias to another module (e.g. ntpath). Some of this can actually be useful on non-Posix systems too, e.g. for manipulation of the pathname component of URLs. """ + import sys if sys.platform == "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi index 344d3cc8367d5..0c616fefc9053 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/parser.pyi @@ -1,57 +1,55 @@ -"""This is an interface to Python's internal parser. 
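# Editor's illustrative sketch, not part of the vendored diff: a periodic
# timer built on the Linux-only, Python 3.13+ timerfd API documented above.
import os
import struct
import time

fd = os.timerfd_create(time.CLOCK_MONOTONIC)
try:
    # First expiration after 0.5 s, then every 0.25 s.
    os.timerfd_settime(fd, initial=0.5, interval=0.25)
    print(os.timerfd_gettime(fd))          # (interval, time until next expiration)
    for _ in range(3):
        # A blocking read yields an 8-byte count of expirations since the
        # previous read.
        (expirations,) = struct.unpack("Q", os.read(fd, 8))
        print("fired", expirations, "time(s)")
finally:
    os.close(fd)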
-""" +"""This is an interface to Python's internal parser.""" + from _typeshed import StrOrBytesPath from collections.abc import Sequence from types import CodeType from typing import Any, ClassVar, final def expr(source: str) -> STType: - """Creates an ST object from an expression. -""" + """Creates an ST object from an expression.""" + def suite(source: str) -> STType: - """Creates an ST object from a suite. -""" + """Creates an ST object from a suite.""" + def sequence2st(sequence: Sequence[Any]) -> STType: - """Creates an ST object from a tree representation. -""" + """Creates an ST object from a tree representation.""" + def tuple2st(sequence: Sequence[Any]) -> STType: - """Creates an ST object from a tree representation. -""" + """Creates an ST object from a tree representation.""" + def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: - """Creates a list-tree representation of an ST. -""" + """Creates a list-tree representation of an ST.""" + def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: - """Creates a tuple-tree representation of an ST. -""" + """Creates a tuple-tree representation of an ST.""" + def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: - """Compiles an ST object into a code object. -""" + """Compiles an ST object into a code object.""" + def isexpr(st: STType) -> bool: - """Determines if an ST object was created from an expression. -""" + """Determines if an ST object was created from an expression.""" + def issuite(st: STType) -> bool: - """Determines if an ST object was created from a suite. -""" + """Determines if an ST object was created from a suite.""" class ParserError(Exception): ... @final class STType: - """Intermediate representation of a Python parse tree. -""" + """Intermediate representation of a Python parse tree.""" + __hash__: ClassVar[None] # type: ignore[assignment] def compile(self, filename: StrOrBytesPath = ...) -> CodeType: - """Compile this ST object into a code object. -""" + """Compile this ST object into a code object.""" + def isexpr(self) -> bool: - """Determines if this ST object was created from an expression. -""" + """Determines if this ST object was created from an expression.""" + def issuite(self) -> bool: - """Determines if this ST object was created from a suite. -""" + """Determines if this ST object was created from a suite.""" + def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: - """Creates a list-tree representation of this ST. -""" + """Creates a list-tree representation of this ST.""" + def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: - """Creates a tuple-tree representation of this ST. -""" + """Creates a tuple-tree representation of this ST.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi index eca854db596c3..bfd6d681e68cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/__init__.pyi @@ -4,6 +4,7 @@ This module provides classes to represent abstract paths and concrete paths with operations that have semantics appropriate for different operating systems. """ + import sys import types from _typeshed import ( @@ -37,12 +38,13 @@ if sys.version_info >= (3, 13): class PurePath(PathLike[str]): """Base class for manipulating paths without I/O. 
-PurePath represents a filesystem path and offers operations which -don't imply any actual filesystem I/O. Depending on your system, -instantiating a PurePath will return either a PurePosixPath or a -PureWindowsPath object. You can also instantiate either of these classes -directly, regardless of your system. -""" + PurePath represents a filesystem path and offers operations which + don't imply any actual filesystem I/O. Depending on your system, + instantiating a PurePath will return either a PurePosixPath or a + PureWindowsPath object. You can also instantiate either of these classes + directly, regardless of your system. + """ + if sys.version_info >= (3, 13): __slots__ = ( "_raw_paths", @@ -72,64 +74,67 @@ directly, regardless of your system. parser: ClassVar[types.ModuleType] def full_match(self, pattern: StrPath, *, case_sensitive: bool | None = None) -> bool: """ -Return True if this path matches the given glob-style pattern. The -pattern is matched against the entire path. -""" + Return True if this path matches the given glob-style pattern. The + pattern is matched against the entire path. + """ @property def parts(self) -> tuple[str, ...]: """An object providing sequence-like access to the -components in the filesystem path. -""" + components in the filesystem path. + """ + @property def drive(self) -> str: - """The drive prefix (letter or UNC path), if any. -""" + """The drive prefix (letter or UNC path), if any.""" + @property def root(self) -> str: - """The root of the path, if any. -""" + """The root of the path, if any.""" + @property def anchor(self) -> str: - """The concatenation of the drive and root, or ''. -""" + """The concatenation of the drive and root, or ''.""" + @property def name(self) -> str: - """The final path component, if any. -""" + """The final path component, if any.""" + @property def suffix(self) -> str: """ -The final component's last suffix, if any. + The final component's last suffix, if any. + + This includes the leading period. For example: '.txt' + """ -This includes the leading period. For example: '.txt' -""" @property def suffixes(self) -> list[str]: """ -A list of the final component's suffixes, if any. + A list of the final component's suffixes, if any. + + These include the leading periods. For example: ['.tar', '.gz'] + """ -These include the leading periods. For example: ['.tar', '.gz'] -""" @property def stem(self) -> str: - """The final path component, minus its last suffix. -""" + """The final path component, minus its last suffix.""" if sys.version_info >= (3, 12): def __new__(cls, *args: StrPath, **kwargs: Unused) -> Self: """Construct a PurePath from one or several strings and or existing -PurePath objects. The strings and path objects are combined so as -to yield a canonicalized path, which is incorporated into the -new PurePath object. -""" + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. + """ + def __init__(self, *args: StrPath) -> None: ... # pyright: ignore[reportInconsistentConstructor] else: def __new__(cls, *args: StrPath) -> Self: """Construct a PurePath from one or several strings and or existing - PurePath objects. The strings and path objects are combined so as - to yield a canonicalized path, which is incorporated into the - new PurePath object. - """ + PurePath objects. The strings and path objects are combined so as + to yield a canonicalized path, which is incorporated into the + new PurePath object. 
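# Editor's illustrative sketch, not part of the vendored diff: the purely
# lexical PurePath properties documented above; no filesystem access happens.
from pathlib import PurePosixPath, PureWindowsPath

p = PurePosixPath("/srv/data/archive.tar.gz")
print(p.parts)        # ('/', 'srv', 'data', 'archive.tar.gz')
print(p.name)         # 'archive.tar.gz'
print(p.suffix)       # '.gz'
print(p.suffixes)     # ['.tar', '.gz']
print(p.stem)         # 'archive.tar'
print(p.anchor)       # '/'

w = PureWindowsPath(r"C:\logs\app.log")
print((w.drive, w.root))  # ('C:', '\\')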
+ """ def __hash__(self) -> int: ... def __fspath__(self) -> str: ... @@ -141,19 +146,21 @@ new PurePath object. def __rtruediv__(self, key: StrPath) -> Self: ... def __bytes__(self) -> bytes: """Return the bytes representation of the path. This is only -recommended to use under Unix. -""" + recommended to use under Unix. + """ + def as_posix(self) -> str: """Return the string representation of the path with forward (/) -slashes. -""" + slashes. + """ + def as_uri(self) -> str: - """Return the path as a URI. -""" + """Return the path as a URI.""" + def is_absolute(self) -> bool: """True if the path is absolute (has both a root and, if applicable, -a drive). -""" + a drive). + """ if sys.version_info >= (3, 13): @deprecated( "Deprecated since Python 3.13; will be removed in Python 3.15. " @@ -161,125 +168,124 @@ a drive). ) def is_reserved(self) -> bool: """Return True if the path contains one of the special names reserved -by the system, if any. -""" + by the system, if any. + """ else: def is_reserved(self) -> bool: """Return True if the path contains one of the special names reserved - by the system, if any. -""" + by the system, if any. + """ if sys.version_info >= (3, 14): def is_relative_to(self, other: StrPath) -> bool: - """Return True if the path is relative to another path or False. - """ + """Return True if the path is relative to another path or False.""" elif sys.version_info >= (3, 12): def is_relative_to(self, other: StrPath, /, *_deprecated: StrPath) -> bool: - """Return True if the path is relative to another path or False. - """ + """Return True if the path is relative to another path or False.""" else: def is_relative_to(self, *other: StrPath) -> bool: - """Return True if the path is relative to another path or False. - """ - + """Return True if the path is relative to another path or False.""" if sys.version_info >= (3, 12): def match(self, path_pattern: str, *, case_sensitive: bool | None = None) -> bool: """ -Return True if this path matches the given pattern. If the pattern is -relative, matching is done from the right; otherwise, the entire path -is matched. The recursive wildcard '**' is *not* supported by this -method. -""" + Return True if this path matches the given pattern. If the pattern is + relative, matching is done from the right; otherwise, the entire path + is matched. The recursive wildcard '**' is *not* supported by this + method. + """ else: def match(self, path_pattern: str) -> bool: """ - Return True if this path matches the given pattern. - """ - + Return True if this path matches the given pattern. + """ if sys.version_info >= (3, 14): def relative_to(self, other: StrPath, *, walk_up: bool = False) -> Self: """Return the relative path to another path identified by the passed -arguments. If the operation is not possible (because this is not -related to the other path), raise ValueError. + arguments. If the operation is not possible (because this is not + related to the other path), raise ValueError. -The *walk_up* parameter controls whether `..` may be used to resolve -the path. -""" + The *walk_up* parameter controls whether `..` may be used to resolve + the path. + """ elif sys.version_info >= (3, 12): def relative_to(self, other: StrPath, /, *_deprecated: StrPath, walk_up: bool = False) -> Self: """Return the relative path to another path identified by the passed -arguments. If the operation is not possible (because this is not -related to the other path), raise ValueError. + arguments. 
If the operation is not possible (because this is not + related to the other path), raise ValueError. -The *walk_up* parameter controls whether `..` may be used to resolve -the path. -""" + The *walk_up* parameter controls whether `..` may be used to resolve + the path. + """ else: def relative_to(self, *other: StrPath) -> Self: """Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - a subpath of the other path), raise ValueError. - """ + arguments. If the operation is not possible (because this is not + a subpath of the other path), raise ValueError. + """ def with_name(self, name: str) -> Self: - """Return a new path with the file name changed. -""" + """Return a new path with the file name changed.""" + def with_stem(self, stem: str) -> Self: - """Return a new path with the stem changed. -""" + """Return a new path with the stem changed.""" + def with_suffix(self, suffix: str) -> Self: """Return a new path with the file suffix changed. If the path -has no suffix, add given suffix. If the given suffix is an empty -string, remove the suffix from the path. -""" + has no suffix, add given suffix. If the given suffix is an empty + string, remove the suffix from the path. + """ + def joinpath(self, *other: StrPath) -> Self: """Combine this path with one or several arguments, and return a -new path representing either a subpath (if all arguments are relative -paths) or a totally different path (if one of the arguments is -anchored). -""" + new path representing either a subpath (if all arguments are relative + paths) or a totally different path (if one of the arguments is + anchored). + """ + @property def parents(self) -> Sequence[Self]: - """A sequence of this path's logical parents. -""" + """A sequence of this path's logical parents.""" + @property def parent(self) -> Self: - """The logical parent of the path. -""" + """The logical parent of the path.""" if sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... if sys.version_info >= (3, 12): def with_segments(self, *args: StrPath) -> Self: """Construct a new path object from any number of path-like objects. -Subclasses may override this method to customize how new path objects -are created from methods like `iterdir()`. -""" + Subclasses may override this method to customize how new path objects + are created from methods like `iterdir()`. + """ class PurePosixPath(PurePath): """PurePath subclass for non-Windows systems. -On a POSIX system, instantiating a PurePath should return this object. -However, you can also instantiate it directly on any system. -""" + On a POSIX system, instantiating a PurePath should return this object. + However, you can also instantiate it directly on any system. + """ + __slots__ = () class PureWindowsPath(PurePath): """PurePath subclass for Windows systems. -On a Windows system, instantiating a PurePath should return this object. -However, you can also instantiate it directly on any system. -""" + On a Windows system, instantiating a PurePath should return this object. + However, you can also instantiate it directly on any system. + """ + __slots__ = () class Path(PurePath): """PurePath subclass that can make system calls. -Path represents a filesystem path but unlike PurePath, also offers -methods to do system calls on path objects. Depending on your system, -instantiating a Path will return either a PosixPath or a WindowsPath -object. 
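# Editor's illustrative sketch, not part of the vendored diff: deriving new
# paths with the methods documented above; everything stays purely lexical.
from pathlib import PurePosixPath

p = PurePosixPath("/srv/data/report.txt")
print(p.with_suffix(".md"))                 # /srv/data/report.md
print(p.with_stem("summary"))               # /srv/data/summary.txt
print(p.with_name("notes.rst"))             # /srv/data/notes.rst
print(p.joinpath("..", "backup"))           # /srv/data/report.txt/../backup (no normalization)
print([str(q) for q in p.parents])          # ['/srv/data', '/srv', '/']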
You can also instantiate a PosixPath or WindowsPath directly, -but cannot instantiate a WindowsPath on a POSIX system or vice versa. -""" + Path represents a filesystem path but unlike PurePath, also offers + methods to do system calls on path objects. Depending on your system, + instantiating a Path will return either a PosixPath or a WindowsPath + object. You can also instantiate a PosixPath or WindowsPath directly, + but cannot instantiate a WindowsPath on a POSIX system or vice versa. + """ + if sys.version_info >= (3, 14): __slots__ = ("_info",) elif sys.version_info >= (3, 10): @@ -294,191 +300,205 @@ but cannot instantiate a WindowsPath on a POSIX system or vice versa. @classmethod def cwd(cls) -> Self: - """Return a new path pointing to the current working directory. -""" + """Return a new path pointing to the current working directory.""" if sys.version_info >= (3, 10): def stat(self, *, follow_symlinks: bool = True) -> stat_result: """ -Return the result of the stat() system call on this path, like -os.stat() does. -""" + Return the result of the stat() system call on this path, like + os.stat() does. + """ + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: """ -Change the permissions of the path, like os.chmod(). -""" + Change the permissions of the path, like os.chmod(). + """ else: def stat(self) -> stat_result: """ - Return the result of the stat() system call on this path, like - os.stat() does. - """ - def chmod(self, mode: int) -> None: + Return the result of the stat() system call on this path, like + os.stat() does. """ - Change the permissions of the path, like os.chmod(). - """ + def chmod(self, mode: int) -> None: + """ + Change the permissions of the path, like os.chmod(). + """ if sys.version_info >= (3, 13): @classmethod def from_uri(cls, uri: str) -> Self: - """Return a new path from the given 'file' URI. -""" + """Return a new path from the given 'file' URI.""" + def is_dir(self, *, follow_symlinks: bool = True) -> bool: """ -Whether this path is a directory. -""" + Whether this path is a directory. + """ + def is_file(self, *, follow_symlinks: bool = True) -> bool: """ -Whether this path is a regular file (also True for symlinks pointing -to regular files). -""" + Whether this path is a regular file (also True for symlinks pointing + to regular files). + """ + def read_text(self, encoding: str | None = None, errors: str | None = None, newline: str | None = None) -> str: """ -Open the file in text mode, read it, and close the file. -""" + Open the file in text mode, read it, and close the file. + """ else: def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def is_dir(self) -> bool: """ - Whether this path is a directory. - """ + Whether this path is a directory. + """ + def is_file(self) -> bool: """ - Whether this path is a regular file (also True for symlinks pointing - to regular files). - """ - def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: + Whether this path is a regular file (also True for symlinks pointing + to regular files). """ - Open the file in text mode, read it, and close the file. - """ + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: + """ + Open the file in text mode, read it, and close the file. 
+ """ if sys.version_info >= (3, 13): def glob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: """Iterate over this subtree and yield all existing files (of any -kind, including directories) matching the given relative pattern. -""" - def rglob( - self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False - ) -> Iterator[Self]: + kind, including directories) matching the given relative pattern. + """ + + def rglob(self, pattern: str, *, case_sensitive: bool | None = None, recurse_symlinks: bool = False) -> Iterator[Self]: """Recursively yield all existing files (of any kind, including -directories) matching the given relative pattern, anywhere in -this subtree. -""" + directories) matching the given relative pattern, anywhere in + this subtree. + """ elif sys.version_info >= (3, 12): def glob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ + kind, including directories) matching the given relative pattern. + """ + def rglob(self, pattern: str, *, case_sensitive: bool | None = None) -> Generator[Self, None, None]: """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ + directories) matching the given relative pattern, anywhere in + this subtree. + """ else: def glob(self, pattern: str) -> Generator[Self, None, None]: """Iterate over this subtree and yield all existing files (of any - kind, including directories) matching the given relative pattern. - """ + kind, including directories) matching the given relative pattern. + """ + def rglob(self, pattern: str) -> Generator[Self, None, None]: """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - + directories) matching the given relative pattern, anywhere in + this subtree. + """ if sys.version_info >= (3, 12): def exists(self, *, follow_symlinks: bool = True) -> bool: """ -Whether this path exists. + Whether this path exists. -This method normally follows symlinks; to check whether a symlink exists, -add the argument follow_symlinks=False. -""" + This method normally follows symlinks; to check whether a symlink exists, + add the argument follow_symlinks=False. + """ else: def exists(self) -> bool: """ - Whether this path exists. - """ + Whether this path exists. + """ def is_symlink(self) -> bool: """ -Whether this path is a symbolic link. -""" + Whether this path is a symbolic link. + """ + def is_socket(self) -> bool: """ -Whether this path is a socket. -""" + Whether this path is a socket. + """ + def is_fifo(self) -> bool: """ -Whether this path is a FIFO. -""" + Whether this path is a FIFO. + """ + def is_block_device(self) -> bool: """ -Whether this path is a block device. -""" + Whether this path is a block device. + """ + def is_char_device(self) -> bool: """ -Whether this path is a character device. -""" + Whether this path is a character device. + """ if sys.version_info >= (3, 12): def is_junction(self) -> bool: """ -Whether this path is a junction. -""" + Whether this path is a junction. + """ def iterdir(self) -> Generator[Self, None, None]: """Yield path objects of the directory contents. -The children are yielded in arbitrary order, and the -special entries '.' 
and '..' are not included. -""" + The children are yielded in arbitrary order, and the + special entries '.' and '..' are not included. + """ + def lchmod(self, mode: int) -> None: """ -Like chmod(), except if the path points to a symlink, the symlink's -permissions are changed, rather than its target's. -""" + Like chmod(), except if the path points to a symlink, the symlink's + permissions are changed, rather than its target's. + """ + def lstat(self) -> stat_result: """ -Like stat(), except if the path points to a symlink, the symlink's -status information is returned, rather than its target's. -""" - def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: + Like stat(), except if the path points to a symlink, the symlink's + status information is returned, rather than its target's. """ -Create a new directory at this given path. -""" + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: + """ + Create a new directory at this given path. + """ if sys.version_info >= (3, 14): @property def info(self) -> PathInfo: """ -A PathInfo object that exposes the file type and other file attributes -of this path. -""" + A PathInfo object that exposes the file type and other file attributes + of this path. + """ + @overload def move_into(self, target_dir: _PathT) -> _PathT: # type: ignore[overload-overlap] """ -Move this file or directory tree into the given existing directory. -""" + Move this file or directory tree into the given existing directory. + """ + @overload def move_into(self, target_dir: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload def move(self, target: _PathT) -> _PathT: # type: ignore[overload-overlap] """ -Recursively move this file or directory tree to the given destination. -""" + Recursively move this file or directory tree to the given destination. + """ + @overload def move(self, target: StrPath) -> Self: ... # type: ignore[overload-overlap] @overload def copy_into(self, target_dir: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] """ -Copy this file or directory tree into the given existing directory. -""" + Copy this file or directory tree into the given existing directory. + """ + @overload def copy_into(self, target_dir: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] @overload def copy(self, target: _PathT, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> _PathT: # type: ignore[overload-overlap] """ -Recursively copy this file or directory tree to the given destination. -""" + Recursively copy this file or directory tree to the given destination. + """ + @overload def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> Self: ... # type: ignore[overload-overlap] @@ -495,9 +515,9 @@ Recursively copy this file or directory tree to the given destination. newline: str | None = None, ) -> TextIOWrapper: """ -Open the file pointed to by this path and return a file object, as -the built-in open() function does. -""" + Open the file pointed to by this path and return a file object, as + the built-in open() function does. + """ # Unbuffered binary mode: returns a FileIO @overload def open( @@ -548,219 +568,231 @@ the built-in open() function does. 
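# Editor's illustrative sketch, not part of the vendored diff: creating a
# directory tree and opening a file through Path.open(), per the docstrings
# above; the copy()/move() family additionally requires Python 3.14.
from pathlib import Path

out = Path("build") / "reports"
out.mkdir(parents=True, exist_ok=True)     # create missing ancestors, idempotent

with (out / "latest.txt").open("w", encoding="utf-8") as fh:
    fh.write("ok\n")                       # same semantics as built-in open()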
# raises UnsupportedOperation: def owner(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] """ -Return the login name of the file owner. -""" + Return the login name of the file owner. + """ + def group(self: Never, *, follow_symlinks: bool = True) -> str: # type: ignore[misc] """ -Return the group name of the file gid. -""" + Return the group name of the file gid. + """ else: def owner(self: Never) -> str: # type: ignore[misc] """ - Return the login name of the file owner. - """ + Return the login name of the file owner. + """ + def group(self: Never) -> str: # type: ignore[misc] """ - Return the group name of the file gid. - """ + Return the group name of the file gid. + """ else: if sys.version_info >= (3, 13): def owner(self, *, follow_symlinks: bool = True) -> str: """ -Return the login name of the file owner. -""" + Return the login name of the file owner. + """ + def group(self, *, follow_symlinks: bool = True) -> str: """ -Return the group name of the file gid. -""" + Return the group name of the file gid. + """ else: def owner(self) -> str: """ - Return the login name of the file owner. - """ - def group(self) -> str: + Return the login name of the file owner. """ - Return the group name of the file gid. - """ + def group(self) -> str: + """ + Return the group name of the file gid. + """ # This method does "exist" on Windows on <3.12, but always raises NotImplementedError # On py312+, it works properly on Windows, as with all other platforms if sys.platform == "win32" and sys.version_info < (3, 12): def is_mount(self: Never) -> bool: # type: ignore[misc] """ - Check if this path is a POSIX mount point - """ + Check if this path is a POSIX mount point + """ else: def is_mount(self) -> bool: """ -Check if this path is a mount point -""" + Check if this path is a mount point + """ def readlink(self) -> Self: """ -Return the path to which the symbolic link points. -""" - + Return the path to which the symbolic link points. + """ if sys.version_info >= (3, 10): def rename(self, target: StrPath) -> Self: """ -Rename this path to the target path. + Rename this path to the target path. -The target path may be absolute or relative. Relative paths are -interpreted relative to the current working directory, *not* the -directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. + """ -Returns the new Path instance pointing to the target path. -""" def replace(self, target: StrPath) -> Self: """ -Rename this path to the target path, overwriting if that path exists. + Rename this path to the target path, overwriting if that path exists. -The target path may be absolute or relative. Relative paths are -interpreted relative to the current working directory, *not* the -directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. -Returns the new Path instance pointing to the target path. -""" + Returns the new Path instance pointing to the target path. + """ else: def rename(self, target: str | PurePath) -> Self: """ - Rename this path to the target path. + Rename this path to the target path. - The target path may be absolute or relative. 
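# Editor's illustrative sketch, not part of the vendored diff: renaming as
# documented above; relative targets are resolved against the current
# working directory, not against the file's own directory.
from pathlib import Path

tmp = Path("state.json.tmp")
tmp.write_text('{"ready": true}', encoding="utf-8")
final = tmp.replace("state.json")          # overwrites an existing state.json
print(final)                               # the returned Path points at the target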
Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. + """ - Returns the new Path instance pointing to the target path. - """ def replace(self, target: str | PurePath) -> Self: """ - Rename this path to the target path, overwriting if that path exists. + Rename this path to the target path, overwriting if that path exists. - The target path may be absolute or relative. Relative paths are - interpreted relative to the current working directory, *not* the - directory of the Path object. + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. - Returns the new Path instance pointing to the target path. - """ + Returns the new Path instance pointing to the target path. + """ def resolve(self, strict: bool = False) -> Self: """ -Make the path absolute, resolving all symlinks on the way and also -normalizing it. -""" + Make the path absolute, resolving all symlinks on the way and also + normalizing it. + """ + def rmdir(self) -> None: """ -Remove this directory. The directory must be empty. -""" + Remove this directory. The directory must be empty. + """ + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: """ -Make this path a symlink pointing to the target path. -Note the order of arguments (link, target) is the reverse of os.symlink. -""" + Make this path a symlink pointing to the target path. + Note the order of arguments (link, target) is the reverse of os.symlink. + """ if sys.version_info >= (3, 10): def hardlink_to(self, target: StrOrBytesPath) -> None: """ -Make this path a hard link pointing to the same file as *target*. + Make this path a hard link pointing to the same file as *target*. -Note the order of arguments (self, target) is the reverse of os.link's. -""" + Note the order of arguments (self, target) is the reverse of os.link's. + """ def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: """ -Create this file with the given access mode, if it doesn't exist. -""" + Create this file with the given access mode, if it doesn't exist. + """ + def unlink(self, missing_ok: bool = False) -> None: """ -Remove this file or link. -If the path is a directory, use rmdir() instead. -""" + Remove this file or link. + If the path is a directory, use rmdir() instead. + """ + @classmethod def home(cls) -> Self: - """Return a new path pointing to expanduser('~'). - """ + """Return a new path pointing to expanduser('~').""" + def absolute(self) -> Self: """Return an absolute version of this path -No normalization or symlink resolution is performed. + No normalization or symlink resolution is performed. + + Use resolve() to resolve symlinks and remove '..' segments. + """ -Use resolve() to resolve symlinks and remove '..' segments. -""" def expanduser(self) -> Self: """Return a new path with expanded ~ and ~user constructs -(as returned by os.path.expanduser) -""" + (as returned by os.path.expanduser) + """ + def read_bytes(self) -> bytes: """ -Open the file in bytes mode, read it, and close the file. -""" + Open the file in bytes mode, read it, and close the file. 
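# Editor's illustrative sketch, not part of the vendored diff: links and
# resolution as documented above; creating symlinks may require extra
# privileges on Windows.
from pathlib import Path

target = Path("data.bin")
target.touch()                     # create the file if it does not exist
link = Path("current.bin")
link.symlink_to(target)            # the Path object itself becomes the link
print(link.readlink())             # data.bin
print(link.resolve())              # absolute, symlink-free path to data.bin
link.unlink()                      # removes the link, not the target
print(Path("~/projects").expanduser())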
+ """ + def samefile(self, other_path: StrPath) -> bool: """Return whether other_path is the same or not as this file -(as returned by os.path.samefile()). -""" + (as returned by os.path.samefile()). + """ + def write_bytes(self, data: ReadableBuffer) -> int: """ -Open the file in bytes mode, write to it, and close the file. -""" + Open the file in bytes mode, write to it, and close the file. + """ if sys.version_info >= (3, 10): def write_text( self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> int: """ -Open the file in text mode, write to it, and close the file. -""" + Open the file in text mode, write to it, and close the file. + """ else: def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: """ - Open the file in text mode, write to it, and close the file. - """ + Open the file in text mode, write to it, and close the file. + """ if sys.version_info < (3, 12): if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `hardlink_to()` instead.") def link_to(self, target: StrOrBytesPath) -> None: """ - Make the target path a hard link pointing to this path. + Make the target path a hard link pointing to this path. - Note this function does not make this path a hard link to *target*, - despite the implication of the function and argument names. The order - of arguments (target, link) is the reverse of Path.symlink_to, but - matches that of os.link. + Note this function does not make this path a hard link to *target*, + despite the implication of the function and argument names. The order + of arguments (target, link) is the reverse of Path.symlink_to, but + matches that of os.link. - Deprecated since Python 3.10 and scheduled for removal in Python 3.12. - Use `hardlink_to()` instead. - """ + Deprecated since Python 3.10 and scheduled for removal in Python 3.12. + Use `hardlink_to()` instead. + """ else: def link_to(self, target: StrOrBytesPath) -> None: """ - Make the target path a hard link pointing to this path. + Make the target path a hard link pointing to this path. - Note this function does not make this path a hard link to *target*, - despite the implication of the function and argument names. The order - of arguments (target, link) is the reverse of Path.symlink_to, but - matches that of os.link. + Note this function does not make this path a hard link to *target*, + despite the implication of the function and argument names. The order + of arguments (target, link) is the reverse of Path.symlink_to, but + matches that of os.link. - """ + """ if sys.version_info >= (3, 12): def walk( self, top_down: bool = True, on_error: Callable[[OSError], object] | None = None, follow_symlinks: bool = False ) -> Iterator[tuple[Self, list[str], list[str]]]: - """Walk the directory tree from this directory, similar to os.walk(). -""" + """Walk the directory tree from this directory, similar to os.walk().""" class PosixPath(Path, PurePosixPath): """Path subclass for non-Windows systems. -On a POSIX system, instantiating a Path should return this object. -""" + On a POSIX system, instantiating a Path should return this object. + """ + __slots__ = () class WindowsPath(Path, PureWindowsPath): """Path subclass for Windows systems. -On a Windows system, instantiating a Path should return this object. -""" + On a Windows system, instantiating a Path should return this object. 
+ """ + __slots__ = () if sys.version_info >= (3, 13): class UnsupportedOperation(NotImplementedError): - """An exception that is raised when an unsupported operation is attempted. - """ + """An exception that is raised when an unsupported operation is attempted.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi index 5b88245dec9b6..f5419ed28c44f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pathlib/types.pyi @@ -1,13 +1,15 @@ """ Protocols for supporting classes in pathlib. """ + from typing import Protocol, runtime_checkable @runtime_checkable class PathInfo(Protocol): """Protocol for path info objects, which support querying the file type. -Methods may return cached results. -""" + Methods may return cached results. + """ + def exists(self, *, follow_symlinks: bool = True) -> bool: ... def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... def is_file(self, *, follow_symlinks: bool = True) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi index 979ab6ec8f97e..6fc9e2a4fc9ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pdb.pyi @@ -335,6 +335,7 @@ a 'global' command, e.g.: (Pdb) global list_options; list_options = ['-l'] (Pdb) """ + import signal import sys from bdb import Bdb, _Backend @@ -358,87 +359,89 @@ _Mode: TypeAlias = Literal["inline", "cli"] line_prefix: Final[str] # undocumented class Restart(Exception): - """Causes a debugger to be restarted for the debugged python program. -""" + """Causes a debugger to be restarted for the debugged python program.""" def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: """Execute the *statement* (given as a string or a code object) -under debugger control. + under debugger control. -The debugger prompt appears before any code is executed; you can set -breakpoints and type continue, or you can step through the statement -using step or next. + The debugger prompt appears before any code is executed; you can set + breakpoints and type continue, or you can step through the statement + using step or next. + + The optional *globals* and *locals* arguments specify the + environment in which the code is executed; by default the + dictionary of the module __main__ is used (see the explanation of + the built-in exec() or eval() functions.). + """ -The optional *globals* and *locals* arguments specify the -environment in which the code is executed; by default the -dictionary of the module __main__ is used (see the explanation of -the built-in exec() or eval() functions.). -""" def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: """Evaluate the *expression* (given as a string or a code object) -under debugger control. + under debugger control. + + When runeval() returns, it returns the value of the expression. + Otherwise this function is similar to run(). + """ -When runeval() returns, it returns the value of the expression. -Otherwise this function is similar to run(). -""" def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... 
def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: """Call the function (a function or method object, not a string) -with the given arguments. + with the given arguments. -When runcall() returns, it returns whatever the function call -returned. The debugger prompt appears as soon as the function is -entered. -""" + When runcall() returns, it returns whatever the function call + returned. The debugger prompt appears as soon as the function is + entered. + """ if sys.version_info >= (3, 14): def set_default_backend(backend: _Backend) -> None: - """Set the default backend to use for Pdb instances. -""" + """Set the default backend to use for Pdb instances.""" + def get_default_backend() -> _Backend: - """Get the default backend to use for Pdb instances. -""" + """Get the default backend to use for Pdb instances.""" + def set_trace(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: """Enter the debugger at the calling stack frame. -This is useful to hard-code a breakpoint at a given point in a -program, even if the code is not otherwise being debugged (e.g. when -an assertion fails). If given, *header* is printed to the console -just before debugging begins. *commands* is an optional list of -pdb commands to run when the debugger starts. -""" + This is useful to hard-code a breakpoint at a given point in a + program, even if the code is not otherwise being debugged (e.g. when + an assertion fails). If given, *header* is printed to the console + just before debugging begins. *commands* is an optional list of + pdb commands to run when the debugger starts. + """ + async def set_trace_async(*, header: str | None = None, commands: Iterable[str] | None = None) -> None: """Enter the debugger at the calling stack frame, but in async mode. -This should be used as await pdb.set_trace_async(). Users can do await -if they enter the debugger with this function. Otherwise it's the same -as set_trace(). -""" + This should be used as await pdb.set_trace_async(). Users can do await + if they enter the debugger with this function. Otherwise it's the same + as set_trace(). + """ else: def set_trace(*, header: str | None = None) -> None: """Enter the debugger at the calling stack frame. -This is useful to hard-code a breakpoint at a given point in a -program, even if the code is not otherwise being debugged (e.g. when -an assertion fails). If given, *header* is printed to the console -just before debugging begins. -""" + This is useful to hard-code a breakpoint at a given point in a + program, even if the code is not otherwise being debugged (e.g. when + an assertion fails). If given, *header* is printed to the console + just before debugging begins. + """ def post_mortem(t: TracebackType | None = None) -> None: """Enter post-mortem debugging of the given *traceback*, or *exception* -object. + object. -If no traceback is given, it uses the one of the exception that is -currently being handled (an exception must be being handled if the -default is to be used). + If no traceback is given, it uses the one of the exception that is + currently being handled (an exception must be being handled if the + default is to be used). + + If `t` is an exception object, the `exceptions` command makes it possible to + list and inspect its chained exceptions (if any). + """ -If `t` is an exception object, the `exceptions` command makes it possible to -list and inspect its chained exceptions (if any). 
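# Editor's illustrative sketch, not part of the vendored diff: a hard-coded
# breakpoint plus post-mortem debugging, per the docstrings above; the
# header=/commands= keywords vary by Python version.
import pdb

def risky(divisor: int) -> float:
    pdb.set_trace(header="inspect 'divisor' here")   # drops into (Pdb)
    return 1 / divisor

try:
    risky(0)
except ZeroDivisionError:
    pdb.post_mortem()   # debug the exception that is currently being handled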
-""" def pm() -> None: - """Enter post-mortem debugging of the traceback found in sys.last_exc. -""" + """Enter post-mortem debugging of the traceback found in sys.last_exc.""" class Pdb(Bdb, Cmd): # Everything here is undocumented, except for __init__ @@ -501,12 +504,11 @@ class Pdb(Bdb, Cmd): def bp_commands(self, frame: FrameType) -> bool: """Call every command that was set for the current active breakpoint -(if there is one). - -Returns True if the normal interaction function must be called, -False otherwise. -""" + (if there is one). + Returns True if the normal interaction function must be called, + False otherwise. + """ if sys.version_info >= (3, 13): def interaction(self, frame: FrameType | None, tb_or_exc: TracebackType | BaseException | None) -> None: ... else: @@ -514,29 +516,30 @@ False otherwise. def displayhook(self, obj: object) -> None: """Custom displayhook for the exec in default(), which prevents -assignment of the _ variable in the builtins. -""" + assignment of the _ variable in the builtins. + """ + def handle_command_def(self, line: str) -> bool: - """Handles one command line during command list definition. -""" + """Handles one command line during command list definition.""" + def defaultFile(self) -> str: - """Produce a reasonable default. -""" + """Produce a reasonable default.""" + def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... if sys.version_info >= (3, 14): def checkline(self, filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> int: """Check whether specified line seems to be executable. -Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank -line or EOF). Warning: testing is not comprehensive. -""" + Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank + line or EOF). Warning: testing is not comprehensive. + """ else: def checkline(self, filename: str, lineno: int) -> int: """Check whether specified line seems to be executable. -Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank -line or EOF). Warning: testing is not comprehensive. -""" + Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank + line or EOF). Warning: testing is not comprehensive. + """ def _getval(self, arg: str) -> object: ... if sys.version_info >= (3, 14): @@ -548,16 +551,16 @@ line or EOF). Warning: testing is not comprehensive. def lookupmodule(self, filename: str) -> str | None: """Helper function for break/clear parsing -- may be overridden. -lookupmodule() translates (possibly incomplete) file or module name -into an absolute file name. + lookupmodule() translates (possibly incomplete) file or module name + into an absolute file name. -filename could be in format of: - * an absolute path like '/path/to/file.py' - * a relative path like 'file.py' or 'dir/file.py' - * a module name like 'module' or 'package.module' + filename could be in format of: + * an absolute path like '/path/to/file.py' + * a relative path like 'file.py' or 'dir/file.py' + * a module name like 'module' or 'package.module' -files and modules will be searched in sys.path. -""" + files and modules will be searched in sys.path. + """ if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... @@ -569,305 +572,331 @@ files and modules will be searched in sys.path. def do_commands(self, arg: str) -> bool | None: """(Pdb) commands [bpnumber] -(com) ... -(com) end -(Pdb) - -Specify a list of commands for breakpoint number bpnumber. 
-The commands themselves are entered on the following lines. -Type a line containing just 'end' to terminate the commands. -The commands are executed when the breakpoint is hit. - -To remove all commands from a breakpoint, type commands and -follow it immediately with end; that is, give no commands. - -With no bpnumber argument, commands refers to the last -breakpoint set. - -You can use breakpoint commands to start your program up -again. Simply use the continue command, or step, or any other -command that resumes execution. - -Specifying any command resuming execution (currently continue, -step, next, return, jump, quit and their abbreviations) -terminates the command list (as if that command was -immediately followed by end). This is because any time you -resume execution (even with a simple next or step), you may -encounter another breakpoint -- which could have its own -command list, leading to ambiguities about which list to -execute. - -If you use the 'silent' command in the command list, the usual -message about stopping at a breakpoint is not printed. This -may be desirable for breakpoints that are to print a specific -message and then continue. If none of the other commands -print anything, you will see no sign that the breakpoint was -reached. -""" + (com) ... + (com) end + (Pdb) + + Specify a list of commands for breakpoint number bpnumber. + The commands themselves are entered on the following lines. + Type a line containing just 'end' to terminate the commands. + The commands are executed when the breakpoint is hit. + + To remove all commands from a breakpoint, type commands and + follow it immediately with end; that is, give no commands. + + With no bpnumber argument, commands refers to the last + breakpoint set. + + You can use breakpoint commands to start your program up + again. Simply use the continue command, or step, or any other + command that resumes execution. + + Specifying any command resuming execution (currently continue, + step, next, return, jump, quit and their abbreviations) + terminates the command list (as if that command was + immediately followed by end). This is because any time you + resume execution (even with a simple next or step), you may + encounter another breakpoint -- which could have its own + command list, leading to ambiguities about which list to + execute. + + If you use the 'silent' command in the command list, the usual + message about stopping at a breakpoint is not printed. This + may be desirable for breakpoints that are to print a specific + message and then continue. If none of the other commands + print anything, you will see no sign that the breakpoint was + reached. + """ if sys.version_info >= (3, 14): def do_break(self, arg: str, temporary: bool = False) -> bool | None: """b(reak) [ ([filename:]lineno | function) [, condition] ] -Without argument, list all breaks. + Without argument, list all breaks. -With a line number argument, set a break at this line in the -current file. With a function name, set a break at the first -executable line of that function. If a second argument is -present, it is a string specifying an expression which must -evaluate to true before the breakpoint is honored. + With a line number argument, set a break at this line in the + current file. With a function name, set a break at the first + executable line of that function. If a second argument is + present, it is a string specifying an expression which must + evaluate to true before the breakpoint is honored. 
-The line number may be prefixed with a filename and a colon, -to specify a breakpoint in another file (probably one that -hasn't been loaded yet). The file is searched for on -sys.path; the .py suffix may be omitted. -""" + The line number may be prefixed with a filename and a colon, + to specify a breakpoint in another file (probably one that + hasn't been loaded yet). The file is searched for on + sys.path; the .py suffix may be omitted. + """ else: def do_break(self, arg: str, temporary: bool | Literal[0, 1] = 0) -> bool | None: """b(reak) [ ([filename:]lineno | function) [, condition] ] -Without argument, list all breaks. + Without argument, list all breaks. -With a line number argument, set a break at this line in the -current file. With a function name, set a break at the first -executable line of that function. If a second argument is -present, it is a string specifying an expression which must -evaluate to true before the breakpoint is honored. + With a line number argument, set a break at this line in the + current file. With a function name, set a break at the first + executable line of that function. If a second argument is + present, it is a string specifying an expression which must + evaluate to true before the breakpoint is honored. -The line number may be prefixed with a filename and a colon, -to specify a breakpoint in another file (probably one that -hasn't been loaded yet). The file is searched for on -sys.path; the .py suffix may be omitted. -""" + The line number may be prefixed with a filename and a colon, + to specify a breakpoint in another file (probably one that + hasn't been loaded yet). The file is searched for on + sys.path; the .py suffix may be omitted. + """ def do_tbreak(self, arg: str) -> bool | None: """tbreak [ ([filename:]lineno | function) [, condition] ] -Same arguments as break, but sets a temporary breakpoint: it -is automatically deleted when first hit. -""" + Same arguments as break, but sets a temporary breakpoint: it + is automatically deleted when first hit. + """ + def do_enable(self, arg: str) -> bool | None: """enable bpnumber [bpnumber ...] -Enables the breakpoints given as a space separated list of -breakpoint numbers. -""" + Enables the breakpoints given as a space separated list of + breakpoint numbers. + """ + def do_disable(self, arg: str) -> bool | None: """disable bpnumber [bpnumber ...] -Disables the breakpoints given as a space separated list of -breakpoint numbers. Disabling a breakpoint means it cannot -cause the program to stop execution, but unlike clearing a -breakpoint, it remains in the list of breakpoints and can be -(re-)enabled. -""" + Disables the breakpoints given as a space separated list of + breakpoint numbers. Disabling a breakpoint means it cannot + cause the program to stop execution, but unlike clearing a + breakpoint, it remains in the list of breakpoints and can be + (re-)enabled. + """ + def do_condition(self, arg: str) -> bool | None: """condition bpnumber [condition] -Set a new condition for the breakpoint, an expression which -must evaluate to true before the breakpoint is honored. If -condition is absent, any existing condition is removed; i.e., -the breakpoint is made unconditional. -""" + Set a new condition for the breakpoint, an expression which + must evaluate to true before the breakpoint is honored. If + condition is absent, any existing condition is removed; i.e., + the breakpoint is made unconditional. 
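# Editor's illustrative sketch, not part of the vendored diff: on Python
# 3.14+ the break/condition/continue commands documented above can be
# pre-loaded through set_trace(commands=...) instead of typing them at the
# (Pdb) prompt.
import pdb

def busy_loop(n: int) -> int:
    total = 0
    for i in range(n):
        total += i
    return total

pdb.set_trace(commands=["break busy_loop, n > 5", "continue"])
print(busy_loop(10))      # stops at busy_loop's first line only when n > 5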
+ """ + def do_ignore(self, arg: str) -> bool | None: """ignore bpnumber [count] -Set the ignore count for the given breakpoint number. If -count is omitted, the ignore count is set to 0. A breakpoint -becomes active when the ignore count is zero. When non-zero, -the count is decremented each time the breakpoint is reached -and the breakpoint is not disabled and any associated -condition evaluates to true. -""" + Set the ignore count for the given breakpoint number. If + count is omitted, the ignore count is set to 0. A breakpoint + becomes active when the ignore count is zero. When non-zero, + the count is decremented each time the breakpoint is reached + and the breakpoint is not disabled and any associated + condition evaluates to true. + """ + def do_clear(self, arg: str) -> bool | None: """cl(ear) [filename:lineno | bpnumber ...] -With a space separated list of breakpoint numbers, clear -those breakpoints. Without argument, clear all breaks (but -first ask confirmation). With a filename:lineno argument, -clear all breaks at that line in that file. -""" + With a space separated list of breakpoint numbers, clear + those breakpoints. Without argument, clear all breaks (but + first ask confirmation). With a filename:lineno argument, + clear all breaks at that line in that file. + """ + def do_where(self, arg: str) -> bool | None: """w(here) [count] -Print a stack trace. If count is not specified, print the full stack. -If count is 0, print the current frame entry. If count is positive, -print count entries from the most recent frame. If count is negative, -print -count entries from the least recent frame. -An arrow indicates the "current frame", which determines the -context of most commands. 'bt' is an alias for this command. -""" + Print a stack trace. If count is not specified, print the full stack. + If count is 0, print the current frame entry. If count is positive, + print count entries from the most recent frame. If count is negative, + print -count entries from the least recent frame. + An arrow indicates the "current frame", which determines the + context of most commands. 'bt' is an alias for this command. + """ if sys.version_info >= (3, 13): def do_exceptions(self, arg: str) -> bool | None: """exceptions [number] -List or change current exception in an exception chain. + List or change current exception in an exception chain. -Without arguments, list all the current exception in the exception -chain. Exceptions will be numbered, with the current exception indicated -with an arrow. + Without arguments, list all the current exception in the exception + chain. Exceptions will be numbered, with the current exception indicated + with an arrow. -If given an integer as argument, switch to the exception at that index. -""" + If given an integer as argument, switch to the exception at that index. + """ def do_up(self, arg: str) -> bool | None: """u(p) [count] -Move the current frame count (default one) levels up in the -stack trace (to an older frame). -""" + Move the current frame count (default one) levels up in the + stack trace (to an older frame). + """ + def do_down(self, arg: str) -> bool | None: """d(own) [count] -Move the current frame count (default one) levels down in the -stack trace (to a newer frame). -""" + Move the current frame count (default one) levels down in the + stack trace (to a newer frame). 
+ """ + def do_until(self, arg: str) -> bool | None: """unt(il) [lineno] -Without argument, continue execution until the line with a -number greater than the current one is reached. With a line -number, continue execution until a line with a number greater -or equal to that is reached. In both cases, also stop when -the current frame returns. -""" + Without argument, continue execution until the line with a + number greater than the current one is reached. With a line + number, continue execution until a line with a number greater + or equal to that is reached. In both cases, also stop when + the current frame returns. + """ + def do_step(self, arg: str) -> bool | None: """s(tep) -Execute the current line, stop at the first possible occasion -(either in a function that is called or in the current -function). -""" + Execute the current line, stop at the first possible occasion + (either in a function that is called or in the current + function). + """ + def do_next(self, arg: str) -> bool | None: """n(ext) -Continue execution until the next line in the current function -is reached or it returns. -""" + Continue execution until the next line in the current function + is reached or it returns. + """ + def do_run(self, arg: str) -> bool | None: """run [args...] -Restart the debugged python program. If a string is supplied -it is split with "shlex", and the result is used as the new -sys.argv. History, breakpoints, actions and debugger options -are preserved. "restart" is an alias for "run". -""" + Restart the debugged python program. If a string is supplied + it is split with "shlex", and the result is used as the new + sys.argv. History, breakpoints, actions and debugger options + are preserved. "restart" is an alias for "run". + """ + def do_return(self, arg: str) -> bool | None: """r(eturn) -Continue execution until the current function returns. -""" + Continue execution until the current function returns. + """ + def do_continue(self, arg: str) -> bool | None: """c(ont(inue)) -Continue execution, only stop when a breakpoint is encountered. -""" + Continue execution, only stop when a breakpoint is encountered. + """ + def do_jump(self, arg: str) -> bool | None: """j(ump) lineno -Set the next line that will be executed. Only available in -the bottom-most frame. This lets you jump back and execute -code again, or jump forward to skip code that you don't want -to run. + Set the next line that will be executed. Only available in + the bottom-most frame. This lets you jump back and execute + code again, or jump forward to skip code that you don't want + to run. + + It should be noted that not all jumps are allowed -- for + instance it is not possible to jump into the middle of a + for loop or out of a finally clause. + """ -It should be noted that not all jumps are allowed -- for -instance it is not possible to jump into the middle of a -for loop or out of a finally clause. -""" def do_debug(self, arg: str) -> bool | None: """debug code -Enter a recursive debugger that steps through the code -argument (which is an arbitrary expression or statement to be -executed in the current environment). -""" + Enter a recursive debugger that steps through the code + argument (which is an arbitrary expression or statement to be + executed in the current environment). + """ + def do_quit(self, arg: str) -> bool | None: """q(uit) | exit -Quit from the debugger. The program being executed is aborted. -""" + Quit from the debugger. The program being executed is aborted. 
+ """ + def do_EOF(self, arg: str) -> bool | None: """EOF -Handles the receipt of EOF as a command. -""" + Handles the receipt of EOF as a command. + """ + def do_args(self, arg: str) -> bool | None: """a(rgs) -Print the argument list of the current function. -""" + Print the argument list of the current function. + """ + def do_retval(self, arg: str) -> bool | None: """retval -Print the return value for the last return of a function. -""" + Print the return value for the last return of a function. + """ + def do_p(self, arg: str) -> bool | None: """p expression -Print the value of the expression. -""" + Print the value of the expression. + """ + def do_pp(self, arg: str) -> bool | None: """pp expression -Pretty-print the value of the expression. -""" + Pretty-print the value of the expression. + """ + def do_list(self, arg: str) -> bool | None: """l(ist) [first[, last] | .] -List source code for the current file. Without arguments, -list 11 lines around the current line or continue the previous -listing. With . as argument, list 11 lines around the current -line. With one argument, list 11 lines starting at that line. -With two arguments, list the given range; if the second -argument is less than the first, it is a count. + List source code for the current file. Without arguments, + list 11 lines around the current line or continue the previous + listing. With . as argument, list 11 lines around the current + line. With one argument, list 11 lines starting at that line. + With two arguments, list the given range; if the second + argument is less than the first, it is a count. + + The current line in the current frame is indicated by "->". + If an exception is being debugged, the line where the + exception was originally raised or propagated is indicated by + ">>", if it differs from the current line. + """ -The current line in the current frame is indicated by "->". -If an exception is being debugged, the line where the -exception was originally raised or propagated is indicated by -">>", if it differs from the current line. -""" def do_whatis(self, arg: str) -> bool | None: """whatis expression -Print the type of the argument. -""" + Print the type of the argument. + """ + def do_alias(self, arg: str) -> bool | None: """alias [name [command]] -Create an alias called 'name' that executes 'command'. The -command must *not* be enclosed in quotes. Replaceable -parameters can be indicated by %1, %2, and so on, while %* is -replaced by all the parameters. If no command is given, the -current alias for name is shown. If no name is given, all -aliases are listed. - -Aliases may be nested and can contain anything that can be -legally typed at the pdb prompt. Note! You *can* override -internal pdb commands with aliases! Those internal commands -are then hidden until the alias is removed. Aliasing is -recursively applied to the first word of the command line; all -other words in the line are left alone. - -As an example, here are two useful aliases (especially when -placed in the .pdbrc file): + Create an alias called 'name' that executes 'command'. The + command must *not* be enclosed in quotes. Replaceable + parameters can be indicated by %1, %2, and so on, while %* is + replaced by all the parameters. If no command is given, the + current alias for name is shown. If no name is given, all + aliases are listed. + + Aliases may be nested and can contain anything that can be + legally typed at the pdb prompt. Note! You *can* override + internal pdb commands with aliases! 
Those internal commands + are then hidden until the alias is removed. Aliasing is + recursively applied to the first word of the command line; all + other words in the line are left alone. + + As an example, here are two useful aliases (especially when + placed in the .pdbrc file): + + # Print instance variables (usage "pi classInst") + alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) + # Print instance variables in self + alias ps pi self + """ -# Print instance variables (usage "pi classInst") -alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k]) -# Print instance variables in self -alias ps pi self -""" def do_unalias(self, arg: str) -> bool | None: """unalias name -Delete the specified alias. -""" + Delete the specified alias. + """ + def do_help(self, arg: str) -> bool | None: """h(elp) -Without argument, print the list of available commands. -With a command name as argument, print help about that command. -"help pdb" shows the full pdb documentation. -"help exec" gives help on the ! command. -""" + Without argument, print the list of available commands. + With a command name as argument, print help about that command. + "help pdb" shows the full pdb documentation. + "help exec" gives help on the ! command. + """ do_b = do_break do_cl = do_clear do_w = do_where @@ -891,17 +920,18 @@ With a command name as argument, print help about that command. def help_exec(self) -> None: """(!) statement -Execute the (one-line) statement in the context of the current -stack frame. The exclamation point can be omitted unless the -first word of the statement resembles a debugger command, e.g.: -(Pdb) ! n=42 -(Pdb) + Execute the (one-line) statement in the context of the current + stack frame. The exclamation point can be omitted unless the + first word of the statement resembles a debugger command, e.g.: + (Pdb) ! n=42 + (Pdb) + + To assign to a global variable you must always prefix the command with + a 'global' command, e.g.: + (Pdb) global list_options; list_options = ['-l'] + (Pdb) + """ -To assign to a global variable you must always prefix the command with -a 'global' command, e.g.: -(Pdb) global list_options; list_options = ['-l'] -(Pdb) -""" def help_pdb(self) -> None: ... def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... if sys.version_info >= (3, 13): @@ -919,52 +949,54 @@ a 'global' command, e.g.: @property def rlcompleter(self) -> type[Completer]: """Return the `Completer` class from `rlcompleter`, while avoiding the -side effects of changing the completer from `import rlcompleter`. + side effects of changing the completer from `import rlcompleter`. -This is a compromise between GH-138860 and GH-139289. If GH-139289 is -fixed, then we don't need this and we can just `import rlcompleter` in -`Pdb.__init__`. -""" + This is a compromise between GH-138860 and GH-139289. If GH-139289 is + fixed, then we don't need this and we can just `import rlcompleter` in + `Pdb.__init__`. + """ def _select_frame(self, number: int) -> None: ... def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... - def _print_lines( - self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None - ) -> None: - """Print a range of lines. -""" + def _print_lines(self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None) -> None: + """Print a range of lines.""" + def _cmdloop(self) -> None: ... 
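A minimal sketch (assuming only the documented pdb.Pdb stdin/stdout parameters; the names below are illustrative, not part of the vendored stubs) of driving the (Pdb) commands documented above without an interactive terminal:

    import io
    import pdb

    # Commands fed to the (Pdb) prompt: print `total`, then continue.
    commands = io.StringIO("p total\nc\n")
    output = io.StringIO()
    # Passing an explicit stdout makes Pdb read its commands from the given stdin object.
    debugger = pdb.Pdb(stdin=commands, stdout=output)

    def scripted_session() -> None:
        total = 41 + 1
        debugger.set_trace()   # stops on the next line; 'p total' should print 42
        return None

    scripted_session()
    print(output.getvalue())
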
def do_display(self, arg: str) -> bool | None: """display [expression] -Display the value of the expression if it changed, each time execution -stops in the current frame. + Display the value of the expression if it changed, each time execution + stops in the current frame. + + Without expression, list all display expressions for the current frame. + """ -Without expression, list all display expressions for the current frame. -""" def do_interact(self, arg: str) -> bool | None: """interact -Start an interactive interpreter whose global namespace -contains all the (global and local) names found in the current scope. -""" + Start an interactive interpreter whose global namespace + contains all the (global and local) names found in the current scope. + """ + def do_longlist(self, arg: str) -> bool | None: """ll | longlist -List the whole source code for the current function or frame. -""" + List the whole source code for the current function or frame. + """ + def do_source(self, arg: str) -> bool | None: """source expression -Try to get source code for the given object and display it. -""" + Try to get source code for the given object and display it. + """ + def do_undisplay(self, arg: str) -> bool | None: """undisplay [expression] -Do not display the expression any more in the current frame. + Do not display the expression any more in the current frame. -Without expression, clear all display expressions for the current frame. -""" + Without expression, clear all display expressions for the current frame. + """ do_ll = do_longlist def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... def _complete_bpnumber(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... @@ -1004,6 +1036,6 @@ if sys.version_info < (3, 10): def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): - """String that doesn't quote its repr. -""" + """String that doesn't quote its repr.""" + def __repr__(self) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi index 847657500d83b..1348b91018be7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickle.pyi @@ -22,6 +22,7 @@ Misc variables: compatible_formats """ + from _pickle import ( PickleError as PickleError, Pickler as Pickler, @@ -133,22 +134,22 @@ bytes_types: tuple[type[Any], ...] # undocumented @final class PickleBuffer: - """Wrapper for potentially out-of-band buffers -""" + """Wrapper for potentially out-of-band buffers""" + def __new__(cls, buffer: ReadableBuffer) -> Self: ... def raw(self) -> memoryview: """Return a memoryview of the raw memory underlying this buffer. -Will raise BufferError is the buffer isn't contiguous. -""" + Will raise BufferError is the buffer isn't contiguous. + """ + def release(self) -> None: - """Release the underlying buffer exposed by the PickleBuffer object. -""" + """Release the underlying buffer exposed by the PickleBuffer object.""" + def __buffer__(self, flags: int, /) -> memoryview: - """Return a buffer object that exposes the underlying memory of the object. -""" + """Return a buffer object that exposes the underlying memory of the object.""" + def __release_buffer__(self, buffer: memoryview, /) -> None: - """Release the buffer object that exposes the underlying memory of the object. -""" + """Release the buffer object that exposes the underlying memory of the object.""" MARK: Final = b"(" STOP: Final = b"." 
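A minimal sketch of the protocol-5 out-of-band flow that PickleBuffer (stubbed above) exists for, using only documented pickle APIs; the variable names are illustrative:

    import pickle

    payload = bytearray(b"x" * 8)                 # a writable, contiguous buffer
    buf = pickle.PickleBuffer(payload)
    assert buf.raw().tobytes() == bytes(payload)  # raw() exposes the underlying memory

    out_of_band: list[pickle.PickleBuffer] = []
    # A buffer_callback returning a false value (list.append returns None) diverts
    # the buffer out of band instead of embedding it in the pickle stream.
    data = pickle.dumps(buf, protocol=5, buffer_callback=out_of_band.append)

    # The stream alone is incomplete; the diverted buffers must be supplied to loads().
    restored = pickle.loads(data, buffers=out_of_band)
    assert bytes(restored) == bytes(payload)
    buf.release()                                 # explicitly release the underlying buffer
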
@@ -232,43 +233,44 @@ READONLY_BUFFER: Final = b"\x98" def encode_long(x: int) -> bytes: # undocumented """Encode a long to a two's complement little-endian binary string. -Note that 0 is a special case, returning an empty string, to save a -byte in the LONG1 pickling context. - ->>> encode_long(0) -b'' ->>> encode_long(255) -b'\\xff\\x00' ->>> encode_long(32767) -b'\\xff\\x7f' ->>> encode_long(-256) -b'\\x00\\xff' ->>> encode_long(-32768) -b'\\x00\\x80' ->>> encode_long(-128) -b'\\x80' ->>> encode_long(127) -b'\\x7f' ->>> -""" + Note that 0 is a special case, returning an empty string, to save a + byte in the LONG1 pickling context. + + >>> encode_long(0) + b'' + >>> encode_long(255) + b'\\xff\\x00' + >>> encode_long(32767) + b'\\xff\\x7f' + >>> encode_long(-256) + b'\\x00\\xff' + >>> encode_long(-32768) + b'\\x00\\x80' + >>> encode_long(-128) + b'\\x80' + >>> encode_long(127) + b'\\x7f' + >>> + """ + def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: # undocumented """Decode a long from a two's complement little-endian binary string. ->>> decode_long(b'') -0 ->>> decode_long(b"\\xff\\x00") -255 ->>> decode_long(b"\\xff\\x7f") -32767 ->>> decode_long(b"\\x00\\xff") --256 ->>> decode_long(b"\\x00\\x80") --32768 ->>> decode_long(b"\\x80") --128 ->>> decode_long(b"\\x7f") -127 -""" + >>> decode_long(b'') + 0 + >>> decode_long(b"\\xff\\x00") + 255 + >>> decode_long(b"\\xff\\x7f") + 32767 + >>> decode_long(b"\\x00\\xff") + -256 + >>> decode_long(b"\\x00\\x80") + -32768 + >>> decode_long(b"\\x80") + -128 + >>> decode_long(b"\\x7f") + 127 + """ # undocumented pure-Python implementations class _Pickler: @@ -287,48 +289,50 @@ class _Pickler: ) -> None: """This takes a binary file for writing a pickle data stream. -The optional *protocol* argument tells the pickler to use the -given protocol; supported protocols are 0, 1, 2, 3, 4 and 5. -The default protocol is 5. It was introduced in Python 3.8, and -is incompatible with previous versions. + The optional *protocol* argument tells the pickler to use the + given protocol; supported protocols are 0, 1, 2, 3, 4 and 5. + The default protocol is 5. It was introduced in Python 3.8, and + is incompatible with previous versions. -Specifying a negative protocol version selects the highest -protocol version supported. The higher the protocol used, the -more recent the version of Python needed to read the pickle -produced. + Specifying a negative protocol version selects the highest + protocol version supported. The higher the protocol used, the + more recent the version of Python needed to read the pickle + produced. -The *file* argument must have a write() method that accepts a -single bytes argument. It can thus be a file object opened for -binary writing, an io.BytesIO instance, or any other custom -object that meets this interface. + The *file* argument must have a write() method that accepts a + single bytes argument. It can thus be a file object opened for + binary writing, an io.BytesIO instance, or any other custom + object that meets this interface. -If *fix_imports* is True and *protocol* is less than 3, pickle -will try to map the new Python 3 names to the old module names -used in Python 2, so that the pickle data stream is readable -with Python 2. + If *fix_imports* is True and *protocol* is less than 3, pickle + will try to map the new Python 3 names to the old module names + used in Python 2, so that the pickle data stream is readable + with Python 2. 
-If *buffer_callback* is None (the default), buffer views are -serialized into *file* as part of the pickle stream. + If *buffer_callback* is None (the default), buffer views are + serialized into *file* as part of the pickle stream. -If *buffer_callback* is not None, then it can be called any number -of times with a buffer view. If the callback returns a false value -(such as None), the given buffer is out-of-band; otherwise the -buffer is serialized in-band, i.e. inside the pickle stream. + If *buffer_callback* is not None, then it can be called any number + of times with a buffer view. If the callback returns a false value + (such as None), the given buffer is out-of-band; otherwise the + buffer is serialized in-band, i.e. inside the pickle stream. + + It is an error if *buffer_callback* is not None and *protocol* + is None or smaller than 5. + """ -It is an error if *buffer_callback* is not None and *protocol* -is None or smaller than 5. -""" def dump(self, obj: Any) -> None: - """Write a pickled representation of obj to the open file. -""" + """Write a pickled representation of obj to the open file.""" + def clear_memo(self) -> None: """Clears the pickler's "memo". -The memo is the data structure that remembers which objects the -pickler has already seen, so that shared or recursive objects -are pickled by reference and not by value. This method is -useful when re-using picklers. -""" + The memo is the data structure that remembers which objects the + pickler has already seen, so that shared or recursive objects + are pickled by reference and not by value. This method is + useful when re-using picklers. + """ + def persistent_id(self, obj: Any) -> Any: ... class _Unpickler: @@ -344,45 +348,47 @@ class _Unpickler: ) -> None: """This takes a binary file for reading a pickle data stream. -The protocol version of the pickle is detected automatically, so -no proto argument is needed. - -The argument *file* must have two methods, a read() method that -takes an integer argument, and a readline() method that requires -no arguments. Both methods should return bytes. Thus *file* -can be a binary file object opened for reading, an io.BytesIO -object, or any other custom object that meets this interface. - -The file-like object must have two methods, a read() method -that takes an integer argument, and a readline() method that -requires no arguments. Both methods should return bytes. -Thus file-like object can be a binary file object opened for -reading, a BytesIO object, or any other custom object that -meets this interface. - -If *buffers* is not None, it should be an iterable of buffer-enabled -objects that is consumed each time the pickle stream references -an out-of-band buffer view. Such buffers have been given in order -to the *buffer_callback* of a Pickler object. - -If *buffers* is None (the default), then the buffers are taken -from the pickle stream, assuming they are serialized there. -It is an error for *buffers* to be None if the pickle stream -was produced with a non-None *buffer_callback*. - -Other optional arguments are *fix_imports*, *encoding* and -*errors*, which are used to control compatibility support for -pickle stream generated by Python 2. If *fix_imports* is True, -pickle will try to map the old Python 2 names to the new names -used in Python 3. The *encoding* and *errors* tell pickle how -to decode 8-bit string instances pickled by Python 2; these -default to 'ASCII' and 'strict', respectively. 
*encoding* can be -'bytes' to read these 8-bit string instances as bytes objects. -""" + The protocol version of the pickle is detected automatically, so + no proto argument is needed. + + The argument *file* must have two methods, a read() method that + takes an integer argument, and a readline() method that requires + no arguments. Both methods should return bytes. Thus *file* + can be a binary file object opened for reading, an io.BytesIO + object, or any other custom object that meets this interface. + + The file-like object must have two methods, a read() method + that takes an integer argument, and a readline() method that + requires no arguments. Both methods should return bytes. + Thus file-like object can be a binary file object opened for + reading, a BytesIO object, or any other custom object that + meets this interface. + + If *buffers* is not None, it should be an iterable of buffer-enabled + objects that is consumed each time the pickle stream references + an out-of-band buffer view. Such buffers have been given in order + to the *buffer_callback* of a Pickler object. + + If *buffers* is None (the default), then the buffers are taken + from the pickle stream, assuming they are serialized there. + It is an error for *buffers* to be None if the pickle stream + was produced with a non-None *buffer_callback*. + + Other optional arguments are *fix_imports*, *encoding* and + *errors*, which are used to control compatibility support for + pickle stream generated by Python 2. If *fix_imports* is True, + pickle will try to map the old Python 2 names to the new names + used in Python 3. The *encoding* and *errors* tell pickle how + to decode 8-bit string instances pickled by Python 2; these + default to 'ASCII' and 'strict', respectively. *encoding* can be + 'bytes' to read these 8-bit string instances as bytes objects. + """ + def load(self) -> Any: """Read a pickled object representation from the open file. -Return the reconstituted object hierarchy specified in the file. -""" + Return the reconstituted object hierarchy specified in the file. + """ + def find_class(self, module: str, name: str) -> Any: ... def persistent_load(self, pid: Any) -> Any: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi index cd3f3401d4446..e5497b1ecb2f6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pickletools.pyi @@ -1,4 +1,4 @@ -""""Executable documentation" for the pickle module. +""" "Executable documentation" for the pickle module. Extensive comments about the pickle protocols and pickle-machine opcodes can be found here. Some functions meant for external use: @@ -9,6 +9,7 @@ genops(pickle) dis(pickle, out=None, memo=None, indentlevel=4) Print a symbolic disassembly of a pickle. 
""" + import sys from collections.abc import Callable, Iterator, MutableMapping from typing import IO, Any, Final @@ -35,114 +36,112 @@ class ArgumentDescriptor: def read_uint1(f: IO[bytes]) -> int: """ ->>> import io ->>> read_uint1(io.BytesIO(b'\\xff')) -255 -""" + >>> import io + >>> read_uint1(io.BytesIO(b'\\xff')) + 255 + """ uint1: ArgumentDescriptor def read_uint2(f: IO[bytes]) -> int: """ ->>> import io ->>> read_uint2(io.BytesIO(b'\\xff\\x00')) -255 ->>> read_uint2(io.BytesIO(b'\\xff\\xff')) -65535 -""" + >>> import io + >>> read_uint2(io.BytesIO(b'\\xff\\x00')) + 255 + >>> read_uint2(io.BytesIO(b'\\xff\\xff')) + 65535 + """ uint2: ArgumentDescriptor def read_int4(f: IO[bytes]) -> int: """ ->>> import io ->>> read_int4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) -255 ->>> read_int4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == -(2**31) -True -""" + >>> import io + >>> read_int4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) + 255 + >>> read_int4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == -(2**31) + True + """ int4: ArgumentDescriptor def read_uint4(f: IO[bytes]) -> int: """ ->>> import io ->>> read_uint4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) -255 ->>> read_uint4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == 2**31 -True -""" + >>> import io + >>> read_uint4(io.BytesIO(b'\\xff\\x00\\x00\\x00')) + 255 + >>> read_uint4(io.BytesIO(b'\\x00\\x00\\x00\\x80')) == 2**31 + True + """ uint4: ArgumentDescriptor def read_uint8(f: IO[bytes]) -> int: """ ->>> import io ->>> read_uint8(io.BytesIO(b'\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00')) -255 ->>> read_uint8(io.BytesIO(b'\\xff' * 8)) == 2**64-1 -True -""" + >>> import io + >>> read_uint8(io.BytesIO(b'\\xff\\x00\\x00\\x00\\x00\\x00\\x00\\x00')) + 255 + >>> read_uint8(io.BytesIO(b'\\xff' * 8)) == 2**64-1 + True + """ uint8: ArgumentDescriptor if sys.version_info >= (3, 12): - def read_stringnl( - f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1" - ) -> bytes | str: + def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True, *, encoding: str = "latin-1") -> bytes | str: """ ->>> import io ->>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) -'abcd' + >>> import io + >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) + 'abcd' ->>> read_stringnl(io.BytesIO(b"\\n")) -Traceback (most recent call last): -... -ValueError: no string quotes around b'' + >>> read_stringnl(io.BytesIO(b"\\n")) + Traceback (most recent call last): + ... + ValueError: no string quotes around b'' ->>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) -'' + >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) + '' ->>> read_stringnl(io.BytesIO(b"''\\n")) -'' + >>> read_stringnl(io.BytesIO(b"''\\n")) + '' ->>> read_stringnl(io.BytesIO(b'"abcd"')) -Traceback (most recent call last): -... -ValueError: no newline found when trying to read stringnl + >>> read_stringnl(io.BytesIO(b'"abcd"')) + Traceback (most recent call last): + ... + ValueError: no newline found when trying to read stringnl -Embedded escapes are undone in the result. ->>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) -'a\\n\\\\b\\x00c\\td' -""" + Embedded escapes are undone in the result. 
+ >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) + 'a\\n\\\\b\\x00c\\td' + """ else: def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: """ - >>> import io - >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) - 'abcd' + >>> import io + >>> read_stringnl(io.BytesIO(b"'abcd'\\nefg\\n")) + 'abcd' - >>> read_stringnl(io.BytesIO(b"\\n")) - Traceback (most recent call last): - ... - ValueError: no string quotes around b'' + >>> read_stringnl(io.BytesIO(b"\\n")) + Traceback (most recent call last): + ... + ValueError: no string quotes around b'' - >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) - '' + >>> read_stringnl(io.BytesIO(b"\\n"), stripquotes=False) + '' - >>> read_stringnl(io.BytesIO(b"''\\n")) - '' + >>> read_stringnl(io.BytesIO(b"''\\n")) + '' - >>> read_stringnl(io.BytesIO(b'"abcd"')) - Traceback (most recent call last): - ... - ValueError: no newline found when trying to read stringnl + >>> read_stringnl(io.BytesIO(b'"abcd"')) + Traceback (most recent call last): + ... + ValueError: no newline found when trying to read stringnl - Embedded escapes are undone in the result. - >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) - 'a\\n\\\\b\\x00c\\td' - """ + Embedded escapes are undone in the result. + >>> read_stringnl(io.BytesIO(br"'a\\n\\\\b\\x00c\\td'" + b"\\n'e'")) + 'a\\n\\\\b\\x00c\\td' + """ stringnl: ArgumentDescriptor @@ -152,227 +151,228 @@ stringnl_noescape: ArgumentDescriptor def read_stringnl_noescape_pair(f: IO[bytes]) -> str: """ ->>> import io ->>> read_stringnl_noescape_pair(io.BytesIO(b"Queue\\nEmpty\\njunk")) -'Queue Empty' -""" + >>> import io + >>> read_stringnl_noescape_pair(io.BytesIO(b"Queue\\nEmpty\\njunk")) + 'Queue Empty' + """ stringnl_noescape_pair: ArgumentDescriptor def read_string1(f: IO[bytes]) -> str: """ ->>> import io ->>> read_string1(io.BytesIO(b"\\x00")) -'' ->>> read_string1(io.BytesIO(b"\\x03abcdef")) -'abc' -""" + >>> import io + >>> read_string1(io.BytesIO(b"\\x00")) + '' + >>> read_string1(io.BytesIO(b"\\x03abcdef")) + 'abc' + """ string1: ArgumentDescriptor def read_string4(f: IO[bytes]) -> str: """ ->>> import io ->>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) -'' ->>> read_string4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) -'abc' ->>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) -Traceback (most recent call last): -... -ValueError: expected 50331648 bytes in a string4, but only 6 remain -""" + >>> import io + >>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) + '' + >>> read_string4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) + 'abc' + >>> read_string4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) + Traceback (most recent call last): + ... + ValueError: expected 50331648 bytes in a string4, but only 6 remain + """ string4: ArgumentDescriptor def read_bytes1(f: IO[bytes]) -> bytes: """ ->>> import io ->>> read_bytes1(io.BytesIO(b"\\x00")) -b'' ->>> read_bytes1(io.BytesIO(b"\\x03abcdef")) -b'abc' -""" + >>> import io + >>> read_bytes1(io.BytesIO(b"\\x00")) + b'' + >>> read_bytes1(io.BytesIO(b"\\x03abcdef")) + b'abc' + """ bytes1: ArgumentDescriptor def read_bytes4(f: IO[bytes]) -> bytes: """ ->>> import io ->>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) -b'' ->>> read_bytes4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) -b'abc' ->>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) -Traceback (most recent call last): -... 
-ValueError: expected 50331648 bytes in a bytes4, but only 6 remain -""" + >>> import io + >>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x00abc")) + b'' + >>> read_bytes4(io.BytesIO(b"\\x03\\x00\\x00\\x00abcdef")) + b'abc' + >>> read_bytes4(io.BytesIO(b"\\x00\\x00\\x00\\x03abcdef")) + Traceback (most recent call last): + ... + ValueError: expected 50331648 bytes in a bytes4, but only 6 remain + """ bytes4: ArgumentDescriptor def read_bytes8(f: IO[bytes]) -> bytes: """ ->>> import io, struct, sys ->>> read_bytes8(io.BytesIO(b"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00abc")) -b'' ->>> read_bytes8(io.BytesIO(b"\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00abcdef")) -b'abc' ->>> bigsize8 = struct.pack(">> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS -Traceback (most recent call last): -... -ValueError: expected ... bytes in a bytes8, but only 6 remain -""" + >>> import io, struct, sys + >>> read_bytes8(io.BytesIO(b"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00abc")) + b'' + >>> read_bytes8(io.BytesIO(b"\\x03\\x00\\x00\\x00\\x00\\x00\\x00\\x00abcdef")) + b'abc' + >>> bigsize8 = struct.pack(">> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + ValueError: expected ... bytes in a bytes8, but only 6 remain + """ bytes8: ArgumentDescriptor def read_unicodestringnl(f: IO[bytes]) -> str: """ ->>> import io ->>> read_unicodestringnl(io.BytesIO(b"abc\\\\uabcd\\njunk")) == 'abc\\uabcd' -True -""" + >>> import io + >>> read_unicodestringnl(io.BytesIO(b"abc\\\\uabcd\\njunk")) == 'abc\\uabcd' + True + """ unicodestringnl: ArgumentDescriptor def read_unicodestring1(f: IO[bytes]) -> str: """ ->>> import io ->>> s = 'abcd\\uabcd' ->>> enc = s.encode('utf-8') ->>> enc -b'abcd\\xea\\xaf\\x8d' ->>> n = bytes([len(enc)]) # little-endian 1-byte length ->>> t = read_unicodestring1(io.BytesIO(n + enc + b'junk')) ->>> s == t -True - ->>> read_unicodestring1(io.BytesIO(n + enc[:-1])) -Traceback (most recent call last): -... -ValueError: expected 7 bytes in a unicodestring1, but only 6 remain -""" + >>> import io + >>> s = 'abcd\\uabcd' + >>> enc = s.encode('utf-8') + >>> enc + b'abcd\\xea\\xaf\\x8d' + >>> n = bytes([len(enc)]) # little-endian 1-byte length + >>> t = read_unicodestring1(io.BytesIO(n + enc + b'junk')) + >>> s == t + True + + >>> read_unicodestring1(io.BytesIO(n + enc[:-1])) + Traceback (most recent call last): + ... + ValueError: expected 7 bytes in a unicodestring1, but only 6 remain + """ unicodestring1: ArgumentDescriptor def read_unicodestring4(f: IO[bytes]) -> str: """ ->>> import io ->>> s = 'abcd\\uabcd' ->>> enc = s.encode('utf-8') ->>> enc -b'abcd\\xea\\xaf\\x8d' ->>> n = bytes([len(enc), 0, 0, 0]) # little-endian 4-byte length ->>> t = read_unicodestring4(io.BytesIO(n + enc + b'junk')) ->>> s == t -True - ->>> read_unicodestring4(io.BytesIO(n + enc[:-1])) -Traceback (most recent call last): -... -ValueError: expected 7 bytes in a unicodestring4, but only 6 remain -""" + >>> import io + >>> s = 'abcd\\uabcd' + >>> enc = s.encode('utf-8') + >>> enc + b'abcd\\xea\\xaf\\x8d' + >>> n = bytes([len(enc), 0, 0, 0]) # little-endian 4-byte length + >>> t = read_unicodestring4(io.BytesIO(n + enc + b'junk')) + >>> s == t + True + + >>> read_unicodestring4(io.BytesIO(n + enc[:-1])) + Traceback (most recent call last): + ... 
+ ValueError: expected 7 bytes in a unicodestring4, but only 6 remain + """ unicodestring4: ArgumentDescriptor def read_unicodestring8(f: IO[bytes]) -> str: """ ->>> import io ->>> s = 'abcd\\uabcd' ->>> enc = s.encode('utf-8') ->>> enc -b'abcd\\xea\\xaf\\x8d' ->>> n = bytes([len(enc)]) + b'\\0' * 7 # little-endian 8-byte length ->>> t = read_unicodestring8(io.BytesIO(n + enc + b'junk')) ->>> s == t -True - ->>> read_unicodestring8(io.BytesIO(n + enc[:-1])) -Traceback (most recent call last): -... -ValueError: expected 7 bytes in a unicodestring8, but only 6 remain -""" + >>> import io + >>> s = 'abcd\\uabcd' + >>> enc = s.encode('utf-8') + >>> enc + b'abcd\\xea\\xaf\\x8d' + >>> n = bytes([len(enc)]) + b'\\0' * 7 # little-endian 8-byte length + >>> t = read_unicodestring8(io.BytesIO(n + enc + b'junk')) + >>> s == t + True + + >>> read_unicodestring8(io.BytesIO(n + enc[:-1])) + Traceback (most recent call last): + ... + ValueError: expected 7 bytes in a unicodestring8, but only 6 remain + """ unicodestring8: ArgumentDescriptor def read_decimalnl_short(f: IO[bytes]) -> int: """ ->>> import io ->>> read_decimalnl_short(io.BytesIO(b"1234\\n56")) -1234 + >>> import io + >>> read_decimalnl_short(io.BytesIO(b"1234\\n56")) + 1234 + + >>> read_decimalnl_short(io.BytesIO(b"1234L\\n56")) + Traceback (most recent call last): + ... + ValueError: invalid literal for int() with base 10: b'1234L' + """ ->>> read_decimalnl_short(io.BytesIO(b"1234L\\n56")) -Traceback (most recent call last): -... -ValueError: invalid literal for int() with base 10: b'1234L' -""" def read_decimalnl_long(f: IO[bytes]) -> int: """ ->>> import io + >>> import io ->>> read_decimalnl_long(io.BytesIO(b"1234L\\n56")) -1234 + >>> read_decimalnl_long(io.BytesIO(b"1234L\\n56")) + 1234 ->>> read_decimalnl_long(io.BytesIO(b"123456789012345678901234L\\n6")) -123456789012345678901234 -""" + >>> read_decimalnl_long(io.BytesIO(b"123456789012345678901234L\\n6")) + 123456789012345678901234 + """ decimalnl_short: ArgumentDescriptor decimalnl_long: ArgumentDescriptor def read_floatnl(f: IO[bytes]) -> float: """ ->>> import io ->>> read_floatnl(io.BytesIO(b"-1.25\\n6")) --1.25 -""" + >>> import io + >>> read_floatnl(io.BytesIO(b"-1.25\\n6")) + -1.25 + """ floatnl: ArgumentDescriptor def read_float8(f: IO[bytes]) -> float: """ ->>> import io, struct ->>> raw = struct.pack(">d", -1.25) ->>> raw -b'\\xbf\\xf4\\x00\\x00\\x00\\x00\\x00\\x00' ->>> read_float8(io.BytesIO(raw + b"\\n")) --1.25 -""" + >>> import io, struct + >>> raw = struct.pack(">d", -1.25) + >>> raw + b'\\xbf\\xf4\\x00\\x00\\x00\\x00\\x00\\x00' + >>> read_float8(io.BytesIO(raw + b"\\n")) + -1.25 + """ float8: ArgumentDescriptor def read_long1(f: IO[bytes]) -> int: """ ->>> import io ->>> read_long1(io.BytesIO(b"\\x00")) -0 ->>> read_long1(io.BytesIO(b"\\x02\\xff\\x00")) -255 ->>> read_long1(io.BytesIO(b"\\x02\\xff\\x7f")) -32767 ->>> read_long1(io.BytesIO(b"\\x02\\x00\\xff")) --256 ->>> read_long1(io.BytesIO(b"\\x02\\x00\\x80")) --32768 -""" + >>> import io + >>> read_long1(io.BytesIO(b"\\x00")) + 0 + >>> read_long1(io.BytesIO(b"\\x02\\xff\\x00")) + 255 + >>> read_long1(io.BytesIO(b"\\x02\\xff\\x7f")) + 32767 + >>> read_long1(io.BytesIO(b"\\x02\\x00\\xff")) + -256 + >>> read_long1(io.BytesIO(b"\\x02\\x00\\x80")) + -32768 + """ long1: ArgumentDescriptor def read_long4(f: IO[bytes]) -> int: """ ->>> import io ->>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x00")) -255 ->>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x7f")) -32767 ->>> 
read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\xff")) --256 ->>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\x80")) --32768 ->>> read_long1(io.BytesIO(b"\\x00\\x00\\x00\\x00")) -0 -""" + >>> import io + >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x00")) + 255 + >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\xff\\x7f")) + 32767 + >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\xff")) + -256 + >>> read_long4(io.BytesIO(b"\\x02\\x00\\x00\\x00\\x00\\x80")) + -32768 + >>> read_long1(io.BytesIO(b"\\x00\\x00\\x00\\x00")) + 0 + """ long4: ArgumentDescriptor @@ -427,29 +427,30 @@ opcodes: list[OpcodeInfo] def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: """Generate all the opcodes in a pickle. -'pickle' is a file-like object, or string, containing the pickle. + 'pickle' is a file-like object, or string, containing the pickle. -Each opcode in the pickle is generated, from the current pickle position, -stopping after a STOP opcode is delivered. A triple is generated for -each opcode: + Each opcode in the pickle is generated, from the current pickle position, + stopping after a STOP opcode is delivered. A triple is generated for + each opcode: - opcode, arg, pos + opcode, arg, pos -opcode is an OpcodeInfo record, describing the current opcode. + opcode is an OpcodeInfo record, describing the current opcode. -If the opcode has an argument embedded in the pickle, arg is its decoded -value, as a Python object. If the opcode doesn't have an argument, arg -is None. + If the opcode has an argument embedded in the pickle, arg is its decoded + value, as a Python object. If the opcode doesn't have an argument, arg + is None. + + If the pickle has a tell() method, pos was the value of pickle.tell() + before reading the current opcode. If the pickle is a bytes object, + it's wrapped in a BytesIO object, and the latter's tell() result is + used. Else (the pickle doesn't have a tell(), and it's not obvious how + to query its current position) pos is None. + """ -If the pickle has a tell() method, pos was the value of pickle.tell() -before reading the current opcode. If the pickle is a bytes object, -it's wrapped in a BytesIO object, and the latter's tell() result is -used. Else (the pickle doesn't have a tell(), and it's not obvious how -to query its current position) pos is None. -""" def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: - """Optimize a pickle string by removing unused PUT opcodes -""" + """Optimize a pickle string by removing unused PUT opcodes""" + def dis( pickle: bytes | bytearray | IO[bytes], out: IO[str] | None = None, @@ -459,38 +460,38 @@ def dis( ) -> None: """Produce a symbolic disassembly of a pickle. -'pickle' is a file-like object, or string, containing a (at least one) -pickle. The pickle is disassembled from the current position, through -the first STOP opcode encountered. + 'pickle' is a file-like object, or string, containing a (at least one) + pickle. The pickle is disassembled from the current position, through + the first STOP opcode encountered. -Optional arg 'out' is a file-like object to which the disassembly is -printed. It defaults to sys.stdout. + Optional arg 'out' is a file-like object to which the disassembly is + printed. It defaults to sys.stdout. -Optional arg 'memo' is a Python dict, used as the pickle's memo. It -may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes. 
-Passing the same memo object to another dis() call then allows disassembly -to proceed across multiple pickles that were all created by the same -pickler with the same memo. Ordinarily you don't need to worry about this. + Optional arg 'memo' is a Python dict, used as the pickle's memo. It + may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes. + Passing the same memo object to another dis() call then allows disassembly + to proceed across multiple pickles that were all created by the same + pickler with the same memo. Ordinarily you don't need to worry about this. -Optional arg 'indentlevel' is the number of blanks by which to indent -a new MARK level. It defaults to 4. + Optional arg 'indentlevel' is the number of blanks by which to indent + a new MARK level. It defaults to 4. -Optional arg 'annotate' if nonzero instructs dis() to add short -description of the opcode on each line of disassembled output. -The value given to 'annotate' must be an integer and is used as a -hint for the column where annotation should start. The default -value is 0, meaning no annotations. + Optional arg 'annotate' if nonzero instructs dis() to add short + description of the opcode on each line of disassembled output. + The value given to 'annotate' must be an integer and is used as a + hint for the column where annotation should start. The default + value is 0, meaning no annotations. -In addition to printing the disassembly, some sanity checks are made: + In addition to printing the disassembly, some sanity checks are made: -+ All embedded opcode arguments "make sense". + + All embedded opcode arguments "make sense". -+ Explicit and implicit pop operations have enough items on the stack. + + Explicit and implicit pop operations have enough items on the stack. -+ When an opcode implicitly refers to a markobject, a markobject is - actually on the stack. + + When an opcode implicitly refers to a markobject, a markobject is + actually on the stack. -+ A memo entry isn't referenced before it's defined. + + A memo entry isn't referenced before it's defined. -+ The markobject isn't stored in the memo. -""" + + The markobject isn't stored in the memo. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi index 2fc3232dff2c8..360cb519ff272 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pipes.pyi @@ -55,37 +55,39 @@ for the built-in function open() or for os.popen(). To create a new template object initialized to a given one: t2 = t.clone() """ + import os __all__ = ["Template"] class Template: - """Class representing a pipeline template. -""" + """Class representing a pipeline template.""" + def reset(self) -> None: - """t.reset() restores a pipeline template to its initial state. -""" + """t.reset() restores a pipeline template to its initial state.""" + def clone(self) -> Template: """t.clone() returns a new pipeline template with identical initial state as the current one. -""" + """ + def debug(self, flag: bool) -> None: - """t.debug(flag) turns debugging on or off. -""" + """t.debug(flag) turns debugging on or off.""" + def append(self, cmd: str, kind: str) -> None: - """t.append(cmd, kind) adds a new step at the end. -""" + """t.append(cmd, kind) adds a new step at the end.""" + def prepend(self, cmd: str, kind: str) -> None: - """t.prepend(cmd, kind) adds a new step at the front. 
-""" + """t.prepend(cmd, kind) adds a new step at the front.""" + def open(self, file: str, rw: str) -> os._wrap_close: """t.open(file, rw) returns a pipe or file object open for reading or writing; the file is the other end of the pipeline. -""" + """ + def copy(self, infile: str, outfile: str) -> int: ... # Not documented, but widely used. # Documented as shlex.quote since 3.3. def quote(s: str) -> str: - """Return a shell-escaped version of the string *s*. -""" + """Return a shell-escaped version of the string *s*.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi index 35641908a5705..955d0862fa4ea 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pkgutil.pyi @@ -1,5 +1,5 @@ -"""Utilities to support packages. -""" +"""Utilities to support packages.""" + import sys from _typeshed import StrOrBytesPath, SupportsRead from _typeshed.importlib import LoaderProtocol, MetaPathFinderProtocol, PathEntryFinderProtocol @@ -25,8 +25,8 @@ if sys.version_info < (3, 12): _PathT = TypeVar("_PathT", bound=Iterable[str]) class ModuleInfo(NamedTuple): - """A namedtuple with minimal info about a module. -""" + """A namedtuple with minimal info about a module.""" + module_finder: MetaPathFinderProtocol | PathEntryFinderProtocol name: str ispkg: bool @@ -34,54 +34,55 @@ class ModuleInfo(NamedTuple): def extend_path(path: _PathT, name: str) -> _PathT: """Extend a package's path. -Intended use is to place the following code in a package's __init__.py: - - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) - -For each directory on sys.path that has a subdirectory that -matches the package name, add the subdirectory to the package's -__path__. This is useful if one wants to distribute different -parts of a single logical package as multiple directories. - -It also looks for *.pkg files beginning where * matches the name -argument. This feature is similar to *.pth files (see site.py), -except that it doesn't special-case lines starting with 'import'. -A *.pkg file is trusted at face value: apart from checking for -duplicates, all entries found in a *.pkg file are added to the -path, regardless of whether they are exist the filesystem. (This -is a feature.) - -If the input path is not a list (as is the case for frozen -packages) it is returned unchanged. The input path is not -modified; an extended copy is returned. Items are only appended -to the copy at the end. - -It is assumed that sys.path is a sequence. Items of sys.path that -are not (unicode or 8-bit) strings referring to existing -directories are ignored. Unicode items of sys.path that cause -errors when used as filenames may cause this function to raise an -exception (in line with os.path.isdir() behavior). -""" + Intended use is to place the following code in a package's __init__.py: + + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + + For each directory on sys.path that has a subdirectory that + matches the package name, add the subdirectory to the package's + __path__. This is useful if one wants to distribute different + parts of a single logical package as multiple directories. + + It also looks for *.pkg files beginning where * matches the name + argument. This feature is similar to *.pth files (see site.py), + except that it doesn't special-case lines starting with 'import'. 
+ A *.pkg file is trusted at face value: apart from checking for + duplicates, all entries found in a *.pkg file are added to the + path, regardless of whether they are exist the filesystem. (This + is a feature.) + + If the input path is not a list (as is the case for frozen + packages) it is returned unchanged. The input path is not + modified; an extended copy is returned. Items are only appended + to the copy at the end. + + It is assumed that sys.path is a sequence. Items of sys.path that + are not (unicode or 8-bit) strings referring to existing + directories are ignored. Unicode items of sys.path that cause + errors when used as filenames may cause this function to raise an + exception (in line with os.path.isdir() behavior). + """ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpImporter: """PEP 302 Finder that wraps Python's "classic" import algorithm - ImpImporter(dirname) produces a PEP 302 finder that searches that - directory. ImpImporter(None) produces a PEP 302 finder that searches - the current sys.path, plus any modules that are frozen or built-in. + ImpImporter(dirname) produces a PEP 302 finder that searches that + directory. ImpImporter(None) produces a PEP 302 finder that searches + the current sys.path, plus any modules that are frozen or built-in. + + Note that ImpImporter does not currently support being used by placement + on sys.meta_path. + """ - Note that ImpImporter does not currently support being used by placement - on sys.meta_path. - """ def __init__(self, path: StrOrBytesPath | None = None) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.12. Use the `importlib` module instead.") class ImpLoader: - """PEP 302 Loader that wraps Python's "classic" import algorithm - """ + """PEP 302 Loader that wraps Python's "classic" import algorithm""" + def __init__(self, fullname: str, file: IO[str], filename: StrOrBytesPath, etc: tuple[str, str, int]) -> None: ... if sys.version_info < (3, 14): @@ -90,146 +91,153 @@ if sys.version_info < (3, 14): def find_loader(fullname: str) -> LoaderProtocol | None: """Find a "loader" object for fullname -This is a backwards compatibility wrapper around -importlib.util.find_spec that converts most failures to ImportError -and only returns the loader rather than the full spec -""" + This is a backwards compatibility wrapper around + importlib.util.find_spec that converts most failures to ImportError + and only returns the loader rather than the full spec + """ + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `importlib.util.find_spec()` instead.") def get_loader(module_or_name: str) -> LoaderProtocol | None: """Get a "loader" object for module_or_name -Returns None if the module cannot be found or imported. -If the named module is not already imported, its containing package -(if any) is imported, in order to establish the package __path__. -""" + Returns None if the module cannot be found or imported. + If the named module is not already imported, its containing package + (if any) is imported, in order to establish the package __path__. 
+ """ else: def find_loader(fullname: str) -> LoaderProtocol | None: """Find a "loader" object for fullname - This is a backwards compatibility wrapper around - importlib.util.find_spec that converts most failures to ImportError - and only returns the loader rather than the full spec - """ + This is a backwards compatibility wrapper around + importlib.util.find_spec that converts most failures to ImportError + and only returns the loader rather than the full spec + """ + def get_loader(module_or_name: str) -> LoaderProtocol | None: """Get a "loader" object for module_or_name - Returns None if the module cannot be found or imported. - If the named module is not already imported, its containing package - (if any) is imported, in order to establish the package __path__. - """ + Returns None if the module cannot be found or imported. + If the named module is not already imported, its containing package + (if any) is imported, in order to establish the package __path__. + """ def get_importer(path_item: StrOrBytesPath) -> PathEntryFinderProtocol | None: """Retrieve a finder for the given path item -The returned finder is cached in sys.path_importer_cache -if it was newly created by a path hook. + The returned finder is cached in sys.path_importer_cache + if it was newly created by a path hook. + + The cache (or part of it) can be cleared manually if a + rescan of sys.path_hooks is necessary. + """ -The cache (or part of it) can be cleared manually if a -rescan of sys.path_hooks is necessary. -""" def iter_importers(fullname: str = "") -> Iterator[MetaPathFinderProtocol | PathEntryFinderProtocol]: """Yield finders for the given module name -If fullname contains a '.', the finders will be for the package -containing fullname, otherwise they will be all registered top level -finders (i.e. those on both sys.meta_path and sys.path_hooks). + If fullname contains a '.', the finders will be for the package + containing fullname, otherwise they will be all registered top level + finders (i.e. those on both sys.meta_path and sys.path_hooks). -If the named module is in a package, that package is imported as a side -effect of invoking this function. + If the named module is in a package, that package is imported as a side + effect of invoking this function. + + If no module name is specified, all top level finders are produced. + """ -If no module name is specified, all top level finders are produced. -""" def iter_modules(path: Iterable[StrOrBytesPath] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: """Yields ModuleInfo for all submodules on path, -or, if path is None, all top-level modules on sys.path. + or, if path is None, all top-level modules on sys.path. -'path' should be either None or a list of paths to look for -modules in. + 'path' should be either None or a list of paths to look for + modules in. + + 'prefix' is a string to output on the front of every module name + on output. + """ -'prefix' is a string to output on the front of every module name -on output. -""" def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( path: Iterable[StrOrBytesPath] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None ) -> Iterator[ModuleInfo]: """Yields ModuleInfo for all modules recursively -on path, or, if path is None, all accessible modules. + on path, or, if path is None, all accessible modules. + + 'path' should be either None or a list of paths to look for + modules in. 
-'path' should be either None or a list of paths to look for -modules in. + 'prefix' is a string to output on the front of every module name + on output. -'prefix' is a string to output on the front of every module name -on output. + Note that this function must import all *packages* (NOT all + modules!) on the given path, in order to access the __path__ + attribute to find submodules. -Note that this function must import all *packages* (NOT all -modules!) on the given path, in order to access the __path__ -attribute to find submodules. + 'onerror' is a function which gets called with one argument (the + name of the package which was being imported) if any exception + occurs while trying to import a package. If no onerror function is + supplied, ImportErrors are caught and ignored, while all other + exceptions are propagated, terminating the search. -'onerror' is a function which gets called with one argument (the -name of the package which was being imported) if any exception -occurs while trying to import a package. If no onerror function is -supplied, ImportErrors are caught and ignored, while all other -exceptions are propagated, terminating the search. + Examples: -Examples: + # list all modules python can access + walk_packages() -# list all modules python can access -walk_packages() + # list all submodules of ctypes + walk_packages(ctypes.__path__, ctypes.__name__+'.') + """ -# list all submodules of ctypes -walk_packages(ctypes.__path__, ctypes.__name__+'.') -""" def get_data(package: str, resource: str) -> bytes | None: """Get a resource from a package. -This is a wrapper round the PEP 302 loader get_data API. The package -argument should be the name of a package, in standard module format -(foo.bar). The resource argument should be in the form of a relative -filename, using '/' as the path separator. The parent directory name '..' -is not allowed, and nor is a rooted name (starting with a '/'). + This is a wrapper round the PEP 302 loader get_data API. The package + argument should be the name of a package, in standard module format + (foo.bar). The resource argument should be in the form of a relative + filename, using '/' as the path separator. The parent directory name '..' + is not allowed, and nor is a rooted name (starting with a '/'). -The function returns a binary string, which is the contents of the -specified resource. + The function returns a binary string, which is the contents of the + specified resource. -For packages located in the filesystem, which have already been imported, -this is the rough equivalent of + For packages located in the filesystem, which have already been imported, + this is the rough equivalent of - d = os.path.dirname(sys.modules[package].__file__) - data = open(os.path.join(d, resource), 'rb').read() + d = os.path.dirname(sys.modules[package].__file__) + data = open(os.path.join(d, resource), 'rb').read() + + If the package cannot be located or loaded, or it uses a PEP 302 loader + which does not support get_data(), then None is returned. + """ -If the package cannot be located or loaded, or it uses a PEP 302 loader -which does not support get_data(), then None is returned. -""" def resolve_name(name: str) -> Any: """ -Resolve a name to an object. - -It is expected that `name` will be a string in one of the following -formats, where W is shorthand for a valid Python identifier and dot stands -for a literal period in these pseudo-regexes: - -W(.W)* -W(.W)*:(W(.W)*)? - -The first form is intended for backward compatibility only. 
It assumes that -some part of the dotted name is a package, and the rest is an object -somewhere within that package, possibly nested inside other objects. -Because the place where the package stops and the object hierarchy starts -can't be inferred by inspection, repeated attempts to import must be done -with this form. - -In the second form, the caller makes the division point clear through the -provision of a single colon: the dotted name to the left of the colon is a -package to be imported, and the dotted name to the right is the object -hierarchy within that package. Only one import is needed in this form. If -it ends with the colon, then a module object is returned. - -The function will return an object (which might be a module), or raise one -of the following exceptions: - -ValueError - if `name` isn't in a recognised format -ImportError - if an import failed when it shouldn't have -AttributeError - if a failure occurred when traversing the object hierarchy - within the imported package to get to the desired object. -""" + Resolve a name to an object. + + It is expected that `name` will be a string in one of the following + formats, where W is shorthand for a valid Python identifier and dot stands + for a literal period in these pseudo-regexes: + + W(.W)* + W(.W)*:(W(.W)*)? + + The first form is intended for backward compatibility only. It assumes that + some part of the dotted name is a package, and the rest is an object + somewhere within that package, possibly nested inside other objects. + Because the place where the package stops and the object hierarchy starts + can't be inferred by inspection, repeated attempts to import must be done + with this form. + + In the second form, the caller makes the division point clear through the + provision of a single colon: the dotted name to the left of the colon is a + package to be imported, and the dotted name to the right is the object + hierarchy within that package. Only one import is needed in this form. If + it ends with the colon, then a module object is returned. + + The function will return an object (which might be a module), or raise one + of the following exceptions: + + ValueError - if `name` isn't in a recognised format + ImportError - if an import failed when it shouldn't have + AttributeError - if a failure occurred when traversing the object hierarchy + within the imported package to get to the desired object. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi index 70d5957cfb399..cc2e1aa5694ec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/platform.pyi @@ -6,24 +6,26 @@ information concatenated as single string to stdout. The output format is usable as part of a filename. """ + import sys from typing import NamedTuple, type_check_only from typing_extensions import Self, deprecated, disjoint_base def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: """Tries to determine the libc version that the file executable -(which defaults to the Python interpreter) is linked against. + (which defaults to the Python interpreter) is linked against. -Returns a tuple of strings (lib,version) which default to the -given parameters in case the lookup fails. + Returns a tuple of strings (lib,version) which default to the + given parameters in case the lookup fails. 
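The two name formats that resolve_name() accepts are easiest to compare side by side; a small sketch (Python 3.9+, using json.encoder purely as an illustrative target):

    import pkgutil

    # First form, W(.W)*: the package/attribute split is inferred, so more
    # than one import may be attempted.
    enc = pkgutil.resolve_name("json.encoder.JSONEncoder")

    # Second form, W(.W)*:(W(.W)*)?: the colon marks the import boundary, so
    # exactly one import is performed.
    same = pkgutil.resolve_name("json.encoder:JSONEncoder")
    mod = pkgutil.resolve_name("json.encoder:")   # trailing colon -> the module itself

    assert enc is same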
-Note that the function has intimate knowledge of how different -libc versions add symbols to the executable and thus is probably -only usable for executables compiled using gcc. + Note that the function has intimate knowledge of how different + libc versions add symbols to the executable and thus is probably + only usable for executables compiled using gcc. -The file is read and scanned in chunks of chunksize bytes. + The file is read and scanned in chunks of chunksize bytes. + + """ -""" def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... def win32_edition() -> str: ... def win32_is_iot() -> bool: ... @@ -31,12 +33,12 @@ def mac_ver( release: str = "", versioninfo: tuple[str, str, str] = ("", "", ""), machine: str = "" ) -> tuple[str, tuple[str, str, str], str]: """Get macOS version information and return it as tuple (release, -versioninfo, machine) with versioninfo being a tuple (version, -dev_stage, non_release_version). + versioninfo, machine) with versioninfo being a tuple (version, + dev_stage, non_release_version). -Entries which cannot be determined are set to the parameter values -which default to ''. All tuple entries are strings. -""" + Entries which cannot be determined are set to the parameter values + which default to ''. All tuple entries are strings. + """ if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13; will be removed in Python 3.15.") @@ -48,14 +50,14 @@ if sys.version_info >= (3, 13): ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: """Version interface for Jython. -Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being -a tuple (vm_name, vm_release, vm_vendor) and osinfo being a -tuple (os_name, os_version, os_arch). + Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being + a tuple (vm_name, vm_release, vm_vendor) and osinfo being a + tuple (os_name, os_version, os_arch). -Values which cannot be determined are set to the defaults -given as parameters (which all default to ''). + Values which cannot be determined are set to the defaults + given as parameters (which all default to ''). -""" + """ else: def java_ver( @@ -64,7 +66,7 @@ else: vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", ""), ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: - """ Version interface for Jython. + """Version interface for Jython. Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being a tuple (vm_name, vm_release, vm_vendor) and osinfo being a @@ -73,36 +75,37 @@ else: Values which cannot be determined are set to the defaults given as parameters (which all default to ''). - """ + """ def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: """Returns (system, release, version) aliased to common -marketing names used for some systems. + marketing names used for some systems. -It also does some reordering of the information in some cases -where it would otherwise cause confusion. + It also does some reordering of the information in some cases + where it would otherwise cause confusion. + + """ -""" def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: """Queries the given executable (defaults to the Python interpreter -binary) for various architecture information. + binary) for various architecture information. 
-Returns a tuple (bits, linkage) which contains information about -the bit architecture and the linkage format used for the -executable. Both values are returned as strings. + Returns a tuple (bits, linkage) which contains information about + the bit architecture and the linkage format used for the + executable. Both values are returned as strings. -Values that cannot be determined are returned as given by the -parameter presets. If bits is given as '', the sizeof(pointer) -(or sizeof(long) on Python version < 1.5.2) is used as -indicator for the supported pointer size. + Values that cannot be determined are returned as given by the + parameter presets. If bits is given as '', the sizeof(pointer) + (or sizeof(long) on Python version < 1.5.2) is used as + indicator for the supported pointer size. -The function relies on the system's "file" command to do the -actual work. This is available on most if not all Unix -platforms. On some non-Unix platforms where the "file" command -does not exist and the executable is set to the Python interpreter -binary defaults from _default_architecture are used. + The function relies on the system's "file" command to do the + actual work. This is available on most if not all Unix + platforms. On some non-Unix platforms where the "file" command + does not exist and the executable is set to the Python interpreter + binary defaults from _default_architecture are used. -""" + """ # This class is not exposed. It calls itself platform.uname_result_base. # At runtime it only has 5 fields. @@ -123,16 +126,17 @@ class _uname_result_base(NamedTuple): if sys.version_info >= (3, 12): class uname_result(_uname_result_base): """ -A uname_result that's largely compatible with a -simple namedtuple except that 'processor' is -resolved late and cached to avoid calling "uname" -except when needed. -""" + A uname_result that's largely compatible with a + simple namedtuple except that 'processor' is + resolved late and cached to avoid calling "uname" + except when needed. + """ + __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: - """Create new instance of uname_result_base(system, node, release, version, machine) -""" + """Create new instance of uname_result_base(system, node, release, version, machine)""" + @property def processor(self) -> str: ... @@ -140,153 +144,167 @@ else: @disjoint_base class uname_result(_uname_result_base): """ - A uname_result that's largely compatible with a - simple namedtuple except that 'processor' is - resolved late and cached to avoid calling "uname" - except when needed. - """ + A uname_result that's largely compatible with a + simple namedtuple except that 'processor' is + resolved late and cached to avoid calling "uname" + except when needed. + """ + if sys.version_info >= (3, 10): __match_args__ = ("system", "node", "release", "version", "machine") # pyright: ignore[reportAssignmentType] def __new__(_cls, system: str, node: str, release: str, version: str, machine: str) -> Self: - """Create new instance of uname_result_base(system, node, release, version, machine) -""" + """Create new instance of uname_result_base(system, node, release, version, machine)""" + @property def processor(self) -> str: ... def uname() -> uname_result: """Fairly portable uname interface. Returns a tuple -of strings (system, node, release, version, machine, processor) -identifying the underlying platform. 
+ of strings (system, node, release, version, machine, processor) + identifying the underlying platform. -Note that unlike the os.uname function this also returns -possible processor information as an additional tuple entry. + Note that unlike the os.uname function this also returns + possible processor information as an additional tuple entry. -Entries which cannot be determined are set to ''. + Entries which cannot be determined are set to ''. + + """ -""" def system() -> str: """Returns the system/OS name, e.g. 'Linux', 'Windows' or 'Java'. -An empty string is returned if the value cannot be determined. + An empty string is returned if the value cannot be determined. + + """ -""" def node() -> str: """Returns the computer's network name (which may not be fully -qualified) + qualified) -An empty string is returned if the value cannot be determined. + An empty string is returned if the value cannot be determined. + + """ -""" def release() -> str: """Returns the system's release, e.g. '2.2.0' or 'NT' -An empty string is returned if the value cannot be determined. + An empty string is returned if the value cannot be determined. + + """ -""" def version() -> str: """Returns the system's release version, e.g. '#3 on degas' -An empty string is returned if the value cannot be determined. + An empty string is returned if the value cannot be determined. + + """ -""" def machine() -> str: """Returns the machine type, e.g. 'i386' -An empty string is returned if the value cannot be determined. + An empty string is returned if the value cannot be determined. + + """ -""" def processor() -> str: """Returns the (true) processor name, e.g. 'amdk6' -An empty string is returned if the value cannot be -determined. Note that many platforms do not provide this -information or simply return the same value as for machine(), -e.g. NetBSD does this. + An empty string is returned if the value cannot be + determined. Note that many platforms do not provide this + information or simply return the same value as for machine(), + e.g. NetBSD does this. + + """ -""" def python_implementation() -> str: """Returns a string identifying the Python implementation. -Currently, the following implementations are identified: - 'CPython' (C implementation of Python), - 'Jython' (Java implementation of Python), - 'PyPy' (Python implementation of Python). + Currently, the following implementations are identified: + 'CPython' (C implementation of Python), + 'Jython' (Java implementation of Python), + 'PyPy' (Python implementation of Python). + + """ -""" def python_version() -> str: """Returns the Python version as string 'major.minor.patchlevel' -Note that unlike the Python sys.version, the returned value -will always include the patchlevel (it defaults to 0). + Note that unlike the Python sys.version, the returned value + will always include the patchlevel (it defaults to 0). + + """ -""" def python_version_tuple() -> tuple[str, str, str]: """Returns the Python version as tuple (major, minor, patchlevel) -of strings. + of strings. -Note that unlike the Python sys.version, the returned value -will always include the patchlevel (it defaults to 0). + Note that unlike the Python sys.version, the returned value + will always include the patchlevel (it defaults to 0). + + """ -""" def python_branch() -> str: """Returns a string identifying the Python implementation -branch. + branch. -For CPython this is the SCM branch from which the -Python binary was built. 
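A quick tour of the informational helpers annotated above; each of them falls back to '' when a value cannot be determined:

    import platform

    u = platform.uname()            # uname_result(system, node, release, version, machine, processor)
    print(u.system, u.machine)
    print(platform.system())        # same value as u.system
    print(platform.python_implementation())    # 'CPython', 'PyPy', ...
    print(platform.python_version())            # always 'major.minor.patchlevel'
    print(platform.python_version_tuple())      # ('3', '12', '1')-style tuple of strings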
+ For CPython this is the SCM branch from which the + Python binary was built. -If not available, an empty string is returned. + If not available, an empty string is returned. + + """ -""" def python_revision() -> str: """Returns a string identifying the Python implementation -revision. + revision. -For CPython this is the SCM revision from which the -Python binary was built. + For CPython this is the SCM revision from which the + Python binary was built. -If not available, an empty string is returned. + If not available, an empty string is returned. + + """ -""" def python_build() -> tuple[str, str]: """Returns a tuple (buildno, builddate) stating the Python -build number and date as strings. + build number and date as strings. + + """ -""" def python_compiler() -> str: """Returns a string identifying the compiler used for compiling -Python. + Python. + + """ -""" def platform(aliased: bool = False, terse: bool = False) -> str: """Returns a single string identifying the underlying platform -with as much useful information as possible (but no more :). + with as much useful information as possible (but no more :). -The output is intended to be human readable rather than -machine parseable. It may look different on different -platforms and this is intended. + The output is intended to be human readable rather than + machine parseable. It may look different on different + platforms and this is intended. -If "aliased" is true, the function will use aliases for -various platforms that report system names which differ from -their common names, e.g. SunOS will be reported as -Solaris. The system_alias() function is used to implement -this. + If "aliased" is true, the function will use aliases for + various platforms that report system names which differ from + their common names, e.g. SunOS will be reported as + Solaris. The system_alias() function is used to implement + this. -Setting terse to true causes the function to return only the -absolute minimum information needed to identify the platform. + Setting terse to true causes the function to return only the + absolute minimum information needed to identify the platform. -""" + """ if sys.version_info >= (3, 10): def freedesktop_os_release() -> dict[str, str]: - """Return operation system identification from freedesktop.org os-release - """ + """Return operation system identification from freedesktop.org os-release""" if sys.version_info >= (3, 13): class AndroidVer(NamedTuple): - """AndroidVer(release, api_level, manufacturer, model, device, is_emulator) -""" + """AndroidVer(release, api_level, manufacturer, model, device, is_emulator)""" + release: str api_level: int manufacturer: str @@ -295,8 +313,8 @@ if sys.version_info >= (3, 13): is_emulator: bool class IOSVersionInfo(NamedTuple): - """IOSVersionInfo(system, release, model, is_simulator) -""" + """IOSVersionInfo(system, release, model, is_simulator)""" + system: str release: str model: str @@ -312,13 +330,12 @@ if sys.version_info >= (3, 13): ) -> AndroidVer: ... def ios_ver(system: str = "", release: str = "", model: str = "", is_simulator: bool = False) -> IOSVersionInfo: """Get iOS version information, and return it as a namedtuple: - (system, release, model, is_simulator). + (system, release, model, is_simulator). -If values can't be determined, they are set to values provided as -parameters. -""" + If values can't be determined, they are set to values provided as + parameters. 
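And the higher-level summary helpers; since freedesktop_os_release() (Python 3.10+) raises OSError when no os-release file is present, the sketch guards that call:

    import platform

    print(platform.platform())              # verbose, human-readable platform string
    print(platform.platform(terse=True))    # minimal identification only

    try:
        release = platform.freedesktop_os_release()
        print(release.get("PRETTY_NAME", ""))
    except OSError:
        pass   # not a freedesktop-style system (e.g. Windows or macOS)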
+ """ if sys.version_info >= (3, 14): def invalidate_caches() -> None: - """Invalidate the cached results. -""" + """Invalidate the cached results.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi index 87d6d149da8f3..845d5a7d2d4bf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/plistlib.pyi @@ -54,6 +54,7 @@ Parse Plist example: pl = plistlib.loads(plist) print(pl["foo"]) """ + import sys from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping @@ -65,8 +66,8 @@ from typing_extensions import Self __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] class PlistFormat(Enum): - """An enumeration. -""" + """An enumeration.""" + FMT_XML = 1 FMT_BINARY = 2 @@ -81,8 +82,9 @@ if sys.version_info >= (3, 13): aware_datetime: bool = False, ) -> Any: """Read a .plist file. 'fp' should be a readable and binary file object. -Return the unpacked root object (which usually is a dictionary). -""" + Return the unpacked root object (which usually is a dictionary). + """ + def loads( value: ReadableBuffer | str, *, @@ -91,20 +93,19 @@ Return the unpacked root object (which usually is a dictionary). aware_datetime: bool = False, ) -> Any: """Read a .plist file from a bytes object. -Return the unpacked root object (which usually is a dictionary). -""" + Return the unpacked root object (which usually is a dictionary). + """ else: def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: """Read a .plist file. 'fp' should be a readable and binary file object. - Return the unpacked root object (which usually is a dictionary). - """ - def loads( - value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... - ) -> Any: + Return the unpacked root object (which usually is a dictionary). + """ + + def loads(value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: """Read a .plist file from a bytes object. - Return the unpacked root object (which usually is a dictionary). - """ + Return the unpacked root object (which usually is a dictionary). + """ if sys.version_info >= (3, 13): def dump( @@ -117,8 +118,9 @@ if sys.version_info >= (3, 13): aware_datetime: bool = False, ) -> None: """Write 'value' to a .plist file. 'fp' should be a writable, -binary file object. -""" + binary file object. + """ + def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, @@ -127,8 +129,7 @@ binary file object. sort_keys: bool = True, aware_datetime: bool = False, ) -> bytes: - """Return a bytes object with the contents for a .plist file. - """ + """Return a bytes object with the contents for a .plist file.""" else: def dump( @@ -140,8 +141,9 @@ else: skipkeys: bool = False, ) -> None: """Write 'value' to a .plist file. 'fp' should be a writable, - binary file object. - """ + binary file object. + """ + def dumps( value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, @@ -149,8 +151,7 @@ else: skipkeys: bool = False, sort_keys: bool = True, ) -> bytes: - """Return a bytes object with the contents for a .plist file. 
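A round-trip through dumps()/loads() illustrates the signatures above; the payload dict is arbitrary example data:

    import plistlib

    payload = {"name": "example", "count": 3, "tags": ["a", "b"]}

    xml_bytes = plistlib.dumps(payload)                            # FMT_XML by default
    bin_bytes = plistlib.dumps(payload, fmt=plistlib.FMT_BINARY)

    assert plistlib.loads(xml_bytes) == payload
    assert plistlib.loads(bin_bytes) == payload

    with open("example.plist", "wb") as fp:    # dump()/load() work on binary file objects
        plistlib.dump(payload, fp)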
- """ + """Return a bytes object with the contents for a .plist file.""" class UID: data: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi index 44755f30f675a..c5c59d710f586 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/poplib.pyi @@ -2,6 +2,7 @@ Based on the J. Myers POP3 draft, Jan. 96 """ + import socket import ssl import sys @@ -25,47 +26,48 @@ HAVE_SSL: Final[bool] class POP3: """This class supports both the minimal and optional command sets. -Arguments can be strings or integers (where appropriate) -(e.g.: retr(1) and retr('1') both work equally well. - -Minimal Command Set: - USER name user(name) - PASS string pass_(string) - STAT stat() - LIST [msg] list(msg = None) - RETR msg retr(msg) - DELE msg dele(msg) - NOOP noop() - RSET rset() - QUIT quit() - -Optional Commands (some servers support these): - RPOP name rpop(name) - APOP name digest apop(name, digest) - TOP msg n top(msg, n) - UIDL [msg] uidl(msg = None) - CAPA capa() - STLS stls() - UTF8 utf8() - -Raises one exception: 'error_proto'. - -Instantiate with: - POP3(hostname, port=110) - -NB: the POP protocol locks the mailbox from user - authorization until QUIT, so be sure to get in, suck - the messages, and quit, each time you access the - mailbox. - - POP is a line-based protocol, which means large mail - messages consume lots of python cycles reading them - line-by-line. - - If it's available on your mail server, use IMAP4 - instead, it doesn't suffer from the two problems - above. -""" + Arguments can be strings or integers (where appropriate) + (e.g.: retr(1) and retr('1') both work equally well. + + Minimal Command Set: + USER name user(name) + PASS string pass_(string) + STAT stat() + LIST [msg] list(msg = None) + RETR msg retr(msg) + DELE msg dele(msg) + NOOP noop() + RSET rset() + QUIT quit() + + Optional Commands (some servers support these): + RPOP name rpop(name) + APOP name digest apop(name, digest) + TOP msg n top(msg, n) + UIDL [msg] uidl(msg = None) + CAPA capa() + STLS stls() + UTF8 utf8() + + Raises one exception: 'error_proto'. + + Instantiate with: + POP3(hostname, port=110) + + NB: the POP protocol locks the mailbox from user + authorization until QUIT, so be sure to get in, suck + the messages, and quit, each time you access the + mailbox. + + POP is a line-based protocol, which means large mail + messages consume lots of python cycles reading them + line-by-line. + + If it's available on your mail server, use IMAP4 + instead, it doesn't suffer from the two problems + above. + """ + encoding: str host: str port: int @@ -78,126 +80,137 @@ NB: the POP protocol locks the mailbox from user def user(self, user: str) -> bytes: """Send user name, return response -(should indicate password required). -""" + (should indicate password required). + """ + def pass_(self, pswd: str) -> bytes: """Send password, return response -(response includes message count, mailbox size). + (response includes message count, mailbox size). + + NB: mailbox is locked by server from here to 'quit()' + """ -NB: mailbox is locked by server from here to 'quit()' -""" def stat(self) -> tuple[int, int]: """Get mailbox status. -Result is tuple of 2 ints (message count, mailbox size) -""" + Result is tuple of 2 ints (message count, mailbox size) + """ + def list(self, which: Any | None = None) -> _LongResp: """Request listing, return result. 
-Result without a message number argument is in form -['response', ['mesg_num octets', ...], octets]. + Result without a message number argument is in form + ['response', ['mesg_num octets', ...], octets]. + + Result when a message number argument is given is a + single response: the "scan listing" for that message. + """ -Result when a message number argument is given is a -single response: the "scan listing" for that message. -""" def retr(self, which: Any) -> _LongResp: """Retrieve whole message number 'which'. -Result is in form ['response', ['line', ...], octets]. -""" + Result is in form ['response', ['line', ...], octets]. + """ + def dele(self, which: Any) -> bytes: """Delete message number 'which'. -Result is 'response'. -""" + Result is 'response'. + """ + def noop(self) -> bytes: """Does nothing. -One supposes the response indicates the server is alive. -""" + One supposes the response indicates the server is alive. + """ + def rset(self) -> bytes: - """Unmark all messages marked for deletion. -""" + """Unmark all messages marked for deletion.""" + def quit(self) -> bytes: - """Signoff: commit changes on server, unlock mailbox, close connection. -""" + """Signoff: commit changes on server, unlock mailbox, close connection.""" + def close(self) -> None: - """Close the connection without assuming anything about it. -""" + """Close the connection without assuming anything about it.""" + def rpop(self, user: str) -> bytes: - """Send RPOP command to access the mailbox with an alternate user. -""" + """Send RPOP command to access the mailbox with an alternate user.""" timestamp: Pattern[str] def apop(self, user: str, password: str) -> bytes: """Authorisation -- only possible if server has supplied a timestamp in initial greeting. + - only possible if server has supplied a timestamp in initial greeting. -Args: - user - mailbox user; - password - mailbox password. + Args: + user - mailbox user; + password - mailbox password. + + NB: mailbox is locked by server from here to 'quit()' + """ -NB: mailbox is locked by server from here to 'quit()' -""" def top(self, which: Any, howmuch: int) -> _LongResp: """Retrieve message header of message number 'which' -and first 'howmuch' lines of message body. + and first 'howmuch' lines of message body. + + Result is in form ['response', ['line', ...], octets]. + """ -Result is in form ['response', ['line', ...], octets]. -""" @overload def uidl(self) -> _LongResp: """Return message digest (unique id) list. -If 'which', result contains unique id for that message -in the form 'response mesgnum uid', otherwise result is -the list ['response', ['mesgnum uid', ...], octets] -""" + If 'which', result contains unique id for that message + in the form 'response mesgnum uid', otherwise result is + the list ['response', ['mesgnum uid', ...], octets] + """ + @overload def uidl(self, which: Any) -> bytes: ... def utf8(self) -> bytes: - """Try to enter UTF-8 mode (see RFC 6856). Returns server response. - """ + """Try to enter UTF-8 mode (see RFC 6856). Returns server response.""" + def capa(self) -> dict[str, _list[str]]: """Return server capabilities (RFC 2449) as a dictionary ->>> c=poplib.POP3('localhost') ->>> c.capa() -{'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'], - 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [], - 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [], - 'UIDL': [], 'RESP-CODES': []} ->>> - -Really, according to RFC 2449, the cyrus folks should avoid -having the implementation split into multiple arguments... 
-""" + >>> c=poplib.POP3('localhost') + >>> c.capa() + {'IMPLEMENTATION': ['Cyrus', 'POP3', 'server', 'v2.2.12'], + 'TOP': [], 'LOGIN-DELAY': ['0'], 'AUTH-RESP-CODE': [], + 'EXPIRE': ['NEVER'], 'USER': [], 'STLS': [], 'PIPELINING': [], + 'UIDL': [], 'RESP-CODES': []} + >>> + + Really, according to RFC 2449, the cyrus folks should avoid + having the implementation split into multiple arguments... + """ + def stls(self, context: ssl.SSLContext | None = None) -> bytes: """Start a TLS session on the active connection as specified in RFC 2595. -context - a ssl.SSLContext -""" + context - a ssl.SSLContext + """ class POP3_SSL(POP3): """POP3 client class over SSL connection -Instantiate with: POP3_SSL(hostname, port=995, context=None) + Instantiate with: POP3_SSL(hostname, port=995, context=None) - hostname - the hostname of the pop3 over ssl server - port - port number - context - a ssl.SSLContext + hostname - the hostname of the pop3 over ssl server + port - port number + context - a ssl.SSLContext + + See the methods of the parent class POP3 for more documentation. + """ -See the methods of the parent class POP3 for more documentation. -""" if sys.version_info >= (3, 12): def __init__( self, host: str, port: int = 995, *, timeout: float = ..., context: ssl.SSLContext | None = None ) -> None: ... def stls(self, context: Any = None) -> NoReturn: """The method unconditionally raises an exception since the -STLS command doesn't make any sense on an already established -SSL/TLS session. -""" + STLS command doesn't make any sense on an already established + SSL/TLS session. + """ else: def __init__( self, diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi index 9d62e94b5399d..52aeeae72d2b7 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posix.pyi @@ -3,6 +3,7 @@ standardized by the C Standard and the POSIX standard (a thinly disguised Unix interface). Refer to the library manual and corresponding Unix manual entries for more information on calls. """ + import sys if sys.platform != "win32": diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi index 1d0284b8f6e8d..d501e9538ff6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/posixpath.pyi @@ -9,6 +9,7 @@ platform, and is an alias to another module (e.g. ntpath). Some of this can actually be useful on non-Posix systems too, e.g. for manipulation of the pathname component of URLs. """ + import sys from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Iterable @@ -94,52 +95,54 @@ devnull: LiteralString # Overloads are necessary to work around python/mypy#17952 & python/mypy#11880 @overload def abspath(path: PathLike[AnyStr]) -> AnyStr: - """Return an absolute path. -""" + """Return an absolute path.""" + @overload def abspath(path: AnyStr) -> AnyStr: ... @overload def basename(p: PathLike[AnyStr]) -> AnyStr: - """Returns the final component of a pathname -""" + """Returns the final component of a pathname""" + @overload def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def dirname(p: PathLike[AnyStr]) -> AnyStr: - """Returns the directory component of a pathname -""" + """Returns the directory component of a pathname""" + @overload def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... 
@overload def expanduser(path: PathLike[AnyStr]) -> AnyStr: """Expand ~ and ~user constructions. If user or $HOME is unknown, -do nothing. -""" + do nothing. + """ + @overload def expanduser(path: AnyStr) -> AnyStr: ... @overload def expandvars(path: PathLike[AnyStr]) -> AnyStr: """Expand shell variables of form $var and ${var}. Unknown variables -are left unchanged. -""" + are left unchanged. + """ + @overload def expandvars(path: AnyStr) -> AnyStr: ... @overload def normcase(s: PathLike[AnyStr]) -> AnyStr: - """Normalize case of pathname. Has no effect under Posix -""" + """Normalize case of pathname. Has no effect under Posix""" + @overload def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def normpath(path: PathLike[AnyStr]) -> AnyStr: - """Normalize path, eliminating double slashes, etc. -""" + """Normalize path, eliminating double slashes, etc.""" + @overload def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... @overload def commonpath(paths: Iterable[LiteralString]) -> LiteralString: - """Given a sequence of path names, returns the longest common sub-path. -""" + """Given a sequence of path names, returns the longest common sub-path.""" + @overload def commonpath(paths: Iterable[StrPath]) -> str: ... @overload @@ -151,10 +154,11 @@ def commonpath(paths: Iterable[BytesPath]) -> bytes: ... @overload def join(a: LiteralString, /, *paths: LiteralString) -> LiteralString: """Join two or more pathname components, inserting '/' as needed. -If any component is an absolute path, all previous path components -will be discarded. An empty last part will result in a path that -ends with a separator. -""" + If any component is an absolute path, all previous path components + will be discarded. An empty last part will result in a path that + ends with a separator. + """ + @overload def join(a: StrPath, /, *paths: StrPath) -> str: ... @overload @@ -162,14 +166,15 @@ def join(a: BytesPath, /, *paths: BytesPath) -> bytes: ... @overload def realpath(filename: PathLike[AnyStr], *, strict: bool | _AllowMissingType = False) -> AnyStr: """Return the canonical path of the specified filename, eliminating any -symbolic links encountered in the path. -""" + symbolic links encountered in the path. + """ + @overload def realpath(filename: AnyStr, *, strict: bool | _AllowMissingType = False) -> AnyStr: ... @overload def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: - """Return a relative version of a path -""" + """Return a relative version of a path""" + @overload def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... @overload @@ -177,49 +182,53 @@ def relpath(path: StrPath, start: StrPath | None = None) -> str: ... @overload def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: """Split a pathname. Returns tuple "(head, tail)" where "tail" is -everything after the final slash. Either part may be empty. -""" + everything after the final slash. Either part may be empty. + """ + @overload def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: """Split a pathname into drive and path. On Posix, drive is always -empty. -""" + empty. + """ + @overload def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... @overload def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: """Split the extension from a pathname. -Extension is everything from the last dot to the end, ignoring -leading dots. 
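The splitting helpers compose predictably; a few quick checks (posixpath always uses '/' regardless of the host platform):

    import posixpath

    p = posixpath.join("/usr", "local", "lib/python3.12", "site-packages/pkg.tar.gz")
    print(posixpath.split(p))       # ('/usr/local/lib/python3.12/site-packages', 'pkg.tar.gz')
    print(posixpath.splitext(p))    # ('.../pkg.tar', '.gz'): only the last dot counts
    print(posixpath.basename(p))    # 'pkg.tar.gz'
    print(posixpath.normpath("/usr//local/./lib"))   # '/usr/local/lib'
    print(posixpath.isabs("relative/path"))          # False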
Returns "(root, ext)"; ext may be empty. -""" + Extension is everything from the last dot to the end, ignoring + leading dots. Returns "(root, ext)"; ext may be empty. + """ + @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... def isabs(s: StrOrBytesPath) -> bool: - """Test whether a path is absolute -""" + """Test whether a path is absolute""" + def islink(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a symbolic link -""" + """Test whether a path is a symbolic link""" + def ismount(path: FileDescriptorOrPath) -> bool: - """Test whether a path is a mount point -""" + """Test whether a path is a mount point""" + def lexists(path: FileDescriptorOrPath) -> bool: - """Test whether a path exists. Returns True for broken symbolic links -""" + """Test whether a path exists. Returns True for broken symbolic links""" if sys.version_info >= (3, 12): def isjunction(path: StrOrBytesPath) -> bool: """Test whether a path is a junction -Junctions are not supported on the current platform -""" + Junctions are not supported on the current platform + """ + @overload def splitroot(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr, AnyOrLiteralStr]: """Split a pathname into drive, root and tail. -The tail contains anything after the root. -""" + The tail contains anything after the root. + """ + @overload def splitroot(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr, AnyStr]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi index ebe3dd14848ff..6146ea8d584fc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pprint.pyi @@ -23,6 +23,7 @@ saferepr() data structures. """ + import sys from _typeshed import SupportsWrite from collections import deque @@ -41,8 +42,7 @@ if sys.version_info >= (3, 10): sort_dicts: bool = True, underscore_numbers: bool = False, ) -> str: - """Format a Python object into a pretty-printed representation. -""" + """Format a Python object into a pretty-printed representation.""" else: def pformat( @@ -54,8 +54,7 @@ else: compact: bool = False, sort_dicts: bool = True, ) -> str: - """Format a Python object into a pretty-printed representation. -""" + """Format a Python object into a pretty-printed representation.""" if sys.version_info >= (3, 10): def pp( @@ -69,8 +68,7 @@ if sys.version_info >= (3, 10): sort_dicts: bool = False, underscore_numbers: bool = False, ) -> None: - """Pretty-print a Python object -""" + """Pretty-print a Python object""" else: def pp( @@ -83,8 +81,7 @@ else: compact: bool = False, sort_dicts: bool = False, ) -> None: - """Pretty-print a Python object -""" + """Pretty-print a Python object""" if sys.version_info >= (3, 10): def pprint( @@ -98,8 +95,7 @@ if sys.version_info >= (3, 10): sort_dicts: bool = True, underscore_numbers: bool = False, ) -> None: - """Pretty-print a Python object to a stream [default is sys.stdout]. -""" + """Pretty-print a Python object to a stream [default is sys.stdout].""" else: def pprint( @@ -112,18 +108,16 @@ else: compact: bool = False, sort_dicts: bool = True, ) -> None: - """Pretty-print a Python object to a stream [default is sys.stdout]. -""" + """Pretty-print a Python object to a stream [default is sys.stdout].""" def isreadable(object: object) -> bool: - """Determine if saferepr(object) is readable by eval(). 
-""" + """Determine if saferepr(object) is readable by eval().""" + def isrecursive(object: object) -> bool: - """Determine if object requires a recursive representation. -""" + """Determine if object requires a recursive representation.""" + def saferepr(object: object) -> str: - """Version of repr() which can handle recursive data structures. -""" + """Version of repr() which can handle recursive data structures.""" class PrettyPrinter: if sys.version_info >= (3, 10): @@ -139,31 +133,31 @@ class PrettyPrinter: underscore_numbers: bool = False, ) -> None: """Handle pretty printing operations onto a stream using a set of -configured parameters. + configured parameters. -indent - Number of spaces to indent for each level of nesting. + indent + Number of spaces to indent for each level of nesting. -width - Attempted maximum number of columns in the output. + width + Attempted maximum number of columns in the output. -depth - The maximum depth to print out nested structures. + depth + The maximum depth to print out nested structures. -stream - The desired output stream. If omitted (or false), the standard - output stream available at construction will be used. + stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. -compact - If true, several items will be combined in one line. + compact + If true, several items will be combined in one line. -sort_dicts - If true, dict keys are sorted. + sort_dicts + If true, dict keys are sorted. -underscore_numbers - If true, digit groups are separated with underscores. + underscore_numbers + If true, digit groups are separated with underscores. -""" + """ else: def __init__( self, @@ -176,28 +170,28 @@ underscore_numbers sort_dicts: bool = True, ) -> None: """Handle pretty printing operations onto a stream using a set of - configured parameters. + configured parameters. - indent - Number of spaces to indent for each level of nesting. + indent + Number of spaces to indent for each level of nesting. - width - Attempted maximum number of columns in the output. + width + Attempted maximum number of columns in the output. - depth - The maximum depth to print out nested structures. + depth + The maximum depth to print out nested structures. - stream - The desired output stream. If omitted (or false), the standard - output stream available at construction will be used. + stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. - compact - If true, several items will be combined in one line. + compact + If true, several items will be combined in one line. - sort_dicts - If true, dict keys are sorted. + sort_dicts + If true, dict keys are sorted. - """ + """ def pformat(self, object: object) -> str: ... def pprint(self, object: object) -> None: ... @@ -205,9 +199,10 @@ underscore_numbers def isrecursive(self, object: object) -> bool: ... def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: """Format object for a specific context, returning a string -and flags indicating whether the representation is 'readable' -and whether the object represents a recursive construct. -""" + and flags indicating whether the representation is 'readable' + and whether the object represents a recursive construct. + """ + def _format( self, object: object, stream: SupportsWrite[str], indent: int, allowance: int, context: dict[int, int], level: int ) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi index ff85e2dc18b1b..dff0edf5e6c6e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/profile.pyi @@ -1,5 +1,5 @@ -"""Class for profiling Python code. -""" +"""Class for profiling Python code.""" + from _typeshed import StrOrBytesPath from collections.abc import Callable, Mapping from typing import Any, TypeVar @@ -10,22 +10,23 @@ __all__ = ["run", "runctx", "Profile"] def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: """Run statement under profiler optionally saving results in filename -This function takes a single argument that can be passed to the -"exec" statement, and an optional file name. In all cases this -routine attempts to "exec" its first argument and gather profiling -statistics from the execution. If no file name is present, then this -function automatically prints a simple profiling report, sorted by the -standard name string (file/line/function-name) that is presented in -each line. -""" + This function takes a single argument that can be passed to the + "exec" statement, and an optional file name. In all cases this + routine attempts to "exec" its first argument and gather profiling + statistics from the execution. If no file name is present, then this + function automatically prints a simple profiling report, sorted by the + standard name string (file/line/function-name) that is presented in + each line. + """ + def runctx( statement: str, globals: dict[str, Any], locals: Mapping[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: """Run statement under profiler, supplying your own globals and locals, -optionally saving results in filename. + optionally saving results in filename. -statement and filename have the same semantics as profile.run -""" + statement and filename have the same semantics as profile.run + """ _T = TypeVar("_T") _P = ParamSpec("_P") @@ -34,41 +35,42 @@ _Label: TypeAlias = tuple[str, int, str] class Profile: """Profiler class. -self.cur is always a tuple. Each such tuple corresponds to a stack -frame that is currently active (self.cur[-2]). The following are the -definitions of its members. We use this external "parallel stack" to -avoid contaminating the program that we are profiling. (old profiler -used to write into the frames local dictionary!!) Derived classes -can change the definition of some entries, as long as they leave -[-2:] intact (frame and previous tuple). In case an internal error is -detected, the -3 element is used as the function name. + self.cur is always a tuple. Each such tuple corresponds to a stack + frame that is currently active (self.cur[-2]). The following are the + definitions of its members. We use this external "parallel stack" to + avoid contaminating the program that we are profiling. (old profiler + used to write into the frames local dictionary!!) Derived classes + can change the definition of some entries, as long as they leave + [-2:] intact (frame and previous tuple). In case an internal error is + detected, the -3 element is used as the function name. + + [ 0] = Time that needs to be charged to the parent frame's function. + It is used so that a function call will not have to access the + timing data for the parent frame. + [ 1] = Total time spent in this frame's function, excluding time in + subfunctions (this latter is tallied in cur[2]). 
+ [ 2] = Total time spent in subfunctions, excluding time executing the + frame's function (this latter is tallied in cur[1]). + [-3] = Name of the function that corresponds to this frame. + [-2] = Actual frame that we correspond to (used to sync exception handling). + [-1] = Our parent 6-tuple (corresponds to frame.f_back). -[ 0] = Time that needs to be charged to the parent frame's function. - It is used so that a function call will not have to access the - timing data for the parent frame. -[ 1] = Total time spent in this frame's function, excluding time in - subfunctions (this latter is tallied in cur[2]). -[ 2] = Total time spent in subfunctions, excluding time executing the - frame's function (this latter is tallied in cur[1]). -[-3] = Name of the function that corresponds to this frame. -[-2] = Actual frame that we correspond to (used to sync exception handling). -[-1] = Our parent 6-tuple (corresponds to frame.f_back). + Timing data for each function is stored as a 5-tuple in the dictionary + self.timings[]. The index is always the name stored in self.cur[-3]. + The following are the definitions of the members: -Timing data for each function is stored as a 5-tuple in the dictionary -self.timings[]. The index is always the name stored in self.cur[-3]. -The following are the definitions of the members: + [0] = The number of times this function was called, not counting direct + or indirect recursion, + [1] = Number of times this function appears on the stack, minus one + [2] = Total time spent internal to this function + [3] = Cumulative time that this function was present on the stack. In + non-recursive functions, this is the total execution time from start + to finish of each invocation of a function, including time spent in + all subfunctions. + [4] = A dictionary indicating for each function name, the number of times + it was called by us. + """ -[0] = The number of times this function was called, not counting direct - or indirect recursion, -[1] = Number of times this function appears on the stack, minus one -[2] = Total time spent internal to this function -[3] = Cumulative time that this function was present on the stack. In - non-recursive functions, this is the total execution time from start - to finish of each invocation of a function, including time spent in - all subfunctions. -[4] = A dictionary indicating for each function name, the number of times - it was called by us. -""" bias: int stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi index 474a21e4d2fe8..fe07c90aea1af 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pstats.pyi @@ -1,5 +1,5 @@ -"""Class for printing reports on profiled python code. -""" +"""Class for printing reports on profiled python code.""" + import sys from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -20,8 +20,8 @@ _Selector: TypeAlias = str | float | int if sys.version_info >= (3, 11): class SortKey(StrEnum): - """An enumeration. -""" + """An enumeration.""" + CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -34,8 +34,8 @@ if sys.version_info >= (3, 11): else: class SortKey(str, Enum): - """An enumeration. 
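The run()/runctx() entry points wrap the Profile machinery documented above; a minimal sketch in which busy() is just a throwaway function to profile and 'busy.prof' an arbitrary output name:

    import profile

    def busy(n: int) -> int:
        return sum(i * i for i in range(n))

    # runctx() takes explicit namespaces, so the statement can see busy()
    # no matter where it was defined; the stats are written to 'busy.prof'.
    profile.runctx("busy(100_000)", globals(), locals(), filename="busy.prof")

    # profile.run("busy(100_000)") would instead exec in __main__ and, with no
    # filename, print a report sorted by the file/line/function-name string.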
-""" + """An enumeration.""" + CALLS = "calls" CUMULATIVE = "cumulative" FILENAME = "filename" @@ -48,8 +48,8 @@ else: @dataclass(unsafe_hash=True) class FunctionProfile: - """FunctionProfile(ncalls: str, tottime: float, percall_tottime: float, cumtime: float, percall_cumtime: float, file_name: str, line_number: int) -""" + """FunctionProfile(ncalls: str, tottime: float, percall_tottime: float, cumtime: float, percall_cumtime: float, file_name: str, line_number: int)""" + ncalls: str tottime: float percall_tottime: float @@ -60,8 +60,8 @@ class FunctionProfile: @dataclass(unsafe_hash=True) class StatsProfile: - """Class for keeping track of an item in inventory. -""" + """Class for keeping track of an item in inventory.""" + total_tt: float func_profiles: dict[str, FunctionProfile] @@ -69,36 +69,37 @@ _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] class Stats: """This class is used for creating reports from data generated by the -Profile class. It is a "friend" of that class, and imports data either -by direct access to members of Profile class, or by reading in a dictionary -that was emitted (via marshal) from the Profile class. - -The big change from the previous Profiler (in terms of raw functionality) -is that an "add()" method has been provided to combine Stats from -several distinct profile runs. Both the constructor and the add() -method now take arbitrarily many file names as arguments. - -All the print methods now take an argument that indicates how many lines -to print. If the arg is a floating-point number between 0 and 1.0, then -it is taken as a decimal percentage of the available lines to be printed -(e.g., .1 means print 10% of all available lines). If it is an integer, -it is taken to mean the number of lines of data that you wish to have -printed. - -The sort_stats() method now processes some additional options (i.e., in -addition to the old -1, 0, 1, or 2 that are respectively interpreted as -'stdname', 'calls', 'time', and 'cumulative'). It takes either an -arbitrary number of quoted strings or SortKey enum to select the sort -order. - -For example sort_stats('time', 'name') or sort_stats(SortKey.TIME, -SortKey.NAME) sorts on the major key of 'internal function time', and on -the minor key of 'the name of the function'. Look at the two tables in -sort_stats() and get_sort_arg_defs(self) for more examples. - -All methods return self, so you can string together commands like: - Stats('foo', 'goo').strip_dirs().sort_stats('calls'). print_stats(5).print_callers(5) -""" + Profile class. It is a "friend" of that class, and imports data either + by direct access to members of Profile class, or by reading in a dictionary + that was emitted (via marshal) from the Profile class. + + The big change from the previous Profiler (in terms of raw functionality) + is that an "add()" method has been provided to combine Stats from + several distinct profile runs. Both the constructor and the add() + method now take arbitrarily many file names as arguments. + + All the print methods now take an argument that indicates how many lines + to print. If the arg is a floating-point number between 0 and 1.0, then + it is taken as a decimal percentage of the available lines to be printed + (e.g., .1 means print 10% of all available lines). If it is an integer, + it is taken to mean the number of lines of data that you wish to have + printed. 
+ + The sort_stats() method now processes some additional options (i.e., in + addition to the old -1, 0, 1, or 2 that are respectively interpreted as + 'stdname', 'calls', 'time', and 'cumulative'). It takes either an + arbitrary number of quoted strings or SortKey enum to select the sort + order. + + For example sort_stats('time', 'name') or sort_stats(SortKey.TIME, + SortKey.NAME) sorts on the major key of 'internal function time', and on + the minor key of 'the name of the function'. Look at the two tables in + sort_stats() and get_sort_arg_defs(self) for more examples. + + All methods return self, so you can string together commands like: + Stats('foo', 'goo').strip_dirs().sort_stats('calls'). print_stats(5).print_callers(5) + """ + sort_arg_dict_default: _SortArgDict def __init__( self, @@ -112,11 +113,11 @@ All methods return self, so you can string together commands like: def get_top_level_stats(self) -> None: ... def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... def dump_stats(self, filename: StrOrBytesPath) -> None: - """Write the profile data to a file we know how to load back. -""" + """Write the profile data to a file we know how to load back.""" + def get_sort_arg_defs(self) -> _SortArgDict: - """Expand all abbreviations that are unique. -""" + """Expand all abbreviations that are unique.""" + @overload def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload @@ -127,10 +128,11 @@ All methods return self, so you can string together commands like: def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... def get_stats_profile(self) -> StatsProfile: """This method returns an instance of StatsProfile, which contains a mapping -of function names to instances of FunctionProfile. Each FunctionProfile -instance holds information related to the function's profile such as how -long the function took to run, how many times it was called, etc... -""" + of function names to instances of FunctionProfile. Each FunctionProfile + instance holds information related to the function's profile such as how + long the function took to run, how many times it was called, etc... + """ + def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... def print_stats(self, *amount: _Selector) -> Self: ... def print_callees(self, *amount: _Selector) -> Self: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi index 52a1824fd7a53..28d5ae4280124 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pty.pyi @@ -1,5 +1,5 @@ -"""Pseudo terminal utilities. -""" +"""Pseudo terminal utilities.""" + import sys from collections.abc import Callable, Iterable from typing import Final @@ -16,41 +16,42 @@ if sys.platform != "win32": CHILD: Final = 0 def openpty() -> tuple[int, int]: """openpty() -> (master_fd, slave_fd) -Open a pty master/slave pair, using os.openpty() if possible. -""" - + Open a pty master/slave pair, using os.openpty() if possible. + """ if sys.version_info < (3, 14): if sys.version_info >= (3, 12): @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") def master_open() -> tuple[int, str]: """master_open() -> (master_fd, slave_name) -Open a pty master and return the fd, and the filename of the slave end. -Deprecated, use openpty() instead. -""" + Open a pty master and return the fd, and the filename of the slave end. 
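Loading such a dump back with pstats ties the two modules together; 'busy.prof' refers to the file written in the profile sketch above, and SortKey/get_stats_profile() require the Python versions gated in this stub:

    import pstats
    from pstats import SortKey

    stats = pstats.Stats("busy.prof")
    stats.strip_dirs().sort_stats(SortKey.CUMULATIVE).print_stats(5)

    summary = stats.get_stats_profile()       # StatsProfile
    for name, fp in summary.func_profiles.items():
        print(name, fp.ncalls, fp.cumtime)    # FunctionProfile fields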
+ Deprecated, use openpty() instead. + """ + @deprecated("Deprecated since Python 3.12; removed in Python 3.14. Use `openpty()` instead.") def slave_open(tty_name: str) -> int: """slave_open(tty_name) -> slave_fd -Open the pty slave and acquire the controlling terminal, returning -opened filedescriptor. -Deprecated, use openpty() instead. -""" + Open the pty slave and acquire the controlling terminal, returning + opened filedescriptor. + Deprecated, use openpty() instead. + """ else: def master_open() -> tuple[int, str]: """master_open() -> (master_fd, slave_name) - Open a pty master and return the fd, and the filename of the slave end. - Deprecated, use openpty() instead. -""" + Open a pty master and return the fd, and the filename of the slave end. + Deprecated, use openpty() instead. + """ + def slave_open(tty_name: str) -> int: """slave_open(tty_name) -> slave_fd - Open the pty slave and acquire the controlling terminal, returning - opened filedescriptor. - Deprecated, use openpty() instead. -""" + Open the pty slave and acquire the controlling terminal, returning + opened filedescriptor. + Deprecated, use openpty() instead. + """ def fork() -> tuple[int, int]: """fork() -> (pid, master_fd) -Fork and make the child a session leader with a controlling terminal. -""" + Fork and make the child a session leader with a controlling terminal. + """ + def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: - """Create a spawned process. -""" + """Create a spawned process.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi index 5a257d0b571ba..ceb7dc7cf7453 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pwd.pyi @@ -7,6 +7,7 @@ pw_name, pw_passwd, pw_uid, pw_gid, pw_gecos, pw_dir, pw_shell. The uid and gid items are integers, all others are strings. An exception is raised if the entry asked for cannot be found. """ + import sys from _typeshed import structseq from typing import Any, Final, final @@ -16,54 +17,56 @@ if sys.platform != "win32": class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): """pwd.struct_passwd: Results from getpw*() routines. -This object may be accessed either as a tuple of - (pw_name,pw_passwd,pw_uid,pw_gid,pw_gecos,pw_dir,pw_shell) -or via the object attributes as named in the above tuple. -""" + This object may be accessed either as a tuple of + (pw_name,pw_passwd,pw_uid,pw_gid,pw_gecos,pw_dir,pw_shell) + or via the object attributes as named in the above tuple. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("pw_name", "pw_passwd", "pw_uid", "pw_gid", "pw_gecos", "pw_dir", "pw_shell") @property def pw_name(self) -> str: - """user name -""" + """user name""" + @property def pw_passwd(self) -> str: - """password -""" + """password""" + @property def pw_uid(self) -> int: - """user id -""" + """user id""" + @property def pw_gid(self) -> int: - """group id -""" + """group id""" + @property def pw_gecos(self) -> str: - """real name -""" + """real name""" + @property def pw_dir(self) -> str: - """home directory -""" + """home directory""" + @property def pw_shell(self) -> str: - """shell program -""" + """shell program""" def getpwall() -> list[struct_passwd]: """Return a list of all available password database entries, in arbitrary order. -See help(pwd) for more on password database entries. -""" + See help(pwd) for more on password database entries. 
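A short Unix-only sketch of the pwd lookups documented above:

    import pwd

    root = pwd.getpwuid(0)                 # lookup by numeric user id
    print(root.pw_name, root.pw_dir, root.pw_shell)
    same = pwd.getpwnam(root.pw_name)      # lookup by user name
    assert same.pw_uid == 0
    print(len(pwd.getpwall()))             # all entries, in arbitrary order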
+ """ + def getpwuid(uid: int, /) -> struct_passwd: """Return the password database entry for the given numeric user ID. -See `help(pwd)` for more on password database entries. -""" + See `help(pwd)` for more on password database entries. + """ + def getpwnam(name: str, /) -> struct_passwd: """Return the password database entry for the given user name. -See `help(pwd)` for more on password database entries. -""" + See `help(pwd)` for more on password database entries. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi index 50eaf660cd072..65c5f48879fa2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/py_compile.pyi @@ -2,6 +2,7 @@ This module has intimate knowledge of the format of .pyc files. """ + import enum import sys from typing import AnyStr @@ -10,31 +11,32 @@ __all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] class PyCompileError(Exception): """Exception raised when an error occurs while attempting to -compile the file. + compile the file. -To raise this exception, use + To raise this exception, use - raise PyCompileError(exc_type,exc_value,file[,msg]) + raise PyCompileError(exc_type,exc_value,file[,msg]) -where + where - exc_type: exception type to be used in error message - type name can be accesses as class variable - 'exc_type_name' + exc_type: exception type to be used in error message + type name can be accesses as class variable + 'exc_type_name' - exc_value: exception value to be used in error message - can be accesses as class variable 'exc_value' + exc_value: exception value to be used in error message + can be accesses as class variable 'exc_value' - file: name of file being compiled to be used in error message - can be accesses as class variable 'file' + file: name of file being compiled to be used in error message + can be accesses as class variable 'file' - msg: string message to be written as error message - If no value is given, a default exception message will be - given, consistent with 'standard' py_compile output. - message (or default) can be accesses as class variable - 'msg' + msg: string message to be written as error message + If no value is given, a default exception message will be + given, consistent with 'standard' py_compile output. + message (or default) can be accesses as class variable + 'msg' + + """ -""" exc_type_name: str exc_value: BaseException file: str @@ -42,8 +44,8 @@ where def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): - """An enumeration. -""" + """An enumeration.""" + TIMESTAMP = 1 CHECKED_HASH = 2 UNCHECKED_HASH = 3 @@ -60,47 +62,47 @@ def compile( ) -> AnyStr | None: """Byte-compile one Python source file to Python bytecode. -:param file: The source file name. -:param cfile: The target byte compiled file name. When not given, this - defaults to the PEP 3147/PEP 488 location. -:param dfile: Purported file name, i.e. the file name that shows up in - error messages. Defaults to the source file name. -:param doraise: Flag indicating whether or not an exception should be - raised when a compile error is found. If an exception occurs and this - flag is set to False, a string indicating the nature of the exception - will be printed, and the function will return to the caller. 
If an - exception occurs and this flag is set to True, a PyCompileError - exception will be raised. -:param optimize: The optimization level for the compiler. Valid values - are -1, 0, 1 and 2. A value of -1 means to use the optimization - level of the current interpreter, as given by -O command line options. -:param invalidation_mode: -:param quiet: Return full output with False or 0, errors only with 1, - and no output with 2. - -:return: Path to the resulting byte compiled file. - -Note that it isn't necessary to byte-compile Python modules for -execution efficiency -- Python itself byte-compiles a module when -it is loaded, and if it can, writes out the bytecode to the -corresponding .pyc file. - -However, if a Python installation is shared between users, it is a -good idea to byte-compile all modules upon installation, since -other users may not be able to write in the source directories, -and thus they won't be able to write the .pyc file, and then -they would be byte-compiling every module each time it is loaded. -This can slow down program start-up considerably. - -See compileall.py for a script/module that uses this module to -byte-compile all installed files (or all files in selected -directories). - -Do note that FileExistsError is raised if cfile ends up pointing at a -non-regular file or symlink. Because the compilation uses a file renaming, -the resulting file would be regular and thus not the same type of file as -it was previously. -""" + :param file: The source file name. + :param cfile: The target byte compiled file name. When not given, this + defaults to the PEP 3147/PEP 488 location. + :param dfile: Purported file name, i.e. the file name that shows up in + error messages. Defaults to the source file name. + :param doraise: Flag indicating whether or not an exception should be + raised when a compile error is found. If an exception occurs and this + flag is set to False, a string indicating the nature of the exception + will be printed, and the function will return to the caller. If an + exception occurs and this flag is set to True, a PyCompileError + exception will be raised. + :param optimize: The optimization level for the compiler. Valid values + are -1, 0, 1 and 2. A value of -1 means to use the optimization + level of the current interpreter, as given by -O command line options. + :param invalidation_mode: + :param quiet: Return full output with False or 0, errors only with 1, + and no output with 2. + + :return: Path to the resulting byte compiled file. + + Note that it isn't necessary to byte-compile Python modules for + execution efficiency -- Python itself byte-compiles a module when + it is loaded, and if it can, writes out the bytecode to the + corresponding .pyc file. + + However, if a Python installation is shared between users, it is a + good idea to byte-compile all modules upon installation, since + other users may not be able to write in the source directories, + and thus they won't be able to write the .pyc file, and then + they would be byte-compiling every module each time it is loaded. + This can slow down program start-up considerably. + + See compileall.py for a script/module that uses this module to + byte-compile all installed files (or all files in selected + directories). + + Do note that FileExistsError is raised if cfile ends up pointing at a + non-regular file or symlink. Because the compilation uses a file renaming, + the resulting file would be regular and thus not the same type of file as + it was previously. 
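A minimal sketch of py_compile.compile as described above; "example.py" is a hypothetical source file:

    import py_compile

    # doraise=True raises PyCompileError on syntax errors instead of printing;
    # "example.py" is hypothetical and must exist for this to succeed.
    pyc_path = py_compile.compile("example.py", doraise=True, optimize=0)
    print(pyc_path)    # byte-compiled file in the PEP 3147 __pycache__ location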
+ """ if sys.version_info >= (3, 10): def main() -> None: ... @@ -109,11 +111,11 @@ else: def main(args: list[str] | None = None) -> int: """Compile several source files. - The files named in 'args' (or on the command line, if 'args' is - not specified) are compiled and the resulting bytecode is cached - in the normal manner. This function does not search a directory - structure to locate source files; it only compiles files named - explicitly. If '-' is the only parameter in args, the list of - files is taken from standard input. + The files named in 'args' (or on the command line, if 'args' is + not specified) are compiled and the resulting bytecode is cached + in the normal manner. This function does not search a directory + structure to locate source files; it only compiles files named + explicitly. If '-' is the only parameter in args, the list of + files is taken from standard input. - """ + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi index b6e3bb27bb21a..ad8a1aead66a0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyclbr.pyi @@ -40,14 +40,15 @@ string giving the name of the super class. Since import statements are recognized and imported modules are scanned as well, this shouldn't happen often. """ + import sys from collections.abc import Mapping, Sequence __all__ = ["readmodule", "readmodule_ex", "Class", "Function"] class _Object: - """Information about Python class or function. -""" + """Information about Python class or function.""" + module: str name: str file: int @@ -70,8 +71,8 @@ class _Object: def __init__(self, module: str, name: str, file: str, lineno: int, parent: _Object | None) -> None: ... class Function(_Object): - """Information about a Python function, including methods. -""" + """Information about a Python function, including methods.""" + if sys.version_info >= (3, 10): is_async: bool @@ -94,8 +95,8 @@ class Function(_Object): def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... class Class(_Object): - """Information about a Python class. -""" + """Information about a Python class.""" + super: list[Class | str] | None methods: dict[str, int] parent: Class | None @@ -121,12 +122,13 @@ class Class(_Object): def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: """Return Class objects for the top-level classes in module. -This is the original interface, before Functions were added. -""" + This is the original interface, before Functions were added. + """ + def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: """Return a dictionary with all functions and classes in module. -Search for module in PATH + sys.path. -If possible, include imported superclasses. -Do this by reading source, without importing (and executing) it. -""" + Search for module in PATH + sys.path. + If possible, include imported superclasses. + Do this by reading source, without importing (and executing) it. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi index 62f58ffaf7987..cd5a5c1b94a60 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pydoc.pyi @@ -36,6 +36,7 @@ This can be overridden by setting the PYTHONDOCS environment variable to a different URL or to a local directory containing the Library Reference Manual pages. """ + import sys from _typeshed import OptExcInfo, SupportsWrite, Unused from abc import abstractmethod @@ -60,58 +61,54 @@ class _Pager(Protocol): def __call__(self, text: str, title: str = "") -> None: ... def pathdirs() -> list[str]: - """Convert sys.path into a list of absolute, existing, unique paths. -""" + """Convert sys.path into a list of absolute, existing, unique paths.""" + def getdoc(object: object) -> str: - """Get the doc string or comments for an object. -""" + """Get the doc string or comments for an object.""" + def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: - """Split a doc string into a synopsis line (if any) and the rest. -""" + """Split a doc string into a synopsis line (if any) and the rest.""" + def classname(object: object, modname: str) -> str: - """Get a class name and qualify it with a module name if necessary. -""" + """Get a class name and qualify it with a module name if necessary.""" + def isdata(object: object) -> bool: - """Check if an object is of a type that probably means it's data. -""" + """Check if an object is of a type that probably means it's data.""" + def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: - """Do a series of global replacements on a string. -""" + """Do a series of global replacements on a string.""" + def cram(text: str, maxlen: int) -> str: - """Omit part of a string if needed to make it fit in a maximum length. -""" + """Omit part of a string if needed to make it fit in a maximum length.""" + def stripid(text: str) -> str: - """Remove the hexadecimal id from a Python object representation. -""" + """Remove the hexadecimal id from a Python object representation.""" + def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: - """Decide whether to show documentation on a variable. -""" + """Decide whether to show documentation on a variable.""" + def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: - """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods. -""" + """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods.""" if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13.") def ispackage(path: str) -> bool: # undocumented - """Guess whether a path refers to a package directory. -""" + """Guess whether a path refers to a package directory.""" else: def ispackage(path: str) -> bool: # undocumented - """Guess whether a path refers to a package directory. -""" + """Guess whether a path refers to a package directory.""" def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: - """Return the one-line summary of a file object, if present -""" + """Return the one-line summary of a file object, if present""" + def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: - """Get the one-line summary out of a module file. 
-""" + """Get the one-line summary out of a module file.""" class ErrorDuringImport(Exception): - """Errors that occurred while trying to import something to document it. -""" + """Errors that occurred while trying to import something to document it.""" + filename: str exc: type[BaseException] | None value: BaseException | None @@ -119,57 +116,56 @@ class ErrorDuringImport(Exception): def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... def importfile(path: str) -> ModuleType: - """Import a Python source file or compiled file given its path. -""" + """Import a Python source file or compiled file given its path.""" + def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: """Import a module; handle errors; return None if the module isn't found. -If the module *is* found but an exception occurs, it's wrapped in an -ErrorDuringImport exception and reraised. Unlike __import__, if a -package path is specified, the module at the end of the path is returned, -not the package at the beginning. If the optional 'forceload' argument -is 1, we reload the module from disk (unless it's a dynamic extension). -""" + If the module *is* found but an exception occurs, it's wrapped in an + ErrorDuringImport exception and reraised. Unlike __import__, if a + package path is specified, the module at the end of the path is returned, + not the package at the beginning. If the optional 'forceload' argument + is 1, we reload the module from disk (unless it's a dynamic extension). + """ class Doc: PYTHONDOCS: str def document(self, object: object, name: str | None = None, *args: Any) -> str: - """Generate documentation for an object. -""" + """Generate documentation for an object.""" + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docclass(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docother(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + @abstractmethod def docdata(self, object: object, name: str | None = None, *args: Any) -> str: - """Raise an exception for unimplemented types. -""" + """Raise an exception for unimplemented types.""" + def getdocloc(self, object: object, basedir: str = ...) -> str | None: - """Return the location of module docs or None -""" + """Return the location of module docs or None""" class HTMLRepr(Repr): - """Class for safely making an HTML representation of a Python object. 
-""" + """Class for safely making an HTML representation of a Python object.""" + def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... @@ -180,18 +176,17 @@ class HTMLRepr(Repr): def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): - """Formatter class for HTML documentation. -""" + """Formatter class for HTML documentation.""" + _repr_instance: HTMLRepr repr = _repr_instance.repr escape = _repr_instance.escape def page(self, title: str, contents: str) -> str: - """Format an HTML page. -""" + """Format an HTML page.""" if sys.version_info >= (3, 11): def heading(self, title: str, extras: str = "") -> str: - """Format a page heading. -""" + """Format a page heading.""" + def section( self, title: str, @@ -202,15 +197,14 @@ class HTMLDoc(Doc): marginalia: str | None = None, gap: str = " ", ) -> str: - """Format a section with a heading. -""" + """Format a section with a heading.""" + def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: - """Format a list of items into a multi-column list. -""" + """Format a list of items into a multi-column list.""" else: def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: - """Format a page heading. -""" + """Format a page heading.""" + def section( self, title: str, @@ -222,31 +216,30 @@ class HTMLDoc(Doc): marginalia: str | None = None, gap: str = " ", ) -> str: - """Format a section with a heading. -""" + """Format a section with a heading.""" + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: - """Format a list of items into a multi-column list. -""" + """Format a list of items into a multi-column list.""" def bigsection(self, title: str, *args: Any) -> str: - """Format a section with a big heading. -""" + """Format a section with a big heading.""" + def preformat(self, text: str) -> str: - """Format literal preformatted text. -""" + """Format literal preformatted text.""" + def grey(self, text: str) -> str: ... def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: - """Make a link for an identifier, given name-to-URL mappings. -""" + """Make a link for an identifier, given name-to-URL mappings.""" + def classlink(self, object: object, modname: str) -> str: - """Make a link for a class. -""" + """Make a link for a class.""" + def modulelink(self, object: object) -> str: - """Make a link for a module. -""" + """Make a link for a module.""" + def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: - """Make a link for a module or package to display in an index. -""" + """Make a link for a module or package to display in an index.""" + def markup( self, text: str, @@ -256,16 +249,15 @@ class HTMLDoc(Doc): methods: Mapping[str, str] = {}, ) -> str: """Mark up some plain text, given a context of symbols to look for. -Each context dictionary maps object names to anchor names. -""" - def formattree( - self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None - ) -> str: - """Produce HTML for a class tree as given by inspect.getclasstree(). -""" + Each context dictionary maps object names to anchor names. 
+ """ + + def formattree(self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None) -> str: + """Produce HTML for a class tree as given by inspect.getclasstree().""" + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: - """Produce HTML documentation for a module object. -""" + """Produce HTML documentation for a module object.""" + def docclass( self, object: object, @@ -275,14 +267,13 @@ Each context dictionary maps object names to anchor names. classes: Mapping[str, str] = {}, *ignored: Unused, ) -> str: - """Produce HTML documentation for a class object. -""" + """Produce HTML documentation for a class object.""" + def formatvalue(self, object: object) -> str: - """Format an argument default value as text. -""" + """Format an argument default value as text.""" + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: - """Produce HTML documentation for a data object. -""" + """Produce HTML documentation for a data object.""" if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -295,18 +286,17 @@ Each context dictionary maps object names to anchor names. cl: type | None = None, homecls: type | None = None, ) -> str: - """Produce HTML documentation for a function or method object. -""" + """Produce HTML documentation for a function or method object.""" + def docproperty( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused ) -> str: - """Produce html documentation for a data descriptor. -""" + """Produce html documentation for a data descriptor.""" + def docdata( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused ) -> str: - """Produce html documentation for a data descriptor. -""" + """Produce html documentation for a data descriptor.""" else: def docroutine( # type: ignore[override] self, @@ -318,29 +308,26 @@ Each context dictionary maps object names to anchor names. methods: Mapping[str, str] = {}, cl: type | None = None, ) -> str: - """Produce HTML documentation for a function or method object. -""" + """Produce HTML documentation for a function or method object.""" + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce html documentation for a data descriptor. -""" + """Produce html documentation for a data descriptor.""" + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce html documentation for a data descriptor. -""" + """Produce html documentation for a data descriptor.""" if sys.version_info >= (3, 11): def parentlink(self, object: type | ModuleType, modname: str) -> str: - """Make a link for the enclosing class or module. -""" + """Make a link for the enclosing class or module.""" def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: - """Generate an HTML index for a directory of modules. -""" + """Generate an HTML index for a directory of modules.""" + def filelink(self, url: str, path: str) -> str: - """Make a link to source file. -""" + """Make a link to source file.""" class TextRepr(Repr): - """Class for safely making a text representation of a Python object. 
-""" + """Class for safely making a text representation of a Python object.""" + def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... @@ -348,30 +335,29 @@ class TextRepr(Repr): def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): - """Formatter class for text documentation. -""" + """Formatter class for text documentation.""" + _repr_instance: TextRepr repr = _repr_instance.repr def bold(self, text: str) -> str: - """Format a string in bold by overstriking. -""" + """Format a string in bold by overstriking.""" + def indent(self, text: str, prefix: str = " ") -> str: - """Indent text by prepending a given prefix to each line. -""" + """Indent text by prepending a given prefix to each line.""" + def section(self, title: str, contents: str) -> str: - """Format a section with a given heading. -""" + """Format a section with a given heading.""" + def formattree( self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" ) -> str: - """Render in text a class tree as returned by inspect.getclasstree(). -""" + """Render in text a class tree as returned by inspect.getclasstree().""" + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Unused) -> str: - """Produce text documentation for a given class object. -""" + """Produce text documentation for a given class object.""" + def formatvalue(self, object: object) -> str: - """Format an argument default value as text. -""" + """Format an argument default value as text.""" if sys.version_info >= (3, 11): def docroutine( # type: ignore[override] self, @@ -381,21 +367,21 @@ class TextDoc(Doc): cl: Any | None = None, homecls: Any | None = None, ) -> str: - """Produce text documentation for a function or method object. -""" + """Produce text documentation for a function or method object.""" + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Unused) -> str: - """Produce text documentation for a given module object. -""" + """Produce text documentation for a given module object.""" + def docproperty( self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None, *ignored: Unused ) -> str: - """Produce text documentation for a data descriptor. -""" + """Produce text documentation for a data descriptor.""" + def docdata( self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None, *ignored: Unused ) -> str: - """Produce text documentation for a data descriptor. -""" + """Produce text documentation for a data descriptor.""" + def docother( self, object: object, @@ -406,21 +392,20 @@ class TextDoc(Doc): maxlen: int | None = None, doc: Any | None = None, ) -> str: - """Produce text documentation for a data object. -""" + """Produce text documentation for a data object.""" else: def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a function or method object. -""" + """Produce text documentation for a function or method object.""" + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a given module object. 
-""" + """Produce text documentation for a given module object.""" + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a data descriptor. -""" + """Produce text documentation for a data descriptor.""" + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: # type: ignore[override] - """Produce text documentation for a data descriptor. -""" + """Produce text documentation for a data descriptor.""" + def docother( # type: ignore[override] self, object: object, @@ -430,46 +415,40 @@ class TextDoc(Doc): maxlen: int | None = None, doc: Any | None = None, ) -> str: - """Produce text documentation for a data object. -""" + """Produce text documentation for a data object.""" if sys.version_info >= (3, 13): def pager(text: str, title: str = "") -> None: - """The first time this is called, determine what kind of pager to use. -""" + """The first time this is called, determine what kind of pager to use.""" else: def pager(text: str) -> None: - """The first time this is called, determine what kind of pager to use. -""" + """The first time this is called, determine what kind of pager to use.""" def plain(text: str) -> str: - """Remove boldface formatting from text. -""" + """Remove boldface formatting from text.""" + def describe(thing: Any) -> str: - """Produce a short description of the given thing. -""" + """Produce a short description of the given thing.""" + def locate(path: str, forceload: bool = ...) -> object: - """Locate an object by name or dotted path, importing as necessary. -""" + """Locate an object by name or dotted path, importing as necessary.""" if sys.version_info >= (3, 13): def get_pager() -> _Pager: - """Decide what method to use for paging through text. -""" + """Decide what method to use for paging through text.""" + def pipe_pager(text: str, cmd: str, title: str = "") -> None: - """Page through text by feeding it to another program. -""" + """Page through text by feeding it to another program.""" + def tempfile_pager(text: str, cmd: str, title: str = "") -> None: - """Page through text by invoking a program on a temporary file. -""" + """Page through text by invoking a program on a temporary file.""" + def tty_pager(text: str, title: str = "") -> None: - """Page through text on a text terminal. -""" - def plain_pager(text: str, title: str = "") -> None: - """Simply print unformatted text. This is the ultimate fallback. -""" + """Page through text on a text terminal.""" + def plain_pager(text: str, title: str = "") -> None: + """Simply print unformatted text. This is the ultimate fallback.""" # For backwards compatibility. getpager = get_pager pipepager = pipe_pager @@ -478,32 +457,30 @@ if sys.version_info >= (3, 13): plainpager = plain_pager else: def getpager() -> Callable[[str], None]: - """Decide what method to use for paging through text. -""" + """Decide what method to use for paging through text.""" + def pipepager(text: str, cmd: str) -> None: - """Page through text by feeding it to another program. -""" + """Page through text by feeding it to another program.""" + def tempfilepager(text: str, cmd: str) -> None: - """Page through text by invoking a program on a temporary file. -""" + """Page through text by invoking a program on a temporary file.""" + def ttypager(text: str) -> None: - """Page through text on a text terminal. 
-""" + """Page through text on a text terminal.""" + def plainpager(text: str) -> None: - """Simply print unformatted text. This is the ultimate fallback. -""" + """Simply print unformatted text. This is the ultimate fallback.""" text: TextDoc html: HTMLDoc def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: - """Given an object or a path to an object, get the object and its name. -""" + """Given an object or a path to an object, get the object and its name.""" + def render_doc( thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None ) -> str: - """Render text documentation, given an object or a path to an object. -""" + """Render text documentation, given an object or a path to an object.""" if sys.version_info >= (3, 11): def doc( @@ -513,8 +490,7 @@ if sys.version_info >= (3, 11): output: SupportsWrite[str] | None = None, is_cli: bool = False, ) -> None: - """Display text documentation, given an object or a path to an object. -""" + """Display text documentation, given an object or a path to an object.""" else: def doc( @@ -523,15 +499,13 @@ else: forceload: bool = ..., output: SupportsWrite[str] | None = None, ) -> None: - """Display text documentation, given an object or a path to an object. -""" + """Display text documentation, given an object or a path to an object.""" def writedoc(thing: str | object, forceload: bool = ...) -> None: - """Write HTML documentation to a file in the current directory. -""" + """Write HTML documentation to a file in the current directory.""" + def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: - """Write out HTML documentation for all modules in a directory tree. -""" + """Write out HTML documentation for all modules in a directory tree.""" class Helper: keywords: dict[str, str | tuple[str, str]] @@ -545,8 +519,7 @@ class Helper: def __call__(self, request: str | Helper | object = ...) -> None: ... def interact(self) -> None: ... def getline(self, prompt: str) -> str: - """Read one line, using input() when appropriate. -""" + """Read one line, using input() when appropriate.""" if sys.version_info >= (3, 11): def help(self, request: Any, is_cli: bool = False) -> None: ... else: @@ -564,8 +537,8 @@ class Helper: help: Helper class ModuleScanner: - """An interruptible scanner that searches module synopses. -""" + """An interruptible scanner that searches module synopses.""" + quit: bool def run( self, @@ -576,9 +549,8 @@ class ModuleScanner: ) -> None: ... def apropos(key: str) -> None: - """Print all the one-line module summaries that contain a substring. -""" + """Print all the one-line module summaries that contain a substring.""" + def ispath(x: object) -> TypeGuard[str]: ... def cli() -> None: - """Command-line interface (looks at sys.argv to decide what to do). -""" + """Command-line interface (looks at sys.argv to decide what to do).""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi index 70c6015b3d6a5..d58d06ba3151a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,5 +1,5 @@ -"""Python wrapper for Expat parser. 
-""" +"""Python wrapper for Expat parser.""" + from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable from pyexpat import errors as errors, model as model @@ -21,52 +21,56 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - """XML parser -""" + """XML parser""" + def Parse(self, data: str | ReadableBuffer, isfinal: bool = False, /) -> int: """Parse XML data. -'isfinal' should be true at end of input. -""" + 'isfinal' should be true at end of input. + """ + def ParseFile(self, file: SupportsRead[bytes], /) -> int: - """Parse XML data from file-like object. -""" + """Parse XML data from file-like object.""" + def SetBase(self, base: str, /) -> None: - """Set the base URL for the parser. -""" + """Set the base URL for the parser.""" + def GetBase(self) -> str | None: - """Return base URL string for the parser. -""" + """Return base URL string for the parser.""" + def GetInputContext(self) -> bytes | None: """Return the untranslated text of the input that caused the current event. -If the event was generated by a large amount of text (such as a start tag -for an element with many attributes), not all of the text may be available. -""" + If the event was generated by a large amount of text (such as a start tag + for an element with many attributes), not all of the text may be available. + """ + def ExternalEntityParserCreate(self, context: str | None, encoding: str = ..., /) -> XMLParserType: - """Create a parser for parsing an external entity based on the information passed to the ExternalEntityRefHandler. -""" + """Create a parser for parsing an external entity based on the information passed to the ExternalEntityRefHandler.""" + def SetParamEntityParsing(self, flag: int, /) -> int: """Controls parsing of parameter entities (including the external DTD subset). -Possible flag values are XML_PARAM_ENTITY_PARSING_NEVER, -XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE and -XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag -was successful. -""" + Possible flag values are XML_PARAM_ENTITY_PARSING_NEVER, + XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE and + XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag + was successful. + """ + def UseForeignDTD(self, flag: bool = True, /) -> None: """Allows the application to provide an artificial external subset if one is not specified as part of the document instance. -This readily allows the use of a 'default' document type controlled by the -application, while still getting the advantage of providing document type -information to the parser. 'flag' defaults to True if not provided. -""" + This readily allows the use of a 'default' document type controlled by the + application, while still getting the advantage of providing document type + information to the parser. 'flag' defaults to True if not provided. + """ + def GetReparseDeferralEnabled(self) -> bool: - """Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0. -""" + """Retrieve reparse deferral enabled status; always returns false with Expat <2.6.0.""" + def SetReparseDeferralEnabled(self, enabled: bool, /) -> None: - """Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0. -""" + """Enable/Disable reparse deferral; enabled by default with Expat >=2.6.0.""" + @property def intern(self) -> dict[str, str]: ... buffer_size: int @@ -111,14 +115,12 @@ information to the parser. 'flag' defaults to True if not provided. 
SkippedEntityHandler: Callable[[str, bool], Any] | None def ErrorString(code: int, /) -> str: - """Returns string error for given number. -""" + """Returns string error for given number.""" # intern is undocumented def ParserCreate( encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None ) -> XMLParserType: - """Return a new XML parser object. -""" + """Return a new XML parser object.""" expat_CAPI: CapsuleType diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi index a95389c17d03d..3b5bbeb23e998 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/errors.pyi @@ -1,5 +1,5 @@ -"""Constants used to describe error conditions. -""" +"""Constants used to describe error conditions.""" + import sys from typing import Final from typing_extensions import LiteralString diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi index 298a88cedd9e1..3de4eec9d8dd8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/pyexpat/model.pyi @@ -1,5 +1,5 @@ -"""Constants used to interpret content model information. -""" +"""Constants used to interpret content model information.""" + from typing import Final XML_CTYPE_ANY: Final = 2 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi index 5ba93db64c506..8ab929e446a33 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/queue.pyi @@ -1,5 +1,5 @@ -"""A multi-producer, multi-consumer queue. -""" +"""A multi-producer, multi-consumer queue.""" + import sys from _queue import Empty as Empty, SimpleQueue as SimpleQueue from _typeshed import SupportsRichComparisonT @@ -14,19 +14,18 @@ if sys.version_info >= (3, 13): _T = TypeVar("_T") class Full(Exception): - """Exception raised by Queue.put(block=0)/put_nowait(). -""" + """Exception raised by Queue.put(block=0)/put_nowait().""" if sys.version_info >= (3, 13): class ShutDown(Exception): - """Raised when put/get with shut-down queue. -""" + """Raised when put/get with shut-down queue.""" class Queue(Generic[_T]): """Create a queue object with a given maximum size. -If maxsize is <= 0, the queue size is infinite. -""" + If maxsize is <= 0, the queue size is infinite. + """ + maxsize: int mutex: Lock # undocumented @@ -44,119 +43,127 @@ If maxsize is <= 0, the queue size is infinite. def empty(self) -> bool: """Return True if the queue is empty, False otherwise (not reliable!). -This method is likely to be removed at some point. Use qsize() == 0 -as a direct substitute, but be aware that either approach risks a race -condition where a queue can grow before the result of empty() or -qsize() can be used. + This method is likely to be removed at some point. Use qsize() == 0 + as a direct substitute, but be aware that either approach risks a race + condition where a queue can grow before the result of empty() or + qsize() can be used. + + To create code that needs to wait for all queued tasks to be + completed, the preferred technique is to use the join() method. + """ -To create code that needs to wait for all queued tasks to be -completed, the preferred technique is to use the join() method. 
-""" def full(self) -> bool: """Return True if the queue is full, False otherwise (not reliable!). -This method is likely to be removed at some point. Use qsize() >= n -as a direct substitute, but be aware that either approach risks a race -condition where a queue can shrink before the result of full() or -qsize() can be used. -""" + This method is likely to be removed at some point. Use qsize() >= n + as a direct substitute, but be aware that either approach risks a race + condition where a queue can shrink before the result of full() or + qsize() can be used. + """ + def get(self, block: bool = True, timeout: float | None = None) -> _T: """Remove and return an item from the queue. -If optional args 'block' is true and 'timeout' is None (the default), -block if necessary until an item is available. If 'timeout' is -a non-negative number, it blocks at most 'timeout' seconds and raises -the Empty exception if no item was available within that time. -Otherwise ('block' is false), return an item if one is immediately -available, else raise the Empty exception ('timeout' is ignored -in that case). - -Raises ShutDown if the queue has been shut down and is empty, -or if the queue has been shut down immediately. -""" + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until an item is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the Empty exception if no item was available within that time. + Otherwise ('block' is false), return an item if one is immediately + available, else raise the Empty exception ('timeout' is ignored + in that case). + + Raises ShutDown if the queue has been shut down and is empty, + or if the queue has been shut down immediately. + """ + def get_nowait(self) -> _T: """Remove and return an item from the queue without blocking. -Only get an item if one is immediately available. Otherwise -raise the Empty exception. -""" + Only get an item if one is immediately available. Otherwise + raise the Empty exception. + """ if sys.version_info >= (3, 13): def shutdown(self, immediate: bool = False) -> None: """Shut-down the queue, making queue gets and puts raise ShutDown. -By default, gets will only raise once the queue is empty. Set -'immediate' to True to make gets raise immediately instead. + By default, gets will only raise once the queue is empty. Set + 'immediate' to True to make gets raise immediately instead. -All blocked callers of put() and get() will be unblocked. If -'immediate', a task is marked as done for each item remaining in -the queue, which may unblock callers of join(). -""" + All blocked callers of put() and get() will be unblocked. If + 'immediate', a task is marked as done for each item remaining in + the queue, which may unblock callers of join(). + """ def _get(self) -> _T: ... def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: """Put an item into the queue. -If optional args 'block' is true and 'timeout' is None (the default), -block if necessary until a free slot is available. If 'timeout' is -a non-negative number, it blocks at most 'timeout' seconds and raises -the Full exception if no free slot was available within that time. -Otherwise ('block' is false), put an item on the queue if a free slot -is immediately available, else raise the Full exception ('timeout' -is ignored in that case). + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until a free slot is available. 
If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the Full exception if no free slot was available within that time. + Otherwise ('block' is false), put an item on the queue if a free slot + is immediately available, else raise the Full exception ('timeout' + is ignored in that case). + + Raises ShutDown if the queue has been shut down. + """ -Raises ShutDown if the queue has been shut down. -""" def put_nowait(self, item: _T) -> None: """Put an item into the queue without blocking. -Only enqueue the item if a free slot is immediately available. -Otherwise raise the Full exception. -""" + Only enqueue the item if a free slot is immediately available. + Otherwise raise the Full exception. + """ + def _put(self, item: _T) -> None: ... def join(self) -> None: """Blocks until all items in the Queue have been gotten and processed. -The count of unfinished tasks goes up whenever an item is added to the -queue. The count goes down whenever a consumer thread calls task_done() -to indicate the item was retrieved and all work on it is complete. + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer thread calls task_done() + to indicate the item was retrieved and all work on it is complete. + + When the count of unfinished tasks drops to zero, join() unblocks. + """ -When the count of unfinished tasks drops to zero, join() unblocks. -""" def qsize(self) -> int: - """Return the approximate size of the queue (not reliable!). -""" + """Return the approximate size of the queue (not reliable!).""" + def _qsize(self) -> int: ... def task_done(self) -> None: """Indicate that a formerly enqueued task is complete. -Used by Queue consumer threads. For each get() used to fetch a task, -a subsequent call to task_done() tells the queue that the processing -on the task is complete. + Used by Queue consumer threads. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. -If a join() is currently blocking, it will resume when all items -have been processed (meaning that a task_done() call was received -for every item that had been put() into the queue). + If a join() is currently blocking, it will resume when all items + have been processed (meaning that a task_done() call was received + for every item that had been put() into the queue). -shutdown(immediate=True) calls task_done() for each remaining item in -the queue. + shutdown(immediate=True) calls task_done() for each remaining item in + the queue. + + Raises a ValueError if called more times than there were items + placed in the queue. + """ -Raises a ValueError if called more times than there were items -placed in the queue. -""" def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class PriorityQueue(Queue[SupportsRichComparisonT]): """Variant of Queue that retrieves open entries in priority order (lowest first). -Entries are typically tuples of the form: (priority number, data). -""" + Entries are typically tuples of the form: (priority number, data). + """ + queue: list[SupportsRichComparisonT] class LifoQueue(Queue[_T]): - """Variant of Queue that retrieves most recently added entries first. 
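A small producer/consumer sketch using the Queue methods documented above:

    import queue
    import threading

    q: queue.Queue[int] = queue.Queue(maxsize=2)

    def worker() -> None:
        while True:
            item = q.get()        # blocks until an item is available
            print("processed", item)
            q.task_done()         # one task_done() per get()

    threading.Thread(target=worker, daemon=True).start()
    for i in range(4):
        q.put(i)                  # blocks while the queue is full
    q.join()                      # returns once every item is marked done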
-""" + """Variant of Queue that retrieves most recently added entries first.""" + queue: list[_T] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi index bd56474b91b37..4594503f76e3c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/quopri.pyi @@ -1,5 +1,5 @@ -"""Conversions to/from quoted-printable transport encoding as per RFC 1521. -""" +"""Conversions to/from quoted-printable transport encoding as per RFC 1521.""" + from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite from typing import Protocol, type_check_only @@ -11,16 +11,18 @@ class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: """Read 'input', apply quoted-printable encoding, and write to 'output'. -'input' and 'output' are binary file objects. The 'quotetabs' flag -indicates whether embedded tabs and spaces should be quoted. Note that -line-ending tabs and spaces are always encoded, as per RFC 1521. -The 'header' flag indicates whether we are encoding spaces as _ as per RFC -1522. -""" + 'input' and 'output' are binary file objects. The 'quotetabs' flag + indicates whether embedded tabs and spaces should be quoted. Note that + line-ending tabs and spaces are always encoded, as per RFC 1521. + The 'header' flag indicates whether we are encoding spaces as _ as per RFC + 1522. + """ + def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: """Read 'input', apply quoted-printable decoding, and write to 'output'. -'input' and 'output' are binary file objects. -If 'header' is true, decode underscore as space (per RFC 1522). -""" + 'input' and 'output' are binary file objects. + If 'header' is true, decode underscore as space (per RFC 1522). + """ + def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi index 358c4bc120563..1ba0e2106407a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/random.pyi @@ -45,6 +45,7 @@ General notes on the underlying Mersenne Twister core generator: and is, therefore, threadsafe. """ + import _random import sys from _typeshed import SupportsLenAndGetItem @@ -89,22 +90,23 @@ _T = TypeVar("_T") class Random(_random.Random): """Random number generator base class used by bound module functions. -Used to instantiate instances of Random to get generators that don't -share state. + Used to instantiate instances of Random to get generators that don't + share state. -Class Random can also be subclassed if you want to use a different basic -generator of your own devising: in that case, override the following -methods: random(), seed(), getstate(), and setstate(). -Optionally, implement a getrandbits() method so that randrange() -can cover arbitrarily large ranges. + Class Random can also be subclassed if you want to use a different basic + generator of your own devising: in that case, override the following + methods: random(), seed(), getstate(), and setstate(). + Optionally, implement a getrandbits() method so that randrange() + can cover arbitrarily large ranges. 
+ + """ -""" VERSION: ClassVar[int] def __init__(self, x: int | float | str | bytes | bytearray | None = None) -> None: # noqa: Y041 """Initialize an instance. -Optional argument x controls seeding, as for Random.seed(). -""" + Optional argument x controls seeding, as for Random.seed(). + """ # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. @@ -115,42 +117,44 @@ Optional argument x controls seeding, as for Random.seed(). def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: # type: ignore[override] # noqa: Y041 """Initialize internal state from a seed. -The only supported seed types are None, int, float, -str, bytes, and bytearray. + The only supported seed types are None, int, float, + str, bytes, and bytearray. -None or no argument seeds from current time or from an operating -system specific randomness source if available. + None or no argument seeds from current time or from an operating + system specific randomness source if available. -If *a* is an int, all bits are used. + If *a* is an int, all bits are used. -For version 2 (the default), all of the bits are used if *a* is a str, -bytes, or bytearray. For version 1 (provided for reproducing random -sequences from older versions of Python), the algorithm for str and -bytes generates a narrower range of seeds. + For version 2 (the default), all of the bits are used if *a* is a str, + bytes, or bytearray. For version 1 (provided for reproducing random + sequences from older versions of Python), the algorithm for str and + bytes generates a narrower range of seeds. + + """ -""" def getstate(self) -> tuple[Any, ...]: - """Return internal state; can be passed to setstate() later. -""" + """Return internal state; can be passed to setstate() later.""" + def setstate(self, state: tuple[Any, ...]) -> None: - """Restore internal state from object returned by getstate(). -""" + """Restore internal state from object returned by getstate().""" + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: """Choose a random item from range(stop) or range(start, stop[, step]). -Roughly equivalent to ``choice(range(start, stop, step))`` but -supports arbitrarily large ranges and is optimized for common cases. + Roughly equivalent to ``choice(range(start, stop, step))`` but + supports arbitrarily large ranges and is optimized for common cases. -""" - def randint(self, a: int, b: int) -> int: - """Return random integer in range [a, b], including both end points. """ + + def randint(self, a: int, b: int) -> int: + """Return random integer in range [a, b], including both end points.""" + def randbytes(self, n: int) -> bytes: - """Generate n random bytes. -""" + """Generate n random bytes.""" + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: - """Choose a random element from a non-empty sequence. -""" + """Choose a random element from a non-empty sequence.""" + def choices( self, population: SupportsLenAndGetItem[_T], @@ -161,268 +165,270 @@ supports arbitrarily large ranges and is optimized for common cases. ) -> list[_T]: """Return a k sized list of population elements chosen with replacement. -If the relative weights or cumulative weights are not specified, -the selections are made with equal probability. 
+ If the relative weights or cumulative weights are not specified, + the selections are made with equal probability. -""" + """ if sys.version_info >= (3, 11): def shuffle(self, x: MutableSequence[Any]) -> None: - """Shuffle list x in place, and return None. -""" + """Shuffle list x in place, and return None.""" else: def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: """Shuffle list x in place, and return None. - Optional argument random is a 0-argument function returning a - random float in [0.0, 1.0); if it is the default None, the - standard random.random will be used. + Optional argument random is a 0-argument function returning a + random float in [0.0, 1.0); if it is the default None, the + standard random.random will be used. - """ + """ if sys.version_info >= (3, 11): def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: """Chooses k unique random elements from a population sequence. -Returns a new list containing elements from the population while -leaving the original population unchanged. The resulting list is -in selection order so that all sub-slices will also be valid random -samples. This allows raffle winners (the sample) to be partitioned -into grand prize and second place winners (the subslices). + Returns a new list containing elements from the population while + leaving the original population unchanged. The resulting list is + in selection order so that all sub-slices will also be valid random + samples. This allows raffle winners (the sample) to be partitioned + into grand prize and second place winners (the subslices). -Members of the population need not be hashable or unique. If the -population contains repeats, then each occurrence is a possible -selection in the sample. + Members of the population need not be hashable or unique. If the + population contains repeats, then each occurrence is a possible + selection in the sample. -Repeated elements can be specified one at a time or with the optional -counts parameter. For example: + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: - sample(['red', 'blue'], counts=[4, 2], k=5) + sample(['red', 'blue'], counts=[4, 2], k=5) -is equivalent to: + is equivalent to: - sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) -To choose a sample from a range of integers, use range() for the -population argument. This is especially fast and space efficient -for sampling from a large population: + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: - sample(range(10000000), 60) + sample(range(10000000), 60) -""" + """ else: - def sample( - self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None - ) -> list[_T]: + def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: """Chooses k unique random elements from a population sequence or set. - Returns a new list containing elements from the population while - leaving the original population unchanged. The resulting list is - in selection order so that all sub-slices will also be valid random - samples. This allows raffle winners (the sample) to be partitioned - into grand prize and second place winners (the subslices). 
+ Returns a new list containing elements from the population while + leaving the original population unchanged. The resulting list is + in selection order so that all sub-slices will also be valid random + samples. This allows raffle winners (the sample) to be partitioned + into grand prize and second place winners (the subslices). - Members of the population need not be hashable or unique. If the - population contains repeats, then each occurrence is a possible - selection in the sample. + Members of the population need not be hashable or unique. If the + population contains repeats, then each occurrence is a possible + selection in the sample. - Repeated elements can be specified one at a time or with the optional - counts parameter. For example: + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: - sample(['red', 'blue'], counts=[4, 2], k=5) + sample(['red', 'blue'], counts=[4, 2], k=5) - is equivalent to: + is equivalent to: - sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) - To choose a sample from a range of integers, use range() for the - population argument. This is especially fast and space efficient - for sampling from a large population: + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: - sample(range(10000000), 60) + sample(range(10000000), 60) - """ + """ def uniform(self, a: float, b: float) -> float: """Get a random number in the range [a, b) or [a, b] depending on rounding. -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = (a + b) / 2 - Var[X] = (b - a) ** 2 / 12 + E[X] = (a + b) / 2 + Var[X] = (b - a) ** 2 / 12 + + """ -""" def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: """Triangular distribution. -Continuous distribution bounded by given lower and upper limits, -and having a given mode value in-between. + Continuous distribution bounded by given lower and upper limits, + and having a given mode value in-between. -http://en.wikipedia.org/wiki/Triangular_distribution + http://en.wikipedia.org/wiki/Triangular_distribution -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = (low + high + mode) / 3 - Var[X] = (low**2 + high**2 + mode**2 - low*high - low*mode - high*mode) / 18 + E[X] = (low + high + mode) / 3 + Var[X] = (low**2 + high**2 + mode**2 - low*high - low*mode - high*mode) / 18 -""" + """ if sys.version_info >= (3, 12): def binomialvariate(self, n: int = 1, p: float = 0.5) -> int: """Binomial random variable. 
-Gives the number of successes for *n* independent trials -with the probability of success in each trial being *p*: + Gives the number of successes for *n* independent trials + with the probability of success in each trial being *p*: - sum(random() < p for i in range(n)) + sum(random() < p for i in range(n)) -Returns an integer in the range: + Returns an integer in the range: - 0 <= X <= n + 0 <= X <= n -The integer is chosen with the probability: + The integer is chosen with the probability: - P(X == k) = math.comb(n, k) * p ** k * (1 - p) ** (n - k) + P(X == k) = math.comb(n, k) * p ** k * (1 - p) ** (n - k) -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = n * p - Var[X] = n * p * (1 - p) + E[X] = n * p + Var[X] = n * p * (1 - p) -""" + """ def betavariate(self, alpha: float, beta: float) -> float: """Beta distribution. -Conditions on the parameters are alpha > 0 and beta > 0. -Returned values range between 0 and 1. + Conditions on the parameters are alpha > 0 and beta > 0. + Returned values range between 0 and 1. -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = alpha / (alpha + beta) - Var[X] = alpha * beta / ((alpha + beta)**2 * (alpha + beta + 1)) + E[X] = alpha / (alpha + beta) + Var[X] = alpha * beta / ((alpha + beta)**2 * (alpha + beta + 1)) -""" + """ if sys.version_info >= (3, 12): def expovariate(self, lambd: float = 1.0) -> float: """Exponential distribution. -lambd is 1.0 divided by the desired mean. It should be -nonzero. (The parameter would be called "lambda", but that is -a reserved word in Python.) Returned values range from 0 to -positive infinity if lambd is positive, and from negative -infinity to 0 if lambd is negative. + lambd is 1.0 divided by the desired mean. It should be + nonzero. (The parameter would be called "lambda", but that is + a reserved word in Python.) Returned values range from 0 to + positive infinity if lambd is positive, and from negative + infinity to 0 if lambd is negative. -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = 1 / lambd - Var[X] = 1 / lambd ** 2 + E[X] = 1 / lambd + Var[X] = 1 / lambd ** 2 -""" + """ else: def expovariate(self, lambd: float) -> float: """Exponential distribution. - lambd is 1.0 divided by the desired mean. It should be - nonzero. (The parameter would be called "lambda", but that is - a reserved word in Python.) Returned values range from 0 to - positive infinity if lambd is positive, and from negative - infinity to 0 if lambd is negative. + lambd is 1.0 divided by the desired mean. It should be + nonzero. (The parameter would be called "lambda", but that is + a reserved word in Python.) Returned values range from 0 to + positive infinity if lambd is positive, and from negative + infinity to 0 if lambd is negative. - """ + """ def gammavariate(self, alpha: float, beta: float) -> float: """Gamma distribution. Not the gamma function! -Conditions on the parameters are alpha > 0 and beta > 0. + Conditions on the parameters are alpha > 0 and beta > 0. 
-The probability distribution function is: + The probability distribution function is: - x ** (alpha - 1) * math.exp(-x / beta) - pdf(x) = -------------------------------------- - math.gamma(alpha) * beta ** alpha + x ** (alpha - 1) * math.exp(-x / beta) + pdf(x) = -------------------------------------- + math.gamma(alpha) * beta ** alpha -The mean (expected value) and variance of the random variable are: + The mean (expected value) and variance of the random variable are: - E[X] = alpha * beta - Var[X] = alpha * beta ** 2 + E[X] = alpha * beta + Var[X] = alpha * beta ** 2 -""" + """ if sys.version_info >= (3, 11): def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: """Gaussian distribution. -mu is the mean, and sigma is the standard deviation. This is -slightly faster than the normalvariate() function. + mu is the mean, and sigma is the standard deviation. This is + slightly faster than the normalvariate() function. -Not thread-safe without a lock around calls. + Not thread-safe without a lock around calls. + + """ -""" def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: """Normal distribution. -mu is the mean, and sigma is the standard deviation. + mu is the mean, and sigma is the standard deviation. -""" + """ else: def gauss(self, mu: float, sigma: float) -> float: """Gaussian distribution. - mu is the mean, and sigma is the standard deviation. This is - slightly faster than the normalvariate() function. + mu is the mean, and sigma is the standard deviation. This is + slightly faster than the normalvariate() function. - Not thread-safe without a lock around calls. + Not thread-safe without a lock around calls. + + """ - """ def normalvariate(self, mu: float, sigma: float) -> float: """Normal distribution. - mu is the mean, and sigma is the standard deviation. + mu is the mean, and sigma is the standard deviation. - """ + """ def lognormvariate(self, mu: float, sigma: float) -> float: """Log normal distribution. -If you take the natural logarithm of this distribution, you'll get a -normal distribution with mean mu and standard deviation sigma. -mu can have any value, and sigma must be greater than zero. + If you take the natural logarithm of this distribution, you'll get a + normal distribution with mean mu and standard deviation sigma. + mu can have any value, and sigma must be greater than zero. + + """ -""" def vonmisesvariate(self, mu: float, kappa: float) -> float: """Circular data distribution. -mu is the mean angle, expressed in radians between 0 and 2*pi, and -kappa is the concentration parameter, which must be greater than or -equal to zero. If kappa is equal to zero, this distribution reduces -to a uniform random angle over the range 0 to 2*pi. + mu is the mean angle, expressed in radians between 0 and 2*pi, and + kappa is the concentration parameter, which must be greater than or + equal to zero. If kappa is equal to zero, this distribution reduces + to a uniform random angle over the range 0 to 2*pi. + + """ -""" def paretovariate(self, alpha: float) -> float: - """Pareto distribution. alpha is the shape parameter. -""" + """Pareto distribution. alpha is the shape parameter.""" + def weibullvariate(self, alpha: float, beta: float) -> float: """Weibull distribution. -alpha is the scale parameter and beta is the shape parameter. + alpha is the scale parameter and beta is the shape parameter. 
-""" + """ # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): """Alternate random number generator using sources provided -by the operating system (such as /dev/urandom on Unix or -CryptGenRandom on Windows). + by the operating system (such as /dev/urandom on Unix or + CryptGenRandom on Windows). - Not available on all systems (see os.urandom() for details). + Not available on all systems (see os.urandom() for details). + + """ -""" def getrandbits(self, k: int) -> int: # k can be passed by keyword - """getrandbits(k) -> x. Generates an int with k random bits. -""" + """getrandbits(k) -> x. Generates an int with k random bits.""" + def getstate(self, *args: Any, **kwds: Any) -> NoReturn: - """Method should not be called for a system random number generator. -""" + """Method should not be called for a system random number generator.""" + def setstate(self, *args: Any, **kwds: Any) -> NoReturn: - """Method should not be called for a system random number generator. -""" + """Method should not be called for a system random number generator.""" _inst: Random seed = _inst.seed diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi index b253aff3552c9..af6be98d28d49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/re.pyi @@ -105,6 +105,7 @@ This module also defines exception 'PatternError', aliased to 'error' for backward compatibility. """ + import enum import sre_compile import sre_constants @@ -163,14 +164,15 @@ _T = TypeVar("_T") class error(Exception): """Exception raised for invalid regular expressions. -Attributes: + Attributes: + + msg: The unformatted error message + pattern: The regular expression pattern + pos: The index in the pattern where compilation failed (may be None) + lineno: The line corresponding to pos (may be None) + colno: The column corresponding to pos (may be None) + """ - msg: The unformatted error message - pattern: The regular expression pattern - pos: The index in the pattern where compilation failed (may be None) - lineno: The line corresponding to pos (may be None) - colno: The column corresponding to pos (may be None) -""" msg: str pattern: str | bytes | None pos: int | None @@ -181,39 +183,38 @@ Attributes: @final class Match(Generic[AnyStr]): """The result of re.match() and re.search(). -Match objects always have a boolean value of True. -""" + Match objects always have a boolean value of True. + """ + @property def pos(self) -> int: - """The index into the string at which the RE engine started looking for a match. -""" + """The index into the string at which the RE engine started looking for a match.""" + @property def endpos(self) -> int: - """The index into the string beyond which the RE engine will not go. -""" + """The index into the string beyond which the RE engine will not go.""" + @property def lastindex(self) -> int | None: - """The integer index of the last matched capturing group. -""" + """The integer index of the last matched capturing group.""" + @property def lastgroup(self) -> str | None: - """The name of the last matched capturing group. -""" + """The name of the last matched capturing group.""" + @property def string(self) -> AnyStr: - """The string passed to match() or search(). -""" - + """The string passed to match() or search().""" # The regular expression object whose match() or search() method produced # this match instance. 
@property def re(self) -> Pattern[AnyStr]: - """The regular expression object. -""" + """The regular expression object.""" + @overload def expand(self: Match[str], template: str) -> str: - """Return the string obtained by doing backslash substitution on the string template, as done by the sub() method. -""" + """Return the string obtained by doing backslash substitution on the string template, as done by the sub() method.""" + @overload def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... @overload @@ -222,9 +223,10 @@ Match objects always have a boolean value of True. @overload def group(self, group: Literal[0] = 0, /) -> AnyStr: """group([group1, ...]) -> str or tuple. - Return subgroup(s) of the match by indices or names. - For 0 returns the entire match. -""" + Return subgroup(s) of the match by indices or names. + For 0 returns the entire match. + """ + @overload def group(self, group: str | int, /) -> AnyStr | MaybeNone: ... @overload @@ -235,9 +237,10 @@ Match objects always have a boolean value of True. def groups(self) -> tuple[AnyStr | MaybeNone, ...]: """Return a tuple containing all the subgroups of the match, from 1. - default - Is used for groups that did not participate in the match. -""" + default + Is used for groups that did not participate in the match. + """ + @overload def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... # Each value in groupdict()'s return dict is either "AnyStr" or @@ -246,77 +249,78 @@ Match objects always have a boolean value of True. def groupdict(self) -> dict[str, AnyStr | MaybeNone]: """Return a dictionary containing all the named subgroups of the match, keyed by the subgroup name. - default - Is used for groups that did not participate in the match. -""" + default + Is used for groups that did not participate in the match. + """ + @overload def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... def start(self, group: int | str = 0, /) -> int: - """Return index of the start of the substring matched by group. -""" + """Return index of the start of the substring matched by group.""" + def end(self, group: int | str = 0, /) -> int: - """Return index of the end of the substring matched by group. -""" + """Return index of the end of the substring matched by group.""" + def span(self, group: int | str = 0, /) -> tuple[int, int]: - """For match object m, return the 2-tuple (m.start(group), m.end(group)). -""" + """For match object m, return the 2-tuple (m.start(group), m.end(group)).""" + @property def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload def __getitem__(self, key: Literal[0], /) -> AnyStr: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: int | str, /) -> AnyStr | MaybeNone: ... def __copy__(self) -> Match[AnyStr]: ... def __deepcopy__(self, memo: Any, /) -> Match[AnyStr]: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @final class Pattern(Generic[AnyStr]): - """Compiled regular expression object. -""" + """Compiled regular expression object.""" + @property def flags(self) -> int: - """The regex matching flags. -""" + """The regex matching flags.""" + @property def groupindex(self) -> Mapping[str, int]: - """A dictionary mapping group names to group numbers. 
-""" + """A dictionary mapping group names to group numbers.""" + @property def groups(self) -> int: - """The number of capturing groups in the pattern. -""" + """The number of capturing groups in the pattern.""" + @property def pattern(self) -> AnyStr: - """The pattern string from which the RE object was compiled. -""" + """The pattern string from which the RE object was compiled.""" + @overload def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: """Scan through string looking for a match, and return a corresponding match object instance. -Return None if no position in the string matches. -""" + Return None if no position in the string matches. + """ + @overload def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: - """Matches zero or more characters at the beginning of the string. -""" + """Matches zero or more characters at the beginning of the string.""" + @overload def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... @overload def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: - """Matches against all of the string. -""" + """Matches against all of the string.""" + @overload def fullmatch( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -325,8 +329,8 @@ Return None if no position in the string matches. def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | MaybeNone]: - """Split string by the occurrences of pattern. -""" + """Split string by the occurrences of pattern.""" + @overload def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | MaybeNone]: ... @overload @@ -334,8 +338,8 @@ Return None if no position in the string matches. # return type depends on the number of groups in the pattern @overload def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: - """Return a list of all non-overlapping matches of pattern in string. -""" + """Return a list of all non-overlapping matches of pattern in string.""" + @overload def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload @@ -344,8 +348,9 @@ Return None if no position in the string matches. def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: """Return an iterator over all non-overlapping matches for the RE pattern in string. -For each match, the iterator returns a match object. -""" + For each match, the iterator returns a match object. + """ + @overload def finditer( self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize @@ -354,8 +359,8 @@ For each match, the iterator returns a match object. def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... 
@overload def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: - """Return the string obtained by replacing the leftmost non-overlapping occurrences of pattern in string by the replacement repl. -""" + """Return the string obtained by replacing the leftmost non-overlapping occurrences of pattern in string by the replacement repl.""" + @overload def sub( self: Pattern[bytes], @@ -367,8 +372,8 @@ For each match, the iterator returns a match object. def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... @overload def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: - """Return the tuple (new_string, number_of_subs_made) found by replacing the leftmost non-overlapping occurrences of pattern with the replacement repl. -""" + """Return the tuple (new_string, number_of_subs_made) found by replacing the leftmost non-overlapping occurrences of pattern with the replacement repl.""" + @overload def subn( self: Pattern[bytes], @@ -383,14 +388,13 @@ For each match, the iterator returns a match object. def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" # ----- re variables and constants ----- class RegexFlag(enum.IntFlag): - """An enumeration. -""" + """An enumeration.""" + A = sre_compile.SRE_FLAG_ASCII ASCII = A DEBUG = sre_compile.SRE_FLAG_DEBUG @@ -443,41 +447,45 @@ _FlagsType: TypeAlias = int | RegexFlag # because the pattern must be hashable. @overload def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: - """Compile a regular expression pattern, returning a Pattern object. -""" + """Compile a regular expression pattern, returning a Pattern object.""" + @overload def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: """Scan through string looking for a match to the pattern, returning -a Match object, or None if no match was found. -""" + a Match object, or None if no match was found. + """ + @overload def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: """Try to apply the pattern at the start of the string, returning -a Match object, or None if no match was found. -""" + a Match object, or None if no match was found. + """ + @overload def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: """Try to apply the pattern to all of the string, returning -a Match object, or None if no match was found. -""" + a Match object, or None if no match was found. + """ + @overload def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | MaybeNone]: """Split the source string by the occurrences of the pattern, -returning a list containing the resulting substrings. 
If -capturing parentheses are used in pattern, then the text of all -groups in the pattern are also returned as part of the resulting -list. If maxsplit is nonzero, at most maxsplit splits occur, -and the remainder of the string is returned as the final element -of the list. -""" + returning a list containing the resulting substrings. If + capturing parentheses are used in pattern, then the text of all + groups in the pattern are also returned as part of the resulting + list. If maxsplit is nonzero, at most maxsplit splits occur, + and the remainder of the string is returned as the final element + of the list. + """ + @overload def split( pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 @@ -486,21 +494,23 @@ def split( def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: """Return a list of all non-overlapping matches in the string. -If one or more capturing groups are present in the pattern, return -a list of groups; this will be a list of tuples if the pattern -has more than one group. + If one or more capturing groups are present in the pattern, return + a list of groups; this will be a list of tuples if the pattern + has more than one group. + + Empty matches are included in the result. + """ -Empty matches are included in the result. -""" @overload def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... @overload def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: """Return an iterator over all non-overlapping matches in the -string. For each match, the iterator returns a Match object. + string. For each match, the iterator returns a Match object. + + Empty matches are included in the result. + """ -Empty matches are included in the result. -""" @overload def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... @overload @@ -508,12 +518,13 @@ def sub( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> str: """Return the string obtained by replacing the leftmost -non-overlapping occurrences of the pattern in string by the -replacement repl. repl can be either a string or a callable; -if a string, backslash escapes in it are processed. If it is -a callable, it's passed the Match object and must return -a replacement string to be used. -""" + non-overlapping occurrences of the pattern in string by the + replacement repl. repl can be either a string or a callable; + if a string, backslash escapes in it are processed. If it is + a callable, it's passed the Match object and must return + a replacement string to be used. + """ + @overload def sub( pattern: bytes | Pattern[bytes], @@ -527,14 +538,15 @@ def subn( pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> tuple[str, int]: """Return a 2-tuple containing (new_string, number). -new_string is the string obtained by replacing the leftmost -non-overlapping occurrences of the pattern in the source -string by the replacement repl. number is the number of -substitutions that were made. repl can be either a string or a -callable; if a string, backslash escapes in it are processed. -If it is a callable, it's passed the Match object and must -return a replacement string to be used. 
-""" + new_string is the string obtained by replacing the leftmost + non-overlapping occurrences of the pattern in the source + string by the replacement repl. number is the number of + substitutions that were made. repl can be either a string or a + callable; if a string, backslash escapes in it are processed. + If it is a callable, it's passed the Match object and must + return a replacement string to be used. + """ + @overload def subn( pattern: bytes | Pattern[bytes], @@ -545,19 +557,17 @@ def subn( ) -> tuple[bytes, int]: ... def escape(pattern: AnyStr) -> AnyStr: """ -Escape special characters in a string. -""" + Escape special characters in a string. + """ + def purge() -> None: - """Clear the regular expression caches -""" + """Clear the regular expression caches""" if sys.version_info < (3, 13): if sys.version_info >= (3, 11): @deprecated("Deprecated since Python 3.11; removed in Python 3.13. Use `re.compile()` instead.") def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented - """Compile a template pattern, returning a Pattern object, deprecated -""" + """Compile a template pattern, returning a Pattern object, deprecated""" else: def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: # undocumented - """Compile a template pattern, returning a Pattern object -""" + """Compile a template pattern, returning a Pattern object""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi index cf4b4bb4a5ef8..0358c632ee3d0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/readline.pyi @@ -1,5 +1,5 @@ -"""Importing this module enables command line editing using libedit readline. -""" +"""Importing this module enables command line editing using libedit readline.""" + import sys from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence @@ -11,113 +11,121 @@ if sys.platform != "win32": _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] def parse_and_bind(string: str, /) -> None: - """Execute the init line provided in the string argument. -""" + """Execute the init line provided in the string argument.""" + def read_init_file(filename: StrOrBytesPath | None = None, /) -> None: """Execute a readline initialization file. -The default filename is the last filename used. -""" + The default filename is the last filename used. + """ + def get_line_buffer() -> str: - """Return the current contents of the line buffer. -""" + """Return the current contents of the line buffer.""" + def insert_text(string: str, /) -> None: - """Insert text into the line buffer at the cursor position. -""" + """Insert text into the line buffer at the cursor position.""" + def redisplay() -> None: - """Change what's displayed on the screen to reflect contents of the line buffer. -""" + """Change what's displayed on the screen to reflect contents of the line buffer.""" + def read_history_file(filename: StrOrBytesPath | None = None, /) -> None: """Load a readline history file. -The default filename is ~/.history. -""" + The default filename is ~/.history. + """ + def write_history_file(filename: StrOrBytesPath | None = None, /) -> None: """Save a readline history file. -The default filename is ~/.history. -""" + The default filename is ~/.history. 
+ """ + def append_history_file(nelements: int, filename: StrOrBytesPath | None = None, /) -> None: """Append the last nelements items of the history list to file. -The default filename is ~/.history. -""" + The default filename is ~/.history. + """ + def get_history_length() -> int: - """Return the maximum number of lines that will be written to the history file. -""" + """Return the maximum number of lines that will be written to the history file.""" + def set_history_length(length: int, /) -> None: """Set the maximal number of lines which will be written to the history file. -A negative length is used to inhibit history truncation. -""" + A negative length is used to inhibit history truncation. + """ + def clear_history() -> None: - """Clear the current readline history. -""" + """Clear the current readline history.""" + def get_current_history_length() -> int: - """Return the current (not the maximum) length of history. -""" + """Return the current (not the maximum) length of history.""" + def get_history_item(index: int, /) -> str: - """Return the current contents of history item at one-based index. -""" + """Return the current contents of history item at one-based index.""" + def remove_history_item(pos: int, /) -> None: - """Remove history item given by its zero-based position. -""" + """Remove history item given by its zero-based position.""" + def replace_history_item(pos: int, line: str, /) -> None: """Replaces history item given by its position with contents of line. -pos is zero-based. -""" + pos is zero-based. + """ + def add_history(string: str, /) -> None: - """Add an item to the history buffer. -""" + """Add an item to the history buffer.""" + def set_auto_history(enabled: bool, /) -> None: - """Enables or disables automatic history. -""" + """Enables or disables automatic history.""" + def set_startup_hook(function: Callable[[], object] | None = None, /) -> None: """Set or remove the function invoked by the rl_startup_hook callback. -The function is called with no arguments just -before readline prints the first prompt. -""" + The function is called with no arguments just + before readline prints the first prompt. + """ + def set_pre_input_hook(function: Callable[[], object] | None = None, /) -> None: """Set or remove the function invoked by the rl_pre_input_hook callback. -The function is called with no arguments after the first prompt -has been printed and just before readline starts reading input -characters. -""" + The function is called with no arguments after the first prompt + has been printed and just before readline starts reading input + characters. + """ + def set_completer(function: _Completer | None = None, /) -> None: """Set or remove the completer function. -The function is called as function(text, state), -for state in 0, 1, 2, ..., until it returns a non-string. -It should return the next possible completion starting with 'text'. -""" + The function is called as function(text, state), + for state in 0, 1, 2, ..., until it returns a non-string. + It should return the next possible completion starting with 'text'. + """ + def get_completer() -> _Completer | None: - """Get the current completer function. -""" + """Get the current completer function.""" + def get_completion_type() -> int: - """Get the type of completion being attempted. -""" + """Get the type of completion being attempted.""" + def get_begidx() -> int: - """Get the beginning index of the completion scope. 
-""" + """Get the beginning index of the completion scope.""" + def get_endidx() -> int: - """Get the ending index of the completion scope. -""" + """Get the ending index of the completion scope.""" + def set_completer_delims(string: str, /) -> None: - """Set the word delimiters for completion. -""" + """Set the word delimiters for completion.""" + def get_completer_delims() -> str: - """Get the word delimiters for completion. -""" + """Get the word delimiters for completion.""" + def set_completion_display_matches_hook(function: _CompDisp | None = None, /) -> None: """Set or remove the completion display function. -The function is called as - function(substitution, [matches], longest_match_length) -once each time matches need to be displayed. -""" - + The function is called as + function(substitution, [matches], longest_match_length) + once each time matches need to be displayed. + """ if sys.version_info >= (3, 13): backend: Literal["readline", "editline"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi index da748220c5b4f..e94cc4db69fdc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/reprlib.pyi @@ -1,5 +1,5 @@ -"""Redo the builtin repr() (representation) but with limits on most sizes. -""" +"""Redo the builtin repr() (representation) but with limits on most sizes.""" + import sys from array import array from collections import deque @@ -12,8 +12,7 @@ __all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc: TypeAlias = Callable[[Any], str] def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: - """Decorator to make a repr function return fillvalue for a recursive call -""" + """Decorator to make a repr function return fillvalue for a recursive call""" class Repr: maxlevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi index 89868fe24cfdc..f5b27d28cef0e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/resource.pyi @@ -32,11 +32,12 @@ if sys.platform != "win32": ): """struct_rusage: Result from getrusage. -This object may be accessed either as a tuple of - (utime,stime,maxrss,ixrss,idrss,isrss,minflt,majflt, - nswap,inblock,oublock,msgsnd,msgrcv,nsignals,nvcsw,nivcsw) -or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. -""" + This object may be accessed either as a tuple of + (utime,stime,maxrss,ixrss,idrss,isrss,minflt,majflt, + nswap,inblock,oublock,msgsnd,msgrcv,nsignals,nvcsw,nivcsw) + or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ( "ru_utime", @@ -59,68 +60,67 @@ or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. @property def ru_utime(self) -> float: - """user time used -""" + """user time used""" + @property def ru_stime(self) -> float: - """system time used -""" + """system time used""" + @property def ru_maxrss(self) -> int: - """max. resident set size -""" + """max. 
resident set size""" + @property def ru_ixrss(self) -> int: - """shared memory size -""" + """shared memory size""" + @property def ru_idrss(self) -> int: - """unshared data size -""" + """unshared data size""" + @property def ru_isrss(self) -> int: - """unshared stack size -""" + """unshared stack size""" + @property def ru_minflt(self) -> int: - """page faults not requiring I/O -""" + """page faults not requiring I/O""" + @property def ru_majflt(self) -> int: - """page faults requiring I/O -""" + """page faults requiring I/O""" + @property def ru_nswap(self) -> int: - """number of swap outs -""" + """number of swap outs""" + @property def ru_inblock(self) -> int: - """block input operations -""" + """block input operations""" + @property def ru_oublock(self) -> int: - """block output operations -""" + """block output operations""" + @property def ru_msgsnd(self) -> int: - """IPC messages sent -""" + """IPC messages sent""" + @property def ru_msgrcv(self) -> int: - """IPC messages received -""" + """IPC messages received""" + @property def ru_nsignals(self) -> int: - """signals received -""" + """signals received""" + @property def ru_nvcsw(self) -> int: - """voluntary context switches -""" + """voluntary context switches""" + @property def ru_nivcsw(self) -> int: - """involuntary context switches -""" + """involuntary context switches""" def getpagesize() -> int: ... def getrlimit(resource: int, /) -> tuple[int, int]: ... @@ -131,6 +131,5 @@ or via the attributes ru_utime, ru_stime, ru_maxrss, and so on. def prlimit(pid: int, resource: int, limits: tuple[int, int] | None = None, /) -> tuple[int, int]: ... else: def prlimit(pid: int, resource: int, limits: tuple[int, int] = ..., /) -> tuple[int, int]: - """prlimit(pid, resource, [limits]) -""" + """prlimit(pid, resource, [limits])""" error = OSError diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi index 0feeb5166783f..37185ea5d74be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/rlcompleter.pyi @@ -28,6 +28,7 @@ Notes: used, and this module (and the readline module) are silently inactive. """ + from typing import Any __all__ = ["Completer"] @@ -36,40 +37,43 @@ class Completer: def __init__(self, namespace: dict[str, Any] | None = None) -> None: """Create a new completer for the command line. -Completer([namespace]) -> completer instance. + Completer([namespace]) -> completer instance. -If unspecified, the default namespace where completions are performed -is __main__ (technically, __main__.__dict__). Namespaces should be -given as dictionaries. + If unspecified, the default namespace where completions are performed + is __main__ (technically, __main__.__dict__). Namespaces should be + given as dictionaries. -Completer instances should be used as the completion mechanism of -readline via the set_completer() call: + Completer instances should be used as the completion mechanism of + readline via the set_completer() call: + + readline.set_completer(Completer(my_namespace).complete) + """ -readline.set_completer(Completer(my_namespace).complete) -""" def complete(self, text: str, state: int) -> str | None: """Return the next possible completion for 'text'. -This is called successively with state == 0, 1, 2, ... until it -returns None. The completion should begin with 'text'. + This is called successively with state == 0, 1, 2, ... until it + returns None. 
The completion should begin with 'text'. + + """ -""" def attr_matches(self, text: str) -> list[str]: """Compute matches when text contains a dot. -Assuming the text is of the form NAME.NAME....[NAME], and is -evaluable in self.namespace, it will be evaluated and its attributes -(as revealed by dir()) are used as possible completions. (For class -instances, class members are also considered.) + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluable in self.namespace, it will be evaluated and its attributes + (as revealed by dir()) are used as possible completions. (For class + instances, class members are also considered.) -WARNING: this can still invoke arbitrary C code, if an object -with a __getattr__ hook is evaluated. + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ -""" def global_matches(self, text: str) -> list[str]: """Compute matches when text is a simple name. -Return a list of all keywords, built-in functions and names currently -defined in self.namespace that match. + Return a list of all keywords, built-in functions and names currently + defined in self.namespace that match. -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi index 9f1e53a465d1e..a52a3bd7f3f40 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/runpy.pyi @@ -6,6 +6,7 @@ module namespace instead of the native filesystem. This allows Python code to play nicely with non-filesystem based PEP 302 importers when locating support scripts as well as when importing modules. """ + from _typeshed import Unused from types import ModuleType from typing import Any @@ -14,8 +15,8 @@ from typing_extensions import Self __all__ = ["run_module", "run_path"] class _TempModule: - """Temporarily replace a module in sys.modules with an empty namespace -""" + """Temporarily replace a module in sys.modules with an empty namespace""" + mod_name: str module: ModuleType def __init__(self, mod_name: str) -> None: ... @@ -33,35 +34,36 @@ def run_module( ) -> dict[str, Any]: """Execute a module's code without importing it. -mod_name -- an absolute module name or package name. + mod_name -- an absolute module name or package name. -Optional arguments: -init_globals -- dictionary used to pre-populate the module’s -globals dictionary before the code is executed. + Optional arguments: + init_globals -- dictionary used to pre-populate the module’s + globals dictionary before the code is executed. -run_name -- if not None, this will be used for setting __name__; -otherwise, __name__ will be set to mod_name + '__main__' if the -named module is a package and to just mod_name otherwise. + run_name -- if not None, this will be used for setting __name__; + otherwise, __name__ will be set to mod_name + '__main__' if the + named module is a package and to just mod_name otherwise. -alter_sys -- if True, sys.argv[0] is updated with the value of -__file__ and sys.modules[__name__] is updated with a temporary -module object for the module being executed. Both are -restored to their original values before the function returns. + alter_sys -- if True, sys.argv[0] is updated with the value of + __file__ and sys.modules[__name__] is updated with a temporary + module object for the module being executed. Both are + restored to their original values before the function returns. + + Returns the resulting module globals dictionary. 
+ """ -Returns the resulting module globals dictionary. -""" def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: """Execute code located at the specified filesystem location. -path_name -- filesystem location of a Python script, zipfile, -or directory containing a top level __main__.py script. + path_name -- filesystem location of a Python script, zipfile, + or directory containing a top level __main__.py script. -Optional arguments: -init_globals -- dictionary used to pre-populate the module’s -globals dictionary before the code is executed. + Optional arguments: + init_globals -- dictionary used to pre-populate the module’s + globals dictionary before the code is executed. -run_name -- if not None, this will be used to set __name__; -otherwise, '' will be used for __name__. + run_name -- if not None, this will be used to set __name__; + otherwise, '' will be used for __name__. -Returns the resulting module globals dictionary. -""" + Returns the resulting module globals dictionary. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi index 0e56c9f5c0cb4..0567d514ac3dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sched.pyi @@ -22,6 +22,7 @@ arguments are be packed in a sequence) and keyword parameters in "kwargs". The action function may be an instance method so it has another way to reference private data (besides global variables). """ + import sys from collections.abc import Callable from typing import Any, ClassVar, NamedTuple, type_check_only @@ -33,8 +34,8 @@ _ActionCallback: TypeAlias = Callable[..., Any] if sys.version_info >= (3, 10): class Event(NamedTuple): - """Event(time, priority, sequence, action, argument, kwargs) -""" + """Event(time, priority, sequence, action, argument, kwargs)""" + time: float priority: Any sequence: int @@ -60,64 +61,69 @@ class scheduler: def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: """Initialize a new instance, passing the time and delay -functions -""" + functions + """ + def enterabs( self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... ) -> Event: """Enter a new event in the queue at an absolute time. -Returns an ID for the event which can be used to remove it, -if necessary. + Returns an ID for the event which can be used to remove it, + if necessary. + + """ -""" def enter( self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... ) -> Event: """A variant that specifies the time as a relative time. -This is actually the more commonly used interface. + This is actually the more commonly used interface. + + """ -""" def run(self, blocking: bool = True) -> float | None: """Execute events until the queue is empty. -If blocking is False executes the scheduled events due to -expire soonest (if any) and then return the deadline of the -next scheduled call in the scheduler. - -When there is a positive delay until the first event, the -delay function is called and the event is left in the queue; -otherwise, the event is removed from the queue and executed -(its action function is called, passing it the argument). If -the delay function returns prematurely, it is simply -restarted. 
- -It is legal for both the delay function and the action -function to modify the queue or to raise an exception; -exceptions are not caught but the scheduler's state remains -well-defined so run() may be called again. - -A questionable hack is added to allow other threads to run: -just after an event is executed, a delay of 0 is executed, to -avoid monopolizing the CPU when other threads are also -runnable. + If blocking is False executes the scheduled events due to + expire soonest (if any) and then return the deadline of the + next scheduled call in the scheduler. + + When there is a positive delay until the first event, the + delay function is called and the event is left in the queue; + otherwise, the event is removed from the queue and executed + (its action function is called, passing it the argument). If + the delay function returns prematurely, it is simply + restarted. + + It is legal for both the delay function and the action + function to modify the queue or to raise an exception; + exceptions are not caught but the scheduler's state remains + well-defined so run() may be called again. + + A questionable hack is added to allow other threads to run: + just after an event is executed, a delay of 0 is executed, to + avoid monopolizing the CPU when other threads are also + runnable. + + """ -""" def cancel(self, event: Event) -> None: """Remove an event from the queue. -This must be presented the ID as returned by enter(). -If the event is not in the queue, this raises ValueError. + This must be presented the ID as returned by enter(). + If the event is not in the queue, this raises ValueError. + + """ -""" def empty(self) -> bool: - """Check whether the queue is empty. -""" + """Check whether the queue is empty.""" + @property def queue(self) -> list[Event]: """An ordered list of upcoming events. -Events are named tuples with fields for: - time, priority, action, arguments, kwargs + Events are named tuples with fields for: + time, priority, action, arguments, kwargs -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi index 4726ab1bd82dd..d6c5de9b37615 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/secrets.pyi @@ -5,6 +5,7 @@ See PEP 506 for more information. https://peps.python.org/pep-0506/ """ + from _typeshed import SupportsLenAndGetItem from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom @@ -15,42 +16,44 @@ __all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "to _T = TypeVar("_T") def randbelow(exclusive_upper_bound: int) -> int: - """Return a random int in the range [0, n). -""" + """Return a random int in the range [0, n).""" + def randbits(k: int) -> int: - """getrandbits(k) -> x. Generates an int with k random bits. -""" + """getrandbits(k) -> x. Generates an int with k random bits.""" + def choice(seq: SupportsLenAndGetItem[_T]) -> _T: - """Choose a random element from a non-empty sequence. -""" + """Choose a random element from a non-empty sequence.""" + def token_bytes(nbytes: int | None = None) -> bytes: """Return a random byte string containing *nbytes* bytes. -If *nbytes* is ``None`` or not supplied, a reasonable -default is used. + If *nbytes* is ``None`` or not supplied, a reasonable + default is used. 
->>> token_bytes(16) #doctest:+SKIP -b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + >>> token_bytes(16) #doctest:+SKIP + b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + + """ -""" def token_hex(nbytes: int | None = None) -> str: """Return a random text string, in hexadecimal. -The string has *nbytes* random bytes, each byte converted to two -hex digits. If *nbytes* is ``None`` or not supplied, a reasonable -default is used. + The string has *nbytes* random bytes, each byte converted to two + hex digits. If *nbytes* is ``None`` or not supplied, a reasonable + default is used. ->>> token_hex(16) #doctest:+SKIP -'f9bf78b9a18ce6d46a0cd2b0b86df9da' + >>> token_hex(16) #doctest:+SKIP + 'f9bf78b9a18ce6d46a0cd2b0b86df9da' + + """ -""" def token_urlsafe(nbytes: int | None = None) -> str: """Return a random URL-safe text string, in Base64 encoding. -The string has *nbytes* random bytes. If *nbytes* is ``None`` -or not supplied, a reasonable default is used. + The string has *nbytes* random bytes. If *nbytes* is ``None`` + or not supplied, a reasonable default is used. ->>> token_urlsafe(16) #doctest:+SKIP -'Drmhze6EPcv0fN_81Bj-nA' + >>> token_urlsafe(16) #doctest:+SKIP + 'Drmhze6EPcv0fN_81Bj-nA' -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi index 421671107a0ec..93d3b5a72f05f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/select.pyi @@ -3,6 +3,7 @@ *** IMPORTANT NOTICE *** On Windows, only sockets are supported; on Unix, all file descriptors. """ + import sys from _typeshed import FileDescriptorLike from collections.abc import Iterable @@ -32,9 +33,10 @@ if sys.platform != "win32": class poll: """Returns a polling object. -This object supports registering and unregistering file descriptors, and then -polling them for I/O events. -""" + This object supports registering and unregistering file descriptors, and then + polling them for I/O events. + """ + # default value is select.POLLIN | select.POLLPRI | select.POLLOUT def register(self, fd: FileDescriptorLike, eventmask: int = 7, /) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int, /) -> None: ... @@ -46,27 +48,27 @@ def select( ) -> tuple[list[Any], list[Any], list[Any]]: """Wait until one or more file descriptors are ready for some kind of I/O. -The first three arguments are iterables of file descriptors to be waited for: -rlist -- wait until ready for reading -wlist -- wait until ready for writing -xlist -- wait for an "exceptional condition" -If only one kind of condition is required, pass [] for the other lists. + The first three arguments are iterables of file descriptors to be waited for: + rlist -- wait until ready for reading + wlist -- wait until ready for writing + xlist -- wait for an "exceptional condition" + If only one kind of condition is required, pass [] for the other lists. -A file descriptor is either a socket or file object, or a small integer -gotten from a fileno() method call on one of those. + A file descriptor is either a socket or file object, or a small integer + gotten from a fileno() method call on one of those. -The optional 4th argument specifies a timeout in seconds; it may be -a floating-point number to specify fractions of seconds. If it is absent -or None, the call will never time out. + The optional 4th argument specifies a timeout in seconds; it may be + a floating-point number to specify fractions of seconds. 
If it is absent + or None, the call will never time out. -The return value is a tuple of three lists corresponding to the first three -arguments; each contains the subset of the corresponding file descriptors -that are ready. + The return value is a tuple of three lists corresponding to the first three + arguments; each contains the subset of the corresponding file descriptors + that are ready. -*** IMPORTANT NOTICE *** -On Windows, only sockets are supported; on Unix, all file -descriptors can be used. -""" + *** IMPORTANT NOTICE *** + On Windows, only sockets are supported; on Unix, all file + descriptors can be used. + """ error = OSError @@ -76,19 +78,20 @@ if sys.platform != "linux" and sys.platform != "win32": class kevent: """kevent(ident, filter=KQ_FILTER_READ, flags=KQ_EV_ADD, fflags=0, data=0, udata=0) -This object is the equivalent of the struct kevent for the C API. + This object is the equivalent of the struct kevent for the C API. -See the kqueue manpage for more detailed information about the meaning -of the arguments. + See the kqueue manpage for more detailed information about the meaning + of the arguments. + + One minor note: while you might hope that udata could store a + reference to a python object, it cannot, because it is impossible to + keep a proper reference count of the object once it's passed into the + kernel. Therefore, I have restricted it to only storing an integer. I + recommend ignoring it and simply using the 'ident' field to key off + of. You could also set up a dictionary on the python side to store a + udata->object mapping. + """ -One minor note: while you might hope that udata could store a -reference to a python object, it cannot, because it is impossible to -keep a proper reference count of the object once it's passed into the -kernel. Therefore, I have restricted it to only storing an integer. I -recommend ignoring it and simply using the 'ident' field to key off -of. You could also set up a dictionary on the python side to store a -udata->object mapping. -""" data: Any fflags: int filter: int @@ -111,46 +114,46 @@ udata->object mapping. class kqueue: """Kqueue syscall wrapper. -For example, to start watching a socket for input: ->>> kq = kqueue() ->>> sock = socket() ->>> sock.connect((host, port)) ->>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_ADD)], 0) + For example, to start watching a socket for input: + >>> kq = kqueue() + >>> sock = socket() + >>> sock.connect((host, port)) + >>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_ADD)], 0) -To wait one second for it to become writeable: ->>> kq.control(None, 1, 1000) + To wait one second for it to become writeable: + >>> kq.control(None, 1, 1000) + + To stop listening: + >>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_DELETE)], 0) + """ -To stop listening: ->>> kq.control([kevent(sock, KQ_FILTER_WRITE, KQ_EV_DELETE)], 0) -""" closed: bool def __init__(self) -> None: ... def close(self) -> None: """Close the kqueue control file descriptor. -Further operations on the kqueue object will raise an exception. -""" - def control( - self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, / - ) -> list[kevent]: + Further operations on the kqueue object will raise an exception. + """ + + def control(self, changelist: Iterable[kevent] | None, maxevents: int, timeout: float | None = None, /) -> list[kevent]: """Calls the kernel kevent function. 
- changelist - Must be an iterable of kevent objects describing the changes to be made - to the kernel's watch list or None. - maxevents - The maximum number of events that the kernel will return. - timeout - The maximum time to wait in seconds, or else None to wait forever. - This accepts floats for smaller timeouts, too. -""" + changelist + Must be an iterable of kevent objects describing the changes to be made + to the kernel's watch list or None. + maxevents + The maximum number of events that the kernel will return. + timeout + The maximum time to wait in seconds, or else None to wait forever. + This accepts floats for smaller timeouts, too. + """ + def fileno(self) -> int: - """Return the kqueue control file descriptor. -""" + """Return the kqueue control file descriptor.""" + @classmethod def fromfd(cls, fd: FileDescriptorLike, /) -> kqueue: - """Create a kqueue object from a given control fd. -""" + """Create a kqueue object from a given control fd.""" KQ_EV_ADD: Final[int] KQ_EV_CLEAR: Final[int] @@ -197,12 +200,13 @@ if sys.platform == "linux": class epoll: """select.epoll(sizehint=-1, flags=0) -Returns an epolling object + Returns an epolling object + + sizehint must be a positive integer or -1 for the default size. The + sizehint is used to optimize internal data structures. It doesn't limit + the maximum number of monitored events. + """ -sizehint must be a positive integer or -1 for the default size. The -sizehint is used to optimize internal data structures. It doesn't limit -the maximum number of monitored events. -""" def __new__(self, sizehint: int = ..., flags: int = ...) -> Self: ... def __enter__(self) -> Self: ... def __exit__( @@ -215,52 +219,55 @@ the maximum number of monitored events. def close(self) -> None: """Close the epoll control file descriptor. -Further operations on the epoll object will raise an exception. -""" + Further operations on the epoll object will raise an exception. + """ closed: bool def fileno(self) -> int: - """Return the epoll control file descriptor. -""" + """Return the epoll control file descriptor.""" + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: """Registers a new fd or raises an OSError if the fd is already registered. - fd - the target file descriptor of the operation - eventmask - a bit set composed of the various EPOLL constants + fd + the target file descriptor of the operation + eventmask + a bit set composed of the various EPOLL constants + + The epoll interface supports all file descriptors that support poll. + """ -The epoll interface supports all file descriptors that support poll. -""" def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: """Modify event mask for a registered file descriptor. - fd - the target file descriptor of the operation - eventmask - a bit set composed of the various EPOLL constants -""" + fd + the target file descriptor of the operation + eventmask + a bit set composed of the various EPOLL constants + """ + def unregister(self, fd: FileDescriptorLike) -> None: """Remove a registered file descriptor from the epoll object. - fd - the target file descriptor of the operation -""" + fd + the target file descriptor of the operation + """ + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: """Wait for events on the epoll file descriptor. 
- timeout - the maximum time to wait in seconds (as float); - a timeout of None or -1 makes poll wait indefinitely - maxevents - the maximum number of events returned; -1 means no limit + timeout + the maximum time to wait in seconds (as float); + a timeout of None or -1 makes poll wait indefinitely + maxevents + the maximum number of events returned; -1 means no limit + + Returns a list containing any descriptors that have events to report, + as a list of (fd, events) 2-tuples. + """ -Returns a list containing any descriptors that have events to report, -as a list of (fd, events) 2-tuples. -""" @classmethod def fromfd(cls, fd: FileDescriptorLike, /) -> epoll: - """Create an epoll object from a given control fd. -""" + """Create an epoll object from a given control fd.""" EPOLLERR: Final[int] EPOLLEXCLUSIVE: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi index c53f95de620a1..ed95b56b13847 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/selectors.pyi @@ -3,6 +3,7 @@ This module allows high-level and efficient I/O multiplexing, built upon the `select` module primitives. """ + import sys from _typeshed import FileDescriptor, FileDescriptorLike, Unused from abc import ABCMeta, abstractmethod @@ -20,7 +21,8 @@ class SelectorKey(NamedTuple): Object used to associate a file object to its backing file descriptor, selected event mask, and attached data. -""" + """ + fileobj: FileDescriptorLike fd: FileDescriptor events: _EventMask @@ -29,131 +31,137 @@ class SelectorKey(NamedTuple): class BaseSelector(metaclass=ABCMeta): """Selector abstract base class. -A selector supports registering file objects to be monitored for specific -I/O events. + A selector supports registering file objects to be monitored for specific + I/O events. -A file object is a file descriptor or any object with a `fileno()` method. -An arbitrary object can be attached to the file object, which can be used -for example to store context information, a callback, etc. + A file object is a file descriptor or any object with a `fileno()` method. + An arbitrary object can be attached to the file object, which can be used + for example to store context information, a callback, etc. + + A selector can use various implementations (select(), poll(), epoll()...) + depending on the platform. The default `Selector` class uses the most + efficient implementation on the current platform. + """ -A selector can use various implementations (select(), poll(), epoll()...) -depending on the platform. The default `Selector` class uses the most -efficient implementation on the current platform. -""" @abstractmethod def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: """Register a file object. 
-Parameters: -fileobj -- file object or file descriptor -events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) -data -- attached data + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data -Returns: -SelectorKey instance + Returns: + SelectorKey instance -Raises: -ValueError if events is invalid -KeyError if fileobj is already registered -OSError if fileobj is closed or otherwise is unacceptable to - the underlying system call (if a system call is made) + Raises: + ValueError if events is invalid + KeyError if fileobj is already registered + OSError if fileobj is closed or otherwise is unacceptable to + the underlying system call (if a system call is made) + + Note: + OSError may or may not be raised + """ -Note: -OSError may or may not be raised -""" @abstractmethod def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: """Unregister a file object. -Parameters: -fileobj -- file object or file descriptor + Parameters: + fileobj -- file object or file descriptor -Returns: -SelectorKey instance + Returns: + SelectorKey instance -Raises: -KeyError if fileobj is not registered + Raises: + KeyError if fileobj is not registered + + Note: + If fileobj is registered but has since been closed this does + *not* raise OSError (even if the wrapped syscall does) + """ -Note: -If fileobj is registered but has since been closed this does -*not* raise OSError (even if the wrapped syscall does) -""" def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: """Change a registered file object monitored events or attached data. -Parameters: -fileobj -- file object or file descriptor -events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) -data -- attached data + Parameters: + fileobj -- file object or file descriptor + events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) + data -- attached data -Returns: -SelectorKey instance + Returns: + SelectorKey instance + + Raises: + Anything that unregister() or register() raises + """ -Raises: -Anything that unregister() or register() raises -""" @abstractmethod def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: """Perform the actual selection, until some monitored file objects are -ready or a timeout expires. - -Parameters: -timeout -- if timeout > 0, this specifies the maximum wait time, in - seconds - if timeout <= 0, the select() call won't block, and will - report the currently ready file objects - if timeout is None, select() will block until a monitored - file object becomes ready - -Returns: -list of (key, events) for ready file objects -`events` is a bitwise mask of EVENT_READ|EVENT_WRITE -""" + ready or a timeout expires. + + Parameters: + timeout -- if timeout > 0, this specifies the maximum wait time, in + seconds + if timeout <= 0, the select() call won't block, and will + report the currently ready file objects + if timeout is None, select() will block until a monitored + file object becomes ready + + Returns: + list of (key, events) for ready file objects + `events` is a bitwise mask of EVENT_READ|EVENT_WRITE + """ + def close(self) -> None: """Close the selector. -This must be called to make sure that any underlying resource is freed. -""" + This must be called to make sure that any underlying resource is freed. 
+ """ + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: """Return the key associated to a registered file object. -Returns: -SelectorKey for this file object -""" + Returns: + SelectorKey for this file object + """ + @abstractmethod def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: - """Return a mapping of file objects to selector keys. -""" + """Return a mapping of file objects to selector keys.""" + def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... class _BaseSelectorImpl(BaseSelector, metaclass=ABCMeta): - """Base selector implementation. -""" + """Base selector implementation.""" + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class SelectSelector(_BaseSelectorImpl): - """Select-based selector. -""" + """Select-based selector.""" + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... class _PollLikeSelector(_BaseSelectorImpl): - """Base class shared between poll, epoll and devpoll selectors. -""" + """Base class shared between poll, epoll and devpoll selectors.""" + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": class PollSelector(_PollLikeSelector): - """Poll-based selector. -""" + """Poll-based selector.""" if sys.platform == "linux": class EpollSelector(_PollLikeSelector): - """Epoll-based selector. -""" + """Epoll-based selector.""" + def fileno(self) -> int: ... if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": @@ -163,8 +171,8 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win if sys.platform != "win32" and sys.platform != "linux": class KqueueSelector(_BaseSelectorImpl): - """Kqueue-based selector. -""" + """Kqueue-based selector.""" + def fileno(self) -> int: ... def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... @@ -172,8 +180,8 @@ if sys.platform != "win32" and sys.platform != "linux": # The runtime logic is more fine-grained than a `sys.platform` check; # not really expressible in the stubs class DefaultSelector(_BaseSelectorImpl): - """Epoll-based selector. -""" + """Epoll-based selector.""" + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... if sys.platform != "win32": def fileno(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi index c2cefe1ab2540..7fd7398162125 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shelve.pyi @@ -55,6 +55,7 @@ entries that you access. You can call d.sync() to write back all the entries in the cache, and empty the cache (d.sync() also synchronizes the persistent dictionary on disk, if feasible). """ + import sys from _typeshed import StrOrBytesPath from collections.abc import Iterator, MutableMapping @@ -71,9 +72,10 @@ _VT = TypeVar("_VT") class Shelf(MutableMapping[str, _VT]): """Base class for shelf implementations. -This is initialized with a dictionary-like object. -See the module's __doc__ string for an overview of the interface. 
-""" + This is initialized with a dictionary-like object. + See the module's __doc__ string for an overview of the interface. + """ + def __init__( self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" ) -> None: ... @@ -100,15 +102,16 @@ See the module's __doc__ string for an overview of the interface. class BsdDbShelf(Shelf[_VT]): """Shelf implementation using the "BSD" db interface. -This adds methods first(), next(), previous(), last() and -set_location() that have no counterpart in [g]dbm databases. + This adds methods first(), next(), previous(), last() and + set_location() that have no counterpart in [g]dbm databases. -The actual database must be opened using one of the "bsddb" -modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or -bsddb.rnopen) and passed to the constructor. + The actual database must be opened using one of the "bsddb" + modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or + bsddb.rnopen) and passed to the constructor. + + See the module's __doc__ string for an overview of the interface. + """ -See the module's __doc__ string for an overview of the interface. -""" def set_location(self, key: str) -> tuple[str, _VT]: ... def next(self) -> tuple[str, _VT]: ... def previous(self) -> tuple[str, _VT]: ... @@ -118,9 +121,10 @@ See the module's __doc__ string for an overview of the interface. class DbfilenameShelf(Shelf[_VT]): """Shelf implementation using the "dbm" generic dbm interface. -This is initialized with the filename for the dbm database. -See the module's __doc__ string for an overview of the interface. -""" + This is initialized with the filename for the dbm database. + See the module's __doc__ string for an overview of the interface. + """ + if sys.version_info >= (3, 11): def __init__( self, filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False @@ -129,31 +133,29 @@ See the module's __doc__ string for an overview of the interface. def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... if sys.version_info >= (3, 11): - def open( - filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False - ) -> Shelf[Any]: + def open(filename: StrOrBytesPath, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: """Open a persistent dictionary for reading and writing. -The filename parameter is the base filename for the underlying -database. As a side-effect, an extension may be added to the -filename and more than one file may be created. The optional flag -parameter has the same interpretation as the flag parameter of -dbm.open(). The optional protocol parameter specifies the -version of the pickle protocol. + The filename parameter is the base filename for the underlying + database. As a side-effect, an extension may be added to the + filename and more than one file may be created. The optional flag + parameter has the same interpretation as the flag parameter of + dbm.open(). The optional protocol parameter specifies the + version of the pickle protocol. -See the module's __doc__ string for an overview of the interface. -""" + See the module's __doc__ string for an overview of the interface. + """ else: def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: """Open a persistent dictionary for reading and writing. 
- The filename parameter is the base filename for the underlying - database. As a side-effect, an extension may be added to the - filename and more than one file may be created. The optional flag - parameter has the same interpretation as the flag parameter of - dbm.open(). The optional protocol parameter specifies the - version of the pickle protocol. + The filename parameter is the base filename for the underlying + database. As a side-effect, an extension may be added to the + filename and more than one file may be created. The optional flag + parameter has the same interpretation as the flag parameter of + dbm.open(). The optional protocol parameter specifies the + version of the pickle protocol. - See the module's __doc__ string for an overview of the interface. - """ + See the module's __doc__ string for an overview of the interface. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi index 9adebbb7ef8d5..732cd5b06ac6d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shlex.pyi @@ -1,5 +1,5 @@ -"""A lexical analyzer class for simple shell-like syntaxes. -""" +"""A lexical analyzer class for simple shell-like syntaxes.""" + import sys from collections import deque from collections.abc import Iterable @@ -17,29 +17,27 @@ class _ShlexInstream(Protocol): if sys.version_info >= (3, 12): def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax. -""" + """Split the string *s* using shell-like syntax.""" else: @overload def split(s: str | _ShlexInstream, comments: bool = False, posix: bool = True) -> list[str]: - """Split the string *s* using shell-like syntax. -""" + """Split the string *s* using shell-like syntax.""" + @overload @deprecated("Passing None for 's' to shlex.split() is deprecated and will raise an error in Python 3.12.") def split(s: None, comments: bool = False, posix: bool = True) -> list[str]: ... def join(split_command: Iterable[str]) -> str: - """Return a shell-escaped string from *split_command*. -""" + """Return a shell-escaped string from *split_command*.""" + def quote(s: str) -> str: - """Return a shell-escaped version of the string *s*. -""" + """Return a shell-escaped version of the string *s*.""" # TODO: Make generic over infile once PEP 696 is implemented. class shlex: - """A lexical analyzer class for simple shell-like syntaxes. -""" + """A lexical analyzer class for simple shell-like syntaxes.""" + commenters: str wordchars: str whitespace: str @@ -65,23 +63,23 @@ class shlex: punctuation_chars: bool | str = False, ) -> None: ... def get_token(self) -> str | None: - """Get a token from the input stream (or from stack if it's nonempty) -""" + """Get a token from the input stream (or from stack if it's nonempty)""" + def push_token(self, tok: str) -> None: - """Push a token onto the stack popped by the get_token method -""" + """Push a token onto the stack popped by the get_token method""" + def read_token(self) -> str | None: ... def sourcehook(self, newfile: str) -> tuple[str, TextIOWrapper] | None: - """Hook called on a filename to be sourced. -""" + """Hook called on a filename to be sourced.""" + def push_source(self, newstream: str | _ShlexInstream, newfile: str | None = None) -> None: - """Push an input source onto the lexer's input source stack. 
-""" + """Push an input source onto the lexer's input source stack.""" + def pop_source(self) -> None: - """Pop the input source stack. -""" + """Pop the input source stack.""" + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: - """Emit a C-compiler-like, Emacs-friendly error-message leader. -""" + """Emit a C-compiler-like, Emacs-friendly error-message leader.""" + def __iter__(self) -> Self: ... def __next__(self) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi index 96fa1ac74df5e..8a79660dff987 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/shutil.pyi @@ -3,6 +3,7 @@ XXX The functions here don't copy the resource fork or other metadata on Mac. """ + import os import sys from _typeshed import BytesPath, ExcInfo, FileDescriptorOrPath, MaybeNone, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite @@ -46,95 +47,101 @@ _StrPathT = TypeVar("_StrPathT", bound=StrPath) _BytesPathT = TypeVar("_BytesPathT", bound=BytesPath) class Error(OSError): ... + class SameFileError(Error): - """Raised when source and destination are the same file. -""" + """Raised when source and destination are the same file.""" + class SpecialFileError(OSError): """Raised when trying to do a kind of operation (e.g. copying) which is -not supported on a special file (e.g. a named pipe) -""" + not supported on a special file (e.g. a named pipe) + """ if sys.version_info >= (3, 14): ExecError = RuntimeError # Deprecated in Python 3.14; removal scheduled for Python 3.16 else: class ExecError(OSError): - """Raised when a command could not be executed -""" + """Raised when a command could not be executed""" class ReadError(OSError): - """Raised when an archive cannot be read -""" + """Raised when an archive cannot be read""" + class RegistryError(Exception): """Raised when a registry operation with the archiving -and unpacking registries fails -""" + and unpacking registries fails + """ def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: - """copy data from file-like object fsrc to file-like object fdst -""" + """copy data from file-like object fsrc to file-like object fdst""" + def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: """Copy data from src to dst in the most efficient way possible. -If follow_symlinks is not set and src is a symbolic link, a new -symlink will be created instead of copying the file it points to. + If follow_symlinks is not set and src is a symbolic link, a new + symlink will be created instead of copying the file it points to. + + """ -""" def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: """Copy mode bits from src to dst. -If follow_symlinks is not set, symlinks aren't followed if and only -if both `src` and `dst` are symlinks. If `lchmod` isn't available -(e.g. Linux) this method does nothing. + If follow_symlinks is not set, symlinks aren't followed if and only + if both `src` and `dst` are symlinks. If `lchmod` isn't available + (e.g. Linux) this method does nothing. + + """ -""" def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: """Copy file metadata -Copy the permission bits, last access time, last modification time, and -flags from `src` to `dst`. 
On Linux, copystat() also copies the "extended -attributes" where possible. The file contents, owner, and group are -unaffected. `src` and `dst` are path-like objects or path names given as -strings. + Copy the permission bits, last access time, last modification time, and + flags from `src` to `dst`. On Linux, copystat() also copies the "extended + attributes" where possible. The file contents, owner, and group are + unaffected. `src` and `dst` are path-like objects or path names given as + strings. + + If the optional flag `follow_symlinks` is not set, symlinks aren't + followed if and only if both `src` and `dst` are symlinks. + """ -If the optional flag `follow_symlinks` is not set, symlinks aren't -followed if and only if both `src` and `dst` are symlinks. -""" @overload def copy(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: """Copy data and mode bits ("cp src dst"). Return the file's destination. -The destination may be a directory. + The destination may be a directory. -If follow_symlinks is false, symlinks won't be followed. This -resembles GNU's "cp -P src dst". + If follow_symlinks is false, symlinks won't be followed. This + resembles GNU's "cp -P src dst". -If source and destination are the same file, a SameFileError will be -raised. + If source and destination are the same file, a SameFileError will be + raised. + + """ -""" @overload def copy(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... @overload def copy2(src: StrPath, dst: _StrPathT, *, follow_symlinks: bool = True) -> _StrPathT | str: """Copy data and metadata. Return the file's destination. -Metadata is copied with copystat(). Please see the copystat function -for more information. + Metadata is copied with copystat(). Please see the copystat function + for more information. -The destination may be a directory. + The destination may be a directory. + + If follow_symlinks is false, symlinks won't be followed. This + resembles GNU's "cp -P src dst". + """ -If follow_symlinks is false, symlinks won't be followed. This -resembles GNU's "cp -P src dst". -""" @overload def copy2(src: BytesPath, dst: _BytesPathT, *, follow_symlinks: bool = True) -> _BytesPathT | bytes: ... def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: """Function that can be used as copytree() ignore parameter. -Patterns is a sequence of glob-style patterns -that are used to exclude files -""" + Patterns is a sequence of glob-style patterns + that are used to exclude files + """ + def copytree( src: StrPath, dst: _StrPathT, @@ -146,42 +153,42 @@ def copytree( ) -> _StrPathT: """Recursively copy a directory tree and return the destination directory. -If exception(s) occur, an Error is raised with a list of reasons. - -If the optional symlinks flag is true, symbolic links in the -source tree result in symbolic links in the destination tree; if -it is false, the contents of the files pointed to by symbolic -links are copied. If the file pointed to by the symlink doesn't -exist, an exception will be added in the list of errors raised in -an Error exception at the end of the copy process. - -You can set the optional ignore_dangling_symlinks flag to true if you -want to silence this exception. Notice that this has no effect on -platforms that don't support os.symlink. - -The optional ignore argument is a callable. 
If given, it -is called with the `src` parameter, which is the directory -being visited by copytree(), and `names` which is the list of -`src` contents, as returned by os.listdir(): - - callable(src, names) -> ignored_names - -Since copytree() is called recursively, the callable will be -called once for each directory that is copied. It returns a -list of names relative to the `src` directory that should -not be copied. - -The optional copy_function argument is a callable that will be used -to copy each file. It will be called with the source path and the -destination path as arguments. By default, copy2() is used, but any -function that supports the same signature (like copy()) can be used. - -If dirs_exist_ok is false (the default) and `dst` already exists, a -`FileExistsError` is raised. If `dirs_exist_ok` is true, the copying -operation will continue if it encounters existing directories, and files -within the `dst` tree will be overwritten by corresponding files from the -`src` tree. -""" + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed to by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. + + If dirs_exist_ok is false (the default) and `dst` already exists, a + `FileExistsError` is raised. If `dirs_exist_ok` is true, the copying + operation will continue if it encounters existing directories, and files + within the `dst` tree will be overwritten by corresponding files from the + `src` tree. + """ _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], str, ExcInfo], object] _OnExcCallback: TypeAlias = Callable[[Callable[..., Any], str, BaseException], object] @@ -245,34 +252,34 @@ _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], # (#6832) def move(src: StrPath, dst: _StrPathT, copy_function: _CopyFn = ...) -> _StrPathT | str | MaybeNone: """Recursively move a file or directory to another location. This is -similar to the Unix "mv" command. Return the file or directory's -destination. + similar to the Unix "mv" command. Return the file or directory's + destination. -If dst is an existing directory or a symlink to a directory, then src is -moved inside that directory. The destination path in that directory must -not already exist. 
+ If dst is an existing directory or a symlink to a directory, then src is + moved inside that directory. The destination path in that directory must + not already exist. -If dst already exists but is not a directory, it may be overwritten -depending on os.rename() semantics. + If dst already exists but is not a directory, it may be overwritten + depending on os.rename() semantics. -If the destination is on our current filesystem, then rename() is used. -Otherwise, src is copied to the destination and then removed. Symlinks are -recreated under the new name if os.rename() fails because of cross -filesystem renames. + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. Symlinks are + recreated under the new name if os.rename() fails because of cross + filesystem renames. -The optional `copy_function` argument is a callable that will be used -to copy the source or it will be delegated to `copytree`. -By default, copy2() is used, but any function that supports the same -signature (like copy()) can be used. + The optional `copy_function` argument is a callable that will be used + to copy the source or it will be delegated to `copytree`. + By default, copy2() is used, but any function that supports the same + signature (like copy()) can be used. -A lot more could be done here... A look at a mv.c shows a lot of -the issues this implementation glosses over. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. -""" + """ class _ntuple_diskusage(NamedTuple): - """usage(total, used, free) -""" + """usage(total, used, free)""" + total: int used: int free: int @@ -280,9 +287,9 @@ class _ntuple_diskusage(NamedTuple): def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: """Return disk usage statistics about the given path. -Returned value is a named tuple with attributes 'total', 'used' and -'free', which are the amount of total, used and free space, in bytes. -""" + Returned value is a named tuple with attributes 'total', 'used' and + 'free', which are the amount of total, used and free space, in bytes. + """ # While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's @@ -299,16 +306,17 @@ if sys.version_info >= (3, 13): ) -> None: """Change owner user and group of the given path. -user and group can be the uid/gid or the user/group names, and in that case, -they are converted to their respective uid/gid. + user and group can be the uid/gid or the user/group names, and in that case, + they are converted to their respective uid/gid. -If dir_fd is set, it should be an open file descriptor to the directory to -be used as the root of *path* if it is relative. + If dir_fd is set, it should be an open file descriptor to the directory to + be used as the root of *path* if it is relative. + + If follow_symlinks is set to False and the last element of the path is a + symbolic link, chown will modify the link itself and not the file being + referenced by the link. + """ -If follow_symlinks is set to False and the last element of the path is a -symbolic link, chown will modify the link itself and not the file being -referenced by the link. -""" @overload def chown( path: FileDescriptorOrPath, @@ -332,9 +340,10 @@ else: def chown(path: FileDescriptorOrPath, user: str | int, group: None = None) -> None: """Change owner user and group of the given path. 
- user and group can be the uid/gid or the user/group names, and in that case, - they are converted to their respective uid/gid. - """ + user and group can be the uid/gid or the user/group names, and in that case, + they are converted to their respective uid/gid. + """ + @overload def chown(path: FileDescriptorOrPath, user: None = None, *, group: str | int) -> None: ... @overload @@ -347,6 +356,18 @@ if sys.platform == "win32" and sys.version_info < (3, 12): @deprecated("On Windows before Python 3.12, using a PathLike as `cmd` would always fail or return `None`.") def which(cmd: os.PathLike[str], mode: int = 1, path: StrPath | None = None) -> NoReturn: """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + +@overload +def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: + """Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such file. @@ -356,17 +377,6 @@ if sys.platform == "win32" and sys.version_info < (3, 12): """ -@overload -def which(cmd: StrPath, mode: int = 1, path: StrPath | None = None) -> str | None: - """Given a command, mode, and a PATH string, return the path which -conforms to the given mode on the PATH, or None if there is no such -file. - -`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result -of os.environ.get("PATH"), or can be overridden with a custom search -path. - -""" @overload def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... def make_archive( @@ -382,37 +392,40 @@ def make_archive( ) -> str: """Create an archive file (eg. zip or tar). -'base_name' is the name of the file to create, minus any format-specific -extension; 'format' is the archive format: one of "zip", "tar", "gztar", -"bztar", "xztar", or "zstdtar". Or any other registered format. + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "gztar", + "bztar", "xztar", or "zstdtar". Or any other registered format. -'root_dir' is a directory that will be the root directory of the -archive; ie. we typically chdir into 'root_dir' before creating the -archive. 'base_dir' is the directory where we start archiving from; -ie. 'base_dir' will be the common prefix of all files and -directories in the archive. 'root_dir' and 'base_dir' both default -to the current directory. Returns the name of the archive file. + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. + """ -'owner' and 'group' are used when creating a tar archive. By default, -uses the current owner and group. -""" def get_archive_formats() -> list[tuple[str, str]]: """Returns a list of supported formats for archiving and unarchiving. 
-Each element of the returned sequence is a tuple (name, description) -""" + Each element of the returned sequence is a tuple (name, description) + """ + @overload def register_archive_format( name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" ) -> None: """Registers an archive format. -name is the name of the format. function is the callable that will be -used to create archives. If provided, extra_args is a sequence of -(name, value) tuples that will be passed as arguments to the callable. -description can be provided to describe the format, and will be returned -by the get_archive_formats() function. -""" + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + @overload def register_archive_format( name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" @@ -423,21 +436,22 @@ def unpack_archive( ) -> None: """Unpack an archive. -`filename` is the name of the archive. + `filename` is the name of the archive. -`extract_dir` is the name of the target directory, where the archive -is unpacked. If not provided, the current working directory is used. + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. -`format` is the archive format: one of "zip", "tar", "gztar", "bztar", -"xztar", or "zstdtar". Or any other registered format. If not provided, -unpack_archive will use the filename extension and see if an unpacker -was registered for that extension. + `format` is the archive format: one of "zip", "tar", "gztar", "bztar", + "xztar", or "zstdtar". Or any other registered format. If not provided, + unpack_archive will use the filename extension and see if an unpacker + was registered for that extension. -In case none is found, a ValueError is raised. + In case none is found, a ValueError is raised. + + If `filter` is given, it is passed to the underlying + extraction function. + """ -If `filter` is given, it is passed to the underlying -extraction function. -""" @overload def register_unpack_format( name: str, @@ -448,48 +462,50 @@ def register_unpack_format( ) -> None: """Registers an unpack format. -`name` is the name of the format. `extensions` is a list of extensions -corresponding to the format. + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. -`function` is the callable that will be -used to unpack archives. The callable will receive archives to unpack. -If it's unable to handle an archive, it needs to raise a ReadError -exception. + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ -If provided, `extra_args` is a sequence of -(name, value) tuples that will be passed as arguments to the callable. 
-description can be provided to describe the format, and will be returned -by the get_unpack_formats() function. -""" @overload def register_unpack_format( name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... def unregister_unpack_format(name: str) -> None: - """Removes the pack format from the registry. -""" + """Removes the pack format from the registry.""" + def get_unpack_formats() -> list[tuple[str, list[str], str]]: """Returns a list of supported formats for unpacking. -Each element of the returned sequence is a tuple -(name, extensions, description) -""" + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: """Get the size of the terminal window. -For each of the two dimensions, the environment variable, COLUMNS -and LINES respectively, is checked. If the variable is defined and -the value is a positive integer, it is used. + For each of the two dimensions, the environment variable, COLUMNS + and LINES respectively, is checked. If the variable is defined and + the value is a positive integer, it is used. -When COLUMNS or LINES is not defined, which is the common case, -the terminal connected to sys.__stdout__ is queried -by invoking os.get_terminal_size. + When COLUMNS or LINES is not defined, which is the common case, + the terminal connected to sys.__stdout__ is queried + by invoking os.get_terminal_size. -If the terminal size cannot be successfully queried, either because -the system doesn't support querying, or because we are not -connected to a terminal, the value given in fallback parameter -is used. Fallback defaults to (80, 24) which is the default -size used by many terminal emulators. + If the terminal size cannot be successfully queried, either because + the system doesn't support querying, or because we are not + connected to a terminal, the value given in fallback parameter + is used. Fallback defaults to (80, 24) which is the default + size used by many terminal emulators. -The value returned is a named tuple of type os.terminal_size. -""" + The value returned is a named tuple of type os.terminal_size. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi index 831c7a6b73f95..0a10912372264 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/signal.pyi @@ -9,8 +9,8 @@ from typing_extensions import Never, TypeAlias NSIG: int class Signals(IntEnum): - """An enumeration. -""" + """An enumeration.""" + SIGABRT = 6 SIGFPE = 8 SIGILL = 4 @@ -60,8 +60,8 @@ class Signals(IntEnum): SIGSTKFLT = 16 class Handlers(IntEnum): - """An enumeration. -""" + """An enumeration.""" + SIG_DFL = 0 SIG_IGN = 1 @@ -74,50 +74,52 @@ _HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | def default_int_handler(signalnum: int, frame: FrameType | None, /) -> Never: """The default handler for SIGINT installed by Python. -It raises KeyboardInterrupt. -""" + It raises KeyboardInterrupt. + """ if sys.version_info >= (3, 10): # arguments changed in 3.10.2 def getsignal(signalnum: _SIGNUM) -> _HANDLER: """Return the current action for the given signal. 
-The return value can be: - SIG_IGN -- if the signal is being ignored - SIG_DFL -- if the default action for the signal is in effect - None -- if an unknown handler is in effect - anything else -- the callable Python object used as a handler -""" + The return value can be: + SIG_IGN -- if the signal is being ignored + SIG_DFL -- if the default action for the signal is in effect + None -- if an unknown handler is in effect + anything else -- the callable Python object used as a handler + """ + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: """Set the action for the given signal. -The action can be SIG_DFL, SIG_IGN, or a callable Python object. -The previous action is returned. See getsignal() for possible return values. + The action can be SIG_DFL, SIG_IGN, or a callable Python object. + The previous action is returned. See getsignal() for possible return values. -*** IMPORTANT NOTICE *** -A signal handler function is called with two arguments: -the first is the signal number, the second is the interrupted stack frame. -""" + *** IMPORTANT NOTICE *** + A signal handler function is called with two arguments: + the first is the signal number, the second is the interrupted stack frame. + """ else: def getsignal(signalnum: _SIGNUM, /) -> _HANDLER: """Return the current action for the given signal. -The return value can be: - SIG_IGN -- if the signal is being ignored - SIG_DFL -- if the default action for the signal is in effect - None -- if an unknown handler is in effect - anything else -- the callable Python object used as a handler -""" + The return value can be: + SIG_IGN -- if the signal is being ignored + SIG_DFL -- if the default action for the signal is in effect + None -- if an unknown handler is in effect + anything else -- the callable Python object used as a handler + """ + def signal(signalnum: _SIGNUM, handler: _HANDLER, /) -> _HANDLER: """Set the action for the given signal. -The action can be SIG_DFL, SIG_IGN, or a callable Python object. -The previous action is returned. See getsignal() for possible return values. + The action can be SIG_DFL, SIG_IGN, or a callable Python object. + The previous action is returned. See getsignal() for possible return values. -*** IMPORTANT NOTICE *** -A signal handler function is called with two arguments: -the first is the signal number, the second is the interrupted stack frame. -""" + *** IMPORTANT NOTICE *** + A signal handler function is called with two arguments: + the first is the signal number, the second is the interrupted stack frame. + """ SIGABRT: Final = Signals.SIGABRT SIGFPE: Final = Signals.SIGFPE @@ -165,8 +167,8 @@ else: ITIMER_VIRTUAL: int class Sigmasks(IntEnum): - """An enumeration. -""" + """An enumeration.""" + SIG_BLOCK = 0 SIG_UNBLOCK = 1 SIG_SETMASK = 2 @@ -175,62 +177,61 @@ else: SIG_UNBLOCK: Final = Sigmasks.SIG_UNBLOCK SIG_SETMASK: Final = Sigmasks.SIG_SETMASK def alarm(seconds: int, /) -> int: - """Arrange for SIGALRM to arrive after the given number of seconds. -""" + """Arrange for SIGALRM to arrive after the given number of seconds.""" + def getitimer(which: int, /) -> tuple[float, float]: - """Returns current value of given itimer. -""" + """Returns current value of given itimer.""" + def pause() -> None: - """Wait until a signal arrives. -""" + """Wait until a signal arrives.""" + def pthread_kill(thread_id: int, signalnum: int, /) -> None: - """Send a signal to a thread. 
-""" + """Send a signal to a thread.""" if sys.version_info >= (3, 10): # arguments changed in 3.10.2 def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: - """Fetch and/or change the signal mask of the calling thread. -""" + """Fetch and/or change the signal mask of the calling thread.""" else: def pthread_sigmask(how: int, mask: Iterable[int], /) -> set[_SIGNUM]: - """Fetch and/or change the signal mask of the calling thread. -""" + """Fetch and/or change the signal mask of the calling thread.""" def setitimer(which: int, seconds: float, interval: float = 0.0, /) -> tuple[float, float]: """Sets given itimer (one of ITIMER_REAL, ITIMER_VIRTUAL or ITIMER_PROF). -The timer will fire after value seconds and after that every interval seconds. -The itimer can be cleared by setting seconds to zero. + The timer will fire after value seconds and after that every interval seconds. + The itimer can be cleared by setting seconds to zero. + + Returns old values as a tuple: (delay, interval). + """ -Returns old values as a tuple: (delay, interval). -""" def siginterrupt(signalnum: int, flag: bool, /) -> None: """Change system call restart behaviour. -If flag is False, system calls will be restarted when interrupted by -signal sig, else system calls will be interrupted. -""" + If flag is False, system calls will be restarted when interrupted by + signal sig, else system calls will be interrupted. + """ + def sigpending() -> Any: """Examine pending signals. -Returns a set of signal numbers that are pending for delivery to -the calling thread. -""" + Returns a set of signal numbers that are pending for delivery to + the calling thread. + """ if sys.version_info >= (3, 10): # argument changed in 3.10.2 def sigwait(sigset: Iterable[int]) -> _SIGNUM: """Wait for a signal. -Suspend execution of the calling thread until the delivery of one of the -signals specified in the signal set sigset. The function accepts the signal -and returns the signal number. -""" + Suspend execution of the calling thread until the delivery of one of the + signals specified in the signal set sigset. The function accepts the signal + and returns the signal number. + """ else: def sigwait(sigset: Iterable[int], /) -> _SIGNUM: """Wait for a signal. -Suspend execution of the calling thread until the delivery of one of the -signals specified in the signal set sigset. The function accepts the signal -and returns the signal number. -""" + Suspend execution of the calling thread until the delivery of one of the + signals specified in the signal set sigset. The function accepts the signal + and returns the signal number. + """ if sys.platform != "darwin": SIGCLD: Final = Signals.SIGCHLD # alias SIGPOLL: Final = Signals.SIGIO # alias @@ -244,79 +245,81 @@ and returns the signal number. class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): """struct_siginfo: Result from sigwaitinfo or sigtimedwait. -This object may be accessed either as a tuple of -(si_signo, si_code, si_errno, si_pid, si_uid, si_status, si_band), -or via the attributes si_signo, si_code, and so on. -""" + This object may be accessed either as a tuple of + (si_signo, si_code, si_errno, si_pid, si_uid, si_status, si_band), + or via the attributes si_signo, si_code, and so on. 
+ """ + if sys.version_info >= (3, 10): __match_args__: Final = ("si_signo", "si_code", "si_errno", "si_pid", "si_uid", "si_status", "si_band") @property def si_signo(self) -> int: - """signal number -""" + """signal number""" + @property def si_code(self) -> int: - """signal code -""" + """signal code""" + @property def si_errno(self) -> int: - """errno associated with this signal -""" + """errno associated with this signal""" + @property def si_pid(self) -> int: - """sending process ID -""" + """sending process ID""" + @property def si_uid(self) -> int: - """real user ID of sending process -""" + """real user ID of sending process""" + @property def si_status(self) -> int: - """exit value or signal -""" + """exit value or signal""" + @property def si_band(self) -> int: - """band event for SIGPOLL -""" + """band event for SIGPOLL""" def sigtimedwait(sigset: Iterable[int], timeout: float, /) -> struct_siginfo | None: """Like sigwaitinfo(), but with a timeout. -The timeout is specified in seconds, with floating-point numbers allowed. -""" + The timeout is specified in seconds, with floating-point numbers allowed. + """ + def sigwaitinfo(sigset: Iterable[int], /) -> struct_siginfo: """Wait synchronously until one of the signals in *sigset* is delivered. -Returns a struct_siginfo containing information about the signal. -""" + Returns a struct_siginfo containing information about the signal. + """ def strsignal(signalnum: _SIGNUM, /) -> str | None: """Return the system description of the given signal. -Returns the description of signal *signalnum*, such as "Interrupt" -for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no -description. Raises :exc:`ValueError` if *signalnum* is invalid. -""" + Returns the description of signal *signalnum*, such as "Interrupt" + for :const:`SIGINT`. Returns :const:`None` if *signalnum* has no + description. Raises :exc:`ValueError` if *signalnum* is invalid. + """ + def valid_signals() -> set[Signals]: """Return a set of valid signal numbers on this platform. -The signal numbers returned by this function can be safely passed to -functions like `pthread_sigmask`. -""" + The signal numbers returned by this function can be safely passed to + functions like `pthread_sigmask`. + """ + def raise_signal(signalnum: _SIGNUM, /) -> None: - """Send a signal to the executing process. -""" + """Send a signal to the executing process.""" + def set_wakeup_fd(fd: int, /, *, warn_on_full_buffer: bool = True) -> int: """Sets the fd to be written to (with the signal number) when a signal comes in. -A library can use this to wakeup select or poll. -The previous fd or -1 is returned. + A library can use this to wakeup select or poll. + The previous fd or -1 is returned. -The fd must be non-blocking. -""" + The fd must be non-blocking. + """ if sys.platform == "linux": def pidfd_send_signal(pidfd: int, sig: int, siginfo: None = None, flags: int = 0, /) -> None: - """Send a signal to a process referred to by a pid file descriptor. -""" + """Send a signal to a process referred to by a pid file descriptor.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi index c9cfed4f6e8da..9c302c7617a71 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/site.pyi @@ -68,6 +68,7 @@ named sitecustomize, which can perform arbitrary additional site-specific customizations. If this import fails with an ImportError exception, it is silently ignored. 
""" + import sys from _typeshed import StrPath from collections.abc import Iterable @@ -80,103 +81,112 @@ USER_BASE: str | None def main() -> None: """Add standard site-specific directories to the module search path. -This function is called automatically when this module is imported, -unless the python interpreter was started with the -S flag. -""" + This function is called automatically when this module is imported, + unless the python interpreter was started with the -S flag. + """ + def abs_paths() -> None: # undocumented - """Set all module __file__ and __cached__ attributes to an absolute path -""" + """Set all module __file__ and __cached__ attributes to an absolute path""" + def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: # undocumented """Process a .pth file within the site-packages directory: -For each line in the file, either combine it with sitedir to a path -and add that to known_paths, or execute it if it starts with 'import '. -""" + For each line in the file, either combine it with sitedir to a path + and add that to known_paths, or execute it if it starts with 'import '. + """ + def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: """Add 'sitedir' argument to sys.path if missing and handle .pth files in -'sitedir' -""" + 'sitedir' + """ + def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: # undocumented - """Add site-packages to sys.path -""" + """Add site-packages to sys.path""" + def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: # undocumented """Add a per user site-package to sys.path -Each user has its own python directory with site-packages in the -home directory. -""" + Each user has its own python directory with site-packages in the + home directory. + """ + def check_enableusersite() -> bool | None: # undocumented """Check if user site directory is safe for inclusion -The function tests for the command line flag (including environment var), -process uid/gid equal to effective uid/gid. + The function tests for the command line flag (including environment var), + process uid/gid equal to effective uid/gid. -None: Disabled for security reasons -False: Disabled by user (command line option) -True: Safe and enabled -""" + None: Disabled for security reasons + False: Disabled by user (command line option) + True: Safe and enabled + """ if sys.version_info >= (3, 13): def gethistoryfile() -> str: # undocumented """Check if the PYTHON_HISTORY environment variable is set and define -it as the .python_history file. If PYTHON_HISTORY is not set, use the -default .python_history file. -""" + it as the .python_history file. If PYTHON_HISTORY is not set, use the + default .python_history file. + """ def enablerlcompleter() -> None: # undocumented """Enable default readline configuration on interactive prompts, by -registering a sys.__interactivehook__. -""" + registering a sys.__interactivehook__. + """ if sys.version_info >= (3, 13): def register_readline() -> None: # undocumented """Configure readline completion on interactive prompts. -If the readline module can be imported, the hook will set the Tab key -as completion key and register ~/.python_history as history file. -This can be overridden in the sitecustomize or usercustomize module, -or in a PYTHONSTARTUP file. -""" + If the readline module can be imported, the hook will set the Tab key + as completion key and register ~/.python_history as history file. 
+ This can be overridden in the sitecustomize or usercustomize module, + or in a PYTHONSTARTUP file. + """ def execsitecustomize() -> None: # undocumented - """Run custom site specific code, if available. -""" + """Run custom site specific code, if available.""" + def execusercustomize() -> None: # undocumented - """Run custom user specific code, if available. -""" + """Run custom user specific code, if available.""" + def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: """Returns a list containing all global site-packages directories. -For each directory present in ``prefixes`` (or the global ``PREFIXES``), -this function will find its `site-packages` subdirectory depending on the -system environment, and will return a list of full paths. -""" + For each directory present in ``prefixes`` (or the global ``PREFIXES``), + this function will find its `site-packages` subdirectory depending on the + system environment, and will return a list of full paths. + """ + def getuserbase() -> str: """Returns the `user base` directory path. -The `user base` directory can be used to store data. If the global -variable ``USER_BASE`` is not initialized yet, this function will also set -it. -""" + The `user base` directory can be used to store data. If the global + variable ``USER_BASE`` is not initialized yet, this function will also set + it. + """ + def getusersitepackages() -> str: """Returns the user-specific site-packages directory path. -If the global variable ``USER_SITE`` is not initialized yet, this -function will also set it. -""" + If the global variable ``USER_SITE`` is not initialized yet, this + function will also set it. + """ + def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented def removeduppaths() -> set[str]: # undocumented """Remove duplicate entries from sys.path along with making them -absolute -""" + absolute + """ + def setcopyright() -> None: # undocumented - """Set 'copyright' and 'credits' in builtins -""" + """Set 'copyright' and 'credits' in builtins""" + def sethelper() -> None: ... # undocumented def setquit() -> None: # undocumented """Define new builtins 'quit' and 'exit'. -These are objects which make the interpreter exit when called. -The repr of each object contains a hint at how it works. + These are objects which make the interpreter exit when called. + The repr of each object contains a hint at how it works. + + """ -""" def venv(known_paths: set[str] | None) -> set[str] | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi index b18979838eda6..b201659d1b2fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtpd.pyi @@ -42,6 +42,7 @@ If localhost is not given then `localhost' is used, and if localport is not given then 8025 is used. If remotehost is not given then `localhost' is used, and if remoteport is not given, then 25 is used. """ + import asynchat import asyncore import socket @@ -121,9 +122,7 @@ class SMTPServer(asyncore.dispatcher): decode_data: bool = False, ) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... - def process_message( - self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any - ) -> str | None: + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any) -> str | None: """Override this abstract method to handle messages from the client. 
peer is a tuple containing (ipaddr, port) of the client that made the diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi index 35f8721a3855e..d2df39ff39189 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/smtplib.pyi @@ -29,6 +29,7 @@ Example: (250, "Somebody OverHere ") >>> s.quit() """ + import sys from _socket import _Address as _SourceAddress from _typeshed import ReadableBuffer, SizedBuffer @@ -69,30 +70,32 @@ bCRLF: Final[bytes] OLDSTYLE_AUTH: Final[Pattern[str]] class SMTPException(OSError): - """Base class for all exceptions raised by this module. -""" + """Base class for all exceptions raised by this module.""" + class SMTPNotSupportedError(SMTPException): """The command or option is not supported by the SMTP server. -This exception is raised when an attempt is made to run a command or a -command with an option which is not supported by the server. -""" + This exception is raised when an attempt is made to run a command or a + command with an option which is not supported by the server. + """ + class SMTPServerDisconnected(SMTPException): """Not connected to any SMTP server. -This exception is raised when the server unexpectedly disconnects, -or when an attempt is made to use the SMTP instance before -connecting it to a server. -""" + This exception is raised when the server unexpectedly disconnects, + or when an attempt is made to use the SMTP instance before + connecting it to a server. + """ class SMTPResponseException(SMTPException): """Base class for all exceptions that include an SMTP error code. -These exceptions are generated in some instances when the SMTP -server returns an error code. The error code is stored in the -`smtp_code' attribute of the error, and the `smtp_error' attribute -is set to the error message. -""" + These exceptions are generated in some instances when the SMTP + server returns an error code. The error code is stored in the + `smtp_code' attribute of the error, and the `smtp_error' attribute + is set to the error message. + """ + smtp_code: int smtp_error: bytes | str args: tuple[int, bytes | str] | tuple[int, bytes, str] @@ -101,9 +104,10 @@ is set to the error message. class SMTPSenderRefused(SMTPResponseException): """Sender address refused. -In addition to the attributes set by on all SMTPResponseException -exceptions, this sets 'sender' to the string that the SMTP refused. -""" + In addition to the attributes set by on all SMTPResponseException + exceptions, this sets 'sender' to the string that the SMTP refused. + """ + smtp_error: bytes sender: str args: tuple[int, bytes, str] @@ -112,41 +116,44 @@ exceptions, this sets 'sender' to the string that the SMTP refused. class SMTPRecipientsRefused(SMTPException): """All recipient addresses refused. -The errors for each recipient are accessible through the attribute -'recipients', which is a dictionary of exactly the same sort as -SMTP.sendmail() returns. -""" + The errors for each recipient are accessible through the attribute + 'recipients', which is a dictionary of exactly the same sort as + SMTP.sendmail() returns. + """ + recipients: _SendErrs args: tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... class SMTPDataError(SMTPResponseException): - """The SMTP server didn't accept the data. -""" + """The SMTP server didn't accept the data.""" + class SMTPConnectError(SMTPResponseException): - """Error during connection establishment. 
-""" + """Error during connection establishment.""" + class SMTPHeloError(SMTPResponseException): - """The server refused our HELO reply. -""" + """The server refused our HELO reply.""" + class SMTPAuthenticationError(SMTPResponseException): """Authentication error. -Most probably the server didn't accept the username/password -combination provided. -""" + Most probably the server didn't accept the username/password + combination provided. + """ def quoteaddr(addrstring: str) -> str: """Quote a subset of the email addresses defined by RFC 821. -Should be able to handle anything email.utils.parseaddr can handle. -""" + Should be able to handle anything email.utils.parseaddr can handle. + """ + def quotedata(data: str) -> str: """Quote data for email. -Double leading '.', and change Unix newline '\\n', or Mac '\\r' into -internet CRLF end-of-line. -""" + Double leading '.', and change Unix newline '\\n', or Mac '\\r' into + internet CRLF end-of-line. + """ + @type_check_only class _AuthObject(Protocol): @overload @@ -156,33 +163,34 @@ class _AuthObject(Protocol): class SMTP: """This class manages a connection to an SMTP or ESMTP server. -SMTP Objects: - SMTP objects have the following attributes: - helo_resp - This is the message given by the server in response to the - most recent HELO command. - - ehlo_resp - This is the message given by the server in response to the - most recent EHLO command. This is usually multiline. - - does_esmtp - This is a True value _after you do an EHLO command_, if the - server supports ESMTP. - - esmtp_features - This is a dictionary, which, if the server supports ESMTP, - will _after you do an EHLO command_, contain the names of the - SMTP service extensions this server supports, and their - parameters (if any). - - Note, all extension names are mapped to lower case in the - dictionary. - - See each method's docstrings for details. In general, there is a - method of the same name to perform each SMTP command. There is also a - method called 'sendmail' that will do an entire mail transaction. + SMTP Objects: + SMTP objects have the following attributes: + helo_resp + This is the message given by the server in response to the + most recent HELO command. + + ehlo_resp + This is the message given by the server in response to the + most recent EHLO command. This is usually multiline. + + does_esmtp + This is a True value _after you do an EHLO command_, if the + server supports ESMTP. + + esmtp_features + This is a dictionary, which, if the server supports ESMTP, + will _after you do an EHLO command_, contain the names of the + SMTP service extensions this server supports, and their + parameters (if any). + + Note, all extension names are mapped to lower case in the + dictionary. + + See each method's docstrings for details. In general, there is a + method of the same name to perform each SMTP command. There is also a + method called 'sendmail' that will do an entire mail transaction. """ + debuglevel: int sock: socket | None # Type of file should match what socket.makefile() returns @@ -207,19 +215,20 @@ SMTP Objects: ) -> None: """Initialize a new instance. -If specified, `host` is the name of the remote host to which to -connect. If specified, `port` specifies the port to which to connect. -By default, smtplib.SMTP_PORT is used. If a host is specified the -connect method is called, and if it returns anything other than a -success code an SMTPConnectError is raised. If specified, -`local_hostname` is used as the FQDN of the local host in the HELO/EHLO -command. 
Otherwise, the local hostname is found using -socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host, -port) for the socket to bind to as its source address before -connecting. If the host is '' and port is 0, the OS default behavior -will be used. + If specified, `host` is the name of the remote host to which to + connect. If specified, `port` specifies the port to which to connect. + By default, smtplib.SMTP_PORT is used. If a host is specified the + connect method is called, and if it returns anything other than a + success code an SMTPConnectError is raised. If specified, + `local_hostname` is used as the FQDN of the local host in the HELO/EHLO + command. Otherwise, the local hostname is found using + socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host, + port) for the socket to bind to as its source address before + connecting. If the host is '' and port is 0, the OS default behavior + will be used. + + """ -""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None @@ -227,207 +236,216 @@ will be used. def set_debuglevel(self, debuglevel: int) -> None: """Set the debug output level. -A non-false value results in debug messages for connection and for all -messages sent to and received from the server. + A non-false value results in debug messages for connection and for all + messages sent to and received from the server. + + """ -""" def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: """Connect to a host on a given port. -If the hostname ends with a colon (':') followed by a number, and -there is no port specified, that suffix will be stripped off and the -number interpreted as the port number to use. + If the hostname ends with a colon (':') followed by a number, and + there is no port specified, that suffix will be stripped off and the + number interpreted as the port number to use. -Note: This method is automatically invoked by __init__, if a host is -specified during instantiation. + Note: This method is automatically invoked by __init__, if a host is + specified during instantiation. + + """ -""" def send(self, s: ReadableBuffer | str) -> None: - """Send 's' to the server. -""" + """Send 's' to the server.""" + def putcmd(self, cmd: str, args: str = "") -> None: - """Send a command to the server. -""" + """Send a command to the server.""" + def getreply(self) -> _Reply: """Get a reply from the server. -Returns a tuple consisting of: + Returns a tuple consisting of: - - server response code (e.g. '250', or such, if all goes well) - Note: returns -1 if it can't read response code. + - server response code (e.g. '250', or such, if all goes well) + Note: returns -1 if it can't read response code. - - server response string corresponding to response code (multiline - responses are converted to a single, multiline string). + - server response string corresponding to response code (multiline + responses are converted to a single, multiline string). + + Raises SMTPServerDisconnected if end-of-file is reached. + """ -Raises SMTPServerDisconnected if end-of-file is reached. -""" def docmd(self, cmd: str, args: str = "") -> _Reply: - """Send a command, and return its response code. -""" + """Send a command, and return its response code.""" + def helo(self, name: str = "") -> _Reply: """SMTP 'helo' command. -Hostname to send for this command defaults to the FQDN of the local -host. 
-""" + Hostname to send for this command defaults to the FQDN of the local + host. + """ + def ehlo(self, name: str = "") -> _Reply: """SMTP 'ehlo' command. -Hostname to send for this command defaults to the FQDN of the local -host. -""" + Hostname to send for this command defaults to the FQDN of the local + host. + """ + def has_extn(self, opt: str) -> bool: - """Does the server support a given SMTP service extension? -""" + """Does the server support a given SMTP service extension?""" + def help(self, args: str = "") -> bytes: """SMTP 'help' command. -Returns help text from server. -""" + Returns help text from server. + """ + def rset(self) -> _Reply: - """SMTP 'rset' command -- resets session. -""" + """SMTP 'rset' command -- resets session.""" + def noop(self) -> _Reply: - """SMTP 'noop' command -- doesn't do anything :> -""" + """SMTP 'noop' command -- doesn't do anything :>""" + def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: """SMTP 'mail' command -- begins mail xfer session. -This method may raise the following exceptions: + This method may raise the following exceptions: + + SMTPNotSupportedError The options parameter includes 'SMTPUTF8' + but the SMTPUTF8 extension is not supported by + the server. + """ - SMTPNotSupportedError The options parameter includes 'SMTPUTF8' - but the SMTPUTF8 extension is not supported by - the server. -""" def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: - """SMTP 'rcpt' command -- indicates 1 recipient for this mail. -""" + """SMTP 'rcpt' command -- indicates 1 recipient for this mail.""" + def data(self, msg: ReadableBuffer | str) -> _Reply: """SMTP 'DATA' command -- sends message data to server. -Automatically quotes lines beginning with a period per rfc821. -Raises SMTPDataError if there is an unexpected reply to the -DATA command; the return value from this method is the final -response code received when the all data is sent. If msg -is a string, lone '\\r' and '\\n' characters are converted to -'\\r\\n' characters. If msg is bytes, it is transmitted as is. -""" + Automatically quotes lines beginning with a period per rfc821. + Raises SMTPDataError if there is an unexpected reply to the + DATA command; the return value from this method is the final + response code received when the all data is sent. If msg + is a string, lone '\\r' and '\\n' characters are converted to + '\\r\\n' characters. If msg is bytes, it is transmitted as is. + """ + def verify(self, address: str) -> _Reply: - """SMTP 'verify' command -- checks for address validity. -""" + """SMTP 'verify' command -- checks for address validity.""" vrfy = verify def expn(self, address: str) -> _Reply: - """SMTP 'expn' command -- expands a mailing list. -""" + """SMTP 'expn' command -- expands a mailing list.""" + def ehlo_or_helo_if_needed(self) -> None: """Call self.ehlo() and/or self.helo() if needed. -If there has been no previous EHLO or HELO command this session, this -method tries ESMTP EHLO first. + If there has been no previous EHLO or HELO command this session, this + method tries ESMTP EHLO first. -This method may raise the following exceptions: + This method may raise the following exceptions: - SMTPHeloError The server didn't reply properly to - the helo greeting. -""" + SMTPHeloError The server didn't reply properly to + the helo greeting. + """ user: str password: str def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: """Authentication command - requires response processing. 
-'mechanism' specifies which authentication mechanism is to -be used - the valid values are those listed in the 'auth' -element of 'esmtp_features'. + 'mechanism' specifies which authentication mechanism is to + be used - the valid values are those listed in the 'auth' + element of 'esmtp_features'. -'authobject' must be a callable object taking a single argument: + 'authobject' must be a callable object taking a single argument: - data = authobject(challenge) + data = authobject(challenge) -It will be called to process the server's challenge response; the -challenge argument it is passed will be a bytes. It should return -an ASCII string that will be base64 encoded and sent to the server. + It will be called to process the server's challenge response; the + challenge argument it is passed will be a bytes. It should return + an ASCII string that will be base64 encoded and sent to the server. + + Keyword arguments: + - initial_response_ok: Allow sending the RFC 4954 initial-response + to the AUTH command, if the authentication methods supports it. + """ -Keyword arguments: - - initial_response_ok: Allow sending the RFC 4954 initial-response - to the AUTH command, if the authentication methods supports it. -""" @overload def auth_cram_md5(self, challenge: None = None) -> None: """Authobject to use with CRAM-MD5 authentication. Requires self.user -and self.password to be set. -""" + and self.password to be set. + """ + @overload def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: """Authobject to use with PLAIN authentication. Requires self.user and -self.password to be set. -""" + self.password to be set. + """ + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: """Authobject to use with LOGIN authentication. Requires self.user and -self.password to be set. -""" + self.password to be set. + """ + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: """Log in on an SMTP server that requires authentication. -The arguments are: - - user: The user name to authenticate with. - - password: The password for the authentication. + The arguments are: + - user: The user name to authenticate with. + - password: The password for the authentication. -Keyword arguments: - - initial_response_ok: Allow sending the RFC 4954 initial-response - to the AUTH command, if the authentication methods supports it. + Keyword arguments: + - initial_response_ok: Allow sending the RFC 4954 initial-response + to the AUTH command, if the authentication methods supports it. -If there has been no previous EHLO or HELO command this session, this -method tries ESMTP EHLO first. + If there has been no previous EHLO or HELO command this session, this + method tries ESMTP EHLO first. -This method will return normally if the authentication was successful. + This method will return normally if the authentication was successful. -This method may raise the following exceptions: + This method may raise the following exceptions: - SMTPHeloError The server didn't reply properly to - the helo greeting. - SMTPAuthenticationError The server didn't accept the username/ - password combination. - SMTPNotSupportedError The AUTH command is not supported by the - server. - SMTPException No suitable authentication method was - found. -""" + SMTPHeloError The server didn't reply properly to + the helo greeting. + SMTPAuthenticationError The server didn't accept the username/ + password combination. 
+ SMTPNotSupportedError The AUTH command is not supported by the + server. + SMTPException No suitable authentication method was + found. + """ if sys.version_info >= (3, 12): def starttls(self, *, context: SSLContext | None = None) -> _Reply: """Puts the connection to the SMTP server into TLS mode. -If there has been no previous EHLO or HELO command this session, this -method tries ESMTP EHLO first. + If there has been no previous EHLO or HELO command this session, this + method tries ESMTP EHLO first. -If the server supports TLS, this will encrypt the rest of the SMTP -session. If you provide the context parameter, -the identity of the SMTP server and client can be checked. This, -however, depends on whether the socket module really checks the -certificates. + If the server supports TLS, this will encrypt the rest of the SMTP + session. If you provide the context parameter, + the identity of the SMTP server and client can be checked. This, + however, depends on whether the socket module really checks the + certificates. -This method may raise the following exceptions: + This method may raise the following exceptions: - SMTPHeloError The server didn't reply properly to - the helo greeting. -""" + SMTPHeloError The server didn't reply properly to + the helo greeting. + """ else: - def starttls( - self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None - ) -> _Reply: + def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: """Puts the connection to the SMTP server into TLS mode. - If there has been no previous EHLO or HELO command this session, this - method tries ESMTP EHLO first. + If there has been no previous EHLO or HELO command this session, this + method tries ESMTP EHLO first. - If the server supports TLS, this will encrypt the rest of the SMTP - session. If you provide the keyfile and certfile parameters, - the identity of the SMTP server and client can be checked. This, - however, depends on whether the socket module really checks the - certificates. + If the server supports TLS, this will encrypt the rest of the SMTP + session. If you provide the keyfile and certfile parameters, + the identity of the SMTP server and client can be checked. This, + however, depends on whether the socket module really checks the + certificates. - This method may raise the following exceptions: + This method may raise the following exceptions: - SMTPHeloError The server didn't reply properly to - the helo greeting. - """ + SMTPHeloError The server didn't reply properly to + the helo greeting. + """ def sendmail( self, @@ -499,6 +517,7 @@ of the four addresses, and one was rejected, with the error code empty dictionary. """ + def send_message( self, msg: _Message, @@ -509,40 +528,41 @@ empty dictionary. ) -> _SendErrs: """Converts message to a bytestring and passes it to sendmail. -The arguments are as for sendmail, except that msg is an -email.message.Message object. If from_addr is None or to_addrs is -None, these arguments are taken from the headers of the Message as -described in RFC 2822 (a ValueError is raised if there is more than -one set of 'Resent-' headers). Regardless of the values of from_addr and -to_addr, any Bcc field (or Resent-Bcc field, when the Message is a -resent) of the Message object won't be transmitted. The Message -object is then serialized using email.generator.BytesGenerator and -sendmail is called to transmit the message. 
If the sender or any of -the recipient addresses contain non-ASCII and the server advertises the -SMTPUTF8 capability, the policy is cloned with utf8 set to True for the -serialization, and SMTPUTF8 and BODY=8BITMIME are asserted on the send. -If the server does not support SMTPUTF8, an SMTPNotSupported error is -raised. Otherwise the generator is called without modifying the -policy. + The arguments are as for sendmail, except that msg is an + email.message.Message object. If from_addr is None or to_addrs is + None, these arguments are taken from the headers of the Message as + described in RFC 2822 (a ValueError is raised if there is more than + one set of 'Resent-' headers). Regardless of the values of from_addr and + to_addr, any Bcc field (or Resent-Bcc field, when the Message is a + resent) of the Message object won't be transmitted. The Message + object is then serialized using email.generator.BytesGenerator and + sendmail is called to transmit the message. If the sender or any of + the recipient addresses contain non-ASCII and the server advertises the + SMTPUTF8 capability, the policy is cloned with utf8 set to True for the + serialization, and SMTPUTF8 and BODY=8BITMIME are asserted on the send. + If the server does not support SMTPUTF8, an SMTPNotSupported error is + raised. Otherwise the generator is called without modifying the + policy. + + """ -""" def close(self) -> None: - """Close the connection to the SMTP server. -""" + """Close the connection to the SMTP server.""" + def quit(self) -> _Reply: - """Terminate the SMTP session. -""" + """Terminate the SMTP session.""" class SMTP_SSL(SMTP): """This is a subclass derived from SMTP that connects over an SSL -encrypted socket (to use this class you need a socket module that was -compiled with SSL support). If host is not specified, '' (the local -host) is used. If port is omitted, the standard SMTP-over-SSL port -(465) is used. local_hostname and source_address have the same meaning -as they do in the SMTP class. context also optional, can contain a -SSLContext. + encrypted socket (to use this class you need a socket module that was + compiled with SSL support). If host is not specified, '' (the local + host) is used. If port is omitted, the standard SMTP-over-SSL port + (465) is used. local_hostname and source_address have the same meaning + as they do in the SMTP class. context also optional, can contain a + SSLContext. + + """ -""" keyfile: str | None certfile: str | None context: SSLContext @@ -575,17 +595,18 @@ LMTP_PORT: Final = 2003 class LMTP(SMTP): """LMTP - Local Mail Transfer Protocol -The LMTP protocol, which is very similar to ESMTP, is heavily based -on the standard SMTP client. It's common to use Unix sockets for -LMTP, so our connect() method must support that as well as a regular -host:port server. local_hostname and source_address have the same -meaning as they do in the SMTP class. To specify a Unix socket, -you must use an absolute path as the host, starting with a '/'. + The LMTP protocol, which is very similar to ESMTP, is heavily based + on the standard SMTP client. It's common to use Unix sockets for + LMTP, so our connect() method must support that as well as a regular + host:port server. local_hostname and source_address have the same + meaning as they do in the SMTP class. To specify a Unix socket, + you must use an absolute path as the host, starting with a '/'. + + Authentication is supported, using the regular SMTP mechanism. 
When + using a Unix socket, LMTP generally don't support or require any + authentication, but your mileage might vary. + """ -Authentication is supported, using the regular SMTP mechanism. When -using a Unix socket, LMTP generally don't support or require any -authentication, but your mileage might vary. -""" def __init__( self, host: str = "", @@ -594,5 +615,4 @@ authentication, but your mileage might vary. source_address: _SourceAddress | None = None, timeout: float = ..., ) -> None: - """Initialize a new instance. -""" + """Initialize a new instance.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi index a680cd23ecf84..e6863787046a8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sndhdr.pyi @@ -26,14 +26,15 @@ argument is "." (testing all files in the current directory). The option -r tells it to recurse down directories found inside explicitly given directories. """ + from _typeshed import StrOrBytesPath from typing import NamedTuple __all__ = ["what", "whathdr"] class SndHeaders(NamedTuple): - """SndHeaders(filetype, framerate, nchannels, nframes, sampwidth) -""" + """SndHeaders(filetype, framerate, nchannels, nframes, sampwidth)""" + filetype: str framerate: int nchannels: int @@ -41,8 +42,7 @@ class SndHeaders(NamedTuple): sampwidth: int | str def what(filename: StrOrBytesPath) -> SndHeaders | None: - """Guess the type of a sound file. -""" + """Guess the type of a sound file.""" + def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: - """Recognize sound headers. -""" + """Recognize sound headers.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi index b51f3da0582a9..ad2af8629207e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socket.pyi @@ -44,6 +44,7 @@ Integer constants: Many other constants may be defined; these may be used in calls to the setsockopt() and getsockopt() methods. """ + # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. @@ -1122,8 +1123,8 @@ else: class timeout(error): ... class AddressFamily(IntEnum): - """An enumeration. -""" + """An enumeration.""" + AF_INET = 2 AF_INET6 = 10 AF_APPLETALK = 5 @@ -1228,8 +1229,8 @@ if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darw AF_DIVERT: Final = AddressFamily.AF_DIVERT class SocketKind(IntEnum): - """An enumeration. -""" + """An enumeration.""" + SOCK_STREAM = 1 SOCK_DGRAM = 2 SOCK_RAW = 3 @@ -1249,8 +1250,8 @@ if sys.platform == "linux": SOCK_NONBLOCK: Final = SocketKind.SOCK_NONBLOCK class MsgFlag(IntFlag): - """An enumeration. -""" + """An enumeration.""" + MSG_CTRUNC = 8 MSG_DONTROUTE = 4 MSG_OOB = 1 @@ -1312,8 +1313,8 @@ if sys.platform != "win32" and sys.platform != "linux": MSG_EOF: Final = MsgFlag.MSG_EOF class AddressInfo(IntFlag): - """An enumeration. -""" + """An enumeration.""" + AI_ADDRCONFIG = 32 AI_ALL = 16 AI_CANONNAME = 2 @@ -1354,8 +1355,8 @@ class _SendableFile(Protocol): # def fileno(self) -> int: ... class socket(_socket.socket): - """A subclass of _socket.socket adding the makefile() method. 
-""" + """A subclass of _socket.socket adding the makefile() method.""" + __slots__ = ["__weakref__", "_io_refs", "_closed"] def __init__( self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None @@ -1365,16 +1366,17 @@ class socket(_socket.socket): def dup(self) -> Self: """dup() -> socket object -Duplicate the socket. Return a new socket object connected to the same -system resource. The new socket is non-inheritable. -""" + Duplicate the socket. Return a new socket object connected to the same + system resource. The new socket is non-inheritable. + """ + def accept(self) -> tuple[socket, _RetAddress]: """accept() -> (socket object, address info) -Wait for an incoming connection. Return a new socket -representing the connection, and the address of the client. -For IP sockets, the address info is a pair (hostaddr, port). -""" + Wait for an incoming connection. Return a new socket + representing the connection, and the address of the client. + For IP sockets, the address info is a pair (hostaddr, port). + """ # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @overload @@ -1389,10 +1391,11 @@ For IP sockets, the address info is a pair (hostaddr, port). ) -> SocketIO: """makefile(...) -> an I/O stream connected to the socket -The arguments are as for io.open() after the filename, except the only -supported mode values are 'r' (default), 'w', 'b', or a combination of -those. -""" + The arguments are as for io.open() after the filename, except the only + supported mode values are 'r' (default), 'w', 'b', or a combination of + those. + """ + @overload def makefile( self, @@ -1446,42 +1449,42 @@ those. def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: """sendfile(file[, offset[, count]]) -> sent -Send a file until EOF is reached by using high-performance -os.sendfile() and return the total number of bytes which -were sent. -*file* must be a regular file object opened in binary mode. -If os.sendfile() is not available (e.g. Windows) or file is -not a regular file socket.send() will be used instead. -*offset* tells from where to start reading the file. -If specified, *count* is the total number of bytes to transmit -as opposed to sending the file until EOF is reached. -File position is updated on return or also in case of error in -which case file.tell() can be used to figure out the number of -bytes which were sent. -The socket must be of SOCK_STREAM type. -Non-blocking sockets are not supported. -""" + Send a file until EOF is reached by using high-performance + os.sendfile() and return the total number of bytes which + were sent. + *file* must be a regular file object opened in binary mode. + If os.sendfile() is not available (e.g. Windows) or file is + not a regular file socket.send() will be used instead. + *offset* tells from where to start reading the file. + If specified, *count* is the total number of bytes to transmit + as opposed to sending the file until EOF is reached. + File position is updated on return or also in case of error in + which case file.tell() can be used to figure out the number of + bytes which were sent. + The socket must be of SOCK_STREAM type. + Non-blocking sockets are not supported. + """ + @property def family(self) -> AddressFamily: - """Read-only access to the address family for this socket. 
- """ + """Read-only access to the address family for this socket.""" + @property def type(self) -> SocketKind: - """Read-only access to the socket type. - """ + """Read-only access to the socket type.""" + def get_inheritable(self) -> bool: - """Get the inheritable flag of the socket -""" + """Get the inheritable flag of the socket""" + def set_inheritable(self, inheritable: bool) -> None: - """Set the inheritable flag of the socket -""" + """Set the inheritable flag of the socket""" def fromfd(fd: SupportsIndex, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: """fromfd(fd, family, type[, proto]) -> socket object -Create a socket object from a duplicate of the given file -descriptor. The remaining arguments are the same as for socket(). -""" + Create a socket object from a duplicate of the given file + descriptor. The remaining arguments are the same as for socket(). + """ if sys.platform != "win32": def send_fds( @@ -1489,65 +1492,69 @@ if sys.platform != "win32": ) -> int: """send_fds(sock, buffers, fds[, flags[, address]]) -> integer -Send the list of file descriptors fds over an AF_UNIX socket. -""" + Send the list of file descriptors fds over an AF_UNIX socket. + """ + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: """recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file -descriptors, msg_flags, address) + descriptors, msg_flags, address) -Receive up to maxfds file descriptors returning the message -data and a list containing the descriptors. -""" + Receive up to maxfds file descriptors returning the message + data and a list containing the descriptors. + """ if sys.platform == "win32": def fromshare(info: bytes) -> socket: """fromshare(info) -> socket object -Create a socket object from the bytes object returned by -socket.share(pid). -""" + Create a socket object from the bytes object returned by + socket.share(pid). + """ if sys.platform == "win32": def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: """socketpair([family[, type[, proto]]]) -> (socket object, socket object) -Create a pair of socket objects from the sockets returned by the platform -socketpair() function. -The arguments are the same as for socket() except the default family is AF_UNIX -if defined on the platform; otherwise, the default is AF_INET. -""" + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is AF_UNIX + if defined on the platform; otherwise, the default is AF_INET. + """ else: def socketpair( family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 ) -> tuple[socket, socket]: """socketpair([family[, type[, proto]]]) -> (socket object, socket object) -Create a pair of socket objects from the sockets returned by the platform -socketpair() function. -The arguments are the same as for socket() except the default family is AF_UNIX -if defined on the platform; otherwise, the default is AF_INET. -""" + Create a pair of socket objects from the sockets returned by the platform + socketpair() function. + The arguments are the same as for socket() except the default family is AF_UNIX + if defined on the platform; otherwise, the default is AF_INET. + """ class SocketIO(RawIOBase): """Raw I/O implementation for stream sockets. -This class supports the makefile() method on sockets. 
It provides -the raw I/O interface on top of a socket object. -""" + This class supports the makefile() method on sockets. It provides + the raw I/O interface on top of a socket object. + """ + def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... def readinto(self, b: WriteableBuffer) -> int | None: """Read up to len(b) bytes into the writable buffer *b* and return -the number of bytes read. If the socket is non-blocking and no bytes -are available, None is returned. + the number of bytes read. If the socket is non-blocking and no bytes + are available, None is returned. + + If *b* is non-empty, a 0 return value indicates that the connection + was shutdown at the other end. + """ -If *b* is non-empty, a 0 return value indicates that the connection -was shutdown at the other end. -""" def write(self, b: ReadableBuffer) -> int | None: """Write the given bytes or bytearray object *b* to the socket -and return the number of bytes written. This can be less than -len(b) if not all data could be written. If the socket is -non-blocking and no bytes could be written None is returned. -""" + and return the number of bytes written. This can be less than + len(b) if not all data could be written. If the socket is + non-blocking and no bytes could be written None is returned. + """ + @property def name(self) -> int: ... # return value is really "int" @property @@ -1556,13 +1563,13 @@ non-blocking and no bytes could be written None is returned. def getfqdn(name: str = "") -> str: """Get fully qualified domain name from name. -An empty argument is interpreted as meaning the local host. + An empty argument is interpreted as meaning the local host. -First the hostname returned by gethostbyaddr() is checked, then -possibly existing aliases. In case no FQDN is available and `name` -was given, it is returned unchanged. If `name` was empty, '0.0.0.0' or '::', -hostname from gethostname() is returned. -""" + First the hostname returned by gethostbyaddr() is checked, then + possibly existing aliases. In case no FQDN is available and `name` + was given, it is returned unchanged. If `name` was empty, '0.0.0.0' or '::', + hostname from gethostname() is returned. + """ if sys.version_info >= (3, 11): def create_connection( @@ -1574,17 +1581,17 @@ if sys.version_info >= (3, 11): ) -> socket: """Connect to *address* and return the socket object. -Convenience function. Connect to *address* (a 2-tuple ``(host, -port)``) and return the socket object. Passing the optional -*timeout* parameter will set the timeout on the socket instance -before attempting to connect. If no *timeout* is supplied, the -global default timeout setting returned by :func:`getdefaulttimeout` -is used. If *source_address* is set it must be a tuple of (host, port) -for the socket to bind as a source address before making the connection. -A host of '' or port 0 tells the OS to use the default. When a connection -cannot be created, raises the last error if *all_errors* is False, -and an ExceptionGroup of all errors if *all_errors* is True. -""" + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. 
If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + A host of '' or port 0 tells the OS to use the default. When a connection + cannot be created, raises the last error if *all_errors* is False, + and an ExceptionGroup of all errors if *all_errors* is True. + """ else: def create_connection( @@ -1592,40 +1599,41 @@ else: ) -> socket: """Connect to *address* and return the socket object. - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. If *source_address* is set it must be a tuple of (host, port) - for the socket to bind as a source address before making the connection. - A host of '' or port 0 tells the OS to use the default. - """ + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + A host of '' or port 0 tells the OS to use the default. + """ def has_dualstack_ipv6() -> bool: """Return True if the platform supports creating a SOCK_STREAM socket -which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections. -""" + which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections. + """ + def create_server( address: _Address, *, family: int = ..., backlog: int | None = None, reuse_port: bool = False, dualstack_ipv6: bool = False ) -> socket: """Convenience function which creates a SOCK_STREAM type socket -bound to *address* (a 2-tuple (host, port)) and return the socket -object. - -*family* should be either AF_INET or AF_INET6. -*backlog* is the queue size passed to socket.listen(). -*reuse_port* dictates whether to use the SO_REUSEPORT socket option. -*dualstack_ipv6*: if true and the platform supports it, it will -create an AF_INET6 socket able to accept both IPv4 or IPv6 -connections. When false it will explicitly disable this option on -platforms that enable it by default (e.g. Linux). - ->>> with create_server(('', 8000)) as server: -... while True: -... conn, addr = server.accept() -... # handle new connection -""" + bound to *address* (a 2-tuple (host, port)) and return the socket + object. + + *family* should be either AF_INET or AF_INET6. + *backlog* is the queue size passed to socket.listen(). + *reuse_port* dictates whether to use the SO_REUSEPORT socket option. + *dualstack_ipv6*: if true and the platform supports it, it will + create an AF_INET6 socket able to accept both IPv4 or IPv6 + connections. When false it will explicitly disable this option on + platforms that enable it by default (e.g. Linux). + + >>> with create_server(('', 8000)) as server: + ... while True: + ... conn, addr = server.accept() + ... # handle new connection + """ # The 5th tuple item is the socket address, for IP4, IP6, or IP6 if Python is compiled with --disable-ipv6, respectively. 
def getaddrinfo( @@ -1633,14 +1641,14 @@ def getaddrinfo( ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes]]]: """Resolve host and port into list of address info entries. -Translate the host/port argument into a sequence of 5-tuples that contain -all the necessary arguments for creating a socket connected to that service. -host is a domain name, a string representation of an IPv4/v6 address or -None. port is a string service name such as 'http', a numeric port number or -None. By passing None as the value of host and port, you can pass NULL to -the underlying C API. + Translate the host/port argument into a sequence of 5-tuples that contain + all the necessary arguments for creating a socket connected to that service. + host is a domain name, a string representation of an IPv4/v6 address or + None. port is a string service name such as 'http', a numeric port number or + None. By passing None as the value of host and port, you can pass NULL to + the underlying C API. -The family, type and proto arguments can be optionally specified in order to -narrow the list of addresses returned. Passing zero as a value for each of -these arguments selects the full range of results. -""" + The family, type and proto arguments can be optionally specified in order to + narrow the list of addresses returned. Passing zero as a value for each of + these arguments selects the full range of results. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi index d449c2ebc66d0..efd983449177e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/socketserver.pyi @@ -117,6 +117,7 @@ BaseServer: entry is processed by a RequestHandlerClass. """ + import sys import types from _socket import _Address, _RetAddress @@ -160,113 +161,123 @@ _AfInet6Address: TypeAlias = tuple[str | bytes | bytearray, int, int, int] # ad class BaseServer: """Base class for server classes. 
-Methods for the caller: + Methods for the caller: -- __init__(server_address, RequestHandlerClass) -- serve_forever(poll_interval=0.5) -- shutdown() -- handle_request() # if you do not use serve_forever() -- fileno() -> int # for selector + - __init__(server_address, RequestHandlerClass) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you do not use serve_forever() + - fileno() -> int # for selector -Methods that may be overridden: + Methods that may be overridden: -- server_bind() -- server_activate() -- get_request() -> request, client_address -- handle_timeout() -- verify_request(request, client_address) -- server_close() -- process_request(request, client_address) -- shutdown_request(request) -- close_request(request) -- service_actions() -- handle_error() + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - server_close() + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - service_actions() + - handle_error() -Methods for derived classes: + Methods for derived classes: -- finish_request(request, client_address) + - finish_request(request, client_address) -Class variables that may be overridden by derived classes or -instances: + Class variables that may be overridden by derived classes or + instances: -- timeout -- address_family -- socket_type -- allow_reuse_address -- allow_reuse_port + - timeout + - address_family + - socket_type + - allow_reuse_address + - allow_reuse_port -Instance variables: + Instance variables: -- RequestHandlerClass -- socket + - RequestHandlerClass + - socket + + """ -""" server_address: _Address timeout: float | None RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] def __init__( self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: - """Constructor. May be extended, do not override. -""" + """Constructor. May be extended, do not override.""" + def handle_request(self) -> None: """Handle one request, possibly blocking. -Respects self.timeout. -""" + Respects self.timeout. + """ + def serve_forever(self, poll_interval: float = 0.5) -> None: """Handle one request at a time until shutdown. -Polls for shutdown every poll_interval seconds. Ignores -self.timeout. If you need to do periodic tasks, do them in -another thread. -""" + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. + """ + def shutdown(self) -> None: """Stops the serve_forever loop. -Blocks until the loop has finished. This must be called while -serve_forever() is running in another thread, or it will -deadlock. -""" + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will + deadlock. + """ + def server_close(self) -> None: """Called to clean-up the server. -May be overridden. + May be overridden. + + """ -""" def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Finish one request by instantiating RequestHandlerClass. -""" + """Finish one request by instantiating RequestHandlerClass.""" + def get_request(self) -> tuple[Any, Any]: ... # Not implemented here, but expected to exist on subclasses def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: """Handle an error gracefully. May be overridden. 
-The default is to print a traceback and continue. + The default is to print a traceback and continue. + + """ -""" def handle_timeout(self) -> None: """Called if no new request arrives within self.timeout. -Overridden by ForkingMixIn. -""" + Overridden by ForkingMixIn. + """ + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: """Call finish_request. -Overridden by ForkingMixIn and ThreadingMixIn. + Overridden by ForkingMixIn and ThreadingMixIn. + + """ -""" def server_activate(self) -> None: """Called by constructor to activate the server. -May be overridden. + May be overridden. + + """ -""" def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: """Verify the request. May be overridden. -Return True if we should proceed with this request. + Return True if we should proceed with this request. + + """ -""" def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None @@ -274,62 +285,63 @@ Return True if we should proceed with this request. def service_actions(self) -> None: """Called by the serve_forever() loop. -May be overridden by a subclass / Mixin to implement any code that -needs to be run during the loop. -""" + May be overridden by a subclass / Mixin to implement any code that + needs to be run during the loop. + """ + def shutdown_request(self, request: _RequestType) -> None: # undocumented - """Called to shutdown and close an individual request. -""" + """Called to shutdown and close an individual request.""" + def close_request(self, request: _RequestType) -> None: # undocumented - """Called to clean up an individual request. -""" + """Called to clean up an individual request.""" class TCPServer(BaseServer): """Base class for various socket-based server classes. -Defaults to synchronous IP stream (i.e., TCP). + Defaults to synchronous IP stream (i.e., TCP). 
-Methods for the caller: + Methods for the caller: -- __init__(server_address, RequestHandlerClass, bind_and_activate=True) -- serve_forever(poll_interval=0.5) -- shutdown() -- handle_request() # if you don't use serve_forever() -- fileno() -> int # for selector + - __init__(server_address, RequestHandlerClass, bind_and_activate=True) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you don't use serve_forever() + - fileno() -> int # for selector -Methods that may be overridden: + Methods that may be overridden: -- server_bind() -- server_activate() -- get_request() -> request, client_address -- handle_timeout() -- verify_request(request, client_address) -- process_request(request, client_address) -- shutdown_request(request) -- close_request(request) -- handle_error() + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() -Methods for derived classes: + Methods for derived classes: -- finish_request(request, client_address) + - finish_request(request, client_address) -Class variables that may be overridden by derived classes or -instances: + Class variables that may be overridden by derived classes or + instances: -- timeout -- address_family -- socket_type -- request_queue_size (only for stream sockets) -- allow_reuse_address -- allow_reuse_port + - timeout + - address_family + - socket_type + - request_queue_size (only for stream sockets) + - allow_reuse_address + - allow_reuse_port -Instance variables: + Instance variables: -- server_address -- RequestHandlerClass -- socket + - server_address + - RequestHandlerClass + - socket + + """ -""" address_family: int socket: _socket allow_reuse_address: bool @@ -344,30 +356,32 @@ Instance variables: RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, ) -> None: - """Constructor. May be extended, do not override. -""" + """Constructor. May be extended, do not override.""" + def fileno(self) -> int: """Return socket file number. -Interface required by selector. + Interface required by selector. + + """ -""" def get_request(self) -> tuple[_socket, _RetAddress]: """Get the request and client address from the socket. -May be overridden. + May be overridden. + + """ -""" def server_bind(self) -> None: """Called by constructor to bind the socket. -May be overridden. + May be overridden. -""" + """ class UDPServer(TCPServer): - """UDP server class. -""" + """UDP server class.""" + max_packet_size: ClassVar[int] def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] @@ -380,8 +394,7 @@ if sys.platform != "win32": RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, ) -> None: - """Constructor. May be extended, do not override. -""" + """Constructor. May be extended, do not override.""" class UnixDatagramServer(UDPServer): server_address: _AfUnixAddress # type: ignore[assignment] @@ -391,49 +404,51 @@ if sys.platform != "win32": RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], bind_and_activate: bool = True, ) -> None: - """Constructor. May be extended, do not override. -""" + """Constructor. May be extended, do not override.""" if sys.platform != "win32": class ForkingMixIn: - """Mix-in class to handle each request in a new process. 
-""" + """Mix-in class to handle each request in a new process.""" + timeout: float | None # undocumented active_children: set[int] | None # undocumented max_children: int # undocumented block_on_close: bool def collect_children(self, *, blocking: bool = False) -> None: # undocumented - """Internal routine to wait for children that have exited. -""" + """Internal routine to wait for children that have exited.""" + def handle_timeout(self) -> None: # undocumented """Wait for zombies after self.timeout seconds of inactivity. -May be extended, do not override. -""" + May be extended, do not override. + """ + def service_actions(self) -> None: # undocumented """Collect the zombie child processes regularly in the ForkingMixIn. -service_actions is called in the BaseServer's serve_forever loop. -""" + service_actions is called in the BaseServer's serve_forever loop. + """ + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Fork a new subprocess to process the request. -""" + """Fork a new subprocess to process the request.""" + def server_close(self) -> None: ... class ThreadingMixIn: - """Mix-in class to handle each request in a new thread. -""" + """Mix-in class to handle each request in a new thread.""" + daemon_threads: bool block_on_close: bool def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: # undocumented """Same as in BaseServer but as a thread. -In addition, exception handling is done here. + In addition, exception handling is done here. + + """ -""" def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: - """Start a new thread to process the request. -""" + """Start a new thread to process the request.""" + def server_close(self) -> None: ... if sys.platform != "win32": @@ -453,19 +468,20 @@ if sys.platform != "win32": class BaseRequestHandler: """Base class for request handler classes. -This class is instantiated for each request to be handled. The -constructor sets the instance variables request, client_address -and server, and then calls the handle() method. To implement a -specific service, all you need to do is to derive a class which -defines a handle() method. + This class is instantiated for each request to be handled. The + constructor sets the instance variables request, client_address + and server, and then calls the handle() method. To implement a + specific service, all you need to do is to derive a class which + defines a handle() method. -The handle() method can find the request as self.request, the -client address as self.client_address, and the server (in case it -needs access to per-server information) as self.server. Since a -separate instance is created for each request, the handle() method -can define other arbitrary instance variables. + The handle() method can find the request as self.request, the + client address as self.client_address, and the server (in case it + needs access to per-server information) as self.server. Since a + separate instance is created for each request, the handle() method + can define other arbitrary instance variables. + + """ -""" # `request` is technically of type _RequestType, # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see @@ -481,8 +497,8 @@ can define other arbitrary instance variables. def finish(self) -> None: ... class StreamRequestHandler(BaseRequestHandler): - """Define self.rfile and self.wfile for stream sockets. 
-""" + """Define self.rfile and self.wfile for stream sockets.""" + rbufsize: ClassVar[int] # undocumented wbufsize: ClassVar[int] # undocumented timeout: ClassVar[float | None] # undocumented @@ -492,8 +508,8 @@ class StreamRequestHandler(BaseRequestHandler): wfile: BufferedIOBase class DatagramRequestHandler(BaseRequestHandler): - """Define self.rfile and self.wfile for datagram sockets. -""" + """Define self.rfile and self.wfile for datagram sockets.""" + packet: bytes # undocumented socket: _socket # undocumented rfile: BufferedIOBase diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi index 1a7059699ef04..70e8e3ea4c1b2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/spwd.pyi @@ -8,6 +8,7 @@ The sp_namp and sp_pwdp are strings, the rest are integers. An exception is raised if the entry asked for cannot be found. You have to be root to be able to use this module. """ + import sys from _typeshed import structseq from typing import Any, Final, final @@ -17,10 +18,11 @@ if sys.platform != "win32": class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): """spwd.struct_spwd: Results from getsp*() routines. -This object may be accessed either as a 9-tuple of - (sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag) -or via the object attributes as named in the above tuple. -""" + This object may be accessed either as a 9-tuple of + (sp_namp,sp_pwdp,sp_lstchg,sp_min,sp_max,sp_warn,sp_inact,sp_expire,sp_flag) + or via the object attributes as named in the above tuple. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ( "sp_namp", @@ -36,57 +38,56 @@ or via the object attributes as named in the above tuple. @property def sp_namp(self) -> str: - """login name -""" + """login name""" + @property def sp_pwdp(self) -> str: - """encrypted password -""" + """encrypted password""" + @property def sp_lstchg(self) -> int: - """date of last change -""" + """date of last change""" + @property def sp_min(self) -> int: - """min #days between changes -""" + """min #days between changes""" + @property def sp_max(self) -> int: - """max #days between changes -""" + """max #days between changes""" + @property def sp_warn(self) -> int: - """#days before pw expires to warn user about it -""" + """#days before pw expires to warn user about it""" + @property def sp_inact(self) -> int: - """#days after pw expires until account is disabled -""" + """#days after pw expires until account is disabled""" + @property def sp_expire(self) -> int: - """#days since 1970-01-01 when account expires -""" + """#days since 1970-01-01 when account expires""" + @property def sp_flag(self) -> int: - """reserved -""" + """reserved""" # Deprecated aliases below. @property def sp_nam(self) -> str: - """login name; deprecated -""" + """login name; deprecated""" + @property def sp_pwd(self) -> str: - """encrypted password; deprecated -""" + """encrypted password; deprecated""" def getspall() -> list[struct_spwd]: """Return a list of all available shadow password database entries, in arbitrary order. -See `help(spwd)` for more on shadow password database entries. -""" + See `help(spwd)` for more on shadow password database entries. + """ + def getspnam(arg: str, /) -> struct_spwd: """Return the shadow password database entry for the given user name. -See `help(spwd)` for more on shadow password database entries. 
-""" + See `help(spwd)` for more on shadow password database entries. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi index 792e15b45c91e..e378b4d434026 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sqlite3/__init__.pyi @@ -31,6 +31,7 @@ its execute() method to perform SQL queries: The sqlite3 module is written by Gerhard Häring . """ + import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping, Sequence @@ -303,8 +304,8 @@ class Warning(Exception): ... @disjoint_base class Connection: - """SQLite database connection object. -""" + """SQLite database connection object.""" + @property def DataError(self) -> type[DataError]: ... @property @@ -366,37 +367,38 @@ class Connection: def close(self) -> None: """Close the database connection. -Any pending transaction is not committed implicitly. -""" + Any pending transaction is not committed implicitly. + """ if sys.version_info >= (3, 11): def blobopen(self, table: str, column: str, row: int, /, *, readonly: bool = False, name: str = "main") -> Blob: """Open and return a BLOB object. - table - Table name. - column - Column name. - row - Row index. - readonly - Open the BLOB without write permissions. - name - Database name. -""" + table + Table name. + column + Column name. + row + Row index. + readonly + Open the BLOB without write permissions. + name + Database name. + """ def commit(self) -> None: """Commit any pending transaction to the database. -If there is no open transaction, this method is a no-op. -""" + If there is no open transaction, this method is a no-op. + """ + def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: """Creates a new aggregate. -Note: Passing keyword arguments 'name', 'n_arg' and 'aggregate_class' -to _sqlite3.Connection.create_aggregate() is deprecated. Parameters -'name', 'n_arg' and 'aggregate_class' will become positional-only in -Python 3.15. -""" + Note: Passing keyword arguments 'name', 'n_arg' and 'aggregate_class' + to _sqlite3.Connection.create_aggregate() is deprecated. Parameters + 'name', 'n_arg' and 'aggregate_class' will become positional-only in + Python 3.15. + """ if sys.version_info >= (3, 11): # num_params determines how many params will be passed to the aggregate class. We provide an overload # for the case where num_params = 1, which is expected to be the common case. @@ -406,15 +408,15 @@ Python 3.15. ) -> None: """Creates or redefines an aggregate window function. Non-standard. - name - The name of the SQL aggregate window function to be created or - redefined. - num_params - The number of arguments the step and inverse methods takes. - aggregate_class - A class with step(), finalize(), value(), and inverse() methods. - Set to None to clear the window function. -""" + name + The name of the SQL aggregate window function to be created or + redefined. + num_params + The number of arguments the step and inverse methods takes. + aggregate_class + A class with step(), finalize(), value(), and inverse() methods. + Set to None to clear the window function. + """ # And for num_params = -1, which means the aggregate must accept any number of parameters. @overload def create_window_function( @@ -426,98 +428,97 @@ Python 3.15. ) -> None: ... 
def create_collation(self, name: str, callback: Callable[[str, str], int | SupportsIndex] | None, /) -> None: - """Creates a collation function. -""" + """Creates a collation function.""" + def create_function( self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False ) -> None: """Creates a new function. -Note: Passing keyword arguments 'name', 'narg' and 'func' to -_sqlite3.Connection.create_function() is deprecated. Parameters -'name', 'narg' and 'func' will become positional-only in Python 3.15. -""" + Note: Passing keyword arguments 'name', 'narg' and 'func' to + _sqlite3.Connection.create_function() is deprecated. Parameters + 'name', 'narg' and 'func' will become positional-only in Python 3.15. + """ + @overload def cursor(self, factory: None = None) -> Cursor: - """Return a cursor for the connection. -""" + """Return a cursor for the connection.""" + @overload def cursor(self, factory: Callable[[Connection], _CursorT]) -> _CursorT: ... def execute(self, sql: str, parameters: _Parameters = ..., /) -> Cursor: - """Executes an SQL statement. -""" + """Executes an SQL statement.""" + def executemany(self, sql: str, parameters: Iterable[_Parameters], /) -> Cursor: - """Repeatedly executes an SQL statement. -""" + """Repeatedly executes an SQL statement.""" + def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once. -""" + """Executes multiple SQL statements at once.""" + def interrupt(self) -> None: - """Abort any pending database operation. -""" + """Abort any pending database operation.""" if sys.version_info >= (3, 13): def iterdump(self, *, filter: str | None = None) -> Generator[str, None, None]: """Returns iterator to the dump of the database in an SQL text format. - filter - An optional LIKE pattern for database objects to dump -""" + filter + An optional LIKE pattern for database objects to dump + """ else: def iterdump(self) -> Generator[str, None, None]: - """Returns iterator to the dump of the database in an SQL text format. -""" + """Returns iterator to the dump of the database in an SQL text format.""" def rollback(self) -> None: """Roll back to the start of any pending transaction. -If there is no open transaction, this method is a no-op. -""" + If there is no open transaction, this method is a no-op. + """ + def set_authorizer( self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None ) -> None: """Set authorizer callback. -Note: Passing keyword argument 'authorizer_callback' to -_sqlite3.Connection.set_authorizer() is deprecated. Parameter -'authorizer_callback' will become positional-only in Python 3.15. -""" + Note: Passing keyword argument 'authorizer_callback' to + _sqlite3.Connection.set_authorizer() is deprecated. Parameter + 'authorizer_callback' will become positional-only in Python 3.15. + """ + def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: """Set progress handler callback. - progress_handler - A callable that takes no arguments. - If the callable returns non-zero, the current query is terminated, - and an exception is raised. - n - The number of SQLite virtual machine instructions that are - executed between invocations of 'progress_handler'. + progress_handler + A callable that takes no arguments. + If the callable returns non-zero, the current query is terminated, + and an exception is raised. 
+ n + The number of SQLite virtual machine instructions that are + executed between invocations of 'progress_handler'. -If 'progress_handler' is None or 'n' is 0, the progress handler is disabled. + If 'progress_handler' is None or 'n' is 0, the progress handler is disabled. + + Note: Passing keyword argument 'progress_handler' to + _sqlite3.Connection.set_progress_handler() is deprecated. Parameter + 'progress_handler' will become positional-only in Python 3.15. + """ -Note: Passing keyword argument 'progress_handler' to -_sqlite3.Connection.set_progress_handler() is deprecated. Parameter -'progress_handler' will become positional-only in Python 3.15. -""" def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: """Set a trace callback called for each SQL statement (passed as unicode). -Note: Passing keyword argument 'trace_callback' to -_sqlite3.Connection.set_trace_callback() is deprecated. Parameter -'trace_callback' will become positional-only in Python 3.15. -""" + Note: Passing keyword argument 'trace_callback' to + _sqlite3.Connection.set_trace_callback() is deprecated. Parameter + 'trace_callback' will become positional-only in Python 3.15. + """ # enable_load_extension and load_extension is not available on python distributions compiled # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 def enable_load_extension(self, enable: bool, /) -> None: - """Enable dynamic loading of SQLite extension modules. -""" + """Enable dynamic loading of SQLite extension modules.""" if sys.version_info >= (3, 12): def load_extension(self, name: str, /, *, entrypoint: str | None = None) -> None: - """Load SQLite extension module. -""" + """Load SQLite extension module.""" else: def load_extension(self, name: str, /) -> None: - """Load SQLite extension module. -""" + """Load SQLite extension module.""" def backup( self, @@ -528,88 +529,92 @@ _sqlite3.Connection.set_trace_callback() is deprecated. Parameter name: str = "main", sleep: float = 0.25, ) -> None: - """Makes a backup of the database. -""" + """Makes a backup of the database.""" if sys.version_info >= (3, 11): def setlimit(self, category: int, limit: int, /) -> int: """Set connection run-time limits. - category - The limit category to be set. - limit - The new limit. If the new limit is a negative number, the limit is - unchanged. + category + The limit category to be set. + limit + The new limit. If the new limit is a negative number, the limit is + unchanged. + + Attempts to increase a limit above its hard upper bound are silently truncated + to the hard upper bound. Regardless of whether or not the limit was changed, + the prior value of the limit is returned. + """ -Attempts to increase a limit above its hard upper bound are silently truncated -to the hard upper bound. Regardless of whether or not the limit was changed, -the prior value of the limit is returned. -""" def getlimit(self, category: int, /) -> int: """Get connection run-time limits. - category - The limit category to be queried. -""" + category + The limit category to be queried. + """ + def serialize(self, *, name: str = "main") -> bytes: """Serialize a database into a byte string. - name - Which database to serialize. + name + Which database to serialize. + + For an ordinary on-disk database file, the serialization is just a copy of the + disk file. 
For an in-memory database or a "temp" database, the serialization is + the same sequence of bytes which would be written to disk if that database + were backed up to disk. + """ -For an ordinary on-disk database file, the serialization is just a copy of the -disk file. For an in-memory database or a "temp" database, the serialization is -the same sequence of bytes which would be written to disk if that database -were backed up to disk. -""" def deserialize(self, data: ReadableBuffer, /, *, name: str = "main") -> None: """Load a serialized database. - data - The serialized database content. - name - Which database to reopen with the deserialization. + data + The serialized database content. + name + Which database to reopen with the deserialization. -The deserialize interface causes the database connection to disconnect from the -target database, and then reopen it as an in-memory database based on the given -serialized data. + The deserialize interface causes the database connection to disconnect from the + target database, and then reopen it as an in-memory database based on the given + serialized data. -The deserialize interface will fail with SQLITE_BUSY if the database is -currently in a read transaction or is involved in a backup operation. -""" + The deserialize interface will fail with SQLITE_BUSY if the database is + currently in a read transaction or is involved in a backup operation. + """ if sys.version_info >= (3, 12): def getconfig(self, op: int, /) -> bool: """Query a boolean connection configuration option. - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. -""" + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + """ + def setconfig(self, op: int, enable: bool = True, /) -> bool: """Set a boolean connection configuration option. - op - The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. -""" + op + The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes. + """ def __call__(self, sql: str, /) -> _Statement: - """Call self as a function. -""" + """Call self as a function.""" + def __enter__(self) -> Self: """Called when the connection is used as a context manager. -Returns itself as a convenience to the caller. -""" + Returns itself as a convenience to the caller. + """ + def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None, / ) -> Literal[False]: """Called when the connection is used as a context manager. -If there was any exception, a rollback takes place; otherwise we commit. -""" + If there was any exception, a rollback takes place; otherwise we commit. + """ @disjoint_base class Cursor: - """SQLite database cursor class. -""" + """SQLite database cursor class.""" + arraysize: int @property def connection(self) -> Connection: ... @@ -623,69 +628,67 @@ class Cursor: def rowcount(self) -> int: ... def __init__(self, cursor: Connection, /) -> None: ... def close(self) -> None: - """Closes the cursor. -""" + """Closes the cursor.""" + def execute(self, sql: str, parameters: _Parameters = (), /) -> Self: - """Executes an SQL statement. -""" + """Executes an SQL statement.""" + def executemany(self, sql: str, seq_of_parameters: Iterable[_Parameters], /) -> Self: - """Repeatedly executes an SQL statement. -""" + """Repeatedly executes an SQL statement.""" + def executescript(self, sql_script: str, /) -> Cursor: - """Executes multiple SQL statements at once. 
-""" + """Executes multiple SQL statements at once.""" + def fetchall(self) -> list[Any]: - """Fetches all rows from the resultset. -""" + """Fetches all rows from the resultset.""" + def fetchmany(self, size: int | None = 1) -> list[Any]: """Fetches several rows from the resultset. - size - The default value is set by the Cursor.arraysize attribute. -""" + size + The default value is set by the Cursor.arraysize attribute. + """ # Returns either a row (as created by the row_factory) or None, but # putting None in the return annotation causes annoying false positives. def fetchone(self) -> Any: - """Fetches one row from the resultset. -""" + """Fetches one row from the resultset.""" + def setinputsizes(self, sizes: Unused, /) -> None: # does nothing - """Required by DB-API. Does nothing in sqlite3. -""" + """Required by DB-API. Does nothing in sqlite3.""" + def setoutputsize(self, size: Unused, column: Unused = None, /) -> None: # does nothing - """Required by DB-API. Does nothing in sqlite3. -""" + """Required by DB-API. Does nothing in sqlite3.""" + def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> Any: - """Implement next(self). -""" + """Implement next(self).""" @final class PrepareProtocol: - """PEP 246 style object adaption protocol type. -""" + """PEP 246 style object adaption protocol type.""" + def __init__(self, *args: object, **kwargs: object) -> None: ... @disjoint_base class Row(Sequence[Any]): def __new__(cls, cursor: Cursor, data: tuple[Any, ...], /) -> Self: ... def keys(self) -> list[str]: - """Returns the keys of the row. -""" + """Returns the keys of the row.""" + @overload def __getitem__(self, key: int | str, /) -> Any: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> tuple[Any, ...]: ... def __hash__(self) -> int: ... def __iter__(self) -> Iterator[Any]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" # These return NotImplemented for anything that is not a Row. def __eq__(self, value: object, /) -> bool: ... def __ge__(self, value: object, /) -> bool: ... @@ -703,47 +706,48 @@ if sys.version_info >= (3, 11): @final class Blob: def close(self) -> None: - """Close the blob. -""" + """Close the blob.""" + def read(self, length: int = -1, /) -> bytes: """Read data at the current offset position. - length - Read length in bytes. + length + Read length in bytes. + + If the end of the blob is reached, the data up to end of file will be returned. + When length is not specified, or is negative, Blob.read() will read until the + end of the blob. + """ -If the end of the blob is reached, the data up to end of file will be returned. -When length is not specified, or is negative, Blob.read() will read until the -end of the blob. -""" def write(self, data: ReadableBuffer, /) -> None: """Write data at the current offset. -This function cannot change the blob length. Writing beyond the end of the -blob will result in an exception being raised. -""" + This function cannot change the blob length. Writing beyond the end of the + blob will result in an exception being raised. + """ + def tell(self) -> int: - """Return the current access position for the blob. 
-""" + """Return the current access position for the blob.""" # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END def seek(self, offset: int, origin: int = 0, /) -> None: """Set the current access position to offset. -The origin argument defaults to os.SEEK_SET (absolute blob positioning). -Other values for origin are os.SEEK_CUR (seek relative to the current position) -and os.SEEK_END (seek relative to the blob's end). -""" + The origin argument defaults to os.SEEK_SET (absolute blob positioning). + Other values for origin are os.SEEK_CUR (seek relative to the current position) + and os.SEEK_END (seek relative to the blob's end). + """ + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __enter__(self) -> Self: - """Blob context manager enter. -""" + """Blob context manager enter.""" + def __exit__(self, type: object, val: object, tb: object, /) -> Literal[False]: - """Blob context manager exit. -""" + """Blob context manager exit.""" + def __getitem__(self, key: SupportsIndex | slice, /) -> int: - """Return self[key]. -""" + """Return self[key].""" + def __setitem__(self, key: SupportsIndex | slice, value: int, /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi index eef5169667ea1..b3205231f6f5d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_compile.pyi @@ -1,5 +1,5 @@ -"""Internal support module for sre -""" +"""Internal support module for sre""" + from re import Pattern from sre_constants import * from sre_constants import _NamedIntConstant diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi index 4c8186812ee3f..d1c52ccfb1025 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_constants.pyi @@ -1,5 +1,5 @@ -"""Internal support module for sre -""" +"""Internal support module for sre""" + import sys from re import error as error from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi index af61c65c64905..89d33fcdd2379 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sre_parse.pyi @@ -1,5 +1,5 @@ -"""Internal support module for sre -""" +"""Internal support module for sre""" + import sys from collections.abc import Iterable from re import Match, Pattern as _Pattern diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi index 05a6f027ec316..b4540c8ca8c97 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/ssl.pyi @@ -87,6 +87,7 @@ ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY """ + import enum import socket import sys @@ -154,32 +155,33 @@ class _Cipher(TypedDict): symmetric: str class SSLError(OSError): - """An error occurred in the SSL implementation. -""" + """An error occurred in the SSL implementation.""" + library: str reason: str class SSLZeroReturnError(SSLError): - """SSL/TLS session closed cleanly. 
-""" + """SSL/TLS session closed cleanly.""" + class SSLWantReadError(SSLError): """Non-blocking SSL socket needs to read more data -before the requested operation can be completed. -""" + before the requested operation can be completed. + """ + class SSLWantWriteError(SSLError): """Non-blocking SSL socket needs to write more data -before the requested operation can be completed. -""" + before the requested operation can be completed. + """ + class SSLSyscallError(SSLError): - """System error when attempting SSL operation. -""" + """System error when attempting SSL operation.""" + class SSLEOFError(SSLError): - """SSL/TLS connection terminated abruptly. -""" + """SSL/TLS connection terminated abruptly.""" class SSLCertVerificationError(SSLError, ValueError): - """A certificate could not be verified. -""" + """A certificate could not be verified.""" + verify_code: int verify_message: str @@ -202,60 +204,61 @@ if sys.version_info < (3, 12): @deprecated("Deprecated since Python 3.7; removed in Python 3.12.") def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed. + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed. - The function matches IP addresses rather than dNSNames if hostname is a - valid ipaddress string. IPv4 addresses are supported on all platforms. - IPv6 addresses are supported on platforms with IPv6 support (AF_INET6 - and inet_pton). + The function matches IP addresses rather than dNSNames if hostname is a + valid ipaddress string. IPv4 addresses are supported on all platforms. + IPv6 addresses are supported on platforms with IPv6 support (AF_INET6 + and inet_pton). - CertificateError is raised on failure. On success, the function - returns nothing. - """ + CertificateError is raised on failure. On success, the function + returns nothing. + """ def cert_time_to_seconds(cert_time: str) -> int: """Return the time in seconds since the Epoch, given the timestring -representing the "notBefore" or "notAfter" date from a certificate -in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale). + representing the "notBefore" or "notAfter" date from a certificate + in ``"%b %d %H:%M:%S %Y %Z"`` strptime format (C locale). -"notBefore" or "notAfter" dates must use UTC (RFC 5280). + "notBefore" or "notAfter" dates must use UTC (RFC 5280). -Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec -UTC should be specified as GMT (see ASN1_TIME_print()) -""" + Month is one of: Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec + UTC should be specified as GMT (see ASN1_TIME_print()) + """ if sys.version_info >= (3, 10): def get_server_certificate( addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... ) -> str: """Retrieve the certificate from the server at the specified address, -and return it as a PEM-encoded string. -If 'ca_certs' is specified, validate the server cert against it. -If 'ssl_version' is specified, use it in the connection attempt. -If 'timeout' is specified, use it in the connection attempt. -""" + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt. + If 'timeout' is specified, use it in the connection attempt. 
+ """ else: def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: """Retrieve the certificate from the server at the specified address, - and return it as a PEM-encoded string. - If 'ca_certs' is specified, validate the server cert against it. - If 'ssl_version' is specified, use it in the connection attempt. -""" + and return it as a PEM-encoded string. + If 'ca_certs' is specified, validate the server cert against it. + If 'ssl_version' is specified, use it in the connection attempt. + """ def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: """Takes a certificate in binary DER format and returns the -PEM version of it as a string. -""" + PEM version of it as a string. + """ + def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: """Takes a certificate in ASCII PEM format and returns the -DER-encoded version of it as a byte sequence -""" + DER-encoded version of it as a byte sequence + """ class DefaultVerifyPaths(NamedTuple): - """DefaultVerifyPaths(cafile, capath, openssl_cafile_env, openssl_cafile, openssl_capath_env, openssl_capath) -""" + """DefaultVerifyPaths(cafile, capath, openssl_cafile_env, openssl_cafile, openssl_capath_env, openssl_capath)""" + cafile: str capath: str openssl_cafile_env: str @@ -264,12 +267,11 @@ class DefaultVerifyPaths(NamedTuple): openssl_capath: str def get_default_verify_paths() -> DefaultVerifyPaths: - """Return paths to default cafile and capath. - """ + """Return paths to default cafile and capath.""" class VerifyMode(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + CERT_NONE = 0 CERT_OPTIONAL = 1 CERT_REQUIRED = 2 @@ -279,8 +281,8 @@ CERT_OPTIONAL: Final = VerifyMode.CERT_OPTIONAL CERT_REQUIRED: Final = VerifyMode.CERT_REQUIRED class VerifyFlags(enum.IntFlag): - """An enumeration. -""" + """An enumeration.""" + VERIFY_DEFAULT = 0 VERIFY_CRL_CHECK_LEAF = 4 VERIFY_CRL_CHECK_CHAIN = 12 @@ -301,8 +303,8 @@ if sys.version_info >= (3, 10): VERIFY_X509_PARTIAL_CHAIN: Final = VerifyFlags.VERIFY_X509_PARTIAL_CHAIN class _SSLMethod(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + PROTOCOL_SSLv23 = 2 PROTOCOL_SSLv2 = ... PROTOCOL_SSLv3 = ... @@ -324,8 +326,8 @@ PROTOCOL_TLS_CLIENT: Final = _SSLMethod.PROTOCOL_TLS_CLIENT PROTOCOL_TLS_SERVER: Final = _SSLMethod.PROTOCOL_TLS_SERVER class Options(enum.IntFlag): - """An enumeration. -""" + """An enumeration.""" + OP_ALL = 2147483728 OP_NO_SSLv2 = 0 OP_NO_SSLv3 = 33554432 @@ -371,8 +373,8 @@ HAS_NEVER_CHECK_COMMON_NAME: Final[bool] CHANNEL_BINDING_TYPES: Final[list[str]] class AlertDescription(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + ALERT_DESCRIPTION_ACCESS_DENIED = 49 ALERT_DESCRIPTION_BAD_CERTIFICATE = 42 ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE = 114 @@ -438,21 +440,20 @@ class _ASN1ObjectBase(NamedTuple): oid: str class _ASN1Object(_ASN1ObjectBase): - """ASN.1 object identifier lookup - """ + """ASN.1 object identifier lookup""" + def __new__(cls, oid: str) -> Self: ... 
@classmethod def fromnid(cls, nid: int) -> Self: - """Create _ASN1Object from OpenSSL numeric ID - """ + """Create _ASN1Object from OpenSSL numeric ID""" + @classmethod def fromname(cls, name: str) -> Self: - """Create _ASN1Object from short name, long name or OID - """ + """Create _ASN1Object from short name, long name or OID""" class Purpose(_ASN1Object, enum.Enum): - """SSLContext purpose flags with X509v3 Extended Key Usage objects - """ + """SSLContext purpose flags with X509v3 Extended Key Usage objects""" + # Normally this class would inherit __new__ from _ASN1Object, but # because this is an enum, the inherited __new__ is replaced at runtime with # Enum.__new__. @@ -462,26 +463,29 @@ class Purpose(_ASN1Object, enum.Enum): class SSLSocket(socket.socket): """This class implements a subtype of socket.socket that wraps -the underlying OS socket in an SSL context when necessary, and -provides read and write methods over that channel. -""" + the underlying OS socket in an SSL context when necessary, and + provides read and write methods over that channel. + """ + context: SSLContext server_side: bool server_hostname: str | None session: SSLSession | None @property def session_reused(self) -> bool | None: - """Was the client session reused during handshake -""" + """Was the client session reused during handshake""" + def __init__(self, *args: Any, **kwargs: Any) -> None: ... def connect(self, addr: socket._Address) -> None: """Connects to remote ADDR, and then wraps the connection in -an SSL channel. -""" + an SSL channel. + """ + def connect_ex(self, addr: socket._Address) -> int: """Connects to remote ADDR, and then wraps the connection in -an SSL channel. -""" + an SSL channel. + """ + def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... @@ -497,78 +501,87 @@ an SSL channel. def shutdown(self, how: int) -> None: ... def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: """Read up to LEN bytes and return them. -Return zero-length string on EOF. -""" + Return zero-length string on EOF. + """ + def write(self, data: ReadableBuffer) -> int: """Write DATA to the underlying SSL channel. Returns -number of bytes of DATA actually transmitted. -""" + number of bytes of DATA actually transmitted. + """ + def do_handshake(self, block: bool = False) -> None: # block is undocumented - """Start the SSL/TLS handshake. -""" + """Start the SSL/TLS handshake.""" + @overload def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: """Returns a formatted version of the data in the certificate provided -by the other end of the SSL channel. + by the other end of the SSL channel. + + Return None if no certificate was provided, {} if a certificate was + provided, but not validated. + """ -Return None if no certificate was provided, {} if a certificate was -provided, but not validated. -""" @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... def cipher(self) -> tuple[str, str, int] | None: """Return the currently selected cipher as a 3-tuple ``(name, -ssl_version, secret_bits)``. -""" + ssl_version, secret_bits)``. 
+ """ + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: """Return a list of ciphers shared by the client during the handshake or -None if this is not a valid server connection. -""" + None if this is not a valid server connection. + """ + def compression(self) -> str | None: """Return the current compression algorithm in use, or ``None`` if -compression was not negotiated or not supported by one of the peers. -""" + compression was not negotiated or not supported by one of the peers. + """ + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: """Get channel binding data for current connection. Raise ValueError -if the requested `cb_type` is not supported. Return bytes of the data -or None if the data is not available (e.g. before the handshake). -""" + if the requested `cb_type` is not supported. Return bytes of the data + or None if the data is not available (e.g. before the handshake). + """ + def selected_alpn_protocol(self) -> str | None: """Return the currently selected ALPN protocol as a string, or ``None`` -if a next protocol was not negotiated or if ALPN is not supported by one -of the peers. -""" + if a next protocol was not negotiated or if ALPN is not supported by one + of the peers. + """ if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") def selected_npn_protocol(self) -> str | None: """Return the currently selected NPN protocol as a string, or ``None`` -if a next protocol was not negotiated or if NPN is not supported by one -of the peers. -""" + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. + """ else: def selected_npn_protocol(self) -> str | None: """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. -""" + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. + """ def accept(self) -> tuple[SSLSocket, socket._RetAddress]: """Accepts a new connection from a remote client, and returns -a tuple containing that new connection wrapped with a server-side -SSL channel, and the address of the remote client. -""" + a tuple containing that new connection wrapped with a server-side + SSL channel, and the address of the remote client. + """ + def unwrap(self) -> socket.socket: - """Start the SSL shutdown handshake. -""" + """Start the SSL shutdown handshake.""" + def version(self) -> str | None: """Return a string identifying the protocol version used by the -current SSL channel. -""" + current SSL channel. + """ + def pending(self) -> int: - """Return the number of bytes that can be read immediately. -""" + """Return the number of bytes that can be read immediately.""" + def verify_client_post_handshake(self) -> None: ... # These methods always raise `NotImplementedError`: def recvmsg(self, *args: Never, **kwargs: Never) -> Never: ... # type: ignore[override] @@ -577,19 +590,20 @@ current SSL channel. if sys.version_info >= (3, 13): def get_verified_chain(self) -> list[bytes]: """Returns verified certificate chain provided by the other -end of the SSL channel as a list of DER-encoded bytes. + end of the SSL channel as a list of DER-encoded bytes. + + If certificate verification was disabled method acts the same as + ``SSLSocket.get_unverified_chain``. + """ -If certificate verification was disabled method acts the same as -``SSLSocket.get_unverified_chain``. 
-""" def get_unverified_chain(self) -> list[bytes]: """Returns raw certificate chain provided by the other -end of the SSL channel as a list of DER-encoded bytes. -""" + end of the SSL channel as a list of DER-encoded bytes. + """ class TLSVersion(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + MINIMUM_SUPPORTED = -2 MAXIMUM_SUPPORTED = -1 SSLv3 = 768 @@ -600,8 +614,9 @@ class TLSVersion(enum.IntEnum): class SSLContext(_SSLContext): """An SSLContext holds various SSL-related configuration options and -data, such as certificates and possibly a private key. -""" + data, such as certificates and possibly a private key. + """ + options: Options verify_flags: VerifyFlags verify_mode: VerifyMode @@ -639,12 +654,13 @@ data, such as certificates and possibly a private key. def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: """Returns a list of dicts with information of loaded CA certs. -If the optional argument is True, returns a DER-encoded copy of the CA -certificate. + If the optional argument is True, returns a DER-encoded copy of the CA + certificate. + + NOTE: Certificates in a capath directory aren't loaded unless they have + been used at least once. + """ -NOTE: Certificates in a capath directory aren't loaded unless they have -been used at least once. -""" @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload @@ -689,10 +705,10 @@ def create_default_context( ) -> SSLContext: """Create a SSLContext object with default settings. -NOTE: The protocol and settings may change anytime without prior - deprecation. The values represent a fair balance between maximum - compatibility and security. -""" + NOTE: The protocol and settings may change anytime without prior + deprecation. The values represent a fair balance between maximum + compatibility and security. + """ if sys.version_info >= (3, 10): def _create_unverified_context( @@ -709,11 +725,11 @@ if sys.version_info >= (3, 10): ) -> SSLContext: """Create a SSLContext object for Python stdlib modules -All Python stdlib modules shall use this function to create SSLContext -objects in order to keep common settings in one place. The configuration -is less restrict than create_default_context()'s to increase backward -compatibility. -""" + All Python stdlib modules shall use this function to create SSLContext + objects in order to keep common settings in one place. The configuration + is less restrict than create_default_context()'s to increase backward + compatibility. + """ else: def _create_unverified_context( @@ -730,135 +746,145 @@ else: ) -> SSLContext: """Create a SSLContext object for Python stdlib modules - All Python stdlib modules shall use this function to create SSLContext - objects in order to keep common settings in one place. The configuration - is less restrict than create_default_context()'s to increase backward - compatibility. - """ + All Python stdlib modules shall use this function to create SSLContext + objects in order to keep common settings in one place. The configuration + is less restrict than create_default_context()'s to increase backward + compatibility. + """ _create_default_https_context = create_default_context class SSLObject: """This class implements an interface on top of a low-level SSL object as -implemented by OpenSSL. This object captures the state of an SSL connection -but does not provide any network IO itself. IO needs to be performed -through separate "BIO" objects which are OpenSSL's IO abstraction layer. 
+ implemented by OpenSSL. This object captures the state of an SSL connection + but does not provide any network IO itself. IO needs to be performed + through separate "BIO" objects which are OpenSSL's IO abstraction layer. -This class does not have a public constructor. Instances are returned by -``SSLContext.wrap_bio``. This class is typically used by framework authors -that want to implement asynchronous IO for SSL through memory buffers. + This class does not have a public constructor. Instances are returned by + ``SSLContext.wrap_bio``. This class is typically used by framework authors + that want to implement asynchronous IO for SSL through memory buffers. -When compared to ``SSLSocket``, this object lacks the following features: + When compared to ``SSLSocket``, this object lacks the following features: + + * Any form of network IO, including methods such as ``recv`` and ``send``. + * The ``do_handshake_on_connect`` and ``suppress_ragged_eofs`` machinery. + """ - * Any form of network IO, including methods such as ``recv`` and ``send``. - * The ``do_handshake_on_connect`` and ``suppress_ragged_eofs`` machinery. -""" context: SSLContext @property def server_side(self) -> bool: - """Whether this is a server-side socket. -""" + """Whether this is a server-side socket.""" + @property def server_hostname(self) -> str | None: """The currently set server hostname (for SNI), or ``None`` if no -server hostname is set. -""" + server hostname is set. + """ session: SSLSession | None @property def session_reused(self) -> bool: - """Was the client session reused during handshake -""" + """Was the client session reused during handshake""" + def __init__(self, *args: Any, **kwargs: Any) -> None: ... def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: """Read up to 'len' bytes from the SSL object and return them. -If 'buffer' is provided, read into this buffer and return the number of -bytes read. -""" + If 'buffer' is provided, read into this buffer and return the number of + bytes read. + """ + def write(self, data: ReadableBuffer) -> int: """Write 'data' to the SSL object and return the number of bytes -written. + written. + + The 'data' argument must support the buffer interface. + """ -The 'data' argument must support the buffer interface. -""" @overload def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: """Returns a formatted version of the data in the certificate provided -by the other end of the SSL channel. + by the other end of the SSL channel. + + Return None if no certificate was provided, {} if a certificate was + provided, but not validated. + """ -Return None if no certificate was provided, {} if a certificate was -provided, but not validated. -""" @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... def selected_alpn_protocol(self) -> str | None: """Return the currently selected ALPN protocol as a string, or ``None`` -if a next protocol was not negotiated or if ALPN is not supported by one -of the peers. -""" + if a next protocol was not negotiated or if ALPN is not supported by one + of the peers. + """ if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10. Use ALPN instead.") def selected_npn_protocol(self) -> str | None: """Return the currently selected NPN protocol as a string, or ``None`` -if a next protocol was not negotiated or if NPN is not supported by one -of the peers. 
-""" + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. + """ else: def selected_npn_protocol(self) -> str | None: """Return the currently selected NPN protocol as a string, or ``None`` - if a next protocol was not negotiated or if NPN is not supported by one - of the peers. -""" + if a next protocol was not negotiated or if NPN is not supported by one + of the peers. + """ def cipher(self) -> tuple[str, str, int] | None: """Return the currently selected cipher as a 3-tuple ``(name, -ssl_version, secret_bits)``. -""" + ssl_version, secret_bits)``. + """ + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: """Return a list of ciphers shared by the client during the handshake or -None if this is not a valid server connection. -""" + None if this is not a valid server connection. + """ + def compression(self) -> str | None: """Return the current compression algorithm in use, or ``None`` if -compression was not negotiated or not supported by one of the peers. -""" + compression was not negotiated or not supported by one of the peers. + """ + def pending(self) -> int: - """Return the number of bytes that can be read immediately. -""" + """Return the number of bytes that can be read immediately.""" + def do_handshake(self) -> None: - """Start the SSL/TLS handshake. -""" + """Start the SSL/TLS handshake.""" + def unwrap(self) -> None: - """Start the SSL shutdown handshake. -""" + """Start the SSL shutdown handshake.""" + def version(self) -> str | None: """Return a string identifying the protocol version used by the -current SSL channel. -""" + current SSL channel. + """ + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: """Get channel binding data for current connection. Raise ValueError -if the requested `cb_type` is not supported. Return bytes of the data -or None if the data is not available (e.g. before the handshake). -""" + if the requested `cb_type` is not supported. Return bytes of the data + or None if the data is not available (e.g. before the handshake). + """ + def verify_client_post_handshake(self) -> None: ... if sys.version_info >= (3, 13): def get_verified_chain(self) -> list[bytes]: """Returns verified certificate chain provided by the other -end of the SSL channel as a list of DER-encoded bytes. + end of the SSL channel as a list of DER-encoded bytes. + + If certificate verification was disabled method acts the same as + ``SSLSocket.get_unverified_chain``. + """ -If certificate verification was disabled method acts the same as -``SSLSocket.get_unverified_chain``. -""" def get_unverified_chain(self) -> list[bytes]: """Returns raw certificate chain provided by the other -end of the SSL channel as a list of DER-encoded bytes. -""" + end of the SSL channel as a list of DER-encoded bytes. + """ class SSLErrorNumber(enum.IntEnum): - """An enumeration. 
-""" + """An enumeration.""" + SSL_ERROR_EOF = 8 SSL_ERROR_INVALID_ERROR_CODE = 10 SSL_ERROR_SSL = 1 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi index da24b541e81a7..cee0bea41f991 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stat.pyi @@ -2,6 +2,7 @@ Suggested usage: from stat import * """ + import sys from _stat import * from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi index 225dbea69c3a8..f5f731c46275e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/statistics.pyi @@ -103,6 +103,7 @@ Exceptions A single exception is defined: StatisticsError is a subclass of ValueError. """ + import sys from _typeshed import SupportsRichComparisonT from collections.abc import Callable, Hashable, Iterable, Sequence @@ -152,565 +153,576 @@ if sys.version_info >= (3, 11): def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: """Convert data to floats and compute the arithmetic mean. -This runs faster than the mean() function and it always returns a float. -If the input dataset is empty, it raises a StatisticsError. + This runs faster than the mean() function and it always returns a float. + If the input dataset is empty, it raises a StatisticsError. ->>> fmean([3.5, 4.0, 5.25]) -4.25 + >>> fmean([3.5, 4.0, 5.25]) + 4.25 -""" + """ else: def fmean(data: Iterable[SupportsFloat]) -> float: """Convert data to floats and compute the arithmetic mean. - This runs faster than the mean() function and it always returns a float. - If the input dataset is empty, it raises a StatisticsError. + This runs faster than the mean() function and it always returns a float. + If the input dataset is empty, it raises a StatisticsError. - >>> fmean([3.5, 4.0, 5.25]) - 4.25 - """ + >>> fmean([3.5, 4.0, 5.25]) + 4.25 + """ def geometric_mean(data: Iterable[SupportsFloat]) -> float: """Convert data to floats and compute the geometric mean. -Raises a StatisticsError if the input dataset is empty -or if it contains a negative value. + Raises a StatisticsError if the input dataset is empty + or if it contains a negative value. -Returns zero if the product of inputs is zero. + Returns zero if the product of inputs is zero. -No special efforts are made to achieve exact results. -(However, this may change in the future.) + No special efforts are made to achieve exact results. + (However, this may change in the future.) ->>> round(geometric_mean([54, 24, 36]), 9) -36.0 + >>> round(geometric_mean([54, 24, 36]), 9) + 36.0 + + """ -""" def mean(data: Iterable[_NumberT]) -> _NumberT: """Return the sample arithmetic mean of data. ->>> mean([1, 2, 3, 4, 4]) -2.8 + >>> mean([1, 2, 3, 4, 4]) + 2.8 ->>> from fractions import Fraction as F ->>> mean([F(3, 7), F(1, 21), F(5, 3), F(1, 3)]) -Fraction(13, 21) + >>> from fractions import Fraction as F + >>> mean([F(3, 7), F(1, 21), F(5, 3), F(1, 3)]) + Fraction(13, 21) ->>> from decimal import Decimal as D ->>> mean([D("0.5"), D("0.75"), D("0.625"), D("0.375")]) -Decimal('0.5625') + >>> from decimal import Decimal as D + >>> mean([D("0.5"), D("0.75"), D("0.625"), D("0.375")]) + Decimal('0.5625') -If ``data`` is empty, StatisticsError will be raised. + If ``data`` is empty, StatisticsError will be raised. 
-""" + """ if sys.version_info >= (3, 10): def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: """Return the harmonic mean of data. -The harmonic mean is the reciprocal of the arithmetic mean of the -reciprocals of the data. It can be used for averaging ratios or -rates, for example speeds. + The harmonic mean is the reciprocal of the arithmetic mean of the + reciprocals of the data. It can be used for averaging ratios or + rates, for example speeds. -Suppose a car travels 40 km/hr for 5 km and then speeds-up to -60 km/hr for another 5 km. What is the average speed? + Suppose a car travels 40 km/hr for 5 km and then speeds-up to + 60 km/hr for another 5 km. What is the average speed? - >>> harmonic_mean([40, 60]) - 48.0 + >>> harmonic_mean([40, 60]) + 48.0 -Suppose a car travels 40 km/hr for 5 km, and when traffic clears, -speeds-up to 60 km/hr for the remaining 30 km of the journey. What -is the average speed? + Suppose a car travels 40 km/hr for 5 km, and when traffic clears, + speeds-up to 60 km/hr for the remaining 30 km of the journey. What + is the average speed? - >>> harmonic_mean([40, 60], weights=[5, 30]) - 56.0 + >>> harmonic_mean([40, 60], weights=[5, 30]) + 56.0 -If ``data`` is empty, or any element is less than zero, -``harmonic_mean`` will raise ``StatisticsError``. + If ``data`` is empty, or any element is less than zero, + ``harmonic_mean`` will raise ``StatisticsError``. -""" + """ else: def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: """Return the harmonic mean of data. - The harmonic mean, sometimes called the subcontrary mean, is the - reciprocal of the arithmetic mean of the reciprocals of the data, - and is often appropriate when averaging quantities which are rates - or ratios, for example speeds. Example: + The harmonic mean, sometimes called the subcontrary mean, is the + reciprocal of the arithmetic mean of the reciprocals of the data, + and is often appropriate when averaging quantities which are rates + or ratios, for example speeds. Example: - Suppose an investor purchases an equal value of shares in each of - three companies, with P/E (price/earning) ratios of 2.5, 3 and 10. - What is the average P/E ratio for the investor's portfolio? + Suppose an investor purchases an equal value of shares in each of + three companies, with P/E (price/earning) ratios of 2.5, 3 and 10. + What is the average P/E ratio for the investor's portfolio? - >>> harmonic_mean([2.5, 3, 10]) # For an equal investment portfolio. - 3.6 + >>> harmonic_mean([2.5, 3, 10]) # For an equal investment portfolio. + 3.6 - Using the arithmetic mean would give an average of about 5.167, which - is too high. + Using the arithmetic mean would give an average of about 5.167, which + is too high. - If ``data`` is empty, or any element is less than zero, - ``harmonic_mean`` will raise ``StatisticsError``. - """ + If ``data`` is empty, or any element is less than zero, + ``harmonic_mean`` will raise ``StatisticsError``. + """ def median(data: Iterable[_NumberT]) -> _NumberT: """Return the median (middle value) of numeric data. -When the number of data points is odd, return the middle data point. -When the number of data points is even, the median is interpolated by -taking the average of the two middle values: + When the number of data points is odd, return the middle data point. 
+ When the number of data points is even, the median is interpolated by + taking the average of the two middle values: ->>> median([1, 3, 5]) -3 ->>> median([1, 3, 5, 7]) -4.0 + >>> median([1, 3, 5]) + 3 + >>> median([1, 3, 5, 7]) + 4.0 + + """ -""" def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: """Return the low median of numeric data. -When the number of data points is odd, the middle value is returned. -When it is even, the smaller of the two middle values is returned. + When the number of data points is odd, the middle value is returned. + When it is even, the smaller of the two middle values is returned. + + >>> median_low([1, 3, 5]) + 3 + >>> median_low([1, 3, 5, 7]) + 3 ->>> median_low([1, 3, 5]) -3 ->>> median_low([1, 3, 5, 7]) -3 + """ -""" def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: """Return the high median of data. -When the number of data points is odd, the middle value is returned. -When it is even, the larger of the two middle values is returned. + When the number of data points is odd, the middle value is returned. + When it is even, the larger of the two middle values is returned. ->>> median_high([1, 3, 5]) -3 ->>> median_high([1, 3, 5, 7]) -5 + >>> median_high([1, 3, 5]) + 3 + >>> median_high([1, 3, 5, 7]) + 5 -""" + """ if sys.version_info >= (3, 11): def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: """Estimates the median for numeric data binned around the midpoints -of consecutive, fixed-width intervals. + of consecutive, fixed-width intervals. -The *data* can be any iterable of numeric data with each value being -exactly the midpoint of a bin. At least one value must be present. + The *data* can be any iterable of numeric data with each value being + exactly the midpoint of a bin. At least one value must be present. -The *interval* is width of each bin. + The *interval* is width of each bin. -For example, demographic information may have been summarized into -consecutive ten-year age groups with each group being represented -by the 5-year midpoints of the intervals: + For example, demographic information may have been summarized into + consecutive ten-year age groups with each group being represented + by the 5-year midpoints of the intervals: - >>> demographics = Counter({ - ... 25: 172, # 20 to 30 years old - ... 35: 484, # 30 to 40 years old - ... 45: 387, # 40 to 50 years old - ... 55: 22, # 50 to 60 years old - ... 65: 6, # 60 to 70 years old - ... }) + >>> demographics = Counter({ + ... 25: 172, # 20 to 30 years old + ... 35: 484, # 30 to 40 years old + ... 45: 387, # 40 to 50 years old + ... 55: 22, # 50 to 60 years old + ... 65: 6, # 60 to 70 years old + ... }) -The 50th percentile (median) is the 536th person out of the 1071 -member cohort. That person is in the 30 to 40 year old age group. + The 50th percentile (median) is the 536th person out of the 1071 + member cohort. That person is in the 30 to 40 year old age group. -The regular median() function would assume that everyone in the -tricenarian age group was exactly 35 years old. A more tenable -assumption is that the 484 members of that age group are evenly -distributed between 30 and 40. For that, we use median_grouped(). + The regular median() function would assume that everyone in the + tricenarian age group was exactly 35 years old. A more tenable + assumption is that the 484 members of that age group are evenly + distributed between 30 and 40. For that, we use median_grouped(). 
- >>> data = list(demographics.elements()) - >>> median(data) - 35 - >>> round(median_grouped(data, interval=10), 1) - 37.5 + >>> data = list(demographics.elements()) + >>> median(data) + 35 + >>> round(median_grouped(data, interval=10), 1) + 37.5 -The caller is responsible for making sure the data points are separated -by exact multiples of *interval*. This is essential for getting a -correct result. The function does not check this precondition. + The caller is responsible for making sure the data points are separated + by exact multiples of *interval*. This is essential for getting a + correct result. The function does not check this precondition. -Inputs may be any numeric type that can be coerced to a float during -the interpolation step. + Inputs may be any numeric type that can be coerced to a float during + the interpolation step. -""" + """ else: def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: """Return the 50th percentile (median) of grouped continuous data. - >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) - 3.7 - >>> median_grouped([52, 52, 53, 54]) - 52.5 + >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) + 3.7 + >>> median_grouped([52, 52, 53, 54]) + 52.5 - This calculates the median as the 50th percentile, and should be - used when your data is continuous and grouped. In the above example, - the values 1, 2, 3, etc. actually represent the midpoint of classes - 0.5-1.5, 1.5-2.5, 2.5-3.5, etc. The middle value falls somewhere in - class 3.5-4.5, and interpolation is used to estimate it. + This calculates the median as the 50th percentile, and should be + used when your data is continuous and grouped. In the above example, + the values 1, 2, 3, etc. actually represent the midpoint of classes + 0.5-1.5, 1.5-2.5, 2.5-3.5, etc. The middle value falls somewhere in + class 3.5-4.5, and interpolation is used to estimate it. - Optional argument ``interval`` represents the class interval, and - defaults to 1. Changing the class interval naturally will change the - interpolated 50th percentile value: + Optional argument ``interval`` represents the class interval, and + defaults to 1. Changing the class interval naturally will change the + interpolated 50th percentile value: - >>> median_grouped([1, 3, 3, 5, 7], interval=1) - 3.25 - >>> median_grouped([1, 3, 3, 5, 7], interval=2) - 3.5 + >>> median_grouped([1, 3, 3, 5, 7], interval=1) + 3.25 + >>> median_grouped([1, 3, 3, 5, 7], interval=2) + 3.5 - This function does not check whether the data points are at least - ``interval`` apart. - """ + This function does not check whether the data points are at least + ``interval`` apart. + """ def mode(data: Iterable[_HashableT]) -> _HashableT: """Return the most common data point from discrete or nominal data. -``mode`` assumes discrete data, and returns a single value. This is the -standard treatment of the mode as commonly taught in schools: + ``mode`` assumes discrete data, and returns a single value. 
This is the + standard treatment of the mode as commonly taught in schools: - >>> mode([1, 1, 2, 3, 3, 3, 3, 4]) - 3 + >>> mode([1, 1, 2, 3, 3, 3, 3, 4]) + 3 -This also works with nominal (non-numeric) data: + This also works with nominal (non-numeric) data: - >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) - 'red' + >>> mode(["red", "blue", "blue", "red", "green", "red", "red"]) + 'red' -If there are multiple modes with same frequency, return the first one -encountered: + If there are multiple modes with same frequency, return the first one + encountered: - >>> mode(['red', 'red', 'green', 'blue', 'blue']) - 'red' + >>> mode(['red', 'red', 'green', 'blue', 'blue']) + 'red' -If *data* is empty, ``mode``, raises StatisticsError. + If *data* is empty, ``mode``, raises StatisticsError. + + """ -""" def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: """Return a list of the most frequently occurring values. -Will return more than one result if there are multiple modes -or an empty list if *data* is empty. + Will return more than one result if there are multiple modes + or an empty list if *data* is empty. ->>> multimode('aabbbbbbbbcc') -['b'] ->>> multimode('aabbbbccddddeeffffgg') -['b', 'd', 'f'] ->>> multimode('') -[] + >>> multimode('aabbbbbbbbcc') + ['b'] + >>> multimode('aabbbbccddddeeffffgg') + ['b', 'd', 'f'] + >>> multimode('') + [] + + """ -""" def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: """Return the square root of the population variance. -See ``pvariance`` for arguments and other details. + See ``pvariance`` for arguments and other details. ->>> pstdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) -0.986893273527251 + >>> pstdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) + 0.986893273527251 + + """ -""" def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: """Return the population variance of ``data``. -data should be a sequence or iterable of Real-valued numbers, with at least one -value. The optional argument mu, if given, should be the mean of -the data. If it is missing or None, the mean is automatically calculated. + data should be a sequence or iterable of Real-valued numbers, with at least one + value. The optional argument mu, if given, should be the mean of + the data. If it is missing or None, the mean is automatically calculated. -Use this function to calculate the variance from the entire population. -To estimate the variance from a sample, the ``variance`` function is -usually a better choice. + Use this function to calculate the variance from the entire population. + To estimate the variance from a sample, the ``variance`` function is + usually a better choice. 
-Examples: + Examples: ->>> data = [0.0, 0.25, 0.25, 1.25, 1.5, 1.75, 2.75, 3.25] ->>> pvariance(data) -1.25 + >>> data = [0.0, 0.25, 0.25, 1.25, 1.5, 1.75, 2.75, 3.25] + >>> pvariance(data) + 1.25 -If you have already calculated the mean of the data, you can pass it as -the optional second argument to avoid recalculating it: + If you have already calculated the mean of the data, you can pass it as + the optional second argument to avoid recalculating it: ->>> mu = mean(data) ->>> pvariance(data, mu) -1.25 + >>> mu = mean(data) + >>> pvariance(data, mu) + 1.25 -Decimals and Fractions are supported: + Decimals and Fractions are supported: ->>> from decimal import Decimal as D ->>> pvariance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) -Decimal('24.815') + >>> from decimal import Decimal as D + >>> pvariance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) + Decimal('24.815') ->>> from fractions import Fraction as F ->>> pvariance([F(1, 4), F(5, 4), F(1, 2)]) -Fraction(13, 72) + >>> from fractions import Fraction as F + >>> pvariance([F(1, 4), F(5, 4), F(1, 2)]) + Fraction(13, 72) -""" -def quantiles( - data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" -) -> list[_NumberT]: + """ + +def quantiles(data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive") -> list[_NumberT]: """Divide *data* into *n* continuous intervals with equal probability. -Returns a list of (n - 1) cut points separating the intervals. + Returns a list of (n - 1) cut points separating the intervals. -Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. -Set *n* to 100 for percentiles which gives the 99 cuts points that -separate *data* in to 100 equal sized groups. + Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. + Set *n* to 100 for percentiles which gives the 99 cuts points that + separate *data* in to 100 equal sized groups. -The *data* can be any iterable containing sample. -The cut points are linearly interpolated between data points. + The *data* can be any iterable containing sample. + The cut points are linearly interpolated between data points. -If *method* is set to *inclusive*, *data* is treated as population -data. The minimum value is treated as the 0th percentile and the -maximum value is treated as the 100th percentile. + If *method* is set to *inclusive*, *data* is treated as population + data. The minimum value is treated as the 0th percentile and the + maximum value is treated as the 100th percentile. + + """ -""" def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: """Return the square root of the sample variance. -See ``variance`` for arguments and other details. + See ``variance`` for arguments and other details. ->>> stdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) -1.0810874155219827 + >>> stdev([1.5, 2.5, 2.5, 2.75, 3.25, 4.75]) + 1.0810874155219827 + + """ -""" def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: """Return the sample variance of data. -data should be an iterable of Real-valued numbers, with at least two -values. The optional argument xbar, if given, should be the mean of -the data. If it is missing or None, the mean is automatically calculated. + data should be an iterable of Real-valued numbers, with at least two + values. The optional argument xbar, if given, should be the mean of + the data. If it is missing or None, the mean is automatically calculated. 
-Use this function when your data is a sample from a population. To -calculate the variance from the entire population, see ``pvariance``. + Use this function when your data is a sample from a population. To + calculate the variance from the entire population, see ``pvariance``. -Examples: + Examples: ->>> data = [2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5] ->>> variance(data) -1.3720238095238095 + >>> data = [2.75, 1.75, 1.25, 0.25, 0.5, 1.25, 3.5] + >>> variance(data) + 1.3720238095238095 -If you have already calculated the mean of your data, you can pass it as -the optional second argument ``xbar`` to avoid recalculating it: + If you have already calculated the mean of your data, you can pass it as + the optional second argument ``xbar`` to avoid recalculating it: ->>> m = mean(data) ->>> variance(data, m) -1.3720238095238095 + >>> m = mean(data) + >>> variance(data, m) + 1.3720238095238095 -This function does not check that ``xbar`` is actually the mean of -``data``. Giving arbitrary values for ``xbar`` may lead to invalid or -impossible results. + This function does not check that ``xbar`` is actually the mean of + ``data``. Giving arbitrary values for ``xbar`` may lead to invalid or + impossible results. -Decimals and Fractions are supported: + Decimals and Fractions are supported: ->>> from decimal import Decimal as D ->>> variance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) -Decimal('31.01875') + >>> from decimal import Decimal as D + >>> variance([D("27.5"), D("30.25"), D("30.25"), D("34.5"), D("41.75")]) + Decimal('31.01875') ->>> from fractions import Fraction as F ->>> variance([F(1, 6), F(1, 2), F(5, 3)]) -Fraction(67, 108) + >>> from fractions import Fraction as F + >>> variance([F(1, 6), F(1, 2), F(5, 3)]) + Fraction(67, 108) -""" + """ class NormalDist: - """Normal distribution of a random variable -""" + """Normal distribution of a random variable""" + __slots__ = {"_mu": "Arithmetic mean of a normal distribution", "_sigma": "Standard deviation of a normal distribution"} def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: - """NormalDist where mu is the mean and sigma is the standard deviation. -""" + """NormalDist where mu is the mean and sigma is the standard deviation.""" + @property def mean(self) -> float: - """Arithmetic mean of the normal distribution. -""" + """Arithmetic mean of the normal distribution.""" + @property def median(self) -> float: - """Return the median of the normal distribution -""" + """Return the median of the normal distribution""" + @property def mode(self) -> float: """Return the mode of the normal distribution -The mode is the value x where which the probability density -function (pdf) takes its maximum value. -""" + The mode is the value x where which the probability density + function (pdf) takes its maximum value. + """ + @property def stdev(self) -> float: - """Standard deviation of the normal distribution. -""" + """Standard deviation of the normal distribution.""" + @property def variance(self) -> float: - """Square of the standard deviation. -""" + """Square of the standard deviation.""" + @classmethod def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: - """Make a normal distribution instance from sample data. -""" + """Make a normal distribution instance from sample data.""" + def samples(self, n: SupportsIndex, *, seed: _Seed | None = None) -> list[float]: - """Generate *n* samples for a given mean and standard deviation. 
-""" + """Generate *n* samples for a given mean and standard deviation.""" + def pdf(self, x: float) -> float: - """Probability density function. P(x <= X < x+dx) / dx -""" + """Probability density function. P(x <= X < x+dx) / dx""" + def cdf(self, x: float) -> float: - """Cumulative distribution function. P(X <= x) -""" + """Cumulative distribution function. P(X <= x)""" + def inv_cdf(self, p: float) -> float: """Inverse cumulative distribution function. x : P(X <= x) = p -Finds the value of the random variable such that the probability of -the variable being less than or equal to that value equals the given -probability. + Finds the value of the random variable such that the probability of + the variable being less than or equal to that value equals the given + probability. + + This function is also called the percent point function or quantile + function. + """ -This function is also called the percent point function or quantile -function. -""" def overlap(self, other: NormalDist) -> float: """Compute the overlapping coefficient (OVL) between two normal distributions. -Measures the agreement between two normal probability distributions. -Returns a value between 0.0 and 1.0 giving the overlapping area in -the two underlying probability density functions. + Measures the agreement between two normal probability distributions. + Returns a value between 0.0 and 1.0 giving the overlapping area in + the two underlying probability density functions. + + >>> N1 = NormalDist(2.4, 1.6) + >>> N2 = NormalDist(3.2, 2.0) + >>> N1.overlap(N2) + 0.8035050657330205 + """ - >>> N1 = NormalDist(2.4, 1.6) - >>> N2 = NormalDist(3.2, 2.0) - >>> N1.overlap(N2) - 0.8035050657330205 -""" def quantiles(self, n: int = 4) -> list[float]: """Divide into *n* continuous intervals with equal probability. -Returns a list of (n - 1) cut points separating the intervals. + Returns a list of (n - 1) cut points separating the intervals. + + Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. + Set *n* to 100 for percentiles which gives the 99 cuts points that + separate the normal distribution in to 100 equal sized groups. + """ -Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. -Set *n* to 100 for percentiles which gives the 99 cuts points that -separate the normal distribution in to 100 equal sized groups. -""" def zscore(self, x: float) -> float: """Compute the Standard Score. (x - mean) / stdev -Describes *x* in terms of the number of standard deviations -above or below the mean of the normal distribution. -""" + Describes *x* in terms of the number of standard deviations + above or below the mean of the normal distribution. + """ + def __eq__(x1, x2: object) -> bool: - """Two NormalDist objects are equal if their mu and sigma are both equal. -""" + """Two NormalDist objects are equal if their mu and sigma are both equal.""" + def __add__(x1, x2: float | NormalDist) -> NormalDist: """Add a constant or another NormalDist instance. -If *other* is a constant, translate mu by the constant, -leaving sigma unchanged. + If *other* is a constant, translate mu by the constant, + leaving sigma unchanged. + + If *other* is a NormalDist, add both the means and the variances. + Mathematically, this works only if the two distributions are + independent or if they are jointly normally distributed. + """ -If *other* is a NormalDist, add both the means and the variances. -Mathematically, this works only if the two distributions are -independent or if they are jointly normally distributed. 
-""" def __sub__(x1, x2: float | NormalDist) -> NormalDist: """Subtract a constant or another NormalDist instance. -If *other* is a constant, translate by the constant mu, -leaving sigma unchanged. + If *other* is a constant, translate by the constant mu, + leaving sigma unchanged. + + If *other* is a NormalDist, subtract the means and add the variances. + Mathematically, this works only if the two distributions are + independent or if they are jointly normally distributed. + """ -If *other* is a NormalDist, subtract the means and add the variances. -Mathematically, this works only if the two distributions are -independent or if they are jointly normally distributed. -""" def __mul__(x1, x2: float) -> NormalDist: """Multiply both mu and sigma by a constant. -Used for rescaling, perhaps to change measurement units. -Sigma is scaled with the absolute value of the constant. -""" + Used for rescaling, perhaps to change measurement units. + Sigma is scaled with the absolute value of the constant. + """ + def __truediv__(x1, x2: float) -> NormalDist: """Divide both mu and sigma by a constant. -Used for rescaling, perhaps to change measurement units. -Sigma is scaled with the absolute value of the constant. -""" + Used for rescaling, perhaps to change measurement units. + Sigma is scaled with the absolute value of the constant. + """ + def __pos__(x1) -> NormalDist: - """Return a copy of the instance. -""" + """Return a copy of the instance.""" + def __neg__(x1) -> NormalDist: - """Negates mu while keeping sigma the same. -""" + """Negates mu while keeping sigma the same.""" __radd__ = __add__ def __rsub__(x1, x2: float | NormalDist) -> NormalDist: - """Subtract a NormalDist from a constant or another NormalDist. -""" + """Subtract a NormalDist from a constant or another NormalDist.""" __rmul__ = __mul__ def __hash__(self) -> int: - """NormalDist objects hash equal if their mu and sigma are both equal. -""" + """NormalDist objects hash equal if their mu and sigma are both equal.""" if sys.version_info >= (3, 12): - def correlation( - x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear" - ) -> float: + def correlation(x: Sequence[_Number], y: Sequence[_Number], /, *, method: Literal["linear", "ranked"] = "linear") -> float: """Pearson's correlation coefficient -Return the Pearson's correlation coefficient for two inputs. Pearson's -correlation coefficient *r* takes values between -1 and +1. It measures -the strength and direction of a linear relationship. + Return the Pearson's correlation coefficient for two inputs. Pearson's + correlation coefficient *r* takes values between -1 and +1. It measures + the strength and direction of a linear relationship. ->>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] ->>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] ->>> correlation(x, x) -1.0 ->>> correlation(x, y) --1.0 + >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] + >>> correlation(x, x) + 1.0 + >>> correlation(x, y) + -1.0 -If *method* is "ranked", computes Spearman's rank correlation coefficient -for two inputs. The data is replaced by ranks. Ties are averaged -so that equal values receive the same rank. The resulting coefficient -measures the strength of a monotonic relationship. + If *method* is "ranked", computes Spearman's rank correlation coefficient + for two inputs. The data is replaced by ranks. Ties are averaged + so that equal values receive the same rank. The resulting coefficient + measures the strength of a monotonic relationship. 
-Spearman's rank correlation coefficient is appropriate for ordinal -data or for continuous data that doesn't meet the linear proportion -requirement for Pearson's correlation coefficient. + Spearman's rank correlation coefficient is appropriate for ordinal + data or for continuous data that doesn't meet the linear proportion + requirement for Pearson's correlation coefficient. -""" + """ elif sys.version_info >= (3, 10): def correlation(x: Sequence[_Number], y: Sequence[_Number], /) -> float: """Pearson's correlation coefficient - Return the Pearson's correlation coefficient for two inputs. Pearson's - correlation coefficient *r* takes values between -1 and +1. It measures the - strength and direction of the linear relationship, where +1 means very - strong, positive linear relationship, -1 very strong, negative linear - relationship, and 0 no linear relationship. + Return the Pearson's correlation coefficient for two inputs. Pearson's + correlation coefficient *r* takes values between -1 and +1. It measures the + strength and direction of the linear relationship, where +1 means very + strong, positive linear relationship, -1 very strong, negative linear + relationship, and 0 no linear relationship. - >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] - >>> correlation(x, x) - 1.0 - >>> correlation(x, y) - -1.0 + >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> y = [9, 8, 7, 6, 5, 4, 3, 2, 1] + >>> correlation(x, x) + 1.0 + >>> correlation(x, y) + -1.0 - """ + """ if sys.version_info >= (3, 10): def covariance(x: Sequence[_Number], y: Sequence[_Number], /) -> float: """Covariance -Return the sample covariance of two inputs *x* and *y*. Covariance -is a measure of the joint variability of two inputs. + Return the sample covariance of two inputs *x* and *y*. Covariance + is a measure of the joint variability of two inputs. ->>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] ->>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] ->>> covariance(x, y) -0.75 ->>> z = [9, 8, 7, 6, 5, 4, 3, 2, 1] ->>> covariance(x, z) --7.5 ->>> covariance(z, x) --7.5 + >>> x = [1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> y = [1, 2, 3, 1, 2, 3, 1, 2, 3] + >>> covariance(x, y) + 0.75 + >>> z = [9, 8, 7, 6, 5, 4, 3, 2, 1] + >>> covariance(x, z) + -7.5 + >>> covariance(z, x) + -7.5 -""" + """ class LinearRegression(NamedTuple): - """LinearRegression(slope, intercept) -""" + """LinearRegression(slope, intercept)""" + slope: float intercept: float @@ -720,68 +732,68 @@ if sys.version_info >= (3, 11): ) -> LinearRegression: """Slope and intercept for simple linear regression. -Return the slope and intercept of simple linear regression -parameters estimated using ordinary least squares. Simple linear -regression describes relationship between an independent variable -*x* and a dependent variable *y* in terms of a linear function: + Return the slope and intercept of simple linear regression + parameters estimated using ordinary least squares. Simple linear + regression describes relationship between an independent variable + *x* and a dependent variable *y* in terms of a linear function: - y = slope * x + intercept + noise + y = slope * x + intercept + noise -where *slope* and *intercept* are the regression parameters that are -estimated, and noise represents the variability of the data that was -not explained by the linear regression (it is equal to the -difference between predicted and actual values of the dependent -variable). 
+ where *slope* and *intercept* are the regression parameters that are + estimated, and noise represents the variability of the data that was + not explained by the linear regression (it is equal to the + difference between predicted and actual values of the dependent + variable). -The parameters are returned as a named tuple. + The parameters are returned as a named tuple. ->>> x = [1, 2, 3, 4, 5] ->>> noise = NormalDist().samples(5, seed=42) ->>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] ->>> linear_regression(x, y) #doctest: +ELLIPSIS -LinearRegression(slope=3.17495..., intercept=1.00925...) + >>> x = [1, 2, 3, 4, 5] + >>> noise = NormalDist().samples(5, seed=42) + >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] + >>> linear_regression(x, y) #doctest: +ELLIPSIS + LinearRegression(slope=3.17495..., intercept=1.00925...) -If *proportional* is true, the independent variable *x* and the -dependent variable *y* are assumed to be directly proportional. -The data is fit to a line passing through the origin. + If *proportional* is true, the independent variable *x* and the + dependent variable *y* are assumed to be directly proportional. + The data is fit to a line passing through the origin. -Since the *intercept* will always be 0.0, the underlying linear -function simplifies to: + Since the *intercept* will always be 0.0, the underlying linear + function simplifies to: - y = slope * x + noise + y = slope * x + noise ->>> y = [3 * x[i] + noise[i] for i in range(5)] ->>> linear_regression(x, y, proportional=True) #doctest: +ELLIPSIS -LinearRegression(slope=2.90475..., intercept=0.0) + >>> y = [3 * x[i] + noise[i] for i in range(5)] + >>> linear_regression(x, y, proportional=True) #doctest: +ELLIPSIS + LinearRegression(slope=2.90475..., intercept=0.0) -""" + """ elif sys.version_info >= (3, 10): def linear_regression(regressor: Sequence[_Number], dependent_variable: Sequence[_Number], /) -> LinearRegression: """Slope and intercept for simple linear regression. - Return the slope and intercept of simple linear regression - parameters estimated using ordinary least squares. Simple linear - regression describes relationship between an independent variable - *x* and a dependent variable *y* in terms of linear function: + Return the slope and intercept of simple linear regression + parameters estimated using ordinary least squares. Simple linear + regression describes relationship between an independent variable + *x* and a dependent variable *y* in terms of linear function: - y = slope * x + intercept + noise + y = slope * x + intercept + noise - where *slope* and *intercept* are the regression parameters that are - estimated, and noise represents the variability of the data that was - not explained by the linear regression (it is equal to the - difference between predicted and actual values of the dependent - variable). + where *slope* and *intercept* are the regression parameters that are + estimated, and noise represents the variability of the data that was + not explained by the linear regression (it is equal to the + difference between predicted and actual values of the dependent + variable). - The parameters are returned as a named tuple. + The parameters are returned as a named tuple. - >>> x = [1, 2, 3, 4, 5] - >>> noise = NormalDist().samples(5, seed=42) - >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] - >>> linear_regression(x, y) #doctest: +ELLIPSIS - LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) 
+ >>> x = [1, 2, 3, 4, 5] + >>> noise = NormalDist().samples(5, seed=42) + >>> y = [3 * x[i] + 2 + noise[i] for i in range(5)] + >>> linear_regression(x, y) #doctest: +ELLIPSIS + LinearRegression(slope=3.09078914170..., intercept=1.75684970486...) - """ + """ if sys.version_info >= (3, 13): _Kernel: TypeAlias = Literal[ @@ -799,124 +811,123 @@ if sys.version_info >= (3, 13): "triweight", "cosine", ] - def kde( - data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False - ) -> Callable[[float], float]: + def kde(data: Sequence[float], h: float, kernel: _Kernel = "normal", *, cumulative: bool = False) -> Callable[[float], float]: """Kernel Density Estimation: Create a continuous probability density -function or cumulative distribution function from discrete samples. - -The basic idea is to smooth the data using a kernel function -to help draw inferences about a population from a sample. - -The degree of smoothing is controlled by the scaling parameter h -which is called the bandwidth. Smaller values emphasize local -features while larger values give smoother results. - -The kernel determines the relative weights of the sample data -points. Generally, the choice of kernel shape does not matter -as much as the more influential bandwidth smoothing parameter. - -Kernels that give some weight to every sample point: - - normal (gauss) - logistic - sigmoid - -Kernels that only give weight to sample points within -the bandwidth: - - rectangular (uniform) - triangular - parabolic (epanechnikov) - quartic (biweight) - triweight - cosine - -If *cumulative* is true, will return a cumulative distribution function. - -A StatisticsError will be raised if the data sequence is empty. - -Example -------- - -Given a sample of six data points, construct a continuous -function that estimates the underlying probability density: - - >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] - >>> f_hat = kde(sample, h=1.5) - -Compute the area under the curve: - - >>> area = sum(f_hat(x) for x in range(-20, 20)) - >>> round(area, 4) - 1.0 - -Plot the estimated probability density function at -evenly spaced points from -6 to 10: - - >>> for x in range(-6, 11): - ... density = f_hat(x) - ... plot = ' ' * int(density * 400) + 'x' - ... print(f'{x:2}: {density:.3f} {plot}') - ... - -6: 0.002 x - -5: 0.009 x - -4: 0.031 x - -3: 0.070 x - -2: 0.111 x - -1: 0.125 x - 0: 0.110 x - 1: 0.086 x - 2: 0.068 x - 3: 0.059 x - 4: 0.066 x - 5: 0.082 x - 6: 0.082 x - 7: 0.058 x - 8: 0.028 x - 9: 0.009 x - 10: 0.002 x - -Estimate P(4.5 < X <= 7.5), the probability that a new sample value -will be between 4.5 and 7.5: - - >>> cdf = kde(sample, h=1.5, cumulative=True) - >>> round(cdf(7.5) - cdf(4.5), 2) - 0.22 - -References ----------- + function or cumulative distribution function from discrete samples. -Kernel density estimation and its application: -https://www.itm-conferences.org/articles/itmconf/pdf/2018/08/itmconf_sam2018_00037.pdf + The basic idea is to smooth the data using a kernel function + to help draw inferences about a population from a sample. -Kernel functions in common use: -https://en.wikipedia.org/wiki/Kernel_(statistics)#kernel_functions_in_common_use + The degree of smoothing is controlled by the scaling parameter h + which is called the bandwidth. Smaller values emphasize local + features while larger values give smoother results. 
-Interactive graphical demonstration and exploration: -https://demonstrations.wolfram.com/KernelDensityEstimation/ + The kernel determines the relative weights of the sample data + points. Generally, the choice of kernel shape does not matter + as much as the more influential bandwidth smoothing parameter. -Kernel estimation of cumulative distribution function of a random variable with bounded support -https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf + Kernels that give some weight to every sample point: + + normal (gauss) + logistic + sigmoid + + Kernels that only give weight to sample points within + the bandwidth: + + rectangular (uniform) + triangular + parabolic (epanechnikov) + quartic (biweight) + triweight + cosine + + If *cumulative* is true, will return a cumulative distribution function. + + A StatisticsError will be raised if the data sequence is empty. + + Example + ------- + + Given a sample of six data points, construct a continuous + function that estimates the underlying probability density: + + >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> f_hat = kde(sample, h=1.5) + + Compute the area under the curve: + + >>> area = sum(f_hat(x) for x in range(-20, 20)) + >>> round(area, 4) + 1.0 + + Plot the estimated probability density function at + evenly spaced points from -6 to 10: + + >>> for x in range(-6, 11): + ... density = f_hat(x) + ... plot = ' ' * int(density * 400) + 'x' + ... print(f'{x:2}: {density:.3f} {plot}') + ... + -6: 0.002 x + -5: 0.009 x + -4: 0.031 x + -3: 0.070 x + -2: 0.111 x + -1: 0.125 x + 0: 0.110 x + 1: 0.086 x + 2: 0.068 x + 3: 0.059 x + 4: 0.066 x + 5: 0.082 x + 6: 0.082 x + 7: 0.058 x + 8: 0.028 x + 9: 0.009 x + 10: 0.002 x + + Estimate P(4.5 < X <= 7.5), the probability that a new sample value + will be between 4.5 and 7.5: + + >>> cdf = kde(sample, h=1.5, cumulative=True) + >>> round(cdf(7.5) - cdf(4.5), 2) + 0.22 + + References + ---------- + + Kernel density estimation and its application: + https://www.itm-conferences.org/articles/itmconf/pdf/2018/08/itmconf_sam2018_00037.pdf + + Kernel functions in common use: + https://en.wikipedia.org/wiki/Kernel_(statistics)#kernel_functions_in_common_use + + Interactive graphical demonstration and exploration: + https://demonstrations.wolfram.com/KernelDensityEstimation/ + + Kernel estimation of cumulative distribution function of a random variable with bounded support + https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf + + """ -""" def kde_random( data: Sequence[float], h: float, kernel: _Kernel = "normal", *, seed: _Seed | None = None ) -> Callable[[], float]: """Return a function that makes a random selection from the estimated -probability density function created by kde(data, h, kernel). + probability density function created by kde(data, h, kernel). -Providing a *seed* allows reproducible selections within a single -thread. The seed may be an integer, float, str, or bytes. + Providing a *seed* allows reproducible selections within a single + thread. The seed may be an integer, float, str, or bytes. -A StatisticsError will be raised if the *data* sequence is empty. + A StatisticsError will be raised if the *data* sequence is empty. 
-Example: + Example: ->>> data = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] ->>> rand = kde_random(data, h=1.5, seed=8675309) ->>> new_selections = [rand() for i in range(10)] ->>> [round(x, 1) for x in new_selections] -[0.7, 6.2, 1.2, 6.9, 7.0, 1.8, 2.5, -0.5, -1.8, 5.6] + >>> data = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> rand = kde_random(data, h=1.5, seed=8675309) + >>> new_selections = [rand() for i in range(10)] + >>> [round(x, 1) for x in new_selections] + [0.7, 6.2, 1.2, 6.9, 7.0, 1.8, 2.5, -0.5, -1.8, 5.6] -""" + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi index 5b8e9e02c1125..6d87737eff51f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/__init__.pyi @@ -13,6 +13,7 @@ punctuation -- a string containing all ASCII punctuation characters printable -- a string containing all ASCII characters considered printable """ + import sys from _typeshed import StrOrLiteralStr from collections.abc import Iterable, Mapping, Sequence @@ -48,18 +49,18 @@ printable: Final[LiteralString] # string too long def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: """capwords(s [,sep]) -> string -Split the argument into words using split, capitalize each -word using capitalize, and join the capitalized words using -join. If the optional second argument sep is absent or None, -runs of whitespace characters are replaced by a single space -and leading and trailing whitespace are removed, otherwise -sep is used to split and join the words. + Split the argument into words using split, capitalize each + word using capitalize, and join the capitalized words using + join. If the optional second argument sep is absent or None, + runs of whitespace characters are replaced by a single space + and leading and trailing whitespace are removed, otherwise + sep is used to split and join the words. -""" + """ class Template: - """A string class for supporting $-substitutions. -""" + """A string class for supporting $-substitutions.""" + template: str delimiter: ClassVar[str] idpattern: ClassVar[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi index ced5ba899d636..8690a8093b048 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/string/templatelib.pyi @@ -1,5 +1,5 @@ -"""Support for template string literals (t-strings). -""" +"""Support for template string literals (t-strings).""" + from collections.abc import Iterator from types import GenericAlias from typing import Any, Literal, TypeVar, final, overload @@ -8,30 +8,29 @@ _T = TypeVar("_T") @final class Template: # TODO: consider making `Template` generic on `TypeVarTuple` - """Template object -""" + """Template object""" + strings: tuple[str, ...] interpolations: tuple[Interpolation, ...] def __new__(cls, *args: str | Interpolation) -> Template: ... def __iter__(self) -> Iterator[str | Interpolation]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __add__(self, other: Template, /) -> Template: - """Return self+value. 
-""" + """Return self+value.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" + @property def values(self) -> tuple[Any, ...]: # Tuple of interpolation values, which can have any type - """Values of interpolations -""" + """Values of interpolations""" @final class Interpolation: - """Interpolation object -""" + """Interpolation object""" + value: Any # TODO: consider making `Interpolation` generic in runtime expression: str conversion: Literal["a", "r", "s"] | None @@ -43,12 +42,11 @@ class Interpolation: cls, value: Any, expression: str = "", conversion: Literal["a", "r", "s"] | None = None, format_spec: str = "" ) -> Interpolation: ... def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" @overload def convert(obj: _T, /, conversion: None) -> _T: - """Convert *obj* using formatted string literal semantics. -""" + """Convert *obj* using formatted string literal semantics.""" + @overload def convert(obj: object, /, conversion: Literal["r", "s", "a"]) -> str: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi index d92673ccb76d4..75a354552168e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/stringprep.pyi @@ -3,6 +3,7 @@ There are two kinds of tables: sets, for which a member test is provided, and mappings, for which a mapping function is provided. """ + from typing import Final b1_set: Final[set[int]] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi index 3ae8477fc9cd3..9cc366060b83d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/struct.pyi @@ -27,6 +27,7 @@ Whitespace between formats is ignored. The variable struct.error is an exception raised on errors. """ + from _struct import * __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi index cca4bd3213ee0..60e4906577077 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/subprocess.pyi @@ -31,6 +31,7 @@ getoutput(...): Runs a command in the shell, waits for it to complete, getstatusoutput(...): Runs a command in the shell, waits for it to complete, then returns a (exitcode, output) tuple """ + import sys from _typeshed import MaybeNone, ReadableBuffer, StrOrBytesPath from collections.abc import Callable, Collection, Iterable, Mapping, Sequence @@ -110,14 +111,15 @@ _USE_POSIX_SPAWN: Final[bool] class CompletedProcess(Generic[_T]): """A process that has finished running. -This is returned by run(). + This is returned by run(). + + Attributes: + args: The list or str args passed to run(). + returncode: The exit code of the process, negative for signals. + stdout: The standard output (None if not captured). + stderr: The standard error (None if not captured). + """ -Attributes: - args: The list or str args passed to run(). - returncode: The exit code of the process, negative for signals. - stdout: The standard output (None if not captured). - stderr: The standard error (None if not captured). 
-""" # morally: _CMD args: Any returncode: int @@ -127,13 +129,13 @@ Attributes: stderr: _T def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... def check_returncode(self) -> None: - """Raise CalledProcessError if the exit code is non-zero. -""" + """Raise CalledProcessError if the exit code is non-zero.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -173,32 +175,33 @@ if sys.version_info >= (3, 11): ) -> CompletedProcess[str]: """Run command with arguments and return a CompletedProcess instance. -The returned instance will have attributes args, returncode, stdout and -stderr. By default, stdout and stderr are not captured, and those attributes -will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, -or pass capture_output=True to capture both. + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, + or pass capture_output=True to capture both. -If check is True and the exit code was non-zero, it raises a -CalledProcessError. The CalledProcessError object will have the return code -in the returncode attribute, and output & stderr attributes if those streams -were captured. + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. -If timeout (seconds) is given and the process takes too long, - a TimeoutExpired exception will be raised. + If timeout (seconds) is given and the process takes too long, + a TimeoutExpired exception will be raised. -There is an optional argument "input", allowing you to -pass bytes or a string to the subprocess's stdin. If you use this argument -you may not also use the Popen constructor's "stdin" argument, as -it will be used internally. + There is an optional argument "input", allowing you to + pass bytes or a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. -By default, all communication is in bytes, and therefore any "input" should -be bytes, and the stdout and stderr will be bytes. If in text mode, any -"input" should be a string, and stdout and stderr will be strings decoded -according to locale encoding, or by "encoding" if set. Text mode is -triggered by setting any of text, encoding, errors or universal_newlines. + By default, all communication is in bytes, and therefore any "input" should + be bytes, and the stdout and stderr will be bytes. If in text mode, any + "input" should be a string, and stdout and stderr will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode is + triggered by setting any of text, encoding, errors or universal_newlines. + + The other arguments are the same as for the Popen constructor. + """ -The other arguments are the same as for the Popen constructor. 
-""" @overload def run( args: _CMD, @@ -408,32 +411,33 @@ elif sys.version_info >= (3, 10): ) -> CompletedProcess[str]: """Run command with arguments and return a CompletedProcess instance. - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, - or pass capture_output=True to capture both. + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them, + or pass capture_output=True to capture both. - If check is True and the exit code was non-zero, it raises a - CalledProcessError. The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. + If timeout is given, and the process takes too long, a TimeoutExpired + exception will be raised. - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. + There is an optional argument "input", allowing you to + pass bytes or a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. + By default, all communication is in bytes, and therefore any "input" should + be bytes, and the stdout and stderr will be bytes. If in text mode, any + "input" should be a string, and stdout and stderr will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode is + triggered by setting any of text, encoding, errors or universal_newlines. + + The other arguments are the same as for the Popen constructor. + """ - The other arguments are the same as for the Popen constructor. - """ @overload def run( args: _CMD, @@ -637,31 +641,32 @@ else: ) -> CompletedProcess[str]: """Run command with arguments and return a CompletedProcess instance. - The returned instance will have attributes args, returncode, stdout and - stderr. By default, stdout and stderr are not captured, and those attributes - will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. + The returned instance will have attributes args, returncode, stdout and + stderr. By default, stdout and stderr are not captured, and those attributes + will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. - If check is True and the exit code was non-zero, it raises a - CalledProcessError. 
The CalledProcessError object will have the return code - in the returncode attribute, and output & stderr attributes if those streams - were captured. + If check is True and the exit code was non-zero, it raises a + CalledProcessError. The CalledProcessError object will have the return code + in the returncode attribute, and output & stderr attributes if those streams + were captured. - If timeout is given, and the process takes too long, a TimeoutExpired - exception will be raised. + If timeout is given, and the process takes too long, a TimeoutExpired + exception will be raised. - There is an optional argument "input", allowing you to - pass bytes or a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it will be used internally. + There is an optional argument "input", allowing you to + pass bytes or a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it will be used internally. - By default, all communication is in bytes, and therefore any "input" should - be bytes, and the stdout and stderr will be bytes. If in text mode, any - "input" should be a string, and stdout and stderr will be strings decoded - according to locale encoding, or by "encoding" if set. Text mode is - triggered by setting any of text, encoding, errors or universal_newlines. + By default, all communication is in bytes, and therefore any "input" should + be bytes, and the stdout and stderr will be bytes. If in text mode, any + "input" should be a string, and stdout and stderr will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode is + triggered by setting any of text, encoding, errors or universal_newlines. + + The other arguments are the same as for the Popen constructor. + """ - The other arguments are the same as for the Popen constructor. - """ @overload def run( args: _CMD, @@ -857,12 +862,12 @@ if sys.version_info >= (3, 11): process_group: int | None = None, ) -> int: """Run command with arguments. Wait for command to complete or -for timeout seconds, then return the returncode attribute. + for timeout seconds, then return the returncode attribute. -The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. Example: -retcode = call(["ls", "-l"]) -""" + retcode = call(["ls", "-l"]) + """ elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -895,12 +900,12 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> int: """Run command with arguments. Wait for command to complete or - timeout, then return the returncode attribute. + timeout, then return the returncode attribute. - The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. Example: - retcode = call(["ls", "-l"]) - """ + retcode = call(["ls", "-l"]) + """ else: def call( @@ -931,12 +936,12 @@ else: umask: int = -1, ) -> int: """Run command with arguments. Wait for command to complete or - timeout, then return the returncode attribute. + timeout, then return the returncode attribute. - The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. 
Example: - retcode = call(["ls", "-l"]) - """ + retcode = call(["ls", "-l"]) + """ # Same args as Popen.__init__ if sys.version_info >= (3, 11): @@ -971,14 +976,14 @@ if sys.version_info >= (3, 11): process_group: int | None = None, ) -> int: """Run command with arguments. Wait for command to complete. If -the exit code was zero then return, otherwise raise -CalledProcessError. The CalledProcessError object will have the -return code in the returncode attribute. + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. -The arguments are the same as for the call function. Example: + The arguments are the same as for the call function. Example: -check_call(["ls", "-l"]) -""" + check_call(["ls", "-l"]) + """ elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -1011,14 +1016,14 @@ elif sys.version_info >= (3, 10): pipesize: int = -1, ) -> int: """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. - The arguments are the same as for the call function. Example: + The arguments are the same as for the call function. Example: - check_call(["ls", "-l"]) - """ + check_call(["ls", "-l"]) + """ else: def check_call( @@ -1049,14 +1054,14 @@ else: umask: int = -1, ) -> int: """Run command with arguments. Wait for command to complete. If - the exit code was zero then return, otherwise raise - CalledProcessError. The CalledProcessError object will have the - return code in the returncode attribute. + the exit code was zero then return, otherwise raise + CalledProcessError. The CalledProcessError object will have the + return code in the returncode attribute. - The arguments are the same as for the call function. Example: + The arguments are the same as for the call function. Example: - check_call(["ls", "-l"]) - """ + check_call(["ls", "-l"]) + """ if sys.version_info >= (3, 11): # 3.11 adds "process_group" argument @@ -1093,38 +1098,39 @@ if sys.version_info >= (3, 11): ) -> str: """Run command with arguments and return its output. -If the exit code was non-zero it raises a CalledProcessError. The -CalledProcessError object will have the return code in the returncode -attribute and output in the output attribute. + If the exit code was non-zero it raises a CalledProcessError. The + CalledProcessError object will have the return code in the returncode + attribute and output in the output attribute. -The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. Example: ->>> check_output(["ls", "-l", "/dev/null"]) -b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + >>> check_output(["ls", "-l", "/dev/null"]) + b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' -The stdout argument is not allowed as it is used internally. -To capture standard error in the result, use stderr=STDOUT. + The stdout argument is not allowed as it is used internally. + To capture standard error in the result, use stderr=STDOUT. ->>> check_output(["/bin/sh", "-c", -... "ls -l non_existent_file ; exit 0"], -... 
stderr=STDOUT) -b'ls: non_existent_file: No such file or directory\\n' + >>> check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT) + b'ls: non_existent_file: No such file or directory\\n' -There is an additional optional argument, "input", allowing you to -pass a string to the subprocess's stdin. If you use this argument -you may not also use the Popen constructor's "stdin" argument, as -it too will be used internally. Example: + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example: ->>> check_output(["sed", "-e", "s/foo/bar/"], -... input=b"when in the course of fooman events\\n") -b'when in the course of barman events\\n' + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... input=b"when in the course of fooman events\\n") + b'when in the course of barman events\\n' + + By default, all communication is in bytes, and therefore any "input" + should be bytes, and the return value will be bytes. If in text mode, + any "input" should be a string, and the return value will be a string + decoded according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or universal_newlines. + """ -By default, all communication is in bytes, and therefore any "input" -should be bytes, and the return value will be bytes. If in text mode, -any "input" should be a string, and the return value will be a string -decoded according to locale encoding, or by "encoding" if set. Text mode -is triggered by setting any of text, encoding, errors or universal_newlines. -""" @overload def check_output( args: _CMD, @@ -1316,38 +1322,39 @@ elif sys.version_info >= (3, 10): ) -> str: """Run command with arguments and return its output. - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. + If the exit code was non-zero it raises a CalledProcessError. The + CalledProcessError object will have the return code in the returncode + attribute and output in the output attribute. - The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. Example: - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + >>> check_output(["ls", "-l", "/dev/null"]) + b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. + The stdout argument is not allowed as it is used internally. + To capture standard error in the result, use stderr=STDOUT. - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\\n' + >>> check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT) + b'ls: non_existent_file: No such file or directory\\n' - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. 
Example: + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example: - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\\n") - b'when in the course of barman events\\n' + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... input=b"when in the course of fooman events\\n") + b'when in the course of barman events\\n' + + By default, all communication is in bytes, and therefore any "input" + should be bytes, and the return value will be bytes. If in text mode, + any "input" should be a string, and the return value will be a string + decoded according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or universal_newlines. + """ - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ @overload def check_output( args: _CMD, @@ -1532,38 +1539,39 @@ else: ) -> str: """Run command with arguments and return its output. - If the exit code was non-zero it raises a CalledProcessError. The - CalledProcessError object will have the return code in the returncode - attribute and output in the output attribute. + If the exit code was non-zero it raises a CalledProcessError. The + CalledProcessError object will have the return code in the returncode + attribute and output in the output attribute. - The arguments are the same as for the Popen constructor. Example: + The arguments are the same as for the Popen constructor. Example: - >>> check_output(["ls", "-l", "/dev/null"]) - b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' + >>> check_output(["ls", "-l", "/dev/null"]) + b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\\n' - The stdout argument is not allowed as it is used internally. - To capture standard error in the result, use stderr=STDOUT. + The stdout argument is not allowed as it is used internally. + To capture standard error in the result, use stderr=STDOUT. - >>> check_output(["/bin/sh", "-c", - ... "ls -l non_existent_file ; exit 0"], - ... stderr=STDOUT) - b'ls: non_existent_file: No such file or directory\\n' + >>> check_output(["/bin/sh", "-c", + ... "ls -l non_existent_file ; exit 0"], + ... stderr=STDOUT) + b'ls: non_existent_file: No such file or directory\\n' - There is an additional optional argument, "input", allowing you to - pass a string to the subprocess's stdin. If you use this argument - you may not also use the Popen constructor's "stdin" argument, as - it too will be used internally. Example: + There is an additional optional argument, "input", allowing you to + pass a string to the subprocess's stdin. If you use this argument + you may not also use the Popen constructor's "stdin" argument, as + it too will be used internally. Example: - >>> check_output(["sed", "-e", "s/foo/bar/"], - ... input=b"when in the course of fooman events\\n") - b'when in the course of barman events\\n' + >>> check_output(["sed", "-e", "s/foo/bar/"], + ... 
input=b"when in the course of fooman events\\n") + b'when in the course of barman events\\n' + + By default, all communication is in bytes, and therefore any "input" + should be bytes, and the return value will be bytes. If in text mode, + any "input" should be a string, and the return value will be a string + decoded according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or universal_newlines. + """ - By default, all communication is in bytes, and therefore any "input" - should be bytes, and the return value will be bytes. If in text mode, - any "input" should be a string, and the return value will be a string - decoded according to locale encoding, or by "encoding" if set. Text mode - is triggered by setting any of text, encoding, errors or universal_newlines. - """ @overload def check_output( args: _CMD, @@ -1719,11 +1727,12 @@ class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): """This exception is raised when the timeout expires while waiting for a -child process. + child process. + + Attributes: + cmd, output, stdout, stderr, timeout + """ -Attributes: - cmd, output, stdout, stderr, timeout -""" def __init__( self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None ) -> None: ... @@ -1737,11 +1746,12 @@ Attributes: class CalledProcessError(SubprocessError): """Raised when run() is called with check=True and the process -returns a non-zero exit status. + returns a non-zero exit status. + + Attributes: + cmd, returncode, stdout, stderr, output + """ -Attributes: - cmd, returncode, stdout, stderr, output -""" returncode: int # morally: _CMD cmd: Any @@ -1758,59 +1768,60 @@ Attributes: class Popen(Generic[AnyStr]): """Execute a child program in a new process. -For a complete description of the arguments see the Python documentation. + For a complete description of the arguments see the Python documentation. -Arguments: - args: A string, or a sequence of program arguments. + Arguments: + args: A string, or a sequence of program arguments. - bufsize: supplied as the buffering argument to the open() function when - creating the stdin/stdout/stderr pipe file objects + bufsize: supplied as the buffering argument to the open() function when + creating the stdin/stdout/stderr pipe file objects - executable: A replacement program to execute. + executable: A replacement program to execute. - stdin, stdout and stderr: These specify the executed programs' standard - input, standard output and standard error file handles, respectively. + stdin, stdout and stderr: These specify the executed programs' standard + input, standard output and standard error file handles, respectively. - preexec_fn: (POSIX only) An object to be called in the child process - just before the child is executed. + preexec_fn: (POSIX only) An object to be called in the child process + just before the child is executed. - close_fds: Controls closing or inheriting of file descriptors. + close_fds: Controls closing or inheriting of file descriptors. - shell: If true, the command will be executed through the shell. + shell: If true, the command will be executed through the shell. - cwd: Sets the current directory before the child is executed. + cwd: Sets the current directory before the child is executed. - env: Defines the environment variables for the new process. + env: Defines the environment variables for the new process. 
- text: If true, decode stdin, stdout and stderr using the given encoding - (if set) or the system default otherwise. + text: If true, decode stdin, stdout and stderr using the given encoding + (if set) or the system default otherwise. - universal_newlines: Alias of text, provided for backwards compatibility. + universal_newlines: Alias of text, provided for backwards compatibility. - startupinfo and creationflags (Windows only) + startupinfo and creationflags (Windows only) - restore_signals (POSIX only) + restore_signals (POSIX only) - start_new_session (POSIX only) + start_new_session (POSIX only) - process_group (POSIX only) + process_group (POSIX only) - group (POSIX only) + group (POSIX only) - extra_groups (POSIX only) + extra_groups (POSIX only) - user (POSIX only) + user (POSIX only) - umask (POSIX only) + umask (POSIX only) - pass_fds (POSIX only) + pass_fds (POSIX only) - encoding and errors: Text mode encoding and error handling to use for - file objects stdin, stdout and stderr. + encoding and errors: Text mode encoding and error handling to use for + file objects stdin, stdout and stderr. + + Attributes: + stdin, stdout, stderr, pid, returncode + """ -Attributes: - stdin, stdout, stderr, pid, returncode -""" args: _CMD stdin: IO[AnyStr] | None stdout: IO[AnyStr] | None @@ -1852,8 +1863,8 @@ Attributes: pipesize: int = -1, process_group: int | None = None, ) -> None: - """Create new Popen instance. -""" + """Create new Popen instance.""" + @overload def __init__( self: Popen[str], @@ -2042,8 +2053,8 @@ Attributes: umask: int = -1, pipesize: int = -1, ) -> None: - """Create new Popen instance. -""" + """Create new Popen instance.""" + @overload def __init__( self: Popen[str], @@ -2225,8 +2236,8 @@ Attributes: extra_groups: Iterable[str | int] | None = None, umask: int = -1, ) -> None: - """Create new Popen instance. -""" + """Create new Popen instance.""" + @overload def __init__( self: Popen[str], @@ -2376,40 +2387,41 @@ Attributes: def poll(self) -> int | None: """Check if child process has terminated. Set and return returncode -attribute. -""" + attribute. + """ + def wait(self, timeout: float | None = None) -> int: - """Wait for child process to terminate; returns self.returncode. -""" + """Wait for child process to terminate; returns self.returncode.""" # morally the members of the returned tuple should be optional # TODO: this should allow ReadableBuffer for Popen[bytes], but adding # overloads for that runs into a mypy bug (python/mypy#14070). def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: """Interact with process: Send data to stdin and close it. -Read data from stdout and stderr, until end-of-file is -reached. Wait for process to terminate. - -The optional "input" argument should be data to be sent to the -child process, or None, if no data should be sent to the child. -communicate() returns a tuple (stdout, stderr). - -By default, all communication is in bytes, and therefore any -"input" should be bytes, and the (stdout, stderr) will be bytes. -If in text mode (indicated by self.text_mode), any "input" should -be a string, and (stdout, stderr) will be strings decoded -according to locale encoding, or by "encoding" if set. Text mode -is triggered by setting any of text, encoding, errors or -universal_newlines. -""" + Read data from stdout and stderr, until end-of-file is + reached. Wait for process to terminate. 
+ + The optional "input" argument should be data to be sent to the + child process, or None, if no data should be sent to the child. + communicate() returns a tuple (stdout, stderr). + + By default, all communication is in bytes, and therefore any + "input" should be bytes, and the (stdout, stderr) will be bytes. + If in text mode (indicated by self.text_mode), any "input" should + be a string, and (stdout, stderr) will be strings decoded + according to locale encoding, or by "encoding" if set. Text mode + is triggered by setting any of text, encoding, errors or + universal_newlines. + """ + def send_signal(self, sig: int) -> None: - """Send a signal to the process. -""" + """Send a signal to the process.""" + def terminate(self) -> None: - """Terminate the process with SIGTERM - """ + """Terminate the process with SIGTERM""" + def kill(self) -> None: - """Kill the process with SIGKILL - """ + """Kill the process with SIGKILL""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None @@ -2418,101 +2430,103 @@ universal_newlines. def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ # The result really is always a str. if sys.version_info >= (3, 11): def getstatusoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: """Return (exitcode, output) of executing cmd in a shell. -Execute the string 'cmd' in a shell with 'check_output' and -return a 2-tuple (status, output). The locale encoding is used -to decode the output and process newlines. - -A trailing newline is stripped from the output. -The exit status for the command can be interpreted -according to the rules for the function 'wait'. Example: - ->>> import subprocess ->>> subprocess.getstatusoutput('ls /bin/ls') -(0, '/bin/ls') ->>> subprocess.getstatusoutput('cat /bin/junk') -(1, 'cat: /bin/junk: No such file or directory') ->>> subprocess.getstatusoutput('/bin/junk') -(127, 'sh: /bin/junk: not found') ->>> subprocess.getstatusoutput('/bin/kill $$') -(-15, '') -""" + Execute the string 'cmd' in a shell with 'check_output' and + return a 2-tuple (status, output). The locale encoding is used + to decode the output and process newlines. + + A trailing newline is stripped from the output. + The exit status for the command can be interpreted + according to the rules for the function 'wait'. Example: + + >>> import subprocess + >>> subprocess.getstatusoutput('ls /bin/ls') + (0, '/bin/ls') + >>> subprocess.getstatusoutput('cat /bin/junk') + (1, 'cat: /bin/junk: No such file or directory') + >>> subprocess.getstatusoutput('/bin/junk') + (127, 'sh: /bin/junk: not found') + >>> subprocess.getstatusoutput('/bin/kill $$') + (-15, '') + """ + def getoutput(cmd: _CMD, *, encoding: str | None = None, errors: str | None = None) -> str: """Return output (stdout or stderr) of executing cmd in a shell. -Like getstatusoutput(), except the exit status is ignored and the return -value is a string containing the command's output. Example: + Like getstatusoutput(), except the exit status is ignored and the return + value is a string containing the command's output. 
Example: ->>> import subprocess ->>> subprocess.getoutput('ls /bin/ls') -'/bin/ls' -""" + >>> import subprocess + >>> subprocess.getoutput('ls /bin/ls') + '/bin/ls' + """ else: def getstatusoutput(cmd: _CMD) -> tuple[int, str]: """Return (exitcode, output) of executing cmd in a shell. - Execute the string 'cmd' in a shell with 'check_output' and - return a 2-tuple (status, output). The locale encoding is used - to decode the output and process newlines. - - A trailing newline is stripped from the output. - The exit status for the command can be interpreted - according to the rules for the function 'wait'. Example: - - >>> import subprocess - >>> subprocess.getstatusoutput('ls /bin/ls') - (0, '/bin/ls') - >>> subprocess.getstatusoutput('cat /bin/junk') - (1, 'cat: /bin/junk: No such file or directory') - >>> subprocess.getstatusoutput('/bin/junk') - (127, 'sh: /bin/junk: not found') - >>> subprocess.getstatusoutput('/bin/kill $$') - (-15, '') - """ + Execute the string 'cmd' in a shell with 'check_output' and + return a 2-tuple (status, output). The locale encoding is used + to decode the output and process newlines. + + A trailing newline is stripped from the output. + The exit status for the command can be interpreted + according to the rules for the function 'wait'. Example: + + >>> import subprocess + >>> subprocess.getstatusoutput('ls /bin/ls') + (0, '/bin/ls') + >>> subprocess.getstatusoutput('cat /bin/junk') + (1, 'cat: /bin/junk: No such file or directory') + >>> subprocess.getstatusoutput('/bin/junk') + (127, 'sh: /bin/junk: not found') + >>> subprocess.getstatusoutput('/bin/kill $$') + (-15, '') + """ + def getoutput(cmd: _CMD) -> str: """Return output (stdout or stderr) of executing cmd in a shell. - Like getstatusoutput(), except the exit status is ignored and the return - value is a string containing the command's output. Example: + Like getstatusoutput(), except the exit status is ignored and the return + value is a string containing the command's output. Example: - >>> import subprocess - >>> subprocess.getoutput('ls /bin/ls') - '/bin/ls' - """ + >>> import subprocess + >>> subprocess.getoutput('ls /bin/ls') + '/bin/ls' + """ def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: # undocumented """ -Translate a sequence of arguments into a command line -string, using the same rules as the MS C runtime: + Translate a sequence of arguments into a command line + string, using the same rules as the MS C runtime: -1) Arguments are delimited by white space, which is either a - space or a tab. + 1) Arguments are delimited by white space, which is either a + space or a tab. -2) A string surrounded by double quotation marks is - interpreted as a single argument, regardless of white space - contained within. A quoted string can be embedded in an - argument. + 2) A string surrounded by double quotation marks is + interpreted as a single argument, regardless of white space + contained within. A quoted string can be embedded in an + argument. -3) A double quotation mark preceded by a backslash is - interpreted as a literal double quotation mark. + 3) A double quotation mark preceded by a backslash is + interpreted as a literal double quotation mark. -4) Backslashes are interpreted literally, unless they - immediately precede a double quotation mark. + 4) Backslashes are interpreted literally, unless they + immediately precede a double quotation mark. -5) If backslashes immediately precede a double quotation mark, - every pair of backslashes is interpreted as a literal - backslash. 
If the number of backslashes is odd, the last - backslash escapes the next double quotation mark as - described in rule 3. -""" + 5) If backslashes immediately precede a double quotation mark, + every pair of backslashes is interpreted as a literal + backslash. If the number of backslashes is odd, the last + backslash escapes the next double quotation mark as + described in rule 3. + """ if sys.platform == "win32": if sys.version_info >= (3, 13): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi index 7e4e7ff2186ac..5dbcffbe8914d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sunau.pyi @@ -102,6 +102,7 @@ close() to patch up the sizes in the header. The close() method is called automatically when the class instance is destroyed. """ + from _typeshed import Unused from typing import IO, Any, Final, Literal, NamedTuple, NoReturn, overload from typing_extensions import Self, TypeAlias @@ -126,8 +127,8 @@ AUDIO_FILE_ENCODING_ALAW_8: Final = 27 AUDIO_UNKNOWN_SIZE: Final = 0xFFFFFFFF class _sunau_params(NamedTuple): - """_sunau_params(nchannels, sampwidth, framerate, nframes, comptype, compname) -""" + """_sunau_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" + nchannels: int sampwidth: int framerate: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi index 48ee2795602c2..3eb6e5d171a89 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symbol.pyi @@ -1,5 +1,5 @@ -"""Non-terminal symbols of Python grammar (from "graminit.h"). -""" +"""Non-terminal symbols of Python grammar (from "graminit.h").""" + from typing import Final single_input: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi index 0a28503b3f84e..14d5f762b5e2a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/symtable.pyi @@ -1,5 +1,5 @@ -"""Interface to the compiler's internal symbol tables -""" +"""Interface to the compiler's internal symbol tables""" + import sys from _collections_abc import dict_keys from collections.abc import Sequence @@ -14,9 +14,9 @@ if sys.version_info >= (3, 13): def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: """Return the toplevel *SymbolTable* for the source code. -*filename* is the name of the file with the code -and *compile_type* is the *compile()* mode argument. -""" + *filename* is the name of the file with the code + and *compile_type* is the *compile()* mode argument. + """ if sys.version_info >= (3, 13): from enum import StrEnum @@ -36,166 +36,168 @@ class SymbolTable: def get_type(self) -> SymbolTableType: """Return the type of the symbol table. -The value returned is one of the values in -the ``SymbolTableType`` enumeration. -""" + The value returned is one of the values in + the ``SymbolTableType`` enumeration. + """ else: def get_type(self) -> str: """Return the type of the symbol table. - The values returned are 'class', 'module', 'function', - 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. - """ + The values returned are 'class', 'module', 'function', + 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. + """ def get_id(self) -> int: - """Return an identifier for the table. 
- """ + """Return an identifier for the table.""" + def get_name(self) -> str: """Return the table's name. -This corresponds to the name of the class, function -or 'top' if the table is for a class, function or -global respectively. -""" + This corresponds to the name of the class, function + or 'top' if the table is for a class, function or + global respectively. + """ + def get_lineno(self) -> int: """Return the number of the first line in the -block for the table. -""" + block for the table. + """ + def is_optimized(self) -> bool: """Return *True* if the locals in the table -are optimizable. -""" + are optimizable. + """ + def is_nested(self) -> bool: """Return *True* if the block is a nested class -or function. -""" - def has_children(self) -> bool: - """Return *True* if the block has nested namespaces. + or function. """ + + def has_children(self) -> bool: + """Return *True* if the block has nested namespaces.""" + def get_identifiers(self) -> dict_keys[str, int]: - """Return a view object containing the names of symbols in the table. - """ + """Return a view object containing the names of symbols in the table.""" + def lookup(self, name: str) -> Symbol: """Lookup a *name* in the table. -Returns a *Symbol* instance. -""" + Returns a *Symbol* instance. + """ + def get_symbols(self) -> list[Symbol]: """Return a list of *Symbol* instances for -names in the table. -""" - def get_children(self) -> list[SymbolTable]: - """Return a list of the nested symbol tables. + names in the table. """ + def get_children(self) -> list[SymbolTable]: + """Return a list of the nested symbol tables.""" + class Function(SymbolTable): def get_parameters(self) -> tuple[str, ...]: - """Return a tuple of parameters to the function. - """ + """Return a tuple of parameters to the function.""" + def get_locals(self) -> tuple[str, ...]: - """Return a tuple of locals in the function. - """ + """Return a tuple of locals in the function.""" + def get_globals(self) -> tuple[str, ...]: - """Return a tuple of globals in the function. - """ + """Return a tuple of globals in the function.""" + def get_frees(self) -> tuple[str, ...]: - """Return a tuple of free variables in the function. - """ + """Return a tuple of free variables in the function.""" + def get_nonlocals(self) -> tuple[str, ...]: - """Return a tuple of nonlocals in the function. - """ + """Return a tuple of nonlocals in the function.""" class Class(SymbolTable): if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14; will be removed in Python 3.16.") def get_methods(self) -> tuple[str, ...]: - """Return a tuple of methods declared in the class. - """ + """Return a tuple of methods declared in the class.""" else: def get_methods(self) -> tuple[str, ...]: - """Return a tuple of methods declared in the class. - """ + """Return a tuple of methods declared in the class.""" class Symbol: def __init__( self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False ) -> None: ... def is_nonlocal(self) -> bool: - """Return *True* if the symbol is nonlocal. -""" + """Return *True* if the symbol is nonlocal.""" + def get_name(self) -> str: - """Return a name of a symbol. - """ + """Return a name of a symbol.""" + def is_referenced(self) -> bool: """Return *True* if the symbol is used in -its block. -""" - def is_parameter(self) -> bool: - """Return *True* if the symbol is a parameter. + its block. 
""" + + def is_parameter(self) -> bool: + """Return *True* if the symbol is a parameter.""" if sys.version_info >= (3, 14): def is_type_parameter(self) -> bool: - """Return *True* if the symbol is a type parameter. - """ + """Return *True* if the symbol is a type parameter.""" def is_global(self) -> bool: - """Return *True* if the symbol is global. - """ + """Return *True* if the symbol is global.""" + def is_declared_global(self) -> bool: """Return *True* if the symbol is declared global -with a global statement. -""" - def is_local(self) -> bool: - """Return *True* if the symbol is local. + with a global statement. """ + + def is_local(self) -> bool: + """Return *True* if the symbol is local.""" + def is_annotated(self) -> bool: - """Return *True* if the symbol is annotated. - """ + """Return *True* if the symbol is annotated.""" + def is_free(self) -> bool: """Return *True* if a referenced symbol is -not assigned to. -""" + not assigned to. + """ if sys.version_info >= (3, 14): def is_free_class(self) -> bool: """Return *True* if a class-scoped symbol is free from -the perspective of a method. -""" + the perspective of a method. + """ def is_imported(self) -> bool: """Return *True* if the symbol is created from -an import statement. -""" + an import statement. + """ + def is_assigned(self) -> bool: - """Return *True* if a symbol is assigned to. -""" + """Return *True* if a symbol is assigned to.""" if sys.version_info >= (3, 14): def is_comp_iter(self) -> bool: - """Return *True* if the symbol is a comprehension iteration variable. - """ + """Return *True* if the symbol is a comprehension iteration variable.""" + def is_comp_cell(self) -> bool: - """Return *True* if the symbol is a cell in an inlined comprehension. - """ + """Return *True* if the symbol is a cell in an inlined comprehension.""" def is_namespace(self) -> bool: """Returns *True* if name binding introduces new namespace. -If the name is used as the target of a function or class -statement, this will be true. + If the name is used as the target of a function or class + statement, this will be true. + + Note that a single name can be bound to multiple objects. If + is_namespace() is true, the name may also be bound to other + objects, like an int or list, that does not introduce a new + namespace. + """ -Note that a single name can be bound to multiple objects. If -is_namespace() is true, the name may also be bound to other -objects, like an int or list, that does not introduce a new -namespace. -""" def get_namespaces(self) -> Sequence[SymbolTable]: - """Return a list of namespaces bound to this name -""" + """Return a list of namespaces bound to this name""" + def get_namespace(self) -> SymbolTable: """Return the single namespace bound to this name. -Raises ValueError if the name is bound to multiple namespaces -or no namespace. -""" + Raises ValueError if the name is bound to multiple namespaces + or no namespace. + """ class SymbolTableFactory: def new(self, table: Any, filename: str) -> SymbolTable: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index 90f3a284e90aa..21514c7609d2d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -69,6 +69,7 @@ setprofile() -- set the global profiling function setrecursionlimit() -- set the max recursion depth for the interpreter settrace() -- set the global debug tracing function """ + import sys from _typeshed import MaybeNone, OptExcInfo, ProfileFunction, StrOrBytesPath, TraceFunction, structseq from _typeshed.importlib import MetaPathFinderProtocol, PathEntryFinderProtocol @@ -175,8 +176,9 @@ flags: _flags class _flags(_UninstantiableStructseq, tuple[int, ...]): """sys.flags -Flags provided through command line arguments or environment vars. -""" + Flags provided through command line arguments or environment vars. + """ + # `safe_path` was added in py311 if sys.version_info >= (3, 11): __match_args__: Final = ( @@ -222,88 +224,83 @@ Flags provided through command line arguments or environment vars. @property def debug(self) -> int: - """-d -""" + """-d""" + @property def inspect(self) -> int: - """-i -""" + """-i""" + @property def interactive(self) -> int: - """-i -""" + """-i""" + @property def optimize(self) -> int: - """-O or -OO -""" + """-O or -OO""" + @property def dont_write_bytecode(self) -> int: - """-B -""" + """-B""" + @property def no_user_site(self) -> int: - """-s -""" + """-s""" + @property def no_site(self) -> int: - """-S -""" + """-S""" + @property def ignore_environment(self) -> int: - """-E -""" + """-E""" + @property def verbose(self) -> int: - """-v -""" + """-v""" + @property def bytes_warning(self) -> int: - """-b -""" + """-b""" + @property def quiet(self) -> int: - """-q -""" + """-q""" + @property def hash_randomization(self) -> int: - """-R -""" + """-R""" + @property def isolated(self) -> int: - """-I -""" + """-I""" + @property def dev_mode(self) -> bool: - """-X dev -""" + """-X dev""" + @property def utf8_mode(self) -> int: - """-X utf8 -""" + """-X utf8""" if sys.version_info >= (3, 10): @property def warn_default_encoding(self) -> int: - """-X warn_default_encoding -""" + """-X warn_default_encoding""" if sys.version_info >= (3, 11): @property def safe_path(self) -> bool: - """-P -""" + """-P""" if sys.version_info >= (3, 13): @property def gil(self) -> Literal[0, 1]: - """-X gil -""" + """-X gil""" if sys.version_info >= (3, 14): @property def thread_inherit_context(self) -> Literal[0, 1]: - """-X thread_inherit_context -""" + """-X thread_inherit_context""" + @property def context_aware_warnings(self) -> Literal[0, 1]: - """-X context_aware_warnings -""" + """-X context_aware_warnings""" # Whether or not this exists on lower versions of Python # may depend on which patch release you're using # (it was backported to all Python versions on 3.8+ as a security fix) @@ -311,8 +308,7 @@ Flags provided through command line arguments or environment vars. # and present in all versions of 3.11 and later. @property def int_max_str_digits(self) -> int: - """-X int_max_str_digits -""" + """-X int_max_str_digits""" float_info: _float_info @@ -322,10 +318,11 @@ float_info: _float_info class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): """sys.float_info -A named tuple holding information about the float type. It contains low level -information about the precision and internal representation. 
Please study -your system's :file:`float.h` for more information. -""" + A named tuple holding information about the float type. It contains low level + information about the precision and internal representation. Please study + your system's :file:`float.h` for more information. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ( "max", @@ -343,48 +340,47 @@ your system's :file:`float.h` for more information. @property def max(self) -> float: # DBL_MAX - """DBL_MAX -- maximum representable finite float -""" + """DBL_MAX -- maximum representable finite float""" + @property def max_exp(self) -> int: # DBL_MAX_EXP - """DBL_MAX_EXP -- maximum int e such that radix**(e-1) is representable -""" + """DBL_MAX_EXP -- maximum int e such that radix**(e-1) is representable""" + @property def max_10_exp(self) -> int: # DBL_MAX_10_EXP - """DBL_MAX_10_EXP -- maximum int e such that 10**e is representable -""" + """DBL_MAX_10_EXP -- maximum int e such that 10**e is representable""" + @property def min(self) -> float: # DBL_MIN - """DBL_MIN -- Minimum positive normalized float -""" + """DBL_MIN -- Minimum positive normalized float""" + @property def min_exp(self) -> int: # DBL_MIN_EXP - """DBL_MIN_EXP -- minimum int e such that radix**(e-1) is a normalized float -""" + """DBL_MIN_EXP -- minimum int e such that radix**(e-1) is a normalized float""" + @property def min_10_exp(self) -> int: # DBL_MIN_10_EXP - """DBL_MIN_10_EXP -- minimum int e such that 10**e is a normalized float -""" + """DBL_MIN_10_EXP -- minimum int e such that 10**e is a normalized float""" + @property def dig(self) -> int: # DBL_DIG - """DBL_DIG -- maximum number of decimal digits that can be faithfully represented in a float -""" + """DBL_DIG -- maximum number of decimal digits that can be faithfully represented in a float""" + @property def mant_dig(self) -> int: # DBL_MANT_DIG - """DBL_MANT_DIG -- mantissa digits -""" + """DBL_MANT_DIG -- mantissa digits""" + @property def epsilon(self) -> float: # DBL_EPSILON - """DBL_EPSILON -- Difference between 1 and the next representable float -""" + """DBL_EPSILON -- Difference between 1 and the next representable float""" + @property def radix(self) -> int: # FLT_RADIX - """FLT_RADIX -- radix of exponent -""" + """FLT_RADIX -- radix of exponent""" + @property def rounds(self) -> int: # FLT_ROUNDS - """FLT_ROUNDS -- rounding mode used for arithmetic operations -""" + """FLT_ROUNDS -- rounding mode used for arithmetic operations""" hash_info: _hash_info @@ -394,48 +390,48 @@ hash_info: _hash_info class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): """hash_info -A named tuple providing parameters used for computing -hashes. The attributes are read only. -""" + A named tuple providing parameters used for computing + hashes. The attributes are read only. 
+ """ + if sys.version_info >= (3, 10): __match_args__: Final = ("width", "modulus", "inf", "nan", "imag", "algorithm", "hash_bits", "seed_bits", "cutoff") @property def width(self) -> int: - """width of the type used for hashing, in bits -""" + """width of the type used for hashing, in bits""" + @property def modulus(self) -> int: - """prime number giving the modulus on which the hash function is based -""" + """prime number giving the modulus on which the hash function is based""" + @property def inf(self) -> int: - """value to be used for hash of a positive infinity -""" + """value to be used for hash of a positive infinity""" + @property def nan(self) -> int: - """value to be used for hash of a nan -""" + """value to be used for hash of a nan""" + @property def imag(self) -> int: - """multiplier used for the imaginary part of a complex number -""" + """multiplier used for the imaginary part of a complex number""" + @property def algorithm(self) -> str: - """name of the algorithm for hashing of str, bytes and memoryviews -""" + """name of the algorithm for hashing of str, bytes and memoryviews""" + @property def hash_bits(self) -> int: - """internal output size of hash algorithm -""" + """internal output size of hash algorithm""" + @property def seed_bits(self) -> int: - """seed size of hash algorithm -""" + """seed size of hash algorithm""" + @property def cutoff(self) -> int: # undocumented - """small string optimization cutoff -""" + """small string optimization cutoff""" implementation: _implementation @@ -460,28 +456,28 @@ int_info: _int_info class _int_info(structseq[int], tuple[int, int, int, int]): """sys.int_info -A named tuple that holds information about Python's -internal representation of integers. The attributes are read only. -""" + A named tuple that holds information about Python's + internal representation of integers. The attributes are read only. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("bits_per_digit", "sizeof_digit", "default_max_str_digits", "str_digits_check_threshold") @property def bits_per_digit(self) -> int: - """size of a digit in bits -""" + """size of a digit in bits""" + @property def sizeof_digit(self) -> int: - """size in bytes of the C type used to represent a digit -""" + """size in bytes of the C type used to represent a digit""" + @property def default_max_str_digits(self) -> int: - """maximum string conversion digits limitation -""" + """maximum string conversion digits limitation""" + @property def str_digits_check_threshold(self) -> int: - """minimum positive value for int_max_str_digits -""" + """minimum positive value for int_max_str_digits""" _ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"] _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None @@ -492,23 +488,23 @@ _ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]): """sys.thread_info -A named tuple holding information about the thread implementation. -""" + A named tuple holding information about the thread implementation. 
+ """ + if sys.version_info >= (3, 10): __match_args__: Final = ("name", "lock", "version") @property def name(self) -> _ThreadInfoName: - """name of the thread implementation -""" + """name of the thread implementation""" + @property def lock(self) -> _ThreadInfoLock: - """name of the lock implementation -""" + """name of the lock implementation""" + @property def version(self) -> str | None: - """name and version of the thread library -""" + """name and version of the thread library""" thread_info: _thread_info _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] @@ -519,181 +515,187 @@ _ReleaseLevel: TypeAlias = Literal["alpha", "beta", "candidate", "final"] class _version_info(_UninstantiableStructseq, tuple[int, int, int, _ReleaseLevel, int]): """sys.version_info -Version information as a named tuple. -""" + Version information as a named tuple. + """ + if sys.version_info >= (3, 10): __match_args__: Final = ("major", "minor", "micro", "releaselevel", "serial") @property def major(self) -> int: - """Major release number -""" + """Major release number""" + @property def minor(self) -> int: - """Minor release number -""" + """Minor release number""" + @property def micro(self) -> int: - """Patch release number -""" + """Patch release number""" + @property def releaselevel(self) -> _ReleaseLevel: - """'alpha', 'beta', 'candidate', or 'final' -""" + """'alpha', 'beta', 'candidate', or 'final'""" + @property def serial(self) -> int: - """Serial release number -""" + """Serial release number""" version_info: _version_info def call_tracing(func: Callable[..., _T], args: Any, /) -> _T: """Call func(*args), while tracing is enabled. -The tracing state is saved, and restored afterwards. This is intended -to be called from a debugger from a checkpoint, to recursively debug -some other code. -""" + The tracing state is saved, and restored afterwards. This is intended + to be called from a debugger from a checkpoint, to recursively debug + some other code. + """ if sys.version_info >= (3, 13): @deprecated("Deprecated since Python 3.13. Use `_clear_internal_caches()` instead.") def _clear_type_cache() -> None: - """Clear the internal type lookup cache. -""" + """Clear the internal type lookup cache.""" else: def _clear_type_cache() -> None: - """Clear the internal type lookup cache. -""" + """Clear the internal type lookup cache.""" def _current_frames() -> dict[int, FrameType]: """Return a dict mapping each thread's thread id to its current stack frame. -This function should be used for specialized purposes only. -""" + This function should be used for specialized purposes only. + """ + def _getframe(depth: int = 0, /) -> FrameType: """Return a frame object from the call stack. -If optional integer depth is given, return the frame object that many -calls below the top of the stack. If that is deeper than the call -stack, ValueError is raised. The default for depth is zero, returning -the frame at the top of the call stack. + If optional integer depth is given, return the frame object that many + calls below the top of the stack. If that is deeper than the call + stack, ValueError is raised. The default for depth is zero, returning + the frame at the top of the call stack. -This function should be used for internal and specialized purposes -only. -""" + This function should be used for internal and specialized purposes + only. + """ if sys.version_info >= (3, 12): def _getframemodulename(depth: int = 0) -> str | None: """Return the name of the module for a calling frame. 
-The default depth returns the module containing the call to this API. -A more typical use in a library will pass a depth of 1 to get the user's -module rather than the library module. + The default depth returns the module containing the call to this API. + A more typical use in a library will pass a depth of 1 to get the user's + module rather than the library module. -If no frame, module, or name can be found, returns None. -""" + If no frame, module, or name can be found, returns None. + """ def _debugmallocstats() -> None: """Print summary info to stderr about the state of pymalloc's structures. -In Py_DEBUG mode, also perform some expensive internal consistency -checks. -""" + In Py_DEBUG mode, also perform some expensive internal consistency + checks. + """ + def __displayhook__(object: object, /) -> None: - """Print an object to sys.stdout and also save it in builtins._ -""" + """Print an object to sys.stdout and also save it in builtins._""" + def __excepthook__(exctype: type[BaseException], value: BaseException, traceback: TracebackType | None, /) -> None: - """Handle an exception by displaying it with a traceback on sys.stderr. -""" + """Handle an exception by displaying it with a traceback on sys.stderr.""" + def exc_info() -> OptExcInfo: """Return current exception information: (type, value, traceback). -Return information about the most recent exception caught by an except -clause in the current stack frame or in an older stack frame. -""" + Return information about the most recent exception caught by an except + clause in the current stack frame or in an older stack frame. + """ if sys.version_info >= (3, 11): def exception() -> BaseException | None: """Return the current exception. -Return the most recent exception caught by an except clause -in the current stack frame or in an older stack frame, or None -if no such exception exists. -""" + Return the most recent exception caught by an except clause + in the current stack frame or in an older stack frame, or None + if no such exception exists. + """ def exit(status: _ExitCode = None, /) -> NoReturn: """Exit the interpreter by raising SystemExit(status). -If the status is omitted or None, it defaults to zero (i.e., success). -If the status is an integer, it will be used as the system exit status. -If it is another kind of object, it will be printed and the system -exit status will be one (i.e., failure). -""" + If the status is omitted or None, it defaults to zero (i.e., success). + If the status is an integer, it will be used as the system exit status. + If it is another kind of object, it will be printed and the system + exit status will be one (i.e., failure). + """ + def getallocatedblocks() -> int: - """Return the number of memory blocks currently allocated. -""" + """Return the number of memory blocks currently allocated.""" + def getdefaultencoding() -> str: - """Return the current default encoding used by the Unicode implementation. -""" + """Return the current default encoding used by the Unicode implementation.""" if sys.platform != "win32": def getdlopenflags() -> int: """Return the current value of the flags that are used for dlopen calls. -The flag constants are defined in the os module. -""" + The flag constants are defined in the os module. + """ def getfilesystemencoding() -> str: - """Return the encoding used to convert Unicode filenames to OS filenames. 
-""" + """Return the encoding used to convert Unicode filenames to OS filenames.""" + def getfilesystemencodeerrors() -> str: - """Return the error mode used Unicode to OS filename conversion. -""" + """Return the error mode used Unicode to OS filename conversion.""" + def getrefcount(object: Any, /) -> int: """Return the reference count of object. -The count returned is generally one higher than you might expect, -because it includes the (temporary) reference as an argument to -getrefcount(). -""" + The count returned is generally one higher than you might expect, + because it includes the (temporary) reference as an argument to + getrefcount(). + """ + def getrecursionlimit() -> int: """Return the current value of the recursion limit. -The recursion limit is the maximum depth of the Python interpreter -stack. This limit prevents infinite recursion from causing an overflow -of the C stack and crashing Python. -""" + The recursion limit is the maximum depth of the Python interpreter + stack. This limit prevents infinite recursion from causing an overflow + of the C stack and crashing Python. + """ + def getsizeof(obj: object, default: int = ...) -> int: """getsizeof(object [, default]) -> int -Return the size of object in bytes. -""" + Return the size of object in bytes. + """ + def getswitchinterval() -> float: - """Return the current thread switch interval; see sys.setswitchinterval(). -""" + """Return the current thread switch interval; see sys.setswitchinterval().""" + def getprofile() -> ProfileFunction | None: """Return the profiling function set with sys.setprofile. -See the profiler chapter in the library manual. -""" + See the profiler chapter in the library manual. + """ + def setprofile(function: ProfileFunction | None, /) -> None: """Set the profiling function. -It will be called on each function call and return. See the profiler -chapter in the library manual. -""" + It will be called on each function call and return. See the profiler + chapter in the library manual. + """ + def gettrace() -> TraceFunction | None: """Return the global debug tracing function set with sys.settrace. -See the debugger chapter in the library manual. -""" + See the debugger chapter in the library manual. + """ + def settrace(function: TraceFunction | None, /) -> None: """Set the global debug tracing function. -It will be called on each function call. See the debugger chapter -in the library manual. -""" + It will be called on each function call. See the debugger chapter + in the library manual. + """ if sys.platform == "win32": # A tuple of length 5, even though it has more than 5 attributes. @@ -724,42 +726,40 @@ if sys.platform == "win32": def getwindowsversion() -> _WinVersion: """Return info about the running version of Windows as a named tuple. -The members are named: major, minor, build, platform, service_pack, -service_pack_major, service_pack_minor, suite_mask, product_type and -platform_version. For backward compatibility, only the first 5 items -are available by indexing. All elements are numbers, except -service_pack and platform_type which are strings, and platform_version -which is a 3-tuple. Platform is always 2. Product_type may be 1 for a -workstation, 2 for a domain controller, 3 for a server. -Platform_version is a 3-tuple containing a version number that is -intended for identifying the OS rather than feature detection. 
-""" + The members are named: major, minor, build, platform, service_pack, + service_pack_major, service_pack_minor, suite_mask, product_type and + platform_version. For backward compatibility, only the first 5 items + are available by indexing. All elements are numbers, except + service_pack and platform_type which are strings, and platform_version + which is a 3-tuple. Platform is always 2. Product_type may be 1 for a + workstation, 2 for a domain controller, 3 for a server. + Platform_version is a 3-tuple containing a version number that is + intended for identifying the OS rather than feature detection. + """ def intern(string: str, /) -> str: """``Intern'' the given string. -This enters the string in the (global) table of interned strings whose -purpose is to speed up dictionary lookups. Return the string itself or -the previously interned string object with the same value. -""" + This enters the string in the (global) table of interned strings whose + purpose is to speed up dictionary lookups. Return the string itself or + the previously interned string object with the same value. + """ if sys.version_info >= (3, 13): def _is_gil_enabled() -> bool: - """Return True if the GIL is currently enabled and False otherwise. -""" + """Return True if the GIL is currently enabled and False otherwise.""" + def _clear_internal_caches() -> None: - """Clear all internal performance-related caches. -""" + """Clear all internal performance-related caches.""" + def _is_interned(string: str, /) -> bool: - """Return True if the given string is "interned". -""" + """Return True if the given string is "interned".""" def is_finalizing() -> bool: - """Return True if Python is exiting. -""" + """Return True if Python is exiting.""" + def breakpointhook(*args: Any, **kwargs: Any) -> Any: - """This hook function is called by built-in breakpoint(). -""" + """This hook function is called by built-in breakpoint().""" __breakpointhook__ = breakpointhook # Contains the original value of breakpointhook @@ -767,32 +767,34 @@ if sys.platform != "win32": def setdlopenflags(flags: int, /) -> None: """Set the flags used by the interpreter for dlopen calls. -This is used, for example, when the interpreter loads extension -modules. Among other things, this will enable a lazy resolving of -symbols when importing a module, if called as sys.setdlopenflags(0). -To share symbols across extension modules, call as -sys.setdlopenflags(os.RTLD_GLOBAL). Symbolic names for the flag -modules can be found in the os module (RTLD_xxx constants, e.g. -os.RTLD_LAZY). -""" + This is used, for example, when the interpreter loads extension + modules. Among other things, this will enable a lazy resolving of + symbols when importing a module, if called as sys.setdlopenflags(0). + To share symbols across extension modules, call as + sys.setdlopenflags(os.RTLD_GLOBAL). Symbolic names for the flag + modules can be found in the os module (RTLD_xxx constants, e.g. + os.RTLD_LAZY). + """ def setrecursionlimit(limit: int, /) -> None: """Set the maximum depth of the Python interpreter stack to n. -This limit prevents infinite recursion from causing an overflow of the C -stack and crashing Python. The highest possible limit is platform- -dependent. -""" + This limit prevents infinite recursion from causing an overflow of the C + stack and crashing Python. The highest possible limit is platform- + dependent. + """ + def setswitchinterval(interval: float, /) -> None: """Set the ideal thread switching delay inside the Python interpreter. 
-The actual frequency of switching threads can be lower if the -interpreter executes long sequences of uninterruptible code -(this is implementation-specific and workload-dependent). + The actual frequency of switching threads can be lower if the + interpreter executes long sequences of uninterruptible code + (this is implementation-specific and workload-dependent). + + The parameter must represent the desired switching delay in seconds + A typical value is 0.005 (5 milliseconds). + """ -The parameter must represent the desired switching delay in seconds -A typical value is 0.005 (5 milliseconds). -""" def gettotalrefcount() -> int: ... # Debug builds only # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. @@ -809,20 +811,20 @@ unraisablehook: Callable[[UnraisableHookArgs], Any] def __unraisablehook__(unraisable: UnraisableHookArgs, /) -> Any: """Handle an unraisable exception. -The unraisable argument has the following attributes: + The unraisable argument has the following attributes: + + * exc_type: Exception type. + * exc_value: Exception value, can be None. + * exc_traceback: Exception traceback, can be None. + * err_msg: Error message, can be None. + * object: Object causing the exception, can be None. + """ -* exc_type: Exception type. -* exc_value: Exception value, can be None. -* exc_traceback: Exception traceback, can be None. -* err_msg: Error message, can be None. -* object: Object causing the exception, can be None. -""" def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: - """Adds a new audit hook callback. -""" + """Adds a new audit hook callback.""" + def audit(event: str, /, *args: Any) -> None: - """Passes the event to any audit hooks that are attached. -""" + """Passes the event to any audit hooks that are attached.""" _AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None @@ -841,13 +843,14 @@ class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHo def get_asyncgen_hooks() -> _asyncgen_hooks: """Return the installed asynchronous generators hooks. -This returns a namedtuple of the form (firstiter, finalizer). -""" + This returns a namedtuple of the form (firstiter, finalizer). + """ + def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: """set_asyncgen_hooks([firstiter] [, finalizer]) -Set a finalizer for async generators objects. -""" + Set a finalizer for async generators objects. + """ if sys.platform == "win32": if sys.version_info >= (3, 13): @@ -858,97 +861,91 @@ if sys.platform == "win32": def _enablelegacywindowsfsencoding() -> None: """Changes the default filesystem encoding to mbcs:replace. -This is done for consistency with earlier versions of Python. See PEP -529 for more information. + This is done for consistency with earlier versions of Python. See PEP + 529 for more information. -This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING -environment variable before launching Python. -""" + This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING + environment variable before launching Python. + """ else: def _enablelegacywindowsfsencoding() -> None: """Changes the default filesystem encoding to mbcs:replace. -This is done for consistency with earlier versions of Python. See PEP -529 for more information. + This is done for consistency with earlier versions of Python. See PEP + 529 for more information. 
-This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING -environment variable before launching Python. -""" + This is equivalent to defining the PYTHONLEGACYWINDOWSFSENCODING + environment variable before launching Python. + """ def get_coroutine_origin_tracking_depth() -> int: - """Check status of origin tracking for coroutine objects in this thread. -""" + """Check status of origin tracking for coroutine objects in this thread.""" + def set_coroutine_origin_tracking_depth(depth: int) -> None: """Enable or disable origin tracking for coroutine objects in this thread. -Coroutine objects will track 'depth' frames of traceback information -about where they came from, available in their cr_origin attribute. + Coroutine objects will track 'depth' frames of traceback information + about where they came from, available in their cr_origin attribute. -Set a depth of 0 to disable. -""" + Set a depth of 0 to disable. + """ # The following two functions were added in 3.11.0, 3.10.7, and 3.9.14, # as part of the response to CVE-2020-10735 def set_int_max_str_digits(maxdigits: int) -> None: - """Set the maximum string digits limit for non-binary int<->str conversions. -""" + """Set the maximum string digits limit for non-binary int<->str conversions.""" + def get_int_max_str_digits() -> int: - """Return the maximum string digits limit for non-binary int<->str conversions. -""" + """Return the maximum string digits limit for non-binary int<->str conversions.""" if sys.version_info >= (3, 12): if sys.version_info >= (3, 13): def getunicodeinternedsize(*, _only_immortal: bool = False) -> int: - """Return the number of elements of the unicode interned dictionary -""" + """Return the number of elements of the unicode interned dictionary""" else: def getunicodeinternedsize() -> int: - """Return the number of elements of the unicode interned dictionary -""" + """Return the number of elements of the unicode interned dictionary""" def deactivate_stack_trampoline() -> None: """Deactivate the current stack profiler trampoline backend. -If no stack profiler is activated, this function has no effect. -""" + If no stack profiler is activated, this function has no effect. + """ + def is_stack_trampoline_active() -> bool: - """Return *True* if a stack profiler trampoline is active. -""" + """Return *True* if a stack profiler trampoline is active.""" # It always exists, but raises on non-linux platforms: if sys.platform == "linux": def activate_stack_trampoline(backend: str, /) -> None: - """Activate stack profiler trampoline *backend*. -""" + """Activate stack profiler trampoline *backend*.""" else: def activate_stack_trampoline(backend: str, /) -> NoReturn: - """Activate stack profiler trampoline *backend*. -""" - + """Activate stack profiler trampoline *backend*.""" from . import _monitoring monitoring = _monitoring if sys.version_info >= (3, 14): def is_remote_debug_enabled() -> bool: - """Return True if remote debugging is enabled, False otherwise. -""" + """Return True if remote debugging is enabled, False otherwise.""" + def remote_exec(pid: int, script: StrOrBytesPath) -> None: """Executes a file containing Python code in a given remote Python process. -This function returns immediately, and the code will be executed by the -target process's main thread at the next available opportunity, similarly -to how signals are handled. There is no interface to determine when the -code has been executed. 
The caller is responsible for making sure that -the file still exists whenever the remote process tries to read it and that -it hasn't been overwritten. - -The remote process must be running a CPython interpreter of the same major -and minor version as the local process. If either the local or remote -interpreter is pre-release (alpha, beta, or release candidate) then the -local and remote interpreters must be the same exact version. - -Args: - pid (int): The process ID of the target Python process. - script (str|bytes): The path to a file containing - the Python code to be executed. -""" + This function returns immediately, and the code will be executed by the + target process's main thread at the next available opportunity, similarly + to how signals are handled. There is no interface to determine when the + code has been executed. The caller is responsible for making sure that + the file still exists whenever the remote process tries to read it and that + it hasn't been overwritten. + + The remote process must be running a CPython interpreter of the same major + and minor version as the local process. If either the local or remote + interpreter is pre-release (alpha, beta, or release candidate) then the + local and remote interpreters must be the same exact version. + + Args: + pid (int): The process ID of the target Python process. + script (str|bytes): The path to a file containing + the Python code to be executed. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi index 31301e0a56388..8cdd3b1b2fce1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi @@ -1,5 +1,5 @@ -"""Access to Python's configuration information. -""" +"""Access to Python's configuration information.""" + import sys from typing import IO, Any, Literal, overload from typing_extensions import deprecated @@ -22,75 +22,78 @@ __all__ = [ @deprecated("SO is deprecated, use EXT_SUFFIX. Support is removed in Python 3.11") def get_config_var(name: Literal["SO"]) -> Any: """Return the value of a single variable using the dictionary returned by -'get_config_vars()'. + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ -Equivalent to get_config_vars().get(name) -""" @overload def get_config_var(name: str) -> Any: ... @overload def get_config_vars() -> dict[str, Any]: """With no arguments, return a dictionary of all configuration -variables relevant for the current platform. + variables relevant for the current platform. -On Unix, this means every variable defined in Python's installed Makefile; -On Windows it's a much smaller set. + On Unix, this means every variable defined in Python's installed Makefile; + On Windows it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ -With arguments, return a list of values that result from looking up -each argument in the configuration variable dictionary. -""" @overload def get_config_vars(arg: str, /, *args: str) -> list[Any]: ... def get_scheme_names() -> tuple[str, ...]: - """Return a tuple containing the schemes names. -""" + """Return a tuple containing the schemes names.""" if sys.version_info >= (3, 10): def get_default_scheme() -> str: ... def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... 
def get_path_names() -> tuple[str, ...]: - """Return a tuple containing the paths names. -""" + """Return a tuple containing the paths names.""" + def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: """Return a path corresponding to the scheme. -``scheme`` is the install scheme name. -""" + ``scheme`` is the install scheme name. + """ + def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: """Return a mapping containing an install scheme. -``scheme`` is the install scheme name. If not provided, it will -return the default scheme for the current platform. -""" + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + def get_python_version() -> str: ... def get_platform() -> str: """Return a string that identifies the current platform. -This is used mainly to distinguish platform-specific build directories and -platform-specific built distributions. Typically includes the OS name and -version and the architecture (as supplied by 'os.uname()'), although the -exact information included depends on the OS; on Linux, the kernel version -isn't particularly important. + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name and + version and the architecture (as supplied by 'os.uname()'), although the + exact information included depends on the OS; on Linux, the kernel version + isn't particularly important. -Examples of returned values: + Examples of returned values: -Windows: + Windows: -- win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, and EM64T) -- win-arm64 (64-bit Windows on ARM64, aka AArch64) -- win32 (all others - specifically, sys.platform is returned) + - win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, and EM64T) + - win-arm64 (64-bit Windows on ARM64, aka AArch64) + - win32 (all others - specifically, sys.platform is returned) -POSIX based OS: + POSIX based OS: -- linux-x86_64 -- macosx-15.5-arm64 -- macosx-26.0-universal2 (macOS on Apple Silicon or Intel) -- android-24-arm64_v8a + - linux-x86_64 + - macosx-15.5-arm64 + - macosx-26.0-universal2 (macOS on Apple Silicon or Intel) + - android-24-arm64_v8a -For other non-POSIX platforms, currently just returns :data:`sys.platform`. -""" + For other non-POSIX platforms, currently just returns :data:`sys.platform`. + """ if sys.version_info >= (3, 11): def is_python_build(check_home: object = None) -> bool: ... @@ -101,13 +104,13 @@ else: def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: """Parse a config.h-style file. -A dictionary containing name/value pairs is returned. If an -optional dictionary is passed in as the second argument, it is -used instead of a new dictionary. -""" + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + def get_config_h_filename() -> str: - """Return the path of pyconfig.h. -""" + """Return the path of pyconfig.h.""" + def get_makefile_filename() -> str: - """Return the path of the Makefile. 
-""" + """Return the path of the Makefile.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi index a14d15c2dd50c..8300ab0f7ce23 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/syslog.pyi @@ -47,24 +47,25 @@ if sys.platform != "win32": LOG_REMOTEAUTH: Final = 104 def LOG_MASK(pri: int, /) -> int: - """Calculates the mask for the individual priority pri. -""" + """Calculates the mask for the individual priority pri.""" + def LOG_UPTO(pri: int, /) -> int: - """Calculates the mask for all priorities up to and including pri. -""" + """Calculates the mask for all priorities up to and including pri.""" + def closelog() -> None: - """Reset the syslog module values and call the system library closelog(). -""" + """Reset the syslog module values and call the system library closelog().""" + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: - """Set logging options of subsequent syslog() calls. -""" + """Set logging options of subsequent syslog() calls.""" + def setlogmask(maskpri: int, /) -> int: - """Set the priority mask to maskpri and return the previous mask value. -""" + """Set the priority mask to maskpri and return the previous mask value.""" + @overload def syslog(priority: int, message: str) -> None: """syslog([priority=LOG_INFO,] message) -Send the string message to the system logger. -""" + Send the string message to the system logger. + """ + @overload def syslog(message: str) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi index 1a62ae6a2db8a..84a7f691aeae8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tabnanny.pyi @@ -9,6 +9,7 @@ check() described below. Warning: The API provided by this module is likely to change in future releases; such changes may not be backward compatible. """ + from _typeshed import StrOrBytesPath from collections.abc import Iterable @@ -19,9 +20,10 @@ filename_only: int class NannyNag(Exception): """ -Raised by process_tokens() if detecting an ambiguous indent. -Captured and handled in check(). -""" + Raised by process_tokens() if detecting an ambiguous indent. + Captured and handled in check(). + """ + def __init__(self, lineno: int, msg: str, line: str) -> None: ... def get_lineno(self) -> int: ... def get_msg(self) -> str: ... @@ -30,10 +32,11 @@ Captured and handled in check(). def check(file: StrOrBytesPath) -> None: """check(file_or_dir) -If file_or_dir is a directory and not a symbolic link, then recursively -descend the directory tree named by file_or_dir, checking all .py files -along the way. If file_or_dir is an ordinary Python source file, it is -checked for whitespace related problems. The diagnostic messages are -written to standard output using the print statement. -""" + If file_or_dir is a directory and not a symbolic link, then recursively + descend the directory tree named by file_or_dir, checking all .py files + along the way. If file_or_dir is an ordinary Python source file, it is + checked for whitespace related problems. The diagnostic messages are + written to standard output using the print statement. + """ + def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi index f0a504a7e864f..389e24eb4c1d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tarfile.pyi @@ -1,5 +1,5 @@ -"""Read from and write to tar format archives. -""" +"""Read from and write to tar format archives.""" + import bz2 import io import sys @@ -118,8 +118,8 @@ class ExFileObject(io.BufferedReader): # undocumented def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... class TarFile: - """The TarFile Class provides an interface to tar archives. - """ + """The TarFile Class provides an interface to tar archives.""" + OPEN_METH: ClassVar[Mapping[str, str]] name: StrOrBytesPath | None mode: Literal["r", "a", "w", "x"] @@ -156,13 +156,13 @@ class TarFile: stream: bool = False, ) -> None: """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to -read from an existing archive, 'a' to append data to an existing -file or 'w' to create a new file overwriting an existing one. 'mode' -defaults to 'r'. -If 'fileobj' is given, it is used for reading or writing data. If it -can be determined, 'mode' is overridden by 'fileobj's mode. -'fileobj' is not closed, when TarFile is closed. -""" + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. 'mode' + defaults to 'r'. + If 'fileobj' is given, it is used for reading or writing data. If it + can be determined, 'mode' is overridden by 'fileobj's mode. + 'fileobj' is not closed, when TarFile is closed. + """ else: def __init__( self, @@ -181,21 +181,21 @@ can be determined, 'mode' is overridden by 'fileobj's mode. copybufsize: int | None = None, # undocumented ) -> None: """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. `mode' - defaults to 'r'. - If `fileobj' is given, it is used for reading or writing data. If it - can be determined, `mode' is overridden by `fileobj's mode. - `fileobj' is not closed, when TarFile is closed. - """ + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __iter__(self) -> Iterator[TarInfo]: - """Provide an iterator object. - """ + """Provide an iterator object.""" + @overload @classmethod def open( @@ -216,45 +216,45 @@ can be determined, 'mode' is overridden by 'fileobj's mode. errorlevel: int | None = ..., ) -> Self: """Open a tar archive for reading, writing or appending. Return -an appropriate TarFile class. 
- -mode: -'r' or 'r:*' open for reading with transparent compression -'r:' open for reading exclusively uncompressed -'r:gz' open for reading with gzip compression -'r:bz2' open for reading with bzip2 compression -'r:xz' open for reading with lzma compression -'r:zst' open for reading with zstd compression -'a' or 'a:' open for appending, creating the file if necessary -'w' or 'w:' open for writing without compression -'w:gz' open for writing with gzip compression -'w:bz2' open for writing with bzip2 compression -'w:xz' open for writing with lzma compression -'w:zst' open for writing with zstd compression - -'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created -'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created -'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created -'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created -'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - -'r|*' open a stream of tar blocks with transparent compression -'r|' open an uncompressed stream of tar blocks for reading -'r|gz' open a gzip compressed stream of tar blocks -'r|bz2' open a bzip2 compressed stream of tar blocks -'r|xz' open an lzma compressed stream of tar blocks -'r|zst' open a zstd compressed stream of tar blocks -'w|' open an uncompressed stream for writing -'w|gz' open a gzip compressed stream for writing -'w|bz2' open a bzip2 compressed stream for writing -'w|xz' open an lzma compressed stream for writing -'w|zst' open a zstd compressed stream for writing -""" + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'r:zst' open for reading with zstd compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + 'w:zst' open for writing with zstd compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + 'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'r|zst' open a zstd compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + 'w|zst' open a zstd compressed stream for writing + """ if sys.version_info >= (3, 14): @overload @classmethod @@ -279,45 +279,45 @@ mode: 
zstd_dict: ZstdDict | None = None, ) -> Self: """Open a tar archive for reading, writing or appending. Return -an appropriate TarFile class. - -mode: -'r' or 'r:*' open for reading with transparent compression -'r:' open for reading exclusively uncompressed -'r:gz' open for reading with gzip compression -'r:bz2' open for reading with bzip2 compression -'r:xz' open for reading with lzma compression -'r:zst' open for reading with zstd compression -'a' or 'a:' open for appending, creating the file if necessary -'w' or 'w:' open for writing without compression -'w:gz' open for writing with gzip compression -'w:bz2' open for writing with bzip2 compression -'w:xz' open for writing with lzma compression -'w:zst' open for writing with zstd compression - -'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created -'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created -'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created -'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created -'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - -'r|*' open a stream of tar blocks with transparent compression -'r|' open an uncompressed stream of tar blocks for reading -'r|gz' open a gzip compressed stream of tar blocks -'r|bz2' open a bzip2 compressed stream of tar blocks -'r|xz' open an lzma compressed stream of tar blocks -'r|zst' open a zstd compressed stream of tar blocks -'w|' open an uncompressed stream for writing -'w|gz' open a gzip compressed stream for writing -'w|bz2' open a bzip2 compressed stream for writing -'w|xz' open an lzma compressed stream for writing -'w|zst' open a zstd compressed stream for writing -""" + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'r:zst' open for reading with zstd compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + 'w:zst' open for writing with zstd compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + 'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'r|zst' open a zstd compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + 'w|zst' open a zstd compressed stream for writing + """ @overload @classmethod @@ -460,45 +460,46 @@ mode: zstd_dict: ZstdDict | None = None, ) -> Self: """Open a tar archive for reading, writing or appending. Return -an appropriate TarFile class. 
- -mode: -'r' or 'r:*' open for reading with transparent compression -'r:' open for reading exclusively uncompressed -'r:gz' open for reading with gzip compression -'r:bz2' open for reading with bzip2 compression -'r:xz' open for reading with lzma compression -'r:zst' open for reading with zstd compression -'a' or 'a:' open for appending, creating the file if necessary -'w' or 'w:' open for writing without compression -'w:gz' open for writing with gzip compression -'w:bz2' open for writing with bzip2 compression -'w:xz' open for writing with lzma compression -'w:zst' open for writing with zstd compression - -'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created -'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created -'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created -'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created -'x:zst' create a zstd compressed tarfile, raise an exception - if the file is already created - -'r|*' open a stream of tar blocks with transparent compression -'r|' open an uncompressed stream of tar blocks for reading -'r|gz' open a gzip compressed stream of tar blocks -'r|bz2' open a bzip2 compressed stream of tar blocks -'r|xz' open an lzma compressed stream of tar blocks -'r|zst' open a zstd compressed stream of tar blocks -'w|' open an uncompressed stream for writing -'w|gz' open a gzip compressed stream for writing -'w|bz2' open a bzip2 compressed stream for writing -'w|xz' open an lzma compressed stream for writing -'w|zst' open a zstd compressed stream for writing -""" + an appropriate TarFile class. + + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'r:xz' open for reading with lzma compression + 'r:zst' open for reading with zstd compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + 'w:xz' open for writing with lzma compression + 'w:zst' open for writing with zstd compression + + 'x' or 'x:' create a tarfile exclusively without compression, raise + an exception if the file is already created + 'x:gz' create a gzip compressed tarfile, raise an exception + if the file is already created + 'x:bz2' create a bzip2 compressed tarfile, raise an exception + if the file is already created + 'x:xz' create an lzma compressed tarfile, raise an exception + if the file is already created + 'x:zst' create a zstd compressed tarfile, raise an exception + if the file is already created + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'r|xz' open an lzma compressed stream of tar blocks + 'r|zst' open a zstd compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + 'w|xz' open an lzma compressed stream for writing + 'w|zst' open a zstd compressed stream for writing + """ + @overload @classmethod def open( @@ -654,8 +655,8 @@ mode: debug: int | None = ..., 
errorlevel: int | None = ..., ) -> Self: - """Open uncompressed tar archive name for reading or writing. - """ + """Open uncompressed tar archive name for reading or writing.""" + @overload @classmethod def gzopen( @@ -675,8 +676,9 @@ mode: errorlevel: int | None = ..., ) -> Self: """Open gzip compressed tar archive name for reading or writing. -Appending is not allowed. -""" + Appending is not allowed. + """ + @overload @classmethod def gzopen( @@ -714,8 +716,9 @@ Appending is not allowed. errorlevel: int | None = ..., ) -> Self: """Open bzip2 compressed tar archive name for reading or writing. -Appending is not allowed. -""" + Appending is not allowed. + """ + @overload @classmethod def bz2open( @@ -752,8 +755,8 @@ Appending is not allowed. errorlevel: int | None = ..., ) -> Self: """Open lzma compressed tar archive name for reading or writing. -Appending is not allowed. -""" + Appending is not allowed. + """ if sys.version_info >= (3, 14): @overload @classmethod @@ -776,8 +779,9 @@ Appending is not allowed. errorlevel: int | None = ..., ) -> Self: """Open zstd compressed tar archive name for reading or writing. -Appending is not allowed. -""" + Appending is not allowed. + """ + @overload @classmethod def zstopen( @@ -801,29 +805,33 @@ Appending is not allowed. def getmember(self, name: str) -> TarInfo: """Return a TarInfo object for member 'name'. If 'name' can not be -found in the archive, KeyError is raised. If a member occurs more -than once in the archive, its last occurrence is assumed to be the -most up-to-date version. -""" + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + def getmembers(self) -> _list[TarInfo]: """Return the members of the archive as a list of TarInfo objects. The -list has the same order as the members in the archive. -""" + list has the same order as the members in the archive. + """ + def getnames(self) -> _list[str]: """Return the members of the archive as a list of their names. It has -the same order as the list returned by getmembers(). -""" + the same order as the list returned by getmembers(). + """ + def list(self, verbose: bool = True, *, members: Iterable[TarInfo] | None = None) -> None: """Print a table of contents to sys.stdout. If 'verbose' is False, only -the names of the members are printed. If it is True, an 'ls -l'-like -output is produced. 'members' is optional and must be a subset of the -list returned by getmembers(). -""" + the names of the members are printed. If it is True, an 'ls -l'-like + output is produced. 'members' is optional and must be a subset of the + list returned by getmembers(). + """ + def next(self) -> TarInfo | None: """Return the next member of the archive as a TarInfo object, when -TarFile is opened for reading. Return None if there is no more -available. -""" + TarFile is opened for reading. Return None if there is no more + available. + """ # Calling this method without `filter` is deprecated, but it may be set either on the class or in an # individual call, so we can't mark it as @deprecated here. def extractall( @@ -835,17 +843,17 @@ available. filter: _TarfileFilter | None = None, ) -> None: """Extract all members from the archive to the current working -directory and set owner, modification time and permissions on -directories afterwards. 'path' specifies a different directory -to extract to. 'members' is optional and must be a subset of the -list returned by getmembers(). 
If 'numeric_owner' is True, only -the numbers for user/group names are used and not the names. - -The 'filter' function will be called on each member just -before extraction. -It can return a changed TarInfo or None to skip the member. -String names of common filters are accepted. -""" + directory and set owner, modification time and permissions on + directories afterwards. 'path' specifies a different directory + to extract to. 'members' is optional and must be a subset of the + list returned by getmembers(). If 'numeric_owner' is True, only + the numbers for user/group names are used and not the names. + + The 'filter' function will be called on each member just + before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. + """ # Same situation as for `extractall`. def extract( self, @@ -857,17 +865,18 @@ String names of common filters are accepted. filter: _TarfileFilter | None = None, ) -> None: """Extract a member from the archive to the current working directory, -using its full name. Its file information is extracted as accurately -as possible. 'member' may be a filename or a TarInfo object. You can -specify a different directory using 'path'. File attributes (owner, -mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' -is True, only the numbers for user/group names are used and not -the names. - -The 'filter' function will be called before extraction. -It can return a changed TarInfo or None to skip the member. -String names of common filters are accepted. -""" + using its full name. Its file information is extracted as accurately + as possible. 'member' may be a filename or a TarInfo object. You can + specify a different directory using 'path'. File attributes (owner, + mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' + is True, only the numbers for user/group names are used and not + the names. + + The 'filter' function will be called before extraction. + It can return a changed TarInfo or None to skip the member. + String names of common filters are accepted. + """ + def _extract_member( self, tarinfo: TarInfo, @@ -879,61 +888,67 @@ String names of common filters are accepted. extraction_root: str | None = None, ) -> None: # undocumented """Extract the filtered TarInfo object tarinfo to a physical -file called targetpath. + file called targetpath. + + filter_function is only used when extracting a *different* + member (e.g. as fallback to creating a symlink) + """ -filter_function is only used when extracting a *different* -member (e.g. as fallback to creating a symlink) -""" def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: """Extract a member from the archive as a file object. 'member' may be -a filename or a TarInfo object. If 'member' is a regular file or -a link, an io.BufferedReader object is returned. For all other -existing members, None is returned. If 'member' does not appear -in the archive, KeyError is raised. -""" - def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a directory called targetpath. + a filename or a TarInfo object. If 'member' is a regular file or + a link, an io.BufferedReader object is returned. For all other + existing members, None is returned. If 'member' does not appear + in the archive, KeyError is raised. 
""" + + def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a directory called targetpath.""" + def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a file called targetpath. - """ + """Make a file called targetpath.""" + def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented """Make a file from a TarInfo object with an unknown type -at targetpath. -""" - def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a fifo called targetpath. + at targetpath. """ + + def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Make a fifo called targetpath.""" + def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Make a character or block device called targetpath. - """ + """Make a character or block device called targetpath.""" + def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. + (platform limitation), we try to make a copy of the referenced file + instead of a link. """ + def makelink_with_filter( self, tarinfo: TarInfo, targetpath: StrOrBytesPath, filter_function: _FilterFunction, extraction_root: str ) -> None: # undocumented """Make a (symbolic) link called targetpath. If it cannot be created -(platform limitation), we try to make a copy of the referenced file -instead of a link. + (platform limitation), we try to make a copy of the referenced file + instead of a link. + + filter_function is only used when extracting a *different* + member (e.g. as fallback to creating a link). + """ -filter_function is only used when extracting a *different* -member (e.g. as fallback to creating a link). -""" def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: # undocumented """Set owner of targetpath according to tarinfo. If numeric_owner -is True, use .gid/.uid instead of .gname/.uname. If numeric_owner -is False, fall back to .gid/.uid when the search based on name -fails. -""" - def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Set file permissions of targetpath according to tarinfo. + is True, use .gid/.uid instead of .gname/.uname. If numeric_owner + is False, fall back to .gid/.uid when the search based on name + fails. """ + + def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented + """Set file permissions of targetpath according to tarinfo.""" + def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: # undocumented - """Set modification time of targetpath according to tarinfo. - """ + """Set modification time of targetpath according to tarinfo.""" + def add( self, name: StrPath, @@ -943,63 +958,65 @@ fails. filter: Callable[[TarInfo], TarInfo | None] | None = None, ) -> None: """Add the file 'name' to the archive. 'name' may be any type of file -(directory, fifo, symbolic link, etc.). If given, 'arcname' -specifies an alternative name for the file in the archive. -Directories are added recursively by default. This can be avoided by -setting 'recursive' to False. 'filter' is a function -that expects a TarInfo object argument and returns the changed -TarInfo object, if it returns None the TarInfo object will be -excluded from the archive. 
-""" + (directory, fifo, symbolic link, etc.). If given, 'arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting 'recursive' to False. 'filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + def addfile(self, tarinfo: TarInfo, fileobj: SupportsRead[bytes] | None = None) -> None: """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents -a non zero-size regular file, the 'fileobj' argument should be a binary file, -and tarinfo.size bytes are read from it and added to the archive. -You can create TarInfo objects directly, or by using gettarinfo(). -""" + a non zero-size regular file, the 'fileobj' argument should be a binary file, + and tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects directly, or by using gettarinfo(). + """ + def gettarinfo( self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None ) -> TarInfo: """Create a TarInfo object from the result of os.stat or equivalent -on an existing file. The file is either named by 'name', or -specified as a file object 'fileobj' with a file descriptor. If -given, 'arcname' specifies an alternative name for the file in the -archive, otherwise, the name is taken from the 'name' attribute of -'fileobj', or the 'name' argument. The name should be a text -string. -""" + on an existing file. The file is either named by 'name', or + specified as a file object 'fileobj' with a file descriptor. If + given, 'arcname' specifies an alternative name for the file in the + archive, otherwise, the name is taken from the 'name' attribute of + 'fileobj', or the 'name' argument. The name should be a text + string. + """ + def close(self) -> None: """Close the TarFile. In write-mode, two finishing zero blocks are -appended to the archive. -""" + appended to the archive. + """ open = TarFile.open def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: """Return True if name points to a tar archive that we -are able to handle, else return False. + are able to handle, else return False. -'name' should be a string, file, or file-like object. -""" + 'name' should be a string, file, or file-like object. + """ class TarError(Exception): - """Base exception. -""" + """Base exception.""" + class ReadError(TarError): - """Exception for unreadable tar archives. -""" + """Exception for unreadable tar archives.""" + class CompressionError(TarError): - """Exception for unavailable compression methods. -""" + """Exception for unavailable compression methods.""" + class StreamError(TarError): - """Exception for unsupported operations on stream-like TarFiles. -""" + """Exception for unsupported operations on stream-like TarFiles.""" + class ExtractError(TarError): - """General exception for extract errors. -""" + """General exception for extract errors.""" + class HeaderError(TarError): - """Base exception for header errors. -""" + """Base exception for header errors.""" class FilterError(TarError): # This attribute is only set directly on the subclasses, but the documentation guarantees @@ -1030,11 +1047,12 @@ def data_filter(member: TarInfo, dest_path: str) -> TarInfo: ... class TarInfo: """Informational class which holds the details about an -archive member given by a tar header block. 
-TarInfo objects are returned by TarFile.getmember(), -TarFile.getmembers() and TarFile.gettarinfo() and are -usually created internally. -""" + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + __slots__ = ( "name", "mode", @@ -1077,8 +1095,8 @@ usually created internally. pax_headers: Mapping[str, str] def __init__(self, name: str = "") -> None: """Construct a TarInfo object. name is the optional name -of the member. -""" + of the member. + """ if sys.version_info >= (3, 13): @property @deprecated("Deprecated since Python 3.13; will be removed in Python 3.16.") @@ -1091,17 +1109,18 @@ of the member. @classmethod def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: - """Construct a TarInfo object from a 512 byte bytes object. - """ + """Construct a TarInfo object from a 512 byte bytes object.""" + @classmethod def fromtarfile(cls, tarfile: TarFile) -> Self: """Return the next TarInfo object from TarFile object -tarfile. -""" + tarfile. + """ + @property def linkpath(self) -> str: - """In pax headers, "linkname" is called "linkpath". -""" + """In pax headers, "linkname" is called "linkpath".""" + @linkpath.setter def linkpath(self, linkname: str) -> None: ... def replace( @@ -1117,58 +1136,54 @@ tarfile. gname: str = ..., deep: bool = True, ) -> Self: - """Return a deep copy of self with the given attributes replaced. - """ + """Return a deep copy of self with the given attributes replaced.""" + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: - """Return the TarInfo's attributes as a dictionary. - """ + """Return the TarInfo's attributes as a dictionary.""" + def tobuf(self, format: _TarFormat | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: - """Return a tar header as a string of 512 byte blocks. - """ - def create_ustar_header( - self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str - ) -> bytes: - """Return the object as a ustar header block. - """ - def create_gnu_header( - self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str - ) -> bytes: - """Return the object as a GNU header block sequence. - """ + """Return a tar header as a string of 512 byte blocks.""" + + def create_ustar_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str) -> bytes: + """Return the object as a ustar header block.""" + + def create_gnu_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str) -> bytes: + """Return the object as a GNU header block sequence.""" + def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: """Return the object as a ustar header block. If it cannot be -represented this way, prepend a pax extended header sequence -with supplement information. -""" + represented this way, prepend a pax extended header sequence + with supplement information. + """ + @classmethod def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: - """Return the object as a pax global header block sequence. - """ + """Return the object as a pax global header block sequence.""" + def isfile(self) -> bool: - """Return True if the Tarinfo object is a regular file. 
-""" + """Return True if the Tarinfo object is a regular file.""" + def isreg(self) -> bool: - """Return True if the Tarinfo object is a regular file. -""" + """Return True if the Tarinfo object is a regular file.""" + def issparse(self) -> bool: ... def isdir(self) -> bool: - """Return True if it is a directory. -""" + """Return True if it is a directory.""" + def issym(self) -> bool: - """Return True if it is a symbolic link. -""" + """Return True if it is a symbolic link.""" + def islnk(self) -> bool: - """Return True if it is a hard link. -""" + """Return True if it is a hard link.""" + def ischr(self) -> bool: - """Return True if it is a character device. -""" + """Return True if it is a character device.""" + def isblk(self) -> bool: - """Return True if it is a block device. -""" + """Return True if it is a block device.""" + def isfifo(self) -> bool: - """Return True if it is a FIFO. -""" + """Return True if it is a FIFO.""" + def isdev(self) -> bool: - """Return True if it is one of character device, block device or FIFO. -""" + """Return True if it is one of character device, block device or FIFO.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi index c221b62748a2a..24255fbb39cec 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/telnetlib.pyi @@ -30,6 +30,7 @@ To do: option on one of the read calls only """ + import socket from collections.abc import Callable, MutableSequence, Sequence from re import Match, Pattern @@ -171,6 +172,7 @@ class Telnet: No other action is done afterwards by telnetlib. """ + host: str | None # undocumented sock: socket.socket | None # undocumented def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: @@ -180,6 +182,7 @@ class Telnet: With a hostname argument, it connects the instance; port number and timeout are optional. """ + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: """Connect to a host. @@ -188,6 +191,7 @@ class Telnet: Don't try to reopen an already connected instance. """ + def msg(self, msg: str, *args: Any) -> None: """Print a debug message, when the debug level is > 0. @@ -195,21 +199,23 @@ class Telnet: message using the standard string formatting operator. """ + def set_debuglevel(self, debuglevel: int) -> None: """Set the debug level. The higher it is, the more debug output you get (on sys.stdout). """ + def close(self) -> None: - """Close the connection. -""" + """Close the connection.""" + def get_socket(self) -> socket.socket: - """Return the socket object used internally. -""" + """Return the socket object used internally.""" + def fileno(self) -> int: - """Return the fileno() of the socket object used internally. -""" + """Return the fileno() of the socket object used internally.""" + def write(self, buffer: bytes) -> None: """Write a string to the socket, doubling any IAC characters. @@ -217,6 +223,7 @@ class Telnet: OSError if the connection is closed. """ + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: """Read until a given string is encountered or until timeout. @@ -225,9 +232,10 @@ class Telnet: is closed and no cooked data is available. """ + def read_all(self) -> bytes: - """Read all data until EOF; block until connection closed. -""" + """Read all data until EOF; block until connection closed.""" + def read_some(self) -> bytes: """Read at least one byte of cooked data unless EOF is hit. 
@@ -235,6 +243,7 @@ class Telnet: available. """ + def read_very_eager(self) -> bytes: """Read everything that's possible without blocking in I/O (eager). @@ -243,6 +252,7 @@ class Telnet: Don't block unless in the midst of an IAC sequence. """ + def read_eager(self) -> bytes: """Read readily available data. @@ -251,6 +261,7 @@ class Telnet: Don't block unless in the midst of an IAC sequence. """ + def read_lazy(self) -> bytes: """Process and return data that's already in the queues (lazy). @@ -259,6 +270,7 @@ class Telnet: unless in the midst of an IAC sequence. """ + def read_very_lazy(self) -> bytes: """Return any data available in the cooked queue (very lazy). @@ -266,6 +278,7 @@ class Telnet: Return b'' if no cooked data available otherwise. Don't block. """ + def read_sb_data(self) -> bytes: """Return any data available in the SB ... SE queue. @@ -274,9 +287,10 @@ class Telnet: found, old unread SB data will be discarded. Don't block. """ + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: - """Provide a callback function called after each receipt of a telnet option. -""" + """Provide a callback function called after each receipt of a telnet option.""" + def process_rawq(self) -> None: """Transfer from raw queue to cooked queue. @@ -284,6 +298,7 @@ class Telnet: the midst of an IAC sequence. """ + def rawq_getchar(self) -> bytes: """Get next char from raw queue. @@ -291,6 +306,7 @@ class Telnet: when connection is closed. """ + def fill_rawq(self) -> None: """Fill raw queue from exactly one recv() system call. @@ -298,18 +314,19 @@ class Telnet: connection is closed. """ + def sock_avail(self) -> bool: - """Test whether data is available on the socket. -""" + """Test whether data is available on the socket.""" + def interact(self) -> None: - """Interaction function, emulates a very dumb telnet client. -""" + """Interaction function, emulates a very dumb telnet client.""" + def mt_interact(self) -> None: - """Multithreaded version of interact(). -""" + """Multithreaded version of interact().""" + def listener(self) -> None: - """Helper for mt_interact() -- this executes in the other thread. -""" + """Helper for mt_interact() -- this executes in the other thread.""" + def expect( self, list: MutableSequence[Pattern[bytes] | bytes] | Sequence[Pattern[bytes]], timeout: float | None = None ) -> tuple[int, Match[bytes] | None, bytes]: @@ -334,10 +351,10 @@ class Telnet: results are undeterministic, and may depend on the I/O timing. """ + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self) -> None: - """Destructor -- close the connection. -""" + """Destructor -- close the connection.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi index 17752f23338c3..7b0af097ed1d6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tempfile.pyi @@ -22,6 +22,7 @@ This module also provides some data items to the user: any routine from this module, it will be considered as another candidate location to store temporary files. """ + import io import sys from _typeshed import ( @@ -78,27 +79,28 @@ if sys.version_info >= (3, 12): delete_on_close: bool = True, ) -> _TemporaryFileWrapper[str]: """Create and return a temporary file. 
-Arguments: -'prefix', 'suffix', 'dir' -- as for mkstemp. -'mode' -- the mode argument to io.open (default "w+b"). -'buffering' -- the buffer size argument to io.open (default -1). -'encoding' -- the encoding argument to io.open (default None) -'newline' -- the newline argument to io.open (default None) -'delete' -- whether the file is automatically deleted (default True). -'delete_on_close' -- if 'delete', whether the file is deleted on close - (default True) or otherwise either on context manager exit - (if context manager was used) or on object finalization. . -'errors' -- the errors argument to io.open (default None) -The file is created as mkstemp() would do it. - -Returns an object with a file-like interface; the name of the file -is accessible as its 'name' attribute. The file will be automatically -deleted when it is closed unless the 'delete' argument is set to False. - -On POSIX, NamedTemporaryFiles cannot be automatically deleted if -the creating process is terminated abruptly with a SIGKILL signal. -Windows can delete the file even in this case. -""" + Arguments: + 'prefix', 'suffix', 'dir' -- as for mkstemp. + 'mode' -- the mode argument to io.open (default "w+b"). + 'buffering' -- the buffer size argument to io.open (default -1). + 'encoding' -- the encoding argument to io.open (default None) + 'newline' -- the newline argument to io.open (default None) + 'delete' -- whether the file is automatically deleted (default True). + 'delete_on_close' -- if 'delete', whether the file is deleted on close + (default True) or otherwise either on context manager exit + (if context manager was used) or on object finalization. . + 'errors' -- the errors argument to io.open (default None) + The file is created as mkstemp() would do it. + + Returns an object with a file-like interface; the name of the file + is accessible as its 'name' attribute. The file will be automatically + deleted when it is closed unless the 'delete' argument is set to False. + + On POSIX, NamedTemporaryFiles cannot be automatically deleted if + the creating process is terminated abruptly with a SIGKILL signal. + Windows can delete the file even in this case. + """ + @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -143,24 +145,25 @@ else: errors: str | None = None, ) -> _TemporaryFileWrapper[str]: """Create and return a temporary file. - Arguments: - 'prefix', 'suffix', 'dir' -- as for mkstemp. - 'mode' -- the mode argument to io.open (default "w+b"). - 'buffering' -- the buffer size argument to io.open (default -1). - 'encoding' -- the encoding argument to io.open (default None) - 'newline' -- the newline argument to io.open (default None) - 'delete' -- whether the file is deleted on close (default True). - 'errors' -- the errors argument to io.open (default None) - The file is created as mkstemp() would do it. - - Returns an object with a file-like interface; the name of the file - is accessible as its 'name' attribute. The file will be automatically - deleted when it is closed unless the 'delete' argument is set to False. - - On POSIX, NamedTemporaryFiles cannot be automatically deleted if - the creating process is terminated abruptly with a SIGKILL signal. - Windows can delete the file even in this case. - """ + Arguments: + 'prefix', 'suffix', 'dir' -- as for mkstemp. + 'mode' -- the mode argument to io.open (default "w+b"). + 'buffering' -- the buffer size argument to io.open (default -1). 
+ 'encoding' -- the encoding argument to io.open (default None) + 'newline' -- the newline argument to io.open (default None) + 'delete' -- whether the file is deleted on close (default True). + 'errors' -- the errors argument to io.open (default None) + The file is created as mkstemp() would do it. + + Returns an object with a file-like interface; the name of the file + is accessible as its 'name' attribute. The file will be automatically + deleted when it is closed unless the 'delete' argument is set to False. + + On POSIX, NamedTemporaryFiles cannot be automatically deleted if + the creating process is terminated abruptly with a SIGKILL signal. + Windows can delete the file even in this case. + """ + @overload def NamedTemporaryFile( mode: OpenBinaryMode = "w+b", @@ -205,18 +208,19 @@ else: errors: str | None = None, ) -> io.TextIOWrapper: """Create and return a temporary file. -Arguments: -'prefix', 'suffix', 'dir' -- as for mkstemp. -'mode' -- the mode argument to io.open (default "w+b"). -'buffering' -- the buffer size argument to io.open (default -1). -'encoding' -- the encoding argument to io.open (default None) -'newline' -- the newline argument to io.open (default None) -'errors' -- the errors argument to io.open (default None) -The file is created as mkstemp() would do it. - -Returns an object with a file-like interface. The file has no -name, and will cease to exist when it is closed. -""" + Arguments: + 'prefix', 'suffix', 'dir' -- as for mkstemp. + 'mode' -- the mode argument to io.open (default "w+b"). + 'buffering' -- the buffer size argument to io.open (default -1). + 'encoding' -- the encoding argument to io.open (default None) + 'newline' -- the newline argument to io.open (default None) + 'errors' -- the errors argument to io.open (default None) + The file is created as mkstemp() would do it. + + Returns an object with a file-like interface. The file has no + name, and will cease to exist when it is closed. + """ + @overload def TemporaryFile( mode: OpenBinaryMode, @@ -292,10 +296,11 @@ name, and will cease to exist when it is closed. class _TemporaryFileWrapper(IO[AnyStr]): """Temporary file wrapper -This class provides a wrapper around files opened for -temporary use. In particular, it seeks to automatically -remove the file when it is no longer needed. -""" + This class provides a wrapper around files opened for + temporary use. In particular, it seeks to automatically + remove the file when it is no longer needed. + """ + file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool @@ -309,8 +314,8 @@ remove the file when it is no longer needed. def __getattr__(self, name: str) -> Any: ... def close(self) -> None: """ -Close the temporary file, possibly deleting it. -""" + Close the temporary file, possibly deleting it. + """ # These methods don't exist directly on this object, but # are delegated to the underlying IO object through __getattr__. # We need to add them here so that this class is concrete. @@ -360,9 +365,10 @@ else: # like one. class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): """Temporary file wrapper, specialized to switch from BytesIO -or StringIO to a real file when it exceeds a certain size or -when a fileno is needed. -""" + or StringIO to a real file when it exceeds a certain size or + when a fileno is needed. + """ + _file: IO[AnyStr] @property def encoding(self) -> str: ... # undocumented @@ -487,33 +493,34 @@ when a fileno is needed. def seekable(self) -> bool: ... 
def writable(self) -> bool: ... def __next__(self) -> AnyStr: # type: ignore[override] - """Implement next(self). -""" + """Implement next(self).""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class TemporaryDirectory(Generic[AnyStr]): """Create and return a temporary directory. This has the same -behavior as mkdtemp but can be used as a context manager. For -example: - - with TemporaryDirectory() as tmpdir: - ... - -Upon exiting the context, the directory and everything contained -in it are removed (unless delete=False is passed or an exception -is raised during cleanup and ignore_cleanup_errors is not True). - -Optional Arguments: - suffix - A str suffix for the directory name. (see mkdtemp) - prefix - A str prefix for the directory name. (see mkdtemp) - dir - A directory to create this temp dir in. (see mkdtemp) - ignore_cleanup_errors - False; ignore exceptions during cleanup? - delete - True; whether the directory is automatically deleted. -""" + behavior as mkdtemp but can be used as a context manager. For + example: + + with TemporaryDirectory() as tmpdir: + ... + + Upon exiting the context, the directory and everything contained + in it are removed (unless delete=False is passed or an exception + is raised during cleanup and ignore_cleanup_errors is not True). + + Optional Arguments: + suffix - A str suffix for the directory name. (see mkdtemp) + prefix - A str prefix for the directory name. (see mkdtemp) + dir - A directory to create this temp dir in. (see mkdtemp) + ignore_cleanup_errors - False; ignore exceptions during cleanup? + delete - True; whether the directory is automatically deleted. + """ + name: AnyStr if sys.version_info >= (3, 12): @overload @@ -572,8 +579,8 @@ Optional Arguments: def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ # The overloads overlap, but they should still work fine. @overload @@ -581,32 +588,33 @@ def mkstemp( suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False ) -> tuple[int, str]: """User-callable function to create and return a unique temporary -file. The return value is a pair (fd, name) where fd is the -file descriptor returned by os.open, and name is the filename. + file. The return value is a pair (fd, name) where fd is the + file descriptor returned by os.open, and name is the filename. -If 'suffix' is not None, the file name will end with that suffix, -otherwise there will be no suffix. + If 'suffix' is not None, the file name will end with that suffix, + otherwise there will be no suffix. -If 'prefix' is not None, the file name will begin with that prefix, -otherwise a default prefix is used. + If 'prefix' is not None, the file name will begin with that prefix, + otherwise a default prefix is used. -If 'dir' is not None, the file will be created in that directory, -otherwise a default directory is used. + If 'dir' is not None, the file will be created in that directory, + otherwise a default directory is used. -If 'text' is specified and true, the file is opened in text -mode. Else (the default) the file is opened in binary mode. 
+ If 'text' is specified and true, the file is opened in text + mode. Else (the default) the file is opened in binary mode. -If any of 'suffix', 'prefix' and 'dir' are not None, they must be the -same type. If they are bytes, the returned name will be bytes; str -otherwise. + If any of 'suffix', 'prefix' and 'dir' are not None, they must be the + same type. If they are bytes, the returned name will be bytes; str + otherwise. -The file is readable and writable only by the creating user ID. -If the operating system uses permission bits to indicate whether a -file is executable, the file is executable by no one. The file -descriptor is not inherited by children of this process. + The file is readable and writable only by the creating user ID. + If the operating system uses permission bits to indicate whether a + file is executable, the file is executable by no one. The file + descriptor is not inherited by children of this process. + + Caller is responsible for deleting the file when done with it. + """ -Caller is responsible for deleting the file when done with it. -""" @overload def mkstemp( suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False @@ -616,41 +624,42 @@ def mkstemp( @overload def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: """User-callable function to create and return a unique temporary -directory. The return value is the pathname of the directory. + directory. The return value is the pathname of the directory. -Arguments are as for mkstemp, except that the 'text' argument is -not accepted. + Arguments are as for mkstemp, except that the 'text' argument is + not accepted. -The directory is readable, writable, and searchable only by the -creating user. + The directory is readable, writable, and searchable only by the + creating user. + + Caller is responsible for deleting the directory when done with it. + """ -Caller is responsible for deleting the directory when done with it. -""" @overload def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... @deprecated("Deprecated since Python 2.3. Use `mkstemp()` or `NamedTemporaryFile(delete=False)` instead.") def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: """User-callable function to return a unique temporary file name. The -file is not created. + file is not created. -Arguments are similar to mkstemp, except that the 'text' argument is -not accepted, and suffix=None, prefix=None and bytes file names are not -supported. + Arguments are similar to mkstemp, except that the 'text' argument is + not accepted, and suffix=None, prefix=None and bytes file names are not + supported. + + THIS FUNCTION IS UNSAFE AND SHOULD NOT BE USED. The file name may + refer to a file that did not exist at some point, but by the time + you get around to creating it, someone else may have beaten you to + the punch. + """ -THIS FUNCTION IS UNSAFE AND SHOULD NOT BE USED. The file name may -refer to a file that did not exist at some point, but by the time -you get around to creating it, someone else may have beaten you to -the punch. -""" def gettempdirb() -> bytes: - """Returns tempfile.tempdir as bytes. -""" + """Returns tempfile.tempdir as bytes.""" + def gettempprefixb() -> bytes: - """The default prefix for temporary directories as bytes. 
-""" + """The default prefix for temporary directories as bytes.""" + def gettempdir() -> str: - """Returns tempfile.tempdir as str. -""" + """Returns tempfile.tempdir as str.""" + def gettempprefix() -> str: - """The default prefix for temporary directories as string. -""" + """The default prefix for temporary directories as string.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi index 6913684279139..a8c402ab07309 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/termios.pyi @@ -7,6 +7,7 @@ All functions in this module take a file descriptor fd as their first argument. This can be an integer file descriptor, such as returned by sys.stdin.fileno(), or a file object, such as sys.stdin itself. """ + import sys from _typeshed import FileDescriptorLike from typing import Any, Final @@ -303,57 +304,62 @@ if sys.platform != "win32": def tcgetattr(fd: FileDescriptorLike, /) -> _AttrReturn: """Get the tty attributes for file descriptor fd. -Returns a list [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] -where cc is a list of the tty special characters (each a string of -length 1, except the items with indices VMIN and VTIME, which are -integers when these fields are defined). The interpretation of the -flags and the speeds as well as the indexing in the cc array must be -done using the symbolic constants defined in this module. -""" + Returns a list [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] + where cc is a list of the tty special characters (each a string of + length 1, except the items with indices VMIN and VTIME, which are + integers when these fields are defined). The interpretation of the + flags and the speeds as well as the indexing in the cc array must be + done using the symbolic constants defined in this module. + """ + def tcsetattr(fd: FileDescriptorLike, when: int, attributes: _Attr, /) -> None: """Set the tty attributes for file descriptor fd. -The attributes to be set are taken from the attributes argument, which -is a list like the one returned by tcgetattr(). The when argument -determines when the attributes are changed: termios.TCSANOW to -change immediately, termios.TCSADRAIN to change after transmitting all -queued output, or termios.TCSAFLUSH to change after transmitting all -queued output and discarding all queued input. -""" + The attributes to be set are taken from the attributes argument, which + is a list like the one returned by tcgetattr(). The when argument + determines when the attributes are changed: termios.TCSANOW to + change immediately, termios.TCSADRAIN to change after transmitting all + queued output, or termios.TCSAFLUSH to change after transmitting all + queued output and discarding all queued input. + """ + def tcsendbreak(fd: FileDescriptorLike, duration: int, /) -> None: """Send a break on file descriptor fd. -A zero duration sends a break for 0.25-0.5 seconds; a nonzero duration -has a system dependent meaning. -""" + A zero duration sends a break for 0.25-0.5 seconds; a nonzero duration + has a system dependent meaning. + """ + def tcdrain(fd: FileDescriptorLike, /) -> None: - """Wait until all output written to file descriptor fd has been transmitted. -""" + """Wait until all output written to file descriptor fd has been transmitted.""" + def tcflush(fd: FileDescriptorLike, queue: int, /) -> None: """Discard queued data on file descriptor fd. 
-The queue selector specifies which queue: termios.TCIFLUSH for the input -queue, termios.TCOFLUSH for the output queue, or termios.TCIOFLUSH for -both queues. -""" + The queue selector specifies which queue: termios.TCIFLUSH for the input + queue, termios.TCOFLUSH for the output queue, or termios.TCIOFLUSH for + both queues. + """ + def tcflow(fd: FileDescriptorLike, action: int, /) -> None: """Suspend or resume input or output on file descriptor fd. -The action argument can be termios.TCOOFF to suspend output, -termios.TCOON to restart output, termios.TCIOFF to suspend input, -or termios.TCION to restart input. -""" + The action argument can be termios.TCOOFF to suspend output, + termios.TCOON to restart output, termios.TCIOFF to suspend input, + or termios.TCION to restart input. + """ if sys.version_info >= (3, 11): def tcgetwinsize(fd: FileDescriptorLike, /) -> tuple[int, int]: """Get the tty winsize for file descriptor fd. -Returns a tuple (ws_row, ws_col). -""" + Returns a tuple (ws_row, ws_col). + """ + def tcsetwinsize(fd: FileDescriptorLike, winsize: tuple[int, int], /) -> None: """Set the tty winsize for file descriptor fd. -The winsize to be set is taken from the winsize argument, which -is a two-item tuple (ws_row, ws_col) like the one returned by tcgetwinsize(). -""" + The winsize to be set is taken from the winsize argument, which + is a two-item tuple (ws_row, ws_col) like the one returned by tcgetwinsize(). + """ class error(Exception): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi index cf41ed1a66098..d24ccdebc33fb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/textwrap.pyi @@ -1,5 +1,5 @@ -"""Text wrapping and filling. -""" +"""Text wrapping and filling.""" + from collections.abc import Callable from re import Pattern @@ -7,52 +7,53 @@ __all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] class TextWrapper: """ -Object for wrapping/filling text. The public interface consists of -the wrap() and fill() methods; the other methods are just there for -subclasses to override in order to tweak the default behaviour. -If you want to completely replace the main wrapping algorithm, -you'll probably have to override _wrap_chunks(). - -Several instance attributes control various aspects of wrapping: - width (default: 70) - the maximum width of wrapped lines (unless break_long_words - is false) - initial_indent (default: "") - string that will be prepended to the first line of wrapped - output. Counts towards the line's width. - subsequent_indent (default: "") - string that will be prepended to all lines save the first - of wrapped output; also counts towards each line's width. - expand_tabs (default: true) - Expand tabs in input text to spaces before further processing. - Each tab will become 0 .. 'tabsize' spaces, depending on its position - in its line. If false, each tab is treated as a single character. - tabsize (default: 8) - Expand tabs in input text to 0 .. 'tabsize' spaces, unless - 'expand_tabs' is false. - replace_whitespace (default: true) - Replace all whitespace characters in the input text by spaces - after tab expansion. Note that if expand_tabs is false and - replace_whitespace is true, every tab will be converted to a - single space! - fix_sentence_endings (default: false) - Ensure that sentence-ending punctuation is always followed - by two spaces. 
Off by default because the algorithm is - (unavoidably) imperfect. - break_long_words (default: true) - Break words longer than 'width'. If false, those words will not - be broken, and some lines might be longer than 'width'. - break_on_hyphens (default: true) - Allow breaking hyphenated words. If true, wrapping will occur - preferably on whitespaces and right after hyphens part of - compound words. - drop_whitespace (default: true) - Drop leading and trailing whitespace from lines. - max_lines (default: None) - Truncate wrapped lines. - placeholder (default: ' [...]') - Append to the last line of truncated text. -""" + Object for wrapping/filling text. The public interface consists of + the wrap() and fill() methods; the other methods are just there for + subclasses to override in order to tweak the default behaviour. + If you want to completely replace the main wrapping algorithm, + you'll probably have to override _wrap_chunks(). + + Several instance attributes control various aspects of wrapping: + width (default: 70) + the maximum width of wrapped lines (unless break_long_words + is false) + initial_indent (default: "") + string that will be prepended to the first line of wrapped + output. Counts towards the line's width. + subsequent_indent (default: "") + string that will be prepended to all lines save the first + of wrapped output; also counts towards each line's width. + expand_tabs (default: true) + Expand tabs in input text to spaces before further processing. + Each tab will become 0 .. 'tabsize' spaces, depending on its position + in its line. If false, each tab is treated as a single character. + tabsize (default: 8) + Expand tabs in input text to 0 .. 'tabsize' spaces, unless + 'expand_tabs' is false. + replace_whitespace (default: true) + Replace all whitespace characters in the input text by spaces + after tab expansion. Note that if expand_tabs is false and + replace_whitespace is true, every tab will be converted to a + single space! + fix_sentence_endings (default: false) + Ensure that sentence-ending punctuation is always followed + by two spaces. Off by default because the algorithm is + (unavoidably) imperfect. + break_long_words (default: true) + Break words longer than 'width'. If false, those words will not + be broken, and some lines might be longer than 'width'. + break_on_hyphens (default: true) + Allow breaking hyphenated words. If true, wrapping will occur + preferably on whitespaces and right after hyphens part of + compound words. + drop_whitespace (default: true) + Drop leading and trailing whitespace from lines. + max_lines (default: None) + Truncate wrapped lines. + placeholder (default: ' [...]') + Append to the last line of truncated text. + """ + width: int initial_indent: str subsequent_indent: str @@ -94,72 +95,78 @@ Several instance attributes control various aspects of wrapping: def _munge_whitespace(self, text: str) -> str: """_munge_whitespace(text : string) -> string -Munge whitespace in text: expand tabs and convert all other -whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz" -becomes " foo bar baz". -""" + Munge whitespace in text: expand tabs and convert all other + whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz" + becomes " foo bar baz". + """ + def _split(self, text: str) -> list[str]: """_split(text : string) -> [string] -Split the text to wrap into indivisible chunks. Chunks are -not quite the same as words; see _wrap_chunks() for full -details. As an example, the text - Look, goof-ball -- use the -b option! 
-breaks into the following chunks: - 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', - 'use', ' ', 'the', ' ', '-b', ' ', 'option!' -if break_on_hyphens is True, or in: - 'Look,', ' ', 'goof-ball', ' ', '--', ' ', - 'use', ' ', 'the', ' ', '-b', ' ', option!' -otherwise. -""" + Split the text to wrap into indivisible chunks. Chunks are + not quite the same as words; see _wrap_chunks() for full + details. As an example, the text + Look, goof-ball -- use the -b option! + breaks into the following chunks: + 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', 'option!' + if break_on_hyphens is True, or in: + 'Look,', ' ', 'goof-ball', ' ', '--', ' ', + 'use', ' ', 'the', ' ', '-b', ' ', option!' + otherwise. + """ + def _fix_sentence_endings(self, chunks: list[str]) -> None: """_fix_sentence_endings(chunks : [string]) -Correct for sentence endings buried in 'chunks'. Eg. when the -original text contains "... foo.\\nBar ...", munge_whitespace() -and split() will convert that to [..., "foo.", " ", "Bar", ...] -which has one too few spaces; this method simply changes the one -space to two. -""" + Correct for sentence endings buried in 'chunks'. Eg. when the + original text contains "... foo.\\nBar ...", munge_whitespace() + and split() will convert that to [..., "foo.", " ", "Bar", ...] + which has one too few spaces; this method simply changes the one + space to two. + """ + def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: """_handle_long_word(chunks : [string], - cur_line : [string], - cur_len : int, width : int) + cur_line : [string], + cur_len : int, width : int) + + Handle a chunk of text (most likely a word, not whitespace) that + is too long to fit in any line. + """ -Handle a chunk of text (most likely a word, not whitespace) that -is too long to fit in any line. -""" def _wrap_chunks(self, chunks: list[str]) -> list[str]: """_wrap_chunks(chunks : [string]) -> [string] -Wrap a sequence of text chunks and return a list of lines of -length 'self.width' or less. (If 'break_long_words' is false, -some lines may be longer than this.) Chunks correspond roughly -to words and the whitespace between them: each chunk is -indivisible (modulo 'break_long_words'), but a line break can -come between any two chunks. Chunks should not have internal -whitespace; ie. a chunk is either all whitespace or a "word". -Whitespace chunks will be removed from the beginning and end of -lines, but apart from that whitespace is preserved. -""" + Wrap a sequence of text chunks and return a list of lines of + length 'self.width' or less. (If 'break_long_words' is false, + some lines may be longer than this.) Chunks correspond roughly + to words and the whitespace between them: each chunk is + indivisible (modulo 'break_long_words'), but a line break can + come between any two chunks. Chunks should not have internal + whitespace; ie. a chunk is either all whitespace or a "word". + Whitespace chunks will be removed from the beginning and end of + lines, but apart from that whitespace is preserved. + """ + def _split_chunks(self, text: str) -> list[str]: ... def wrap(self, text: str) -> list[str]: """wrap(text : string) -> [string] -Reformat the single paragraph in 'text' so it fits in lines of -no more than 'self.width' columns, and return a list of wrapped -lines. Tabs in 'text' are expanded with string.expandtabs(), -and all other whitespace characters (including newline) are -converted to space. 
-""" + Reformat the single paragraph in 'text' so it fits in lines of + no more than 'self.width' columns, and return a list of wrapped + lines. Tabs in 'text' are expanded with string.expandtabs(), + and all other whitespace characters (including newline) are + converted to space. + """ + def fill(self, text: str) -> str: """fill(text : string) -> string -Reformat the single paragraph in 'text' to fit in lines of no -more than 'self.width' columns, and return a new string -containing the entire wrapped paragraph. -""" + Reformat the single paragraph in 'text' to fit in lines of no + more than 'self.width' columns, and return a new string + containing the entire wrapped paragraph. + """ def wrap( text: str, @@ -179,13 +186,14 @@ def wrap( ) -> list[str]: """Wrap a single paragraph of text, returning a list of wrapped lines. -Reformat the single paragraph in 'text' so it fits in lines of no -more than 'width' columns, and return a list of wrapped lines. By -default, tabs in 'text' are expanded with string.expandtabs(), and -all other whitespace characters (including newline) are converted to -space. See TextWrapper class for available keyword args to customize -wrapping behaviour. -""" + Reformat the single paragraph in 'text' so it fits in lines of no + more than 'width' columns, and return a list of wrapped lines. By + default, tabs in 'text' are expanded with string.expandtabs(), and + all other whitespace characters (including newline) are converted to + space. See TextWrapper class for available keyword args to customize + wrapping behaviour. + """ + def fill( text: str, width: int = 70, @@ -204,12 +212,13 @@ def fill( ) -> str: """Fill a single paragraph of text, returning a new string. -Reformat the single paragraph in 'text' to fit in lines of no more -than 'width' columns, and return a new string containing the entire -wrapped paragraph. As with wrap(), tabs are expanded and other -whitespace characters converted to space. See TextWrapper class for -available keyword args to customize wrapping behaviour. -""" + Reformat the single paragraph in 'text' to fit in lines of no more + than 'width' columns, and return a new string containing the entire + wrapped paragraph. As with wrap(), tabs are expanded and other + whitespace characters converted to space. See TextWrapper class for + available keyword args to customize wrapping behaviour. + """ + def shorten( text: str, width: int, @@ -228,33 +237,35 @@ def shorten( ) -> str: """Collapse and truncate the given text to fit in the given width. -The text first has its whitespace collapsed. If it then fits in -the *width*, it is returned as is. Otherwise, as many words -as possible are joined and then the placeholder is appended:: + The text first has its whitespace collapsed. If it then fits in + the *width*, it is returned as is. Otherwise, as many words + as possible are joined and then the placeholder is appended:: + + >>> textwrap.shorten("Hello world!", width=12) + 'Hello world!' + >>> textwrap.shorten("Hello world!", width=11) + 'Hello [...]' + """ - >>> textwrap.shorten("Hello world!", width=12) - 'Hello world!' - >>> textwrap.shorten("Hello world!", width=11) - 'Hello [...]' -""" def dedent(text: str) -> str: """Remove any common leading whitespace from every line in `text`. -This can be used to make triple-quoted strings line up with the left -edge of the display, while still presenting them in the source code -in indented form. 
+ This can be used to make triple-quoted strings line up with the left + edge of the display, while still presenting them in the source code + in indented form. + + Note that tabs and spaces are both treated as whitespace, but they + are not equal: the lines " hello" and "\\thello" are + considered to have no common leading whitespace. -Note that tabs and spaces are both treated as whitespace, but they -are not equal: the lines " hello" and "\\thello" are -considered to have no common leading whitespace. + Entirely blank lines are normalized to a newline character. + """ -Entirely blank lines are normalized to a newline character. -""" def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: """Adds 'prefix' to the beginning of selected lines in 'text'. -If 'predicate' is provided, 'prefix' will only be added to the lines -where 'predicate(line)' is True. If 'predicate' is not provided, -it will default to adding 'prefix' to all non-empty lines that do not -consist solely of whitespace characters. -""" + If 'predicate' is provided, 'prefix' will only be added to the lines + where 'predicate(line)' is True. If 'predicate' is not provided, + it will default to adding 'prefix' to all non-empty lines that do not + consist solely of whitespace characters. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi index 5d4befa99c2e0..95244ee608683 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/threading.pyi @@ -1,5 +1,5 @@ -"""Thread module emulating a subset of Java's threading model. -""" +"""Thread module emulating a subset of Java's threading model.""" + import _thread import sys from _thread import _excepthook, _ExceptHookArgs, get_native_id as get_native_id @@ -50,109 +50,117 @@ _profile_hook: ProfileFunction | None def active_count() -> int: """Return the number of Thread objects currently alive. -The returned count is equal to the length of the list returned by -enumerate(). + The returned count is equal to the length of the list returned by + enumerate(). + + """ -""" @deprecated("Deprecated since Python 3.10. Use `active_count()` instead.") def activeCount() -> int: """Return the number of Thread objects currently alive. -This function is deprecated, use active_count() instead. + This function is deprecated, use active_count() instead. + + """ -""" def current_thread() -> Thread: """Return the current Thread object, corresponding to the caller's thread of control. -If the caller's thread of control was not created through the threading -module, a dummy thread object with limited functionality is returned. + If the caller's thread of control was not created through the threading + module, a dummy thread object with limited functionality is returned. + + """ -""" @deprecated("Deprecated since Python 3.10. Use `current_thread()` instead.") def currentThread() -> Thread: """Return the current Thread object, corresponding to the caller's thread of control. -This function is deprecated, use current_thread() instead. + This function is deprecated, use current_thread() instead. + + """ -""" def get_ident() -> int: """Return a non-zero integer that uniquely identifies the current thread -amongst other threads that exist simultaneously. -This may be used to identify per-thread resources. 
-Even though on some platforms threads identities may appear to be -allocated consecutive numbers starting at 1, this behavior should not -be relied upon, and the number should be seen purely as a magic cookie. -A thread's identity may be reused for another thread after it exits. -""" + amongst other threads that exist simultaneously. + This may be used to identify per-thread resources. + Even though on some platforms threads identities may appear to be + allocated consecutive numbers starting at 1, this behavior should not + be relied upon, and the number should be seen purely as a magic cookie. + A thread's identity may be reused for another thread after it exits. + """ + def enumerate() -> list[Thread]: """Return a list of all Thread objects currently alive. -The list includes daemonic threads, dummy thread objects created by -current_thread(), and the main thread. It excludes terminated threads and -threads that have not yet been started. + The list includes daemonic threads, dummy thread objects created by + current_thread(), and the main thread. It excludes terminated threads and + threads that have not yet been started. + + """ -""" def main_thread() -> Thread: """Return the main thread object. -In normal conditions, the main thread is the thread from which the -Python interpreter was started. -""" + In normal conditions, the main thread is the thread from which the + Python interpreter was started. + """ + def settrace(func: TraceFunction) -> None: """Set a trace function for all threads started from the threading module. -The func will be passed to sys.settrace() for each thread, before its run() -method is called. -""" + The func will be passed to sys.settrace() for each thread, before its run() + method is called. + """ + def setprofile(func: ProfileFunction | None) -> None: """Set a profile function for all threads started from the threading module. -The func will be passed to sys.setprofile() for each thread, before its -run() method is called. -""" + The func will be passed to sys.setprofile() for each thread, before its + run() method is called. + """ if sys.version_info >= (3, 12): def setprofile_all_threads(func: ProfileFunction | None) -> None: """Set a profile function for all threads started from the threading module -and all Python threads that are currently executing. + and all Python threads that are currently executing. + + The func will be passed to sys.setprofile() for each thread, before its + run() method is called. + """ -The func will be passed to sys.setprofile() for each thread, before its -run() method is called. -""" def settrace_all_threads(func: TraceFunction) -> None: """Set a trace function for all threads started from the threading module -and all Python threads that are currently executing. + and all Python threads that are currently executing. -The func will be passed to sys.settrace() for each thread, before its run() -method is called. -""" + The func will be passed to sys.settrace() for each thread, before its run() + method is called. + """ if sys.version_info >= (3, 10): def gettrace() -> TraceFunction | None: - """Get the trace function as set by threading.settrace(). -""" + """Get the trace function as set by threading.settrace().""" + def getprofile() -> ProfileFunction | None: - """Get the profiler function as set by threading.setprofile(). -""" + """Get the profiler function as set by threading.setprofile().""" def stack_size(size: int = 0, /) -> int: """Return the thread stack size used when creating new threads. 
The -optional size argument specifies the stack size (in bytes) to be used -for subsequently created threads, and must be 0 (use platform or -configured default) or a positive integer value of at least 32,768 (32k). -If changing the thread stack size is unsupported, a ThreadError -exception is raised. If the specified size is invalid, a ValueError -exception is raised, and the stack size is unmodified. 32k bytes - currently the minimum supported stack size value to guarantee -sufficient stack space for the interpreter itself. - -Note that some platforms may have particular restrictions on values for -the stack size, such as requiring a minimum stack size larger than 32 KiB or -requiring allocation in multiples of the system memory page size -- platform documentation should be referred to for more information -(4 KiB pages are common; using multiples of 4096 for the stack size is -the suggested approach in the absence of more specific information). -""" + optional size argument specifies the stack size (in bytes) to be used + for subsequently created threads, and must be 0 (use platform or + configured default) or a positive integer value of at least 32,768 (32k). + If changing the thread stack size is unsupported, a ThreadError + exception is raised. If the specified size is invalid, a ValueError + exception is raised, and the stack size is unmodified. 32k bytes + currently the minimum supported stack size value to guarantee + sufficient stack space for the interpreter itself. + + Note that some platforms may have particular restrictions on values for + the stack size, such as requiring a minimum stack size larger than 32 KiB or + requiring allocation in multiples of the system memory page size + - platform documentation should be referred to for more information + (4 KiB pages are common; using multiples of 4096 for the stack size is + the suggested approach in the absence of more specific information). + """ TIMEOUT_MAX: Final[float] @@ -162,21 +170,22 @@ local = _thread._local class Thread: """A class that represents a thread of control. -This class can be safely subclassed in a limited fashion. There are two ways -to specify the activity: by passing a callable object to the constructor, or -by overriding the run() method in a subclass. + This class can be safely subclassed in a limited fashion. There are two ways + to specify the activity: by passing a callable object to the constructor, or + by overriding the run() method in a subclass. + + """ -""" name: str @property def ident(self) -> int | None: """Thread identifier of this thread or None if it has not been started. -This is a nonzero integer. See the get_ident() function. Thread -identifiers may be recycled when a thread exits and another thread is -created. The identifier is available even after the thread has exited. + This is a nonzero integer. See the get_ident() function. Thread + identifiers may be recycled when a thread exits and another thread is + created. The identifier is available even after the thread has exited. -""" + """ daemon: bool if sys.version_info >= (3, 14): def __init__( @@ -192,33 +201,33 @@ created. The identifier is available even after the thread has exited. ) -> None: """This constructor should always be called with keyword arguments. Arguments are: -*group* should be None; reserved for future extension when a ThreadGroup -class is implemented. + *group* should be None; reserved for future extension when a ThreadGroup + class is implemented. 
-*target* is the callable object to be invoked by the run() -method. Defaults to None, meaning nothing is called. + *target* is the callable object to be invoked by the run() + method. Defaults to None, meaning nothing is called. -*name* is the thread name. By default, a unique name is constructed of -the form "Thread-N" where N is a small decimal number. + *name* is the thread name. By default, a unique name is constructed of + the form "Thread-N" where N is a small decimal number. -*args* is a list or tuple of arguments for the target invocation. Defaults to (). + *args* is a list or tuple of arguments for the target invocation. Defaults to (). -*kwargs* is a dictionary of keyword arguments for the target -invocation. Defaults to {}. + *kwargs* is a dictionary of keyword arguments for the target + invocation. Defaults to {}. -*context* is the contextvars.Context value to use for the thread. -The default value is None, which means to check -sys.flags.thread_inherit_context. If that flag is true, use a copy -of the context of the caller. If false, use an empty context. To -explicitly start with an empty context, pass a new instance of -contextvars.Context(). To explicitly start with a copy of the current -context, pass the value from contextvars.copy_context(). + *context* is the contextvars.Context value to use for the thread. + The default value is None, which means to check + sys.flags.thread_inherit_context. If that flag is true, use a copy + of the context of the caller. If false, use an empty context. To + explicitly start with an empty context, pass a new instance of + contextvars.Context(). To explicitly start with a copy of the current + context, pass the value from contextvars.copy_context(). -If a subclass overrides the constructor, it must make sure to invoke -the base class constructor (Thread.__init__()) before doing anything -else to the thread. + If a subclass overrides the constructor, it must make sure to invoke + the base class constructor (Thread.__init__()) before doing anything + else to the thread. -""" + """ else: def __init__( self, @@ -232,113 +241,121 @@ else to the thread. ) -> None: """This constructor should always be called with keyword arguments. Arguments are: -*group* should be None; reserved for future extension when a ThreadGroup -class is implemented. + *group* should be None; reserved for future extension when a ThreadGroup + class is implemented. -*target* is the callable object to be invoked by the run() -method. Defaults to None, meaning nothing is called. + *target* is the callable object to be invoked by the run() + method. Defaults to None, meaning nothing is called. -*name* is the thread name. By default, a unique name is constructed of -the form "Thread-N" where N is a small decimal number. + *name* is the thread name. By default, a unique name is constructed of + the form "Thread-N" where N is a small decimal number. -*args* is a list or tuple of arguments for the target invocation. Defaults to (). + *args* is a list or tuple of arguments for the target invocation. Defaults to (). -*kwargs* is a dictionary of keyword arguments for the target -invocation. Defaults to {}. + *kwargs* is a dictionary of keyword arguments for the target + invocation. Defaults to {}. -If a subclass overrides the constructor, it must make sure to invoke -the base class constructor (Thread.__init__()) before doing anything -else to the thread. 
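A hedged illustration of the two ways the Thread constructor docstring describes for specifying the activity (the names used here are made up for the example and do not come from the stubs):

    import threading

    def worker(n: int) -> None:
        print(f"worker got {n}")

    # Pass a callable as *target*, with *args* for its positional arguments.
    t1 = threading.Thread(target=worker, args=(1,), name="target-thread")

    # Or override run() in a subclass; the base __init__ must still be invoked.
    class Squarer(threading.Thread):
        def __init__(self, n: int) -> None:
            super().__init__(name="subclass-thread")
            self.n = n

        def run(self) -> None:
            print(f"{self.n} squared is {self.n * self.n}")

    t2 = Squarer(3)
    for t in (t1, t2):
        t.start()
    for t in (t1, t2):
        t.join()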
+ If a subclass overrides the constructor, it must make sure to invoke + the base class constructor (Thread.__init__()) before doing anything + else to the thread. -""" + """ def start(self) -> None: """Start the thread's activity. -It must be called at most once per thread object. It arranges for the -object's run() method to be invoked in a separate thread of control. + It must be called at most once per thread object. It arranges for the + object's run() method to be invoked in a separate thread of control. -This method will raise a RuntimeError if called more than once on the -same thread object. + This method will raise a RuntimeError if called more than once on the + same thread object. + + """ -""" def run(self) -> None: """Method representing the thread's activity. -You may override this method in a subclass. The standard run() method -invokes the callable object passed to the object's constructor as the -target argument, if any, with sequential and keyword arguments taken -from the args and kwargs arguments, respectively. + You may override this method in a subclass. The standard run() method + invokes the callable object passed to the object's constructor as the + target argument, if any, with sequential and keyword arguments taken + from the args and kwargs arguments, respectively. + + """ -""" def join(self, timeout: float | None = None) -> None: """Wait until the thread terminates. -This blocks the calling thread until the thread whose join() method is -called terminates -- either normally or through an unhandled exception -or until the optional timeout occurs. + This blocks the calling thread until the thread whose join() method is + called terminates -- either normally or through an unhandled exception + or until the optional timeout occurs. + + When the timeout argument is present and not None, it should be a + floating-point number specifying a timeout for the operation in seconds + (or fractions thereof). As join() always returns None, you must call + is_alive() after join() to decide whether a timeout happened -- if the + thread is still alive, the join() call timed out. -When the timeout argument is present and not None, it should be a -floating-point number specifying a timeout for the operation in seconds -(or fractions thereof). As join() always returns None, you must call -is_alive() after join() to decide whether a timeout happened -- if the -thread is still alive, the join() call timed out. + When the timeout argument is not present or None, the operation will + block until the thread terminates. -When the timeout argument is not present or None, the operation will -block until the thread terminates. + A thread can be join()ed many times. -A thread can be join()ed many times. + join() raises a RuntimeError if an attempt is made to join the current + thread as that would cause a deadlock. It is also an error to join() a + thread before it has been started and attempts to do so raises the same + exception. -join() raises a RuntimeError if an attempt is made to join the current -thread as that would cause a deadlock. It is also an error to join() a -thread before it has been started and attempts to do so raises the same -exception. + """ -""" @property def native_id(self) -> int | None: # only available on some platforms """Native integral thread ID of this thread, or None if it has not been started. -This is a non-negative integer. See the get_native_id() function. -This represents the Thread ID as reported by the kernel. + This is a non-negative integer. 
See the get_native_id() function. + This represents the Thread ID as reported by the kernel. + + """ -""" def is_alive(self) -> bool: """Return whether the thread is alive. -This method returns True just before the run() method starts until just -after the run() method terminates. See also the module function -enumerate(). + This method returns True just before the run() method starts until just + after the run() method terminates. See also the module function + enumerate(). + + """ -""" @deprecated("Deprecated since Python 3.10. Read the `daemon` attribute instead.") def isDaemon(self) -> bool: """Return whether this thread is a daemon. -This method is deprecated, use the daemon attribute instead. + This method is deprecated, use the daemon attribute instead. + + """ -""" @deprecated("Deprecated since Python 3.10. Set the `daemon` attribute instead.") def setDaemon(self, daemonic: bool) -> None: """Set whether this thread is a daemon. -This method is deprecated, use the .daemon property instead. + This method is deprecated, use the .daemon property instead. + + """ -""" @deprecated("Deprecated since Python 3.10. Read the `name` attribute instead.") def getName(self) -> str: """Return a string used for identification purposes only. -This method is deprecated, use the name attribute instead. + This method is deprecated, use the name attribute instead. + + """ -""" @deprecated("Deprecated since Python 3.10. Set the `name` attribute instead.") def setName(self, name: str) -> None: """Set the name string for this thread. -This method is deprecated, use the name attribute instead. + This method is deprecated, use the name attribute instead. -""" + """ class _DummyThread(Thread): def __init__(self) -> None: ... @@ -351,76 +368,78 @@ Lock = _thread.LockType class _RLock: """This class implements reentrant lock objects. -A reentrant lock must be released by the thread that acquired it. Once a -thread has acquired a reentrant lock, the same thread may acquire it -again without blocking; the thread must release it once for each time it -has acquired it. + A reentrant lock must be released by the thread that acquired it. Once a + thread has acquired a reentrant lock, the same thread may acquire it + again without blocking; the thread must release it once for each time it + has acquired it. + + """ -""" _count: int def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: """Acquire a lock, blocking or non-blocking. -When invoked without arguments: if this thread already owns the lock, -increment the recursion level by one, and return immediately. Otherwise, -if another thread owns the lock, block until the lock is unlocked. Once -the lock is unlocked (not owned by any thread), then grab ownership, set -the recursion level to one, and return. If more than one thread is -blocked waiting until the lock is unlocked, only one at a time will be -able to grab ownership of the lock. There is no return value in this -case. - -When invoked with the blocking argument set to true, do the same thing -as when called without arguments, and return true. - -When invoked with the blocking argument set to false, do not block. If a -call without an argument would block, return false immediately; -otherwise, do the same thing as when called without arguments, and -return true. - -When invoked with the floating-point timeout argument set to a positive -value, block for at most the number of seconds specified by timeout -and as long as the lock cannot be acquired. 
Return true if the lock has -been acquired, false if the timeout has elapsed. - -""" + When invoked without arguments: if this thread already owns the lock, + increment the recursion level by one, and return immediately. Otherwise, + if another thread owns the lock, block until the lock is unlocked. Once + the lock is unlocked (not owned by any thread), then grab ownership, set + the recursion level to one, and return. If more than one thread is + blocked waiting until the lock is unlocked, only one at a time will be + able to grab ownership of the lock. There is no return value in this + case. + + When invoked with the blocking argument set to true, do the same thing + as when called without arguments, and return true. + + When invoked with the blocking argument set to false, do not block. If a + call without an argument would block, return false immediately; + otherwise, do the same thing as when called without arguments, and + return true. + + When invoked with the floating-point timeout argument set to a positive + value, block for at most the number of seconds specified by timeout + and as long as the lock cannot be acquired. Return true if the lock has + been acquired, false if the timeout has elapsed. + + """ + def release(self) -> None: """Release a lock, decrementing the recursion level. -If after the decrement it is zero, reset the lock to unlocked (not owned -by any thread), and if any other threads are blocked waiting for the -lock to become unlocked, allow exactly one of them to proceed. If after -the decrement the recursion level is still nonzero, the lock remains -locked and owned by the calling thread. + If after the decrement it is zero, reset the lock to unlocked (not owned + by any thread), and if any other threads are blocked waiting for the + lock to become unlocked, allow exactly one of them to proceed. If after + the decrement the recursion level is still nonzero, the lock remains + locked and owned by the calling thread. -Only call this method when the calling thread owns the lock. A -RuntimeError is raised if this method is called when the lock is -unlocked. + Only call this method when the calling thread owns the lock. A + RuntimeError is raised if this method is called when the lock is + unlocked. -There is no return value. + There is no return value. -""" + """ __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... if sys.version_info >= (3, 14): def locked(self) -> bool: - """Return whether this object is locked. -""" + """Return whether this object is locked.""" RLock = _thread.RLock # Actually a function at runtime. class Condition: """Class that implements a condition variable. -A condition variable allows one or more threads to wait until they are -notified by another thread. + A condition variable allows one or more threads to wait until they are + notified by another thread. + + If the lock argument is given and not None, it must be a Lock or RLock + object, and it is used as the underlying lock. Otherwise, a new RLock object + is created and used as the underlying lock. -If the lock argument is given and not None, it must be a Lock or RLock -object, and it is used as the underlying lock. Otherwise, a new RLock object -is created and used as the underlying lock. + """ -""" def __init__(self, lock: Lock | _RLock | RLock | None = None) -> None: ... def __enter__(self) -> bool: ... def __exit__( @@ -431,191 +450,202 @@ is created and used as the underlying lock. 
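The Condition docstrings here describe the wait/notify semantics in prose; a small producer/consumer sketch (illustrative only, assuming nothing beyond the documented API) shows the usual pattern:

    import threading
    from collections import deque

    items: deque[int] = deque()
    cond = threading.Condition()  # creates and uses an RLock by default

    def producer() -> None:
        for i in range(3):
            with cond:            # acquire the underlying lock
                items.append(i)
                cond.notify()     # wake one waiting thread, if any

    def consumer() -> None:
        for _ in range(3):
            with cond:
                cond.wait_for(lambda: items)  # lock is released while waiting
                print("consumed", items.popleft())

    threads = [threading.Thread(target=producer), threading.Thread(target=consumer)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()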
def wait(self, timeout: float | None = None) -> bool: """Wait until notified or until a timeout occurs. -If the calling thread has not acquired the lock when this method is -called, a RuntimeError is raised. + If the calling thread has not acquired the lock when this method is + called, a RuntimeError is raised. -This method releases the underlying lock, and then blocks until it is -awakened by a notify() or notify_all() call for the same condition -variable in another thread, or until the optional timeout occurs. Once -awakened or timed out, it re-acquires the lock and returns. + This method releases the underlying lock, and then blocks until it is + awakened by a notify() or notify_all() call for the same condition + variable in another thread, or until the optional timeout occurs. Once + awakened or timed out, it re-acquires the lock and returns. -When the timeout argument is present and not None, it should be a -floating-point number specifying a timeout for the operation in seconds -(or fractions thereof). + When the timeout argument is present and not None, it should be a + floating-point number specifying a timeout for the operation in seconds + (or fractions thereof). -When the underlying lock is an RLock, it is not released using its -release() method, since this may not actually unlock the lock when it -was acquired multiple times recursively. Instead, an internal interface -of the RLock class is used, which really unlocks it even when it has -been recursively acquired several times. Another internal interface is -then used to restore the recursion level when the lock is reacquired. + When the underlying lock is an RLock, it is not released using its + release() method, since this may not actually unlock the lock when it + was acquired multiple times recursively. Instead, an internal interface + of the RLock class is used, which really unlocks it even when it has + been recursively acquired several times. Another internal interface is + then used to restore the recursion level when the lock is reacquired. + + """ -""" def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: """Wait until a condition evaluates to True. -predicate should be a callable which result will be interpreted as a -boolean value. A timeout may be provided giving the maximum time to -wait. + predicate should be a callable which result will be interpreted as a + boolean value. A timeout may be provided giving the maximum time to + wait. + + """ -""" def notify(self, n: int = 1) -> None: """Wake up one or more threads waiting on this condition, if any. -If the calling thread has not acquired the lock when this method is -called, a RuntimeError is raised. + If the calling thread has not acquired the lock when this method is + called, a RuntimeError is raised. + + This method wakes up at most n of the threads waiting for the condition + variable; it is a no-op if no threads are waiting. -This method wakes up at most n of the threads waiting for the condition -variable; it is a no-op if no threads are waiting. + """ -""" def notify_all(self) -> None: """Wake up all threads waiting on this condition. -If the calling thread has not acquired the lock when this method -is called, a RuntimeError is raised. + If the calling thread has not acquired the lock when this method + is called, a RuntimeError is raised. + + """ -""" @deprecated("Deprecated since Python 3.10. Use `notify_all()` instead.") def notifyAll(self) -> None: """Wake up all threads waiting on this condition. 
-This method is deprecated, use notify_all() instead. + This method is deprecated, use notify_all() instead. -""" + """ class Semaphore: """This class implements semaphore objects. -Semaphores manage a counter representing the number of release() calls minus -the number of acquire() calls, plus an initial value. The acquire() method -blocks if necessary until it can return without making the counter -negative. If not given, value defaults to 1. + Semaphores manage a counter representing the number of release() calls minus + the number of acquire() calls, plus an initial value. The acquire() method + blocks if necessary until it can return without making the counter + negative. If not given, value defaults to 1. + + """ -""" _value: int def __init__(self, value: int = 1) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: """Acquire a semaphore, decrementing the internal counter by one. -When invoked without arguments: if the internal counter is larger than -zero on entry, decrement it by one and return immediately. If it is zero -on entry, block, waiting until some other thread has called release() to -make it larger than zero. This is done with proper interlocking so that -if multiple acquire() calls are blocked, release() will wake exactly one -of them up. The implementation may pick one at random, so the order in -which blocked threads are awakened should not be relied on. There is no -return value in this case. + When invoked without arguments: if the internal counter is larger than + zero on entry, decrement it by one and return immediately. If it is zero + on entry, block, waiting until some other thread has called release() to + make it larger than zero. This is done with proper interlocking so that + if multiple acquire() calls are blocked, release() will wake exactly one + of them up. The implementation may pick one at random, so the order in + which blocked threads are awakened should not be relied on. There is no + return value in this case. + + When invoked with blocking set to true, do the same thing as when called + without arguments, and return true. -When invoked with blocking set to true, do the same thing as when called -without arguments, and return true. + When invoked with blocking set to false, do not block. If a call without + an argument would block, return false immediately; otherwise, do the + same thing as when called without arguments, and return true. -When invoked with blocking set to false, do not block. If a call without -an argument would block, return false immediately; otherwise, do the -same thing as when called without arguments, and return true. + When invoked with a timeout other than None, it will block for at + most timeout seconds. If acquire does not complete successfully in + that interval, return false. Return true otherwise. -When invoked with a timeout other than None, it will block for at -most timeout seconds. If acquire does not complete successfully in -that interval, return false. Return true otherwise. + """ -""" def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: """Acquire a semaphore, decrementing the internal counter by one. -When invoked without arguments: if the internal counter is larger than -zero on entry, decrement it by one and return immediately. 
If it is zero -on entry, block, waiting until some other thread has called release() to -make it larger than zero. This is done with proper interlocking so that -if multiple acquire() calls are blocked, release() will wake exactly one -of them up. The implementation may pick one at random, so the order in -which blocked threads are awakened should not be relied on. There is no -return value in this case. + When invoked without arguments: if the internal counter is larger than + zero on entry, decrement it by one and return immediately. If it is zero + on entry, block, waiting until some other thread has called release() to + make it larger than zero. This is done with proper interlocking so that + if multiple acquire() calls are blocked, release() will wake exactly one + of them up. The implementation may pick one at random, so the order in + which blocked threads are awakened should not be relied on. There is no + return value in this case. -When invoked with blocking set to true, do the same thing as when called -without arguments, and return true. + When invoked with blocking set to true, do the same thing as when called + without arguments, and return true. -When invoked with blocking set to false, do not block. If a call without -an argument would block, return false immediately; otherwise, do the -same thing as when called without arguments, and return true. + When invoked with blocking set to false, do not block. If a call without + an argument would block, return false immediately; otherwise, do the + same thing as when called without arguments, and return true. -When invoked with a timeout other than None, it will block for at -most timeout seconds. If acquire does not complete successfully in -that interval, return false. Return true otherwise. + When invoked with a timeout other than None, it will block for at + most timeout seconds. If acquire does not complete successfully in + that interval, return false. Return true otherwise. + + """ -""" def release(self, n: int = 1) -> None: """Release a semaphore, incrementing the internal counter by one or more. -When the counter is zero on entry and another thread is waiting for it -to become larger than zero again, wake up that thread. + When the counter is zero on entry and another thread is waiting for it + to become larger than zero again, wake up that thread. -""" + """ class BoundedSemaphore(Semaphore): """Implements a bounded semaphore. -A bounded semaphore checks to make sure its current value doesn't exceed its -initial value. If it does, ValueError is raised. In most situations -semaphores are used to guard resources with limited capacity. + A bounded semaphore checks to make sure its current value doesn't exceed its + initial value. If it does, ValueError is raised. In most situations + semaphores are used to guard resources with limited capacity. -If the semaphore is released too many times it's a sign of a bug. If not -given, value defaults to 1. + If the semaphore is released too many times it's a sign of a bug. If not + given, value defaults to 1. -Like regular semaphores, bounded semaphores manage a counter representing -the number of release() calls minus the number of acquire() calls, plus an -initial value. The acquire() method blocks if necessary until it can return -without making the counter negative. If not given, value defaults to 1. + Like regular semaphores, bounded semaphores manage a counter representing + the number of release() calls minus the number of acquire() calls, plus an + initial value. 
The acquire() method blocks if necessary until it can return + without making the counter negative. If not given, value defaults to 1. -""" + """ class Event: """Class implementing event objects. -Events manage a flag that can be set to true with the set() method and reset -to false with the clear() method. The wait() method blocks until the flag is -true. The flag is initially false. + Events manage a flag that can be set to true with the set() method and reset + to false with the clear() method. The wait() method blocks until the flag is + true. The flag is initially false. + + """ -""" def is_set(self) -> bool: - """Return true if and only if the internal flag is true. -""" + """Return true if and only if the internal flag is true.""" + @deprecated("Deprecated since Python 3.10. Use `is_set()` instead.") def isSet(self) -> bool: """Return true if and only if the internal flag is true. -This method is deprecated, use is_set() instead. + This method is deprecated, use is_set() instead. + + """ -""" def set(self) -> None: """Set the internal flag to true. -All threads waiting for it to become true are awakened. Threads -that call wait() once the flag is true will not block at all. + All threads waiting for it to become true are awakened. Threads + that call wait() once the flag is true will not block at all. + + """ -""" def clear(self) -> None: """Reset the internal flag to false. -Subsequently, threads calling wait() will block until set() is called to -set the internal flag to true again. + Subsequently, threads calling wait() will block until set() is called to + set the internal flag to true again. + + """ -""" def wait(self, timeout: float | None = None) -> bool: """Block until the internal flag is true. -If the internal flag is true on entry, return immediately. Otherwise, -block until another thread calls set() to set the flag to true, or until -the optional timeout occurs. + If the internal flag is true on entry, return immediately. Otherwise, + block until another thread calls set() to set the flag to true, or until + the optional timeout occurs. -When the timeout argument is present and not None, it should be a -floating-point number specifying a timeout for the operation in seconds -(or fractions thereof). + When the timeout argument is present and not None, it should be a + floating-point number specifying a timeout for the operation in seconds + (or fractions thereof). -This method returns the internal flag on exit, so it will always return -True except if a timeout is given and the operation times out. + This method returns the internal flag on exit, so it will always return + True except if a timeout is given and the operation times out. -""" + """ excepthook = _excepthook ExceptHookArgs = _ExceptHookArgs @@ -623,11 +653,12 @@ ExceptHookArgs = _ExceptHookArgs class Timer(Thread): """Call a function after a specified number of seconds: -t = Timer(30.0, f, args=None, kwargs=None) -t.start() -t.cancel() # stop the timer's action if it's still waiting + t = Timer(30.0, f, args=None, kwargs=None) + t.start() + t.cancel() # stop the timer's action if it's still waiting + + """ -""" args: Iterable[Any] # undocumented finished: Event # undocumented function: Callable[..., Any] # undocumented @@ -642,60 +673,63 @@ t.cancel() # stop the timer's action if it's still waiting kwargs: Mapping[str, Any] | None = None, ) -> None: ... def cancel(self) -> None: - """Stop the timer if it hasn't finished yet. 
-""" + """Stop the timer if it hasn't finished yet.""" class Barrier: """Implements a Barrier. -Useful for synchronizing a fixed number of threads at known synchronization -points. Threads block on 'wait()' and are simultaneously awoken once they -have all made that call. + Useful for synchronizing a fixed number of threads at known synchronization + points. Threads block on 'wait()' and are simultaneously awoken once they + have all made that call. + + """ -""" @property def parties(self) -> int: - """Return the number of threads required to trip the barrier. -""" + """Return the number of threads required to trip the barrier.""" + @property def n_waiting(self) -> int: - """Return the number of threads currently waiting at the barrier. -""" + """Return the number of threads currently waiting at the barrier.""" + @property def broken(self) -> bool: - """Return True if the barrier is in a broken state. -""" + """Return True if the barrier is in a broken state.""" + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: """Create a barrier, initialised to 'parties' threads. -'action' is a callable which, when supplied, will be called by one of -the threads after they have all entered the barrier and just prior to -releasing them all. If a 'timeout' is provided, it is used as the -default for all subsequent 'wait()' calls. + 'action' is a callable which, when supplied, will be called by one of + the threads after they have all entered the barrier and just prior to + releasing them all. If a 'timeout' is provided, it is used as the + default for all subsequent 'wait()' calls. + + """ -""" def wait(self, timeout: float | None = None) -> int: """Wait for the barrier. -When the specified number of threads have started waiting, they are all -simultaneously awoken. If an 'action' was provided for the barrier, one -of the threads will have executed that callback prior to returning. -Returns an individual index number from 0 to 'parties-1'. + When the specified number of threads have started waiting, they are all + simultaneously awoken. If an 'action' was provided for the barrier, one + of the threads will have executed that callback prior to returning. + Returns an individual index number from 0 to 'parties-1'. + + """ -""" def reset(self) -> None: """Reset the barrier to the initial state. -Any threads currently waiting will get the BrokenBarrier exception -raised. + Any threads currently waiting will get the BrokenBarrier exception + raised. + + """ -""" def abort(self) -> None: """Place the barrier into a 'broken' state. -Useful in case of error. Any currently waiting threads and threads -attempting to 'wait()' will have BrokenBarrierError raised. + Useful in case of error. Any currently waiting threads and threads + attempting to 'wait()' will have BrokenBarrierError raised. -""" + """ class BrokenBarrierError(RuntimeError): ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi index 58f1978ef3ea9..a9e363b9f60a6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/time.pyi @@ -21,6 +21,7 @@ If the DST flag is 0, the time is given in the regular time zone; if it is 1, the time is given in the DST time zone; if it is -1, mktime() should guess based on the date and time. 
""" + import sys from _typeshed import structseq from typing import Any, Final, Literal, Protocol, final, type_check_only @@ -64,183 +65,190 @@ if sys.platform == "linux": @final class struct_time(structseq[Any | int], _TimeTuple): """The time value as returned by gmtime(), localtime(), and strptime(), and - accepted by asctime(), mktime() and strftime(). May be considered as a - sequence of 9 integers. + accepted by asctime(), mktime() and strftime(). May be considered as a + sequence of 9 integers. + + Note that several fields' values are not the same as those defined by + the C language standard for struct tm. For example, the value of the + field tm_year is the actual year, not year - 1900. See individual + fields' descriptions for details. + """ - Note that several fields' values are not the same as those defined by - the C language standard for struct tm. For example, the value of the - field tm_year is the actual year, not year - 1900. See individual - fields' descriptions for details. -""" if sys.version_info >= (3, 10): __match_args__: Final = ("tm_year", "tm_mon", "tm_mday", "tm_hour", "tm_min", "tm_sec", "tm_wday", "tm_yday", "tm_isdst") @property def tm_year(self) -> int: - """year, for example, 1993 -""" + """year, for example, 1993""" + @property def tm_mon(self) -> int: - """month of year, range [1, 12] -""" + """month of year, range [1, 12]""" + @property def tm_mday(self) -> int: - """day of month, range [1, 31] -""" + """day of month, range [1, 31]""" + @property def tm_hour(self) -> int: - """hours, range [0, 23] -""" + """hours, range [0, 23]""" + @property def tm_min(self) -> int: - """minutes, range [0, 59] -""" + """minutes, range [0, 59]""" + @property def tm_sec(self) -> int: - """seconds, range [0, 61]) -""" + """seconds, range [0, 61])""" + @property def tm_wday(self) -> int: - """day of week, range [0, 6], Monday is 0 -""" + """day of week, range [0, 6], Monday is 0""" + @property def tm_yday(self) -> int: - """day of year, range [1, 366] -""" + """day of year, range [1, 366]""" + @property def tm_isdst(self) -> int: - """1 if summer time is in effect, 0 if not, and -1 if unknown -""" + """1 if summer time is in effect, 0 if not, and -1 if unknown""" # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. @property def tm_zone(self) -> str: - """abbreviation of timezone name -""" + """abbreviation of timezone name""" + @property def tm_gmtoff(self) -> int: - """offset from UTC in seconds -""" + """offset from UTC in seconds""" def asctime(time_tuple: _TimeTuple | struct_time = ..., /) -> str: """asctime([tuple]) -> string -Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'. -When the time tuple is not present, current time as returned by localtime() -is used. -""" + Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'. + When the time tuple is not present, current time as returned by localtime() + is used. + """ + def ctime(seconds: float | None = None, /) -> str: """ctime(seconds) -> string -Convert a time in seconds since the Epoch to a string in local time. -This is equivalent to asctime(localtime(seconds)). When the time tuple is -not present, current time as returned by localtime() is used. -""" + Convert a time in seconds since the Epoch to a string in local time. + This is equivalent to asctime(localtime(seconds)). When the time tuple is + not present, current time as returned by localtime() is used. 
+ """ + def gmtime(seconds: float | None = None, /) -> struct_time: """gmtime([seconds]) -> (tm_year, tm_mon, tm_mday, tm_hour, tm_min, - tm_sec, tm_wday, tm_yday, tm_isdst) + tm_sec, tm_wday, tm_yday, tm_isdst) -Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a. -GMT). When 'seconds' is not passed in, convert the current time instead. + Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a. + GMT). When 'seconds' is not passed in, convert the current time instead. + + If the platform supports the tm_gmtoff and tm_zone, they are available as + attributes only. + """ -If the platform supports the tm_gmtoff and tm_zone, they are available as -attributes only. -""" def localtime(seconds: float | None = None, /) -> struct_time: """localtime([seconds]) -> (tm_year,tm_mon,tm_mday,tm_hour,tm_min, - tm_sec,tm_wday,tm_yday,tm_isdst) + tm_sec,tm_wday,tm_yday,tm_isdst) + + Convert seconds since the Epoch to a time tuple expressing local time. + When 'seconds' is not passed in, convert the current time instead. + """ -Convert seconds since the Epoch to a time tuple expressing local time. -When 'seconds' is not passed in, convert the current time instead. -""" def mktime(time_tuple: _TimeTuple | struct_time, /) -> float: """mktime(tuple) -> floating-point number -Convert a time tuple in local time to seconds since the Epoch. -Note that mktime(gmtime(0)) will not generally return zero for most -time zones; instead the returned value will either be equal to that -of the timezone or altzone attributes on the time module. -""" + Convert a time tuple in local time to seconds since the Epoch. + Note that mktime(gmtime(0)) will not generally return zero for most + time zones; instead the returned value will either be equal to that + of the timezone or altzone attributes on the time module. + """ + def sleep(seconds: float, /) -> None: """sleep(seconds) -Delay execution for a given number of seconds. The argument may be -a floating-point number for subsecond precision. -""" + Delay execution for a given number of seconds. The argument may be + a floating-point number for subsecond precision. + """ + def strftime(format: str, time_tuple: _TimeTuple | struct_time = ..., /) -> str: """strftime(format[, tuple]) -> string -Convert a time tuple to a string according to a format specification. -See the library reference manual for formatting codes. When the time tuple -is not present, current time as returned by localtime() is used. - -Commonly used format codes: - -%Y Year with century as a decimal number. -%m Month as a decimal number [01,12]. -%d Day of the month as a decimal number [01,31]. -%H Hour (24-hour clock) as a decimal number [00,23]. -%M Minute as a decimal number [00,59]. -%S Second as a decimal number [00,61]. -%z Time zone offset from UTC. -%a Locale's abbreviated weekday name. -%A Locale's full weekday name. -%b Locale's abbreviated month name. -%B Locale's full month name. -%c Locale's appropriate date and time representation. -%I Hour (12-hour clock) as a decimal number [01,12]. -%p Locale's equivalent of either AM or PM. - -Other codes may be available on your platform. See documentation for -the C library strftime function. -""" + Convert a time tuple to a string according to a format specification. + See the library reference manual for formatting codes. When the time tuple + is not present, current time as returned by localtime() is used. + + Commonly used format codes: + + %Y Year with century as a decimal number. + %m Month as a decimal number [01,12]. 
+ %d Day of the month as a decimal number [01,31]. + %H Hour (24-hour clock) as a decimal number [00,23]. + %M Minute as a decimal number [00,59]. + %S Second as a decimal number [00,61]. + %z Time zone offset from UTC. + %a Locale's abbreviated weekday name. + %A Locale's full weekday name. + %b Locale's abbreviated month name. + %B Locale's full month name. + %c Locale's appropriate date and time representation. + %I Hour (12-hour clock) as a decimal number [01,12]. + %p Locale's equivalent of either AM or PM. + + Other codes may be available on your platform. See documentation for + the C library strftime function. + """ + def strptime(data_string: str, format: str = "%a %b %d %H:%M:%S %Y", /) -> struct_time: """strptime(string, format) -> struct_time -Parse a string to a time tuple according to a format specification. -See the library reference manual for formatting codes (same as -strftime()). - -Commonly used format codes: - -%Y Year with century as a decimal number. -%m Month as a decimal number [01,12]. -%d Day of the month as a decimal number [01,31]. -%H Hour (24-hour clock) as a decimal number [00,23]. -%M Minute as a decimal number [00,59]. -%S Second as a decimal number [00,61]. -%z Time zone offset from UTC. -%a Locale's abbreviated weekday name. -%A Locale's full weekday name. -%b Locale's abbreviated month name. -%B Locale's full month name. -%c Locale's appropriate date and time representation. -%I Hour (12-hour clock) as a decimal number [01,12]. -%p Locale's equivalent of either AM or PM. - -Other codes may be available on your platform. See documentation for -the C library strftime function. -""" + Parse a string to a time tuple according to a format specification. + See the library reference manual for formatting codes (same as + strftime()). + + Commonly used format codes: + + %Y Year with century as a decimal number. + %m Month as a decimal number [01,12]. + %d Day of the month as a decimal number [01,31]. + %H Hour (24-hour clock) as a decimal number [00,23]. + %M Minute as a decimal number [00,59]. + %S Second as a decimal number [00,61]. + %z Time zone offset from UTC. + %a Locale's abbreviated weekday name. + %A Locale's full weekday name. + %b Locale's abbreviated month name. + %B Locale's full month name. + %c Locale's appropriate date and time representation. + %I Hour (12-hour clock) as a decimal number [01,12]. + %p Locale's equivalent of either AM or PM. + + Other codes may be available on your platform. See documentation for + the C library strftime function. + """ + def time() -> float: """time() -> floating-point number -Return the current time in seconds since the Epoch. -Fractions of a second may be present if the system clock provides them. -""" + Return the current time in seconds since the Epoch. + Fractions of a second may be present if the system clock provides them. + """ if sys.platform != "win32": def tzset() -> None: # Unix only """tzset() -Initialize, or reinitialize, the local timezone to the value stored in -os.environ['TZ']. The TZ environment variable should be specified in -standard Unix timezone format as documented in the tzset man page -(eg. 'US/Eastern', 'Europe/Amsterdam'). Unknown timezones will silently -fall back to UTC. If the TZ environment variable is not set, the local -timezone is set to the systems best guess of wallclock time. -Changing the TZ environment variable without calling tzset *may* change -the local timezone used by methods such as localtime, but this behaviour -should not be relied on. 
-""" + Initialize, or reinitialize, the local timezone to the value stored in + os.environ['TZ']. The TZ environment variable should be specified in + standard Unix timezone format as documented in the tzset man page + (eg. 'US/Eastern', 'Europe/Amsterdam'). Unknown timezones will silently + fall back to UTC. If the TZ environment variable is not set, the local + timezone is set to the systems best guess of wallclock time. + Changing the TZ environment variable without calling tzset *may* change + the local timezone used by methods such as localtime, but this behaviour + should not be relied on. + """ @type_check_only class _ClockInfo(Protocol): @@ -252,85 +260,94 @@ class _ClockInfo(Protocol): def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"], /) -> _ClockInfo: """get_clock_info(name: str) -> dict -Get information of the specified clock. -""" + Get information of the specified clock. + """ + def monotonic() -> float: """monotonic() -> float -Monotonic clock, cannot go backward. -""" + Monotonic clock, cannot go backward. + """ + def perf_counter() -> float: """perf_counter() -> float -Performance counter for benchmarking. -""" + Performance counter for benchmarking. + """ + def process_time() -> float: """process_time() -> float -Process time for profiling: sum of the kernel and user-space CPU time. -""" + Process time for profiling: sum of the kernel and user-space CPU time. + """ if sys.platform != "win32": def clock_getres(clk_id: int, /) -> float: # Unix only """clock_getres(clk_id) -> floating-point number -Return the resolution (precision) of the specified clock clk_id. -""" + Return the resolution (precision) of the specified clock clk_id. + """ + def clock_gettime(clk_id: int, /) -> float: # Unix only - """Return the time of the specified clock clk_id as a float. -""" + """Return the time of the specified clock clk_id as a float.""" + def clock_settime(clk_id: int, time: float, /) -> None: # Unix only """clock_settime(clk_id, time) -Set the time of the specified clock clk_id. -""" + Set the time of the specified clock clk_id. + """ if sys.platform != "win32": def clock_gettime_ns(clk_id: int, /) -> int: - """Return the time of the specified clock clk_id as nanoseconds (int). -""" + """Return the time of the specified clock clk_id as nanoseconds (int).""" + def clock_settime_ns(clock_id: int, time: int, /) -> int: """clock_settime_ns(clk_id, time) -Set the time of the specified clock clk_id with nanoseconds. -""" + Set the time of the specified clock clk_id with nanoseconds. + """ if sys.platform == "linux": def pthread_getcpuclockid(thread_id: int, /) -> int: """pthread_getcpuclockid(thread_id) -> int -Return the clk_id of a thread's CPU time clock. -""" + Return the clk_id of a thread's CPU time clock. + """ def monotonic_ns() -> int: """monotonic_ns() -> int -Monotonic clock, cannot go backward, as nanoseconds. -""" + Monotonic clock, cannot go backward, as nanoseconds. + """ + def perf_counter_ns() -> int: """perf_counter_ns() -> int -Performance counter for benchmarking as nanoseconds. -""" + Performance counter for benchmarking as nanoseconds. + """ + def process_time_ns() -> int: """process_time() -> int -Process time for profiling as nanoseconds: -sum of the kernel and user-space CPU time. -""" + Process time for profiling as nanoseconds: + sum of the kernel and user-space CPU time. + """ + def time_ns() -> int: """time_ns() -> int -Return the current time in nanoseconds since the Epoch. 
-""" + Return the current time in nanoseconds since the Epoch. + """ + def thread_time() -> float: """thread_time() -> float -Thread time for profiling: sum of the kernel and user-space CPU time. -""" + Thread time for profiling: sum of the kernel and user-space CPU time. + """ + def thread_time_ns() -> int: """thread_time() -> int -Thread time for profiling as nanoseconds: -sum of the kernel and user-space CPU time. -""" + Thread time for profiling as nanoseconds: + sum of the kernel and user-space CPU time. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi index 8f2c18dbc24d8..a24cf2c71bbb0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/timeit.pyi @@ -45,6 +45,7 @@ Functions: repeat(string, string) -> list default_timer() -> float """ + from collections.abc import Callable, Sequence from typing import IO, Any from typing_extensions import TypeAlias @@ -59,88 +60,92 @@ default_timer: _Timer class Timer: """Class for timing execution speed of small code snippets. -The constructor takes a statement to be timed, an additional -statement used for setup, and a timer function. Both statements -default to 'pass'; the timer function is platform-dependent (see -module doc string). If 'globals' is specified, the code will be -executed within that namespace (as opposed to inside timeit's -namespace). + The constructor takes a statement to be timed, an additional + statement used for setup, and a timer function. Both statements + default to 'pass'; the timer function is platform-dependent (see + module doc string). If 'globals' is specified, the code will be + executed within that namespace (as opposed to inside timeit's + namespace). -To measure the execution time of the first statement, use the -timeit() method. The repeat() method is a convenience to call -timeit() multiple times and return a list of results. + To measure the execution time of the first statement, use the + timeit() method. The repeat() method is a convenience to call + timeit() multiple times and return a list of results. + + The statements may contain newlines, as long as they don't contain + multi-line string literals. + """ -The statements may contain newlines, as long as they don't contain -multi-line string literals. -""" def __init__( self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None ) -> None: - """Constructor. See class doc string. -""" + """Constructor. See class doc string.""" + def print_exc(self, file: IO[str] | None = None) -> None: """Helper to print a traceback from the timed code. -Typical use: + Typical use: - t = Timer(...) # outside the try/except - try: - t.timeit(...) # or t.repeat(...) - except: - t.print_exc() + t = Timer(...) # outside the try/except + try: + t.timeit(...) # or t.repeat(...) + except: + t.print_exc() -The advantage over the standard traceback is that source lines -in the compiled template will be displayed. + The advantage over the standard traceback is that source lines + in the compiled template will be displayed. + + The optional file argument directs where the traceback is + sent; it defaults to sys.stderr. + """ -The optional file argument directs where the traceback is -sent; it defaults to sys.stderr. -""" def timeit(self, number: int = 1000000) -> float: """Time 'number' executions of the main statement. 
-To be precise, this executes the setup statement once, and -then returns the time it takes to execute the main statement -a number of times, as float seconds if using the default timer. The -argument is the number of times through the loop, defaulting -to one million. The main statement, the setup statement and -the timer function to be used are passed to the constructor. -""" + To be precise, this executes the setup statement once, and + then returns the time it takes to execute the main statement + a number of times, as float seconds if using the default timer. The + argument is the number of times through the loop, defaulting + to one million. The main statement, the setup statement and + the timer function to be used are passed to the constructor. + """ + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: """Call timeit() a few times. -This is a convenience function that calls the timeit() -repeatedly, returning a list of results. The first argument -specifies how many times to call timeit(), defaulting to 5; -the second argument specifies the timer argument, defaulting -to one million. - -Note: it's tempting to calculate mean and standard deviation -from the result vector and report these. However, this is not -very useful. In a typical case, the lowest value gives a -lower bound for how fast your machine can run the given code -snippet; higher values in the result vector are typically not -caused by variability in Python's speed, but by other -processes interfering with your timing accuracy. So the min() -of the result is probably the only number you should be -interested in. After that, you should look at the entire -vector and apply common sense rather than statistics. -""" + This is a convenience function that calls the timeit() + repeatedly, returning a list of results. The first argument + specifies how many times to call timeit(), defaulting to 5; + the second argument specifies the timer argument, defaulting + to one million. + + Note: it's tempting to calculate mean and standard deviation + from the result vector and report these. However, this is not + very useful. In a typical case, the lowest value gives a + lower bound for how fast your machine can run the given code + snippet; higher values in the result vector are typically not + caused by variability in Python's speed, but by other + processes interfering with your timing accuracy. So the min() + of the result is probably the only number you should be + interested in. After that, you should look at the entire + vector and apply common sense rather than statistics. + """ + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: """Return the number of loops and time taken so that total time >= 0.2. -Calls the timeit method with increasing numbers from the sequence -1, 2, 5, 10, 20, 50, ... until the time taken is at least 0.2 -second. Returns (number, time_taken). + Calls the timeit method with increasing numbers from the sequence + 1, 2, 5, 10, 20, 50, ... until the time taken is at least 0.2 + second. Returns (number, time_taken). -If *callback* is given and is not None, it will be called after -each trial with two arguments: ``callback(number, time_taken)``. -""" + If *callback* is given and is not None, it will be called after + each trial with two arguments: ``callback(number, time_taken)``. 
+ """ def timeit( stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None ) -> float: - """Convenience function to create Timer object and call timeit method. -""" + """Convenience function to create Timer object and call timeit method.""" + def repeat( stmt: _Stmt = "pass", setup: _Stmt = "pass", @@ -149,22 +154,22 @@ def repeat( number: int = 1000000, globals: dict[str, Any] | None = None, ) -> list[float]: - """Convenience function to create Timer object and call repeat method. -""" + """Convenience function to create Timer object and call repeat method.""" + def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: """Main program, used when run as a script. -The optional 'args' argument specifies the command line to be parsed, -defaulting to sys.argv[1:]. + The optional 'args' argument specifies the command line to be parsed, + defaulting to sys.argv[1:]. -The return value is an exit code to be passed to sys.exit(); it -may be None to indicate success. + The return value is an exit code to be passed to sys.exit(); it + may be None to indicate success. -When an exception happens during timing, a traceback is printed to -stderr and the return value is 1. Exceptions at other times -(including the template compilation) are not caught. + When an exception happens during timing, a traceback is printed to + stderr and the return value is 1. Exceptions at other times + (including the template compilation) are not caught. -'_wrap_timer' is an internal interface used for unit testing. If it -is not None, it must be a callable that accepts a timer function -and returns another timer function (used for unit testing). -""" + '_wrap_timer' is an internal interface used for unit testing. If it + is not None, it must be a callable that accepts a timer function + and returns another timer function (used for unit testing). + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 98676ea6a508a..1f31c1fbb4d92 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -29,6 +29,7 @@ button = tkinter.Button(frame,text="Exit",command=tk.destroy) button.pack(side=BOTTOM) tk.mainloop() """ + import _tkinter import sys from _typeshed import Incomplete, MaybeNone, StrOrBytesPath @@ -224,8 +225,8 @@ if sys.version_info >= (3, 11): if sys.version_info >= (3, 11): class EventType(StrEnum): - """An enumeration. -""" + """An enumeration.""" + Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -268,8 +269,8 @@ if sys.version_info >= (3, 11): else: class EventType(str, Enum): - """An enumeration. -""" + """An enumeration.""" + Activate = "36" ButtonPress = "4" Button = ButtonPress @@ -317,45 +318,46 @@ _W_co = TypeVar("_W_co", covariant=True, bound=Misc, default=Misc) class Event(Generic[_W_co]): """Container for the properties of an event. -Instances of this type are generated if one of the following events occurs: - -KeyPress, KeyRelease - for keyboard events -ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events -Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate, -Colormap, Gravity, Reparent, Property, Destroy, Activate, -Deactivate - for window events. 
- -If a callback function for one of these events is registered -using bind, bind_all, bind_class, or tag_bind, the callback is -called with an Event as first argument. It will have the -following attributes (in braces are the event types for which -the attribute is valid): - - serial - serial number of event -num - mouse button pressed (ButtonPress, ButtonRelease) -focus - whether the window has the focus (Enter, Leave) -height - height of the exposed window (Configure, Expose) -width - width of the exposed window (Configure, Expose) -keycode - keycode of the pressed key (KeyPress, KeyRelease) -state - state of the event as a number (ButtonPress, ButtonRelease, - Enter, KeyPress, KeyRelease, - Leave, Motion) -state - state as a string (Visibility) -time - when the event occurred -x - x-position of the mouse -y - y-position of the mouse -x_root - x-position of the mouse on the screen - (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) -y_root - y-position of the mouse on the screen - (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) -char - pressed character (KeyPress, KeyRelease) -send_event - see X/Windows documentation -keysym - keysym of the event as a string (KeyPress, KeyRelease) -keysym_num - keysym of the event as a number (KeyPress, KeyRelease) -type - type of the event as a number -widget - widget in which the event occurred -delta - delta of wheel movement (MouseWheel) -""" + Instances of this type are generated if one of the following events occurs: + + KeyPress, KeyRelease - for keyboard events + ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events + Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate, + Colormap, Gravity, Reparent, Property, Destroy, Activate, + Deactivate - for window events. + + If a callback function for one of these events is registered + using bind, bind_all, bind_class, or tag_bind, the callback is + called with an Event as first argument. It will have the + following attributes (in braces are the event types for which + the attribute is valid): + + serial - serial number of event + num - mouse button pressed (ButtonPress, ButtonRelease) + focus - whether the window has the focus (Enter, Leave) + height - height of the exposed window (Configure, Expose) + width - width of the exposed window (Configure, Expose) + keycode - keycode of the pressed key (KeyPress, KeyRelease) + state - state of the event as a number (ButtonPress, ButtonRelease, + Enter, KeyPress, KeyRelease, + Leave, Motion) + state - state as a string (Visibility) + time - when the event occurred + x - x-position of the mouse + y - y-position of the mouse + x_root - x-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) + y_root - y-position of the mouse on the screen + (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion) + char - pressed character (KeyPress, KeyRelease) + send_event - see X/Windows documentation + keysym - keysym of the event as a string (KeyPress, KeyRelease) + keysym_num - keysym of the event as a number (KeyPress, KeyRelease) + type - type of the event as a number + widget - widget in which the event occurred + delta - delta of wheel movement (MouseWheel) + """ + serial: int num: int focus: bool @@ -379,244 +381,245 @@ delta - delta of wheel movement (MouseWheel) def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. 
for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ def NoDefaultRoot() -> None: """Inhibit setting of default root window. -Call this function to inhibit that the first instance of -Tk is used for windows without an explicit parent window. -""" + Call this function to inhibit that the first instance of + Tk is used for windows without an explicit parent window. + """ class Variable: """Class to define value holders for e.g. buttons. -Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations -that constrain the type of the value returned from get(). -""" + Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations + that constrain the type of the value returned from get(). + """ + def __init__(self, master: Misc | None = None, value=None, name: str | None = None) -> None: """Construct a variable -MASTER can be given as master widget. -VALUE is an optional value (defaults to "") -NAME is an optional Tcl name (defaults to PY_VARnum). + MASTER can be given as master widget. + VALUE is an optional value (defaults to "") + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ -If NAME matches an existing variable and VALUE is omitted -then the existing value is retained. -""" def set(self, value) -> None: - """Set the variable to VALUE. -""" + """Set the variable to VALUE.""" initialize = set def get(self): - """Return value of variable. -""" + """Return value of variable.""" + def trace_add(self, mode: Literal["array", "read", "write", "unset"], callback: Callable[[str, str, str], object]) -> str: """Define a trace callback for the variable. -Mode is one of "read", "write", "unset", or a list or tuple of -such strings. -Callback must be a function which is called when the variable is -read, written or unset. + Mode is one of "read", "write", "unset", or a list or tuple of + such strings. + Callback must be a function which is called when the variable is + read, written or unset. + + Return the name of the callback. + """ -Return the name of the callback. -""" def trace_remove(self, mode: Literal["array", "read", "write", "unset"], cbname: str) -> None: """Delete the trace callback for a variable. -Mode is one of "read", "write", "unset" or a list or tuple of -such strings. Must be same as were specified in trace_add(). -cbname is the name of the callback returned from trace_add(). -""" + Mode is one of "read", "write", "unset" or a list or tuple of + such strings. Must be same as were specified in trace_add(). + cbname is the name of the callback returned from trace_add(). + """ + def trace_info(self) -> list[tuple[tuple[Literal["array", "read", "write", "unset"], ...], str]]: - """Return all trace callback information. -""" + """Return all trace callback information.""" if sys.version_info >= (3, 14): @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") def trace(self, mode, callback) -> str: """Define a trace callback for the variable. -MODE is one of "r", "w", "u" for read, write, undefine. -CALLBACK must be a function which is called when -the variable is read, written or undefined. + MODE is one of "r", "w", "u" for read, write, undefine. + CALLBACK must be a function which is called when + the variable is read, written or undefined. -Return the name of the callback. + Return the name of the callback. + + This deprecated method wraps a deprecated Tcl method removed + in Tcl 9.0. Use trace_add() instead. 
+ """ -This deprecated method wraps a deprecated Tcl method removed -in Tcl 9.0. Use trace_add() instead. -""" @deprecated("Deprecated since Python 3.14. Use `trace_add()` instead.") def trace_variable(self, mode, callback) -> str: """Define a trace callback for the variable. -MODE is one of "r", "w", "u" for read, write, undefine. -CALLBACK must be a function which is called when -the variable is read, written or undefined. + MODE is one of "r", "w", "u" for read, write, undefine. + CALLBACK must be a function which is called when + the variable is read, written or undefined. -Return the name of the callback. + Return the name of the callback. + + This deprecated method wraps a deprecated Tcl method removed + in Tcl 9.0. Use trace_add() instead. + """ -This deprecated method wraps a deprecated Tcl method removed -in Tcl 9.0. Use trace_add() instead. -""" @deprecated("Deprecated since Python 3.14. Use `trace_remove()` instead.") def trace_vdelete(self, mode, cbname) -> None: """Delete the trace callback for a variable. -MODE is one of "r", "w", "u" for read, write, undefine. -CBNAME is the name of the callback returned from trace_variable or trace. + MODE is one of "r", "w", "u" for read, write, undefine. + CBNAME is the name of the callback returned from trace_variable or trace. + + This deprecated method wraps a deprecated Tcl method removed + in Tcl 9.0. Use trace_remove() instead. + """ -This deprecated method wraps a deprecated Tcl method removed -in Tcl 9.0. Use trace_remove() instead. -""" @deprecated("Deprecated since Python 3.14. Use `trace_info()` instead.") def trace_vinfo(self) -> list[Incomplete]: """Return all trace callback information. -This deprecated method wraps a deprecated Tcl method removed -in Tcl 9.0. Use trace_info() instead. -""" + This deprecated method wraps a deprecated Tcl method removed + in Tcl 9.0. Use trace_info() instead. + """ else: def trace(self, mode, callback) -> str: """Define a trace callback for the variable. -MODE is one of "r", "w", "u" for read, write, undefine. -CALLBACK must be a function which is called when -the variable is read, written or undefined. + MODE is one of "r", "w", "u" for read, write, undefine. + CALLBACK must be a function which is called when + the variable is read, written or undefined. -Return the name of the callback. + Return the name of the callback. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_add() instead. + """ -This deprecated method wraps a deprecated Tcl method that will -likely be removed in the future. Use trace_add() instead. -""" def trace_variable(self, mode, callback) -> str: """Define a trace callback for the variable. -MODE is one of "r", "w", "u" for read, write, undefine. -CALLBACK must be a function which is called when -the variable is read, written or undefined. + MODE is one of "r", "w", "u" for read, write, undefine. + CALLBACK must be a function which is called when + the variable is read, written or undefined. -Return the name of the callback. + Return the name of the callback. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_add() instead. + """ -This deprecated method wraps a deprecated Tcl method that will -likely be removed in the future. Use trace_add() instead. -""" def trace_vdelete(self, mode, cbname) -> None: """Delete the trace callback for a variable. -MODE is one of "r", "w", "u" for read, write, undefine. 
-CBNAME is the name of the callback returned from trace_variable or trace. + MODE is one of "r", "w", "u" for read, write, undefine. + CBNAME is the name of the callback returned from trace_variable or trace. + + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_remove() instead. + """ -This deprecated method wraps a deprecated Tcl method that will -likely be removed in the future. Use trace_remove() instead. -""" def trace_vinfo(self) -> list[Incomplete]: """Return all trace callback information. -This deprecated method wraps a deprecated Tcl method that will -likely be removed in the future. Use trace_info() instead. -""" + This deprecated method wraps a deprecated Tcl method that will + likely be removed in the future. Use trace_info() instead. + """ def __eq__(self, other: object) -> bool: ... def __del__(self) -> None: - """Unset the variable in Tcl. -""" + """Unset the variable in Tcl.""" __hash__: ClassVar[None] # type: ignore[assignment] class StringVar(Variable): - """Value holder for strings variables. -""" + """Value holder for strings variables.""" + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: """Construct a string variable. -MASTER can be given as master widget. -VALUE is an optional value (defaults to "") -NAME is an optional Tcl name (defaults to PY_VARnum). + MASTER can be given as master widget. + VALUE is an optional value (defaults to "") + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ -If NAME matches an existing variable and VALUE is omitted -then the existing value is retained. -""" def set(self, value: str) -> None: - """Set the variable to VALUE. -""" + """Set the variable to VALUE.""" initialize = set def get(self) -> str: - """Return value of variable as string. -""" + """Return value of variable as string.""" class IntVar(Variable): - """Value holder for integer variables. -""" + """Value holder for integer variables.""" + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: """Construct an integer variable. -MASTER can be given as master widget. -VALUE is an optional value (defaults to 0) -NAME is an optional Tcl name (defaults to PY_VARnum). + MASTER can be given as master widget. + VALUE is an optional value (defaults to 0) + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ -If NAME matches an existing variable and VALUE is omitted -then the existing value is retained. -""" def set(self, value: int) -> None: - """Set the variable to VALUE. -""" + """Set the variable to VALUE.""" initialize = set def get(self) -> int: - """Return the value of the variable as an integer. -""" + """Return the value of the variable as an integer.""" class DoubleVar(Variable): - """Value holder for float variables. -""" + """Value holder for float variables.""" + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: """Construct a float variable. -MASTER can be given as master widget. -VALUE is an optional value (defaults to 0.0) -NAME is an optional Tcl name (defaults to PY_VARnum). + MASTER can be given as master widget. + VALUE is an optional value (defaults to 0.0) + NAME is an optional Tcl name (defaults to PY_VARnum). 
+ + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ -If NAME matches an existing variable and VALUE is omitted -then the existing value is retained. -""" def set(self, value: float) -> None: - """Set the variable to VALUE. -""" + """Set the variable to VALUE.""" initialize = set def get(self) -> float: - """Return the value of the variable as a float. -""" + """Return the value of the variable as a float.""" class BooleanVar(Variable): - """Value holder for boolean variables. -""" + """Value holder for boolean variables.""" + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: """Construct a boolean variable. -MASTER can be given as master widget. -VALUE is an optional value (defaults to False) -NAME is an optional Tcl name (defaults to PY_VARnum). + MASTER can be given as master widget. + VALUE is an optional value (defaults to False) + NAME is an optional Tcl name (defaults to PY_VARnum). + + If NAME matches an existing variable and VALUE is omitted + then the existing value is retained. + """ -If NAME matches an existing variable and VALUE is omitted -then the existing value is retained. -""" def set(self, value: bool) -> None: - """Set the variable to VALUE. -""" + """Set the variable to VALUE.""" initialize = set def get(self) -> bool: - """Return the value of the variable as a bool. -""" + """Return the value of the variable as a bool.""" def mainloop(n: int = 0) -> None: - """Run the main loop of Tcl. -""" + """Run the main loop of Tcl.""" getint = int getdouble = float def getboolean(s) -> bool: - """Convert Tcl object to True or False. -""" + """Convert Tcl object to True or False.""" _Ts = TypeVarTuple("_Ts") @@ -634,549 +637,594 @@ class _BusyInfo(TypedDict): class Misc: """Internal class. -Base class which defines methods common for interior widgets. -""" + Base class which defines methods common for interior widgets. + """ + master: Misc | None tk: _tkinter.TkappType children: dict[str, Widget] def destroy(self) -> None: """Internal function. -Delete all Tcl commands created for -this widget in the Tcl interpreter. -""" + Delete all Tcl commands created for + this widget in the Tcl interpreter. + """ + def deletecommand(self, name: str) -> None: """Internal function. -Delete the Tcl command provided in NAME. -""" + Delete the Tcl command provided in NAME. + """ + def tk_strictMotif(self, boolean=None): """Set Tcl internal variable, whether the look and feel -should adhere to Motif. + should adhere to Motif. + + A parameter of 1 means adhere to Motif (e.g. no color + change if mouse passes over slider). + Returns the set value. + """ -A parameter of 1 means adhere to Motif (e.g. no color -change if mouse passes over slider). -Returns the set value. -""" def tk_bisque(self) -> None: - """Change the color scheme to light brown as used in Tk 3.6 and before. -""" + """Change the color scheme to light brown as used in Tk 3.6 and before.""" + def tk_setPalette(self, *args, **kw) -> None: """Set a new color scheme for all widget elements. -A single color as argument will cause that all colors of Tk -widget elements are derived from this. -Alternatively several keyword parameters and its associated -colors can be given. The following keywords are valid: -activeBackground, foreground, selectColor, -activeForeground, highlightBackground, selectBackground, -background, highlightColor, selectForeground, -disabledForeground, insertBackground, troughColor. 
-""" + A single color as argument will cause that all colors of Tk + widget elements are derived from this. + Alternatively several keyword parameters and its associated + colors can be given. The following keywords are valid: + activeBackground, foreground, selectColor, + activeForeground, highlightBackground, selectBackground, + background, highlightColor, selectForeground, + disabledForeground, insertBackground, troughColor. + """ + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: """Wait until the variable is modified. -A parameter of type IntVar, StringVar, DoubleVar or -BooleanVar must be given. -""" + A parameter of type IntVar, StringVar, DoubleVar or + BooleanVar must be given. + """ waitvar = wait_variable def wait_window(self, window: Misc | None = None) -> None: """Wait until a WIDGET is destroyed. -If no parameter is given self is used. -""" + If no parameter is given self is used. + """ + def wait_visibility(self, window: Misc | None = None) -> None: """Wait until the visibility of a WIDGET changes -(e.g. it appears). + (e.g. it appears). + + If no parameter is given self is used. + """ -If no parameter is given self is used. -""" def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: - """Set Tcl variable NAME to VALUE. -""" + """Set Tcl variable NAME to VALUE.""" + def getvar(self, name: str = "PY_VAR"): - """Return value of Tcl variable NAME. -""" + """Return value of Tcl variable NAME.""" + def getint(self, s) -> int: ... def getdouble(self, s) -> float: ... def getboolean(self, s) -> bool: - """Return a boolean value for Tcl boolean values true and false given as parameter. -""" + """Return a boolean value for Tcl boolean values true and false given as parameter.""" + def focus_set(self) -> None: """Direct input focus to this widget. -If the application currently does not have the focus -this widget will get the focus if the application gets -the focus through the window manager. -""" + If the application currently does not have the focus + this widget will get the focus if the application gets + the focus through the window manager. + """ focus = focus_set def focus_force(self) -> None: """Direct input focus to this widget even if the -application does not have the focus. Use with -caution! -""" + application does not have the focus. Use with + caution! + """ + def focus_get(self) -> Misc | None: """Return the widget which has currently the focus in the -application. + application. + + Use focus_displayof to allow working with several + displays. Return None if application does not have + the focus. + """ -Use focus_displayof to allow working with several -displays. Return None if application does not have -the focus. -""" def focus_displayof(self) -> Misc | None: """Return the widget which has currently the focus on the -display where this widget is located. + display where this widget is located. + + Return None if the application does not have the focus. + """ -Return None if the application does not have the focus. -""" def focus_lastfor(self) -> Misc | None: """Return the widget which would have the focus if top level -for this widget gets the focus from the window manager. -""" + for this widget gets the focus from the window manager. + """ + def tk_focusFollowsMouse(self) -> None: """The widget under mouse will get automatically focus. Can not -be disabled easily. -""" + be disabled easily. + """ + def tk_focusNext(self) -> Misc | None: """Return the next widget in the focus order which follows -widget which has currently the focus. 
+ widget which has currently the focus. + + The focus order first goes to the next child, then to + the children of the child recursively and then to the + next sibling which is higher in the stacking order. A + widget is omitted if it has the takefocus resource set + to 0. + """ -The focus order first goes to the next child, then to -the children of the child recursively and then to the -next sibling which is higher in the stacking order. A -widget is omitted if it has the takefocus resource set -to 0. -""" def tk_focusPrev(self) -> Misc | None: - """Return previous widget in the focus order. See tk_focusNext for details. -""" + """Return previous widget in the focus order. See tk_focusNext for details.""" # .after() can be called without the "func" argument, but it is basically never what you want. # It behaves like time.sleep() and freezes the GUI app. def after(self, ms: int | Literal["idle"], func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: """Call function once after given time. -MS specifies the time in milliseconds. FUNC gives the -function which shall be called. Additional parameters -are given as parameters to the function call. Return -identifier to cancel scheduling with after_cancel. -""" + MS specifies the time in milliseconds. FUNC gives the + function which shall be called. Additional parameters + are given as parameters to the function call. Return + identifier to cancel scheduling with after_cancel. + """ # after_idle is essentially partialmethod(after, "idle") def after_idle(self, func: Callable[[Unpack[_Ts]], object], *args: Unpack[_Ts]) -> str: """Call FUNC once if the Tcl main loop has no event to -process. + process. + + Return an identifier to cancel the scheduling with + after_cancel. + """ -Return an identifier to cancel the scheduling with -after_cancel. -""" def after_cancel(self, id: str) -> None: """Cancel scheduling of function identified with ID. -Identifier returned by after or after_idle must be -given as first parameter. -""" + Identifier returned by after or after_idle must be + given as first parameter. + """ if sys.version_info >= (3, 13): def after_info(self, id: str | None = None) -> tuple[str, ...]: """Return information about existing event handlers. -With no argument, return a tuple of the identifiers for all existing -event handlers created by the after and after_idle commands for this -interpreter. If id is supplied, it specifies an existing handler; id -must have been the return value from some previous call to after or -after_idle and it must not have triggered yet or been canceled. If the -id doesn't exist, a TclError is raised. Otherwise, the return value is -a tuple containing (script, type) where script is a reference to the -function to be called by the event handler and type is either 'idle' -or 'timer' to indicate what kind of event handler it is. -""" + With no argument, return a tuple of the identifiers for all existing + event handlers created by the after and after_idle commands for this + interpreter. If id is supplied, it specifies an existing handler; id + must have been the return value from some previous call to after or + after_idle and it must not have triggered yet or been canceled. If the + id doesn't exist, a TclError is raised. Otherwise, the return value is + a tuple containing (script, type) where script is a reference to the + function to be called by the event handler and type is either 'idle' + or 'timer' to indicate what kind of event handler it is. 
+ """ def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: - """Ring a display's bell. -""" + """Ring a display's bell.""" if sys.version_info >= (3, 13): # Supports options from `_BusyInfo`` def tk_busy_cget(self, option: Literal["cursor"]) -> _Cursor: """Return the value of busy configuration option. -The widget must have been previously made busy by -tk_busy_hold(). Option may have any of the values accepted by -tk_busy_hold(). -""" + The widget must have been previously made busy by + tk_busy_hold(). Option may have any of the values accepted by + tk_busy_hold(). + """ busy_cget = tk_busy_cget def tk_busy_configure(self, cnf: Any = None, **kw: Any) -> Any: """Query or modify the busy configuration options. -The widget must have been previously made busy by -tk_busy_hold(). Options may have any of the values accepted by -tk_busy_hold(). + The widget must have been previously made busy by + tk_busy_hold(). Options may have any of the values accepted by + tk_busy_hold(). -Please note that the option database is referenced by the widget -name or class. For example, if a Frame widget with name "frame" -is to be made busy, the busy cursor can be specified for it by -either call: + Please note that the option database is referenced by the widget + name or class. For example, if a Frame widget with name "frame" + is to be made busy, the busy cursor can be specified for it by + either call: - w.option_add('*frame.busyCursor', 'gumby') - w.option_add('*Frame.BusyCursor', 'gumby') -""" + w.option_add('*frame.busyCursor', 'gumby') + w.option_add('*Frame.BusyCursor', 'gumby') + """ tk_busy_config = tk_busy_configure busy_configure = tk_busy_configure busy_config = tk_busy_configure def tk_busy_current(self, pattern: str | None = None) -> list[Misc]: """Return a list of widgets that are currently busy. -If a pattern is given, only busy widgets whose path names match -a pattern are returned. -""" + If a pattern is given, only busy widgets whose path names match + a pattern are returned. + """ busy_current = tk_busy_current def tk_busy_forget(self) -> None: """Make this widget no longer busy. -User events will again be received by the widget. -""" + User events will again be received by the widget. + """ busy_forget = tk_busy_forget def tk_busy_hold(self, **kw: Unpack[_BusyInfo]) -> None: """Make this widget appear busy. -The specified widget and its descendants will be blocked from -user interactions. Normally update() should be called -immediately afterward to insure that the hold operation is in -effect before the application starts its processing. + The specified widget and its descendants will be blocked from + user interactions. Normally update() should be called + immediately afterward to insure that the hold operation is in + effect before the application starts its processing. -The only supported configuration option is: + The only supported configuration option is: - cursor: the cursor to be displayed when the widget is made - busy. -""" + cursor: the cursor to be displayed when the widget is made + busy. + """ tk_busy = tk_busy_hold busy_hold = tk_busy_hold busy = tk_busy_hold def tk_busy_status(self) -> bool: - """Return True if the widget is busy, False otherwise. -""" + """Return True if the widget is busy, False otherwise.""" busy_status = tk_busy_status def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: """Retrieve data from the clipboard on window's display. -The window keyword defaults to the root window of the Tkinter -application. 
+ The window keyword defaults to the root window of the Tkinter + application. -The type keyword specifies the form in which the data is -to be returned and should be an atom name such as STRING -or FILE_NAME. Type defaults to STRING, except on X11, where the default -is to try UTF8_STRING and fall back to STRING. + The type keyword specifies the form in which the data is + to be returned and should be an atom name such as STRING + or FILE_NAME. Type defaults to STRING, except on X11, where the default + is to try UTF8_STRING and fall back to STRING. -This command is equivalent to: + This command is equivalent to: + + selection_get(CLIPBOARD) + """ -selection_get(CLIPBOARD) -""" def clipboard_clear(self, *, displayof: Misc = ...) -> None: """Clear the data in the Tk clipboard. -A widget specified for the optional displayof keyword -argument specifies the target display. -""" + A widget specified for the optional displayof keyword + argument specifies the target display. + """ + def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: """Append STRING to the Tk clipboard. -A widget specified at the optional displayof keyword -argument specifies the target display. The clipboard -can be retrieved with selection_get. -""" + A widget specified at the optional displayof keyword + argument specifies the target display. The clipboard + can be retrieved with selection_get. + """ + def grab_current(self): """Return widget which has currently the grab in this application -or None. -""" + or None. + """ + def grab_release(self) -> None: - """Release grab for this widget if currently set. -""" + """Release grab for this widget if currently set.""" + def grab_set(self) -> None: """Set grab for this widget. -A grab directs all events to this and descendant -widgets in the application. -""" + A grab directs all events to this and descendant + widgets in the application. + """ + def grab_set_global(self) -> None: """Set global grab for this widget. -A global grab directs all events to this and -descendant widgets on the display. Use with caution - -other applications do not get events anymore. -""" + A global grab directs all events to this and + descendant widgets on the display. Use with caution - + other applications do not get events anymore. + """ + def grab_status(self) -> Literal["local", "global"] | None: """Return None, "local" or "global" if this widget has -no, a local or a global grab. -""" + no, a local or a global grab. + """ + def option_add( self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None ) -> None: """Set a VALUE (second parameter) for an option -PATTERN (first parameter). + PATTERN (first parameter). + + An optional third parameter gives the numeric priority + (defaults to 80). + """ -An optional third parameter gives the numeric priority -(defaults to 80). -""" def option_clear(self) -> None: """Clear the option database. -It will be reloaded if option_add is called. -""" + It will be reloaded if option_add is called. + """ + def option_get(self, name, className): """Return the value for an option NAME for this widget -with CLASSNAME. + with CLASSNAME. + + Values with higher priority override lower values. + """ -Values with higher priority override lower values. -""" def option_readfile(self, fileName, priority=None) -> None: """Read file FILENAME into the option database. -An optional second parameter gives the numeric -priority. 
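A minimal sketch of the clipboard methods described above (illustrative only; assumes a display is available, and that the appended text is still the current clipboard contents when read back):

    import tkinter as tk

    root = tk.Tk()
    root.withdraw()                        # no visible window needed for clipboard access

    root.clipboard_clear()
    root.clipboard_append("hello from tkinter")

    # clipboard_get() raises TclError if the clipboard is empty or holds
    # a non-text selection, so guard the call.
    try:
        print(root.clipboard_get())
    except tk.TclError:
        print("clipboard is empty")

    root.destroy()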
-""" + An optional second parameter gives the numeric + priority. + """ + def selection_clear(self, **kw) -> None: - """Clear the current X selection. -""" + """Clear the current X selection.""" + def selection_get(self, **kw): """Return the contents of the current X selection. -A keyword parameter selection specifies the name of -the selection and defaults to PRIMARY. A keyword -parameter displayof specifies a widget on the display -to use. A keyword parameter type specifies the form of data to be -fetched, defaulting to STRING except on X11, where UTF8_STRING is tried -before STRING. -""" + A keyword parameter selection specifies the name of + the selection and defaults to PRIMARY. A keyword + parameter displayof specifies a widget on the display + to use. A keyword parameter type specifies the form of data to be + fetched, defaulting to STRING except on X11, where UTF8_STRING is tried + before STRING. + """ + def selection_handle(self, command, **kw) -> None: """Specify a function COMMAND to call if the X -selection owned by this widget is queried by another -application. - -This function must return the contents of the -selection. The function will be called with the -arguments OFFSET and LENGTH which allows the chunking -of very long selections. The following keyword -parameters can be provided: -selection - name of the selection (default PRIMARY), -type - type of the selection (e.g. STRING, FILE_NAME). -""" + selection owned by this widget is queried by another + application. + + This function must return the contents of the + selection. The function will be called with the + arguments OFFSET and LENGTH which allows the chunking + of very long selections. The following keyword + parameters can be provided: + selection - name of the selection (default PRIMARY), + type - type of the selection (e.g. STRING, FILE_NAME). + """ + def selection_own(self, **kw) -> None: """Become owner of X selection. -A keyword parameter selection specifies the name of -the selection (default PRIMARY). -""" + A keyword parameter selection specifies the name of + the selection (default PRIMARY). + """ + def selection_own_get(self, **kw): """Return owner of X selection. -The following keyword parameter can -be provided: -selection - name of the selection (default PRIMARY), -type - type of the selection (e.g. STRING, FILE_NAME). -""" + The following keyword parameter can + be provided: + selection - name of the selection (default PRIMARY), + type - type of the selection (e.g. STRING, FILE_NAME). + """ + def send(self, interp, cmd, *args): - """Send Tcl command CMD to different interpreter INTERP to be executed. -""" + """Send Tcl command CMD to different interpreter INTERP to be executed.""" + def lower(self, belowThis=None) -> None: - """Lower this widget in the stacking order. -""" + """Lower this widget in the stacking order.""" + def tkraise(self, aboveThis=None) -> None: - """Raise this widget in the stacking order. -""" + """Raise this widget in the stacking order.""" lift = tkraise if sys.version_info >= (3, 11): def info_patchlevel(self) -> _VersionInfoType: - """Returns the exact version of the Tcl library. -""" + """Returns the exact version of the Tcl library.""" def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: - """Return integer which represents atom NAME. -""" + """Return integer which represents atom NAME.""" + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: - """Return name of atom with identifier ID. 
-""" + """Return name of atom with identifier ID.""" + def winfo_cells(self) -> int: - """Return number of cells in the colormap for this widget. -""" + """Return number of cells in the colormap for this widget.""" + def winfo_children(self) -> list[Widget | Toplevel]: - """Return a list of all widgets which are children of this widget. -""" + """Return a list of all widgets which are children of this widget.""" + def winfo_class(self) -> str: - """Return window class name of this widget. -""" + """Return window class name of this widget.""" + def winfo_colormapfull(self) -> bool: - """Return True if at the last color request the colormap was full. -""" + """Return True if at the last color request the colormap was full.""" + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: - """Return the widget which is at the root coordinates ROOTX, ROOTY. -""" + """Return the widget which is at the root coordinates ROOTX, ROOTY.""" + def winfo_depth(self) -> int: - """Return the number of bits per pixel. -""" + """Return the number of bits per pixel.""" + def winfo_exists(self) -> bool: - """Return true if this widget exists. -""" + """Return true if this widget exists.""" + def winfo_fpixels(self, number: float | str) -> float: """Return the number of pixels for the given distance NUMBER -(e.g. "3c") as float. -""" + (e.g. "3c") as float. + """ + def winfo_geometry(self) -> str: - """Return geometry string for this widget in the form "widthxheight+X+Y". -""" + """Return geometry string for this widget in the form "widthxheight+X+Y".""" + def winfo_height(self) -> int: - """Return height of this widget. -""" + """Return height of this widget.""" + def winfo_id(self) -> int: - """Return identifier ID for this widget. -""" + """Return identifier ID for this widget.""" + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: - """Return the name of all Tcl interpreters for this display. -""" + """Return the name of all Tcl interpreters for this display.""" + def winfo_ismapped(self) -> bool: - """Return true if this widget is mapped. -""" + """Return true if this widget is mapped.""" + def winfo_manager(self) -> str: - """Return the window manager name for this widget. -""" + """Return the window manager name for this widget.""" + def winfo_name(self) -> str: - """Return the name of this widget. -""" + """Return the name of this widget.""" + def winfo_parent(self) -> str: # return value needs nametowidget() - """Return the name of the parent of this widget. -""" + """Return the name of the parent of this widget.""" + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): - """Return the pathname of the widget given by ID. -""" + """Return the pathname of the widget given by ID.""" + def winfo_pixels(self, number: float | str) -> int: - """Rounded integer value of winfo_fpixels. -""" + """Rounded integer value of winfo_fpixels.""" + def winfo_pointerx(self) -> int: - """Return the x coordinate of the pointer on the root window. -""" + """Return the x coordinate of the pointer on the root window.""" + def winfo_pointerxy(self) -> tuple[int, int]: - """Return a tuple of x and y coordinates of the pointer on the root window. -""" + """Return a tuple of x and y coordinates of the pointer on the root window.""" + def winfo_pointery(self) -> int: - """Return the y coordinate of the pointer on the root window. 
-""" + """Return the y coordinate of the pointer on the root window.""" + def winfo_reqheight(self) -> int: - """Return requested height of this widget. -""" + """Return requested height of this widget.""" + def winfo_reqwidth(self) -> int: - """Return requested width of this widget. -""" + """Return requested width of this widget.""" + def winfo_rgb(self, color: str) -> tuple[int, int, int]: - """Return a tuple of integer RGB values in range(65536) for color in this widget. -""" + """Return a tuple of integer RGB values in range(65536) for color in this widget.""" + def winfo_rootx(self) -> int: """Return x coordinate of upper left corner of this widget on the -root window. -""" + root window. + """ + def winfo_rooty(self) -> int: """Return y coordinate of upper left corner of this widget on the -root window. -""" + root window. + """ + def winfo_screen(self) -> str: - """Return the screen name of this widget. -""" + """Return the screen name of this widget.""" + def winfo_screencells(self) -> int: """Return the number of the cells in the colormap of the screen -of this widget. -""" + of this widget. + """ + def winfo_screendepth(self) -> int: """Return the number of bits per pixel of the root window of the -screen of this widget. -""" + screen of this widget. + """ + def winfo_screenheight(self) -> int: """Return the number of pixels of the height of the screen of this widget -in pixel. -""" + in pixel. + """ + def winfo_screenmmheight(self) -> int: """Return the number of pixels of the height of the screen of -this widget in mm. -""" + this widget in mm. + """ + def winfo_screenmmwidth(self) -> int: """Return the number of pixels of the width of the screen of -this widget in mm. -""" + this widget in mm. + """ + def winfo_screenvisual(self) -> str: """Return one of the strings directcolor, grayscale, pseudocolor, -staticcolor, staticgray, or truecolor for the default -colormodel of this screen. -""" + staticcolor, staticgray, or truecolor for the default + colormodel of this screen. + """ + def winfo_screenwidth(self) -> int: """Return the number of pixels of the width of the screen of -this widget in pixel. -""" + this widget in pixel. + """ + def winfo_server(self) -> str: """Return information of the X-Server of the screen of this widget in -the form "XmajorRminor vendor vendorVersion". -""" + the form "XmajorRminor vendor vendorVersion". + """ + def winfo_toplevel(self) -> Tk | Toplevel: - """Return the toplevel widget of this widget. -""" + """Return the toplevel widget of this widget.""" + def winfo_viewable(self) -> bool: - """Return true if the widget and all its higher ancestors are mapped. -""" + """Return true if the widget and all its higher ancestors are mapped.""" + def winfo_visual(self) -> str: """Return one of the strings directcolor, grayscale, pseudocolor, -staticcolor, staticgray, or truecolor for the -colormodel of this widget. -""" + staticcolor, staticgray, or truecolor for the + colormodel of this widget. + """ + def winfo_visualid(self) -> str: - """Return the X identifier for the visual for this widget. -""" + """Return the X identifier for the visual for this widget.""" + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: """Return a list of all visuals available for the screen -of this widget. + of this widget. + + Each item in the list consists of a visual name (see winfo_visual), a + depth and if includeids is true is given also the X identifier. 
+ """ -Each item in the list consists of a visual name (see winfo_visual), a -depth and if includeids is true is given also the X identifier. -""" def winfo_vrootheight(self) -> int: """Return the height of the virtual root window associated with this -widget in pixels. If there is no virtual root window return the -height of the screen. -""" + widget in pixels. If there is no virtual root window return the + height of the screen. + """ + def winfo_vrootwidth(self) -> int: """Return the width of the virtual root window associated with this -widget in pixel. If there is no virtual root window return the -width of the screen. -""" + widget in pixel. If there is no virtual root window return the + width of the screen. + """ + def winfo_vrootx(self) -> int: """Return the x offset of the virtual root relative to the root -window of the screen of this widget. -""" + window of the screen of this widget. + """ + def winfo_vrooty(self) -> int: """Return the y offset of the virtual root relative to the root -window of the screen of this widget. -""" + window of the screen of this widget. + """ + def winfo_width(self) -> int: - """Return the width of this widget. -""" + """Return the width of this widget.""" + def winfo_x(self) -> int: """Return the x coordinate of the upper left corner of this widget -in the parent. -""" + in the parent. + """ + def winfo_y(self) -> int: """Return the y coordinate of the upper left corner of this widget -in the parent. -""" + in the parent. + """ + def update(self) -> None: - """Enter event loop until all pending events have been processed by Tcl. -""" + """Enter event loop until all pending events have been processed by Tcl.""" + def update_idletasks(self) -> None: """Enter event loop until all idle callbacks have been called. This -will update the display of windows but not process events caused by -the user. -""" + will update the display of windows but not process events caused by + the user. + """ + @overload def bindtags(self, tagList: None = None) -> tuple[str, ...]: """Set or get the list of bindtags for this widget. -With no argument return the list of all bindtags associated with -this widget. With a list of strings as argument the bindtags are -set to this list. The bindtags determine in which order events are -processed (see bind). -""" + With no argument return the list of all bindtags associated with + this widget. With a list of strings as argument the bindtags are + set to this list. The bindtags determine in which order events are + processed (see bind). + """ + @overload def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... # bind with isinstance(func, str) doesn't return anything, but all other @@ -1190,42 +1238,43 @@ processed (see bind). ) -> str: """Bind to this widget at event SEQUENCE a call to function FUNC. -SEQUENCE is a string of concatenated event -patterns. An event pattern is of the form - where MODIFIER is one -of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, -Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, -B3, Alt, Button4, B4, Double, Button5, B5 Triple, -Mod1, M1. TYPE is one of Activate, Enter, Map, -ButtonPress, Button, Expose, Motion, ButtonRelease -FocusIn, MouseWheel, Circulate, FocusOut, Property, -Colormap, Gravity Reparent, Configure, KeyPress, Key, -Unmap, Deactivate, KeyRelease Visibility, Destroy, -Leave and DETAIL is the button number for ButtonPress, -ButtonRelease and DETAIL is the Keysym for KeyPress and -KeyRelease. 
Examples are - for pressing Control and mouse button 1 or - for pressing A and the Alt key (KeyPress can be omitted). -An event pattern can also be a virtual event of the form -<> where AString can be arbitrary. This -event can be generated by event_generate. -If events are concatenated they must appear shortly -after each other. - -FUNC will be called if the event sequence occurs with an -instance of Event as argument. If the return value of FUNC is -"break" no further bound function is invoked. - -An additional boolean parameter ADD specifies whether FUNC will -be called additionally to the other bound function or whether -it will replace the previous function. - -Bind will return an identifier to allow deletion of the bound function with -unbind without memory leak. - -If FUNC or SEQUENCE is omitted the bound function or list -of bound events are returned. -""" + SEQUENCE is a string of concatenated event + patterns. An event pattern is of the form + where MODIFIER is one + of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, + Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, + B3, Alt, Button4, B4, Double, Button5, B5 Triple, + Mod1, M1. TYPE is one of Activate, Enter, Map, + ButtonPress, Button, Expose, Motion, ButtonRelease + FocusIn, MouseWheel, Circulate, FocusOut, Property, + Colormap, Gravity Reparent, Configure, KeyPress, Key, + Unmap, Deactivate, KeyRelease Visibility, Destroy, + Leave and DETAIL is the button number for ButtonPress, + ButtonRelease and DETAIL is the Keysym for KeyPress and + KeyRelease. Examples are + for pressing Control and mouse button 1 or + for pressing A and the Alt key (KeyPress can be omitted). + An event pattern can also be a virtual event of the form + <> where AString can be arbitrary. This + event can be generated by event_generate. + If events are concatenated they must appear shortly + after each other. + + FUNC will be called if the event sequence occurs with an + instance of Event as argument. If the return value of FUNC is + "break" no further bound function is invoked. + + An additional boolean parameter ADD specifies whether FUNC will + be called additionally to the other bound function or whether + it will replace the previous function. + + Bind will return an identifier to allow deletion of the bound function with + unbind without memory leak. + + If FUNC or SEQUENCE is omitted the bound function or list + of bound events are returned. + """ + @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -1240,10 +1289,11 @@ of bound events are returned. add: Literal["", "+"] | bool | None = None, ) -> str: """Bind to all widgets at an event SEQUENCE a call to function FUNC. -An additional boolean parameter ADD specifies whether FUNC will -be called additionally to the other bound function or whether -it will replace the previous function. See bind for the return value. -""" + An additional boolean parameter ADD specifies whether FUNC will + be called additionally to the other bound function or whether + it will replace the previous function. See bind for the return value. + """ + @overload def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -1257,12 +1307,13 @@ it will replace the previous function. See bind for the return value. add: Literal["", "+"] | bool | None = None, ) -> str: """Bind to widgets with bindtag CLASSNAME at event -SEQUENCE a call of function FUNC. 
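A small sketch of the bind()/unbind() event-pattern mechanism explained in the docstring above (illustrative only; the pattern and handlers are made up for the example, and a display is assumed):

    import tkinter as tk

    root = tk.Tk()

    def on_click(event: tk.Event) -> None:
        # event.x / event.y are widget-relative coordinates
        print(f"Control-click at ({event.x}, {event.y})")

    # "<Control-Button-1>" is an event pattern: modifier Control, type Button, detail 1.
    bind_id = root.bind("<Control-Button-1>", on_click)

    # add="+" keeps the previous binding instead of replacing it.
    root.bind("<Control-Button-1>", lambda e: print("second handler"), add="+")

    # unbind() with the returned identifier removes just that handler.
    root.unbind("<Control-Button-1>", bind_id)

    root.after(100, root.destroy)
    root.mainloop()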
An additional -boolean parameter ADD specifies whether FUNC will be -called additionally to the other bound function or -whether it will replace the previous function. See bind for -the return value. -""" + SEQUENCE a call of function FUNC. An additional + boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or + whether it will replace the previous function. See bind for + the return value. + """ + @overload def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload @@ -1270,77 +1321,83 @@ the return value. def unbind(self, sequence: str, funcid: str | None = None) -> None: """Unbind for this widget the event SEQUENCE. -If FUNCID is given, only unbind the function identified with FUNCID -and also delete the corresponding Tcl command. + If FUNCID is given, only unbind the function identified with FUNCID + and also delete the corresponding Tcl command. + + Otherwise destroy the current binding for SEQUENCE, leaving SEQUENCE + unbound. + """ -Otherwise destroy the current binding for SEQUENCE, leaving SEQUENCE -unbound. -""" def unbind_all(self, sequence: str) -> None: - """Unbind for all widgets for event SEQUENCE all functions. -""" + """Unbind for all widgets for event SEQUENCE all functions.""" + def unbind_class(self, className: str, sequence: str) -> None: """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE -all functions. -""" + all functions. + """ + def mainloop(self, n: int = 0) -> None: - """Call the mainloop of Tk. -""" + """Call the mainloop of Tk.""" + def quit(self) -> None: - """Quit the Tcl interpreter. All widgets will be destroyed. -""" + """Quit the Tcl interpreter. All widgets will be destroyed.""" + @property def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: - """Internal function. -""" + """Internal function.""" + def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: """Return the Tkinter instance of a widget identified by -its Tcl name NAME. -""" + its Tcl name NAME. + """ + def register( self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 ) -> str: """Return a newly created Tcl function. If this -function is called, the Python function FUNC will -be executed. An optional function SUBST can -be given which will be executed before FUNC. -""" + function is called, the Python function FUNC will + be executed. An optional function SUBST can + be given which will be executed before FUNC. + """ + def keys(self) -> list[str]: - """Return a list of all resource names of this widget. -""" + """Return a list of all resource names of this widget.""" + @overload def pack_propagate(self, flag: bool) -> bool | None: """Set or get the status for propagation of geometry information. -A boolean argument specifies whether the geometry information -of the slaves will determine the size of this widget. If no argument -is given the current setting will be returned. -""" + A boolean argument specifies whether the geometry information + of the slaves will determine the size of this widget. If no argument + is given the current setting will be returned. + """ + @overload def pack_propagate(self) -> None: ... propagate = pack_propagate def grid_anchor(self, anchor: Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] | None = None) -> None: """The anchor value controls how to place the grid within the -master when no row/column has any weight. 
+ master when no row/column has any weight. -The default anchor is nw. -""" + The default anchor is nw. + """ anchor = grid_anchor @overload def grid_bbox( self, column: None = None, row: None = None, col2: None = None, row2: None = None ) -> tuple[int, int, int, int] | None: """Return a tuple of integer coordinates for the bounding -box of this widget controlled by the geometry manager grid. + box of this widget controlled by the geometry manager grid. -If COLUMN, ROW is given the bounding box applies from -the cell with row and column 0 to the specified -cell. If COL2 and ROW2 are given the bounding box -starts at that cell. + If COLUMN, ROW is given the bounding box applies from + the cell with row and column 0 to the specified + cell. If COL2 and ROW2 are given the bounding box + starts at that cell. + + The returned integers specify the offset of the upper left + corner in the master widget and the width and height. + """ -The returned integers specify the offset of the upper left -corner in the master widget and the width and height. -""" @overload def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... @overload @@ -1358,10 +1415,11 @@ corner in the master widget and the width and height. ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check """Configure column INDEX of a grid. -Valid resources are minsize (minimum size of the column), -weight (how much does additional space propagate to this column) -and pad (how much space to let additionally). -""" + Valid resources are minsize (minimum size of the column), + weight (how much does additional space propagate to this column) + and pad (how much space to let additionally). + """ + def grid_rowconfigure( self, index: int | str | list[int] | tuple[int, ...], @@ -1374,53 +1432,57 @@ and pad (how much space to let additionally). ) -> _GridIndexInfo | MaybeNone: # can be None but annoying to check """Configure row INDEX of a grid. -Valid resources are minsize (minimum size of the row), -weight (how much does additional space propagate to this row) -and pad (how much space to let additionally). -""" + Valid resources are minsize (minimum size of the row), + weight (how much does additional space propagate to this row) + and pad (how much space to let additionally). + """ columnconfigure = grid_columnconfigure rowconfigure = grid_rowconfigure def grid_location(self, x: float | str, y: float | str) -> tuple[int, int]: """Return a tuple of column and row which identify the cell -at which the pixel at position X and Y inside the master -widget is located. -""" + at which the pixel at position X and Y inside the master + widget is located. + """ + @overload def grid_propagate(self, flag: bool) -> None: """Set or get the status for propagation of geometry information. -A boolean argument specifies whether the geometry information -of the slaves will determine the size of this widget. If no argument -is given, the current setting will be returned. -""" + A boolean argument specifies whether the geometry information + of the slaves will determine the size of this widget. If no argument + is given, the current setting will be returned. + """ + @overload def grid_propagate(self) -> bool: ... def grid_size(self) -> tuple[int, int]: - """Return a tuple of the number of column and rows in the grid. 
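A short sketch of grid_columnconfigure()/grid_rowconfigure() and grid_size() as described above (illustrative only; widget names and values are arbitrary, and a display is assumed):

    import tkinter as tk

    root = tk.Tk()

    # Column 0 absorbs extra horizontal space; row 0 absorbs extra vertical space.
    root.grid_columnconfigure(0, weight=1, minsize=120)
    root.grid_rowconfigure(0, weight=1)

    tk.Label(root, text="stretchy").grid(row=0, column=0, sticky="nsew")
    tk.Label(root, text="fixed").grid(row=0, column=1)

    print(root.grid_size())   # (columns, rows) currently occupied, e.g. (2, 1)

    root.after(100, root.destroy)
    root.mainloop()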
-""" + """Return a tuple of the number of column and rows in the grid.""" size = grid_size # Widget because Toplevel or Tk is never a slave def pack_slaves(self) -> list[Widget]: """Return a list of all slaves of this widget -in its packing order. -""" + in its packing order. + """ + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: """Return a list of all slaves of this widget -in its packing order. -""" + in its packing order. + """ + def place_slaves(self) -> list[Widget]: """Return a list of all slaves of this widget -in its packing order. -""" + in its packing order. + """ slaves = pack_slaves def event_add(self, virtual: str, *sequences: str) -> None: """Bind a virtual event VIRTUAL (of the form <>) -to an event SEQUENCE such that the virtual event is triggered -whenever SEQUENCE occurs. -""" + to an event SEQUENCE such that the virtual event is triggered + whenever SEQUENCE occurs. + """ + def event_delete(self, virtual: str, *sequences: str) -> None: - """Unbind a virtual event VIRTUAL from SEQUENCE. -""" + """Unbind a virtual event VIRTUAL from SEQUENCE.""" + def event_generate( self, sequence: str, @@ -1454,92 +1516,99 @@ whenever SEQUENCE occurs. y: float | str = ..., ) -> None: """Generate an event SEQUENCE. Additional -keyword arguments specify parameter of the event -(e.g. x, y, rootx, rooty). -""" + keyword arguments specify parameter of the event + (e.g. x, y, rootx, rooty). + """ + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: """Return a list of all virtual events or the information -about the SEQUENCE bound to the virtual event VIRTUAL. -""" + about the SEQUENCE bound to the virtual event VIRTUAL. + """ + def image_names(self) -> tuple[str, ...]: - """Return a list of all existing image names. -""" + """Return a list of all existing image names.""" + def image_types(self) -> tuple[str, ...]: - """Return a list of all available image types (e.g. photo bitmap). -""" + """Return a list of all available image types (e.g. photo bitmap).""" # See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... def __getitem__(self, key: str) -> Any: - """Return the resource value for a KEY given as string. -""" + """Return the resource value for a KEY given as string.""" + def cget(self, key: str) -> Any: - """Return the resource value for a KEY given as string. -""" + """Return the resource value for a KEY given as string.""" + def configure(self, cnf: Any = None) -> Any: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ # TODO: config is an alias of configure, but adding that here creates # conflict with the type of config in the subclasses. See #13149 class CallWrapper: """Internal class. Stores function to call when some user -defined Tcl function is called e.g. after an event occurred. -""" + defined Tcl function is called e.g. after an event occurred. + """ + func: Incomplete subst: Incomplete widget: Incomplete def __init__(self, func, subst, widget) -> None: - """Store FUNC, SUBST and WIDGET as members. -""" + """Store FUNC, SUBST and WIDGET as members.""" + def __call__(self, *args): - """Apply first function SUBST to arguments, than FUNC. 
-""" + """Apply first function SUBST to arguments, than FUNC.""" class XView: """Mix-in class for querying and changing the horizontal position -of a widget's window. -""" + of a widget's window. + """ + @overload def xview(self) -> tuple[float, float]: - """Query and change the horizontal position of the view. -""" + """Query and change the horizontal position of the view.""" + @overload def xview(self, *args) -> None: ... def xview_moveto(self, fraction: float) -> None: """Adjusts the view in the window so that FRACTION of the -total width of the canvas is off-screen to the left. -""" + total width of the canvas is off-screen to the left. + """ + @overload def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: """Shift the x-view according to NUMBER which is measured in "units" -or "pages" (WHAT). -""" + or "pages" (WHAT). + """ + @overload def xview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... class YView: """Mix-in class for querying and changing the vertical position -of a widget's window. -""" + of a widget's window. + """ + @overload def yview(self) -> tuple[float, float]: - """Query and change the vertical position of the view. -""" + """Query and change the vertical position of the view.""" + @overload def yview(self, *args) -> None: ... def yview_moveto(self, fraction: float) -> None: """Adjusts the view in the window so that FRACTION of the -total height of the canvas is off-screen to the top. -""" + total height of the canvas is off-screen to the top. + """ + @overload def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: """Shift the y-view according to NUMBER which is measured in -"units" or "pages" (WHAT). -""" + "units" or "pages" (WHAT). + """ + @overload def yview_scroll(self, number: float | str, what: Literal["pixels"]) -> None: ... @@ -1576,14 +1645,15 @@ else: type: str class Wm: - """Provides functions for the communication with the window manager. -""" + """Provides functions for the communication with the window manager.""" + @overload def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: """Instruct the window manager to set the aspect ratio (width/height) -of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple -of the actual values if no argument is given. -""" + of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple + of the actual values if no argument is given. + """ + @overload def wm_aspect( self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None @@ -1594,16 +1664,17 @@ of the actual values if no argument is given. def wm_attributes(self, *, return_python_dict: Literal[False] = False) -> tuple[Any, ...]: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. 
When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, *, return_python_dict: Literal[True]) -> _WmAttributes: ... @@ -1612,36 +1683,37 @@ corresponding attributes. def wm_attributes(self) -> tuple[Any, ...]: """This subcommand returns or sets platform specific attributes - The first form returns a list of the platform specific flags and - their values. The second form returns the value for the specific - option. The third form sets one or more of the values. The values - are as follows: + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: - On Windows, -disabled gets or sets whether the window is in a - disabled state. -toolwindow gets or sets the style of the window - to toolwindow (as defined in the MSDN). -topmost gets or sets - whether this is a topmost window (displays above all other - windows). + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). - On Macintosh, XXXXX + On Macintosh, XXXXX - On Unix, there are currently no special attribute values. - """ + On Unix, there are currently no special attribute values. + """ @overload def wm_attributes(self, option: Literal["-alpha"], /) -> float: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-fullscreen"], /) -> bool: ... @overload @@ -1651,16 +1723,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["-modified"], /) -> bool: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. 
+ When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-notify"], /) -> bool: ... @overload @@ -1674,16 +1747,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["-transparentcolor"], /) -> str: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-disabled"], /) -> bool: ... @overload @@ -1694,16 +1768,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["-zoomed"], /) -> bool: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-type"], /) -> str: ... if sys.version_info >= (3, 13): @@ -1711,16 +1786,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["alpha"], /) -> float: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. 
+ When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["fullscreen"], /) -> bool: ... @overload @@ -1730,16 +1806,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["modified"], /) -> bool: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["notify"], /) -> bool: ... @overload @@ -1753,16 +1830,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["transparentcolor"], /) -> str: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["disabled"], /) -> bool: ... @overload @@ -1773,16 +1851,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["zoomed"], /) -> bool: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. 
-When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["type"], /) -> str: ... @@ -1799,16 +1878,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-notify"], value: bool, /) -> Literal[""]: ... @overload @@ -1820,16 +1900,17 @@ corresponding attributes. def wm_attributes(self, option: Literal["-transparentcolor"], value: str, /) -> Literal[""]: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-disabled"], value: bool, /) -> Literal[""]: ... @overload @@ -1840,16 +1921,17 @@ corresponding attributes. 
def wm_attributes(self, option: Literal["-zoomed"], value: bool, /) -> Literal[""]: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" @overload def wm_attributes(self, option: Literal["-type"], value: str, /) -> Literal[""]: ... @@ -1871,16 +1953,16 @@ corresponding attributes. ) -> None: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ elif sys.platform == "win32": @overload def wm_attributes( @@ -1895,16 +1977,16 @@ corresponding attributes. ) -> None: """Return or sets platform specific attributes. -When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ else: # X11 @overload @@ -1913,29 +1995,29 @@ corresponding attributes. ) -> None: """Return or sets platform specific attributes. 
-When called with a single argument return_python_dict=True, -return a dict of the platform specific attributes and their values. -When called without arguments or with a single argument -return_python_dict=False, return a tuple containing intermixed -attribute names with the minus prefix and their values. - -When called with a single string value, return the value for the -specific option. When called with keyword arguments, set the -corresponding attributes. -""" + When called with a single argument return_python_dict=True, + return a dict of the platform specific attributes and their values. + When called without arguments or with a single argument + return_python_dict=False, return a tuple containing intermixed + attribute names with the minus prefix and their values. + When called with a single string value, return the value for the + specific option. When called with keyword arguments, set the + corresponding attributes. + """ attributes = wm_attributes def wm_client(self, name: str | None = None) -> str: """Store NAME in WM_CLIENT_MACHINE property of this widget. Return -current value. -""" + current value. + """ client = wm_client @overload def wm_colormapwindows(self) -> list[Misc]: """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property -of this widget. This list contains windows whose colormaps differ from their -parents. Return current list of widgets if WLIST is empty. -""" + of this widget. This list contains windows whose colormaps differ from their + parents. Return current list of widgets if WLIST is empty. + """ + @overload def wm_colormapwindows(self, wlist: list[Misc] | tuple[Misc, ...], /) -> None: ... @overload @@ -1943,156 +2025,159 @@ parents. Return current list of widgets if WLIST is empty. colormapwindows = wm_colormapwindows def wm_command(self, value: str | None = None) -> str: """Store VALUE in WM_COMMAND property. It is the command -which shall be used to invoke the application. Return current -command if VALUE is None. -""" + which shall be used to invoke the application. Return current + command if VALUE is None. + """ command = wm_command # Some of these always return empty string, but return type is set to None to prevent accidentally using it def wm_deiconify(self) -> None: """Deiconify this widget. If it was never mapped it will not be mapped. -On Windows it will raise this widget and give it the focus. -""" + On Windows it will raise this widget and give it the focus. + """ deiconify = wm_deiconify def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: """Set focus model to MODEL. "active" means that this widget will claim -the focus itself, "passive" means that the window manager shall give -the focus. Return current focus model if MODEL is None. -""" + the focus itself, "passive" means that the window manager shall give + the focus. Return current focus model if MODEL is None. + """ focusmodel = wm_focusmodel def wm_forget(self, window: Wm) -> None: """The window will be unmapped from the screen and will no longer -be managed by wm. toplevel windows will be treated like frame -windows once they are no longer managed by wm, however, the menu -option configuration will be remembered and the menus will return -once the widget is managed again. -""" + be managed by wm. toplevel windows will be treated like frame + windows once they are no longer managed by wm, however, the menu + option configuration will be remembered and the menus will return + once the widget is managed again. 
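A minimal sketch of the wm_attributes() call forms described in the overloads above (illustrative only; which attributes exist is platform specific, and a display is assumed):

    import tkinter as tk

    root = tk.Tk()

    # Query a single attribute using the option-string form.
    print(root.wm_attributes("-alpha"))

    # Set attributes with the option/value form; -alpha and -topmost are widely
    # supported, while e.g. -toolwindow is Windows-only.
    root.wm_attributes("-alpha", 0.9)
    root.wm_attributes("-topmost", True)

    root.after(100, root.destroy)
    root.mainloop()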
+ """ forget = wm_forget def wm_frame(self) -> str: - """Return identifier for decorative frame of this widget if present. -""" + """Return identifier for decorative frame of this widget if present.""" frame = wm_frame @overload def wm_geometry(self, newGeometry: None = None) -> str: """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return -current value if None is given. -""" + current value if None is given. + """ + @overload def wm_geometry(self, newGeometry: str) -> None: ... geometry = wm_geometry def wm_grid(self, baseWidth=None, baseHeight=None, widthInc=None, heightInc=None): """Instruct the window manager that this widget shall only be -resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and -height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the -number of grid units requested in Tk_GeometryRequest. -""" + resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and + height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the + number of grid units requested in Tk_GeometryRequest. + """ grid = wm_grid def wm_group(self, pathName=None): """Set the group leader widgets for related widgets to PATHNAME. Return -the group leader of this widget if None is given. -""" + the group leader of this widget if None is given. + """ group = wm_group def wm_iconbitmap(self, bitmap=None, default=None): """Set bitmap for the iconified widget to BITMAP. Return -the bitmap if None is given. + the bitmap if None is given. -Under Windows, the DEFAULT parameter can be used to set the icon -for the widget and any descendants that don't have an icon set -explicitly. DEFAULT can be the relative path to a .ico file -(example: root.iconbitmap(default='myicon.ico') ). See Tk -documentation for more information. -""" + Under Windows, the DEFAULT parameter can be used to set the icon + for the widget and any descendants that don't have an icon set + explicitly. DEFAULT can be the relative path to a .ico file + (example: root.iconbitmap(default='myicon.ico') ). See Tk + documentation for more information. + """ iconbitmap = wm_iconbitmap def wm_iconify(self) -> None: - """Display widget as icon. -""" + """Display widget as icon.""" iconify = wm_iconify def wm_iconmask(self, bitmap=None): """Set mask for the icon bitmap of this widget. Return the -mask if None is given. -""" + mask if None is given. + """ iconmask = wm_iconmask def wm_iconname(self, newName=None) -> str: """Set the name of the icon for this widget. Return the name if -None is given. -""" + None is given. + """ iconname = wm_iconname def wm_iconphoto(self, default: bool, image1: _PhotoImageLike | str, /, *args: _PhotoImageLike | str) -> None: """Sets the titlebar icon for this window based on the named photo -images passed through args. If default is True, this is applied to -all future created toplevels as well. + images passed through args. If default is True, this is applied to + all future created toplevels as well. -The data in the images is taken as a snapshot at the time of -invocation. If the images are later changed, this is not reflected -to the titlebar icons. Multiple images are accepted to allow -different images sizes to be provided. The window manager may scale -provided icons to an appropriate size. + The data in the images is taken as a snapshot at the time of + invocation. If the images are later changed, this is not reflected + to the titlebar icons. Multiple images are accepted to allow + different images sizes to be provided. 
The window manager may scale + provided icons to an appropriate size. -On Windows, the images are packed into a Windows icon structure. -This will override an icon specified to wm_iconbitmap, and vice -versa. + On Windows, the images are packed into a Windows icon structure. + This will override an icon specified to wm_iconbitmap, and vice + versa. -On X, the images are arranged into the _NET_WM_ICON X property, -which most modern window managers support. An icon specified by -wm_iconbitmap may exist simultaneously. + On X, the images are arranged into the _NET_WM_ICON X property, + which most modern window managers support. An icon specified by + wm_iconbitmap may exist simultaneously. -On Macintosh, this currently does nothing. -""" + On Macintosh, this currently does nothing. + """ iconphoto = wm_iconphoto def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: """Set the position of the icon of this widget to X and Y. Return -a tuple of the current values of X and X if None is given. -""" + a tuple of the current values of X and X if None is given. + """ iconposition = wm_iconposition def wm_iconwindow(self, pathName=None): """Set widget PATHNAME to be displayed instead of icon. Return the current -value if None is given. -""" + value if None is given. + """ iconwindow = wm_iconwindow def wm_manage(self, widget) -> None: """The widget specified will become a stand alone top-level window. -The window will be decorated with the window managers title bar, -etc. -""" + The window will be decorated with the window managers title bar, + etc. + """ manage = wm_manage @overload def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: """Set max WIDTH and HEIGHT for this widget. If the window is gridded -the values are given in grid units. Return the current values if None -is given. -""" + the values are given in grid units. Return the current values if None + is given. + """ + @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: """Set min WIDTH and HEIGHT for this widget. If the window is gridded -the values are given in grid units. Return the current values if None -is given. -""" + the values are given in grid units. Return the current values if None + is given. + """ + @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @overload def wm_overrideredirect(self, boolean: None = None) -> bool | None: # returns True or None """Instruct the window manager to ignore this widget -if BOOLEAN is given with 1. Return the current value if None -is given. -""" + if BOOLEAN is given with 1. Return the current value if None + is given. + """ + @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: """Instruct the window manager that the position of this widget shall -be defined by the user if WHO is "user", and by its own policy if WHO is -"program". -""" + be defined by the user if WHO is "user", and by its own policy if WHO is + "program". + """ positionfrom = wm_positionfrom @overload def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: """Bind function FUNC to command NAME for this widget. -Return the function bound to NAME if None is given. NAME could be -e.g. 
"WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW". -""" + Return the function bound to NAME if None is given. NAME could be + e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW". + """ + @overload def wm_protocol(self, name: str, func: None = None) -> str: ... @overload @@ -2101,50 +2186,54 @@ e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW". @overload def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: """Instruct the window manager whether this width can be resized -in WIDTH or HEIGHT. Both values are boolean values. -""" + in WIDTH or HEIGHT. Both values are boolean values. + """ + @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: """Instruct the window manager that the size of this widget shall -be defined by the user if WHO is "user", and by its own policy if WHO is -"program". -""" + be defined by the user if WHO is "user", and by its own policy if WHO is + "program". + """ sizefrom = wm_sizefrom @overload def wm_state(self, newstate: None = None) -> str: """Query or set the state of this widget as one of normal, icon, -iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only). -""" + iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only). + """ + @overload def wm_state(self, newstate: str) -> None: ... state = wm_state @overload def wm_title(self, string: None = None) -> str: - """Set the title of this widget. -""" + """Set the title of this widget.""" + @overload def wm_title(self, string: str) -> None: ... title = wm_title @overload def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: """Instruct the window manager that this widget is transient -with regard to widget MASTER. -""" + with regard to widget MASTER. + """ + @overload def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... transient = wm_transient def wm_withdraw(self) -> None: """Withdraw this widget from the screen such that it is unmapped -and forgotten by the window manager. Re-draw it with wm_deiconify. -""" + and forgotten by the window manager. Re-draw it with wm_deiconify. + """ withdraw = wm_withdraw class Tk(Misc, Wm): """Toplevel widget of Tk which represents mostly the main window -of an application. It has an associated Tcl interpreter. -""" + of an application. It has an associated Tcl interpreter. + """ + master: None def __init__( # Make sure to keep in sync with other functions that use the same @@ -2159,11 +2248,11 @@ of an application. It has an associated Tcl interpreter. use: str | None = None, ) -> None: """Return a new top level widget on screen SCREENNAME. A new Tcl interpreter will -be created. BASENAME will be used for the identification of the profile file (see -readprofile). -It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME -is the name of the widget class. -""" + be created. BASENAME will be used for the identification of the profile file (see + readprofile). + It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME + is the name of the widget class. + """ # Keep this in sync with ttktheme.ThemedTk. See issue #13858 @overload def configure( @@ -2189,22 +2278,24 @@ is the name of the widget class. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. 
-""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def destroy(self) -> None: """Destroy this and all descendants widgets. This will -end the application of this Tcl interpreter. -""" + end the application of this Tcl interpreter. + """ + def readprofile(self, baseName: str, className: str) -> None: """Internal function. It reads .BASENAME.tcl and .CLASSNAME.tcl into -the Tcl Interpreter and calls exec on the contents of .BASENAME.py and -.CLASSNAME.py if such a file exists in the home directory. -""" + the Tcl Interpreter and calls exec on the contents of .BASENAME.py and + .CLASSNAME.py if such a file exists in the home directory. + """ report_callback_exception: Callable[[type[BaseException], BaseException, TracebackType | None], object] # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo # Please keep in sync with _tkinter.TkappType. @@ -2261,8 +2352,9 @@ class _PackInfo(_InMiscTotal): class Pack: """Geometry manager Pack. -Base class to use the methods pack_* in every widget. -""" + Base class to use the methods pack_* in every widget. + """ + # _PackInfo is not the valid type for cnf because pad stuff accepts any # screen units instead of int only. I didn't bother to create another # TypedDict for cnf because it appears to be a legacy thing that was @@ -2285,27 +2377,28 @@ Base class to use the methods pack_* in every widget. **kw: Any, # allow keyword argument named 'in', see #4836 ) -> None: """Pack a widget in the parent widget. Use as options: -after=widget - pack it after you have packed widget -anchor=NSEW (or subset) - position widget according to - given direction -before=widget - pack it before you will pack widget -expand=bool - expand widget if parent size grows -fill=NONE or X or Y or BOTH - fill widget if widget grows -in=master - use master to contain this widget -in_=master - see 'in' option description -ipadx=amount - add internal padding in x direction -ipady=amount - add internal padding in y direction -padx=amount - add padding in x direction -pady=amount - add padding in y direction -side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget. -""" + after=widget - pack it after you have packed widget + anchor=NSEW (or subset) - position widget according to + given direction + before=widget - pack it before you will pack widget + expand=bool - expand widget if parent size grows + fill=NONE or X or Y or BOTH - fill widget if widget grows + in=master - use master to contain this widget + in_=master - see 'in' option description + ipadx=amount - add internal padding in x direction + ipady=amount - add internal padding in y direction + padx=amount - add padding in x direction + pady=amount - add padding in y direction + side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget. + """ + def pack_forget(self) -> None: - """Unmap this widget and do not use it for the packing order. -""" + """Unmap this widget and do not use it for the packing order.""" + def pack_info(self) -> _PackInfo: # errors if widget hasn't been packed """Return information about the packing options -for this widget. -""" + for this widget. + """ pack = pack_configure forget = pack_forget propagate = Misc.pack_propagate @@ -2326,8 +2419,9 @@ class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed class Place: """Geometry manager Place. 
-Base class to use the methods place_* in every widget. -""" + Base class to use the methods place_* in every widget. + """ + def place_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -2347,33 +2441,34 @@ Base class to use the methods place_* in every widget. **kw: Any, # allow keyword argument named 'in', see #4836 ) -> None: """Place a widget in the parent widget. Use as options: -in=master - master relative to which the widget is placed -in_=master - see 'in' option description -x=amount - locate anchor of this widget at position x of master -y=amount - locate anchor of this widget at position y of master -relx=amount - locate anchor of this widget between 0.0 and 1.0 - relative to width of master (1.0 is right edge) -rely=amount - locate anchor of this widget between 0.0 and 1.0 - relative to height of master (1.0 is bottom edge) -anchor=NSEW (or subset) - position anchor according to given direction -width=amount - width of this widget in pixel -height=amount - height of this widget in pixel -relwidth=amount - width of this widget between 0.0 and 1.0 - relative to width of master (1.0 is the same width - as the master) -relheight=amount - height of this widget between 0.0 and 1.0 - relative to height of master (1.0 is the same - height as the master) -bordermode="inside" or "outside" - whether to take border width of - master widget into account -""" + in=master - master relative to which the widget is placed + in_=master - see 'in' option description + x=amount - locate anchor of this widget at position x of master + y=amount - locate anchor of this widget at position y of master + relx=amount - locate anchor of this widget between 0.0 and 1.0 + relative to width of master (1.0 is right edge) + rely=amount - locate anchor of this widget between 0.0 and 1.0 + relative to height of master (1.0 is bottom edge) + anchor=NSEW (or subset) - position anchor according to given direction + width=amount - width of this widget in pixel + height=amount - height of this widget in pixel + relwidth=amount - width of this widget between 0.0 and 1.0 + relative to width of master (1.0 is the same width + as the master) + relheight=amount - height of this widget between 0.0 and 1.0 + relative to height of master (1.0 is the same + height as the master) + bordermode="inside" or "outside" - whether to take border width of + master widget into account + """ + def place_forget(self) -> None: - """Unmap this widget. -""" + """Unmap this widget.""" + def place_info(self) -> _PlaceInfo: """Return information about the placing options -for this widget. -""" + for this widget. + """ place = place_configure info = place_info @@ -2392,8 +2487,9 @@ class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded class Grid: """Geometry manager Grid. -Base class to use the methods grid_* in every widget. -""" + Base class to use the methods grid_* in every widget. + """ + def grid_configure( self, cnf: Mapping[str, Any] | None = {}, @@ -2411,53 +2507,55 @@ Base class to use the methods grid_* in every widget. **kw: Any, # allow keyword argument named 'in', see #4836 ) -> None: """Position a widget in the parent widget in a grid. 
Use as options: -column=number - use cell identified with given column (starting with 0) -columnspan=number - this widget will span several columns -in=master - use master to contain this widget -in_=master - see 'in' option description -ipadx=amount - add internal padding in x direction -ipady=amount - add internal padding in y direction -padx=amount - add padding in x direction -pady=amount - add padding in y direction -row=number - use cell identified with given row (starting with 0) -rowspan=number - this widget will span several rows -sticky=NSEW - if cell is larger on which sides will this - widget stick to the cell boundary -""" + column=number - use cell identified with given column (starting with 0) + columnspan=number - this widget will span several columns + in=master - use master to contain this widget + in_=master - see 'in' option description + ipadx=amount - add internal padding in x direction + ipady=amount - add internal padding in y direction + padx=amount - add padding in x direction + pady=amount - add padding in y direction + row=number - use cell identified with given row (starting with 0) + rowspan=number - this widget will span several rows + sticky=NSEW - if cell is larger on which sides will this + widget stick to the cell boundary + """ + def grid_forget(self) -> None: - """Unmap this widget. -""" + """Unmap this widget.""" + def grid_remove(self) -> None: - """Unmap this widget but remember the grid options. -""" + """Unmap this widget but remember the grid options.""" + def grid_info(self) -> _GridInfo: """Return information about the options -for positioning this widget in a grid. -""" + for positioning this widget in a grid. + """ grid = grid_configure location = Misc.grid_location size = Misc.grid_size class BaseWidget(Misc): - """Internal class. -""" + """Internal class.""" + master: Misc widgetName: str def __init__(self, master, widgetName: str, cnf={}, kw={}, extra=()) -> None: """Construct a widget with the parent widget MASTER, a name WIDGETNAME -and appropriate options. -""" + and appropriate options. + """ + def destroy(self) -> None: - """Destroy this and all descendants widgets. -""" + """Destroy this and all descendants widgets.""" # This class represents any widget except Toplevel or Tk. class Widget(BaseWidget, Pack, Place, Grid): """Internal class. -Base class for a widget which can be positioned with the geometry managers -Pack, Place or Grid. -""" + Base class for a widget which can be positioned with the geometry managers + Pack, Place or Grid. + """ + # Allow bind callbacks to take e.g. Event[Label] instead of Event[Misc]. # Tk and Toplevel get notified for their child widgets' events, but other # widgets don't. @@ -2470,50 +2568,51 @@ Pack, Place or Grid. ) -> str: """Bind to this widget at event SEQUENCE a call to function FUNC. -SEQUENCE is a string of concatenated event -patterns. An event pattern is of the form - where MODIFIER is one -of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, -Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, -B3, Alt, Button4, B4, Double, Button5, B5 Triple, -Mod1, M1. TYPE is one of Activate, Enter, Map, -ButtonPress, Button, Expose, Motion, ButtonRelease -FocusIn, MouseWheel, Circulate, FocusOut, Property, -Colormap, Gravity Reparent, Configure, KeyPress, Key, -Unmap, Deactivate, KeyRelease Visibility, Destroy, -Leave and DETAIL is the button number for ButtonPress, -ButtonRelease and DETAIL is the Keysym for KeyPress and -KeyRelease. 
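
A short sketch of the grid options listed above, assuming a simple two-row form:

import tkinter as tk

root = tk.Tk()
tk.Label(root, text="Name:").grid(row=0, column=0, sticky="e", padx=4, pady=4)
tk.Entry(root).grid(row=0, column=1, sticky="we")
tk.Button(root, text="OK").grid(row=1, column=0, columnspan=2, sticky="nsew")
root.columnconfigure(1, weight=1)   # let column 1 absorb extra width
root.mainloop()
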
Examples are - for pressing Control and mouse button 1 or - for pressing A and the Alt key (KeyPress can be omitted). -An event pattern can also be a virtual event of the form -<> where AString can be arbitrary. This -event can be generated by event_generate. -If events are concatenated they must appear shortly -after each other. - -FUNC will be called if the event sequence occurs with an -instance of Event as argument. If the return value of FUNC is -"break" no further bound function is invoked. - -An additional boolean parameter ADD specifies whether FUNC will -be called additionally to the other bound function or whether -it will replace the previous function. - -Bind will return an identifier to allow deletion of the bound function with -unbind without memory leak. - -If FUNC or SEQUENCE is omitted the bound function or list -of bound events are returned. -""" + SEQUENCE is a string of concatenated event + patterns. An event pattern is of the form + where MODIFIER is one + of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4, + Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3, + B3, Alt, Button4, B4, Double, Button5, B5 Triple, + Mod1, M1. TYPE is one of Activate, Enter, Map, + ButtonPress, Button, Expose, Motion, ButtonRelease + FocusIn, MouseWheel, Circulate, FocusOut, Property, + Colormap, Gravity Reparent, Configure, KeyPress, Key, + Unmap, Deactivate, KeyRelease Visibility, Destroy, + Leave and DETAIL is the button number for ButtonPress, + ButtonRelease and DETAIL is the Keysym for KeyPress and + KeyRelease. Examples are + for pressing Control and mouse button 1 or + for pressing A and the Alt key (KeyPress can be omitted). + An event pattern can also be a virtual event of the form + <> where AString can be arbitrary. This + event can be generated by event_generate. + If events are concatenated they must appear shortly + after each other. + + FUNC will be called if the event sequence occurs with an + instance of Event as argument. If the return value of FUNC is + "break" no further bound function is invoked. + + An additional boolean parameter ADD specifies whether FUNC will + be called additionally to the other bound function or whether + it will replace the previous function. + + Bind will return an identifier to allow deletion of the bound function with + unbind without memory leak. + + If FUNC or SEQUENCE is omitted the bound function or list + of bound events are returned. + """ + @overload def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... class Toplevel(BaseWidget, Wm): - """Toplevel widget, e.g. for dialogs. -""" + """Toplevel widget, e.g. for dialogs.""" + # Toplevel and Tk have the same options because they correspond to the same # Tcl/Tk toplevel widget. For some reason, config and configure must be # copy/pasted here instead of aliasing as 'config = Tk.config'. @@ -2548,11 +2647,12 @@ class Toplevel(BaseWidget, Wm): ) -> None: """Construct a toplevel widget with the parent MASTER. -Valid resource names: background, bd, bg, borderwidth, class, -colormap, container, cursor, height, highlightbackground, -highlightcolor, highlightthickness, menu, relief, screen, takefocus, -use, visual, width. -""" + Valid resource names: background, bd, bg, borderwidth, class, + colormap, container, cursor, height, highlightbackground, + highlightcolor, highlightthickness, menu, relief, screen, takefocus, + use, visual, width. 
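
To illustrate the event patterns and return-value rules described above, a minimal sketch with a hypothetical handler:

import tkinter as tk

root = tk.Tk()
frame = tk.Frame(root, width=200, height=120, bg="white")
frame.pack(fill="both", expand=True)

def on_ctrl_click(event: tk.Event) -> str:
    print("clicked at", event.x, event.y)
    return "break"                       # suppress any further bound handlers

frame.bind("<Control-Button-1>", on_ctrl_click, add="+")
frame.bind("<<Refresh>>", lambda event: print("virtual event"))
root.mainloop()
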
+ """ + @overload def configure( self, @@ -2577,17 +2677,18 @@ use, visual, width. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Button(Widget): - """Button widget. -""" + """Button widget.""" + def __init__( self, master: Misc | None = None, @@ -2638,22 +2739,23 @@ class Button(Widget): ) -> None: """Construct a button widget with the parent MASTER. -STANDARD OPTIONS + STANDARD OPTIONS - activebackground, activeforeground, anchor, - background, bitmap, borderwidth, cursor, - disabledforeground, font, foreground - highlightbackground, highlightcolor, - highlightthickness, image, justify, - padx, pady, relief, repeatdelay, - repeatinterval, takefocus, text, - textvariable, underline, wraplength + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, repeatdelay, + repeatinterval, takefocus, text, + textvariable, underline, wraplength -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, compound, default, height, + overrelief, state, width + """ - command, compound, default, height, - overrelief, state, width -""" @overload def configure( self, @@ -2698,35 +2800,37 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def flash(self) -> None: """Flash the button. -This is accomplished by redisplaying -the button several times, alternating between active and -normal colors. At the end of the flash the button is left -in the same normal/active state as when the command was -invoked. This command is ignored if the button's state is -disabled. -""" + This is accomplished by redisplaying + the button several times, alternating between active and + normal colors. At the end of the flash the button is left + in the same normal/active state as when the command was + invoked. This command is ignored if the button's state is + disabled. + """ + def invoke(self) -> Any: """Invoke the command associated with the button. -The return value is the return value from the command, -or an empty string if there is no command associated with -the button. This command is ignored if the button's state -is disabled. -""" + The return value is the return value from the command, + or an empty string if there is no command associated with + the button. This command is ignored if the button's state + is disabled. + """ class Canvas(Widget, XView, YView): - """Canvas widget to display graphical elements like lines or text. 
-""" + """Canvas widget to display graphical elements like lines or text.""" + def __init__( self, master: Misc | None = None, @@ -2769,14 +2873,15 @@ class Canvas(Widget, XView, YView): ) -> None: """Construct a canvas widget with the parent MASTER. -Valid resource names: background, bd, bg, borderwidth, closeenough, -confine, cursor, height, highlightbackground, highlightcolor, -highlightthickness, insertbackground, insertborderwidth, -insertofftime, insertontime, insertwidth, offset, relief, -scrollregion, selectbackground, selectborderwidth, selectforeground, -state, takefocus, width, xscrollcommand, xscrollincrement, -yscrollcommand, yscrollincrement. -""" + Valid resource names: background, bd, bg, borderwidth, closeenough, + confine, cursor, height, highlightbackground, highlightcolor, + highlightthickness, insertbackground, insertborderwidth, + insertofftime, insertontime, insertwidth, offset, relief, + scrollregion, selectbackground, selectborderwidth, selectforeground, + state, takefocus, width, xscrollcommand, xscrollincrement, + yscrollcommand, yscrollincrement. + """ + @overload def configure( self, @@ -2815,80 +2920,87 @@ yscrollcommand, yscrollincrement. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def addtag(self, *args): # internal method - """Internal function. -""" + """Internal function.""" + def addtag_above(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items above TAGORID. -""" + """Add tag NEWTAG to all items above TAGORID.""" + def addtag_all(self, newtag: str) -> None: - """Add tag NEWTAG to all items. -""" + """Add tag NEWTAG to all items.""" + def addtag_below(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items below TAGORID. -""" + """Add tag NEWTAG to all items below TAGORID.""" + def addtag_closest( self, newtag: str, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None ) -> None: """Add tag NEWTAG to item which is closest to pixel at X, Y. -If several match take the top-most. -All items closer than HALO are considered overlapping (all are -closest). If START is specified the next below this tag is taken. -""" + If several match take the top-most. + All items closer than HALO are considered overlapping (all are + closest). If START is specified the next below this tag is taken. + """ + def addtag_enclosed(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: """Add tag NEWTAG to all items in the rectangle defined -by X1,Y1,X2,Y2. -""" + by X1,Y1,X2,Y2. + """ + def addtag_overlapping(self, newtag: str, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> None: """Add tag NEWTAG to all items which overlap the rectangle -defined by X1,Y1,X2,Y2. -""" + defined by X1,Y1,X2,Y2. + """ + def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: - """Add tag NEWTAG to all items with TAGORID. -""" + """Add tag NEWTAG to all items with TAGORID.""" + def find(self, *args): # internal method - """Internal function. 
-""" + """Internal function.""" + def find_above(self, tagOrId: str | int) -> tuple[int, ...]: - """Return items above TAGORID. -""" + """Return items above TAGORID.""" + def find_all(self) -> tuple[int, ...]: - """Return all items. -""" + """Return all items.""" + def find_below(self, tagOrId: str | int) -> tuple[int, ...]: - """Return all items below TAGORID. -""" + """Return all items below TAGORID.""" + def find_closest( self, x: float | str, y: float | str, halo: float | str | None = None, start: str | int | None = None ) -> tuple[int, ...]: """Return item which is closest to pixel at X, Y. -If several match take the top-most. -All items closer than HALO are considered overlapping (all are -closest). If START is specified the next below this tag is taken. -""" + If several match take the top-most. + All items closer than HALO are considered overlapping (all are + closest). If START is specified the next below this tag is taken. + """ + def find_enclosed(self, x1: float | str, y1: float | str, x2: float | str, y2: float | str) -> tuple[int, ...]: """Return all items in rectangle defined -by X1,Y1,X2,Y2. -""" + by X1,Y1,X2,Y2. + """ + def find_overlapping(self, x1: float | str, y1: float | str, x2: float | str, y2: float) -> tuple[int, ...]: """Return all items which overlap the rectangle -defined by X1,Y1,X2,Y2. -""" + defined by X1,Y1,X2,Y2. + """ + def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: - """Return all items with TAGORID. -""" + """Return all items with TAGORID.""" # Incompatible with Misc.bbox(), tkinter violates LSP def bbox(self, *args: str | int) -> tuple[int, int, int, int]: # type: ignore[override] """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle -which encloses all items with tags specified as arguments. -""" + which encloses all items with tags specified as arguments. + """ + @overload def tag_bind( self, @@ -2899,10 +3011,11 @@ which encloses all items with tags specified as arguments. ) -> str: """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC. -An additional boolean parameter ADD specifies whether FUNC will be -called additionally to the other bound function or whether it will -replace the previous function. See bind for the return value. -""" + An additional boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or whether it will + replace the previous function. See bind for the return value. + """ + @overload def tag_bind( self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None @@ -2911,20 +3024,23 @@ replace the previous function. See bind for the return value. def tag_bind(self, tagOrId: str | int, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: """Unbind for all items with TAGORID for event SEQUENCE the -function identified with FUNCID. -""" + function identified with FUNCID. + """ + def canvasx(self, screenx, gridspacing=None): """Return the canvas x coordinate of pixel position SCREENX rounded -to nearest multiple of GRIDSPACING units. -""" + to nearest multiple of GRIDSPACING units. + """ + def canvasy(self, screeny, gridspacing=None): """Return the canvas y coordinate of pixel position SCREENY rounded -to nearest multiple of GRIDSPACING units. -""" + to nearest multiple of GRIDSPACING units. 
+ """ + @overload def coords(self, tagOrId: str | int, /) -> list[float]: - """Return a list of coordinates for the item given in ARGS. -""" + """Return a list of coordinates for the item given in ARGS.""" + @overload def coords(self, tagOrId: str | int, args: list[int] | list[float] | tuple[float, ...], /) -> None: ... @overload @@ -2933,14 +3049,14 @@ to nearest multiple of GRIDSPACING units. # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. # Keyword arguments should be the same in all overloads of each method. def create_arc(self, *args, **kw) -> int: - """Create arc shaped region with coordinates x1,y1,x2,y2. -""" + """Create arc shaped region with coordinates x1,y1,x2,y2.""" + def create_bitmap(self, *args, **kw) -> int: - """Create bitmap with coordinates x1,y1. -""" + """Create bitmap with coordinates x1,y1.""" + def create_image(self, *args, **kw) -> int: - """Create image item with coordinates x1,y1. -""" + """Create image item with coordinates x1,y1.""" + @overload def create_line( self, @@ -2973,8 +3089,8 @@ to nearest multiple of GRIDSPACING units. tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., ) -> int: - """Create line with coordinates x1,y1,...,xn,yn. -""" + """Create line with coordinates x1,y1,...,xn,yn.""" + @overload def create_line( self, @@ -3074,8 +3190,8 @@ to nearest multiple of GRIDSPACING units. tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., ) -> int: - """Create oval with coordinates x1,y1,x2,y2. -""" + """Create oval with coordinates x1,y1,x2,y2.""" + @overload def create_oval( self, @@ -3180,8 +3296,8 @@ to nearest multiple of GRIDSPACING units. tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., ) -> int: - """Create polygon with coordinates x1,y1,...,xn,yn. -""" + """Create polygon with coordinates x1,y1,...,xn,yn.""" + @overload def create_polygon( self, @@ -3289,8 +3405,8 @@ to nearest multiple of GRIDSPACING units. tags: str | list[str] | tuple[str, ...] = ..., width: float | str = ..., ) -> int: - """Create rectangle with coordinates x1,y1,x2,y2. -""" + """Create rectangle with coordinates x1,y1,x2,y2.""" + @overload def create_rectangle( self, @@ -3382,8 +3498,8 @@ to nearest multiple of GRIDSPACING units. text: float | str = ..., width: float | str = ..., ) -> int: - """Create text with coordinates x1,y1. -""" + """Create text with coordinates x1,y1.""" + @overload def create_text( self, @@ -3420,8 +3536,8 @@ to nearest multiple of GRIDSPACING units. width: float | str = ..., window: Widget = ..., ) -> int: - """Create window with coordinates x1,y1,x2,y2. -""" + """Create window with coordinates x1,y1,x2,y2.""" + @overload def create_window( self, @@ -3437,66 +3553,70 @@ to nearest multiple of GRIDSPACING units. ) -> int: ... def dchars(self, *args) -> None: """Delete characters of text items identified by tag or id in ARGS (possibly -several times) from FIRST to LAST character (including). -""" + several times) from FIRST to LAST character (including). + """ + def delete(self, *tagsOrCanvasIds: str | int) -> None: - """Delete items identified by all tag or ids contained in ARGS. -""" + """Delete items identified by all tag or ids contained in ARGS.""" + @overload def dtag(self, tag: str, tag_to_delete: str | None = ..., /) -> None: """Delete tag or id given as last arguments in ARGS from items -identified by first argument in ARGS. -""" + identified by first argument in ARGS. 
+ """ + @overload def dtag(self, id: int, tag_to_delete: str, /) -> None: ... def focus(self, *args): - """Set focus to the first item specified in ARGS. -""" + """Set focus to the first item specified in ARGS.""" + def gettags(self, tagOrId: str | int, /) -> tuple[str, ...]: - """Return tags associated with the first item specified in ARGS. -""" + """Return tags associated with the first item specified in ARGS.""" + def icursor(self, *args) -> None: """Set cursor at position POS in the item identified by TAGORID. -In ARGS TAGORID must be first. -""" + In ARGS TAGORID must be first. + """ + def index(self, *args): - """Return position of cursor as integer in item specified in ARGS. -""" + """Return position of cursor as integer in item specified in ARGS.""" + def insert(self, *args) -> None: """Insert TEXT in item TAGORID at position POS. ARGS must -be TAGORID POS TEXT. -""" + be TAGORID POS TEXT. + """ + def itemcget(self, tagOrId, option): - """Return the resource value for an OPTION for item TAGORID. -""" + """Return the resource value for an OPTION for item TAGORID.""" # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( self, tagOrId: str | int, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, str, str]] | None: """Configure resources of an item TAGORID. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method without arguments. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method without arguments. + """ itemconfig = itemconfigure def move(self, *args) -> None: - """Move an item TAGORID given in ARGS. -""" + """Move an item TAGORID given in ARGS.""" + def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: """Move the items given by TAGORID in the canvas coordinate -space so that the first coordinate pair of the bottommost -item with tag TAGORID is located at position (X,Y). -X and Y may be the empty string, in which case the -corresponding coordinate will be unchanged. All items matching -TAGORID remain in the same positions relative to each other. -""" + space so that the first coordinate pair of the bottommost + item with tag TAGORID is located at position (X,Y). + X and Y may be the empty string, in which case the + corresponding coordinate will be unchanged. All items matching + TAGORID remain in the same positions relative to each other. + """ + def postscript(self, cnf={}, **kw): """Print the contents of the canvas to a postscript -file. Valid options: colormap, colormode, file, fontmap, -height, pageanchor, pageheight, pagewidth, pagex, pagey, -rotate, width, x, y. -""" + file. Valid options: colormap, colormode, file, fontmap, + height, pageanchor, pageheight, pagewidth, pagex, pagey, + rotate, width, x, y. + """ # tkinter does: # lower = tag_lower # lift = tkraise = tag_raise @@ -3504,57 +3624,62 @@ rotate, width, x, y. # But mypy doesn't like aliasing here (maybe because Misc defines the same names) def tag_lower(self, first: str | int, second: str | int | None = ..., /) -> None: """Lower an item TAGORID given in ARGS -(optional below another item). -""" + (optional below another item). + """ + def lower(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] """Lower an item TAGORID given in ARGS -(optional below another item). 
-""" + (optional below another item). + """ + def tag_raise(self, first: str | int, second: str | int | None = ..., /) -> None: """Raise an item TAGORID given in ARGS -(optional above another item). -""" + (optional above another item). + """ + def tkraise(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] """Raise an item TAGORID given in ARGS -(optional above another item). -""" + (optional above another item). + """ + def lift(self, first: str | int, second: str | int | None = ..., /) -> None: # type: ignore[override] """Raise an item TAGORID given in ARGS -(optional above another item). -""" + (optional above another item). + """ + def scale(self, tagOrId: str | int, xOrigin: float | str, yOrigin: float | str, xScale: float, yScale: float, /) -> None: - """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE. -""" + """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE.""" + def scan_mark(self, x, y) -> None: - """Remember the current X, Y coordinates. -""" + """Remember the current X, Y coordinates.""" + def scan_dragto(self, x, y, gain: int = 10) -> None: """Adjust the view of the canvas to GAIN times the -difference between X and Y and the coordinates given in -scan_mark. -""" + difference between X and Y and the coordinates given in + scan_mark. + """ + def select_adjust(self, tagOrId, index) -> None: - """Adjust the end of the selection near the cursor of an item TAGORID to index. -""" + """Adjust the end of the selection near the cursor of an item TAGORID to index.""" + def select_clear(self) -> None: - """Clear the selection if it is in this widget. -""" + """Clear the selection if it is in this widget.""" + def select_from(self, tagOrId, index) -> None: - """Set the fixed end of a selection in item TAGORID to INDEX. -""" + """Set the fixed end of a selection in item TAGORID to INDEX.""" + def select_item(self): - """Return the item which has the selection. -""" + """Return the item which has the selection.""" + def select_to(self, tagOrId, index) -> None: - """Set the variable end of a selection in item TAGORID to INDEX. -""" + """Set the variable end of a selection in item TAGORID to INDEX.""" + def type(self, tagOrId: str | int) -> int | None: - """Return the type of the item TAGORID. -""" + """Return the type of the item TAGORID.""" class Checkbutton(Widget): - """Checkbutton widget which is either in on- or off-state. -""" + """Checkbutton widget which is either in on- or off-state.""" + def __init__( self, master: Misc | None = None, @@ -3616,14 +3741,15 @@ class Checkbutton(Widget): ) -> None: """Construct a checkbutton widget with the parent MASTER. -Valid resource names: activebackground, activeforeground, anchor, -background, bd, bg, bitmap, borderwidth, command, cursor, -disabledforeground, fg, font, foreground, height, -highlightbackground, highlightcolor, highlightthickness, image, -indicatoron, justify, offvalue, onvalue, padx, pady, relief, -selectcolor, selectimage, state, takefocus, text, textvariable, -underline, variable, width, wraplength. -""" + Valid resource names: activebackground, activeforeground, anchor, + background, bd, bg, bitmap, borderwidth, command, cursor, + disabledforeground, fg, font, foreground, height, + highlightbackground, highlightcolor, highlightthickness, image, + indicatoron, justify, offvalue, onvalue, padx, pady, relief, + selectcolor, selectimage, state, takefocus, text, textvariable, + underline, variable, width, wraplength. 
+ """ + @overload def configure( self, @@ -3674,32 +3800,32 @@ underline, variable, width, wraplength. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def deselect(self) -> None: - """Put the button in off-state. -""" + """Put the button in off-state.""" + def flash(self) -> None: - """Flash the button. -""" + """Flash the button.""" + def invoke(self) -> Any: - """Toggle the button and invoke a command if given as resource. -""" + """Toggle the button and invoke a command if given as resource.""" + def select(self) -> None: - """Put the button in on-state. -""" + """Put the button in on-state.""" + def toggle(self) -> None: - """Toggle the button. -""" + """Toggle the button.""" class Entry(Widget, XView): - """Entry widget which allows displaying simple text. -""" + """Entry widget which allows displaying simple text.""" + def __init__( self, master: Misc | None = None, @@ -3746,15 +3872,16 @@ class Entry(Widget, XView): ) -> None: """Construct an entry widget with the parent MASTER. -Valid resource names: background, bd, bg, borderwidth, cursor, -exportselection, fg, font, foreground, highlightbackground, -highlightcolor, highlightthickness, insertbackground, -insertborderwidth, insertofftime, insertontime, insertwidth, -invalidcommand, invcmd, justify, relief, selectbackground, -selectborderwidth, selectforeground, show, state, takefocus, -textvariable, validate, validatecommand, vcmd, width, -xscrollcommand. -""" + Valid resource names: background, bd, bg, borderwidth, cursor, + exportselection, fg, font, foreground, highlightbackground, + highlightcolor, highlightthickness, insertbackground, + insertborderwidth, insertofftime, insertontime, insertwidth, + invalidcommand, invcmd, justify, relief, selectbackground, + selectborderwidth, selectforeground, show, state, takefocus, + textvariable, validate, validatecommand, vcmd, width, + xscrollcommand. + """ + @overload def configure( self, @@ -3800,55 +3927,57 @@ xscrollcommand. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def delete(self, first: str | int, last: str | int | None = None) -> None: - """Delete text from FIRST to LAST (not included). -""" + """Delete text from FIRST to LAST (not included).""" + def get(self) -> str: - """Return the text. -""" + """Return the text.""" + def icursor(self, index: str | int) -> None: - """Insert cursor at INDEX. -""" + """Insert cursor at INDEX.""" + def index(self, index: str | int) -> int: - """Return position of cursor. -""" + """Return position of cursor.""" + def insert(self, index: str | int, string: str) -> None: - """Insert STRING at INDEX. 
-""" + """Insert STRING at INDEX.""" + def scan_mark(self, x) -> None: - """Remember the current X, Y coordinates. -""" + """Remember the current X, Y coordinates.""" + def scan_dragto(self, x) -> None: """Adjust the view of the canvas to 10 times the -difference between X and Y and the coordinates given in -scan_mark. -""" + difference between X and Y and the coordinates given in + scan_mark. + """ + def selection_adjust(self, index: str | int) -> None: - """Adjust the end of the selection near the cursor to INDEX. -""" + """Adjust the end of the selection near the cursor to INDEX.""" + def selection_clear(self) -> None: # type: ignore[override] - """Clear the selection if it is in this widget. -""" + """Clear the selection if it is in this widget.""" + def selection_from(self, index: str | int) -> None: - """Set the fixed end of a selection to INDEX. -""" + """Set the fixed end of a selection to INDEX.""" + def selection_present(self) -> bool: """Return True if there are characters selected in the entry, False -otherwise. -""" + otherwise. + """ + def selection_range(self, start: str | int, end: str | int) -> None: - """Set the selection from START to END (not included). -""" + """Set the selection from START to END (not included).""" + def selection_to(self, index: str | int) -> None: - """Set the variable end of a selection to INDEX. -""" + """Set the variable end of a selection to INDEX.""" select_adjust = selection_adjust select_clear = selection_clear select_from = selection_from @@ -3857,8 +3986,8 @@ otherwise. select_to = selection_to class Frame(Widget): - """Frame widget which may contain other widgets and can have a 3D border. -""" + """Frame widget which may contain other widgets and can have a 3D border.""" + def __init__( self, master: Misc | None = None, @@ -3887,10 +4016,11 @@ class Frame(Widget): ) -> None: """Construct a frame widget with the parent MASTER. -Valid resource names: background, bd, bg, borderwidth, class, -colormap, container, cursor, height, highlightbackground, -highlightcolor, highlightthickness, relief, takefocus, visual, width. -""" + Valid resource names: background, bd, bg, borderwidth, class, + colormap, container, cursor, height, highlightbackground, + highlightcolor, highlightthickness, relief, takefocus, visual, width. + """ + @overload def configure( self, @@ -3914,17 +4044,18 @@ highlightcolor, highlightthickness, relief, takefocus, visual, width. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): - """Label widget which can display text and bitmaps. -""" + """Label widget which can display text and bitmaps.""" + def __init__( self, master: Misc | None = None, @@ -3965,21 +4096,22 @@ class Label(Widget): ) -> None: """Construct a label widget with the parent MASTER. 
-STANDARD OPTIONS + STANDARD OPTIONS - activebackground, activeforeground, anchor, - background, bitmap, borderwidth, cursor, - disabledforeground, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, image, justify, - padx, pady, relief, takefocus, text, - textvariable, underline, wraplength + activebackground, activeforeground, anchor, + background, bitmap, borderwidth, cursor, + disabledforeground, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, image, justify, + padx, pady, relief, takefocus, text, + textvariable, underline, wraplength -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - height, state, width + height, state, width + + """ -""" @overload def configure( self, @@ -4019,17 +4151,18 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Listbox(Widget, XView, YView): - """Listbox widget which can display a list of strings. -""" + """Listbox widget which can display a list of strings.""" + def __init__( self, master: Misc | None = None, @@ -4083,12 +4216,13 @@ class Listbox(Widget, XView, YView): ) -> None: """Construct a listbox widget with the parent MASTER. -Valid resource names: background, bd, bg, borderwidth, cursor, -exportselection, fg, font, foreground, height, highlightbackground, -highlightcolor, highlightthickness, relief, selectbackground, -selectborderwidth, selectforeground, selectmode, setgrid, takefocus, -width, xscrollcommand, yscrollcommand, listvariable. -""" + Valid resource names: background, bd, bg, borderwidth, cursor, + exportselection, fg, font, foreground, height, highlightbackground, + highlightcolor, highlightthickness, relief, selectbackground, + selectborderwidth, selectforeground, selectmode, setgrid, takefocus, + width, xscrollcommand, yscrollcommand, listvariable. + """ + @overload def configure( self, @@ -4126,86 +4260,86 @@ width, xscrollcommand, yscrollcommand, listvariable. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def activate(self, index: str | int) -> None: - """Activate item identified by INDEX. -""" + """Activate item identified by INDEX.""" + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: # type: ignore[override] """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle -which encloses the item identified by the given index. -""" + which encloses the item identified by the given index. + """ + def curselection(self): - """Return the indices of currently selected item. -""" + """Return the indices of currently selected item.""" + def delete(self, first: str | int, last: str | int | None = None) -> None: - """Delete items from FIRST to LAST (included). 
-""" + """Delete items from FIRST to LAST (included).""" + def get(self, first: str | int, last: str | int | None = None): - """Get list of items from FIRST to LAST (included). -""" + """Get list of items from FIRST to LAST (included).""" + def index(self, index: str | int) -> int: - """Return index of item identified with INDEX. -""" + """Return index of item identified with INDEX.""" + def insert(self, index: str | int, *elements: str | float) -> None: - """Insert ELEMENTS at INDEX. -""" + """Insert ELEMENTS at INDEX.""" + def nearest(self, y): - """Get index of item which is nearest to y coordinate Y. -""" + """Get index of item which is nearest to y coordinate Y.""" + def scan_mark(self, x, y) -> None: - """Remember the current X, Y coordinates. -""" + """Remember the current X, Y coordinates.""" + def scan_dragto(self, x, y) -> None: """Adjust the view of the listbox to 10 times the -difference between X and Y and the coordinates given in -scan_mark. -""" + difference between X and Y and the coordinates given in + scan_mark. + """ + def see(self, index: str | int) -> None: - """Scroll such that INDEX is visible. -""" + """Scroll such that INDEX is visible.""" + def selection_anchor(self, index: str | int) -> None: - """Set the fixed end oft the selection to INDEX. -""" + """Set the fixed end oft the selection to INDEX.""" select_anchor = selection_anchor def selection_clear(self, first: str | int, last: str | int | None = None) -> None: # type: ignore[override] - """Clear the selection from FIRST to LAST (included). -""" + """Clear the selection from FIRST to LAST (included).""" select_clear = selection_clear def selection_includes(self, index: str | int): - """Return True if INDEX is part of the selection. -""" + """Return True if INDEX is part of the selection.""" select_includes = selection_includes def selection_set(self, first: str | int, last: str | int | None = None) -> None: """Set the selection from FIRST to LAST (included) without -changing the currently selected elements. -""" + changing the currently selected elements. + """ select_set = selection_set def size(self) -> int: # type: ignore[override] - """Return the number of elements in the listbox. -""" + """Return the number of elements in the listbox.""" + def itemcget(self, index: str | int, option): - """Return the resource value for an ITEM and an OPTION. -""" + """Return the resource value for an ITEM and an OPTION.""" + def itemconfigure(self, index: str | int, cnf=None, **kw): """Configure resources of an ITEM. -The values for resources are specified as keyword arguments. -To get an overview about the allowed keyword arguments -call the method without arguments. -Valid resource names: background, bg, foreground, fg, -selectbackground, selectforeground. -""" + The values for resources are specified as keyword arguments. + To get an overview about the allowed keyword arguments + call the method without arguments. + Valid resource names: background, bg, foreground, fg, + selectbackground, selectforeground. + """ itemconfig = itemconfigure class Menu(Widget): - """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus. -""" + """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus.""" + def __init__( self, master: Misc | None = None, @@ -4239,11 +4373,12 @@ class Menu(Widget): ) -> None: """Construct menu widget with the parent MASTER. 
-Valid resource names: activebackground, activeborderwidth, -activeforeground, background, bd, bg, borderwidth, cursor, -disabledforeground, fg, font, foreground, postcommand, relief, -selectcolor, takefocus, tearoff, tearoffcommand, title, type. -""" + Valid resource names: activebackground, activeborderwidth, + activeforeground, background, bd, bg, borderwidth, cursor, + disabledforeground, fg, font, foreground, postcommand, relief, + selectcolor, takefocus, tearoff, tearoffcommand, title, type. + """ + @overload def configure( self, @@ -4273,25 +4408,26 @@ selectcolor, takefocus, tearoff, tearoffcommand, title, type. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: - """Post the menu at position X,Y with entry ENTRY. -""" + """Post the menu at position X,Y with entry ENTRY.""" + def activate(self, index: str | int) -> None: - """Activate entry at INDEX. -""" + """Activate entry at INDEX.""" + def add(self, itemType, cnf={}, **kw): # docstring says "Internal function." - """Internal function. -""" + """Internal function.""" + def insert(self, index, itemType, cnf={}, **kw): # docstring says "Internal function." - """Internal function. -""" + """Internal function.""" + def add_cascade( self, cnf: dict[str, Any] | None = {}, @@ -4313,8 +4449,8 @@ the allowed keyword arguments call the method keys. state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., ) -> None: - """Add hierarchical menu item. -""" + """Add hierarchical menu item.""" + def add_checkbutton( self, cnf: dict[str, Any] | None = {}, @@ -4341,8 +4477,8 @@ the allowed keyword arguments call the method keys. underline: int = ..., variable: Variable = ..., ) -> None: - """Add checkbutton menu item. -""" + """Add checkbutton menu item.""" + def add_command( self, cnf: dict[str, Any] | None = {}, @@ -4363,8 +4499,8 @@ the allowed keyword arguments call the method keys. state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., ) -> None: - """Add command menu item. -""" + """Add command menu item.""" + def add_radiobutton( self, cnf: dict[str, Any] | None = {}, @@ -4390,11 +4526,11 @@ the allowed keyword arguments call the method keys. value: Any = ..., variable: Variable = ..., ) -> None: - """Add radio menu item. -""" + """Add radio menu item.""" + def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: - """Add separator. -""" + """Add separator.""" + def insert_cascade( self, index: str | int, @@ -4417,8 +4553,8 @@ the allowed keyword arguments call the method keys. state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., ) -> None: - """Add hierarchical menu item at INDEX. -""" + """Add hierarchical menu item at INDEX.""" + def insert_checkbutton( self, index: str | int, @@ -4446,8 +4582,8 @@ the allowed keyword arguments call the method keys. underline: int = ..., variable: Variable = ..., ) -> None: - """Add checkbutton menu item at INDEX. 
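
A sketch of assembling a menu bar with the add_* methods above; labels and callbacks are placeholders:

import tkinter as tk

root = tk.Tk()
menubar = tk.Menu(root)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label="Open", underline=0, command=lambda: print("open"))
filemenu.add_separator()
filemenu.add_command(label="Quit", command=root.destroy)
menubar.add_cascade(label="File", menu=filemenu)
root.configure(menu=menubar)
root.mainloop()
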
-""" + """Add checkbutton menu item at INDEX.""" + def insert_command( self, index: str | int, @@ -4469,8 +4605,8 @@ the allowed keyword arguments call the method keys. state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., ) -> None: - """Add command menu item at INDEX. -""" + """Add command menu item at INDEX.""" + def insert_radiobutton( self, index: str | int, @@ -4497,50 +4633,50 @@ the allowed keyword arguments call the method keys. value: Any = ..., variable: Variable = ..., ) -> None: - """Add radio menu item at INDEX. -""" + """Add radio menu item at INDEX.""" + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: - """Add separator at INDEX. -""" + """Add separator at INDEX.""" + def delete(self, index1: str | int, index2: str | int | None = None) -> None: - """Delete menu items between INDEX1 and INDEX2 (included). -""" + """Delete menu items between INDEX1 and INDEX2 (included).""" + def entrycget(self, index: str | int, option: str) -> Any: - """Return the resource value of a menu item for OPTION at INDEX. -""" + """Return the resource value of a menu item for OPTION at INDEX.""" + def entryconfigure( self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure a menu item at INDEX. -""" + """Configure a menu item at INDEX.""" entryconfig = entryconfigure def index(self, index: str | int) -> int | None: - """Return the index of a menu item identified by INDEX. -""" + """Return the index of a menu item identified by INDEX.""" + def invoke(self, index: str | int) -> Any: """Invoke a menu item identified by INDEX and execute -the associated command. -""" + the associated command. + """ + def post(self, x: int, y: int) -> None: - """Display a menu at position X,Y. -""" + """Display a menu at position X,Y.""" + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: - """Return the type of the menu item at INDEX. -""" + """Return the type of the menu item at INDEX.""" + def unpost(self) -> None: - """Unmap a menu. -""" + """Unmap a menu.""" + def xposition(self, index: str | int) -> int: """Return the x-position of the leftmost pixel of the menu item -at INDEX. -""" + at INDEX. + """ + def yposition(self, index: str | int) -> int: - """Return the y-position of the topmost pixel of the menu item at INDEX. -""" + """Return the y-position of the topmost pixel of the menu item at INDEX.""" class Menubutton(Widget): - """Menubutton widget, obsolete since Tk8.0. -""" + """Menubutton widget, obsolete since Tk8.0.""" + def __init__( self, master: Misc | None = None, @@ -4624,17 +4760,18 @@ class Menubutton(Widget): ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Message(Widget): - """Message widget to display multiline text. Obsolete since Label does it too. -""" + """Message widget to display multiline text. 
Obsolete since Label does it too.""" + def __init__( self, master: Misc | None = None, @@ -4695,17 +4832,18 @@ class Message(Widget): ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Radiobutton(Widget): - """Radiobutton widget which shows only one of several buttons in on-state. -""" + """Radiobutton widget which shows only one of several buttons in on-state.""" + def __init__( self, master: Misc | None = None, @@ -4756,14 +4894,15 @@ class Radiobutton(Widget): ) -> None: """Construct a radiobutton widget with the parent MASTER. -Valid resource names: activebackground, activeforeground, anchor, -background, bd, bg, bitmap, borderwidth, command, cursor, -disabledforeground, fg, font, foreground, height, -highlightbackground, highlightcolor, highlightthickness, image, -indicatoron, justify, padx, pady, relief, selectcolor, selectimage, -state, takefocus, text, textvariable, underline, value, variable, -width, wraplength. -""" + Valid resource names: activebackground, activeforeground, anchor, + background, bd, bg, bitmap, borderwidth, command, cursor, + disabledforeground, fg, font, foreground, height, + highlightbackground, highlightcolor, highlightthickness, image, + indicatoron, justify, padx, pady, relief, selectcolor, selectimage, + state, takefocus, text, textvariable, underline, value, variable, + width, wraplength. + """ + @overload def configure( self, @@ -4813,29 +4952,29 @@ width, wraplength. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def deselect(self) -> None: - """Put the button in off-state. -""" + """Put the button in off-state.""" + def flash(self) -> None: - """Flash the button. -""" + """Flash the button.""" + def invoke(self) -> Any: - """Toggle the button and invoke a command if given as resource. -""" + """Toggle the button and invoke a command if given as resource.""" + def select(self) -> None: - """Put the button in on-state. -""" + """Put the button in on-state.""" class Scale(Widget): - """Scale widget which can display a numerical scale. -""" + """Scale widget which can display a numerical scale.""" + def __init__( self, master: Misc | None = None, @@ -4880,13 +5019,14 @@ class Scale(Widget): ) -> None: """Construct a scale widget with the parent MASTER. -Valid resource names: activebackground, background, bigincrement, bd, -bg, borderwidth, command, cursor, digits, fg, font, foreground, from, -highlightbackground, highlightcolor, highlightthickness, label, -length, orient, relief, repeatdelay, repeatinterval, resolution, -showvalue, sliderlength, sliderrelief, state, takefocus, -tickinterval, to, troughcolor, variable, width. 
-""" + Valid resource names: activebackground, background, bigincrement, bd, + bg, borderwidth, command, cursor, digits, fg, font, foreground, from, + highlightbackground, highlightcolor, highlightthickness, label, + length, orient, relief, repeatdelay, repeatinterval, resolution, + showvalue, sliderlength, sliderrelief, state, takefocus, + tickinterval, to, troughcolor, variable, width. + """ + @overload def configure( self, @@ -4929,32 +5069,34 @@ tickinterval, to, troughcolor, variable, width. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def get(self) -> float: - """Get the current value as integer or float. -""" + """Get the current value as integer or float.""" + def set(self, value) -> None: - """Set the value to VALUE. -""" + """Set the value to VALUE.""" + def coords(self, value: float | None = None) -> tuple[int, int]: """Return a tuple (X,Y) of the point along the centerline of the -trough that corresponds to VALUE or the current value if None is -given. -""" + trough that corresponds to VALUE or the current value if None is + given. + """ + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: """Return where the point X,Y lies. Valid return values are "slider", -"though1" and "though2". -""" + "though1" and "though2". + """ class Scrollbar(Widget): - """Scrollbar widget which displays a slider at a certain position. -""" + """Scrollbar widget which displays a slider at a certain position.""" + def __init__( self, master: Misc | None = None, @@ -4989,13 +5131,14 @@ class Scrollbar(Widget): ) -> None: """Construct a scrollbar widget with the parent MASTER. -Valid resource names: activebackground, activerelief, -background, bd, bg, borderwidth, command, cursor, -elementborderwidth, highlightbackground, -highlightcolor, highlightthickness, jump, orient, -relief, repeatdelay, repeatinterval, takefocus, -troughcolor, width. -""" + Valid resource names: activebackground, activerelief, + background, bd, bg, borderwidth, command, cursor, + elementborderwidth, highlightbackground, + highlightcolor, highlightthickness, jump, orient, + relief, repeatdelay, repeatinterval, takefocus, + troughcolor, width. + """ + @overload def configure( self, @@ -5025,49 +5168,55 @@ troughcolor, width. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def activate(self, index=None): """Marks the element indicated by index as active. -The only index values understood by this method are "arrow1", -"slider", or "arrow2". If any other value is specified then no -element of the scrollbar will be active. 
If index is not specified, -the method returns the name of the element that is currently active, -or None if no element is active. -""" + The only index values understood by this method are "arrow1", + "slider", or "arrow2". If any other value is specified then no + element of the scrollbar will be active. If index is not specified, + the method returns the name of the element that is currently active, + or None if no element is active. + """ + def delta(self, deltax: int, deltay: int) -> float: """Return the fractional change of the scrollbar setting if it -would be moved by DELTAX or DELTAY pixels. -""" + would be moved by DELTAX or DELTAY pixels. + """ + def fraction(self, x: int, y: int) -> float: """Return the fractional value which corresponds to a slider -position of X,Y. -""" + position of X,Y. + """ + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: """Return the element under position X,Y as one of -"arrow1","slider","arrow2" or "". -""" + "arrow1","slider","arrow2" or "". + """ + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: """Return the current fractional values (upper and lower end) -of the slider position. -""" + of the slider position. + """ + def set(self, first: float | str, last: float | str) -> None: """Set the fractional values of the slider position (upper and -lower ends as value between 0 and 1). -""" + lower ends as value between 0 and 1). + """ _WhatToCount: TypeAlias = Literal[ "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels" ] class Text(Widget, XView, YView): - """Text widget which can display text in various forms. -""" + """Text widget which can display text in various forms.""" + def __init__( self, master: Misc | None = None, @@ -5126,26 +5275,27 @@ class Text(Widget, XView, YView): ) -> None: """Construct a text widget with the parent MASTER. -STANDARD OPTIONS + STANDARD OPTIONS - background, borderwidth, cursor, - exportselection, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, insertbackground, - insertborderwidth, insertofftime, - insertontime, insertwidth, padx, pady, - relief, selectbackground, - selectborderwidth, selectforeground, - setgrid, takefocus, - xscrollcommand, yscrollcommand, + background, borderwidth, cursor, + exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, padx, pady, + relief, selectbackground, + selectborderwidth, selectforeground, + setgrid, takefocus, + xscrollcommand, yscrollcommand, -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - autoseparators, height, maxundo, - spacing1, spacing2, spacing3, - state, tabs, undo, width, wrap, + autoseparators, height, maxundo, + spacing1, spacing2, spacing3, + state, tabs, undo, width, wrap, + + """ -""" @overload def configure( self, @@ -5199,17 +5349,19 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure def bbox(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int] | None: # type: ignore[override] """Return a tuple of (x,y,width,height) which gives the bounding -box of the visible part of the character at the given index. -""" + box of the visible part of the character at the given index. + """ + def compare( self, index1: str | float | _tkinter.Tcl_Obj | Widget, @@ -5217,8 +5369,8 @@ box of the visible part of the character at the given index. index2: str | float | _tkinter.Tcl_Obj | Widget, ) -> bool: """Return whether between index INDEX1 and index INDEX2 the -relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. -""" + relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. + """ if sys.version_info >= (3, 13): @overload def count( @@ -5230,20 +5382,21 @@ relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=. ) -> int: """Counts the number of relevant things between the two indices. -If INDEX1 is after INDEX2, the result will be a negative number -(and this holds for each of the possible options). - -The actual items which are counted depends on the options given. -The result is a tuple of integers, one for the result of each -counting option given, if more than one option is specified or -return_ints is false (default), otherwise it is an integer. -Valid counting options are "chars", "displaychars", -"displayindices", "displaylines", "indices", "lines", "xpixels" -and "ypixels". The default value, if no option is specified, is -"indices". There is an additional possible option "update", -which if given then all subsequent options ensure that any -possible out of date information is recalculated. -""" + If INDEX1 is after INDEX2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given. + The result is a tuple of integers, one for the result of each + counting option given, if more than one option is specified or + return_ints is false (default), otherwise it is an integer. + Valid counting options are "chars", "displaychars", + "displayindices", "displaylines", "indices", "lines", "xpixels" + and "ypixels". The default value, if no option is specified, is + "indices". There is an additional possible option "update", + which if given then all subsequent options ensure that any + possible out of date information is recalculated. + """ + @overload def count( self, @@ -5368,17 +5521,18 @@ possible out of date information is recalculated. self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget ) -> tuple[int] | None: """Counts the number of relevant things between the two indices. - If index1 is after index2, the result will be a negative number - (and this holds for each of the possible options). - - The actual items which are counted depends on the options given by - args. The result is a list of integers, one for the result of each - counting option given. Valid counting options are "chars", - "displaychars", "displayindices", "displaylines", "indices", - "lines", "xpixels" and "ypixels". There is an additional possible - option "update", which if given then all subsequent options ensure - that any possible out of date information is recalculated. -""" + If index1 is after index2, the result will be a negative number + (and this holds for each of the possible options). + + The actual items which are counted depends on the options given by + args. 
The result is a list of integers, one for the result of each + counting option given. Valid counting options are "chars", + "displaychars", "displayindices", "displaylines", "indices", + "lines", "xpixels" and "ypixels". There is an additional possible + option "update", which if given then all subsequent options ensure + that any possible out of date information is recalculated. + """ + @overload def count( self, @@ -5429,20 +5583,22 @@ possible out of date information is recalculated. @overload def debug(self, boolean: None = None) -> bool: """Turn on the internal consistency checks of the B-Tree inside the text -widget according to BOOLEAN. -""" + widget according to BOOLEAN. + """ + @overload def debug(self, boolean: bool) -> None: ... def delete( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None ) -> None: - """Delete the characters between INDEX1 and INDEX2 (not included). -""" + """Delete the characters between INDEX1 and INDEX2 (not included).""" + def dlineinfo(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> tuple[int, int, int, int, int] | None: """Return tuple (x,y,width,height,baseline) giving the bounding box -and baseline position of the visible part of the line containing -the character at INDEX. -""" + and baseline position of the visible part of the line containing + the character at INDEX. + """ + @overload def dump( self, @@ -5459,16 +5615,17 @@ the character at INDEX. ) -> list[tuple[str, str, str]]: """Return the contents of the widget between index1 and index2. -The type of contents returned in filtered based on the keyword -parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are -given and true, then the corresponding items are returned. The result -is a list of triples of the form (key, value, index). If none of the -keywords are true then 'all' is used by default. + The type of contents returned in filtered based on the keyword + parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are + given and true, then the corresponding items are returned. The result + is a list of triples of the form (key, value, index). If none of the + keywords are true then 'all' is used by default. + + If the 'command' argument is given, it is called once for each element + of the list of triples, with the values of each triple serving as the + arguments to the function. In this case the list is not returned. + """ -If the 'command' argument is given, it is called once for each element -of the list of triples, with the values of each triple serving as the -arguments to the function. In this case the list is not returned. -""" @overload def dump( self, @@ -5500,62 +5657,67 @@ arguments to the function. In this case the list is not returned. def edit(self, *args): # docstring says "Internal method" """Internal method -This method controls the undo mechanism and -the modified flag. The exact behavior of the -command depends on the option argument that -follows the edit argument. The following forms -of the command are currently supported: + This method controls the undo mechanism and + the modified flag. The exact behavior of the + command depends on the option argument that + follows the edit argument. 
The following forms + of the command are currently supported: -edit_modified, edit_redo, edit_reset, edit_separator -and edit_undo + edit_modified, edit_redo, edit_reset, edit_separator + and edit_undo + + """ -""" @overload def edit_modified(self, arg: None = None) -> bool: # actually returns Literal[0, 1] """Get or Set the modified flag -If arg is not specified, returns the modified -flag of the widget. The insert, delete, edit undo and -edit redo commands or the user can set or clear the -modified flag. If boolean is specified, sets the -modified flag of the widget to arg. -""" + If arg is not specified, returns the modified + flag of the widget. The insert, delete, edit undo and + edit redo commands or the user can set or clear the + modified flag. If boolean is specified, sets the + modified flag of the widget to arg. + """ + @overload def edit_modified(self, arg: bool) -> None: ... # actually returns empty string def edit_redo(self) -> None: # actually returns empty string """Redo the last undone edit -When the undo option is true, reapplies the last -undone edits provided no other edits were done since -then. Generates an error when the redo stack is empty. -Does nothing when the undo option is false. -""" + When the undo option is true, reapplies the last + undone edits provided no other edits were done since + then. Generates an error when the redo stack is empty. + Does nothing when the undo option is false. + """ + def edit_reset(self) -> None: # actually returns empty string - """Clears the undo and redo stacks - """ + """Clears the undo and redo stacks""" + def edit_separator(self) -> None: # actually returns empty string """Inserts a separator (boundary) on the undo stack. -Does nothing when the undo option is false -""" + Does nothing when the undo option is false + """ + def edit_undo(self) -> None: # actually returns empty string """Undoes the last edit action -If the undo option is true. An edit action is defined -as all the insert and delete commands that are recorded -on the undo stack in between two separators. Generates -an error when the undo stack is empty. Does nothing -when the undo option is false -""" + If the undo option is true. An edit action is defined + as all the insert and delete commands that are recorded + on the undo stack in between two separators. Generates + an error when the undo stack is empty. Does nothing + when the undo option is false + """ + def get( self, index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None ) -> str: - """Return the text from INDEX1 to INDEX2 (not included). -""" + """Return the text from INDEX1 to INDEX2 (not included).""" + @overload def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["image", "name"]) -> str: - """Return the value of OPTION of an embedded image at INDEX. -""" + """Return the value of OPTION of an embedded image at INDEX.""" + @overload def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: ... @overload @@ -5565,11 +5727,9 @@ when the undo option is false @overload def image_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... @overload - def image_configure( - self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str - ) -> tuple[str, str, str, str, str | int]: - """Configure an embedded image at INDEX. 
-""" + def image_configure(self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str) -> tuple[str, str, str, str, str | int]: + """Configure an embedded image at INDEX.""" + @overload def image_configure( self, @@ -5593,53 +5753,56 @@ when the undo option is false padx: float | str = ..., pady: float | str = ..., ) -> str: - """Create an embedded image at INDEX. -""" + """Create an embedded image at INDEX.""" + def image_names(self) -> tuple[str, ...]: - """Return all names of embedded images in this widget. -""" + """Return all names of embedded images in this widget.""" + def index(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str: - """Return the index in the form line.char for INDEX. -""" + """Return the index in the form line.char for INDEX.""" + def insert( self, index: str | float | _tkinter.Tcl_Obj | Widget, chars: str, *args: str | list[str] | tuple[str, ...] ) -> None: """Insert CHARS before the characters at INDEX. An additional -tag can be given in ARGS. Additional CHARS and tags can follow in ARGS. -""" + tag can be given in ARGS. Additional CHARS and tags can follow in ARGS. + """ + @overload def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT). -Return the current value if None is given for DIRECTION. -""" + Return the current value if None is given for DIRECTION. + """ + @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string def mark_names(self) -> tuple[str, ...]: - """Return all mark names. -""" + """Return all mark names.""" + def mark_set(self, markName: str, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: - """Set mark MARKNAME before the character at INDEX. -""" + """Set mark MARKNAME before the character at INDEX.""" + def mark_unset(self, *markNames: str) -> None: - """Delete all marks in MARKNAMES. -""" + """Delete all marks in MARKNAMES.""" + def mark_next(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: - """Return the name of the next mark after INDEX. -""" + """Return the name of the next mark after INDEX.""" + def mark_previous(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> str | None: - """Return the name of the previous mark before INDEX. -""" + """Return the name of the previous mark before INDEX.""" # **kw of peer_create is same as the kwargs of Text.__init__ def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: """Creates a peer text widget with the given newPathName, and any -optional standard configuration options. By default the peer will -have the same start and end line as the parent widget, but -these can be overridden with the standard configuration options. -""" + optional standard configuration options. By default the peer will + have the same start and end line as the parent widget, but + these can be overridden with the standard configuration options. + """ + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: """Returns a list of peers of this widget (this does not include -the widget itself). -""" + the widget itself). + """ + def replace( self, index1: str | float | _tkinter.Tcl_Obj | Widget, @@ -5648,19 +5811,21 @@ the widget itself). *args: str | list[str] | tuple[str, ...], ) -> None: """Replaces the range of characters between index1 and index2 with -the given characters and tags specified by args. + the given characters and tags specified by args. 
+ + See the method insert for some more information about args, and the + method delete for information about the indices. + """ -See the method insert for some more information about args, and the -method delete for information about the indices. -""" def scan_mark(self, x: int, y: int) -> None: - """Remember the current X, Y coordinates. -""" + """Remember the current X, Y coordinates.""" + def scan_dragto(self, x: int, y: int) -> None: """Adjust the view of the text to 10 times the -difference between X and Y and the coordinates given in -scan_mark. -""" + difference between X and Y and the coordinates given in + scan_mark. + """ + def search( self, pattern: str, @@ -5675,18 +5840,19 @@ scan_mark. elide: bool | None = None, ) -> str: # returns empty string for not found """Search PATTERN beginning from INDEX until STOPINDEX. -Return the index of the first character of a match or an -empty string. -""" + Return the index of the first character of a match or an + empty string. + """ + def see(self, index: str | float | _tkinter.Tcl_Obj | Widget) -> None: - """Scroll such that the character at INDEX is visible. -""" + """Scroll such that the character at INDEX is visible.""" + def tag_add( self, tagName: str, index1: str | float | _tkinter.Tcl_Obj | Widget, *args: str | float | _tkinter.Tcl_Obj | Widget ) -> None: """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS. -Additional pairs of indices may follow in ARGS. -""" + Additional pairs of indices may follow in ARGS. + """ # tag_bind stuff is very similar to Canvas @overload def tag_bind( @@ -5698,20 +5864,21 @@ Additional pairs of indices may follow in ARGS. ) -> str: """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC. -An additional boolean parameter ADD specifies whether FUNC will be -called additionally to the other bound function or whether it will -replace the previous function. See bind for the return value. -""" + An additional boolean parameter ADD specifies whether FUNC will be + called additionally to the other bound function or whether it will + replace the previous function. See bind for the return value. + """ + @overload def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: """Unbind for all characters with TAGNAME for event SEQUENCE the -function identified with FUNCID. -""" + function identified with FUNCID. + """ # allowing any string for cget instead of just Literals because there's no other way to look up tag options def tag_cget(self, tagName: str, option: str): - """Return the value of OPTION for tag TAGNAME. -""" + """Return the value of OPTION for tag TAGNAME.""" + @overload def tag_configure( self, @@ -5747,21 +5914,22 @@ function identified with FUNCID. underlinefg: str = ..., wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None ) -> dict[str, tuple[str, str, str, Any, Any]] | None: - """Configure a tag TAGNAME. -""" + """Configure a tag TAGNAME.""" + @overload def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure def tag_delete(self, first_tag_name: str, /, *tagNames: str) -> None: # error if no tag names given - """Delete all tags in TAGNAMES. 
-""" + """Delete all tags in TAGNAMES.""" + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: """Change the priority of tag TAGNAME such that it is lower -than the priority of BELOWTHIS. -""" + than the priority of BELOWTHIS. + """ + def tag_names(self, index: str | float | _tkinter.Tcl_Obj | Widget | None = None) -> tuple[str, ...]: - """Return a list of all tag names. -""" + """Return a list of all tag names.""" + def tag_nextrange( self, tagName: str, @@ -5769,9 +5937,10 @@ than the priority of BELOWTHIS. index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, ) -> tuple[str, str] | tuple[()]: """Return a list of start and end index for the first sequence of -characters between INDEX1 and INDEX2 which all have tag TAGNAME. -The text is searched forward from INDEX1. -""" + characters between INDEX1 and INDEX2 which all have tag TAGNAME. + The text is searched forward from INDEX1. + """ + def tag_prevrange( self, tagName: str, @@ -5779,16 +5948,17 @@ The text is searched forward from INDEX1. index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, ) -> tuple[str, str] | tuple[()]: """Return a list of start and end index for the first sequence of -characters between INDEX1 and INDEX2 which all have tag TAGNAME. -The text is searched backwards from INDEX1. -""" + characters between INDEX1 and INDEX2 which all have tag TAGNAME. + The text is searched backwards from INDEX1. + """ + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: """Change the priority of tag TAGNAME such that it is higher -than the priority of ABOVETHIS. -""" + than the priority of ABOVETHIS. + """ + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: - """Return a list of ranges of text which have tag TAGNAME. -""" + """Return a list of ranges of text which have tag TAGNAME.""" # tag_remove and tag_delete are different def tag_remove( self, @@ -5796,12 +5966,12 @@ than the priority of ABOVETHIS. index1: str | float | _tkinter.Tcl_Obj | Widget, index2: str | float | _tkinter.Tcl_Obj | Widget | None = None, ) -> None: - """Remove tag TAGNAME from all characters between INDEX1 and INDEX2. -""" + """Remove tag TAGNAME from all characters between INDEX1 and INDEX2.""" + @overload def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["padx", "pady"]) -> int: - """Return the value of OPTION of an embedded window at INDEX. -""" + """Return the value of OPTION of an embedded window at INDEX.""" + @overload def window_cget( self, index: str | float | _tkinter.Tcl_Obj | Widget, option: Literal["stretch"] @@ -5815,11 +5985,9 @@ than the priority of ABOVETHIS. @overload def window_cget(self, index: str | float | _tkinter.Tcl_Obj | Widget, option: str) -> Any: ... @overload - def window_configure( - self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str - ) -> tuple[str, str, str, str, str | int]: - """Configure an embedded window at INDEX. -""" + def window_configure(self, index: str | float | _tkinter.Tcl_Obj | Widget, cnf: str) -> tuple[str, str, str, str, str | int]: + """Configure an embedded window at INDEX.""" + @overload def window_configure( self, @@ -5846,25 +6014,24 @@ than the priority of ABOVETHIS. stretch: bool | Literal[0, 1] = ..., window: Misc | str = ..., ) -> None: - """Create a window at INDEX. -""" + """Create a window at INDEX.""" + def window_names(self) -> tuple[str, ...]: - """Return all names of embedded windows in this widget. 
-""" + """Return all names of embedded windows in this widget.""" + def yview_pickplace(self, *what): # deprecated - """Obsolete function, use see. -""" + """Obsolete function, use see.""" class _setit: - """Internal class. It wraps the command in the widget OptionMenu. -""" + """Internal class. It wraps the command in the widget OptionMenu.""" + def __init__(self, var, value, callback=None) -> None: ... def __call__(self, *args) -> None: ... # manual page: tk_optionMenu class OptionMenu(Menubutton): - """OptionMenu which allows the user to select a value from a menu. -""" + """OptionMenu which allows the user to select a value from a menu.""" + menuname: Incomplete def __init__( # differs from other widgets @@ -5877,10 +6044,10 @@ class OptionMenu(Menubutton): command: Callable[[StringVar], object] | None = ..., ) -> None: """Construct an optionmenu widget with the parent MASTER, with -the resource textvariable set to VARIABLE, the initially selected -value VALUE, the other menu values VALUES and an additional -keyword argument command. -""" + the resource textvariable set to VARIABLE, the initially selected + value VALUE, the other menu values VALUES and an additional + keyword argument command. + """ # configure, config, cget are inherited from Menubutton # destroy and __getitem__ are overridden, signature does not change @@ -5900,8 +6067,8 @@ class _BitmapImageLike(_Image): ... class _PhotoImageLike(_Image): ... class Image(_Image): - """Base class for images. -""" + """Base class for images.""" + name: Incomplete tk: _tkinter.TkappType def __init__(self, imgtype, name=None, cnf={}, master: Misc | _tkinter.TkappType | None = None, **kw) -> None: ... @@ -5911,12 +6078,11 @@ class Image(_Image): configure: Incomplete config: Incomplete def type(self): - """Return the type of the image, e.g. "photo" or "bitmap". -""" + """Return the type of the image, e.g. "photo" or "bitmap".""" class PhotoImage(Image, _PhotoImageLike): - """Widget which can display images in PGM, PPM, GIF, PNG format. -""" + """Widget which can display images in PGM, PPM, GIF, PNG format.""" + # This should be kept in sync with PIL.ImageTK.PhotoImage.__init__() def __init__( self, @@ -5934,9 +6100,10 @@ class PhotoImage(Image, _PhotoImageLike): ) -> None: """Create an image with NAME. -Valid resource names: data, format, file, gamma, height, palette, -width. -""" + Valid resource names: data, format, file, gamma, height, palette, + width. + """ + def configure( self, *, @@ -5948,15 +6115,14 @@ width. palette: int | str = ..., width: int = ..., ) -> None: - """Configure the image. -""" + """Configure the image.""" config = configure def blank(self) -> None: - """Display a transparent image. -""" + """Display a transparent image.""" + def cget(self, option: str) -> str: - """Return the value of OPTION. -""" + """Return the value of OPTION.""" + def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' if sys.version_info >= (3, 13): def copy( @@ -5968,36 +6134,39 @@ width. ) -> PhotoImage: """Return a new PhotoImage with the same image as this widget. -The FROM_COORDS option specifies a rectangular sub-region of the -source image to be copied. It must be a tuple or a list of 1 to 4 -integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally -opposite corners of the rectangle. If x2 and y2 are not specified, -the default value is the bottom-right corner of the source image. 
-The pixels copied will include the left and top edges of the -specified rectangle but not the bottom or right edges. If the -FROM_COORDS option is not given, the default is the whole source -image. - -If SUBSAMPLE or ZOOM are specified, the image is transformed as in -the subsample() or zoom() methods. The value must be a single -integer or a pair of integers. -""" + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is the bottom-right corner of the source image. + The pixels copied will include the left and top edges of the + specified rectangle but not the bottom or right edges. If the + FROM_COORDS option is not given, the default is the whole source + image. + + If SUBSAMPLE or ZOOM are specified, the image is transformed as in + the subsample() or zoom() methods. The value must be a single + integer or a pair of integers. + """ + def subsample(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: """Return a new PhotoImage based on the same image as this widget -but use only every Xth or Yth pixel. If Y is not given, the -default value is the same as X. + but use only every Xth or Yth pixel. If Y is not given, the + default value is the same as X. + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied, as in the copy() method. + """ -The FROM_COORDS option specifies a rectangular sub-region of the -source image to be copied, as in the copy() method. -""" def zoom(self, x: int, y: Literal[""] = "", *, from_coords: Iterable[int] | None = None) -> PhotoImage: """Return a new PhotoImage with the same image as this widget -but zoom it with a factor of X in the X direction and Y in the Y -direction. If Y is not given, the default value is the same as X. + but zoom it with a factor of X in the X direction and Y in the Y + direction. If Y is not given, the default value is the same as X. + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied, as in the copy() method. + """ -The FROM_COORDS option specifies a rectangular sub-region of the -source image to be copied, as in the copy() method. -""" def copy_replace( self, sourceImage: PhotoImage | str, @@ -6011,64 +6180,65 @@ source image to be copied, as in the copy() method. compositingrule: Literal["overlay", "set"] | None = None, ) -> None: """Copy a region from the source image (which must be a PhotoImage) to -this image, possibly with pixel zooming and/or subsampling. If no -options are specified, this command copies the whole of the source -image into this image, starting at coordinates (0, 0). - -The FROM_COORDS option specifies a rectangular sub-region of the -source image to be copied. It must be a tuple or a list of 1 to 4 -integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally -opposite corners of the rectangle. If x2 and y2 are not specified, -the default value is the bottom-right corner of the source image. -The pixels copied will include the left and top edges of the -specified rectangle but not the bottom or right edges. If the -FROM_COORDS option is not given, the default is the whole source -image. - -The TO option specifies a rectangular sub-region of the destination -image to be affected. 
It must be a tuple or a list of 1 to 4 -integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally -opposite corners of the rectangle. If x2 and y2 are not specified, -the default value is (x1,y1) plus the size of the source region -(after subsampling and zooming, if specified). If x2 and y2 are -specified, the source region will be replicated if necessary to fill -the destination region in a tiled fashion. - -If SHRINK is true, the size of the destination image should be -reduced, if necessary, so that the region being copied into is at -the bottom-right corner of the image. - -If SUBSAMPLE or ZOOM are specified, the image is transformed as in -the subsample() or zoom() methods. The value must be a single -integer or a pair of integers. - -The COMPOSITINGRULE option specifies how transparent pixels in the -source image are combined with the destination image. When a -compositing rule of 'overlay' is set, the old contents of the -destination image are visible, as if the source image were printed -on a piece of transparent film and placed over the top of the -destination. When a compositing rule of 'set' is set, the old -contents of the destination image are discarded and the source image -is used as-is. The default compositing rule is 'overlay'. -""" + this image, possibly with pixel zooming and/or subsampling. If no + options are specified, this command copies the whole of the source + image into this image, starting at coordinates (0, 0). + + The FROM_COORDS option specifies a rectangular sub-region of the + source image to be copied. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is the bottom-right corner of the source image. + The pixels copied will include the left and top edges of the + specified rectangle but not the bottom or right edges. If the + FROM_COORDS option is not given, the default is the whole source + image. + + The TO option specifies a rectangular sub-region of the destination + image to be affected. It must be a tuple or a list of 1 to 4 + integers (x1, y1, x2, y2). (x1, y1) and (x2, y2) specify diagonally + opposite corners of the rectangle. If x2 and y2 are not specified, + the default value is (x1,y1) plus the size of the source region + (after subsampling and zooming, if specified). If x2 and y2 are + specified, the source region will be replicated if necessary to fill + the destination region in a tiled fashion. + + If SHRINK is true, the size of the destination image should be + reduced, if necessary, so that the region being copied into is at + the bottom-right corner of the image. + + If SUBSAMPLE or ZOOM are specified, the image is transformed as in + the subsample() or zoom() methods. The value must be a single + integer or a pair of integers. + + The COMPOSITINGRULE option specifies how transparent pixels in the + source image are combined with the destination image. When a + compositing rule of 'overlay' is set, the old contents of the + destination image are visible, as if the source image were printed + on a piece of transparent film and placed over the top of the + destination. When a compositing rule of 'set' is set, the old + contents of the destination image are discarded and the source image + is used as-is. The default compositing rule is 'overlay'. + """ else: def copy(self) -> PhotoImage: - """Return a new PhotoImage with the same image as this widget. 
-""" + """Return a new PhotoImage with the same image as this widget.""" + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: """Return a new PhotoImage with the same image as this widget - but zoom it with a factor of x in the X direction and y in the Y - direction. If y is not given, the default value is the same as x. - """ + but zoom it with a factor of x in the X direction and y in the Y + direction. If y is not given, the default value is the same as x. + """ + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: """Return a new PhotoImage based on the same image as this widget - but use only every Xth or Yth pixel. If y is not given, the - default value is the same as x. - """ + but use only every Xth or Yth pixel. If y is not given, the + default value is the same as x. + """ def get(self, x: int, y: int) -> tuple[int, int, int]: - """Return the color (red, green, blue) of the pixel at X,Y. -""" + """Return the color (red, green, blue) of the pixel at X,Y.""" + def put( self, data: ( @@ -6084,8 +6254,8 @@ is used as-is. The default compositing rule is 'overlay'. to: tuple[int, int] | tuple[int, int, int, int] | None = None, ) -> None: """Put row formatted colors to image starting from -position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) -""" + position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) + """ if sys.version_info >= (3, 13): def read( self, @@ -6098,25 +6268,26 @@ position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6)) ) -> None: """Reads image data from the file named FILENAME into the image. -The FORMAT option specifies the format of the image data in the -file. + The FORMAT option specifies the format of the image data in the + file. -The FROM_COORDS option specifies a rectangular sub-region of the image -file data to be copied to the destination image. It must be a tuple -or a list of 1 to 4 integers (x1, y1, x2, y2). (x1, y1) and -(x2, y2) specify diagonally opposite corners of the rectangle. If -x2 and y2 are not specified, the default value is the bottom-right -corner of the source image. The default, if this option is not -specified, is the whole of the image in the image file. + The FROM_COORDS option specifies a rectangular sub-region of the image + file data to be copied to the destination image. It must be a tuple + or a list of 1 to 4 integers (x1, y1, x2, y2). (x1, y1) and + (x2, y2) specify diagonally opposite corners of the rectangle. If + x2 and y2 are not specified, the default value is the bottom-right + corner of the source image. The default, if this option is not + specified, is the whole of the image in the image file. -The TO option specifies the coordinates of the top-left corner of -the region of the image into which data from filename are to be -read. The default is (0, 0). + The TO option specifies the coordinates of the top-left corner of + the region of the image into which data from filename are to be + read. The default is (0, 0). + + If SHRINK is true, the size of the destination image will be + reduced, if necessary, so that the region into which the image file + data are read is at the bottom-right corner of the image. + """ -If SHRINK is true, the size of the destination image will be -reduced, if necessary, so that the region into which the image file -data are read is at the bottom-right corner of the image. -""" def write( self, filename: StrOrBytesPath, @@ -6128,54 +6299,56 @@ data are read is at the bottom-right corner of the image. 
) -> None: """Writes image data from the image to a file named FILENAME. -The FORMAT option specifies the name of the image file format -handler to be used to write the data to the file. If this option -is not given, the format is guessed from the file extension. + The FORMAT option specifies the name of the image file format + handler to be used to write the data to the file. If this option + is not given, the format is guessed from the file extension. -The FROM_COORDS option specifies a rectangular region of the image -to be written to the image file. It must be a tuple or a list of 1 -to 4 integers (x1, y1, x2, y2). If only x1 and y1 are specified, -the region extends from (x1,y1) to the bottom-right corner of the -image. If all four coordinates are given, they specify diagonally -opposite corners of the rectangular region. The default, if this -option is not given, is the whole image. + The FROM_COORDS option specifies a rectangular region of the image + to be written to the image file. It must be a tuple or a list of 1 + to 4 integers (x1, y1, x2, y2). If only x1 and y1 are specified, + the region extends from (x1,y1) to the bottom-right corner of the + image. If all four coordinates are given, they specify diagonally + opposite corners of the rectangular region. The default, if this + option is not given, is the whole image. -If BACKGROUND is specified, the data will not contain any -transparency information. In all transparent pixels the color will -be replaced by the specified color. + If BACKGROUND is specified, the data will not contain any + transparency information. In all transparent pixels the color will + be replaced by the specified color. + + If GRAYSCALE is true, the data will not contain color information. + All pixel data will be transformed into grayscale. + """ -If GRAYSCALE is true, the data will not contain color information. -All pixel data will be transformed into grayscale. -""" @overload def data( self, format: str, *, from_coords: Iterable[int] | None = None, background: str | None = None, grayscale: bool = False ) -> bytes: """Returns image data. -The FORMAT option specifies the name of the image file format -handler to be used. If this option is not given, this method uses -a format that consists of a tuple (one element per row) of strings -containing space-separated (one element per pixel/column) colors -in “#RRGGBB” format (where RR is a pair of hexadecimal digits for -the red channel, GG for green, and BB for blue). - -The FROM_COORDS option specifies a rectangular region of the image -to be returned. It must be a tuple or a list of 1 to 4 integers -(x1, y1, x2, y2). If only x1 and y1 are specified, the region -extends from (x1,y1) to the bottom-right corner of the image. If -all four coordinates are given, they specify diagonally opposite -corners of the rectangular region, including (x1, y1) and excluding -(x2, y2). The default, if this option is not given, is the whole -image. - -If BACKGROUND is specified, the data will not contain any -transparency information. In all transparent pixels the color will -be replaced by the specified color. - -If GRAYSCALE is true, the data will not contain color information. -All pixel data will be transformed into grayscale. -""" + The FORMAT option specifies the name of the image file format + handler to be used. 
If this option is not given, this method uses + a format that consists of a tuple (one element per row) of strings + containing space-separated (one element per pixel/column) colors + in “#RRGGBB” format (where RR is a pair of hexadecimal digits for + the red channel, GG for green, and BB for blue). + + The FROM_COORDS option specifies a rectangular region of the image + to be returned. It must be a tuple or a list of 1 to 4 integers + (x1, y1, x2, y2). If only x1 and y1 are specified, the region + extends from (x1,y1) to the bottom-right corner of the image. If + all four coordinates are given, they specify diagonally opposite + corners of the rectangular region, including (x1, y1) and excluding + (x2, y2). The default, if this option is not given, is the whole + image. + + If BACKGROUND is specified, the data will not contain any + transparency information. In all transparent pixels the color will + be replaced by the specified color. + + If GRAYSCALE is true, the data will not contain color information. + All pixel data will be transformed into grayscale. + """ + @overload def data( self, @@ -6187,23 +6360,20 @@ All pixel data will be transformed into grayscale. ) -> tuple[str, ...]: ... else: - def write( - self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None - ) -> None: + def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: """Write image to file FILENAME in FORMAT starting from - position FROM_COORDS. -""" + position FROM_COORDS. + """ def transparency_get(self, x: int, y: int) -> bool: - """Return True if the pixel at x,y is transparent. -""" + """Return True if the pixel at x,y is transparent.""" + def transparency_set(self, x: int, y: int, boolean: bool) -> None: - """Set the transparency of the pixel at x,y. -""" + """Set the transparency of the pixel at x,y.""" class BitmapImage(Image, _BitmapImageLike): - """Widget which can display images in XBM format. -""" + """Widget which can display images in XBM format.""" + # This should be kept in sync with PIL.ImageTK.BitmapImage.__init__() def __init__( self, @@ -6220,15 +6390,15 @@ class BitmapImage(Image, _BitmapImageLike): ) -> None: """Create a bitmap with NAME. -Valid resource names: background, data, file, foreground, maskdata, maskfile. -""" + Valid resource names: background, data, file, foreground, maskdata, maskfile. + """ def image_names() -> tuple[str, ...]: ... def image_types() -> tuple[str, ...]: ... class Spinbox(Widget, XView): - """spinbox widget. -""" + """spinbox widget.""" + def __init__( self, master: Misc | None = None, @@ -6289,30 +6459,31 @@ class Spinbox(Widget, XView): ) -> None: """Construct a spinbox widget with the parent MASTER. -STANDARD OPTIONS - - activebackground, background, borderwidth, - cursor, exportselection, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, insertbackground, - insertborderwidth, insertofftime, - insertontime, insertwidth, justify, relief, - repeatdelay, repeatinterval, - selectbackground, selectborderwidth - selectforeground, takefocus, textvariable - xscrollcommand. 
- -WIDGET-SPECIFIC OPTIONS - - buttonbackground, buttoncursor, - buttondownrelief, buttonuprelief, - command, disabledbackground, - disabledforeground, format, from, - invalidcommand, increment, - readonlybackground, state, to, - validate, validatecommand values, - width, wrap, -""" + STANDARD OPTIONS + + activebackground, background, borderwidth, + cursor, exportselection, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, insertbackground, + insertborderwidth, insertofftime, + insertontime, insertwidth, justify, relief, + repeatdelay, repeatinterval, + selectbackground, selectborderwidth + selectforeground, takefocus, textvariable + xscrollcommand. + + WIDGET-SPECIFIC OPTIONS + + buttonbackground, buttoncursor, + buttondownrelief, buttonuprelief, + command, disabledbackground, + disabledforeground, format, from, + invalidcommand, increment, + readonlybackground, state, to, + validate, validatecommand values, + width, wrap, + """ + @overload def configure( self, @@ -6371,127 +6542,138 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def bbox(self, index) -> tuple[int, int, int, int] | None: # type: ignore[override] """Return a tuple of X1,Y1,X2,Y2 coordinates for a -rectangle which encloses the character given by index. - -The first two elements of the list give the x and y -coordinates of the upper-left corner of the screen -area covered by the character (in pixels relative -to the widget) and the last two elements give the -width and height of the character, in pixels. The -bounding box may refer to a region outside the -visible area of the window. -""" + rectangle which encloses the character given by index. + + The first two elements of the list give the x and y + coordinates of the upper-left corner of the screen + area covered by the character (in pixels relative + to the widget) and the last two elements give the + width and height of the character, in pixels. The + bounding box may refer to a region outside the + visible area of the window. + """ + def delete(self, first, last=None) -> Literal[""]: """Delete one or more elements of the spinbox. -First is the index of the first character to delete, -and last is the index of the character just after -the last one to delete. If last isn't specified it -defaults to first+1, i.e. a single character is -deleted. This command returns an empty string. -""" + First is the index of the first character to delete, + and last is the index of the character just after + the last one to delete. If last isn't specified it + defaults to first+1, i.e. a single character is + deleted. This command returns an empty string. + """ + def get(self) -> str: - """Returns the spinbox's string -""" + """Returns the spinbox's string""" + def icursor(self, index): """Alter the position of the insertion cursor. -The insertion cursor will be displayed just before -the character given by index. Returns an empty string -""" + The insertion cursor will be displayed just before + the character given by index. 
Returns an empty string + """ + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: """Returns the name of the widget at position x, y -Return value is one of: none, buttondown, buttonup, entry -""" - def index(self, index: str | int) -> int: - """Returns the numerical index corresponding to index + Return value is one of: none, buttondown, buttonup, entry """ + + def index(self, index: str | int) -> int: + """Returns the numerical index corresponding to index""" + def insert(self, index: str | int, s: str) -> Literal[""]: """Insert string s at index -Returns an empty string. -""" + Returns an empty string. + """ # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: """Causes the specified element to be invoked -The element could be buttondown or buttonup -triggering the action associated with it. -""" + The element could be buttondown or buttonup + triggering the action associated with it. + """ + def scan(self, *args): - """Internal function. -""" + """Internal function.""" + def scan_mark(self, x): """Records x and the current view in the spinbox window; -used in conjunction with later scan dragto commands. -Typically this command is associated with a mouse button -press in the widget. It returns an empty string. -""" + used in conjunction with later scan dragto commands. + Typically this command is associated with a mouse button + press in the widget. It returns an empty string. + """ + def scan_dragto(self, x): """Compute the difference between the given x argument -and the x argument to the last scan mark command + and the x argument to the last scan mark command + + It then adjusts the view left or right by 10 times the + difference in x-coordinates. This command is typically + associated with mouse motion events in the widget, to + produce the effect of dragging the spinbox at high speed + through the window. The return value is an empty string. + """ -It then adjusts the view left or right by 10 times the -difference in x-coordinates. This command is typically -associated with mouse motion events in the widget, to -produce the effect of dragging the spinbox at high speed -through the window. The return value is an empty string. -""" def selection(self, *args) -> tuple[int, ...]: - """Internal function. -""" + """Internal function.""" + def selection_adjust(self, index): """Locate the end of the selection nearest to the character -given by index, - -Then adjust that end of the selection to be at index -(i.e including but not going beyond index). The other -end of the selection is made the anchor point for future -select to commands. If the selection isn't currently in -the spinbox, then a new selection is created to include -the characters between index and the most recent selection -anchor point, inclusive. -""" + given by index, + + Then adjust that end of the selection to be at index + (i.e including but not going beyond index). The other + end of the selection is made the anchor point for future + select to commands. If the selection isn't currently in + the spinbox, then a new selection is created to include + the characters between index and the most recent selection + anchor point, inclusive. + """ + def selection_clear(self): # type: ignore[override] """Clear the selection -If the selection isn't in this widget then the -command has no effect. -""" + If the selection isn't in this widget then the + command has no effect. 
+ """ + def selection_element(self, element=None): """Sets or gets the currently selected element. -If a spinbutton element is specified, it will be -displayed depressed. -""" + If a spinbutton element is specified, it will be + displayed depressed. + """ + def selection_from(self, index: int) -> None: - """Set the fixed end of a selection to INDEX. -""" + """Set the fixed end of a selection to INDEX.""" + def selection_present(self) -> None: """Return True if there are characters selected in the spinbox, False -otherwise. -""" + otherwise. + """ + def selection_range(self, start: int, end: int) -> None: - """Set the selection from START to END (not included). -""" + """Set the selection from START to END (not included).""" + def selection_to(self, index: int) -> None: - """Set the variable end of a selection to INDEX. -""" + """Set the variable end of a selection to INDEX.""" class LabelFrame(Widget): - """labelframe widget. -""" + """labelframe widget.""" + def __init__( self, master: Misc | None = None, @@ -6527,19 +6709,20 @@ class LabelFrame(Widget): ) -> None: """Construct a labelframe widget with the parent MASTER. -STANDARD OPTIONS + STANDARD OPTIONS - borderwidth, cursor, font, foreground, - highlightbackground, highlightcolor, - highlightthickness, padx, pady, relief, - takefocus, text + borderwidth, cursor, font, foreground, + highlightbackground, highlightcolor, + highlightthickness, padx, pady, relief, + takefocus, text -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + background, class, colormap, container, + height, labelanchor, labelwidget, + visual, width + """ - background, class, colormap, container, - height, labelanchor, labelwidget, - visual, width -""" @overload def configure( self, @@ -6569,17 +6752,18 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class PanedWindow(Widget): - """panedwindow widget. -""" + """panedwindow widget.""" + def __init__( self, master: Misc | None = None, @@ -6610,17 +6794,18 @@ class PanedWindow(Widget): ) -> None: """Construct a panedwindow widget with the parent MASTER. -STANDARD OPTIONS + STANDARD OPTIONS - background, borderwidth, cursor, height, - orient, relief, width + background, borderwidth, cursor, height, + orient, relief, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + handlepad, handlesize, opaqueresize, + sashcursor, sashpad, sashrelief, + sashwidth, showhandle, + """ - handlepad, handlesize, opaqueresize, - sashcursor, sashpad, sashrelief, - sashwidth, showhandle, -""" @overload def configure( self, @@ -6650,148 +6835,153 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure def add(self, child: Widget, **kw) -> None: """Add a child widget to the panedwindow in a new pane. -The child argument is the name of the child widget -followed by pairs of arguments that specify how to -manage the windows. The possible options and values -are the ones accepted by the paneconfigure method. -""" + The child argument is the name of the child widget + followed by pairs of arguments that specify how to + manage the windows. The possible options and values + are the ones accepted by the paneconfigure method. + """ + def remove(self, child) -> None: """Remove the pane containing child from the panedwindow -All geometry management options for child will be forgotten. -""" + All geometry management options for child will be forgotten. + """ forget = remove # type: ignore[assignment] def identify(self, x: int, y: int): """Identify the panedwindow component at point x, y -If the point is over a sash or a sash handle, the result -is a two element list containing the index of the sash or -handle, and a word indicating whether it is over a sash -or a handle, such as {0 sash} or {2 handle}. If the point -is over any other part of the panedwindow, the result is -an empty list. -""" + If the point is over a sash or a sash handle, the result + is a two element list containing the index of the sash or + handle, and a word indicating whether it is over a sash + or a handle, such as {0 sash} or {2 handle}. If the point + is over any other part of the panedwindow, the result is + an empty list. + """ + def proxy(self, *args) -> tuple[Incomplete, ...]: - """Internal function. -""" + """Internal function.""" + def proxy_coord(self) -> tuple[Incomplete, ...]: - """Return the x and y pair of the most recent proxy location - """ + """Return the x and y pair of the most recent proxy location""" + def proxy_forget(self) -> tuple[Incomplete, ...]: - """Remove the proxy from the display. - """ + """Remove the proxy from the display.""" + def proxy_place(self, x, y) -> tuple[Incomplete, ...]: - """Place the proxy at the given x and y coordinates. - """ + """Place the proxy at the given x and y coordinates.""" + def sash(self, *args) -> tuple[Incomplete, ...]: - """Internal function. -""" + """Internal function.""" + def sash_coord(self, index) -> tuple[Incomplete, ...]: """Return the current x and y pair for the sash given by index. -Index must be an integer between 0 and 1 less than the -number of panes in the panedwindow. The coordinates given are -those of the top left corner of the region containing the sash. -pathName sash dragto index x y This command computes the -difference between the given coordinates and the coordinates -given to the last sash coord command for the given sash. It then -moves that sash the computed difference. The return value is the -empty string. -""" + Index must be an integer between 0 and 1 less than the + number of panes in the panedwindow. The coordinates given are + those of the top left corner of the region containing the sash. + pathName sash dragto index x y This command computes the + difference between the given coordinates and the coordinates + given to the last sash coord command for the given sash. It then + moves that sash the computed difference. The return value is the + empty string. + """ + def sash_mark(self, index) -> tuple[Incomplete, ...]: """Records x and y for the sash given by index; -Used in conjunction with later dragto commands to move the sash. 
-""" - def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: - """Place the sash given by index at the given coordinates + Used in conjunction with later dragto commands to move the sash. """ + + def sash_place(self, index, x, y) -> tuple[Incomplete, ...]: + """Place the sash given by index at the given coordinates""" + def panecget(self, child, option): """Query a management option for window. -Option may be any value allowed by the paneconfigure subcommand -""" + Option may be any value allowed by the paneconfigure subcommand + """ + def paneconfigure(self, tagOrId, cnf=None, **kw): """Query or modify the management options for window. -If no option is specified, returns a list describing all -of the available options for pathName. If option is -specified with no value, then the command returns a list -describing the one named option (this list will be identical -to the corresponding sublist of the value returned if no -option is specified). If one or more option-value pairs are -specified, then the command modifies the given widget -option(s) to have the given value(s); in this case the -command returns an empty string. The following options -are supported: - -after window - Insert the window after the window specified. window - should be the name of a window already managed by pathName. -before window - Insert the window before the window specified. window - should be the name of a window already managed by pathName. -height size - Specify a height for the window. The height will be the - outer dimension of the window including its border, if - any. If size is an empty string, or if -height is not - specified, then the height requested internally by the - window will be used initially; the height may later be - adjusted by the movement of sashes in the panedwindow. - Size may be any value accepted by Tk_GetPixels. -minsize n - Specifies that the size of the window cannot be made - less than n. This constraint only affects the size of - the widget in the paned dimension -- the x dimension - for horizontal panedwindows, the y dimension for - vertical panedwindows. May be any value accepted by - Tk_GetPixels. -padx n - Specifies a non-negative value indicating how much - extra space to leave on each side of the window in - the X-direction. The value may have any of the forms - accepted by Tk_GetPixels. -pady n - Specifies a non-negative value indicating how much - extra space to leave on each side of the window in - the Y-direction. The value may have any of the forms - accepted by Tk_GetPixels. -sticky style - If a window's pane is larger than the requested - dimensions of the window, this option may be used - to position (or stretch) the window within its pane. - Style is a string that contains zero or more of the - characters n, s, e or w. The string can optionally - contains spaces or commas, but they are ignored. Each - letter refers to a side (north, south, east, or west) - that the window will "stick" to. If both n and s - (or e and w) are specified, the window will be - stretched to fill the entire height (or width) of - its cavity. -width size - Specify a width for the window. The width will be - the outer dimension of the window including its - border, if any. If size is an empty string, or - if -width is not specified, then the width requested - internally by the window will be used initially; the - width may later be adjusted by the movement of sashes - in the panedwindow. Size may be any value accepted by - Tk_GetPixels. 
+ If no option is specified, returns a list describing all + of the available options for pathName. If option is + specified with no value, then the command returns a list + describing the one named option (this list will be identical + to the corresponding sublist of the value returned if no + option is specified). If one or more option-value pairs are + specified, then the command modifies the given widget + option(s) to have the given value(s); in this case the + command returns an empty string. The following options + are supported: + + after window + Insert the window after the window specified. window + should be the name of a window already managed by pathName. + before window + Insert the window before the window specified. window + should be the name of a window already managed by pathName. + height size + Specify a height for the window. The height will be the + outer dimension of the window including its border, if + any. If size is an empty string, or if -height is not + specified, then the height requested internally by the + window will be used initially; the height may later be + adjusted by the movement of sashes in the panedwindow. + Size may be any value accepted by Tk_GetPixels. + minsize n + Specifies that the size of the window cannot be made + less than n. This constraint only affects the size of + the widget in the paned dimension -- the x dimension + for horizontal panedwindows, the y dimension for + vertical panedwindows. May be any value accepted by + Tk_GetPixels. + padx n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the X-direction. The value may have any of the forms + accepted by Tk_GetPixels. + pady n + Specifies a non-negative value indicating how much + extra space to leave on each side of the window in + the Y-direction. The value may have any of the forms + accepted by Tk_GetPixels. + sticky style + If a window's pane is larger than the requested + dimensions of the window, this option may be used + to position (or stretch) the window within its pane. + Style is a string that contains zero or more of the + characters n, s, e or w. The string can optionally + contains spaces or commas, but they are ignored. Each + letter refers to a side (north, south, east, or west) + that the window will "stick" to. If both n and s + (or e and w) are specified, the window will be + stretched to fill the entire height (or width) of + its cavity. + width size + Specify a width for the window. The width will be + the outer dimension of the window including its + border, if any. If size is an empty string, or + if -width is not specified, then the width requested + internally by the window will be used initially; the + width may later be adjusted by the movement of sashes + in the panedwindow. Size may be any value accepted by + Tk_GetPixels. -""" + """ paneconfig = paneconfigure def panes(self): - """Returns an ordered list of the child panes. -""" + """Returns an ordered list of the child panes.""" def _test() -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi index aa3f73d890b06..22a917d2104ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/colorchooser.pyi @@ -7,18 +7,19 @@ __all__ = ["Chooser", "askcolor"] class Chooser(Dialog): """Create a dialog for the tk_chooseColor command. 
-Args: - master: The master widget for this dialog. If not provided, - defaults to options['parent'] (if defined). - options: Dictionary of options for the tk_chooseColor call. - initialcolor: Specifies the selected color when the - dialog is first displayed. This can be a tk color - string or a 3-tuple of ints in the range (0, 255) - for an RGB triplet. - parent: The parent window of the color dialog. The - color dialog is displayed on top of this. - title: A string for the title of the dialog box. -""" + Args: + master: The master widget for this dialog. If not provided, + defaults to options['parent'] (if defined). + options: Dictionary of options for the tk_chooseColor call. + initialcolor: Specifies the selected color when the + dialog is first displayed. This can be a tk color + string or a 3-tuple of ints in the range (0, 255) + for an RGB triplet. + parent: The parent window of the color dialog. The + color dialog is displayed on top of this. + title: A string for the title of the dialog box. + """ + command: ClassVar[str] def askcolor( @@ -26,6 +27,6 @@ def askcolor( ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: """Display dialog window for selection of a color. -Convenience wrapper for the Chooser class. Displays the color -chooser dialog with color as the initial value. -""" + Convenience wrapper for the Chooser class. Displays the color + chooser dialog with color as the initial value. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi index b23a05cec982a..18906bd604937 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/dnd.pyi @@ -98,6 +98,7 @@ dnd_start(). This will call dnd_leave() if a target is currently active; it will never call dnd_commit(). """ + from tkinter import Event, Misc, Tk, Widget from typing import ClassVar, Protocol, type_check_only diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi index ef746e89c2083..af3c7c38aa0cf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/filedialog.pyi @@ -11,6 +11,7 @@ to the native file dialogues available in Tk 4.2 and newer, and the directory dialogue available in Tk 8.3 and newer. These interfaces were written by Fredrik Lundh, May 1997. """ + from _typeshed import Incomplete, StrOrBytesPath, StrPath from collections.abc import Hashable, Iterable from tkinter import Button, Entry, Event, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog @@ -37,24 +38,25 @@ dialogstates: dict[Hashable, tuple[str, str]] class FileDialog: """Standard file selection dialog -- no checks on selected file. -Usage: + Usage: - d = FileDialog(master) - fname = d.go(dir_or_file, pattern, default, key) - if fname is None: ...canceled... - else: ...open file... + d = FileDialog(master) + fname = d.go(dir_or_file, pattern, default, key) + if fname is None: ...canceled... + else: ...open file... -All arguments to go() are optional. + All arguments to go() are optional. -The 'key' argument specifies a key in the global dictionary -'dialogstates', which keeps track of the values for the directory -and pattern arguments, overriding the values passed in (it does -not keep track of the default argument!). If no key is specified, -the dialog keeps no memory of previous state. 
Note that memory is -kept even when the dialog is canceled. (All this emulates the -behavior of the Macintosh file selection dialogs.) + The 'key' argument specifies a key in the global dictionary + 'dialogstates', which keeps track of the values for the directory + and pattern arguments, overriding the values passed in (it does + not keep track of the default argument!). If no key is specified, + the dialog keeps no memory of previous state. Note that memory is + kept even when the dialog is canceled. (All this emulates the + behavior of the Macintosh file selection dialogs.) + + """ -""" title: str master: Misc directory: str | None @@ -90,32 +92,32 @@ behavior of the Macintosh file selection dialogs.) def set_selection(self, file: StrPath) -> None: ... class LoadFileDialog(FileDialog): - """File selection dialog which checks that the file exists. -""" + """File selection dialog which checks that the file exists.""" + title: str def ok_command(self) -> None: ... class SaveFileDialog(FileDialog): - """File selection dialog which checks that the file may be created. -""" + """File selection dialog which checks that the file may be created.""" + title: str def ok_command(self) -> None: ... class _Dialog(commondialog.Dialog): ... class Open(_Dialog): - """Ask for a filename to open -""" + """Ask for a filename to open""" + command: ClassVar[str] class SaveAs(_Dialog): - """Ask for a filename to save as -""" + """Ask for a filename to save as""" + command: ClassVar[str] class Directory(commondialog.Dialog): - """Ask for a directory -""" + """Ask for a directory""" + command: ClassVar[str] # TODO: command kwarg available on macos @@ -130,8 +132,8 @@ def asksaveasfilename( title: str | None = ..., typevariable: StringVar | str | None = ..., ) -> str: # can be empty string - """Ask for a filename to save as -""" + """Ask for a filename to save as""" + def askopenfilename( *, defaultextension: str | None = "", @@ -142,8 +144,8 @@ def askopenfilename( title: str | None = ..., typevariable: StringVar | str | None = ..., ) -> str: # can be empty string - """Ask for a filename to open -""" + """Ask for a filename to open""" + def askopenfilenames( *, defaultextension: str | None = "", @@ -156,14 +158,14 @@ def askopenfilenames( ) -> Literal[""] | tuple[str, ...]: """Ask for multiple filenames to open -Returns a list of filenames or empty list if -cancel button selected -""" + Returns a list of filenames or empty list if + cancel button selected + """ + def askdirectory( *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = False, parent: Misc | None = ..., title: str | None = ... 
) -> str: # can be empty string - """Ask for a directory, and return the file name -""" + """Ask for a directory, and return the file name""" # TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) def asksaveasfile( @@ -178,8 +180,8 @@ def asksaveasfile( title: str | None = ..., typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: - """Ask for a filename to save as, and returned the opened file -""" + """Ask for a filename to save as, and returned the opened file""" + def askopenfile( mode: str = "r", *, @@ -191,8 +193,8 @@ def askopenfile( title: str | None = ..., typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: - """Ask for a filename to open, and returned the opened file -""" + """Ask for a filename to open, and returned the opened file""" + def askopenfiles( mode: str = "r", *, @@ -205,11 +207,11 @@ def askopenfiles( typevariable: StringVar | str | None = ..., ) -> tuple[IO[Incomplete], ...]: # can be empty tuple """Ask for multiple filenames and return the open file -objects + objects + + returns a list of open file objects or an empty list if + cancel selected + """ -returns a list of open file objects or an empty list if -cancel selected -""" def test() -> None: - """Simple test program. -""" + """Simple test program.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi index 865d53ce489b8..879f51b1e6e60 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/font.pyi @@ -42,24 +42,25 @@ class _MetricsDict(TypedDict): class Font: """Represents a named font. -Constructor options are: + Constructor options are: -font -- font specifier (name, system font, or (family, size, style)-tuple) -name -- name to use for this font configuration (defaults to a unique name) -exists -- does a named font by this name already exist? - Creates a new named font if False, points to the existing font if True. - Raises _tkinter.TclError if the assertion is false. + font -- font specifier (name, system font, or (family, size, style)-tuple) + name -- name to use for this font configuration (defaults to a unique name) + exists -- does a named font by this name already exist? + Creates a new named font if False, points to the existing font if True. + Raises _tkinter.TclError if the assertion is false. - the following are ignored if font is specified: + the following are ignored if font is specified: -family -- font 'family', e.g. Courier, Times, Helvetica -size -- font size in points -weight -- font thickness: NORMAL, BOLD -slant -- font slant: ROMAN, ITALIC -underline -- font underlining: false (0), true (1) -overstrike -- font strikeout: false (0), true (1) + family -- font 'family', e.g. Courier, Times, Helvetica + size -- font size in points + weight -- font thickness: NORMAL, BOLD + slant -- font slant: ROMAN, ITALIC + underline -- font underlining: false (0), true (1) + overstrike -- font strikeout: false (0), true (1) + + """ -""" name: str delete_font: bool counter: ClassVar[itertools.count[int]] # undocumented @@ -83,8 +84,8 @@ overstrike -- font strikeout: false (0), true (1) def __setitem__(self, key: str, value: Any) -> None: ... @overload def cget(self, option: Literal["family"]) -> str: - """Get font attribute -""" + """Get font attribute""" + @overload def cget(self, option: Literal["size"]) -> int: ... 
@overload @@ -98,8 +99,8 @@ overstrike -- font strikeout: false (0), true (1) __getitem__ = cget @overload def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: - """Return actual font attributes -""" + """Return actual font attributes""" + @overload def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... @overload @@ -122,42 +123,39 @@ overstrike -- font strikeout: false (0), true (1) underline: bool = ..., overstrike: bool = ..., ) -> _FontDict | None: - """Modify font attributes -""" + """Modify font attributes""" configure = config def copy(self) -> Font: - """Return a distinct copy of the current font -""" + """Return a distinct copy of the current font""" + @overload def metrics(self, option: Literal["ascent", "descent", "linespace"], /, *, displayof: tkinter.Misc | None = ...) -> int: """Return font metrics. -For best performance, create a dummy widget -using this font before calling this method. -""" + For best performance, create a dummy widget + using this font before calling this method. + """ + @overload def metrics(self, option: Literal["fixed"], /, *, displayof: tkinter.Misc | None = ...) -> bool: ... @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: - """Return text width -""" + """Return text width""" + def __eq__(self, other: object) -> bool: ... def __del__(self) -> None: ... def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: - """Get font families (as a tuple) -""" + """Get font families (as a tuple)""" + def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: - """Get names of defined fonts (as a tuple) -""" + """Get names of defined fonts (as a tuple)""" if sys.version_info >= (3, 10): def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: - """Given the name of a tk named font, returns a Font representation. - """ + """Given the name of a tk named font, returns a Font representation.""" else: def nametofont(name: str) -> Font: - """Given the name of a tk named font, returns a Font representation. 
- """ + """Given the name of a tk named font, returns a Font representation.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi index 36e669bddaeb5..424e8903d6d49 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/messagebox.pyi @@ -22,8 +22,8 @@ YES: Final = "yes" NO: Final = "no" class Message(Dialog): - """A message box -""" + """A message box""" + command: ClassVar[str] def showinfo( @@ -35,8 +35,8 @@ def showinfo( default: Literal["ok"] = "ok", parent: Misc = ..., ) -> str: - """Show an info message -""" + """Show an info message""" + def showwarning( title: str | None = None, message: str | None = None, @@ -46,8 +46,8 @@ def showwarning( default: Literal["ok"] = "ok", parent: Misc = ..., ) -> str: - """Show a warning message -""" + """Show a warning message""" + def showerror( title: str | None = None, message: str | None = None, @@ -57,8 +57,8 @@ def showerror( default: Literal["ok"] = "ok", parent: Misc = ..., ) -> str: - """Show an error message -""" + """Show an error message""" + def askquestion( title: str | None = None, message: str | None = None, @@ -68,8 +68,8 @@ def askquestion( default: Literal["yes", "no"] = ..., parent: Misc = ..., ) -> str: - """Ask a question -""" + """Ask a question""" + def askokcancel( title: str | None = None, message: str | None = None, @@ -79,8 +79,8 @@ def askokcancel( default: Literal["ok", "cancel"] = ..., parent: Misc = ..., ) -> bool: - """Ask if operation should proceed; return true if the answer is ok -""" + """Ask if operation should proceed; return true if the answer is ok""" + def askyesno( title: str | None = None, message: str | None = None, @@ -90,8 +90,8 @@ def askyesno( default: Literal["yes", "no"] = ..., parent: Misc = ..., ) -> bool: - """Ask a question; return true if the answer is yes -""" + """Ask a question; return true if the answer is yes""" + def askyesnocancel( title: str | None = None, message: str | None = None, @@ -101,8 +101,8 @@ def askyesnocancel( default: Literal["cancel", "yes", "no"] = ..., parent: Misc = ..., ) -> bool | None: - """Ask a question; return true if the answer is yes, None if cancelled. -""" + """Ask a question; return true if the answer is yes, None if cancelled.""" + def askretrycancel( title: str | None = None, message: str | None = None, @@ -112,5 +112,4 @@ def askretrycancel( default: Literal["retry", "cancel"] = ..., parent: Misc = ..., ) -> bool: - """Ask if operation should be retried; return true if the answer is yes -""" + """Ask if operation should be retried; return true if the answer is yes""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi index 6ac86b8379b3f..028a6fb395d5c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -10,6 +10,7 @@ the Scrollbar widget. Most methods calls are inherited from the Text widget; Pack, Grid and Place methods are redirected to the Frame widget however. 
""" + from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi index adf5f2d54684b..af5375e2a635d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/simpledialog.pyi @@ -12,48 +12,54 @@ askfloat -- get a float from the user askstring -- get a string from the user """ + from tkinter import Event, Frame, Misc, Toplevel class Dialog(Toplevel): """Class to open dialogs. -This class is intended as a base class for custom dialogs -""" + This class is intended as a base class for custom dialogs + """ + def __init__(self, parent: Misc | None, title: str | None = None) -> None: """Initialize a dialog. -Arguments: + Arguments: - parent -- a parent window (the application window) + parent -- a parent window (the application window) + + title -- the dialog title + """ - title -- the dialog title -""" def body(self, master: Frame) -> Misc | None: """create dialog body. -return widget that should have initial focus. -This method should be overridden, and is called -by the __init__ method. -""" + return widget that should have initial focus. + This method should be overridden, and is called + by the __init__ method. + """ + def buttonbox(self) -> None: """add standard button box. -override if you do not want the standard buttons -""" + override if you do not want the standard buttons + """ + def ok(self, event: Event[Misc] | None = None) -> None: ... def cancel(self, event: Event[Misc] | None = None) -> None: ... def validate(self) -> bool: """validate the data -This method is called automatically to validate the data before the -dialog is destroyed. By default, it always validates OK. -""" + This method is called automatically to validate the data before the + dialog is destroyed. By default, it always validates OK. + """ + def apply(self) -> None: """process the data -This method is called automatically to process the data, *after* -the dialog is destroyed. By default, it does nothing. -""" + This method is called automatically to process the data, *after* + the dialog is destroyed. By default, it does nothing. 
+ """ class SimpleDialog: def __init__( @@ -82,14 +88,15 @@ def askfloat( ) -> float | None: """get a float from the user -Arguments: + Arguments: - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + + Return value is a float + """ -Return value is a float -""" def askinteger( title: str | None, prompt: str, @@ -101,14 +108,15 @@ def askinteger( ) -> int | None: """get an integer from the user -Arguments: + Arguments: - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class + + Return value is an integer + """ -Return value is an integer -""" def askstring( title: str | None, prompt: str, @@ -120,11 +128,11 @@ def askstring( ) -> str | None: """get a string from the user -Arguments: + Arguments: - title -- the dialog title - prompt -- the label text - **kw -- see SimpleDialog class + title -- the dialog title + prompt -- the label text + **kw -- see SimpleDialog class -Return value is a string -""" + Return value is a string + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi index e75f0c8e61c85..e8e383809e950 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/tix.pyi @@ -44,6 +44,7 @@ class tixCommand: This is a mixin class, assumed to be mixed to Tkinter.Tk that supports the self.tk.call method. """ + def tix_addbitmapdir(self, directory: str) -> None: """Tix maintains a list of directories under which the tix_getimage and tix_getbitmap commands will @@ -54,11 +55,13 @@ class tixCommand: also be located using the tix_getimage or tix_getbitmap command. """ + def tix_cget(self, option: str) -> Any: """Returns the current value of the configuration option given by option. Option may be any of the options described in the CONFIGURATION OPTIONS section. """ + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: """Query or modify the configuration options of the Tix application context. If no option is specified, returns a dictionary all of the @@ -70,6 +73,7 @@ class tixCommand: to have the given value(s); in this case the command returns an empty string. Option may be any of the configuration options. """ + def tix_filedialog(self, dlgclass: str | None = None) -> str: """Returns the file selection dialog that may be shared among different calls from this application. This command will create a @@ -79,6 +83,7 @@ class tixCommand: of file selection dialog widget is desired. Possible options are tix FileSelectDialog or tixExFileSelectDialog. """ + def tix_getbitmap(self, name: str) -> str: """Locates a bitmap file of the name name.xpm or name in one of the bitmap directories (see the tix_addbitmapdir command above). By @@ -88,6 +93,7 @@ class tixCommand: '@'. The returned value can be used to configure the -bitmap option of the TK and Tix widgets. """ + def tix_getimage(self, name: str) -> str: """Locates an image file of the name name.xpm, name.xbm or name.ppm in one of the bitmap directories (see the addbitmapdir command @@ -100,6 +106,7 @@ class tixCommand: returns the name of the newly created image, which can be used to configure the -image option of the Tk and Tix widgets. 
""" + def tix_option_get(self, name: str) -> Any: """Gets the options maintained by the Tix scheme mechanism. Available options include: @@ -113,7 +120,8 @@ class tixCommand: light1_fg light2_bg light2_fg menu_font output1_bg output2_bg select_bg select_fg selector - """ + """ + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: """Resets the scheme and fontset of the Tix application to newScheme and newFontSet, respectively. This affects only those @@ -133,7 +141,8 @@ class tixCommand: class Tk(tkinter.Tk, tixCommand): """Toplevel widget of Tix which represents mostly the main window of an application. It has an associated Tcl interpreter. -""" + """ + def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): @@ -149,6 +158,7 @@ class TixWidget(tkinter.Widget): Both options are for use by subclasses only. """ + def __init__( self, master: tkinter.Misc | None = None, @@ -159,18 +169,19 @@ class TixWidget(tkinter.Widget): ) -> None: ... def __getattr__(self, name: str): ... def set_silent(self, value: str) -> None: - """Set a variable without calling its action routine -""" + """Set a variable without calling its action routine""" + def subwidget(self, name: str) -> tkinter.Widget: """Return the named subwidget (which must have been created by the sub-class). -""" + """ + def subwidgets_all(self) -> list[tkinter.Widget]: - """Return all subwidgets. -""" + """Return all subwidgets.""" + def config_all(self, option: Any, value: Any) -> None: - """Set configuration options for all subwidgets (and self). -""" + """Set configuration options for all subwidgets (and self).""" + def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... @@ -180,13 +191,15 @@ class TixSubWidget(TixWidget): This is used to mirror child widgets automatically created by Tix/Tk as part of a mega-widget in Python (which is not informed of this) -""" + """ + def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: """DisplayStyle - handle configuration options shared by (multiple) Display Items -""" + """ + def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... @@ -200,22 +213,25 @@ class Balloon(TixWidget): --------- ----- label Label message Message -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: """Bind balloon widget to another. One balloon widget may be bound to several widgets at the same time -""" + """ + def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): """ButtonBox - A container for pushbuttons. Subwidgets are the buttons added with the add method. """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: - """Add a button with given name to box. -""" + """Add a button with given name to box.""" + def invoke(self, name: str) -> None: ... 
class ComboBox(TixWidget): @@ -230,7 +246,8 @@ class ComboBox(TixWidget): slistbox ScrolledListBox tick Button cross Button : present if created with the fancy option -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... @@ -249,7 +266,8 @@ class Control(TixWidget): decr Button entry Entry label Label -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... @@ -264,7 +282,8 @@ class LabelEntry(TixWidget): ---------- ----- label Label entry Entry -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class LabelFrame(TixWidget): @@ -277,13 +296,15 @@ class LabelFrame(TixWidget): ---------- ----- label Label frame Frame -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Meter(TixWidget): """The Meter widget can be used to show the progress of a background job which may take a long time to execute. """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class OptionMenu(TixWidget): @@ -293,7 +314,8 @@ class OptionMenu(TixWidget): --------- ----- menubutton Menubutton menu Menu -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... @@ -311,7 +333,8 @@ class PopupMenu(TixWidget): ---------- ----- menubutton Menubutton menu Menu -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... @@ -322,14 +345,15 @@ class Select(TixWidget): radio-box or check-box style of selection options for the user. Subwidgets are buttons added dynamically using the add method. -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) -""" + """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help)""" + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self, name: str) -> None: ... @@ -343,7 +367,8 @@ class DirList(TixWidget): hlist HList hsb Scrollbar vsb Scrollbar -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... @@ -358,7 +383,8 @@ class DirTree(TixWidget): hlist HList hsb Scrollbar vsb Scrollbar -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... @@ -370,7 +396,8 @@ class DirSelectDialog(TixWidget): Subwidgets Class ---------- ----- dirbox DirSelectDialog -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def popup(self) -> None: ... def popdown(self) -> None: ... 
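Control and DirList, documented above, pair an entry with spinner buttons and display a single directory level. A small sketch, assuming the Tix option names (label, min, max) from the Tix documentation, which these stubs accept untyped through **kw:

import os
import tkinter
from tkinter import tix

root = tix.Tk()
frame = tkinter.Frame(root)
frame.pack(fill="both", expand=True)

# Control wraps an entry plus increment/decrement buttons; the label/min/max
# option names come from the Tix documentation and are not checked by the stubs.
counter = tix.Control(frame, label="Count:", min=0, max=10)
counter.pack()
counter.increment()          # same effect as clicking the up arrow once

# DirList shows one directory level; chdir() points it at another directory.
dirs = tix.DirList(frame)
dirs.pack(fill="both", expand=True)
dirs.chdir(os.path.expanduser("~"))

root.mainloop()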
@@ -388,7 +415,8 @@ class DirSelectBox(TixWidget): filter ComboBox dirlist ScrolledListBox filelist ScrolledListBox -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... class ExFileSelectBox(TixWidget): @@ -405,7 +433,8 @@ class ExFileSelectBox(TixWidget): file ComboBox dirlist ScrolledListBox filelist ScrolledListBox -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def filter(self) -> None: ... def invoke(self) -> None: ... @@ -423,7 +452,8 @@ class FileSelectBox(TixWidget): filter ComboBox dirlist ScrolledListBox filelist ScrolledListBox -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def apply_filter(self) -> None: ... def invoke(self) -> None: ... @@ -438,7 +468,8 @@ class FileEntry(TixWidget): ---------- ----- button Button entry Entry -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self) -> None: ... def file_dialog(self) -> None: ... @@ -450,7 +481,8 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): according to their places in the hierarchy. Subwidgets - None -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... @@ -512,67 +544,72 @@ class CheckList(TixWidget): similarly to the Tk checkbutton or radiobutton widgets, except it is capable of handling many more items than checkbuttons or radiobuttons. """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def autosetmode(self) -> None: """This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. -""" + Tree widget: if an entry has no child entries, its mode is set to + none. Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close. + """ + def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close. -""" + """Close the entry given by entryPath if its mode is close.""" + def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath. -""" + """Returns the current mode of the entry given by entryPath.""" + def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open. -""" + """Open the entry given by entryPath if its mode is open.""" + def getselection(self, mode: str = "on") -> tuple[str, ...]: """Returns a list of items whose status matches status. If status is - not specified, the list of items in the "on" status will be returned. - Mode can be on, off, default -""" + not specified, the list of items in the "on" status will be returned. + Mode can be on, off, default + """ + def getstatus(self, entrypath: str) -> str: - """Returns the current status of entryPath. -""" + """Returns the current status of entryPath.""" + def setstatus(self, entrypath: str, mode: str = "on") -> None: """Sets the status of entryPath to be status. A bitmap will be - displayed next to the entry its status is on, off or default. 
-""" + displayed next to the entry its status is on, off or default. + """ class Tree(TixWidget): """Tree - The tixTree widget can be used to display hierarchical data in a tree form. The user can adjust the view of the tree by opening or closing parts of the tree. -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def autosetmode(self) -> None: """This command calls the setmode method for all the entries in this - Tree widget: if an entry has no child entries, its mode is set to - none. Otherwise, if the entry has any hidden child entries, its mode is - set to open; otherwise its mode is set to close. -""" + Tree widget: if an entry has no child entries, its mode is set to + none. Otherwise, if the entry has any hidden child entries, its mode is + set to open; otherwise its mode is set to close. + """ + def close(self, entrypath: str) -> None: - """Close the entry given by entryPath if its mode is close. -""" + """Close the entry given by entryPath if its mode is close.""" + def getmode(self, entrypath: str) -> str: - """Returns the current mode of the entry given by entryPath. -""" + """Returns the current mode of the entry given by entryPath.""" + def open(self, entrypath: str) -> None: - """Open the entry given by entryPath if its mode is open. -""" + """Open the entry given by entryPath if its mode is open.""" + def setmode(self, entrypath: str, mode: str = "none") -> None: """This command is used to indicate whether the entry given by - entryPath has children entries and whether the children are visible. mode - must be one of open, close or none. If mode is set to open, a (+) - indicator is drawn next the entry. If mode is set to close, a (-) - indicator is drawn next the entry. If mode is set to none, no - indicators will be drawn for this entry. The default mode is none. The - open mode indicates the entry has hidden children and this entry can be - opened by the user. The close mode indicates that all the children of the - entry are now visible and the entry can be closed by the user. -""" + entryPath has children entries and whether the children are visible. mode + must be one of open, close or none. If mode is set to open, a (+) + indicator is drawn next the entry. If mode is set to close, a (-) + indicator is drawn next the entry. If mode is set to none, no + indicators will be drawn for this entry. The default mode is none. The + open mode indicates the entry has hidden children and this entry can be + opened by the user. The close mode indicates that all the children of the + entry are now visible and the entry can be closed by the user. + """ class TList(TixWidget, tkinter.XView, tkinter.YView): """TList - Hierarchy display widget which can be @@ -583,7 +620,8 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): multiple colors and fonts for the list entries. Subwidgets - None -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... @@ -619,7 +657,8 @@ class PanedWindow(TixWidget): Subwidgets Class ---------- ----- g/p widgets added dynamically with the add method. -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -635,7 +674,8 @@ class ListNoteBook(TixWidget): (windows). 
At one time only one of these pages can be shown. The user can navigate through these pages by choosing the name of the desired page in the hlist subwidget. -""" + """ + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def page(self, name: str) -> tkinter.Widget: ... @@ -649,7 +689,8 @@ class NoteBook(TixWidget): ---------- ----- nbframe NoteBookFrame page widgets added dynamically with the add method -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... @@ -662,7 +703,8 @@ class InputOnly(TixWidget): """InputOnly - Invisible widget. Unix only. Subwidgets - None -""" + """ + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Form: @@ -670,7 +712,8 @@ class Form: Widgets can be arranged by specifying attachments to other widgets. See Tix documentation for complete details -""" + """ + def __setitem__(self, key: str, value: Any) -> None: ... def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi index 821f7557eb971..8c6d70c398fe3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/ttk.pyi @@ -11,6 +11,7 @@ appearance. Widget class bindings are primarily responsible for maintaining the widget state and invoking callbacks, all aspects of the widgets appearance lies at Themes. """ + import _tkinter import sys import tkinter @@ -50,16 +51,17 @@ __all__ = [ def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: """Returns adict with its values converted from Tcl objects to Python -objects. -""" + objects. + """ + def setup_master(master: tkinter.Misc | None = None): """If master is not None, itself is returned. If master is None, -the default master is returned if there is one, otherwise a new -master is created and returned. + the default master is returned if there is one, otherwise a new + master is created and returned. -If it is not allowed to use the default root and master is None, -RuntimeError is raised. -""" + If it is not allowed to use the default root and master is None, + RuntimeError is raised. + """ _Padding: TypeAlias = ( float @@ -135,8 +137,8 @@ _ThemeSettingsValue = TypedDict( _ThemeSettings: TypeAlias = dict[str, _ThemeSettingsValue] class Style: - """Manipulate style database. -""" + """Manipulate style database.""" + master: tkinter.Misc tk: _tkinter.TkappType def __init__(self, master: tkinter.Misc | None = None) -> None: ... @@ -145,11 +147,12 @@ class Style: @overload def configure(self, style: str) -> dict[str, Any] | None: # Returns None if no configuration. """Query or sets the default value of the specified option(s) in -style. + style. + + Each key in kw is an option and each value is either a string or + a sequence identifying the value for that option. + """ -Each key in kw is an option and each value is either a string or -a sequence identifying the value for that option. -""" @overload def configure(self, style: str, query_opt: str, **kw: Any) -> Any: ... @overload @@ -157,56 +160,59 @@ a sequence identifying the value for that option. 
@overload def map(self, style: str, query_opt: str) -> _Statespec: """Query or sets dynamic values of the specified option(s) in -style. + style. + + Each key in kw is an option and each value should be a list or a + tuple (usually) containing statespecs grouped in tuples, or list, + or something else of your preference. A statespec is compound of + one or more states and then a value. + """ -Each key in kw is an option and each value should be a list or a -tuple (usually) containing statespecs grouped in tuples, or list, -or something else of your preference. A statespec is compound of -one or more states and then a value. -""" @overload def map(self, style: str, query_opt: None = None, **kw: Iterable[_Statespec]) -> dict[str, _Statespec]: ... def lookup(self, style: str, option: str, state: Iterable[str] | None = None, default: Any | None = None) -> Any: """Returns the value specified for option in style. -If state is specified it is expected to be a sequence of one -or more states. If the default argument is set, it is used as -a fallback value in case no specification for option is found. -""" + If state is specified it is expected to be a sequence of one + or more states. If the default argument is set, it is used as + a fallback value in case no specification for option is found. + """ + @overload def layout(self, style: str, layoutspec: _LayoutSpec) -> list[Never]: # Always seems to return an empty list """Define the widget layout for given style. If layoutspec is -omitted, return the layout specification for given style. - -layoutspec is expected to be a list or an object different than -None that evaluates to False if you want to "turn off" that style. -If it is a list (or tuple, or something else), each item should be -a tuple where the first item is the layout name and the second item -should have the format described below: - -LAYOUTS - - A layout can contain the value None, if takes no options, or - a dict of options specifying how to arrange the element. - The layout mechanism uses a simplified version of the pack - geometry manager: given an initial cavity, each element is - allocated a parcel. Valid options/values are: - - side: whichside - Specifies which side of the cavity to place the - element; one of top, right, bottom or left. If - omitted, the element occupies the entire cavity. - - sticky: nswe - Specifies where the element is placed inside its - allocated parcel. - - children: [sublayout... ] - Specifies a list of elements to place inside the - element. Each element is a tuple (or other sequence) - where the first item is the layout name, and the other - is a LAYOUT. -""" + omitted, return the layout specification for given style. + + layoutspec is expected to be a list or an object different than + None that evaluates to False if you want to "turn off" that style. + If it is a list (or tuple, or something else), each item should be + a tuple where the first item is the layout name and the second item + should have the format described below: + + LAYOUTS + + A layout can contain the value None, if takes no options, or + a dict of options specifying how to arrange the element. + The layout mechanism uses a simplified version of the pack + geometry manager: given an initial cavity, each element is + allocated a parcel. Valid options/values are: + + side: whichside + Specifies which side of the cavity to place the + element; one of top, right, bottom or left. If + omitted, the element occupies the entire cavity. 
+ + sticky: nswe + Specifies where the element is placed inside its + allocated parcel. + + children: [sublayout... ] + Specifies a list of elements to place inside the + element. Each element is a tuple (or other sequence) + where the first item is the layout name, and the other + is a LAYOUT. + """ + @overload def layout(self, style: str, layoutspec: None = None) -> _LayoutSpec: ... @overload @@ -223,8 +229,8 @@ LAYOUTS sticky: str = ..., width: float | str = ..., ) -> None: - """Create a new element in the current theme of given etype. -""" + """Create a new element in the current theme of given etype.""" + @overload def element_create(self, elementname: str, etype: Literal["from"], themename: str, fromelement: str = ..., /) -> None: ... if sys.platform == "win32" and sys.version_info >= (3, 13): # and tk version >= 8.6 @@ -243,8 +249,8 @@ LAYOUTS *, padding: _Padding = ..., ) -> None: - """Create a new element in the current theme of given etype. -""" + """Create a new element in the current theme of given etype.""" + @overload def element_create( self, @@ -272,90 +278,97 @@ LAYOUTS ) -> None: ... def element_names(self) -> tuple[str, ...]: - """Returns the list of elements defined in the current theme. -""" + """Returns the list of elements defined in the current theme.""" + def element_options(self, elementname: str) -> tuple[str, ...]: - """Return the list of elementname's options. -""" + """Return the list of elementname's options.""" + def theme_create(self, themename: str, parent: str | None = None, settings: _ThemeSettings | None = None) -> None: """Creates a new theme. -It is an error if themename already exists. If parent is -specified, the new theme will inherit styles, elements and -layouts from the specified parent theme. If settings are present, -they are expected to have the same syntax used for theme_settings. -""" + It is an error if themename already exists. If parent is + specified, the new theme will inherit styles, elements and + layouts from the specified parent theme. If settings are present, + they are expected to have the same syntax used for theme_settings. + """ + def theme_settings(self, themename: str, settings: _ThemeSettings) -> None: """Temporarily sets the current theme to themename, apply specified -settings and then restore the previous theme. + settings and then restore the previous theme. + + Each key in settings is a style and each value may contain the + keys 'configure', 'map', 'layout' and 'element create' and they + are expected to have the same format as specified by the methods + configure, map, layout and element_create respectively. + """ -Each key in settings is a style and each value may contain the -keys 'configure', 'map', 'layout' and 'element create' and they -are expected to have the same format as specified by the methods -configure, map, layout and element_create respectively. -""" def theme_names(self) -> tuple[str, ...]: - """Returns a list of all known themes. -""" + """Returns a list of all known themes.""" + @overload def theme_use(self, themename: str) -> None: """If themename is None, returns the theme in use, otherwise, set -the current theme to themename, refreshes all widgets and emits -a <> event. -""" + the current theme to themename, refreshes all widgets and emits + a <> event. + """ + @overload def theme_use(self, themename: None = None) -> str: ... class Widget(tkinter.Widget): - """Base class for Tk themed widgets. 
-""" + """Base class for Tk themed widgets.""" + def __init__(self, master: tkinter.Misc | None, widgetname, kw=None) -> None: """Constructs a Ttk Widget with the parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, takefocus, style + class, cursor, takefocus, style -SCROLLABLE WIDGET OPTIONS + SCROLLABLE WIDGET OPTIONS - xscrollcommand, yscrollcommand + xscrollcommand, yscrollcommand -LABEL WIDGET OPTIONS + LABEL WIDGET OPTIONS - text, textvariable, underline, image, compound, width + text, textvariable, underline, image, compound, width -WIDGET STATES + WIDGET STATES + + active, disabled, focus, pressed, selected, background, + readonly, alternate, invalid + """ - active, disabled, focus, pressed, selected, background, - readonly, alternate, invalid -""" def identify(self, x: int, y: int) -> str: """Returns the name of the element at position x, y, or the empty -string if the point does not lie within any element. + string if the point does not lie within any element. + + x and y are pixel coordinates relative to the widget. + """ -x and y are pixel coordinates relative to the widget. -""" def instate(self, statespec, callback=None, *args, **kw): """Test the widget's state. -If callback is not specified, returns True if the widget state -matches statespec and False otherwise. If callback is specified, -then it will be invoked with *args, **kw if the widget state -matches statespec. statespec is expected to be a sequence. -""" + If callback is not specified, returns True if the widget state + matches statespec and False otherwise. If callback is specified, + then it will be invoked with *args, **kw if the widget state + matches statespec. statespec is expected to be a sequence. + """ + def state(self, statespec=None): """Modify or inquire widget state. -Widget state is returned if statespec is None, otherwise it is -set according to the statespec flags and then a new state spec -is returned indicating which flags were changed. statespec is -expected to be a sequence. -""" + Widget state is returned if statespec is None, otherwise it is + set according to the statespec flags and then a new state spec + is returned indicating which flags were changed. statespec is + expected to be a sequence. + """ class Button(Widget): """Ttk Button widget, displays a textual label and/or image, and -evaluates a command when pressed. -""" + evaluates a command when pressed. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -378,15 +391,16 @@ evaluates a command when pressed. ) -> None: """Construct a Ttk Button widget with the parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, default, width + """ - command, default, width -""" @overload def configure( self, @@ -408,20 +422,20 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure def invoke(self) -> Any: - """Invokes the command associated with the button. -""" + """Invokes the command associated with the button.""" class Checkbutton(Widget): - """Ttk Checkbutton widget which is either in on- or off-state. -""" + """Ttk Checkbutton widget which is either in on- or off-state.""" + def __init__( self, master: tkinter.Misc | None = None, @@ -449,15 +463,16 @@ class Checkbutton(Widget): ) -> None: """Construct a Ttk Checkbutton widget with the parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, offvalue, onvalue, variable + """ - command, offvalue, onvalue, variable -""" @overload def configure( self, @@ -481,27 +496,29 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def invoke(self) -> Any: """Toggles between the selected and deselected states and -invokes the associated command. If the widget is currently -selected, sets the option variable to the offvalue option -and deselects the widget; otherwise, sets the option variable -to the option onvalue. + invokes the associated command. If the widget is currently + selected, sets the option variable to the offvalue option + and deselects the widget; otherwise, sets the option variable + to the option onvalue. -Returns the result of the associated command. -""" + Returns the result of the associated command. + """ class Entry(Widget, tkinter.Entry): """Ttk Entry widget displays a one-line text string and allows that -string to be edited by the user. -""" + string to be edited by the user. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -528,19 +545,20 @@ string to be edited by the user. ) -> None: """Constructs a Ttk Entry widget with the parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus, xscrollcommand + class, cursor, style, takefocus, xscrollcommand -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - exportselection, invalidcommand, justify, show, state, - textvariable, validate, validatecommand, width + exportselection, invalidcommand, justify, show, state, + textvariable, validate, validatecommand, width -VALIDATION MODES + VALIDATION MODES + + none, key, focus, focusin, focusout, all + """ - none, key, focus, focusin, focusout, all -""" @overload # type: ignore[override] def configure( self, @@ -565,10 +583,11 @@ VALIDATION MODES ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
# config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) @@ -596,30 +615,34 @@ the allowed keyword arguments call the method keys. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... def bbox(self, index) -> tuple[int, int, int, int]: # type: ignore[override] """Return a tuple of (x, y, width, height) which describes the -bounding box of the character given by index. -""" + bounding box of the character given by index. + """ + def identify(self, x: int, y: int) -> str: """Returns the name of the element at position x, y, or the -empty string if the coordinates are outside the window. -""" + empty string if the coordinates are outside the window. + """ + def validate(self): """Force revalidation, independent of the conditions specified -by the validate option. Returns False if validation fails, True -if it succeeds. Sets or clears the invalid state accordingly. -""" + by the validate option. Returns False if validation fails, True + if it succeeds. Sets or clears the invalid state accordingly. + """ class Combobox(Entry): """Ttk Combobox widget combines a text field with a pop-down list of -values. -""" + values. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -648,15 +671,16 @@ values. ) -> None: """Construct a Ttk Combobox widget with the parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + exportselection, justify, height, postcommand, state, + textvariable, values, width + """ - exportselection, justify, height, postcommand, state, - textvariable, values, width -""" @overload # type: ignore[override] def configure( self, @@ -684,10 +708,11 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) @@ -718,26 +743,28 @@ the allowed keyword arguments call the method keys. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... def current(self, newindex: int | None = None) -> int: """If newindex is supplied, sets the combobox value to the -element at position newindex in the list of values. 
Otherwise, -returns the index of the current value in the list of values -or -1 if the current value does not appear in the list. -""" + element at position newindex in the list of values. Otherwise, + returns the index of the current value in the list of values + or -1 if the current value does not appear in the list. + """ + def set(self, value: Any) -> None: - """Sets the value of the combobox to value. -""" + """Sets the value of the combobox to value.""" class Frame(Widget): """Ttk Frame widget is a container, used to group other widgets -together. -""" + together. + """ + # This should be kept in sync with tkinter.ttk.LabeledScale.__init__() # (all of these keyword-only arguments are also present there) def __init__( @@ -758,14 +785,15 @@ together. ) -> None: """Construct a Ttk Frame with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + borderwidth, relief, padding, width, height + """ - borderwidth, relief, padding, width, height -""" @overload def configure( self, @@ -783,17 +811,18 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): - """Ttk Label widget displays a textual label and/or image. -""" + """Ttk Label widget displays a textual label and/or image.""" + def __init__( self, master: tkinter.Misc | None = None, @@ -823,16 +852,17 @@ class Label(Widget): ) -> None: """Construct a Ttk Label with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, compound, cursor, image, style, takefocus, text, - textvariable, underline, width + class, compound, cursor, image, style, takefocus, text, + textvariable, underline, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + anchor, background, font, foreground, justify, padding, + relief, text, wraplength + """ - anchor, background, font, foreground, justify, padding, - relief, text, wraplength -""" @overload def configure( self, @@ -861,19 +891,21 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Labelframe(Widget): """Ttk Labelframe widget is a container used to group other widgets -together. It has an optional label, which may be a plain text string -or another widget. -""" + together. It has an optional label, which may be a plain text string + or another widget. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -896,14 +928,15 @@ or another widget. ) -> None: """Construct a Ttk Labelframe with parent master. 
-STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus + + WIDGET-SPECIFIC OPTIONS + labelanchor, text, underline, padding, labelwidget, width, + height + """ -WIDGET-SPECIFIC OPTIONS - labelanchor, text, underline, padding, labelwidget, width, - height -""" @overload def configure( self, @@ -925,10 +958,11 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -937,8 +971,9 @@ LabelFrame = Labelframe class Menubutton(Widget): """Ttk Menubutton widget displays a textual label and/or image, and -displays a menu when pressed. -""" + displays a menu when pressed. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -961,15 +996,16 @@ displays a menu when pressed. ) -> None: """Construct a Ttk Menubutton with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + direction, menu + """ - direction, menu -""" @overload def configure( self, @@ -991,19 +1027,21 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Notebook(Widget): """Ttk Notebook widget manages a collection of windows and displays -a single one at a time. Each child window is associated with a tab, -which the user may select to change the currently-displayed window. -""" + a single one at a time. Each child window is associated with a tab, + which the user may select to change the currently-displayed window. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1019,32 +1057,33 @@ which the user may select to change the currently-displayed window. ) -> None: """Construct a Ttk Notebook with parent master. 
-STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - height, padding, width + height, padding, width -TAB OPTIONS + TAB OPTIONS - state, sticky, padding, text, image, compound, underline + state, sticky, padding, text, image, compound, underline -TAB IDENTIFIERS (tab_id) + TAB IDENTIFIERS (tab_id) - The tab_id argument found in several methods may take any of - the following forms: + The tab_id argument found in several methods may take any of + the following forms: + + * An integer between zero and the number of tabs + * The name of a child window + * A positional specification of the form "@x,y", which + defines the tab + * The string "current", which identifies the + currently-selected tab + * The string "end", which returns the number of tabs (only + valid for method index) + """ - * An integer between zero and the number of tabs - * The name of a child window - * A positional specification of the form "@x,y", which - defines the tab - * The string "current", which identifies the - currently-selected tab - * The string "end", which returns the number of tabs (only - valid for method index) -""" @overload def configure( self, @@ -1059,10 +1098,11 @@ TAB IDENTIFIERS (tab_id) ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -1082,79 +1122,88 @@ the allowed keyword arguments call the method keys. ) -> None: """Adds a new tab to the notebook. -If window is currently managed by the notebook but hidden, it is -restored to its previous position. -""" + If window is currently managed by the notebook but hidden, it is + restored to its previous position. + """ + def forget(self, tab_id) -> None: # type: ignore[override] """Removes the tab specified by tab_id, unmaps and unmanages the -associated window. -""" + associated window. + """ + def hide(self, tab_id) -> None: """Hides the tab specified by tab_id. -The tab will not be displayed, but the associated window remains -managed by the notebook and its configuration remembered. Hidden -tabs may be restored with the add command. -""" + The tab will not be displayed, but the associated window remains + managed by the notebook and its configuration remembered. Hidden + tabs may be restored with the add command. + """ + def identify(self, x: int, y: int) -> str: """Returns the name of the tab element at position x, y, or the -empty string if none. -""" + empty string if none. + """ + def index(self, tab_id): """Returns the numeric index of the tab specified by tab_id, or -the total number of tabs if tab_id is the string "end". -""" + the total number of tabs if tab_id is the string "end". + """ + def insert(self, pos, child, **kw) -> None: """Inserts a pane at the specified position. -pos is either the string end, an integer index, or the name of -a managed child. If child is already managed by the notebook, -moves it to the specified position. -""" + pos is either the string end, an integer index, or the name of + a managed child. If child is already managed by the notebook, + moves it to the specified position. 
+ """ + def select(self, tab_id=None): """Selects the specified tab. -The associated child window will be displayed, and the -previously-selected window (if different) is unmapped. If tab_id -is omitted, returns the widget name of the currently selected -pane. -""" + The associated child window will be displayed, and the + previously-selected window (if different) is unmapped. If tab_id + is omitted, returns the widget name of the currently selected + pane. + """ + def tab(self, tab_id, option=None, **kw): """Query or modify the options of the specific tab_id. -If kw is not given, returns a dict of the tab option values. If option -is specified, returns the value of that option. Otherwise, sets the -options to the corresponding values. -""" + If kw is not given, returns a dict of the tab option values. If option + is specified, returns the value of that option. Otherwise, sets the + options to the corresponding values. + """ + def tabs(self): - """Returns a list of windows managed by the notebook. -""" + """Returns a list of windows managed by the notebook.""" + def enable_traversal(self) -> None: """Enable keyboard traversal for a toplevel window containing -this notebook. + this notebook. -This will extend the bindings for the toplevel window containing -this notebook as follows: + This will extend the bindings for the toplevel window containing + this notebook as follows: - Control-Tab: selects the tab following the currently selected - one + Control-Tab: selects the tab following the currently selected + one - Shift-Control-Tab: selects the tab preceding the currently - selected one + Shift-Control-Tab: selects the tab preceding the currently + selected one - Alt-K: where K is the mnemonic (underlined) character of any - tab, will select that tab. + Alt-K: where K is the mnemonic (underlined) character of any + tab, will select that tab. -Multiple notebooks in a single toplevel may be enabled for -traversal, including nested notebooks. However, notebook traversal -only works properly if all panes are direct children of the -notebook. -""" + Multiple notebooks in a single toplevel may be enabled for + traversal, including nested notebooks. However, notebook traversal + only works properly if all panes are direct children of the + notebook. + """ class Panedwindow(Widget, tkinter.PanedWindow): """Ttk Panedwindow widget displays a number of subwindows, stacked -either vertically or horizontally. -""" + either vertically or horizontally. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1171,26 +1220,28 @@ either vertically or horizontally. ) -> None: """Construct a Ttk Panedwindow with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - orient, width, height + orient, width, height -PANE OPTIONS + PANE OPTIONS + + weight + """ - weight -""" def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: """Add a child widget to the panedwindow in a new pane. -The child argument is the name of the child widget -followed by pairs of arguments that specify how to -manage the windows. The possible options and values -are the ones accepted by the paneconfigure method. -""" + The child argument is the name of the child widget + followed by pairs of arguments that specify how to + manage the windows. The possible options and values + are the ones accepted by the paneconfigure method. 
+ """ + @overload # type: ignore[override] def configure( self, @@ -1204,10 +1255,11 @@ are the ones accepted by the paneconfigure method. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) @@ -1224,47 +1276,51 @@ the allowed keyword arguments call the method keys. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... forget = tkinter.PanedWindow.forget def insert(self, pos, child, **kw) -> None: """Inserts a pane at the specified positions. -pos is either the string end, and integer index, or the name -of a child. If child is already managed by the paned window, -moves it to the specified position. -""" + pos is either the string end, and integer index, or the name + of a child. If child is already managed by the paned window, + moves it to the specified position. + """ + def pane(self, pane, option=None, **kw): """Query or modify the options of the specified pane. -pane is either an integer index or the name of a managed subwindow. -If kw is not given, returns a dict of the pane option values. If -option is specified then the value for that option is returned. -Otherwise, sets the options to the corresponding values. -""" + pane is either an integer index or the name of a managed subwindow. + If kw is not given, returns a dict of the pane option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values. + """ + def sashpos(self, index, newpos=None): """If newpos is specified, sets the position of sash number index. -May adjust the positions of adjacent sashes to ensure that -positions are monotonically increasing. Sash positions are further -constrained to be between 0 and the total size of the widget. + May adjust the positions of adjacent sashes to ensure that + positions are monotonically increasing. Sash positions are further + constrained to be between 0 and the total size of the widget. -Returns the new position of sash number index. -""" + Returns the new position of sash number index. + """ PanedWindow = Panedwindow class Progressbar(Widget): """Ttk Progressbar widget shows the status of a long-running -operation. They can operate in two modes: determinate mode shows the -amount completed relative to the total amount of work to be done, and -indeterminate mode provides an animated display to let the user know -that something is happening. -""" + operation. They can operate in two modes: determinate mode shows the + amount completed relative to the total amount of work to be done, and + indeterminate mode provides an animated display to let the user know + that something is happening. 
+ """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1284,14 +1340,15 @@ that something is happening. ) -> None: """Construct a Ttk Progressbar with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + orient, length, mode, maximum, value, variable, phase + """ - orient, length, mode, maximum, value, variable, phase -""" @overload def configure( self, @@ -1310,33 +1367,37 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def start(self, interval: Literal["idle"] | int | None = None) -> None: """Begin autoincrement mode: schedules a recurring timer event -that calls method step every interval milliseconds. + that calls method step every interval milliseconds. + + interval defaults to 50 milliseconds (20 steps/second) if omitted. + """ -interval defaults to 50 milliseconds (20 steps/second) if omitted. -""" def step(self, amount: float | None = None) -> None: """Increments the value option by amount. -amount defaults to 1.0 if omitted. -""" + amount defaults to 1.0 if omitted. + """ + def stop(self) -> None: """Stop autoincrement mode: cancels any recurring timer event -initiated by start. -""" + initiated by start. + """ class Radiobutton(Widget): """Ttk Radiobutton widgets are used in groups to show or change a -set of mutually-exclusive options. -""" + set of mutually-exclusive options. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1360,15 +1421,16 @@ set of mutually-exclusive options. ) -> None: """Construct a Ttk Radiobutton with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, compound, cursor, image, state, style, takefocus, - text, textvariable, underline, width + class, compound, cursor, image, state, style, takefocus, + text, textvariable, underline, width -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, value, variable + """ - command, value, variable -""" @overload def configure( self, @@ -1391,26 +1453,28 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def invoke(self) -> Any: """Sets the option variable to the option value, selects the -widget, and invokes the associated command. + widget, and invokes the associated command. -Returns the result of the command, or an empty string if -no command is specified. -""" + Returns the result of the command, or an empty string if + no command is specified. 
+ """ # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scale(Widget, tkinter.Scale): # type: ignore[misc] """Ttk Scale widget is typically used to control the numeric value of -a linked variable that varies uniformly over some range. -""" + a linked variable that varies uniformly over some range. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1431,14 +1495,15 @@ a linked variable that varies uniformly over some range. ) -> None: """Construct a Ttk Scale with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, from, length, orient, to, value, variable + """ - command, from, length, orient, to, value, variable -""" @overload # type: ignore[override] def configure( self, @@ -1458,9 +1523,10 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Modify or query scale options. -Setting a value for any of the "from", "from_" or "to" options -generates a <> event. -""" + Setting a value for any of the "from", "from_" or "to" options + generates a <> event. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why) @@ -1483,24 +1549,25 @@ generates a <> event. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... def get(self, x: int | None = None, y: int | None = None) -> float: """Get the current value of the value option, or the value -corresponding to the coordinates x, y if they are specified. + corresponding to the coordinates x, y if they are specified. -x and y are pixel coordinates relative to the scale widget -origin. -""" + x and y are pixel coordinates relative to the scale widget + origin. + """ # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] - """Ttk Scrollbar controls the viewport of a scrollable widget. -""" + """Ttk Scrollbar controls the viewport of a scrollable widget.""" + def __init__( self, master: tkinter.Misc | None = None, @@ -1515,14 +1582,15 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] ) -> None: """Construct a Ttk Scrollbar with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + command, orient + """ - command, orient -""" @overload # type: ignore[override] def configure( self, @@ -1536,10 +1604,11 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. 
+ """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) @@ -1556,17 +1625,19 @@ the allowed keyword arguments call the method keys. ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... class Separator(Widget): """Ttk Separator widget displays a horizontal or vertical separator -bar. -""" + bar. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1580,14 +1651,15 @@ bar. ) -> None: """Construct a Ttk Separator with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus + class, cursor, style, takefocus -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + orient + """ - orient -""" @overload def configure( self, @@ -1600,18 +1672,20 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Sizegrip(Widget): """Ttk Sizegrip allows the user to resize the containing toplevel -window by pressing and dragging the grip. -""" + window by pressing and dragging the grip. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1624,10 +1698,11 @@ window by pressing and dragging the grip. ) -> None: """Construct a Ttk Sizegrip with parent master. -STANDARD OPTIONS + STANDARD OPTIONS + + class, cursor, state, style, takefocus + """ - class, cursor, state, style, takefocus -""" @overload def configure( self, @@ -1639,10 +1714,11 @@ STANDARD OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure @@ -1650,9 +1726,10 @@ the allowed keyword arguments call the method keys. class Spinbox(Entry): """Ttk Spinbox is an Entry with increment and decrement arrows -It is commonly used for number entry or to select from a list of -string values. -""" + It is commonly used for number entry or to select from a list of + string values. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1685,15 +1762,16 @@ string values. ) -> None: """Construct a Ttk Spinbox widget with the parent master. 
-STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus, validate, - validatecommand, xscrollcommand, invalidcommand + class, cursor, style, takefocus, validate, + validatecommand, xscrollcommand, invalidcommand -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS + + to, from_, increment, values, wrap, format, command + """ - to, from_, increment, values, wrap, format, command -""" @overload # type: ignore[override] def configure( self, @@ -1725,16 +1803,16 @@ WIDGET-SPECIFIC OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure # type: ignore[assignment] def set(self, value: Any) -> None: - """Sets the value of the Spinbox to value. -""" + """Sets the value of the Spinbox to value.""" @type_check_only class _TreeviewItemDict(TypedDict): @@ -1771,10 +1849,11 @@ class _TreeviewColumnDict(TypedDict): class Treeview(Widget, tkinter.XView, tkinter.YView): """Ttk Treeview widget displays a hierarchical collection of items. -Each item has a textual label, an optional image, and an optional list -of data values. The data values are displayed in successive columns -after the tree label. -""" + Each item has a textual label, an optional image, and an optional list + of data values. The data values are displayed in successive columns + after the tree label. + """ + def __init__( self, master: tkinter.Misc | None = None, @@ -1799,23 +1878,24 @@ after the tree label. ) -> None: """Construct a Ttk Treeview with parent master. -STANDARD OPTIONS + STANDARD OPTIONS - class, cursor, style, takefocus, xscrollcommand, - yscrollcommand + class, cursor, style, takefocus, xscrollcommand, + yscrollcommand -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - columns, displaycolumns, height, padding, selectmode, show + columns, displaycolumns, height, padding, selectmode, show -ITEM OPTIONS + ITEM OPTIONS - text, image, values, open, tags + text, image, values, open, tags -TAG OPTIONS + TAG OPTIONS + + foreground, background, font, image + """ - foreground, background, font, image -""" @overload def configure( self, @@ -1835,41 +1915,46 @@ TAG OPTIONS ) -> dict[str, tuple[str, str, str, Any, Any]] | None: """Configure resources of a widget. -The values for resources are specified as keyword -arguments. To get an overview about -the allowed keyword arguments call the method keys. -""" + The values for resources are specified as keyword + arguments. To get an overview about + the allowed keyword arguments call the method keys. + """ + @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def bbox(self, item: str | int, column: str | int | None = None) -> tuple[int, int, int, int] | Literal[""]: # type: ignore[override] """Returns the bounding box (relative to the treeview widget's -window) of the specified item in the form x y width height. + window) of the specified item in the form x y width height. + + If column is specified, returns the bounding box of that cell. + If the item is not visible (i.e., if it is a descendant of a + closed item or is scrolled offscreen), returns an empty string. 
+ """ -If column is specified, returns the bounding box of that cell. -If the item is not visible (i.e., if it is a descendant of a -closed item or is scrolled offscreen), returns an empty string. -""" def get_children(self, item: str | int | None = None) -> tuple[str, ...]: """Returns a tuple of children belonging to item. -If item is not specified, returns root children. -""" + If item is not specified, returns root children. + """ + def set_children(self, item: str | int, *newchildren: str | int) -> None: """Replaces item's child with newchildren. -Children present in item that are not present in newchildren -are detached from tree. No items in newchildren may be an -ancestor of item. -""" + Children present in item that are not present in newchildren + are detached from tree. No items in newchildren may be an + ancestor of item. + """ + @overload def column(self, column: str | int, option: Literal["width", "minwidth"]) -> int: """Query or modify the options for the specified column. -If kw is not given, returns a dict of the column option values. If -option is specified then the value for that option is returned. -Otherwise, sets the options to the corresponding values. -""" + If kw is not given, returns a dict of the column option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values. + """ + @overload def column(self, column: str | int, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 @overload @@ -1892,49 +1977,54 @@ Otherwise, sets the options to the corresponding values. ) -> _TreeviewColumnDict | None: ... def delete(self, *items: str | int) -> None: """Delete all specified items and all their descendants. The root -item may not be deleted. -""" + item may not be deleted. + """ + def detach(self, *items: str | int) -> None: """Unlinks all of the specified items from the tree. -The items and all of their descendants are still present, and may -be reinserted at another point in the tree, but will not be -displayed. The root item may not be detached. -""" + The items and all of their descendants are still present, and may + be reinserted at another point in the tree, but will not be + displayed. The root item may not be detached. + """ + def exists(self, item: str | int) -> bool: """Returns True if the specified item is present in the tree, -False otherwise. -""" + False otherwise. + """ + @overload # type: ignore[override] def focus(self, item: None = None) -> str: # can return empty string """If item is specified, sets the focus item to item. Otherwise, -returns the current focus item, or '' if there is none. -""" + returns the current focus item, or '' if there is none. + """ + @overload def focus(self, item: str | int) -> Literal[""]: ... @overload def heading(self, column: str | int, option: Literal["text"]) -> str: """Query or modify the heading options for the specified column. -If kw is not given, returns a dict of the heading option values. If -option is specified then the value for that option is returned. -Otherwise, sets the options to the corresponding values. - -Valid options/values are: - text: text - The text to display in the column heading - image: image_name - Specifies an image to display to the right of the column - heading - anchor: anchor - Specifies how the heading text should be aligned. One of - the standard Tk anchor values - command: callback - A callback to be invoked when the heading label is - pressed. 
- -To configure the tree column heading, call this with column = "#0" -""" + If kw is not given, returns a dict of the heading option values. If + option is specified then the value for that option is returned. + Otherwise, sets the options to the corresponding values. + + Valid options/values are: + text: text + The text to display in the column heading + image: image_name + Specifies an image to display to the right of the column + heading + anchor: anchor + Specifies how the heading text should be aligned. One of + the standard Tk anchor values + command: callback + A callback to be invoked when the heading label is + pressed. + + To configure the tree column heading, call this with column = "#0" + """ + @overload def heading(self, column: str | int, option: Literal["image"]) -> tuple[str] | str: ... @overload @@ -1959,36 +2049,41 @@ To configure the tree column heading, call this with column = "#0" # Internal Method. Leave untyped: def identify(self, component, x, y): # type: ignore[override] """Returns a description of the specified component under the -point given by x and y, or the empty string if no such component -is present at that position. -""" + point given by x and y, or the empty string if no such component + is present at that position. + """ + def identify_row(self, y: int) -> str: - """Returns the item ID of the item at position y. -""" + """Returns the item ID of the item at position y.""" + def identify_column(self, x: int) -> str: """Returns the data column identifier of the cell at position x. -The tree column has ID #0. -""" + The tree column has ID #0. + """ + def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: """Returns one of: -heading: Tree heading area. -separator: Space between two columns headings; -tree: The tree area. -cell: A data cell. + heading: Tree heading area. + separator: Space between two columns headings; + tree: The tree area. + cell: A data cell. + + * Availability: Tk 8.6 + """ -* Availability: Tk 8.6 -""" def identify_element(self, x: int, y: int) -> str: # don't know what possible return values are """Returns the element at position x, y. -* Availability: Tk 8.6 -""" + * Availability: Tk 8.6 + """ + def index(self, item: str | int) -> int: """Returns the integer index of item within its parent's list -of children. -""" + of children. + """ + def insert( self, parent: str, @@ -2003,27 +2098,29 @@ of children. tags: str | list[str] | tuple[str, ...] = ..., ) -> str: """Creates a new item and return the item identifier of the newly -created item. - -parent is the item ID of the parent item, or the empty string -to create a new top-level item. index is an integer, or the value -end, specifying where in the list of parent's children to insert -the new item. If index is less than or equal to zero, the new node -is inserted at the beginning, if index is greater than or equal to -the current number of children, it is inserted at the end. If iid -is specified, it is used as the item identifier, iid must not -already exist in the tree. Otherwise, a new unique identifier -is generated. -""" + created item. + + parent is the item ID of the parent item, or the empty string + to create a new top-level item. index is an integer, or the value + end, specifying where in the list of parent's children to insert + the new item. If index is less than or equal to zero, the new node + is inserted at the beginning, if index is greater than or equal to + the current number of children, it is inserted at the end. 
If iid + is specified, it is used as the item identifier, iid must not + already exist in the tree. Otherwise, a new unique identifier + is generated. + """ + @overload def item(self, item: str | int, option: Literal["text"]) -> str: """Query or modify the options for the specified item. -If no options are given, a dict with options/values for the item -is returned. If option is specified then the value for that option -is returned. Otherwise, sets the options to the corresponding -values as given by kw. -""" + If no options are given, a dict with options/values for the item + is returned. If option is specified then the value for that option + is returned. Otherwise, sets the options to the corresponding + values as given by kw. + """ + @overload def item(self, item: str | int, option: Literal["image"]) -> tuple[str] | Literal[""]: ... @overload @@ -2051,67 +2148,72 @@ values as given by kw. def move(self, item: str | int, parent: str, index: int | Literal["end"]) -> None: """Moves item to position index in parent's list of children. -It is illegal to move an item under one of its descendants. If -index is less than or equal to zero, item is moved to the -beginning, if greater than or equal to the number of children, -it is moved to the end. If item was detached it is reattached. -""" + It is illegal to move an item under one of its descendants. If + index is less than or equal to zero, item is moved to the + beginning, if greater than or equal to the number of children, + it is moved to the end. If item was detached it is reattached. + """ reattach = move def next(self, item: str | int) -> str: # returning empty string means last item """Returns the identifier of item's next sibling, or '' if item -is the last child of its parent. -""" + is the last child of its parent. + """ + def parent(self, item: str | int) -> str: """Returns the ID of the parent of item, or '' if item is at the -top level of the hierarchy. -""" + top level of the hierarchy. + """ + def prev(self, item: str | int) -> str: # returning empty string means first item """Returns the identifier of item's previous sibling, or '' if -item is the first child of its parent. -""" + item is the first child of its parent. + """ + def see(self, item: str | int) -> None: """Ensure that item is visible. -Sets all of item's ancestors open option to True, and scrolls -the widget if necessary so that item is within the visible -portion of the tree. -""" + Sets all of item's ancestors open option to True, and scrolls + the widget if necessary so that item is within the visible + portion of the tree. + """ + def selection(self) -> tuple[str, ...]: - """Returns the tuple of selected items. -""" + """Returns the tuple of selected items.""" + @overload def selection_set(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """The specified items becomes the new selection. -""" + """The specified items becomes the new selection.""" + @overload def selection_set(self, *items: str | int) -> None: ... @overload def selection_add(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Add all of the specified items to the selection. -""" + """Add all of the specified items to the selection.""" + @overload def selection_add(self, *items: str | int) -> None: ... @overload def selection_remove(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Remove all of the specified items from the selection. 
-""" + """Remove all of the specified items from the selection.""" + @overload def selection_remove(self, *items: str | int) -> None: ... @overload def selection_toggle(self, items: list[str] | tuple[str, ...] | list[int] | tuple[int, ...], /) -> None: - """Toggle the selection state of each specified item. -""" + """Toggle the selection state of each specified item.""" + @overload def selection_toggle(self, *items: str | int) -> None: ... @overload def set(self, item: str | int, column: None = None, value: None = None) -> dict[str, Any]: """Query or set the value of given item. -With one argument, return a dictionary of column/value pairs -for the specified item. With two arguments, return the current -value of the specified column. With three arguments, set the -value of given column in given item to the specified value. -""" + With one argument, return a dictionary of column/value pairs + for the specified item. With two arguments, return the current + value of the specified column. With three arguments, set the + value of given column in given item to the specified value. + """ + @overload def set(self, item: str | int, column: str | int, value: None = None) -> Any: ... @overload @@ -2123,9 +2225,10 @@ value of given column in given item to the specified value. self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None ) -> str: """Bind a callback for the given event sequence to the tag tagname. -When an event is delivered to an item, the callbacks for each -of the item's tags option are called. -""" + When an event is delivered to an item, the callbacks for each + of the item's tags option are called. + """ + @overload def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... @overload @@ -2134,11 +2237,12 @@ of the item's tags option are called. def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: """Query or modify the options for the specified tagname. -If kw is not given, returns a dict of the option settings for tagname. -If option is specified, returns the value for that option for the -specified tagname. Otherwise, sets the options to the corresponding -values for the given tagname. -""" + If kw is not given, returns a dict of the option settings for tagname. + If option is specified, returns the value for that option for the + specified tagname. Otherwise, sets the options to the corresponding + values for the given tagname. + """ + @overload def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... @overload @@ -2158,21 +2262,23 @@ values for the given tagname. @overload def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: """If item is specified, returns 1 or 0 depending on whether the -specified item has the given tagname. Otherwise, returns a list of -all items which have the specified tag. + specified item has the given tagname. Otherwise, returns a list of + all items which have the specified tag. + + * Availability: Tk 8.6 + """ -* Availability: Tk 8.6 -""" @overload def tag_has(self, tagname: str, item: str | int) -> bool: ... class LabeledScale(Frame): """A Ttk Scale widget with a Ttk Label widget indicating its -current value. + current value. 
+ + The Ttk Scale can be accessed through instance.scale, and Ttk Label + can be accessed through instance.label + """ -The Ttk Scale can be accessed through instance.scale, and Ttk Label -can be accessed through instance.label -""" label: Label scale: Scale # This should be kept in sync with tkinter.ttk.Frame.__init__() @@ -2198,22 +2304,23 @@ can be accessed through instance.label width: float | str = 0, ) -> None: """Construct a horizontal LabeledScale with parent master, a -variable to be associated with the Ttk Scale widget and its range. -If variable is not specified, a tkinter.IntVar is created. + variable to be associated with the Ttk Scale widget and its range. + If variable is not specified, a tkinter.IntVar is created. -WIDGET-SPECIFIC OPTIONS + WIDGET-SPECIFIC OPTIONS - compound: 'top' or 'bottom' - Specifies how to display the label relative to the scale. - Defaults to 'top'. -""" + compound: 'top' or 'bottom' + Specifies how to display the label relative to the scale. + Defaults to 'top'. + """ # destroy is overridden, signature does not change value: Any class OptionMenu(Menubutton): """Themed OptionMenu, based after tkinter's OptionMenu, which allows -the user to select a value from a menu. -""" + the user to select a value from a menu. + """ + def __init__( self, master: tkinter.Misc | None, @@ -2226,22 +2333,22 @@ the user to select a value from a menu. command: Callable[[tkinter.StringVar], object] | None = None, ) -> None: """Construct a themed OptionMenu widget with master as the parent, -the resource textvariable set to variable, the initially selected -value specified by the default parameter, the menu values given by -*values and additional keywords. - -WIDGET-SPECIFIC OPTIONS - - style: stylename - Menubutton style. - direction: 'above', 'below', 'left', 'right', or 'flush' - Menubutton direction. - command: callback - A callback that will be invoked after selecting an item. -""" + the resource textvariable set to variable, the initially selected + value specified by the default parameter, the menu values given by + *values and additional keywords. + + WIDGET-SPECIFIC OPTIONS + + style: stylename + Menubutton style. + direction: 'above', 'below', 'left', 'right', or 'flush' + Menubutton direction. + command: callback + A callback that will be invoked after selecting an item. + """ # configure, config, cget, destroy are inherited from Menubutton # destroy and __setitem__ are overridden, signature does not change def set_menu(self, default: str | None = None, *values: str) -> None: """Build a new menu of radiobuttons with *values and optionally -a default value. -""" + a default value. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi index cb682651cd8c3..3da230b17b471 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/token.pyi @@ -1,5 +1,5 @@ -"""Token constants. -""" +"""Token constants.""" + import sys from typing import Final diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi index 3d91e8486026a..9dad927315954 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tokenize.pyi @@ -19,6 +19,7 @@ that it produces COMMENT tokens for comments and gives type OP for all operators. 
Additionally, all token lists start with an ENCODING token which tells you which encoding was used to decode the bytes stream. """ + import sys from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence @@ -164,10 +165,10 @@ class Untokenizer: if sys.version_info >= (3, 12): def add_backslash_continuation(self, start: _Position) -> None: """Add backslash continuation characters if the row has increased -without encountering a newline token. + without encountering a newline token. -This also inserts the correct amount of whitespace before the backslash. -""" + This also inserts the correct amount of whitespace before the backslash. + """ def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... @@ -177,64 +178,69 @@ This also inserts the correct amount of whitespace before the backslash. # Returns str, unless the ENCODING token is present, in which case it returns bytes. def untokenize(iterable: Iterable[_Token]) -> str | Any: """Transform tokens back into Python source code. -It returns a bytes object, encoded using the ENCODING -token, which is the first token sequence output by tokenize. + It returns a bytes object, encoded using the ENCODING + token, which is the first token sequence output by tokenize. -Each element returned by the iterable must be a token sequence -with at least two elements, a token number and token value. If -only two tokens are passed, the resulting output is poor. + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. + + The result is guaranteed to tokenize back to match the input so + that the conversion is lossless and round-trips are assured. + The guarantee applies only to the token type and token string as + the spacing between tokens (column positions) may change. + """ -The result is guaranteed to tokenize back to match the input so -that the conversion is lossless and round-trips are assured. -The guarantee applies only to the token type and token string as -the spacing between tokens (column positions) may change. -""" def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: """ -The detect_encoding() function is used to detect the encoding that should -be used to decode a Python source file. It requires one argument, readline, -in the same way as the tokenize() generator. + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argument, readline, + in the same way as the tokenize() generator. -It will call readline a maximum of twice, and return the encoding used -(as a string) and a list of any lines (left as bytes) it has read in. + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. -It detects the encoding from the presence of a utf-8 bom or an encoding -cookie as specified in pep-0263. If both a bom and a cookie are present, -but disagree, a SyntaxError will be raised. If the encoding cookie is an -invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, -'utf-8-sig' is returned. + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. 
If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ -If no encoding is specified, then the default of 'utf-8' will be returned. -""" def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: """ -The tokenize() generator requires one argument, readline, which -must be a callable object which provides the same interface as the -readline() method of built-in file objects. Each call to the function -should return one line of input as bytes. Alternatively, readline -can be a callable function terminating with StopIteration: - readline = open(myfile, 'rb').__next__ # Example of alternate readline - -The generator produces 5-tuples with these members: the token type; the -token string; a 2-tuple (srow, scol) of ints specifying the row and -column where the token begins in the source; a 2-tuple (erow, ecol) of -ints specifying the row and column where the token ends in the source; -and the line on which the token was found. The line passed is the -physical line. - -The first token sequence will always be an ENCODING token -which tells you which encoding was used to decode the bytes stream. -""" + The tokenize() generator requires one argument, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as bytes. Alternatively, readline + can be a callable function terminating with StopIteration: + readline = open(myfile, 'rb').__next__ # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + physical line. + + The first token sequence will always be an ENCODING token + which tells you which encoding was used to decode the bytes stream. + """ + def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: """Tokenize a source reading Python code as unicode strings. -This has the same API as tokenize(), except that it expects the *readline* -callable to return str objects instead of bytes. -""" + This has the same API as tokenize(), except that it expects the *readline* + callable to return str objects instead of bytes. + """ + def open(filename: FileDescriptorOrPath) -> TextIO: """Open a file in read only mode using the encoding detected by -detect_encoding(). -""" + detect_encoding(). + """ + def group(*choices: str) -> str: ... # undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi index 08ed30b61e30a..81a39b0ded553 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tomllib.pyi @@ -10,13 +10,14 @@ if sys.version_info >= (3, 14): class TOMLDecodeError(ValueError): """An error raised if a document is not valid TOML. 
-Adds the following attributes to ValueError: -msg: The unformatted error message -doc: The TOML document being parsed -pos: The index of doc where parsing failed -lineno: The line corresponding to pos -colno: The column corresponding to pos -""" + Adds the following attributes to ValueError: + msg: The unformatted error message + doc: The TOML document being parsed + pos: The index of doc where parsing failed + lineno: The line corresponding to pos + colno: The column corresponding to pos + """ + msg: str doc: str pos: int @@ -30,12 +31,10 @@ colno: The column corresponding to pos else: class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML. -""" + """An error raised if a document is not valid TOML.""" def load(fp: SupportsRead[bytes], /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: - """Parse TOML from a binary file object. -""" + """Parse TOML from a binary file object.""" + def loads(s: str, /, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: - """Parse TOML from a string. -""" + """Parse TOML from a string.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi index 62cded3e4c6ab..f2362a65d32be 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/trace.pyi @@ -18,6 +18,7 @@ Sample use, programmatically r = tracer.results() r.write_results(show_missing=True, coverdir="/tmp") """ + import sys import types from _typeshed import Incomplete, StrPath, TraceFunction @@ -47,8 +48,7 @@ class CoverageResults: outfile: StrPath | None = None, ) -> None: ... # undocumented def update(self, other: CoverageResults) -> None: - """Merge in the data from another CoverageResults -""" + """Merge in the data from another CoverageResults""" if sys.version_info >= (3, 13): def write_results( self, @@ -59,38 +59,38 @@ class CoverageResults: ignore_missing_files: bool = False, ) -> None: """ -Write the coverage results. - -:param show_missing: Show lines that had no hits. -:param summary: Include coverage summary per module. -:param coverdir: If None, the results of each module are placed in its - directory, otherwise it is included in the directory - specified. -:param ignore_missing_files: If True, counts for files that no longer - exist are silently ignored. Otherwise, a missing file - will raise a FileNotFoundError. -""" + Write the coverage results. + + :param show_missing: Show lines that had no hits. + :param summary: Include coverage summary per module. + :param coverdir: If None, the results of each module are placed in its + directory, otherwise it is included in the directory + specified. + :param ignore_missing_files: If True, counts for files that no longer + exist are silently ignored. Otherwise, a missing file + will raise a FileNotFoundError. + """ else: def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: """ - Write the coverage results. + Write the coverage results. - :param show_missing: Show lines that had no hits. - :param summary: Include coverage summary per module. - :param coverdir: If None, the results of each module are placed in its - directory, otherwise it is included in the directory - specified. - """ + :param show_missing: Show lines that had no hits. + :param summary: Include coverage summary per module. 
+ :param coverdir: If None, the results of each module are placed in its + directory, otherwise it is included in the directory + specified. + """ def write_results_file( self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None ) -> tuple[int, int]: - """Return a coverage results file in path. -""" + """Return a coverage results file in path.""" + def is_ignored_filename(self, filename: str) -> bool: # undocumented """Return True if the filename does not refer to a file -we want to have reported. -""" + we want to have reported. + """ class _Ignore: def __init__(self, modules: Iterable[str] | None = None, dirs: Iterable[StrPath] | None = None) -> None: ... @@ -120,22 +120,23 @@ class Trace: timing: bool = False, ) -> None: """ -@param count true iff it should count number of times each - line is executed -@param trace true iff it should print out each line that is - being counted -@param countfuncs true iff it should just output a list of - (filename, modulename, funcname,) for functions - that were called at least once; This overrides - 'count' and 'trace' -@param ignoremods a list of the names of modules to ignore -@param ignoredirs a list of the names of directories to ignore - all of the (recursive) contents of -@param infile file from which to read stored counts to be - added into the results -@param outfile file in which to write the results -@param timing true iff timing information be displayed -""" + @param count true iff it should count number of times each + line is executed + @param trace true iff it should print out each line that is + being counted + @param countfuncs true iff it should just output a list of + (filename, modulename, funcname,) for functions + that were called at least once; This overrides + 'count' and 'trace' + @param ignoremods a list of the names of modules to ignore + @param ignoredirs a list of the names of directories to ignore + all of the (recursive) contents of + @param infile file from which to read stored counts to be + added into the results + @param outfile file in which to write the results + @param timing true iff timing information be displayed + """ + def run(self, cmd: str | types.CodeType) -> None: ... def runctx( self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None @@ -145,19 +146,22 @@ class Trace: def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: """Handler for call events. -Adds information about who called who to the self._callers dict. -""" + Adds information about who called who to the self._callers dict. + """ + def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: """Handler for call events. -Adds (filename, modulename, funcname) to the self._calledfuncs dict. -""" + Adds (filename, modulename, funcname) to the self._calledfuncs dict. + """ + def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: """Handler for call events. -If the code block being entered is to be ignored, returns 'None', -else returns self.localtrace. -""" + If the code block being entered is to be ignored, returns 'None', + else returns self.localtrace. + """ + def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... 
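As context for the trace.pyi stubs above, a minimal sketch of the programmatic flow those Trace and CoverageResults docstrings describe is shown below; the traced snippet and the "/tmp/coverage" output directory are illustrative assumptions, not part of the vendored stubs.

import trace

# Count executed lines without echoing each line as it runs.
tracer = trace.Trace(count=True, trace=False)

# Run a small piece of code under the tracer; runctx() accepts a source
# string (or code object) plus the globals/locals to execute it in.
# Importing and calling into textwrap gives the tracer real stdlib
# source lines to count.
tracer.runctx("import textwrap; textwrap.wrap('hello world', 5)", globals(), locals())

# Collect the accumulated counts and write per-module .cover files,
# listing lines that were never hit plus a per-module summary.
# The coverage directory is a hypothetical location for the report files.
results = tracer.results()
results.write_results(show_missing=True, summary=True, coverdir="/tmp/coverage")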
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi index f395946adf107..4aa3b7a49284c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/traceback.pyi @@ -1,5 +1,5 @@ -"""Extract, format and print information about Python stack traces. -""" +"""Extract, format and print information about Python stack traces.""" + import sys from _typeshed import SupportsWrite, Unused from collections.abc import Generator, Iterable, Iterator, Mapping @@ -37,11 +37,11 @@ _FrameSummaryTuple: TypeAlias = tuple[str, int, str, str | None] def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: """Print up to 'limit' stack trace entries from the traceback 'tb'. -If 'limit' is omitted or None, all entries are printed. If 'file' -is omitted or None, the output goes to sys.stderr; otherwise -'file' should be an open file or file-like object with a write() -method. -""" + If 'limit' is omitted or None, all entries are printed. If 'file' + is omitted or None, the output goes to sys.stderr; otherwise + 'file' should be an open file or file-like object with a write() + method. + """ if sys.version_info >= (3, 10): @overload @@ -56,14 +56,15 @@ if sys.version_info >= (3, 10): ) -> None: """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. -This differs from print_tb() in the following ways: (1) if -traceback is not None, it prints a header "Traceback (most recent -call last):"; (2) it prints the exception type and value after the -stack trace; (3) if type is SyntaxError and value has the -appropriate format, it prints the line where the syntax error -occurred with a caret on the next line indicating the approximate -position of the error. -""" + This differs from print_tb() in the following ways: (1) if + traceback is not None, it prints a header "Traceback (most recent + call last):"; (2) it prints the exception type and value after the + stack trace; (3) if type is SyntaxError and value has the + appropriate format, it prints the line where the syntax error + occurred with a caret on the next line indicating the approximate + position of the error. + """ + @overload def print_exception( exc: BaseException, /, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True @@ -79,12 +80,13 @@ position of the error. ) -> list[str]: """Format a stack trace and the exception information. -The arguments have the same meaning as the corresponding arguments -to print_exception(). The return value is a list of strings, each -ending in a newline and some containing internal newlines. When -these lines are concatenated and printed, exactly the same text is -printed as does print_exception(). -""" + The arguments have the same meaning as the corresponding arguments + to print_exception(). The return value is a list of strings, each + ending in a newline and some containing internal newlines. When + these lines are concatenated and printed, exactly the same text is + printed as does print_exception(). + """ + @overload def format_exception(exc: BaseException, /, *, limit: int | None = None, chain: bool = True) -> list[str]: ... @@ -99,14 +101,15 @@ else: ) -> None: """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. 
- This differs from print_tb() in the following ways: (1) if - traceback is not None, it prints a header "Traceback (most recent - call last):"; (2) it prints the exception type and value after the - stack trace; (3) if type is SyntaxError and value has the - appropriate format, it prints the line where the syntax error - occurred with a caret on the next line indicating the approximate - position of the error. - """ + This differs from print_tb() in the following ways: (1) if + traceback is not None, it prints a header "Traceback (most recent + call last):"; (2) it prints the exception type and value after the + stack trace; (3) if type is SyntaxError and value has the + appropriate format, it prints the line where the syntax error + occurred with a caret on the next line indicating the approximate + position of the error. + """ + def format_exception( etype: type[BaseException] | None, value: BaseException | None, @@ -116,82 +119,87 @@ else: ) -> list[str]: """Format a stack trace and the exception information. - The arguments have the same meaning as the corresponding arguments - to print_exception(). The return value is a list of strings, each - ending in a newline and some containing internal newlines. When - these lines are concatenated and printed, exactly the same text is - printed as does print_exception(). - """ + The arguments have the same meaning as the corresponding arguments + to print_exception(). The return value is a list of strings, each + ending in a newline and some containing internal newlines. When + these lines are concatenated and printed, exactly the same text is + printed as does print_exception(). + """ def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """Shorthand for 'print_exception(sys.exception(), limit=limit, file=file, chain=chain)'. -""" + """Shorthand for 'print_exception(sys.exception(), limit=limit, file=file, chain=chain)'.""" + def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """This is a shorthand for 'print_exception(sys.last_exc, limit=limit, file=file, chain=chain)'. -""" + """This is a shorthand for 'print_exception(sys.last_exc, limit=limit, file=file, chain=chain)'.""" + def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: """Print a stack trace from its invocation point. -The optional 'f' argument can be used to specify an alternate -stack frame at which to start. The optional 'limit' and 'file' -arguments have the same meaning as for print_exception(). -""" + The optional 'f' argument can be used to specify an alternate + stack frame at which to start. The optional 'limit' and 'file' + arguments have the same meaning as for print_exception(). + """ + def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: """ -Return a StackSummary object representing a list of -pre-processed entries from traceback. - -This is useful for alternate formatting of stack traces. If -'limit' is omitted or None, all entries are extracted. A -pre-processed stack trace entry is a FrameSummary object -containing attributes filename, lineno, name, and line -representing the information that is usually printed for a stack -trace. The line is a string with leading and trailing -whitespace stripped; if the source is not available it is None. -""" + Return a StackSummary object representing a list of + pre-processed entries from traceback. 
+ + This is useful for alternate formatting of stack traces. If + 'limit' is omitted or None, all entries are extracted. A + pre-processed stack trace entry is a FrameSummary object + containing attributes filename, lineno, name, and line + representing the information that is usually printed for a stack + trace. The line is a string with leading and trailing + whitespace stripped; if the source is not available it is None. + """ + def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: """Extract the raw traceback from the current stack frame. -The return value has the same format as for extract_tb(). The -optional 'f' and 'limit' arguments have the same meaning as for -print_stack(). Each item in the list is a quadruple (filename, -line number, function name, text), and the entries are in order -from oldest to newest stack frame. -""" + The return value has the same format as for extract_tb(). The + optional 'f' and 'limit' arguments have the same meaning as for + print_stack(). Each item in the list is a quadruple (filename, + line number, function name, text), and the entries are in order + from oldest to newest stack frame. + """ + def format_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> list[str]: """Format a list of tuples or FrameSummary objects for printing. -Given a list of tuples or FrameSummary objects as returned by -extract_tb() or extract_stack(), return a list of strings ready -for printing. + Given a list of tuples or FrameSummary objects as returned by + extract_tb() or extract_stack(), return a list of strings ready + for printing. + + Each string in the resulting list corresponds to the item with the + same index in the argument list. Each string ends in a newline; + the strings may contain internal newlines as well, for those items + whose source text line is not None. + """ -Each string in the resulting list corresponds to the item with the -same index in the argument list. Each string ends in a newline; -the strings may contain internal newlines as well, for those items -whose source text line is not None. -""" def print_list(extracted_list: Iterable[FrameSummary | _FrameSummaryTuple], file: SupportsWrite[str] | None = None) -> None: """Print the list of tuples as returned by extract_tb() or -extract_stack() as a formatted stack trace to the given file. -""" + extract_stack() as a formatted stack trace to the given file. + """ if sys.version_info >= (3, 13): @overload def format_exception_only(exc: BaseException | None, /, *, show_group: bool = False) -> list[str]: """Format the exception part of a traceback. -The return value is a list of strings, each ending in a newline. + The return value is a list of strings, each ending in a newline. + + The list contains the exception's message, which is + normally a single string; however, for :exc:`SyntaxError` exceptions, it + contains several lines that (when printed) display detailed information + about where the syntax error occurred. Following the message, the list + contains the exception's ``__notes__``. -The list contains the exception's message, which is -normally a single string; however, for :exc:`SyntaxError` exceptions, it -contains several lines that (when printed) display detailed information -about where the syntax error occurred. Following the message, the list -contains the exception's ``__notes__``. 
+ When *show_group* is ``True``, and the exception is an instance of + :exc:`BaseExceptionGroup`, the nested exceptions are included as + well, recursively, with indentation relative to their nesting depth. + """ -When *show_group* is ``True``, and the exception is an instance of -:exc:`BaseExceptionGroup`, the nested exceptions are included as -well, recursively, with indentation relative to their nesting depth. -""" @overload def format_exception_only(exc: Unused, /, value: BaseException | None, *, show_group: bool = False) -> list[str]: ... @@ -200,14 +208,15 @@ elif sys.version_info >= (3, 10): def format_exception_only(exc: BaseException | None, /) -> list[str]: """Format the exception part of a traceback. - The return value is a list of strings, each ending in a newline. + The return value is a list of strings, each ending in a newline. + + The list contains the exception's message, which is + normally a single string; however, for :exc:`SyntaxError` exceptions, it + contains several lines that (when printed) display detailed information + about where the syntax error occurred. Following the message, the list + contains the exception's ``__notes__``. + """ - The list contains the exception's message, which is - normally a single string; however, for :exc:`SyntaxError` exceptions, it - contains several lines that (when printed) display detailed information - about where the syntax error occurred. Following the message, the list - contains the exception's ``__notes__``. - """ @overload def format_exception_only(exc: Unused, /, value: BaseException | None) -> list[str]: ... @@ -215,44 +224,45 @@ else: def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: """Format the exception part of a traceback. - The arguments are the exception type and value such as given by - sys.last_type and sys.last_value. The return value is a list of - strings, each ending in a newline. + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. - Normally, the list contains a single string; however, for - SyntaxError exceptions, it contains several lines that (when - printed) display detailed information about where the syntax - error occurred. + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. - The message indicating which exception occurred is always the last - string in the list. + The message indicating which exception occurred is always the last + string in the list. - """ + """ def format_exc(limit: int | None = None, chain: bool = True) -> str: - """Like print_exc() but return a string. -""" + """Like print_exc() but return a string.""" + def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: - """A shorthand for 'format_list(extract_tb(tb, limit))'. -""" + """A shorthand for 'format_list(extract_tb(tb, limit))'.""" + def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: - """Shorthand for 'format_list(extract_stack(f, limit))'. -""" + """Shorthand for 'format_list(extract_stack(f, limit))'.""" + def clear_frames(tb: TracebackType | None) -> None: - """Clear all references to local variables in the frames of a traceback. 
-""" + """Clear all references to local variables in the frames of a traceback.""" + def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: """Walk a stack yielding the frame and line number for each frame. -This will follow f.f_back from the given frame. If no frame is given, the -current stack is used. Usually used with StackSummary.extract. -""" + This will follow f.f_back from the given frame. If no frame is given, the + current stack is used. Usually used with StackSummary.extract. + """ + def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: """Walk a traceback yielding the frame and line number for each frame. -This will follow tb.tb_next (and thus is in the opposite order to -walk_stack). Usually used with StackSummary.extract. -""" + This will follow tb.tb_next (and thus is in the opposite order to + walk_stack). Usually used with StackSummary.extract. + """ if sys.version_info >= (3, 11): class _ExceptionPrintContext: @@ -262,42 +272,43 @@ if sys.version_info >= (3, 11): class TracebackException: """An exception ready for rendering. -The traceback module captures enough attributes from the original exception -to this intermediary form to ensure that no references are held, while -still being able to fully print or format it. - -max_group_width and max_group_depth control the formatting of exception -groups. The depth refers to the nesting level of the group, and the width -refers to the size of a single exception group's exceptions array. The -formatted output is truncated when either limit is exceeded. - -Use `from_exception` to create TracebackException instances from exception -objects, or the constructor to create TracebackException instances from -individual components. - -- :attr:`__cause__` A TracebackException of the original *__cause__*. -- :attr:`__context__` A TracebackException of the original *__context__*. -- :attr:`exceptions` For exception groups - a list of TracebackException - instances for the nested *exceptions*. ``None`` for other exceptions. -- :attr:`__suppress_context__` The *__suppress_context__* value from the - original exception. -- :attr:`stack` A `StackSummary` representing the traceback. -- :attr:`exc_type` (deprecated) The class of the original traceback. -- :attr:`exc_type_str` String display of exc_type -- :attr:`filename` For syntax errors - the filename where the error - occurred. -- :attr:`lineno` For syntax errors - the linenumber where the error - occurred. -- :attr:`end_lineno` For syntax errors - the end linenumber where the error - occurred. Can be `None` if not present. -- :attr:`text` For syntax errors - the text where the error - occurred. -- :attr:`offset` For syntax errors - the offset into the text where the - error occurred. -- :attr:`end_offset` For syntax errors - the end offset into the text where - the error occurred. Can be `None` if not present. -- :attr:`msg` For syntax errors - the compiler error message. -""" + The traceback module captures enough attributes from the original exception + to this intermediary form to ensure that no references are held, while + still being able to fully print or format it. + + max_group_width and max_group_depth control the formatting of exception + groups. The depth refers to the nesting level of the group, and the width + refers to the size of a single exception group's exceptions array. The + formatted output is truncated when either limit is exceeded. 
+ + Use `from_exception` to create TracebackException instances from exception + objects, or the constructor to create TracebackException instances from + individual components. + + - :attr:`__cause__` A TracebackException of the original *__cause__*. + - :attr:`__context__` A TracebackException of the original *__context__*. + - :attr:`exceptions` For exception groups - a list of TracebackException + instances for the nested *exceptions*. ``None`` for other exceptions. + - :attr:`__suppress_context__` The *__suppress_context__* value from the + original exception. + - :attr:`stack` A `StackSummary` representing the traceback. + - :attr:`exc_type` (deprecated) The class of the original traceback. + - :attr:`exc_type_str` String display of exc_type + - :attr:`filename` For syntax errors - the filename where the error + occurred. + - :attr:`lineno` For syntax errors - the linenumber where the error + occurred. + - :attr:`end_lineno` For syntax errors - the end linenumber where the error + occurred. Can be `None` if not present. + - :attr:`text` For syntax errors - the text where the error + occurred. + - :attr:`offset` For syntax errors - the offset into the text where the + error occurred. + - :attr:`end_offset` For syntax errors - the end offset into the text where + the error occurred. Can be `None` if not present. + - :attr:`msg` For syntax errors - the compiler error message. + """ + __cause__: TracebackException | None __context__: TracebackException | None if sys.version_info >= (3, 11): @@ -396,8 +407,7 @@ individual components. max_group_width: int = 15, max_group_depth: int = 10, ) -> Self: - """Create a TracebackException from an exception. -""" + """Create a TracebackException from an exception.""" elif sys.version_info >= (3, 10): @classmethod def from_exception( @@ -409,15 +419,13 @@ individual components. capture_locals: bool = False, compact: bool = False, ) -> Self: - """Create a TracebackException from an exception. -""" + """Create a TracebackException from an exception.""" else: @classmethod def from_exception( cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False ) -> Self: - """Create a TracebackException from an exception. -""" + """Create a TracebackException from an exception.""" def __eq__(self, other: object) -> bool: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -425,80 +433,78 @@ individual components. def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: """Format the exception. -If chain is not *True*, *__cause__* and *__context__* will not be formatted. + If chain is not *True*, *__cause__* and *__context__* will not be formatted. -The return value is a generator of strings, each ending in a newline and -some containing internal newlines. `print_exception` is a wrapper around -this method which just prints the lines to a file. + The return value is a generator of strings, each ending in a newline and + some containing internal newlines. `print_exception` is a wrapper around + this method which just prints the lines to a file. -The message indicating which exception occurred is always the last -string in the output. -""" + The message indicating which exception occurred is always the last + string in the output. + """ else: def format(self, *, chain: bool = True) -> Generator[str, None, None]: """Format the exception. - If chain is not *True*, *__cause__* and *__context__* will not be formatted. 
+ If chain is not *True*, *__cause__* and *__context__* will not be formatted. - The return value is a generator of strings, each ending in a newline and - some containing internal newlines. `print_exception` is a wrapper around - this method which just prints the lines to a file. - - The message indicating which exception occurred is always the last - string in the output. - """ + The return value is a generator of strings, each ending in a newline and + some containing internal newlines. `print_exception` is a wrapper around + this method which just prints the lines to a file. + The message indicating which exception occurred is always the last + string in the output. + """ if sys.version_info >= (3, 13): def format_exception_only(self, *, show_group: bool = False, _depth: int = 0) -> Generator[str, None, None]: """Format the exception part of the traceback. -The return value is a generator of strings, each ending in a newline. + The return value is a generator of strings, each ending in a newline. -Generator yields the exception message. -For :exc:`SyntaxError` exceptions, it -also yields (before the exception message) -several lines that (when printed) -display detailed information about where the syntax error occurred. -Following the message, generator also yields -all the exception's ``__notes__``. + Generator yields the exception message. + For :exc:`SyntaxError` exceptions, it + also yields (before the exception message) + several lines that (when printed) + display detailed information about where the syntax error occurred. + Following the message, generator also yields + all the exception's ``__notes__``. -When *show_group* is ``True``, and the exception is an instance of -:exc:`BaseExceptionGroup`, the nested exceptions are included as -well, recursively, with indentation relative to their nesting depth. -""" + When *show_group* is ``True``, and the exception is an instance of + :exc:`BaseExceptionGroup`, the nested exceptions are included as + well, recursively, with indentation relative to their nesting depth. + """ else: def format_exception_only(self) -> Generator[str, None, None]: """Format the exception part of the traceback. - The return value is a generator of strings, each ending in a newline. - - Generator yields the exception message. - For :exc:`SyntaxError` exceptions, it - also yields (before the exception message) - several lines that (when printed) - display detailed information about where the syntax error occurred. - Following the message, generator also yields - all the exception's ``__notes__``. - """ + The return value is a generator of strings, each ending in a newline. + Generator yields the exception message. + For :exc:`SyntaxError` exceptions, it + also yields (before the exception message) + several lines that (when printed) + display detailed information about where the syntax error occurred. + Following the message, generator also yields + all the exception's ``__notes__``. + """ if sys.version_info >= (3, 11): def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: - """Print the result of self.format(chain=chain) to 'file'. -""" + """Print the result of self.format(chain=chain) to 'file'.""" class FrameSummary: """Information about a single frame from a traceback. -- :attr:`filename` The filename for the frame. -- :attr:`lineno` The line within filename for the frame that was - active when the frame was captured. -- :attr:`name` The name of the function or method that was executing - when the frame was captured. 
-- :attr:`line` The text from the linecache module for the - of code that was running when the frame was captured. -- :attr:`locals` Either None if locals were not supplied, or a dict - mapping the name to the repr() of the variable. -""" + - :attr:`filename` The filename for the frame. + - :attr:`lineno` The line within filename for the frame that was + active when the frame was captured. + - :attr:`name` The name of the function or method that was executing + when the frame was captured. + - :attr:`line` The text from the linecache module for the + of code that was running when the frame was captured. + - :attr:`locals` Either None if locals were not supplied, or a dict + mapping the name to the repr() of the variable. + """ + if sys.version_info >= (3, 13): __slots__ = ( "filename", @@ -532,13 +538,13 @@ class FrameSummary: ) -> None: """Construct a FrameSummary. -:param lookup_line: If True, `linecache` is consulted for the source - code line. Otherwise, the line will be looked up when first needed. -:param locals: If supplied the frame locals, which will be captured as - object representations. -:param line: If provided, use this instead of looking up the line in - the linecache. -""" + :param lookup_line: If True, `linecache` is consulted for the source + code line. Otherwise, the line will be looked up when first needed. + :param locals: If supplied the frame locals, which will be captured as + object representations. + :param line: If provided, use this instead of looking up the line in + the linecache. + """ end_lineno: int | None colno: int | None end_colno: int | None @@ -555,13 +561,13 @@ class FrameSummary: ) -> None: """Construct a FrameSummary. - :param lookup_line: If True, `linecache` is consulted for the source - code line. Otherwise, the line will be looked up when first needed. - :param locals: If supplied the frame locals, which will be captured as - object representations. - :param line: If provided, use this instead of looking up the line in - the linecache. - """ + :param lookup_line: If True, `linecache` is consulted for the source + code line. Otherwise, the line will be looked up when first needed. + :param locals: If supplied the frame locals, which will be captured as + object representations. + :param line: If provided, use this instead of looking up the line in + the linecache. + """ filename: str lineno: int | None name: str @@ -586,8 +592,8 @@ class FrameSummary: __hash__: ClassVar[None] # type: ignore[assignment] class StackSummary(list[FrameSummary]): - """A list of FrameSummary objects, representing a stack of frames. -""" + """A list of FrameSummary objects, representing a stack of frames.""" + @classmethod def extract( cls, @@ -599,38 +605,39 @@ class StackSummary(list[FrameSummary]): ) -> StackSummary: """Create a StackSummary from a traceback or stack object. -:param frame_gen: A generator that yields (frame, lineno) tuples - whose summaries are to be included in the stack. -:param limit: None to include all frames or the number of frames to - include. -:param lookup_lines: If True, lookup lines for each frame immediately, - otherwise lookup is deferred until the frame is rendered. -:param capture_locals: If True, the local variables from each frame will - be captured as object representations into the FrameSummary. -""" + :param frame_gen: A generator that yields (frame, lineno) tuples + whose summaries are to be included in the stack. + :param limit: None to include all frames or the number of frames to + include. 
+ :param lookup_lines: If True, lookup lines for each frame immediately, + otherwise lookup is deferred until the frame is rendered. + :param capture_locals: If True, the local variables from each frame will + be captured as object representations into the FrameSummary. + """ + @classmethod def from_list(cls, a_list: Iterable[FrameSummary | _FrameSummaryTuple]) -> StackSummary: """ -Create a StackSummary object from a supplied list of -FrameSummary objects or old-style list of tuples. -""" + Create a StackSummary object from a supplied list of + FrameSummary objects or old-style list of tuples. + """ if sys.version_info >= (3, 11): def format_frame_summary(self, frame_summary: FrameSummary) -> str: """Format the lines for a single FrameSummary. -Returns a string representing one frame involved in the stack. This -gets called for every frame to be printed in the stack summary. -""" + Returns a string representing one frame involved in the stack. This + gets called for every frame to be printed in the stack summary. + """ def format(self) -> list[str]: """Format the stack ready for printing. -Returns a list of strings ready for printing. Each string in the -resulting list corresponds to a single frame from the stack. -Each string ends in a newline; the strings may contain internal -newlines as well, for those items with source text lines. + Returns a list of strings ready for printing. Each string in the + resulting list corresponds to a single frame from the stack. + Each string ends in a newline; the strings may contain internal + newlines as well, for those items with source text lines. -For long sequences of the same frame and line, the first few -repetitions are shown, followed by a summary line stating the exact -number of further repetitions. -""" + For long sequences of the same frame and line, the first few + repetitions are shown, followed by a summary line stating the exact + number of further repetitions. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi index 0cbaa06e2f60f..5a5bb840853c4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tracemalloc.pyi @@ -6,16 +6,17 @@ from typing_extensions import TypeAlias def get_object_traceback(obj: object) -> Traceback | None: """ -Get the traceback where the Python object *obj* was allocated. -Return a Traceback instance. + Get the traceback where the Python object *obj* was allocated. + Return a Traceback instance. + + Return None if the tracemalloc module is not tracing memory allocations or + did not trace the allocation of the object. + """ -Return None if the tracemalloc module is not tracing memory allocations or -did not trace the allocation of the object. -""" def take_snapshot() -> Snapshot: """ -Take a snapshot of traces of memory blocks allocated by Python. -""" + Take a snapshot of traces of memory blocks allocated by Python. + """ class BaseFilter: inclusive: bool @@ -43,8 +44,9 @@ class Filter(BaseFilter): class Statistic: """ -Statistic difference on memory allocations between two Snapshot instance. -""" + Statistic difference on memory allocations between two Snapshot instance. + """ + __slots__ = ("traceback", "size", "count") count: int size: int @@ -55,9 +57,10 @@ Statistic difference on memory allocations between two Snapshot instance. class StatisticDiff: """ -Statistic difference on memory allocations between an old and a new -Snapshot instance. 
-""" + Statistic difference on memory allocations between an old and a new + Snapshot instance. + """ + __slots__ = ("traceback", "size", "size_diff", "count", "count_diff") count: int count_diff: int @@ -72,8 +75,9 @@ _FrameTuple: TypeAlias = tuple[str, int] class Frame: """ -Frame of a traceback. -""" + Frame of a traceback. + """ + __slots__ = ("_frame",) @property def filename(self) -> str: ... @@ -85,31 +89,30 @@ Frame of a traceback. def __lt__(self, other: Frame) -> bool: ... if sys.version_info >= (3, 11): def __gt__(self, other: Frame) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __ge__(self, other: Frame) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __le__(self, other: Frame) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" else: def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] class Trace: """ -Trace of a memory block. -""" + Trace of a memory block. + """ + __slots__ = ("_trace",) @property def domain(self) -> int: ... @@ -123,9 +126,10 @@ Trace of a memory block. class Traceback(Sequence[Frame]): """ -Sequence of Frame instances sorted from the oldest frame -to the most recent frame. -""" + Sequence of Frame instances sorted from the oldest frame + to the most recent frame. + """ + __slots__ = ("_frames", "_total_nframe") @property def total_nframe(self) -> int | None: ... @@ -142,55 +146,58 @@ to the most recent frame. def __lt__(self, other: Traceback) -> bool: ... if sys.version_info >= (3, 11): def __gt__(self, other: Traceback) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __ge__(self, other: Traceback) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. Computed by @total_ordering from (not a < b).""" + def __le__(self, other: Traceback) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" else: def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a > b. Computed by @total_ordering from (not a < b) and (a != b). -""" + """Return a > b. Computed by @total_ordering from (not a < b) and (a != b).""" + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a >= b. Computed by @total_ordering from (not a < b). -""" + """Return a >= b. 
Computed by @total_ordering from (not a < b).""" + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: - """Return a <= b. Computed by @total_ordering from (a < b) or (a == b). -""" + """Return a <= b. Computed by @total_ordering from (a < b) or (a == b).""" class Snapshot: """ -Snapshot of traces of memory blocks allocated by Python. -""" + Snapshot of traces of memory blocks allocated by Python. + """ + def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: """ -Compute the differences with an old snapshot old_snapshot. Get -statistics as a sorted list of StatisticDiff instances, grouped by -group_by. -""" + Compute the differences with an old snapshot old_snapshot. Get + statistics as a sorted list of StatisticDiff instances, grouped by + group_by. + """ + def dump(self, filename: str) -> None: """ -Write the snapshot into a file. -""" + Write the snapshot into a file. + """ + def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: """ -Create a new Snapshot instance with a filtered traces sequence, filters -is a list of Filter or DomainFilter instances. If filters is an empty -list, return a new Snapshot instance with a copy of the traces. -""" + Create a new Snapshot instance with a filtered traces sequence, filters + is a list of Filter or DomainFilter instances. If filters is an empty + list, return a new Snapshot instance with a copy of the traces. + """ + @staticmethod def load(filename: str) -> Snapshot: """ -Load a snapshot from a file. -""" + Load a snapshot from a file. + """ + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: """ -Group statistics by key_type. Return a sorted list of Statistic -instances. -""" + Group statistics by key_type. Return a sorted list of Statistic + instances. + """ traceback_limit: int traces: Sequence[Trace] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi index e400f73bfcfb9..0219428d92bba 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tty.pyi @@ -1,5 +1,5 @@ -"""Terminal utilities. -""" +"""Terminal utilities.""" + import sys import termios from typing import IO, Final @@ -25,16 +25,13 @@ if sys.platform != "win32": OSPEED: Final = 5 CC: Final = 6 def setraw(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into raw mode. -""" - def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: - """Put terminal into cbreak mode. -""" + """Put terminal into raw mode.""" + def setcbreak(fd: _FD, when: int = 2) -> _ModeSetterReturn: + """Put terminal into cbreak mode.""" if sys.version_info >= (3, 12): def cfmakeraw(mode: termios._Attr) -> None: - """Make termios mode raw. -""" + """Make termios mode raw.""" + def cfmakecbreak(mode: termios._Attr) -> None: - """Make termios mode cbreak. -""" + """Make termios mode cbreak.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi index 44b40b9aa6dc3..b0e7c1bf297cb 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi @@ -74,6 +74,7 @@ Roughly it has the following features added: Behind the scenes there are some features included with possible extensions in mind. These will be commented and documented elsewhere. 
""" + import sys from _typeshed import StrPath from collections.abc import Callable, Generator, Sequence @@ -242,18 +243,19 @@ _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] if sys.version_info >= (3, 12): class Vec2D(tuple[float, float]): """A 2 dimensional vector class, used as a helper class -for implementing turtle graphics. -May be useful for turtle graphics programs also. -Derived from tuple, so a vector is a tuple! - -Provides (for a, b vectors, k number): - a+b vector addition - a-b vector subtraction - a*b inner product - k*a and a*k multiplication with scalar - |a| absolute value of a - a.rotate(angle) rotation -""" + for implementing turtle graphics. + May be useful for turtle graphics programs also. + Derived from tuple, so a vector is a tuple! + + Provides (for a, b vectors, k number): + a+b vector addition + a-b vector subtraction + a*b inner product + k*a and a*k multiplication with scalar + |a| absolute value of a + a.rotate(angle) rotation + """ + def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -265,25 +267,25 @@ Provides (for a, b vectors, k number): def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... def rotate(self, angle: float) -> Vec2D: - """rotate self counterclockwise by angle - """ + """rotate self counterclockwise by angle""" else: @disjoint_base class Vec2D(tuple[float, float]): """A 2 dimensional vector class, used as a helper class - for implementing turtle graphics. - May be useful for turtle graphics programs also. - Derived from tuple, so a vector is a tuple! - - Provides (for a, b vectors, k number): - a+b vector addition - a-b vector subtraction - a*b inner product - k*a and a*k multiplication with scalar - |a| absolute value of a - a.rotate(angle) rotation - """ + for implementing turtle graphics. + May be useful for turtle graphics programs also. + Derived from tuple, so a vector is a tuple! + + Provides (for a, b vectors, k number): + a+b vector addition + a-b vector subtraction + a*b inner product + k*a and a*k multiplication with scalar + |a| absolute value of a + a.rotate(angle) rotation + """ + def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] @@ -295,16 +297,16 @@ else: def __neg__(self) -> Vec2D: ... def __abs__(self) -> float: ... def rotate(self, angle: float) -> Vec2D: - """rotate self counterclockwise by angle - """ + """rotate self counterclockwise by angle""" # Does not actually inherit from Canvas, but dynamically gets all methods of Canvas class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] """Modeled after the scrolled canvas class from Grayons's Tkinter book. -Used as the default canvas, which pops up automatically when -using turtle graphics functions or the Turtle class. -""" + Used as the default canvas, which pops up automatically when + using turtle graphics functions or the Turtle class. + """ + bg: str hscroll: Scrollbar vscroll: Scrollbar @@ -314,16 +316,16 @@ using turtle graphics functions or the Turtle class. canvwidth: int canvheight: int def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: - """Adjust canvas and scrollbars according to given canvas size. -""" + """Adjust canvas and scrollbars according to given canvas size.""" class TurtleScreenBase: """Provide the basic graphics functionality. 
-Interface between Tkinter and turtle.py. + Interface between Tkinter and turtle.py. + + To port turtle.py to some different graphics toolkit + a corresponding TurtleScreenBase class has to be implemented. + """ -To port turtle.py to some different graphics toolkit -a corresponding TurtleScreenBase class has to be implemented. -""" cv: Canvas canvwidth: int canvheight: int @@ -333,231 +335,243 @@ a corresponding TurtleScreenBase class has to be implemented. def mainloop(self) -> None: """Starts event loop - calling Tkinter's mainloop function. -No argument. + No argument. -Must be last statement in a turtle graphics program. -Must NOT be used if a script is run from within IDLE in -n mode -(No subprocess) - for interactive use of turtle graphics. + Must be last statement in a turtle graphics program. + Must NOT be used if a script is run from within IDLE in -n mode + (No subprocess) - for interactive use of turtle graphics. -Example (for a TurtleScreen instance named screen): ->>> screen.mainloop() + Example (for a TurtleScreen instance named screen): + >>> screen.mainloop() + + """ -""" def textinput(self, title: str, prompt: str) -> str | None: """Pop up a dialog window for input of a string. -Arguments: title is the title of the dialog window, -prompt is a text mostly describing what information to input. + Arguments: title is the title of the dialog window, + prompt is a text mostly describing what information to input. -Return the string input -If the dialog is canceled, return None. + Return the string input + If the dialog is canceled, return None. -Example (for a TurtleScreen instance named screen): ->>> screen.textinput("NIM", "Name of first player:") + Example (for a TurtleScreen instance named screen): + >>> screen.textinput("NIM", "Name of first player:") + + """ -""" def numinput( self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: """Pop up a dialog window for input of a number. -Arguments: title is the title of the dialog window, -prompt is a text mostly describing what numerical information to input. -default: default value -minval: minimum value for input -maxval: maximum value for input + Arguments: title is the title of the dialog window, + prompt is a text mostly describing what numerical information to input. + default: default value + minval: minimum value for input + maxval: maximum value for input -The number input must be in the range minval .. maxval if these are -given. If not, a hint is issued and the dialog remains open for -correction. Return the number input. -If the dialog is canceled, return None. + The number input must be in the range minval .. maxval if these are + given. If not, a hint is issued and the dialog remains open for + correction. Return the number input. + If the dialog is canceled, return None. -Example (for a TurtleScreen instance named screen): ->>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) + Example (for a TurtleScreen instance named screen): + >>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) -""" + """ class Terminator(Exception): """Will be raised in TurtleScreen.update, if _RUNNING becomes False. -This stops execution of a turtle graphics script. -Main purpose: use in the Demo-Viewer turtle.Demo.py. -""" -class TurtleGraphicsError(Exception): - """Some TurtleGraphics Error + This stops execution of a turtle graphics script. + Main purpose: use in the Demo-Viewer turtle.Demo.py. 
""" +class TurtleGraphicsError(Exception): + """Some TurtleGraphics Error""" + class Shape: """Data structure modeling shapes. -attribute _type is one of "polygon", "image", "compound" -attribute _data is - depending on _type a poygon-tuple, -an image or a list constructed using the addcomponent method. -""" + attribute _type is one of "polygon", "image", "compound" + attribute _data is - depending on _type a poygon-tuple, + an image or a list constructed using the addcomponent method. + """ + def __init__( self, type_: Literal["polygon", "image", "compound"], data: _PolygonCoords | PhotoImage | None = None ) -> None: ... def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: """Add component to a shape of type compound. -Arguments: poly is a polygon, i. e. a tuple of number pairs. -fill is the fillcolor of the component, -outline is the outline color of the component. + Arguments: poly is a polygon, i. e. a tuple of number pairs. + fill is the fillcolor of the component, + outline is the outline color of the component. -call (for a Shapeobject namend s): --- s.addcomponent(((0,0), (10,10), (-10,10)), "red", "blue") + call (for a Shapeobject namend s): + -- s.addcomponent(((0,0), (10,10), (-10,10)), "red", "blue") -Example: ->>> poly = ((0,0),(10,-5),(0,10),(-10,-5)) ->>> s = Shape("compound") ->>> s.addcomponent(poly, "red", "blue") ->>> # .. add more components and then use register_shape() -""" + Example: + >>> poly = ((0,0),(10,-5),(0,10),(-10,-5)) + >>> s = Shape("compound") + >>> s.addcomponent(poly, "red", "blue") + >>> # .. add more components and then use register_shape() + """ class TurtleScreen(TurtleScreenBase): """Provides screen oriented methods like bgcolor etc. -Only relies upon the methods of TurtleScreenBase and NOT -upon components of the underlying graphics toolkit - -which is Tkinter in this case. -""" + Only relies upon the methods of TurtleScreenBase and NOT + upon components of the underlying graphics toolkit - + which is Tkinter in this case. + """ + def __init__( self, cv: Canvas, mode: Literal["standard", "logo", "world"] = "standard", colormode: float = 1.0, delay: int = 10 ) -> None: ... def clear(self) -> None: """Delete all drawings and all turtles from the TurtleScreen. -No argument. + No argument. -Reset empty TurtleScreen to its initial state: white background, -no backgroundimage, no eventbindings and tracing on. + Reset empty TurtleScreen to its initial state: white background, + no backgroundimage, no eventbindings and tracing on. -Example (for a TurtleScreen instance named screen): ->>> screen.clear() + Example (for a TurtleScreen instance named screen): + >>> screen.clear() + + Note: this method is not available as function. + """ -Note: this method is not available as function. -""" @overload def mode(self, mode: None = None) -> str: """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. -Optional argument: -mode -- one of the strings 'standard', 'logo' or 'world' + Optional argument: + mode -- one of the strings 'standard', 'logo' or 'world' -Mode 'standard' is compatible with turtle.py. -Mode 'logo' is compatible with most Logo-Turtle-Graphics. -Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in -this mode angles appear distorted if x/y unit-ratio doesn't equal 1. -If mode is not given, return the current mode. + Mode 'standard' is compatible with turtle.py. + Mode 'logo' is compatible with most Logo-Turtle-Graphics. + Mode 'world' uses userdefined 'worldcoordinates'. 
*Attention*: in + this mode angles appear distorted if x/y unit-ratio doesn't equal 1. + If mode is not given, return the current mode. - Mode Initial turtle heading positive angles - ------------|-------------------------|------------------- - 'standard' to the right (east) counterclockwise - 'logo' upward (north) clockwise + Mode Initial turtle heading positive angles + ------------|-------------------------|------------------- + 'standard' to the right (east) counterclockwise + 'logo' upward (north) clockwise + + Examples: + >>> mode('logo') # resets turtle heading to north + >>> mode() + 'logo' + """ -Examples: ->>> mode('logo') # resets turtle heading to north ->>> mode() -'logo' -""" @overload def mode(self, mode: Literal["standard", "logo", "world"]) -> None: ... def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: """Set up a user defined coordinate-system. -Arguments: -llx -- a number, x-coordinate of lower left corner of canvas -lly -- a number, y-coordinate of lower left corner of canvas -urx -- a number, x-coordinate of upper right corner of canvas -ury -- a number, y-coordinate of upper right corner of canvas + Arguments: + llx -- a number, x-coordinate of lower left corner of canvas + lly -- a number, y-coordinate of lower left corner of canvas + urx -- a number, x-coordinate of upper right corner of canvas + ury -- a number, y-coordinate of upper right corner of canvas -Set up user coodinat-system and switch to mode 'world' if necessary. -This performs a screen.reset. If mode 'world' is already active, -all drawings are redrawn according to the new coordinates. + Set up user coodinat-system and switch to mode 'world' if necessary. + This performs a screen.reset. If mode 'world' is already active, + all drawings are redrawn according to the new coordinates. -But ATTENTION: in user-defined coordinatesystems angles may appear -distorted. (see Screen.mode()) + But ATTENTION: in user-defined coordinatesystems angles may appear + distorted. (see Screen.mode()) + + Example (for a TurtleScreen instance named screen): + >>> screen.setworldcoordinates(-10,-0.5,50,1.5) + >>> for _ in range(36): + ... left(10) + ... forward(0.5) + """ -Example (for a TurtleScreen instance named screen): ->>> screen.setworldcoordinates(-10,-0.5,50,1.5) ->>> for _ in range(36): -... left(10) -... forward(0.5) -""" def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: """Adds a turtle shape to TurtleScreen's shapelist. -Arguments: -(1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! -(2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! -(3) name is an arbitrary string and shape is a tuple - of pairs of coordinates. Installs the corresponding - polygon shape -(4) name is an arbitrary string and shape is a - (compound) Shape object. Installs the corresponding - compound shape. -To use a shape, you have to issue the command shape(shapename). 
- -call: register_shape("turtle.gif") ---or: register_shape("tri", ((0,0), (10,10), (-10,10))) - -Example (for a TurtleScreen instance named screen): ->>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + Arguments: + (1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! + (2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! + (3) name is an arbitrary string and shape is a tuple + of pairs of coordinates. Installs the corresponding + polygon shape + (4) name is an arbitrary string and shape is a + (compound) Shape object. Installs the corresponding + compound shape. + To use a shape, you have to issue the command shape(shapename). + + call: register_shape("turtle.gif") + --or: register_shape("tri", ((0,0), (10,10), (-10,10))) + + Example (for a TurtleScreen instance named screen): + >>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + + """ -""" @overload def colormode(self, cmode: None = None) -> float: """Return the colormode or set it to 1.0 or 255. -Optional argument: -cmode -- one of the values 1.0 or 255 + Optional argument: + cmode -- one of the values 1.0 or 255 -r, g, b values of colortriples have to be in range 0..cmode. + r, g, b values of colortriples have to be in range 0..cmode. + + Example (for a TurtleScreen instance named screen): + >>> screen.colormode() + 1.0 + >>> screen.colormode(255) + >>> pencolor(240,160,80) + """ -Example (for a TurtleScreen instance named screen): ->>> screen.colormode() -1.0 ->>> screen.colormode(255) ->>> pencolor(240,160,80) -""" @overload def colormode(self, cmode: float) -> None: ... def reset(self) -> None: """Reset all Turtles on the Screen to their initial state. -No argument. + No argument. + + Example (for a TurtleScreen instance named screen): + >>> screen.reset() + """ -Example (for a TurtleScreen instance named screen): ->>> screen.reset() -""" def turtles(self) -> list[Turtle]: """Return the list of turtles on the screen. -Example (for a TurtleScreen instance named screen): ->>> screen.turtles() -[] -""" + Example (for a TurtleScreen instance named screen): + >>> screen.turtles() + [] + """ + @overload def bgcolor(self) -> _AnyColor: """Set or return backgroundcolor of the TurtleScreen. -Arguments (if given): a color string or three numbers -in the range 0..colormode or a 3-tuple of such numbers. + Arguments (if given): a color string or three numbers + in the range 0..colormode or a 3-tuple of such numbers. + + Example (for a TurtleScreen instance named screen): + >>> screen.bgcolor("orange") + >>> screen.bgcolor() + 'orange' + >>> screen.bgcolor(0.5,0,0.5) + >>> screen.bgcolor() + '#800080' + """ -Example (for a TurtleScreen instance named screen): ->>> screen.bgcolor("orange") ->>> screen.bgcolor() -'orange' ->>> screen.bgcolor(0.5,0,0.5) ->>> screen.bgcolor() -'#800080' -""" @overload def bgcolor(self, color: _Color) -> None: ... @overload @@ -566,36 +580,38 @@ Example (for a TurtleScreen instance named screen): def tracer(self, n: None = None) -> int: """Turns turtle animation on/off and set delay for update drawings. 
-Optional arguments: -n -- nonnegative integer -delay -- nonnegative integer - -If n is given, only each n-th regular screen update is really performed. -(Can be used to accelerate the drawing of complex graphics.) -Second arguments sets delay value (see RawTurtle.delay()) - -Example (for a TurtleScreen instance named screen): ->>> screen.tracer(8, 25) ->>> dist = 2 ->>> for i in range(200): -... fd(dist) -... rt(90) -... dist += 2 -""" + Optional arguments: + n -- nonnegative integer + delay -- nonnegative integer + + If n is given, only each n-th regular screen update is really performed. + (Can be used to accelerate the drawing of complex graphics.) + Second arguments sets delay value (see RawTurtle.delay()) + + Example (for a TurtleScreen instance named screen): + >>> screen.tracer(8, 25) + >>> dist = 2 + >>> for i in range(200): + ... fd(dist) + ... rt(90) + ... dist += 2 + """ + @overload def tracer(self, n: int, delay: int | None = None) -> None: ... @overload def delay(self, delay: None = None) -> int: """Return or set the drawing delay in milliseconds. -Optional argument: -delay -- positive integer + Optional argument: + delay -- positive integer + + Example (for a TurtleScreen instance named screen): + >>> screen.delay(15) + >>> screen.delay() + 15 + """ -Example (for a TurtleScreen instance named screen): ->>> screen.delay(15) ->>> screen.delay() -15 -""" @overload def delay(self, delay: int) -> None: ... if sys.version_info >= (3, 14): @@ -603,157 +619,166 @@ Example (for a TurtleScreen instance named screen): def no_animation(self) -> Generator[None]: """Temporarily turn off auto-updating the screen. -This is useful for drawing complex shapes where even the fastest setting -is too slow. Once this context manager is exited, the drawing will -be displayed. + This is useful for drawing complex shapes where even the fastest setting + is too slow. Once this context manager is exited, the drawing will + be displayed. -Example (for a TurtleScreen instance named screen -and a Turtle instance named turtle): ->>> with screen.no_animation(): -... turtle.circle(50) -""" + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): + >>> with screen.no_animation(): + ... turtle.circle(50) + """ def update(self) -> None: - """Perform a TurtleScreen update. - """ + """Perform a TurtleScreen update.""" + def window_width(self) -> int: """Return the width of the turtle window. -Example (for a TurtleScreen instance named screen): ->>> screen.window_width() -640 -""" + Example (for a TurtleScreen instance named screen): + >>> screen.window_width() + 640 + """ + def window_height(self) -> int: """Return the height of the turtle window. -Example (for a TurtleScreen instance named screen): ->>> screen.window_height() -480 -""" + Example (for a TurtleScreen instance named screen): + >>> screen.window_height() + 480 + """ + def getcanvas(self) -> Canvas: """Return the Canvas of this TurtleScreen. -No argument. + No argument. + + Example (for a Screen instance named screen): + >>> cv = screen.getcanvas() + >>> cv + + """ -Example (for a Screen instance named screen): ->>> cv = screen.getcanvas() ->>> cv - -""" def getshapes(self) -> list[str]: """Return a list of names of all currently available turtle shapes. -No argument. + No argument. + + Example (for a TurtleScreen instance named screen): + >>> screen.getshapes() + ['arrow', 'blank', 'circle', ... 
, 'turtle'] + """ -Example (for a TurtleScreen instance named screen): ->>> screen.getshapes() -['arrow', 'blank', 'circle', ... , 'turtle'] -""" def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: """Bind fun to mouse-click event on canvas. -Arguments: -fun -- a function with two arguments, the coordinates of the - clicked point on the canvas. -btn -- the number of the mouse-button, defaults to 1 + Arguments: + fun -- a function with two arguments, the coordinates of the + clicked point on the canvas. + btn -- the number of the mouse-button, defaults to 1 -Example (for a TurtleScreen instance named screen) + Example (for a TurtleScreen instance named screen) + + >>> screen.onclick(goto) + >>> # Subsequently clicking into the TurtleScreen will + >>> # make the turtle move to the clicked point. + >>> screen.onclick(None) + """ ->>> screen.onclick(goto) ->>> # Subsequently clicking into the TurtleScreen will ->>> # make the turtle move to the clicked point. ->>> screen.onclick(None) -""" def onkey(self, fun: Callable[[], object], key: str) -> None: """Bind fun to key-release event of key. -Arguments: -fun -- a function with no arguments -key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + Arguments: + fun -- a function with no arguments + key -- a string: key (e.g. "a") or key-symbol (e.g. "space") -In order to be able to register key-events, TurtleScreen -must have focus. (See method listen.) + In order to be able to register key-events, TurtleScreen + must have focus. (See method listen.) -Example (for a TurtleScreen instance named screen): + Example (for a TurtleScreen instance named screen): ->>> def f(): -... fd(50) -... lt(60) -... ->>> screen.onkey(f, "Up") ->>> screen.listen() + >>> def f(): + ... fd(50) + ... lt(60) + ... + >>> screen.onkey(f, "Up") + >>> screen.listen() -Subsequently the turtle can be moved by repeatedly pressing -the up-arrow key, consequently drawing a hexagon + Subsequently the turtle can be moved by repeatedly pressing + the up-arrow key, consequently drawing a hexagon + + """ -""" def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: """Set focus on TurtleScreen (in order to collect key-events) -No arguments. -Dummy arguments are provided in order -to be able to pass listen to the onclick method. + No arguments. + Dummy arguments are provided in order + to be able to pass listen to the onclick method. + + Example (for a TurtleScreen instance named screen): + >>> screen.listen() + """ -Example (for a TurtleScreen instance named screen): ->>> screen.listen() -""" def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: """Install a timer, which calls fun after t milliseconds. -Arguments: -fun -- a function with no arguments. -t -- a number >= 0 - -Example (for a TurtleScreen instance named screen): - ->>> running = True ->>> def f(): -... if running: -... fd(50) -... lt(60) -... screen.ontimer(f, 250) -... ->>> f() # makes the turtle marching around ->>> running = False -""" + Arguments: + fun -- a function with no arguments. + t -- a number >= 0 + + Example (for a TurtleScreen instance named screen): + + >>> running = True + >>> def f(): + ... if running: + ... fd(50) + ... lt(60) + ... screen.ontimer(f, 250) + ... + >>> f() # makes the turtle marching around + >>> running = False + """ + @overload def bgpic(self, picname: None = None) -> str: """Set background image or return name of current backgroundimage. 
-Optional argument: -picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". + Optional argument: + picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". -If picname is a filename, set the corresponding image as background. -If picname is "nopic", delete backgroundimage, if present. -If picname is None, return the filename of the current backgroundimage. + If picname is a filename, set the corresponding image as background. + If picname is "nopic", delete backgroundimage, if present. + If picname is None, return the filename of the current backgroundimage. + + Example (for a TurtleScreen instance named screen): + >>> screen.bgpic() + 'nopic' + >>> screen.bgpic("landscape.gif") + >>> screen.bgpic() + 'landscape.gif' + """ -Example (for a TurtleScreen instance named screen): ->>> screen.bgpic() -'nopic' ->>> screen.bgpic("landscape.gif") ->>> screen.bgpic() -'landscape.gif' -""" @overload def bgpic(self, picname: str) -> None: ... @overload def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: """Resize the canvas the turtles are drawing on. -Optional arguments: -canvwidth -- positive integer, new width of canvas in pixels -canvheight -- positive integer, new height of canvas in pixels -bg -- colorstring or color-tuple, new backgroundcolor -If no arguments are given, return current (canvaswidth, canvasheight) + Optional arguments: + canvwidth -- positive integer, new width of canvas in pixels + canvheight -- positive integer, new height of canvas in pixels + bg -- colorstring or color-tuple, new backgroundcolor + If no arguments are given, return current (canvaswidth, canvasheight) -Do not alter the drawing window. To observe hidden parts of -the canvas use the scrollbars. (Can make visible those parts -of a drawing, which were outside the canvas before!) + Do not alter the drawing window. To observe hidden parts of + the canvas use the scrollbars. (Can make visible those parts + of a drawing, which were outside the canvas before!) -Example (for a Turtle instance named turtle): ->>> turtle.screensize(2000,1500) ->>> # e.g. to search for an erroneously escaped turtle ;-) -""" + Example (for a Turtle instance named turtle): + >>> turtle.screensize(2000,1500) + >>> # e.g. to search for an erroneously escaped turtle ;-) + """ # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... @@ -761,51 +786,52 @@ Example (for a Turtle instance named turtle): def save(self, filename: StrPath, *, overwrite: bool = False) -> None: """Save the drawing as a PostScript file -Arguments: -filename -- a string, the path of the created file. - Must end with '.ps' or '.eps'. + Arguments: + filename -- a string, the path of the created file. + Must end with '.ps' or '.eps'. 
-Optional arguments: -overwrite -- boolean, if true, then existing files will be overwritten + Optional arguments: + overwrite -- boolean, if true, then existing files will be overwritten -Example (for a TurtleScreen instance named screen): ->>> screen.save('my_drawing.eps') -""" + Example (for a TurtleScreen instance named screen): + >>> screen.save('my_drawing.eps') + """ onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: """Bind fun to key-press event of key if key is given, -or to any key-press-event if no key is given. + or to any key-press-event if no key is given. -Arguments: -fun -- a function with no arguments -key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + Arguments: + fun -- a function with no arguments + key -- a string: key (e.g. "a") or key-symbol (e.g. "space") -In order to be able to register key-events, TurtleScreen -must have focus. (See method listen.) + In order to be able to register key-events, TurtleScreen + must have focus. (See method listen.) -Example (for a TurtleScreen instance named screen -and a Turtle instance named turtle): + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): ->>> def f(): -... fd(50) -... lt(60) -... ->>> screen.onkeypress(f, "Up") ->>> screen.listen() + >>> def f(): + ... fd(50) + ... lt(60) + ... + >>> screen.onkeypress(f, "Up") + >>> screen.listen() -Subsequently the turtle can be moved by repeatedly pressing -the up-arrow key, or by keeping pressed the up-arrow key. -consequently drawing a hexagon. -""" + Subsequently the turtle can be moved by repeatedly pressing + the up-arrow key, or by keeping pressed the up-arrow key. + consequently drawing a hexagon. + """ onkeyrelease = onkey class TNavigator: """Navigation part of the RawTurtle. -Implements methods for turtle movement. -""" + Implements methods for turtle movement. + """ + START_ORIENTATION: dict[str, Vec2D] DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int @@ -814,352 +840,369 @@ Implements methods for turtle movement. def reset(self) -> None: """reset turtle to its initial values -Will be overwritten by parent class -""" + Will be overwritten by parent class + """ + def degrees(self, fullcircle: float = 360.0) -> None: """Set angle measurement units to degrees. -Optional argument: -fullcircle - a number + Optional argument: + fullcircle - a number -Set angle measurement units, i. e. set number -of 'degrees' for a full circle. Default value is -360 degrees. + Set angle measurement units, i. e. set number + of 'degrees' for a full circle. Default value is + 360 degrees. -Example (for a Turtle instance named turtle): ->>> turtle.left(90) ->>> turtle.heading() -90 + Example (for a Turtle instance named turtle): + >>> turtle.left(90) + >>> turtle.heading() + 90 -Change angle measurement unit to grad (also known as gon, -grade, or gradian and equals 1/100-th of the right angle.) ->>> turtle.degrees(400.0) ->>> turtle.heading() -100 + Change angle measurement unit to grad (also known as gon, + grade, or gradian and equals 1/100-th of the right angle.) + >>> turtle.degrees(400.0) + >>> turtle.heading() + 100 + + """ -""" def radians(self) -> None: """Set the angle measurement units to radians. -No arguments. + No arguments. 
-Example (for a Turtle instance named turtle): ->>> turtle.heading() -90 ->>> turtle.radians() ->>> turtle.heading() -1.5707963267948966 -""" + Example (for a Turtle instance named turtle): + >>> turtle.heading() + 90 + >>> turtle.radians() + >>> turtle.heading() + 1.5707963267948966 + """ if sys.version_info >= (3, 12): def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: """To be overwritten by child class RawTurtle. -Includes no TPen references. -""" + Includes no TPen references. + """ def forward(self, distance: float) -> None: """Move the turtle forward by the specified distance. -Aliases: forward | fd + Aliases: forward | fd -Argument: -distance -- a number (integer or float) + Argument: + distance -- a number (integer or float) -Move the turtle forward by the specified distance, in the direction -the turtle is headed. + Move the turtle forward by the specified distance, in the direction + the turtle is headed. + + Example (for a Turtle instance named turtle): + >>> turtle.position() + (0.00, 0.00) + >>> turtle.forward(25) + >>> turtle.position() + (25.00,0.00) + >>> turtle.forward(-75) + >>> turtle.position() + (-50.00,0.00) + """ -Example (for a Turtle instance named turtle): ->>> turtle.position() -(0.00, 0.00) ->>> turtle.forward(25) ->>> turtle.position() -(25.00,0.00) ->>> turtle.forward(-75) ->>> turtle.position() -(-50.00,0.00) -""" def back(self, distance: float) -> None: """Move the turtle backward by distance. -Aliases: back | backward | bk + Aliases: back | backward | bk -Argument: -distance -- a number + Argument: + distance -- a number -Move the turtle backward by distance, opposite to the direction the -turtle is headed. Do not change the turtle's heading. + Move the turtle backward by distance, opposite to the direction the + turtle is headed. Do not change the turtle's heading. + + Example (for a Turtle instance named turtle): + >>> turtle.position() + (0.00, 0.00) + >>> turtle.backward(30) + >>> turtle.position() + (-30.00, 0.00) + """ -Example (for a Turtle instance named turtle): ->>> turtle.position() -(0.00, 0.00) ->>> turtle.backward(30) ->>> turtle.position() -(-30.00, 0.00) -""" def right(self, angle: float) -> None: """Turn turtle right by angle units. -Aliases: right | rt + Aliases: right | rt -Argument: -angle -- a number (integer or float) + Argument: + angle -- a number (integer or float) -Turn turtle right by angle units. (Units are by default degrees, -but can be set via the degrees() and radians() functions.) -Angle orientation depends on mode. (See this.) + Turn turtle right by angle units. (Units are by default degrees, + but can be set via the degrees() and radians() functions.) + Angle orientation depends on mode. (See this.) + + Example (for a Turtle instance named turtle): + >>> turtle.heading() + 22.0 + >>> turtle.right(45) + >>> turtle.heading() + 337.0 + """ -Example (for a Turtle instance named turtle): ->>> turtle.heading() -22.0 ->>> turtle.right(45) ->>> turtle.heading() -337.0 -""" def left(self, angle: float) -> None: """Turn turtle left by angle units. -Aliases: left | lt + Aliases: left | lt -Argument: -angle -- a number (integer or float) + Argument: + angle -- a number (integer or float) -Turn turtle left by angle units. (Units are by default degrees, -but can be set via the degrees() and radians() functions.) -Angle orientation depends on mode. (See this.) + Turn turtle left by angle units. (Units are by default degrees, + but can be set via the degrees() and radians() functions.) 
+ Angle orientation depends on mode. (See this.) + + Example (for a Turtle instance named turtle): + >>> turtle.heading() + 22.0 + >>> turtle.left(45) + >>> turtle.heading() + 67.0 + """ -Example (for a Turtle instance named turtle): ->>> turtle.heading() -22.0 ->>> turtle.left(45) ->>> turtle.heading() -67.0 -""" def pos(self) -> Vec2D: """Return the turtle's current location (x,y), as a Vec2D-vector. -Aliases: pos | position + Aliases: pos | position -No arguments. + No arguments. + + Example (for a Turtle instance named turtle): + >>> turtle.pos() + (0.00, 240.00) + """ -Example (for a Turtle instance named turtle): ->>> turtle.pos() -(0.00, 240.00) -""" def xcor(self) -> float: """Return the turtle's x coordinate. -No arguments. + No arguments. + + Example (for a Turtle instance named turtle): + >>> reset() + >>> turtle.left(60) + >>> turtle.forward(100) + >>> print(turtle.xcor()) + 50.0 + """ -Example (for a Turtle instance named turtle): ->>> reset() ->>> turtle.left(60) ->>> turtle.forward(100) ->>> print(turtle.xcor()) -50.0 -""" def ycor(self) -> float: """Return the turtle's y coordinate ---- -No arguments. - -Example (for a Turtle instance named turtle): ->>> reset() ->>> turtle.left(60) ->>> turtle.forward(100) ->>> print(turtle.ycor()) -86.6025403784 -""" + --- + No arguments. + + Example (for a Turtle instance named turtle): + >>> reset() + >>> turtle.left(60) + >>> turtle.forward(100) + >>> print(turtle.ycor()) + 86.6025403784 + """ + @overload def goto(self, x: tuple[float, float], y: None = None) -> None: """Move turtle to an absolute position. -Aliases: setpos | setposition | goto: - -Arguments: -x -- a number or a pair/vector of numbers -y -- a number None - -call: goto(x, y) # two coordinates ---or: goto((x, y)) # a pair (tuple) of coordinates ---or: goto(vec) # e.g. as returned by pos() - -Move turtle to an absolute position. If the pen is down, -a line will be drawn. The turtle's orientation does not change. - -Example (for a Turtle instance named turtle): ->>> tp = turtle.pos() ->>> tp -(0.00, 0.00) ->>> turtle.setpos(60,30) ->>> turtle.pos() -(60.00,30.00) ->>> turtle.setpos((20,80)) ->>> turtle.pos() -(20.00,80.00) ->>> turtle.setpos(tp) ->>> turtle.pos() -(0.00,0.00) -""" + Aliases: setpos | setposition | goto: + + Arguments: + x -- a number or a pair/vector of numbers + y -- a number None + + call: goto(x, y) # two coordinates + --or: goto((x, y)) # a pair (tuple) of coordinates + --or: goto(vec) # e.g. as returned by pos() + + Move turtle to an absolute position. If the pen is down, + a line will be drawn. The turtle's orientation does not change. + + Example (for a Turtle instance named turtle): + >>> tp = turtle.pos() + >>> tp + (0.00, 0.00) + >>> turtle.setpos(60,30) + >>> turtle.pos() + (60.00,30.00) + >>> turtle.setpos((20,80)) + >>> turtle.pos() + (20.00,80.00) + >>> turtle.setpos(tp) + >>> turtle.pos() + (0.00,0.00) + """ + @overload def goto(self, x: float, y: float) -> None: ... def home(self) -> None: """Move turtle to the origin - coordinates (0,0). -No arguments. + No arguments. -Move turtle to the origin - coordinates (0,0) and set its -heading to its start-orientation (which depends on mode). + Move turtle to the origin - coordinates (0,0) and set its + heading to its start-orientation (which depends on mode). 
+ + Example (for a Turtle instance named turtle): + >>> turtle.home() + """ -Example (for a Turtle instance named turtle): ->>> turtle.home() -""" def setx(self, x: float) -> None: """Set the turtle's first coordinate to x -Argument: -x -- a number (integer or float) + Argument: + x -- a number (integer or float) -Set the turtle's first coordinate to x, leave second coordinate -unchanged. + Set the turtle's first coordinate to x, leave second coordinate + unchanged. + + Example (for a Turtle instance named turtle): + >>> turtle.position() + (0.00, 240.00) + >>> turtle.setx(10) + >>> turtle.position() + (10.00, 240.00) + """ -Example (for a Turtle instance named turtle): ->>> turtle.position() -(0.00, 240.00) ->>> turtle.setx(10) ->>> turtle.position() -(10.00, 240.00) -""" def sety(self, y: float) -> None: """Set the turtle's second coordinate to y -Argument: -y -- a number (integer or float) + Argument: + y -- a number (integer or float) -Set the turtle's first coordinate to x, second coordinate remains -unchanged. + Set the turtle's first coordinate to x, second coordinate remains + unchanged. + + Example (for a Turtle instance named turtle): + >>> turtle.position() + (0.00, 40.00) + >>> turtle.sety(-10) + >>> turtle.position() + (0.00, -10.00) + """ -Example (for a Turtle instance named turtle): ->>> turtle.position() -(0.00, 40.00) ->>> turtle.sety(-10) ->>> turtle.position() -(0.00, -10.00) -""" @overload def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: """Return the distance from the turtle to (x,y) in turtle step units. -Arguments: -x -- a number or a pair/vector of numbers or a turtle instance -y -- a number None None - -call: distance(x, y) # two coordinates ---or: distance((x, y)) # a pair (tuple) of coordinates ---or: distance(vec) # e.g. as returned by pos() ---or: distance(mypen) # where mypen is another turtle - -Example (for a Turtle instance named turtle): ->>> turtle.pos() -(0.00, 0.00) ->>> turtle.distance(30,40) -50.0 ->>> pen = Turtle() ->>> pen.forward(77) ->>> turtle.distance(pen) -77.0 -""" + Arguments: + x -- a number or a pair/vector of numbers or a turtle instance + y -- a number None None + + call: distance(x, y) # two coordinates + --or: distance((x, y)) # a pair (tuple) of coordinates + --or: distance(vec) # e.g. as returned by pos() + --or: distance(mypen) # where mypen is another turtle + + Example (for a Turtle instance named turtle): + >>> turtle.pos() + (0.00, 0.00) + >>> turtle.distance(30,40) + 50.0 + >>> pen = Turtle() + >>> pen.forward(77) + >>> turtle.distance(pen) + 77.0 + """ + @overload def distance(self, x: float, y: float) -> float: ... @overload def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: """Return the angle of the line from the turtle's position to (x, y). -Arguments: -x -- a number or a pair/vector of numbers or a turtle instance -y -- a number None None + Arguments: + x -- a number or a pair/vector of numbers or a turtle instance + y -- a number None None -call: distance(x, y) # two coordinates ---or: distance((x, y)) # a pair (tuple) of coordinates ---or: distance(vec) # e.g. as returned by pos() ---or: distance(mypen) # where mypen is another turtle + call: distance(x, y) # two coordinates + --or: distance((x, y)) # a pair (tuple) of coordinates + --or: distance(vec) # e.g. as returned by pos() + --or: distance(mypen) # where mypen is another turtle -Return the angle, between the line from turtle-position to position -specified by x, y and the turtle's start orientation. 
(Depends on -modes - "standard" or "logo") + Return the angle, between the line from turtle-position to position + specified by x, y and the turtle's start orientation. (Depends on + modes - "standard" or "logo") + + Example (for a Turtle instance named turtle): + >>> turtle.pos() + (10.00, 10.00) + >>> turtle.towards(0,0) + 225.0 + """ -Example (for a Turtle instance named turtle): ->>> turtle.pos() -(10.00, 10.00) ->>> turtle.towards(0,0) -225.0 -""" @overload def towards(self, x: float, y: float) -> float: ... def heading(self) -> float: """Return the turtle's current heading. -No arguments. + No arguments. + + Example (for a Turtle instance named turtle): + >>> turtle.left(67) + >>> turtle.heading() + 67.0 + """ -Example (for a Turtle instance named turtle): ->>> turtle.left(67) ->>> turtle.heading() -67.0 -""" def setheading(self, to_angle: float) -> None: """Set the orientation of the turtle to to_angle. -Aliases: setheading | seth + Aliases: setheading | seth -Argument: -to_angle -- a number (integer or float) + Argument: + to_angle -- a number (integer or float) -Set the orientation of the turtle to to_angle. -Here are some common directions in degrees: + Set the orientation of the turtle to to_angle. + Here are some common directions in degrees: - standard - mode: logo-mode: --------------------|-------------------- - 0 - east 0 - north - 90 - north 90 - east - 180 - west 180 - south - 270 - south 270 - west + standard - mode: logo-mode: + -------------------|-------------------- + 0 - east 0 - north + 90 - north 90 - east + 180 - west 180 - south + 270 - south 270 - west + + Example (for a Turtle instance named turtle): + >>> turtle.setheading(90) + >>> turtle.heading() + 90 + """ -Example (for a Turtle instance named turtle): ->>> turtle.setheading(90) ->>> turtle.heading() -90 -""" def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: """Draw a circle with given radius. -Arguments: -radius -- a number -extent (optional) -- a number -steps (optional) -- an integer - -Draw a circle with given radius. The center is radius units left -of the turtle; extent - an angle - determines which part of the -circle is drawn. If extent is not given, draw the entire circle. -If extent is not a full circle, one endpoint of the arc is the -current pen position. Draw the arc in counterclockwise direction -if radius is positive, otherwise in clockwise direction. Finally -the direction of the turtle is changed by the amount of extent. - -As the circle is approximated by an inscribed regular polygon, -steps determines the number of steps to use. If not given, -it will be calculated automatically. Maybe used to draw regular -polygons. - -call: circle(radius) # full circle ---or: circle(radius, extent) # arc ---or: circle(radius, extent, steps) ---or: circle(radius, steps=6) # 6-sided polygon - -Example (for a Turtle instance named turtle): ->>> turtle.circle(50) ->>> turtle.circle(120, 180) # semicircle -""" + Arguments: + radius -- a number + extent (optional) -- a number + steps (optional) -- an integer + + Draw a circle with given radius. The center is radius units left + of the turtle; extent - an angle - determines which part of the + circle is drawn. If extent is not given, draw the entire circle. + If extent is not a full circle, one endpoint of the arc is the + current pen position. Draw the arc in counterclockwise direction + if radius is positive, otherwise in clockwise direction. 
Finally + the direction of the turtle is changed by the amount of extent. + + As the circle is approximated by an inscribed regular polygon, + steps determines the number of steps to use. If not given, + it will be calculated automatically. Maybe used to draw regular + polygons. + + call: circle(radius) # full circle + --or: circle(radius, extent) # arc + --or: circle(radius, extent, steps) + --or: circle(radius, steps=6) # 6-sided polygon + + Example (for a Turtle instance named turtle): + >>> turtle.circle(50) + >>> turtle.circle(120, 180) # semicircle + """ + def speed(self, s: int | None = 0) -> int | None: - """dummy method - to be overwritten by child class -""" + """dummy method - to be overwritten by child class""" fd = forward bk = back backward = back @@ -1172,148 +1215,156 @@ Example (for a Turtle instance named turtle): class TPen: """Drawing part of the RawTurtle. -Implements drawing properties. -""" + Implements drawing properties. + """ + def __init__(self, resizemode: Literal["auto", "user", "noresize"] = "noresize") -> None: ... @overload def resizemode(self, rmode: None = None) -> str: """Set resizemode to one of the values: "auto", "user", "noresize". -(Optional) Argument: -rmode -- one of the strings "auto", "user", "noresize" + (Optional) Argument: + rmode -- one of the strings "auto", "user", "noresize" -Different resizemodes have the following effects: - - "auto" adapts the appearance of the turtle - corresponding to the value of pensize. - - "user" adapts the appearance of the turtle according to the - values of stretchfactor and outlinewidth (outline), - which are set by shapesize() - - "noresize" no adaption of the turtle's appearance takes place. -If no argument is given, return current resizemode. -resizemode("user") is called by a call of shapesize with arguments. + Different resizemodes have the following effects: + - "auto" adapts the appearance of the turtle + corresponding to the value of pensize. + - "user" adapts the appearance of the turtle according to the + values of stretchfactor and outlinewidth (outline), + which are set by shapesize() + - "noresize" no adaption of the turtle's appearance takes place. + If no argument is given, return current resizemode. + resizemode("user") is called by a call of shapesize with arguments. -Examples (for a Turtle instance named turtle): ->>> turtle.resizemode("noresize") ->>> turtle.resizemode() -'noresize' -""" + Examples (for a Turtle instance named turtle): + >>> turtle.resizemode("noresize") + >>> turtle.resizemode() + 'noresize' + """ + @overload def resizemode(self, rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload def pensize(self, width: None = None) -> int: """Set or return the line thickness. -Aliases: pensize | width + Aliases: pensize | width -Argument: -width -- positive number + Argument: + width -- positive number -Set the line thickness to width or return it. If resizemode is set -to "auto" and turtleshape is a polygon, that polygon is drawn with -the same line thickness. If no argument is given, current pensize -is returned. + Set the line thickness to width or return it. If resizemode is set + to "auto" and turtleshape is a polygon, that polygon is drawn with + the same line thickness. If no argument is given, current pensize + is returned. 
+ + Example (for a Turtle instance named turtle): + >>> turtle.pensize() + 1 + >>> turtle.pensize(10) # from here on lines of width 10 are drawn + """ -Example (for a Turtle instance named turtle): ->>> turtle.pensize() -1 ->>> turtle.pensize(10) # from here on lines of width 10 are drawn -""" @overload def pensize(self, width: int) -> None: ... def penup(self) -> None: """Pull the pen up -- no drawing when moving. -Aliases: penup | pu | up + Aliases: penup | pu | up -No argument + No argument + + Example (for a Turtle instance named turtle): + >>> turtle.penup() + """ -Example (for a Turtle instance named turtle): ->>> turtle.penup() -""" def pendown(self) -> None: """Pull the pen down -- drawing when moving. -Aliases: pendown | pd | down + Aliases: pendown | pd | down -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> turtle.pendown() + """ -Example (for a Turtle instance named turtle): ->>> turtle.pendown() -""" def isdown(self) -> bool: """Return True if pen is down, False if it's up. -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> turtle.penup() + >>> turtle.isdown() + False + >>> turtle.pendown() + >>> turtle.isdown() + True + """ -Example (for a Turtle instance named turtle): ->>> turtle.penup() ->>> turtle.isdown() -False ->>> turtle.pendown() ->>> turtle.isdown() -True -""" @overload def speed(self, speed: None = None) -> int: """Return or set the turtle's speed. -Optional argument: -speed -- an integer in the range 0..10 or a speedstring (see below) - -Set the turtle's speed to an integer value in the range 0 .. 10. -If no argument is given: return current speed. - -If input is a number greater than 10 or smaller than 0.5, -speed is set to 0. -Speedstrings are mapped to speedvalues in the following way: - 'fastest' : 0 - 'fast' : 10 - 'normal' : 6 - 'slow' : 3 - 'slowest' : 1 -speeds from 1 to 10 enforce increasingly faster animation of -line drawing and turtle turning. - -Attention: -speed = 0 : *no* animation takes place. forward/back makes turtle jump -and likewise left/right make the turtle turn instantly. - -Example (for a Turtle instance named turtle): ->>> turtle.speed(3) -""" + Optional argument: + speed -- an integer in the range 0..10 or a speedstring (see below) + + Set the turtle's speed to an integer value in the range 0 .. 10. + If no argument is given: return current speed. + + If input is a number greater than 10 or smaller than 0.5, + speed is set to 0. + Speedstrings are mapped to speedvalues in the following way: + 'fastest' : 0 + 'fast' : 10 + 'normal' : 6 + 'slow' : 3 + 'slowest' : 1 + speeds from 1 to 10 enforce increasingly faster animation of + line drawing and turtle turning. + + Attention: + speed = 0 : *no* animation takes place. forward/back makes turtle jump + and likewise left/right make the turtle turn instantly. + + Example (for a Turtle instance named turtle): + >>> turtle.speed(3) + """ + @overload def speed(self, speed: _Speed) -> None: ... @overload def pencolor(self) -> _AnyColor: """Return or set the pencolor. -Arguments: -Four input formats are allowed: - - pencolor() - Return the current pencolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. 
- - pencolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - pencolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - pencolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - -If turtleshape is a polygon, the outline of that polygon is drawn -with the newly set pencolor. - -Example (for a Turtle instance named turtle): ->>> turtle.pencolor('brown') ->>> tup = (0.2, 0.8, 0.55) ->>> turtle.pencolor(tup) ->>> turtle.pencolor() -'#33cc8c' -""" + Arguments: + Four input formats are allowed: + - pencolor() + Return the current pencolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - pencolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - pencolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - pencolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + + If turtleshape is a polygon, the outline of that polygon is drawn + with the newly set pencolor. + + Example (for a Turtle instance named turtle): + >>> turtle.pencolor('brown') + >>> tup = (0.2, 0.8, 0.55) + >>> turtle.pencolor(tup) + >>> turtle.pencolor() + '#33cc8c' + """ + @overload def pencolor(self, color: _Color) -> None: ... @overload @@ -1322,31 +1373,32 @@ Example (for a Turtle instance named turtle): def fillcolor(self) -> _AnyColor: """Return or set the fillcolor. -Arguments: -Four input formats are allowed: - - fillcolor() - Return the current fillcolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - fillcolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - fillcolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - fillcolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - -If turtleshape is a polygon, the interior of that polygon is drawn -with the newly set fillcolor. - -Example (for a Turtle instance named turtle): ->>> turtle.fillcolor('violet') ->>> col = turtle.pencolor() ->>> turtle.fillcolor(col) ->>> turtle.fillcolor(0, .5, 0) -""" + Arguments: + Four input formats are allowed: + - fillcolor() + Return the current fillcolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - fillcolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - fillcolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - fillcolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + + If turtleshape is a polygon, the interior of that polygon is drawn + with the newly set fillcolor. 
+ + Example (for a Turtle instance named turtle): + >>> turtle.fillcolor('violet') + >>> col = turtle.pencolor() + >>> turtle.fillcolor(col) + >>> turtle.fillcolor(0, .5, 0) + """ + @overload def fillcolor(self, color: _Color) -> None: ... @overload @@ -1355,35 +1407,36 @@ Example (for a Turtle instance named turtle): def color(self) -> tuple[_AnyColor, _AnyColor]: """Return or set the pencolor and fillcolor. -Arguments: -Several input formats are allowed. -They use 0, 1, 2, or 3 arguments as follows: - -color() - Return the current pencolor and the current fillcolor - as a pair of color specification strings as are returned - by pencolor and fillcolor. -color(colorstring), color((r,g,b)), color(r,g,b) - inputs as in pencolor, set both, fillcolor and pencolor, - to the given value. -color(colorstring1, colorstring2), -color((r1,g1,b1), (r2,g2,b2)) - equivalent to pencolor(colorstring1) and fillcolor(colorstring2) - and analogously, if the other input format is used. - -If turtleshape is a polygon, outline and interior of that polygon -is drawn with the newly set colors. -For more info see: pencolor, fillcolor - -Example (for a Turtle instance named turtle): ->>> turtle.color('red', 'green') ->>> turtle.color() -('red', 'green') ->>> colormode(255) ->>> color((40, 80, 120), (160, 200, 240)) ->>> color() -('#285078', '#a0c8f0') -""" + Arguments: + Several input formats are allowed. + They use 0, 1, 2, or 3 arguments as follows: + + color() + Return the current pencolor and the current fillcolor + as a pair of color specification strings as are returned + by pencolor and fillcolor. + color(colorstring), color((r,g,b)), color(r,g,b) + inputs as in pencolor, set both, fillcolor and pencolor, + to the given value. + color(colorstring1, colorstring2), + color((r1,g1,b1), (r2,g2,b2)) + equivalent to pencolor(colorstring1) and fillcolor(colorstring2) + and analogously, if the other input format is used. + + If turtleshape is a polygon, outline and interior of that polygon + is drawn with the newly set colors. + For more info see: pencolor, fillcolor + + Example (for a Turtle instance named turtle): + >>> turtle.color('red', 'green') + >>> turtle.color() + ('red', 'green') + >>> colormode(255) + >>> color((40, 80, 120), (160, 200, 240)) + >>> color() + ('#285078', '#a0c8f0') + """ + @overload def color(self, color: _Color) -> None: ... @overload @@ -1393,93 +1446,96 @@ Example (for a Turtle instance named turtle): if sys.version_info >= (3, 12): def teleport(self, x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: """To be overwritten by child class RawTurtle. -Includes no TNavigator references. -""" + Includes no TNavigator references. + """ def showturtle(self) -> None: """Makes the turtle visible. -Aliases: showturtle | st + Aliases: showturtle | st -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> turtle.hideturtle() + >>> turtle.showturtle() + """ -Example (for a Turtle instance named turtle): ->>> turtle.hideturtle() ->>> turtle.showturtle() -""" def hideturtle(self) -> None: """Makes the turtle invisible. -Aliases: hideturtle | ht + Aliases: hideturtle | ht -No argument. + No argument. -It's a good idea to do this while you're in the -middle of a complicated drawing, because hiding -the turtle speeds up the drawing observably. + It's a good idea to do this while you're in the + middle of a complicated drawing, because hiding + the turtle speeds up the drawing observably. 
+ + Example (for a Turtle instance named turtle): + >>> turtle.hideturtle() + """ -Example (for a Turtle instance named turtle): ->>> turtle.hideturtle() -""" def isvisible(self) -> bool: """Return True if the Turtle is shown, False if it's hidden. -No argument. + No argument. -Example (for a Turtle instance named turtle): ->>> turtle.hideturtle() ->>> print(turtle.isvisible()) -False -""" + Example (for a Turtle instance named turtle): + >>> turtle.hideturtle() + >>> print(turtle.isvisible()) + False + """ # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload def pen(self) -> _PenState: """Return or set the pen's attributes. -Arguments: - pen -- a dictionary with some or all of the below listed keys. - **pendict -- one or more keyword-arguments with the below - listed keys as keywords. - -Return or set the pen's attributes in a 'pen-dictionary' -with the following key/value pairs: - "shown" : True/False - "pendown" : True/False - "pencolor" : color-string or color-tuple - "fillcolor" : color-string or color-tuple - "pensize" : positive number - "speed" : number in range 0..10 - "resizemode" : "auto" or "user" or "noresize" - "stretchfactor": (positive number, positive number) - "shearfactor": number - "outline" : positive number - "tilt" : number - -This dictionary can be used as argument for a subsequent -pen()-call to restore the former pen-state. Moreover one -or more of these attributes can be provided as keyword-arguments. -This can be used to set several pen attributes in one statement. - - -Examples (for a Turtle instance named turtle): ->>> turtle.pen(fillcolor="black", pencolor="red", pensize=10) ->>> turtle.pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} ->>> penstate=turtle.pen() ->>> turtle.color("yellow","") ->>> turtle.penup() ->>> turtle.pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} ->>> p.pen(penstate, fillcolor="green") ->>> p.pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} -""" + Arguments: + pen -- a dictionary with some or all of the below listed keys. + **pendict -- one or more keyword-arguments with the below + listed keys as keywords. + + Return or set the pen's attributes in a 'pen-dictionary' + with the following key/value pairs: + "shown" : True/False + "pendown" : True/False + "pencolor" : color-string or color-tuple + "fillcolor" : color-string or color-tuple + "pensize" : positive number + "speed" : number in range 0..10 + "resizemode" : "auto" or "user" or "noresize" + "stretchfactor": (positive number, positive number) + "shearfactor": number + "outline" : positive number + "tilt" : number + + This dictionary can be used as argument for a subsequent + pen()-call to restore the former pen-state. Moreover one + or more of these attributes can be provided as keyword-arguments. + This can be used to set several pen attributes in one statement. 
+ + + Examples (for a Turtle instance named turtle): + >>> turtle.pen(fillcolor="black", pencolor="red", pensize=10) + >>> turtle.pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + >>> penstate=turtle.pen() + >>> turtle.color("yellow","") + >>> turtle.penup() + >>> turtle.pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + >>> p.pen(penstate, fillcolor="green") + >>> p.pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + """ + @overload def pen( self, @@ -1506,9 +1562,10 @@ Examples (for a Turtle instance named turtle): class RawTurtle(TPen, TNavigator): # type: ignore[misc] # Conflicting methods in base classes """Animation part of the RawTurtle. -Puts RawTurtle upon a TurtleScreen and provides tools for -its animation. -""" + Puts RawTurtle upon a TurtleScreen and provides tools for + its animation. + """ + screen: TurtleScreen screens: ClassVar[list[TurtleScreen]] def __init__( @@ -1521,90 +1578,96 @@ its animation. def reset(self) -> None: """Delete the turtle's drawings and restore its default values. -No argument. - -Delete the turtle's drawings from the screen, re-center the turtle -and set variables to the default values. - -Example (for a Turtle instance named turtle): ->>> turtle.position() -(0.00,-22.00) ->>> turtle.heading() -100.0 ->>> turtle.reset() ->>> turtle.position() -(0.00,0.00) ->>> turtle.heading() -0.0 -""" + No argument. + + Delete the turtle's drawings from the screen, re-center the turtle + and set variables to the default values. + + Example (for a Turtle instance named turtle): + >>> turtle.position() + (0.00,-22.00) + >>> turtle.heading() + 100.0 + >>> turtle.reset() + >>> turtle.position() + (0.00,0.00) + >>> turtle.heading() + 0.0 + """ + def setundobuffer(self, size: int | None) -> None: """Set or disable undobuffer. -Argument: -size -- an integer or None + Argument: + size -- an integer or None -If size is an integer an empty undobuffer of given size is installed. -Size gives the maximum number of turtle-actions that can be undone -by the undo() function. -If size is None, no undobuffer is present. + If size is an integer an empty undobuffer of given size is installed. + Size gives the maximum number of turtle-actions that can be undone + by the undo() function. + If size is None, no undobuffer is present. + + Example (for a Turtle instance named turtle): + >>> turtle.setundobuffer(42) + """ -Example (for a Turtle instance named turtle): ->>> turtle.setundobuffer(42) -""" def undobufferentries(self) -> int: """Return count of entries in the undobuffer. -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> while undobufferentries(): + ... undo() + """ -Example (for a Turtle instance named turtle): ->>> while undobufferentries(): -... undo() -""" def clear(self) -> None: """Delete the turtle's drawings from the screen. Do not move turtle. -No arguments. + No arguments. -Delete the turtle's drawings from the screen. Do not move turtle. -State and position of the turtle as well as drawings of other -turtles are not affected. + Delete the turtle's drawings from the screen. Do not move turtle. 
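A small illustrative sketch of the undo buffer documented here (setundobuffer, undobufferentries, undo); assumes the standard turtle module and a display:

    import turtle

    t = turtle.Turtle()
    t.setundobuffer(100)           # remember up to 100 turtle actions
    for _ in range(4):
        t.forward(50)
        t.left(90)                 # draw a square
    while t.undobufferentries():
        t.undo()                   # roll the drawing back, one action at a time
    turtle.done()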
+ State and position of the turtle as well as drawings of other + turtles are not affected. + + Examples (for a Turtle instance named turtle): + >>> turtle.clear() + """ -Examples (for a Turtle instance named turtle): ->>> turtle.clear() -""" def clone(self) -> Self: """Create and return a clone of the turtle. -No argument. + No argument. -Create and return a clone of the turtle with same position, heading -and turtle properties. + Create and return a clone of the turtle with same position, heading + and turtle properties. + + Example (for a Turtle instance named mick): + mick = Turtle() + joe = mick.clone() + """ -Example (for a Turtle instance named mick): -mick = Turtle() -joe = mick.clone() -""" @overload def shape(self, name: None = None) -> str: """Set turtle shape to shape with given name / return current shapename. -Optional argument: -name -- a string, which is a valid shapename - -Set turtle shape to shape with given name or, if name is not given, -return name of current shape. -Shape with name must exist in the TurtleScreen's shape dictionary. -Initially there are the following polygon shapes: -'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. -To learn about how to deal with shapes see Screen-method register_shape. - -Example (for a Turtle instance named turtle): ->>> turtle.shape() -'arrow' ->>> turtle.shape("turtle") ->>> turtle.shape() -'turtle' -""" + Optional argument: + name -- a string, which is a valid shapename + + Set turtle shape to shape with given name or, if name is not given, + return name of current shape. + Shape with name must exist in the TurtleScreen's shape dictionary. + Initially there are the following polygon shapes: + 'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. + To learn about how to deal with shapes see Screen-method register_shape. + + Example (for a Turtle instance named turtle): + >>> turtle.shape() + 'arrow' + >>> turtle.shape("turtle") + >>> turtle.shape() + 'turtle' + """ + @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @@ -1612,24 +1675,25 @@ Example (for a Turtle instance named turtle): def shapesize(self) -> tuple[float, float, float]: """Set/return turtle's stretchfactors/outline. Set resizemode to "user". -Optional arguments: - stretch_wid : positive number - stretch_len : positive number - outline : positive number - -Return or set the pen's attributes x/y-stretchfactors and/or outline. -Set resizemode to "user". -If and only if resizemode is set to "user", the turtle will be displayed -stretched according to its stretchfactors: -stretch_wid is stretchfactor perpendicular to orientation -stretch_len is stretchfactor in direction of turtles orientation. -outline determines the width of the shapes's outline. - -Examples (for a Turtle instance named turtle): ->>> turtle.resizemode("user") ->>> turtle.shapesize(5, 5, 12) ->>> turtle.shapesize(outline=8) -""" + Optional arguments: + stretch_wid : positive number + stretch_len : positive number + outline : positive number + + Return or set the pen's attributes x/y-stretchfactors and/or outline. + Set resizemode to "user". + If and only if resizemode is set to "user", the turtle will be displayed + stretched according to its stretchfactors: + stretch_wid is stretchfactor perpendicular to orientation + stretch_len is stretchfactor in direction of turtles orientation. + outline determines the width of the shapes's outline. 
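An illustrative sketch of the stretch factors described above (standard turtle module and a display assumed):

    import turtle

    t = turtle.Turtle()
    t.shape("turtle")
    t.shapesize(3, 2, 4)   # width x3, length x2, outline width 4; switches resizemode to "user"
    t.forward(100)
    turtle.done()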
+ + Examples (for a Turtle instance named turtle): + >>> turtle.resizemode("user") + >>> turtle.shapesize(5, 5, 12) + >>> turtle.shapesize(outline=8) + """ + @overload def shapesize( self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None @@ -1638,22 +1702,23 @@ Examples (for a Turtle instance named turtle): def shearfactor(self, shear: None = None) -> float: """Set or return the current shearfactor. -Optional argument: shear -- number, tangent of the shear angle - -Shear the turtleshape according to the given shearfactor shear, -which is the tangent of the shear angle. DO NOT change the -turtle's heading (direction of movement). -If shear is not given: return the current shearfactor, i. e. the -tangent of the shear angle, by which lines parallel to the -heading of the turtle are sheared. - -Examples (for a Turtle instance named turtle): ->>> turtle.shape("circle") ->>> turtle.shapesize(5,2) ->>> turtle.shearfactor(0.5) ->>> turtle.shearfactor() ->>> 0.5 -""" + Optional argument: shear -- number, tangent of the shear angle + + Shear the turtleshape according to the given shearfactor shear, + which is the tangent of the shear angle. DO NOT change the + turtle's heading (direction of movement). + If shear is not given: return the current shearfactor, i. e. the + tangent of the shear angle, by which lines parallel to the + heading of the turtle are sheared. + + Examples (for a Turtle instance named turtle): + >>> turtle.shape("circle") + >>> turtle.shapesize(5,2) + >>> turtle.shearfactor(0.5) + >>> turtle.shearfactor() + >>> 0.5 + """ + @overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @@ -1661,23 +1726,24 @@ Examples (for a Turtle instance named turtle): def shapetransform(self) -> tuple[float, float, float, float]: """Set or return the current transformation matrix of the turtle shape. -Optional arguments: t11, t12, t21, t22 -- numbers. - -If none of the matrix elements are given, return the transformation -matrix. -Otherwise set the given elements and transform the turtleshape -according to the matrix consisting of first row t11, t12 and -second row t21, 22. -Modify stretchfactor, shearfactor and tiltangle according to the -given matrix. - -Examples (for a Turtle instance named turtle): ->>> turtle.shape("square") ->>> turtle.shapesize(4,2) ->>> turtle.shearfactor(-0.5) ->>> turtle.shapetransform() -(4.0, -1.0, -0.0, 2.0) -""" + Optional arguments: t11, t12, t21, t22 -- numbers. + + If none of the matrix elements are given, return the transformation + matrix. + Otherwise set the given elements and transform the turtleshape + according to the matrix consisting of first row t11, t12 and + second row t21, 22. + Modify stretchfactor, shearfactor and tiltangle according to the + given matrix. + + Examples (for a Turtle instance named turtle): + >>> turtle.shape("square") + >>> turtle.shapesize(4,2) + >>> turtle.shearfactor(-0.5) + >>> turtle.shapetransform() + (4.0, -1.0, -0.0, 2.0) + """ + @overload def shapetransform( self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None @@ -1685,199 +1751,205 @@ Examples (for a Turtle instance named turtle): def get_shapepoly(self) -> _PolygonCoords | None: """Return the current shape polygon as tuple of coordinate pairs. -No argument. 
- -Examples (for a Turtle instance named turtle): ->>> turtle.shape("square") ->>> turtle.shapetransform(4, -1, 0, 2) ->>> turtle.get_shapepoly() -((50, -20), (30, 20), (-50, 20), (-30, -20)) + No argument. -""" + Examples (for a Turtle instance named turtle): + >>> turtle.shape("square") + >>> turtle.shapetransform(4, -1, 0, 2) + >>> turtle.get_shapepoly() + ((50, -20), (30, 20), (-50, 20), (-30, -20)) + """ if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") def settiltangle(self, angle: float) -> None: """Rotate the turtleshape to point in the specified direction - Argument: angle -- number + Argument: angle -- number + + Rotate the turtleshape to point in the direction specified by angle, + regardless of its current tilt-angle. DO NOT change the turtle's + heading (direction of movement). + + Deprecated since Python 3.1 + + Examples (for a Turtle instance named turtle): + >>> turtle.shape("circle") + >>> turtle.shapesize(5,2) + >>> turtle.settiltangle(45) + >>> turtle.stamp() + >>> turtle.fd(50) + >>> turtle.settiltangle(-45) + >>> turtle.stamp() + >>> turtle.fd(50) + """ + + @overload + def tiltangle(self, angle: None = None) -> float: + """Set or return the current tilt-angle. + + Optional argument: angle -- number Rotate the turtleshape to point in the direction specified by angle, regardless of its current tilt-angle. DO NOT change the turtle's heading (direction of movement). - - Deprecated since Python 3.1 + If angle is not given: return the current tilt-angle, i. e. the angle + between the orientation of the turtleshape and the heading of the + turtle (its direction of movement). Examples (for a Turtle instance named turtle): >>> turtle.shape("circle") - >>> turtle.shapesize(5,2) - >>> turtle.settiltangle(45) + >>> turtle.shapesize(5, 2) + >>> turtle.tiltangle() + 0.0 + >>> turtle.tiltangle(45) + >>> turtle.tiltangle() + 45.0 >>> turtle.stamp() >>> turtle.fd(50) - >>> turtle.settiltangle(-45) + >>> turtle.tiltangle(-45) + >>> turtle.tiltangle() + 315.0 >>> turtle.stamp() >>> turtle.fd(50) """ - @overload - def tiltangle(self, angle: None = None) -> float: - """Set or return the current tilt-angle. - -Optional argument: angle -- number - -Rotate the turtleshape to point in the direction specified by angle, -regardless of its current tilt-angle. DO NOT change the turtle's -heading (direction of movement). -If angle is not given: return the current tilt-angle, i. e. the angle -between the orientation of the turtleshape and the heading of the -turtle (its direction of movement). - -Examples (for a Turtle instance named turtle): ->>> turtle.shape("circle") ->>> turtle.shapesize(5, 2) ->>> turtle.tiltangle() -0.0 ->>> turtle.tiltangle(45) ->>> turtle.tiltangle() -45.0 ->>> turtle.stamp() ->>> turtle.fd(50) ->>> turtle.tiltangle(-45) ->>> turtle.tiltangle() -315.0 ->>> turtle.stamp() ->>> turtle.fd(50) -""" @overload def tiltangle(self, angle: float) -> None: ... def tilt(self, angle: float) -> None: """Rotate the turtleshape by angle. -Argument: -angle - a number + Argument: + angle - a number -Rotate the turtleshape by angle from its current tilt-angle, -but do NOT change the turtle's heading (direction of movement). + Rotate the turtleshape by angle from its current tilt-angle, + but do NOT change the turtle's heading (direction of movement). 
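The point that tilting affects only the shape's orientation, not the direction of movement, can be seen in this illustrative sketch (standard turtle module and a display assumed):

    import turtle

    t = turtle.Turtle()
    t.shape("triangle")
    for _ in range(4):
        t.stamp()          # leave a copy of the shape behind
        t.tilt(30)         # rotate the shape only
        t.forward(60)      # the path stays straight: the heading is unchanged
    print(t.heading())     # still 0.0, despite the tilted stamps
    turtle.done()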
-Examples (for a Turtle instance named turtle): ->>> turtle.shape("circle") ->>> turtle.shapesize(5,2) ->>> turtle.tilt(30) ->>> turtle.fd(50) ->>> turtle.tilt(30) ->>> turtle.fd(50) -""" + Examples (for a Turtle instance named turtle): + >>> turtle.shape("circle") + >>> turtle.shapesize(5,2) + >>> turtle.tilt(30) + >>> turtle.fd(50) + >>> turtle.tilt(30) + >>> turtle.fd(50) + """ # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. def stamp(self) -> Any: """Stamp a copy of the turtleshape onto the canvas and return its id. -No argument. + No argument. -Stamp a copy of the turtle shape onto the canvas at the current -turtle position. Return a stamp_id for that stamp, which can be -used to delete it by calling clearstamp(stamp_id). + Stamp a copy of the turtle shape onto the canvas at the current + turtle position. Return a stamp_id for that stamp, which can be + used to delete it by calling clearstamp(stamp_id). + + Example (for a Turtle instance named turtle): + >>> turtle.color("blue") + >>> turtle.stamp() + 13 + >>> turtle.fd(50) + """ -Example (for a Turtle instance named turtle): ->>> turtle.color("blue") ->>> turtle.stamp() -13 ->>> turtle.fd(50) -""" def clearstamp(self, stampid: int | tuple[int, ...]) -> None: """Delete stamp with given stampid -Argument: -stampid - an integer, must be return value of previous stamp() call. + Argument: + stampid - an integer, must be return value of previous stamp() call. + + Example (for a Turtle instance named turtle): + >>> turtle.color("blue") + >>> astamp = turtle.stamp() + >>> turtle.fd(50) + >>> turtle.clearstamp(astamp) + """ -Example (for a Turtle instance named turtle): ->>> turtle.color("blue") ->>> astamp = turtle.stamp() ->>> turtle.fd(50) ->>> turtle.clearstamp(astamp) -""" def clearstamps(self, n: int | None = None) -> None: """Delete all or first/last n of turtle's stamps. -Optional argument: -n -- an integer + Optional argument: + n -- an integer -If n is None, delete all of pen's stamps, -else if n > 0 delete first n stamps -else if n < 0 delete last n stamps. + If n is None, delete all of pen's stamps, + else if n > 0 delete first n stamps + else if n < 0 delete last n stamps. + + Example (for a Turtle instance named turtle): + >>> for i in range(8): + ... turtle.stamp(); turtle.fd(30) + ... + >>> turtle.clearstamps(2) + >>> turtle.clearstamps(-2) + >>> turtle.clearstamps() + """ -Example (for a Turtle instance named turtle): ->>> for i in range(8): -... turtle.stamp(); turtle.fd(30) -... ->>> turtle.clearstamps(2) ->>> turtle.clearstamps(-2) ->>> turtle.clearstamps() -""" def filling(self) -> bool: """Return fillstate (True if filling, False else). -No argument. + No argument. -Example (for a Turtle instance named turtle): ->>> turtle.begin_fill() ->>> if turtle.filling(): -... turtle.pensize(5) -... else: -... turtle.pensize(3) -""" + Example (for a Turtle instance named turtle): + >>> turtle.begin_fill() + >>> if turtle.filling(): + ... turtle.pensize(5) + ... else: + ... turtle.pensize(3) + """ if sys.version_info >= (3, 14): @contextmanager def fill(self) -> Generator[None]: """A context manager for filling a shape. -Implicitly ensures the code block is wrapped with -begin_fill() and end_fill(). + Implicitly ensures the code block is wrapped with + begin_fill() and end_fill(). -Example (for a Turtle instance named turtle): ->>> turtle.color("black", "red") ->>> with turtle.fill(): -... 
turtle.circle(60) -""" + Example (for a Turtle instance named turtle): + >>> turtle.color("black", "red") + >>> with turtle.fill(): + ... turtle.circle(60) + """ def begin_fill(self) -> None: """Called just before drawing a shape to be filled. -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> turtle.color("black", "red") + >>> turtle.begin_fill() + >>> turtle.circle(60) + >>> turtle.end_fill() + """ -Example (for a Turtle instance named turtle): ->>> turtle.color("black", "red") ->>> turtle.begin_fill() ->>> turtle.circle(60) ->>> turtle.end_fill() -""" def end_fill(self) -> None: """Fill the shape drawn after the call begin_fill(). -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> turtle.color("black", "red") + >>> turtle.begin_fill() + >>> turtle.circle(60) + >>> turtle.end_fill() + """ -Example (for a Turtle instance named turtle): ->>> turtle.color("black", "red") ->>> turtle.begin_fill() ->>> turtle.circle(60) ->>> turtle.end_fill() -""" @overload def dot(self, size: int | _Color | None = None) -> None: """Draw a dot with diameter size, using color. -Optional arguments: -size -- an integer >= 1 (if given) -color -- a colorstring or a numeric color tuple + Optional arguments: + size -- an integer >= 1 (if given) + color -- a colorstring or a numeric color tuple -Draw a circular dot with diameter size, using color. -If size is not given, the maximum of pensize+4 and 2*pensize is used. + Draw a circular dot with diameter size, using color. + If size is not given, the maximum of pensize+4 and 2*pensize is used. + + Example (for a Turtle instance named turtle): + >>> turtle.dot() + >>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50) + """ -Example (for a Turtle instance named turtle): ->>> turtle.dot() ->>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50) -""" @overload def dot(self, size: int | None, color: _Color, /) -> None: ... @overload @@ -1887,175 +1959,182 @@ Example (for a Turtle instance named turtle): ) -> None: """Write text at the current turtle position. -Arguments: -arg -- info, which is to be written to the TurtleScreen -move (optional) -- True/False -align (optional) -- one of the strings "left", "center" or right" -font (optional) -- a triple (fontname, fontsize, fonttype) - -Write text - the string representation of arg - at the current -turtle position according to align ("left", "center" or right") -and with the given font. -If move is True, the pen is moved to the bottom-right corner -of the text. By default, move is False. - -Example (for a Turtle instance named turtle): ->>> turtle.write('Home = ', True, align="center") ->>> turtle.write((0,0), True) -""" + Arguments: + arg -- info, which is to be written to the TurtleScreen + move (optional) -- True/False + align (optional) -- one of the strings "left", "center" or right" + font (optional) -- a triple (fontname, fontsize, fonttype) + + Write text - the string representation of arg - at the current + turtle position according to align ("left", "center" or right") + and with the given font. + If move is True, the pen is moved to the bottom-right corner + of the text. By default, move is False. + + Example (for a Turtle instance named turtle): + >>> turtle.write('Home = ', True, align="center") + >>> turtle.write((0,0), True) + """ if sys.version_info >= (3, 14): @contextmanager def poly(self) -> Generator[None]: """A context manager for recording the vertices of a polygon. 
-Implicitly ensures that the code block is wrapped with -begin_poly() and end_poly() + Implicitly ensures that the code block is wrapped with + begin_poly() and end_poly() -Example (for a Turtle instance named turtle) where we create a -triangle as the polygon and move the turtle 100 steps forward: ->>> with turtle.poly(): -... for side in range(3) -... turtle.forward(50) -... turtle.right(60) ->>> turtle.forward(100) -""" + Example (for a Turtle instance named turtle) where we create a + triangle as the polygon and move the turtle 100 steps forward: + >>> with turtle.poly(): + ... for side in range(3) + ... turtle.forward(50) + ... turtle.right(60) + >>> turtle.forward(100) + """ def begin_poly(self) -> None: """Start recording the vertices of a polygon. -No argument. + No argument. -Start recording the vertices of a polygon. Current turtle position -is first point of polygon. + Start recording the vertices of a polygon. Current turtle position + is first point of polygon. + + Example (for a Turtle instance named turtle): + >>> turtle.begin_poly() + """ -Example (for a Turtle instance named turtle): ->>> turtle.begin_poly() -""" def end_poly(self) -> None: """Stop recording the vertices of a polygon. -No argument. + No argument. -Stop recording the vertices of a polygon. Current turtle position is -last point of polygon. This will be connected with the first point. + Stop recording the vertices of a polygon. Current turtle position is + last point of polygon. This will be connected with the first point. + + Example (for a Turtle instance named turtle): + >>> turtle.end_poly() + """ -Example (for a Turtle instance named turtle): ->>> turtle.end_poly() -""" def get_poly(self) -> _PolygonCoords | None: """Return the lastly recorded polygon. -No argument. + No argument. + + Example (for a Turtle instance named turtle): + >>> p = turtle.get_poly() + >>> turtle.register_shape("myFavouriteShape", p) + """ -Example (for a Turtle instance named turtle): ->>> p = turtle.get_poly() ->>> turtle.register_shape("myFavouriteShape", p) -""" def getscreen(self) -> TurtleScreen: """Return the TurtleScreen object, the turtle is drawing on. -No argument. + No argument. -Return the TurtleScreen object, the turtle is drawing on. -So TurtleScreen-methods can be called for that object. + Return the TurtleScreen object, the turtle is drawing on. + So TurtleScreen-methods can be called for that object. + + Example (for a Turtle instance named turtle): + >>> ts = turtle.getscreen() + >>> ts + + >>> ts.bgcolor("pink") + """ -Example (for a Turtle instance named turtle): ->>> ts = turtle.getscreen() ->>> ts - ->>> ts.bgcolor("pink") -""" def getturtle(self) -> Self: """Return the Turtleobject itself. -No argument. + No argument. -Only reasonable use: as a function to return the 'anonymous turtle': + Only reasonable use: as a function to return the 'anonymous turtle': -Example: ->>> pet = getturtle() ->>> pet.fd(50) ->>> pet - ->>> turtles() -[] -""" + Example: + >>> pet = getturtle() + >>> pet.fd(50) + >>> pet + + >>> turtles() + [] + """ getpen = getturtle def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-click event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). -add -- True or False. 
If True, new binding will be added, otherwise - it will replace a former binding. + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). + add -- True or False. If True, new binding will be added, otherwise + it will replace a former binding. -Example for the anonymous turtle, i. e. the procedural way: + Example for the anonymous turtle, i. e. the procedural way: + + >>> def turn(x, y): + ... left(360) + ... + >>> onclick(turn) # Now clicking into the turtle will turn it. + >>> onclick(None) # event-binding will be removed + """ ->>> def turn(x, y): -... left(360) -... ->>> onclick(turn) # Now clicking into the turtle will turn it. ->>> onclick(None) # event-binding will be removed -""" def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-button-release event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). - -Example (for a MyTurtle instance named joe): ->>> class MyTurtle(Turtle): -... def glow(self,x,y): -... self.fillcolor("red") -... def unglow(self,x,y): -... self.fillcolor("") -... ->>> joe = MyTurtle() ->>> joe.onclick(joe.glow) ->>> joe.onrelease(joe.unglow) - -Clicking on joe turns fillcolor red, unclicking turns it to -transparent. -""" + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). + + Example (for a MyTurtle instance named joe): + >>> class MyTurtle(Turtle): + ... def glow(self,x,y): + ... self.fillcolor("red") + ... def unglow(self,x,y): + ... self.fillcolor("") + ... + >>> joe = MyTurtle() + >>> joe.onclick(joe.glow) + >>> joe.onrelease(joe.unglow) + + Clicking on joe turns fillcolor red, unclicking turns it to + transparent. + """ + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-move event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). -Every sequence of mouse-move-events on a turtle is preceded by a -mouse-click event on that turtle. + Every sequence of mouse-move-events on a turtle is preceded by a + mouse-click event on that turtle. -Example (for a Turtle instance named turtle): ->>> turtle.ondrag(turtle.goto) + Example (for a Turtle instance named turtle): + >>> turtle.ondrag(turtle.goto) + + Subsequently clicking and dragging a Turtle will move it + across the screen thereby producing handdrawings (if pen is + down). + """ -Subsequently clicking and dragging a Turtle will move it -across the screen thereby producing handdrawings (if pen is -down). -""" def undo(self) -> None: """undo (repeatedly) the last turtle action. -No argument. + No argument. -undo (repeatedly) the last turtle action. 
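A minimal drag-to-draw sketch using the ondrag binding documented above (standard turtle module and a display assumed):

    import turtle

    t = turtle.Turtle()
    t.speed(0)
    t.ondrag(t.goto)       # dragging the turtle moves it and, with the pen down, draws
    turtle.mainloop()      # keep the window open so mouse events are processed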
-Number of available undo actions is determined by the size of -the undobuffer. + undo (repeatedly) the last turtle action. + Number of available undo actions is determined by the size of + the undobuffer. -Example (for a Turtle instance named turtle): ->>> for i in range(4): -... turtle.fd(50); turtle.lt(80) -... ->>> for i in range(8): -... turtle.undo() -... -""" + Example (for a Turtle instance named turtle): + >>> for i in range(4): + ... turtle.fd(50); turtle.lt(80) + ... + >>> for i in range(8): + ... turtle.undo() + ... + """ turtlesize = shapesize class _Screen(TurtleScreen): @@ -2070,72 +2149,76 @@ class _Screen(TurtleScreen): ) -> None: """Set the size and position of the main window. -Arguments: -width: as integer a size in pixels, as float a fraction of the screen. - Default is 50% of screen. -height: as integer the height in pixels, as float a fraction of the - screen. Default is 75% of screen. -startx: if positive, starting position in pixels from the left - edge of the screen, if negative from the right edge - Default, startx=None is to center window horizontally. -starty: if positive, starting position in pixels from the top - edge of the screen, if negative from the bottom edge - Default, starty=None is to center window vertically. + Arguments: + width: as integer a size in pixels, as float a fraction of the screen. + Default is 50% of screen. + height: as integer the height in pixels, as float a fraction of the + screen. Default is 75% of screen. + startx: if positive, starting position in pixels from the left + edge of the screen, if negative from the right edge + Default, startx=None is to center window horizontally. + starty: if positive, starting position in pixels from the top + edge of the screen, if negative from the bottom edge + Default, starty=None is to center window vertically. -Examples (for a Screen instance named screen): ->>> screen.setup (width=200, height=200, startx=0, starty=0) + Examples (for a Screen instance named screen): + >>> screen.setup (width=200, height=200, startx=0, starty=0) -sets window to 200x200 pixels, in upper left of screen + sets window to 200x200 pixels, in upper left of screen ->>> screen.setup(width=.75, height=0.5, startx=None, starty=None) + >>> screen.setup(width=.75, height=0.5, startx=None, starty=None) + + sets window to 75% of screen by 50% of screen and centers + """ -sets window to 75% of screen by 50% of screen and centers -""" def title(self, titlestring: str) -> None: """Set title of turtle-window -Argument: -titlestring -- a string, to appear in the titlebar of the - turtle graphics window. + Argument: + titlestring -- a string, to appear in the titlebar of the + turtle graphics window. -This is a method of Screen-class. Not available for TurtleScreen- -objects. + This is a method of Screen-class. Not available for TurtleScreen- + objects. + + Example (for a Screen instance named screen): + >>> screen.title("Welcome to the turtle-zoo!") + """ -Example (for a Screen instance named screen): ->>> screen.title("Welcome to the turtle-zoo!") -""" def bye(self) -> None: """Shut the turtlegraphics window. -Example (for a TurtleScreen instance named screen): ->>> screen.bye() -""" + Example (for a TurtleScreen instance named screen): + >>> screen.bye() + """ + def exitonclick(self) -> None: """Go into mainloop until the mouse is clicked. -No arguments. + No arguments. -Bind bye() method to mouseclick on TurtleScreen. -If "using_IDLE" - value in configuration dictionary is False -(default value), enter mainloop. 
-If IDLE with -n switch (no subprocess) is used, this value should be -set to True in turtle.cfg. In this case IDLE's mainloop -is active also for the client script. + Bind bye() method to mouseclick on TurtleScreen. + If "using_IDLE" - value in configuration dictionary is False + (default value), enter mainloop. + If IDLE with -n switch (no subprocess) is used, this value should be + set to True in turtle.cfg. In this case IDLE's mainloop + is active also for the client script. -This is a method of the Screen-class and not available for -TurtleScreen instances. + This is a method of the Screen-class and not available for + TurtleScreen instances. -Example (for a Screen instance named screen): ->>> screen.exitonclick() + Example (for a Screen instance named screen): + >>> screen.exitonclick() -""" + """ class Turtle(RawTurtle): """RawTurtle auto-creating (scrolled) canvas. -When a Turtle object is created or a function derived from some -Turtle method is called a TurtleScreen object is automatically created. -""" + When a Turtle object is created or a function derived from some + Turtle method is called a TurtleScreen object is automatically created. + """ + def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... RawPen = RawTurtle @@ -2144,211 +2227,221 @@ Pen = Turtle def write_docstringdict(filename: str = "turtle_docstringdict") -> None: """Create and write docstring-dictionary to file. -Optional argument: -filename -- a string, used as filename - default value is turtle_docstringdict + Optional argument: + filename -- a string, used as filename + default value is turtle_docstringdict -Has to be called explicitly, (not used by the turtle-graphics classes) -The docstring dictionary will be written to the Python script .py -It is intended to serve as a template for translation of the docstrings -into different languages. -""" + Has to be called explicitly, (not used by the turtle-graphics classes) + The docstring dictionary will be written to the Python script .py + It is intended to serve as a template for translation of the docstrings + into different languages. + """ # Functions copied from TurtleScreenBase: def mainloop() -> None: """Starts event loop - calling Tkinter's mainloop function. -No argument. + No argument. -Must be last statement in a turtle graphics program. -Must NOT be used if a script is run from within IDLE in -n mode -(No subprocess) - for interactive use of turtle graphics. + Must be last statement in a turtle graphics program. + Must NOT be used if a script is run from within IDLE in -n mode + (No subprocess) - for interactive use of turtle graphics. -Example: ->>> mainloop() + Example: + >>> mainloop() + + """ -""" def textinput(title: str, prompt: str) -> str | None: """Pop up a dialog window for input of a string. -Arguments: title is the title of the dialog window, -prompt is a text mostly describing what information to input. + Arguments: title is the title of the dialog window, + prompt is a text mostly describing what information to input. -Return the string input -If the dialog is canceled, return None. + Return the string input + If the dialog is canceled, return None. -Example: ->>> textinput("NIM", "Name of first player:") + Example: + >>> textinput("NIM", "Name of first player:") + + """ -""" def numinput( title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: """Pop up a dialog window for input of a number. 
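An illustrative sketch combining the textinput and numinput dialogs (standard turtle module and a display assumed; both helpers return None if the dialog is cancelled):

    import turtle

    name = turtle.textinput("Game", "Name of first player:")
    stake = turtle.numinput("Game", "Your stakes:", 1000, minval=10, maxval=10000)
    if name is not None and stake is not None:
        print(name, "plays with a stake of", stake)
    turtle.bye()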
-Arguments: title is the title of the dialog window, -prompt is a text mostly describing what numerical information to input. -default: default value -minval: minimum value for input -maxval: maximum value for input + Arguments: title is the title of the dialog window, + prompt is a text mostly describing what numerical information to input. + default: default value + minval: minimum value for input + maxval: maximum value for input -The number input must be in the range minval .. maxval if these are -given. If not, a hint is issued and the dialog remains open for -correction. Return the number input. -If the dialog is canceled, return None. + The number input must be in the range minval .. maxval if these are + given. If not, a hint is issued and the dialog remains open for + correction. Return the number input. + If the dialog is canceled, return None. -Example: ->>> numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) + Example: + >>> numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000) -""" + """ # Functions copied from TurtleScreen: def clear() -> None: - """Delete the turtle's drawings from the screen. Do not move + """Delete the turtle's drawings from the screen. Do not move -No arguments. + No arguments. -Delete the turtle's drawings from the screen. Do not move -State and position of the turtle as well as drawings of other -turtles are not affected. + Delete the turtle's drawings from the screen. Do not move + State and position of the turtle as well as drawings of other + turtles are not affected. + + Examples: + >>> clear() + """ -Examples: ->>> clear() -""" @overload def mode(mode: None = None) -> str: """Set turtle-mode ('standard', 'logo' or 'world') and perform reset. -Optional argument: -mode -- one of the strings 'standard', 'logo' or 'world' + Optional argument: + mode -- one of the strings 'standard', 'logo' or 'world' -Mode 'standard' is compatible with turtle.py. -Mode 'logo' is compatible with most Logo-Turtle-Graphics. -Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in -this mode angles appear distorted if x/y unit-ratio doesn't equal 1. -If mode is not given, return the current mode. + Mode 'standard' is compatible with turtle.py. + Mode 'logo' is compatible with most Logo-Turtle-Graphics. + Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in + this mode angles appear distorted if x/y unit-ratio doesn't equal 1. + If mode is not given, return the current mode. - Mode Initial turtle heading positive angles - ------------|-------------------------|------------------- - 'standard' to the right (east) counterclockwise - 'logo' upward (north) clockwise + Mode Initial turtle heading positive angles + ------------|-------------------------|------------------- + 'standard' to the right (east) counterclockwise + 'logo' upward (north) clockwise + + Examples: + >>> mode('logo') # resets turtle heading to north + >>> mode() + 'logo' + """ -Examples: ->>> mode('logo') # resets turtle heading to north ->>> mode() -'logo' -""" @overload def mode(mode: Literal["standard", "logo", "world"]) -> None: ... def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: """Set up a user defined coordinate-system. 
-Arguments: -llx -- a number, x-coordinate of lower left corner of canvas -lly -- a number, y-coordinate of lower left corner of canvas -urx -- a number, x-coordinate of upper right corner of canvas -ury -- a number, y-coordinate of upper right corner of canvas + Arguments: + llx -- a number, x-coordinate of lower left corner of canvas + lly -- a number, y-coordinate of lower left corner of canvas + urx -- a number, x-coordinate of upper right corner of canvas + ury -- a number, y-coordinate of upper right corner of canvas -Set up user coodinat-system and switch to mode 'world' if necessary. -This performs a reset. If mode 'world' is already active, -all drawings are redrawn according to the new coordinates. + Set up user coodinat-system and switch to mode 'world' if necessary. + This performs a reset. If mode 'world' is already active, + all drawings are redrawn according to the new coordinates. -But ATTENTION: in user-defined coordinatesystems angles may appear -distorted. (see Screen.mode()) + But ATTENTION: in user-defined coordinatesystems angles may appear + distorted. (see Screen.mode()) + + Example: + >>> setworldcoordinates(-10,-0.5,50,1.5) + >>> for _ in range(36): + ... left(10) + ... forward(0.5) + """ -Example: ->>> setworldcoordinates(-10,-0.5,50,1.5) ->>> for _ in range(36): -... left(10) -... forward(0.5) -""" def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: """Adds a turtle shape to TurtleScreen's shapelist. -Arguments: -(1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! -(2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). - Installs the corresponding image shape. - !! Image-shapes DO NOT rotate when turning the turtle, - !! so they do not display the heading of the turtle! -(3) name is an arbitrary string and shape is a tuple - of pairs of coordinates. Installs the corresponding - polygon shape -(4) name is an arbitrary string and shape is a - (compound) Shape object. Installs the corresponding - compound shape. -To use a shape, you have to issue the command shape(shapename). - -call: register_shape("turtle.gif") ---or: register_shape("tri", ((0,0), (10,10), (-10,10))) - -Example: ->>> register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + Arguments: + (1) name is the name of an image file (PNG, GIF, PGM, and PPM) and shape is None. + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! + (2) name is an arbitrary string and shape is the name of an image file (PNG, GIF, PGM, and PPM). + Installs the corresponding image shape. + !! Image-shapes DO NOT rotate when turning the turtle, + !! so they do not display the heading of the turtle! + (3) name is an arbitrary string and shape is a tuple + of pairs of coordinates. Installs the corresponding + polygon shape + (4) name is an arbitrary string and shape is a + (compound) Shape object. Installs the corresponding + compound shape. + To use a shape, you have to issue the command shape(shapename). 
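A minimal sketch of registering and using a polygon shape as described above (standard turtle module and a display assumed; the shape name is arbitrary):

    import turtle

    screen = turtle.Screen()
    screen.register_shape("small_triangle", ((5, -3), (0, 5), (-5, -3)))
    t = turtle.Turtle()
    t.shape("small_triangle")   # a shape must be registered before shape() can select it
    t.forward(100)
    turtle.done()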
+ + call: register_shape("turtle.gif") + --or: register_shape("tri", ((0,0), (10,10), (-10,10))) + + Example: + >>> register_shape("triangle", ((5,-3),(0,5),(-5,-3))) + + """ -""" @overload def colormode(cmode: None = None) -> float: """Return the colormode or set it to 1.0 or 255. -Optional argument: -cmode -- one of the values 1.0 or 255 + Optional argument: + cmode -- one of the values 1.0 or 255 -r, g, b values of colortriples have to be in range 0..cmode. + r, g, b values of colortriples have to be in range 0..cmode. + + Example: + >>> colormode() + 1.0 + >>> colormode(255) + >>> pencolor(240,160,80) + """ -Example: ->>> colormode() -1.0 ->>> colormode(255) ->>> pencolor(240,160,80) -""" @overload def colormode(cmode: float) -> None: ... def reset() -> None: """Delete the turtle's drawings and restore its default values. -No argument. - -Delete the turtle's drawings from the screen, re-center the turtle -and set variables to the default values. - -Example: ->>> position() -(0.00,-22.00) ->>> heading() -100.0 ->>> reset() ->>> position() -(0.00,0.00) ->>> heading() -0.0 -""" + No argument. + + Delete the turtle's drawings from the screen, re-center the turtle + and set variables to the default values. + + Example: + >>> position() + (0.00,-22.00) + >>> heading() + 100.0 + >>> reset() + >>> position() + (0.00,0.00) + >>> heading() + 0.0 + """ + def turtles() -> list[Turtle]: - """Return the list of turtles on the + """Return the list of turtles on the + + Example: + >>> turtles() + [] + """ -Example: ->>> turtles() -[] -""" @overload def bgcolor() -> _AnyColor: """Set or return backgroundcolor of the TurtleScreen. -Arguments (if given): a color string or three numbers -in the range 0..colormode or a 3-tuple of such numbers. + Arguments (if given): a color string or three numbers + in the range 0..colormode or a 3-tuple of such numbers. + + Example: + >>> bgcolor("orange") + >>> bgcolor() + 'orange' + >>> bgcolor(0.5,0,0.5) + >>> bgcolor() + '#800080' + """ -Example: ->>> bgcolor("orange") ->>> bgcolor() -'orange' ->>> bgcolor(0.5,0,0.5) ->>> bgcolor() -'#800080' -""" @overload def bgcolor(color: _Color) -> None: ... @overload @@ -2357,198 +2450,210 @@ def bgcolor(r: float, g: float, b: float) -> None: ... def tracer(n: None = None) -> int: """Turns turtle animation on/off and set delay for update drawings. -Optional arguments: -n -- nonnegative integer -delay -- nonnegative integer - -If n is given, only each n-th regular screen update is really performed. -(Can be used to accelerate the drawing of complex graphics.) -Second arguments sets delay value (see RawTurtle.delay()) - -Example: ->>> tracer(8, 25) ->>> dist = 2 ->>> for i in range(200): -... fd(dist) -... rt(90) -... dist += 2 -""" + Optional arguments: + n -- nonnegative integer + delay -- nonnegative integer + + If n is given, only each n-th regular screen update is really performed. + (Can be used to accelerate the drawing of complex graphics.) + Second arguments sets delay value (see RawTurtle.delay()) + + Example: + >>> tracer(8, 25) + >>> dist = 2 + >>> for i in range(200): + ... fd(dist) + ... rt(90) + ... dist += 2 + """ + @overload def tracer(n: int, delay: int | None = None) -> None: ... @overload def delay(delay: None = None) -> int: """Return or set the drawing delay in milliseconds. 
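The batching effect of tracer described above can be sketched as follows (illustrative only; standard turtle module and a display assumed):

    import turtle

    screen = turtle.Screen()
    screen.tracer(0, 0)       # turn off per-step screen updates
    t = turtle.Turtle()
    for i in range(200):
        t.forward(i)
        t.left(91)            # a spiral that would be slow to animate step by step
    screen.update()           # draw the finished picture in one go
    turtle.done()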
-Optional argument: -delay -- positive integer + Optional argument: + delay -- positive integer + + Example: + >>> delay(15) + >>> delay() + 15 + """ -Example: ->>> delay(15) ->>> delay() -15 -""" @overload def delay(delay: int) -> None: ... if sys.version_info >= (3, 14): @contextmanager def no_animation() -> Generator[None]: - """Temporarily turn off auto-updating the + """Temporarily turn off auto-updating the -This is useful for drawing complex shapes where even the fastest setting -is too slow. Once this context manager is exited, the drawing will -be displayed. + This is useful for drawing complex shapes where even the fastest setting + is too slow. Once this context manager is exited, the drawing will + be displayed. -Example (for a TurtleScreen instance named screen -and a Turtle instance named turtle): ->>> with no_animation(): -... turtle.circle(50) -""" + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): + >>> with no_animation(): + ... turtle.circle(50) + """ def update() -> None: - """Perform a TurtleScreen update. - """ + """Perform a TurtleScreen update.""" + def window_width() -> int: """Return the width of the turtle window. -Example: ->>> window_width() -640 -""" + Example: + >>> window_width() + 640 + """ + def window_height() -> int: """Return the height of the turtle window. -Example: ->>> window_height() -480 -""" + Example: + >>> window_height() + 480 + """ + def getcanvas() -> Canvas: """Return the Canvas of this TurtleScreen. -No argument. + No argument. + + Example: + >>> cv = getcanvas() + >>> cv + + """ -Example: ->>> cv = getcanvas() ->>> cv - -""" def getshapes() -> list[str]: """Return a list of names of all currently available turtle shapes. -No argument. + No argument. + + Example: + >>> getshapes() + ['arrow', 'blank', 'circle', ... , 'turtle'] + """ -Example: ->>> getshapes() -['arrow', 'blank', 'circle', ... , 'turtle'] -""" def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: """Bind fun to mouse-click event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). -add -- True or False. If True, new binding will be added, otherwise - it will replace a former binding. + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). + add -- True or False. If True, new binding will be added, otherwise + it will replace a former binding. -Example for the anonymous turtle, i. e. the procedural way: + Example for the anonymous turtle, i. e. the procedural way: + + >>> def turn(x, y): + ... left(360) + ... + >>> onclick(turn) # Now clicking into the turtle will turn it. + >>> onclick(None) # event-binding will be removed + """ ->>> def turn(x, y): -... left(360) -... ->>> onclick(turn) # Now clicking into the turtle will turn it. ->>> onclick(None) # event-binding will be removed -""" def onkey(fun: Callable[[], object], key: str) -> None: """Bind fun to key-release event of key. -Arguments: -fun -- a function with no arguments -key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + Arguments: + fun -- a function with no arguments + key -- a string: key (e.g. "a") or key-symbol (e.g. 
"space") -In order to be able to register key-events, TurtleScreen -must have focus. (See method listen.) + In order to be able to register key-events, TurtleScreen + must have focus. (See method listen.) -Example: + Example: ->>> def f(): -... fd(50) -... lt(60) -... ->>> onkey(f, "Up") ->>> listen() + >>> def f(): + ... fd(50) + ... lt(60) + ... + >>> onkey(f, "Up") + >>> listen() -Subsequently the turtle can be moved by repeatedly pressing -the up-arrow key, consequently drawing a hexagon + Subsequently the turtle can be moved by repeatedly pressing + the up-arrow key, consequently drawing a hexagon + + """ -""" def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: """Set focus on TurtleScreen (in order to collect key-events) -No arguments. -Dummy arguments are provided in order -to be able to pass listen to the onclick method. + No arguments. + Dummy arguments are provided in order + to be able to pass listen to the onclick method. + + Example: + >>> listen() + """ -Example: ->>> listen() -""" def ontimer(fun: Callable[[], object], t: int = 0) -> None: """Install a timer, which calls fun after t milliseconds. -Arguments: -fun -- a function with no arguments. -t -- a number >= 0 - -Example: - ->>> running = True ->>> def f(): -... if running: -... fd(50) -... lt(60) -... ontimer(f, 250) -... ->>> f() # makes the turtle marching around ->>> running = False -""" + Arguments: + fun -- a function with no arguments. + t -- a number >= 0 + + Example: + + >>> running = True + >>> def f(): + ... if running: + ... fd(50) + ... lt(60) + ... ontimer(f, 250) + ... + >>> f() # makes the turtle marching around + >>> running = False + """ + @overload def bgpic(picname: None = None) -> str: """Set background image or return name of current backgroundimage. -Optional argument: -picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". + Optional argument: + picname -- a string, name of an image file (PNG, GIF, PGM, and PPM) or "nopic". -If picname is a filename, set the corresponding image as background. -If picname is "nopic", delete backgroundimage, if present. -If picname is None, return the filename of the current backgroundimage. + If picname is a filename, set the corresponding image as background. + If picname is "nopic", delete backgroundimage, if present. + If picname is None, return the filename of the current backgroundimage. + + Example: + >>> bgpic() + 'nopic' + >>> bgpic("landscape.gif") + >>> bgpic() + 'landscape.gif' + """ -Example: ->>> bgpic() -'nopic' ->>> bgpic("landscape.gif") ->>> bgpic() -'landscape.gif' -""" @overload def bgpic(picname: str) -> None: ... @overload def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: """Resize the canvas the turtles are drawing on. -Optional arguments: -canvwidth -- positive integer, new width of canvas in pixels -canvheight -- positive integer, new height of canvas in pixels -bg -- colorstring or color-tuple, new backgroundcolor -If no arguments are given, return current (canvaswidth, canvasheight) + Optional arguments: + canvwidth -- positive integer, new width of canvas in pixels + canvheight -- positive integer, new height of canvas in pixels + bg -- colorstring or color-tuple, new backgroundcolor + If no arguments are given, return current (canvaswidth, canvasheight) -Do not alter the drawing window. To observe hidden parts of -the canvas use the scrollbars. (Can make visible those parts -of a drawing, which were outside the canvas before!) 
+ Do not alter the drawing window. To observe hidden parts of + the canvas use the scrollbars. (Can make visible those parts + of a drawing, which were outside the canvas before!) + + Example (for a Turtle instance named turtle): + >>> turtle.screensize(2000,1500) + >>> # e.g. to search for an erroneously escaped turtle ;-) + """ -Example (for a Turtle instance named turtle): ->>> turtle.screensize(2000,1500) ->>> # e.g. to search for an erroneously escaped turtle ;-) -""" @overload def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... @@ -2556,16 +2661,16 @@ if sys.version_info >= (3, 14): def save(filename: StrPath, *, overwrite: bool = False) -> None: """Save the drawing as a PostScript file -Arguments: -filename -- a string, the path of the created file. - Must end with '.ps' or '.eps'. + Arguments: + filename -- a string, the path of the created file. + Must end with '.ps' or '.eps'. -Optional arguments: -overwrite -- boolean, if true, then existing files will be overwritten + Optional arguments: + overwrite -- boolean, if true, then existing files will be overwritten -Example: ->>> save('my_drawing.eps') -""" + Example: + >>> save('my_drawing.eps') + """ onscreenclick = onclick resetscreen = reset @@ -2574,29 +2679,29 @@ addshape = register_shape def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: """Bind fun to key-press event of key if key is given, -or to any key-press-event if no key is given. + or to any key-press-event if no key is given. -Arguments: -fun -- a function with no arguments -key -- a string: key (e.g. "a") or key-symbol (e.g. "space") + Arguments: + fun -- a function with no arguments + key -- a string: key (e.g. "a") or key-symbol (e.g. "space") -In order to be able to register key-events, TurtleScreen -must have focus. (See method listen.) + In order to be able to register key-events, TurtleScreen + must have focus. (See method listen.) -Example (for a TurtleScreen instance named screen -and a Turtle instance named turtle): + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): ->>> def f(): -... fd(50) -... lt(60) -... ->>> onkeypress(f, "Up") ->>> listen() + >>> def f(): + ... fd(50) + ... lt(60) + ... + >>> onkeypress(f, "Up") + >>> listen() -Subsequently the turtle can be moved by repeatedly pressing -the up-arrow key, or by keeping pressed the up-arrow key. -consequently drawing a hexagon. -""" + Subsequently the turtle can be moved by repeatedly pressing + the up-arrow key, or by keeping pressed the up-arrow key. + consequently drawing a hexagon. + """ onkeyrelease = onkey @@ -2605,408 +2710,429 @@ onkeyrelease = onkey def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: """Set the size and position of the main window. -Arguments: -width: as integer a size in pixels, as float a fraction of the - Default is 50% of -height: as integer the height in pixels, as float a fraction of the - Default is 75% of -startx: if positive, starting position in pixels from the left - edge of the screen, if negative from the right edge - Default, startx=None is to center window horizontally. -starty: if positive, starting position in pixels from the top - edge of the screen, if negative from the bottom edge - Default, starty=None is to center window vertically. 
+ Arguments: + width: as integer a size in pixels, as float a fraction of the + Default is 50% of + height: as integer the height in pixels, as float a fraction of the + Default is 75% of + startx: if positive, starting position in pixels from the left + edge of the screen, if negative from the right edge + Default, startx=None is to center window horizontally. + starty: if positive, starting position in pixels from the top + edge of the screen, if negative from the bottom edge + Default, starty=None is to center window vertically. -Examples: ->>> setup (width=200, height=200, startx=0, starty=0) + Examples: + >>> setup (width=200, height=200, startx=0, starty=0) -sets window to 200x200 pixels, in upper left of screen + sets window to 200x200 pixels, in upper left of screen ->>> setup(width=.75, height=0.5, startx=None, starty=None) + >>> setup(width=.75, height=0.5, startx=None, starty=None) + + sets window to 75% of screen by 50% of screen and centers + """ -sets window to 75% of screen by 50% of screen and centers -""" def title(titlestring: str) -> None: """Set title of turtle-window -Argument: -titlestring -- a string, to appear in the titlebar of the - turtle graphics window. + Argument: + titlestring -- a string, to appear in the titlebar of the + turtle graphics window. -This is a method of Screen-class. Not available for TurtleScreen- -objects. + This is a method of Screen-class. Not available for TurtleScreen- + objects. + + Example: + >>> title("Welcome to the turtle-zoo!") + """ -Example: ->>> title("Welcome to the turtle-zoo!") -""" def bye() -> None: """Shut the turtlegraphics window. -Example: ->>> bye() -""" + Example: + >>> bye() + """ + def exitonclick() -> None: """Go into mainloop until the mouse is clicked. -No arguments. + No arguments. -Bind bye() method to mouseclick on TurtleScreen. -If "using_IDLE" - value in configuration dictionary is False -(default value), enter mainloop. -If IDLE with -n switch (no subprocess) is used, this value should be -set to True in turtle.cfg. In this case IDLE's mainloop -is active also for the client script. + Bind bye() method to mouseclick on TurtleScreen. + If "using_IDLE" - value in configuration dictionary is False + (default value), enter mainloop. + If IDLE with -n switch (no subprocess) is used, this value should be + set to True in turtle.cfg. In this case IDLE's mainloop + is active also for the client script. -This is a method of the Screen-class and not available for -TurtleScreen instances. + This is a method of the Screen-class and not available for + TurtleScreen instances. -Example: ->>> exitonclick() + Example: + >>> exitonclick() + + """ -""" def Screen() -> _Screen: """Return the singleton screen object. -If none exists at the moment, create a new one and return it, -else return the existing one. -""" + If none exists at the moment, create a new one and return it, + else return the existing one. + """ # Functions copied from TNavigator: def degrees(fullcircle: float = 360.0) -> None: """Set angle measurement units to degrees. -Optional argument: -fullcircle - a number + Optional argument: + fullcircle - a number -Set angle measurement units, i. e. set number -of 'degrees' for a full circle. Default value is -360 degrees. + Set angle measurement units, i. e. set number + of 'degrees' for a full circle. Default value is + 360 degrees. 
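An illustrative sketch of switching angle units, matching the degrees/radians behaviour documented here (standard turtle module and a display assumed):

    import turtle

    t = turtle.Turtle()
    t.left(90)
    print(t.heading())   # 90.0 with the default 360-degree full circle
    t.radians()          # switch the angle unit to radians
    print(t.heading())   # approximately 1.5708 (pi / 2)
    t.degrees()          # back to the default of 360 degrees per full circle
    turtle.done()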
-Example: ->>> left(90) ->>> heading() -90 + Example: + >>> left(90) + >>> heading() + 90 -Change angle measurement unit to grad (also known as gon, -grade, or gradian and equals 1/100-th of the right angle.) ->>> degrees(400.0) ->>> heading() -100 + Change angle measurement unit to grad (also known as gon, + grade, or gradian and equals 1/100-th of the right angle.) + >>> degrees(400.0) + >>> heading() + 100 + + """ -""" def radians() -> None: """Set the angle measurement units to radians. -No arguments. + No arguments. + + Example: + >>> heading() + 90 + >>> radians() + >>> heading() + 1.5707963267948966 + """ -Example: ->>> heading() -90 ->>> radians() ->>> heading() -1.5707963267948966 -""" def forward(distance: float) -> None: """Move the turtle forward by the specified distance. -Aliases: forward | fd + Aliases: forward | fd -Argument: -distance -- a number (integer or float) + Argument: + distance -- a number (integer or float) -Move the turtle forward by the specified distance, in the direction -the turtle is headed. + Move the turtle forward by the specified distance, in the direction + the turtle is headed. + + Example: + >>> position() + (0.00, 0.00) + >>> forward(25) + >>> position() + (25.00,0.00) + >>> forward(-75) + >>> position() + (-50.00,0.00) + """ -Example: ->>> position() -(0.00, 0.00) ->>> forward(25) ->>> position() -(25.00,0.00) ->>> forward(-75) ->>> position() -(-50.00,0.00) -""" def back(distance: float) -> None: """Move the turtle backward by distance. -Aliases: back | backward | bk + Aliases: back | backward | bk -Argument: -distance -- a number + Argument: + distance -- a number -Move the turtle backward by distance, opposite to the direction the -turtle is headed. Do not change the turtle's heading. + Move the turtle backward by distance, opposite to the direction the + turtle is headed. Do not change the turtle's heading. + + Example: + >>> position() + (0.00, 0.00) + >>> backward(30) + >>> position() + (-30.00, 0.00) + """ -Example: ->>> position() -(0.00, 0.00) ->>> backward(30) ->>> position() -(-30.00, 0.00) -""" def right(angle: float) -> None: """Turn turtle right by angle units. -Aliases: right | rt + Aliases: right | rt -Argument: -angle -- a number (integer or float) + Argument: + angle -- a number (integer or float) -Turn turtle right by angle units. (Units are by default degrees, -but can be set via the degrees() and radians() functions.) -Angle orientation depends on mode. (See this.) + Turn turtle right by angle units. (Units are by default degrees, + but can be set via the degrees() and radians() functions.) + Angle orientation depends on mode. (See this.) + + Example: + >>> heading() + 22.0 + >>> right(45) + >>> heading() + 337.0 + """ -Example: ->>> heading() -22.0 ->>> right(45) ->>> heading() -337.0 -""" def left(angle: float) -> None: """Turn turtle left by angle units. -Aliases: left | lt + Aliases: left | lt -Argument: -angle -- a number (integer or float) + Argument: + angle -- a number (integer or float) -Turn turtle left by angle units. (Units are by default degrees, -but can be set via the degrees() and radians() functions.) -Angle orientation depends on mode. (See this.) + Turn turtle left by angle units. (Units are by default degrees, + but can be set via the degrees() and radians() functions.) + Angle orientation depends on mode. (See this.) 
+ + Example: + >>> heading() + 22.0 + >>> left(45) + >>> heading() + 67.0 + """ -Example: ->>> heading() -22.0 ->>> left(45) ->>> heading() -67.0 -""" def pos() -> Vec2D: """Return the turtle's current location (x,y), as a Vec2D-vector. -Aliases: pos | position + Aliases: pos | position -No arguments. + No arguments. + + Example: + >>> pos() + (0.00, 240.00) + """ -Example: ->>> pos() -(0.00, 240.00) -""" def xcor() -> float: """Return the turtle's x coordinate. -No arguments. + No arguments. + + Example: + >>> reset() + >>> left(60) + >>> forward(100) + >>> print(xcor()) + 50.0 + """ -Example: ->>> reset() ->>> left(60) ->>> forward(100) ->>> print(xcor()) -50.0 -""" def ycor() -> float: """Return the turtle's y coordinate ---- -No arguments. - -Example: ->>> reset() ->>> left(60) ->>> forward(100) ->>> print(ycor()) -86.6025403784 -""" + --- + No arguments. + + Example: + >>> reset() + >>> left(60) + >>> forward(100) + >>> print(ycor()) + 86.6025403784 + """ + @overload def goto(x: tuple[float, float], y: None = None) -> None: """Move turtle to an absolute position. -Aliases: setpos | setposition | goto: - -Arguments: -x -- a number or a pair/vector of numbers -y -- a number None - -call: goto(x, y) # two coordinates ---or: goto((x, y)) # a pair (tuple) of coordinates ---or: goto(vec) # e.g. as returned by pos() - -Move turtle to an absolute position. If the pen is down, -a line will be drawn. The turtle's orientation does not change. - -Example: ->>> tp = pos() ->>> tp -(0.00, 0.00) ->>> setpos(60,30) ->>> pos() -(60.00,30.00) ->>> setpos((20,80)) ->>> pos() -(20.00,80.00) ->>> setpos(tp) ->>> pos() -(0.00,0.00) -""" + Aliases: setpos | setposition | goto: + + Arguments: + x -- a number or a pair/vector of numbers + y -- a number None + + call: goto(x, y) # two coordinates + --or: goto((x, y)) # a pair (tuple) of coordinates + --or: goto(vec) # e.g. as returned by pos() + + Move turtle to an absolute position. If the pen is down, + a line will be drawn. The turtle's orientation does not change. + + Example: + >>> tp = pos() + >>> tp + (0.00, 0.00) + >>> setpos(60,30) + >>> pos() + (60.00,30.00) + >>> setpos((20,80)) + >>> pos() + (20.00,80.00) + >>> setpos(tp) + >>> pos() + (0.00,0.00) + """ + @overload def goto(x: float, y: float) -> None: ... def home() -> None: """Move turtle to the origin - coordinates (0,0). -No arguments. + No arguments. -Move turtle to the origin - coordinates (0,0) and set its -heading to its start-orientation (which depends on mode). + Move turtle to the origin - coordinates (0,0) and set its + heading to its start-orientation (which depends on mode). + + Example: + >>> home() + """ -Example: ->>> home() -""" def setx(x: float) -> None: """Set the turtle's first coordinate to x -Argument: -x -- a number (integer or float) + Argument: + x -- a number (integer or float) -Set the turtle's first coordinate to x, leave second coordinate -unchanged. + Set the turtle's first coordinate to x, leave second coordinate + unchanged. + + Example: + >>> position() + (0.00, 240.00) + >>> setx(10) + >>> position() + (10.00, 240.00) + """ -Example: ->>> position() -(0.00, 240.00) ->>> setx(10) ->>> position() -(10.00, 240.00) -""" def sety(y: float) -> None: """Set the turtle's second coordinate to y -Argument: -y -- a number (integer or float) + Argument: + y -- a number (integer or float) -Set the turtle's first coordinate to x, second coordinate remains -unchanged. + Set the turtle's first coordinate to x, second coordinate remains + unchanged. 
+ + Example: + >>> position() + (0.00, 40.00) + >>> sety(-10) + >>> position() + (0.00, -10.00) + """ -Example: ->>> position() -(0.00, 40.00) ->>> sety(-10) ->>> position() -(0.00, -10.00) -""" @overload def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: """Return the distance from the turtle to (x,y) in turtle step units. -Arguments: -x -- a number or a pair/vector of numbers or a turtle instance -y -- a number None None - -call: distance(x, y) # two coordinates ---or: distance((x, y)) # a pair (tuple) of coordinates ---or: distance(vec) # e.g. as returned by pos() ---or: distance(mypen) # where mypen is another turtle - -Example: ->>> pos() -(0.00, 0.00) ->>> distance(30,40) -50.0 ->>> pen = Turtle() ->>> pen.forward(77) ->>> distance(pen) -77.0 -""" + Arguments: + x -- a number or a pair/vector of numbers or a turtle instance + y -- a number None None + + call: distance(x, y) # two coordinates + --or: distance((x, y)) # a pair (tuple) of coordinates + --or: distance(vec) # e.g. as returned by pos() + --or: distance(mypen) # where mypen is another turtle + + Example: + >>> pos() + (0.00, 0.00) + >>> distance(30,40) + 50.0 + >>> pen = Turtle() + >>> pen.forward(77) + >>> distance(pen) + 77.0 + """ + @overload def distance(x: float, y: float) -> float: ... @overload def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: """Return the angle of the line from the turtle's position to (x, y). -Arguments: -x -- a number or a pair/vector of numbers or a turtle instance -y -- a number None None + Arguments: + x -- a number or a pair/vector of numbers or a turtle instance + y -- a number None None -call: distance(x, y) # two coordinates ---or: distance((x, y)) # a pair (tuple) of coordinates ---or: distance(vec) # e.g. as returned by pos() ---or: distance(mypen) # where mypen is another turtle + call: distance(x, y) # two coordinates + --or: distance((x, y)) # a pair (tuple) of coordinates + --or: distance(vec) # e.g. as returned by pos() + --or: distance(mypen) # where mypen is another turtle -Return the angle, between the line from turtle-position to position -specified by x, y and the turtle's start orientation. (Depends on -modes - "standard" or "logo") + Return the angle, between the line from turtle-position to position + specified by x, y and the turtle's start orientation. (Depends on + modes - "standard" or "logo") + + Example: + >>> pos() + (10.00, 10.00) + >>> towards(0,0) + 225.0 + """ -Example: ->>> pos() -(10.00, 10.00) ->>> towards(0,0) -225.0 -""" @overload def towards(x: float, y: float) -> float: ... def heading() -> float: """Return the turtle's current heading. -No arguments. + No arguments. + + Example: + >>> left(67) + >>> heading() + 67.0 + """ -Example: ->>> left(67) ->>> heading() -67.0 -""" def setheading(to_angle: float) -> None: """Set the orientation of the turtle to to_angle. -Aliases: setheading | seth + Aliases: setheading | seth -Argument: -to_angle -- a number (integer or float) + Argument: + to_angle -- a number (integer or float) -Set the orientation of the turtle to to_angle. -Here are some common directions in degrees: + Set the orientation of the turtle to to_angle. 
+ Here are some common directions in degrees: - standard - mode: logo-mode: --------------------|-------------------- - 0 - east 0 - north - 90 - north 90 - east - 180 - west 180 - south - 270 - south 270 - west + standard - mode: logo-mode: + -------------------|-------------------- + 0 - east 0 - north + 90 - north 90 - east + 180 - west 180 - south + 270 - south 270 - west + + Example: + >>> setheading(90) + >>> heading() + 90 + """ -Example: ->>> setheading(90) ->>> heading() -90 -""" def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: """Draw a circle with given radius. -Arguments: -radius -- a number -extent (optional) -- a number -steps (optional) -- an integer - -Draw a circle with given radius. The center is radius units left -of the turtle; extent - an angle - determines which part of the -circle is drawn. If extent is not given, draw the entire circle. -If extent is not a full circle, one endpoint of the arc is the -current pen position. Draw the arc in counterclockwise direction -if radius is positive, otherwise in clockwise direction. Finally -the direction of the turtle is changed by the amount of extent. - -As the circle is approximated by an inscribed regular polygon, -steps determines the number of steps to use. If not given, -it will be calculated automatically. Maybe used to draw regular -polygons. - -call: circle(radius) # full circle ---or: circle(radius, extent) # arc ---or: circle(radius, extent, steps) ---or: circle(radius, steps=6) # 6-sided polygon - -Example: ->>> circle(50) ->>> circle(120, 180) # semicircle -""" + Arguments: + radius -- a number + extent (optional) -- a number + steps (optional) -- an integer + + Draw a circle with given radius. The center is radius units left + of the turtle; extent - an angle - determines which part of the + circle is drawn. If extent is not given, draw the entire circle. + If extent is not a full circle, one endpoint of the arc is the + current pen position. Draw the arc in counterclockwise direction + if radius is positive, otherwise in clockwise direction. Finally + the direction of the turtle is changed by the amount of extent. + + As the circle is approximated by an inscribed regular polygon, + steps determines the number of steps to use. If not given, + it will be calculated automatically. Maybe used to draw regular + polygons. + + call: circle(radius) # full circle + --or: circle(radius, extent) # arc + --or: circle(radius, extent, steps) + --or: circle(radius, steps=6) # 6-sided polygon + + Example: + >>> circle(50) + >>> circle(120, 180) # semicircle + """ fd = forward bk = back @@ -3023,141 +3149,148 @@ seth = setheading def resizemode(rmode: None = None) -> str: """Set resizemode to one of the values: "auto", "user", "noresize". -(Optional) Argument: -rmode -- one of the strings "auto", "user", "noresize" + (Optional) Argument: + rmode -- one of the strings "auto", "user", "noresize" -Different resizemodes have the following effects: - - "auto" adapts the appearance of the turtle - corresponding to the value of pensize. - - "user" adapts the appearance of the turtle according to the - values of stretchfactor and outlinewidth (outline), - which are set by shapesize() - - "noresize" no adaption of the turtle's appearance takes place. -If no argument is given, return current resizemode. -resizemode("user") is called by a call of shapesize with arguments. 
+ Different resizemodes have the following effects: + - "auto" adapts the appearance of the turtle + corresponding to the value of pensize. + - "user" adapts the appearance of the turtle according to the + values of stretchfactor and outlinewidth (outline), + which are set by shapesize() + - "noresize" no adaption of the turtle's appearance takes place. + If no argument is given, return current resizemode. + resizemode("user") is called by a call of shapesize with arguments. -Examples: ->>> resizemode("noresize") ->>> resizemode() -'noresize' -""" + Examples: + >>> resizemode("noresize") + >>> resizemode() + 'noresize' + """ + @overload def resizemode(rmode: Literal["auto", "user", "noresize"]) -> None: ... @overload def pensize(width: None = None) -> int: """Set or return the line thickness. -Aliases: pensize | width + Aliases: pensize | width -Argument: -width -- positive number + Argument: + width -- positive number -Set the line thickness to width or return it. If resizemode is set -to "auto" and turtleshape is a polygon, that polygon is drawn with -the same line thickness. If no argument is given, current pensize -is returned. + Set the line thickness to width or return it. If resizemode is set + to "auto" and turtleshape is a polygon, that polygon is drawn with + the same line thickness. If no argument is given, current pensize + is returned. + + Example: + >>> pensize() + 1 + >>> pensize(10) # from here on lines of width 10 are drawn + """ -Example: ->>> pensize() -1 ->>> pensize(10) # from here on lines of width 10 are drawn -""" @overload def pensize(width: int) -> None: ... def penup() -> None: """Pull the pen up -- no drawing when moving. -Aliases: penup | pu | up + Aliases: penup | pu | up -No argument + No argument + + Example: + >>> penup() + """ -Example: ->>> penup() -""" def pendown() -> None: """Pull the pen down -- drawing when moving. -Aliases: pendown | pd | down + Aliases: pendown | pd | down -No argument. + No argument. + + Example: + >>> pendown() + """ -Example: ->>> pendown() -""" def isdown() -> bool: """Return True if pen is down, False if it's up. -No argument. + No argument. + + Example: + >>> penup() + >>> isdown() + False + >>> pendown() + >>> isdown() + True + """ -Example: ->>> penup() ->>> isdown() -False ->>> pendown() ->>> isdown() -True -""" @overload def speed(speed: None = None) -> int: """Return or set the turtle's speed. -Optional argument: -speed -- an integer in the range 0..10 or a speedstring (see below) - -Set the turtle's speed to an integer value in the range 0 .. 10. -If no argument is given: return current speed. - -If input is a number greater than 10 or smaller than 0.5, -speed is set to 0. -Speedstrings are mapped to speedvalues in the following way: - 'fastest' : 0 - 'fast' : 10 - 'normal' : 6 - 'slow' : 3 - 'slowest' : 1 -speeds from 1 to 10 enforce increasingly faster animation of -line drawing and turtle turning. - -Attention: -speed = 0 : *no* animation takes place. forward/back makes turtle jump -and likewise left/right make the turtle turn instantly. - -Example: ->>> speed(3) -""" + Optional argument: + speed -- an integer in the range 0..10 or a speedstring (see below) + + Set the turtle's speed to an integer value in the range 0 .. 10. + If no argument is given: return current speed. + + If input is a number greater than 10 or smaller than 0.5, + speed is set to 0. 
+ Speedstrings are mapped to speedvalues in the following way: + 'fastest' : 0 + 'fast' : 10 + 'normal' : 6 + 'slow' : 3 + 'slowest' : 1 + speeds from 1 to 10 enforce increasingly faster animation of + line drawing and turtle turning. + + Attention: + speed = 0 : *no* animation takes place. forward/back makes turtle jump + and likewise left/right make the turtle turn instantly. + + Example: + >>> speed(3) + """ + @overload def speed(speed: _Speed) -> None: ... @overload def pencolor() -> _AnyColor: """Return or set the pencolor. -Arguments: -Four input formats are allowed: - - pencolor() - Return the current pencolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - pencolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - pencolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - pencolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - -If turtleshape is a polygon, the outline of that polygon is drawn -with the newly set pencolor. - -Example: ->>> pencolor('brown') ->>> tup = (0.2, 0.8, 0.55) ->>> pencolor(tup) ->>> pencolor() -'#33cc8c' -""" + Arguments: + Four input formats are allowed: + - pencolor() + Return the current pencolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. + - pencolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - pencolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - pencolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + + If turtleshape is a polygon, the outline of that polygon is drawn + with the newly set pencolor. + + Example: + >>> pencolor('brown') + >>> tup = (0.2, 0.8, 0.55) + >>> pencolor(tup) + >>> pencolor() + '#33cc8c' + """ + @overload def pencolor(color: _Color) -> None: ... @overload @@ -3166,31 +3299,32 @@ def pencolor(r: float, g: float, b: float) -> None: ... def fillcolor() -> _AnyColor: """Return or set the fillcolor. -Arguments: -Four input formats are allowed: - - fillcolor() - Return the current fillcolor as color specification string, - possibly in hex-number format (see example). - May be used as input to another color/pencolor/fillcolor call. - - fillcolor(colorstring) - s is a Tk color specification string, such as "red" or "yellow" - - fillcolor((r, g, b)) - *a tuple* of r, g, and b, which represent, an RGB color, - and each of r, g, and b are in the range 0..colormode, - where colormode is either 1.0 or 255 - - fillcolor(r, g, b) - r, g, and b represent an RGB color, and each of r, g, and b - are in the range 0..colormode - -If turtleshape is a polygon, the interior of that polygon is drawn -with the newly set fillcolor. - -Example: ->>> fillcolor('violet') ->>> col = pencolor() ->>> fillcolor(col) ->>> fillcolor(0, .5, 0) -""" + Arguments: + Four input formats are allowed: + - fillcolor() + Return the current fillcolor as color specification string, + possibly in hex-number format (see example). + May be used as input to another color/pencolor/fillcolor call. 
+ - fillcolor(colorstring) + s is a Tk color specification string, such as "red" or "yellow" + - fillcolor((r, g, b)) + *a tuple* of r, g, and b, which represent, an RGB color, + and each of r, g, and b are in the range 0..colormode, + where colormode is either 1.0 or 255 + - fillcolor(r, g, b) + r, g, and b represent an RGB color, and each of r, g, and b + are in the range 0..colormode + + If turtleshape is a polygon, the interior of that polygon is drawn + with the newly set fillcolor. + + Example: + >>> fillcolor('violet') + >>> col = pencolor() + >>> fillcolor(col) + >>> fillcolor(0, .5, 0) + """ + @overload def fillcolor(color: _Color) -> None: ... @overload @@ -3199,35 +3333,36 @@ def fillcolor(r: float, g: float, b: float) -> None: ... def color() -> tuple[_AnyColor, _AnyColor]: """Return or set the pencolor and fillcolor. -Arguments: -Several input formats are allowed. -They use 0, 1, 2, or 3 arguments as follows: - -color() - Return the current pencolor and the current fillcolor - as a pair of color specification strings as are returned - by pencolor and fillcolor. -color(colorstring), color((r,g,b)), color(r,g,b) - inputs as in pencolor, set both, fillcolor and pencolor, - to the given value. -color(colorstring1, colorstring2), -color((r1,g1,b1), (r2,g2,b2)) - equivalent to pencolor(colorstring1) and fillcolor(colorstring2) - and analogously, if the other input format is used. - -If turtleshape is a polygon, outline and interior of that polygon -is drawn with the newly set colors. -For more info see: pencolor, fillcolor - -Example: ->>> color('red', 'green') ->>> color() -('red', 'green') ->>> colormode(255) ->>> color((40, 80, 120), (160, 200, 240)) ->>> color() -('#285078', '#a0c8f0') -""" + Arguments: + Several input formats are allowed. + They use 0, 1, 2, or 3 arguments as follows: + + color() + Return the current pencolor and the current fillcolor + as a pair of color specification strings as are returned + by pencolor and fillcolor. + color(colorstring), color((r,g,b)), color(r,g,b) + inputs as in pencolor, set both, fillcolor and pencolor, + to the given value. + color(colorstring1, colorstring2), + color((r1,g1,b1), (r2,g2,b2)) + equivalent to pencolor(colorstring1) and fillcolor(colorstring2) + and analogously, if the other input format is used. + + If turtleshape is a polygon, outline and interior of that polygon + is drawn with the newly set colors. + For more info see: pencolor, fillcolor + + Example: + >>> color('red', 'green') + >>> color() + ('red', 'green') + >>> colormode(255) + >>> color((40, 80, 120), (160, 200, 240)) + >>> color() + ('#285078', '#a0c8f0') + """ + @overload def color(color: _Color) -> None: ... @overload @@ -3237,88 +3372,91 @@ def color(color1: _Color, color2: _Color) -> None: ... def showturtle() -> None: """Makes the turtle visible. -Aliases: showturtle | st + Aliases: showturtle | st -No argument. + No argument. + + Example: + >>> hideturtle() + >>> showturtle() + """ -Example: ->>> hideturtle() ->>> showturtle() -""" def hideturtle() -> None: """Makes the turtle invisible. -Aliases: hideturtle | ht + Aliases: hideturtle | ht -No argument. + No argument. -It's a good idea to do this while you're in the -middle of a complicated drawing, because hiding -the turtle speeds up the drawing observably. + It's a good idea to do this while you're in the + middle of a complicated drawing, because hiding + the turtle speeds up the drawing observably. 
+ + Example: + >>> hideturtle() + """ -Example: ->>> hideturtle() -""" def isvisible() -> bool: """Return True if the Turtle is shown, False if it's hidden. -No argument. + No argument. -Example: ->>> hideturtle() ->>> print(isvisible()) -False -""" + Example: + >>> hideturtle() + >>> print(isvisible()) + False + """ # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload def pen() -> _PenState: """Return or set the pen's attributes. -Arguments: - pen -- a dictionary with some or all of the below listed keys. - **pendict -- one or more keyword-arguments with the below - listed keys as keywords. - -Return or set the pen's attributes in a 'pen-dictionary' -with the following key/value pairs: - "shown" : True/False - "pendown" : True/False - "pencolor" : color-string or color-tuple - "fillcolor" : color-string or color-tuple - "pensize" : positive number - "speed" : number in range 0..10 - "resizemode" : "auto" or "user" or "noresize" - "stretchfactor": (positive number, positive number) - "shearfactor": number - "outline" : positive number - "tilt" : number - -This dictionary can be used as argument for a subsequent -pen()-call to restore the former pen-state. Moreover one -or more of these attributes can be provided as keyword-arguments. -This can be used to set several pen attributes in one statement. - - -Examples: ->>> pen(fillcolor="black", pencolor="red", pensize=10) ->>> pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} ->>> penstate=pen() ->>> color("yellow","") ->>> penup() ->>> pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} ->>> p.pen(penstate, fillcolor="green") ->>> p.pen() -{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, -'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', -'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} -""" + Arguments: + pen -- a dictionary with some or all of the below listed keys. + **pendict -- one or more keyword-arguments with the below + listed keys as keywords. + + Return or set the pen's attributes in a 'pen-dictionary' + with the following key/value pairs: + "shown" : True/False + "pendown" : True/False + "pencolor" : color-string or color-tuple + "fillcolor" : color-string or color-tuple + "pensize" : positive number + "speed" : number in range 0..10 + "resizemode" : "auto" or "user" or "noresize" + "stretchfactor": (positive number, positive number) + "shearfactor": number + "outline" : positive number + "tilt" : number + + This dictionary can be used as argument for a subsequent + pen()-call to restore the former pen-state. Moreover one + or more of these attributes can be provided as keyword-arguments. + This can be used to set several pen attributes in one statement. 
+ + + Examples: + >>> pen(fillcolor="black", pencolor="red", pensize=10) + >>> pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'red', 'pendown': True, 'fillcolor': 'black', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + >>> penstate=pen() + >>> color("yellow","") + >>> penup() + >>> pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'yellow', 'pendown': False, 'fillcolor': '', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + >>> p.pen(penstate, fillcolor="green") + >>> p.pen() + {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1, + 'pencolor': 'red', 'pendown': True, 'fillcolor': 'green', + 'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0} + """ + @overload def pen( pen: _PenState | None = None, @@ -3348,47 +3486,50 @@ ht = hideturtle def setundobuffer(size: int | None) -> None: """Set or disable undobuffer. -Argument: -size -- an integer or None + Argument: + size -- an integer or None -If size is an integer an empty undobuffer of given size is installed. -Size gives the maximum number of turtle-actions that can be undone -by the undo() function. -If size is None, no undobuffer is present. + If size is an integer an empty undobuffer of given size is installed. + Size gives the maximum number of turtle-actions that can be undone + by the undo() function. + If size is None, no undobuffer is present. + + Example: + >>> setundobuffer(42) + """ -Example: ->>> setundobuffer(42) -""" def undobufferentries() -> int: """Return count of entries in the undobuffer. -No argument. + No argument. + + Example: + >>> while undobufferentries(): + ... undo() + """ -Example: ->>> while undobufferentries(): -... undo() -""" @overload def shape(name: None = None) -> str: """Set turtle shape to shape with given name / return current shapename. -Optional argument: -name -- a string, which is a valid shapename - -Set turtle shape to shape with given name or, if name is not given, -return name of current shape. -Shape with name must exist in the TurtleScreen's shape dictionary. -Initially there are the following polygon shapes: -'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. -To learn about how to deal with shapes see Screen-method register_shape. - -Example: ->>> shape() -'arrow' ->>> shape("turtle") ->>> shape() -'turtle' -""" + Optional argument: + name -- a string, which is a valid shapename + + Set turtle shape to shape with given name or, if name is not given, + return name of current shape. + Shape with name must exist in the TurtleScreen's shape dictionary. + Initially there are the following polygon shapes: + 'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'. + To learn about how to deal with shapes see Screen-method register_shape. + + Example: + >>> shape() + 'arrow' + >>> shape("turtle") + >>> shape() + 'turtle' + """ + @overload def shape(name: str) -> None: ... @@ -3396,84 +3537,86 @@ if sys.version_info >= (3, 12): def teleport(x: float | None = None, y: float | None = None, *, fill_gap: bool = False) -> None: """Instantly move turtle to an absolute position. -Arguments: -x -- a number or None -y -- a number None -fill_gap -- a boolean This argument must be specified by name. 
- -call: teleport(x, y) # two coordinates ---or: teleport(x) # teleport to x position, keeping y as is ---or: teleport(y=y) # teleport to y position, keeping x as is ---or: teleport(x, y, fill_gap=True) - # teleport but fill the gap in between - -Move turtle to an absolute position. Unlike goto(x, y), a line will not -be drawn. The turtle's orientation does not change. If currently -filling, the polygon(s) teleported from will be filled after leaving, -and filling will begin again after teleporting. This can be disabled -with fill_gap=True, which makes the imaginary line traveled during -teleporting act as a fill barrier like in goto(x, y). - -Example: ->>> tp = pos() ->>> tp -(0.00,0.00) ->>> teleport(60) ->>> pos() -(60.00,0.00) ->>> teleport(y=10) ->>> pos() -(60.00,10.00) ->>> teleport(20, 30) ->>> pos() -(20.00,30.00) -""" + Arguments: + x -- a number or None + y -- a number None + fill_gap -- a boolean This argument must be specified by name. + + call: teleport(x, y) # two coordinates + --or: teleport(x) # teleport to x position, keeping y as is + --or: teleport(y=y) # teleport to y position, keeping x as is + --or: teleport(x, y, fill_gap=True) + # teleport but fill the gap in between + + Move turtle to an absolute position. Unlike goto(x, y), a line will not + be drawn. The turtle's orientation does not change. If currently + filling, the polygon(s) teleported from will be filled after leaving, + and filling will begin again after teleporting. This can be disabled + with fill_gap=True, which makes the imaginary line traveled during + teleporting act as a fill barrier like in goto(x, y). + + Example: + >>> tp = pos() + >>> tp + (0.00,0.00) + >>> teleport(60) + >>> pos() + (60.00,0.00) + >>> teleport(y=10) + >>> pos() + (60.00,10.00) + >>> teleport(20, 30) + >>> pos() + (20.00,30.00) + """ # Unsafely overlaps when no arguments are provided @overload def shapesize() -> tuple[float, float, float]: """Set/return turtle's stretchfactors/outline. Set resizemode to "user". -Optional arguments: - stretch_wid : positive number - stretch_len : positive number - outline : positive number - -Return or set the pen's attributes x/y-stretchfactors and/or outline. -Set resizemode to "user". -If and only if resizemode is set to "user", the turtle will be displayed -stretched according to its stretchfactors: -stretch_wid is stretchfactor perpendicular to orientation -stretch_len is stretchfactor in direction of turtles orientation. -outline determines the width of the shapes's outline. - -Examples: ->>> resizemode("user") ->>> shapesize(5, 5, 12) ->>> shapesize(outline=8) -""" + Optional arguments: + stretch_wid : positive number + stretch_len : positive number + outline : positive number + + Return or set the pen's attributes x/y-stretchfactors and/or outline. + Set resizemode to "user". + If and only if resizemode is set to "user", the turtle will be displayed + stretched according to its stretchfactors: + stretch_wid is stretchfactor perpendicular to orientation + stretch_len is stretchfactor in direction of turtles orientation. + outline determines the width of the shapes's outline. + + Examples: + >>> resizemode("user") + >>> shapesize(5, 5, 12) + >>> shapesize(outline=8) + """ + @overload def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... @overload def shearfactor(shear: None = None) -> float: """Set or return the current shearfactor. 
-Optional argument: shear -- number, tangent of the shear angle - -Shear the turtleshape according to the given shearfactor shear, -which is the tangent of the shear angle. DO NOT change the -turtle's heading (direction of movement). -If shear is not given: return the current shearfactor, i. e. the -tangent of the shear angle, by which lines parallel to the -heading of the turtle are sheared. - -Examples: ->>> shape("circle") ->>> shapesize(5,2) ->>> shearfactor(0.5) ->>> shearfactor() ->>> 0.5 -""" + Optional argument: shear -- number, tangent of the shear angle + + Shear the turtleshape according to the given shearfactor shear, + which is the tangent of the shear angle. DO NOT change the + turtle's heading (direction of movement). + If shear is not given: return the current shearfactor, i. e. the + tangent of the shear angle, by which lines parallel to the + heading of the turtle are sheared. + + Examples: + >>> shape("circle") + >>> shapesize(5,2) + >>> shearfactor(0.5) + >>> shearfactor() + >>> 0.5 + """ + @overload def shearfactor(shear: float) -> None: ... @@ -3482,23 +3625,24 @@ def shearfactor(shear: float) -> None: ... def shapetransform() -> tuple[float, float, float, float]: """Set or return the current transformation matrix of the turtle shape. -Optional arguments: t11, t12, t21, t22 -- numbers. - -If none of the matrix elements are given, return the transformation -matrix. -Otherwise set the given elements and transform the turtleshape -according to the matrix consisting of first row t11, t12 and -second row t21, 22. -Modify stretchfactor, shearfactor and tiltangle according to the -given matrix. - -Examples: ->>> shape("square") ->>> shapesize(4,2) ->>> shearfactor(-0.5) ->>> shapetransform() -(4.0, -1.0, -0.0, 2.0) -""" + Optional arguments: t11, t12, t21, t22 -- numbers. + + If none of the matrix elements are given, return the transformation + matrix. + Otherwise set the given elements and transform the turtleshape + according to the matrix consisting of first row t11, t12 and + second row t21, 22. + Modify stretchfactor, shearfactor and tiltangle according to the + given matrix. + + Examples: + >>> shape("square") + >>> shapesize(4,2) + >>> shearfactor(-0.5) + >>> shapetransform() + (4.0, -1.0, -0.0, 2.0) + """ + @overload def shapetransform( t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None @@ -3506,15 +3650,15 @@ def shapetransform( def get_shapepoly() -> _PolygonCoords | None: """Return the current shape polygon as tuple of coordinate pairs. -No argument. + No argument. -Examples: ->>> shape("square") ->>> shapetransform(4, -1, 0, 2) ->>> get_shapepoly() -((50, -20), (30, 20), (-50, 20), (-30, -20)) + Examples: + >>> shape("square") + >>> shapetransform(4, -1, 0, 2) + >>> get_shapepoly() + ((50, -20), (30, 20), (-50, 20), (-30, -20)) -""" + """ if sys.version_info < (3, 13): @deprecated("Deprecated since Python 3.1; removed in Python 3.13. Use `tiltangle()` instead.") @@ -3544,50 +3688,51 @@ if sys.version_info < (3, 13): def tiltangle(angle: None = None) -> float: """Set or return the current tilt-angle. -Optional argument: angle -- number - -Rotate the turtleshape to point in the direction specified by angle, -regardless of its current tilt-angle. DO NOT change the turtle's -heading (direction of movement). -If angle is not given: return the current tilt-angle, i. e. the angle -between the orientation of the turtleshape and the heading of the -turtle (its direction of movement). 
- -Examples: ->>> shape("circle") ->>> shapesize(5, 2) ->>> tiltangle() -0.0 ->>> tiltangle(45) ->>> tiltangle() -45.0 ->>> stamp() ->>> fd(50) ->>> tiltangle(-45) ->>> tiltangle() -315.0 ->>> stamp() ->>> fd(50) -""" + Optional argument: angle -- number + + Rotate the turtleshape to point in the direction specified by angle, + regardless of its current tilt-angle. DO NOT change the turtle's + heading (direction of movement). + If angle is not given: return the current tilt-angle, i. e. the angle + between the orientation of the turtleshape and the heading of the + turtle (its direction of movement). + + Examples: + >>> shape("circle") + >>> shapesize(5, 2) + >>> tiltangle() + 0.0 + >>> tiltangle(45) + >>> tiltangle() + 45.0 + >>> stamp() + >>> fd(50) + >>> tiltangle(-45) + >>> tiltangle() + 315.0 + >>> stamp() + >>> fd(50) + """ + @overload def tiltangle(angle: float) -> None: ... def tilt(angle: float) -> None: """Rotate the turtleshape by angle. -Argument: -angle - a number + Argument: + angle - a number -Rotate the turtleshape by angle from its current tilt-angle, -but do NOT change the turtle's heading (direction of movement). + Rotate the turtleshape by angle from its current tilt-angle, + but do NOT change the turtle's heading (direction of movement). -Examples: ->>> shape("circle") ->>> shapesize(5,2) ->>> tilt(30) ->>> fd(50) ->>> tilt(30) ->>> fd(50) -""" + Examples: + >>> shape("circle") + >>> shapesize(5,2) + >>> tilt(30) + >>> fd(50) + >>> tilt(30) + >>> fd(50) + """ # Can return either 'int' or Tuple[int, ...] based on if the stamp is # a compound stamp or not. So, as per the "no Union return" policy, @@ -3595,112 +3740,118 @@ Examples: def stamp() -> Any: """Stamp a copy of the turtleshape onto the canvas and return its id. -No argument. + No argument. -Stamp a copy of the turtle shape onto the canvas at the current -turtle position. Return a stamp_id for that stamp, which can be -used to delete it by calling clearstamp(stamp_id). + Stamp a copy of the turtle shape onto the canvas at the current + turtle position. Return a stamp_id for that stamp, which can be + used to delete it by calling clearstamp(stamp_id). + + Example: + >>> color("blue") + >>> stamp() + 13 + >>> fd(50) + """ -Example: ->>> color("blue") ->>> stamp() -13 ->>> fd(50) -""" def clearstamp(stampid: int | tuple[int, ...]) -> None: """Delete stamp with given stampid -Argument: -stampid - an integer, must be return value of previous stamp() call. + Argument: + stampid - an integer, must be return value of previous stamp() call. + + Example: + >>> color("blue") + >>> astamp = stamp() + >>> fd(50) + >>> clearstamp(astamp) + """ -Example: ->>> color("blue") ->>> astamp = stamp() ->>> fd(50) ->>> clearstamp(astamp) -""" def clearstamps(n: int | None = None) -> None: """Delete all or first/last n of turtle's stamps. -Optional argument: -n -- an integer + Optional argument: + n -- an integer -If n is None, delete all of pen's stamps, -else if n > 0 delete first n stamps -else if n < 0 delete last n stamps. + If n is None, delete all of pen's stamps, + else if n > 0 delete first n stamps + else if n < 0 delete last n stamps. + + Example: + >>> for i in range(8): + ... stamp(); fd(30) + ... + >>> clearstamps(2) + >>> clearstamps(-2) + >>> clearstamps() + """ -Example: ->>> for i in range(8): -... stamp(); fd(30) -... ->>> clearstamps(2) ->>> clearstamps(-2) ->>> clearstamps() -""" def filling() -> bool: """Return fillstate (True if filling, False else). -No argument. + No argument. 
-Example: ->>> begin_fill() ->>> if filling(): -... pensize(5) -... else: -... pensize(3) -""" + Example: + >>> begin_fill() + >>> if filling(): + ... pensize(5) + ... else: + ... pensize(3) + """ if sys.version_info >= (3, 14): @contextmanager def fill() -> Generator[None]: """A context manager for filling a shape. -Implicitly ensures the code block is wrapped with -begin_fill() and end_fill(). + Implicitly ensures the code block is wrapped with + begin_fill() and end_fill(). -Example: ->>> color("black", "red") ->>> with fill(): -... circle(60) -""" + Example: + >>> color("black", "red") + >>> with fill(): + ... circle(60) + """ def begin_fill() -> None: """Called just before drawing a shape to be filled. -No argument. + No argument. + + Example: + >>> color("black", "red") + >>> begin_fill() + >>> circle(60) + >>> end_fill() + """ -Example: ->>> color("black", "red") ->>> begin_fill() ->>> circle(60) ->>> end_fill() -""" def end_fill() -> None: """Fill the shape drawn after the call begin_fill(). -No argument. + No argument. + + Example: + >>> color("black", "red") + >>> begin_fill() + >>> circle(60) + >>> end_fill() + """ -Example: ->>> color("black", "red") ->>> begin_fill() ->>> circle(60) ->>> end_fill() -""" @overload def dot(size: int | _Color | None = None) -> None: """Draw a dot with diameter size, using color. -Optional arguments: -size -- an integer >= 1 (if given) -color -- a colorstring or a numeric color tuple + Optional arguments: + size -- an integer >= 1 (if given) + color -- a colorstring or a numeric color tuple -Draw a circular dot with diameter size, using color. -If size is not given, the maximum of pensize+4 and 2*pensize is used. + Draw a circular dot with diameter size, using color. + If size is not given, the maximum of pensize+4 and 2*pensize is used. + + Example: + >>> dot() + >>> fd(50); dot(20, "blue"); fd(50) + """ -Example: ->>> dot() ->>> fd(50); dot(20, "blue"); fd(50) -""" @overload def dot(size: int | None, color: _Color, /) -> None: ... @overload @@ -3708,177 +3859,183 @@ def dot(size: int | None, r: float, g: float, b: float, /) -> None: ... def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: """Write text at the current turtle position. -Arguments: -arg -- info, which is to be written to the TurtleScreen -move (optional) -- True/False -align (optional) -- one of the strings "left", "center" or right" -font (optional) -- a triple (fontname, fontsize, fonttype) - -Write text - the string representation of arg - at the current -turtle position according to align ("left", "center" or right") -and with the given font. -If move is True, the pen is moved to the bottom-right corner -of the text. By default, move is False. - -Example: ->>> write('Home = ', True, align="center") ->>> write((0,0), True) -""" + Arguments: + arg -- info, which is to be written to the TurtleScreen + move (optional) -- True/False + align (optional) -- one of the strings "left", "center" or right" + font (optional) -- a triple (fontname, fontsize, fonttype) + + Write text - the string representation of arg - at the current + turtle position according to align ("left", "center" or right") + and with the given font. + If move is True, the pen is moved to the bottom-right corner + of the text. By default, move is False. 
+ + Example: + >>> write('Home = ', True, align="center") + >>> write((0,0), True) + """ if sys.version_info >= (3, 14): @contextmanager def poly() -> Generator[None]: """A context manager for recording the vertices of a polygon. -Implicitly ensures that the code block is wrapped with -begin_poly() and end_poly() + Implicitly ensures that the code block is wrapped with + begin_poly() and end_poly() -Example (for a Turtle instance named turtle) where we create a -triangle as the polygon and move the turtle 100 steps forward: ->>> with poly(): -... for side in range(3) -... forward(50) -... right(60) ->>> forward(100) -""" + Example (for a Turtle instance named turtle) where we create a + triangle as the polygon and move the turtle 100 steps forward: + >>> with poly(): + ... for side in range(3) + ... forward(50) + ... right(60) + >>> forward(100) + """ def begin_poly() -> None: """Start recording the vertices of a polygon. -No argument. + No argument. -Start recording the vertices of a polygon. Current turtle position -is first point of polygon. + Start recording the vertices of a polygon. Current turtle position + is first point of polygon. + + Example: + >>> begin_poly() + """ -Example: ->>> begin_poly() -""" def end_poly() -> None: """Stop recording the vertices of a polygon. -No argument. + No argument. -Stop recording the vertices of a polygon. Current turtle position is -last point of polygon. This will be connected with the first point. + Stop recording the vertices of a polygon. Current turtle position is + last point of polygon. This will be connected with the first point. + + Example: + >>> end_poly() + """ -Example: ->>> end_poly() -""" def get_poly() -> _PolygonCoords | None: """Return the lastly recorded polygon. -No argument. + No argument. + + Example: + >>> p = get_poly() + >>> register_shape("myFavouriteShape", p) + """ -Example: ->>> p = get_poly() ->>> register_shape("myFavouriteShape", p) -""" def getscreen() -> TurtleScreen: """Return the TurtleScreen object, the turtle is drawing on. -No argument. + No argument. -Return the TurtleScreen object, the turtle is drawing on. -So TurtleScreen-methods can be called for that object. + Return the TurtleScreen object, the turtle is drawing on. + So TurtleScreen-methods can be called for that object. + + Example: + >>> ts = getscreen() + >>> ts + + >>> ts.bgcolor("pink") + """ -Example: ->>> ts = getscreen() ->>> ts - ->>> ts.bgcolor("pink") -""" def getturtle() -> Turtle: """Return the Turtleobject itself. -No argument. + No argument. -Only reasonable use: as a function to return the 'anonymous turtle': + Only reasonable use: as a function to return the 'anonymous turtle': -Example: ->>> pet = getturtle() ->>> pet.fd(50) ->>> pet - ->>> turtles() -[] -""" + Example: + >>> pet = getturtle() + >>> pet.fd(50) + >>> pet + + >>> turtles() + [] + """ getpen = getturtle def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: """Bind fun to mouse-button-release event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). - -Example (for a MyTurtle instance named joe): ->>> class MyTurtle(Turtle): -... def glow(self,x,y): -... self.fillcolor("red") -... def unglow(self,x,y): -... self.fillcolor("") -... 
->>> joe = MyTurtle() ->>> joe.onclick(joe.glow) ->>> joe.onrelease(joe.unglow) - -Clicking on joe turns fillcolor red, unclicking turns it to -transparent. -""" + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). + + Example (for a MyTurtle instance named joe): + >>> class MyTurtle(Turtle): + ... def glow(self,x,y): + ... self.fillcolor("red") + ... def unglow(self,x,y): + ... self.fillcolor("") + ... + >>> joe = MyTurtle() + >>> joe.onclick(joe.glow) + >>> joe.onrelease(joe.unglow) + + Clicking on joe turns fillcolor red, unclicking turns it to + transparent. + """ + def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: """Bind fun to mouse-move event on this turtle on canvas. -Arguments: -fun -- a function with two arguments, to which will be assigned - the coordinates of the clicked point on the canvas. -btn -- number of the mouse-button defaults to 1 (left mouse button). + Arguments: + fun -- a function with two arguments, to which will be assigned + the coordinates of the clicked point on the canvas. + btn -- number of the mouse-button defaults to 1 (left mouse button). -Every sequence of mouse-move-events on a turtle is preceded by a -mouse-click event on that + Every sequence of mouse-move-events on a turtle is preceded by a + mouse-click event on that -Example: ->>> ondrag(goto) + Example: + >>> ondrag(goto) + + Subsequently clicking and dragging a Turtle will move it + across the screen thereby producing handdrawings (if pen is + down). + """ -Subsequently clicking and dragging a Turtle will move it -across the screen thereby producing handdrawings (if pen is -down). -""" def undo() -> None: """undo (repeatedly) the last turtle action. -No argument. + No argument. -undo (repeatedly) the last turtle action. -Number of available undo actions is determined by the size of -the undobuffer. + undo (repeatedly) the last turtle action. + Number of available undo actions is determined by the size of + the undobuffer. -Example: ->>> for i in range(4): -... fd(50); lt(80) -... ->>> for i in range(8): -... undo() -... -""" + Example: + >>> for i in range(4): + ... fd(50); lt(80) + ... + >>> for i in range(8): + ... undo() + ... + """ turtlesize = shapesize # Functions copied from RawTurtle with a few tweaks: def clone() -> Turtle: - """Create and return a clone of the + """Create and return a clone of the -No argument. + No argument. -Create and return a clone of the turtle with same position, heading -and turtle properties. + Create and return a clone of the turtle with same position, heading + and turtle properties. -Example (for a Turtle instance named mick): -mick = Turtle() -joe = mick.clone() -""" + Example (for a Turtle instance named mick): + mick = Turtle() + joe = mick.clone() + """ # Extra functions present only in the global scope: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi index c9a73fb2940c7..9c428718da35d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/types.pyi @@ -1,6 +1,7 @@ """ Define names for built-in types that aren't directly accessible as a builtin. 
""" + import sys from _typeshed import AnnotationForm, MaybeNone, SupportsKeysAndGetItem from _typeshed.importlib import LoaderProtocol @@ -76,19 +77,20 @@ _VT_co = TypeVar("_VT_co", covariant=True) class FunctionType: """Create a function object. - code - a code object - globals - the globals dictionary - name - a string that overrides the name from the code object - argdefs - a tuple that specifies the default argument values - closure - a tuple that supplies the bindings for free variables - kwdefaults - a dictionary that specifies the default keyword argument values -""" + code + a code object + globals + the globals dictionary + name + a string that overrides the name from the code object + argdefs + a tuple that specifies the default argument values + closure + a tuple that supplies the bindings for free variables + kwdefaults + a dictionary that specifies the default keyword argument values + """ + @property def __closure__(self) -> tuple[CellType, ...] | None: ... __code__: CodeType @@ -130,12 +132,12 @@ class FunctionType: ) -> Self: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + @overload def __get__(self, instance: None, owner: type, /) -> FunctionType: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + @overload def __get__(self, instance: object, owner: type | None = None, /) -> MethodType: ... @@ -143,8 +145,8 @@ LambdaType = FunctionType @final class CodeType: - """Create a code object. Not for the faint of heart. -""" + """Create a code object. Not for the faint of heart.""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @property @@ -286,8 +288,7 @@ class CodeType: co_linetable: bytes = ..., co_exceptiontable: bytes = ..., ) -> Self: - """Return a copy of the code object with new values for the specified fields. -""" + """Return a copy of the code object with new values for the specified fields.""" elif sys.version_info >= (3, 10): def replace( self, @@ -309,8 +310,7 @@ class CodeType: co_name: str = ..., co_linetable: bytes = ..., ) -> Self: - """Return a copy of the code object with new values for the specified fields. -""" + """Return a copy of the code object with new values for the specified fields.""" else: def replace( self, @@ -332,66 +332,63 @@ class CodeType: co_name: str = ..., co_lnotab: bytes = ..., ) -> Self: - """Return a copy of the code object with new values for the specified fields. -""" - + """Return a copy of the code object with new values for the specified fields.""" if sys.version_info >= (3, 13): __replace__ = replace @final class MappingProxyType(Mapping[_KT_co, _VT_co]): # type: ignore[type-var] # pyright: ignore[reportInvalidTypeArguments] - """Read-only proxy of a mapping. -""" + """Read-only proxy of a mapping.""" + __hash__: ClassVar[None] # type: ignore[assignment] def __new__(cls, mapping: SupportsKeysAndGetItem[_KT_co, _VT_co]) -> Self: ... def __getitem__(self, key: _KT_co, /) -> _VT_co: # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - """Return self[key]. -""" + """Return self[key].""" + def __iter__(self) -> Iterator[_KT_co]: - """Implement iter(self). -""" + """Implement iter(self).""" + def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" + def __eq__(self, value: object, /) -> bool: ... 
def copy(self) -> dict[_KT_co, _VT_co]: - """D.copy() -> a shallow copy of D -""" + """D.copy() -> a shallow copy of D""" + def keys(self) -> KeysView[_KT_co]: - """D.keys() -> a set-like object providing a view on D's keys -""" + """D.keys() -> a set-like object providing a view on D's keys""" + def values(self) -> ValuesView[_VT_co]: - """D.values() -> an object providing a view on D's values -""" + """D.values() -> an object providing a view on D's values""" + def items(self) -> ItemsView[_KT_co, _VT_co]: - """D.items() -> a set-like object providing a view on D's items -""" + """D.items() -> a set-like object providing a view on D's items""" + @overload def get(self, key: _KT_co, /) -> _VT_co | None: # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter - """Return the value for key if key is in the mapping, else default. -""" + """Return the value for key if key is in the mapping, else default.""" + @overload def get(self, key: _KT_co, default: _VT_co, /) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload def get(self, key: _KT_co, default: _T2, /) -> _VT_co | _T2: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" + def __reversed__(self) -> Iterator[_KT_co]: - """D.__reversed__() -> reverse iterator -""" + """D.__reversed__() -> reverse iterator""" + def __or__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, value: Mapping[_T1, _T2], /) -> dict[_KT_co | _T1, _VT_co | _T2]: - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 12): @disjoint_base class SimpleNamespace: - """A simple attribute-based namespace. -""" + """A simple attribute-based namespace.""" + __hash__: ClassVar[None] # type: ignore[assignment] if sys.version_info >= (3, 13): def __init__( @@ -406,15 +403,15 @@ if sys.version_info >= (3, 12): def __delattr__(self, name: str, /) -> None: ... if sys.version_info >= (3, 13): def __replace__(self, **kwargs: Any) -> Self: - """Return a copy of the namespace object with new values for the specified attributes. -""" + """Return a copy of the namespace object with new values for the specified attributes.""" else: class SimpleNamespace: """A simple attribute-based namespace. -SimpleNamespace(**kwargs) -""" + SimpleNamespace(**kwargs) + """ + __hash__: ClassVar[None] # type: ignore[assignment] def __init__(self, **kwargs: Any) -> None: ... def __eq__(self, value: object, /) -> bool: ... @@ -426,8 +423,9 @@ SimpleNamespace(**kwargs) class ModuleType: """Create a module object. -The name must be a string; the optional doc argument can have any type. -""" + The name must be a string; the optional doc argument can have any type. + """ + __name__: str __file__: str | None @property @@ -457,12 +455,13 @@ The name must be a string; the optional doc argument can have any type. class CellType: """Create a new cell object. - contents - the contents of the cell. If not specified, the cell will be empty, - and - further attempts to access its cell_contents attribute will - raise a ValueError. -""" + contents + the contents of the cell. If not specified, the cell will be empty, + and + further attempts to access its cell_contents attribute will + raise a ValueError. 
+ """ + def __new__(cls, contents: object = ..., /) -> Self: ... __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -481,48 +480,46 @@ class GeneratorType(Generator[_YieldT_co, _SendT_contra, _ReturnT_co]): def gi_running(self) -> bool: ... @property def gi_yieldfrom(self) -> Iterator[_YieldT_co] | None: - """object being iterated by yield from, or None -""" + """object being iterated by yield from, or None""" if sys.version_info >= (3, 11): @property def gi_suspended(self) -> bool: ... __name__: str __qualname__: str def __iter__(self) -> Self: - """Implement iter(self). -""" + """Implement iter(self).""" + def __next__(self) -> _YieldT_co: - """Implement next(self). -""" + """Implement next(self).""" + def send(self, arg: _SendT_contra, /) -> _YieldT_co: """send(arg) -> send 'arg' into generator, -return next yielded value or raise StopIteration. -""" + return next yielded value or raise StopIteration. + """ + @overload - def throw( - self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: + def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: """throw(value) -throw(type[,value[,tb]]) + throw(type[,value[,tb]]) + + Raise exception in generator, return next yielded value or raise + StopIteration. + the (type, val, tb) signature is deprecated, + and may be removed in a future version of Python. + """ -Raise exception in generator, return next yielded value or raise -StopIteration. -the (type, val, tb) signature is deprecated, -and may be removed in a future version of Python. -""" @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): def __class_getitem__(cls, item: Any, /) -> Any: - """See PEP 585 -""" + """See PEP 585""" @final class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): @property def ag_await(self) -> Awaitable[Any] | None: - """object being awaited on, or None -""" + """object being awaited on, or None""" + @property def ag_code(self) -> CodeType: ... @property @@ -536,33 +533,33 @@ class AsyncGeneratorType(AsyncGenerator[_YieldT_co, _SendT_contra]): def ag_suspended(self) -> bool: ... def __aiter__(self) -> Self: - """Return an awaitable, that resolves in asynchronous iterator. -""" + """Return an awaitable, that resolves in asynchronous iterator.""" + def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: - """Return a value or raise StopAsyncIteration. -""" + """Return a value or raise StopAsyncIteration.""" + def asend(self, val: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: - """asend(v) -> send 'v' in generator. -""" + """asend(v) -> send 'v' in generator.""" + @overload async def athrow( self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / ) -> _YieldT_co: """athrow(value) -athrow(type[,value[,tb]]) + athrow(type[,value[,tb]]) + + raise exception in generator. + the (type, val, tb) signature is deprecated, + and may be removed in a future version of Python. + """ -raise exception in generator. -the (type, val, tb) signature is deprecated, -and may be removed in a future version of Python. -""" @overload async def athrow(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... def aclose(self) -> Coroutine[Any, Any, None]: - """aclose() -> raise GeneratorExit inside generator. 
-""" + """aclose() -> raise GeneratorExit inside generator.""" + def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" # Non-default variations to accommodate coroutines _SendT_nd_contra = TypeVar("_SendT_nd_contra", contravariant=True) @@ -574,8 +571,8 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): __qualname__: str @property def cr_await(self) -> Any | None: - """object being awaited on, or None -""" + """object being awaited on, or None""" + @property def cr_code(self) -> CodeType: ... if sys.version_info >= (3, 12): @@ -594,38 +591,37 @@ class CoroutineType(Coroutine[_YieldT_co, _SendT_nd_contra, _ReturnT_nd_co]): def cr_suspended(self) -> bool: ... def close(self) -> None: - """close() -> raise GeneratorExit inside coroutine. -""" + """close() -> raise GeneratorExit inside coroutine.""" + def __await__(self) -> Generator[Any, None, _ReturnT_nd_co]: - """Return an iterator to be used in await expression. -""" + """Return an iterator to be used in await expression.""" + def send(self, arg: _SendT_nd_contra, /) -> _YieldT_co: """send(arg) -> send 'arg' into coroutine, -return next iterated value or raise StopIteration. -""" + return next iterated value or raise StopIteration. + """ + @overload - def throw( - self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., / - ) -> _YieldT_co: + def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ..., /) -> _YieldT_co: """throw(value) -throw(type[,value[,traceback]]) + throw(type[,value[,traceback]]) + + Raise exception in coroutine, return next iterated value or raise + StopIteration. + the (type, val, tb) signature is deprecated, + and may be removed in a future version of Python. + """ -Raise exception in coroutine, return next iterated value or raise -StopIteration. -the (type, val, tb) signature is deprecated, -and may be removed in a future version of Python. -""" @overload def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = ..., /) -> _YieldT_co: ... if sys.version_info >= (3, 13): def __class_getitem__(cls, item: Any, /) -> Any: - """See PEP 585 -""" + """See PEP 585""" @final class MethodType: - """Create a bound instance method object. -""" + """Create a bound instance method object.""" + @property def __closure__(self) -> tuple[CellType, ...] | None: ... # inherited from the added function @property @@ -634,25 +630,22 @@ class MethodType: def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function @property def __func__(self) -> Callable[..., Any]: - """the function (or other callable) implementing a method -""" + """the function (or other callable) implementing a method""" + @property def __self__(self) -> object: - """the instance to which a method is bound -""" + """the instance to which a method is bound""" + @property def __name__(self) -> str: ... # inherited from the added function @property def __qualname__(self) -> str: ... # inherited from the added function def __new__(cls, func: Callable[..., Any], instance: object, /) -> Self: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" - + """Call self as a function.""" if sys.version_info >= (3, 13): def __get__(self, instance: object, owner: type | None = None, /) -> Self: - """Return an attribute of instance, which is of type owner. 
-""" + """Return an attribute of instance, which is of type owner.""" def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -666,8 +659,8 @@ class BuiltinFunctionType: @property def __qualname__(self) -> str: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -682,11 +675,10 @@ class WrapperDescriptorType: @property def __objclass__(self) -> type: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" @final class MethodWrapperType: @@ -699,8 +691,8 @@ class MethodWrapperType: @property def __objclass__(self) -> type: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __eq__(self, value: object, /) -> bool: ... def __ne__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... @@ -714,11 +706,10 @@ class MethodDescriptorType: @property def __objclass__(self) -> type: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" @final class ClassMethodDescriptorType: @@ -729,16 +720,15 @@ class ClassMethodDescriptorType: @property def __objclass__(self) -> type: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: - """Call self as a function. -""" + """Call self as a function.""" + def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" @final class TracebackType: - """Create a new traceback object. -""" + """Create a new traceback object.""" + def __new__(cls, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> Self: ... tb_next: TracebackType | None # the rest are read-only @@ -755,42 +745,38 @@ class FrameType: def f_back(self) -> FrameType | None: ... @property def f_builtins(self) -> dict[str, Any]: - """Return the built-in variables in the frame. -""" + """Return the built-in variables in the frame.""" + @property def f_code(self) -> CodeType: - """Return the code object being executed in this frame. -""" + """Return the code object being executed in this frame.""" + @property def f_globals(self) -> dict[str, Any]: - """Return the global variables in the frame. -""" + """Return the global variables in the frame.""" + @property def f_lasti(self) -> int: - """Return the index of the last attempted instruction in the frame. -""" + """Return the index of the last attempted instruction in the frame.""" # see discussion in #6769: f_lineno *can* sometimes be None, # but you should probably file a bug report with CPython if you encounter it being None in the wild. # An `int | None` annotation here causes too many false-positive errors, so applying `int | Any`. @property def f_lineno(self) -> int | MaybeNone: - """Return the current line number in the frame. 
-""" + """Return the current line number in the frame.""" + @property def f_locals(self) -> dict[str, Any]: - """Return the mapping used by the frame to look up local variables. -""" + """Return the mapping used by the frame to look up local variables.""" f_trace: Callable[[FrameType, str, Any], Any] | None f_trace_lines: bool f_trace_opcodes: bool def clear(self) -> None: - """Clear all references held by the frame. -""" + """Clear all references held by the frame.""" if sys.version_info >= (3, 14): @property def f_generator(self) -> GeneratorType[Any, Any, Any] | CoroutineType[Any, Any, Any] | None: - """Return the generator or coroutine associated with this frame, or None. -""" + """Return the generator or coroutine associated with this frame, or None.""" @final class GetSetDescriptorType: @@ -801,14 +787,13 @@ class GetSetDescriptorType: @property def __objclass__(self) -> type: ... def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value. -""" + """Set an attribute of instance to value.""" + def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance. -""" + """Delete an attribute of instance.""" @final class MemberDescriptorType: @@ -819,14 +804,13 @@ class MemberDescriptorType: @property def __objclass__(self) -> type: ... def __get__(self, instance: Any, owner: type | None = None, /) -> Any: - """Return an attribute of instance, which is of type owner. -""" + """Return an attribute of instance, which is of type owner.""" + def __set__(self, instance: Any, value: Any, /) -> None: - """Set an attribute of instance to value. -""" + """Set an attribute of instance to value.""" + def __delete__(self, instance: Any, /) -> None: - """Delete an attribute of instance. -""" + """Delete an attribute of instance.""" def new_class( name: str, @@ -834,66 +818,67 @@ def new_class( kwds: dict[str, Any] | None = None, exec_body: Callable[[dict[str, Any]], object] | None = None, ) -> type: - """Create a class object dynamically using the appropriate metaclass. -""" + """Create a class object dynamically using the appropriate metaclass.""" + def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: - """Resolve MRO entries dynamically as specified by PEP 560. -""" + """Resolve MRO entries dynamically as specified by PEP 560.""" + def prepare_class( name: str, bases: tuple[type, ...] = (), kwds: dict[str, Any] | None = None ) -> tuple[type, dict[str, Any], dict[str, Any]]: """Call the __prepare__ method of the appropriate metaclass. -Returns (metaclass, namespace, kwds) as a 3-tuple + Returns (metaclass, namespace, kwds) as a 3-tuple -*metaclass* is the appropriate metaclass -*namespace* is the prepared class namespace -*kwds* is an updated copy of the passed in kwds argument with any -'metaclass' entry removed. If no kwds argument is passed in, this will -be an empty dict. -""" + *metaclass* is the appropriate metaclass + *namespace* is the prepared class namespace + *kwds* is an updated copy of the passed in kwds argument with any + 'metaclass' entry removed. If no kwds argument is passed in, this will + be an empty dict. + """ if sys.version_info >= (3, 12): def get_original_bases(cls: type, /) -> tuple[Any, ...]: """Return the class's "original" bases prior to modification by `__mro_entries__`. 
-Examples:: + Examples:: - from typing import TypeVar, Generic, NamedTuple, TypedDict + from typing import TypeVar, Generic, NamedTuple, TypedDict - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... - Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) -""" + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ # Does not actually inherit from property, but saying it does makes sure that # pyright handles this class correctly. class DynamicClassAttribute(property): """Route attribute access on a class to __getattr__. -This is a descriptor, used to define attributes that act differently when -accessed through an instance and through a class. Instance access remains -normal, but access to an attribute through a class will be routed to the -class's __getattr__ method; this is done by raising AttributeError. + This is a descriptor, used to define attributes that act differently when + accessed through an instance and through a class. Instance access remains + normal, but access to an attribute through a class will be routed to the + class's __getattr__ method; this is done by raising AttributeError. -This allows one to have properties active on an instance, and have virtual -attributes on the class with the same name. (Enum used this between Python -versions 3.4 - 3.9 .) + This allows one to have properties active on an instance, and have virtual + attributes on the class with the same name. (Enum used this between Python + versions 3.4 - 3.9 .) -Subclass from this to use a different method of accessing virtual attributes -and still be treated properly by the inspect module. (Enum uses this since -Python 3.10 .) + Subclass from this to use a different method of accessing virtual attributes + and still be treated properly by the inspect module. (Enum uses this since + Python 3.10 .) + + """ -""" fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], object] | None # type: ignore[assignment] fdel: Callable[[Any], object] | None # type: ignore[assignment] @@ -920,28 +905,29 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable @overload def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: - """Convert regular generator function to a coroutine. -""" + """Convert regular generator function to a coroutine.""" + @overload def coroutine(func: _Fn) -> _Fn: ... @disjoint_base class GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ + @property def __origin__(self) -> type | TypeAliasType: ... @property def __args__(self) -> tuple[Any, ...]: ... 
@property def __parameters__(self) -> tuple[Any, ...]: - """Type variables in the GenericAlias. -""" + """Type variables in the GenericAlias.""" + def __new__(cls, origin: type, args: Any, /) -> Self: ... def __getitem__(self, typeargs: Any, /) -> GenericAlias: - """Return self[key]. -""" + """Return self[key].""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... def __mro_entries__(self, bases: Iterable[object], /) -> tuple[type, ...]: ... @@ -952,28 +938,24 @@ E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... if sys.version_info >= (3, 10): def __or__(self, value: Any, /) -> UnionType: - """Return self|value. -""" - def __ror__(self, value: Any, /) -> UnionType: - """Return value|self. -""" + """Return self|value.""" + def __ror__(self, value: Any, /) -> UnionType: + """Return value|self.""" # GenericAlias delegates attr access to `__origin__` def __getattr__(self, name: str) -> Any: ... if sys.version_info >= (3, 10): @final class NoneType: - """The type of the None singleton. -""" + """The type of the None singleton.""" + def __bool__(self) -> Literal[False]: - """True if self else False -""" + """True if self else False""" @final class EllipsisType: - """The type of the Ellipsis singleton. -""" + """The type of the Ellipsis singleton.""" from builtins import _NotImplementedType @@ -982,14 +964,14 @@ if sys.version_info >= (3, 10): class UnionType: """Represent a union type -E.g. for int | str -""" + E.g. for int | str + """ + @property def __args__(self) -> tuple[Any, ...]: ... @property def __parameters__(self) -> tuple[Any, ...]: - """Type variables in the types.UnionType. -""" + """Type variables in the types.UnionType.""" # `(int | str) | Literal["foo"]` returns a generic alias to an instance of `_SpecialForm` (`Union`). # Normally we'd express this using the return type of `_SpecialForm.__ror__`, # but because `UnionType.__or__` accepts `Any`, type checkers will use @@ -997,28 +979,27 @@ E.g. for int | str # rather than `_SpecialForm.__ror__`. To mitigate this, we use `| Any` # in the return type of `UnionType.__(r)or__`. def __or__(self, value: Any, /) -> UnionType | Any: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, value: Any, /) -> UnionType | Any: - """Return value|self. -""" + """Return value|self.""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... # you can only subscript a `UnionType` instance if at least one of the elements # in the union is a generic alias instance that has a non-empty `__parameters__` def __getitem__(self, parameters: Any) -> object: - """Return self[key]. -""" + """Return self[key].""" if sys.version_info >= (3, 13): @final class CapsuleType: """Capsule objects let you wrap a C "void *" pointer in a Python -object. They're a way of passing data through the Python interpreter -without creating your own custom type. - -Capsules are used for communication between extension modules. -They provide a way for an extension module to export a C interface -to other extension modules, so that extension modules can use the -Python import mechanism to link to one another. -""" + object. They're a way of passing data through the Python interpreter + without creating your own custom type. + + Capsules are used for communication between extension modules. 
+ They provide a way for an extension module to export a C interface + to other extension modules, so that extension modules can use the + Python import mechanism to link to one another. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi index 1c96a37dbbd09..b5533714acaf8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing.pyi @@ -17,6 +17,7 @@ Among other things, the module includes the following: Any name not present in __all__ is an implementation detail that may be changed without notice. Use at your own risk! """ + # Since this module defines "overload" it is not recognized by Ruff as typing.overload # TODO: The collections import is required, otherwise mypy crashes. # https://github.com/python/mypy/issues/16744 @@ -165,92 +166,94 @@ if sys.version_info >= (3, 13): class Any: """Special type indicating an unconstrained type. -- Any is compatible with every type. -- Any assumed to have all methods. -- All values assumed to be instances of Any. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. -Note that all the above statements are true from the point of view of -static type checkers. At runtime, Any should not be used with instance -checks. -""" + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ class _Final: - """Mixin to prohibit subclassing. -""" + """Mixin to prohibit subclassing.""" + __slots__ = ("__weakref__",) def final(f: _T) -> _T: """Decorator to indicate final methods and final classes. -Use this decorator to indicate to type checkers that the decorated -method cannot be overridden, and decorated class cannot be subclassed. + Use this decorator to indicate to type checkers that the decorated + method cannot be overridden, and decorated class cannot be subclassed. -For example:: + For example:: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... - class Base: @final - def done(self) -> None: + class Leaf: ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker + class Other(Leaf): # Error reported by type checker ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - -There is no runtime checking of these properties. The decorator -attempts to set the ``__final__`` attribute to ``True`` on the decorated -object to allow runtime introspection. -""" + There is no runtime checking of these properties. The decorator + attempts to set the ``__final__`` attribute to ``True`` on the decorated + object to allow runtime introspection. + """ + @final class TypeVar: """Type variable. -The preferred way to construct a type variable is via the dedicated -syntax for generic functions, classes, and type aliases:: - - class Sequence[T]: # T is a TypeVar - ... + The preferred way to construct a type variable is via the dedicated + syntax for generic functions, classes, and type aliases:: -This syntax can also be used to create bound and constrained type -variables:: - - # S is a TypeVar bound to str - class StrSequence[S: str]: - ... + class Sequence[T]: # T is a TypeVar + ... - # A is a TypeVar constrained to str or bytes - class StrOrBytesSequence[A: (str, bytes)]: - ... 
+ This syntax can also be used to create bound and constrained type + variables:: -Type variables can also have defaults: + # S is a TypeVar bound to str + class StrSequence[S: str]: + ... - class IntDefault[T = int]: - ... + # A is a TypeVar constrained to str or bytes + class StrOrBytesSequence[A: (str, bytes)]: + ... -However, if desired, reusable type variables can also be constructed -manually, like so:: + Type variables can also have defaults: - T = TypeVar('T') # Can be anything - S = TypeVar('S', bound=str) # Can be any subtype of str - A = TypeVar('A', str, bytes) # Must be exactly str or bytes - D = TypeVar('D', default=int) # Defaults to int + class IntDefault[T = int]: + ... -Type variables exist primarily for the benefit of static type -checkers. They serve as the parameters for generic types as well -as for generic function and type alias definitions. + However, if desired, reusable type variables can also be constructed + manually, like so:: + + T = TypeVar('T') # Can be anything + S = TypeVar('S', bound=str) # Can be any subtype of str + A = TypeVar('A', str, bytes) # Must be exactly str or bytes + D = TypeVar('D', default=int) # Defaults to int + + Type variables exist primarily for the benefit of static type + checkers. They serve as the parameters for generic types as well + as for generic function and type alias definitions. + + The variance of type variables is inferred by type checkers when they + are created through the type parameter syntax and when + ``infer_variance=True`` is passed. Manually created type variables may + be explicitly marked covariant or contravariant by passing + ``covariant=True`` or ``contravariant=True``. By default, manually + created type variables are invariant. See PEP 484 and PEP 695 for more + details. + """ -The variance of type variables is inferred by type checkers when they -are created through the type parameter syntax and when -``infer_variance=True`` is passed. Manually created type variables may -be explicitly marked covariant or contravariant by passing -``covariant=True`` or ``contravariant=True``. By default, manually -created type variables are invariant. See PEP 484 and PEP 695 for more -details. -""" @property def __name__(self) -> str: ... @property @@ -308,11 +311,10 @@ details. ) -> None: ... if sys.version_info >= (3, 10): def __or__(self, right: Any, /) -> _SpecialForm: # AnnotationForm - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, left: Any, /) -> _SpecialForm: # AnnotationForm - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... if sys.version_info >= (3, 13): @@ -364,50 +366,51 @@ if sys.version_info >= (3, 11): @final class TypeVarTuple: """Type variable tuple. A specialized form of type variable that enables -variadic generics. + variadic generics. 
-The preferred way to construct a type variable tuple is via the -dedicated syntax for generic functions, classes, and type aliases, -where a single '*' indicates a type variable tuple:: + The preferred way to construct a type variable tuple is via the + dedicated syntax for generic functions, classes, and type aliases, + where a single '*' indicates a type variable tuple:: - def move_first_element_to_last[T, *Ts](tup: tuple[T, *Ts]) -> tuple[*Ts, T]: - return (*tup[1:], tup[0]) + def move_first_element_to_last[T, *Ts](tup: tuple[T, *Ts]) -> tuple[*Ts, T]: + return (*tup[1:], tup[0]) -Type variables tuples can have default values: + Type variables tuples can have default values: - type AliasWithDefault[*Ts = (str, int)] = tuple[*Ts] + type AliasWithDefault[*Ts = (str, int)] = tuple[*Ts] -For compatibility with Python 3.11 and earlier, TypeVarTuple objects -can also be created as follows:: + For compatibility with Python 3.11 and earlier, TypeVarTuple objects + can also be created as follows:: - Ts = TypeVarTuple('Ts') # Can be given any name - DefaultTs = TypeVarTuple('Ts', default=(str, int)) + Ts = TypeVarTuple('Ts') # Can be given any name + DefaultTs = TypeVarTuple('Ts', default=(str, int)) -Just as a TypeVar (type variable) is a placeholder for a single type, -a TypeVarTuple is a placeholder for an *arbitrary* number of types. For -example, if we define a generic class using a TypeVarTuple:: + Just as a TypeVar (type variable) is a placeholder for a single type, + a TypeVarTuple is a placeholder for an *arbitrary* number of types. For + example, if we define a generic class using a TypeVarTuple:: - class C[*Ts]: ... + class C[*Ts]: ... -Then we can parameterize that class with an arbitrary number of type -arguments:: + Then we can parameterize that class with an arbitrary number of type + arguments:: - C[int] # Fine - C[int, str] # Also fine - C[()] # Even this is fine + C[int] # Fine + C[int, str] # Also fine + C[()] # Even this is fine -For more details, see PEP 646. + For more details, see PEP 646. + + Note that only TypeVarTuples defined in the global scope can be + pickled. + """ -Note that only TypeVarTuples defined in the global scope can be -pickled. -""" @property def __name__(self) -> str: ... if sys.version_info >= (3, 13): @property def __default__(self) -> Any: # AnnotationForm - """The default value for this TypeVarTuple. -""" + """The default value for this TypeVarTuple.""" + def has_default(self) -> bool: ... if sys.version_info >= (3, 13): def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm @@ -417,8 +420,8 @@ pickled. def __init__(self, name: str) -> None: ... def __iter__(self) -> Any: - """Implement iter(self). -""" + """Implement iter(self).""" + def __typing_subst__(self, arg: Never, /) -> Never: ... def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... if sys.version_info >= (3, 14): @@ -430,17 +433,18 @@ if sys.version_info >= (3, 10): class ParamSpecArgs: """The args for a ParamSpec object. -Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. -ParamSpecArgs objects have a reference back to their ParamSpec:: + ParamSpecArgs objects have a reference back to their ParamSpec:: - >>> P = ParamSpec("P") - >>> P.args.__origin__ is P - True + >>> P = ParamSpec("P") + >>> P.args.__origin__ is P + True + + This type is meant for runtime introspection and has no special meaning + to static type checkers. 
+ """ -This type is meant for runtime introspection and has no special meaning -to static type checkers. -""" @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -455,17 +459,18 @@ to static type checkers. class ParamSpecKwargs: """The kwargs for a ParamSpec object. -Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. -ParamSpecKwargs objects have a reference back to their ParamSpec:: + ParamSpecKwargs objects have a reference back to their ParamSpec:: - >>> P = ParamSpec("P") - >>> P.kwargs.__origin__ is P - True + >>> P = ParamSpec("P") + >>> P.kwargs.__origin__ is P + True + + This type is meant for runtime introspection and has no special meaning + to static type checkers. + """ -This type is meant for runtime introspection and has no special meaning -to static type checkers. -""" @property def __origin__(self) -> ParamSpec: ... if sys.version_info >= (3, 12): @@ -480,55 +485,56 @@ to static type checkers. class ParamSpec: """Parameter specification variable. -The preferred way to construct a parameter specification is via the -dedicated syntax for generic functions, classes, and type aliases, -where the use of '**' creates a parameter specification:: + The preferred way to construct a parameter specification is via the + dedicated syntax for generic functions, classes, and type aliases, + where the use of '**' creates a parameter specification:: - type IntFunc[**P] = Callable[P, int] + type IntFunc[**P] = Callable[P, int] -The following syntax creates a parameter specification that defaults -to a callable accepting two positional-only arguments of types int -and str: + The following syntax creates a parameter specification that defaults + to a callable accepting two positional-only arguments of types int + and str: - type IntFuncDefault[**P = (int, str)] = Callable[P, int] + type IntFuncDefault[**P = (int, str)] = Callable[P, int] -For compatibility with Python 3.11 and earlier, ParamSpec objects -can also be created as follows:: + For compatibility with Python 3.11 and earlier, ParamSpec objects + can also be created as follows:: - P = ParamSpec('P') - DefaultP = ParamSpec('DefaultP', default=(int, str)) + P = ParamSpec('P') + DefaultP = ParamSpec('DefaultP', default=(int, str)) -Parameter specification variables exist primarily for the benefit of -static type checkers. They are used to forward the parameter types of -one callable to another callable, a pattern commonly found in -higher-order functions and decorators. They are only valid when used -in ``Concatenate``, or as the first argument to ``Callable``, or as -parameters for user-defined Generics. See class Generic for more -information on generic types. + Parameter specification variables exist primarily for the benefit of + static type checkers. They are used to forward the parameter types of + one callable to another callable, a pattern commonly found in + higher-order functions and decorators. They are only valid when used + in ``Concatenate``, or as the first argument to ``Callable``, or as + parameters for user-defined Generics. See class Generic for more + information on generic types. 
-An example for annotating a decorator:: + An example for annotating a decorator:: - def add_logging[**P, T](f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner + def add_logging[**P, T](f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y -Parameter specification variables can be introspected. e.g.:: + Parameter specification variables can be introspected. e.g.:: - >>> P = ParamSpec("P") - >>> P.__name__ - 'P' + >>> P = ParamSpec("P") + >>> P.__name__ + 'P' + + Note that only parameter specification variables defined in the global + scope can be pickled. + """ -Note that only parameter specification variables defined in the global -scope can be pickled. -""" @property def __name__(self) -> str: ... @property @@ -543,8 +549,7 @@ scope can be pickled. if sys.version_info >= (3, 13): @property def __default__(self) -> Any: # AnnotationForm - """The default value for this ParamSpec. -""" + """The default value for this ParamSpec.""" if sys.version_info >= (3, 13): def __new__( cls, @@ -587,22 +592,20 @@ scope can be pickled. @property def args(self) -> ParamSpecArgs: - """Represents positional arguments. -""" + """Represents positional arguments.""" + @property def kwargs(self) -> ParamSpecKwargs: - """Represents keyword arguments. -""" + """Represents keyword arguments.""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any, /) -> Any: ... def __typing_prepare_subst__(self, alias: Any, args: Any, /) -> tuple[Any, ...]: ... def __or__(self, right: Any, /) -> _SpecialForm: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, left: Any, /) -> _SpecialForm: - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 13): def has_default(self) -> bool: ... if sys.version_info >= (3, 14): @@ -616,24 +619,25 @@ scope can be pickled. class NewType: """NewType creates simple unique types with almost zero runtime overhead. -NewType(name, tp) is considered a subtype of tp -by static type checkers. At runtime, NewType(name, tp) returns -a dummy callable that simply returns its argument. + NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. -Usage:: + Usage:: - UserId = NewType('UserId', int) + UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... + def name_by_id(user_id: UserId) -> str: + ... - UserId('user') # Fails type check + UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + + num = UserId(5) + 1 # type: int + """ - num = UserId(5) + 1 # type: int -""" def __init__(self, name: str, tp: Any) -> None: ... 
# AnnotationForm if sys.version_info >= (3, 11): @staticmethod @@ -648,22 +652,22 @@ Usage:: else: def NewType(name: str, tp: Any) -> Any: """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy function that simply returns its argument. Usage:: + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy function that simply returns its argument. Usage:: - UserId = NewType('UserId', int) + UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... + def name_by_id(user_id: UserId) -> str: + ... - UserId('user') # Fails type check + UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK - num = UserId(5) + 1 # type: int - """ + num = UserId(5) + 1 # type: int + """ _F = TypeVar("_F", bound=Callable[..., Any]) _P = _ParamSpec("_P") @@ -683,49 +687,51 @@ _TC = TypeVar("_TC", bound=type[object]) def overload(func: _F) -> _F: """Decorator for overloaded functions/methods. -In a stub file, place two or more stub definitions for the same -function in a row, each decorated with @overload. - -For example:: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - -In a non-stub file (i.e. a regular .py file), do the same but -follow it with an implementation. The implementation should *not* -be decorated with @overload:: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - ... # implementation goes here + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. + + For example:: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload:: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + ... # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ -The overloads for a function can be retrieved at runtime using the -get_overloads() function. -""" def no_type_check(arg: _F) -> _F: """Decorator to indicate that annotations are not type hints. -The argument must be a class or function; if it is a class, it -applies recursively to all methods and classes defined in that class -(but not to methods defined in its superclasses or subclasses). + The argument must be a class or function; if it is a class, it + applies recursively to all methods and classes defined in that class + (but not to methods defined in its superclasses or subclasses). + + This mutates the function(s) or class(es) in place. + """ -This mutates the function(s) or class(es) in place. -""" def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: """Decorator to give another decorator the @no_type_check effect. 
-This wraps the decorator with something that wraps the decorated -function in @no_type_check. -""" + This wraps the decorator with something that wraps the decorated + function in @no_type_check. + """ # This itself is only available during type checking def type_check_only(func_or_cls: _FT) -> _FT: ... @@ -776,74 +782,75 @@ class _ProtocolMeta(ABCMeta): def runtime_checkable(cls: _TC) -> _TC: """Mark a protocol class as a runtime protocol. -Such protocol can be used with isinstance() and issubclass(). -Raise TypeError if applied to a non-protocol class. -This allows a simple-minded structural check very similar to -one trick ponies in collections.abc such as Iterable. + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. -For example:: + For example:: - @runtime_checkable - class Closable(Protocol): - def close(self): ... + @runtime_checkable + class Closable(Protocol): + def close(self): ... - assert isinstance(open('/some/file'), Closable) + assert isinstance(open('/some/file'), Closable) + + Warning: this will check only the presence of the required methods, + not their type signatures! + """ -Warning: this will check only the presence of the required methods, -not their type signatures! -""" @runtime_checkable class SupportsInt(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __int__. -""" + """An ABC with one abstract method __int__.""" + __slots__ = () @abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __float__. -""" + """An ABC with one abstract method __float__.""" + __slots__ = () @abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __complex__. -""" + """An ABC with one abstract method __complex__.""" + __slots__ = () @abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __bytes__. -""" + """An ABC with one abstract method __bytes__.""" + __slots__ = () @abstractmethod def __bytes__(self) -> bytes: ... @runtime_checkable class SupportsIndex(Protocol, metaclass=ABCMeta): - """An ABC with one abstract method __index__. -""" + """An ABC with one abstract method __index__.""" + __slots__ = () @abstractmethod def __index__(self) -> int: ... @runtime_checkable class SupportsAbs(Protocol[_T_co]): - """An ABC with one abstract method __abs__ that is covariant in its return type. -""" + """An ABC with one abstract method __abs__ that is covariant in its return type.""" + __slots__ = () @abstractmethod def __abs__(self) -> _T_co: ... @runtime_checkable class SupportsRound(Protocol[_T_co]): - """An ABC with one abstract method __round__ that is covariant in its return type. -""" + """An ABC with one abstract method __round__ that is covariant in its return type.""" + __slots__ = () @overload @abstractmethod @@ -874,8 +881,8 @@ class Iterable(Protocol[_T_co]): class Iterator(Iterable[_T_co], Protocol[_T_co]): @abstractmethod def __next__(self) -> _T_co: - """Return the next item from the iterator. When exhausted, raise StopIteration -""" + """Return the next item from the iterator. When exhausted, raise StopIteration""" + def __iter__(self) -> Iterator[_T_co]: ... 
@runtime_checkable @@ -891,32 +898,33 @@ _ReturnT_co = TypeVar("_ReturnT_co", covariant=True, default=None) class Generator(Iterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra, _ReturnT_co]): def __next__(self) -> _YieldT_co: """Return the next item from the generator. -When exhausted, raise StopIteration. -""" + When exhausted, raise StopIteration. + """ + @abstractmethod def send(self, value: _SendT_contra, /) -> _YieldT_co: """Send a value into the generator. -Return next yielded value or raise StopIteration. -""" + Return next yielded value or raise StopIteration. + """ + @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / ) -> _YieldT_co: """Raise an exception in the generator. -Return next yielded value or raise StopIteration. -""" + Return next yielded value or raise StopIteration. + """ + @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... if sys.version_info >= (3, 13): def close(self) -> _ReturnT_co | None: - """Raise GeneratorExit inside generator. - """ + """Raise GeneratorExit inside generator.""" else: def close(self) -> None: - """Raise GeneratorExit inside generator. - """ + """Raise GeneratorExit inside generator.""" def __iter__(self) -> Generator[_YieldT_co, _SendT_contra, _ReturnT_co]: ... @@ -928,13 +936,11 @@ else: @runtime_checkable class ContextManager(AbstractContextManager[_T_co, bool | None], Protocol[_T_co]): - """An abstract base class for context managers. -""" + """An abstract base class for context managers.""" @runtime_checkable class AsyncContextManager(AbstractAsyncContextManager[_T_co, bool | None], Protocol[_T_co]): - """An abstract base class for asynchronous context managers. -""" + """An abstract base class for asynchronous context managers.""" @runtime_checkable class Awaitable(Protocol[_T_co]): @@ -952,23 +958,24 @@ class Coroutine(Awaitable[_ReturnT_nd_co], Generic[_YieldT_co, _SendT_nd_contra, @abstractmethod def send(self, value: _SendT_nd_contra, /) -> _YieldT_co: """Send a value into the coroutine. -Return next yielded value or raise StopIteration. -""" + Return next yielded value or raise StopIteration. + """ + @overload @abstractmethod def throw( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / ) -> _YieldT_co: """Raise an exception in the coroutine. -Return next yielded value or raise StopIteration. -""" + Return next yielded value or raise StopIteration. + """ + @overload @abstractmethod def throw(self, typ: BaseException, val: None = None, tb: TracebackType | None = None, /) -> _YieldT_co: ... @abstractmethod def close(self) -> None: - """Raise GeneratorExit inside coroutine. - """ + """Raise GeneratorExit inside coroutine.""" # NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. # The parameters correspond to Generator, but the 4th is the original type. @@ -990,37 +997,39 @@ class AsyncIterable(Protocol[_T_co]): class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod def __anext__(self) -> Awaitable[_T_co]: - """Return the next item or raise StopAsyncIteration when exhausted. -""" + """Return the next item or raise StopAsyncIteration when exhausted.""" + def __aiter__(self) -> AsyncIterator[_T_co]: ... 
@runtime_checkable class AsyncGenerator(AsyncIterator[_YieldT_co], Protocol[_YieldT_co, _SendT_contra]): def __anext__(self) -> Coroutine[Any, Any, _YieldT_co]: """Return the next item from the asynchronous generator. -When exhausted, raise StopAsyncIteration. -""" + When exhausted, raise StopAsyncIteration. + """ + @abstractmethod def asend(self, value: _SendT_contra, /) -> Coroutine[Any, Any, _YieldT_co]: """Send a value into the asynchronous generator. -Return next yielded value or raise StopAsyncIteration. -""" + Return next yielded value or raise StopAsyncIteration. + """ + @overload @abstractmethod def athrow( self, typ: type[BaseException], val: BaseException | object = None, tb: TracebackType | None = None, / ) -> Coroutine[Any, Any, _YieldT_co]: """Raise an exception in the asynchronous generator. -Return next yielded value or raise StopAsyncIteration. -""" + Return next yielded value or raise StopAsyncIteration. + """ + @overload @abstractmethod def athrow( self, typ: BaseException, val: None = None, tb: TracebackType | None = None, / ) -> Coroutine[Any, Any, _YieldT_co]: ... def aclose(self) -> Coroutine[Any, Any, None]: - """Raise GeneratorExit inside coroutine. - """ + """Raise GeneratorExit inside coroutine.""" @runtime_checkable class Container(Protocol[_T_co]): @@ -1037,9 +1046,10 @@ class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): class Sequence(Reversible[_T_co], Collection[_T_co]): """All the operations on a read-only sequence. -Concrete subclasses must override __new__ or __init__, -__getitem__, and __len__. -""" + Concrete subclasses must override __new__ or __init__, + __getitem__, and __len__. + """ + @overload @abstractmethod def __getitem__(self, index: int) -> _T_co: ... @@ -1049,14 +1059,15 @@ __getitem__, and __len__. # Mixin methods def index(self, value: Any, start: int = 0, stop: int = ...) -> int: """S.index(value, [start, [stop]]) -> integer -- return first index of value. -Raises ValueError if the value is not present. + Raises ValueError if the value is not present. + + Supporting start and stop arguments is optional, but + recommended. + """ -Supporting start and stop arguments is optional, but -recommended. -""" def count(self, value: Any) -> int: - """S.count(value) -> integer -- return number of occurrences of value -""" + """S.count(value) -> integer -- return number of occurrences of value""" + def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... def __reversed__(self) -> Iterator[_T_co]: ... @@ -1064,13 +1075,14 @@ recommended. class MutableSequence(Sequence[_T]): """All the operations on a read-write sequence. -Concrete subclasses must provide __new__ or __init__, -__getitem__, __setitem__, __delitem__, __len__, and insert(). -""" + Concrete subclasses must provide __new__ or __init__, + __getitem__, __setitem__, __delitem__, __len__, and insert(). + """ + @abstractmethod def insert(self, index: int, value: _T) -> None: - """S.insert(index, value) -- insert value before index -""" + """S.insert(index, value) -- insert value before index""" + @overload @abstractmethod def __getitem__(self, index: int) -> _T: ... @@ -1091,54 +1103,57 @@ __getitem__, __setitem__, __delitem__, __len__, and insert(). def __delitem__(self, index: slice) -> None: ... 
# Mixin methods def append(self, value: _T) -> None: - """S.append(value) -- append value to the end of the sequence -""" + """S.append(value) -- append value to the end of the sequence""" + def clear(self) -> None: - """S.clear() -> None -- remove all items from S -""" + """S.clear() -> None -- remove all items from S""" + def extend(self, values: Iterable[_T]) -> None: - """S.extend(iterable) -- extend sequence by appending elements from the iterable -""" + """S.extend(iterable) -- extend sequence by appending elements from the iterable""" + def reverse(self) -> None: - """S.reverse() -- reverse *IN PLACE* -""" + """S.reverse() -- reverse *IN PLACE*""" + def pop(self, index: int = -1) -> _T: """S.pop([index]) -> item -- remove and return item at index (default last). -Raise IndexError if list is empty or index is out of range. -""" + Raise IndexError if list is empty or index is out of range. + """ + def remove(self, value: _T) -> None: """S.remove(value) -- remove first occurrence of value. -Raise ValueError if the value is not present. -""" + Raise ValueError if the value is not present. + """ + def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... class AbstractSet(Collection[_T_co]): """A set is a finite, iterable container. -This class provides concrete generic implementations of all -methods except for __contains__, __iter__ and __len__. + This class provides concrete generic implementations of all + methods except for __contains__, __iter__ and __len__. + + To override the comparisons (presumably for speed, as the + semantics are fixed), redefine __le__ and __ge__, + then the other operations will automatically follow suit. + """ -To override the comparisons (presumably for speed, as the -semantics are fixed), redefine __le__ and __ge__, -then the other operations will automatically follow suit. -""" @abstractmethod def __contains__(self, x: object) -> bool: ... def _hash(self) -> int: """Compute the hash value of a set. -Note that we don't define __hash__: not all sets are hashable. -But if you define a hashable set type, its __hash__ should -call this function. + Note that we don't define __hash__: not all sets are hashable. + But if you define a hashable set type, its __hash__ should + call this function. -This must be compatible __eq__. + This must be compatible __eq__. -All sets ought to compare equal if they contain the same -elements, regardless of how they are implemented, and -regardless of the order of the elements; so there's not much -freedom for __eq__ or __hash__. We match the algorithm used -by the built-in frozenset type. -""" + All sets ought to compare equal if they contain the same + elements, regardless of how they are implemented, and + regardless of the order of the elements; so there's not much + freedom for __eq__ or __hash__. We match the algorithm used + by the built-in frozenset type. + """ # Mixin methods def __le__(self, other: AbstractSet[Any]) -> bool: ... def __lt__(self, other: AbstractSet[Any]) -> bool: ... @@ -1150,38 +1165,37 @@ by the built-in frozenset type. def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... def __eq__(self, other: object) -> bool: ... def isdisjoint(self, other: Iterable[Any]) -> bool: - """Return True if two sets have a null intersection. -""" + """Return True if two sets have a null intersection.""" class MutableSet(AbstractSet[_T]): """A mutable set is a finite, iterable container. 
-This class provides concrete generic implementations of all -methods except for __contains__, __iter__, __len__, -add(), and discard(). + This class provides concrete generic implementations of all + methods except for __contains__, __iter__, __len__, + add(), and discard(). + + To override the comparisons (presumably for speed, as the + semantics are fixed), all you have to do is redefine __le__ and + then the other operations will automatically follow suit. + """ -To override the comparisons (presumably for speed, as the -semantics are fixed), all you have to do is redefine __le__ and -then the other operations will automatically follow suit. -""" @abstractmethod def add(self, value: _T) -> None: - """Add an element. -""" + """Add an element.""" + @abstractmethod def discard(self, value: _T) -> None: - """Remove an element. Do not raise an exception if absent. -""" + """Remove an element. Do not raise an exception if absent.""" # Mixin methods def clear(self) -> None: - """This is slow (creates N new iterators!) but effective. -""" + """This is slow (creates N new iterators!) but effective.""" + def pop(self) -> _T: - """Return the popped value. Raise KeyError if empty. -""" + """Return the popped value. Raise KeyError if empty.""" + def remove(self, value: _T) -> None: - """Remove an element. If not a member, raise a KeyError. -""" + """Remove an element. If not a member, raise a KeyError.""" + def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] @@ -1225,11 +1239,12 @@ class ValuesView(MappingView, Collection[_VT_co]): class Mapping(Collection[_KT], Generic[_KT, _VT_co]): """A Mapping is a generic container for associating key/value -pairs. + pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __iter__, and __len__. + """ -This class provides concrete generic implementations of all -methods except for __getitem__, __iter__, and __len__. -""" # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. @abstractmethod @@ -1237,52 +1252,54 @@ methods except for __getitem__, __iter__, and __len__. # Mixin methods @overload def get(self, key: _KT, /) -> _VT_co | None: - """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None. -""" + """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.""" + @overload def get(self, key: _KT, /, default: _VT_co) -> _VT_co: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] # Covariant type as parameter @overload def get(self, key: _KT, /, default: _T) -> _VT_co | _T: ... def items(self) -> ItemsView[_KT, _VT_co]: - """D.items() -> a set-like object providing a view on D's items -""" + """D.items() -> a set-like object providing a view on D's items""" + def keys(self) -> KeysView[_KT]: - """D.keys() -> a set-like object providing a view on D's keys -""" + """D.keys() -> a set-like object providing a view on D's keys""" + def values(self) -> ValuesView[_VT_co]: - """D.values() -> an object providing a view on D's values -""" + """D.values() -> an object providing a view on D's values""" + def __contains__(self, key: object, /) -> bool: ... def __eq__(self, other: object, /) -> bool: ... 
class MutableMapping(Mapping[_KT, _VT]): """A MutableMapping is a generic container for associating -key/value pairs. + key/value pairs. + + This class provides concrete generic implementations of all + methods except for __getitem__, __setitem__, __delitem__, + __iter__, and __len__. + """ -This class provides concrete generic implementations of all -methods except for __getitem__, __setitem__, __delitem__, -__iter__, and __len__. -""" @abstractmethod def __setitem__(self, key: _KT, value: _VT, /) -> None: ... @abstractmethod def __delitem__(self, key: _KT, /) -> None: ... def clear(self) -> None: - """D.clear() -> None. Remove all items from D. -""" + """D.clear() -> None. Remove all items from D.""" + @overload def pop(self, key: _KT, /) -> _VT: """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. -If key is not found, d is returned if given, otherwise KeyError is raised. -""" + If key is not found, d is returned if given, otherwise KeyError is raised. + """ + @overload def pop(self, key: _KT, /, default: _VT) -> _VT: ... @overload def pop(self, key: _KT, /, default: _T) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: """D.popitem() -> (k, v), remove and return some (key, value) pair -as a 2-tuple; but raise KeyError if D is empty. -""" + as a 2-tuple; but raise KeyError if D is empty. + """ # This overload should be allowed only if the value type is compatible with None. # # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: @@ -1291,8 +1308,8 @@ as a 2-tuple; but raise KeyError if D is empty. # -- weakref.WeakKeyDictionary.setdefault @overload def setdefault(self: MutableMapping[_KT, _T | None], key: _KT, default: None = None, /) -> _T | None: - """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D -""" + """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D""" + @overload def setdefault(self, key: _KT, default: _VT, /) -> _VT: ... # 'update' used to take a Union, but using overloading is better. @@ -1318,10 +1335,11 @@ as a 2-tuple; but raise KeyError if D is empty. @overload def update(self, m: SupportsKeysAndGetItem[_KT, _VT], /) -> None: """D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. -If E present and has a .keys() method, does: for k in E.keys(): D[k] = E[k] -If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v -In either case, this is followed by: for k, v in F.items(): D[k] = v -""" + If E present and has a .keys() method, does: for k in E.keys(): D[k] = E[k] + If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v + In either case, this is followed by: for k, v in F.items(): D[k] = v + """ + @overload def update(self: SupportsGetItem[str, _VT], m: SupportsKeysAndGetItem[str, _VT], /, **kwargs: _VT) -> None: ... @overload @@ -1341,15 +1359,16 @@ TYPE_CHECKING: Final[bool] class IO(Generic[AnyStr]): """Generic base class for TextIO and BinaryIO. -This is an abstract, generic version of the return of open(). + This is an abstract, generic version of the return of open(). + + NOTE: This does not distinguish between the different possible + classes (text vs. binary, read vs. write vs. read/write, + append-only, unbuffered). The TextIO and BinaryIO subclasses + below capture the distinctions between text vs. binary, which is + pervasive in the interface; however we currently do not offer a + way to track the other distinctions in the type system. 
+ """ -NOTE: This does not distinguish between the different possible -classes (text vs. binary, read vs. write vs. read/write, -append-only, unbuffered). The TextIO and BinaryIO subclasses -below capture the distinctions between text vs. binary, which is -pervasive in the interface; however we currently do not offer a -way to track the other distinctions in the type system. -""" # At runtime these are all abstract properties, # but making them abstract in the stub is hugely disruptive, for not much gain. # See #8726 @@ -1412,15 +1431,15 @@ way to track the other distinctions in the type system. ) -> None: ... class BinaryIO(IO[bytes]): - """Typed version of the return of open() in binary mode. -""" + """Typed version of the return of open() in binary mode.""" + __slots__ = () @abstractmethod def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): - """Typed version of the return of open() in text mode. -""" + """Typed version of the return of open() in text mode.""" + # See comment regarding the @properties in the `IO` class __slots__ = () @property @@ -1463,34 +1482,34 @@ if sys.version_info >= (3, 14): ) -> dict[str, Any]: # AnnotationForm """Return type hints for an object. -This is often the same as obj.__annotations__, but it handles -forward references encoded as string literals and recursively replaces all -'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals and recursively replaces all + 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). -The argument may be a module, class, method, or function. The annotations -are returned as a dictionary. For classes, annotations include also -inherited members. + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. -TypeError is raised if the argument is not of a type that can contain -annotations, and an empty dictionary is returned if no annotations are -present. + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. -BEWARE -- the behavior of globalns and localns is counterintuitive -(unless you are familiar with how eval() and exec() work). The -search order is locals first, then globals. + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. -- If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. For classes, the search - order is globals first then locals. + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. For classes, the search + order is globals first then locals. -- If one dict argument is passed, it is used for both globals and - locals. + - If one dict argument is passed, it is used for both globals and + locals. -- If two dict arguments are passed, they specify globals and - locals, respectively. 
-""" + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ else: def get_type_hints( @@ -1501,70 +1520,71 @@ else: ) -> dict[str, Any]: # AnnotationForm """Return type hints for an object. -This is often the same as obj.__annotations__, but it handles -forward references encoded as string literals and recursively replaces all -'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals and recursively replaces all + 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). -The argument may be a module, class, method, or function. The annotations -are returned as a dictionary. For classes, annotations include also -inherited members. + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. -TypeError is raised if the argument is not of a type that can contain -annotations, and an empty dictionary is returned if no annotations are -present. + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. -BEWARE -- the behavior of globalns and localns is counterintuitive -(unless you are familiar with how eval() and exec() work). The -search order is locals first, then globals. + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. -- If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. For classes, the search - order is globals first then locals. + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. For classes, the search + order is globals first then locals. -- If one dict argument is passed, it is used for both globals and - locals. + - If one dict argument is passed, it is used for both globals and + locals. -- If two dict arguments are passed, they specify globals and - locals, respectively. -""" + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ def get_args(tp: Any) -> tuple[Any, ...]: # AnnotationForm """Get type arguments with all substitutions performed. -For unions, basic simplifications used by Union constructor are performed. + For unions, basic simplifications used by Union constructor are performed. 
-Examples:: + Examples:: - >>> T = TypeVar('T') - >>> assert get_args(Dict[str, int]) == (str, int) - >>> assert get_args(int) == () - >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) - >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - >>> assert get_args(Callable[[], T][int]) == ([], int) -""" + >>> T = TypeVar('T') + >>> assert get_args(Dict[str, int]) == (str, int) + >>> assert get_args(int) == () + >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) + >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + >>> assert get_args(Callable[[], T][int]) == ([], int) + """ if sys.version_info >= (3, 10): @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: """Get the unsubscripted version of a type. -This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, -Annotated, and others. Return None for unsupported types. + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, + Annotated, and others. Return None for unsupported types. -Examples:: + Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P + """ - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P -""" @overload def get_origin(tp: UnionType) -> type[UnionType]: ... @@ -1572,32 +1592,34 @@ Examples:: def get_origin(tp: GenericAlias) -> type: """Get the unsubscripted version of a type. -This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, -Annotated, and others. Return None for unsupported types. + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, + Annotated, and others. Return None for unsupported types. -Examples:: + Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P + """ - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P -""" @overload def get_origin(tp: Any) -> Any | None: ... # AnnotationForm @overload def cast(typ: type[_T], val: Any) -> _T: """Cast a value to a type. -This returns the value unchanged. 
To the type checker this -signals that the return value has the designated type, but at -runtime we intentionally don't check anything (we want this -to be as fast as possible). -""" + This returns the value unchanged. To the type checker this + signals that the return value has the designated type, but at + runtime we intentionally don't check anything (we want this + to be as fast as possible). + """ + @overload def cast(typ: str, val: Any) -> Any: ... @overload @@ -1607,56 +1629,59 @@ if sys.version_info >= (3, 11): def reveal_type(obj: _T, /) -> _T: """Ask a static type checker to reveal the inferred type of an expression. -When a static type checker encounters a call to ``reveal_type()``, -it will emit the inferred type of the argument:: + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: - x: int = 1 - reveal_type(x) + x: int = 1 + reveal_type(x) -Running a static type checker (e.g., mypy) on this example -will produce output similar to 'Revealed type is "builtins.int"'. + Running a static type checker (e.g., mypy) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns the argument unchanged. + """ -At runtime, the function prints the runtime type of the -argument and returns the argument unchanged. -""" def assert_never(arg: Never, /) -> Never: """Statically assert that a line of code is unreachable. -Example:: + Example:: - def int_or_str(arg: int | str) -> None: - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - assert_never(arg) + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) -If a type checker finds that a call to assert_never() is -reachable, it will emit an error. + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + """ -At runtime, this throws an exception when called. -""" def assert_type(val: _T, typ: Any, /) -> _T: # AnnotationForm """Ask a static type checker to confirm that the value is of the given type. -At runtime this does nothing: it returns the first argument unchanged with no -checks or side effects, no matter the actual type of the argument. + At runtime this does nothing: it returns the first argument unchanged with no + checks or side effects, no matter the actual type of the argument. -When a static type checker encounters a call to assert_type(), it -emits an error if the value is not of the specified type:: + When a static type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # OK + assert_type(name, int) # type checker error + """ - def greet(name: str) -> None: - assert_type(name, str) # OK - assert_type(name, int) # type checker error -""" def clear_overloads() -> None: - """Clear all overloads in the registry. -""" + """Clear all overloads in the registry.""" + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: - """Return all defined overloads for *func* as a sequence. 
-""" + """Return all defined overloads for *func* as a sequence.""" + def dataclass_transform( *, eq_default: bool = True, @@ -1668,66 +1693,66 @@ emits an error if the value is not of the specified type:: ) -> IdentityFunction: """Decorator to mark an object as providing dataclass-like behaviour. -The decorator can be applied to a function, class, or metaclass. + The decorator can be applied to a function, class, or metaclass. -Example usage with a decorator function:: + Example usage with a decorator function:: - @dataclass_transform() - def create_model[T](cls: type[T]) -> type[T]: - ... - return cls + @dataclass_transform() + def create_model[T](cls: type[T]) -> type[T]: + ... + return cls - @create_model - class CustomerModel: - id: int - name: str + @create_model + class CustomerModel: + id: int + name: str -On a base class:: + On a base class:: - @dataclass_transform() - class ModelBase: ... + @dataclass_transform() + class ModelBase: ... - class CustomerModel(ModelBase): - id: int - name: str + class CustomerModel(ModelBase): + id: int + name: str -On a metaclass:: + On a metaclass:: - @dataclass_transform() - class ModelMeta(type): ... + @dataclass_transform() + class ModelMeta(type): ... - class ModelBase(metaclass=ModelMeta): ... + class ModelBase(metaclass=ModelMeta): ... - class CustomerModel(ModelBase): - id: int - name: str + class CustomerModel(ModelBase): + id: int + name: str -The ``CustomerModel`` classes defined above will -be treated by type checkers similarly to classes created with -``@dataclasses.dataclass``. -For example, type checkers will assume these classes have -``__init__`` methods that accept ``id`` and ``name``. + The ``CustomerModel`` classes defined above will + be treated by type checkers similarly to classes created with + ``@dataclasses.dataclass``. + For example, type checkers will assume these classes have + ``__init__`` methods that accept ``id`` and ``name``. -The arguments to this decorator can be used to customize this behavior: -- ``eq_default`` indicates whether the ``eq`` parameter is assumed to be - ``True`` or ``False`` if it is omitted by the caller. -- ``order_default`` indicates whether the ``order`` parameter is - assumed to be True or False if it is omitted by the caller. -- ``kw_only_default`` indicates whether the ``kw_only`` parameter is - assumed to be True or False if it is omitted by the caller. -- ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. -- ``field_specifiers`` specifies a static list of supported classes - or functions that describe fields, similar to ``dataclasses.field()``. -- Arbitrary other keyword arguments are accepted in order to allow for - possible future extensions. + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + ``True`` or ``False`` if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. 
+ - Arbitrary other keyword arguments are accepted in order to allow for + possible future extensions. -At runtime, this decorator records its arguments in the -``__dataclass_transform__`` attribute on the decorated object. -It has no other runtime effect. + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + It has no other runtime effect. -See PEP 681 for more details. -""" + See PEP 681 for more details. + """ # Type constructors @@ -1735,23 +1760,24 @@ See PEP 681 for more details. class NamedTuple(tuple[Any, ...]): """Typed version of namedtuple. -Usage:: + Usage:: - class Employee(NamedTuple): - name: str - id: int + class Employee(NamedTuple): + name: str + id: int -This is equivalent to:: + This is equivalent to:: - Employee = collections.namedtuple('Employee', ['name', 'id']) + Employee = collections.namedtuple('Employee', ['name', 'id']) -The resulting class has an extra __annotations__ attribute, giving a -dict that maps field names to types. (The field names are also in -the _fields attribute, which is part of the namedtuple API.) -An alternative equivalent functional syntax is also accepted:: + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) -""" _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] # __orig_bases__ sometimes exists on <3.12, but not consistently @@ -1802,14 +1828,14 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def values(self) -> dict_values[str, object]: ... @overload def __or__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload def __ror__(self, value: typing_extensions.Self, /) -> typing_extensions.Self: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of __or__ and __ior__ @@ -1829,29 +1855,29 @@ if sys.version_info >= (3, 14): ) -> Any: # AnnotationForm """Evaluate a forward reference as a type hint. -This is similar to calling the ForwardRef.evaluate() method, -but unlike that method, evaluate_forward_ref() also -recursively evaluates forward references nested within the type hint. - -*forward_ref* must be an instance of ForwardRef. *owner*, if given, -should be the object that holds the annotations that the forward reference -derived from, such as a module, class object, or function. It is used to -infer the namespaces to use for looking up names. *globals* and *locals* -can also be explicitly given to provide the global and local namespaces. -*type_params* is a tuple of type parameters that are in scope when -evaluating the forward reference. This parameter should be provided (though -it may be an empty tuple) if *owner* is not given and the forward reference -does not already have an owner set. *format* specifies the format of the -annotation and is a member of the annotationlib.Format enum, defaulting to -VALUE. 
+ This is similar to calling the ForwardRef.evaluate() method, + but unlike that method, evaluate_forward_ref() also + recursively evaluates forward references nested within the type hint. + + *forward_ref* must be an instance of ForwardRef. *owner*, if given, + should be the object that holds the annotations that the forward reference + derived from, such as a module, class object, or function. It is used to + infer the namespaces to use for looking up names. *globals* and *locals* + can also be explicitly given to provide the global and local namespaces. + *type_params* is a tuple of type parameters that are in scope when + evaluating the forward reference. This parameter should be provided (though + it may be an empty tuple) if *owner* is not given and the forward reference + does not already have an owner set. *format* specifies the format of the + annotation and is a member of the annotationlib.Format enum, defaulting to + VALUE. -""" + """ else: @final class ForwardRef(_Final): - """Internal wrapper to hold a forward reference. -""" + """Internal wrapper to hold a forward reference.""" + __slots__ = ( "__forward_arg__", "__forward_code__", @@ -1914,27 +1940,27 @@ if sys.version_info >= (3, 10): def is_typeddict(tp: object) -> bool: """Check if an annotation is a TypedDict class. -For example:: - - >>> from typing import TypedDict - >>> class Film(TypedDict): - ... title: str - ... year: int - ... - >>> is_typeddict(Film) - True - >>> is_typeddict(dict) - False -""" + For example:: + + >>> from typing import TypedDict + >>> class Film(TypedDict): + ... title: str + ... year: int + ... + >>> is_typeddict(Film) + True + >>> is_typeddict(dict) + False + """ def _type_repr(obj: object) -> str: """Return the repr() of an object, special-casing types (internal helper). -If obj is a type, we return a shorter version than the default -type.__repr__, based on the module and qualified name, which is -typically enough to uniquely identify a type. For everything -else, we fall back on repr(obj). -""" + If obj is a type, we return a shorter version than the default + type.__repr__, based on the module and qualified name, which is + typically enough to uniquely identify a type. For everything + else, we fall back on repr(obj). + """ if sys.version_info >= (3, 12): _TypeParameter: typing_extensions.TypeAlias = ( @@ -1949,53 +1975,55 @@ if sys.version_info >= (3, 12): def override(method: _F, /) -> _F: """Indicate that a method is intended to override a method in a base class. -Usage:: + Usage:: - class Base: - def method(self) -> None: - pass + class Base: + def method(self) -> None: + pass - class Child(Base): - @override - def method(self) -> None: - super().method() + class Child(Base): + @override + def method(self) -> None: + super().method() -When this decorator is applied to a method, the type checker will -validate that it overrides a method or attribute with the same name on a -base class. This helps prevent bugs that may occur when a base class is -changed without an equivalent change to a child class. + When this decorator is applied to a method, the type checker will + validate that it overrides a method or attribute with the same name on a + base class. This helps prevent bugs that may occur when a base class is + changed without an equivalent change to a child class. -There is no runtime checking of this property. The decorator attempts to -set the ``__override__`` attribute to ``True`` on the decorated object to -allow runtime introspection. 
+ There is no runtime checking of this property. The decorator attempts to + set the ``__override__`` attribute to ``True`` on the decorated object to + allow runtime introspection. + + See PEP 698 for details. + """ -See PEP 698 for details. -""" @final class TypeAliasType: """Type alias. -Type aliases are created through the type statement:: + Type aliases are created through the type statement:: - type Alias = int + type Alias = int -In this example, Alias and int will be treated equivalently by static -type checkers. + In this example, Alias and int will be treated equivalently by static + type checkers. -At runtime, Alias is an instance of TypeAliasType. The __name__ -attribute holds the name of the type alias. The value of the type alias -is stored in the __value__ attribute. It is evaluated lazily, so the -value is computed only if the attribute is accessed. + At runtime, Alias is an instance of TypeAliasType. The __name__ + attribute holds the name of the type alias. The value of the type alias + is stored in the __value__ attribute. It is evaluated lazily, so the + value is computed only if the attribute is accessed. -Type aliases can also be generic:: + Type aliases can also be generic:: - type ListOrSet[T] = list[T] | set[T] + type ListOrSet[T] = list[T] | set[T] -In this case, the type parameters of the alias are stored in the -__type_params__ attribute. + In this case, the type parameters of the alias are stored in the + __type_params__ attribute. + + See PEP 695 for more information. + """ -See PEP 695 for more information. -""" def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ... @property def __value__(self) -> Any: ... # AnnotationForm @@ -2009,14 +2037,13 @@ See PEP 695 for more information. @property def __module__(self) -> str | None: ... # type: ignore[override] def __getitem__(self, parameters: Any, /) -> GenericAlias: # AnnotationForm - """Return self[key]. -""" + """Return self[key].""" + def __or__(self, right: Any, /) -> _SpecialForm: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, left: Any, /) -> _SpecialForm: - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 14): @property def evaluate_value(self) -> EvaluateFunc: ... @@ -2025,31 +2052,33 @@ if sys.version_info >= (3, 13): def is_protocol(tp: type, /) -> bool: """Return True if the given type is a Protocol. -Example:: + Example:: + + >>> from typing import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ - >>> from typing import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False -""" def get_protocol_members(tp: type, /) -> frozenset[str]: """Return the set of members defined in a Protocol. -Example:: + Example:: - >>> from typing import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) == frozenset({'a', 'b'}) - True + >>> from typing import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) == frozenset({'a', 'b'}) + True + + Raise a TypeError for arguments that are not Protocols. + """ -Raise a TypeError for arguments that are not Protocols. -""" @final @type_check_only class _NoDefaultType: ... 
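As a quick illustration of the runtime introspection helpers whose docstrings appear in the stubs above, here is a minimal sketch (assuming CPython 3.10+ for typing.is_typeddict; the other helpers exist from 3.8). It is illustrative only and not part of the vendored stubs or of this patch:

# Minimal sketch: runtime behaviour of get_origin()/get_args()/is_typeddict(),
# mirroring the doctest-style examples embedded in the typing.pyi docstrings.
# Assumes Python 3.10+ (typing.is_typeddict).
from typing import Dict, List, Tuple, TypedDict, Union, get_args, get_origin, is_typeddict

class Film(TypedDict):
    title: str
    year: int

# get_origin() returns the unsubscripted form; get_args() the substituted arguments.
assert get_origin(Dict[str, int]) is dict
assert get_args(Dict[str, int]) == (str, int)
assert get_origin(Union[int, str]) is Union
assert get_origin(List[Tuple[int, int]]) is list
assert get_args(int) == ()

# is_typeddict() distinguishes TypedDict classes from ordinary dicts.
assert is_typeddict(Film)
assert not is_typeddict(dict)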
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi index bc57ed7a56bc5..1e81194eadcac 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/typing_extensions.pyi @@ -217,22 +217,22 @@ Protocol: _SpecialForm def runtime_checkable(cls: _TC) -> _TC: """Mark a protocol class as a runtime protocol. -Such protocol can be used with isinstance() and issubclass(). -Raise TypeError if applied to a non-protocol class. -This allows a simple-minded structural check very similar to -one trick ponies in collections.abc such as Iterable. + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. -For example:: + For example:: - @runtime_checkable - class Closable(Protocol): - def close(self): ... + @runtime_checkable + class Closable(Protocol): + def close(self): ... - assert isinstance(open('/some/file'), Closable) + assert isinstance(open('/some/file'), Closable) -Warning: this will check only the presence of the required methods, -not their type signatures! -""" + Warning: this will check only the presence of the required methods, + not their type signatures! + """ # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable @@ -241,50 +241,51 @@ Final: _SpecialForm def final(f: _F) -> _F: """Decorator to indicate final methods and final classes. -Use this decorator to indicate to type checkers that the decorated -method cannot be overridden, and decorated class cannot be subclassed. + Use this decorator to indicate to type checkers that the decorated + method cannot be overridden, and decorated class cannot be subclassed. + + For example:: -For example:: + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... - class Base: @final - def done(self) -> None: + class Leaf: ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker + class Other(Leaf): # Error reported by type checker ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - -There is no runtime checking of these properties. The decorator -attempts to set the ``__final__`` attribute to ``True`` on the decorated -object to allow runtime introspection. -""" + There is no runtime checking of these properties. The decorator + attempts to set the ``__final__`` attribute to ``True`` on the decorated + object to allow runtime introspection. + """ + def disjoint_base(cls: _TC) -> _TC: """This decorator marks a class as a disjoint base. -Child classes of a disjoint base cannot inherit from other disjoint bases that are -not parent classes of the disjoint base. + Child classes of a disjoint base cannot inherit from other disjoint bases that are + not parent classes of the disjoint base. -For example: + For example: - @disjoint_base - class Disjoint1: pass + @disjoint_base + class Disjoint1: pass - @disjoint_base - class Disjoint2: pass + @disjoint_base + class Disjoint2: pass - class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error + class Disjoint3(Disjoint1, Disjoint2): pass # Type checker error -Type checkers can use knowledge of disjoint bases to detect unreachable code -and determine when two types can overlap. 
+ Type checkers can use knowledge of disjoint bases to detect unreachable code + and determine when two types can overlap. -See PEP 800. -""" + See PEP 800. + """ Literal: _SpecialForm @@ -317,14 +318,14 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def __delitem__(self, k: Never) -> None: ... @overload def __or__(self, value: Self, /) -> Self: - """Return self|value. -""" + """Return self|value.""" + @overload def __or__(self, value: dict[str, Any], /) -> dict[str, object]: ... @overload def __ror__(self, value: Self, /) -> Self: - """Return value|self. -""" + """Return value|self.""" + @overload def __ror__(self, value: dict[str, Any], /) -> dict[str, object]: ... # supposedly incompatible definitions of `__ior__` and `__or__`: @@ -374,58 +375,59 @@ else: def get_args(tp: AnnotationForm) -> tuple[AnnotationForm, ...]: """Get type arguments with all substitutions performed. -For unions, basic simplifications used by Union constructor are performed. + For unions, basic simplifications used by Union constructor are performed. -Examples:: + Examples:: - >>> T = TypeVar('T') - >>> assert get_args(Dict[str, int]) == (str, int) - >>> assert get_args(int) == () - >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) - >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - >>> assert get_args(Callable[[], T][int]) == ([], int) -""" + >>> T = TypeVar('T') + >>> assert get_args(Dict[str, int]) == (str, int) + >>> assert get_args(int) == () + >>> assert get_args(Union[int, Union[T, int], str][int]) == (int, str) + >>> assert get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + >>> assert get_args(Callable[[], T][int]) == ([], int) + """ if sys.version_info >= (3, 10): @overload def get_origin(tp: UnionType) -> type[UnionType]: """Get the unsubscripted version of a type. -This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, -Annotated, and others. Return None for unsupported types. + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, + Annotated, and others. Return None for unsupported types. -Examples:: + Examples:: - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P -""" + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P + """ @overload def get_origin(tp: GenericAlias) -> type: """Get the unsubscripted version of a type. -This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, -Annotated, and others. Return None for unsupported types. 
- -Examples:: - - >>> P = ParamSpec('P') - >>> assert get_origin(Literal[42]) is Literal - >>> assert get_origin(int) is None - >>> assert get_origin(ClassVar[int]) is ClassVar - >>> assert get_origin(Generic) is Generic - >>> assert get_origin(Generic[T]) is Generic - >>> assert get_origin(Union[T, int]) is Union - >>> assert get_origin(List[Tuple[T, T]][int]) is list - >>> assert get_origin(P.args) is P -""" + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar, + Annotated, and others. Return None for unsupported types. + + Examples:: + + >>> P = ParamSpec('P') + >>> assert get_origin(Literal[42]) is Literal + >>> assert get_origin(int) is None + >>> assert get_origin(ClassVar[int]) is ClassVar + >>> assert get_origin(Generic) is Generic + >>> assert get_origin(Generic[T]) is Generic + >>> assert get_origin(Union[T, int]) is Union + >>> assert get_origin(List[Tuple[T, T]][int]) is list + >>> assert get_origin(P.args) is P + """ + @overload def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... @overload @@ -458,6 +460,7 @@ else: This type is meant for runtime introspection and has no special meaning to static type checkers. """ + @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... @@ -475,6 +478,7 @@ else: This type is meant for runtime introspection and has no special meaning to static type checkers. """ + @property def __origin__(self) -> ParamSpec: ... def __init__(self, origin: ParamSpec) -> None: ... @@ -531,6 +535,7 @@ else: argument and returns it unchanged. """ + def assert_never(arg: Never, /) -> Never: """Assert to the type checker that a line of code is unreachable. @@ -551,6 +556,7 @@ else: At runtime, this throws an exception when called. """ + def assert_type(val: _T, typ: AnnotationForm, /) -> _T: """Assert (to the type checker) that the value is of the given type. @@ -564,13 +570,12 @@ else: At runtime this returns the first argument unchanged and otherwise does nothing. """ + def clear_overloads() -> None: - """Clear all overloads in the registry. -""" - def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: - """Return all defined overloads for *func* as a sequence. -""" + """Clear all overloads in the registry.""" + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: + """Return all defined overloads for *func* as a sequence.""" Required: _SpecialForm NotRequired: _SpecialForm LiteralString: _SpecialForm @@ -667,6 +672,7 @@ else: Employee = NamedTuple('Employee', [('name', str), ('id', int)]) """ + _field_defaults: ClassVar[dict[str, Any]] _fields: ClassVar[tuple[str, ...]] __orig_bases__: ClassVar[tuple[Any, ...]] @@ -692,6 +698,7 @@ else: name_by_id(UserId(42)) # OK num = UserId(5) + 1 # type: int """ + def __init__(self, name: str, tp: AnnotationForm) -> None: ... def __call__(self, obj: _T, /) -> _T: ... __supertype__: type | NewType @@ -739,6 +746,7 @@ else: See PEP 698 for details. """ + def get_original_bases(cls: type, /) -> tuple[Any, ...]: """Return the class's "original" bases prior to modification by `__mro_entries__`. @@ -760,7 +768,6 @@ else: assert get_original_bases(Spam) == (TypedDict,) assert get_original_bases(int) == (object,) """ - # mypy and pyright object to this being both ABC and Protocol. # At runtime it inherits from ABC and is not a Protocol, but it is on the # allowlist for use as a Protocol. @@ -784,38 +791,39 @@ else: classes. It is useful primarily for static checks. 
""" + # Not actually a Protocol at runtime; see # https://github.com/python/typeshed/issues/10224 for why we're defining it this way def __buffer__(self, flags: int, /) -> memoryview: ... @runtime_checkable class SupportsInt(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __int__. -""" + """An ABC with one abstract method __int__.""" + __slots__ = () @abc.abstractmethod def __int__(self) -> int: ... @runtime_checkable class SupportsFloat(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __float__. -""" + """An ABC with one abstract method __float__.""" + __slots__ = () @abc.abstractmethod def __float__(self) -> float: ... @runtime_checkable class SupportsComplex(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __complex__. -""" + """An ABC with one abstract method __complex__.""" + __slots__ = () @abc.abstractmethod def __complex__(self) -> complex: ... @runtime_checkable class SupportsBytes(Protocol, metaclass=abc.ABCMeta): - """An ABC with one abstract method __bytes__. -""" + """An ABC with one abstract method __bytes__.""" + __slots__ = () @abc.abstractmethod def __bytes__(self) -> bytes: ... @@ -831,6 +839,7 @@ else: """ An ABC with one abstract method __abs__ that is covariant in its return type. """ + __slots__ = () @abc.abstractmethod def __abs__(self) -> _T_co: ... @@ -840,6 +849,7 @@ else: """ An ABC with one abstract method __round__ that is covariant in its return type. """ + __slots__ = () @overload @abc.abstractmethod @@ -855,28 +865,29 @@ else: class Reader(Protocol[_T_co]): """Protocol for simple I/O reader instances. -This protocol only supports blocking I/O. -""" + This protocol only supports blocking I/O. + """ + __slots__ = () @abc.abstractmethod def read(self, size: int = ..., /) -> _T_co: """Read data from the input stream and return it. -If *size* is specified, at most *size* items (bytes/characters) will be -read. -""" + If *size* is specified, at most *size* items (bytes/characters) will be + read. + """ @runtime_checkable class Writer(Protocol[_T_contra]): """Protocol for simple I/O writer instances. -This protocol only supports blocking I/O. -""" + This protocol only supports blocking I/O. + """ + __slots__ = () @abc.abstractmethod def write(self, data: _T_contra, /) -> int: - """Write *data* to the output stream and return the number of items written. -""" + """Write *data* to the output stream and return the number of items written.""" if sys.version_info >= (3, 13): from types import CapsuleType as CapsuleType @@ -906,6 +917,7 @@ else: >>> is_protocol(int) False """ + def get_protocol_members(tp: type, /) -> frozenset[str]: """Return the set of members defined in a Protocol. @@ -920,6 +932,7 @@ else: Raise a TypeError for arguments that are not Protocols. """ + @final @type_check_only class _NoDefaultType: ... @@ -928,14 +941,14 @@ else: @final class CapsuleType: """Capsule objects let you wrap a C "void *" pointer in a Python -object. They're a way of passing data through the Python interpreter -without creating your own custom type. + object. They're a way of passing data through the Python interpreter + without creating your own custom type. -Capsules are used for communication between extension modules. -They provide a way for an extension module to export a C interface -to other extension modules, so that extension modules can use the -Python import mechanism to link to one another. -""" + Capsules are used for communication between extension modules. 
+ They provide a way for an extension module to export a C interface + to other extension modules, so that extension modules can use the + Python import mechanism to link to one another. + """ class deprecated: """Indicate that a class, function or overload is deprecated. @@ -979,6 +992,7 @@ Python import mechanism to link to one another. See PEP 702 for details. """ + message: LiteralString category: type[Warning] | None stacklevel: int @@ -987,8 +1001,8 @@ Python import mechanism to link to one another. @final class TypeVar: - """Type variable. -""" + """Type variable.""" + @property def __name__(self) -> str: ... @property @@ -1017,18 +1031,17 @@ Python import mechanism to link to one another. def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self. -""" + """Return value|self.""" if sys.version_info >= (3, 11): def __typing_subst__(self, arg: Any) -> Any: ... @final class ParamSpec: - """Parameter specification. -""" + """Parameter specification.""" + @property def __name__(self) -> str: ... @property @@ -1058,16 +1071,15 @@ Python import mechanism to link to one another. def __typing_prepare_subst__(self, alias: Any, args: Any) -> tuple[Any, ...]: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: - """Return self|value. -""" + """Return self|value.""" + def __ror__(self, left: Any) -> _SpecialForm: - """Return value|self. -""" + """Return value|self.""" @final class TypeVarTuple: - """Type variable tuple. -""" + """Type variable tuple.""" + @property def __name__(self) -> str: ... @property @@ -1088,30 +1100,31 @@ else: class TypeAliasType: """Create named, parameterized type aliases. -This provides a backport of the new `type` statement in Python 3.12: + This provides a backport of the new `type` statement in Python 3.12: - type ListOrSet[T] = list[T] | set[T] + type ListOrSet[T] = list[T] | set[T] -is equivalent to: + is equivalent to: - T = TypeVar("T") - ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) -The name ListOrSet can then be used as an alias for the type it refers to. + The name ListOrSet can then be used as an alias for the type it refers to. -The type_params argument should contain all the type parameters used -in the value of the type alias. If the alias is not generic, this -argument is omitted. + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. -Static type checkers should only support type aliases declared using -TypeAliasType that follow these rules: + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: -- The first argument (the name) must be a string literal. -- The TypeAliasType instance must be immediately assigned to a variable - of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, - as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. 
(For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + + """ -""" def __init__( self, name: str, value: AnnotationForm, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () ) -> None: ... @@ -1129,16 +1142,16 @@ TypeAliasType that follow these rules: @property def __module__(self) -> str | None: # type: ignore[override] """str(object='') -> str -str(bytes_or_buffer[, encoding[, errors]]) -> str - -Create a new string object from the given object. If encoding or -errors is specified, then the object must expose a data buffer -that will be decoded using the given encoding and error handler. -Otherwise, returns the result of object.__str__() (if defined) -or repr(object). -encoding defaults to 'utf-8'. -errors defaults to 'strict'. -""" + str(bytes_or_buffer[, encoding[, errors]]) -> str + + Create a new string object from the given object. If encoding or + errors is specified, then the object must expose a data buffer + that will be decoded using the given encoding and error handler. + Otherwise, returns the result of object.__str__() (if defined) + or repr(object). + encoding defaults to 'utf-8'. + errors defaults to 'strict'. + """ # Returns typing._GenericAlias, which isn't stubbed. def __getitem__(self, parameters: Incomplete | tuple[Incomplete, ...]) -> AnnotationForm: ... def __init_subclass__(cls, *args: Unused, **kwargs: Unused) -> NoReturn: ... @@ -1149,21 +1162,22 @@ errors defaults to 'strict'. # PEP 727 class Doc: """Define the documentation of a type annotation using ``Annotated``, to be - used in class attributes, function and method parameters, return values, - and variables. + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. -The value should be a positional-only string literal to allow static tools -like editors and documentation generators to use it. + This complements docstrings. -This complements docstrings. + The string value passed is available in the attribute ``documentation``. -The string value passed is available in the attribute ``documentation``. + Example:: -Example:: + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... + """ - >>> from typing_extensions import Annotated, Doc - >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... -""" documentation: str def __init__(self, documentation: str, /) -> None: ... def __hash__(self) -> int: ... @@ -1185,8 +1199,8 @@ if sys.version_info >= (3, 14): from annotationlib import Format as Format, get_annotations as get_annotations, type_repr as type_repr else: class Format(enum.IntEnum): - """An enumeration. -""" + """An enumeration.""" + VALUE = 1 VALUE_WITH_FAKE_GLOBALS = 2 FORWARDREF = 3 @@ -1203,39 +1217,40 @@ else: ) -> dict[str, str]: """Compute the annotations dict for an object. -obj may be a callable, class, or module. -Passing in an object of any other type raises TypeError. + obj may be a callable, class, or module. + Passing in an object of any other type raises TypeError. -Returns a dict. get_annotations() returns a new dict every time -it's called; calling it twice on the same object will return two -different but equivalent dicts. + Returns a dict. 
get_annotations() returns a new dict every time + it's called; calling it twice on the same object will return two + different but equivalent dicts. -This is a backport of `inspect.get_annotations`, which has been -in the standard library since Python 3.10. See the standard library -documentation for more: + This is a backport of `inspect.get_annotations`, which has been + in the standard library since Python 3.10. See the standard library + documentation for more: - https://docs.python.org/3/library/inspect.html#inspect.get_annotations + https://docs.python.org/3/library/inspect.html#inspect.get_annotations -This backport adds the *format* argument introduced by PEP 649. The -three formats supported are: -* VALUE: the annotations are returned as-is. This is the default and - it is compatible with the behavior on previous Python versions. -* FORWARDREF: return annotations as-is if possible, but replace any - undefined names with ForwardRef objects. The implementation proposed by - PEP 649 relies on language changes that cannot be backported; the - typing-extensions implementation simply returns the same result as VALUE. -* STRING: return annotations as strings, in a format close to the original - source. Again, this behavior cannot be replicated directly in a backport. - As an approximation, typing-extensions retrieves the annotations under - VALUE semantics and then stringifies them. + This backport adds the *format* argument introduced by PEP 649. The + three formats supported are: + * VALUE: the annotations are returned as-is. This is the default and + it is compatible with the behavior on previous Python versions. + * FORWARDREF: return annotations as-is if possible, but replace any + undefined names with ForwardRef objects. The implementation proposed by + PEP 649 relies on language changes that cannot be backported; the + typing-extensions implementation simply returns the same result as VALUE. + * STRING: return annotations as strings, in a format close to the original + source. Again, this behavior cannot be replicated directly in a backport. + As an approximation, typing-extensions retrieves the annotations under + VALUE semantics and then stringifies them. -The purpose of this backport is to allow users who would like to use -FORWARDREF or STRING semantics once PEP 649 is implemented, but who also -want to support earlier Python versions, to simply write: + The purpose of this backport is to allow users who would like to use + FORWARDREF or STRING semantics once PEP 649 is implemented, but who also + want to support earlier Python versions, to simply write: - typing_extensions.get_annotations(obj, format=Format.FORWARDREF) + typing_extensions.get_annotations(obj, format=Format.FORWARDREF) + + """ -""" @overload def get_annotations( obj: Any, # any object with __annotations__ or __annotate__ @@ -1267,26 +1282,27 @@ want to support earlier Python versions, to simply write: ) -> str: """Evaluate a forward reference as a type hint. -This is similar to calling the ForwardRef.evaluate() method, -but unlike that method, evaluate_forward_ref() also: - -* Recursively evaluates forward references nested within the type hint. -* Rejects certain objects that are not valid type hints. -* Replaces type hints that evaluate to None with types.NoneType. -* Supports the *FORWARDREF* and *STRING* formats. - -*forward_ref* must be an instance of ForwardRef. 
*owner*, if given, -should be the object that holds the annotations that the forward reference -derived from, such as a module, class object, or function. It is used to -infer the namespaces to use for looking up names. *globals* and *locals* -can also be explicitly given to provide the global and local namespaces. -*type_params* is a tuple of type parameters that are in scope when -evaluating the forward reference. This parameter must be provided (though -it may be an empty tuple) if *owner* is not given and the forward reference -does not already have an owner set. *format* specifies the format of the -annotation and is a member of the annotationlib.Format enum. - -""" + This is similar to calling the ForwardRef.evaluate() method, + but unlike that method, evaluate_forward_ref() also: + + * Recursively evaluates forward references nested within the type hint. + * Rejects certain objects that are not valid type hints. + * Replaces type hints that evaluate to None with types.NoneType. + * Supports the *FORWARDREF* and *STRING* formats. + + *forward_ref* must be an instance of ForwardRef. *owner*, if given, + should be the object that holds the annotations that the forward reference + derived from, such as a module, class object, or function. It is used to + infer the namespaces to use for looking up names. *globals* and *locals* + can also be explicitly given to provide the global and local namespaces. + *type_params* is a tuple of type parameters that are in scope when + evaluating the forward reference. This parameter must be provided (though + it may be an empty tuple) if *owner* is not given and the forward reference + does not already have an owner set. *format* specifies the format of the + annotation and is a member of the annotationlib.Format enum. + + """ + @overload def evaluate_forward_ref( forward_ref: ForwardRef, @@ -1312,21 +1328,22 @@ annotation and is a member of the annotationlib.Format enum. def type_repr(value: object) -> str: """Convert a Python value to a format suitable for use with the STRING format. -This is intended as a helper for tools that support the STRING format but do -not have access to the code that originally produced the annotations. It uses -repr() for most objects. + This is intended as a helper for tools that support the STRING format but do + not have access to the code that originally produced the annotations. It uses + repr() for most objects. -""" + """ # PEP 661 class Sentinel: """Create a unique sentinel object. -*name* should be the name of the variable to which the return value shall be assigned. + *name* should be the name of the variable to which the return value shall be assigned. + + *repr*, if supplied, will be used for the repr of the sentinel object. + If not provided, "" will be used. + """ -*repr*, if supplied, will be used for the repr of the sentinel object. -If not provided, "" will be used. -""" def __init__(self, name: str, repr: str | None = None) -> None: ... if sys.version_info >= (3, 14): def __or__(self, other: Any) -> UnionType: ... 
# other can be any type form legal for unions diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi index 5144d81a3e830..f0ef68d8478f8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unicodedata.pyi @@ -6,6 +6,7 @@ this database is based on the UnicodeData.txt file version The module uses the same names and symbols as defined by the UnicodeData File Format 16.0.0. """ + import sys from _typeshed import ReadOnlyBuffer from typing import Any, Final, Literal, TypeVar, final, overload @@ -24,86 +25,97 @@ _NormalizationForm: TypeAlias = Literal["NFC", "NFD", "NFKC", "NFKD"] def bidirectional(chr: str, /) -> str: """Returns the bidirectional class assigned to the character chr as string. -If no such value is defined, an empty string is returned. -""" + If no such value is defined, an empty string is returned. + """ + def category(chr: str, /) -> str: - """Returns the general category assigned to the character chr as string. -""" + """Returns the general category assigned to the character chr as string.""" + def combining(chr: str, /) -> int: """Returns the canonical combining class assigned to the character chr as integer. -Returns 0 if no combining class is defined. -""" + Returns 0 if no combining class is defined. + """ + @overload def decimal(chr: str, /) -> int: """Converts a Unicode character into its equivalent decimal value. -Returns the decimal value assigned to the character chr as integer. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the decimal value assigned to the character chr as integer. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def decimal(chr: str, default: _T, /) -> int | _T: ... def decomposition(chr: str, /) -> str: """Returns the character decomposition mapping assigned to the character chr as string. -An empty string is returned in case no such mapping is defined. -""" + An empty string is returned in case no such mapping is defined. + """ + @overload def digit(chr: str, /) -> int: """Converts a Unicode character into its equivalent digit value. -Returns the digit value assigned to the character chr as integer. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the digit value assigned to the character chr as integer. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def digit(chr: str, default: _T, /) -> int | _T: ... _EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] def east_asian_width(chr: str, /) -> _EastAsianWidth: - """Returns the east asian width assigned to the character chr as string. -""" + """Returns the east asian width assigned to the character chr as string.""" + def is_normalized(form: _NormalizationForm, unistr: str, /) -> bool: """Return whether the Unicode string unistr is in the normal form 'form'. -Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. -""" + Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. + """ + def lookup(name: str | ReadOnlyBuffer, /) -> str: """Look up character by name. -If a character with the given name is found, return the -corresponding character. If not found, KeyError is raised. -""" + If a character with the given name is found, return the + corresponding character. 
If not found, KeyError is raised. + """ + def mirrored(chr: str, /) -> int: """Returns the mirrored property assigned to the character chr as integer. -Returns 1 if the character has been identified as a "mirrored" -character in bidirectional text, 0 otherwise. -""" + Returns 1 if the character has been identified as a "mirrored" + character in bidirectional text, 0 otherwise. + """ + @overload def name(chr: str, /) -> str: """Returns the name assigned to the character chr as a string. -If no name is defined, default is returned, or, if not given, -ValueError is raised. -""" + If no name is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def name(chr: str, default: _T, /) -> str | _T: ... def normalize(form: _NormalizationForm, unistr: str, /) -> str: """Return the normal form 'form' for the Unicode string unistr. -Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. -""" + Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. + """ + @overload def numeric(chr: str, /) -> float: """Converts a Unicode character into its equivalent numeric value. -Returns the numeric value assigned to the character chr as float. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the numeric value assigned to the character chr as float. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def numeric(chr: str, default: _T, /) -> float | _T: ... @final @@ -114,82 +126,93 @@ class UCD: def bidirectional(self, chr: str, /) -> str: """Returns the bidirectional class assigned to the character chr as string. -If no such value is defined, an empty string is returned. -""" + If no such value is defined, an empty string is returned. + """ + def category(self, chr: str, /) -> str: - """Returns the general category assigned to the character chr as string. -""" + """Returns the general category assigned to the character chr as string.""" + def combining(self, chr: str, /) -> int: """Returns the canonical combining class assigned to the character chr as integer. -Returns 0 if no combining class is defined. -""" + Returns 0 if no combining class is defined. + """ + @overload def decimal(self, chr: str, /) -> int: """Converts a Unicode character into its equivalent decimal value. -Returns the decimal value assigned to the character chr as integer. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the decimal value assigned to the character chr as integer. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def decimal(self, chr: str, default: _T, /) -> int | _T: ... def decomposition(self, chr: str, /) -> str: """Returns the character decomposition mapping assigned to the character chr as string. -An empty string is returned in case no such mapping is defined. -""" + An empty string is returned in case no such mapping is defined. + """ + @overload def digit(self, chr: str, /) -> int: """Converts a Unicode character into its equivalent digit value. -Returns the digit value assigned to the character chr as integer. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the digit value assigned to the character chr as integer. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def digit(self, chr: str, default: _T, /) -> int | _T: ... 
def east_asian_width(self, chr: str, /) -> _EastAsianWidth: - """Returns the east asian width assigned to the character chr as string. -""" + """Returns the east asian width assigned to the character chr as string.""" + def is_normalized(self, form: _NormalizationForm, unistr: str, /) -> bool: """Return whether the Unicode string unistr is in the normal form 'form'. -Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. -""" + Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. + """ + def lookup(self, name: str | ReadOnlyBuffer, /) -> str: """Look up character by name. -If a character with the given name is found, return the -corresponding character. If not found, KeyError is raised. -""" + If a character with the given name is found, return the + corresponding character. If not found, KeyError is raised. + """ + def mirrored(self, chr: str, /) -> int: """Returns the mirrored property assigned to the character chr as integer. -Returns 1 if the character has been identified as a "mirrored" -character in bidirectional text, 0 otherwise. -""" + Returns 1 if the character has been identified as a "mirrored" + character in bidirectional text, 0 otherwise. + """ + @overload def name(self, chr: str, /) -> str: """Returns the name assigned to the character chr as a string. -If no name is defined, default is returned, or, if not given, -ValueError is raised. -""" + If no name is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def name(self, chr: str, default: _T, /) -> str | _T: ... def normalize(self, form: _NormalizationForm, unistr: str, /) -> str: """Return the normal form 'form' for the Unicode string unistr. -Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. -""" + Valid values for form are 'NFC', 'NFKC', 'NFD', and 'NFKD'. + """ + @overload def numeric(self, chr: str, /) -> float: """Converts a Unicode character into its equivalent numeric value. -Returns the numeric value assigned to the character chr as float. -If no such value is defined, default is returned, or, if not given, -ValueError is raised. -""" + Returns the numeric value assigned to the character chr as float. + If no such value is defined, default is returned, or, if not given, + ValueError is raised. + """ + @overload def numeric(self, chr: str, default: _T, /) -> float | _T: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi index 61eed4a08d318..0d0cf6b52855d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/__init__.pyi @@ -43,6 +43,7 @@ PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
""" + import sys from unittest.async_case import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi index 06f1e660f9363..dddae531323d8 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/_log.pyi @@ -7,14 +7,14 @@ from unittest.case import TestCase, _BaseTestCaseContext _L = TypeVar("_L", None, _LoggingWatcher) class _LoggingWatcher(NamedTuple): - """_LoggingWatcher(records, output) -""" + """_LoggingWatcher(records, output)""" + records: list[logging.LogRecord] output: list[str] class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): - """A context manager for assertLogs() and assertNoLogs() -""" + """A context manager for assertLogs() and assertNoLogs()""" + LOGGING_FORMAT: ClassVar[str] logger_name: str level: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi index 2fcfa8690cda1..07dc69509e045 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/async_case.pyi @@ -23,8 +23,8 @@ class IsolatedAsyncioTestCase(TestCase): async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: """Enters the supplied asynchronous context manager. -If successful, also adds its __aexit__ method as a cleanup -function and returns the result of the __aenter__ method. -""" + If successful, also adds its __aexit__ method as a cleanup + function and returns the result of the __aenter__ method. + """ def __del__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi index 4a1bfda7eb707..9f738eb049046 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/case.pyi @@ -1,5 +1,5 @@ -"""Test case implementation -""" +"""Test case implementation""" + import logging import sys import unittest.result @@ -44,47 +44,50 @@ class _AssertRaisesBaseContext(_BaseTestCaseContext): # but it's not possible to construct an overload which expresses that def handle(self, name: str, args: list[Any], kwargs: dict[str, Any]) -> Any: """ -If args is empty, assertRaises/Warns is being used as a -context manager, so check for a 'msg' kwarg and return self. -If args is not empty, call a callable passing positional and keyword -arguments. -""" + If args is empty, assertRaises/Warns is being used as a + context manager, so check for a 'msg' kwarg and return self. + If args is not empty, call a callable passing positional and keyword + arguments. + """ def addModuleCleanup(function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: """Same as addCleanup, except the cleanup items are called even if -setUpModule fails (unlike tearDownModule). -""" + setUpModule fails (unlike tearDownModule). + """ + def doModuleCleanups() -> None: """Execute all module cleanup functions. Normally called for you after -tearDownModule. -""" + tearDownModule. + """ if sys.version_info >= (3, 11): def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: - """Same as enterContext, but module-wide. -""" + """Same as enterContext, but module-wide.""" def expectedFailure(test_item: _FT) -> _FT: ... def skip(reason: str) -> Callable[[_FT], _FT]: """ -Unconditionally skip a test. 
-""" + Unconditionally skip a test. + """ + def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: """ -Skip a test if the condition is true. -""" + Skip a test if the condition is true. + """ + def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: """ -Skip a test unless the condition is true. -""" + Skip a test unless the condition is true. + """ class SkipTest(Exception): """ -Raise this exception in a test to skip it. + Raise this exception in a test to skip it. + + Usually you can use TestCase.skipTest() or one of the skipping decorators + instead of raising this directly. + """ -Usually you can use TestCase.skipTest() or one of the skipping decorators -instead of raising this directly. -""" def __init__(self, reason: str) -> None: ... @type_check_only @@ -93,35 +96,36 @@ class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol) class TestCase: """A class whose instances are single test cases. -By default, the test code itself should be placed in a method named -'runTest'. - -If the fixture may be used for many test cases, create as -many test methods as are needed. When instantiating such a TestCase -subclass, specify in the constructor arguments the name of the test method -that the instance is to execute. - -Test authors should subclass TestCase for their own tests. Construction -and deconstruction of the test's environment ('fixture') can be -implemented by overriding the 'setUp' and 'tearDown' methods respectively. - -If it is necessary to override the __init__ method, the base class -__init__ method must always be called. It is important that subclasses -should not change the signature of their __init__ method, since instances -of the classes are instantiated automatically by parts of the framework -in order to be run. - -When subclassing TestCase, you can set these attributes: -* failureException: determines which exception will be raised when - the instance's assertion methods fail; test methods raising this - exception will be deemed to have 'failed' rather than 'errored'. -* longMessage: determines whether long messages (including repr of - objects used in assert methods) will be printed on failure in *addition* - to any explicit message passed. -* maxDiff: sets the maximum length of a diff in failure messages - by assert methods using difflib. It is looked up as an instance - attribute so can be configured by individual tests if required. -""" + By default, the test code itself should be placed in a method named + 'runTest'. + + If the fixture may be used for many test cases, create as + many test methods as are needed. When instantiating such a TestCase + subclass, specify in the constructor arguments the name of the test method + that the instance is to execute. + + Test authors should subclass TestCase for their own tests. Construction + and deconstruction of the test's environment ('fixture') can be + implemented by overriding the 'setUp' and 'tearDown' methods respectively. + + If it is necessary to override the __init__ method, the base class + __init__ method must always be called. It is important that subclasses + should not change the signature of their __init__ method, since instances + of the classes are instantiated automatically by parts of the framework + in order to be run. 
+ + When subclassing TestCase, you can set these attributes: + * failureException: determines which exception will be raised when + the instance's assertion methods fail; test methods raising this + exception will be deemed to have 'failed' rather than 'errored'. + * longMessage: determines whether long messages (including repr of + objects used in assert methods) will be printed on failure in *addition* + to any explicit message passed. + * maxDiff: sets the maximum length of a diff in failure messages + by assert methods using difflib. It is looked up as an instance + attribute so can be configured by individual tests if required. + """ + failureException: type[BaseException] longMessage: bool maxDiff: int | None @@ -131,104 +135,108 @@ When subclassing TestCase, you can set these attributes: _testMethodDoc: str def __init__(self, methodName: str = "runTest") -> None: """Create an instance of the class that will use the named test -method when executed. Raises a ValueError if the instance does -not have a method with the specified name. -""" + method when executed. Raises a ValueError if the instance does + not have a method with the specified name. + """ + def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... def setUp(self) -> None: - """Hook method for setting up the test fixture before exercising it. -""" + """Hook method for setting up the test fixture before exercising it.""" + def tearDown(self) -> None: - """Hook method for deconstructing the test fixture after testing it. -""" + """Hook method for deconstructing the test fixture after testing it.""" + @classmethod def setUpClass(cls) -> None: - """Hook method for setting up class fixture before running tests in the class. -""" + """Hook method for setting up class fixture before running tests in the class.""" + @classmethod def tearDownClass(cls) -> None: - """Hook method for deconstructing the class fixture after running all tests in the class. -""" + """Hook method for deconstructing the class fixture after running all tests in the class.""" + def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... def skipTest(self, reason: Any) -> NoReturn: - """Skip this test. -""" + """Skip this test.""" + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: """Return a context manager that will return the enclosed block -of code in a subtest identified by the optional message and -keyword parameters. A failure in the subtest marks the test -case as failed but resumes execution at the end of the enclosed -block, allowing further test code to be executed. -""" + of code in a subtest identified by the optional message and + keyword parameters. A failure in the subtest marks the test + case as failed but resumes execution at the end of the enclosed + block, allowing further test code to be executed. + """ + def debug(self) -> None: - """Run the test without collecting errors in a TestResult -""" + """Run the test without collecting errors in a TestResult""" if sys.version_info < (3, 11): def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: """Fail if the two objects are unequal as determined by the '==' -operator. -""" + operator. 
+ """ + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: """Fail if the two objects are equal as determined by the '!=' -operator. -""" + operator. + """ + def assertTrue(self, expr: Any, msg: Any = None) -> None: - """Check that the expression is true. -""" + """Check that the expression is true.""" + def assertFalse(self, expr: Any, msg: Any = None) -> None: - """Check that the expression is false. -""" + """Check that the expression is false.""" + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: - """Just like self.assertTrue(a is b), but with a nicer default message. -""" + """Just like self.assertTrue(a is b), but with a nicer default message.""" + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: - """Just like self.assertTrue(a is not b), but with a nicer default message. -""" + """Just like self.assertTrue(a is not b), but with a nicer default message.""" + def assertIsNone(self, obj: object, msg: Any = None) -> None: - """Same as self.assertTrue(obj is None), with a nicer default message. -""" + """Same as self.assertTrue(obj is None), with a nicer default message.""" + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: - """Included for symmetry with assertIsNone. -""" + """Included for symmetry with assertIsNone.""" + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: - """Just like self.assertTrue(a in b), but with a nicer default message. -""" + """Just like self.assertTrue(a in b), but with a nicer default message.""" + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: - """Just like self.assertTrue(a not in b), but with a nicer default message. -""" + """Just like self.assertTrue(a not in b), but with a nicer default message.""" + def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: """Same as self.assertTrue(isinstance(obj, cls)), with a nicer -default message. -""" + default message. + """ + def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: - """Included for symmetry with assertIsInstance. -""" + """Included for symmetry with assertIsInstance.""" + @overload def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a > b), but with a nicer default message. -""" + """Just like self.assertTrue(a > b), but with a nicer default message.""" + @overload def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... @overload def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a >= b), but with a nicer default message. -""" + """Just like self.assertTrue(a >= b), but with a nicer default message.""" + @overload def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... @overload def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a < b), but with a nicer default message. -""" + """Just like self.assertTrue(a < b), but with a nicer default message.""" + @overload def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... @overload def assertLessEqual(self, a: SupportsDunderLE[_T], b: _T, msg: Any = None) -> None: - """Just like self.assertTrue(a <= b), but with a nicer default message. 
-""" + """Just like self.assertTrue(a <= b), but with a nicer default message.""" + @overload def assertLessEqual(self, a: _T, b: SupportsDunderGE[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` @@ -243,30 +251,31 @@ default message. **kwargs: Any, ) -> None: """Fail unless an exception of class expected_exception is raised -by the callable when invoked with specified positional and -keyword arguments. If a different type of exception is -raised, it will not be caught, and the test case will be -deemed to have suffered an error, exactly as for an -unexpected exception. - -If called with the callable and arguments omitted, will return a -context object used like this:: - - with self.assertRaises(SomeException): - do_something() - -An optional keyword argument 'msg' can be provided when assertRaises -is used as a context object. - -The context manager keeps a reference to the exception as -the 'exception' attribute. This allows you to inspect the -exception after the assertion:: - - with self.assertRaises(SomeException) as cm: - do_something() - the_exception = cm.exception - self.assertEqual(the_exception.error_code, 3) -""" + by the callable when invoked with specified positional and + keyword arguments. If a different type of exception is + raised, it will not be caught, and the test case will be + deemed to have suffered an error, exactly as for an + unexpected exception. + + If called with the callable and arguments omitted, will return a + context object used like this:: + + with self.assertRaises(SomeException): + do_something() + + An optional keyword argument 'msg' can be provided when assertRaises + is used as a context object. + + The context manager keeps a reference to the exception as + the 'exception' attribute. This allows you to inspect the + exception after the assertion:: + + with self.assertRaises(SomeException) as cm: + do_something() + the_exception = cm.exception + self.assertEqual(the_exception.error_code, 3) + """ + @overload def assertRaises( self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... @@ -282,15 +291,16 @@ exception after the assertion:: ) -> None: """Asserts that the message in a raised exception matches a regex. -Args: - expected_exception: Exception class expected to be raised. - expected_regex: Regex (re.Pattern object or string) expected - to be found in error message. - args: Function to be called and extra positional args. - kwargs: Extra kwargs. - msg: Optional message used in case of failure. Can only be used - when assertRaisesRegex is used as a context manager. -""" + Args: + expected_exception: Exception class expected to be raised. + expected_regex: Regex (re.Pattern object or string) expected + to be found in error message. + args: Function to be called and extra positional args. + kwargs: Extra kwargs. + msg: Optional message used in case of failure. Can only be used + when assertRaisesRegex is used as a context manager. + """ + @overload def assertRaisesRegex( self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... @@ -304,32 +314,33 @@ Args: **kwargs: _P.kwargs, ) -> None: """Fail unless a warning of class warnClass is triggered -by the callable when invoked with specified positional and -keyword arguments. If a different type of warning is -triggered, it will not be handled: depending on the other -warning filtering rules in effect, it might be silenced, printed -out, or raised as an exception. 
- -If called with the callable and arguments omitted, will return a -context object used like this:: - - with self.assertWarns(SomeWarning): - do_something() - -An optional keyword argument 'msg' can be provided when assertWarns -is used as a context object. - -The context manager keeps a reference to the first matching -warning as the 'warning' attribute; similarly, the 'filename' -and 'lineno' attributes give you information about the line -of Python code from which the warning was triggered. -This allows you to inspect the warning after the assertion:: - - with self.assertWarns(SomeWarning) as cm: - do_something() - the_warning = cm.warning - self.assertEqual(the_warning.some_attribute, 147) -""" + by the callable when invoked with specified positional and + keyword arguments. If a different type of warning is + triggered, it will not be handled: depending on the other + warning filtering rules in effect, it might be silenced, printed + out, or raised as an exception. + + If called with the callable and arguments omitted, will return a + context object used like this:: + + with self.assertWarns(SomeWarning): + do_something() + + An optional keyword argument 'msg' can be provided when assertWarns + is used as a context object. + + The context manager keeps a reference to the first matching + warning as the 'warning' attribute; similarly, the 'filename' + and 'lineno' attributes give you information about the line + of Python code from which the warning was triggered. + This allows you to inspect the warning after the assertion:: + + with self.assertWarns(SomeWarning) as cm: + do_something() + the_warning = cm.warning + self.assertEqual(the_warning.some_attribute, 147) + """ + @overload def assertWarns( self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... @@ -344,19 +355,20 @@ This allows you to inspect the warning after the assertion:: **kwargs: _P.kwargs, ) -> None: """Asserts that the message in a triggered warning matches a regexp. -Basic functioning is similar to assertWarns() with the addition -that only warnings whose messages also match the regular expression -are considered successful matches. - -Args: - expected_warning: Warning class expected to be triggered. - expected_regex: Regex (re.Pattern object or string) expected - to be found in error message. - args: Function to be called and extra positional args. - kwargs: Extra kwargs. - msg: Optional message used in case of failure. Can only be used - when assertWarnsRegex is used as a context manager. -""" + Basic functioning is similar to assertWarns() with the addition + that only warnings whose messages also match the regular expression + are considered successful matches. + + Args: + expected_warning: Warning class expected to be triggered. + expected_regex: Regex (re.Pattern object or string) expected + to be found in error message. + args: Function to be called and extra positional args. + kwargs: Extra kwargs. + msg: Optional message used in case of failure. Can only be used + when assertWarnsRegex is used as a context manager. + """ + @overload def assertWarnsRegex( self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... @@ -365,48 +377,49 @@ Args: self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[_LoggingWatcher]: """Fail unless a log message of level *level* or higher is emitted -on *logger_name* or its children. 
If omitted, *level* defaults to -INFO and *logger* defaults to the root logger. - -This method must be used as a context manager, and will yield -a recording object with two attributes: `output` and `records`. -At the end of the context manager, the `output` attribute will -be a list of the matching formatted log messages and the -`records` attribute will be a list of the corresponding LogRecord -objects. - -Example:: - - with self.assertLogs('foo', level='INFO') as cm: - logging.getLogger('foo').info('first message') - logging.getLogger('foo.bar').error('second message') - self.assertEqual(cm.output, ['INFO:foo:first message', - 'ERROR:foo.bar:second message']) -""" + on *logger_name* or its children. If omitted, *level* defaults to + INFO and *logger* defaults to the root logger. + + This method must be used as a context manager, and will yield + a recording object with two attributes: `output` and `records`. + At the end of the context manager, the `output` attribute will + be a list of the matching formatted log messages and the + `records` attribute will be a list of the corresponding LogRecord + objects. + + Example:: + + with self.assertLogs('foo', level='INFO') as cm: + logging.getLogger('foo').info('first message') + logging.getLogger('foo.bar').error('second message') + self.assertEqual(cm.output, ['INFO:foo:first message', + 'ERROR:foo.bar:second message']) + """ if sys.version_info >= (3, 10): def assertNoLogs( self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[None]: """Fail unless no log messages of level *level* or higher are emitted -on *logger_name* or its children. + on *logger_name* or its children. -This method must be used as a context manager. -""" + This method must be used as a context manager. + """ @overload def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: """Fail if the two objects are unequal as determined by their -difference rounded to the given number of decimal places -(default 7) and comparing to zero, or by comparing that the -difference between the two objects is more than the given -delta. + difference rounded to the given number of decimal places + (default 7) and comparing to zero, or by comparing that the + difference between the two objects is more than the given + delta. + + Note that decimal places (from zero) are usually not the same + as significant digits (measured from the most significant digit). -Note that decimal places (from zero) are usually not the same -as significant digits (measured from the most significant digit). + If the two objects compare equal then they will automatically + compare almost equal. + """ -If the two objects compare equal then they will automatically -compare almost equal. -""" @overload def assertAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -432,15 +445,16 @@ compare almost equal. @overload def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: """Fail if the two objects are equal as determined by their -difference rounded to the given number of decimal places -(default 7) and comparing to zero, or by comparing that the -difference between the two objects is less than the given delta. 
+ difference rounded to the given number of decimal places + (default 7) and comparing to zero, or by comparing that the + difference between the two objects is less than the given delta. + + Note that decimal places (from zero) are usually not the same + as significant digits (measured from the most significant digit). -Note that decimal places (from zero) are usually not the same -as significant digits (measured from the most significant digit). + Objects that are equal automatically fail. + """ -Objects that are equal automatically fail. -""" @overload def assertNotAlmostEqual( self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE @@ -464,157 +478,163 @@ Objects that are equal automatically fail. delta: None = None, ) -> None: ... def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: - """Fail the test unless the text matches the regular expression. -""" + """Fail the test unless the text matches the regular expression.""" + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: - """Fail the test if the text matches the regular expression. -""" + """Fail the test if the text matches the regular expression.""" + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: """Asserts that two iterables have the same elements, the same number of -times, without regard to order. + times, without regard to order. + + self.assertEqual(Counter(list(first)), + Counter(list(second))) - self.assertEqual(Counter(list(first)), - Counter(list(second))) + Example: + - [0, 1, 1] and [1, 0, 1] compare equal. + - [0, 0, 1] and [0, 1] compare unequal. - Example: - - [0, 1, 1] and [1, 0, 1] compare equal. - - [0, 0, 1] and [0, 1] compare unequal. + """ -""" def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: """Add a type specific assertEqual style function to compare a type. -This method is for use by TestCase subclasses that need to register -their own type equality functions to provide nicer error messages. + This method is for use by TestCase subclasses that need to register + their own type equality functions to provide nicer error messages. + + Args: + typeobj: The data type to call this function on when both values + are of the same type in assertEqual(). + function: The callable taking two arguments and an optional + msg= argument that raises self.failureException with a + useful error message when the two arguments are not equal. + """ -Args: - typeobj: The data type to call this function on when both values - are of the same type in assertEqual(). - function: The callable taking two arguments and an optional - msg= argument that raises self.failureException with a - useful error message when the two arguments are not equal. -""" def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: - """Assert that two multi-line strings are equal. -""" + """Assert that two multi-line strings are equal.""" + def assertSequenceEqual( self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None ) -> None: """An equality assertion for ordered sequences (like lists and tuples). -For the purposes of this function, a valid ordered sequence type is one -which can be indexed, has a length, and has an equality operator. - -Args: - seq1: The first sequence to compare. - seq2: The second sequence to compare. 
- seq_type: The expected datatype of the sequences, or None if no - datatype should be enforced. - msg: Optional message to use on failure instead of a list of - differences. -""" + For the purposes of this function, a valid ordered sequence type is one + which can be indexed, has a length, and has an equality operator. + + Args: + seq1: The first sequence to compare. + seq2: The second sequence to compare. + seq_type: The expected datatype of the sequences, or None if no + datatype should be enforced. + msg: Optional message to use on failure instead of a list of + differences. + """ + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: """A list-specific equality assertion. -Args: - list1: The first list to compare. - list2: The second list to compare. - msg: Optional message to use on failure instead of a list of - differences. + Args: + list1: The first list to compare. + list2: The second list to compare. + msg: Optional message to use on failure instead of a list of + differences. + + """ -""" def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: """A tuple-specific equality assertion. -Args: - tuple1: The first tuple to compare. - tuple2: The second tuple to compare. - msg: Optional message to use on failure instead of a list of - differences. -""" + Args: + tuple1: The first tuple to compare. + tuple2: The second tuple to compare. + msg: Optional message to use on failure instead of a list of + differences. + """ + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: """A set-specific equality assertion. -Args: - set1: The first set to compare. - set2: The second set to compare. - msg: Optional message to use on failure instead of a list of - differences. + Args: + set1: The first set to compare. + set2: The second set to compare. + msg: Optional message to use on failure instead of a list of + differences. -assertSetEqual uses ducktyping to support different types of sets, and -is optimized for sets specifically (parameters must support a -difference method). -""" + assertSetEqual uses ducktyping to support different types of sets, and + is optimized for sets specifically (parameters must support a + difference method). + """ # assertDictEqual accepts only true dict instances. We can't use that here, since that would make # assertDictEqual incompatible with TypedDict. def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... def fail(self, msg: Any = None) -> NoReturn: - """Fail immediately, with the given message. -""" + """Fail immediately, with the given message.""" + def countTestCases(self) -> int: ... def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... def shortDescription(self) -> str | None: """Returns a one-line description of the test, or None if no -description has been provided. + description has been provided. + + The default implementation of this method returns the first line of + the specified test method's docstring. + """ -The default implementation of this method returns the first line of -the specified test method's docstring. -""" def addCleanup(self, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: """Add a function, with arguments, to be called when the test is -completed. Functions added are called on a LIFO basis and are -called after tearDown on test failure or success. 
- -Cleanup items are called even if setUp fails (unlike tearDown). -""" + completed. Functions added are called on a LIFO basis and are + called after tearDown on test failure or success. + Cleanup items are called even if setUp fails (unlike tearDown). + """ if sys.version_info >= (3, 11): def enterContext(self, cm: AbstractContextManager[_T]) -> _T: """Enters the supplied context manager. -If successful, also adds its __exit__ method as a cleanup -function and returns the result of the __enter__ method. -""" + If successful, also adds its __exit__ method as a cleanup + function and returns the result of the __enter__ method. + """ def doCleanups(self) -> None: """Execute all cleanup functions. Normally called for you after -tearDown. -""" + tearDown. + """ + @classmethod def addClassCleanup(cls, function: Callable[_P, object], /, *args: _P.args, **kwargs: _P.kwargs) -> None: """Same as addCleanup, except the cleanup items are called even if -setUpClass fails (unlike tearDownClass). -""" + setUpClass fails (unlike tearDownClass). + """ + @classmethod def doClassCleanups(cls) -> None: """Execute all class cleanup functions. Normally called for you after -tearDownClass. -""" - + tearDownClass. + """ if sys.version_info >= (3, 11): @classmethod def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: - """Same as enterContext, but class-wide. -""" + """Same as enterContext, but class-wide.""" def _formatMessage(self, msg: str | None, standardMsg: str) -> str: # undocumented """Honour the longMessage attribute when generating failure messages. -If longMessage is False this means: -* Use only an explicit message if it is provided -* Otherwise use the standard message for the assert - -If longMessage is True: -* Use the standard message -* If an explicit message is provided, plus ' : ' and the explicit message -""" + If longMessage is False this means: + * Use only an explicit message if it is provided + * Otherwise use the standard message for the assert + + If longMessage is True: + * Use the standard message + * If an explicit message is provided, plus ' : ' and the explicit message + """ + def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: # undocumented """Get a detailed comparison function for the types of the two args. -Returns: A callable accepting (first, second, msg=None) that will -raise a failure exception if first != second with a useful human -readable error message for those types. -""" + Returns: A callable accepting (first, second, msg=None) that will + raise a failure exception if first != second with a useful human + readable error message for those types. + """ if sys.version_info < (3, 12): failUnlessEqual = assertEqual assertEquals = assertEqual @@ -631,12 +651,8 @@ readable error message for those types. assertRegexpMatches = assertRegex assertNotRegexpMatches = assertNotRegex assertRaisesRegexp = assertRaisesRegex - def assertDictContainsSubset( - self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None - ) -> None: - """Checks whether dictionary is a superset of subset. -""" - + def assertDictContainsSubset(self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None) -> None: + """Checks whether dictionary is a superset of subset.""" if sys.version_info >= (3, 10): # Runtime has *args, **kwargs, but will error if any are supplied def __init_subclass__(cls, *args: Never, **kwargs: Never) -> None: ... @@ -654,11 +670,12 @@ readable error message for those types. 
class FunctionTestCase(TestCase): """A test case that wraps a test function. -This is useful for slipping pre-existing test functions into the -unittest framework. Optionally, set-up and tidy-up functions can be -supplied. As with TestCase, the tidy-up ('tearDown') function will -always be called if the set-up ('setUp') function ran successfully. -""" + This is useful for slipping pre-existing test functions into the + unittest framework. Optionally, set-up and tidy-up functions can be + supplied. As with TestCase, the tidy-up ('tearDown') function will + always be called if the set-up ('setUp') function ran successfully. + """ + def __init__( self, testFunc: Callable[[], object], @@ -671,8 +688,8 @@ always be called if the set-up ('setUp') function ran successfully. def __eq__(self, other: object) -> bool: ... class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): - """A context manager used to implement TestCase.assertRaises* methods. -""" + """A context manager used to implement TestCase.assertRaises* methods.""" + exception: _E def __enter__(self) -> Self: ... def __exit__( @@ -681,12 +698,12 @@ class _AssertRaisesContext(_AssertRaisesBaseContext, Generic[_E]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class _AssertWarnsContext(_AssertRaisesBaseContext): - """A context manager used to implement TestCase.assertWarns* methods. -""" + """A context manager used to implement TestCase.assertWarns* methods.""" + warning: WarningMessage filename: str lineno: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi index 0d0b854541e45..9473618bb255f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/loader.pyi @@ -1,5 +1,5 @@ -"""Loading unittests. 
-""" +"""Loading unittests.""" + import sys import unittest.case import unittest.suite @@ -16,72 +16,71 @@ VALID_MODULE_NAME: Final[Pattern[str]] class TestLoader: """ -This class is responsible for loading tests according to various criteria -and returning them wrapped in a TestSuite -""" + This class is responsible for loading tests according to various criteria + and returning them wrapped in a TestSuite + """ + errors: list[type[BaseException]] testMethodPrefix: str sortTestMethodsUsing: _SortComparisonMethod testNamePatterns: list[str] | None suiteClass: _SuiteClass def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in testCaseClass -""" + """Return a suite of all test cases contained in testCaseClass""" if sys.version_info >= (3, 12): def loadTestsFromModule(self, module: ModuleType, *, pattern: str | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in the given module -""" + """Return a suite of all test cases contained in the given module""" else: def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: str | None = None) -> unittest.suite.TestSuite: - """Return a suite of all test cases contained in the given module -""" + """Return a suite of all test cases contained in the given module""" def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: """Return a suite of all test cases given a string specifier. -The name may resolve either to a module, a test case class, a -test method within a test case class, or a callable object which -returns a TestCase or TestSuite instance. + The name may resolve either to a module, a test case class, a + test method within a test case class, or a callable object which + returns a TestCase or TestSuite instance. + + The method optionally resolves the names relative to a given module. + """ -The method optionally resolves the names relative to a given module. -""" def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: """Return a suite of all test cases found using the given sequence -of string specifiers. See 'loadTestsFromName()'. -""" - def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: - """Return a sorted sequence of method names found within testCaseClass + of string specifiers. See 'loadTestsFromName()'. """ - def discover( - self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None - ) -> unittest.suite.TestSuite: + + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: + """Return a sorted sequence of method names found within testCaseClass""" + + def discover(self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None) -> unittest.suite.TestSuite: """Find and return all test modules from the specified start -directory, recursing into subdirectories to find them and return all -tests found within them. Only test files that match the pattern will -be loaded. (Using shell style pattern matching.) - -All test modules must be importable from the top level of the project. -If the start directory is not the top level directory then the top -level directory must be specified separately. - -If a test package name (directory with '__init__.py') matches the -pattern then the package will be checked for a 'load_tests' function. 
If -this exists then it will be called with (loader, tests, pattern) unless -the package has already had load_tests called from the same discovery -invocation, in which case the package module object is not scanned for -tests - this ensures that when a package uses discover to further -discover child tests that infinite recursion does not happen. - -If load_tests exists then discovery does *not* recurse into the package, -load_tests is responsible for loading all tests in the package. - -The pattern is deliberately not stored as a loader attribute so that -packages can continue discovery themselves. top_level_dir is stored so -load_tests does not need to pass this argument in to loader.discover(). - -Paths are sorted before being imported to ensure reproducible execution -order even on filesystems with non-alphabetical ordering like ext3/4. -""" + directory, recursing into subdirectories to find them and return all + tests found within them. Only test files that match the pattern will + be loaded. (Using shell style pattern matching.) + + All test modules must be importable from the top level of the project. + If the start directory is not the top level directory then the top + level directory must be specified separately. + + If a test package name (directory with '__init__.py') matches the + pattern then the package will be checked for a 'load_tests' function. If + this exists then it will be called with (loader, tests, pattern) unless + the package has already had load_tests called from the same discovery + invocation, in which case the package module object is not scanned for + tests - this ensures that when a package uses discover to further + discover child tests that infinite recursion does not happen. + + If load_tests exists then discovery does *not* recurse into the package, + load_tests is responsible for loading all tests in the package. + + The pattern is deliberately not stored as a loader attribute so that + packages can continue discovery themselves. top_level_dir is stored so + load_tests does not need to pass this argument in to loader.discover(). + + Paths are sorted before being imported to ensure reproducible execution + order even on filesystems with non-alphabetical ordering like ext3/4. + """ + def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... defaultTestLoader: TestLoader diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi index 68dcfe5c00004..a4e8e9cb02bdf 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/main.pyi @@ -1,5 +1,5 @@ -"""Unittest main program -""" +"""Unittest main program""" + import sys import unittest.case import unittest.loader @@ -20,8 +20,9 @@ class _TestRunner(Protocol): # not really documented class TestProgram: """A command-line program that runs a set of tests; this is primarily -for making test modules conveniently executable. -""" + for making test modules conveniently executable. 
+ """ + result: unittest.result.TestResult module: None | str | ModuleType verbosity: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi index a060d9752edf4..615eec5fc45fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/mock.pyi @@ -55,14 +55,14 @@ else: FILTER_DIR: bool # controls the way mock objects respond to `dir` function class _SentinelObject: - """A unique, named, sentinel object. -""" + """A unique, named, sentinel object.""" + name: Any def __init__(self, name: Any) -> None: ... class _Sentinel: - """Access attributes to return a named object, usable as a sentinel. -""" + """Access attributes to return a named object, usable as a sentinel.""" + def __getattr__(self, name: str) -> Any: ... sentinel: _Sentinel @@ -75,23 +75,24 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs if sys.version_info >= (3, 12): class _Call(tuple[Any, ...]): """ -A tuple for holding the results of a call to a mock, either in the form -`(args, kwargs)` or `(name, args, kwargs)`. + A tuple for holding the results of a call to a mock, either in the form + `(args, kwargs)` or `(name, args, kwargs)`. -If args or kwargs are empty then a call tuple will compare equal to -a tuple without those values. This makes comparisons less verbose:: + If args or kwargs are empty then a call tuple will compare equal to + a tuple without those values. This makes comparisons less verbose:: - _Call(('name', (), {})) == ('name',) - _Call(('name', (1,), {})) == ('name', (1,)) - _Call(((), {'a': 'b'})) == ({'a': 'b'},) + _Call(('name', (), {})) == ('name',) + _Call(('name', (1,), {})) == ('name', (1,)) + _Call(((), {'a': 'b'})) == ({'a': 'b'},) -The `_Call` object provides a useful shortcut for comparing with call:: + The `_Call` object provides a useful shortcut for comparing with call:: - _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) - _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) + _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + + If the _Call has no name then it will match any name. + """ -If the _Call has no name then it will match any name. -""" def __new__( cls, value: _CallValue = (), @@ -120,31 +121,32 @@ If the _Call has no name then it will match any name. def kwargs(self) -> Mapping[str, Any]: ... def call_list(self) -> Any: """For a call object that represents multiple calls, `call_list` -returns a list of all the intermediate calls as well as the -final call. -""" + returns a list of all the intermediate calls as well as the + final call. + """ else: @disjoint_base class _Call(tuple[Any, ...]): """ - A tuple for holding the results of a call to a mock, either in the form - `(args, kwargs)` or `(name, args, kwargs)`. + A tuple for holding the results of a call to a mock, either in the form + `(args, kwargs)` or `(name, args, kwargs)`. - If args or kwargs are empty then a call tuple will compare equal to - a tuple without those values. This makes comparisons less verbose:: + If args or kwargs are empty then a call tuple will compare equal to + a tuple without those values. 
This makes comparisons less verbose:: - _Call(('name', (), {})) == ('name',) - _Call(('name', (1,), {})) == ('name', (1,)) - _Call(((), {'a': 'b'})) == ({'a': 'b'},) + _Call(('name', (), {})) == ('name',) + _Call(('name', (1,), {})) == ('name', (1,)) + _Call(((), {'a': 'b'})) == ({'a': 'b'},) - The `_Call` object provides a useful shortcut for comparing with call:: + The `_Call` object provides a useful shortcut for comparing with call:: - _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) - _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) + _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + + If the _Call has no name then it will match any name. + """ - If the _Call has no name then it will match any name. - """ def __new__( cls, value: _CallValue = (), @@ -173,9 +175,9 @@ else: def kwargs(self) -> Mapping[str, Any]: ... def call_list(self) -> Any: """For a call object that represents multiple calls, `call_list` - returns a list of all the intermediate calls as well as the - final call. -""" + returns a list of all the intermediate calls as well as the + final call. + """ call: _Call @@ -188,8 +190,8 @@ class Base: # We subclass with "Any" because mocks are explicitly designed to stand in for other types, # something that can't be expressed with our static type system. class NonCallableMock(Base, Any): - """A non-callable version of `Mock` -""" + """A non-callable version of `Mock`""" + if sys.version_info >= (3, 12): def __new__( cls, @@ -228,85 +230,92 @@ class NonCallableMock(Base, Any): def __delattr__(self, name: str) -> None: ... def __setattr__(self, name: str, value: Any) -> None: ... def __dir__(self) -> list[str]: - """Filter the output of `dir(mock)` to only useful members. -""" + """Filter the output of `dir(mock)` to only useful members.""" + def assert_called_with(self, *args: Any, **kwargs: Any) -> None: """assert that the last call was made with the specified arguments. -Raises an AssertionError if the args and keyword args passed in are -different to the last call to the mock. -""" - def assert_not_called(self) -> None: - """assert that the mock was never called. + Raises an AssertionError if the args and keyword args passed in are + different to the last call to the mock. """ + + def assert_not_called(self) -> None: + """assert that the mock was never called.""" + def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: """assert that the mock was called exactly once and that that call was -with the specified arguments. -""" + with the specified arguments. + """ + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... def assert_called(self) -> None: - """assert that the mock was called at least once - """ + """assert that the mock was called at least once""" + def assert_called_once(self) -> None: - """assert that the mock was called only once. - """ + """assert that the mock was called only once.""" + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: - """Restore the mock object to its initial state. -""" + """Restore the mock object to its initial state.""" + def _extract_mock_name(self) -> str: ... def _get_call_signature_from_name(self, name: str) -> Any: """ -* If call objects are asserted against a method/function like obj.meth1 -then there could be no name for the call object to lookup. Hence just -return the spec_signature of the method/function being asserted against. 
-* If the name is not empty then remove () and split by '.' to get -list of names to iterate through the children until a potential -match is found. A child mock is created only during attribute access -so if we get a _SpecState then no attributes of the spec were accessed -and can be safely exited. -""" + * If call objects are asserted against a method/function like obj.meth1 + then there could be no name for the call object to lookup. Hence just + return the spec_signature of the method/function being asserted against. + * If the name is not empty then remove () and split by '.' to get + list of names to iterate through the children until a potential + match is found. A child mock is created only during attribute access + so if we get a _SpecState then no attributes of the spec were accessed + and can be safely exited. + """ + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: """assert the mock has been called with the specified arguments. -The assert passes if the mock has *ever* been called, unlike -`assert_called_with` and `assert_called_once_with` that only pass if -the call is the most recent one. -""" + The assert passes if the mock has *ever* been called, unlike + `assert_called_with` and `assert_called_once_with` that only pass if + the call is the most recent one. + """ + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: """assert the mock has been called with the specified calls. -The `mock_calls` list is checked for the calls. + The `mock_calls` list is checked for the calls. + + If `any_order` is False (the default) then the calls must be + sequential. There can be extra calls before or after the + specified calls. -If `any_order` is False (the default) then the calls must be -sequential. There can be extra calls before or after the -specified calls. + If `any_order` is True then the calls can be in any order, but + they must all appear in `mock_calls`. + """ -If `any_order` is True then the calls can be in any order, but -they must all appear in `mock_calls`. -""" def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: """Add a spec to a mock. `spec` can either be an object or a -list of strings. Only attributes on the `spec` can be fetched as -attributes from the mock. + list of strings. Only attributes on the `spec` can be fetched as + attributes from the mock. + + If `spec_set` is True then only attributes on the spec can be set. + """ -If `spec_set` is True then only attributes on the spec can be set. -""" def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: """ -Attach a mock as an attribute of this one, replacing its name and -parent. Calls to the attached mock will be recorded in the -`method_calls` and `mock_calls` attributes of this one. -""" + Attach a mock as an attribute of this one, replacing its name and + parent. Calls to the attached mock will be recorded in the + `method_calls` and `mock_calls` attributes of this one. + """ + def configure_mock(self, **kwargs: Any) -> None: """Set attributes on the mock through keyword arguments. 
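As a usage sketch of the assertion helpers documented above (illustrative only; the mock and method names below are arbitrary and this snippet is not part of the vendored stub)::

    from unittest.mock import Mock, call

    m = Mock()
    m.method(1)
    m.method(2, key="value")

    # assert_called_with only checks the most recent call
    m.method.assert_called_with(2, key="value")
    # assert_has_calls checks mock_calls; extra calls before/after are allowed
    m.method.assert_has_calls([call(1), call(2, key="value")])
    # with any_order=True the listed calls may appear in any order
    m.method.assert_has_calls([call(2, key="value"), call(1)], any_order=True)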
-Attributes plus return values and side effects can be set on child -mocks using standard dot notation and unpacking a dictionary in the -method call: + Attributes plus return values and side effects can be set on child + mocks using standard dot notation and unpacking a dictionary in the + method call: ->>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} ->>> mock.configure_mock(**attrs) -""" + >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} + >>> mock.configure_mock(**attrs) + """ return_value: Any side_effect: Any called: bool @@ -317,40 +326,41 @@ method call: def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: """ -Given a call (or simply an (args, kwargs) tuple), return a -comparison key suitable for matching with other calls. -This is a best effort method which relies on the spec's signature, -if available, or falls back on the arguments themselves. -""" + Given a call (or simply an (args, kwargs) tuple), return a + comparison key suitable for matching with other calls. + This is a best effort method which relies on the spec's signature, + if available, or falls back on the arguments themselves. + """ + def _get_child_mock(self, **kw: Any) -> NonCallableMock: """Create the child mocks for attributes and return value. -By default child mocks will be the same type as the parent. -Subclasses of Mock may want to override this to customize the way -child mocks are made. + By default child mocks will be the same type as the parent. + Subclasses of Mock may want to override this to customize the way + child mocks are made. -For non-callable mocks the callable variant will be used (rather than -any custom subclass). -""" + For non-callable mocks the callable variant will be used (rather than + any custom subclass). + """ if sys.version_info >= (3, 13): def _calls_repr(self) -> str: """Renders self.mock_calls as a string. - Example: " -Calls: [call(1), call(2)]." + Example: " + Calls: [call(1), call(2)]." - If self.mock_calls is empty, an empty string is returned. The - output will be truncated if very long. - """ + If self.mock_calls is empty, an empty string is returned. The + output will be truncated if very long. + """ else: def _calls_repr(self, prefix: str = "Calls") -> str: """Renders self.mock_calls as a string. - Example: " -Calls: [call(1), call(2)]." + Example: " + Calls: [call(1), call(2)]." - If self.mock_calls is empty, an empty string is returned. The - output will be truncated if very long. - """ + If self.mock_calls is empty, an empty string is returned. The + output will be truncated if very long. + """ class CallableMixin(Base): side_effect: Any @@ -372,61 +382,61 @@ class CallableMixin(Base): class Mock(CallableMixin, NonCallableMock): """ -Create a new `Mock` object. `Mock` takes several optional arguments -that specify the behaviour of the Mock object: - -* `spec`: This can be either a list of strings or an existing object (a - class or instance) that acts as the specification for the mock object. If - you pass in an object then a list of strings is formed by calling dir on - the object (excluding unsupported magic attributes and methods). Accessing - any attribute not in this list will raise an `AttributeError`. - - If `spec` is an object (rather than a list of strings) then - `mock.__class__` returns the class of the spec object. This allows mocks - to pass `isinstance` tests. - -* `spec_set`: A stricter variant of `spec`. 
If used, attempting to *set* - or get an attribute on the mock that isn't on the object passed as - `spec_set` will raise an `AttributeError`. - -* `side_effect`: A function to be called whenever the Mock is called. See - the `side_effect` attribute. Useful for raising exceptions or - dynamically changing return values. The function is called with the same - arguments as the mock, and unless it returns `DEFAULT`, the return - value of this function is used as the return value. - - If `side_effect` is an iterable then each call to the mock will return - the next value from the iterable. If any of the members of the iterable - are exceptions they will be raised instead of returned. - -* `return_value`: The value returned when the mock is called. By default - this is a new Mock (created on first access). See the - `return_value` attribute. - -* `unsafe`: By default, accessing any attribute whose name starts with - *assert*, *assret*, *asert*, *aseert*, or *assrt* raises an AttributeError. - Additionally, an AttributeError is raised when accessing - attributes that match the name of an assertion method without the prefix - `assert_`, e.g. accessing `called_once` instead of `assert_called_once`. - Passing `unsafe=True` will allow access to these attributes. - -* `wraps`: Item for the mock object to wrap. If `wraps` is not None then - calling the Mock will pass the call through to the wrapped object - (returning the real result). Attribute access on the mock will return a - Mock object that wraps the corresponding attribute of the wrapped object - (so attempting to access an attribute that doesn't exist will raise an - `AttributeError`). - - If the mock has an explicit `return_value` set then calls are not passed - to the wrapped object and the `return_value` is returned instead. - -* `name`: If the mock has a name then it will be used in the repr of the - mock. This can be useful for debugging. The name is propagated to child - mocks. - -Mocks can also be called with arbitrary keyword arguments. These will be -used to set attributes on the mock after it is created. -""" + Create a new `Mock` object. `Mock` takes several optional arguments + that specify the behaviour of the Mock object: + + * `spec`: This can be either a list of strings or an existing object (a + class or instance) that acts as the specification for the mock object. If + you pass in an object then a list of strings is formed by calling dir on + the object (excluding unsupported magic attributes and methods). Accessing + any attribute not in this list will raise an `AttributeError`. + + If `spec` is an object (rather than a list of strings) then + `mock.__class__` returns the class of the spec object. This allows mocks + to pass `isinstance` tests. + + * `spec_set`: A stricter variant of `spec`. If used, attempting to *set* + or get an attribute on the mock that isn't on the object passed as + `spec_set` will raise an `AttributeError`. + + * `side_effect`: A function to be called whenever the Mock is called. See + the `side_effect` attribute. Useful for raising exceptions or + dynamically changing return values. The function is called with the same + arguments as the mock, and unless it returns `DEFAULT`, the return + value of this function is used as the return value. + + If `side_effect` is an iterable then each call to the mock will return + the next value from the iterable. If any of the members of the iterable + are exceptions they will be raised instead of returned. 
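A minimal sketch of the `side_effect` semantics described above, using an iterable whose last member is an exception (illustrative only, not part of the stub)::

    from unittest.mock import Mock

    m = Mock(side_effect=[1, 2, ValueError("exhausted")])
    assert m() == 1          # each call returns the next value from the iterable
    assert m() == 2
    try:
        m()                  # exception members are raised, not returned
    except ValueError:
        pass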
+ + * `return_value`: The value returned when the mock is called. By default + this is a new Mock (created on first access). See the + `return_value` attribute. + + * `unsafe`: By default, accessing any attribute whose name starts with + *assert*, *assret*, *asert*, *aseert*, or *assrt* raises an AttributeError. + Additionally, an AttributeError is raised when accessing + attributes that match the name of an assertion method without the prefix + `assert_`, e.g. accessing `called_once` instead of `assert_called_once`. + Passing `unsafe=True` will allow access to these attributes. + + * `wraps`: Item for the mock object to wrap. If `wraps` is not None then + calling the Mock will pass the call through to the wrapped object + (returning the real result). Attribute access on the mock will return a + Mock object that wraps the corresponding attribute of the wrapped object + (so attempting to access an attribute that doesn't exist will raise an + `AttributeError`). + + If the mock has an explicit `return_value` set then calls are not passed + to the wrapped object and the `return_value` is returned instead. + + * `name`: If the mock has a name then it will be used in the repr of the + mock. This can be useful for debugging. The name is propagated to child + mocks. + + Mocks can also be called with arbitrary keyword arguments. These will be + used to set attributes on the mock after it is created. + """ class _patch(Generic[_T]): attribute_name: Any @@ -490,19 +500,18 @@ class _patch(Generic[_T]): temp_original: Any is_local: bool def __enter__(self) -> _T: - """Perform the patch. -""" + """Perform the patch.""" + def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / ) -> None: - """Undo the patch. -""" + """Undo the patch.""" + def start(self) -> _T: - """Activate a patch, returning any created mock. -""" + """Activate a patch, returning any created mock.""" + def stop(self) -> None: - """Stop an active patch. -""" + """Stop an active patch.""" # This class does not exist at runtime, it's a hack to make this work: # @patch("foo") @@ -518,33 +527,34 @@ class _patch_pass_arg(_patch[_T]): class _patch_dict: """ -Patch a dictionary, or dictionary like object, and restore the dictionary -to its original state after the test, where the restored dictionary is -a copy of the dictionary as it was before the test. + Patch a dictionary, or dictionary like object, and restore the dictionary + to its original state after the test, where the restored dictionary is + a copy of the dictionary as it was before the test. + + `in_dict` can be a dictionary or a mapping like container. If it is a + mapping then it must at least support getting, setting and deleting items + plus iterating over keys. -`in_dict` can be a dictionary or a mapping like container. If it is a -mapping then it must at least support getting, setting and deleting items -plus iterating over keys. + `in_dict` can also be a string specifying the name of the dictionary, which + will then be fetched by importing it. -`in_dict` can also be a string specifying the name of the dictionary, which -will then be fetched by importing it. + `values` can be a dictionary of values to set in the dictionary. `values` + can also be an iterable of `(key, value)` pairs. -`values` can be a dictionary of values to set in the dictionary. `values` -can also be an iterable of `(key, value)` pairs. + If `clear` is True then the dictionary will be cleared before the new + values are set. 
-If `clear` is True then the dictionary will be cleared before the new -values are set. + `patch.dict` can also be called with arbitrary keyword arguments to set + values in the dictionary:: -`patch.dict` can also be called with arbitrary keyword arguments to set -values in the dictionary:: + with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()): + ... - with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()): - ... + `patch.dict` can be used as a context manager, decorator or class + decorator. When used as a class decorator `patch.dict` honours + `patch.TEST_PREFIX` for choosing which methods to wrap. + """ -`patch.dict` can be used as a context manager, decorator or class -decorator. When used as a class decorator `patch.dict` honours -`patch.TEST_PREFIX` for choosing which methods to wrap. -""" in_dict: Any values: Any clear: Any @@ -556,11 +566,10 @@ decorator. When used as a class decorator `patch.dict` honours def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: - """Patch the dict. -""" + """Patch the dict.""" + def __exit__(self, *args: object) -> Any: - """Unpatch the dict. -""" + """Unpatch the dict.""" start: Any stop: Any @@ -715,64 +724,71 @@ class MagicMixin(Base): def __init__(self, *args: Any, **kw: Any) -> None: ... class NonCallableMagicMock(MagicMixin, NonCallableMock): - """A version of `MagicMock` that isn't callable. -""" + """A version of `MagicMock` that isn't callable.""" + class MagicMock(MagicMixin, Mock): """ -MagicMock is a subclass of Mock with default implementations -of most of the magic methods. You can use MagicMock without having to -configure the magic methods yourself. + MagicMock is a subclass of Mock with default implementations + of most of the magic methods. You can use MagicMock without having to + configure the magic methods yourself. -If you use the `spec` or `spec_set` arguments then *only* magic -methods that exist in the spec will be created. + If you use the `spec` or `spec_set` arguments then *only* magic + methods that exist in the spec will be created. -Attributes and the return value of a `MagicMock` will also be `MagicMocks`. -""" + Attributes and the return value of a `MagicMock` will also be `MagicMocks`. + """ class AsyncMockMixin(Base): def __init__(self, *args: Any, **kwargs: Any) -> None: ... async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... def assert_awaited(self) -> None: """ -Assert that the mock was awaited at least once. -""" + Assert that the mock was awaited at least once. + """ + def assert_awaited_once(self) -> None: """ -Assert that the mock was awaited exactly once. -""" + Assert that the mock was awaited exactly once. + """ + def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: """ -Assert that the last await was with the specified arguments. -""" + Assert that the last await was with the specified arguments. + """ + def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: """ -Assert that the mock was awaited exactly once and with the specified -arguments. -""" + Assert that the mock was awaited exactly once and with the specified + arguments. + """ + def assert_any_await(self, *args: Any, **kwargs: Any) -> None: """ -Assert the mock has ever been awaited with the specified arguments. -""" + Assert the mock has ever been awaited with the specified arguments. + """ + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: """ -Assert the mock has been awaited with the specified calls. 
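For the `patch.dict` behaviour described above, a small sketch using a plain dictionary (the `config` name is arbitrary; illustrative only)::

    from unittest.mock import patch

    config = {"debug": False}
    with patch.dict(config, {"debug": True, "verbose": True}):
        # inside the block the patched values are visible
        assert config == {"debug": True, "verbose": True}
    # on exit the dictionary is restored to a copy of its original state
    assert config == {"debug": False}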
-The :attr:`await_args_list` list is checked for the awaits. + Assert the mock has been awaited with the specified calls. + The :attr:`await_args_list` list is checked for the awaits. + + If `any_order` is False (the default) then the awaits must be + sequential. There can be extra calls before or after the + specified awaits. -If `any_order` is False (the default) then the awaits must be -sequential. There can be extra calls before or after the -specified awaits. + If `any_order` is True then the awaits can be in any order, but + they must all appear in :attr:`await_args_list`. + """ -If `any_order` is True then the awaits can be in any order, but -they must all appear in :attr:`await_args_list`. -""" def assert_not_awaited(self) -> None: """ -Assert that the mock was never awaited. -""" + Assert that the mock was never awaited. + """ + def reset_mock(self, *args: Any, **kwargs: Any) -> None: """ -See :func:`.Mock.reset_mock()` -""" + See :func:`.Mock.reset_mock()` + """ await_count: int await_args: _Call | None await_args_list: _CallList @@ -782,52 +798,53 @@ class AsyncMagicMixin(MagicMixin): class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): """ -Enhance :class:`Mock` with features allowing to mock -an async function. - -The :class:`AsyncMock` object will behave so the object is -recognized as an async function, and the result of a call is an awaitable: - ->>> mock = AsyncMock() ->>> inspect.iscoroutinefunction(mock) -True ->>> inspect.isawaitable(mock()) -True - - -The result of ``mock()`` is an async function which will have the outcome -of ``side_effect`` or ``return_value``: - -- if ``side_effect`` is a function, the async function will return the - result of that function, -- if ``side_effect`` is an exception, the async function will raise the - exception, -- if ``side_effect`` is an iterable, the async function will return the - next value of the iterable, however, if the sequence of result is - exhausted, ``StopIteration`` is raised immediately, -- if ``side_effect`` is not defined, the async function will return the - value defined by ``return_value``, hence, by default, the async function - returns a new :class:`AsyncMock` object. - -If the outcome of ``side_effect`` or ``return_value`` is an async function, -the mock async function obtained when the mock object is called will be this -async function itself (and not an async function returning an async -function). - -The test author can also specify a wrapped object with ``wraps``. In this -case, the :class:`Mock` object behavior is the same as with an -:class:`.Mock` object: the wrapped object may have methods -defined as async function functions. - -Based on Martin Richard's asynctest project. -""" + Enhance :class:`Mock` with features allowing to mock + an async function. 
+ + The :class:`AsyncMock` object will behave so the object is + recognized as an async function, and the result of a call is an awaitable: + + >>> mock = AsyncMock() + >>> inspect.iscoroutinefunction(mock) + True + >>> inspect.isawaitable(mock()) + True + + + The result of ``mock()`` is an async function which will have the outcome + of ``side_effect`` or ``return_value``: + + - if ``side_effect`` is a function, the async function will return the + result of that function, + - if ``side_effect`` is an exception, the async function will raise the + exception, + - if ``side_effect`` is an iterable, the async function will return the + next value of the iterable, however, if the sequence of result is + exhausted, ``StopIteration`` is raised immediately, + - if ``side_effect`` is not defined, the async function will return the + value defined by ``return_value``, hence, by default, the async function + returns a new :class:`AsyncMock` object. + + If the outcome of ``side_effect`` or ``return_value`` is an async function, + the mock async function obtained when the mock object is called will be this + async function itself (and not an async function returning an async + function). + + The test author can also specify a wrapped object with ``wraps``. In this + case, the :class:`Mock` object behavior is the same as with an + :class:`.Mock` object: the wrapped object may have methods + defined as async function functions. + + Based on Martin Richard's asynctest project. + """ + # Improving the `reset_mock` signature. # It is defined on `AsyncMockMixin` with `*args, **kwargs`, which is not ideal. # But, `NonCallableMock` super-class has the better version. def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: """ -See :func:`.Mock.reset_mock()` -""" + See :func:`.Mock.reset_mock()` + """ class MagicProxy(Base): name: str @@ -838,8 +855,8 @@ class MagicProxy(Base): # See https://github.com/python/typeshed/issues/14701 class _ANY(Any): - """A helper object that compares equal to everything. -""" + """A helper object that compares equal to everything.""" + def __eq__(self, other: object) -> Literal[True]: ... def __ne__(self, other: object) -> Literal[False]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -858,27 +875,27 @@ if sys.version_info >= (3, 10): **kwargs: Any, ) -> Any: """Create a mock object using another object as a spec. Attributes on the -mock will use the corresponding attribute on the `spec` object as their -spec. + mock will use the corresponding attribute on the `spec` object as their + spec. -Functions or methods being mocked will have their arguments checked -to check that they are called with the correct signature. + Functions or methods being mocked will have their arguments checked + to check that they are called with the correct signature. -If `spec_set` is True then attempting to set attributes that don't exist -on the spec object will raise an `AttributeError`. + If `spec_set` is True then attempting to set attributes that don't exist + on the spec object will raise an `AttributeError`. -If a class is used as a spec then the return value of the mock (the -instance of the class) will have the same spec. You can use a class as the -spec for an instance object by passing `instance=True`. The returned mock -will only be callable if instances of the mock are callable. + If a class is used as a spec then the return value of the mock (the + instance of the class) will have the same spec. 
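A short sketch of the `AsyncMock` behaviour summarised above (illustrative only; `main` is an arbitrary coroutine name)::

    import asyncio
    import inspect
    from unittest.mock import AsyncMock

    mock = AsyncMock(return_value=42)
    assert inspect.iscoroutinefunction(mock)

    async def main() -> None:
        assert await mock("arg") == 42   # the call result is awaitable
        mock.assert_awaited_once_with("arg")

    asyncio.run(main())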
You can use a class as the + spec for an instance object by passing `instance=True`. The returned mock + will only be callable if instances of the mock are callable. -`create_autospec` will raise a `RuntimeError` if passed some common -misspellings of the arguments autospec and spec_set. Pass the argument -`unsafe` with the value True to disable that check. + `create_autospec` will raise a `RuntimeError` if passed some common + misspellings of the arguments autospec and spec_set. Pass the argument + `unsafe` with the value True to disable that check. -`create_autospec` also takes arbitrary keyword arguments that are passed to -the constructor of the created mock. -""" + `create_autospec` also takes arbitrary keyword arguments that are passed to + the constructor of the created mock. + """ else: def create_autospec( @@ -890,23 +907,23 @@ else: **kwargs: Any, ) -> Any: """Create a mock object using another object as a spec. Attributes on the - mock will use the corresponding attribute on the `spec` object as their - spec. + mock will use the corresponding attribute on the `spec` object as their + spec. - Functions or methods being mocked will have their arguments checked - to check that they are called with the correct signature. + Functions or methods being mocked will have their arguments checked + to check that they are called with the correct signature. - If `spec_set` is True then attempting to set attributes that don't exist - on the spec object will raise an `AttributeError`. + If `spec_set` is True then attempting to set attributes that don't exist + on the spec object will raise an `AttributeError`. - If a class is used as a spec then the return value of the mock (the - instance of the class) will have the same spec. You can use a class as the - spec for an instance object by passing `instance=True`. The returned mock - will only be callable if instances of the mock are callable. + If a class is used as a spec then the return value of the mock (the + instance of the class) will have the same spec. You can use a class as the + spec for an instance object by passing `instance=True`. The returned mock + will only be callable if instances of the mock are callable. - `create_autospec` also takes arbitrary keyword arguments that are passed to - the constructor of the created mock. -""" + `create_autospec` also takes arbitrary keyword arguments that are passed to + the constructor of the created mock. + """ class _SpecState: spec: Any @@ -927,26 +944,27 @@ class _SpecState: def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: """ -A helper function to create a mock to replace the use of `open`. It works -for `open` called directly or used as a context manager. + A helper function to create a mock to replace the use of `open`. It works + for `open` called directly or used as a context manager. -The `mock` argument is the mock object to configure. If `None` (the -default) then a `MagicMock` will be created for you, with the API limited -to methods or attributes available on standard file handles. + The `mock` argument is the mock object to configure. If `None` (the + default) then a `MagicMock` will be created for you, with the API limited + to methods or attributes available on standard file handles. -`read_data` is a string for the `read`, `readline` and `readlines` of the -file handle to return. This is an empty string by default. -""" + `read_data` is a string for the `read`, `readline` and `readlines` of the + file handle to return. This is an empty string by default. 
+ """ class PropertyMock(Mock): """ -A mock intended to be used as a property, or other descriptor, on a class. -`PropertyMock` provides `__get__` and `__set__` methods so you can specify -a return value when it is fetched. + A mock intended to be used as a property, or other descriptor, on a class. + `PropertyMock` provides `__get__` and `__set__` methods so you can specify + a return value when it is fetched. + + Fetching a `PropertyMock` instance from an object calls the mock, with + no args. Setting it calls the mock with the value being set. + """ -Fetching a `PropertyMock` instance from an object calls the mock, with -no args. Setting it calls the mock with the value being set. -""" def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... def __set__(self, obj: Any, val: Any) -> None: ... @@ -958,42 +976,44 @@ if sys.version_info >= (3, 13): # Same as `NonCallableMock.reset_mock.` def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: """ -See :func:`.Mock.reset_mock()` -""" + See :func:`.Mock.reset_mock()` + """ + def wait_until_called(self, *, timeout: float | None | _SentinelObject = ...) -> None: """Wait until the mock object is called. -`timeout` - time to wait for in seconds, waits forever otherwise. -Defaults to the constructor provided timeout. -Use None to block undefinetively. -""" + `timeout` - time to wait for in seconds, waits forever otherwise. + Defaults to the constructor provided timeout. + Use None to block undefinetively. + """ + def wait_until_any_call_with(self, *args: Any, **kwargs: Any) -> None: """Wait until the mock object is called with given args. -Waits for the timeout in seconds provided in the constructor. -""" + Waits for the timeout in seconds provided in the constructor. + """ class ThreadingMock(ThreadingMixin, MagicMixin, Mock): """ -A mock that can be used to wait until on calls happening -in a different thread. + A mock that can be used to wait until on calls happening + in a different thread. -The constructor can take a `timeout` argument which -controls the timeout in seconds for all `wait` calls of the mock. + The constructor can take a `timeout` argument which + controls the timeout in seconds for all `wait` calls of the mock. -You can change the default timeout of all instances via the -`ThreadingMock.DEFAULT_TIMEOUT` attribute. + You can change the default timeout of all instances via the + `ThreadingMock.DEFAULT_TIMEOUT` attribute. -If no timeout is set, it will block undefinetively. -""" + If no timeout is set, it will block undefinetively. + """ def seal(mock: Any) -> None: """Disable the automatic generation of child mocks. -Given an input Mock, seals it to ensure no further mocks will be generated -when accessing an attribute that was not already defined. + Given an input Mock, seals it to ensure no further mocks will be generated + when accessing an attribute that was not already defined. -The operation recursively seals the mock passed in, meaning that -the mock itself, any mocks generated by accessing one of its attributes, -and all assigned mocks without a name or spec will be sealed. -""" + The operation recursively seals the mock passed in, meaning that + the mock itself, any mocks generated by accessing one of its attributes, + and all assigned mocks without a name or spec will be sealed. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi index ae8f4cd7645f6..121d275d3b0de 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/result.pyi @@ -1,5 +1,5 @@ -"""Test result object -""" +"""Test result object""" + import sys import unittest.case from _typeshed import OptExcInfo @@ -19,14 +19,15 @@ def failfast(method: _F) -> _F: ... class TestResult: """Holder for test result information. -Test results are automatically managed by the TestCase and TestSuite -classes, and do not need to be explicitly manipulated by writers of tests. + Test results are automatically managed by the TestCase and TestSuite + classes, and do not need to be explicitly manipulated by writers of tests. + + Each instance holds the total number of tests run, and collections of + failures and errors that occurred among those test runs. The collections + contain tuples of (testcase, exceptioninfo), where exceptioninfo is the + formatted traceback of the error that occurred. + """ -Each instance holds the total number of tests run, and collections of -failures and errors that occurred among those test runs. The collections -contain tuples of (testcase, exceptioninfo), where exceptioninfo is the -formatted traceback of the error that occurred. -""" errors: list[tuple[unittest.case.TestCase, str]] failures: list[tuple[unittest.case.TestCase, str]] skipped: list[tuple[unittest.case.TestCase, str]] @@ -42,59 +43,63 @@ formatted traceback of the error that occurred. def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... def printErrors(self) -> None: - """Called by TestRunner after test run -""" + """Called by TestRunner after test run""" + def wasSuccessful(self) -> bool: - """Tells whether or not this result was a success. -""" + """Tells whether or not this result was a success.""" + def stop(self) -> None: - """Indicates that the tests should be aborted. -""" + """Indicates that the tests should be aborted.""" + def startTest(self, test: unittest.case.TestCase) -> None: - """Called when the given test is about to be run -""" + """Called when the given test is about to be run""" + def stopTest(self, test: unittest.case.TestCase) -> None: - """Called when the given test has been run -""" + """Called when the given test has been run""" + def startTestRun(self) -> None: """Called once before any tests are executed. -See startTest for a method called before each test. -""" + See startTest for a method called before each test. + """ + def stopTestRun(self) -> None: """Called once after all tests are executed. -See stopTest for a method called after each test. -""" + See stopTest for a method called after each test. + """ + def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: """Called when an error has occurred. 'err' is a tuple of values as -returned by sys.exc_info(). -""" + returned by sys.exc_info(). + """ + def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: """Called when an error has occurred. 'err' is a tuple of values as -returned by sys.exc_info(). -""" + returned by sys.exc_info(). 
+ """ + def addSuccess(self, test: unittest.case.TestCase) -> None: - """Called when a test has completed successfully -""" + """Called when a test has completed successfully""" + def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: - """Called when a test is skipped. -""" + """Called when a test is skipped.""" + def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: - """Called when an expected failure/error occurred. -""" + """Called when an expected failure/error occurred.""" + def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: - """Called when a test was expected to fail, but succeed. -""" + """Called when a test was expected to fail, but succeed.""" + def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: """Called at the end of a subtest. -'err' is None if the subtest ended successfully, otherwise it's a -tuple of values as returned by sys.exc_info(). -""" + 'err' is None if the subtest ended successfully, otherwise it's a + tuple of values as returned by sys.exc_info(). + """ if sys.version_info >= (3, 12): def addDuration(self, test: unittest.case.TestCase, elapsed: float) -> None: """Called when a test finished to run, regardless of its outcome. -*test* is the test case corresponding to the test method. -*elapsed* is the time represented in seconds, and it includes the -execution of cleanup functions. -""" + *test* is the test case corresponding to the test method. + *elapsed* is the time represented in seconds, and it includes the + execution of cleanup functions. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi index f6ab164c5943d..3f5a9406879f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/runner.pyi @@ -1,5 +1,5 @@ -"""Running tests -""" +"""Running tests""" + import sys import unittest.case import unittest.result @@ -24,8 +24,8 @@ class _TextTestStream(_SupportsWriteAndFlush, Protocol): # But that's not feasible to do Generically # We can expand the attributes if requested class _WritelnDecorator: - """Used to decorate file-like objects with a handy 'writeln' method -""" + """Used to decorate file-like objects with a handy 'writeln' method""" + def __init__(self, stream: _SupportsWriteAndFlush) -> None: ... def writeln(self, arg: str | None = None) -> None: ... def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__ @@ -41,8 +41,9 @@ _StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator) class TextTestResult(unittest.result.TestResult, Generic[_StreamT]): """A test result class that can print formatted text results to a stream. -Used by TextTestRunner. -""" + Used by TextTestRunner. + """ + descriptions: bool # undocumented dots: bool # undocumented separator1: str @@ -53,8 +54,8 @@ Used by TextTestRunner. durations: int | None def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: int | None = None) -> None: """Construct a TextTestResult. Subclasses should accept **kwargs -to ensure compatibility as the interface changes. -""" + to ensure compatibility as the interface changes. + """ else: def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ... @@ -64,9 +65,10 @@ to ensure compatibility as the interface changes. 
class TextTestRunner: """A test runner class that displays results in textual form. -It prints out the names of tests as they are run, errors as they -occur, and a summary of the results at the end of the test run. -""" + It prints out the names of tests as they are run, errors as they + occur, and a summary of the results at the end of the test run. + """ + resultclass: _ResultClassType stream: _WritelnDecorator descriptions: bool @@ -93,9 +95,9 @@ occur, and a summary of the results at the end of the test run. ) -> None: """Construct a TextTestRunner. -Subclasses should accept **kwargs to ensure compatibility as the -interface changes. -""" + Subclasses should accept **kwargs to ensure compatibility as the + interface changes. + """ else: def __init__( self, @@ -111,11 +113,10 @@ interface changes. ) -> None: """Construct a TextTestRunner. - Subclasses should accept **kwargs to ensure compatibility as the - interface changes. - """ + Subclasses should accept **kwargs to ensure compatibility as the + interface changes. + """ def _makeResult(self) -> TextTestResult: ... def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: - """Run the given test case or test suite. -""" + """Run the given test case or test suite.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi index 56577640030d4..3445b85ce3f9e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/suite.pyi @@ -1,5 +1,5 @@ -"""TestSuite -""" +"""TestSuite""" + import unittest.case import unittest.result from collections.abc import Iterable, Iterator @@ -9,8 +9,8 @@ from typing_extensions import TypeAlias _TestType: TypeAlias = unittest.case.TestCase | TestSuite class BaseTestSuite: - """A simple test suite that doesn't provide class or module shared fixtures. - """ + """A simple test suite that doesn't provide class or module shared fixtures.""" + _tests: list[unittest.case.TestCase] _removed_tests: int def __init__(self, tests: Iterable[_TestType] = ()) -> None: ... @@ -19,8 +19,8 @@ class BaseTestSuite: def addTests(self, tests: Iterable[_TestType]) -> None: ... def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... def debug(self) -> None: - """Run the tests without collecting errors in a TestResult -""" + """Run the tests without collecting errors in a TestResult""" + def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... def __eq__(self, other: object) -> bool: ... @@ -29,10 +29,11 @@ class BaseTestSuite: class TestSuite(BaseTestSuite): """A test suite is a composite test consisting of a number of TestCases. -For use, create an instance of TestSuite, then add test case instances. -When all tests have been added, the suite can be passed to a test -runner, such as TextTestRunner. It will run the individual test cases -in the order in which they were added, aggregating the results. When -subclassing, do not forget to call the base class constructor. -""" + For use, create an instance of TestSuite, then add test case instances. + When all tests have been added, the suite can be passed to a test + runner, such as TextTestRunner. It will run the individual test cases + in the order in which they were added, aggregating the results. When + subclassing, do not forget to call the base class constructor. 
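A minimal sketch of wiring a `TestSuite` into a `TextTestRunner`, as the docstrings above describe (illustrative only; `ExampleTest` is an arbitrary test case)::

    import unittest

    class ExampleTest(unittest.TestCase):
        def test_addition(self) -> None:
            self.assertEqual(1 + 1, 2)

    suite = unittest.TestSuite()
    suite.addTest(ExampleTest("test_addition"))
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    assert result.wasSuccessful()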
+ """ + def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi index c7c108348c568..0bed55dbccf72 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/unittest/util.pyi @@ -1,5 +1,5 @@ -"""Various utility functions. -""" +"""Various utility functions.""" + from collections.abc import MutableSequence, Sequence from typing import Any, Final, TypeVar from typing_extensions import TypeAlias @@ -21,24 +21,25 @@ def strclass(cls: type) -> str: ... def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: """Finds elements in only one or the other of two, sorted input lists. -Returns a two-element tuple of lists. The first list contains those -elements in the "expected" list but not in the "actual" list, and the -second contains those elements in the "actual" list but not in the -"expected" list. Duplicate elements in either input list are ignored. -""" + Returns a two-element tuple of lists. The first list contains those + elements in the "expected" list but not in the "actual" list, and the + second contains those elements in the "actual" list but not in the + "expected" list. Duplicate elements in either input list are ignored. + """ + def unorderable_list_difference(expected: MutableSequence[_T], actual: MutableSequence[_T]) -> tuple[list[_T], list[_T]]: """Same behavior as sorted_list_difference but -for lists of unorderable items (like dicts). + for lists of unorderable items (like dicts). + + As it does a linear search per item (remove) it + has O(n*n) performance. + """ -As it does a linear search per item (remove) it -has O(n*n) performance. -""" def three_way_cmp(x: Any, y: Any) -> int: - """Return -1 if x < y, 0 if x == y and 1 if x > y -""" + """Return -1 if x < y, 0 if x == y and 1 if x > y""" + def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: - """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ -""" + """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ""" + def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: - """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ -""" + """Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi index 62807408047fe..42386f7f06c5d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/error.pyi @@ -10,6 +10,7 @@ responses, with a status code, headers, and a body. In some contexts, an application may want to handle an exception like a regular response. """ + from email.message import Message from typing import IO from urllib.response import addinfourl @@ -23,8 +24,8 @@ class URLError(OSError): def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): - """Raised when HTTP error occurs, but also acts like non-error return -""" + """Raised when HTTP error occurs, but also acts like non-error return""" + @property def headers(self) -> Message: ... 
@headers.setter @@ -38,7 +39,7 @@ class HTTPError(URLError, addinfourl): def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... class ContentTooShortError(URLError): - """Exception raised when downloaded size does not match content-length. -""" + """Exception raised when downloaded size does not match content-length.""" + content: tuple[str, Message] def __init__(self, message: str, content: tuple[str, Message]) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi index aa3db84636684..10b9bcf0b6ac0 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/parse.pyi @@ -30,6 +30,7 @@ The WHATWG URL Parser spec should also be considered. We are not compliant with it either due to existing user code API behavior expectations (Hyrum's Law). It serves as a useful guide when making changes. """ + import sys from collections.abc import Iterable, Mapping, Sequence from types import GenericAlias @@ -71,20 +72,20 @@ if sys.version_info < (3, 11): MAX_CACHE_SIZE: Final[int] class _ResultMixinStr: - """Standard approach to encoding parsed results from str to bytes -""" + """Standard approach to encoding parsed results from str to bytes""" + __slots__ = () def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... class _ResultMixinBytes: - """Standard approach to decoding parsed results from bytes to str -""" + """Standard approach to decoding parsed results from bytes to str""" + __slots__ = () def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): - """Shared methods for the parsed result objects containing a netloc element -""" + """Shared methods for the parsed result objects containing a netloc element""" + __slots__ = () @property def username(self) -> AnyStr | None: ... @@ -97,8 +98,8 @@ class _NetlocResultMixinBase(Generic[AnyStr]): def __class_getitem__(cls, item: Any, /) -> GenericAlias: """Represent a PEP 585 generic type -E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). -""" + E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,). + """ class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): __slots__ = () @@ -108,21 +109,23 @@ class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): class _DefragResultBase(NamedTuple, Generic[AnyStr]): """ -DefragResult(url, fragment) + DefragResult(url, fragment) + + A 2-tuple that contains the url without fragment identifier and the fragment + identifier as a separate argument. + """ -A 2-tuple that contains the url without fragment identifier and the fragment -identifier as a separate argument. -""" url: AnyStr fragment: AnyStr class _SplitResultBase(NamedTuple, Generic[AnyStr]): """ -SplitResult(scheme, netloc, path, query, fragment) + SplitResult(scheme, netloc, path, query, fragment) + + A 5-tuple that contains the different components of a URL. Similar to + ParseResult, but does not split params. + """ -A 5-tuple that contains the different components of a URL. Similar to -ParseResult, but does not split params. -""" scheme: AnyStr netloc: AnyStr path: AnyStr @@ -131,10 +134,11 @@ ParseResult, but does not split params. 
class _ParseResultBase(NamedTuple, Generic[AnyStr]): """ -ParseResult(scheme, netloc, path, params, query, fragment) + ParseResult(scheme, netloc, path, params, query, fragment) + + A 6-tuple that contains components of a parsed URL. + """ -A 6-tuple that contains components of a parsed URL. -""" scheme: AnyStr netloc: AnyStr path: AnyStr @@ -173,32 +177,33 @@ def parse_qs( ) -> dict[AnyStr, list[AnyStr]]: """Parse a query given as a string argument. -Arguments: + Arguments: -qs: percent-encoded query string to be parsed + qs: percent-encoded query string to be parsed -keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. - A true value indicates that blanks should be retained as - blank strings. The default false value indicates that - blank values are to be ignored and treated as if they were - not included. + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as + blank strings. The default false value indicates that + blank values are to be ignored and treated as if they were + not included. -strict_parsing: flag indicating what to do with parsing errors. - If false (the default), errors are silently ignored. - If true, errors raise a ValueError exception. + strict_parsing: flag indicating what to do with parsing errors. + If false (the default), errors are silently ignored. + If true, errors raise a ValueError exception. -encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. -max_num_fields: int. If set, then throws a ValueError if there - are more than n fields read by parse_qsl(). + max_num_fields: int. If set, then throws a ValueError if there + are more than n fields read by parse_qsl(). -separator: str. The symbol to use for separating the query arguments. - Defaults to &. + separator: str. The symbol to use for separating the query arguments. + Defaults to &. + + Returns a dictionary. + """ -Returns a dictionary. -""" def parse_qsl( qs: AnyStr | None, keep_blank_values: bool = False, @@ -210,113 +215,120 @@ def parse_qsl( ) -> list[tuple[AnyStr, AnyStr]]: """Parse a query given as a string argument. -Arguments: + Arguments: -qs: percent-encoded query string to be parsed + qs: percent-encoded query string to be parsed -keep_blank_values: flag indicating whether blank values in - percent-encoded queries should be treated as blank strings. - A true value indicates that blanks should be retained as blank - strings. The default false value indicates that blank values - are to be ignored and treated as if they were not included. + keep_blank_values: flag indicating whether blank values in + percent-encoded queries should be treated as blank strings. + A true value indicates that blanks should be retained as blank + strings. The default false value indicates that blank values + are to be ignored and treated as if they were not included. -strict_parsing: flag indicating what to do with parsing errors. If - false (the default), errors are silently ignored. If true, - errors raise a ValueError exception. + strict_parsing: flag indicating what to do with parsing errors. If + false (the default), errors are silently ignored. If true, + errors raise a ValueError exception. 
-encoding and errors: specify how to decode percent-encoded sequences - into Unicode characters, as accepted by the bytes.decode() method. + encoding and errors: specify how to decode percent-encoded sequences + into Unicode characters, as accepted by the bytes.decode() method. -max_num_fields: int. If set, then throws a ValueError - if there are more than n fields read by parse_qsl(). + max_num_fields: int. If set, then throws a ValueError + if there are more than n fields read by parse_qsl(). -separator: str. The symbol to use for separating the query arguments. - Defaults to &. + separator: str. The symbol to use for separating the query arguments. + Defaults to &. + + Returns a list, as G-d intended. + """ -Returns a list, as G-d intended. -""" @overload def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: """quote('abc def') -> 'abc%20def' -Each part of a URL, e.g. the path info, the query, etc., has a -different set of reserved characters that must be quoted. The -quote function offers a cautious (not minimal) way to quote a -string for most of these parts. + Each part of a URL, e.g. the path info, the query, etc., has a + different set of reserved characters that must be quoted. The + quote function offers a cautious (not minimal) way to quote a + string for most of these parts. -RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists -the following (un)reserved characters. + RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists + the following (un)reserved characters. -unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" -reserved = gen-delims / sub-delims -gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" -sub-delims = "!" / "$" / "&" / "'" / "(" / ")" - / "*" / "+" / "," / ";" / "=" + unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + reserved = gen-delims / sub-delims + gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" + sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + / "*" / "+" / "," / ";" / "=" -Each of the reserved characters is reserved in some component of a URL, -but not necessarily in all of them. + Each of the reserved characters is reserved in some component of a URL, + but not necessarily in all of them. -The quote function %-escapes all characters that are neither in the -unreserved chars ("always safe") nor the additional chars set via the -safe arg. + The quote function %-escapes all characters that are neither in the + unreserved chars ("always safe") nor the additional chars set via the + safe arg. -The default for the safe arg is '/'. The character is reserved, but in -typical usage the quote function is being called on a path where the -existing slash characters are to be preserved. + The default for the safe arg is '/'. The character is reserved, but in + typical usage the quote function is being called on a path where the + existing slash characters are to be preserved. -Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. -Now, "~" is included in the set of unreserved characters. + Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. + Now, "~" is included in the set of unreserved characters. -string and safe may be either str or bytes objects. encoding and errors -must not be specified if string is a bytes object. + string and safe may be either str or bytes objects. encoding and errors + must not be specified if string is a bytes object. 
+ + The optional encoding and errors parameters specify how to deal with + non-ASCII characters, as accepted by the str.encode method. + By default, encoding='utf-8' (characters are encoded with UTF-8), and + errors='strict' (unsupported characters raise a UnicodeEncodeError). + """ -The optional encoding and errors parameters specify how to deal with -non-ASCII characters, as accepted by the str.encode method. -By default, encoding='utf-8' (characters are encoded with UTF-8), and -errors='strict' (unsupported characters raise a UnicodeEncodeError). -""" @overload def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: """Like quote(), but accepts a bytes object rather than a str, and does -not perform string-to-bytes encoding. It always returns an ASCII string. -quote_from_bytes(b'abc def?') -> 'abc%20def%3f' -""" + not perform string-to-bytes encoding. It always returns an ASCII string. + quote_from_bytes(b'abc def?') -> 'abc%20def%3f' + """ + @overload def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: """Like quote(), but also replace ' ' with '+', as required for quoting -HTML form values. Plus signs in the original string are escaped unless -they are included in safe. It also does not have safe default to '/'. -""" + HTML form values. Plus signs in the original string are escaped unless + they are included in safe. It also does not have safe default to '/'. + """ + @overload def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: """Replace %xx escapes by their single-character equivalent. The optional -encoding and errors parameters specify how to decode percent-encoded -sequences into Unicode characters, as accepted by the bytes.decode() -method. -By default, percent-encoded sequences are decoded with UTF-8, and invalid -sequences are replaced by a placeholder character. + encoding and errors parameters specify how to decode percent-encoded + sequences into Unicode characters, as accepted by the bytes.decode() + method. + By default, percent-encoded sequences are decoded with UTF-8, and invalid + sequences are replaced by a placeholder character. + + unquote('abc%20def') -> 'abc def'. + """ -unquote('abc%20def') -> 'abc def'. -""" def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: - """unquote_to_bytes('abc%20def') -> b'abc def'. -""" + """unquote_to_bytes('abc%20def') -> b'abc def'.""" + def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: """Like unquote(), but also replace plus signs by spaces, as required for -unquoting HTML form values. + unquoting HTML form values. + + unquote_plus('%7e/abc+def') -> '~/abc def' + """ -unquote_plus('%7e/abc+def') -> '~/abc def' -""" @overload def urldefrag(url: str) -> DefragResult: """Removes any existing fragment from URL. -Returns a tuple of the defragmented URL and the fragment. If -the URL contained no fragments, the second element is the -empty string. -""" + Returns a tuple of the defragmented URL and the fragment. If + the URL contained no fragments, the second element is the + empty string. + """ + @overload def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... 
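The quoting and query-parsing functions documented above behave roughly as follows (illustrative only; the example strings are arbitrary)::

    from urllib.parse import parse_qs, quote, quote_plus, unquote, urldefrag

    assert quote("abc def") == "abc%20def"
    assert quote_plus("a+b c") == "a%2Bb+c"      # '+' is escaped, space becomes '+'
    assert unquote("abc%20def") == "abc def"
    assert parse_qs("a=1&a=2&b=3") == {"a": ["1", "2"], "b": ["3"]}
    assert urldefrag("http://example.com/page#frag") == ("http://example.com/page", "frag")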
@@ -349,43 +361,46 @@ def urlencode( ) -> str: """Encode a dict or sequence of two-element tuples into a URL query string. -If any values in the query arg are sequences and doseq is true, each -sequence element is converted to a separate parameter. + If any values in the query arg are sequences and doseq is true, each + sequence element is converted to a separate parameter. -If the query arg is a sequence of two-element tuples, the order of the -parameters in the output will match the order of parameters in the -input. + If the query arg is a sequence of two-element tuples, the order of the + parameters in the output will match the order of parameters in the + input. -The components of a query arg may each be either a string or a bytes type. + The components of a query arg may each be either a string or a bytes type. + + The safe, encoding, and errors parameters are passed down to the function + specified by quote_via (encoding and errors only if a component is a str). + """ -The safe, encoding, and errors parameters are passed down to the function -specified by quote_via (encoding and errors only if a component is a str). -""" def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: """Join a base URL and a possibly relative URL to form an absolute -interpretation of the latter. -""" + interpretation of the latter. + """ + @overload def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: """Parse a URL into 6 components: -<scheme>://<netloc>/<path>;<params>?<query>#<fragment> + <scheme>://<netloc>/<path>;<params>?<query>#<fragment> -The result is a named 6-tuple with fields corresponding to the -above. It is either a ParseResult or ParseResultBytes object, -depending on the type of the url parameter. + The result is a named 6-tuple with fields corresponding to the + above. It is either a ParseResult or ParseResultBytes object, + depending on the type of the url parameter. -The username, password, hostname, and port sub-components of netloc -can also be accessed as attributes of the returned object. + The username, password, hostname, and port sub-components of netloc + can also be accessed as attributes of the returned object. -The scheme argument provides the default value of the scheme -component when no scheme is found in url. + The scheme argument provides the default value of the scheme + component when no scheme is found in url. -If allow_fragments is False, no attempt is made to separate the -fragment component from the previous component, which can be either -path or query. + If allow_fragments is False, no attempt is made to separate the + fragment component from the previous component, which can be either + path or query. + + Note that % escapes are not expanded. + """ -Note that % escapes are not expanded. -""" @overload def urlparse( url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True @@ -393,49 +408,47 @@ def urlparse( @overload def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: """Parse a URL into 5 components: -<scheme>://<netloc>/<path>?<query>#<fragment> + <scheme>://<netloc>/<path>?<query>#<fragment> -The result is a named 5-tuple with fields corresponding to the -above. It is either a SplitResult or SplitResultBytes object, -depending on the type of the url parameter. + The result is a named 5-tuple with fields corresponding to the + above. It is either a SplitResult or SplitResultBytes object, + depending on the type of the url parameter. -The username, password, hostname, and port sub-components of netloc -can also be accessed as attributes of the returned object.
+ The username, password, hostname, and port sub-components of netloc + can also be accessed as attributes of the returned object. -The scheme argument provides the default value of the scheme -component when no scheme is found in url. + The scheme argument provides the default value of the scheme + component when no scheme is found in url. -If allow_fragments is False, no attempt is made to separate the -fragment component from the previous component, which can be either -path or query. + If allow_fragments is False, no attempt is made to separate the + fragment component from the previous component, which can be either + path or query. -Note that % escapes are not expanded. -""" + Note that % escapes are not expanded. + """ if sys.version_info >= (3, 11): @overload - def urlsplit( - url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True - ) -> SplitResultBytes: + def urlsplit(url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True) -> SplitResultBytes: """Parse a URL into 5 components: -<scheme>://<netloc>/<path>?<query>#<fragment> + <scheme>://<netloc>/<path>?<query>#<fragment> -The result is a named 5-tuple with fields corresponding to the -above. It is either a SplitResult or SplitResultBytes object, -depending on the type of the url parameter. + The result is a named 5-tuple with fields corresponding to the + above. It is either a SplitResult or SplitResultBytes object, + depending on the type of the url parameter. -The username, password, hostname, and port sub-components of netloc -can also be accessed as attributes of the returned object. + The username, password, hostname, and port sub-components of netloc + can also be accessed as attributes of the returned object. -The scheme argument provides the default value of the scheme -component when no scheme is found in url. + The scheme argument provides the default value of the scheme + component when no scheme is found in url. -If allow_fragments is False, no attempt is made to separate the -fragment component from the previous component, which can be either -path or query. + If allow_fragments is False, no attempt is made to separate the + fragment component from the previous component, which can be either + path or query. -Note that % escapes are not expanded. -""" + Note that % escapes are not expanded. + """ else: @overload @@ -443,33 +456,34 @@ else: url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> SplitResultBytes: """Parse a URL into 5 components: - <scheme>://<netloc>/<path>?<query>#<fragment> + <scheme>://<netloc>/<path>?<query>#<fragment> - The result is a named 5-tuple with fields corresponding to the - above. It is either a SplitResult or SplitResultBytes object, - depending on the type of the url parameter. + The result is a named 5-tuple with fields corresponding to the + above. It is either a SplitResult or SplitResultBytes object, + depending on the type of the url parameter. - The username, password, hostname, and port sub-components of netloc - can also be accessed as attributes of the returned object. + The username, password, hostname, and port sub-components of netloc + can also be accessed as attributes of the returned object. - The scheme argument provides the default value of the scheme - component when no scheme is found in url. + The scheme argument provides the default value of the scheme + component when no scheme is found in url. - If allow_fragments is False, no attempt is made to separate the - fragment component from the previous component, which can be either - path or query.
+ If allow_fragments is False, no attempt is made to separate the + fragment component from the previous component, which can be either + path or query. - Note that % escapes are not expanded. - """ + Note that % escapes are not expanded. + """ # Requires an iterable of length 6 @overload def urlunparse(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] """Put a parsed URL back together again. This may result in a -slightly different, but equivalent URL, if the URL that was parsed -originally had redundant delimiters, e.g. a ? with an empty query -(the draft states that these are equivalent). -""" + slightly different, but equivalent URL, if the URL that was parsed + originally had redundant delimiters, e.g. a ? with an empty query + (the draft states that these are equivalent). + """ + @overload def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... @@ -477,15 +491,16 @@ def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... @overload def urlunsplit(components: Iterable[None]) -> Literal[b""]: # type: ignore[overload-overlap] """Combine the elements of a tuple as returned by urlsplit() into a -complete URL as a string. The data argument can be any five-item iterable. -This may result in a slightly different, but equivalent URL, if the URL that -was parsed originally had unnecessary delimiters (for example, a ? with an -empty query; the RFC states that these are equivalent). -""" + complete URL as a string. The data argument can be any five-item iterable. + This may result in a slightly different, but equivalent URL, if the URL that + was parsed originally had unnecessary delimiters (for example, a ? with an + empty query; the RFC states that these are equivalent). + """ + @overload def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... def unwrap(url: str) -> str: """Transform a string like '<URL:scheme://host/path>' into 'scheme://host/path'. -The string is returned unchanged if it's not a wrapped URL. -""" + The string is returned unchanged if it's not a wrapped URL. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi index 11ebb205854d0..4d9636102ed5c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/request.pyi @@ -66,6 +66,7 @@ urllib.request.install_opener(opener) f = urllib.request.urlopen('https://www.python.org/') """ + import ssl import sys from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead @@ -126,44 +127,44 @@ if sys.version_info >= (3, 13): ) -> _UrlopenRet: """Open the URL url, which can be either a string or a Request object. -*data* must be an object specifying additional data to be sent to -the server, or None if no such data is needed. See Request for -details. + *data* must be an object specifying additional data to be sent to + the server, or None if no such data is needed. See Request for + details. -urllib.request module uses HTTP/1.1 and includes a "Connection:close" -header in its HTTP requests. + urllib.request module uses HTTP/1.1 and includes a "Connection:close" + header in its HTTP requests. -The optional *timeout* parameter specifies a timeout in seconds for -blocking operations like the connection attempt (if not specified, the -global default timeout setting will be used). This only works for HTTP, -HTTPS and FTP connections.
+ The optional *timeout* parameter specifies a timeout in seconds for + blocking operations like the connection attempt (if not specified, the + global default timeout setting will be used). This only works for HTTP, + HTTPS and FTP connections. -If *context* is specified, it must be a ssl.SSLContext instance describing -the various SSL options. See HTTPSConnection for more details. + If *context* is specified, it must be a ssl.SSLContext instance describing + the various SSL options. See HTTPSConnection for more details. -This function always returns an object which can work as a -context manager and has the properties url, headers, and status. -See urllib.response.addinfourl for more detail on these properties. + This function always returns an object which can work as a + context manager and has the properties url, headers, and status. + See urllib.response.addinfourl for more detail on these properties. -For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse -object slightly modified. In addition to the three new methods above, the -msg attribute contains the same information as the reason attribute --- -the reason phrase returned by the server --- instead of the response -headers as it is specified in the documentation for HTTPResponse. + For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse + object slightly modified. In addition to the three new methods above, the + msg attribute contains the same information as the reason attribute --- + the reason phrase returned by the server --- instead of the response + headers as it is specified in the documentation for HTTPResponse. -For FTP, file, and data URLs, this function returns a -urllib.response.addinfourl object. + For FTP, file, and data URLs, this function returns a + urllib.response.addinfourl object. -Note that None may be returned if no handler handles the request (though -the default installed global OpenerDirector uses UnknownHandler to ensure -this never happens). + Note that None may be returned if no handler handles the request (though + the default installed global OpenerDirector uses UnknownHandler to ensure + this never happens). -In addition, if proxy settings are detected (for example, when a *_proxy -environment variable like http_proxy is set), ProxyHandler is default -installed and makes sure the requests are handled through the proxy. + In addition, if proxy settings are detected (for example, when a *_proxy + environment variable like http_proxy is set), ProxyHandler is default + installed and makes sure the requests are handled through the proxy. -""" + """ else: def urlopen( @@ -178,81 +179,82 @@ else: ) -> _UrlopenRet: """Open the URL url, which can be either a string or a Request object. - *data* must be an object specifying additional data to be sent to - the server, or None if no such data is needed. See Request for - details. + *data* must be an object specifying additional data to be sent to + the server, or None if no such data is needed. See Request for + details. - urllib.request module uses HTTP/1.1 and includes a "Connection:close" - header in its HTTP requests. + urllib.request module uses HTTP/1.1 and includes a "Connection:close" + header in its HTTP requests. - The optional *timeout* parameter specifies a timeout in seconds for - blocking operations like the connection attempt (if not specified, the - global default timeout setting will be used). This only works for HTTP, - HTTPS and FTP connections. 
+ The optional *timeout* parameter specifies a timeout in seconds for + blocking operations like the connection attempt (if not specified, the + global default timeout setting will be used). This only works for HTTP, + HTTPS and FTP connections. - If *context* is specified, it must be a ssl.SSLContext instance describing - the various SSL options. See HTTPSConnection for more details. + If *context* is specified, it must be a ssl.SSLContext instance describing + the various SSL options. See HTTPSConnection for more details. - The optional *cafile* and *capath* parameters specify a set of trusted CA - certificates for HTTPS requests. cafile should point to a single file - containing a bundle of CA certificates, whereas capath should point to a - directory of hashed certificate files. More information can be found in - ssl.SSLContext.load_verify_locations(). + The optional *cafile* and *capath* parameters specify a set of trusted CA + certificates for HTTPS requests. cafile should point to a single file + containing a bundle of CA certificates, whereas capath should point to a + directory of hashed certificate files. More information can be found in + ssl.SSLContext.load_verify_locations(). - The *cadefault* parameter is ignored. + The *cadefault* parameter is ignored. - This function always returns an object which can work as a - context manager and has the properties url, headers, and status. - See urllib.response.addinfourl for more detail on these properties. + This function always returns an object which can work as a + context manager and has the properties url, headers, and status. + See urllib.response.addinfourl for more detail on these properties. - For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse - object slightly modified. In addition to the three new methods above, the - msg attribute contains the same information as the reason attribute --- - the reason phrase returned by the server --- instead of the response - headers as it is specified in the documentation for HTTPResponse. + For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse + object slightly modified. In addition to the three new methods above, the + msg attribute contains the same information as the reason attribute --- + the reason phrase returned by the server --- instead of the response + headers as it is specified in the documentation for HTTPResponse. - For FTP, file, and data URLs and requests explicitly handled by legacy - URLopener and FancyURLopener classes, this function returns a - urllib.response.addinfourl object. + For FTP, file, and data URLs and requests explicitly handled by legacy + URLopener and FancyURLopener classes, this function returns a + urllib.response.addinfourl object. - Note that None may be returned if no handler handles the request (though - the default installed global OpenerDirector uses UnknownHandler to ensure - this never happens). + Note that None may be returned if no handler handles the request (though + the default installed global OpenerDirector uses UnknownHandler to ensure + this never happens). - In addition, if proxy settings are detected (for example, when a *_proxy - environment variable like http_proxy is set), ProxyHandler is default - installed and makes sure the requests are handled through the proxy. + In addition, if proxy settings are detected (for example, when a *_proxy + environment variable like http_proxy is set), ProxyHandler is default + installed and makes sure the requests are handled through the proxy. 
- """ + """ def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: """Create an opener object from a list of handlers. -The opener will use several default handlers, including support -for HTTP, FTP and when applicable HTTPS. + The opener will use several default handlers, including support + for HTTP, FTP and when applicable HTTPS. -If any of the handlers passed as arguments are subclasses of the -default handlers, the default handlers will not be used. -""" + If any of the handlers passed as arguments are subclasses of the + default handlers, the default handlers will not be used. + """ if sys.version_info >= (3, 14): def url2pathname(url: str, *, require_scheme: bool = False, resolve_host: bool = False) -> str: """Convert the given file URL to a local file system path. -The 'file:' scheme prefix must be omitted unless *require_scheme* -is set to true. + The 'file:' scheme prefix must be omitted unless *require_scheme* + is set to true. + + The URL authority may be resolved with gethostbyname() if + *resolve_host* is set to true. + """ -The URL authority may be resolved with gethostbyname() if -*resolve_host* is set to true. -""" def pathname2url(pathname: str, *, add_scheme: bool = False) -> str: """Convert the given local file system path to a file URL. -The 'file:' scheme prefix is omitted unless *add_scheme* -is set to true. -""" + The 'file:' scheme prefix is omitted unless *add_scheme* + is set to true. + """ else: if sys.platform == "win32": @@ -260,55 +262,58 @@ else: else: def url2pathname(pathname: str) -> str: """OS-specific conversion from a relative URL of the 'file' scheme -to a file system path; not recommended for general use. -""" + to a file system path; not recommended for general use. + """ + def pathname2url(pathname: str) -> str: """OS-specific conversion from a file system path to a relative URL -of the 'file' scheme; not recommended for general use. -""" + of the 'file' scheme; not recommended for general use. + """ def getproxies() -> dict[str, str]: """Return a dictionary of scheme -> proxy server URL mappings. -Scan the environment for variables named <scheme>_proxy; -this seems to be the standard convention. -""" + Scan the environment for variables named <scheme>_proxy; + this seems to be the standard convention. + """ + def getproxies_environment() -> dict[str, str]: """Return a dictionary of scheme -> proxy server URL mappings. -Scan the environment for variables named <scheme>_proxy; -this seems to be the standard convention. -""" + Scan the environment for variables named <scheme>_proxy; + this seems to be the standard convention. + """ + def parse_http_list(s: str) -> list[str]: """Parse lists as described by RFC 2068 Section 2. -In particular, parse comma-separated lists where the elements of -the list may include quoted-strings. A quoted-string could -contain a comma. A non-quoted string could have quotes in the -middle. Neither commas nor quotes count if they are escaped. -Only double-quotes count, not single-quotes. -""" + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Neither commas nor quotes count if they are escaped. + Only double-quotes count, not single-quotes. + """ + def parse_keqv_list(l: list[str]) -> dict[str, str]: - """Parse list of key=value strings where keys are not duplicated.
-""" + """Parse list of key=value strings where keys are not duplicated.""" if sys.platform == "win32" or sys.platform == "darwin": def proxy_bypass(host: str) -> Any: # undocumented """Return True, if host should be bypassed. -Checks proxy settings gathered from the environment, if specified, -or the registry. + Checks proxy settings gathered from the environment, if specified, + or the registry. -""" + """ else: def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: # undocumented """Test if proxies should not be used for a particular host. -Checks the proxy dict for the value of no_proxy, which should -be a list of comma separated DNS suffixes, or '*' for all hosts. + Checks the proxy dict for the value of no_proxy, which should + be a list of comma separated DNS suffixes, or '*' for all hosts. -""" + """ class Request: @property @@ -337,8 +342,8 @@ class Request: method: str | None = None, ) -> None: ... def get_method(self) -> str: - """Return a string indicating the HTTP request method. -""" + """Return a string indicating the HTTP request method.""" + def add_header(self, key: str, val: str) -> None: ... def add_unredirected_header(self, key: str, val: str) -> None: ... def has_header(self, header_name: str) -> bool: ... @@ -380,13 +385,14 @@ class HTTPRedirectHandler(BaseHandler): ) -> Request | None: """Return a Request or None in response to a redirect. -This is called by the http_error_30x methods when a -redirection response is received. If a redirection should -take place, return a new Request to allow http_error_30x to -perform the redirect. Otherwise, raise HTTPError if no-one -else should try to handle this url. Return None if you can't -but another Handler might. -""" + This is called by the http_error_30x methods when a + redirection response is received. If a redirection should + take place, return a new Request to allow http_error_30x to + perform the redirect. Otherwise, raise HTTPError if no-one + else should try to handle this url. Return None if you can't + but another Handler might. + """ + def http_error_301(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -415,11 +421,11 @@ class HTTPPasswordMgr: def is_suburi(self, base: str, test: str) -> bool: # undocumented """Check if test is below base in a URI tree -Both args must be URIs in reduced form. -""" + Both args must be URIs in reduced form. + """ + def reduce_uri(self, uri: str, default_port: bool = True) -> tuple[str, str]: # undocumented - """Accept authority or URI and extract only the authority and path. -""" + """Accept authority or URI and extract only the authority and path.""" class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -465,9 +471,10 @@ class AbstractDigestAuthHandler: class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): """An authentication protocol defined by RFC 2069 -Digest authentication improves on basic authentication because it -does not transmit passwords in the clear. -""" + Digest authentication improves on basic authentication because it + does not transmit passwords in the clear. 
+ """ + auth_header: ClassVar[str] # undocumented def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... @@ -499,8 +506,8 @@ class AbstractHTTPHandler(BaseHandler): # undocumented def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: """Return an HTTPResponse object for the request, using http_class. -http_class must implement the HTTPConnection API from http.client. -""" + http_class must implement the HTTPConnection API from http.client. + """ class HTTPHandler(AbstractHTTPHandler): def http_open(self, req: Request) -> HTTPResponse: ... @@ -529,8 +536,8 @@ class DataHandler(BaseHandler): def data_open(self, req: Request) -> addinfourl: ... class ftpwrapper: # undocumented - """Class used by open_ftp() for cache of open FTP connections. -""" + """Class used by open_ftp() for cache of open FTP connections.""" + def __init__( self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True ) -> None: ... @@ -557,8 +564,8 @@ class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> NoReturn: ... class HTTPErrorProcessor(BaseHandler): - """Process HTTP error responses. -""" + """Process HTTP error responses.""" + def http_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... def https_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... @@ -569,42 +576,43 @@ def urlretrieve( data: _DataType = None, ) -> tuple[str, HTTPMessage]: """ -Retrieve a URL into a temporary location on disk. + Retrieve a URL into a temporary location on disk. -Requires a URL argument. If a filename is passed, it is used as -the temporary file location. The reporthook argument should be -a callable that accepts a block number, a read size, and the -total file size of the URL target. The data argument should be -valid URL encoded data. + Requires a URL argument. If a filename is passed, it is used as + the temporary file location. The reporthook argument should be + a callable that accepts a block number, a read size, and the + total file size of the URL target. The data argument should be + valid URL encoded data. -If a filename is passed and the URL points to a local resource, -the result is a copy from local file to new file. + If a filename is passed and the URL points to a local resource, + the result is a copy from local file to new file. + + Returns a tuple containing the path to the newly created + data file as well as the resulting HTTPMessage object. + """ -Returns a tuple containing the path to the newly created -data file as well as the resulting HTTPMessage object. -""" def urlcleanup() -> None: - """Clean up temporary files from urlretrieve calls. -""" + """Clean up temporary files from urlretrieve calls.""" if sys.version_info < (3, 14): @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class URLopener: """Class to open URLs. -This is a class rather than just a subroutine because we may need -more than one set of global protocol-specific options. -Note -- this is a base class for those who don't want the -automatic handling of errors type 302 (relocated) and 401 -(authorization needed). -""" + This is a class rather than just a subroutine because we may need + more than one set of global protocol-specific options. 
+ Note -- this is a base class for those who don't want the + automatic handling of errors type 302 (relocated) and 401 + (authorization needed). + """ + version: ClassVar[str] def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: - """Use URLopener().open(file) instead of open(file, 'r'). -""" + """Use URLopener().open(file) instead of open(file, 'r').""" + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: - """Overridable interface to open unknown URL type. -""" + """Overridable interface to open unknown URL type.""" + def retrieve( self, url: str, @@ -613,12 +621,14 @@ automatic handling of errors type 302 (relocated) and 401 data: ReadableBuffer | None = None, ) -> tuple[str, Message | None]: """retrieve(url) returns (filename, headers) for a local object -or (tempfilename, headers) for a remote object. -""" + or (tempfilename, headers) for a remote object. + """ + def addheader(self, *args: tuple[str, str]) -> None: # undocumented """Add a header to be used by the HTTP interface only -e.g. u.addheader('Accept', 'sound/basic') -""" + e.g. u.addheader('Accept', 'sound/basic') + """ + def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( @@ -626,71 +636,70 @@ e.g. u.addheader('Accept', 'sound/basic') ) -> _UrlopenRet: # undocumented """Handle http errors. -Derived class can override this, or provide specific handlers -named http_error_DDD where DDD is the 3-digit error code. -""" + Derived class can override this, or provide specific handlers + named http_error_DDD where DDD is the 3-digit error code. + """ + def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> _UrlopenRet: # undocumented - """Default error handler: close the connection and raise OSError. -""" + """Default error handler: close the connection and raise OSError.""" + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: # undocumented - """Use "data" URL. -""" + """Use "data" URL.""" + def open_file(self, url: str) -> addinfourl: # undocumented - """Use local file or FTP depending on form of URL. -""" + """Use local file or FTP depending on form of URL.""" + def open_ftp(self, url: str) -> addinfourl: # undocumented - """Use FTP protocol. -""" + """Use FTP protocol.""" + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented - """Use HTTP protocol. -""" + """Use HTTP protocol.""" + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: # undocumented - """Use HTTPS protocol. -""" + """Use HTTPS protocol.""" + def open_local_file(self, url: str) -> addinfourl: # undocumented - """Use local file. -""" + """Use local file.""" + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: # undocumented - """Overridable interface to open unknown URL type. -""" + """Overridable interface to open unknown URL type.""" + def __del__(self) -> None: ... @deprecated("Deprecated since Python 3.3; removed in Python 3.14. Use newer `urlopen` functions and methods.") class FancyURLopener(URLopener): - """Derived class with handlers for errors we can handle (perhaps). 
-""" + """Derived class with handlers for errors we can handle (perhaps).""" + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: - """Override this in a GUI environment! -""" + """Override this in a GUI environment!""" + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented def http_error_301( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 301 -- also relocated (permanently). -""" + """Error 301 -- also relocated (permanently).""" + def http_error_302( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 302 -- relocated (temporarily). -""" + """Error 302 -- relocated (temporarily).""" + def http_error_303( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 303 -- also relocated (essentially identical to 302). -""" + """Error 303 -- also relocated (essentially identical to 302).""" + def http_error_307( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 307 -- relocated, but turn POST into error. -""" + """Error 307 -- relocated, but turn POST into error.""" if sys.version_info >= (3, 11): def http_error_308( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: # undocumented - """Error 308 -- relocated, but turn POST into error. -""" + """Error 308 -- relocated, but turn POST into error.""" def http_error_401( self, @@ -703,8 +712,9 @@ named http_error_DDD where DDD is the 3-digit error code. retry: bool = False, ) -> _UrlopenRet | None: # undocumented """Error 401 -- authentication required. -This function supports Basic authentication only. -""" + This function supports Basic authentication only. + """ + def http_error_407( self, url: str, @@ -716,13 +726,14 @@ This function supports Basic authentication only. retry: bool = False, ) -> _UrlopenRet | None: # undocumented """Error 407 -- proxy authentication required. -This function supports Basic authentication only. -""" + This function supports Basic authentication only. + """ + def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> addinfourl: # undocumented - """Default error handling -- don't raise an exception. -""" + """Default error handling -- don't raise an exception.""" + def redirect_internal( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None ) -> _UrlopenRet | None: ... # undocumented diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi index 7888caa979ff3..0012cd166d853 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/response.pyi @@ -5,6 +5,7 @@ including read() and readline(). The typical response object is an addinfourl instance, which defines an info() method that returns headers and a geturl() method that returns the url. 
""" + import tempfile from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable @@ -15,8 +16,8 @@ from typing import IO, Any __all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(tempfile._TemporaryFileWrapper[bytes]): - """Base class for addinfo and addclosehook. Is a good idea for garbage collection. -""" + """Base class for addinfo and addclosehook. Is a good idea for garbage collection.""" + fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... def __exit__( @@ -30,22 +31,22 @@ class addbase(tempfile._TemporaryFileWrapper[bytes]): def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): - """Class to add a close hook to an open file. -""" + """Class to add a close hook to an open file.""" + closehook: Callable[..., object] hookargs: tuple[Any, ...] def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... class addinfo(addbase): - """class to add an info() method to an open file. -""" + """class to add an info() method to an open file.""" + headers: Message def __init__(self, fp: IO[bytes], headers: Message) -> None: ... def info(self) -> Message: ... class addinfourl(addinfo): - """class to add info() and geturl() methods to an open file. -""" + """class to add info() and geturl() methods to an open file.""" + url: str code: int | None @property diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi index 987ef185c9c79..111de19789d2e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/urllib/robotparser.pyi @@ -9,50 +9,55 @@ You can choose between two licenses when using this package: The robots.txt Exclusion Protocol is implemented as specified in http://www.robotstxt.org/norobots-rfc.txt """ + from collections.abc import Iterable from typing import NamedTuple __all__ = ["RobotFileParser"] class RequestRate(NamedTuple): - """RequestRate(requests, seconds) -""" + """RequestRate(requests, seconds)""" + requests: int seconds: int class RobotFileParser: """This class provides a set of methods to read, parse and answer -questions about a single robots.txt file. + questions about a single robots.txt file. + + """ -""" def __init__(self, url: str = "") -> None: ... def set_url(self, url: str) -> None: - """Sets the URL referring to a robots.txt file. -""" + """Sets the URL referring to a robots.txt file.""" + def read(self) -> None: - """Reads the robots.txt URL and feeds it to the parser. -""" + """Reads the robots.txt URL and feeds it to the parser.""" + def parse(self, lines: Iterable[str]) -> None: """Parse the input lines from a robots.txt file. -We allow that a user-agent: line is not preceded by -one or more blank lines. -""" + We allow that a user-agent: line is not preceded by + one or more blank lines. + """ + def can_fetch(self, useragent: str, url: str) -> bool: - """using the parsed robots.txt decide if useragent can fetch url -""" + """using the parsed robots.txt decide if useragent can fetch url""" + def mtime(self) -> int: """Returns the time the robots.txt file was last fetched. -This is useful for long-running web spiders that need to -check for new robots.txt files periodically. + This is useful for long-running web spiders that need to + check for new robots.txt files periodically. 
+ + """ -""" def modified(self) -> None: """Sets the time the robots.txt file was last fetched to the -current time. + current time. + + """ -""" def crawl_delay(self, useragent: str) -> str | None: ... def request_rate(self, useragent: str) -> RequestRate | None: ... def site_maps(self) -> list[str] | None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi index d418aeea4eb4c..158a8affaa4f4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uu.pyi @@ -3,6 +3,7 @@ encode(in_file, out_file [,name, mode], *, backtick=False) decode(in_file [, out_file, mode, quiet]) """ + from typing import BinaryIO from typing_extensions import TypeAlias @@ -12,11 +13,8 @@ _File: TypeAlias = str | BinaryIO class Error(Exception): ... -def encode( - in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False -) -> None: - """Uuencode file -""" +def encode(in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False) -> None: + """Uuencode file""" + def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: - """Decode uuencoded file -""" + """Decode uuencoded file""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi index 022179c4ddce3..86a27772a7950 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/uuid.pyi @@ -55,6 +55,7 @@ Typical usage: >>> uuid.MAX UUID('ffffffff-ffff-ffff-ffff-ffffffffffff') """ + import builtins import sys from enum import Enum @@ -64,73 +65,74 @@ from typing_extensions import LiteralString, TypeAlias _FieldsType: TypeAlias = tuple[int, int, int, int, int, int] class SafeUUID(Enum): - """An enumeration. -""" + """An enumeration.""" + safe = 0 unsafe = -1 unknown = None class UUID: """Instances of the UUID class represent UUIDs as specified in RFC 4122. -UUID objects are immutable, hashable, and usable as dictionary keys. -Converting a UUID to a string with str() yields something in the form -'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts -five possible forms: a similar string of hexadecimal digits, or a tuple -of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and -48-bit values respectively) as an argument named 'fields', or a string -of 16 bytes (with all the integer fields in big-endian order) as an -argument named 'bytes', or a string of 16 bytes (with the first three -fields in little-endian order) as an argument named 'bytes_le', or a -single 128-bit integer as an argument named 'int'. + UUID objects are immutable, hashable, and usable as dictionary keys. + Converting a UUID to a string with str() yields something in the form + '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts + five possible forms: a similar string of hexadecimal digits, or a tuple + of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and + 48-bit values respectively) as an argument named 'fields', or a string + of 16 bytes (with all the integer fields in big-endian order) as an + argument named 'bytes', or a string of 16 bytes (with the first three + fields in little-endian order) as an argument named 'bytes_le', or a + single 128-bit integer as an argument named 'int'. 
-UUIDs have these read-only attributes: + UUIDs have these read-only attributes: - bytes the UUID as a 16-byte string (containing the six - integer fields in big-endian byte order) + bytes the UUID as a 16-byte string (containing the six + integer fields in big-endian byte order) - bytes_le the UUID as a 16-byte string (with time_low, time_mid, - and time_hi_version in little-endian byte order) + bytes_le the UUID as a 16-byte string (with time_low, time_mid, + and time_hi_version in little-endian byte order) - fields a tuple of the six integer fields of the UUID, - which are also available as six individual attributes - and two derived attributes. Those attributes are not - always relevant to all UUID versions: + fields a tuple of the six integer fields of the UUID, + which are also available as six individual attributes + and two derived attributes. Those attributes are not + always relevant to all UUID versions: - The 'time_*' attributes are only relevant to version 1. + The 'time_*' attributes are only relevant to version 1. - The 'clock_seq*' and 'node' attributes are only relevant - to versions 1 and 6. + The 'clock_seq*' and 'node' attributes are only relevant + to versions 1 and 6. - The 'time' attribute is only relevant to versions 1, 6 - and 7. + The 'time' attribute is only relevant to versions 1, 6 + and 7. - time_low the first 32 bits of the UUID - time_mid the next 16 bits of the UUID - time_hi_version the next 16 bits of the UUID - clock_seq_hi_variant the next 8 bits of the UUID - clock_seq_low the next 8 bits of the UUID - node the last 48 bits of the UUID + time_low the first 32 bits of the UUID + time_mid the next 16 bits of the UUID + time_hi_version the next 16 bits of the UUID + clock_seq_hi_variant the next 8 bits of the UUID + clock_seq_low the next 8 bits of the UUID + node the last 48 bits of the UUID - time the 60-bit timestamp for UUIDv1/v6, - or the 48-bit timestamp for UUIDv7 - clock_seq the 14-bit sequence number + time the 60-bit timestamp for UUIDv1/v6, + or the 48-bit timestamp for UUIDv7 + clock_seq the 14-bit sequence number - hex the UUID as a 32-character hexadecimal string + hex the UUID as a 32-character hexadecimal string - int the UUID as a 128-bit integer + int the UUID as a 128-bit integer - urn the UUID as a URN as specified in RFC 4122/9562 + urn the UUID as a URN as specified in RFC 4122/9562 - variant the UUID variant (one of the constants RESERVED_NCS, - RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) + variant the UUID variant (one of the constants RESERVED_NCS, + RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) - version the UUID version number (1 through 8, meaningful only - when the variant is RFC_4122) + version the UUID version number (1 through 8, meaningful only + when the variant is RFC_4122) + + is_safe An enum indicating whether the UUID has been generated in + a way that is safe for multiprocessing applications, via + uuid_generate_time_safe(3). + """ - is_safe An enum indicating whether the UUID has been generated in - a way that is safe for multiprocessing applications, via - uuid_generate_time_safe(3). 
-""" __slots__ = ("int", "is_safe", "__weakref__") def __init__( self, @@ -144,33 +146,34 @@ UUIDs have these read-only attributes: is_safe: SafeUUID = SafeUUID.unknown, ) -> None: """Create a UUID from either a string of 32 hexadecimal digits, -a string of 16 bytes as the 'bytes' argument, a string of 16 bytes -in little-endian order as the 'bytes_le' argument, a tuple of six -integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, -8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as -the 'fields' argument, or a single 128-bit integer as the 'int' -argument. When a string of hex digits is given, curly braces, -hyphens, and a URN prefix are all optional. For example, these -expressions all yield the same UUID: - -UUID('{12345678-1234-5678-1234-567812345678}') -UUID('12345678123456781234567812345678') -UUID('urn:uuid:12345678-1234-5678-1234-567812345678') -UUID(bytes='\\x12\\x34\\x56\\x78'*4) -UUID(bytes_le='\\x78\\x56\\x34\\x12\\x34\\x12\\x78\\x56' + - '\\x12\\x34\\x56\\x78\\x12\\x34\\x56\\x78') -UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) -UUID(int=0x12345678123456781234567812345678) - -Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must -be given. The 'version' argument is optional; if given, the resulting -UUID will have its variant and version set according to RFC 4122, -overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. - -is_safe is an enum exposed as an attribute on the instance. It -indicates whether the UUID has been generated in a way that is safe -for multiprocessing applications, via uuid_generate_time_safe(3). -""" + a string of 16 bytes as the 'bytes' argument, a string of 16 bytes + in little-endian order as the 'bytes_le' argument, a tuple of six + integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, + 8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as + the 'fields' argument, or a single 128-bit integer as the 'int' + argument. When a string of hex digits is given, curly braces, + hyphens, and a URN prefix are all optional. For example, these + expressions all yield the same UUID: + + UUID('{12345678-1234-5678-1234-567812345678}') + UUID('12345678123456781234567812345678') + UUID('urn:uuid:12345678-1234-5678-1234-567812345678') + UUID(bytes='\\x12\\x34\\x56\\x78'*4) + UUID(bytes_le='\\x78\\x56\\x34\\x12\\x34\\x12\\x78\\x56' + + '\\x12\\x34\\x56\\x78\\x12\\x34\\x56\\x78') + UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) + UUID(int=0x12345678123456781234567812345678) + + Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must + be given. The 'version' argument is optional; if given, the resulting + UUID will have its variant and version set according to RFC 4122, + overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. + + is_safe is an enum exposed as an attribute on the instance. It + indicates whether the UUID has been generated in a way that is safe + for multiprocessing applications, via uuid_generate_time_safe(3). + """ + @property def is_safe(self) -> SafeUUID: ... @property @@ -216,66 +219,64 @@ for multiprocessing applications, via uuid_generate_time_safe(3). def getnode() -> int: """Get the hardware address as a 48-bit positive integer. -The first time this runs, it may launch a separate program, which could -be quite slow. If all attempts to obtain the hardware address fail, we -choose a random 48-bit number with its eighth bit set to 1 as recommended -in RFC 4122. 
-""" + The first time this runs, it may launch a separate program, which could + be quite slow. If all attempts to obtain the hardware address fail, we + choose a random 48-bit number with its eighth bit set to 1 as recommended + in RFC 4122. + """ + def uuid1(node: int | None = None, clock_seq: int | None = None) -> UUID: """Generate a UUID from a host ID, sequence number, and the current time. -If 'node' is not given, getnode() is used to obtain the hardware -address. If 'clock_seq' is given, it is used as the sequence number; -otherwise a random 14-bit sequence number is chosen. -""" + If 'node' is not given, getnode() is used to obtain the hardware + address. If 'clock_seq' is given, it is used as the sequence number; + otherwise a random 14-bit sequence number is chosen. + """ if sys.version_info >= (3, 14): def uuid6(node: int | None = None, clock_seq: int | None = None) -> UUID: """Similar to :func:`uuid1` but where fields are ordered differently -for improved DB locality. + for improved DB locality. + + More precisely, given a 60-bit timestamp value as specified for UUIDv1, + for UUIDv6 the first 48 most significant bits are stored first, followed + by the 4-bit version (same position), followed by the remaining 12 bits + of the original 60-bit timestamp. + """ -More precisely, given a 60-bit timestamp value as specified for UUIDv1, -for UUIDv6 the first 48 most significant bits are stored first, followed -by the 4-bit version (same position), followed by the remaining 12 bits -of the original 60-bit timestamp. -""" def uuid7() -> UUID: """Generate a UUID from a Unix timestamp in milliseconds and random bits. -UUIDv7 objects feature monotonicity within a millisecond. -""" + UUIDv7 objects feature monotonicity within a millisecond. + """ + def uuid8(a: int | None = None, b: int | None = None, c: int | None = None) -> UUID: """Generate a UUID from three custom blocks. -* 'a' is the first 48-bit chunk of the UUID (octets 0-5); -* 'b' is the mid 12-bit chunk (octets 6-7); -* 'c' is the last 62-bit chunk (octets 8-15). + * 'a' is the first 48-bit chunk of the UUID (octets 0-5); + * 'b' is the mid 12-bit chunk (octets 6-7); + * 'c' is the last 62-bit chunk (octets 8-15). -When a value is not specified, a pseudo-random value is generated. -""" + When a value is not specified, a pseudo-random value is generated. + """ if sys.version_info >= (3, 12): def uuid3(namespace: UUID, name: str | bytes) -> UUID: - """Generate a UUID from the MD5 hash of a namespace UUID and a name. -""" + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" else: def uuid3(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the MD5 hash of a namespace UUID and a name. -""" + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" def uuid4() -> UUID: - """Generate a random UUID. -""" + """Generate a random UUID.""" if sys.version_info >= (3, 12): def uuid5(namespace: UUID, name: str | bytes) -> UUID: - """Generate a UUID from the SHA-1 hash of a namespace UUID and a name. -""" + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" else: def uuid5(namespace: UUID, name: str) -> UUID: - """Generate a UUID from the SHA-1 hash of a namespace UUID and a name. -""" + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" if sys.version_info >= (3, 14): NIL: Final[UUID] @@ -292,5 +293,4 @@ RESERVED_FUTURE: Final[LiteralString] if sys.version_info >= (3, 12): def main() -> None: - """Run the uuid command line interface. 
-""" + """Run the uuid command line interface.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi index 4ac34b299a5b6..0a4a3af9922e6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/warnings.pyi @@ -1,5 +1,5 @@ -"""Python part of the warnings subsystem. -""" +"""Python part of the warnings subsystem.""" + import re import sys from _warnings import warn as warn, warn_explicit as warn_explicit @@ -39,43 +39,41 @@ def showwarning( file: TextIO | None = None, line: str | None = None, ) -> None: - """Hook to write a warning to a file; replace if you like. -""" -def formatwarning( - message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None -) -> str: - """Function to format a warning the standard way. -""" + """Hook to write a warning to a file; replace if you like.""" + +def formatwarning(message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None) -> str: + """Function to format a warning the standard way.""" + def filterwarnings( action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False ) -> None: """Insert an entry into the list of warnings filters (at the front). -'action' -- one of "error", "ignore", "always", "all", "default", "module", - or "once" -'message' -- a regex that the warning message must match -'category' -- a class that the warning must be a subclass of -'module' -- a regex that the module name must match -'lineno' -- an integer line number, 0 matches all warnings -'append' -- if true, append to the list of filters -""" + 'action' -- one of "error", "ignore", "always", "all", "default", "module", + or "once" + 'message' -- a regex that the warning message must match + 'category' -- a class that the warning must be a subclass of + 'module' -- a regex that the module name must match + 'lineno' -- an integer line number, 0 matches all warnings + 'append' -- if true, append to the list of filters + """ + def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: """Insert a simple entry into the list of warnings filters (at the front). -A simple filter matches all modules and messages. -'action' -- one of "error", "ignore", "always", "all", "default", "module", - or "once" -'category' -- a class that the warning must be a subclass of -'lineno' -- an integer line number, 0 matches all warnings -'append' -- if true, append to the list of filters -""" + A simple filter matches all modules and messages. + 'action' -- one of "error", "ignore", "always", "all", "default", "module", + or "once" + 'category' -- a class that the warning must be a subclass of + 'lineno' -- an integer line number, 0 matches all warnings + 'append' -- if true, append to the list of filters + """ + def resetwarnings() -> None: - """Clear the list of warning filters, so that no filters are active. -""" + """Clear the list of warning filters, so that no filters are active.""" class _OptionError(Exception): - """Exception used by option processing helpers. -""" + """Exception used by option processing helpers.""" class WarningMessage: message: Warning | str @@ -98,22 +96,23 @@ class WarningMessage: class catch_warnings(Generic[_W_co]): """A context manager that copies and restores the warnings filter upon -exiting the context. 
- -The 'record' argument specifies whether warnings should be captured by a -custom implementation of warnings.showwarning() and be appended to a list -returned by the context manager. Otherwise None is returned by the context -manager. The objects appended to the list are arguments whose attributes -mirror the arguments to showwarning(). - -The 'module' argument is to specify an alternative module to the module -named 'warnings' and imported under that name. This argument is only useful -when testing the warnings module itself. - -If the 'action' argument is not None, the remaining arguments are passed -to warnings.simplefilter() as if it were called immediately on entering the -context. -""" + exiting the context. + + The 'record' argument specifies whether warnings should be captured by a + custom implementation of warnings.showwarning() and be appended to a list + returned by the context manager. Otherwise None is returned by the context + manager. The objects appended to the list are arguments whose attributes + mirror the arguments to showwarning(). + + The 'module' argument is to specify an alternative module to the module + named 'warnings' and imported under that name. This argument is only useful + when testing the warnings module itself. + + If the 'action' argument is not None, the remaining arguments are passed + to warnings.simplefilter() as if it were called immediately on entering the + context. + """ + if sys.version_info >= (3, 11): @overload def __init__( @@ -127,9 +126,10 @@ context. append: bool = False, ) -> None: """Specify whether to record warnings and if an alternative module -should be used other than sys.modules['warnings']. + should be used other than sys.modules['warnings']. + + """ -""" @overload def __init__( self: catch_warnings[list[WarningMessage]], @@ -156,12 +156,13 @@ should be used other than sys.modules['warnings']. @overload def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: """Specify whether to record warnings and if an alternative module - should be used other than sys.modules['warnings']. + should be used other than sys.modules['warnings']. - For compatibility with Python 3.0, please consider all arguments to be - keyword-only. + For compatibility with Python 3.0, please consider all arguments to be + keyword-only. + + """ - """ @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None @@ -178,45 +179,46 @@ if sys.version_info >= (3, 13): class deprecated: """Indicate that a class, function or overload is deprecated. -When this decorator is applied to an object, the type checker -will generate a diagnostic on usage of the deprecated object. - -Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... - -The warning specified by *category* will be emitted at runtime -on use of deprecated objects. For functions, that happens on calls; -for classes, on instantiation and on creation of subclasses. -If the *category* is ``None``, no warning is emitted at runtime. -The *stacklevel* determines where the -warning is emitted. If it is ``1`` (the default), the warning -is emitted at the direct caller of the deprecated object; if it -is higher, it is emitted further up the stack. 
-Static type checker behavior is not affected by the *category* -and *stacklevel* arguments. - -The deprecation message passed to the decorator is saved in the -``__deprecated__`` attribute on the decorated object. -If applied to an overload, the decorator -must be after the ``@overload`` decorator for the attribute to -exist on the overload as returned by ``get_overloads()``. - -See PEP 702 for details. - -""" + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ + message: LiteralString category: type[Warning] | None stacklevel: int diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi index df95e15d65af6..9819c4fa1907b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wave.pyi @@ -70,6 +70,7 @@ close() to patch up the sizes in the header. The close() method is called automatically when the class instance is destroyed. """ + import sys from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, Final, Literal, NamedTuple, NoReturn, overload @@ -84,8 +85,8 @@ class Error(Exception): ... 
WAVE_FORMAT_PCM: Final = 0x0001 class _wave_params(NamedTuple): - """_wave_params(nchannels, sampwidth, framerate, nframes, comptype, compname) -""" + """_wave_params(nchannels, sampwidth, framerate, nframes, comptype, compname)""" + nchannels: int sampwidth: int framerate: int @@ -96,33 +97,34 @@ class _wave_params(NamedTuple): class Wave_read: """Variables used in this class: -These variables are available to the user though appropriate -methods of this class: -_file -- the open file with methods read(), close(), and seek() - set through the __init__() method -_nchannels -- the number of audio channels - available through the getnchannels() method -_nframes -- the number of audio frames - available through the getnframes() method -_sampwidth -- the number of bytes per audio sample - available through the getsampwidth() method -_framerate -- the sampling frequency - available through the getframerate() method -_comptype -- the AIFF-C compression type ('NONE' if AIFF) - available through the getcomptype() method -_compname -- the human-readable AIFF-C compression type - available through the getcomptype() method -_soundpos -- the position in the audio stream - available through the tell() method, set through the - setpos() method - -These variables are used internally only: -_fmt_chunk_read -- 1 iff the FMT chunk has been read -_data_seek_needed -- 1 iff positioned correctly in audio - file for readframes() -_data_chunk -- instantiation of a chunk class for the DATA chunk -_framesize -- size of one frame in the file -""" + These variables are available to the user though appropriate + methods of this class: + _file -- the open file with methods read(), close(), and seek() + set through the __init__() method + _nchannels -- the number of audio channels + available through the getnchannels() method + _nframes -- the number of audio frames + available through the getnframes() method + _sampwidth -- the number of bytes per audio sample + available through the getsampwidth() method + _framerate -- the sampling frequency + available through the getframerate() method + _comptype -- the AIFF-C compression type ('NONE' if AIFF) + available through the getcomptype() method + _compname -- the human-readable AIFF-C compression type + available through the getcomptype() method + _soundpos -- the position in the audio stream + available through the tell() method, set through the + setpos() method + + These variables are used internally only: + _fmt_chunk_read -- 1 iff the FMT chunk has been read + _data_seek_needed -- 1 iff positioned correctly in audio + file for readframes() + _data_chunk -- instantiation of a chunk class for the DATA chunk + _framesize -- size of one frame in the file + """ + def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... 
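The Wave_read docstring above lists the reader's accessor methods; as a minimal usage sketch against that documented API (the file path is a placeholder, not something from the stubs):

    import wave

    # wave.open(..., "rb") returns a Wave_read; the context manager closes it.
    with wave.open("example.wav", "rb") as wf:
        params = wf.getparams()                  # the _wave_params namedtuple shown above
        frames = wf.readframes(wf.getnframes())  # raw PCM bytes for the whole file
        print(params.nchannels, params.framerate, len(frames))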
@@ -153,28 +155,29 @@ _framesize -- size of one frame in the file class Wave_write: """Variables used in this class: -These variables are user settable through appropriate methods -of this class: -_file -- the open file with methods write(), close(), tell(), seek() - set through the __init__() method -_comptype -- the AIFF-C compression type ('NONE' in AIFF) - set through the setcomptype() or setparams() method -_compname -- the human-readable AIFF-C compression type - set through the setcomptype() or setparams() method -_nchannels -- the number of audio channels - set through the setnchannels() or setparams() method -_sampwidth -- the number of bytes per audio sample - set through the setsampwidth() or setparams() method -_framerate -- the sampling frequency - set through the setframerate() or setparams() method -_nframes -- the number of audio frames written to the header - set through the setnframes() or setparams() method - -These variables are used internally only: -_datalength -- the size of the audio samples written to the header -_nframeswritten -- the number of frames actually written -_datawritten -- the size of the audio samples actually written -""" + These variables are user settable through appropriate methods + of this class: + _file -- the open file with methods write(), close(), tell(), seek() + set through the __init__() method + _comptype -- the AIFF-C compression type ('NONE' in AIFF) + set through the setcomptype() or setparams() method + _compname -- the human-readable AIFF-C compression type + set through the setcomptype() or setparams() method + _nchannels -- the number of audio channels + set through the setnchannels() or setparams() method + _sampwidth -- the number of bytes per audio sample + set through the setsampwidth() or setparams() method + _framerate -- the sampling frequency + set through the setframerate() or setparams() method + _nframes -- the number of audio frames written to the header + set through the setnframes() or setparams() method + + These variables are used internally only: + _datalength -- the size of the audio samples written to the header + _nframeswritten -- the number of frames actually written + _datawritten -- the size of the audio samples actually written + """ + def __init__(self, f: _File) -> None: ... def __enter__(self) -> Self: ... def __exit__(self, *args: Unused) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi index 0fd1505760842..4dbec668b1679 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/weakref.pyi @@ -4,6 +4,7 @@ This module is an implementation of PEP 205: https://peps.python.org/pep-0205/ """ + from _typeshed import SupportsKeysAndGetItem from _weakref import getweakrefcount as getweakrefcount, getweakrefs as getweakrefs, proxy as proxy from _weakrefset import WeakSet as WeakSet @@ -63,13 +64,12 @@ class ReferenceType(Generic[_T]): # "weakref" __callback__: Callable[[Self], Any] def __new__(cls, o: _T, callback: Callable[[Self], Any] | None = ..., /) -> Self: ... def __call__(self) -> _T | None: - """Call self as a function. -""" + """Call self as a function.""" + def __eq__(self, value: object, /) -> bool: ... def __hash__(self) -> int: ... 
def __class_getitem__(cls, item: Any, /) -> GenericAlias: - """See PEP 585 -""" + """See PEP 585""" ref = ReferenceType @@ -77,9 +77,10 @@ ref = ReferenceType class WeakMethod(ref[_CallableT]): """ -A custom `weakref.ref` subclass which simulates a weak reference to -a bound method, working around the lifetime problem of bound methods. -""" + A custom `weakref.ref` subclass which simulates a weak reference to + a bound method, working around the lifetime problem of bound methods. + """ + __slots__ = ("_func_ref", "_meth_type", "_alive", "__weakref__") def __new__(cls, meth: _CallableT, callback: Callable[[Self], Any] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... @@ -90,9 +91,10 @@ a bound method, working around the lifetime problem of bound methods. class WeakValueDictionary(MutableMapping[_KT, _VT]): """Mapping class that references values weakly. -Entries in the dictionary will be discarded when no strong -reference to the value exists anymore -""" + Entries in the dictionary will be discarded when no strong + reference to the value exists anymore + """ + @overload def __init__(self) -> None: ... @overload @@ -130,23 +132,25 @@ reference to the value exists anymore def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: """Return an iterator that yields the weak references to the values. -The references are not guaranteed to be 'live' at the time -they are used, so the result of calling the references needs -to be checked before being used. This can be used to avoid -creating references that will cause the garbage collector to -keep the values around longer than needed. + The references are not guaranteed to be 'live' at the time + they are used, so the result of calling the references needs + to be checked before being used. This can be used to avoid + creating references that will cause the garbage collector to + keep the values around longer than needed. + + """ -""" def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: """Return a list of weak references to the values. -The references are not guaranteed to be 'live' at the time -they are used, so the result of calling the references needs -to be checked before being used. This can be used to avoid -creating references that will cause the garbage collector to -keep the values around longer than needed. + The references are not guaranteed to be 'live' at the time + they are used, so the result of calling the references needs + to be checked before being used. This can be used to avoid + creating references that will cause the garbage collector to + keep the values around longer than needed. + + """ -""" def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @@ -171,12 +175,13 @@ keep the values around longer than needed. class KeyedRef(ref[_T], Generic[_KT, _T]): """Specialized reference that includes a key corresponding to the value. -This is used in the WeakValueDictionary to avoid having to create -a function object for each key stored in the mapping. A shared -callback object can use the 'key' attribute of a KeyedRef instead -of getting a reference to the key from an enclosing scope. + This is used in the WeakValueDictionary to avoid having to create + a function object for each key stored in the mapping. A shared + callback object can use the 'key' attribute of a KeyedRef instead + of getting a reference to the key from an enclosing scope. 
+ + """ -""" __slots__ = ("key",) key: _KT def __new__(type, ob: _T, callback: Callable[[Self], Any], key: _KT) -> Self: ... @@ -185,13 +190,14 @@ of getting a reference to the key from an enclosing scope. class WeakKeyDictionary(MutableMapping[_KT, _VT]): """Mapping class that references keys weakly. -Entries in the dictionary will be discarded when there is no -longer a strong reference to the key. This can be used to -associate additional data with an object owned by other parts of -an application without adding attributes to those objects. This -can be especially useful with objects that override attribute -accesses. -""" + Entries in the dictionary will be discarded when there is no + longer a strong reference to the key. This can be used to + associate additional data with an object owned by other parts of + an application without adding attributes to those objects. This + can be especially useful with objects that override attribute + accesses. + """ + @overload def __init__(self, dict: None = None) -> None: ... @overload @@ -218,13 +224,13 @@ accesses. def keyrefs(self) -> list[ref[_KT]]: """Return a list of weak references to the keys. -The references are not guaranteed to be 'live' at the time -they are used, so the result of calling the references needs -to be checked before being used. This can be used to avoid -creating references that will cause the garbage collector to -keep the keys around longer than needed. + The references are not guaranteed to be 'live' at the time + they are used, so the result of calling the references needs + to be checked before being used. This can be used to avoid + creating references that will cause the garbage collector to + keep the keys around longer than needed. -""" + """ # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences @overload def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... @@ -253,32 +259,35 @@ keep the keys around longer than needed. class finalize(Generic[_P, _T]): """Class for finalization of weakrefable objects -finalize(obj, func, *args, **kwargs) returns a callable finalizer -object which will be called when obj is garbage collected. The -first time the finalizer is called it evaluates func(*arg, **kwargs) -and returns the result. After this the finalizer is dead, and -calling it just returns None. + finalize(obj, func, *args, **kwargs) returns a callable finalizer + object which will be called when obj is garbage collected. The + first time the finalizer is called it evaluates func(*arg, **kwargs) + and returns the result. After this the finalizer is dead, and + calling it just returns None. + + When the program exits any remaining finalizers for which the + atexit attribute is true will be run in reverse order of creation. + By default atexit is true. + """ -When the program exits any remaining finalizers for which the -atexit attribute is true will be run in reverse order of creation. -By default atexit is true. -""" __slots__ = () def __init__(self, obj: _T, func: Callable[_P, Any], /, *args: _P.args, **kwargs: _P.kwargs) -> None: ... 
def __call__(self, _: Any = None) -> Any | None: """If alive then mark as dead and return func(*args, **kwargs); -otherwise return None -""" + otherwise return None + """ + def detach(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: """If alive then mark as dead and return (obj, func, args, kwargs); -otherwise return None -""" + otherwise return None + """ + def peek(self) -> tuple[_T, Callable[_P, Any], tuple[Any, ...], dict[str, Any]] | None: """If alive then return (obj, func, args, kwargs); -otherwise return None -""" + otherwise return None + """ + @property def alive(self) -> bool: - """Whether finalizer is alive -""" + """Whether finalizer is alive""" atexit: bool diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi index 030a17d39f4c8..f02ee7477a512 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/webbrowser.pyi @@ -1,5 +1,5 @@ -"""Interfaces for launching and remotely controlling web browsers. -""" +"""Interfaces for launching and remotely controlling web browsers.""" + import sys from abc import abstractmethod from collections.abc import Callable, Sequence @@ -13,37 +13,39 @@ class Error(Exception): ... def register( name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False ) -> None: - """Register a browser connector. -""" + """Register a browser connector.""" + def get(using: str | None = None) -> BaseBrowser: - """Return a browser launcher instance appropriate for the environment. -""" + """Return a browser launcher instance appropriate for the environment.""" + def open(url: str, new: int = 0, autoraise: bool = True) -> bool: """Display url using the default browser. -If possible, open url in a location determined by new. -- 0: the same browser window (the default). -- 1: a new browser window. -- 2: a new browser page ("tab"). -If possible, autoraise raises the window (the default) or not. + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. + + If opening the browser succeeds, return True. + If there is a problem, return False. + """ -If opening the browser succeeds, return True. -If there is a problem, return False. -""" def open_new(url: str) -> bool: """Open url in a new window of the default browser. -If not possible, then open url in the only browser window. -""" + If not possible, then open url in the only browser window. + """ + def open_new_tab(url: str) -> bool: """Open url in a new page ("tab") of the default browser. -If not possible, then the behavior becomes equivalent to open_new(). -""" + If not possible, then the behavior becomes equivalent to open_new(). + """ class BaseBrowser: - """Parent class for all browsers. Do not use directly. -""" + """Parent class for all browsers. Do not use directly.""" + args: list[str] name: str basename: str @@ -55,19 +57,20 @@ class BaseBrowser: class GenericBrowser(BaseBrowser): """Class for all browsers started with a command -and without remote functionality. -""" + and without remote functionality. + """ + def __init__(self, name: str | Sequence[str]) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... 
class BackgroundBrowser(GenericBrowser): """Class for all browsers which are to be started in the -background. -""" + background. + """ class UnixBrowser(BaseBrowser): - """Parent class for all Unix browsers with remote functionality. -""" + """Parent class for all Unix browsers with remote functionality.""" + def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool @@ -78,34 +81,33 @@ class UnixBrowser(BaseBrowser): remote_action_newtab: str class Mozilla(UnixBrowser): - """Launcher class for Mozilla browsers. -""" + """Launcher class for Mozilla browsers.""" if sys.version_info < (3, 12): class Galeon(UnixBrowser): - """Launcher class for Galeon/Epiphany browsers. -""" + """Launcher class for Galeon/Epiphany browsers.""" + raise_opts: list[str] class Grail(BaseBrowser): def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class Chrome(UnixBrowser): - """Launcher class for Google Chrome browser. -""" + """Launcher class for Google Chrome browser.""" + class Opera(UnixBrowser): - """Launcher class for Opera browser. -""" + """Launcher class for Opera browser.""" + class Elinks(UnixBrowser): - """Launcher class for Elinks browsers. -""" + """Launcher class for Elinks browsers.""" class Konqueror(BaseBrowser): """Controller for the KDE File Manager (kfm, or Konqueror). -See the output of ``kfmclient --commands`` -for more information on the Konqueror remote-control interface. -""" + See the output of ``kfmclient --commands`` + for more information on the Konqueror remote-control interface. + """ + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "win32": @@ -119,13 +121,14 @@ if sys.platform == "darwin": class MacOSX(BaseBrowser): """Launcher class for Aqua browsers on Mac OS X - Optionally specify a browser name on instantiation. Note that this - will not work for Aqua browsers if the user has moved the application - package after installation. + Optionally specify a browser name on instantiation. Note that this + will not work for Aqua browsers if the user has moved the application + package after installation. + + If no browser is specified, the default browser, as specified in the + Internet System Preferences panel, will be used. + """ - If no browser is specified, the default browser, as specified in the - Internet System Preferences panel, will be used. - """ def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... @@ -133,13 +136,14 @@ if sys.platform == "darwin": class MacOSX(BaseBrowser): """Launcher class for Aqua browsers on Mac OS X - Optionally specify a browser name on instantiation. Note that this - will not work for Aqua browsers if the user has moved the application - package after installation. + Optionally specify a browser name on instantiation. Note that this + will not work for Aqua browsers if the user has moved the application + package after installation. + + If no browser is specified, the default browser, as specified in the + Internet System Preferences panel, will be used. + """ - If no browser is specified, the default browser, as specified in the - Internet System Preferences panel, will be used. - """ def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... 
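The webbrowser stubs above only declare launcher signatures; a minimal usage sketch of that API follows (the URL and the "firefox" launcher name are illustrative placeholders, and get() raises webbrowser.Error when no matching launcher is registered):

    import webbrowser

    # new=2 asks for a new browser tab, per the open() docstring; the call
    # returns True if a browser could be launched, False otherwise.
    webbrowser.open("https://docs.python.org", new=2)

    # Look up a specific registered launcher by name and open a tab with it.
    try:
        browser = webbrowser.get("firefox")
        browser.open_new_tab("https://peps.python.org/pep-0702/")
    except webbrowser.Error:
        pass  # no such launcher on this system; the default browser still works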
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi index 8a8b8b01dd3d0..ae3009add5e55 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winreg.pyi @@ -36,6 +36,7 @@ Integer constants: Many constants are defined - see the documentation for each function to see what constants are used, and where. """ + import sys from _typeshed import ReadableBuffer, Unused from types import TracebackType @@ -47,343 +48,360 @@ if sys.platform == "win32": def CloseKey(hkey: _KeyType, /) -> None: """Closes a previously opened registry key. - hkey - A previously opened key. + hkey + A previously opened key. + + Note that if the key is not closed using this method, it will be + closed when the hkey object is destroyed by Python. + """ -Note that if the key is not closed using this method, it will be -closed when the hkey object is destroyed by Python. -""" def ConnectRegistry(computer_name: str | None, key: _KeyType, /) -> HKEYType: """Establishes a connection to the registry on another computer. - computer_name - The name of the remote computer, of the form r"\\\\computername". If - None, the local computer is used. - key - The predefined key to connect to. + computer_name + The name of the remote computer, of the form r"\\\\computername". If + None, the local computer is used. + key + The predefined key to connect to. + + The return value is the handle of the opened key. + If the function fails, an OSError exception is raised. + """ -The return value is the handle of the opened key. -If the function fails, an OSError exception is raised. -""" def CreateKey(key: _KeyType, sub_key: str | None, /) -> HKEYType: """Creates or opens the specified key. - key - An already open key, or one of the predefined HKEY_* constants. - sub_key - The name of the key this method opens or creates. + key + An already open key, or one of the predefined HKEY_* constants. + sub_key + The name of the key this method opens or creates. -If key is one of the predefined keys, sub_key may be None. In that case, -the handle returned is the same key handle passed in to the function. + If key is one of the predefined keys, sub_key may be None. In that case, + the handle returned is the same key handle passed in to the function. -If the key already exists, this function opens the existing key. + If the key already exists, this function opens the existing key. + + The return value is the handle of the opened key. + If the function fails, an OSError exception is raised. + """ -The return value is the handle of the opened key. -If the function fails, an OSError exception is raised. -""" def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: """Creates or opens the specified key. - key - An already open key, or one of the predefined HKEY_* constants. - sub_key - The name of the key this method opens or creates. - reserved - A reserved integer, and must be zero. Default is zero. - access - An integer that specifies an access mask that describes the - desired security access for the key. Default is KEY_WRITE. + key + An already open key, or one of the predefined HKEY_* constants. + sub_key + The name of the key this method opens or creates. + reserved + A reserved integer, and must be zero. Default is zero. + access + An integer that specifies an access mask that describes the + desired security access for the key. Default is KEY_WRITE. 
-If key is one of the predefined keys, sub_key may be None. In that case, -the handle returned is the same key handle passed in to the function. + If key is one of the predefined keys, sub_key may be None. In that case, + the handle returned is the same key handle passed in to the function. -If the key already exists, this function opens the existing key + If the key already exists, this function opens the existing key + + The return value is the handle of the opened key. + If the function fails, an OSError exception is raised. + """ -The return value is the handle of the opened key. -If the function fails, an OSError exception is raised. -""" def DeleteKey(key: _KeyType, sub_key: str, /) -> None: """Deletes the specified key. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that must be the name of a subkey of the key identified by - the key parameter. This value must not be None, and the key may not - have subkeys. + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that must be the name of a subkey of the key identified by + the key parameter. This value must not be None, and the key may not + have subkeys. -This method can not delete keys with subkeys. + This method can not delete keys with subkeys. + + If the function succeeds, the entire key, including all of its values, + is removed. If the function fails, an OSError exception is raised. + """ -If the function succeeds, the entire key, including all of its values, -is removed. If the function fails, an OSError exception is raised. -""" def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: """Deletes the specified key (intended for 64-bit OS). - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that must be the name of a subkey of the key identified by - the key parameter. This value must not be None, and the key may not - have subkeys. - access - An integer that specifies an access mask that describes the - desired security access for the key. Default is KEY_WOW64_64KEY. - reserved - A reserved integer, and must be zero. Default is zero. - -While this function is intended to be used for 64-bit OS, it is also - available on 32-bit systems. - -This method can not delete keys with subkeys. - -If the function succeeds, the entire key, including all of its values, -is removed. If the function fails, an OSError exception is raised. -On unsupported Windows versions, NotImplementedError is raised. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that must be the name of a subkey of the key identified by + the key parameter. This value must not be None, and the key may not + have subkeys. + access + An integer that specifies an access mask that describes the + desired security access for the key. Default is KEY_WOW64_64KEY. + reserved + A reserved integer, and must be zero. Default is zero. + + While this function is intended to be used for 64-bit OS, it is also + available on 32-bit systems. + + This method can not delete keys with subkeys. + + If the function succeeds, the entire key, including all of its values, + is removed. If the function fails, an OSError exception is raised. + On unsupported Windows versions, NotImplementedError is raised. + """ + def DeleteValue(key: _KeyType, value: str, /) -> None: """Removes a named value from a registry key. 
- key - An already open key, or any one of the predefined HKEY_* constants. - value - A string that identifies the value to remove. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + value + A string that identifies the value to remove. + """ + def EnumKey(key: _KeyType, index: int, /) -> str: """Enumerates subkeys of an open registry key. - key - An already open key, or any one of the predefined HKEY_* constants. - index - An integer that identifies the index of the key to retrieve. + key + An already open key, or any one of the predefined HKEY_* constants. + index + An integer that identifies the index of the key to retrieve. + + The function retrieves the name of one subkey each time it is called. + It is typically called repeatedly until an OSError exception is + raised, indicating no more values are available. + """ -The function retrieves the name of one subkey each time it is called. -It is typically called repeatedly until an OSError exception is -raised, indicating no more values are available. -""" def EnumValue(key: _KeyType, index: int, /) -> tuple[str, Any, int]: """Enumerates values of an open registry key. - key - An already open key, or any one of the predefined HKEY_* constants. - index - An integer that identifies the index of the value to retrieve. - -The function retrieves the name of one subkey each time it is called. -It is typically called repeatedly, until an OSError exception -is raised, indicating no more values. - -The result is a tuple of 3 items: - value_name - A string that identifies the value. - value_data - An object that holds the value data, and whose type depends - on the underlying registry type. - data_type - An integer that identifies the type of the value data. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + index + An integer that identifies the index of the value to retrieve. + + The function retrieves the name of one subkey each time it is called. + It is typically called repeatedly, until an OSError exception + is raised, indicating no more values. + + The result is a tuple of 3 items: + value_name + A string that identifies the value. + value_data + An object that holds the value data, and whose type depends + on the underlying registry type. + data_type + An integer that identifies the type of the value data. + """ + def ExpandEnvironmentStrings(string: str, /) -> str: - """Expand environment vars. -""" + """Expand environment vars.""" + def FlushKey(key: _KeyType, /) -> None: """Writes all the attributes of a key to the registry. - key - An already open key, or any one of the predefined HKEY_* constants. + key + An already open key, or any one of the predefined HKEY_* constants. -It is not necessary to call FlushKey to change a key. Registry changes -are flushed to disk by the registry using its lazy flusher. Registry -changes are also flushed to disk at system shutdown. Unlike -CloseKey(), the FlushKey() method returns only when all the data has -been written to the registry. + It is not necessary to call FlushKey to change a key. Registry changes + are flushed to disk by the registry using its lazy flusher. Registry + changes are also flushed to disk at system shutdown. Unlike + CloseKey(), the FlushKey() method returns only when all the data has + been written to the registry. + + An application should only call FlushKey() if it requires absolute + certainty that registry changes are on disk. If you don't know whether + a FlushKey() call is required, it probably isn't. 
+ """ -An application should only call FlushKey() if it requires absolute -certainty that registry changes are on disk. If you don't know whether -a FlushKey() call is required, it probably isn't. -""" def LoadKey(key: _KeyType, sub_key: str, file_name: str, /) -> None: """Insert data into the registry from a file. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub-key to load. - file_name - The name of the file to load registry data from. This file must - have been created with the SaveKey() function. Under the file - allocation table (FAT) file system, the filename may not have an - extension. + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub-key to load. + file_name + The name of the file to load registry data from. This file must + have been created with the SaveKey() function. Under the file + allocation table (FAT) file system, the filename may not have an + extension. -Creates a subkey under the specified key and stores registration -information from a specified file into that subkey. + Creates a subkey under the specified key and stores registration + information from a specified file into that subkey. -A call to LoadKey() fails if the calling process does not have the -SE_RESTORE_PRIVILEGE privilege. + A call to LoadKey() fails if the calling process does not have the + SE_RESTORE_PRIVILEGE privilege. -If key is a handle returned by ConnectRegistry(), then the path -specified in fileName is relative to the remote computer. + If key is a handle returned by ConnectRegistry(), then the path + specified in fileName is relative to the remote computer. + + The MSDN docs imply key must be in the HKEY_USER or HKEY_LOCAL_MACHINE + tree. + """ -The MSDN docs imply key must be in the HKEY_USER or HKEY_LOCAL_MACHINE -tree. -""" def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: """Opens the specified key. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub_key to open. - reserved - A reserved integer that must be zero. Default is zero. - access - An integer that specifies an access mask that describes the desired - security access for the key. Default is KEY_READ. - -The result is a new handle to the specified key. -If the function fails, an OSError exception is raised. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub_key to open. + reserved + A reserved integer that must be zero. Default is zero. + access + An integer that specifies an access mask that describes the desired + security access for the key. Default is KEY_READ. + + The result is a new handle to the specified key. + If the function fails, an OSError exception is raised. + """ + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: """Opens the specified key. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that identifies the sub_key to open. - reserved - A reserved integer that must be zero. Default is zero. - access - An integer that specifies an access mask that describes the desired - security access for the key. Default is KEY_READ. - -The result is a new handle to the specified key. -If the function fails, an OSError exception is raised. 
-""" + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that identifies the sub_key to open. + reserved + A reserved integer that must be zero. Default is zero. + access + An integer that specifies an access mask that describes the desired + security access for the key. Default is KEY_READ. + + The result is a new handle to the specified key. + If the function fails, an OSError exception is raised. + """ + def QueryInfoKey(key: _KeyType, /) -> tuple[int, int, int]: """Returns information about a key. - key - An already open key, or any one of the predefined HKEY_* constants. + key + An already open key, or any one of the predefined HKEY_* constants. + + The result is a tuple of 3 items: + An integer that identifies the number of sub keys this key has. + An integer that identifies the number of values this key has. + An integer that identifies when the key was last modified (if available) + as 100's of nanoseconds since Jan 1, 1600. + """ -The result is a tuple of 3 items: -An integer that identifies the number of sub keys this key has. -An integer that identifies the number of values this key has. -An integer that identifies when the key was last modified (if available) -as 100's of nanoseconds since Jan 1, 1600. -""" def QueryValue(key: _KeyType, sub_key: str | None, /) -> str: """Retrieves the unnamed value for a key. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that holds the name of the subkey with which the value - is associated. If this parameter is None or empty, the function - retrieves the value set by the SetValue() method for the key - identified by key. - -Values in the registry have name, type, and data components. This method -retrieves the data for a key's first value that has a NULL name. -But since the underlying API call doesn't return the type, you'll -probably be happier using QueryValueEx; this function is just here for -completeness. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that holds the name of the subkey with which the value + is associated. If this parameter is None or empty, the function + retrieves the value set by the SetValue() method for the key + identified by key. + + Values in the registry have name, type, and data components. This method + retrieves the data for a key's first value that has a NULL name. + But since the underlying API call doesn't return the type, you'll + probably be happier using QueryValueEx; this function is just here for + completeness. + """ + def QueryValueEx(key: _KeyType, name: str, /) -> tuple[Any, int]: """Retrieves the type and value of a specified sub-key. - key - An already open key, or any one of the predefined HKEY_* constants. - name - A string indicating the value to query. + key + An already open key, or any one of the predefined HKEY_* constants. + name + A string indicating the value to query. -Behaves mostly like QueryValue(), but also returns the type of the -specified value name associated with the given open registry key. + Behaves mostly like QueryValue(), but also returns the type of the + specified value name associated with the given open registry key. + + The return value is a tuple of the value and the type_id. + """ -The return value is a tuple of the value and the type_id. -""" def SaveKey(key: _KeyType, file_name: str, /) -> None: """Saves the specified key, and all its subkeys to the specified file. 
- key - An already open key, or any one of the predefined HKEY_* constants. - file_name - The name of the file to save registry data to. This file cannot - already exist. If this filename includes an extension, it cannot be - used on file allocation table (FAT) file systems by the LoadKey(), - ReplaceKey() or RestoreKey() methods. + key + An already open key, or any one of the predefined HKEY_* constants. + file_name + The name of the file to save registry data to. This file cannot + already exist. If this filename includes an extension, it cannot be + used on file allocation table (FAT) file systems by the LoadKey(), + ReplaceKey() or RestoreKey() methods. -If key represents a key on a remote computer, the path described by -file_name is relative to the remote computer. + If key represents a key on a remote computer, the path described by + file_name is relative to the remote computer. + + The caller of this method must possess the SeBackupPrivilege + security privilege. This function passes NULL for security_attributes + to the API. + """ -The caller of this method must possess the SeBackupPrivilege -security privilege. This function passes NULL for security_attributes -to the API. -""" def SetValue(key: _KeyType, sub_key: str, type: int, value: str, /) -> None: """Associates a value with a specified key. - key - An already open key, or any one of the predefined HKEY_* constants. - sub_key - A string that names the subkey with which the value is associated. - type - An integer that specifies the type of the data. Currently this must - be REG_SZ, meaning only strings are supported. - value - A string that specifies the new value. - -If the key specified by the sub_key parameter does not exist, the -SetValue function creates it. - -Value lengths are limited by available memory. Long values (more than -2048 bytes) should be stored as files with the filenames stored in -the configuration registry to help the registry perform efficiently. - -The key identified by the key parameter must have been opened with -KEY_SET_VALUE access. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + sub_key + A string that names the subkey with which the value is associated. + type + An integer that specifies the type of the data. Currently this must + be REG_SZ, meaning only strings are supported. + value + A string that specifies the new value. + + If the key specified by the sub_key parameter does not exist, the + SetValue function creates it. + + Value lengths are limited by available memory. Long values (more than + 2048 bytes) should be stored as files with the filenames stored in + the configuration registry to help the registry perform efficiently. + + The key identified by the key parameter must have been opened with + KEY_SET_VALUE access. + """ + @overload # type=REG_DWORD|REG_QWORD - def SetValueEx( - key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, / - ) -> None: + def SetValueEx(key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[4, 5], value: int | None, /) -> None: """Stores data in the value field of an open registry key. - key - An already open key, or any one of the predefined HKEY_* constants. - value_name - A string containing the name of the value to set, or None. - reserved - Can be anything - zero is always passed to the API. - type - An integer that specifies the type of the data, one of: - REG_BINARY -- Binary data in any form. - REG_DWORD -- A 32-bit number. 
- REG_DWORD_LITTLE_ENDIAN -- A 32-bit number in little-endian format. Equivalent to REG_DWORD - REG_DWORD_BIG_ENDIAN -- A 32-bit number in big-endian format. - REG_EXPAND_SZ -- A null-terminated string that contains unexpanded - references to environment variables (for example, - %PATH%). - REG_LINK -- A Unicode symbolic link. - REG_MULTI_SZ -- A sequence of null-terminated strings, terminated - by two null characters. Note that Python handles - this termination automatically. - REG_NONE -- No defined value type. - REG_QWORD -- A 64-bit number. - REG_QWORD_LITTLE_ENDIAN -- A 64-bit number in little-endian format. Equivalent to REG_QWORD. - REG_RESOURCE_LIST -- A device-driver resource list. - REG_SZ -- A null-terminated string. - value - A string that specifies the new value. - -This method can also set additional value and type information for the -specified key. The key identified by the key parameter must have been -opened with KEY_SET_VALUE access. - -To open the key, use the CreateKeyEx() or OpenKeyEx() methods. - -Value lengths are limited by available memory. Long values (more than -2048 bytes) should be stored as files with the filenames stored in -the configuration registry to help the registry perform efficiently. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + value_name + A string containing the name of the value to set, or None. + reserved + Can be anything - zero is always passed to the API. + type + An integer that specifies the type of the data, one of: + REG_BINARY -- Binary data in any form. + REG_DWORD -- A 32-bit number. + REG_DWORD_LITTLE_ENDIAN -- A 32-bit number in little-endian format. Equivalent to REG_DWORD + REG_DWORD_BIG_ENDIAN -- A 32-bit number in big-endian format. + REG_EXPAND_SZ -- A null-terminated string that contains unexpanded + references to environment variables (for example, + %PATH%). + REG_LINK -- A Unicode symbolic link. + REG_MULTI_SZ -- A sequence of null-terminated strings, terminated + by two null characters. Note that Python handles + this termination automatically. + REG_NONE -- No defined value type. + REG_QWORD -- A 64-bit number. + REG_QWORD_LITTLE_ENDIAN -- A 64-bit number in little-endian format. Equivalent to REG_QWORD. + REG_RESOURCE_LIST -- A device-driver resource list. + REG_SZ -- A null-terminated string. + value + A string that specifies the new value. + + This method can also set additional value and type information for the + specified key. The key identified by the key parameter must have been + opened with KEY_SET_VALUE access. + + To open the key, use the CreateKeyEx() or OpenKeyEx() methods. + + Value lengths are limited by available memory. Long values (more than + 2048 bytes) should be stored as files with the filenames stored in + the configuration registry to help the registry perform efficiently. + """ + @overload # type=REG_SZ|REG_EXPAND_SZ def SetValueEx( key: _KeyType, value_name: str | None, reserved: Unused, type: Literal[1, 2], value: str | None, / @@ -413,34 +431,35 @@ the configuration registry to help the registry perform efficiently. def DisableReflectionKey(key: _KeyType, /) -> None: """Disables registry reflection for 32bit processes running on a 64bit OS. - key - An already open key, or any one of the predefined HKEY_* constants. + key + An already open key, or any one of the predefined HKEY_* constants. -Will generally raise NotImplementedError if executed on a 32bit OS. + Will generally raise NotImplementedError if executed on a 32bit OS. 
+ + If the key is not on the reflection list, the function succeeds but has + no effect. Disabling reflection for a key does not affect reflection + of any subkeys. + """ -If the key is not on the reflection list, the function succeeds but has -no effect. Disabling reflection for a key does not affect reflection -of any subkeys. -""" def EnableReflectionKey(key: _KeyType, /) -> None: """Restores registry reflection for the specified disabled key. - key - An already open key, or any one of the predefined HKEY_* constants. + key + An already open key, or any one of the predefined HKEY_* constants. + + Will generally raise NotImplementedError if executed on a 32bit OS. + Restoring reflection for a key does not affect reflection of any + subkeys. + """ -Will generally raise NotImplementedError if executed on a 32bit OS. -Restoring reflection for a key does not affect reflection of any -subkeys. -""" def QueryReflectionKey(key: _KeyType, /) -> bool: """Returns the reflection state for the specified key as a bool. - key - An already open key, or any one of the predefined HKEY_* constants. - -Will generally raise NotImplementedError if executed on a 32bit OS. -""" + key + An already open key, or any one of the predefined HKEY_* constants. + Will generally raise NotImplementedError if executed on a 32bit OS. + """ HKEY_CLASSES_ROOT: Final[int] HKEY_CURRENT_USER: Final[int] HKEY_LOCAL_MACHINE: Final[int] @@ -503,31 +522,32 @@ Will generally raise NotImplementedError if executed on a 32bit OS. class HKEYType: """PyHKEY Object - A Python object, representing a win32 registry key. -This object wraps a Windows HKEY object, automatically closing it when -the object is destroyed. To guarantee cleanup, you can call either -the Close() method on the PyHKEY, or the CloseKey() method. + This object wraps a Windows HKEY object, automatically closing it when + the object is destroyed. To guarantee cleanup, you can call either + the Close() method on the PyHKEY, or the CloseKey() method. -All functions which accept a handle object also accept an integer -- -however, use of the handle object is encouraged. + All functions which accept a handle object also accept an integer -- + however, use of the handle object is encouraged. -Functions: -Close() - Closes the underlying handle. -Detach() - Returns the integer Win32 handle, detaching it from the object + Functions: + Close() - Closes the underlying handle. + Detach() - Returns the integer Win32 handle, detaching it from the object -Properties: -handle - The integer Win32 handle. + Properties: + handle - The integer Win32 handle. + + Operations: + __bool__ - Handles with an open object return true, otherwise false. + __int__ - Converting a handle to an integer returns the Win32 handle. + rich comparison - Handle objects are compared using the handle value. + """ -Operations: -__bool__ - Handles with an open object return true, otherwise false. -__int__ - Converting a handle to an integer returns the Win32 handle. -rich comparison - Handle objects are compared using the handle value. -""" def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" + def __int__(self) -> int: - """int(self) -""" + """int(self)""" + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, / @@ -535,19 +555,21 @@ rich comparison - Handle objects are compared using the handle value. def Close(self) -> None: """Closes the underlying Windows handle. 
-If the handle is already closed, no error is raised. -""" + If the handle is already closed, no error is raised. + """ + def Detach(self) -> int: """Detaches the Windows handle from the handle object. -The result is the value of the handle before it is detached. If the -handle is already detached, this will return zero. + The result is the value of the handle before it is detached. If the + handle is already detached, this will return zero. + + After calling this function, the handle is effectively invalidated, + but the handle is not closed. You would call this function when you + need the underlying win32 handle to exist beyond the lifetime of the + handle object. + """ -After calling this function, the handle is effectively invalidated, -but the handle is not closed. You would call this function when you -need the underlying win32 handle to exist beyond the lifetime of the -handle object. -""" def __hash__(self) -> int: ... @property def handle(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi index c356d2a3338fc..cd80dab55aee6 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/winsound.pyi @@ -16,6 +16,7 @@ SND_SYSTEM - Assign sound to the audio session for system notification sounds. Beep(frequency, duration) - Make a beep through the PC speaker. MessageBeep(type) - Call Windows MessageBeep. """ + import sys from _typeshed import ReadableBuffer from typing import Final, Literal, overload @@ -50,26 +51,27 @@ if sys.platform == "win32": def Beep(frequency: int, duration: int) -> None: """A wrapper around the Windows Beep API. - frequency - Frequency of the sound in hertz. - Must be in the range 37 through 32,767. - duration - How long the sound should play, in milliseconds. -""" + frequency + Frequency of the sound in hertz. + Must be in the range 37 through 32,767. + duration + How long the sound should play, in milliseconds. + """ # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: """A wrapper around the Windows PlaySound API. - sound - The sound to play; a filename, data, or None. - flags - Flag values, ored together. See module documentation. -""" + sound + The sound to play; a filename, data, or None. + flags + Flag values, ored together. See module documentation. + """ + @overload def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... def MessageBeep(type: int = 0) -> None: """Call Windows MessageBeep(x). -x defaults to MB_OK. -""" + x defaults to MB_OK. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi index afc08a8a6963f..82f7f560a74e1 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/handlers.pyi @@ -1,5 +1,5 @@ -"""Base classes for server/gateway implementations -""" +"""Base classes for server/gateway implementations""" + from _typeshed import OptExcInfo from _typeshed.wsgi import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment from abc import abstractmethod @@ -13,12 +13,11 @@ __all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISC def format_date_time(timestamp: float | None) -> str: ... 
# undocumented def read_environ() -> dict[str, str]: - """Read environment, fixing HTTP variables -""" + """Read environment, fixing HTTP variables""" class BaseHandler: - """Manage the invocation of a WSGI application -""" + """Manage the invocation of a WSGI application""" + wsgi_version: tuple[int, int] # undocumented wsgi_multithread: bool wsgi_multiprocess: bool @@ -38,138 +37,146 @@ class BaseHandler: error_headers: list[tuple[str, str]] error_body: bytes def run(self, application: WSGIApplication) -> None: - """Invoke the application -""" + """Invoke the application""" + def setup_environ(self) -> None: - """Set up the environment for one request -""" + """Set up the environment for one request""" + def finish_response(self) -> None: """Send any iterable data, then close self and the iterable -Subclasses intended for use in asynchronous servers will -want to redefine this method, such that it sets up callbacks -in the event loop to iterate over the data, and to call -'self.close()' once the response is finished. -""" + Subclasses intended for use in asynchronous servers will + want to redefine this method, such that it sets up callbacks + in the event loop to iterate over the data, and to call + 'self.close()' once the response is finished. + """ + def get_scheme(self) -> str: - """Return the URL scheme being used -""" + """Return the URL scheme being used""" + def set_content_length(self) -> None: - """Compute Content-Length or switch to chunked encoding if possible -""" + """Compute Content-Length or switch to chunked encoding if possible""" + def cleanup_headers(self) -> None: """Make any necessary header changes or defaults -Subclasses can extend this to add other defaults. -""" + Subclasses can extend this to add other defaults. + """ + def start_response( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None ) -> Callable[[bytes], None]: - """'start_response()' callable as specified by PEP 3333 -""" + """'start_response()' callable as specified by PEP 3333""" + def send_preamble(self) -> None: - """Transmit version/status/date/server, via self._write() -""" + """Transmit version/status/date/server, via self._write()""" + def write(self, data: bytes) -> None: - """'write()' callable as specified by PEP 3333 -""" + """'write()' callable as specified by PEP 3333""" + def sendfile(self) -> bool: """Platform-specific file transmission -Override this method in subclasses to support platform-specific -file transmission. It is only called if the application's -return iterable ('self.result') is an instance of -'self.wsgi_file_wrapper'. - -This method should return a true value if it was able to actually -transmit the wrapped file-like object using a platform-specific -approach. It should return a false value if normal iteration -should be used instead. An exception can be raised to indicate -that transmission was attempted, but failed. - -NOTE: this method should call 'self.send_headers()' if -'self.headers_sent' is false and it is going to attempt direct -transmission of the file. -""" + Override this method in subclasses to support platform-specific + file transmission. It is only called if the application's + return iterable ('self.result') is an instance of + 'self.wsgi_file_wrapper'. + + This method should return a true value if it was able to actually + transmit the wrapped file-like object using a platform-specific + approach. It should return a false value if normal iteration + should be used instead. 
An exception can be raised to indicate + that transmission was attempted, but failed. + + NOTE: this method should call 'self.send_headers()' if + 'self.headers_sent' is false and it is going to attempt direct + transmission of the file. + """ + def finish_content(self) -> None: - """Ensure headers and content have both been sent -""" + """Ensure headers and content have both been sent""" + def close(self) -> None: """Close the iterable (if needed) and reset all instance vars -Subclasses may want to also drop the client connection. -""" + Subclasses may want to also drop the client connection. + """ + def send_headers(self) -> None: - """Transmit headers to the client, via self._write() -""" + """Transmit headers to the client, via self._write()""" + def result_is_file(self) -> bool: - """True if 'self.result' is an instance of 'self.wsgi_file_wrapper' -""" + """True if 'self.result' is an instance of 'self.wsgi_file_wrapper'""" + def client_is_modern(self) -> bool: - """True if client can accept status and headers -""" + """True if client can accept status and headers""" + def log_exception(self, exc_info: OptExcInfo) -> None: """Log the 'exc_info' tuple in the server log -Subclasses may override to retarget the output or change its format. -""" + Subclasses may override to retarget the output or change its format. + """ + def handle_error(self) -> None: - """Log current error, and send error output to client if possible -""" + """Log current error, and send error output to client if possible""" + def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: """WSGI mini-app to create error output -By default, this just uses the 'error_status', 'error_headers', -and 'error_body' attributes to generate an output page. It can -be overridden in a subclass to dynamically generate diagnostics, -choose an appropriate message for the user's preferred language, etc. + By default, this just uses the 'error_status', 'error_headers', + and 'error_body' attributes to generate an output page. It can + be overridden in a subclass to dynamically generate diagnostics, + choose an appropriate message for the user's preferred language, etc. + + Note, however, that it's not recommended from a security perspective to + spit out diagnostics to any old user; ideally, you should have to do + something special to enable diagnostic output, which is why we don't + include any here! + """ -Note, however, that it's not recommended from a security perspective to -spit out diagnostics to any old user; ideally, you should have to do -something special to enable diagnostic output, which is why we don't -include any here! -""" @abstractmethod def _write(self, data: bytes) -> None: """Override in subclass to buffer data for send to client -It's okay if this method actually transmits the data; BaseHandler -just separates write and flush operations for greater efficiency -when the underlying system actually has such a distinction. -""" + It's okay if this method actually transmits the data; BaseHandler + just separates write and flush operations for greater efficiency + when the underlying system actually has such a distinction. + """ + @abstractmethod def _flush(self) -> None: """Override in subclass to force sending of recent '_write()' calls -It's okay if this method is a no-op (i.e., if '_write()' actually -sends the data. -""" + It's okay if this method is a no-op (i.e., if '_write()' actually + sends the data. 
+ """ + @abstractmethod def get_stdin(self) -> InputStream: - """Override in subclass to return suitable 'wsgi.input' -""" + """Override in subclass to return suitable 'wsgi.input'""" + @abstractmethod def get_stderr(self) -> ErrorStream: - """Override in subclass to return suitable 'wsgi.errors' -""" + """Override in subclass to return suitable 'wsgi.errors'""" + @abstractmethod def add_cgi_vars(self) -> None: - """Override in subclass to insert CGI variables in 'self.environ' -""" + """Override in subclass to insert CGI variables in 'self.environ'""" class SimpleHandler(BaseHandler): """Handler that's just initialized with streams, environment, etc. -This handler subclass is intended for synchronous HTTP/1.0 origin servers, -and handles sending the entire response output, given the correct inputs. + This handler subclass is intended for synchronous HTTP/1.0 origin servers, + and handles sending the entire response output, given the correct inputs. -Usage:: + Usage:: + + handler = SimpleHandler( + inp,out,err,env, multithread=False, multiprocess=True + ) + handler.run(app) + """ - handler = SimpleHandler( - inp,out,err,env, multithread=False, multiprocess=True - ) - handler.run(app) -""" stdin: InputStream stdout: IO[bytes] stderr: ErrorStream @@ -192,45 +199,47 @@ Usage:: class BaseCGIHandler(SimpleHandler): """CGI-like systems using input/output/error streams and environ mapping -Usage:: + Usage:: - handler = BaseCGIHandler(inp,out,err,env) - handler.run(app) + handler = BaseCGIHandler(inp,out,err,env) + handler.run(app) -This handler class is useful for gateway protocols like ReadyExec and -FastCGI, that have usable input/output/error streams and an environment -mapping. It's also the base class for CGIHandler, which just uses -sys.stdin, os.environ, and so on. + This handler class is useful for gateway protocols like ReadyExec and + FastCGI, that have usable input/output/error streams and an environment + mapping. It's also the base class for CGIHandler, which just uses + sys.stdin, os.environ, and so on. -The constructor also takes keyword arguments 'multithread' and -'multiprocess' (defaulting to 'True' and 'False' respectively) to control -the configuration sent to the application. It sets 'origin_server' to -False (to enable CGI-like output), and assumes that 'wsgi.run_once' is -False. -""" + The constructor also takes keyword arguments 'multithread' and + 'multiprocess' (defaulting to 'True' and 'False' respectively) to control + the configuration sent to the application. It sets 'origin_server' to + False (to enable CGI-like output), and assumes that 'wsgi.run_once' is + False. + """ class CGIHandler(BaseCGIHandler): """CGI-based invocation via sys.stdin/stdout/stderr and os.environ -Usage:: + Usage:: + + CGIHandler().run(app) - CGIHandler().run(app) + The difference between this class and BaseCGIHandler is that it always + uses 'wsgi.run_once' of 'True', 'wsgi.multithread' of 'False', and + 'wsgi.multiprocess' of 'True'. It does not take any initialization + parameters, but always uses 'sys.stdin', 'os.environ', and friends. -The difference between this class and BaseCGIHandler is that it always -uses 'wsgi.run_once' of 'True', 'wsgi.multithread' of 'False', and -'wsgi.multiprocess' of 'True'. It does not take any initialization -parameters, but always uses 'sys.stdin', 'os.environ', and friends. + If you need to override any of these parameters, use BaseCGIHandler + instead. + """ -If you need to override any of these parameters, use BaseCGIHandler -instead. 
-""" def __init__(self) -> None: ... class IISCGIHandler(BaseCGIHandler): """CGI-based invocation with workaround for IIS path bug -This handler should be used in preference to CGIHandler when deploying on -Microsoft IIS without having set the config allowPathInfo option (IIS>=7) -or metabase allowPathInfoForScriptMappings (IIS<7). -""" + This handler should be used in preference to CGIHandler when deploying on + Microsoft IIS without having set the config allowPathInfo option (IIS>=7) + or metabase allowPathInfoForScriptMappings (IIS<7). + """ + def __init__(self) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi index a59077307138a..6019972f31b1e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/headers.pyi @@ -4,6 +4,7 @@ Much of this module is red-handedly pilfered from email.message in the stdlib, so portions are Copyright (C) 2001 Python Software Foundation, and were written by Barry Warsaw. """ + from re import Pattern from typing import Final, overload from typing_extensions import TypeAlias @@ -13,90 +14,97 @@ _HeaderList: TypeAlias = list[tuple[str, str]] tspecials: Final[Pattern[str]] # undocumented class Headers: - """Manage a collection of HTTP response headers -""" + """Manage a collection of HTTP response headers""" + def __init__(self, headers: _HeaderList | None = None) -> None: ... def __len__(self) -> int: - """Return the total number of headers, including duplicates. -""" + """Return the total number of headers, including duplicates.""" + def __setitem__(self, name: str, val: str) -> None: - """Set the value of a header. -""" + """Set the value of a header.""" + def __delitem__(self, name: str) -> None: """Delete all occurrences of a header, if present. -Does *not* raise an exception if the header is missing. -""" + Does *not* raise an exception if the header is missing. + """ + def __getitem__(self, name: str) -> str | None: """Get the first header value for 'name' -Return None if the header is missing instead of raising an exception. + Return None if the header is missing instead of raising an exception. + + Note that if the header appeared multiple times, the first exactly which + occurrence gets returned is undefined. Use getall() to get all + the values matching a header field name. + """ -Note that if the header appeared multiple times, the first exactly which -occurrence gets returned is undefined. Use getall() to get all -the values matching a header field name. -""" def __contains__(self, name: str) -> bool: - """Return true if the message contains the header. -""" + """Return true if the message contains the header.""" + def get_all(self, name: str) -> list[str]: """Return a list of all the values for the named field. -These will be sorted in the order they appeared in the original header -list or were added to this instance, and may contain duplicates. Any -fields deleted and re-inserted are always appended to the header list. -If no fields exist with the given name, returns an empty list. -""" + These will be sorted in the order they appeared in the original header + list or were added to this instance, and may contain duplicates. Any + fields deleted and re-inserted are always appended to the header list. + If no fields exist with the given name, returns an empty list. 
+ """ + @overload def get(self, name: str, default: str) -> str: - """Get the first header value for 'name', or return 'default' -""" + """Get the first header value for 'name', or return 'default'""" + @overload def get(self, name: str, default: str | None = None) -> str | None: ... def keys(self) -> list[str]: """Return a list of all the header field names. -These will be sorted in the order they appeared in the original header -list, or were added to this instance, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. -""" + These will be sorted in the order they appeared in the original header + list, or were added to this instance, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + def values(self) -> list[str]: """Return a list of all header values. -These will be sorted in the order they appeared in the original header -list, or were added to this instance, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. -""" + These will be sorted in the order they appeared in the original header + list, or were added to this instance, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + def items(self) -> _HeaderList: """Get all the header fields and values. -These will be sorted in the order they were in the original header -list, or were added to this instance, and may contain duplicates. -Any fields deleted and re-inserted are always appended to the header -list. -""" + These will be sorted in the order they were in the original header + list, or were added to this instance, and may contain duplicates. + Any fields deleted and re-inserted are always appended to the header + list. + """ + def __bytes__(self) -> bytes: ... def setdefault(self, name: str, value: str) -> str: """Return first matching header value for 'name', or 'value' -If there is no header named 'name', add a new header with name 'name' -and value 'value'. -""" + If there is no header named 'name', add a new header with name 'name' + and value 'value'. + """ + def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: """Extended header setting. -_name is the header field to add. keyword arguments can be used to set -additional parameters for the header field, with underscores converted -to dashes. Normally the parameter will be added as key="value" unless -value is None, in which case only the key will be added. + _name is the header field to add. keyword arguments can be used to set + additional parameters for the header field, with underscores converted + to dashes. Normally the parameter will be added as key="value" unless + value is None, in which case only the key will be added. -Example: + Example: -h.add_header('content-disposition', 'attachment', filename='bud.gif') + h.add_header('content-disposition', 'attachment', filename='bud.gif') -Note that unlike the corresponding 'email.message' method, this does -*not* handle '(charset, language, value)' tuples: all values must be -strings or None. -""" + Note that unlike the corresponding 'email.message' method, this does + *not* handle '(charset, language, value)' tuples: all values must be + strings or None. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi index 572aae0a4d7c7..87bdda21f280c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/simple_server.pyi @@ -9,6 +9,7 @@ production use. For example usage, see the 'if __name__=="__main__"' block at the end of the module. See also the BaseHTTPServer module docs for other API information. """ + from _typeshed.wsgi import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment from http.server import BaseHTTPRequestHandler, HTTPServer from typing import Final, TypeVar, overload @@ -25,8 +26,8 @@ class ServerHandler(SimpleHandler): # undocumented server_software: str class WSGIServer(HTTPServer): - """BaseHTTPServer that implements the Python WSGI protocol -""" + """BaseHTTPServer that implements the Python WSGI protocol""" + application: WSGIApplication | None base_environ: WSGIEnvironment # only available after call to setup_environ() def setup_environ(self) -> None: ... @@ -44,8 +45,8 @@ _S = TypeVar("_S", bound=WSGIServer) @overload def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: - """Create a new WSGI server listening on `host` and `port` for `app` -""" + """Create a new WSGI server listening on `host` and `port` for `app`""" + @overload def make_server( host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi index 204229e9fd363..cbb98184ec87b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/types.pyi @@ -1,5 +1,5 @@ -"""WSGI-related types for static type checking -""" +"""WSGI-related types for static type checking""" + from _typeshed import OptExcInfo from collections.abc import Callable, Iterable, Iterator from typing import Any, Protocol @@ -8,8 +8,8 @@ from typing_extensions import TypeAlias __all__ = ["StartResponse", "WSGIEnvironment", "WSGIApplication", "InputStream", "ErrorStream", "FileWrapper"] class StartResponse(Protocol): - """start_response() callable as defined in PEP 3333 -""" + """start_response() callable as defined in PEP 3333""" + def __call__( self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ..., / ) -> Callable[[bytes], object]: ... @@ -18,16 +18,16 @@ WSGIEnvironment: TypeAlias = dict[str, Any] WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] class InputStream(Protocol): - """WSGI input stream as defined in PEP 3333 -""" + """WSGI input stream as defined in PEP 3333""" + def read(self, size: int = ..., /) -> bytes: ... def readline(self, size: int = ..., /) -> bytes: ... def readlines(self, hint: int = ..., /) -> list[bytes]: ... def __iter__(self) -> Iterator[bytes]: ... class ErrorStream(Protocol): - """WSGI error stream as defined in PEP 3333 -""" + """WSGI error stream as defined in PEP 3333""" + def flush(self) -> object: ... def write(self, s: str, /) -> object: ... def writelines(self, seq: list[str], /) -> object: ... @@ -37,6 +37,6 @@ class _Readable(Protocol): # Optional: def close(self) -> object: ... 
class FileWrapper(Protocol): - """WSGI file wrapper as defined in PEP 3333 -""" + """WSGI file wrapper as defined in PEP 3333""" + def __call__(self, file: _Readable, block_size: int = ..., /) -> Iterable[bytes]: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi index 03674798dcaee..a89336a1c1aaa 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/util.pyi @@ -1,5 +1,5 @@ -"""Miscellaneous WSGI-related Utilities -""" +"""Miscellaneous WSGI-related Utilities""" + import sys from _typeshed.wsgi import WSGIEnvironment from collections.abc import Callable @@ -10,8 +10,8 @@ if sys.version_info >= (3, 13): __all__ += ["is_hop_by_hop"] class FileWrapper: - """Wrapper to convert file-like objects to iterables -""" + """Wrapper to convert file-like objects to iterables""" + filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists @@ -23,39 +23,40 @@ class FileWrapper: def __next__(self) -> bytes: ... def guess_scheme(environ: WSGIEnvironment) -> str: - """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https' - """ + """Return a guess for whether 'wsgi.url_scheme' should be 'http' or 'https'""" + def application_uri(environ: WSGIEnvironment) -> str: - """Return the application's base URI (no PATH_INFO or QUERY_STRING) -""" + """Return the application's base URI (no PATH_INFO or QUERY_STRING)""" + def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: - """Return the full request URI, optionally including the query string -""" + """Return the full request URI, optionally including the query string""" + def shift_path_info(environ: WSGIEnvironment) -> str | None: """Shift a name from PATH_INFO to SCRIPT_NAME, returning it -If there are no remaining path segments in PATH_INFO, return None. -Note: 'environ' is modified in-place; use a copy if you need to keep -the original PATH_INFO or SCRIPT_NAME. + If there are no remaining path segments in PATH_INFO, return None. + Note: 'environ' is modified in-place; use a copy if you need to keep + the original PATH_INFO or SCRIPT_NAME. + + Note: when PATH_INFO is just a '/', this returns '' and appends a trailing + '/' to SCRIPT_NAME, even though empty path segments are normally ignored, + and SCRIPT_NAME doesn't normally end in a '/'. This is intentional + behavior, to ensure that an application can tell the difference between + '/x' and '/x/' when traversing to objects. + """ -Note: when PATH_INFO is just a '/', this returns '' and appends a trailing -'/' to SCRIPT_NAME, even though empty path segments are normally ignored, -and SCRIPT_NAME doesn't normally end in a '/'. This is intentional -behavior, to ensure that an application can tell the difference between -'/x' and '/x/' when traversing to objects. -""" def setup_testing_defaults(environ: WSGIEnvironment) -> None: """Update 'environ' with trivial defaults for testing purposes -This adds various parameters required for WSGI, including HTTP_HOST, -SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO, -and all of the wsgi.* variables. It only supplies default values, -and does not replace any existing settings for these variables. + This adds various parameters required for WSGI, including HTTP_HOST, + SERVER_NAME, SERVER_PORT, REQUEST_METHOD, SCRIPT_NAME, PATH_INFO, + and all of the wsgi.* variables. 
It only supplies default values, + and does not replace any existing settings for these variables. + + This routine is intended to make it easier for unit tests of WSGI + servers and applications to set up dummy environments. It should *not* + be used by actual WSGI servers or applications, since the data is fake! + """ -This routine is intended to make it easier for unit tests of WSGI -servers and applications to set up dummy environments. It should *not* -be used by actual WSGI servers or applications, since the data is fake! -""" def is_hop_by_hop(header_name: str) -> bool: - """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header -""" + """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi index 622c2bc9d2767..b537918d4368f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/wsgiref/validate.pyi @@ -104,6 +104,7 @@ Some of the things this checks: sys.stderr, because we only know it isn't called when the object is garbage collected). """ + from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication from collections.abc import Callable, Iterable, Iterator from typing import Any, NoReturn @@ -113,19 +114,19 @@ __all__ = ["validator"] class WSGIWarning(Warning): """ -Raised in response to WSGI-spec-related warnings -""" + Raised in response to WSGI-spec-related warnings + """ def validator(application: WSGIApplication) -> WSGIApplication: """ -When applied between a WSGI server and a WSGI application, this -middleware will check for WSGI compliance on a number of levels. -This middleware does not modify the request or response in any -way, but will raise an AssertionError if anything seems off -(except for a failure to close the application iterator, which -will be printed to stderr -- there's no way to raise an exception -at that point). -""" + When applied between a WSGI server and a WSGI application, this + middleware will check for WSGI compliance on a number of levels. + This middleware does not modify the request or response in any + way, but will raise an AssertionError if anything seems off + (except for a failure to close the application iterator, which + will be printed to stderr -- there's no way to raise an exception + at that point). + """ class InputWrapper: input: InputStream diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi index d688654d8d8e8..bf5ccda81c32d 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xdrlib.pyi @@ -3,6 +3,7 @@ See: RFC 1014 """ + from collections.abc import Callable, Sequence from typing import TypeVar @@ -20,14 +21,15 @@ class Error(Exception): msg -- contains the message """ + msg: str def __init__(self, msg: str) -> None: ... class ConversionError(Error): ... class Packer: - """Pack various data representations into a buffer. -""" + """Pack various data representations into a buffer.""" + def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... @@ -49,8 +51,8 @@ class Packer: def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... class Unpacker: - """Unpacks various data representations from the given buffer. 
-""" + """Unpacks various data representations from the given buffer.""" + def __init__(self, data: bytes) -> None: ... def reset(self, data: bytes) -> None: ... def get_position(self) -> int: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi index 6c9a8e08b4616..a99fc38e3fd9e 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/__init__.pyi @@ -15,6 +15,7 @@ etree -- The ElementTree XML library. This is a subset of the full ElementTree XML release. """ + # At runtime, listing submodules in __all__ without them being imported is # valid, and causes them to be included in a star import. See #6523 __all__ = ["dom", "parsers", "sax", "etree"] # noqa: F822 # pyright: ignore[reportUnsupportedDunderAll] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi index 0988324b0a656..47f645764b214 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/NodeFilter.pyi @@ -3,8 +3,9 @@ from xml.dom.minidom import Node class NodeFilter: """ -This is the DOM2 NodeFilter interface. It contains only constants. -""" + This is the DOM2 NodeFilter interface. It contains only constants. + """ + FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi index 6d8e3df101f16..2022c8dc422e9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/__init__.pyi @@ -13,13 +13,14 @@ pulldom -- DOM builder supporting on-demand tree-building for selected subtrees of the document. """ + from typing import Any, Final, Literal from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation class Node: - """Class giving the NodeType constants. -""" + """Class giving the NodeType constants.""" + __slots__ = () ELEMENT_NODE: Final = 1 ATTRIBUTE_NODE: Final = 2 @@ -54,8 +55,9 @@ VALIDATION_ERR: Final = 16 class DOMException(Exception): """Abstract base class for DOM exceptions. -Exceptions with specific codes are specializations of this class. -""" + Exceptions with specific codes are specializations of this class. + """ + code: int def __init__(self, *args: Any, **kw: Any) -> None: ... def _get_code(self) -> int: ... @@ -109,8 +111,8 @@ class ValidationErr(DOMException): code: Literal[16] class UserDataHandler: - """Class giving the operation constants for UserDataHandler.handle(). -""" + """Class giving the operation constants for UserDataHandler.handle().""" + NODE_CLONED: Final = 1 NODE_IMPORTED: Final = 2 NODE_DELETED: Final = 3 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi index ef09ed10c9c47..4d6cf5f45ca06 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/domreg.pyi @@ -2,6 +2,7 @@ directly. Instead, the functions getDOMImplementation and registerDOMImplementation should be imported from xml.dom. 
""" + from _typeshed.xml import DOMImplementation from collections.abc import Callable, Iterable @@ -11,22 +12,23 @@ registered: dict[str, Callable[[], DOMImplementation]] def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: """registerDOMImplementation(name, factory) -Register the factory function with the name. The factory function -should return an object which implements the DOMImplementation -interface. The factory function can either return the same object, -or a new one (e.g. if that implementation supports some -customization). -""" + Register the factory function with the name. The factory function + should return an object which implements the DOMImplementation + interface. The factory function can either return the same object, + or a new one (e.g. if that implementation supports some + customization). + """ + def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: """getDOMImplementation(name = None, features = ()) -> DOM implementation. -Return a suitable DOM implementation. The name is either -well-known, the module name of a DOM implementation, or None. If -it is not None, imports the corresponding module and returns -DOMImplementation object if the import succeeds. + Return a suitable DOM implementation. The name is either + well-known, the module name of a DOM implementation, or None. If + it is not None, imports the corresponding module and returns + DOMImplementation object if the import succeeds. -If name is not given, consider the available implementations to -find one with the required feature set. If no implementation can -be found, raise an ImportError. The features list must be a sequence -of (feature, version) pairs which are passed to hasFeature. -""" + If name is not given, consider the available implementations to + find one with the required feature set. If no implementation can + be found, raise an ImportError. The features list must be a sequence + of (feature, version) pairs which are passed to hasFeature. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi index 47bcdcbe84b10..e5536237d7187 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -3,6 +3,7 @@ from a string or file. This avoids all the overhead of SAX and pulldom to gain performance. """ + from _typeshed import ReadableBuffer, SupportsRead from typing import Any, Final, NoReturn from typing_extensions import TypeAlias @@ -34,30 +35,32 @@ class ElementInfo: class ExpatBuilder: """Document builder that uses Expat to build a ParsedXML.DOM document -instance. -""" + instance. + """ + document: Document # Created in self.reset() curNode: DocumentFragment | Element | Document # Created in self.reset() def __init__(self, options: Options | None = None) -> None: ... def createParser(self) -> XMLParserType: - """Create a new parser object. -""" + """Create a new parser object.""" + def getParser(self) -> XMLParserType: - """Return the parser object, creating a new one if needed. -""" + """Return the parser object, creating a new one if needed.""" + def reset(self) -> None: - """Free all data structures used during DOM construction. 
-""" + """Free all data structures used during DOM construction.""" + def install(self, parser: XMLParserType) -> None: - """Install the callbacks needed to build the DOM into the parser. -""" + """Install the callbacks needed to build the DOM into the parser.""" + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: """Parse a document from a file object, returning the document -node. -""" + node. + """ + def parseString(self, string: str | ReadableBuffer) -> Document: - """Parse a document from a string, returning the document node. -""" + """Parse a document from a string, returning the document node.""" + def start_doctype_decl_handler( self, doctypeName: str, systemId: str | None, publicId: str | None, has_internal_subset: bool ) -> None: ... @@ -89,8 +92,9 @@ node. class FilterVisibilityController: """Wrapper around a DOMBuilderFilter which implements the checks -to make the whatToShow filter attribute work. -""" + to make the whatToShow filter attribute work. + """ + __slots__ = ("filter",) filter: DOMBuilderFilter def __init__(self, filter: DOMBuilderFilter) -> None: ... @@ -113,12 +117,13 @@ class Skipper(FilterCrutch): class FragmentBuilder(ExpatBuilder): """Builder which constructs document fragments given XML source -text and a context node. + text and a context node. + + The context node is expected to provide information about the + namespace declarations which are in scope at the start of the + fragment. + """ -The context node is expected to provide information about the -namespace declarations which are in scope at the start of the -fragment. -""" fragment: DocumentFragment | None originalDocument: Document context: Node @@ -126,46 +131,47 @@ fragment. def reset(self) -> None: ... def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> DocumentFragment: # type: ignore[override] """Parse a document fragment from a file object, returning the -fragment node. -""" + fragment node. + """ + def parseString(self, string: ReadableBuffer | str) -> DocumentFragment: # type: ignore[override] """Parse a document fragment from a string, returning the -fragment node. -""" + fragment node. + """ + def external_entity_ref_handler(self, context: str, base: str | None, systemId: str | None, publicId: str | None) -> int: ... class Namespaces: - """Mix-in class for builders; adds support for namespaces. -""" + """Mix-in class for builders; adds support for namespaces.""" + def createParser(self) -> XMLParserType: - """Create a new namespace-handling parser. -""" + """Create a new namespace-handling parser.""" + def install(self, parser: XMLParserType) -> None: - """Insert the namespace-handlers onto the parser. -""" + """Insert the namespace-handlers onto the parser.""" + def start_namespace_decl_handler(self, prefix: str | None, uri: str) -> None: - """Push this namespace declaration on our storage. -""" + """Push this namespace declaration on our storage.""" + def start_element_handler(self, name: str, attributes: list[str]) -> None: ... def end_element_handler(self, name: str) -> None: ... # only exists if __debug__ class ExpatBuilderNS(Namespaces, ExpatBuilder): - """Document builder that supports namespaces. -""" + """Document builder that supports namespaces.""" + class FragmentBuilderNS(Namespaces, FragmentBuilder): - """Fragment builder that supports namespaces. -""" + """Fragment builder that supports namespaces.""" + class ParseEscape(Exception): - """Exception raised to short-circuit parsing in InternalSubsetExtractor. 
-""" + """Exception raised to short-circuit parsing in InternalSubsetExtractor.""" class InternalSubsetExtractor(ExpatBuilder): - """XML processor which can rip out the internal document type subset. -""" + """XML processor which can rip out the internal document type subset.""" + subset: str | list[str] | None = None def getSubset(self) -> str: - """Return the internal subset as a string. -""" + """Return the internal subset as a string.""" + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler( # type: ignore[override] @@ -177,24 +183,27 @@ class InternalSubsetExtractor(ExpatBuilder): def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True) -> Document: """Parse a document, returning the resulting Document node. -'file' may be either a file name or an open file object. -""" + 'file' may be either a file name or an open file object. + """ + def parseString(string: str | ReadableBuffer, namespaces: bool = True) -> Document: """Parse a document from a string, returning the resulting -Document node. -""" + Document node. + """ + def parseFragment(file: str | SupportsRead[ReadableBuffer | str], context: Node, namespaces: bool = True) -> DocumentFragment: """Parse a fragment of a document, given the context from which it -was originally extracted. context should be the parent of the -node(s) which are in the fragment. + was originally extracted. context should be the parent of the + node(s) which are in the fragment. + + 'file' may be either a file name or an open file object. + """ -'file' may be either a file name or an open file object. -""" def parseFragmentString(string: str | ReadableBuffer, context: Node, namespaces: bool = True) -> DocumentFragment: """Parse a fragment of a document from a string, given the context -from which it was originally extracted. context should be the -parent of the node(s) which are in the fragment. -""" + from which it was originally extracted. context should be the + parent of the node(s) which are in the fragment. + """ + def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: - """Create a builder based on an Options object. -""" + """Create a builder based on an Options object.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi index fb8de652f49b2..a0dffd6cc5434 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minicompat.pyi @@ -3,6 +3,7 @@ This module contains internal implementation details and should not be imported; use xml.dom.minidom instead. """ + from collections.abc import Iterable from typing import Any, Literal, TypeVar @@ -16,16 +17,16 @@ class NodeList(list[_T]): __slots__ = () @property def length(self) -> int: - """The number of nodes in the NodeList. -""" + """The number of nodes in the NodeList.""" + def item(self, index: int) -> _T | None: ... class EmptyNodeList(tuple[()]): __slots__ = () @property def length(self) -> Literal[0]: - """The number of nodes in the NodeList. -""" + """The number of nodes in the NodeList.""" + def item(self, index: int) -> None: ... def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi index a05af66b70022..6547439155c1c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/minidom.pyi @@ -14,6 +14,7 @@ Todo: interface * SAX 2 namespaces """ + import xml.dom from _collections_abc import dict_keys, dict_values from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite @@ -67,11 +68,11 @@ class _UserDataHandler(Protocol): def parse( file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None ) -> Document: - """Parse a file into a DOM by filename or file object. -""" + """Parse a file into a DOM by filename or file object.""" + def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: - """Parse a file into a DOM from a string. -""" + """Parse a file into a DOM from a string.""" + @overload def getDOMImplementation(features: None = None) -> DOMImplementation: ... @overload @@ -104,16 +105,16 @@ class Node(xml.dom.Node): @property def firstChild(self) -> _NodesThatAreChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _NodesThatAreChildren | None: - """Last child node, or None. -""" + """Last child node, or None.""" + @property def localName(self) -> str | None: # non-null only for Element and Attr - """Namespace-local name of this node. -""" + """Namespace-local name of this node.""" + def __bool__(self) -> Literal[True]: ... @overload def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... @@ -192,19 +193,17 @@ class DocumentFragment(Node): childNodes: NodeList[_DocumentFragmentChildren] @property def firstChild(self) -> _DocumentFragmentChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _DocumentFragmentChildren | None: - """Last child node, or None. -""" - + """Last child node, or None.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" + """Namespace-local name of this node.""" + def __init__(self) -> None: ... def insertBefore( # type: ignore[override] self, newChild: _DFChildrenPlusFragment, refChild: _DocumentFragmentChildren | None @@ -232,20 +231,16 @@ class Attr(Node): childNodes: NodeList[_AttrChildren] @property def firstChild(self) -> _AttrChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _AttrChildren | None: - """Last child node, or None. -""" - + """Last child node, or None.""" namespaceURI: str | None prefix: str | None @property def localName(self) -> str: - """Namespace-local name of this attribute. -""" - + """Namespace-local name of this attribute.""" name: str value: str specified: bool @@ -257,12 +252,12 @@ class Attr(Node): def unlink(self) -> None: ... @property def isId(self) -> bool: - """True if this attribute is an ID. -""" + """True if this attribute is an ID.""" + @property def schemaType(self) -> TypeInfo: - """Schema type for this attribute. -""" + """Schema type for this attribute.""" + def insertBefore(self, newChild: _AttrChildrenPlusFragment, refChild: _AttrChildren | None) -> _AttrChildrenPlusFragment: ... # type: ignore[override] def appendChild(self, node: _AttrChildrenPlusFragment) -> _AttrChildrenPlusFragment: ... 
# type: ignore[override] @overload # type: ignore[override] @@ -275,18 +270,19 @@ class Attr(Node): # because that's the only place we use it. class NamedNodeMap: """The attribute list is a transient interface to the underlying -dictionaries. Mutations here will change the underlying element's -dictionary. + dictionaries. Mutations here will change the underlying element's + dictionary. + + Ordering is imposed artificially and does not reflect the order of + attributes as found in an input document. + """ -Ordering is imposed artificially and does not reflect the order of -attributes as found in an input document. -""" __slots__ = ("_attrs", "_attrsNS", "_ownerElement") def __init__(self, attrs: dict[str, Attr], attrsNS: dict[_NSName, Attr], ownerElement: Element) -> None: ... @property def length(self) -> int: - """Number of nodes in the NamedNodeMap. -""" + """Number of nodes in the NamedNodeMap.""" + def item(self, index: int) -> Node | None: ... def items(self) -> list[tuple[str, str]]: ... def itemsNS(self) -> list[tuple[_NSName, str]]: ... @@ -343,29 +339,23 @@ class Element(Node): nodeValue: None @property def attributes(self) -> NamedNodeMap: # type: ignore[override] - """NamedNodeMap of attributes on the element. -""" - + """NamedNodeMap of attributes on the element.""" parentNode: Document | Element | DocumentFragment | None nextSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None previousSibling: _DocumentChildren | _ElementChildren | _DocumentFragmentChildren | None childNodes: NodeList[_ElementChildren] @property def firstChild(self) -> _ElementChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _ElementChildren | None: - """Last child node, or None. -""" - + """Last child node, or None.""" namespaceURI: str | None prefix: str | None @property def localName(self) -> str: - """Namespace-local name of this element. -""" - + """Namespace-local name of this element.""" schemaType: TypeInfo tagName: str @@ -376,12 +366,13 @@ class Element(Node): def getAttribute(self, attname: str) -> str: """Returns the value of the specified attribute. -Returns the value of the element's attribute named attname as -a string. An empty string is returned if the element does not -have such an attribute. Note that an empty string may also be -returned as an explicitly given attribute value, use the -hasAttribute method to distinguish these two cases. -""" + Returns the value of the element's attribute named attname as + a string. An empty string is returned if the element does not + have such an attribute. Note that an empty string may also be + returned as an explicitly given attribute value, use the + hasAttribute method to distinguish these two cases. + """ + def getAttributeNS(self, namespaceURI: str | None, localName: str) -> str: ... def setAttribute(self, attname: str, value: str) -> None: ... def setAttributeNS(self, namespaceURI: str | None, qualifiedName: str, value: str) -> None: ... @@ -396,23 +387,26 @@ hasAttribute method to distinguish these two cases. def hasAttribute(self, name: str) -> bool: """Checks whether the element has an attribute with the specified name. -Returns True if the element has an attribute with the specified name. -Otherwise, returns False. -""" + Returns True if the element has an attribute with the specified name. + Otherwise, returns False. + """ + def hasAttributeNS(self, namespaceURI: str | None, localName: str) -> bool: ... 
def getElementsByTagName(self, name: str) -> NodeList[Element]: """Returns all descendant elements with the given tag name. -Returns the list of all descendant elements (not direct children -only) with the specified tag name. -""" + Returns the list of all descendant elements (not direct children + only) with the specified tag name. + """ + def getElementsByTagNameNS(self, namespaceURI: str | None, localName: str) -> NodeList[Element]: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: """Write an XML element to a file-like object -Write the element to the writer object that must provide -a write method (e.g. a file or StringIO object). -""" + Write the element to the writer object that must provide + a write method (e.g. a file or StringIO object). + """ + def hasAttributes(self) -> bool: ... def setIdAttribute(self, name: str) -> None: ... def setIdAttributeNS(self, namespaceURI: str | None, localName: str) -> None: ... @@ -431,19 +425,20 @@ a write method (e.g. a file or StringIO object). class Childless: """Mixin that makes childless-ness easy to implement and avoids -the complexity of the Node methods that deal with children. -""" + the complexity of the Node methods that deal with children. + """ + __slots__ = () attributes: None childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + def appendChild(self, node: _NodesThatAreChildren | DocumentFragment) -> NoReturn: ... def hasChildNodes(self) -> Literal[False]: ... def insertBefore( @@ -466,20 +461,16 @@ class ProcessingInstruction(Childless, Node): childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" - + """The type of the None singleton.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" target: str data: str @@ -497,17 +488,15 @@ class CharacterData(Childless, Node): @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" ownerDocument: Document | None data: str def __init__(self) -> None: ... @property def length(self) -> int: - """Length of the string data. -""" + """Length of the string data.""" + def __len__(self) -> int: ... def substringData(self, offset: int, count: int) -> str: ... def appendData(self, arg: str) -> None: ... @@ -528,32 +517,27 @@ class Text(CharacterData): childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" - + """The type of the None singleton.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" data: str def splitText(self, offset: int) -> Self: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def replaceWholeText(self, content: str) -> Self | None: ... 
@property def isWhitespaceInElementContent(self) -> bool: - """True iff this text node contains only whitespace and is in element content. -""" + """True iff this text node contains only whitespace and is in element content.""" + @property def wholeText(self) -> str: - """The text of all logically-adjacent text nodes. -""" + """The text of all logically-adjacent text nodes.""" class Comment(CharacterData): nodeType: ClassVar[Literal[8]] @@ -567,19 +551,17 @@ class Comment(CharacterData): childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" - + """The type of the None singleton.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" + """Namespace-local name of this node.""" + def __init__(self, data: str) -> None: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... @@ -610,12 +592,11 @@ class ReadOnlySequentialNamedNodeMap(Generic[_N]): def setNamedItemNS(self, node: Node) -> NoReturn: ... @property def length(self) -> int: - """Number of entries in the NamedNodeMap. -""" + """Number of entries in the NamedNodeMap.""" class Identified: - """Mix-in class that supports the publicId and systemId attributes. -""" + """Mix-in class that supports the publicId and systemId attributes.""" + __slots__ = ("publicId", "systemId") publicId: str | None systemId: str | None @@ -632,20 +613,16 @@ class DocumentType(Identified, Childless, Node): childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" - + """The type of the None singleton.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" name: str | None internalSubset: str | None entities: ReadOnlySequentialNamedNodeMap[Entity] @@ -667,20 +644,16 @@ class Entity(Identified, Node): childNodes: NodeList[_EntityChildren] @property def firstChild(self) -> _EntityChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _EntityChildren | None: - """Last child node, or None. -""" - + """Last child node, or None.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" actualEncoding: str | None encoding: str | None version: str | None @@ -704,19 +677,17 @@ class Notation(Identified, Childless, Node): childNodes: EmptyNodeList @property def firstChild(self) -> None: - """The type of the None singleton. -""" + """The type of the None singleton.""" + @property def lastChild(self) -> None: - """The type of the None singleton. -""" - + """The type of the None singleton.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" + """Namespace-local name of this node.""" + def __init__(self, name: str, publicId: str | None, systemId: str | None) -> None: ... 
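The construction side of the minidom API typed above (createElement, appendChild, attribute setting, serialization) can be exercised as follows; this is an illustrative sketch only and not part of the vendored stubs:

    from xml.dom.minidom import getDOMImplementation

    impl = getDOMImplementation()
    doc = impl.createDocument(None, "root", None)   # (namespaceURI, qualifiedName, doctype)
    item = doc.createElement("item")
    item.setAttribute("id", "1")
    item.appendChild(doc.createTextNode("hello"))
    doc.documentElement.appendChild(item)
    print(doc.toprettyxml(indent="  "))             # pretty-printed XML output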
class DOMImplementation(DOMImplementationLS): @@ -728,11 +699,12 @@ class DOMImplementation(DOMImplementationLS): class ElementInfo: """Object that represents content-model information for an element. -This implementation is not expected to be used in practice; DOM -builders should provide implementations which do the right thing -using information available to it. + This implementation is not expected to be used in practice; DOM + builders should provide implementations which do the right thing + using information available to it. + + """ -""" __slots__ = ("tagName",) tagName: str def __init__(self, name: str) -> None: ... @@ -741,14 +713,14 @@ using information available to it. def isElementContent(self) -> bool: ... def isEmpty(self) -> bool: """Returns true iff this element is declared to have an EMPTY -content model. -""" + content model. + """ + def isId(self, aname: str) -> bool: - """Returns true iff the named attribute is a DTD-style ID. -""" + """Returns true iff the named attribute is a DTD-style ID.""" + def isIdNS(self, namespaceURI: str | None, localName: str) -> bool: - """Returns true iff the identified attribute is a DTD-style ID. -""" + """Returns true iff the identified attribute is a DTD-style ID.""" _DocumentChildrenPlusFragment = TypeVar("_DocumentChildrenPlusFragment", bound=_DocumentChildren | DocumentFragment) @@ -765,20 +737,16 @@ class Document(Node, DocumentLS): childNodes: NodeList[_DocumentChildren] @property def firstChild(self) -> _DocumentChildren | None: - """First child node, or None. -""" + """First child node, or None.""" + @property def lastChild(self) -> _DocumentChildren | None: - """Last child node, or None. -""" - + """Last child node, or None.""" namespaceURI: None prefix: None @property def localName(self) -> None: - """Namespace-local name of this node. -""" - + """Namespace-local name of this node.""" implementation: DOMImplementation actualEncoding: str | None encoding: str | None diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi index a7888dfb54b2b..2518ca34ccd53 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/pulldom.pyi @@ -70,8 +70,7 @@ class PullDOM(ContentHandler): def buildDocument(self, uri: str | None, tagname: str | None) -> Element: ... def endDocument(self) -> None: ... def clear(self) -> None: - """clear(): Explicitly release parsing structures -""" + """clear(): Explicitly release parsing structures""" class ErrorHandler: def warning(self, exception: BaseException) -> None: ... @@ -93,8 +92,7 @@ class DOMEventStream: def expandNode(self, node: Document) -> None: ... def reset(self) -> None: ... def clear(self) -> None: - """clear(): Explicitly release parsing objects -""" + """clear(): Explicitly release parsing objects""" class SAX2DOM(PullDOM): def startElementNS(self, name: _NSName, tagName: str | None, attrs: AttributesNSImpl) -> None: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi index fae3199f173b9..837803c4724ae 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -1,5 +1,5 @@ -"""Implementation of the DOM Level 3 'LS-Load' feature. 
-""" +"""Implementation of the DOM Level 3 'LS-Load' feature.""" + from _typeshed import SupportsRead from typing import Any, Final, Literal, NoReturn from xml.dom.minidom import Document, Node, _DOMErrorHandler @@ -9,9 +9,10 @@ __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] class Options: """Features object that has variables set for each DOMBuilder feature. -The DOMBuilder class uses an instance of this class to pass settings to -the ExpatBuilder class. -""" + The DOMBuilder class uses an instance of this class to pass settings to + the ExpatBuilder class. + """ + namespaces: int namespace_declarations: bool validation: bool @@ -66,8 +67,9 @@ class DOMInputSource: class DOMBuilderFilter: """Element filter which can be used to tailor construction of -a DOM instance. -""" + a DOM instance. + """ + FILTER_ACCEPT: Final = 1 FILTER_REJECT: Final = 2 FILTER_SKIP: Final = 3 @@ -77,8 +79,8 @@ a DOM instance. def startContainer(self, element: Node) -> Literal[1, 2, 3, 4]: ... class DocumentLS: - """Mixin to create documents that conform to the load/save spec. -""" + """Mixin to create documents that conform to the load/save spec.""" + async_: bool def abort(self) -> NoReturn: ... def load(self, uri: str) -> NoReturn: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi index afd3030cb755d..99c3f287b639a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -32,6 +32,7 @@ You can also use the ElementTree class to wrap an element structure and convert it to and from XML. """ + import sys from _collections_abc import dict_keys from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite @@ -82,8 +83,8 @@ class ParseError(SyntaxError): # In reality it works based on `.tag` attribute duck typing. def iselement(element: object) -> TypeGuard[Element]: - """Return True if *element* appears to be an Element. -""" + """Return True if *element* appears to be an Element.""" + @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -100,16 +101,17 @@ def canonicalize( ) -> str: """Convert XML to its C14N 2.0 serialised form. -If *out* is provided, it must be a file or file-like object that receives -the serialised canonical XML output (text, not bytes) through its ``.write()`` -method. To write to a file, open it in text mode with encoding "utf-8". -If *out* is not provided, this function returns the output as text string. + If *out* is provided, it must be a file or file-like object that receives + the serialised canonical XML output (text, not bytes) through its ``.write()`` + method. To write to a file, open it in text mode with encoding "utf-8". + If *out* is not provided, this function returns the output as text string. -Either *xml_data* (an XML string) or *from_file* (a file path or -file-like object) must be provided as input. + Either *xml_data* (an XML string) or *from_file* (a file path or + file-like object) must be provided as input. + + The configuration options are the same as for the ``C14NWriterTarget``. + """ -The configuration options are the same as for the ``C14NWriterTarget``. -""" @overload def canonicalize( xml_data: str | ReadableBuffer | None = None, @@ -168,69 +170,69 @@ class Element(Generic[_Tag]): def __copy__(self) -> Element[_Tag]: ... 
# returns the type of self in Python impl, but not in C impl def __deepcopy__(self, memo: Any, /) -> Element: ... # Only exists in C impl def __delitem__(self, key: SupportsIndex | slice, /) -> None: - """Delete self[key]. -""" + """Delete self[key].""" + @overload def __getitem__(self, key: SupportsIndex, /) -> Element: - """Return self[key]. -""" + """Return self[key].""" + @overload def __getitem__(self, key: slice, /) -> list[Element]: ... def __len__(self) -> int: - """Return len(self). -""" + """Return len(self).""" # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. def __iter__(self) -> Iterator[Element]: ... @overload def __setitem__(self, key: SupportsIndex, value: Element[Any], /) -> None: - """Set self[key] to value. -""" + """Set self[key] to value.""" + @overload def __setitem__(self, key: slice, value: Iterable[Element[Any]], /) -> None: ... # Doesn't really exist in earlier versions, where __len__ is called implicitly instead @deprecated("Testing an element's truth value is deprecated.") def __bool__(self) -> bool: - """True if self else False -""" + """True if self else False""" def SubElement(parent: Element[Any], tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... def Comment(text: str | None = None) -> Element[_ElementCallable]: """Comment element factory. -This function creates a special element which the standard serializer -serializes as an XML comment. + This function creates a special element which the standard serializer + serializes as an XML comment. -*text* is a string containing the comment string. + *text* is a string containing the comment string. + + """ -""" def ProcessingInstruction(target: str, text: str | None = None) -> Element[_ElementCallable]: """Processing Instruction element factory. -This function creates a special element which the standard serializer -serializes as an XML comment. + This function creates a special element which the standard serializer + serializes as an XML comment. -*target* is a string containing the processing instruction, *text* is a -string containing the processing instruction contents, if any. + *target* is a string containing the processing instruction, *text* is a + string containing the processing instruction contents, if any. -""" + """ PI = ProcessingInstruction class QName: """Qualified name wrapper. -This class can be used to wrap a QName attribute value in order to get -proper namespace handing on output. + This class can be used to wrap a QName attribute value in order to get + proper namespace handing on output. -*text_or_uri* is a string containing the QName value either in the form -{uri}local, or if the tag argument is given, the URI part of a QName. + *text_or_uri* is a string containing the QName value either in the form + {uri}local, or if the tag argument is given, the URI part of a QName. -*tag* is an optional argument which if given, will make the first -argument (text_or_uri) be interpreted as a URI, and this argument (tag) -be interpreted as a local name. + *tag* is an optional argument which if given, will make the first + argument (text_or_uri) be interpreted as a URI, and this argument (tag) + be interpreted as a local name. + + """ -""" text: str def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... def __lt__(self, other: QName | str) -> bool: ... @@ -245,86 +247,93 @@ _Root = TypeVar("_Root", Element, Element | None, default=Element | None) class ElementTree(Generic[_Root]): """An XML element hierarchy. 
-This class also provides support for serialization to and from -standard XML. + This class also provides support for serialization to and from + standard XML. -*element* is an optional root element node, -*file* is an optional file handle or file name of an XML file whose -contents will be used to initialize the tree with. + *element* is an optional root element node, + *file* is an optional file handle or file name of an XML file whose + contents will be used to initialize the tree with. + + """ -""" def __init__(self, element: Element[Any] | None = None, file: _FileRead | None = None) -> None: ... def getroot(self) -> _Root: - """Return root element of this tree. -""" + """Return root element of this tree.""" + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: """Load external XML document into element tree. -*source* is a file name or file object, *parser* is an optional parser -instance that defaults to XMLParser. + *source* is a file name or file object, *parser* is an optional parser + instance that defaults to XMLParser. -ParseError is raised if the parser fails to parse the document. + ParseError is raised if the parser fails to parse the document. -Returns the root element of the given source document. + Returns the root element of the given source document. + + """ -""" def iter(self, tag: str | None = None) -> Generator[Element, None, None]: """Create and return tree iterator for the root element. -The iterator loops over all elements in this tree, in document order. + The iterator loops over all elements in this tree, in document order. -*tag* is a string with the tag name to iterate over -(default is to return all elements). + *tag* is a string with the tag name to iterate over + (default is to return all elements). + + """ -""" def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: """Find first matching element by tag name or path. -Same as getroot().find(path), which is Element.find() + Same as getroot().find(path), which is Element.find() -*path* is a string having either an element tag or an XPath, -*namespaces* is an optional mapping from namespace prefix to full name. + *path* is a string having either an element tag or an XPath, + *namespaces* is an optional mapping from namespace prefix to full name. -Return the first matching element, or None if no element was found. + Return the first matching element, or None if no element was found. + + """ -""" @overload def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: """Find first matching element by tag name or path. -Same as getroot().findtext(path), which is Element.findtext() + Same as getroot().findtext(path), which is Element.findtext() -*path* is a string having either an element tag or an XPath, -*namespaces* is an optional mapping from namespace prefix to full name. + *path* is a string having either an element tag or an XPath, + *namespaces* is an optional mapping from namespace prefix to full name. -Return the first matching element, or None if no element was found. + Return the first matching element, or None if no element was found. + + """ -""" @overload def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: """Find all matching subelements by tag name or path. -Same as getroot().findall(path), which is Element.findall(). 
+ Same as getroot().findall(path), which is Element.findall(). -*path* is a string having either an element tag or an XPath, -*namespaces* is an optional mapping from namespace prefix to full name. + *path* is a string having either an element tag or an XPath, + *namespaces* is an optional mapping from namespace prefix to full name. -Return list containing all matching elements in document order. + Return list containing all matching elements in document order. + + """ -""" @overload def iterfind(self, path: Literal[""], namespaces: dict[str, str] | None = None) -> None: # type: ignore[overload-overlap] """Find all matching subelements by tag name or path. -Same as getroot().iterfind(path), which is element.iterfind() + Same as getroot().iterfind(path), which is element.iterfind() -*path* is a string having either an element tag or an XPath, -*namespaces* is an optional mapping from namespace prefix to full name. + *path* is a string having either an element tag or an XPath, + *namespaces* is an optional mapping from namespace prefix to full name. -Return an iterable yielding all matching elements in document order. + Return an iterable yielding all matching elements in document order. + + """ -""" @overload def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( @@ -339,27 +348,28 @@ Return an iterable yielding all matching elements in document order. ) -> None: """Write element tree to a file as XML. -Arguments: - *file_or_filename* -- file name or a file object opened for writing + Arguments: + *file_or_filename* -- file name or a file object opened for writing - *encoding* -- the output encoding (default: US-ASCII) + *encoding* -- the output encoding (default: US-ASCII) - *xml_declaration* -- bool indicating if an XML declaration should be - added to the output. If None, an XML declaration - is added if encoding IS NOT either of: - US-ASCII, UTF-8, or Unicode + *xml_declaration* -- bool indicating if an XML declaration should be + added to the output. If None, an XML declaration + is added if encoding IS NOT either of: + US-ASCII, UTF-8, or Unicode - *default_namespace* -- sets the default XML namespace (for "xmlns") + *default_namespace* -- sets the default XML namespace (for "xmlns") - *method* -- either "xml" (default), "html, "text", or "c14n" + *method* -- either "xml" (default), "html, "text", or "c14n" - *short_empty_elements* -- controls the formatting of elements - that contain no content. If True (default) - they are emitted as a single self-closed - tag, otherwise they are emitted as a pair - of start/end tags + *short_empty_elements* -- controls the formatting of elements + that contain no content. If True (default) + they are emitted as a single self-closed + tag, otherwise they are emitted as a pair + of start/end tags + + """ -""" def write_c14n(self, file: _FileWriteC14N) -> None: ... HTML_EMPTY: Final[set[str]] @@ -367,15 +377,16 @@ HTML_EMPTY: Final[set[str]] def register_namespace(prefix: str, uri: str) -> None: """Register a namespace prefix. -The registry is global, and any existing mapping for either the -given prefix or the namespace URI will be removed. + The registry is global, and any existing mapping for either the + given prefix or the namespace URI will be removed. -*prefix* is the namespace prefix, *uri* is a namespace uri. Tags and -attributes in this namespace will be serialized with prefix if possible. + *prefix* is the namespace prefix, *uri* is a namespace uri. 
Tags and + attributes in this namespace will be serialized with prefix if possible. -ValueError is raised if prefix is reserved or is invalid. + ValueError is raised if prefix is reserved or is invalid. + + """ -""" @overload def tostring( element: Element[Any], @@ -388,17 +399,18 @@ def tostring( ) -> bytes: """Generate string representation of XML element. -All subelements are included. If encoding is "unicode", a string -is returned. Otherwise a bytestring is returned. + All subelements are included. If encoding is "unicode", a string + is returned. Otherwise a bytestring is returned. -*element* is an Element instance, *encoding* is an optional output -encoding defaulting to US-ASCII, *method* is an optional output which can -be one of "xml" (default), "html", "text" or "c14n", *default_namespace* -sets the default XML namespace (for "xmlns"). + *element* is an Element instance, *encoding* is an optional output + encoding defaulting to US-ASCII, *method* is an optional output which can + be one of "xml" (default), "html", "text" or "c14n", *default_namespace* + sets the default XML namespace (for "xmlns"). -Returns an (optionally) encoded string containing the XML data. + Returns an (optionally) encoded string containing the XML data. + + """ -""" @overload def tostring( element: Element[Any], @@ -452,37 +464,39 @@ def tostringlist( def dump(elem: Element[Any] | ElementTree[Any]) -> None: """Write element tree or element structure to sys.stdout. -This function should be used for debugging only. + This function should be used for debugging only. -*elem* is either an ElementTree, or a single Element. The exact output -format is implementation dependent. In this version, it's written as an -ordinary XML file. + *elem* is either an ElementTree, or a single Element. The exact output + format is implementation dependent. In this version, it's written as an + ordinary XML file. + + """ -""" def indent(tree: Element[Any] | ElementTree[Any], space: str = " ", level: int = 0) -> None: """Indent an XML document by inserting newlines and indentation space -after elements. + after elements. -*tree* is the ElementTree or Element to modify. The (root) element -itself will not be changed, but the tail text of all elements in its -subtree will be adapted. + *tree* is the ElementTree or Element to modify. The (root) element + itself will not be changed, but the tail text of all elements in its + subtree will be adapted. -*space* is the whitespace to insert for each indentation level, two -space characters by default. + *space* is the whitespace to insert for each indentation level, two + space characters by default. + + *level* is the initial indentation level. Setting this to a higher + value than 0 can be used for indenting subtrees that are more deeply + nested inside of a document. + """ -*level* is the initial indentation level. Setting this to a higher -value than 0 can be used for indenting subtrees that are more deeply -nested inside of a document. -""" def parse(source: _FileRead, parser: XMLParser[Any] | None = None) -> ElementTree[Element]: """Parse XML document into element tree. -*source* is a filename or file object containing XML data, -*parser* is an optional parser instance defaulting to XMLParser. + *source* is a filename or file object containing XML data, + *parser* is an optional parser instance defaulting to XMLParser. -Return an ElementTree instance. + Return an ElementTree instance. 
-""" + """ # This class is defined inside the body of iterparse @type_check_only @@ -496,61 +510,64 @@ class _IterParseIterator(Iterator[tuple[str, Element]], Protocol): def iterparse(source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None) -> _IterParseIterator: """Incrementally parse XML document into ElementTree. -This class also reports what's going on to the user based on the -*events* it is initialized with. The supported events are the strings -"start", "end", "start-ns" and "end-ns" (the "ns" events are used to get -detailed namespace information). If *events* is omitted, only -"end" events are reported. + This class also reports what's going on to the user based on the + *events* it is initialized with. The supported events are the strings + "start", "end", "start-ns" and "end-ns" (the "ns" events are used to get + detailed namespace information). If *events* is omitted, only + "end" events are reported. -*source* is a filename or file object containing XML data, *events* is -a list of events to report back, *parser* is an optional parser instance. + *source* is a filename or file object containing XML data, *events* is + a list of events to report back, *parser* is an optional parser instance. -Returns an iterator providing (event, elem) pairs. + Returns an iterator providing (event, elem) pairs. -""" + """ _EventQueue: TypeAlias = tuple[str] | tuple[str, tuple[str, str]] | tuple[str, None] class XMLPullParser(Generic[_E]): def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser[_E] | None = None) -> None: ... def feed(self, data: str | ReadableBuffer) -> None: - """Feed encoded data to parser. -""" + """Feed encoded data to parser.""" + def close(self) -> None: """Finish feeding data to parser. -Unlike XMLParser, does not return the root element. Use -read_events() to consume elements from XMLPullParser. -""" + Unlike XMLParser, does not return the root element. Use + read_events() to consume elements from XMLPullParser. + """ + def read_events(self) -> Iterator[_EventQueue | tuple[str, _E]]: """Return an iterator over currently available (event, elem) pairs. -Events are consumed from the internal event queue as they are -retrieved from the iterator. -""" + Events are consumed from the internal event queue as they are + retrieved from the iterator. + """ + def flush(self) -> None: ... def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: """Parse XML document from string constant. -This function can be used to embed "XML Literals" in Python code. + This function can be used to embed "XML Literals" in Python code. -*text* is a string containing XML data, *parser* is an -optional parser instance, defaulting to the standard XMLParser. + *text* is a string containing XML data, *parser* is an + optional parser instance, defaulting to the standard XMLParser. -Returns an Element instance. + Returns an Element instance. + + """ -""" def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: """Parse XML document from string constant for its IDs. -*text* is a string containing XML data, *parser* is an -optional parser instance, defaulting to the standard XMLParser. + *text* is a string containing XML data, *parser* is an + optional parser instance, defaulting to the standard XMLParser. -Returns an (Element, dict) tuple, in which the -dict maps element id:s to elements. + Returns an (Element, dict) tuple, in which the + dict maps element id:s to elements. 
-""" + """ # This is aliased to XML in the source. fromstring = XML @@ -558,12 +575,12 @@ fromstring = XML def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: """Parse XML document from sequence of string fragments. -*sequence* is a list of other sequence, *parser* is an optional parser -instance, defaulting to the standard XMLParser. + *sequence* is a list of other sequence, *parser* is an optional parser + instance, defaulting to the standard XMLParser. -Returns an Element instance. + Returns an Element instance. -""" + """ # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -603,26 +620,27 @@ class TreeBuilder: class C14NWriterTarget: """ -Canonicalization writer target for the XMLParser. + Canonicalization writer target for the XMLParser. -Serialises parse events to XML C14N 2.0. + Serialises parse events to XML C14N 2.0. -The *write* function is used for writing out the resulting data stream -as text (not bytes). To write to a file, open it in text mode with encoding -"utf-8" and pass its ``.write`` method. + The *write* function is used for writing out the resulting data stream + as text (not bytes). To write to a file, open it in text mode with encoding + "utf-8" and pass its ``.write`` method. -Configuration options: + Configuration options: + + - *with_comments*: set to true to include comments + - *strip_text*: set to true to strip whitespace before and after text content + - *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}" + - *qname_aware_tags*: a set of qname aware tag names in which prefixes + should be replaced in text content + - *qname_aware_attrs*: a set of qname aware attribute names in which prefixes + should be replaced in text content + - *exclude_attrs*: a set of attribute names that should not be serialised + - *exclude_tags*: a set of tag names that should not be serialised + """ -- *with_comments*: set to true to include comments -- *strip_text*: set to true to strip whitespace before and after text content -- *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}" -- *qname_aware_tags*: a set of qname aware tag names in which prefixes - should be replaced in text content -- *qname_aware_attrs*: a set of qname aware attribute names in which prefixes - should be replaced in text content -- *exclude_attrs*: a set of attribute names that should not be serialised -- *exclude_tags*: a set of tag names that should not be serialised -""" def __init__( self, write: Callable[[str], object], diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi index c5cc45d5d1bca..82eee29371d62 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/__init__.pyi @@ -6,4 +6,5 @@ expat -- Python wrapper for James Clark's Expat parser, with namespace support. 
""" + from xml.parsers import expat as expat diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi index e805d22e393b9..c4ec99a4b18dc 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/__init__.pyi @@ -1,5 +1,5 @@ -"""Interface to the Expat non-validating XML parser. -""" +"""Interface to the Expat non-validating XML parser.""" + from pyexpat import * # This is actually implemented in the C module pyexpat, but considers itself to live here. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi index 4ef138228f0ea..185a8df99ccd5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/errors.pyi @@ -1,3 +1,3 @@ -"""Constants used to describe error conditions. -""" +"""Constants used to describe error conditions.""" + from pyexpat.errors import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi index eef0f347d3318..23131be71d8f2 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/parsers/expat/model.pyi @@ -1,3 +1,3 @@ -"""Constants used to interpret content model information. -""" +"""Constants used to interpret content model information.""" + from pyexpat.model import * diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi index 70ad3bdf4be21..48fd8107f293a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/__init__.pyi @@ -18,6 +18,7 @@ xmlreader -- Base classes and constants which define the SAX 2 API for expatreader -- Driver that allows use of the Expat parser with SAX. """ + import sys from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable @@ -44,11 +45,12 @@ default_parser_list: Final[list[str]] def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: """Creates and returns a SAX parser. -Creates the first parser it is able to instantiate of the ones -given in the iterable created by chaining parser_list and -default_parser_list. The iterables must contain the names of Python -modules containing both a SAX parser and a create_parser function. -""" + Creates the first parser it is able to instantiate of the ones + given in the iterable created by chaining parser_list and + default_parser_list. The iterables must contain the names of Python + modules containing both a SAX parser and a create_parser function. + """ + def parse(source: _Source, handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi index cac7eb77e9042..1803a2abc1bed 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/_exceptions.pyi @@ -1,83 +1,88 @@ -"""Different kinds of SAX Exceptions -""" +"""Different kinds of SAX Exceptions""" + from typing import NoReturn from xml.sax.xmlreader import Locator class SAXException(Exception): """Encapsulate an XML error or warning. This class can contain -basic error or warning information from either the XML parser or -the application: you can subclass it to provide additional -functionality, or to add localization. Note that although you will -receive a SAXException as the argument to the handlers in the -ErrorHandler interface, you are not actually required to raise -the exception; instead, you can simply read the information in -it. -""" + basic error or warning information from either the XML parser or + the application: you can subclass it to provide additional + functionality, or to add localization. Note that although you will + receive a SAXException as the argument to the handlers in the + ErrorHandler interface, you are not actually required to raise + the exception; instead, you can simply read the information in + it. + """ + def __init__(self, msg: str, exception: Exception | None = None) -> None: """Creates an exception. The message is required, but the exception -is optional. -""" + is optional. + """ + def getMessage(self) -> str: - """Return a message for this exception. -""" + """Return a message for this exception.""" + def getException(self) -> Exception | None: - """Return the embedded exception, or None if there was none. -""" + """Return the embedded exception, or None if there was none.""" + def __getitem__(self, ix: object) -> NoReturn: """Avoids weird error messages if someone does exception[ix] by -mistake, since Exception has __getitem__ defined. -""" + mistake, since Exception has __getitem__ defined. + """ class SAXParseException(SAXException): """Encapsulate an XML parse error or warning. -This exception will include information for locating the error in -the original XML document. Note that although the application will -receive a SAXParseException as the argument to the handlers in the -ErrorHandler interface, the application is not actually required -to raise the exception; instead, it can simply read the -information in it and take a different action. + This exception will include information for locating the error in + the original XML document. Note that although the application will + receive a SAXParseException as the argument to the handlers in the + ErrorHandler interface, the application is not actually required + to raise the exception; instead, it can simply read the + information in it and take a different action. + + Since this exception is a subclass of SAXException, it inherits + the ability to wrap another exception. + """ -Since this exception is a subclass of SAXException, it inherits -the ability to wrap another exception. -""" def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: - """Creates the exception. The exception parameter is allowed to be None. -""" + """Creates the exception. The exception parameter is allowed to be None.""" + def getColumnNumber(self) -> int | None: """The column number of the end of the text where the exception -occurred. -""" + occurred. 
+ """ + def getLineNumber(self) -> int | None: - """The line number of the end of the text where the exception occurred. -""" + """The line number of the end of the text where the exception occurred.""" + def getPublicId(self) -> str | None: - """Get the public identifier of the entity where the exception occurred. -""" + """Get the public identifier of the entity where the exception occurred.""" + def getSystemId(self) -> str | None: - """Get the system identifier of the entity where the exception occurred. -""" + """Get the system identifier of the entity where the exception occurred.""" class SAXNotRecognizedException(SAXException): """Exception class for an unrecognized identifier. -An XMLReader will raise this exception when it is confronted with an -unrecognized feature or property. SAX applications and extensions may -use this class for similar purposes. -""" + An XMLReader will raise this exception when it is confronted with an + unrecognized feature or property. SAX applications and extensions may + use this class for similar purposes. + """ + class SAXNotSupportedException(SAXException): """Exception class for an unsupported operation. -An XMLReader will raise this exception when a service it cannot -perform is requested (specifically setting a state or value). SAX -applications and extensions may use this class for similar -purposes. -""" + An XMLReader will raise this exception when a service it cannot + perform is requested (specifically setting a state or value). SAX + applications and extensions may use this class for similar + purposes. + """ + class SAXReaderNotAvailable(SAXNotSupportedException): """Exception class for a missing driver. -An XMLReader module (driver) should raise this exception when it -is first imported, e.g. when a support module cannot be imported. -It also may be raised during parsing, e.g. if executing an external -program is not permitted. -""" + An XMLReader module (driver) should raise this exception when it + is first imported, e.g. when a support module cannot be imported. + It also may be raised during parsing, e.g. if executing an external + program is not permitted. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi index 5422fe5754122..42e85c503ad99 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/expatreader.pyi @@ -2,6 +2,7 @@ SAX driver for the pyexpat C module. This driver works with pyexpat.__version__ == '2.22'. """ + import sys from _typeshed import ReadableBuffer from collections.abc import Mapping @@ -26,9 +27,10 @@ class _ClosedParser: class ExpatLocator(xmlreader.Locator): """Locator for use with the ExpatParser class. -This uses a weak reference to the parser object to avoid creating -a circular reference between the parser and the content handler. -""" + This uses a weak reference to the parser object to avoid creating + a circular reference between the parser and the content handler. + """ + def __init__(self, parser: ExpatParser) -> None: ... def getColumnNumber(self) -> int | None: ... def getLineNumber(self) -> int: ... @@ -36,12 +38,12 @@ a circular reference between the parser and the content handler. def getSystemId(self) -> str | None: ... class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator): - """SAX driver for the pyexpat C module. 
-""" + """SAX driver for the pyexpat C module.""" + def __init__(self, namespaceHandling: _BoolType = 0, bufsize: int = 65516) -> None: ... def parse(self, source: xmlreader.InputSource | _Source) -> None: - """Parse an XML document from a URL or an InputSource. -""" + """Parse an XML document from a URL or an InputSource.""" + def prepareParser(self, source: xmlreader.InputSource) -> None: ... def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: ... def getFeature(self, name: str) -> _BoolType: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi index 8575ccf43539f..97f8c2f4fa0e4 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/handler.pyi @@ -8,6 +8,7 @@ of the interfaces. $Id$ """ + import sys from typing import Final, NoReturn, Protocol, type_check_only from xml.sax import xmlreader @@ -23,22 +24,22 @@ class _ErrorHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used class ErrorHandler: """Basic interface for SAX error handlers. -If you create an object that implements this interface, then -register the object with your XMLReader, the parser will call the -methods in your object to report all warnings and errors. There -are three levels of errors available: warnings, (possibly) -recoverable errors, and unrecoverable errors. All methods take a -SAXParseException as the only parameter. -""" + If you create an object that implements this interface, then + register the object with your XMLReader, the parser will call the + methods in your object to report all warnings and errors. There + are three levels of errors available: warnings, (possibly) + recoverable errors, and unrecoverable errors. All methods take a + SAXParseException as the only parameter. + """ + def error(self, exception: BaseException) -> NoReturn: - """Handle a recoverable error. -""" + """Handle a recoverable error.""" + def fatalError(self, exception: BaseException) -> NoReturn: - """Handle a non-recoverable error. -""" + """Handle a non-recoverable error.""" + def warning(self, exception: BaseException) -> None: - """Handle a warning. -""" + """Handle a warning.""" @type_check_only class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -59,157 +60,170 @@ class _ContentHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used class ContentHandler: """Interface for receiving logical document content events. -This is the main callback interface in SAX, and the one most -important to applications. The order of events in this interface -mirrors the order of the information in the document. -""" + This is the main callback interface in SAX, and the one most + important to applications. The order of events in this interface + mirrors the order of the information in the document. + """ + def setDocumentLocator(self, locator: xmlreader.Locator) -> None: """Called by the parser to give the application a locator for -locating the origin of document events. - -SAX parsers are strongly encouraged (though not absolutely -required) to supply a locator: if it does so, it must supply -the locator to the application by invoking this method before -invoking any of the other methods in the DocumentHandler -interface. - -The locator allows the application to determine the end -position of any document-related event, even if the parser is -not reporting an error. 
Typically, the application will use -this information for reporting its own errors (such as -character content that does not match an application's -business rules). The information returned by the locator is -probably not sufficient for use with a search engine. - -Note that the locator will return correct information only -during the invocation of the events in this interface. The -application should not attempt to use it at any other time. -""" + locating the origin of document events. + + SAX parsers are strongly encouraged (though not absolutely + required) to supply a locator: if it does so, it must supply + the locator to the application by invoking this method before + invoking any of the other methods in the DocumentHandler + interface. + + The locator allows the application to determine the end + position of any document-related event, even if the parser is + not reporting an error. Typically, the application will use + this information for reporting its own errors (such as + character content that does not match an application's + business rules). The information returned by the locator is + probably not sufficient for use with a search engine. + + Note that the locator will return correct information only + during the invocation of the events in this interface. The + application should not attempt to use it at any other time. + """ + def startDocument(self) -> None: """Receive notification of the beginning of a document. -The SAX parser will invoke this method only once, before any -other methods in this interface or in DTDHandler (except for -setDocumentLocator). -""" + The SAX parser will invoke this method only once, before any + other methods in this interface or in DTDHandler (except for + setDocumentLocator). + """ + def endDocument(self) -> None: """Receive notification of the end of a document. -The SAX parser will invoke this method only once, and it will -be the last method invoked during the parse. The parser shall -not invoke this method until it has either abandoned parsing -(because of an unrecoverable error) or reached the end of -input. -""" + The SAX parser will invoke this method only once, and it will + be the last method invoked during the parse. The parser shall + not invoke this method until it has either abandoned parsing + (because of an unrecoverable error) or reached the end of + input. + """ + def startPrefixMapping(self, prefix: str | None, uri: str) -> None: """Begin the scope of a prefix-URI Namespace mapping. -The information from this event is not necessary for normal -Namespace processing: the SAX XML reader will automatically -replace prefixes for element and attribute names when the -http://xml.org/sax/features/namespaces feature is true (the -default). - -There are cases, however, when applications need to use -prefixes in character data or in attribute values, where they -cannot safely be expanded automatically; the -start/endPrefixMapping event supplies the information to the -application to expand prefixes in those contexts itself, if -necessary. - -Note that start/endPrefixMapping events are not guaranteed to -be properly nested relative to each-other: all -startPrefixMapping events will occur before the corresponding -startElement event, and all endPrefixMapping events will occur -after the corresponding endElement event, but their order is -not guaranteed. 
-""" + The information from this event is not necessary for normal + Namespace processing: the SAX XML reader will automatically + replace prefixes for element and attribute names when the + http://xml.org/sax/features/namespaces feature is true (the + default). + + There are cases, however, when applications need to use + prefixes in character data or in attribute values, where they + cannot safely be expanded automatically; the + start/endPrefixMapping event supplies the information to the + application to expand prefixes in those contexts itself, if + necessary. + + Note that start/endPrefixMapping events are not guaranteed to + be properly nested relative to each-other: all + startPrefixMapping events will occur before the corresponding + startElement event, and all endPrefixMapping events will occur + after the corresponding endElement event, but their order is + not guaranteed. + """ + def endPrefixMapping(self, prefix: str | None) -> None: """End the scope of a prefix-URI mapping. -See startPrefixMapping for details. This event will always -occur after the corresponding endElement event, but the order -of endPrefixMapping events is not otherwise guaranteed. -""" + See startPrefixMapping for details. This event will always + occur after the corresponding endElement event, but the order + of endPrefixMapping events is not otherwise guaranteed. + """ + def startElement(self, name: str, attrs: xmlreader.AttributesImpl) -> None: """Signals the start of an element in non-namespace mode. -The name parameter contains the raw XML 1.0 name of the -element type as a string and the attrs parameter holds an -instance of the Attributes class containing the attributes of -the element. -""" + The name parameter contains the raw XML 1.0 name of the + element type as a string and the attrs parameter holds an + instance of the Attributes class containing the attributes of + the element. + """ + def endElement(self, name: str) -> None: """Signals the end of an element in non-namespace mode. -The name parameter contains the name of the element type, just -as with the startElement event. -""" + The name parameter contains the name of the element type, just + as with the startElement event. + """ + def startElementNS(self, name: tuple[str | None, str], qname: str | None, attrs: xmlreader.AttributesNSImpl) -> None: """Signals the start of an element in namespace mode. -The name parameter contains the name of the element type as a -(uri, localname) tuple, the qname parameter the raw XML 1.0 -name used in the source document, and the attrs parameter -holds an instance of the Attributes class containing the -attributes of the element. + The name parameter contains the name of the element type as a + (uri, localname) tuple, the qname parameter the raw XML 1.0 + name used in the source document, and the attrs parameter + holds an instance of the Attributes class containing the + attributes of the element. + + The uri part of the name tuple is None for elements which have + no namespace. + """ -The uri part of the name tuple is None for elements which have -no namespace. -""" def endElementNS(self, name: tuple[str | None, str], qname: str | None) -> None: """Signals the end of an element in namespace mode. -The name parameter contains the name of the element type, just -as with the startElementNS event. -""" + The name parameter contains the name of the element type, just + as with the startElementNS event. + """ + def characters(self, content: str) -> None: """Receive notification of character data. 
-The Parser will call this method to report each chunk of -character data. SAX parsers may return all contiguous -character data in a single chunk, or they may split it into -several chunks; however, all of the characters in any single -event must come from the same external entity so that the -Locator provides useful information. -""" + The Parser will call this method to report each chunk of + character data. SAX parsers may return all contiguous + character data in a single chunk, or they may split it into + several chunks; however, all of the characters in any single + event must come from the same external entity so that the + Locator provides useful information. + """ + def ignorableWhitespace(self, whitespace: str) -> None: """Receive notification of ignorable whitespace in element content. -Validating Parsers must use this method to report each chunk -of ignorable whitespace (see the W3C XML 1.0 recommendation, -section 2.10): non-validating parsers may also use this method -if they are capable of parsing and using content models. + Validating Parsers must use this method to report each chunk + of ignorable whitespace (see the W3C XML 1.0 recommendation, + section 2.10): non-validating parsers may also use this method + if they are capable of parsing and using content models. + + SAX parsers may return all contiguous whitespace in a single + chunk, or they may split it into several chunks; however, all + of the characters in any single event must come from the same + external entity, so that the Locator provides useful + information. + """ -SAX parsers may return all contiguous whitespace in a single -chunk, or they may split it into several chunks; however, all -of the characters in any single event must come from the same -external entity, so that the Locator provides useful -information. -""" def processingInstruction(self, target: str, data: str) -> None: """Receive notification of a processing instruction. -The Parser will invoke this method once for each processing -instruction found: note that processing instructions may occur -before or after the main document element. + The Parser will invoke this method once for each processing + instruction found: note that processing instructions may occur + before or after the main document element. + + A SAX parser should never report an XML declaration (XML 1.0, + section 2.8) or a text declaration (XML 1.0, section 4.3.1) + using this method. + """ -A SAX parser should never report an XML declaration (XML 1.0, -section 2.8) or a text declaration (XML 1.0, section 4.3.1) -using this method. -""" def skippedEntity(self, name: str) -> None: """Receive notification of a skipped entity. -The Parser will invoke this method once for each entity -skipped. Non-validating processors may skip entities if they -have not seen the declarations (because, for example, the -entity was declared in an external DTD subset). All processors -may skip external entities, depending on the values of the -http://xml.org/sax/features/external-general-entities and the -http://xml.org/sax/features/external-parameter-entities -properties. -""" + The Parser will invoke this method once for each entity + skipped. Non-validating processors may skip entities if they + have not seen the declarations (because, for example, the + entity was declared in an external DTD subset). 
All processors + may skip external entities, depending on the values of the + http://xml.org/sax/features/external-general-entities and the + http://xml.org/sax/features/external-parameter-entities + properties. + """ @type_check_only class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -219,15 +233,15 @@ class _DTDHandlerProtocol(Protocol): # noqa: Y046 # Protocol is not used class DTDHandler: """Handle DTD events. -This interface specifies only those DTD events required for basic -parsing (unparsed entities and attributes). -""" + This interface specifies only those DTD events required for basic + parsing (unparsed entities and attributes). + """ + def notationDecl(self, name: str, publicId: str | None, systemId: str) -> None: - """Handle a notation declaration event. -""" + """Handle a notation declaration event.""" + def unparsedEntityDecl(self, name: str, publicId: str | None, systemId: str, ndata: str) -> None: - """Handle an unparsed entity declaration event. -""" + """Handle an unparsed entity declaration event.""" @type_check_only class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used @@ -235,16 +249,17 @@ class _EntityResolverProtocol(Protocol): # noqa: Y046 # Protocol is not used class EntityResolver: """Basic interface for resolving entities. If you create an object -implementing this interface, then register the object with your -Parser, the parser will call the method in your object to -resolve all external entities. Note that DefaultHandler implements -this interface with the default behaviour. -""" + implementing this interface, then register the object with your + Parser, the parser will call the method in your object to + resolve all external entities. Note that DefaultHandler implements + this interface with the default behaviour. + """ + def resolveEntity(self, publicId: str | None, systemId: str) -> str: """Resolve the system identifier of an entity and return either -the system identifier to read from as a string, or an InputSource -to read from. -""" + the system identifier to read from as a string, or an InputSource + to read from. + """ feature_namespaces: Final = "http://xml.org/sax/features/namespaces" feature_namespace_prefixes: Final = "http://xml.org/sax/features/namespace-prefixes" @@ -265,44 +280,47 @@ if sys.version_info >= (3, 10): class LexicalHandler: """Optional SAX2 handler for lexical events. -This handler is used to obtain lexical information about an XML -document, that is, information about how the document was encoded -(as opposed to what it contains, which is reported to the -ContentHandler), such as comments and CDATA marked section -boundaries. + This handler is used to obtain lexical information about an XML + document, that is, information about how the document was encoded + (as opposed to what it contains, which is reported to the + ContentHandler), such as comments and CDATA marked section + boundaries. + + To set the LexicalHandler of an XMLReader, use the setProperty + method with the property identifier + 'http://xml.org/sax/properties/lexical-handler'. + """ -To set the LexicalHandler of an XMLReader, use the setProperty -method with the property identifier -'http://xml.org/sax/properties/lexical-handler'. -""" def comment(self, content: str) -> None: """Reports a comment anywhere in the document (including the -DTD and outside the document element). + DTD and outside the document element). + + content is a string that holds the contents of the comment. 
+ """ -content is a string that holds the contents of the comment. -""" def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> None: """Report the start of the DTD declarations, if the document -has an associated DTD. + has an associated DTD. -A startEntity event will be reported before declaration events -from the external DTD subset are reported, and this can be -used to infer from which subset DTD declarations derive. + A startEntity event will be reported before declaration events + from the external DTD subset are reported, and this can be + used to infer from which subset DTD declarations derive. + + name is the name of the document element type, public_id the + public identifier of the DTD (or None if none were supplied) + and system_id the system identifier of the external subset (or + None if none were supplied). + """ -name is the name of the document element type, public_id the -public identifier of the DTD (or None if none were supplied) -and system_id the system identifier of the external subset (or -None if none were supplied). -""" def endDTD(self) -> None: - """Signals the end of DTD declarations. -""" + """Signals the end of DTD declarations.""" + def startCDATA(self) -> None: """Reports the beginning of a CDATA marked section. -The contents of the CDATA marked section will be reported -through the characters event. -""" + The contents of the CDATA marked section will be reported + through the characters event. + """ + def endCDATA(self) -> None: - """Reports the end of a CDATA marked section. -""" + """Reports the end of a CDATA marked section.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi index 3d4e6c4f28b99..9873218936be9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/saxutils.pyi @@ -1,6 +1,7 @@ """A library of useful helper classes to the SAX classes, for the convenience of application and driver writers. """ + from _typeshed import SupportsWrite from codecs import StreamReaderWriter, StreamWriter from collections.abc import Mapping @@ -11,28 +12,30 @@ from xml.sax import _Source, handler, xmlreader def escape(data: str, entities: Mapping[str, str] = {}) -> str: """Escape &, <, and > in a string of data. -You can escape other strings of data by passing a dictionary as -the optional entities parameter. The keys and values must all be -strings; each key will be replaced with its corresponding value. -""" + You can escape other strings of data by passing a dictionary as + the optional entities parameter. The keys and values must all be + strings; each key will be replaced with its corresponding value. + """ + def unescape(data: str, entities: Mapping[str, str] = {}) -> str: """Unescape &, <, and > in a string of data. -You can unescape other strings of data by passing a dictionary as -the optional entities parameter. The keys and values must all be -strings; each key will be replaced with its corresponding value. -""" + You can unescape other strings of data by passing a dictionary as + the optional entities parameter. The keys and values must all be + strings; each key will be replaced with its corresponding value. + """ + def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: """Escape and quote an attribute value. -Escape &, <, and > in a string of data, then quote it for use as -an attribute value. 
The " character will be escaped as well, if -necessary. + Escape &, <, and > in a string of data, then quote it for use as + an attribute value. The " character will be escaped as well, if + necessary. -You can escape other strings of data by passing a dictionary as -the optional entities parameter. The keys and values must all be -strings; each key will be replaced with its corresponding value. -""" + You can escape other strings of data by passing a dictionary as + the optional entities parameter. The keys and values must all be + strings; each key will be replaced with its corresponding value. + """ class XMLGenerator(handler.ContentHandler): def __init__( @@ -42,8 +45,8 @@ class XMLGenerator(handler.ContentHandler): short_empty_elements: bool = False, ) -> None: ... def _qname(self, name: tuple[str | None, str]) -> str: - """Builds a qualified name from a (ns_url, localname) pair -""" + """Builds a qualified name from a (ns_url, localname) pair""" + def startDocument(self) -> None: ... def endDocument(self) -> None: ... def startPrefixMapping(self, prefix: str | None, uri: str) -> None: ... @@ -58,12 +61,13 @@ class XMLGenerator(handler.ContentHandler): class XMLFilterBase(xmlreader.XMLReader): """This class is designed to sit between an XMLReader and the -client application's event handlers. By default, it does nothing -but pass requests up to the reader and events on to the handlers -unmodified, but subclasses can override specific methods to modify -the event stream or the configuration requests as they pass -through. -""" + client application's event handlers. By default, it does nothing + but pass requests up to the reader and events on to the handlers + unmodified, but subclasses can override specific methods to modify + the event stream or the configuration requests as they pass + through. + """ + def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... # ErrorHandler methods def error(self, exception: BaseException) -> NoReturn: ... @@ -101,5 +105,5 @@ through. def prepare_input_source(source: xmlreader.InputSource | _Source, base: str = "") -> xmlreader.InputSource: """This function takes an InputSource and an optional base URL and -returns a fully resolved InputSource object ready for reading. -""" + returns a fully resolved InputSource object ready for reading. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi index 574171695b685..27ada179270c9 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -1,6 +1,7 @@ """An XML Reader is the SAX 2 name for an XML parser. XML Parsers -should be based on this code. +should be based on this code. """ + from _typeshed import ReadableBuffer from collections.abc import Mapping from typing import Generic, Literal, TypeVar, overload @@ -11,207 +12,216 @@ from xml.sax.handler import _ContentHandlerProtocol, _DTDHandlerProtocol, _Entit class XMLReader: """Interface for reading an XML document using callbacks. -XMLReader is the interface that an XML parser's SAX2 driver must -implement. This interface allows an application to set and query -features and properties in the parser, to register event handlers -for document processing, and to initiate a document parse. + XMLReader is the interface that an XML parser's SAX2 driver must + implement. 
This interface allows an application to set and query + features and properties in the parser, to register event handlers + for document processing, and to initiate a document parse. + + All SAX interfaces are assumed to be synchronous: the parse + methods must not return until parsing is complete, and readers + must wait for an event-handler callback to return before reporting + the next event. + """ -All SAX interfaces are assumed to be synchronous: the parse -methods must not return until parsing is complete, and readers -must wait for an event-handler callback to return before reporting -the next event. -""" def parse(self, source: InputSource | _Source) -> None: - """Parse an XML document from a system identifier or an InputSource. -""" + """Parse an XML document from a system identifier or an InputSource.""" + def getContentHandler(self) -> _ContentHandlerProtocol: - """Returns the current ContentHandler. -""" + """Returns the current ContentHandler.""" + def setContentHandler(self, handler: _ContentHandlerProtocol) -> None: - """Registers a new object to receive document content events. -""" + """Registers a new object to receive document content events.""" + def getDTDHandler(self) -> _DTDHandlerProtocol: - """Returns the current DTD handler. -""" + """Returns the current DTD handler.""" + def setDTDHandler(self, handler: _DTDHandlerProtocol) -> None: - """Register an object to receive basic DTD-related events. -""" + """Register an object to receive basic DTD-related events.""" + def getEntityResolver(self) -> _EntityResolverProtocol: - """Returns the current EntityResolver. -""" + """Returns the current EntityResolver.""" + def setEntityResolver(self, resolver: _EntityResolverProtocol) -> None: - """Register an object to resolve external entities. -""" + """Register an object to resolve external entities.""" + def getErrorHandler(self) -> _ErrorHandlerProtocol: - """Returns the current ErrorHandler. -""" + """Returns the current ErrorHandler.""" + def setErrorHandler(self, handler: _ErrorHandlerProtocol) -> None: - """Register an object to receive error-message events. -""" + """Register an object to receive error-message events.""" + def setLocale(self, locale: str) -> None: """Allow an application to set the locale for errors and warnings. -SAX parsers are not required to provide localization for errors -and warnings; if they cannot support the requested locale, -however, they must raise a SAX exception. Applications may -request a locale change in the middle of a parse. -""" + SAX parsers are not required to provide localization for errors + and warnings; if they cannot support the requested locale, + however, they must raise a SAX exception. Applications may + request a locale change in the middle of a parse. + """ + def getFeature(self, name: str) -> Literal[0, 1] | bool: - """Looks up and returns the state of a SAX2 feature. -""" + """Looks up and returns the state of a SAX2 feature.""" + def setFeature(self, name: str, state: Literal[0, 1] | bool) -> None: - """Sets the state of a SAX2 feature. -""" + """Sets the state of a SAX2 feature.""" + def getProperty(self, name: str) -> object: - """Looks up and returns the value of a SAX2 property. -""" + """Looks up and returns the value of a SAX2 property.""" + def setProperty(self, name: str, value: object) -> None: - """Sets the value of a SAX2 property. 
-""" + """Sets the value of a SAX2 property.""" class IncrementalParser(XMLReader): """This interface adds three extra methods to the XMLReader -interface that allow XML parsers to support incremental -parsing. Support for this interface is optional, since not all -underlying XML parsers support this functionality. - -When the parser is instantiated it is ready to begin accepting -data from the feed method immediately. After parsing has been -finished with a call to close the reset method must be called to -make the parser ready to accept new data, either from feed or -using the parse method. - -Note that these methods must _not_ be called during parsing, that -is, after parse has been called and before it returns. - -By default, the class also implements the parse method of the XMLReader -interface using the feed, close and reset methods of the -IncrementalParser interface as a convenience to SAX 2.0 driver -writers. -""" + interface that allow XML parsers to support incremental + parsing. Support for this interface is optional, since not all + underlying XML parsers support this functionality. + + When the parser is instantiated it is ready to begin accepting + data from the feed method immediately. After parsing has been + finished with a call to close the reset method must be called to + make the parser ready to accept new data, either from feed or + using the parse method. + + Note that these methods must _not_ be called during parsing, that + is, after parse has been called and before it returns. + + By default, the class also implements the parse method of the XMLReader + interface using the feed, close and reset methods of the + IncrementalParser interface as a convenience to SAX 2.0 driver + writers. + """ + def __init__(self, bufsize: int = 65536) -> None: ... def parse(self, source: InputSource | _Source) -> None: ... def feed(self, data: str | ReadableBuffer) -> None: """This method gives the raw XML data in the data parameter to -the parser and makes it parse the data, emitting the -corresponding events. It is allowed for XML constructs to be -split across several calls to feed. + the parser and makes it parse the data, emitting the + corresponding events. It is allowed for XML constructs to be + split across several calls to feed. + + feed may raise SAXException. + """ -feed may raise SAXException. -""" def prepareParser(self, source: InputSource) -> None: """This method is called by the parse implementation to allow -the SAX 2.0 driver to prepare itself for parsing. -""" + the SAX 2.0 driver to prepare itself for parsing. + """ + def close(self) -> None: """This method is called when the entire XML document has been -passed to the parser through the feed method, to notify the -parser that there are no more data. This allows the parser to -do the final checks on the document and empty the internal -data buffer. + passed to the parser through the feed method, to notify the + parser that there are no more data. This allows the parser to + do the final checks on the document and empty the internal + data buffer. -The parser will not be ready to parse another document until -the reset method has been called. + The parser will not be ready to parse another document until + the reset method has been called. + + close may raise SAXException. + """ -close may raise SAXException. -""" def reset(self) -> None: """This method is called after close has been called to reset -the parser so that it is ready to parse new documents. 
The -results of calling parse or feed after close without calling -reset are undefined. -""" + the parser so that it is ready to parse new documents. The + results of calling parse or feed after close without calling + reset are undefined. + """ class Locator: """Interface for associating a SAX event with a document -location. A locator object will return valid results only during -calls to DocumentHandler methods; at any other time, the -results are unpredictable. -""" + location. A locator object will return valid results only during + calls to DocumentHandler methods; at any other time, the + results are unpredictable. + """ + def getColumnNumber(self) -> int | None: - """Return the column number where the current event ends. -""" + """Return the column number where the current event ends.""" + def getLineNumber(self) -> int | None: - """Return the line number where the current event ends. -""" + """Return the line number where the current event ends.""" + def getPublicId(self) -> str | None: - """Return the public identifier for the current event. -""" + """Return the public identifier for the current event.""" + def getSystemId(self) -> str | None: - """Return the system identifier for the current event. -""" + """Return the system identifier for the current event.""" class InputSource: """Encapsulation of the information needed by the XMLReader to -read entities. + read entities. -This class may include information about the public identifier, -system identifier, byte stream (possibly with character encoding -information) and/or the character stream of an entity. + This class may include information about the public identifier, + system identifier, byte stream (possibly with character encoding + information) and/or the character stream of an entity. -Applications will create objects of this class for use in the -XMLReader.parse method and for returning from -EntityResolver.resolveEntity. + Applications will create objects of this class for use in the + XMLReader.parse method and for returning from + EntityResolver.resolveEntity. + + An InputSource belongs to the application, the XMLReader is not + allowed to modify InputSource objects passed to it from the + application, although it may make copies and modify those. + """ -An InputSource belongs to the application, the XMLReader is not -allowed to modify InputSource objects passed to it from the -application, although it may make copies and modify those. -""" def __init__(self, system_id: str | None = None) -> None: ... def setPublicId(self, public_id: str | None) -> None: - """Sets the public identifier of this InputSource. -""" + """Sets the public identifier of this InputSource.""" + def getPublicId(self) -> str | None: - """Returns the public identifier of this InputSource. -""" + """Returns the public identifier of this InputSource.""" + def setSystemId(self, system_id: str | None) -> None: - """Sets the system identifier of this InputSource. -""" + """Sets the system identifier of this InputSource.""" + def getSystemId(self) -> str | None: - """Returns the system identifier of this InputSource. -""" + """Returns the system identifier of this InputSource.""" + def setEncoding(self, encoding: str | None) -> None: """Sets the character encoding of this InputSource. -The encoding must be a string acceptable for an XML encoding -declaration (see section 4.3.3 of the XML recommendation). + The encoding must be a string acceptable for an XML encoding + declaration (see section 4.3.3 of the XML recommendation). 
+ + The encoding attribute of the InputSource is ignored if the + InputSource also contains a character stream. + """ -The encoding attribute of the InputSource is ignored if the -InputSource also contains a character stream. -""" def getEncoding(self) -> str | None: - """Get the character encoding of this InputSource. -""" + """Get the character encoding of this InputSource.""" + def setByteStream(self, bytefile: _SupportsReadClose[bytes] | None) -> None: """Set the byte stream (a Python file-like object which does -not perform byte-to-character conversion) for this input -source. + not perform byte-to-character conversion) for this input + source. -The SAX parser will ignore this if there is also a character -stream specified, but it will use a byte stream in preference -to opening a URI connection itself. + The SAX parser will ignore this if there is also a character + stream specified, but it will use a byte stream in preference + to opening a URI connection itself. + + If the application knows the character encoding of the byte + stream, it should set it with the setEncoding method. + """ -If the application knows the character encoding of the byte -stream, it should set it with the setEncoding method. -""" def getByteStream(self) -> _SupportsReadClose[bytes] | None: """Get the byte stream for this input source. -The getEncoding method will return the character encoding for -this byte stream, or None if unknown. -""" + The getEncoding method will return the character encoding for + this byte stream, or None if unknown. + """ + def setCharacterStream(self, charfile: _SupportsReadClose[str] | None) -> None: """Set the character stream for this input source. (The stream -must be a Python 2.0 Unicode-wrapped file-like that performs -conversion to Unicode strings.) + must be a Python 2.0 Unicode-wrapped file-like that performs + conversion to Unicode strings.) + + If there is a character stream specified, the SAX parser will + ignore any byte stream and will not attempt to open a URI + connection to the system identifier. + """ -If there is a character stream specified, the SAX parser will -ignore any byte stream and will not attempt to open a URI -connection to the system identifier. -""" def getCharacterStream(self) -> _SupportsReadClose[str] | None: - """Get the character stream for this input source. -""" + """Get the character stream for this input source.""" _AttrKey = TypeVar("_AttrKey", default=str) @@ -219,8 +229,9 @@ class AttributesImpl(Generic[_AttrKey]): def __init__(self, attrs: Mapping[_AttrKey, str]) -> None: """Non-NS-aware implementation. -attrs should be of the form {name : value}. -""" + attrs should be of the form {name : value}. + """ + def getLength(self) -> int: ... def getType(self, name: str) -> str: ... def getValue(self, name: _AttrKey) -> str: ... @@ -247,9 +258,10 @@ class AttributesNSImpl(AttributesImpl[_NSName]): def __init__(self, attrs: Mapping[_NSName, str], qnames: Mapping[_NSName, str]) -> None: """NS-aware implementation. -attrs should be of the form {(ns_uri, lname): value, ...}. -qnames of the form {(ns_uri, lname): qname, ...}. -""" + attrs should be of the form {(ns_uri, lname): value, ...}. + qnames of the form {(ns_uri, lname): qname, ...}. + """ + def getValue(self, name: _NSName) -> str: ... def getNameByQName(self, name: str) -> _NSName: ... def getQNameByName(self, name: _NSName) -> str: ... 
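Editorial aside, not part of the typeshed sync: the xml.sax stubs above describe the XMLReader/IncrementalParser protocol (feed, close, reset) and the handler interfaces it drives. A minimal sketch of that flow follows; the TitleCollector class, the element names and the sample data are invented for illustration, while the calls themselves (make_parser, setContentHandler, feed, close) are the standard-library API the stubs document.

# Illustrative sketch only, not part of the patch.
import xml.sax
from xml.sax.handler import ContentHandler

class TitleCollector(ContentHandler):
    """Collect the text of every <title> element (hypothetical schema)."""
    def __init__(self):
        super().__init__()
        self.titles = []
        self._in_title = False
        self._buf = []

    def startElement(self, name, attrs):
        if name == "title":
            self._in_title = True
            self._buf = []

    def characters(self, content):
        if self._in_title:
            self._buf.append(content)

    def endElement(self, name):
        if name == "title":
            self.titles.append("".join(self._buf))
            self._in_title = False

parser = xml.sax.make_parser()          # an ExpatParser, i.e. an IncrementalParser
handler = TitleCollector()
parser.setContentHandler(handler)

# XML constructs may be split across feed() calls; close() signals end of document.
for chunk in (b"<books><title>SAX", b" in a nutshell</title></books>"):
    parser.feed(chunk)
parser.close()

print(handler.titles)                   # ['SAX in a nutshell']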
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi index f4893c8d8079d..7a00f503ef763 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/client.pyi @@ -39,6 +39,7 @@ Exported functions: loads Convert an XML-RPC packet to unmarshalled data plus a method name (None if not present). """ + import gzip import http.client import time @@ -94,12 +95,11 @@ INVALID_METHOD_PARAMS: Final[int] # undocumented INTERNAL_ERROR: Final[int] # undocumented class Error(Exception): - """Base class for client errors. -""" + """Base class for client errors.""" class ProtocolError(Error): - """Indicates an HTTP protocol error. -""" + """Indicates an HTTP protocol error.""" + url: str errcode: int errmsg: str @@ -107,12 +107,11 @@ class ProtocolError(Error): def __init__(self, url: str, errcode: int, errmsg: str, headers: dict[str, str]) -> None: ... class ResponseError(Error): - """Indicates a broken response package. -""" + """Indicates a broken response package.""" class Fault(Error): - """Indicates an XML-RPC fault package. -""" + """Indicates an XML-RPC fault package.""" + faultCode: int faultString: str def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... @@ -125,9 +124,10 @@ def _strftime(value: _XMLDate) -> str: ... # undocumented class DateTime: """DateTime wrapper for an ISO 8601 string or time tuple or -localtime integer value to generate 'dateTime.iso8601' XML-RPC -value. -""" + localtime integer value to generate 'dateTime.iso8601' XML-RPC + value. + """ + value: str # undocumented def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -145,8 +145,8 @@ def _datetime(data: Any) -> DateTime: ... # undocumented def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: - """Wrapper for binary data. -""" + """Wrapper for binary data.""" + data: bytes def __init__(self, data: bytes | bytearray | None = None) -> None: ... def decode(self, data: ReadableBuffer) -> None: ... @@ -168,12 +168,13 @@ _WriteCallback: TypeAlias = Callable[[str], object] class Marshaller: """Generate an XML-RPC params chunk from a Python data structure. -Create a Marshaller instance for each set of parameters, and use -the "dumps" method to convert your data (represented as a tuple) -to an XML-RPC params chunk. To write a fault response, pass a -Fault instance instead. You may prefer to use the "dumps" module -function for this purpose. -""" + Create a Marshaller instance for each set of parameters, and use + the "dumps" method to convert your data (represented as a tuple) + to an XML-RPC params chunk. To write a fault response, pass a + Fault instance instead. You may prefer to use the "dumps" module + function for this purpose. + """ + dispatch: dict[type[_Marshallable] | Literal["_arbitrary_instance"], Callable[[Marshaller, Any, _WriteCallback], None]] memo: dict[Any, None] data: None @@ -198,12 +199,13 @@ function for this purpose. class Unmarshaller: """Unmarshal an XML-RPC response, based on incoming XML event -messages (start, data, end). Call close() to get the resulting -data structure. + messages (start, data, end). Call close() to get the resulting + data structure. + + Note that this reader is fairly tolerant, and gladly accepts bogus + XML-RPC data without complaining (but not bogus XML). 
+ """ -Note that this reader is fairly tolerant, and gladly accepts bogus -XML-RPC data without complaining (but not bogus XML). -""" dispatch: dict[str, Callable[[Unmarshaller, str], None]] _type: str | None @@ -248,8 +250,9 @@ class _MultiCallMethod: # undocumented class MultiCallIterator: # undocumented """Iterates over the results of a multicall. Exceptions are -raised in response to xmlrpc faults. -""" + raised in response to xmlrpc faults. + """ + results: list[list[_Marshallable]] def __init__(self, results: list[list[_Marshallable]]) -> None: ... def __getitem__(self, i: int) -> _Marshallable: ... @@ -257,19 +260,20 @@ raised in response to xmlrpc faults. class MultiCall: """server -> an object used to boxcar method calls -server should be a ServerProxy object. + server should be a ServerProxy object. -Methods can be added to the MultiCall using normal -method call syntax e.g.: + Methods can be added to the MultiCall using normal + method call syntax e.g.: -multicall = MultiCall(server_proxy) -multicall.add(2,3) -multicall.get_address("Guido") + multicall = MultiCall(server_proxy) + multicall.add(2,3) + multicall.get_address("Guido") -To execute the multicall, call the MultiCall object e.g.: + To execute the multicall, call the MultiCall object e.g.: + + add_result, address = multicall() + """ -add_result, address = multicall() -""" __server: ServerProxy __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... @@ -284,9 +288,10 @@ FastUnmarshaller: Unmarshaller | None def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: """getparser() -> parser, unmarshaller -Create an instance of the fastest available parser, and attach it -to an unmarshalling object. Return both objects. -""" + Create an instance of the fastest available parser, and attach it + to an unmarshalling object. Return both objects. + """ + def dumps( params: Fault | tuple[_Marshallable, ...], methodname: str | None = None, @@ -296,50 +301,54 @@ def dumps( ) -> str: """data [,options] -> marshalled data -Convert an argument tuple or a Fault instance to an XML-RPC -request (or response, if the methodresponse option is used). + Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). -In addition to the data object, the following options can be given -as keyword arguments: + In addition to the data object, the following options can be given + as keyword arguments: - methodname: the method name for a methodCall packet + methodname: the method name for a methodCall packet - methodresponse: true to create a methodResponse packet. - If this option is used with a tuple, the tuple must be - a singleton (i.e. it can contain only one element). + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). - encoding: the packet encoding (default is UTF-8) + encoding: the packet encoding (default is UTF-8) + + All byte strings in the data structure are assumed to use the + packet encoding. Unicode strings are automatically converted, + where necessary. + """ -All byte strings in the data structure are assumed to use the -packet encoding. Unicode strings are automatically converted, -where necessary. 
-""" def loads( data: str | ReadableBuffer, use_datetime: bool = False, use_builtin_types: bool = False ) -> tuple[tuple[_Marshallable, ...], str | None]: """data -> unmarshalled data, method name -Convert an XML-RPC packet to unmarshalled data plus a method -name (None if not present). + Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). + + If the XML-RPC packet represents a fault condition, this function + raises a Fault exception. + """ -If the XML-RPC packet represents a fault condition, this function -raises a Fault exception. -""" def gzip_encode(data: ReadableBuffer) -> bytes: # undocumented """data -> gzip encoded data -Encode data using the gzip content encoding as described in RFC 1952 -""" + Encode data using the gzip content encoding as described in RFC 1952 + """ + def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: # undocumented """gzip encoded data -> unencoded data -Decode data using the gzip content encoding as described in RFC 1952 -""" + Decode data using the gzip content encoding as described in RFC 1952 + """ class GzipDecodedResponse(gzip.GzipFile): # undocumented """a file-like object to decode a response encoded with the gzip -method, as described in RFC 1952. -""" + method, as described in RFC 1952. + """ + io: BytesIO def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... @@ -351,8 +360,8 @@ class _Method: # undocumented def __call__(self, *args: _Marshallable) -> _Marshallable: ... class Transport: - """Handles an HTTP transaction to an XML-RPC server. -""" + """Handles an HTTP transaction to an XML-RPC server.""" + user_agent: str accept_gzip_encoding: bool encode_threshold: int | None @@ -384,8 +393,8 @@ class Transport: def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): - """Handles an HTTPS transaction to an XML-RPC server. -""" + """Handles an HTTPS transaction to an XML-RPC server.""" + def __init__( self, use_datetime: bool = False, @@ -399,24 +408,25 @@ class SafeTransport(Transport): class ServerProxy: """uri [,options] -> a logical connection to an XML-RPC server -uri is the connection point on the server, given as -scheme://host/target. + uri is the connection point on the server, given as + scheme://host/target. -The standard implementation always supports the "http" scheme. If -SSL socket support is available (Python 2.0), it also supports -"https". + The standard implementation always supports the "http" scheme. If + SSL socket support is available (Python 2.0), it also supports + "https". -If the target part and the slash preceding it are both omitted, -"/RPC2" is assumed. + If the target part and the slash preceding it are both omitted, + "/RPC2" is assumed. -The following options can be given as keyword arguments: + The following options can be given as keyword arguments: - transport: a transport factory - encoding: the request encoding (default is UTF-8) + transport: a transport factory + encoding: the request encoding (default is UTF-8) + + All 8-bit strings passed to the server proxy are assumed to use + the given encoding. + """ -All 8-bit strings passed to the server proxy are assumed to use -the given encoding. -""" __host: str __handler: str __transport: Transport @@ -441,8 +451,9 @@ the given encoding. 
@overload def __call__(self, attr: Literal["close"]) -> Callable[[], None]: """A workaround to get special attributes on the ServerProxy -without interfering with the magic __getattr__ -""" + without interfering with the magic __getattr__ + """ + @overload def __call__(self, attr: Literal["transport"]) -> Transport: ... @overload diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi index ca4e7c5b72e57..8b0c3fc8ee8d3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xmlrpc/server.pyi @@ -100,6 +100,7 @@ server = CGIXMLRPCRequestHandler() server.register_function(pow) server.handle_request() """ + import http.server import pydoc import socketserver @@ -144,25 +145,27 @@ _DispatchProtocol: TypeAlias = ( def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: # undocumented """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d -Resolves a dotted attribute name to an object. Raises -an AttributeError if any attribute in the chain starts with a '_'. + Resolves a dotted attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '_'. + + If the optional allow_dotted_names argument is false, dots are not + supported and this function operates similar to getattr(obj, attr). + """ -If the optional allow_dotted_names argument is false, dots are not -supported and this function operates similar to getattr(obj, attr). -""" def list_public_methods(obj: Any) -> list[str]: # undocumented """Returns a list of attribute strings, found in the specified -object, which represent callable attributes -""" + object, which represent callable attributes + """ class SimpleXMLRPCDispatcher: # undocumented """Mix-in class that dispatches XML-RPC requests. -This class is used to register XML-RPC method handlers -and then to dispatch them. This class doesn't need to be -instanced directly when used by SimpleXMLRPCServer but it -can be instanced when used by the MultiPathXMLRPCServer -""" + This class is used to register XML-RPC method handlers + and then to dispatch them. This class doesn't need to be + instanced directly when used by SimpleXMLRPCServer but it + can be instanced when used by the MultiPathXMLRPCServer + """ + funcs: dict[str, _DispatchProtocol] instance: Any | None allow_none: bool @@ -172,53 +175,57 @@ can be instanced when used by the MultiPathXMLRPCServer def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: """Registers an instance to respond to XML-RPC requests. -Only one instance can be installed at a time. + Only one instance can be installed at a time. -If the registered instance has a _dispatch method then that -method will be called with the name of the XML-RPC method and -its parameters as a tuple -e.g. instance._dispatch('add',(2,3)) + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) -If the registered instance does not have a _dispatch method -then the instance will be searched to find a matching method -and, if found, will be called. Methods beginning with an '_' -are considered private and will not be called by -SimpleXMLRPCServer. + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. 
Methods beginning with an '_' + are considered private and will not be called by + SimpleXMLRPCServer. -If a registered function matches an XML-RPC request, then it -will be called instead of the registered instance. + If a registered function matches an XML-RPC request, then it + will be called instead of the registered instance. -If the optional allow_dotted_names argument is true and the -instance does not have a _dispatch method, method names -containing dots are supported and resolved, as long as none of -the name segments start with an '_'. + If the optional allow_dotted_names argument is true and the + instance does not have a _dispatch method, method names + containing dots are supported and resolved, as long as none of + the name segments start with an '_'. - *** SECURITY WARNING: *** + *** SECURITY WARNING: *** - Enabling the allow_dotted_names options allows intruders - to access your module's global variables and may allow - intruders to execute arbitrary code on your machine. Only - use this option on a secure, closed network. + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + + """ -""" def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: """Registers a function to respond to XML-RPC requests. -The optional name argument can be used to set a Unicode name -for the function. -""" + The optional name argument can be used to set a Unicode name + for the function. + """ + def register_introspection_functions(self) -> None: """Registers the XML-RPC introspection methods in the system -namespace. + namespace. + + see http://xmlrpc.usefulinc.com/doc/reserved.html + """ -see http://xmlrpc.usefulinc.com/doc/reserved.html -""" def register_multicall_functions(self) -> None: """Registers the XML-RPC multicall method in the system -namespace. + namespace. + + see http://www.xmlrpc.com/discuss/msgReader$1208 + """ -see http://www.xmlrpc.com/discuss/msgReader$1208 -""" def _marshaled_dispatch( self, data: str | ReadableBuffer, @@ -227,68 +234,74 @@ see http://www.xmlrpc.com/discuss/msgReader$1208 ) -> str: # undocumented """Dispatches an XML-RPC method from marshalled (XML) data. -XML-RPC methods are dispatched from the marshalled (XML) data -using the _dispatch method and the result is returned as -marshalled data. For backwards compatibility, a dispatch -function can be provided as an argument (see comment in -SimpleXMLRPCRequestHandler.do_POST) but overriding the -existing method through subclassing is the preferred means -of changing method dispatch behavior. -""" + XML-RPC methods are dispatched from the marshalled (XML) data + using the _dispatch method and the result is returned as + marshalled data. For backwards compatibility, a dispatch + function can be provided as an argument (see comment in + SimpleXMLRPCRequestHandler.do_POST) but overriding the + existing method through subclassing is the preferred means + of changing method dispatch behavior. + """ + def system_listMethods(self) -> list[str]: # undocumented """system.listMethods() => ['add', 'subtract', 'multiple'] -Returns a list of the methods supported by the server. -""" + Returns a list of the methods supported by the server. 
+ """ + def system_methodSignature(self, method_name: str) -> str: # undocumented """system.methodSignature('add') => [double, int, int] -Returns a list describing the signature of the method. In the -above example, the add method takes two integers as arguments -and returns a double result. + Returns a list describing the signature of the method. In the + above example, the add method takes two integers as arguments + and returns a double result. + + This server does NOT support system.methodSignature. + """ -This server does NOT support system.methodSignature. -""" def system_methodHelp(self, method_name: str) -> str: # undocumented """system.methodHelp('add') => "Adds two integers together" -Returns a string containing documentation for the specified method. -""" + Returns a string containing documentation for the specified method. + """ + def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: # undocumented """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => [[4], ...] -Allows the caller to package multiple XML-RPC calls into a single -request. + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + """ -See http://www.xmlrpc.com/discuss/msgReader$1208 -""" def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: # undocumented """Dispatches the XML-RPC method. -XML-RPC calls are forwarded to a registered function that -matches the called XML-RPC method name. If no such function -exists then the call is forwarded to the registered instance, -if available. + XML-RPC calls are forwarded to a registered function that + matches the called XML-RPC method name. If no such function + exists then the call is forwarded to the registered instance, + if available. -If the registered instance has a _dispatch method then that -method will be called with the name of the XML-RPC method and -its parameters as a tuple -e.g. instance._dispatch('add',(2,3)) + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) -If the registered instance does not have a _dispatch method -then the instance will be searched to find a matching method -and, if found, will be called. + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. -Methods beginning with an '_' are considered private and will -not be called. -""" + Methods beginning with an '_' are considered private and will + not be called. + """ class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): """Simple XML-RPC request handler class. -Handles all HTTP POST requests and attempts to decode them as -XML-RPC requests. -""" + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + """ + rpc_paths: ClassVar[tuple[str, ...]] encode_threshold: int # undocumented aepattern: Pattern[str] # undocumented @@ -297,21 +310,23 @@ XML-RPC requests. def do_POST(self) -> None: """Handles the HTTP POST request. -Attempts to interpret all HTTP POST requests as XML-RPC calls, -which are forwarded to the server's _dispatch method for handling. -""" + Attempts to interpret all HTTP POST requests as XML-RPC calls, + which are forwarded to the server's _dispatch method for handling. 
+ """ + def decode_request_content(self, data: bytes) -> bytes | None: ... def report_404(self) -> None: ... class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): """Simple XML-RPC server. -Simple XML-RPC server that allows functions and a single instance -to be installed to handle requests. The default implementation -attempts to dispatch XML-RPC calls to the functions or instance -installed in the server. Override the _dispatch method inherited -from SimpleXMLRPCDispatcher to change this behavior. -""" + Simple XML-RPC server that allows functions and a single instance + to be installed to handle requests. The default implementation + attempts to dispatch XML-RPC calls to the functions or instance + installed in the server. Override the _dispatch method inherited + from SimpleXMLRPCDispatcher to change this behavior. + """ + _send_traceback_handler: bool def __init__( self, @@ -326,12 +341,13 @@ from SimpleXMLRPCDispatcher to change this behavior. class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented """Multipath XML-RPC Server -This specialization of SimpleXMLRPCServer allows the user to create -multiple Dispatcher instances and assign them to different -HTTP request paths. This makes it possible to run two or more -'virtual XML-RPC servers' at the same port. -Make sure that the requestHandler accepts the paths in question. -""" + This specialization of SimpleXMLRPCServer allows the user to create + multiple Dispatcher instances and assign them to different + HTTP request paths. This makes it possible to run two or more + 'virtual XML-RPC servers' at the same port. + Make sure that the requestHandler accepts the paths in question. + """ + dispatchers: dict[str, SimpleXMLRPCDispatcher] def __init__( self, @@ -347,29 +363,30 @@ Make sure that the requestHandler accepts the paths in question. def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): - """Simple handler for XML-RPC data passed through CGI. -""" + """Simple handler for XML-RPC data passed through CGI.""" + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... def handle_xmlrpc(self, request_text: str) -> None: - """Handle a single XML-RPC request -""" + """Handle a single XML-RPC request""" + def handle_get(self) -> None: """Handle a single HTTP GET request. -Default implementation indicates an error because -XML-RPC uses the POST method. -""" + Default implementation indicates an error because + XML-RPC uses the POST method. + """ + def handle_request(self, request_text: str | None = None) -> None: """Handle a single XML-RPC request passed through a CGI post method. -If no XML data is given then it is read from stdin. The resulting -XML-RPC response is printed to stdout along with the correct HTTP -headers. -""" + If no XML data is given then it is read from stdin. The resulting + XML-RPC response is printed to stdout along with the correct HTTP + headers. + """ class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented - """Class used to generate pydoc HTML document for a server -""" + """Class used to generate pydoc HTML document for a server""" + def docroutine( # type: ignore[override] self, object: object, @@ -380,64 +397,66 @@ class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented methods: Mapping[str, str] = {}, cl: type | None = None, ) -> str: - """Produce HTML documentation for a function or method object. 
-""" + """Produce HTML documentation for a function or method object.""" + def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: - """Produce HTML documentation for an XML-RPC server. -""" + """Produce HTML documentation for an XML-RPC server.""" class XMLRPCDocGenerator: # undocumented """Generates documentation for an XML-RPC server. -This class is designed as mix-in and should not -be constructed directly. -""" + This class is designed as mix-in and should not + be constructed directly. + """ + server_name: str server_documentation: str server_title: str def set_server_title(self, server_title: str) -> None: - """Set the HTML title of the generated server documentation -""" + """Set the HTML title of the generated server documentation""" + def set_server_name(self, server_name: str) -> None: - """Set the name of the generated HTML server documentation -""" + """Set the name of the generated HTML server documentation""" + def set_server_documentation(self, server_documentation: str) -> None: - """Set the documentation string for the entire server. -""" + """Set the documentation string for the entire server.""" + def generate_html_documentation(self) -> str: """generate_html_documentation() => html documentation for the server -Generates HTML documentation for the server using introspection for -installed functions and instances that do not implement the -_dispatch method. Alternatively, instances can choose to implement -the _get_method_argstring(method_name) method to provide the -argument string used in the documentation and the -_methodHelp(method_name) method to provide the help text used -in the documentation. -""" + Generates HTML documentation for the server using introspection for + installed functions and instances that do not implement the + _dispatch method. Alternatively, instances can choose to implement + the _get_method_argstring(method_name) method to provide the + argument string used in the documentation and the + _methodHelp(method_name) method to provide the help text used + in the documentation. + """ class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): """XML-RPC and documentation request handler class. -Handles all HTTP POST requests and attempts to decode them as -XML-RPC requests. + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + + Handles all HTTP GET requests and interprets them as requests + for documentation. + """ -Handles all HTTP GET requests and interprets them as requests -for documentation. -""" def do_GET(self) -> None: """Handles the HTTP GET request. -Interpret all HTTP GET requests as requests for server -documentation. -""" + Interpret all HTTP GET requests as requests for server + documentation. + """ class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): """XML-RPC and HTML documentation server. -Adds the ability to serve server documentation to the capabilities -of SimpleXMLRPCServer. -""" + Adds the ability to serve server documentation to the capabilities + of SimpleXMLRPCServer. + """ + def __init__( self, addr: tuple[str, int], @@ -451,6 +470,7 @@ of SimpleXMLRPCServer. class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): """Handler for XML-RPC data and documentation requests passed through -CGI -""" + CGI + """ + def __init__(self) -> None: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi index 92cf681944e59..17d744cf83fc5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/xxlimited.pyi @@ -1,5 +1,5 @@ -"""This is a template module just for instruction. -""" +"""This is a template module just for instruction.""" + import sys from typing import Any, ClassVar, final @@ -7,22 +7,21 @@ class Str(str): ... @final class Xxo: - """A class that explicitly stores attributes in an internal dict -""" + """A class that explicitly stores attributes in an internal dict""" + def demo(self) -> None: - """demo(o) -> o -""" + """demo(o) -> o""" if sys.version_info >= (3, 11) and sys.platform != "win32": x_exports: int def foo(i: int, j: int, /) -> Any: """foo(i,j) -Return the sum of i and j. -""" + Return the sum of i and j. + """ + def new() -> Xxo: - """new() -> new Xx object -""" + """new() -> new Xx object""" if sys.version_info >= (3, 10): class Error(Exception): ... @@ -34,5 +33,4 @@ else: __hash__: ClassVar[None] # type: ignore[assignment] def roj(b: Any, /) -> None: - """roj(a,b) -> None -""" + """roj(a,b) -> None""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi index 5bbb824fe0c87..354ecc600e871 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipapp.pyi @@ -19,20 +19,21 @@ def create_archive( ) -> None: """Create an application archive from SOURCE. -The SOURCE can be the name of a directory, or a filename or a file-like -object referring to an existing archive. - -The content of SOURCE is packed into an application archive in TARGET, -which can be a filename or a file-like object. If SOURCE is a directory, -TARGET can be omitted and will default to the name of SOURCE with .pyz -appended. - -The created application archive will have a shebang line specifying -that it should run with INTERPRETER (there will be no shebang line if -INTERPRETER is None), and a __main__.py which runs MAIN (if MAIN is -not specified, an existing __main__.py will be used). It is an error -to specify MAIN for anything other than a directory source with no -__main__.py, and it is an error to omit MAIN if the directory has no -__main__.py. -""" + The SOURCE can be the name of a directory, or a filename or a file-like + object referring to an existing archive. + + The content of SOURCE is packed into an application archive in TARGET, + which can be a filename or a file-like object. If SOURCE is a directory, + TARGET can be omitted and will default to the name of SOURCE with .pyz + appended. + + The created application archive will have a shebang line specifying + that it should run with INTERPRETER (there will be no shebang line if + INTERPRETER is None), and a __main__.py which runs MAIN (if MAIN is + not specified, an existing __main__.py will be used). It is an error + to specify MAIN for anything other than a directory source with no + __main__.py, and it is an error to omit MAIN if the directory has no + __main__.py. + """ + def get_interpreter(archive: _Path) -> str: ... 
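Editorial aside, not part of the typeshed sync: create_archive, as documented in the zipapp stub above, packs a source directory into a runnable .pyz. A small sketch of the typical calls; "myapp" and "mycli" are hypothetical source directories, where "myapp" ships its own __main__.py and "mycli" does not but exposes a callable at mycli/cli.py:main.

# Illustrative sketch only, not part of the patch.
import zipapp

# Directory with a __main__.py: TARGET may be omitted and defaults to the
# source name with .pyz appended ("myapp.pyz").
zipapp.create_archive("myapp")

# Directory without a __main__.py: MAIN names the entry point, and
# INTERPRETER adds a shebang line so the archive can be executed directly.
zipapp.create_archive(
    "mycli",
    target="mycli.pyz",
    interpreter="/usr/bin/env python3",
    main="mycli.cli:main",
)

print(zipapp.get_interpreter("mycli.pyz"))   # '/usr/bin/env python3'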
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi index bdceff72da9ec..0389fe1cba78f 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/__init__.pyi @@ -3,6 +3,7 @@ Read and write ZIP files. XXX references to utf-8 need further investigation. """ + import io import sys from _typeshed import SizedBuffer, StrOrBytesPath, StrPath @@ -46,9 +47,9 @@ error = BadZipfile class LargeZipFile(Exception): """ -Raised when writing a zipfile, the zipfile requires ZIP64 extensions -and those extensions are disabled. -""" + Raised when writing a zipfile, the zipfile requires ZIP64 extensions + and those extensions are disabled. + """ @type_check_only class _ZipStream(Protocol): @@ -71,8 +72,9 @@ class _ClosableZipStream(_ZipStream, Protocol): class ZipExtFile(io.BufferedIOBase): """File-like object for reading an archive member. -Is returned by ZipFile.open(). -""" + Is returned by ZipFile.open(). + """ + MAX_N: int MIN_READ_SIZE: int MAX_SEEK_READ: int @@ -104,19 +106,21 @@ Is returned by ZipFile.open(). ) -> None: ... def read(self, n: int | None = -1) -> bytes: """Read and return up to n bytes. -If the argument is omitted, None, or negative, data is read and returned until EOF is reached. -""" + If the argument is omitted, None, or negative, data is read and returned until EOF is reached. + """ + def readline(self, limit: int = -1) -> bytes: # type: ignore[override] """Read and return a line from the stream. -If limit is specified, at most limit bytes will be read. -""" + If limit is specified, at most limit bytes will be read. + """ + def peek(self, n: int = 1) -> bytes: - """Returns buffered bytes without advancing the position. -""" + """Returns buffered bytes without advancing the position.""" + def read1(self, n: int | None) -> bytes: # type: ignore[override] - """Read up to n bytes with at most one read() system call. -""" + """Read up to n bytes with at most one read() system call.""" + def seek(self, offset: int, whence: int = 0) -> int: ... @type_check_only @@ -144,29 +148,30 @@ class _ZipWritable(Protocol): class ZipFile: """Class with methods to open, read, write, close, list zip files. -z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True, - compresslevel=None) - -file: Either the path to the file, or a file-like object. - If it is a path, the file will be opened and closed by ZipFile. -mode: The mode can be either read 'r', write 'w', exclusive create 'x', - or append 'a'. -compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), - ZIP_BZIP2 (requires bz2), ZIP_LZMA (requires lzma), or - ZIP_ZSTANDARD (requires compression.zstd). -allowZip64: if True ZipFile will create files with ZIP64 extensions when - needed, otherwise it will raise an exception when this would - be necessary. -compresslevel: None (default for the given compression type) or an integer - specifying the level to pass to the compressor. - When using ZIP_STORED or ZIP_LZMA this keyword has no effect. - When using ZIP_DEFLATED integers 0 through 9 are accepted. - When using ZIP_BZIP2 integers 1 through 9 are accepted. - When using ZIP_ZSTANDARD integers -7 though 22 are common, - see the CompressionParameter enum in compression.zstd for - details. + z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=True, + compresslevel=None) + + file: Either the path to the file, or a file-like object. 
+ If it is a path, the file will be opened and closed by ZipFile. + mode: The mode can be either read 'r', write 'w', exclusive create 'x', + or append 'a'. + compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib), + ZIP_BZIP2 (requires bz2), ZIP_LZMA (requires lzma), or + ZIP_ZSTANDARD (requires compression.zstd). + allowZip64: if True ZipFile will create files with ZIP64 extensions when + needed, otherwise it will raise an exception when this would + be necessary. + compresslevel: None (default for the given compression type) or an integer + specifying the level to pass to the compressor. + When using ZIP_STORED or ZIP_LZMA this keyword has no effect. + When using ZIP_DEFLATED integers 0 through 9 are accepted. + When using ZIP_BZIP2 integers 1 through 9 are accepted. + When using ZIP_ZSTANDARD integers -7 though 22 are common, + see the CompressionParameter enum in compression.zstd for + details. + + """ -""" filename: str | None debug: int comment: bytes @@ -193,8 +198,8 @@ compresslevel: None (default for the given compression type) or an integer metadata_encoding: str | None = None, ) -> None: """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', -or append 'a'. -""" + or append 'a'. + """ # metadata_encoding is only allowed for read mode @overload def __init__( @@ -245,8 +250,9 @@ or append 'a'. strict_timestamps: bool = True, ) -> None: """Open the ZIP file with mode read 'r', write 'w', exclusive create 'x', - or append 'a'. -""" + or append 'a'. + """ + @overload def __init__( self, @@ -287,68 +293,75 @@ or append 'a'. ) -> None: ... def close(self) -> None: """Close the file, and for mode 'w', 'x' and 'a' write the ending -records. -""" + records. + """ + def getinfo(self, name: str) -> ZipInfo: - """Return the instance of ZipInfo given 'name'. -""" + """Return the instance of ZipInfo given 'name'.""" + def infolist(self) -> list[ZipInfo]: """Return a list of class ZipInfo instances for files in the -archive. -""" + archive. + """ + def namelist(self) -> list[str]: - """Return a list of file names in the archive. -""" + """Return a list of file names in the archive.""" + def open( self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False ) -> IO[bytes]: """Return file-like object for 'name'. -name is a string for the file name within the ZIP file, or a ZipInfo -object. + name is a string for the file name within the ZIP file, or a ZipInfo + object. -mode should be 'r' to read a file already in the ZIP file, or 'w' to -write to a file newly added to the archive. + mode should be 'r' to read a file already in the ZIP file, or 'w' to + write to a file newly added to the archive. -pwd is the password to decrypt files (only used for reading). + pwd is the password to decrypt files (only used for reading). + + When writing, if the file size is not known in advance but may exceed + 2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large + files. If the size is known in advance, it is best to pass a ZipInfo + instance for name, with zinfo.file_size set. + """ -When writing, if the file size is not known in advance but may exceed -2 GiB, pass force_zip64 to use the ZIP64 format, which can handle large -files. If the size is known in advance, it is best to pass a ZipInfo -instance for name, with zinfo.file_size set. 
-""" def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: """Extract a member from the archive to the current working directory, -using its full name. Its file information is extracted as accurately -as possible. 'member' may be a filename or a ZipInfo object. You can -specify a different directory using 'path'. You can specify the -password to decrypt the file using 'pwd'. -""" + using its full name. Its file information is extracted as accurately + as possible. 'member' may be a filename or a ZipInfo object. You can + specify a different directory using 'path'. You can specify the + password to decrypt the file using 'pwd'. + """ + def extractall( self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None ) -> None: """Extract all members from the archive to the current working -directory. 'path' specifies a different directory to extract to. -'members' is optional and must be a subset of the list returned -by namelist(). You can specify the password to decrypt all files -using 'pwd'. -""" + directory. 'path' specifies a different directory to extract to. + 'members' is optional and must be a subset of the list returned + by namelist(). You can specify the password to decrypt all files + using 'pwd'. + """ + def printdir(self, file: _Writer | None = None) -> None: - """Print a table of contents for the zip file. -""" + """Print a table of contents for the zip file.""" + def setpassword(self, pwd: bytes) -> None: - """Set default password for encrypted files. -""" + """Set default password for encrypted files.""" + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: """Return file bytes for name. 'pwd' is the password to decrypt -encrypted files. -""" + encrypted files. + """ + def testzip(self) -> str | None: """Read all the files and check the CRC. -Return None if all files could be read successfully, or the name -of the offending file otherwise. -""" + Return None if all files could be read successfully, or the name + of the offending file otherwise. + """ + def write( self, filename: StrPath, @@ -357,8 +370,9 @@ of the offending file otherwise. compresslevel: int | None = None, ) -> None: """Put the bytes from filename into the archive under the name -arcname. -""" + arcname. + """ + def writestr( self, zinfo_or_arcname: str | ZipInfo, @@ -367,44 +381,42 @@ arcname. compresslevel: int | None = None, ) -> None: """Write a file into the archive. The contents is 'data', which -may be either a 'str' or a 'bytes' instance; if it is a 'str', -it is encoded as UTF-8 first. -'zinfo_or_arcname' is either a ZipInfo instance or -the name of the file in the archive. -""" + may be either a 'str' or a 'bytes' instance; if it is a 'str', + it is encoded as UTF-8 first. + 'zinfo_or_arcname' is either a ZipInfo instance or + the name of the file in the archive. + """ if sys.version_info >= (3, 11): def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: - """Creates a directory inside the zip archive. -""" + """Creates a directory inside the zip archive.""" def __del__(self) -> None: - """Call the "close()" method in case the user forgot. -""" + """Call the "close()" method in case the user forgot.""" class PyZipFile(ZipFile): - """Class to create ZIP archives with Python library files and packages. 
-""" + """Class to create ZIP archives with Python library files and packages.""" + def __init__( self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 ) -> None: ... def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: """Add all files from "pathname" to the ZIP archive. -If pathname is a package directory, search the directory and -all package subdirectories recursively for all *.py and enter -the modules into the archive. If pathname is a plain -directory, listdir *.py and enter all modules. Else, pathname -must be a Python *.py file and the module will be put into the -archive. Added modules are always module.pyc. -This method will compile the module.py into module.pyc if -necessary. -If filterfunc(pathname) is given, it is called with every argument. -When it is False, the file or directory is skipped. -""" + If pathname is a package directory, search the directory and + all package subdirectories recursively for all *.py and enter + the modules into the archive. If pathname is a plain + directory, listdir *.py and enter all modules. Else, pathname + must be a Python *.py file and the module will be put into the + archive. Added modules are always module.pyc. + This method will compile the module.py into module.pyc if + necessary. + If filterfunc(pathname) is given, it is called with every argument. + When it is False, the file or directory is skipped. + """ class ZipInfo: - """Class with attributes describing each file in the ZIP archive. -""" + """Class with attributes describing each file in the ZIP archive.""" + __slots__ = ( "orig_filename", "filename", @@ -454,22 +466,23 @@ class ZipInfo: def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: """Construct an appropriate ZipInfo for a file on the filesystem. -filename should be the path to a file or directory on the filesystem. + filename should be the path to a file or directory on the filesystem. + + arcname is the name which it will have within the archive (by default, + this will be the same as filename, but without a drive letter and with + leading path separators removed). + """ -arcname is the name which it will have within the archive (by default, -this will be the same as filename, but without a drive letter and with -leading path separators removed). -""" def is_dir(self) -> bool: - """Return True if this archive member is a directory. -""" + """Return True if this archive member is a directory.""" + def FileHeader(self, zip64: bool | None = None) -> bytes: """Return the per-file header as a bytes object. -When the optional zip64 arg is None rather than a bool, we will -decide based upon the file_size and compress_size, if known, -False otherwise. -""" + When the optional zip64 arg is None rather than a bool, we will + decide based upon the file_size and compress_size, if known, + False otherwise. + """ if sys.version_info >= (3, 12): from zipfile._path import CompleteDirs as CompleteDirs, Path as Path @@ -477,114 +490,119 @@ if sys.version_info >= (3, 12): else: class CompleteDirs(ZipFile): """ - A ZipFile subclass that ensures that implied directories - are always included in the namelist. - """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. 
+ """ + def resolve_dir(self, name: str) -> str: """ - If the name represents a directory, return that name - as a directory (with the trailing slash). - """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + @overload @classmethod def make(cls, source: ZipFile) -> CompleteDirs: """ - Given a source (filename or zipfile), return an - appropriate CompleteDirs subclass. - """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... class Path: """ - A pathlib-compatible interface for zip files. + A pathlib-compatible interface for zip files. - Consider a zip file with this structure:: + Consider a zip file with this structure:: - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt - >>> data = io.BytesIO() - >>> zf = ZipFile(data, 'w') - >>> zf.writestr('a.txt', 'content of a') - >>> zf.writestr('b/c.txt', 'content of c') - >>> zf.writestr('b/d/e.txt', 'content of e') - >>> zf.filename = 'mem/abcde.zip' + >>> data = io.BytesIO() + >>> zf = ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' - Path accepts the zipfile object itself or a filename + Path accepts the zipfile object itself or a filename - >>> root = Path(zf) + >>> root = Path(zf) - From there, several path operations are available. + From there, several path operations are available. - Directory iteration (including the zip file itself): + Directory iteration (including the zip file itself): - >>> a, b = root.iterdir() - >>> a - Path('mem/abcde.zip', 'a.txt') - >>> b - Path('mem/abcde.zip', 'b/') + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') - name property: + name property: - >>> b.name - 'b' + >>> b.name + 'b' - join with divide operator: + join with divide operator: - >>> c = b / 'c.txt' - >>> c - Path('mem/abcde.zip', 'b/c.txt') - >>> c.name - 'c.txt' + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' - Read text: + Read text: - >>> c.read_text() - 'content of c' + >>> c.read_text() + 'content of c' - existence: + existence: - >>> c.exists() - True - >>> (b / 'missing.txt').exists() - False + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False - Coercion to string: + Coercion to string: - >>> import os - >>> str(c).replace(os.sep, posixpath.sep) - 'mem/abcde.zip/b/c.txt' + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' - At the root, ``name``, ``filename``, and ``parent`` - resolve to the zipfile. Note these attributes are not - valid and will raise a ``ValueError`` if the zipfile - has no filename. + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ - >>> root.name - 'abcde.zip' - >>> str(root.filename).replace(os.sep, posixpath.sep) - 'mem/abcde.zip' - >>> str(root.parent) - 'mem' - """ root: CompleteDirs at: str def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: """ - Construct a Path from a ZipFile or filename. 
+ Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ - Note: When the source is an existing ZipFile object, - its type (__class__) will be mutated to a - specialized type. If the caller wishes to retain the - original type, the caller should either create a - separate ZipFile object or pass a filename. - """ @property def name(self) -> str: ... @property @@ -613,10 +631,11 @@ else: pwd: bytes | None = None, ) -> TextIOWrapper: """ - Open this entry as text or binary following the semantics - of ``pathlib.Path.open()`` by passing arguments through - to io.TextIOWrapper(). - """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). + """ + @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... @@ -647,8 +666,8 @@ else: def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: """Quickly see if a file is a ZIP file by checking the magic number. -The filename argument may be a file or file-like object too. -""" + The filename argument may be a file or file-like object too. + """ ZIP64_LIMIT: Final[int] ZIP_FILECOUNT_LIMIT: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi index 2b60d6d78eca7..4b6743b707e05 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/__init__.pyi @@ -6,6 +6,7 @@ and zipp in PyPI. See https://github.com/python/importlib_metadata/wiki/Development-Methodology for more detail. """ + import sys from _typeshed import StrPath from collections.abc import Iterator, Sequence @@ -22,34 +23,38 @@ if sys.version_info >= (3, 12): class InitializedState: """ -Mix-in to save the initialization state for pickling. -""" + Mix-in to save the initialization state for pickling. + """ + def __init__(self, *args: object, **kwargs: object) -> None: ... def __getstate__(self) -> tuple[list[object], dict[object, object]]: ... def __setstate__(self, state: Sequence[tuple[list[object], dict[object, object]]]) -> None: ... class CompleteDirs(InitializedState, ZipFile): """ -A ZipFile subclass that ensures that implied directories -are always included in the namelist. + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + + >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt'])) + ['foo/', 'foo/bar/'] + >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/'])) + ['foo/'] + """ ->>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt'])) -['foo/', 'foo/bar/'] ->>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/'])) -['foo/'] -""" def resolve_dir(self, name: str) -> str: """ -If the name represents a directory, return that name -as a directory (with the trailing slash). -""" + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + @overload @classmethod def make(cls, source: ZipFile) -> CompleteDirs: """ -Given a source (filename or zipfile), return an -appropriate CompleteDirs subclass. 
-""" + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + @overload @classmethod def make(cls, source: StrPath | IO[bytes]) -> Self: ... @@ -57,124 +62,126 @@ appropriate CompleteDirs subclass. @classmethod def inject(cls, zf: _ZF) -> _ZF: """ -Given a writable zip file zf, inject directory entries for -any directories implied by the presence of children. -""" + Given a writable zip file zf, inject directory entries for + any directories implied by the presence of children. + """ class Path: """ -A :class:`importlib.resources.abc.Traversable` interface for zip files. + A :class:`importlib.resources.abc.Traversable` interface for zip files. -Implements many of the features users enjoy from -:class:`pathlib.Path`. + Implements many of the features users enjoy from + :class:`pathlib.Path`. -Consider a zip file with this structure:: + Consider a zip file with this structure:: - . - ├── a.txt - └── b - ├── c.txt - └── d - └── e.txt + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt ->>> data = io.BytesIO() ->>> zf = ZipFile(data, 'w') ->>> zf.writestr('a.txt', 'content of a') ->>> zf.writestr('b/c.txt', 'content of c') ->>> zf.writestr('b/d/e.txt', 'content of e') ->>> zf.filename = 'mem/abcde.zip' + >>> data = io.BytesIO() + >>> zf = ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' -Path accepts the zipfile object itself or a filename + Path accepts the zipfile object itself or a filename ->>> path = Path(zf) + >>> path = Path(zf) -From there, several path operations are available. + From there, several path operations are available. -Directory iteration (including the zip file itself): + Directory iteration (including the zip file itself): ->>> a, b = path.iterdir() ->>> a -Path('mem/abcde.zip', 'a.txt') ->>> b -Path('mem/abcde.zip', 'b/') + >>> a, b = path.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') -name property: + name property: ->>> b.name -'b' + >>> b.name + 'b' -join with divide operator: + join with divide operator: ->>> c = b / 'c.txt' ->>> c -Path('mem/abcde.zip', 'b/c.txt') ->>> c.name -'c.txt' + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' -Read text: + Read text: ->>> c.read_text(encoding='utf-8') -'content of c' + >>> c.read_text(encoding='utf-8') + 'content of c' -existence: + existence: ->>> c.exists() -True ->>> (b / 'missing.txt').exists() -False + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False -Coercion to string: + Coercion to string: ->>> import os ->>> str(c).replace(os.sep, posixpath.sep) -'mem/abcde.zip/b/c.txt' + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' -At the root, ``name``, ``filename``, and ``parent`` -resolve to the zipfile. + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. ->>> str(path) -'mem/abcde.zip/' ->>> path.name -'abcde.zip' ->>> path.filename == pathlib.Path('mem/abcde.zip') -True ->>> str(path.parent) -'mem' + >>> str(path) + 'mem/abcde.zip/' + >>> path.name + 'abcde.zip' + >>> path.filename == pathlib.Path('mem/abcde.zip') + True + >>> str(path.parent) + 'mem' -If the zipfile has no filename, such attributes are not -valid and accessing them will raise an Exception. + If the zipfile has no filename, such attributes are not + valid and accessing them will raise an Exception. 
->>> zf.filename = None ->>> path.name -Traceback (most recent call last): -... -TypeError: ... + >>> zf.filename = None + >>> path.name + Traceback (most recent call last): + ... + TypeError: ... ->>> path.filename -Traceback (most recent call last): -... -TypeError: ... + >>> path.filename + Traceback (most recent call last): + ... + TypeError: ... ->>> path.parent -Traceback (most recent call last): -... -TypeError: ... + >>> path.parent + Traceback (most recent call last): + ... + TypeError: ... + + # workaround python/cpython#106763 + >>> pass + """ -# workaround python/cpython#106763 ->>> pass -""" root: CompleteDirs at: str def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: """ -Construct a Path from a ZipFile or filename. + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ -Note: When the source is an existing ZipFile object, -its type (__class__) will be mutated to a -specialized type. If the caller wishes to retain the -original type, the caller should either create a -separate ZipFile object or pass a filename. -""" @property def name(self) -> str: ... @property @@ -200,10 +207,11 @@ separate ZipFile object or pass a filename. pwd: bytes | None = None, ) -> TextIOWrapper: """ -Open this entry as text or binary following the semantics -of ``pathlib.Path.open()`` by passing arguments through -to io.TextIOWrapper(). -""" + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). + """ + @overload def open(self, mode: Literal["rb", "wb"], *, pwd: bytes | None = None) -> IO[bytes]: ... def iterdir(self) -> Iterator[Self]: ... @@ -224,14 +232,16 @@ to io.TextIOWrapper(). def rglob(self, pattern: str) -> Iterator[Self]: ... def is_symlink(self) -> Literal[False]: """ -Return whether this path is a symlink. -""" + Return whether this path is a symlink. + """ + def relative_to(self, other: Path, *extra: StrPath) -> str: ... def match(self, path_pattern: str) -> bool: ... def __eq__(self, other: object) -> bool: """ ->>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo' -False -""" + >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo' + False + """ + def __hash__(self) -> int: ... def __truediv__(self, add: StrPath) -> Path: ... diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi index 9f9202dfd3063..8c607b19d1d8c 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipfile/_path/glob.pyi @@ -5,16 +5,17 @@ from re import Match if sys.version_info >= (3, 13): class Translator: """ ->>> Translator('xyz') -Traceback (most recent call last): -... -AssertionError: Invalid separators - ->>> Translator('') -Traceback (most recent call last): -... -AssertionError: Invalid separators -""" + >>> Translator('xyz') + Traceback (most recent call last): + ... + AssertionError: Invalid separators + + >>> Translator('') + Traceback (most recent call last): + ... + AssertionError: Invalid separators + """ + if sys.platform == "win32": def __init__(self, seps: str = "\\/") -> None: ... 
else: @@ -22,93 +23,102 @@ AssertionError: Invalid separators def translate(self, pattern: str) -> str: """ -Given a glob pattern, produce a regex that matches it. -""" + Given a glob pattern, produce a regex that matches it. + """ + def extend(self, pattern: str) -> str: """ -Extend regex for pattern-wide concerns. + Extend regex for pattern-wide concerns. + + Apply '(?s:)' to create a non-matching group that + matches newlines (valid on Unix). -Apply '(?s:)' to create a non-matching group that -matches newlines (valid on Unix). + Append '\\z' to imply fullmatch even when match is used. + """ -Append '\\z' to imply fullmatch even when match is used. -""" def match_dirs(self, pattern: str) -> str: """ -Ensure that zipfile.Path directory names are matched. + Ensure that zipfile.Path directory names are matched. + + zipfile.Path directory names always end in a slash. + """ -zipfile.Path directory names always end in a slash. -""" def translate_core(self, pattern: str) -> str: """ -Given a glob pattern, produce a regex that matches it. - ->>> t = Translator() ->>> t.translate_core('*.txt').replace('\\\\\\\\', '') -'[^/]*\\\\.txt' ->>> t.translate_core('a?txt') -'a[^/]txt' ->>> t.translate_core('**/*').replace('\\\\\\\\', '') -'.*/[^/][^/]*' -""" + Given a glob pattern, produce a regex that matches it. + + >>> t = Translator() + >>> t.translate_core('*.txt').replace('\\\\\\\\', '') + '[^/]*\\\\.txt' + >>> t.translate_core('a?txt') + 'a[^/]txt' + >>> t.translate_core('**/*').replace('\\\\\\\\', '') + '.*/[^/][^/]*' + """ + def replace(self, match: Match[str]) -> str: """ -Perform the replacements for a match from :func:`separate`. -""" + Perform the replacements for a match from :func:`separate`. + """ + def restrict_rglob(self, pattern: str) -> None: """ -Raise ValueError if ** appears in anything but a full path segment. + Raise ValueError if ** appears in anything but a full path segment. + + >>> Translator().translate('**foo') + Traceback (most recent call last): + ... + ValueError: ** must appear alone in a path segment + """ ->>> Translator().translate('**foo') -Traceback (most recent call last): -... -ValueError: ** must appear alone in a path segment -""" def star_not_empty(self, pattern: str) -> str: """ -Ensure that * will not match an empty segment. -""" + Ensure that * will not match an empty segment. + """ else: def translate(pattern: str) -> str: """ - Given a glob pattern, produce a regex that matches it. - - >>> translate('*.txt') - '[^/]*\\\\.txt' - >>> translate('a?txt') - 'a.txt' - >>> translate('**/*') - '.*/[^/]*' - """ + Given a glob pattern, produce a regex that matches it. + + >>> translate('*.txt') + '[^/]*\\\\.txt' + >>> translate('a?txt') + 'a.txt' + >>> translate('**/*') + '.*/[^/]*' + """ + def match_dirs(pattern: str) -> str: """ - Ensure that zipfile.Path directory names are matched. + Ensure that zipfile.Path directory names are matched. + + zipfile.Path directory names always end in a slash. + """ - zipfile.Path directory names always end in a slash. - """ def translate_core(pattern: str) -> str: """ - Given a glob pattern, produce a regex that matches it. - - >>> translate('*.txt') - '[^/]*\\\\.txt' - >>> translate('a?txt') - 'a.txt' - >>> translate('**/*') - '.*/[^/]*' - """ + Given a glob pattern, produce a regex that matches it. + + >>> translate('*.txt') + '[^/]*\\\\.txt' + >>> translate('a?txt') + 'a.txt' + >>> translate('**/*') + '.*/[^/]*' + """ + def replace(match: Match[str]) -> str: """ - Perform the replacements for a match from :func:`separate`. 
- """ + Perform the replacements for a match from :func:`separate`. + """ def separate(pattern: str) -> Iterator[Match[str]]: """ -Separate out character sets to avoid translating their contents. + Separate out character sets to avoid translating their contents. ->>> [m.group(0) for m in separate('*.txt')] -['*.txt'] ->>> [m.group(0) for m in separate('a[?]txt')] -['a', '[?]', 'txt'] -""" + >>> [m.group(0) for m in separate('*.txt')] + ['*.txt'] + >>> [m.group(0) for m in separate('a[?]txt')] + ['a', '[?]', 'txt'] + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi index 759ccea36f51c..d0025b07f0521 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zipimport.pyi @@ -9,6 +9,7 @@ It is usually not needed to use the zipimport module explicitly; it is used by the builtin import mechanism for sys.path items that are paths to Zip archives. """ + import sys from _typeshed import StrOrBytesPath from importlib.machinery import ModuleSpec @@ -32,17 +33,18 @@ class ZipImportError(ImportError): ... class zipimporter(_LoaderBasics): """zipimporter(archivepath) -> zipimporter object -Create a new zipimporter instance. 'archivepath' must be a path to -a zipfile, or to a specific path inside a zipfile. For example, it can be -'/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a -valid directory inside the archive. + Create a new zipimporter instance. 'archivepath' must be a path to + a zipfile, or to a specific path inside a zipfile. For example, it can be + '/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a + valid directory inside the archive. -'ZipImportError is raised if 'archivepath' doesn't point to a valid Zip -archive. + 'ZipImportError is raised if 'archivepath' doesn't point to a valid Zip + archive. + + The 'archive' attribute of zipimporter objects contains the name of the + zipfile targeted. + """ -The 'archive' attribute of zipimporter objects contains the name of the -zipfile targeted. -""" archive: str prefix: str if sys.version_info >= (3, 11): @@ -56,125 +58,129 @@ zipfile targeted. def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: """find_loader(fullname, path=None) -> self, str or None. - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, a string containing the - full path name if it's possibly a portion of a namespace package, - or None otherwise. The optional 'path' argument is ignored -- it's - there for compatibility with the importer protocol. + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, a string containing the + full path name if it's possibly a portion of a namespace package, + or None otherwise. The optional 'path' argument is ignored -- it's + there for compatibility with the importer protocol. + + Deprecated since Python 3.10. Use find_spec() instead. + """ - Deprecated since Python 3.10. Use find_spec() instead. - """ @deprecated("Deprecated since Python 3.10; removed in Python 3.12. Use `find_spec()` instead.") def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: """find_module(fullname, path=None) -> self or None. 
- Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, or None if it wasn't. - The optional 'path' argument is ignored -- it's there for compatibility - with the importer protocol. + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, or None if it wasn't. + The optional 'path' argument is ignored -- it's there for compatibility + with the importer protocol. - Deprecated since Python 3.10. Use find_spec() instead. - """ + Deprecated since Python 3.10. Use find_spec() instead. + """ else: def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: """find_loader(fullname, path=None) -> self, str or None. - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, a string containing the - full path name if it's possibly a portion of a namespace package, - or None otherwise. The optional 'path' argument is ignored -- it's - there for compatibility with the importer protocol. - """ + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, a string containing the + full path name if it's possibly a portion of a namespace package, + or None otherwise. The optional 'path' argument is ignored -- it's + there for compatibility with the importer protocol. + """ + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: """find_module(fullname, path=None) -> self or None. - Search for a module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the zipimporter - instance itself if the module was found, or None if it wasn't. - The optional 'path' argument is ignored -- it's there for compatibility - with the importer protocol. - """ + Search for a module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the zipimporter + instance itself if the module was found, or None if it wasn't. + The optional 'path' argument is ignored -- it's there for compatibility + with the importer protocol. + """ def get_code(self, fullname: str) -> CodeType: """get_code(fullname) -> code object. -Return the code object for the specified module. Raise ZipImportError -if the module couldn't be imported. -""" + Return the code object for the specified module. Raise ZipImportError + if the module couldn't be imported. + """ + def get_data(self, pathname: str) -> bytes: """get_data(pathname) -> string with file data. -Return the data associated with 'pathname'. Raise OSError if -the file wasn't found. -""" + Return the data associated with 'pathname'. Raise OSError if + the file wasn't found. + """ + def get_filename(self, fullname: str) -> str: """get_filename(fullname) -> filename string. -Return the filename for the specified module or raise ZipImportError -if it couldn't be imported. -""" + Return the filename for the specified module or raise ZipImportError + if it couldn't be imported. + """ if sys.version_info >= (3, 14): def get_resource_reader(self, fullname: str) -> ZipReader: # undocumented - """Return the ResourceReader for a module in a zip file. 
-""" + """Return the ResourceReader for a module in a zip file.""" elif sys.version_info >= (3, 10): def get_resource_reader(self, fullname: str) -> ZipReader | None: # undocumented - """Return the ResourceReader for a module in a zip file. -""" + """Return the ResourceReader for a module in a zip file.""" else: def get_resource_reader(self, fullname: str) -> ResourceReader | None: # undocumented """Return the ResourceReader for a package in a zip file. - If 'fullname' is a package within the zip file, return the - 'ResourceReader' object for the package. Otherwise return None. - """ + If 'fullname' is a package within the zip file, return the + 'ResourceReader' object for the package. Otherwise return None. + """ def get_source(self, fullname: str) -> str | None: """get_source(fullname) -> source string. -Return the source code for the specified module. Raise ZipImportError -if the module couldn't be found, return None if the archive does -contain the module, but has no source for it. -""" + Return the source code for the specified module. Raise ZipImportError + if the module couldn't be found, return None if the archive does + contain the module, but has no source for it. + """ + def is_package(self, fullname: str) -> bool: """is_package(fullname) -> bool. -Return True if the module specified by fullname is a package. -Raise ZipImportError if the module couldn't be found. -""" + Return True if the module specified by fullname is a package. + Raise ZipImportError if the module couldn't be found. + """ if sys.version_info >= (3, 10): @deprecated("Deprecated since Python 3.10; removed in Python 3.15. Use `exec_module()` instead.") def load_module(self, fullname: str) -> ModuleType: """load_module(fullname) -> module. -Load the module specified by 'fullname'. 'fullname' must be the -fully qualified (dotted) module name. It returns the imported -module, or raises ZipImportError if it could not be imported. + Load the module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the imported + module, or raises ZipImportError if it could not be imported. + + Deprecated since Python 3.10. Use exec_module() instead. + """ -Deprecated since Python 3.10. Use exec_module() instead. -""" def exec_module(self, module: ModuleType) -> None: - """Execute the module. -""" + """Execute the module.""" + def create_module(self, spec: ModuleSpec) -> None: - """Use default semantics for module creation. -""" + """Use default semantics for module creation.""" + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: """Create a ModuleSpec for the specified module. -Returns None if the module cannot be found. -""" + Returns None if the module cannot be found. + """ + def invalidate_caches(self) -> None: - """Invalidates the cache of file data of the archive path. -""" + """Invalidates the cache of file data of the archive path.""" else: def load_module(self, fullname: str) -> ModuleType: """load_module(fullname) -> module. - Load the module specified by 'fullname'. 'fullname' must be the - fully qualified (dotted) module name. It returns the imported - module, or raises ZipImportError if it wasn't found. - """ + Load the module specified by 'fullname'. 'fullname' must be the + fully qualified (dotted) module name. It returns the imported + module, or raises ZipImportError if it wasn't found. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi index 9d165cf2f1545..97d70804a36fe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi @@ -12,6 +12,7 @@ decompressobj([wbits[, zdict]]) -- Return a decompressor object. Compressor objects support compress() and flush() methods; decompressor objects support decompress() and flush(). """ + import sys from _typeshed import ReadableBuffer from typing import Any, Final, final, type_check_only @@ -75,85 +76,88 @@ class _Decompress: def adler32(data: ReadableBuffer, value: int = 1, /) -> int: """Compute an Adler-32 checksum of data. - value - Starting value of the checksum. + value + Starting value of the checksum. -The returned checksum is an integer. -""" + The returned checksum is an integer. + """ if sys.version_info >= (3, 11): def compress(data: ReadableBuffer, /, level: int = -1, wbits: int = 15) -> bytes: """Returns a bytes object containing compressed data. - data - Binary data to be compressed. - level - Compression level, in 0-9 or -1. - wbits - The window buffer size and container format. -""" + data + Binary data to be compressed. + level + Compression level, in 0-9 or -1. + wbits + The window buffer size and container format. + """ else: def compress(data: ReadableBuffer, /, level: int = -1) -> bytes: """Returns a bytes object containing compressed data. - data - Binary data to be compressed. - level - Compression level, in 0-9 or -1. -""" + data + Binary data to be compressed. + level + Compression level, in 0-9 or -1. + """ def compressobj( level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None ) -> _Compress: """Return a compressor object. - level - The compression level (an integer in the range 0-9 or -1; default is - currently equivalent to 6). Higher compression levels are slower, - but produce smaller results. - method - The compression algorithm. If given, this must be DEFLATED. - wbits - +9 to +15: The base-two logarithm of the window size. Include a zlib - container. - -9 to -15: Generate a raw stream. - +25 to +31: Include a gzip container. - memLevel - Controls the amount of memory used for internal compression state. - Valid values range from 1 to 9. Higher values result in higher memory - usage, faster compression, and smaller output. - strategy - Used to tune the compression algorithm. Possible values are - Z_DEFAULT_STRATEGY, Z_FILTERED, and Z_HUFFMAN_ONLY. - zdict - The predefined compression dictionary - a sequence of bytes - containing subsequences that are likely to occur in the input data. -""" + level + The compression level (an integer in the range 0-9 or -1; default is + currently equivalent to 6). Higher compression levels are slower, + but produce smaller results. + method + The compression algorithm. If given, this must be DEFLATED. + wbits + +9 to +15: The base-two logarithm of the window size. Include a zlib + container. + -9 to -15: Generate a raw stream. + +25 to +31: Include a gzip container. + memLevel + Controls the amount of memory used for internal compression state. + Valid values range from 1 to 9. Higher values result in higher memory + usage, faster compression, and smaller output. + strategy + Used to tune the compression algorithm. Possible values are + Z_DEFAULT_STRATEGY, Z_FILTERED, and Z_HUFFMAN_ONLY. 
+ zdict + The predefined compression dictionary - a sequence of bytes + containing subsequences that are likely to occur in the input data. + """ + def crc32(data: ReadableBuffer, value: int = 0, /) -> int: """Compute a CRC-32 checksum of data. - value - Starting value of the checksum. + value + Starting value of the checksum. + + The returned checksum is an integer. + """ -The returned checksum is an integer. -""" def decompress(data: ReadableBuffer, /, wbits: int = 15, bufsize: int = 16384) -> bytes: """Returns a bytes object containing the uncompressed data. - data - Compressed data. - wbits - The window buffer size and container format. - bufsize - The initial output buffer size. -""" + data + Compressed data. + wbits + The window buffer size and container format. + bufsize + The initial output buffer size. + """ + def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: """Return a decompressor object. - wbits - The window buffer size and container format. - zdict - The predefined compression dictionary. This must be the same - dictionary as used by the compressor that produced the input data. -""" + wbits + The window buffer size and container format. + zdict + The predefined compression dictionary. This must be the same + dictionary as used by the compressor that produced the input data. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi index 0c08ab748e89f..9cae9b7de8f88 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/__init__.pyi @@ -19,31 +19,27 @@ class ZoneInfo(tzinfo): def __new__(cls, key: str) -> Self: ... @classmethod def no_cache(cls, key: str) -> Self: - """Get a new instance of ZoneInfo, bypassing the cache. -""" + """Get a new instance of ZoneInfo, bypassing the cache.""" if sys.version_info >= (3, 12): @classmethod def from_file(cls, file_obj: _IOBytes, /, key: str | None = None) -> Self: - """Create a ZoneInfo file from a file object. -""" + """Create a ZoneInfo file from a file object.""" else: @classmethod def from_file(cls, fobj: _IOBytes, /, key: str | None = None) -> Self: - """Create a ZoneInfo file from a file object. -""" + """Create a ZoneInfo file from a file object.""" @classmethod def clear_cache(cls, *, only_keys: Iterable[str] | None = None) -> None: - """Clear the ZoneInfo cache. -""" + """Clear the ZoneInfo cache.""" + def tzname(self, dt: datetime | None, /) -> str | None: - """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime. -""" + """Retrieve a string containing the abbreviation for the time zone that applies in a zone at a given datetime.""" + def utcoffset(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the UTC offset in a zone at the given datetime. -""" + """Retrieve a timedelta representing the UTC offset in a zone at the given datetime.""" + def dst(self, dt: datetime | None, /) -> timedelta | None: - """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime. -""" + """Retrieve a timedelta representing the amount of DST applied in a zone at the given datetime.""" def __dir__() -> list[str]: ... 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi index efd01fe61f8ae..69ddef03f693a 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_common.pyi @@ -12,5 +12,4 @@ def load_data( ) -> tuple[tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[int, ...], tuple[str, ...], bytes | None]: ... class ZoneInfoNotFoundError(KeyError): - """Exception raised when a ZoneInfo key is not found. -""" + """Exception raised when a ZoneInfo key is not found.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi index ce0ef778ccfc0..e4f17bb61f0e3 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zoneinfo/_tzpath.pyi @@ -5,23 +5,22 @@ from collections.abc import Sequence # a sequence of strings is required. This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: - """Reset global TZPATH. -""" + """Reset global TZPATH.""" + def find_tzfile(key: str) -> str | None: - """Retrieve the path to a TZif file from a key. -""" + """Retrieve the path to a TZif file from a key.""" + def available_timezones() -> set[str]: """Returns a set containing all available time zones. -.. caution:: + .. caution:: - This may attempt to open a large number of files, since the best way to - determine if a given file on the time zone search path is to open it - and check for the "magic string" at the beginning. -""" + This may attempt to open a large number of files, since the best way to + determine if a given file on the time zone search path is to open it + and check for the "magic string" at the beginning. + """ TZPATH: tuple[str, ...] class InvalidTZPathWarning(RuntimeWarning): - """Warning raised if an invalid path is specified in PYTHONTZPATH. -""" + """Warning raised if an invalid path is specified in PYTHONTZPATH.""" From 078df1cc7b042b2a7d7c9432afc6b31f753cfa44 Mon Sep 17 00:00:00 2001 From: typeshedbot <> Date: Wed, 15 Oct 2025 00:39:57 +0000 Subject: [PATCH 6/8] Remove pyproject.toml file --- .../vendor/typeshed/pyproject.toml | 254 ------------------ 1 file changed, 254 deletions(-) delete mode 100644 crates/ty_vendored/vendor/typeshed/pyproject.toml diff --git a/crates/ty_vendored/vendor/typeshed/pyproject.toml b/crates/ty_vendored/vendor/typeshed/pyproject.toml deleted file mode 100644 index 4cf4f95fbcc5e..0000000000000 --- a/crates/ty_vendored/vendor/typeshed/pyproject.toml +++ /dev/null @@ -1,254 +0,0 @@ -[project] -# This section is needed to avoid writing --no-project everytime when using "uv run" -# https://github.com/astral-sh/uv/issues/8666 -name = "typeshed" -version = "0" -requires-python = ">=3.9" # Minimum version to run tests, used by uv run - -[tool.black] -line-length = 130 -target-version = ["py310"] -skip-magic-trailing-comma = true - -[tool.ruff] -line-length = 130 -# Oldest supported Python version -target-version = "py39" -fix = true -exclude = [ - # virtual environment - ".env", - ".venv", - "env", - # cache directories, etc.: - ".git", - ".mypy_cache", -] - -[tool.ruff.lint] -# Disable all rules on test cases by default: -# test cases often deliberately contain code -# that might not be considered idiomatic or modern. 
-# -# Note: some rules that are specifically useful to the test cases -# are invoked via separate runs of ruff in pre-commit: -# see our .pre-commit-config.yaml file for details -exclude = ["**/test_cases/**/*.py"] -# We still use flake8-pyi to check these (see .flake8 config file); -# tell ruff not to flag these as e.g. "unused noqa comments" -external = ["F821", "Y"] -select = [ - "A", # flake8-builtins - "ARG", # flake8-unused-arguments - "B", # flake8-bugbear - "C4", # flake8-comprehensions - "D", # pydocstyle - "DTZ", # flake8-datetimez - "EXE", # flake8-executable - "FA", # flake8-future-annotations - "FBT", # flake8-boolean-trap - "FLY", # flynt - "I", # isort - "N", # pep8-naming - "PGH", # pygrep-hooks - "PIE", # flake8-pie - "PL", # Pylint - "PTH", # flake8-use-pathlib - "RSE", # flake8-raise - "RUF", # Ruff-specific and unused-noqa - "SLOT", # flake8-slots - "T10", # flake8-debugger - "TD", # flake8-todos - "TRY", # tryceratops - "UP", # pyupgrade - "YTT", # flake8-2020 - # Flake8 base rules - "E", # pycodestyle Error - "F", # Pyflakes - "W", # pycodestyle Warning - # Only include flake8-annotations rules that are autofixable. Otherwise leave this to mypy+pyright - "ANN2", - # Most refurb rules are in preview and can be opinionated, - # consider them individually as they come out of preview (last check: 0.8.4) - "FURB105", # Unnecessary empty string passed to `print` - "FURB129", # Instead of calling `readlines()`, iterate over file object directly - "FURB136", # Replace `if` expression with `{min_max}` call - "FURB167", # Use of regular expression alias `re.{}` - "FURB168", # Prefer `is` operator over `isinstance` to check if an object is `None` - "FURB169", # Compare the identities of `{object}` and None instead of their respective types - "FURB177", # Prefer `Path.cwd()` over `Path().resolve()` for current-directory lookups - "FURB187", # Use of assignment of `reversed` on list `{name}` - # Used for lint.flake8-import-conventions.aliases - "ICN001", # `{name}` should be imported as `{asname}` - # PYI: only enable rules that have autofixes and that we always want to fix (even manually), - # avoids duplicate # noqa with flake8-pyi - "PYI009", # Empty body should contain `...`, not pass - "PYI010", # Function body must contain only `...` - "PYI012", # Class bodies must not contain `pass` - "PYI013", # Non-empty class bodies must not contain `...` - "PYI014", # Only simple default values allowed for arguments - "PYI015", # Only simple default values allowed for assignments - "PYI016", # Duplicate union member `{}` - "PYI019", # Methods like `{method_name}` should return `Self` instead of a custom `TypeVar` - "PYI020", # Quoted annotations should not be included in stubs - "PYI025", # Use `from collections.abc import Set as AbstractSet` to avoid confusion with the `set` builtin - # "PYI026", Waiting for this mypy bug to be fixed: https://github.com/python/mypy/issues/16581 - "PYI030", # Multiple literal members in a union. Use a single literal, e.g. `Literal[{}]` - "PYI032", # Prefer `object` to `Any` for the second parameter to `{method_name}` - "PYI036", # Star-args in `{method_name}` should be annotated with `object` - "PYI044", # `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics - "PYI055", # Multiple `type[T]` usages in a union. Combine them into one, e.g., `type[{union_str}]`. 
- "PYI058", # Use `{return_type}` as the return value for simple `{method}` methods - # "PYI061", # TODO: Enable when out of preview - "PYI062", # Duplicate literal member `{}` - "PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final - # flake8-simplify, excluding rules that can reduce performance or readability due to long line formatting - "SIM101", # Multiple `isinstance` calls for `{name}`, merge into a single call - "SIM103", # Return the condition `{condition}` directly - "SIM107", # Don't use return in `try-except` and `finally` - "SIM109", # Use `{replacement}` instead of multiple equality comparisons - "SIM112", # Use capitalized environment variable `{expected}` instead of `{actual}` - "SIM113", # Use `enumerate()` for index variable `{index}` in `for` loop - "SIM114", # Combine `if` branches using logical `or` operator - "SIM115", # Use a context manager for opening files - "SIM118", # Use key `{operator}` dict instead of key `{operator} dict.keys()` - "SIM201", # Use `{left} != {right}` instead of not `{left} == {right}` - "SIM202", # Use `{left} == {right}` instead of not `{left} != {right}` - "SIM208", # Use `{expr}` instead of `not (not {expr})` - "SIM210", # Remove unnecessary `True if ... else False` - "SIM211", # Use `not ...` instead of `False if ... else True` - "SIM212", # Use `{expr_else} if {expr_else} else {expr_body}` instead of `{expr_body} if not {expr_else} else {expr_else}` - "SIM220", # Use `False` instead of `{name} and not {name}` - "SIM221", # Use `True` instead of `{name} or not {name}` - "SIM222", # Use `{expr}` instead of `{replaced}` - "SIM223", # Use `{expr}` instead of `{replaced}` - "SIM300", # Yoda condition detected - "SIM401", # Use `{contents}` instead of an if block - "SIM910", # Use `{expected}` instead of `{actual}` (dict-get-with-none-default) - "SIM911", # Use `{expected}` instead of `{actual}` (zip-dict-keys-and-values) - # Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations - "TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting. - "TC005", # Found empty type-checking block - # "TC008", # TODO: Enable when out of preview - "TC010", # Invalid string member in `X | Y`-style union type - # Used for lint.flake8-import-conventions.aliases - "TID251", # `{name}` is banned: {message} -] -extend-safe-fixes = [ - "UP036", # Remove unnecessary `sys.version_info` blocks -] -ignore = [ - ### - # Rules that can conflict with the formatter (Black) - # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules - ### - "E111", # indentation-with-invalid-multiple - "E114", # indentation-with-invalid-multiple-comment - "E117", # over-indented - "W191", # tab-indentation - ### - # Rules we don't want or don't agree with - ### - # We're not a library, no need to document everything - "D1", # Missing docstring in ... - # Sometimes, an extra blank line is more readable - "D202", # No blank lines allowed after function docstring - # Doesn't support split "summary line" - "D205", # 1 blank line required between summary line and description - # Used for direct, non-subclass type comparison, for example: `type(val) is str` - # see https://github.com/astral-sh/ruff/issues/6465 - "E721", # Do not compare types, use `isinstance()` - # Highly opinionated, and it's often necessary to violate it - "PLC0415", # `import` should be at the top-level of a file - # Leave the size and complexity of tests to human interpretation - "PLR09", # Too many ... 
- # Too many magic number "2" that are preferable inline. https://github.com/astral-sh/ruff/issues/10009 - "PLR2004", # Magic value used in comparison, consider replacing `{value}` with a constant variable - # Keep codeflow path separation explicit - "PLR5501", # Use `elif` instead of `else` then `if`, to reduce indentation - # Often just leads to redundant more verbose code when needing an actual str - "PTH208", # Use `pathlib.Path.iterdir()` instead. - # Allow FIXME - "TD001", # Invalid TODO tag: `{tag}` - # Git blame is sufficient - "TD002", # Missing author in TODO; - "TD003", # Missing issue link for this TODO - # Mostly from scripts and tests, it's ok to have messages passed directly to exceptions - "TRY003", # Avoid specifying long messages outside the exception class - "PLC0205", # Sometimes __slots__ really is a string at runtime - ### - # False-positives, but already checked by type-checkers - ### - # Ruff doesn't support multi-file analysis yet: https://github.com/astral-sh/ruff/issues/5295 - "RUF013", # PEP 484 prohibits implicit `Optional` -] - -[tool.ruff.lint.per-file-ignores] -"*.pyi" = [ - # A lot of stubs are incomplete on purpose, and that's configured through pyright - # Some ANN204 (special method) are autofixable in stubs, but not all. - "ANN2", # Missing return type annotation for ... - # Ruff 0.8.0 added sorting of __all__ and __slots_. - # There is no consensus on whether we want to apply this to stubs, so keeping the status quo. - # See https://github.com/python/typeshed/pull/13108 - "RUF022", # `__all__` is not sorted - "RUF023", # `{}.__slots__` is not sorted - ### - # Rules that are out of the control of stub authors: - ### - # Names in stubs should match the implementation, even if it's ambiguous. - # https://github.com/astral-sh/ruff/issues/15293 - "A", # flake8-builtins - # Stubs can sometimes re-export entire modules. - # Issues with using a star-imported name will be caught by type-checkers. - "F403", # `from . import *` used; unable to detect undefined names - "F405", # may be undefined, or defined from star imports - # Most pep8-naming rules don't apply for third-party stubs like typeshed. - # N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one - "N8", # pep8-naming - # Stubs are allowed to use private variables (pyright's reportPrivateUsage is also disabled) - "PLC2701", # Private name import from external module - # Names in stubs should match implementation - "PLW0211", # First argument of a static method should not be named `{argument_name}` -] -"lib/ts_utils/**" = [ - # Doesn't affect stubs. The only re-exports we have should be in our local lib ts_utils - "PLC0414", # Import alias does not rename original package -] -"*_pb2.pyi" = [ - # Special autogenerated typing --> typing_extensions aliases - "ICN001", # `{name}` should be imported as `{asname}` - # Leave the docstrings as-is, matching source - "D", # pydocstyle - # See comment on black's force-exclude config above - "E501", # Line too long -] - -[tool.ruff.lint.pydocstyle] -convention = "pep257" # https://docs.astral.sh/ruff/settings/#lint_pydocstyle_convention - -[tool.ruff.lint.flake8-import-conventions.aliases] -# Prevent aliasing these, as it causes false-negatives for certain rules -typing_extensions = "typing_extensions" -typing = "typing" - -[tool.ruff.lint.flake8-tidy-imports.banned-api] -"tempfile.NamedTemporaryFile".msg = "Use `ts_util.util.NamedTemporaryFile` instead." 
- -[tool.ruff.lint.isort] -split-on-trailing-comma = false -combine-as-imports = true -extra-standard-library = [ - # Group these with stdlib - "_typeshed", - "typing_extensions", - # Extra modules not recognized by Ruff - # Added in Python 3.9 - "zoneinfo", - # Added in Python 3.14 - "compression", -] -known-first-party = ["_utils", "ts_utils"] - -[tool.typeshed] -oldest_supported_python = "3.9" From 2107825d9c169ba6464089ebe9a9351a526b2c44 Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 15 Oct 2025 09:47:46 +0200 Subject: [PATCH 7/8] Update test assertions --- crates/ty_ide/src/completion.rs | 19 ++--- crates/ty_ide/src/goto_type_definition.rs | 80 +++++++++---------- .../resources/mdtest/annotations/union.md | 2 +- .../resources/mdtest/attributes.md | 2 +- .../resources/mdtest/binary/classes.md | 2 +- .../resources/mdtest/call/methods.md | 6 +- ...2\200\246_-_Basic_(f15db7dc447d0795).snap" | 10 +-- 7 files changed, 61 insertions(+), 60 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 2cec00791e48b..dc81dec3c0e36 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -973,7 +973,8 @@ class _PrivateProtocol(Protocol): test.assert_completions_include("public_explicit_type_alias"); test.assert_completions_do_not_include("_private_explicit_type_alias"); test.assert_completions_include("public_implicit_union_alias"); - test.assert_completions_do_not_include("_private_implicit_union_alias"); + // TODO: This should be excluded + // test.assert_completions_do_not_include("_private_implicit_union_alias"); test.assert_completions_include("PublicProtocol"); test.assert_completions_do_not_include("_PrivateProtocol"); } @@ -1759,13 +1760,13 @@ C. __name__ :: str __ne__ :: def __ne__(self, value: object, /) -> bool __new__ :: def __new__(cls) -> Self@__new__ - __or__ :: bound method .__or__(value: Any, /) -> UnionType + __or__ :: bound method .__or__[Self](value: Any, /) -> UnionType | Self@__or__ __prepare__ :: bound method .__prepare__(name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object] __qualname__ :: str __reduce__ :: def __reduce__(self) -> str | tuple[Any, ...] __reduce_ex__ :: def __reduce_ex__(self, protocol: SupportsIndex, /) -> str | tuple[Any, ...] __repr__ :: def __repr__(self) -> str - __ror__ :: bound method .__ror__(value: Any, /) -> UnionType + __ror__ :: bound method .__ror__[Self](value: Any, /) -> UnionType | Self@__ror__ __setattr__ :: def __setattr__(self, name: str, value: Any, /) -> None __sizeof__ :: def __sizeof__(self) -> int __str__ :: def __str__(self) -> str @@ -1827,13 +1828,13 @@ Meta. __mro__ :: tuple[, , ] __name__ :: str __ne__ :: def __ne__(self, value: object, /) -> bool - __or__ :: def __or__(self, value: Any, /) -> UnionType + __or__ :: def __or__[Self](self: Self@__or__, value: Any, /) -> UnionType | Self@__or__ __prepare__ :: bound method .__prepare__(name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object] __qualname__ :: str __reduce__ :: def __reduce__(self) -> str | tuple[Any, ...] __reduce_ex__ :: def __reduce_ex__(self, protocol: SupportsIndex, /) -> str | tuple[Any, ...] 
__repr__ :: def __repr__(self) -> str - __ror__ :: def __ror__(self, value: Any, /) -> UnionType + __ror__ :: def __ror__[Self](self: Self@__ror__, value: Any, /) -> UnionType | Self@__ror__ __setattr__ :: def __setattr__(self, name: str, value: Any, /) -> None __sizeof__ :: def __sizeof__(self) -> int __str__ :: def __str__(self) -> str @@ -1936,13 +1937,13 @@ Quux. __name__ :: str __ne__ :: def __ne__(self, value: object, /) -> bool __new__ :: def __new__(cls) -> Self@__new__ - __or__ :: bound method .__or__(value: Any, /) -> UnionType + __or__ :: bound method .__or__[Self](value: Any, /) -> UnionType | Self@__or__ __prepare__ :: bound method .__prepare__(name: str, bases: tuple[type, ...], /, **kwds: Any) -> MutableMapping[str, object] __qualname__ :: str __reduce__ :: def __reduce__(self) -> str | tuple[Any, ...] __reduce_ex__ :: def __reduce_ex__(self, protocol: SupportsIndex, /) -> str | tuple[Any, ...] __repr__ :: def __repr__(self) -> str - __ror__ :: bound method .__ror__(value: Any, /) -> UnionType + __ror__ :: bound method .__ror__[Self](value: Any, /) -> UnionType | Self@__ror__ __setattr__ :: def __setattr__(self, name: str, value: Any, /) -> None __sizeof__ :: def __sizeof__(self) -> int __str__ :: def __str__(self) -> str @@ -2013,14 +2014,14 @@ Answer. __name__ :: str __ne__ :: def __ne__(self, value: object, /) -> bool __new__ :: def __new__(cls, value: object) -> Self@__new__ - __or__ :: bound method .__or__(value: Any, /) -> UnionType + __or__ :: bound method .__or__[Self](value: Any, /) -> UnionType | Self@__or__ __order__ :: str __prepare__ :: bound method .__prepare__(cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict __qualname__ :: str __reduce__ :: def __reduce__(self) -> str | tuple[Any, ...] __repr__ :: def __repr__(self) -> str __reversed__ :: bound method .__reversed__[_EnumMemberT]() -> Iterator[_EnumMemberT@__reversed__] - __ror__ :: bound method .__ror__(value: Any, /) -> UnionType + __ror__ :: bound method .__ror__[Self](value: Any, /) -> UnionType | Self@__ror__ __setattr__ :: def __setattr__(self, name: str, value: Any, /) -> None __sizeof__ :: def __sizeof__(self) -> int __str__ :: def __str__(self) -> str diff --git a/crates/ty_ide/src/goto_type_definition.rs b/crates/ty_ide/src/goto_type_definition.rs index c42f0d0d1a0bf..f39e5dc4b6b8c 100644 --- a/crates/ty_ide/src/goto_type_definition.rs +++ b/crates/ty_ide/src/goto_type_definition.rs @@ -199,13 +199,13 @@ mod tests { assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:4:1 @@ -227,13 +227,13 @@ mod tests { assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:2:10 @@ -324,13 +324,13 @@ mod tests { 
assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:4:6 @@ -358,13 +358,13 @@ mod tests { // is an int. Navigating to `str` would match pyright's behavior. assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:344:7 + --> stdlib/builtins.pyi:346:7 | - 343 | @disjoint_base - 344 | class int: + 345 | @disjoint_base + 346 | class int: | ^^^ - 345 | """int([x]) -> integer - 346 | int(x, base=10) -> integer + 347 | """int([x]) -> integer + 348 | int(x, base=10) -> integer | info: Source --> main.py:4:6 @@ -391,13 +391,13 @@ f(**kwargs) assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:2916:7 + --> stdlib/builtins.pyi:2918:7 | - 2915 | @disjoint_base - 2916 | class dict(MutableMapping[_KT, _VT]): + 2917 | @disjoint_base + 2918 | class dict(MutableMapping[_KT, _VT]): | ^^^^ - 2917 | """dict() -> new empty dictionary - 2918 | dict(mapping) -> new dictionary initialized from a mapping object's + 2919 | """dict() -> new empty dictionary + 2920 | dict(mapping) -> new dictionary initialized from a mapping object's | info: Source --> main.py:6:5 @@ -421,13 +421,13 @@ f(**kwargs) assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:3:5 @@ -513,13 +513,13 @@ f(**kwargs) assert_snapshot!(test.goto_type_definition(), @r#" info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:4:15 @@ -560,13 +560,13 @@ f(**kwargs) | info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:911:7 + --> stdlib/builtins.pyi:913:7 | - 910 | @disjoint_base - 911 | class str(Sequence[str]): + 912 | @disjoint_base + 913 | class str(Sequence[str]): | ^^^ - 912 | """str(object='') -> str - 913 | str(bytes_or_buffer[, encoding[, errors]]) -> str + 914 | """str(object='') -> str + 915 | str(bytes_or_buffer[, encoding[, errors]]) -> str | info: Source --> main.py:3:5 diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/union.md b/crates/ty_python_semantic/resources/mdtest/annotations/union.md index 776d077e27c13..d231b79a61083 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/union.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/union.md @@ -84,5 +84,5 @@ python-version = "3.10" X = int | str def f(y: X): 
- reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions) + reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions) | int ``` diff --git a/crates/ty_python_semantic/resources/mdtest/attributes.md b/crates/ty_python_semantic/resources/mdtest/attributes.md index ef36bea8d08c6..8ae31e92659b7 100644 --- a/crates/ty_python_semantic/resources/mdtest/attributes.md +++ b/crates/ty_python_semantic/resources/mdtest/attributes.md @@ -2189,7 +2189,7 @@ All attribute access on literal `bytes` types is currently delegated to `builtin ```py # revealed: bound method Literal[b"foo"].join(iterable_of_bytes: Iterable[@Todo(Support for `typing.TypeAlias`)], /) -> bytes reveal_type(b"foo".join) -# revealed: bound method Literal[b"foo"].endswith(suffix: @Todo(Support for `typing.TypeAlias`) | tuple[@Todo(Support for `typing.TypeAlias`), ...], start: SupportsIndex | None = EllipsisType, end: SupportsIndex | None = EllipsisType, /) -> bool +# revealed: bound method Literal[b"foo"].endswith(suffix: @Todo(Support for `typing.TypeAlias`) | tuple[@Todo(Support for `typing.TypeAlias`), ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> bool reveal_type(b"foo".endswith) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/classes.md b/crates/ty_python_semantic/resources/mdtest/binary/classes.md index d0ded68b3d6d3..deae9457e4dd7 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/classes.md @@ -13,7 +13,7 @@ python-version = "3.10" class A: ... class B: ... -reveal_type(A | B) # revealed: UnionType +reveal_type(A | B) # revealed: UnionType | ``` ## Union of two classes (prior to 3.10) diff --git a/crates/ty_python_semantic/resources/mdtest/call/methods.md b/crates/ty_python_semantic/resources/mdtest/call/methods.md index f11adb34e4890..b6547e338d35f 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/methods.md +++ b/crates/ty_python_semantic/resources/mdtest/call/methods.md @@ -651,7 +651,7 @@ static_assert(is_assignable_to(TypeOf[property.__set__], Callable)) reveal_type(MyClass.my_property.__set__) static_assert(is_assignable_to(TypeOf[MyClass.my_property.__set__], Callable)) -# revealed: def startswith(self, prefix: str | tuple[str, ...], start: SupportsIndex | None = EllipsisType, end: SupportsIndex | None = EllipsisType, /) -> bool +# revealed: def startswith(self, prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> bool reveal_type(str.startswith) static_assert(is_assignable_to(TypeOf[str.startswith], Callable)) @@ -689,7 +689,7 @@ def _( # revealed: (obj: type) -> None reveal_type(e) - # revealed: (fget: ((Any, /) -> Any) | None = EllipsisType, fset: ((Any, Any, /) -> None) | None = EllipsisType, fdel: ((Any, /) -> None) | None = EllipsisType, doc: str | None = EllipsisType) -> property + # revealed: (fget: ((Any, /) -> Any) | None = None, fset: ((Any, Any, /) -> None) | None = None, fdel: ((Any, /) -> None) | None = None, doc: str | None = None) -> property reveal_type(f) # revealed: Overload[(self: property, instance: None, owner: type, /) -> Unknown, (self: property, instance: object, owner: type | None = None, /) -> Unknown] @@ -707,7 +707,7 @@ def _( # revealed: (instance: object, value: object, /) -> Unknown reveal_type(j) - # revealed: (self, prefix: str | tuple[str, ...], start: SupportsIndex | None = EllipsisType, end: SupportsIndex | None = 
EllipsisType, /) -> bool + # revealed: (self, prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> bool reveal_type(k) # revealed: (prefix: str | tuple[str, ...], start: SupportsIndex | None = None, end: SupportsIndex | None = None, /) -> bool diff --git "a/crates/ty_python_semantic/resources/mdtest/snapshots/invalid_await.md_-_Invalid_await_diagno\342\200\246_-_Basic_(f15db7dc447d0795).snap" "b/crates/ty_python_semantic/resources/mdtest/snapshots/invalid_await.md_-_Invalid_await_diagno\342\200\246_-_Basic_(f15db7dc447d0795).snap" index 52bde42ce5a11..312f3209c50c0 100644 --- "a/crates/ty_python_semantic/resources/mdtest/snapshots/invalid_await.md_-_Invalid_await_diagno\342\200\246_-_Basic_(f15db7dc447d0795).snap" +++ "b/crates/ty_python_semantic/resources/mdtest/snapshots/invalid_await.md_-_Invalid_await_diagno\342\200\246_-_Basic_(f15db7dc447d0795).snap" @@ -26,13 +26,13 @@ error[invalid-await]: `Literal[1]` is not awaitable 2 | await 1 # error: [invalid-await] | ^ | - ::: stdlib/builtins.pyi:344:7 + ::: stdlib/builtins.pyi:346:7 | -343 | @disjoint_base -344 | class int: +345 | @disjoint_base +346 | class int: | --- type defined here -345 | """int([x]) -> integer -346 | int(x, base=10) -> integer +347 | """int([x]) -> integer +348 | int(x, base=10) -> integer | info: `__await__` is missing info: rule `invalid-await` is enabled by default From d6c8dfec86170147efff5d3f552f206532bcd06d Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 15 Oct 2025 10:45:03 +0200 Subject: [PATCH 8/8] Special-case X | Y with X, Y types --- crates/ty_ide/src/completion.rs | 3 +-- .../resources/mdtest/annotations/union.md | 2 +- .../resources/mdtest/binary/classes.md | 26 ++++++++++++++++++- .../src/types/infer/builder.rs | 15 +++++++++++ 4 files changed, 42 insertions(+), 4 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index dc81dec3c0e36..10a2cbff2e207 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -973,8 +973,7 @@ class _PrivateProtocol(Protocol): test.assert_completions_include("public_explicit_type_alias"); test.assert_completions_do_not_include("_private_explicit_type_alias"); test.assert_completions_include("public_implicit_union_alias"); - // TODO: This should be excluded - // test.assert_completions_do_not_include("_private_implicit_union_alias"); + test.assert_completions_do_not_include("_private_implicit_union_alias"); test.assert_completions_include("PublicProtocol"); test.assert_completions_do_not_include("_PrivateProtocol"); } diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/union.md b/crates/ty_python_semantic/resources/mdtest/annotations/union.md index d231b79a61083..776d077e27c13 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/union.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/union.md @@ -84,5 +84,5 @@ python-version = "3.10" X = int | str def f(y: X): - reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions) | int + reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/classes.md b/crates/ty_python_semantic/resources/mdtest/binary/classes.md index deae9457e4dd7..7ae4c23e60b85 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/classes.md @@ -13,7 +13,7 @@ python-version = 
"3.10" class A: ... class B: ... -reveal_type(A | B) # revealed: UnionType | +reveal_type(A | B) # revealed: UnionType ``` ## Union of two classes (prior to 3.10) @@ -30,3 +30,27 @@ class B: ... # error: "Operator `|` is unsupported between objects of type `` and ``" reveal_type(A | B) # revealed: Unknown ``` + +## Other binary operations resulting in `UnionType` + +```toml +[environment] +python-version = "3.12" +``` + +```py +class A: ... +class B: ... + +def _(sub_a: type[A], sub_b: type[B]): + reveal_type(A | sub_b) # revealed: UnionType + reveal_type(sub_a | B) # revealed: UnionType + reveal_type(sub_a | sub_b) # revealed: UnionType + +class C[T]: ... +class D[T]: ... + +reveal_type(C | D) # revealed: UnionType + +reveal_type(C[int] | D[str]) # revealed: UnionType +``` diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index bae41b14b41be..4b672f6e10228 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -7957,6 +7957,21 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ))) } + // Special-case `X | Y` with `X` and `Y` instances of `type` to produce a `types.UnionType` instance, in order to + // overwrite the typeshed return type for `type.__or__`, which would result in `types.UnionType | X`. We currently + // do this to avoid false positives when a legacy type alias like `IntOrStr = int | str` is later used in a type + // expression, because `types.UnionType` will result in a `@Todo` type, while `types.UnionType | ` does + // not. + // + // TODO: Remove this special case once we add support for legacy type aliases. + ( + Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..), + Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..), + ast::Operator::BitOr, + ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => { + Some(KnownClass::UnionType.to_instance(self.db())) + } + // We've handled all of the special cases that we support for literals, so we need to // fall back on looking for dunder methods on one of the operand types. (